Reland of Combine webrtc/api/java/android and webrtc/api/java/src. (patchset #1 id:1 of https://codereview.webrtc.org/2106333005/ )
Reason for revert:
Issues fixed
Original issue's description:
> Revert of Combine webrtc/api/java/android and webrtc/api/java/src. (patchset #1 id:1 of https://codereview.webrtc.org/2111823002/ )
>
> Reason for revert:
> Breaks downstream dependencies
>
> Original issue's description:
> > Combine webrtc/api/java/android and webrtc/api/java/src.
> >
> > It used to be that there was a Java api for devices not running Android
> > but that is no longer the case. I combined the directories and made
> > the folder structure chromium style.
> >
> > BUG=webrtc:6067
> > R=magjed@webrtc.org, tommi@webrtc.org
> >
> > Committed: https://chromium.googlesource.com/external/webrtc/+/ceefe20dd65387d83059d9fc1ce84842650ed5e2
>
> TBR=magjed@webrtc.org,tommi@webrtc.org
> # Skipping CQ checks because original CL landed less than 1 day ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=webrtc:6067
>
> Committed: https://chromium.googlesource.com/external/webrtc/+/9b0dc622d46b6b09bb7ccaca6b2f68739c87ff8d
TBR=magjed@webrtc.org,tommi@webrtc.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:6067
Review-Url: https://codereview.webrtc.org/2111923003
Cr-Commit-Position: refs/heads/master@{#13363}
diff --git a/webrtc/api/android/README b/webrtc/api/android/README
new file mode 100644
index 0000000..f367556
--- /dev/null
+++ b/webrtc/api/android/README
@@ -0,0 +1,10 @@
+This directory holds a Java implementation of the webrtc::PeerConnection API, as
+well as the JNI glue C++ code that lets the Java implementation reuse the C++
+implementation of the same API.
+
+To build the Java API and related tests, build with OS=android in $GYP_DEFINES.
+
+To use the Java API, start by looking at the public interface of
+org.webrtc.PeerConnection{,Factory} and the org.webrtc.PeerConnectionTest.
+
+To understand the implementation of the API, see the native code in jni/.
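+
+A minimal sketch of typical usage (illustrative only; the exact factory and
+constraint setup varies by release, so treat the classes above as the
+authoritative API):
+
+  PeerConnectionFactory factory = new PeerConnectionFactory();
+  AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
+  AudioTrack audioTrack = factory.createAudioTrack("audio0", audioSource);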
diff --git a/webrtc/api/android/java/src/org/webrtc/AudioSource.java b/webrtc/api/android/java/src/org/webrtc/AudioSource.java
new file mode 100644
index 0000000..99fcad1
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/AudioSource.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Java wrapper for a C++ AudioSourceInterface. Used as the source for one or
+ * more {@code AudioTrack} objects.
+ */
+public class AudioSource extends MediaSource {
+ public AudioSource(long nativeSource) {
+ super(nativeSource);
+ }
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/AudioTrack.java b/webrtc/api/android/java/src/org/webrtc/AudioTrack.java
new file mode 100644
index 0000000..c33a9d8
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/AudioTrack.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ AudioTrackInterface */
+public class AudioTrack extends MediaStreamTrack {
+ public AudioTrack(long nativeTrack) {
+ super(nativeTrack);
+ }
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/CallSessionFileRotatingLogSink.java b/webrtc/api/android/java/src/org/webrtc/CallSessionFileRotatingLogSink.java
new file mode 100644
index 0000000..47b4641
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/CallSessionFileRotatingLogSink.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
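+/**
+ * Writes call-session logs to a set of rotating files via the native log sink.
+ * A minimal usage sketch (the directory path and max file size are
+ * illustrative):
+ *
+ * <pre>
+ * CallSessionFileRotatingLogSink sink = new CallSessionFileRotatingLogSink(
+ *     logDir, 10 * 1024 * 1024, Logging.Severity.LS_INFO);
+ * // ... logs are captured while the call runs ...
+ * sink.dispose();
+ * byte[] logData = CallSessionFileRotatingLogSink.getLogData(logDir);
+ * </pre>
+ */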
+public class CallSessionFileRotatingLogSink {
+ static {
+ System.loadLibrary("jingle_peerconnection_so");
+ }
+
+ private long nativeSink;
+
+ public static byte[] getLogData(String dirPath) {
+ return nativeGetLogData(dirPath);
+ }
+
+ public CallSessionFileRotatingLogSink(
+ String dirPath, int maxFileSize, Logging.Severity severity) {
+ nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
+ }
+
+ public void dispose() {
+ if (nativeSink != 0) {
+ nativeDeleteSink(nativeSink);
+ nativeSink = 0;
+ }
+ }
+
+ private static native long nativeAddSink(
+ String dirPath, int maxFileSize, int severity);
+ private static native void nativeDeleteSink(long nativeSink);
+ private static native byte[] nativeGetLogData(String dirPath);
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/Camera1Enumerator.java b/webrtc/api/android/java/src/org/webrtc/Camera1Enumerator.java
new file mode 100644
index 0000000..c10d974
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/Camera1Enumerator.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import android.os.SystemClock;
+
+import java.util.ArrayList;
+import java.util.List;
+
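+/**
+ * Enumerates cameras available through the deprecated android.hardware.Camera
+ * API. A minimal usage sketch (the front-camera preference is illustrative):
+ *
+ * <pre>
+ * CameraEnumerator enumerator = new Camera1Enumerator();
+ * for (String deviceName : enumerator.getDeviceNames()) {
+ *   if (enumerator.isFrontFacing(deviceName)) {
+ *     CameraVideoCapturer capturer = enumerator.createCapturer(deviceName, null);
+ *     // Use |capturer| as the source of a video track.
+ *     break;
+ *   }
+ * }
+ * </pre>
+ */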
+@SuppressWarnings("deprecation")
+public class Camera1Enumerator implements CameraEnumerator {
+  private static final String TAG = "Camera1Enumerator";
+  // Each entry contains the supported formats for the corresponding camera index. The formats
+  // for all cameras are enumerated on the first call to getSupportedFormats(), and cached for
+  // future reference.
+ private static List<List<CaptureFormat>> cachedSupportedFormats;
+
+ private final boolean captureToTexture;
+
+ public Camera1Enumerator() {
+ this(true /* captureToTexture */);
+ }
+
+ public Camera1Enumerator(boolean captureToTexture) {
+ this.captureToTexture = captureToTexture;
+ }
+
+ // Returns device names that can be used to create a new VideoCapturerAndroid.
+ @Override
+ public String[] getDeviceNames() {
+ String[] names = new String[android.hardware.Camera.getNumberOfCameras()];
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ names[i] = getDeviceName(i);
+ }
+ return names;
+ }
+
+ @Override
+ public boolean isFrontFacing(String deviceName) {
+ android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+ return info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
+ }
+
+ @Override
+ public boolean isBackFacing(String deviceName) {
+ android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
+ return info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
+ }
+
+ @Override
+ public CameraVideoCapturer createCapturer(String deviceName,
+ CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+ return new VideoCapturerAndroid(deviceName, eventsHandler, captureToTexture);
+ }
+
+ private static android.hardware.Camera.CameraInfo getCameraInfo(int index) {
+ android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+ try {
+ android.hardware.Camera.getCameraInfo(index, info);
+ } catch (Exception e) {
+      Logging.e(TAG, "getCameraInfo failed on index " + index, e);
+ return null;
+ }
+ return info;
+ }
+
+ static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
+ if (cachedSupportedFormats == null) {
+ cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
+ for (int i = 0; i < CameraEnumerationAndroid.getDeviceCount(); ++i) {
+ cachedSupportedFormats.add(enumerateFormats(i));
+ }
+ }
+ return cachedSupportedFormats.get(cameraId);
+ }
+
+ private static List<CaptureFormat> enumerateFormats(int cameraId) {
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+ final long startTimeMs = SystemClock.elapsedRealtime();
+ final android.hardware.Camera.Parameters parameters;
+ android.hardware.Camera camera = null;
+ try {
+ Logging.d(TAG, "Opening camera with index " + cameraId);
+ camera = android.hardware.Camera.open(cameraId);
+ parameters = camera.getParameters();
+ } catch (RuntimeException e) {
+ Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
+ return new ArrayList<CaptureFormat>();
+ } finally {
+ if (camera != null) {
+ camera.release();
+ }
+ }
+
+ final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+ try {
+ int minFps = 0;
+ int maxFps = 0;
+ final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
+ if (listFpsRange != null) {
+ // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
+ // corresponding to the highest fps.
+ final int[] range = listFpsRange.get(listFpsRange.size() - 1);
+ minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+ maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+ }
+ for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
+ formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
+ }
+ } catch (Exception e) {
+ Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
+ }
+
+ final long endTimeMs = SystemClock.elapsedRealtime();
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ return formatList;
+ }
+
+ // Convert from android.hardware.Camera.Size to Size.
+ static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
+ final List<Size> sizes = new ArrayList<Size>();
+ for (android.hardware.Camera.Size size : cameraSizes) {
+ sizes.add(new Size(size.width, size.height));
+ }
+ return sizes;
+ }
+
+ // Convert from int[2] to CaptureFormat.FramerateRange.
+ static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
+ final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+ for (int[] range : arrayRanges) {
+ ranges.add(new CaptureFormat.FramerateRange(
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
+ }
+ return ranges;
+ }
+
+ // Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException
+ // if no such camera can be found.
+ static int getCameraIndex(String deviceName) {
+ Logging.d(TAG, "getCameraIndex: " + deviceName);
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ if (deviceName.equals(CameraEnumerationAndroid.getDeviceName(i))) {
+ return i;
+ }
+ }
+ throw new IllegalArgumentException("No such camera: " + deviceName);
+ }
+
+  // Returns the name of the camera with camera index. Returns null if the
+  // camera cannot be used.
+  static String getDeviceName(int index) {
+    android.hardware.Camera.CameraInfo info = getCameraInfo(index);
+    if (info == null) {
+      return null;
+    }
+
+ String facing =
+ (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
+ return "Camera " + index + ", Facing " + facing
+ + ", Orientation " + info.orientation;
+ }
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/Camera2Capturer.java b/webrtc/api/android/java/src/org/webrtc/Camera2Capturer.java
new file mode 100644
index 0000000..8e44d69
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/Camera2Capturer.java
@@ -0,0 +1,926 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.os.Handler;
+import android.os.SystemClock;
+import android.util.Range;
+import android.view.Surface;
+import android.view.WindowManager;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Semaphore;
+
+@TargetApi(21)
+public class Camera2Capturer implements
+ CameraVideoCapturer,
+ SurfaceTextureHelper.OnTextureFrameAvailableListener {
+  private static final String TAG = "Camera2Capturer";
+
+  private static final int MAX_OPEN_CAMERA_ATTEMPTS = 3;
+  private static final int OPEN_CAMERA_DELAY_MS = 500;
+  private static final int STOP_TIMEOUT = 10000;
+  private static final int START_TIMEOUT = 10000;
+  private static final Object STOP_TIMEOUT_RUNNABLE_TOKEN = new Object();
+
+ // In the Camera2 API, starting a camera is inherently asynchronous, and this state is
+ // represented with 'STARTING'. Stopping is also asynchronous and this state is 'STOPPING'.
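+  // Valid transitions, as enforced by setCameraState():
+  //   IDLE -> STARTING -> RUNNING -> STOPPING -> IDLE, plus STARTING -> STOPPING
+  //   when starting the camera fails.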
+ private static enum CameraState { IDLE, STARTING, RUNNING, STOPPING }
+
+ // Thread safe objects.
+ // --------------------
+ private final CameraManager cameraManager;
+ private final CameraEventsHandler eventsHandler;
+
+ // Shared state - guarded by cameraStateLock. Will only be edited from camera thread (when it is
+ // running).
+ // ---------------------------------------------------------------------------------------------
+ private final Object cameraStateLock = new Object();
+ private CameraState cameraState = CameraState.IDLE;
+ // |cameraThreadHandler| must be synchronized on |cameraStateLock| when not on the camera thread,
+ // or when modifying the reference. Use postOnCameraThread() instead of posting directly to
+  // the handler - this way all callbacks with a specified token can be removed at once.
+ // |cameraThreadHandler| must be null if and only if CameraState is IDLE.
+ private Handler cameraThreadHandler;
+ // Remember the requested format in case we want to switch cameras.
+ private int requestedWidth;
+ private int requestedHeight;
+ private int requestedFramerate;
+
+ // Will only be edited while camera state is IDLE and cameraStateLock is acquired.
+ private String cameraName;
+ private boolean isFrontCamera;
+ private int cameraOrientation;
+
+ // Semaphore for allowing only one switch at a time.
+ private final Semaphore pendingCameraSwitchSemaphore = new Semaphore(1);
+ // Guarded by pendingCameraSwitchSemaphore
+ private CameraSwitchHandler switchEventsHandler;
+
+ // Internal state - must only be modified from camera thread
+ // ---------------------------------------------------------
+ private CaptureFormat captureFormat;
+ private Context applicationContext;
+ private CapturerObserver capturerObserver;
+ private CameraStatistics cameraStatistics;
+ private SurfaceTextureHelper surfaceTextureHelper;
+ private CameraCaptureSession captureSession;
+ private Surface surface;
+ private CameraDevice cameraDevice;
+ private CameraStateCallback cameraStateCallback;
+
+ // Factor to convert between Android framerates and CaptureFormat.FramerateRange. It will be
+ // either 1 or 1000.
+ private int fpsUnitFactor;
+ private boolean firstFrameReported;
+ private int consecutiveCameraOpenFailures;
+
+ public Camera2Capturer(
+ Context context, String cameraName, CameraEventsHandler eventsHandler) {
+ Logging.d(TAG, "Camera2Capturer ctor, camera name: " + cameraName);
+ this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ this.eventsHandler = eventsHandler;
+
+ setCameraName(cameraName);
+ }
+
+  /**
+   * Helper method for checking that the method is executed on the camera thread. Also allows
+   * calls from other threads if the camera is closed.
+   */
+ private void checkIsOnCameraThread() {
+ if (cameraState == CameraState.IDLE) {
+ return;
+ }
+
+ checkIsStrictlyOnCameraThread();
+ }
+
+ /**
+ * Like checkIsOnCameraThread but doesn't allow the camera to be stopped.
+ */
+ private void checkIsStrictlyOnCameraThread() {
+ if (cameraThreadHandler == null) {
+ throw new IllegalStateException("Camera is closed.");
+ }
+
+ if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+
+  /**
+   * Checks that the method is not invoked on the camera thread. Used in functions waiting for
+   * the camera state to change, since executing them on the camera thread would cause a deadlock.
+   */
+ private void checkNotOnCameraThread() {
+ if (cameraThreadHandler == null) {
+ return;
+ }
+
+ if (Thread.currentThread() == cameraThreadHandler.getLooper().getThread()) {
+ throw new IllegalStateException(
+ "Method waiting for camera state to change executed on camera thread");
+ }
+ }
+
+ private void waitForCameraToExitTransitionalState(
+ CameraState transitionalState, long timeoutMs) {
+ checkNotOnCameraThread();
+
+ // We probably should already have the lock when this is called but acquire it in case
+ // we don't have it.
+ synchronized (cameraStateLock) {
+ long timeoutAt = SystemClock.uptimeMillis() + timeoutMs;
+
+ while (cameraState == transitionalState) {
+ Logging.d(TAG, "waitForCameraToExitTransitionalState waiting: "
+ + cameraState);
+
+ long timeLeft = timeoutAt - SystemClock.uptimeMillis();
+
+ if (timeLeft <= 0) {
+ Logging.e(TAG, "Camera failed to exit transitional state " + transitionalState
+ + " within the time limit.");
+ break;
+ }
+
+ try {
+ cameraStateLock.wait(timeLeft);
+        } catch (InterruptedException e) {
+          Logging.w(TAG, "Interrupted while waiting to exit transitional state "
+              + transitionalState + ", ignoring: " + e);
+        }
+ }
+ }
+ }
+
+ /**
+ * Waits until camera state is not STOPPING.
+ */
+ private void waitForCameraToStopIfStopping() {
+ waitForCameraToExitTransitionalState(CameraState.STOPPING, STOP_TIMEOUT);
+ }
+
+ /**
+   * Waits until camera state is not STARTING.
+ */
+ private void waitForCameraToStartIfStarting() {
+ waitForCameraToExitTransitionalState(CameraState.STARTING, START_TIMEOUT);
+ }
+
+ /**
+ * Sets the name of the camera. Camera must be stopped or stopping when this is called.
+ */
+ private void setCameraName(String cameraName) {
+ final CameraCharacteristics characteristics;
+ try {
+ final String[] cameraIds = cameraManager.getCameraIdList();
+
+ if (cameraName.isEmpty() && cameraIds.length != 0) {
+ cameraName = cameraIds[0];
+ }
+
+ if (!Arrays.asList(cameraIds).contains(cameraName)) {
+        throw new IllegalArgumentException(
+            "Camera name: " + cameraName + " does not match any known camera device.");
+ }
+
+ characteristics = cameraManager.getCameraCharacteristics(cameraName);
+ } catch (CameraAccessException e) {
+ throw new RuntimeException("Camera access exception: " + e);
+ }
+
+ synchronized (cameraStateLock) {
+ waitForCameraToStopIfStopping();
+
+ if (cameraState != CameraState.IDLE) {
+ throw new RuntimeException("Changing camera name on running camera.");
+ }
+
+ // Note: Usually changing camera state from outside camera thread is not allowed. It is
+ // allowed here because camera is not running.
+ this.cameraName = cameraName;
+ isFrontCamera = characteristics.get(CameraCharacteristics.LENS_FACING)
+ == CameraMetadata.LENS_FACING_FRONT;
+
+ /*
+ * Clockwise angle through which the output image needs to be rotated to be upright on the
+ * device screen in its native orientation.
+ * Also defines the direction of rolling shutter readout, which is from top to bottom in the
+ * sensor's coordinate system.
+ * Units: Degrees of clockwise rotation; always a multiple of 90
+ */
+ cameraOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ }
+ }
+
+ /**
+ * Triggers appropriate error handlers based on the camera state. Must be called on the camera
+ * thread and camera must not be stopped.
+ */
+ private void reportError(String errorDescription) {
+ checkIsStrictlyOnCameraThread();
+ Logging.e(TAG, "Error in camera at state " + cameraState + ": " + errorDescription);
+
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError(errorDescription);
+ switchEventsHandler = null;
+ pendingCameraSwitchSemaphore.release();
+ }
+
+ switch (cameraState) {
+ case STARTING:
+ capturerObserver.onCapturerStarted(false /* success */);
+ // fall through
+ case RUNNING:
+ if (eventsHandler != null) {
+ eventsHandler.onCameraError(errorDescription);
+ }
+ break;
+ case STOPPING:
+ setCameraState(CameraState.IDLE);
+ Logging.e(TAG, "Closing camera failed: " + errorDescription);
+ return; // We don't want to call closeAndRelease in this case.
+ default:
+ throw new RuntimeException("Unknown camera state: " + cameraState);
+ }
+ closeAndRelease();
+ }
+
+ private void closeAndRelease() {
+ checkIsStrictlyOnCameraThread();
+
+ Logging.d(TAG, "Close and release.");
+ setCameraState(CameraState.STOPPING);
+
+ // Remove all pending Runnables posted from |this|.
+ cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
+ applicationContext = null;
+ capturerObserver = null;
+ if (cameraStatistics != null) {
+ cameraStatistics.release();
+ cameraStatistics = null;
+ }
+ if (surfaceTextureHelper != null) {
+ surfaceTextureHelper.stopListening();
+ surfaceTextureHelper = null;
+ }
+ if (captureSession != null) {
+ captureSession.close();
+ captureSession = null;
+ }
+ if (surface != null) {
+ surface.release();
+ surface = null;
+ }
+ if (cameraDevice != null) {
+ // Add a timeout for stopping the camera.
+ cameraThreadHandler.postAtTime(new Runnable() {
+ @Override
+ public void run() {
+ Logging.e(TAG, "Camera failed to stop within the timeout. Force stopping.");
+ setCameraState(CameraState.IDLE);
+ if (eventsHandler != null) {
+ eventsHandler.onCameraError("Camera failed to stop (timeout).");
+ }
+ }
+ }, STOP_TIMEOUT_RUNNABLE_TOKEN, SystemClock.uptimeMillis() + STOP_TIMEOUT);
+
+ cameraDevice.close();
+ cameraDevice = null;
+ } else {
+ Logging.w(TAG, "closeAndRelease called while cameraDevice is null");
+ setCameraState(CameraState.IDLE);
+ }
+ this.cameraStateCallback = null;
+ }
+
+ /**
+ * Sets the camera state while ensuring constraints are followed.
+ */
+ private void setCameraState(CameraState newState) {
+ // State must only be modified on the camera thread. It can be edited from other threads
+ // if cameraState is IDLE since there is no camera thread.
+ checkIsOnCameraThread();
+
+ if (newState != CameraState.IDLE) {
+ if (cameraThreadHandler == null) {
+ throw new IllegalStateException(
+ "cameraThreadHandler must be null if and only if CameraState is IDLE.");
+ }
+ } else {
+ cameraThreadHandler = null;
+ }
+
+ switch (newState) {
+ case STARTING:
+ if (cameraState != CameraState.IDLE) {
+ throw new IllegalStateException("Only stopped camera can start.");
+ }
+ break;
+ case RUNNING:
+ if (cameraState != CameraState.STARTING) {
+ throw new IllegalStateException("Only starting camera can go to running state.");
+ }
+ break;
+ case STOPPING:
+ if (cameraState != CameraState.STARTING && cameraState != CameraState.RUNNING) {
+ throw new IllegalStateException("Only starting or running camera can stop.");
+ }
+ break;
+ case IDLE:
+ if (cameraState != CameraState.STOPPING) {
+ throw new IllegalStateException("Only stopping camera can go to idle state.");
+ }
+ break;
+ default:
+ throw new RuntimeException("Unknown camera state: " + newState);
+ }
+
+ synchronized (cameraStateLock) {
+ cameraState = newState;
+ cameraStateLock.notifyAll();
+ }
+ }
+
+ /**
+ * Internal method for opening the camera. Must be called on the camera thread.
+ */
+ private void openCamera() {
+ try {
+ checkIsStrictlyOnCameraThread();
+
+ if (cameraState != CameraState.STARTING) {
+ throw new IllegalStateException("Camera should be in state STARTING in openCamera.");
+ }
+
+ if (cameraThreadHandler == null) {
+ throw new RuntimeException("Someone set cameraThreadHandler to null while the camera "
+ + "state was STARTING. This should never happen");
+ }
+
+ // Camera is in state STARTING so cameraName will not be edited.
+ cameraManager.openCamera(cameraName, cameraStateCallback, cameraThreadHandler);
+ } catch (CameraAccessException e) {
+ reportError("Failed to open camera: " + e);
+ }
+ }
+
+ private void startCaptureOnCameraThread(
+ final int requestedWidth, final int requestedHeight, final int requestedFramerate,
+ final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
+ final CapturerObserver capturerObserver) {
+ checkIsStrictlyOnCameraThread();
+
+ firstFrameReported = false;
+ consecutiveCameraOpenFailures = 0;
+
+ this.applicationContext = applicationContext;
+ this.capturerObserver = capturerObserver;
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.cameraStateCallback = new CameraStateCallback();
+
+ synchronized (cameraStateLock) {
+ // Remember the requested format in case we want to switch cameras.
+ this.requestedWidth = requestedWidth;
+ this.requestedHeight = requestedHeight;
+ this.requestedFramerate = requestedFramerate;
+ }
+
+ final CameraCharacteristics cameraCharacteristics;
+ try {
+ // Camera is in state STARTING so cameraName will not be edited.
+ cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraName);
+ } catch (CameraAccessException e) {
+ reportError("getCameraCharacteristics(): " + e.getMessage());
+ return;
+ }
+
+ List<CaptureFormat.FramerateRange> framerateRanges =
+ Camera2Enumerator.getSupportedFramerateRanges(cameraCharacteristics);
+ List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
+
+    if (framerateRanges.isEmpty() || sizes.isEmpty()) {
+      reportError("No supported capture formats.");
+      return;
+    }
+
+ // Some LEGACY camera implementations use fps rates that are multiplied with 1000. Make sure
+ // all values are multiplied with 1000 for consistency.
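+    // For example, a range of [15, 30] becomes [15000, 30000], so that all ranges use the same
+    // units.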
+ this.fpsUnitFactor = (framerateRanges.get(0).max > 1000) ? 1 : 1000;
+
+ final CaptureFormat.FramerateRange bestFpsRange =
+ CameraEnumerationAndroid.getClosestSupportedFramerateRange(
+ framerateRanges, requestedFramerate);
+
+ final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(
+ sizes, requestedWidth, requestedHeight);
+
+ this.captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
+ Logging.d(TAG, "Using capture format: " + captureFormat);
+
+ Logging.d(TAG, "Opening camera " + cameraName);
+ if (eventsHandler != null) {
+ int cameraIndex = -1;
+ try {
+ cameraIndex = Integer.parseInt(cameraName);
+ } catch (NumberFormatException e) {
+ Logging.d(TAG, "External camera with non-int identifier: " + cameraName);
+ }
+ eventsHandler.onCameraOpening(cameraIndex);
+ }
+
+ openCamera();
+ }
+
+ /**
+ * Starts capture using specified settings. This is automatically called for you by
+ * VideoCapturerTrackSource if you are just using the camera as source for video track.
+ */
+ @Override
+ public void startCapture(
+ final int requestedWidth, final int requestedHeight, final int requestedFramerate,
+ final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
+ final CapturerObserver capturerObserver) {
+ Logging.d(TAG, "startCapture requested: " + requestedWidth + "x" + requestedHeight
+ + "@" + requestedFramerate);
+ if (surfaceTextureHelper == null) {
+ throw new IllegalArgumentException("surfaceTextureHelper not set.");
+ }
+ if (applicationContext == null) {
+ throw new IllegalArgumentException("applicationContext not set.");
+ }
+ if (capturerObserver == null) {
+ throw new IllegalArgumentException("capturerObserver not set.");
+ }
+
+ synchronized (cameraStateLock) {
+ waitForCameraToStopIfStopping();
+ if (cameraState != CameraState.IDLE) {
+ Logging.e(TAG, "Unexpected camera state for startCapture: " + cameraState);
+ return;
+ }
+ this.cameraThreadHandler = surfaceTextureHelper.getHandler();
+ setCameraState(CameraState.STARTING);
+ }
+
+ postOnCameraThread(new Runnable() {
+ @Override
+ public void run() {
+ startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate,
+ surfaceTextureHelper, applicationContext, capturerObserver);
+ }
+ });
+ }
+
+ final class CameraStateCallback extends CameraDevice.StateCallback {
+ private String getErrorDescription(int errorCode) {
+ switch (errorCode) {
+ case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
+ return "Camera device has encountered a fatal error.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
+ return "Camera device could not be opened due to a device policy.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
+ return "Camera device is in use already.";
+ case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
+ return "Camera service has encountered a fatal error.";
+ case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
+ return "Camera device could not be opened because"
+ + " there are too many other open camera devices.";
+ default:
+ return "Unknown camera error: " + errorCode;
+ }
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice camera) {
+ checkIsStrictlyOnCameraThread();
+ cameraDevice = camera;
+ reportError("Camera disconnected.");
+ }
+
+ @Override
+ public void onError(CameraDevice camera, int errorCode) {
+ checkIsStrictlyOnCameraThread();
+ cameraDevice = camera;
+
+ if (cameraState == CameraState.STARTING && (
+ errorCode == CameraDevice.StateCallback.ERROR_CAMERA_IN_USE ||
+ errorCode == CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE)) {
+ consecutiveCameraOpenFailures++;
+
+ if (consecutiveCameraOpenFailures < MAX_OPEN_CAMERA_ATTEMPTS) {
+ Logging.w(TAG, "Opening camera failed, trying again: " + getErrorDescription(errorCode));
+
+          postDelayedOnCameraThread(OPEN_CAMERA_DELAY_MS, new Runnable() {
+            @Override
+            public void run() {
+ openCamera();
+ }
+ });
+ return;
+ } else {
+ Logging.e(TAG, "Opening camera failed too many times. Passing the error.");
+ }
+ }
+
+ reportError(getErrorDescription(errorCode));
+ }
+
+ @Override
+ public void onOpened(CameraDevice camera) {
+ checkIsStrictlyOnCameraThread();
+
+ Logging.d(TAG, "Camera opened.");
+ if (cameraState != CameraState.STARTING) {
+ throw new IllegalStateException("Unexpected state when camera opened: " + cameraState);
+ }
+
+ cameraDevice = camera;
+ final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
+ surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
+ surface = new Surface(surfaceTexture);
+ try {
+ camera.createCaptureSession(
+ Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
+ } catch (CameraAccessException e) {
+ reportError("Failed to create capture session. " + e);
+ }
+ }
+
+ @Override
+ public void onClosed(CameraDevice camera) {
+ checkIsStrictlyOnCameraThread();
+
+ Logging.d(TAG, "Camera device closed.");
+
+ if (cameraState != CameraState.STOPPING) {
+ Logging.e(TAG, "Camera state was not STOPPING in onClosed. Most likely camera didn't stop "
+ + "within timelimit and this method was invoked twice.");
+ return;
+ }
+
+ cameraThreadHandler.removeCallbacksAndMessages(STOP_TIMEOUT_RUNNABLE_TOKEN);
+ setCameraState(CameraState.IDLE);
+ if (eventsHandler != null) {
+ eventsHandler.onCameraClosed();
+ }
+ }
+ }
+
+ final class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
+ @Override
+ public void onConfigureFailed(CameraCaptureSession session) {
+ checkIsStrictlyOnCameraThread();
+ captureSession = session;
+ reportError("Failed to configure capture session.");
+ }
+
+ @Override
+ public void onConfigured(CameraCaptureSession session) {
+ checkIsStrictlyOnCameraThread();
+ Logging.d(TAG, "Camera capture session configured.");
+ captureSession = session;
+ try {
+ /*
+ * The viable options for video capture requests are:
+ * TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
+ * post-processing.
+ * TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
+ * quality.
+ */
+ final CaptureRequest.Builder captureRequestBuilder =
+ cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+ // Set auto exposure fps range.
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<Integer>(
+ captureFormat.framerate.min / fpsUnitFactor,
+ captureFormat.framerate.max / fpsUnitFactor));
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
+ CaptureRequest.CONTROL_AE_MODE_ON);
+ captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
+
+ captureRequestBuilder.addTarget(surface);
+ session.setRepeatingRequest(
+ captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
+ } catch (CameraAccessException e) {
+ reportError("Failed to start capture request. " + e);
+ return;
+ }
+
+ Logging.d(TAG, "Camera device successfully started.");
+ surfaceTextureHelper.startListening(Camera2Capturer.this);
+ capturerObserver.onCapturerStarted(true /* success */);
+ cameraStatistics = new CameraStatistics(surfaceTextureHelper, eventsHandler);
+ setCameraState(CameraState.RUNNING);
+
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchDone(isFrontCamera);
+ switchEventsHandler = null;
+ pendingCameraSwitchSemaphore.release();
+ }
+ }
+ }
+
+ final class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
+ static final int MAX_CONSECUTIVE_CAMERA_CAPTURE_FAILURES = 10;
+ int consecutiveCameraCaptureFailures;
+
+ @Override
+ public void onCaptureFailed(
+ CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
+ checkIsOnCameraThread();
+ ++consecutiveCameraCaptureFailures;
+ if (consecutiveCameraCaptureFailures > MAX_CONSECUTIVE_CAMERA_CAPTURE_FAILURES) {
+ reportError("Capture failed " + consecutiveCameraCaptureFailures + " consecutive times.");
+ }
+ }
+
+ @Override
+ public void onCaptureCompleted(
+ CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
+      // TODO(sakal): This sometimes gets called after the camera has stopped; investigate.
+ checkIsOnCameraThread();
+ consecutiveCameraCaptureFailures = 0;
+ }
+ }
+
+ // Switch camera to the next valid camera id. This can only be called while
+ // the camera is running.
+ @Override
+ public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
+ final String[] cameraIds;
+ try {
+ cameraIds = cameraManager.getCameraIdList();
+ } catch (CameraAccessException e) {
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError("Could not get camera names: " + e);
+ }
+ return;
+ }
+ if (cameraIds.length < 2) {
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError("No camera to switch to.");
+ }
+ return;
+ }
+    // Do not handle multiple camera switch requests at once; this avoids blocking the camera
+    // thread with a backlog of queued switch requests. We have to be careful to always release
+    // this semaphore.
+ if (!pendingCameraSwitchSemaphore.tryAcquire()) {
+ Logging.w(TAG, "Ignoring camera switch request.");
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError("Pending camera switch already in progress.");
+ }
+ return;
+ }
+
+ final String newCameraId;
+ final SurfaceTextureHelper surfaceTextureHelper;
+ final Context applicationContext;
+ final CapturerObserver capturerObserver;
+ final int requestedWidth;
+ final int requestedHeight;
+ final int requestedFramerate;
+
+ synchronized (cameraStateLock) {
+ waitForCameraToStartIfStarting();
+
+ if (cameraState != CameraState.RUNNING) {
+ Logging.e(TAG, "Calling swithCamera() on stopped camera.");
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError("Camera is stopped.");
+ }
+ pendingCameraSwitchSemaphore.release();
+ return;
+ }
+
+ // Calculate new camera index and camera id. Camera is in state RUNNING so cameraName will
+ // not be edited.
+ final int currentCameraIndex = Arrays.asList(cameraIds).indexOf(cameraName);
+ if (currentCameraIndex == -1) {
+ Logging.e(TAG, "Couldn't find current camera id " + cameraName
+ + " in list of camera ids: " + Arrays.toString(cameraIds));
+ }
+ final int newCameraIndex = (currentCameraIndex + 1) % cameraIds.length;
+ newCameraId = cameraIds[newCameraIndex];
+
+ // Remember parameters. These are not null since camera is in RUNNING state. They aren't
+ // edited either while camera is in RUNNING state.
+ surfaceTextureHelper = this.surfaceTextureHelper;
+ applicationContext = this.applicationContext;
+ capturerObserver = this.capturerObserver;
+ requestedWidth = this.requestedWidth;
+ requestedHeight = this.requestedHeight;
+ requestedFramerate = this.requestedFramerate;
+ this.switchEventsHandler = switchEventsHandler;
+ }
+
+ // Make the switch.
+ stopCapture();
+ setCameraName(newCameraId);
+ startCapture(requestedWidth, requestedHeight, requestedFramerate, surfaceTextureHelper,
+ applicationContext, capturerObserver);
+
+ // Note: switchEventsHandler will be called from onConfigured / reportError.
+ }
+
+  // Requests a new output format from the video capturer. Frames captured by the camera will
+  // be scaled and/or dropped by the video capturer. It does not matter if width and height are
+  // flipped. E.g., |width| = 640, |height| = 480 produces the same result as |width| = 480,
+  // |height| = 640.
+ // TODO(magjed/perkj): Document what this function does. Change name?
+ @Override
+ public void onOutputFormatRequest(final int width, final int height, final int framerate) {
+ postOnCameraThread(new Runnable() {
+ @Override
+ public void run() {
+ if (capturerObserver == null) {
+ Logging.e(TAG, "Calling onOutputFormatRequest() on stopped camera.");
+ return;
+ }
+ Logging.d(TAG,
+ "onOutputFormatRequestOnCameraThread: " + width + "x" + height + "@" + framerate);
+ capturerObserver.onOutputFormatRequest(width, height, framerate);
+ }
+ });
+ }
+
+ // Reconfigure the camera to capture in a new format. This should only be called while the camera
+ // is running.
+ @Override
+ public void changeCaptureFormat(final int width, final int height, final int framerate) {
+ final SurfaceTextureHelper surfaceTextureHelper;
+ final Context applicationContext;
+ final CapturerObserver capturerObserver;
+
+ synchronized (cameraStateLock) {
+ waitForCameraToStartIfStarting();
+
+ if (cameraState != CameraState.RUNNING) {
+ Logging.e(TAG, "Calling changeCaptureFormat() on stopped camera.");
+ return;
+ }
+
+ requestedWidth = width;
+ requestedHeight = height;
+ requestedFramerate = framerate;
+
+ surfaceTextureHelper = this.surfaceTextureHelper;
+ applicationContext = this.applicationContext;
+ capturerObserver = this.capturerObserver;
+ }
+
+ // Make the switch.
+ stopCapture();
+ // TODO(magjed/sakal): Just recreate session.
+ startCapture(width, height, framerate,
+ surfaceTextureHelper, applicationContext, capturerObserver);
+ }
+
+ @Override
+ public List<CaptureFormat> getSupportedFormats() {
+    synchronized (cameraStateLock) {
+ return Camera2Enumerator.getSupportedFormats(this.cameraManager, cameraName);
+ }
+ }
+
+ @Override
+ public void dispose() {
+ synchronized (cameraStateLock) {
+ waitForCameraToStopIfStopping();
+
+ if (cameraState != CameraState.IDLE) {
+ throw new IllegalStateException("Unexpected camera state for dispose: " + cameraState);
+ }
+ }
+ }
+
+ // Blocks until camera is known to be stopped.
+ @Override
+ public void stopCapture() {
+ final CountDownLatch cameraStoppingLatch = new CountDownLatch(1);
+
+ Logging.d(TAG, "stopCapture");
+ checkNotOnCameraThread();
+
+ synchronized (cameraStateLock) {
+ waitForCameraToStartIfStarting();
+
+ if (cameraState != CameraState.RUNNING) {
+ Logging.w(TAG, "stopCapture called for already stopped camera.");
+ return;
+ }
+
+ postOnCameraThread(new Runnable() {
+ @Override
+ public void run() {
+ Logging.d(TAG, "stopCaptureOnCameraThread");
+
+ // Stop capture.
+ closeAndRelease();
+ cameraStoppingLatch.countDown();
+ }
+ });
+ }
+
+    // Wait for the stopping to start.
+ ThreadUtils.awaitUninterruptibly(cameraStoppingLatch);
+
+ Logging.d(TAG, "stopCapture done");
+ }
+
+ private void postOnCameraThread(Runnable runnable) {
+ postDelayedOnCameraThread(0 /* delayMs */, runnable);
+ }
+
+ private void postDelayedOnCameraThread(int delayMs, Runnable runnable) {
+ synchronized (cameraStateLock) {
+ if ((cameraState != CameraState.STARTING && cameraState != CameraState.RUNNING)
+ || !cameraThreadHandler.postAtTime(
+ runnable, this /* token */, SystemClock.uptimeMillis() + delayMs)) {
+ Logging.w(TAG, "Runnable not scheduled even though it was requested.");
+ }
+ }
+ }
+
+ private int getDeviceOrientation() {
+ int orientation = 0;
+
+ WindowManager wm = (WindowManager) applicationContext.getSystemService(
+ Context.WINDOW_SERVICE);
+    switch (wm.getDefaultDisplay().getRotation()) {
+ case Surface.ROTATION_90:
+ orientation = 90;
+ break;
+ case Surface.ROTATION_180:
+ orientation = 180;
+ break;
+ case Surface.ROTATION_270:
+ orientation = 270;
+ break;
+ case Surface.ROTATION_0:
+ default:
+ orientation = 0;
+ break;
+ }
+ return orientation;
+ }
+
+ @Override
+ public void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ checkIsStrictlyOnCameraThread();
+
+ if (eventsHandler != null && !firstFrameReported) {
+ eventsHandler.onFirstFrameAvailable();
+ firstFrameReported = true;
+ }
+
+ int rotation;
+ if (isFrontCamera) {
+ // Undo the mirror that the OS "helps" us with.
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+ rotation = cameraOrientation + getDeviceOrientation();
+ transformMatrix =
+ RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
+ } else {
+ rotation = cameraOrientation - getDeviceOrientation();
+ }
+ // Make sure |rotation| is between 0 and 360.
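+    // For example, a rotation of -90 maps to (360 + (-90 % 360)) % 360 = 270.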
+ rotation = (360 + rotation % 360) % 360;
+
+ // Undo camera orientation - we report it as rotation instead.
+ transformMatrix = RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
+
+ cameraStatistics.addFrame();
+ capturerObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
+ transformMatrix, rotation, timestampNs);
+ }
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/Camera2Enumerator.java b/webrtc/api/android/java/src/org/webrtc/Camera2Enumerator.java
new file mode 100644
index 0000000..fe2b259
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/Camera2Enumerator.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import android.annotation.TargetApi;
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.os.SystemClock;
+import android.util.Range;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@TargetApi(21)
+public class Camera2Enumerator implements CameraEnumerator {
+  private static final String TAG = "Camera2Enumerator";
+  private static final double NANO_SECONDS_PER_SECOND = 1.0e9;
+
+ // Each entry contains the supported formats for a given camera index. The formats are enumerated
+ // lazily in getSupportedFormats(), and cached for future reference.
+ private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
+ new HashMap<String, List<CaptureFormat>>();
+
+ final Context context;
+ final CameraManager cameraManager;
+
+ public Camera2Enumerator(Context context) {
+ this.context = context;
+ this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ }
+
+ @Override
+ public String[] getDeviceNames() {
+ try {
+ return cameraManager.getCameraIdList();
+ } catch (CameraAccessException e) {
+ Logging.e(TAG, "Camera access exception: " + e);
+ return new String[] {};
+ }
+ }
+
+ @Override
+ public boolean isFrontFacing(String deviceName) {
+ CameraCharacteristics characteristics
+ = getCameraCharacteristics(deviceName);
+
+ return characteristics != null
+ && characteristics.get(CameraCharacteristics.LENS_FACING)
+ == CameraMetadata.LENS_FACING_FRONT;
+ }
+
+ @Override
+ public boolean isBackFacing(String deviceName) {
+ CameraCharacteristics characteristics
+ = getCameraCharacteristics(deviceName);
+
+ return characteristics != null
+ && characteristics.get(CameraCharacteristics.LENS_FACING)
+ == CameraMetadata.LENS_FACING_BACK;
+ }
+
+ @Override
+ public CameraVideoCapturer createCapturer(String deviceName,
+ CameraVideoCapturer.CameraEventsHandler eventsHandler) {
+ return new Camera2Capturer(context, deviceName, eventsHandler);
+ }
+
+ private CameraCharacteristics getCameraCharacteristics(String deviceName) {
+ try {
+ return cameraManager.getCameraCharacteristics(deviceName);
+ } catch (CameraAccessException e) {
+ Logging.e(TAG, "Camera access exception: " + e);
+ return null;
+ }
+ }
+
+ public static boolean isSupported() {
+ return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP;
+ }
+
+ static List<CaptureFormat.FramerateRange> getSupportedFramerateRanges(
+ CameraCharacteristics cameraCharacteristics) {
+ final Range<Integer>[] fpsRanges =
+ cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+
+ if (fpsRanges == null) {
+ return new ArrayList<CaptureFormat.FramerateRange>();
+ }
+
+ int maxFps = 0;
+ for (Range<Integer> fpsRange : fpsRanges) {
+ maxFps = Math.max(maxFps, fpsRange.getUpper());
+ }
+ int unitFactor = maxFps < 1000 ? 1000 : 1;
+ return convertFramerates(fpsRanges, unitFactor);
+ }
+
+ static List<Size> getSupportedSizes(
+ CameraCharacteristics cameraCharacteristics) {
+ final StreamConfigurationMap streamMap =
+ cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ final android.util.Size[] sizes = streamMap.getOutputSizes(SurfaceTexture.class);
+ if (sizes == null) {
+ Logging.e(TAG, "No supported camera output sizes.");
+ return new ArrayList<Size>();
+ }
+ return convertSizes(sizes);
+ }
+
+ static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
+ return getSupportedFormats(
+ (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
+ }
+
+ static List<CaptureFormat> getSupportedFormats(
+ CameraManager cameraManager, String cameraId) {
+ synchronized (cachedSupportedFormats) {
+ if (cachedSupportedFormats.containsKey(cameraId)) {
+ return cachedSupportedFormats.get(cameraId);
+ }
+
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
+ final long startTimeMs = SystemClock.elapsedRealtime();
+
+ final CameraCharacteristics cameraCharacteristics;
+ try {
+ cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
+ } catch (Exception ex) {
+ Logging.e(TAG, "getCameraCharacteristics(): " + ex);
+ return new ArrayList<CaptureFormat>();
+ }
+
+ final StreamConfigurationMap streamMap =
+ cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+ List<CaptureFormat.FramerateRange> framerateRanges = getSupportedFramerateRanges(
+ cameraCharacteristics);
+ List<Size> sizes = getSupportedSizes(cameraCharacteristics);
+
+ int defaultMaxFps = 0;
+ for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
+ defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
+ }
+
+ final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
+ for (Size size : sizes) {
+ long minFrameDurationNs = 0;
+ try {
+ minFrameDurationNs = streamMap.getOutputMinFrameDuration(SurfaceTexture.class,
+ new android.util.Size(size.width, size.height));
+ } catch (Exception e) {
+ // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
+ }
+ final int maxFps = (minFrameDurationNs == 0)
+ ? defaultMaxFps
+ : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
+ formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
+ Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
+ }
+
+ cachedSupportedFormats.put(cameraId, formatList);
+ final long endTimeMs = SystemClock.elapsedRealtime();
+ Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
+ + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
+ return formatList;
+ }
+ }
+
+ // Convert from android.util.Size to Size.
+ private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
+ final List<Size> sizes = new ArrayList<Size>();
+ for (android.util.Size size : cameraSizes) {
+ sizes.add(new Size(size.getWidth(), size.getHeight()));
+ }
+ return sizes;
+ }
+
+ // Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
+ private static List<CaptureFormat.FramerateRange> convertFramerates(
+ Range<Integer>[] arrayRanges, int unitFactor) {
+ final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
+ for (Range<Integer> range : arrayRanges) {
+ ranges.add(new CaptureFormat.FramerateRange(
+ range.getLower() * unitFactor,
+ range.getUpper() * unitFactor));
+ }
+ return ranges;
+ }
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/CameraEnumerationAndroid.java b/webrtc/api/android/java/src/org/webrtc/CameraEnumerationAndroid.java
new file mode 100644
index 0000000..b3c5062
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/CameraEnumerationAndroid.java
@@ -0,0 +1,237 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static java.lang.Math.abs;
+
+import android.graphics.ImageFormat;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+@SuppressWarnings("deprecation")
+public class CameraEnumerationAndroid {
+  private static final String TAG = "CameraEnumerationAndroid";
+
+ public static class CaptureFormat {
+    // Class to represent a framerate range. The framerate varies because of lighting conditions.
+ // The values are multiplied by 1000, so 1000 represents one frame per second.
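+    // For example, a range of 15-30 fps is represented as min = 15000 and max = 30000.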
+ public static class FramerateRange {
+ public int min;
+ public int max;
+
+ public FramerateRange(int min, int max) {
+ this.min = min;
+ this.max = max;
+ }
+
+ @Override
+ public String toString() {
+ return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof FramerateRange)) {
+ return false;
+ }
+ final FramerateRange otherFramerate = (FramerateRange) other;
+ return min == otherFramerate.min && max == otherFramerate.max;
+ }
+
+ @Override
+ public int hashCode() {
+ // Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
+ return 1 + 65537 * min + max;
+ }
+ }
+
+ public final int width;
+ public final int height;
+ public final FramerateRange framerate;
+
+ // TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
+ // needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
+ // all imageFormats.
+ public final int imageFormat = ImageFormat.NV21;
+
+ public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
+ this.width = width;
+ this.height = height;
+ this.framerate = new FramerateRange(minFramerate, maxFramerate);
+ }
+
+ public CaptureFormat(int width, int height, FramerateRange framerate) {
+ this.width = width;
+ this.height = height;
+ this.framerate = framerate;
+ }
+
+ // Calculates the frame size of this capture format.
+ public int frameSize() {
+ return frameSize(width, height, imageFormat);
+ }
+
+ // Calculates the frame size of the specified image format. Currently only
+ // supporting ImageFormat.NV21.
+ // The size is width * height * number of bytes per pixel.
+ // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
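+    // For example, a 640x480 NV21 frame (12 bits per pixel) occupies
+    // 640 * 480 * 12 / 8 = 460800 bytes.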
+ public static int frameSize(int width, int height, int imageFormat) {
+ if (imageFormat != ImageFormat.NV21) {
+ throw new UnsupportedOperationException("Don't know how to calculate "
+ + "the frame size of non-NV21 image formats.");
+ }
+ return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
+ }
+
+ @Override
+ public String toString() {
+ return width + "x" + height + "@" + framerate;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof CaptureFormat)) {
+ return false;
+ }
+ final CaptureFormat otherFormat = (CaptureFormat) other;
+ return width == otherFormat.width && height == otherFormat.height
+ && framerate.equals(otherFormat.framerate);
+ }
+
+ @Override
+ public int hashCode() {
+ return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
+ }
+ }
+
+ /**
+ * @deprecated
+ * Please use Camera1Enumerator.getDeviceNames() instead.
+ */
+ @Deprecated
+ public static String[] getDeviceNames() {
+ return new Camera1Enumerator().getDeviceNames();
+ }
+
+ /**
+ * @deprecated
+ * Please use Camera1Enumerator.getDeviceNames().length instead.
+ */
+ @Deprecated
+ public static int getDeviceCount() {
+ return new Camera1Enumerator().getDeviceNames().length;
+ }
+
+ /**
+ * @deprecated
+   * Please use Camera1Enumerator.getDeviceNames()[index] instead.
+ */
+ @Deprecated
+ public static String getDeviceName(int index) {
+ return new Camera1Enumerator().getDeviceName(index);
+ }
+
+ /**
+ * @deprecated
+ * Please use Camera1Enumerator.isFrontFacing(String deviceName) instead.
+ */
+ @Deprecated
+ public static String getNameOfFrontFacingDevice() {
+ return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
+ }
+
+ /**
+ * @deprecated
+ * Please use Camera1Enumerator.isBackFacing(String deviceName) instead.
+ */
+ @Deprecated
+ public static String getNameOfBackFacingDevice() {
+ return getNameOfDevice(android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK);
+ }
+
+ // Helper class for finding the closest supported format for the two functions below. It creates a
+ // comparator based on the difference to some requested parameters, where the element with the
+ // minimum difference is the element that is closest to the requested parameters.
+ private static abstract class ClosestComparator<T> implements Comparator<T> {
+ // Difference between supported and requested parameter.
+ abstract int diff(T supportedParameter);
+
+ @Override
+ public int compare(T t1, T t2) {
+ return diff(t1) - diff(t2);
+ }
+ }
+
+ // Prefer a fps range with an upper bound close to |framerate|. Also prefer a fps range with a low
+  // lower bound, to allow the framerate to fluctuate based on lighting conditions.
+ public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
+ List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
+ return Collections.min(supportedFramerates,
+ new ClosestComparator<CaptureFormat.FramerateRange>() {
+ // Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
+ // from requested.
+ private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
+ private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
+ private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
+
+ // Progressive penalty if the lower bound is bigger than |MIN_FPS_THRESHOLD|.
+ private static final int MIN_FPS_THRESHOLD = 8000;
+ private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
+ private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
+
+ // Use one weight for |value| below |threshold|, and another weight above it.
+ private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
+ return (value < threshold)
+ ? value * lowWeight
+ : threshold * lowWeight + (value - threshold) * highWeight;
+ }
+
+ @Override
+ int diff(CaptureFormat.FramerateRange range) {
+ final int minFpsError = progressivePenalty(range.min,
+ MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
+ final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
+ MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
+ return minFpsError + maxFpsError;
+ }
+ });
+ }
+
+ public static Size getClosestSupportedSize(
+ List<Size> supportedSizes, final int requestedWidth,
+ final int requestedHeight) {
+ return Collections.min(supportedSizes,
+ new ClosestComparator<Size>() {
+ @Override
+ int diff(Size size) {
+ return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
+ }
+ });
+ }
+
+ private static String getNameOfDevice(int facing) {
+ final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
+ for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
+ try {
+ android.hardware.Camera.getCameraInfo(i, info);
+ if (info.facing == facing) {
+ return getDeviceName(i);
+ }
+ } catch (Exception e) {
+ Logging.e(TAG, "getCameraInfo() failed on index " + i, e);
+ }
+ }
+ return null;
+ }
+}
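For reference, a minimal usage sketch of the helpers above (illustrative, not part of this CL). |supportedFormats| is an assumed list of CaptureFormat obtained from the camera. Note that FramerateRange values are in units of fps * 1000, while |requestedFps| is in plain fps, matching the thresholds in the comparator above.

  // Collect the supported framerate ranges, then pick the one closest to 30 fps.
  List<CaptureFormat.FramerateRange> fpsRanges = new ArrayList<>();
  for (CaptureFormat format : supportedFormats) {
    fpsRanges.add(format.framerate);
  }
  CaptureFormat.FramerateRange bestFpsRange =
      getClosestSupportedFramerateRange(fpsRanges, 30 /* requestedFps */);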
diff --git a/webrtc/api/android/java/src/org/webrtc/CameraEnumerator.java b/webrtc/api/android/java/src/org/webrtc/CameraEnumerator.java
new file mode 100644
index 0000000..752e10a
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/CameraEnumerator.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+public interface CameraEnumerator {
+ public String[] getDeviceNames();
+ public boolean isFrontFacing(String deviceName);
+ public boolean isBackFacing(String deviceName);
+
+ public CameraVideoCapturer createCapturer(String deviceName,
+ CameraVideoCapturer.CameraEventsHandler eventsHandler);
+}
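Usage sketch (illustrative, not part of this CL): enumerate devices and open the first front-facing camera via Camera1Enumerator, the camera1-based implementation referenced by the deprecated helpers above.

  CameraEnumerator enumerator = new Camera1Enumerator();
  for (String deviceName : enumerator.getDeviceNames()) {
    if (enumerator.isFrontFacing(deviceName)) {
      CameraVideoCapturer capturer =
          enumerator.createCapturer(deviceName, null /* eventsHandler */);
      // Hand |capturer| to PeerConnectionFactory.createVideoSource(...).
      break;
    }
  }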
diff --git a/webrtc/api/android/java/src/org/webrtc/CameraVideoCapturer.java b/webrtc/api/android/java/src/org/webrtc/CameraVideoCapturer.java
new file mode 100644
index 0000000..46432d4
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/CameraVideoCapturer.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
+ * switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
+ * class for detecting camera freezes.
+ */
+public interface CameraVideoCapturer extends VideoCapturer {
+ /**
+ * Camera events handler - can be used to be notified about camera events. The callbacks are
+ * executed from an arbitrary thread.
+ */
+ public interface CameraEventsHandler {
+ // Camera error handler - invoked when camera can not be opened
+ // or any camera exception happens on camera thread.
+ void onCameraError(String errorDescription);
+
+ // Invoked when camera stops receiving frames.
+ void onCameraFreezed(String errorDescription);
+
+ // Callback invoked when camera is opening.
+ void onCameraOpening(int cameraId);
+
+ // Callback invoked when first camera frame is available after camera is started.
+ void onFirstFrameAvailable();
+
+ // Callback invoked when camera is closed.
+ void onCameraClosed();
+ }
+
+ /**
+ * Camera switch handler - one of these functions is invoked with the result of switchCamera().
+ * The callback may be called on an arbitrary thread.
+ */
+ public interface CameraSwitchHandler {
+ // Invoked on success. |isFrontCamera| is true if the new camera is front facing.
+ void onCameraSwitchDone(boolean isFrontCamera);
+
+ // Invoked on failure, e.g. camera is stopped or only one camera available.
+ void onCameraSwitchError(String errorDescription);
+ }
+
+ /**
+ * Switch camera to the next valid camera id. This can only be called while the camera is running.
+ * This function can be called from any thread.
+ */
+ void switchCamera(CameraSwitchHandler switchEventsHandler);
+
+ /**
+ * Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
+ * on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
+ * thread.
+ */
+ public static class CameraStatistics {
+ private final static String TAG = "CameraStatistics";
+ private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
+ private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 4000;
+
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ private final CameraEventsHandler eventsHandler;
+ private int frameCount;
+ private int freezePeriodCount;
+ // Camera observer - monitors camera framerate. Executed on the SurfaceTextureHelper thread.
+ private final Runnable cameraObserver = new Runnable() {
+ @Override
+ public void run() {
+ final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
+ Logging.d(TAG, "Camera fps: " + cameraFps +".");
+ if (frameCount == 0) {
+ ++freezePeriodCount;
+ if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMEOUT_MS
+ && eventsHandler != null) {
+ Logging.e(TAG, "Camera freezed.");
+ if (surfaceTextureHelper.isTextureInUse()) {
+ // This can only happen if we are capturing to textures.
+ eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
+ } else {
+ eventsHandler.onCameraFreezed("Camera failure.");
+ }
+ return;
+ }
+ } else {
+ freezePeriodCount = 0;
+ }
+ frameCount = 0;
+ surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
+ }
+ };
+
+ public CameraStatistics(
+ SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
+ if (surfaceTextureHelper == null) {
+ throw new IllegalArgumentException("SurfaceTextureHelper is null");
+ }
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ this.eventsHandler = eventsHandler;
+ this.frameCount = 0;
+ this.freezePeriodCount = 0;
+ surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
+ }
+
+ private void checkThread() {
+ if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+
+ public void addFrame() {
+ checkThread();
+ ++frameCount;
+ }
+
+ public void release() {
+ checkThread();
+ surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
+ }
+ }
+}
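Sketch of the switchCamera() contract (illustrative, not part of this CL). |capturer| is an assumed CameraVideoCapturer instance; per the interface comment above, the handler may run on an arbitrary thread, so post to a known thread before touching UI state.

  capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
    @Override
    public void onCameraSwitchDone(boolean isFrontCamera) {
      // Runs on an arbitrary thread; post to the UI thread before updating views.
    }

    @Override
    public void onCameraSwitchError(String errorDescription) {
      Logging.e("AppCameraSwitch", "switchCamera failed: " + errorDescription);
    }
  });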
diff --git a/webrtc/api/android/java/src/org/webrtc/DataChannel.java b/webrtc/api/android/java/src/org/webrtc/DataChannel.java
new file mode 100644
index 0000000..909d26f
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/DataChannel.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/** Java wrapper for a C++ DataChannelInterface. */
+public class DataChannel {
+ /** Java wrapper for WebIDL RTCDataChannel. */
+ public static class Init {
+ public boolean ordered = true;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmitTimeMs = -1;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int maxRetransmits = -1;
+ public String protocol = "";
+ public boolean negotiated = false;
+ // Optional unsigned short in WebIDL, -1 means unspecified.
+ public int id = -1;
+
+ public Init() {}
+
+ // Called only by native code.
+ private Init(
+ boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
+ String protocol, boolean negotiated, int id) {
+ this.ordered = ordered;
+ this.maxRetransmitTimeMs = maxRetransmitTimeMs;
+ this.maxRetransmits = maxRetransmits;
+ this.protocol = protocol;
+ this.negotiated = negotiated;
+ this.id = id;
+ }
+ }
+
+ /** Java version of C++ DataBuffer. The atom of data in a DataChannel. */
+ public static class Buffer {
+ /** The underlying data. */
+ public final ByteBuffer data;
+
+ /**
+ * Indicates whether |data| contains UTF-8 text or "binary data"
+ * (i.e. anything else).
+ */
+ public final boolean binary;
+
+ public Buffer(ByteBuffer data, boolean binary) {
+ this.data = data;
+ this.binary = binary;
+ }
+ }
+
+ /** Java version of C++ DataChannelObserver. */
+ public interface Observer {
+ /** The data channel's bufferedAmount has changed. */
+ public void onBufferedAmountChange(long previousAmount);
+ /** The data channel state has changed. */
+ public void onStateChange();
+ /**
+ * A data buffer was successfully received. NOTE: |buffer.data| will be
+ * freed once this function returns so callers who want to use the data
+ * asynchronously must make sure to copy it first.
+ */
+ public void onMessage(Buffer buffer);
+ }
+
+ /** Keep in sync with DataChannelInterface::DataState. */
+ public enum State { CONNECTING, OPEN, CLOSING, CLOSED };
+
+ private final long nativeDataChannel;
+ private long nativeObserver;
+
+ public DataChannel(long nativeDataChannel) {
+ this.nativeDataChannel = nativeDataChannel;
+ }
+
+ /** Register |observer|, replacing any previously-registered observer. */
+ public void registerObserver(Observer observer) {
+ if (nativeObserver != 0) {
+ unregisterObserverNative(nativeObserver);
+ }
+ nativeObserver = registerObserverNative(observer);
+ }
+ private native long registerObserverNative(Observer observer);
+
+ /** Unregister the (only) observer. */
+ public void unregisterObserver() {
+ unregisterObserverNative(nativeObserver);
+ }
+ private native void unregisterObserverNative(long nativeObserver);
+
+ public native String label();
+
+ public native State state();
+
+ /**
+ * Return the number of bytes of application data (UTF-8 text and binary data)
+ * that have been queued using SendBuffer but have not yet been transmitted
+ * to the network.
+ */
+ public native long bufferedAmount();
+
+ /** Close the channel. */
+ public native void close();
+
+ /** Send |data| to the remote peer; return success. */
+ public boolean send(Buffer buffer) {
+ // TODO(fischman): this could be cleverer about avoiding copies if the
+ // ByteBuffer is direct and/or is backed by an array.
+ byte[] data = new byte[buffer.data.remaining()];
+ buffer.data.get(data);
+ return sendNative(data, buffer.binary);
+ }
+ private native boolean sendNative(byte[] data, boolean binary);
+
+ /** Dispose of native resources attached to this channel. */
+ public native void dispose();
+}
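Usage sketch (illustrative, not part of this CL): create an unordered, no-retransmit channel on an assumed |peerConnection|, observe it, and send a UTF-8 text message. Per the onMessage() warning above, |buffer.data| must be copied before it is used outside the callback.

  DataChannel.Init init = new DataChannel.Init();
  init.ordered = false;
  init.maxRetransmits = 0;
  DataChannel channel = peerConnection.createDataChannel("chat", init);
  channel.registerObserver(new DataChannel.Observer() {
    @Override public void onBufferedAmountChange(long previousAmount) {}
    @Override public void onStateChange() {}
    @Override public void onMessage(DataChannel.Buffer buffer) {
      // Copy before returning; the native buffer is freed once this returns.
      ByteBuffer copy = ByteBuffer.allocate(buffer.data.remaining());
      copy.put(buffer.data);
    }
  });
  byte[] utf8 = "hello".getBytes(Charset.forName("UTF-8"));
  channel.send(new DataChannel.Buffer(ByteBuffer.wrap(utf8), false /* binary */));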
diff --git a/webrtc/api/android/java/src/org/webrtc/EglBase.java b/webrtc/api/android/java/src/org/webrtc/EglBase.java
new file mode 100644
index 0000000..05dd806
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/EglBase.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+
+import javax.microedition.khronos.egl.EGL10;
+
+
+/**
+ * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+public abstract class EglBase {
+ // EGL wrapper for an actual EGLContext.
+ public static class Context {
+ }
+
+ // According to the documentation, EGL can be used from multiple threads at the same time if each
+ // thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
+ // Therefore, synchronize on this global lock before calling dangerous EGL functions that might
+ // deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
+ public static final Object lock = new Object();
+
+ // These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ // https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
+ // This is similar to what GLSurfaceView does:
+ // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
+ private static final int EGL_OPENGL_ES2_BIT = 4;
+ // Android-specific extension.
+ private static final int EGL_RECORDABLE_ANDROID = 0x3142;
+
+ public static final int[] CONFIG_PLAIN = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_RGBA = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_ALPHA_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_PIXEL_BUFFER = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_ALPHA_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
+ EGL10.EGL_NONE
+ };
+ public static final int[] CONFIG_RECORDABLE = {
+ EGL10.EGL_RED_SIZE, 8,
+ EGL10.EGL_GREEN_SIZE, 8,
+ EGL10.EGL_BLUE_SIZE, 8,
+ EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL_RECORDABLE_ANDROID, 1,
+ EGL10.EGL_NONE
+ };
+
+ // Create a new context with the specified config attributes, sharing data with sharedContext.
+ // |sharedContext| can be null.
+ public static EglBase create(Context sharedContext, int[] configAttributes) {
+ return (EglBase14.isEGL14Supported()
+ && (sharedContext == null || sharedContext instanceof EglBase14.Context))
+ ? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
+ : new EglBase10((EglBase10.Context) sharedContext, configAttributes);
+ }
+
+ public static EglBase create() {
+ return create(null, CONFIG_PLAIN);
+ }
+
+ public static EglBase create(Context sharedContext) {
+ return create(sharedContext, CONFIG_PLAIN);
+ }
+
+ public abstract void createSurface(Surface surface);
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ public abstract void createSurface(SurfaceTexture surfaceTexture);
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ public abstract void createDummyPbufferSurface();
+
+ public abstract void createPbufferSurface(int width, int height);
+
+ public abstract Context getEglBaseContext();
+
+ public abstract boolean hasSurface();
+
+ public abstract int surfaceWidth();
+
+ public abstract int surfaceHeight();
+
+ public abstract void releaseSurface();
+
+ public abstract void release();
+
+ public abstract void makeCurrent();
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ public abstract void detachCurrent();
+
+ public abstract void swapBuffers();
+}
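Usage sketch (illustrative, not part of this CL): a standalone off-screen context, e.g. for doing texture work on a dedicated thread. The dummy pbuffer surface exists only so the context can be made current.

  EglBase eglBase = EglBase.create();
  eglBase.createDummyPbufferSurface();
  eglBase.makeCurrent();
  // ... GLES20 calls go here ...
  eglBase.release();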
diff --git a/webrtc/api/android/java/src/org/webrtc/EglBase10.java b/webrtc/api/android/java/src/org/webrtc/EglBase10.java
new file mode 100644
index 0000000..8a95b03
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/EglBase10.java
@@ -0,0 +1,301 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Canvas;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.egl.EGLSurface;
+
+/**
+ * Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+final class EglBase10 extends EglBase {
+ // This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
+ private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+
+ private final EGL10 egl;
+ private EGLContext eglContext;
+ private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
+
+ // EGL wrapper for an actual EGLContext.
+ public static class Context extends EglBase.Context {
+ private final EGLContext eglContext;
+
+ public Context(EGLContext eglContext) {
+ this.eglContext = eglContext;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ EglBase10(Context sharedContext, int[] configAttributes) {
+ this.egl = (EGL10) EGLContext.getEGL();
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
+ }
+
+ @Override
+ public void createSurface(Surface surface) {
+ /**
+ * We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
+ * couldn't actually take a Surface object until API 17. Older versions fortunately just call
+ * SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
+ */
+ class FakeSurfaceHolder implements SurfaceHolder {
+ private final Surface surface;
+
+ FakeSurfaceHolder(Surface surface) {
+ this.surface = surface;
+ }
+
+ @Override
+ public void addCallback(Callback callback) {}
+
+ @Override
+ public void removeCallback(Callback callback) {}
+
+ @Override
+ public boolean isCreating() {
+ return false;
+ }
+
+ @Deprecated
+ @Override
+ public void setType(int i) {}
+
+ @Override
+ public void setFixedSize(int i, int i2) {}
+
+ @Override
+ public void setSizeFromLayout() {}
+
+ @Override
+ public void setFormat(int i) {}
+
+ @Override
+ public void setKeepScreenOn(boolean b) {}
+
+ @Override
+ public Canvas lockCanvas() {
+ return null;
+ }
+
+ @Override
+ public Canvas lockCanvas(Rect rect) {
+ return null;
+ }
+
+ @Override
+ public void unlockCanvasAndPost(Canvas canvas) {}
+
+ @Override
+ public Rect getSurfaceFrame() {
+ return null;
+ }
+
+ @Override
+ public Surface getSurface() {
+ return surface;
+ }
+ }
+
+ createSurfaceInternal(new FakeSurfaceHolder(surface));
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
+ private void createSurfaceInternal(Object nativeWindow) {
+ if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_NONE};
+ eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create window surface");
+ }
+ }
+
+ // Create dummy 1x1 pixel buffer surface so the context can be made current.
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
+ eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException(
+ "Failed to create pixel buffer surface with size: " + width + "x" + height);
+ }
+ }
+
+ @Override
+ public org.webrtc.EglBase.Context getEglBaseContext() {
+ return new EglBase10.Context(eglContext);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL10.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+ final int[] widthArray = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+ final int[] heightArray = new int[1];
+ egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL10.EGL_NO_SURFACE) {
+ egl.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL10.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ egl.eglDestroyContext(eglDisplay, eglContext);
+ egl.eglTerminate(eglDisplay);
+ eglContext = EGL10.EGL_NO_CONTEXT;
+ eglDisplay = EGL10.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ synchronized (EglBase.lock) {
+ if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ synchronized (EglBase.lock) {
+ if (!egl.eglMakeCurrent(
+ eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglDetachCurrent failed");
+ }
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL10.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ egl.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
+ throw new RuntimeException("Unable to get EGL10 display");
+ }
+ int[] version = new int[2];
+ if (!egl.eglInitialize(eglDisplay, version)) {
+ throw new RuntimeException("Unable to initialize EGL10");
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!egl.eglChooseConfig(
+ eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
+ throw new RuntimeException("eglChooseConfig failed");
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+ // Return an EGLContext, or die trying.
+ private EGLContext createEglContext(
+ Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+ if (sharedContext != null && sharedContext.eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
+ EGLContext rootContext =
+ sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext.eglContext;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
+ }
+ if (eglContext == EGL10.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Failed to create EGL context");
+ }
+ return eglContext;
+ }
+}
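Sketch of context sharing (illustrative, not part of this CL): a second EglBase whose underlying EGL context shares textures with the first one. EglBase.create() picks the EglBase10 or EglBase14 implementation based on the type of the shared context, as shown above.

  EglBase renderEgl = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);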
diff --git a/webrtc/api/android/java/src/org/webrtc/EglBase14.java b/webrtc/api/android/java/src/org/webrtc/EglBase14.java
new file mode 100644
index 0000000..71d6d99
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/EglBase14.java
@@ -0,0 +1,258 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.view.Surface;
+
+import org.webrtc.Logging;
+
+/**
+ * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
+ * and an EGLSurface.
+ */
+@TargetApi(18)
+public final class EglBase14 extends EglBase {
+ private static final String TAG = "EglBase14";
+ private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
+ private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
+ private EGLContext eglContext;
+ private EGLConfig eglConfig;
+ private EGLDisplay eglDisplay;
+ private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
+
+ // EGL 1.4 is supported from API 17, but EGLExt, which is used for setting the presentation
+ // time stamp on a surface, is only supported from API 18, so we require 18.
+ public static boolean isEGL14Supported() {
+ Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
+ + ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
+ return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
+ }
+
+ public static class Context extends EglBase.Context {
+ private final android.opengl.EGLContext egl14Context;
+
+ public Context(android.opengl.EGLContext eglContext) {
+ this.egl14Context = eglContext;
+ }
+ }
+
+ // Create a new context with the specified config type, sharing data with sharedContext.
+ // |sharedContext| may be null.
+ public EglBase14(EglBase14.Context sharedContext, int[] configAttributes) {
+ eglDisplay = getEglDisplay();
+ eglConfig = getEglConfig(eglDisplay, configAttributes);
+ eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
+ }
+
+ // Create EGLSurface from the Android Surface.
+ @Override
+ public void createSurface(Surface surface) {
+ createSurfaceInternal(surface);
+ }
+
+ // Create EGLSurface from the Android SurfaceTexture.
+ @Override
+ public void createSurface(SurfaceTexture surfaceTexture) {
+ createSurfaceInternal(surfaceTexture);
+ }
+
+ // Create EGLSurface from either Surface or SurfaceTexture.
+ private void createSurfaceInternal(Object surface) {
+ if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
+ throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
+ }
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Failed to create window surface");
+ }
+ }
+
+ @Override
+ public void createDummyPbufferSurface() {
+ createPbufferSurface(1, 1);
+ }
+
+ @Override
+ public void createPbufferSurface(int width, int height) {
+ checkIsNotReleased();
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("Already has an EGLSurface");
+ }
+ int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
+ eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException(
+ "Failed to create pixel buffer surface with size: " + width + "x" + height);
+ }
+ }
+
+ @Override
+ public Context getEglBaseContext() {
+ return new EglBase14.Context(eglContext);
+ }
+
+ @Override
+ public boolean hasSurface() {
+ return eglSurface != EGL14.EGL_NO_SURFACE;
+ }
+
+ @Override
+ public int surfaceWidth() {
+ final int[] widthArray = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
+ return widthArray[0];
+ }
+
+ @Override
+ public int surfaceHeight() {
+ final int[] heightArray = new int[1];
+ EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
+ return heightArray[0];
+ }
+
+ @Override
+ public void releaseSurface() {
+ if (eglSurface != EGL14.EGL_NO_SURFACE) {
+ EGL14.eglDestroySurface(eglDisplay, eglSurface);
+ eglSurface = EGL14.EGL_NO_SURFACE;
+ }
+ }
+
+ private void checkIsNotReleased() {
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
+ || eglConfig == null) {
+ throw new RuntimeException("This object has been released");
+ }
+ }
+
+ @Override
+ public void release() {
+ checkIsNotReleased();
+ releaseSurface();
+ detachCurrent();
+ EGL14.eglDestroyContext(eglDisplay, eglContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(eglDisplay);
+ eglContext = EGL14.EGL_NO_CONTEXT;
+ eglDisplay = EGL14.EGL_NO_DISPLAY;
+ eglConfig = null;
+ }
+
+ @Override
+ public void makeCurrent() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't make current");
+ }
+ synchronized (EglBase.lock) {
+ if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+ }
+
+ // Detach the current EGL context, so that it can be made current on another thread.
+ @Override
+ public void detachCurrent() {
+ synchronized (EglBase.lock) {
+ if (!EGL14.eglMakeCurrent(
+ eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
+ throw new RuntimeException("eglDetachCurrent failed");
+ }
+ }
+ }
+
+ @Override
+ public void swapBuffers() {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ public void swapBuffers(long timeStampNs) {
+ checkIsNotReleased();
+ if (eglSurface == EGL14.EGL_NO_SURFACE) {
+ throw new RuntimeException("No EGLSurface - can't swap buffers");
+ }
+ synchronized (EglBase.lock) {
+ // See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+ EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
+ EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+ }
+ }
+
+ // Return an EGLDisplay, or die trying.
+ private static EGLDisplay getEglDisplay() {
+ EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("Unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
+ throw new RuntimeException("Unable to initialize EGL14");
+ }
+ return eglDisplay;
+ }
+
+ // Return an EGLConfig, or die trying.
+ private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(
+ eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
+ throw new RuntimeException("eglChooseConfig failed");
+ }
+ if (numConfigs[0] <= 0) {
+ throw new RuntimeException("Unable to find any matching EGL config");
+ }
+ final EGLConfig eglConfig = configs[0];
+ if (eglConfig == null) {
+ throw new RuntimeException("eglChooseConfig returned null");
+ }
+ return eglConfig;
+ }
+
+ // Return an EGLContext, or die trying.
+ private static EGLContext createEglContext(
+ EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+ if (sharedContext != null && sharedContext.egl14Context == EGL14.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Invalid sharedContext");
+ }
+ int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
+ EGLContext rootContext =
+ sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext.egl14Context;
+ final EGLContext eglContext;
+ synchronized (EglBase.lock) {
+ eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
+ }
+ if (eglContext == EGL14.EGL_NO_CONTEXT) {
+ throw new RuntimeException("Failed to create EGL context");
+ }
+ return eglContext;
+ }
+}
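Sketch (illustrative, not part of this CL): when rendering into a MediaCodec input surface, the EGL14 path allows stamping each frame with a presentation time before the swap. |frameTimeNs| is an assumed capture timestamp in nanoseconds.

  if (eglBase instanceof EglBase14) {
    ((EglBase14) eglBase).swapBuffers(frameTimeNs);
  } else {
    eglBase.swapBuffers();
  }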
diff --git a/webrtc/api/android/java/src/org/webrtc/GlRectDrawer.java b/webrtc/api/android/java/src/org/webrtc/GlRectDrawer.java
new file mode 100644
index 0000000..cfadff1
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/GlRectDrawer.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+
+import org.webrtc.GlShader;
+import org.webrtc.GlUtil;
+
+import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
+import java.util.Arrays;
+import java.util.IdentityHashMap;
+import java.util.Map;
+
+/**
+ * Helper class to draw an opaque quad on the target viewport location. Rotation, mirroring, and
+ * cropping are specified using a 4x4 texture coordinate transform matrix. The frame input can
+ * either be an OES texture or YUV textures in I420 format. The GL state must be preserved between
+ * draw calls; this is intentional, to maximize performance. The function release() must be called
+ * manually to free the resources held by this object.
+ */
+public class GlRectDrawer implements RendererCommon.GlDrawer {
+ // Simple vertex shader, used for both YUV and OES.
+ private static final String VERTEX_SHADER_STRING =
+ "varying vec2 interp_tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "\n"
+ + "uniform mat4 texMatrix;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " interp_tc = (texMatrix * in_tc).xy;\n"
+ + "}\n";
+
+ private static final String YUV_FRAGMENT_SHADER_STRING =
+ "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform sampler2D y_tex;\n"
+ + "uniform sampler2D u_tex;\n"
+ + "uniform sampler2D v_tex;\n"
+ + "\n"
+ + "void main() {\n"
+ // CSC according to http://www.fourcc.org/fccyvrgb.php
+ + " float y = texture2D(y_tex, interp_tc).r;\n"
+ + " float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
+ + " float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
+ + " gl_FragColor = vec4(y + 1.403 * v, "
+ + " y - 0.344 * u - 0.714 * v, "
+ + " y + 1.77 * u, 1);\n"
+ + "}\n";
+
+ private static final String RGB_FRAGMENT_SHADER_STRING =
+ "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform sampler2D rgb_tex;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_FragColor = texture2D(rgb_tex, interp_tc);\n"
+ + "}\n";
+
+ private static final String OES_FRAGMENT_SHADER_STRING =
+ "#extension GL_OES_EGL_image_external : require\n"
+ + "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform samplerExternalOES oes_tex;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_FragColor = texture2D(oes_tex, interp_tc);\n"
+ + "}\n";
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
+ // top-right.
+ private static final FloatBuffer FULL_RECTANGLE_BUF =
+ GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f // Top right.
+ });
+
+ private static class Shader {
+ public final GlShader glShader;
+ public final int texMatrixLocation;
+
+ public Shader(String fragmentShader) {
+ this.glShader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
+ this.texMatrixLocation = glShader.getUniformLocation("texMatrix");
+ }
+ }
+
+ // The keys are the fragment shader strings above.
+ private final Map<String, Shader> shaders = new IdentityHashMap<String, Shader>();
+
+ /**
+ * Draw an OES texture frame with specified texture transformation matrix. Required resources are
+ * allocated at the first call to this function.
+ */
+ @Override
+ public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(OES_FRAGMENT_SHADER_STRING, texMatrix);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ // updateTexImage() may be called from another thread in another EGL context, so we need to
+ // bind/unbind the texture in each draw call so that GLES understands it's a new texture.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
+ drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ }
+
+ /**
+ * Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
+ * are allocated at the first call to this function.
+ */
+ @Override
+ public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(RGB_FRAGMENT_SHADER_STRING, texMatrix);
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
+ // Unbind the texture as a precaution.
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+
+ /**
+ * Draw a YUV frame with specified texture transformation matrix. Required resources are
+ * allocated at the first call to this function.
+ */
+ @Override
+ public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
+ prepareShader(YUV_FRAGMENT_SHADER_STRING, texMatrix);
+ // Bind the textures.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
+ }
+ drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
+ // Unbind the textures as a precaution.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+ }
+
+ private void drawRectangle(int x, int y, int width, int height) {
+ // Draw quad.
+ GLES20.glViewport(x, y, width, height);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ }
+
+ private void prepareShader(String fragmentShader, float[] texMatrix) {
+ final Shader shader;
+ if (shaders.containsKey(fragmentShader)) {
+ shader = shaders.get(fragmentShader);
+ } else {
+ // Lazy allocation.
+ shader = new Shader(fragmentShader);
+ shaders.put(fragmentShader, shader);
+ shader.glShader.useProgram();
+ // Initialize fragment shader uniform values.
+ // Reference equality is intentional here; the keys are the shader string constants.
+ if (fragmentShader == YUV_FRAGMENT_SHADER_STRING) {
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("y_tex"), 0);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("u_tex"), 1);
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("v_tex"), 2);
+ } else if (fragmentShader == RGB_FRAGMENT_SHADER_STRING) {
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("rgb_tex"), 0);
+ } else if (fragmentShader == OES_FRAGMENT_SHADER_STRING) {
+ GLES20.glUniform1i(shader.glShader.getUniformLocation("oes_tex"), 0);
+ } else {
+ throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
+ }
+ GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
+ // Initialize vertex shader attributes.
+ shader.glShader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
+ shader.glShader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
+ }
+ shader.glShader.useProgram();
+ // Copy the texture transformation matrix over.
+ GLES20.glUniformMatrix4fv(shader.texMatrixLocation, 1, false, texMatrix, 0);
+ }
+
+ /**
+ * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
+ */
+ @Override
+ public void release() {
+ for (Shader shader : shaders.values()) {
+ shader.glShader.release();
+ }
+ shaders.clear();
+ }
+}
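Usage sketch (illustrative, not part of this CL): draw an OES texture over the full viewport with an identity transform. |oesTextureId| and the frame/viewport sizes are assumed to come from the caller; a real transform would typically come from SurfaceTexture.getTransformMatrix().

  GlRectDrawer drawer = new GlRectDrawer();
  float[] identityMatrix = new float[16];
  android.opengl.Matrix.setIdentityM(identityMatrix, 0);
  drawer.drawOes(oesTextureId, identityMatrix, frameWidth, frameHeight,
      0 /* viewportX */, 0 /* viewportY */, viewportWidth, viewportHeight);
  drawer.release(); // Required - GL resources are not freed automatically.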
diff --git a/webrtc/api/android/java/src/org/webrtc/GlShader.java b/webrtc/api/android/java/src/org/webrtc/GlShader.java
new file mode 100644
index 0000000..fb5e4c2
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/GlShader.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import org.webrtc.Logging;
+
+import java.nio.FloatBuffer;
+
+// Helper class for handling OpenGL shaders and shader programs.
+public class GlShader {
+ private static final String TAG = "GlShader";
+
+ private static int compileShader(int shaderType, String source) {
+ final int shader = GLES20.glCreateShader(shaderType);
+ if (shader == 0) {
+ throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
+ }
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compileStatus = new int[] {
+ GLES20.GL_FALSE
+ };
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
+ if (compileStatus[0] != GLES20.GL_TRUE) {
+ Logging.e(TAG, "Could not compile shader " + shaderType + ":" +
+ GLES20.glGetShaderInfoLog(shader));
+ throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
+ }
+ GlUtil.checkNoGLES2Error("compileShader");
+ return shader;
+ }
+
+ private int program;
+
+ public GlShader(String vertexSource, String fragmentSource) {
+ final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ program = GLES20.glCreateProgram();
+ if (program == 0) {
+ throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ GLES20.glAttachShader(program, fragmentShader);
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[] {
+ GLES20.GL_FALSE
+ };
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Logging.e(TAG, "Could not link program: " +
+ GLES20.glGetProgramInfoLog(program));
+ throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
+ }
+ // According to the documentation of glLinkProgram():
+ // "After the link operation, applications are free to modify attached shader objects, compile
+ // attached shader objects, detach shader objects, delete shader objects, and attach additional
+ // shader objects. None of these operations affects the information log or the program that is
+ // part of the program object."
+ // But in practice, detaching shaders from the program seems to break some devices. Deleting the
+ // shaders is fine, however - they will be deleted once they are no longer attached to a program.
+ GLES20.glDeleteShader(vertexShader);
+ GLES20.glDeleteShader(fragmentShader);
+ GlUtil.checkNoGLES2Error("Creating GlShader");
+ }
+
+ public int getAttribLocation(String label) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = GLES20.glGetAttribLocation(program, label);
+ if (location < 0) {
+ throw new RuntimeException("Could not locate '" + label + "' in program");
+ }
+ return location;
+ }
+
+ /**
+ * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
+ * |buffer| with |dimension| number of components per vertex.
+ */
+ public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = getAttribLocation(label);
+ GLES20.glEnableVertexAttribArray(location);
+ GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, 0, buffer);
+ GlUtil.checkNoGLES2Error("setVertexAttribArray");
+ }
+
+ public int getUniformLocation(String label) {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ int location = GLES20.glGetUniformLocation(program, label);
+ if (location < 0) {
+ throw new RuntimeException("Could not locate uniform '" + label + "' in program");
+ }
+ return location;
+ }
+
+ public void useProgram() {
+ if (program == -1) {
+ throw new RuntimeException("The program has been released");
+ }
+ GLES20.glUseProgram(program);
+ GlUtil.checkNoGLES2Error("glUseProgram");
+ }
+
+ public void release() {
+ Logging.d(TAG, "Deleting shader.");
+ // Delete program, automatically detaching any shaders from it.
+ if (program != -1) {
+ GLES20.glDeleteProgram(program);
+ program = -1;
+ }
+ }
+}
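Usage sketch (illustrative, not part of this CL): GlRectDrawer above is the canonical client, but a minimal standalone use compiles a program once and binds attributes and uniforms by name. |vertexSource|, |fragmentSource|, |vertexBuffer| and the "tex" uniform name are assumed.

  GlShader shader = new GlShader(vertexSource, fragmentSource);
  shader.useProgram();
  shader.setVertexAttribArray("in_pos", 2, vertexBuffer);
  GLES20.glUniform1i(shader.getUniformLocation("tex"), 0);
  // ... issue draw calls ...
  shader.release();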
diff --git a/webrtc/api/android/java/src/org/webrtc/GlTextureFrameBuffer.java b/webrtc/api/android/java/src/org/webrtc/GlTextureFrameBuffer.java
new file mode 100644
index 0000000..a456010
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/GlTextureFrameBuffer.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+/**
+ * Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
+ * buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
+ * conversion.
+ */
+// TODO(magjed): Add unittests for this class.
+public class GlTextureFrameBuffer {
+ private final int frameBufferId;
+ private final int textureId;
+ private final int pixelFormat;
+ private int width;
+ private int height;
+
+ /**
+ * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
+ * when calling this function. The framebuffer is not complete until setSize() is called.
+ */
+ public GlTextureFrameBuffer(int pixelFormat) {
+ switch (pixelFormat) {
+ case GLES20.GL_LUMINANCE:
+ case GLES20.GL_RGB:
+ case GLES20.GL_RGBA:
+ this.pixelFormat = pixelFormat;
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
+ }
+
+ textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ this.width = 0;
+ this.height = 0;
+
+ // Create framebuffer object and bind it.
+ final int[] frameBuffers = new int[1];
+ GLES20.glGenFramebuffers(1, frameBuffers, 0);
+ frameBufferId = frameBuffers[0];
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+ GlUtil.checkNoGLES2Error("Generate framebuffer");
+
+ // Attach the texture to the framebuffer as color attachment.
+ GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
+ GLES20.GL_TEXTURE_2D, textureId, 0);
+ GlUtil.checkNoGLES2Error("Attach texture to framebuffer");
+
+ // Restore normal framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ }
+
+ /**
+ * (Re)allocate texture. Will do nothing if the requested size equals the current size. An
+ * EGLContext must be bound on the current thread when calling this function. Must be called at
+ * least once before using the framebuffer. May be called multiple times to change size.
+ */
+ public void setSize(int width, int height) {
+ if (width == 0 || height == 0) {
+ throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
+ }
+ if (width == this.width && height == this.height) {
+ return;
+ }
+ this.width = width;
+ this.height = height;
+
+ // Bind our framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
+ GlUtil.checkNoGLES2Error("glBindFramebuffer");
+
+ // Allocate texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
+ GLES20.GL_UNSIGNED_BYTE, null);
+
+ // Check that the framebuffer is in a good state.
+ final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
+ if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
+ throw new IllegalStateException("Framebuffer not complete, status: " + status);
+ }
+
+ // Restore normal framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
+ }
+
+ public int getWidth() {
+ return width;
+ }
+
+ public int getHeight() {
+ return height;
+ }
+
+ public int getFrameBufferId() {
+ return frameBufferId;
+ }
+
+ public int getTextureId() {
+ return textureId;
+ }
+
+ /**
+ * Release texture and framebuffer. An EGLContext must be bound on the current thread when calling
+ * this function. This object should not be used after this call.
+ */
+ public void release() {
+ GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
+ GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
+ width = 0;
+ height = 0;
+ }
+}
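Usage sketch (illustrative, not part of this CL): render into an off-screen RGBA texture, then reuse the result via getTextureId(). A current EGLContext is assumed on the calling thread.

  GlTextureFrameBuffer textureFrameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
  textureFrameBuffer.setSize(640, 480);
  GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureFrameBuffer.getFrameBufferId());
  // ... draw calls here render into the attached texture ...
  GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
  textureFrameBuffer.release();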
diff --git a/webrtc/api/android/java/src/org/webrtc/GlUtil.java b/webrtc/api/android/java/src/org/webrtc/GlUtil.java
new file mode 100644
index 0000000..6f5e605
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/GlUtil.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.opengl.GLES20;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Some OpenGL static utility functions.
+ */
+public class GlUtil {
+ private GlUtil() {}
+
+ // Assert that no OpenGL ES 2.0 error has been raised.
+ public static void checkNoGLES2Error(String msg) {
+ int error = GLES20.glGetError();
+ if (error != GLES20.GL_NO_ERROR) {
+ throw new RuntimeException(msg + ": GLES20 error: " + error);
+ }
+ }
+
+ public static FloatBuffer createFloatBuffer(float[] coords) {
+ // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
+ ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
+ bb.order(ByteOrder.nativeOrder());
+ FloatBuffer fb = bb.asFloatBuffer();
+ fb.put(coords);
+ fb.position(0);
+ return fb;
+ }
+
+ /**
+ * Generate texture with standard parameters.
+ */
+ public static int generateTexture(int target) {
+ final int[] textureArray = new int[1];
+ GLES20.glGenTextures(1, textureArray, 0);
+ final int textureId = textureArray[0];
+ GLES20.glBindTexture(target, textureId);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+ checkNoGLES2Error("generateTexture");
+ return textureId;
+ }
+}
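Usage sketch (illustrative, not part of this CL): allocate an external OES texture and attach a SurfaceTexture to it - the typical first step for texture-based camera capture. A current EGLContext is assumed.

  int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
  SurfaceTexture surfaceTexture = new SurfaceTexture(oesTextureId);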
diff --git a/webrtc/api/android/java/src/org/webrtc/IceCandidate.java b/webrtc/api/android/java/src/org/webrtc/IceCandidate.java
new file mode 100644
index 0000000..3476ab4
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/IceCandidate.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/**
+ * Representation of a single ICE Candidate, mirroring
+ * {@code IceCandidateInterface} in the C++ API.
+ */
+public class IceCandidate {
+ public final String sdpMid;
+ public final int sdpMLineIndex;
+ public final String sdp;
+
+ public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
+ this.sdpMid = sdpMid;
+ this.sdpMLineIndex = sdpMLineIndex;
+ this.sdp = sdp;
+ }
+
+ @Override
+ public String toString() {
+ return sdpMid + ":" + sdpMLineIndex + ":" + sdp;
+ }
+}
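Usage sketch (illustrative, not part of this CL): a candidate received over the application's signaling channel is reassembled and handed to an assumed |peerConnection|.

  peerConnection.addIceCandidate(new IceCandidate(sdpMid, sdpMLineIndex, sdp));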
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoDecoder.java b/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoDecoder.java
new file mode 100644
index 0000000..02235fe
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -0,0 +1,714 @@
+/*
+ * Copyright 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.os.SystemClock;
+import android.view.Surface;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Queue;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
+// This class is an implementation detail of the Java PeerConnection API.
+@SuppressWarnings("deprecation")
+public class MediaCodecVideoDecoder {
+ // This class is constructed, operated, and destroyed by its C++ incarnation,
+ // so the class and its methods have non-public visibility. The API this
+ // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
+  // possible to minimize the amount of translation work necessary.
+
+ private static final String TAG = "MediaCodecVideoDecoder";
+ private static final long MAX_DECODE_TIME_MS = 200;
+
+ // Tracks webrtc::VideoCodecType.
+ public enum VideoCodecType {
+ VIDEO_CODEC_VP8,
+ VIDEO_CODEC_VP9,
+ VIDEO_CODEC_H264
+ }
+
+  // Timeout for input buffer dequeue, in microseconds.
+ private static final int DEQUEUE_INPUT_TIMEOUT = 500000;
+  // Timeout for codec release, in milliseconds.
+ private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
+ // Max number of output buffers queued before starting to drop decoded frames.
+ private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
+ // Active running decoder instance. Set in initDecode() (called from native code)
+ // and reset to null in release() call.
+ private static MediaCodecVideoDecoder runningInstance = null;
+ private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
+ private static int codecErrors = 0;
+ // List of disabled codec types - can be set from application.
+ private static Set<String> hwDecoderDisabledTypes = new HashSet<String>();
+
+ private Thread mediaCodecThread;
+ private MediaCodec mediaCodec;
+ private ByteBuffer[] inputBuffers;
+ private ByteBuffer[] outputBuffers;
+ private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+ private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
+ private static final String H264_MIME_TYPE = "video/avc";
+ // List of supported HW VP8 decoders.
+ private static final String[] supportedVp8HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel." };
+ // List of supported HW VP9 decoders.
+ private static final String[] supportedVp9HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Exynos." };
+ // List of supported HW H.264 decoders.
+ private static final String[] supportedH264HwCodecPrefixes =
+ {"OMX.qcom.", "OMX.Intel.", "OMX.Exynos." };
+ // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+ // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+ private static final int
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+ // Allowable color formats supported by codec - in order of preference.
+ private static final List<Integer> supportedColorList = Arrays.asList(
+ CodecCapabilities.COLOR_FormatYUV420Planar,
+ CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
+ private int colorFormat;
+ private int width;
+ private int height;
+ private int stride;
+ private int sliceHeight;
+ private boolean hasDecodedFirstFrame;
+ private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>();
+ private boolean useSurface;
+
+ // The below variables are only used when decoding to a Surface.
+ private TextureListener textureListener;
+ private int droppedFrames;
+ private Surface surface = null;
+ private final Queue<DecodedOutputBuffer>
+ dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
+
+  // MediaCodec error handler - invoked when a critical error happens that may prevent
+  // further use of the MediaCodec API. Currently this means that one of the MediaCodec
+  // instances is hanging and can no longer be used in subsequent calls.
+  public interface MediaCodecVideoDecoderErrorCallback {
+ void onMediaCodecVideoDecoderCriticalError(int codecErrors);
+ }
+
+ public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
+ Logging.d(TAG, "Set error callback");
+ MediaCodecVideoDecoder.errorCallback = errorCallback;
+ }
+
+ // Functions to disable HW decoding - can be called from applications for platforms
+ // which have known HW decoding problems.
+ public static void disableVp8HwCodec() {
+ Logging.w(TAG, "VP8 decoding is disabled by application.");
+ hwDecoderDisabledTypes.add(VP8_MIME_TYPE);
+ }
+
+ public static void disableVp9HwCodec() {
+ Logging.w(TAG, "VP9 decoding is disabled by application.");
+ hwDecoderDisabledTypes.add(VP9_MIME_TYPE);
+ }
+
+ public static void disableH264HwCodec() {
+ Logging.w(TAG, "H.264 decoding is disabled by application.");
+ hwDecoderDisabledTypes.add(H264_MIME_TYPE);
+ }
+
+ // Functions to query if HW decoding is supported.
+ public static boolean isVp8HwSupported() {
+ return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) &&
+ (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
+ }
+
+ public static boolean isVp9HwSupported() {
+ return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) &&
+ (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
+ }
+
+ public static boolean isH264HwSupported() {
+ return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) &&
+ (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
+ }
+
+ public static void printStackTrace() {
+ if (runningInstance != null && runningInstance.mediaCodecThread != null) {
+ StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
+ if (mediaCodecStackTraces.length > 0) {
+ Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:");
+ for (StackTraceElement stackTrace : mediaCodecStackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ // Helper struct for findDecoder() below.
+ private static class DecoderProperties {
+ public DecoderProperties(String codecName, int colorFormat) {
+ this.codecName = codecName;
+ this.colorFormat = colorFormat;
+ }
+ public final String codecName; // OpenMax component name for VP8 codec.
+ public final int colorFormat; // Color format supported by codec.
+ }
+
+ private static DecoderProperties findDecoder(
+ String mime, String[] supportedCodecPrefixes) {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ return null; // MediaCodec.setParameters is missing.
+ }
+ Logging.d(TAG, "Trying to find HW decoder for mime " + mime);
+ for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+ MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+ if (info.isEncoder()) {
+ continue;
+ }
+ String name = null;
+ for (String mimeType : info.getSupportedTypes()) {
+ if (mimeType.equals(mime)) {
+ name = info.getName();
+ break;
+ }
+ }
+ if (name == null) {
+ continue; // No HW support in this codec; try the next one.
+ }
+ Logging.d(TAG, "Found candidate decoder " + name);
+
+      // Check if this is a supported decoder.
+ boolean supportedCodec = false;
+ for (String codecPrefix : supportedCodecPrefixes) {
+ if (name.startsWith(codecPrefix)) {
+ supportedCodec = true;
+ break;
+ }
+ }
+ if (!supportedCodec) {
+ continue;
+ }
+
+ // Check if codec supports either yuv420 or nv12.
+ CodecCapabilities capabilities =
+ info.getCapabilitiesForType(mime);
+ for (int colorFormat : capabilities.colorFormats) {
+ Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
+ }
+ for (int supportedColorFormat : supportedColorList) {
+ for (int codecColorFormat : capabilities.colorFormats) {
+ if (codecColorFormat == supportedColorFormat) {
+ // Found supported HW decoder.
+ Logging.d(TAG, "Found target decoder " + name +
+ ". Color: 0x" + Integer.toHexString(codecColorFormat));
+ return new DecoderProperties(name, codecColorFormat);
+ }
+ }
+ }
+ }
+ Logging.d(TAG, "No HW decoder found for mime " + mime);
+ return null; // No HW decoder.
+ }
+
+ private void checkOnMediaCodecThread() throws IllegalStateException {
+ if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
+ throw new IllegalStateException(
+ "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
+ " but is now called on " + Thread.currentThread());
+ }
+ }
+
+ // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+ private boolean initDecode(
+ VideoCodecType type, int width, int height,
+ SurfaceTextureHelper surfaceTextureHelper) {
+ if (mediaCodecThread != null) {
+ throw new RuntimeException("initDecode: Forgot to release()?");
+ }
+
+ String mime = null;
+ useSurface = (surfaceTextureHelper != null);
+ String[] supportedCodecPrefixes = null;
+ if (type == VideoCodecType.VIDEO_CODEC_VP8) {
+ mime = VP8_MIME_TYPE;
+ supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
+ } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
+ mime = VP9_MIME_TYPE;
+ supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
+ } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
+ mime = H264_MIME_TYPE;
+ supportedCodecPrefixes = supportedH264HwCodecPrefixes;
+ } else {
+ throw new RuntimeException("initDecode: Non-supported codec " + type);
+ }
+ DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
+ if (properties == null) {
+ throw new RuntimeException("Cannot find HW decoder for " + type);
+ }
+
+ Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
+ ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
+ ". Use Surface: " + useSurface);
+
+ runningInstance = this; // Decoder is now running and can be queried for stack traces.
+ mediaCodecThread = Thread.currentThread();
+ try {
+ this.width = width;
+ this.height = height;
+ stride = width;
+ sliceHeight = height;
+
+ if (useSurface) {
+ textureListener = new TextureListener(surfaceTextureHelper);
+ surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
+ }
+
+ MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
+ if (!useSurface) {
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
+ }
+ Logging.d(TAG, " Format: " + format);
+ mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName);
+ if (mediaCodec == null) {
+ Logging.e(TAG, "Can not create media decoder");
+ return false;
+ }
+ mediaCodec.configure(format, surface, null, 0);
+ mediaCodec.start();
+
+ colorFormat = properties.colorFormat;
+ outputBuffers = mediaCodec.getOutputBuffers();
+ inputBuffers = mediaCodec.getInputBuffers();
+ decodeStartTimeMs.clear();
+ hasDecodedFirstFrame = false;
+ dequeuedSurfaceOutputBuffers.clear();
+ droppedFrames = 0;
+ Logging.d(TAG, "Input buffers: " + inputBuffers.length +
+ ". Output buffers: " + outputBuffers.length);
+ return true;
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "initDecode failed", e);
+ return false;
+ }
+ }
+
+ // Resets the decoder so it can start decoding frames with new resolution.
+ // Flushes MediaCodec and clears decoder output buffers.
+ private void reset(int width, int height) {
+ if (mediaCodecThread == null || mediaCodec == null) {
+ throw new RuntimeException("Incorrect reset call for non-initialized decoder.");
+ }
+ Logging.d(TAG, "Java reset: " + width + " x " + height);
+
+ mediaCodec.flush();
+
+ this.width = width;
+ this.height = height;
+ decodeStartTimeMs.clear();
+ dequeuedSurfaceOutputBuffers.clear();
+ hasDecodedFirstFrame = false;
+ droppedFrames = 0;
+ }
+
+ private void release() {
+ Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
+ checkOnMediaCodecThread();
+
+    // Run MediaCodec stop() and release() on a separate thread since
+    // MediaCodec.stop() may sometimes hang.
+ final CountDownLatch releaseDone = new CountDownLatch(1);
+
+ Runnable runMediaCodecRelease = new Runnable() {
+ @Override
+ public void run() {
+ try {
+ Logging.d(TAG, "Java releaseDecoder on release thread");
+ mediaCodec.stop();
+ mediaCodec.release();
+ Logging.d(TAG, "Java releaseDecoder on release thread done");
+ } catch (Exception e) {
+ Logging.e(TAG, "Media decoder release failed", e);
+ }
+ releaseDone.countDown();
+ }
+ };
+ new Thread(runMediaCodecRelease).start();
+
+ if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media decoder release timeout");
+ codecErrors++;
+ if (errorCallback != null) {
+ Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
+ errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
+ }
+ }
+
+ mediaCodec = null;
+ mediaCodecThread = null;
+ runningInstance = null;
+ if (useSurface) {
+ surface.release();
+ surface = null;
+ textureListener.release();
+ }
+ Logging.d(TAG, "Java releaseDecoder done");
+ }
+
+ // Dequeue an input buffer and return its index, -1 if no input buffer is
+ // available, or -2 if the codec is no longer operative.
+ private int dequeueInputBuffer() {
+ checkOnMediaCodecThread();
+ try {
+ return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueIntputBuffer failed", e);
+ return -2;
+ }
+ }
+
+  private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStampUs,
+      long timeStampMs, long ntpTimeStampMs) {
+    checkOnMediaCodecThread();
+    try {
+      inputBuffers[inputBufferIndex].position(0);
+      inputBuffers[inputBufferIndex].limit(size);
+      decodeStartTimeMs.add(new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs,
+          ntpTimeStampMs));
+      mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStampUs, 0);
+ return true;
+    } catch (IllegalStateException e) {
+ Logging.e(TAG, "decode failed", e);
+ return false;
+ }
+ }
+
+ private static class TimeStamps {
+ public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) {
+ this.decodeStartTimeMs = decodeStartTimeMs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ }
+ // Time when this frame was queued for decoding.
+ private final long decodeStartTimeMs;
+ // Only used for bookkeeping in Java. Stores C++ inputImage._timeStamp value for input frame.
+ private final long timeStampMs;
+ // Only used for bookkeeping in Java. Stores C++ inputImage.ntp_time_ms_ value for input frame.
+ private final long ntpTimeStampMs;
+ }
+
+ // Helper struct for dequeueOutputBuffer() below.
+ private static class DecodedOutputBuffer {
+ public DecodedOutputBuffer(int index, int offset, int size, long presentationTimeStampMs,
+ long timeStampMs, long ntpTimeStampMs, long decodeTime, long endDecodeTime) {
+ this.index = index;
+ this.offset = offset;
+ this.size = size;
+ this.presentationTimeStampMs = presentationTimeStampMs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ this.decodeTimeMs = decodeTime;
+ this.endDecodeTimeMs = endDecodeTime;
+ }
+
+ private final int index;
+ private final int offset;
+ private final int size;
+ // Presentation timestamp returned in dequeueOutputBuffer call.
+ private final long presentationTimeStampMs;
+ // C++ inputImage._timeStamp value for output frame.
+ private final long timeStampMs;
+ // C++ inputImage.ntp_time_ms_ value for output frame.
+ private final long ntpTimeStampMs;
+ // Number of ms it took to decode this frame.
+ private final long decodeTimeMs;
+ // System time when this frame decoding finished.
+ private final long endDecodeTimeMs;
+ }
+
+ // Helper struct for dequeueTextureBuffer() below.
+ private static class DecodedTextureBuffer {
+ private final int textureID;
+ private final float[] transformMatrix;
+ // Presentation timestamp returned in dequeueOutputBuffer call.
+ private final long presentationTimeStampMs;
+ // C++ inputImage._timeStamp value for output frame.
+ private final long timeStampMs;
+ // C++ inputImage.ntp_time_ms_ value for output frame.
+ private final long ntpTimeStampMs;
+ // Number of ms it took to decode this frame.
+ private final long decodeTimeMs;
+    // Interval from when the frame finished decoding until this buffer was created.
+    // Since there is only one texture, this interval depends on the time from when
+    // a frame is decoded and provided to C++ until that frame is returned to the MediaCodec
+    // so that the texture can be updated with the next decoded frame.
+ private final long frameDelayMs;
+
+ // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
+ // that was dropped.
+ public DecodedTextureBuffer(int textureID, float[] transformMatrix,
+ long presentationTimeStampMs, long timeStampMs, long ntpTimeStampMs, long decodeTimeMs,
+ long frameDelay) {
+ this.textureID = textureID;
+ this.transformMatrix = transformMatrix;
+ this.presentationTimeStampMs = presentationTimeStampMs;
+ this.timeStampMs = timeStampMs;
+ this.ntpTimeStampMs = ntpTimeStampMs;
+ this.decodeTimeMs = decodeTimeMs;
+ this.frameDelayMs = frameDelay;
+ }
+ }
+
+  // Poll-based texture listener.
+ private static class TextureListener
+ implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+ private final SurfaceTextureHelper surfaceTextureHelper;
+ // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
+ private final Object newFrameLock = new Object();
+    // |bufferToRender| is non-null while waiting for the transition from addBufferToRender()
+    // to onTextureFrameAvailable().
+ private DecodedOutputBuffer bufferToRender;
+ private DecodedTextureBuffer renderedBuffer;
+
+ public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+ this.surfaceTextureHelper = surfaceTextureHelper;
+ surfaceTextureHelper.startListening(this);
+ }
+
+ public void addBufferToRender(DecodedOutputBuffer buffer) {
+ if (bufferToRender != null) {
+ Logging.e(TAG,
+ "Unexpected addBufferToRender() called while waiting for a texture.");
+ throw new IllegalStateException("Waiting for a texture.");
+ }
+ bufferToRender = buffer;
+ }
+
+ public boolean isWaitingForTexture() {
+ synchronized (newFrameLock) {
+ return bufferToRender != null;
+ }
+ }
+
+ // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+ @Override
+ public void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ synchronized (newFrameLock) {
+ if (renderedBuffer != null) {
+ Logging.e(TAG,
+ "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+ throw new IllegalStateException("Already holding a texture.");
+ }
+ // |timestampNs| is always zero on some Android versions.
+ renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
+ bufferToRender.presentationTimeStampMs, bufferToRender.timeStampMs,
+ bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
+ SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
+ bufferToRender = null;
+ newFrameLock.notifyAll();
+ }
+ }
+
+ // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
+ public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
+ synchronized (newFrameLock) {
+ if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
+ try {
+ newFrameLock.wait(timeoutMs);
+          } catch (InterruptedException e) {
+ // Restore the interrupted status by reinterrupting the thread.
+ Thread.currentThread().interrupt();
+ }
+ }
+ DecodedTextureBuffer returnedBuffer = renderedBuffer;
+ renderedBuffer = null;
+ return returnedBuffer;
+ }
+ }
+
+ public void release() {
+ // SurfaceTextureHelper.stopListening() will block until any onTextureFrameAvailable() in
+ // progress is done. Therefore, the call must be outside any synchronized
+ // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+ surfaceTextureHelper.stopListening();
+ synchronized (newFrameLock) {
+ if (renderedBuffer != null) {
+ surfaceTextureHelper.returnTextureFrame();
+ renderedBuffer = null;
+ }
+ }
+ }
+ }
+
+  // Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer.
+ // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+ // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+ // upon codec error.
+ private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
+ checkOnMediaCodecThread();
+ if (decodeStartTimeMs.isEmpty()) {
+ return null;
+ }
+ // Drain the decoder until receiving a decoded buffer or hitting
+ // MediaCodec.INFO_TRY_AGAIN_LATER.
+ final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ while (true) {
+ final int result = mediaCodec.dequeueOutputBuffer(
+ info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
+ switch (result) {
+ case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
+ outputBuffers = mediaCodec.getOutputBuffers();
+ Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
+ if (hasDecodedFirstFrame) {
+ throw new RuntimeException("Unexpected output buffer change event.");
+ }
+ break;
+ case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
+ MediaFormat format = mediaCodec.getOutputFormat();
+ Logging.d(TAG, "Decoder format changed: " + format.toString());
+          int newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
+          int newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
+          if (hasDecodedFirstFrame && (newWidth != width || newHeight != height)) {
+            throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
+                height + ". New " + newWidth + "*" + newHeight);
+          }
+          width = newWidth;
+          height = newHeight;
+
+ if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+ colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+ Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
+ if (!supportedColorList.contains(colorFormat)) {
+ throw new IllegalStateException("Non supported color format: " + colorFormat);
+ }
+ }
+ if (format.containsKey("stride")) {
+ stride = format.getInteger("stride");
+ }
+ if (format.containsKey("slice-height")) {
+ sliceHeight = format.getInteger("slice-height");
+ }
+ Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
+ stride = Math.max(width, stride);
+ sliceHeight = Math.max(height, sliceHeight);
+ break;
+ case MediaCodec.INFO_TRY_AGAIN_LATER:
+ return null;
+ default:
+ hasDecodedFirstFrame = true;
+ TimeStamps timeStamps = decodeStartTimeMs.remove();
+ long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs;
+ if (decodeTimeMs > MAX_DECODE_TIME_MS) {
+ Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms"
+ + ". Q size: " + decodeStartTimeMs.size()
+ + ". Might be caused by resuming H264 decoding after a pause.");
+ decodeTimeMs = MAX_DECODE_TIME_MS;
+ }
+ return new DecodedOutputBuffer(result,
+ info.offset,
+ info.size,
+ TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs),
+ timeStamps.timeStampMs,
+ timeStamps.ntpTimeStampMs,
+ decodeTimeMs,
+ SystemClock.elapsedRealtime());
+ }
+ }
+ }
+
+ // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
+ // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+ // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+ // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if
+ // a frame can't be returned.
+ private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
+ checkOnMediaCodecThread();
+ if (!useSurface) {
+ throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
+ }
+ DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
+ if (outputBuffer != null) {
+ dequeuedSurfaceOutputBuffers.add(outputBuffer);
+ }
+
+    maybeRenderDecodedTextureBuffer();
+    // Check if a texture is ready now, waiting at most |dequeueTimeoutMs|.
+ DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs);
+ if (renderedBuffer != null) {
+      maybeRenderDecodedTextureBuffer();
+ return renderedBuffer;
+ }
+
+    if (dequeuedSurfaceOutputBuffers.size()
+            >= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
+        || (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty())) {
+ ++droppedFrames;
+      // Drop the oldest frame still in dequeuedSurfaceOutputBuffers. The overall oldest
+      // frame is owned by |textureListener| and can't be dropped, since
+      // mediaCodec.releaseOutputBuffer has already been called for it.
+ final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove();
+ if (dequeueTimeoutMs > 0) {
+ // TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
+ // return the one and only texture even if it does not render.
+ Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
+ + droppedFrame.presentationTimeStampMs +
+ ". Total number of dropped frames: " + droppedFrames);
+ } else {
+ Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size() +
+ ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs +
+ ". Total number of dropped frames: " + droppedFrames);
+ }
+
+ mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
+ return new DecodedTextureBuffer(0, null,
+ droppedFrame.presentationTimeStampMs, droppedFrame.timeStampMs,
+ droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
+ SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
+ }
+ return null;
+ }
+
+  private void maybeRenderDecodedTextureBuffer() {
+ if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) {
+ return;
+ }
+ // Get the first frame in the queue and render to the decoder output surface.
+ final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
+ textureListener.addBufferToRender(buffer);
+ mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
+ }
+
+ // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
+ // non-surface decoding.
+ // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
+ // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
+ // MediaCodec.CodecException upon codec error.
+ private void returnDecodedOutputBuffer(int index)
+ throws IllegalStateException, MediaCodec.CodecException {
+ checkOnMediaCodecThread();
+ if (useSurface) {
+ throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
+ }
+ mediaCodec.releaseOutputBuffer(index, false /* render */);
+ }
+}
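
The decode loop itself is driven from the native side; the application-facing surface of this class is the static query, disable, and error-callback entry points. A sketch of typical use (wiring illustrative):

    if (!MediaCodecVideoDecoder.isVp8HwSupported()) {
      // Fall back to a software VP8 decoder.
    }
    MediaCodecVideoDecoder.setErrorCallback(
        new MediaCodecVideoDecoder.MediaCodecVideoDecoderErrorCallback() {
          @Override
          public void onMediaCodecVideoDecoderCriticalError(int codecErrors) {
            // A codec instance is hanging; e.g. disable HW decoding for new calls.
            MediaCodecVideoDecoder.disableVp8HwCodec();
          }
        });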
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoEncoder.java b/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoEncoder.java
new file mode 100644
index 0000000..f79e317
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -0,0 +1,650 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Bundle;
+import android.view.Surface;
+
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+// Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
+// This class is an implementation detail of the Java PeerConnection API.
+@TargetApi(19)
+@SuppressWarnings("deprecation")
+public class MediaCodecVideoEncoder {
+ // This class is constructed, operated, and destroyed by its C++ incarnation,
+ // so the class and its methods have non-public visibility. The API this
+ // class exposes aims to mimic the webrtc::VideoEncoder API as closely as
+  // possible to minimize the amount of translation work necessary.
+
+ private static final String TAG = "MediaCodecVideoEncoder";
+
+ // Tracks webrtc::VideoCodecType.
+ public enum VideoCodecType {
+ VIDEO_CODEC_VP8,
+ VIDEO_CODEC_VP9,
+ VIDEO_CODEC_H264
+ }
+
+  private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec release, in ms.
+ private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
+ private static final int BITRATE_ADJUSTMENT_FPS = 30;
+ // Active running encoder instance. Set in initEncode() (called from native code)
+ // and reset to null in release() call.
+ private static MediaCodecVideoEncoder runningInstance = null;
+ private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
+ private static int codecErrors = 0;
+ // List of disabled codec types - can be set from application.
+ private static Set<String> hwEncoderDisabledTypes = new HashSet<String>();
+
+ private Thread mediaCodecThread;
+ private MediaCodec mediaCodec;
+ private ByteBuffer[] outputBuffers;
+ private EglBase14 eglBase;
+ private int width;
+ private int height;
+ private Surface inputSurface;
+ private GlRectDrawer drawer;
+
+ private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+ private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
+ private static final String H264_MIME_TYPE = "video/avc";
+
+ // Class describing supported media codec properties.
+ private static class MediaCodecProperties {
+ public final String codecPrefix;
+ // Minimum Android SDK required for this codec to be used.
+ public final int minSdk;
+    // Set if the encoder implementation does not use frame timestamps to calculate the frame
+    // bitrate budget, but instead relies on the initial fps configuration, assuming that all
+    // frames arrive at a fixed initial frame rate. Bitrate adjustment is required in this case.
+ public final boolean bitrateAdjustmentRequired;
+
+ MediaCodecProperties(
+ String codecPrefix, int minSdk, boolean bitrateAdjustmentRequired) {
+ this.codecPrefix = codecPrefix;
+ this.minSdk = minSdk;
+ this.bitrateAdjustmentRequired = bitrateAdjustmentRequired;
+ }
+ }
+
+ // List of supported HW VP8 encoders.
+ private static final MediaCodecProperties qcomVp8HwProperties = new MediaCodecProperties(
+ "OMX.qcom.", Build.VERSION_CODES.KITKAT, false /* bitrateAdjustmentRequired */);
+ private static final MediaCodecProperties exynosVp8HwProperties = new MediaCodecProperties(
+ "OMX.Exynos.", Build.VERSION_CODES.M, false /* bitrateAdjustmentRequired */);
+ private static final MediaCodecProperties intelVp8HwProperties = new MediaCodecProperties(
+ "OMX.Intel.", Build.VERSION_CODES.LOLLIPOP, false /* bitrateAdjustmentRequired */);
+ private static final MediaCodecProperties[] vp8HwList = new MediaCodecProperties[] {
+ qcomVp8HwProperties, exynosVp8HwProperties, intelVp8HwProperties
+ };
+
+ // List of supported HW VP9 encoders.
+ private static final MediaCodecProperties qcomVp9HwProperties = new MediaCodecProperties(
+ "OMX.qcom.", Build.VERSION_CODES.M, false /* bitrateAdjustmentRequired */);
+ private static final MediaCodecProperties exynosVp9HwProperties = new MediaCodecProperties(
+ "OMX.Exynos.", Build.VERSION_CODES.M, false /* bitrateAdjustmentRequired */);
+ private static final MediaCodecProperties[] vp9HwList = new MediaCodecProperties[] {
+ qcomVp9HwProperties, exynosVp9HwProperties
+ };
+
+ // List of supported HW H.264 encoders.
+ private static final MediaCodecProperties qcomH264HwProperties = new MediaCodecProperties(
+ "OMX.qcom.", Build.VERSION_CODES.KITKAT, false /* bitrateAdjustmentRequired */);
+ private static final MediaCodecProperties exynosH264HwProperties = new MediaCodecProperties(
+ "OMX.Exynos.", Build.VERSION_CODES.LOLLIPOP, true /* bitrateAdjustmentRequired */);
+ private static final MediaCodecProperties[] h264HwList = new MediaCodecProperties[] {
+ qcomH264HwProperties, exynosH264HwProperties
+ };
+
+ // List of devices with poor H.264 encoder quality.
+ private static final String[] H264_HW_EXCEPTION_MODELS = new String[] {
+    // The HW H.264 encoder on the devices below has poor bitrate control - the
+    // actual bitrate deviates a lot from the target value.
+ "SAMSUNG-SGH-I337",
+ "Nexus 7",
+ "Nexus 4"
+ };
+
+ // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
+ // in OMX_Video.h
+ private static final int VIDEO_ControlRateConstant = 2;
+ // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+ // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+ private static final int
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+ // Allowable color formats supported by codec - in order of preference.
+ private static final int[] supportedColorList = {
+ CodecCapabilities.COLOR_FormatYUV420Planar,
+ CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+ CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
+ };
+ private static final int[] supportedSurfaceColorList = {
+ CodecCapabilities.COLOR_FormatSurface
+ };
+ private VideoCodecType type;
+ private int colorFormat; // Used by native code.
+ private boolean bitrateAdjustmentRequired;
+
+ // SPS and PPS NALs (Config frame) for H.264.
+ private ByteBuffer configData = null;
+
+  // MediaCodec error handler - invoked when a critical error happens that may prevent
+  // further use of the MediaCodec API. Currently this means that one of the MediaCodec
+  // instances is hanging and can no longer be used in subsequent calls.
+  public interface MediaCodecVideoEncoderErrorCallback {
+ void onMediaCodecVideoEncoderCriticalError(int codecErrors);
+ }
+
+ public static void setErrorCallback(MediaCodecVideoEncoderErrorCallback errorCallback) {
+ Logging.d(TAG, "Set error callback");
+ MediaCodecVideoEncoder.errorCallback = errorCallback;
+ }
+
+ // Functions to disable HW encoding - can be called from applications for platforms
+  // which have known HW encoding problems.
+ public static void disableVp8HwCodec() {
+ Logging.w(TAG, "VP8 encoding is disabled by application.");
+ hwEncoderDisabledTypes.add(VP8_MIME_TYPE);
+ }
+
+ public static void disableVp9HwCodec() {
+ Logging.w(TAG, "VP9 encoding is disabled by application.");
+ hwEncoderDisabledTypes.add(VP9_MIME_TYPE);
+ }
+
+ public static void disableH264HwCodec() {
+ Logging.w(TAG, "H.264 encoding is disabled by application.");
+ hwEncoderDisabledTypes.add(H264_MIME_TYPE);
+ }
+
+ // Functions to query if HW encoding is supported.
+ public static boolean isVp8HwSupported() {
+ return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
+ (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedColorList) != null);
+ }
+
+ public static boolean isVp9HwSupported() {
+ return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
+ (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
+ }
+
+ public static boolean isH264HwSupported() {
+ return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
+ (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null);
+ }
+
+ public static boolean isVp8HwSupportedUsingTextures() {
+ return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) &&
+ (findHwEncoder(VP8_MIME_TYPE, vp8HwList, supportedSurfaceColorList) != null);
+ }
+
+ public static boolean isVp9HwSupportedUsingTextures() {
+ return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) &&
+ (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null);
+ }
+
+ public static boolean isH264HwSupportedUsingTextures() {
+ return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) &&
+ (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null);
+ }
+
+ // Helper struct for findHwEncoder() below.
+ private static class EncoderProperties {
+ public EncoderProperties(String codecName, int colorFormat, boolean bitrateAdjustment) {
+ this.codecName = codecName;
+ this.colorFormat = colorFormat;
+ this.bitrateAdjustment = bitrateAdjustment;
+ }
+ public final String codecName; // OpenMax component name for HW codec.
+ public final int colorFormat; // Color format supported by codec.
+ public final boolean bitrateAdjustment; // true if bitrate adjustment workaround is required.
+ }
+
+ private static EncoderProperties findHwEncoder(
+ String mime, MediaCodecProperties[] supportedHwCodecProperties, int[] colorList) {
+ // MediaCodec.setParameters is missing for JB and below, so bitrate
+    // cannot be adjusted dynamically.
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
+ return null;
+ }
+
+ // Check if device is in H.264 exception list.
+ if (mime.equals(H264_MIME_TYPE)) {
+ List<String> exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS);
+ if (exceptionModels.contains(Build.MODEL)) {
+ Logging.w(TAG, "Model: " + Build.MODEL + " has black listed H.264 encoder.");
+ return null;
+ }
+ }
+
+ for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+ MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+ if (!info.isEncoder()) {
+ continue;
+ }
+ String name = null;
+ for (String mimeType : info.getSupportedTypes()) {
+ if (mimeType.equals(mime)) {
+ name = info.getName();
+ break;
+ }
+ }
+ if (name == null) {
+ continue; // No HW support in this codec; try the next one.
+ }
+ Logging.v(TAG, "Found candidate encoder " + name);
+
+      // Check if this is a supported HW encoder.
+ boolean supportedCodec = false;
+ boolean bitrateAdjustmentRequired = false;
+ for (MediaCodecProperties codecProperties : supportedHwCodecProperties) {
+ if (name.startsWith(codecProperties.codecPrefix)) {
+ if (Build.VERSION.SDK_INT < codecProperties.minSdk) {
+ Logging.w(TAG, "Codec " + name + " is disabled due to SDK version " +
+ Build.VERSION.SDK_INT);
+ continue;
+ }
+ if (codecProperties.bitrateAdjustmentRequired) {
+ Logging.w(TAG, "Codec " + name + " does not use frame timestamps.");
+ bitrateAdjustmentRequired = true;
+ }
+ supportedCodec = true;
+ break;
+ }
+ }
+ if (!supportedCodec) {
+ continue;
+ }
+
+ // Check if HW codec supports known color format.
+ CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
+ for (int colorFormat : capabilities.colorFormats) {
+ Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
+ }
+
+ for (int supportedColorFormat : colorList) {
+ for (int codecColorFormat : capabilities.colorFormats) {
+ if (codecColorFormat == supportedColorFormat) {
+ // Found supported HW encoder.
+ Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name +
+ ". Color: 0x" + Integer.toHexString(codecColorFormat));
+ return new EncoderProperties(name, codecColorFormat, bitrateAdjustmentRequired);
+ }
+ }
+ }
+ }
+ return null; // No HW encoder.
+ }
+
+ private void checkOnMediaCodecThread() {
+ if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
+ throw new RuntimeException(
+ "MediaCodecVideoEncoder previously operated on " + mediaCodecThread +
+ " but is now called on " + Thread.currentThread());
+ }
+ }
+
+ public static void printStackTrace() {
+ if (runningInstance != null && runningInstance.mediaCodecThread != null) {
+ StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
+ if (mediaCodecStackTraces.length > 0) {
+ Logging.d(TAG, "MediaCodecVideoEncoder stacks trace:");
+ for (StackTraceElement stackTrace : mediaCodecStackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ static MediaCodec createByCodecName(String codecName) {
+ try {
+      // In the L SDK this call can throw IOException, so catch a generic
+      // exception in order to work on all SDK levels.
+ return MediaCodec.createByCodecName(codecName);
+ } catch (Exception e) {
+ return null;
+ }
+ }
+
+ boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
+ EglBase14.Context sharedContext) {
+ final boolean useSurface = sharedContext != null;
+ Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
+ ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
+
+ this.width = width;
+ this.height = height;
+ if (mediaCodecThread != null) {
+ throw new RuntimeException("Forgot to release()?");
+ }
+ EncoderProperties properties = null;
+ String mime = null;
+ int keyFrameIntervalSec = 0;
+ if (type == VideoCodecType.VIDEO_CODEC_VP8) {
+ mime = VP8_MIME_TYPE;
+ properties = findHwEncoder(
+ VP8_MIME_TYPE, vp8HwList, useSurface ? supportedSurfaceColorList : supportedColorList);
+ keyFrameIntervalSec = 100;
+ } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
+ mime = VP9_MIME_TYPE;
+ properties = findHwEncoder(
+ VP9_MIME_TYPE, vp9HwList, useSurface ? supportedSurfaceColorList : supportedColorList);
+ keyFrameIntervalSec = 100;
+ } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
+ mime = H264_MIME_TYPE;
+ properties = findHwEncoder(
+ H264_MIME_TYPE, h264HwList, useSurface ? supportedSurfaceColorList : supportedColorList);
+ keyFrameIntervalSec = 20;
+ }
+ if (properties == null) {
+ throw new RuntimeException("Can not find HW encoder for " + type);
+ }
+ runningInstance = this; // Encoder is now running and can be queried for stack traces.
+ colorFormat = properties.colorFormat;
+ bitrateAdjustmentRequired = properties.bitrateAdjustment;
+ if (bitrateAdjustmentRequired) {
+ fps = BITRATE_ADJUSTMENT_FPS;
+ }
+ Logging.d(TAG, "Color format: " + colorFormat +
+ ". Bitrate adjustment: " + bitrateAdjustmentRequired);
+
+ mediaCodecThread = Thread.currentThread();
+ try {
+ MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
+ format.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * kbps);
+ format.setInteger("bitrate-mode", VIDEO_ControlRateConstant);
+ format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
+ format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
+ Logging.d(TAG, " Format: " + format);
+ mediaCodec = createByCodecName(properties.codecName);
+ this.type = type;
+ if (mediaCodec == null) {
+ Logging.e(TAG, "Can not create media encoder");
+ return false;
+ }
+ mediaCodec.configure(
+ format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+ if (useSurface) {
+ eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
+ // Create an input surface and keep a reference since we must release the surface when done.
+ inputSurface = mediaCodec.createInputSurface();
+ eglBase.createSurface(inputSurface);
+ drawer = new GlRectDrawer();
+ }
+ mediaCodec.start();
+ outputBuffers = mediaCodec.getOutputBuffers();
+ Logging.d(TAG, "Output buffers: " + outputBuffers.length);
+
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "initEncode failed", e);
+ return false;
+ }
+ return true;
+ }
+
+ ByteBuffer[] getInputBuffers() {
+ ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
+ Logging.d(TAG, "Input buffers: " + inputBuffers.length);
+ return inputBuffers;
+ }
+
+ boolean encodeBuffer(
+ boolean isKeyframe, int inputBuffer, int size,
+ long presentationTimestampUs) {
+ checkOnMediaCodecThread();
+ try {
+ if (isKeyframe) {
+ // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
+ // indicate this in queueInputBuffer() below and guarantee _this_ frame
+ // be encoded as a key frame, but sadly that flag is ignored. Instead,
+ // we request a key frame "soon".
+ Logging.d(TAG, "Sync frame request");
+ Bundle b = new Bundle();
+ b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ mediaCodec.setParameters(b);
+ }
+ mediaCodec.queueInputBuffer(
+ inputBuffer, 0, size, presentationTimestampUs, 0);
+ return true;
+    } catch (IllegalStateException e) {
+ Logging.e(TAG, "encodeBuffer failed", e);
+ return false;
+ }
+ }
+
+ boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
+ long presentationTimestampUs) {
+ checkOnMediaCodecThread();
+ try {
+ if (isKeyframe) {
+ Logging.d(TAG, "Sync frame request");
+ Bundle b = new Bundle();
+ b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+ mediaCodec.setParameters(b);
+ }
+ eglBase.makeCurrent();
+ // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
+ // but it's a workaround for bug webrtc:5147.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height);
+ eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
+ return true;
+    } catch (RuntimeException e) {
+ Logging.e(TAG, "encodeTexture failed", e);
+ return false;
+ }
+ }
+
+ void release() {
+ Logging.d(TAG, "Java releaseEncoder");
+ checkOnMediaCodecThread();
+
+    // Run MediaCodec stop() and release() on a separate thread since
+    // MediaCodec.stop() may sometimes hang.
+ final CountDownLatch releaseDone = new CountDownLatch(1);
+
+ Runnable runMediaCodecRelease = new Runnable() {
+ @Override
+ public void run() {
+ try {
+ Logging.d(TAG, "Java releaseEncoder on release thread");
+ mediaCodec.stop();
+ mediaCodec.release();
+ Logging.d(TAG, "Java releaseEncoder on release thread done");
+ } catch (Exception e) {
+ Logging.e(TAG, "Media encoder release failed", e);
+ }
+ releaseDone.countDown();
+ }
+ };
+ new Thread(runMediaCodecRelease).start();
+
+ if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
+ Logging.e(TAG, "Media encoder release timeout");
+ codecErrors++;
+ if (errorCallback != null) {
+ Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
+ errorCallback.onMediaCodecVideoEncoderCriticalError(codecErrors);
+ }
+ }
+
+ mediaCodec = null;
+ mediaCodecThread = null;
+ if (drawer != null) {
+ drawer.release();
+ drawer = null;
+ }
+ if (eglBase != null) {
+ eglBase.release();
+ eglBase = null;
+ }
+ if (inputSurface != null) {
+ inputSurface.release();
+ inputSurface = null;
+ }
+ runningInstance = null;
+ Logging.d(TAG, "Java releaseEncoder done");
+ }
+
+ private boolean setRates(int kbps, int frameRate) {
+ checkOnMediaCodecThread();
+ int codecBitrate = 1000 * kbps;
+ if (bitrateAdjustmentRequired && frameRate > 0) {
+ codecBitrate = BITRATE_ADJUSTMENT_FPS * codecBitrate / frameRate;
+ Logging.v(TAG, "setRates: " + kbps + " -> " + (codecBitrate / 1000)
+ + " kbps. Fps: " + frameRate);
+ } else {
+ Logging.v(TAG, "setRates: " + kbps);
+ }
+ try {
+ Bundle params = new Bundle();
+ params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, codecBitrate);
+ mediaCodec.setParameters(params);
+ return true;
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "setRates failed", e);
+ return false;
+ }
+ }
+
+ // Dequeue an input buffer and return its index, -1 if no input buffer is
+ // available, or -2 if the codec is no longer operative.
+ int dequeueInputBuffer() {
+ checkOnMediaCodecThread();
+ try {
+ return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueIntputBuffer failed", e);
+ return -2;
+ }
+ }
+
+ // Helper struct for dequeueOutputBuffer() below.
+ static class OutputBufferInfo {
+ public OutputBufferInfo(
+ int index, ByteBuffer buffer,
+ boolean isKeyFrame, long presentationTimestampUs) {
+ this.index = index;
+ this.buffer = buffer;
+ this.isKeyFrame = isKeyFrame;
+ this.presentationTimestampUs = presentationTimestampUs;
+ }
+
+ public final int index;
+ public final ByteBuffer buffer;
+ public final boolean isKeyFrame;
+ public final long presentationTimestampUs;
+ }
+
+ // Dequeue and return an output buffer, or null if no output is ready. Return
+ // a fake OutputBufferInfo with index -1 if the codec is no longer operable.
+ OutputBufferInfo dequeueOutputBuffer() {
+ checkOnMediaCodecThread();
+ try {
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+      // Check if this is a config frame and save the configuration data.
+ if (result >= 0) {
+ boolean isConfigFrame =
+ (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
+ if (isConfigFrame) {
+ Logging.d(TAG, "Config frame generated. Offset: " + info.offset +
+ ". Size: " + info.size);
+ configData = ByteBuffer.allocateDirect(info.size);
+ outputBuffers[result].position(info.offset);
+ outputBuffers[result].limit(info.offset + info.size);
+ configData.put(outputBuffers[result]);
+ // Release buffer back.
+ mediaCodec.releaseOutputBuffer(result, false);
+ // Query next output.
+ result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+ }
+ }
+ if (result >= 0) {
+ // MediaCodec doesn't care about Buffer position/remaining/etc so we can
+ // mess with them to get a slice and avoid having to pass extra
+ // (BufferInfo-related) parameters back to C++.
+ ByteBuffer outputBuffer = outputBuffers[result].duplicate();
+ outputBuffer.position(info.offset);
+ outputBuffer.limit(info.offset + info.size);
+ // Check key frame flag.
+ boolean isKeyFrame =
+ (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
+ if (isKeyFrame) {
+ Logging.d(TAG, "Sync frame generated");
+ }
+ if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
+ Logging.d(TAG, "Appending config frame of size " + configData.capacity() +
+ " to output buffer with offset " + info.offset + ", size " +
+ info.size);
+          // For H.264 key frames, append the SPS and PPS NALs at the start.
+ ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(
+ configData.capacity() + info.size);
+ configData.rewind();
+ keyFrameBuffer.put(configData);
+ keyFrameBuffer.put(outputBuffer);
+ keyFrameBuffer.position(0);
+ return new OutputBufferInfo(result, keyFrameBuffer,
+ isKeyFrame, info.presentationTimeUs);
+ } else {
+ return new OutputBufferInfo(result, outputBuffer.slice(),
+ isKeyFrame, info.presentationTimeUs);
+ }
+ } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ outputBuffers = mediaCodec.getOutputBuffers();
+ return dequeueOutputBuffer();
+ } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ return dequeueOutputBuffer();
+ } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
+ return null;
+ }
+ throw new RuntimeException("dequeueOutputBuffer: " + result);
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "dequeueOutputBuffer failed", e);
+ return new OutputBufferInfo(-1, null, false, -1);
+ }
+ }
+
+ // Release a dequeued output buffer back to the codec for re-use. Return
+ // false if the codec is no longer operable.
+ boolean releaseOutputBuffer(int index) {
+ checkOnMediaCodecThread();
+ try {
+ mediaCodec.releaseOutputBuffer(index, false);
+ return true;
+ } catch (IllegalStateException e) {
+ Logging.e(TAG, "releaseOutputBuffer failed", e);
+ return false;
+ }
+ }
+}
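
As with the decoder, encode calls are made from the C++ side; applications typically only gate codec selection and register an error callback. Illustrative sketch:

    final boolean offerH264 = MediaCodecVideoEncoder.isH264HwSupported();
    MediaCodecVideoEncoder.setErrorCallback(
        new MediaCodecVideoEncoder.MediaCodecVideoEncoderErrorCallback() {
          @Override
          public void onMediaCodecVideoEncoderCriticalError(int codecErrors) {
            MediaCodecVideoEncoder.disableH264HwCodec();
          }
        });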
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaConstraints.java b/webrtc/api/android/java/src/org/webrtc/MediaConstraints.java
new file mode 100644
index 0000000..1cab682
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/MediaConstraints.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Description of media constraints for {@code MediaStream} and
+ * {@code PeerConnection}.
+ */
+public class MediaConstraints {
+ /** Simple String key/value pair. */
+ public static class KeyValuePair {
+ private final String key;
+ private final String value;
+
+ public KeyValuePair(String key, String value) {
+ this.key = key;
+ this.value = value;
+ }
+
+ public String getKey() {
+ return key;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+    @Override
+    public String toString() {
+ return key + ": " + value;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (this == other) {
+ return true;
+ }
+ if (other == null || getClass() != other.getClass()) {
+ return false;
+ }
+ KeyValuePair that = (KeyValuePair)other;
+ return key.equals(that.key) && value.equals(that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return key.hashCode() + value.hashCode();
+ }
+ }
+
+ public final List<KeyValuePair> mandatory;
+ public final List<KeyValuePair> optional;
+
+ public MediaConstraints() {
+ mandatory = new LinkedList<KeyValuePair>();
+ optional = new LinkedList<KeyValuePair>();
+ }
+
+ private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
+ StringBuilder builder = new StringBuilder("[");
+ for (KeyValuePair pair : list) {
+ if (builder.length() > 1) {
+ builder.append(", ");
+ }
+ builder.append(pair.toString());
+ }
+ return builder.append("]").toString();
+ }
+
+  @Override
+  public String toString() {
+ return "mandatory: " + stringifyKeyValuePairList(mandatory) +
+ ", optional: " + stringifyKeyValuePairList(optional);
+ }
+}
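
Typical construction, for reference (the key names are illustrative examples of constraints understood by the native layer):

    MediaConstraints sdpConstraints = new MediaConstraints();
    sdpConstraints.mandatory.add(
        new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    sdpConstraints.optional.add(
        new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));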
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaSource.java b/webrtc/api/android/java/src/org/webrtc/MediaSource.java
new file mode 100644
index 0000000..070a95d
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/MediaSource.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaSourceInterface. */
+public class MediaSource {
+ /** Tracks MediaSourceInterface.SourceState */
+ public enum State {
+ INITIALIZING, LIVE, ENDED, MUTED
+ }
+
+ final long nativeSource; // Package-protected for PeerConnectionFactory.
+
+ public MediaSource(long nativeSource) {
+ this.nativeSource = nativeSource;
+ }
+
+ public State state() {
+ return nativeState(nativeSource);
+ }
+
+ public void dispose() {
+ free(nativeSource);
+ }
+
+ private static native State nativeState(long pointer);
+
+ private static native void free(long nativeSource);
+}
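
Lifetime sketch (|factory| assumed to be an existing PeerConnectionFactory): the Java object only wraps a native pointer, so dispose() must be called explicitly when the source is no longer needed:

    AudioSource audioSource = factory.createAudioSource(new MediaConstraints());
    if (audioSource.state() == MediaSource.State.LIVE) {
      // Create tracks from the source while it is live.
    }
    audioSource.dispose(); // Frees the underlying native object.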
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaStream.java b/webrtc/api/android/java/src/org/webrtc/MediaStream.java
new file mode 100644
index 0000000..2128b73
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/MediaStream.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+
+/** Java wrapper for a C++ MediaStreamInterface. */
+public class MediaStream {
+ public final LinkedList<AudioTrack> audioTracks;
+ public final LinkedList<VideoTrack> videoTracks;
+ public final LinkedList<VideoTrack> preservedVideoTracks;
+ // Package-protected for PeerConnection.
+ final long nativeStream;
+
+ public MediaStream(long nativeStream) {
+ audioTracks = new LinkedList<AudioTrack>();
+ videoTracks = new LinkedList<VideoTrack>();
+ preservedVideoTracks = new LinkedList<VideoTrack>();
+ this.nativeStream = nativeStream;
+ }
+
+ public boolean addTrack(AudioTrack track) {
+ if (nativeAddAudioTrack(nativeStream, track.nativeTrack)) {
+ audioTracks.add(track);
+ return true;
+ }
+ return false;
+ }
+
+ public boolean addTrack(VideoTrack track) {
+ if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
+ videoTracks.add(track);
+ return true;
+ }
+ return false;
+ }
+
+ // Tracks added via addTrack() are released automatically once
+ // MediaStream.dispose() is called. If a video track needs to outlive the
+ // MediaStream, add it with addPreservedTrack() instead.
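+ //
+ // Illustrative sketch (hypothetical track names; assumes tracks created via a
+ // PeerConnectionFactory):
+ //   stream.addTrack(audioTrack);           // released by stream.dispose()
+ //   stream.addPreservedTrack(videoTrack);  // survives stream.dispose()
+ //   ...
+ //   stream.dispose();       // audioTrack is freed, videoTrack is not
+ //   videoTrack.dispose();   // the caller releases the preserved track itself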
+ public boolean addPreservedTrack(VideoTrack track) {
+ if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
+ preservedVideoTracks.add(track);
+ return true;
+ }
+ return false;
+ }
+
+ public boolean removeTrack(AudioTrack track) {
+ audioTracks.remove(track);
+ return nativeRemoveAudioTrack(nativeStream, track.nativeTrack);
+ }
+
+ public boolean removeTrack(VideoTrack track) {
+ videoTracks.remove(track);
+ preservedVideoTracks.remove(track);
+ return nativeRemoveVideoTrack(nativeStream, track.nativeTrack);
+ }
+
+ public void dispose() {
+ // Remove and release previously added audio and video tracks.
+ while (!audioTracks.isEmpty()) {
+ AudioTrack track = audioTracks.getFirst();
+ removeTrack(track);
+ track.dispose();
+ }
+ while (!videoTracks.isEmpty()) {
+ VideoTrack track = videoTracks.getFirst();
+ removeTrack(track);
+ track.dispose();
+ }
+ // Remove, but do not release preserved video tracks.
+ while (!preservedVideoTracks.isEmpty()) {
+ removeTrack(preservedVideoTracks.getFirst());
+ }
+ free(nativeStream);
+ }
+
+ public String label() {
+ return nativeLabel(nativeStream);
+ }
+
+ public String toString() {
+ return "[" + label() + ":A=" + audioTracks.size() +
+ ":V=" + videoTracks.size() + "]";
+ }
+
+ private static native boolean nativeAddAudioTrack(
+ long nativeStream, long nativeAudioTrack);
+
+ private static native boolean nativeAddVideoTrack(
+ long nativeStream, long nativeVideoTrack);
+
+ private static native boolean nativeRemoveAudioTrack(
+ long nativeStream, long nativeAudioTrack);
+
+ private static native boolean nativeRemoveVideoTrack(
+ long nativeStream, long nativeVideoTrack);
+
+ private static native String nativeLabel(long nativeStream);
+
+ private static native void free(long nativeStream);
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/MediaStreamTrack.java b/webrtc/api/android/java/src/org/webrtc/MediaStreamTrack.java
new file mode 100644
index 0000000..49b2210
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/MediaStreamTrack.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ MediaStreamTrackInterface. */
+public class MediaStreamTrack {
+ /** Tracks MediaStreamTrackInterface.TrackState */
+ public enum State { LIVE, ENDED }
+
+ final long nativeTrack;
+
+ public MediaStreamTrack(long nativeTrack) {
+ this.nativeTrack = nativeTrack;
+ }
+
+ public String id() {
+ return nativeId(nativeTrack);
+ }
+
+ public String kind() {
+ return nativeKind(nativeTrack);
+ }
+
+ public boolean enabled() {
+ return nativeEnabled(nativeTrack);
+ }
+
+ public boolean setEnabled(boolean enable) {
+ return nativeSetEnabled(nativeTrack, enable);
+ }
+
+ public State state() {
+ return nativeState(nativeTrack);
+ }
+
+ public void dispose() {
+ free(nativeTrack);
+ }
+
+ private static native String nativeId(long nativeTrack);
+
+ private static native String nativeKind(long nativeTrack);
+
+ private static native boolean nativeEnabled(long nativeTrack);
+
+ private static native boolean nativeSetEnabled(
+ long nativeTrack, boolean enabled);
+
+ private static native State nativeState(long nativeTrack);
+
+ private static native void free(long nativeTrack);
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/Metrics.java b/webrtc/api/android/java/src/org/webrtc/Metrics.java
new file mode 100644
index 0000000..90209ad
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/Metrics.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.HashMap;
+import java.util.Map;
+
+// Java-side of androidmetrics_jni.cc.
+//
+// RTC histograms can be queried through the API, getAndReset().
+// The returned map holds the name of a histogram and its samples.
+//
+// Example of |map| with one histogram:
+// |name|: "WebRTC.Video.InputFramesPerSecond"
+// |min|: 1
+// |max|: 100
+// |bucketCount|: 50
+// |samples|: [30]:1
+//
+// Most histograms are not updated frequently (e.g. most video metrics are an
+// average over the call and recorded when a stream is removed).
+// The metrics can for example be retrieved when a peer connection is closed.
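+//
+// Illustrative sketch (assumes a call has just ended; names are hypothetical):
+//   Metrics.enable();  // before the PeerConnectionFactory is created
+//   ...
+//   Metrics metrics = Metrics.getAndReset();
+//   for (Map.Entry<String, Metrics.HistogramInfo> entry : metrics.map.entrySet()) {
+//     Logging.d("Metrics", entry.getKey() + ": " + entry.getValue().samples);
+//   }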
+
+public class Metrics {
+ static {
+ System.loadLibrary("jingle_peerconnection_so");
+ }
+ public final Map<String, HistogramInfo> map =
+ new HashMap<String, HistogramInfo>(); // <name, HistogramInfo>
+
+ /**
+ * Class holding histogram information.
+ */
+ public static class HistogramInfo {
+ public final int min;
+ public final int max;
+ public final int bucketCount;
+ public final Map<Integer, Integer> samples =
+ new HashMap<Integer, Integer>(); // <value, # of events>
+
+ public HistogramInfo(int min, int max, int bucketCount) {
+ this.min = min;
+ this.max = max;
+ this.bucketCount = bucketCount;
+ }
+
+ public void addSample(int value, int numEvents) {
+ samples.put(value, numEvents);
+ }
+ }
+
+ private void add(String name, HistogramInfo info) {
+ map.put(name, info);
+ }
+
+ // Enables gathering of metrics (which can be fetched with getAndReset()).
+ // Must be called before PeerConnectionFactory is created.
+ public static void enable() {
+ nativeEnable();
+ }
+
+ // Gets and clears native histograms.
+ public static Metrics getAndReset() {
+ return nativeGetAndReset();
+ }
+
+ private static native void nativeEnable();
+ private static native Metrics nativeGetAndReset();
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/NetworkMonitor.java b/webrtc/api/android/java/src/org/webrtc/NetworkMonitor.java
new file mode 100644
index 0000000..cbe68d0
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/NetworkMonitor.java
@@ -0,0 +1,252 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
+import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
+import static org.webrtc.NetworkMonitorAutoDetect.NetworkInformation;
+
+import org.webrtc.Logging;
+
+import android.content.Context;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Borrowed from Chromium's src/net/android/java/src/org/chromium/net/NetworkChangeNotifier.java
+ *
+ * Triggers updates to the underlying network state from OS networking events.
+ *
+ * WARNING: This class is not thread-safe.
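+ *
+ * Illustrative sketch, assuming the monitor has been initialized (normally done
+ * from the native code) and the app holds ACCESS_NETWORK_STATE:
+ * <pre>
+ * NetworkMonitor.addNetworkObserver(new NetworkMonitor.NetworkObserver() {
+ *   public void onConnectionTypeChanged(ConnectionType connectionType) {
+ *     Logging.d("App", "Connection type: " + connectionType);
+ *   }
+ * });
+ * </pre>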
+ */
+public class NetworkMonitor {
+ /**
+ * Alerted when the connection type of the network changes.
+ * The alert is fired on the UI thread.
+ */
+ public interface NetworkObserver {
+ public void onConnectionTypeChanged(ConnectionType connectionType);
+ }
+
+ private static final String TAG = "NetworkMonitor";
+ private static NetworkMonitor instance;
+
+ private final Context applicationContext;
+
+ // Native observers of the connection type changes.
+ private final ArrayList<Long> nativeNetworkObservers;
+ // Java observers of the connection type changes.
+ private final ArrayList<NetworkObserver> networkObservers;
+
+ // Object that detects the connection type changes.
+ private NetworkMonitorAutoDetect autoDetector;
+
+ private ConnectionType currentConnectionType = ConnectionType.CONNECTION_UNKNOWN;
+
+ private NetworkMonitor(Context context) {
+ assertIsTrue(context != null);
+ applicationContext =
+ context.getApplicationContext() == null ? context : context.getApplicationContext();
+
+ nativeNetworkObservers = new ArrayList<Long>();
+ networkObservers = new ArrayList<NetworkObserver>();
+ }
+
+ /**
+ * Initializes the singleton once.
+ * Called from the native code.
+ */
+ public static NetworkMonitor init(Context context) {
+ if (!isInitialized()) {
+ instance = new NetworkMonitor(context);
+ }
+ return instance;
+ }
+
+ public static boolean isInitialized() {
+ return instance != null;
+ }
+
+ /**
+ * Returns the singleton instance.
+ */
+ public static NetworkMonitor getInstance() {
+ return instance;
+ }
+
+ /**
+ * Enables auto detection of the current network state based on notifications from the system.
+ * Note that passing true here requires the embedding app to have the platform ACCESS_NETWORK_STATE
+ * permission.
+ *
+ * @param shouldAutoDetect true if the NetworkMonitor should listen for system changes in
+ * network connectivity.
+ */
+ public static void setAutoDetectConnectivityState(boolean shouldAutoDetect) {
+ getInstance().setAutoDetectConnectivityStateInternal(shouldAutoDetect);
+ }
+
+ private static void assertIsTrue(boolean condition) {
+ if (!condition) {
+ throw new AssertionError("Expected to be true");
+ }
+ }
+
+ // Called by the native code.
+ private void startMonitoring(long nativeObserver) {
+ Logging.d(TAG, "Start monitoring from native observer " + nativeObserver);
+ nativeNetworkObservers.add(nativeObserver);
+ setAutoDetectConnectivityStateInternal(true);
+ }
+
+ // Called by the native code.
+ private void stopMonitoring(long nativeObserver) {
+ Logging.d(TAG, "Stop monitoring from native observer " + nativeObserver);
+ setAutoDetectConnectivityStateInternal(false);
+ nativeNetworkObservers.remove(nativeObserver);
+ }
+
+ private ConnectionType getCurrentConnectionType() {
+ return currentConnectionType;
+ }
+
+ private int getCurrentDefaultNetId() {
+ return autoDetector == null ? INVALID_NET_ID : autoDetector.getDefaultNetId();
+ }
+
+ private void destroyAutoDetector() {
+ if (autoDetector != null) {
+ autoDetector.destroy();
+ autoDetector = null;
+ }
+ }
+
+ private void setAutoDetectConnectivityStateInternal(boolean shouldAutoDetect) {
+ if (!shouldAutoDetect) {
+ destroyAutoDetector();
+ return;
+ }
+ if (autoDetector == null) {
+ autoDetector = new NetworkMonitorAutoDetect(
+ new NetworkMonitorAutoDetect.Observer() {
+
+ @Override
+ public void onConnectionTypeChanged(ConnectionType newConnectionType) {
+ updateCurrentConnectionType(newConnectionType);
+ }
+
+ @Override
+ public void onNetworkConnect(NetworkInformation networkInfo) {
+ notifyObserversOfNetworkConnect(networkInfo);
+ }
+
+ @Override
+ public void onNetworkDisconnect(int networkHandle) {
+ notifyObserversOfNetworkDisconnect(networkHandle);
+ }
+ },
+ applicationContext);
+ final NetworkMonitorAutoDetect.NetworkState networkState =
+ autoDetector.getCurrentNetworkState();
+ updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState));
+ updateActiveNetworkList();
+ }
+ }
+
+ private void updateCurrentConnectionType(ConnectionType newConnectionType) {
+ currentConnectionType = newConnectionType;
+ notifyObserversOfConnectionTypeChange(newConnectionType);
+ }
+
+ /**
+ * Alerts all observers of a connection change.
+ */
+ private void notifyObserversOfConnectionTypeChange(ConnectionType newConnectionType) {
+ for (long nativeObserver : nativeNetworkObservers) {
+ nativeNotifyConnectionTypeChanged(nativeObserver);
+ }
+ for (NetworkObserver observer : networkObservers) {
+ observer.onConnectionTypeChanged(newConnectionType);
+ }
+ }
+
+ private void notifyObserversOfNetworkConnect(NetworkInformation networkInfo) {
+ for (long nativeObserver : nativeNetworkObservers) {
+ nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
+ }
+ }
+
+ private void notifyObserversOfNetworkDisconnect(int networkHandle) {
+ for (long nativeObserver : nativeNetworkObservers) {
+ nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle);
+ }
+ }
+
+ private void updateActiveNetworkList() {
+ List<NetworkInformation> networkInfoList = autoDetector.getActiveNetworkList();
+ if (networkInfoList == null || networkInfoList.size() == 0) {
+ return;
+ }
+
+ NetworkInformation[] networkInfos = new NetworkInformation[networkInfoList.size()];
+ networkInfos = networkInfoList.toArray(networkInfos);
+ for (long nativeObserver : nativeNetworkObservers) {
+ nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos);
+ }
+ }
+
+ /**
+ * Adds an observer for any connection type changes.
+ */
+ public static void addNetworkObserver(NetworkObserver observer) {
+ getInstance().addNetworkObserverInternal(observer);
+ }
+
+ private void addNetworkObserverInternal(NetworkObserver observer) {
+ networkObservers.add(observer);
+ }
+
+ /**
+ * Removes an observer for any connection type changes.
+ */
+ public static void removeNetworkObserver(NetworkObserver observer) {
+ getInstance().removeNetworkObserverInternal(observer);
+ }
+
+ private void removeNetworkObserverInternal(NetworkObserver observer) {
+ networkObservers.remove(observer);
+ }
+
+ /**
+ * Checks if there currently is connectivity.
+ */
+ public static boolean isOnline() {
+ ConnectionType connectionType = getInstance().getCurrentConnectionType();
+ return connectionType != ConnectionType.CONNECTION_NONE;
+ }
+
+ private native void nativeNotifyConnectionTypeChanged(long nativePtr);
+ private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo);
+ private native void nativeNotifyOfNetworkDisconnect(long nativePtr, int networkHandle);
+ private native void nativeNotifyOfActiveNetworkList(long nativePtr,
+ NetworkInformation[] networkInfos);
+
+ // For testing only.
+ static void resetInstanceForTests(Context context) {
+ instance = new NetworkMonitor(context);
+ }
+
+ // For testing only.
+ public static NetworkMonitorAutoDetect getAutoDetectorForTest() {
+ return getInstance().autoDetector;
+ }
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/NetworkMonitorAutoDetect.java b/webrtc/api/android/java/src/org/webrtc/NetworkMonitorAutoDetect.java
new file mode 100644
index 0000000..4335e5c
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/NetworkMonitorAutoDetect.java
@@ -0,0 +1,622 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import static android.net.NetworkCapabilities.NET_CAPABILITY_INTERNET;
+import static android.net.NetworkCapabilities.TRANSPORT_CELLULAR;
+
+
+import org.webrtc.Logging;
+
+import android.Manifest.permission;
+import android.annotation.SuppressLint;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.pm.PackageManager;
+import android.net.ConnectivityManager;
+import android.net.ConnectivityManager.NetworkCallback;
+import android.net.LinkAddress;
+import android.net.LinkProperties;
+import android.net.Network;
+import android.net.NetworkCapabilities;
+import android.net.NetworkInfo;
+import android.net.NetworkRequest;
+import android.net.wifi.WifiInfo;
+import android.net.wifi.WifiManager;
+import android.os.Build;
+import android.telephony.TelephonyManager;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Borrowed from Chromium's
+ * src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java
+ *
+ * Used by the NetworkMonitor to listen to platform changes in connectivity.
+ * Note that use of this class requires that the app have the platform
+ * ACCESS_NETWORK_STATE permission.
+ */
+public class NetworkMonitorAutoDetect extends BroadcastReceiver {
+ public static enum ConnectionType {
+ CONNECTION_UNKNOWN,
+ CONNECTION_ETHERNET,
+ CONNECTION_WIFI,
+ CONNECTION_4G,
+ CONNECTION_3G,
+ CONNECTION_2G,
+ CONNECTION_UNKNOWN_CELLULAR,
+ CONNECTION_BLUETOOTH,
+ CONNECTION_NONE
+ }
+
+ public static class IPAddress {
+ public final byte[] address;
+ public IPAddress(byte[] address) {
+ this.address = address;
+ }
+ }
+
+ /** Java version of NetworkMonitor.NetworkInformation */
+ public static class NetworkInformation {
+ public final String name;
+ public final ConnectionType type;
+ public final int handle;
+ public final IPAddress[] ipAddresses;
+ public NetworkInformation(String name, ConnectionType type, int handle,
+ IPAddress[] addresses) {
+ this.name = name;
+ this.type = type;
+ this.handle = handle;
+ this.ipAddresses = addresses;
+ }
+ }
+
+ static class NetworkState {
+ private final boolean connected;
+ // Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is
+ // further divided into 2G, 3G, or 4G from the subtype.
+ private final int type;
+ // Defined from NetworkInfo.subtype, which is one of the TelephonyManager.NETWORK_TYPE_XXXs.
+ // Will be useful to find the maximum bandwidth.
+ private final int subtype;
+
+ public NetworkState(boolean connected, int type, int subtype) {
+ this.connected = connected;
+ this.type = type;
+ this.subtype = subtype;
+ }
+
+ public boolean isConnected() {
+ return connected;
+ }
+
+ public int getNetworkType() {
+ return type;
+ }
+
+ public int getNetworkSubType() {
+ return subtype;
+ }
+ }
+ /**
+ * The methods in this class are called when the network changes, provided the
+ * callback has been registered with a suitable network request. Only available
+ * on Android Lollipop and above.
+ */
+ @SuppressLint("NewApi")
+ private class SimpleNetworkCallback extends NetworkCallback {
+
+ @Override
+ public void onAvailable(Network network) {
+ Logging.d(TAG, "Network becomes available: " + network.toString());
+ onNetworkChanged(network);
+ }
+
+ @Override
+ public void onCapabilitiesChanged(
+ Network network, NetworkCapabilities networkCapabilities) {
+ // A capabilities change may indicate the ConnectionType has changed,
+ // so forward the new NetworkInformation along to the observer.
+ Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
+ onNetworkChanged(network);
+ }
+
+ @Override
+ public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) {
+ // A link property change may indicate that the IP address has changed,
+ // so forward the new NetworkInformation to the observer.
+ Logging.d(TAG, "link properties changed: " + linkProperties.toString());
+ onNetworkChanged(network);
+ }
+
+ @Override
+ public void onLosing(Network network, int maxMsToLive) {
+ // Called when the network is about to be lost within maxMsToLive
+ // milliseconds. We may use this signal later.
+ Logging.d(TAG,
+ "Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
+ }
+
+ @Override
+ public void onLost(Network network) {
+ Logging.d(TAG, "Network " + network.toString() + " is disconnected");
+ observer.onNetworkDisconnect(networkToNetId(network));
+ }
+
+ private void onNetworkChanged(Network network) {
+ NetworkInformation networkInformation = connectivityManagerDelegate.networkToInfo(network);
+ if (networkInformation != null) {
+ observer.onNetworkConnect(networkInformation);
+ }
+ }
+ }
+
+ /** Queries the ConnectivityManager for information about the current connection. */
+ static class ConnectivityManagerDelegate {
+ /**
+ * Note: In some rare Android systems connectivityManager is null. We handle that
+ * gracefully below.
+ */
+ private final ConnectivityManager connectivityManager;
+
+ ConnectivityManagerDelegate(Context context) {
+ connectivityManager =
+ (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
+ }
+
+ // For testing.
+ ConnectivityManagerDelegate() {
+ // All the methods below should be overridden.
+ connectivityManager = null;
+ }
+
+ /**
+ * Returns connection type and status information about the current
+ * default network.
+ */
+ NetworkState getNetworkState() {
+ if (connectivityManager == null) {
+ return new NetworkState(false, -1, -1);
+ }
+ return getNetworkState(connectivityManager.getActiveNetworkInfo());
+ }
+
+ /**
+ * Returns connection type and status information about |network|.
+ * Only callable on Lollipop and newer releases.
+ */
+ @SuppressLint("NewApi")
+ NetworkState getNetworkState(Network network) {
+ if (connectivityManager == null) {
+ return new NetworkState(false, -1, -1);
+ }
+ return getNetworkState(connectivityManager.getNetworkInfo(network));
+ }
+
+ /**
+ * Returns connection type and status information gleaned from networkInfo.
+ */
+ NetworkState getNetworkState(NetworkInfo networkInfo) {
+ if (networkInfo == null || !networkInfo.isConnected()) {
+ return new NetworkState(false, -1, -1);
+ }
+ return new NetworkState(true, networkInfo.getType(), networkInfo.getSubtype());
+ }
+
+ /**
+ * Returns all connected networks.
+ * Only callable on Lollipop and newer releases.
+ */
+ @SuppressLint("NewApi")
+ Network[] getAllNetworks() {
+ if (connectivityManager == null) {
+ return new Network[0];
+ }
+ return connectivityManager.getAllNetworks();
+ }
+
+ List<NetworkInformation> getActiveNetworkList() {
+ if (!supportNetworkCallback()) {
+ return null;
+ }
+ ArrayList<NetworkInformation> netInfoList = new ArrayList<NetworkInformation>();
+ for (Network network : getAllNetworks()) {
+ NetworkInformation info = networkToInfo(network);
+ if (info != null) {
+ netInfoList.add(info);
+ }
+ }
+ return netInfoList;
+ }
+
+ /**
+ * Returns the NetID of the current default network. Returns
+ * INVALID_NET_ID if no default network is currently connected.
+ * Only callable on Lollipop and newer releases.
+ */
+ @SuppressLint("NewApi")
+ int getDefaultNetId() {
+ if (!supportNetworkCallback()) {
+ return INVALID_NET_ID;
+ }
+ // Android Lollipop had no API to get the default network; only an
+ // API to return the NetworkInfo for the default network. To
+ // determine the default network we look for a network whose type
+ // matches that of the default NetworkInfo.
+ final NetworkInfo defaultNetworkInfo = connectivityManager.getActiveNetworkInfo();
+ if (defaultNetworkInfo == null) {
+ return INVALID_NET_ID;
+ }
+ final Network[] networks = getAllNetworks();
+ int defaultNetId = INVALID_NET_ID;
+ for (Network network : networks) {
+ if (!hasInternetCapability(network)) {
+ continue;
+ }
+ final NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
+ if (networkInfo != null && networkInfo.getType() == defaultNetworkInfo.getType()) {
+ // There should not be multiple connected networks of the
+ // same type. At least as of Android Marshmallow this is
+ // not supported. If this becomes supported this assertion
+ // may trigger. At that point we could consider using
+ // ConnectivityManager.getDefaultNetwork() though this
+ // may give confusing results with VPNs and is only
+ // available with Android Marshmallow.
+ assert defaultNetId == INVALID_NET_ID;
+ defaultNetId = networkToNetId(network);
+ }
+ }
+ return defaultNetId;
+ }
+
+ @SuppressLint("NewApi")
+ private NetworkInformation networkToInfo(Network network) {
+ LinkProperties linkProperties = connectivityManager.getLinkProperties(network);
+ // getLinkProperties will return null if the network is unknown.
+ if (linkProperties == null) {
+ Logging.w(TAG, "Detected unknown network: " + network.toString());
+ return null;
+ }
+ if (linkProperties.getInterfaceName() == null) {
+ Logging.w(TAG, "Null interface name for network " + network.toString());
+ return null;
+ }
+
+ NetworkState networkState = getNetworkState(network);
+ ConnectionType connectionType = getConnectionType(networkState);
+ if (connectionType == ConnectionType.CONNECTION_NONE) {
+ // This may not be an error. The OS may signal a network event with connection type
+ // NONE when the network disconnects.
+ Logging.d(TAG, "Network " + network.toString() + " is disconnected");
+ return null;
+ }
+
+ // Some Android devices may return a CONNECTION_UNKNOWN_CELLULAR or CONNECTION_UNKNOWN
+ // type, which appears to be usable. Just log these cases here.
+ if (connectionType == ConnectionType.CONNECTION_UNKNOWN
+ || connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
+ Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
+ + " because it has type " + networkState.getNetworkType()
+ + " and subtype " + networkState.getNetworkSubType());
+ }
+
+ NetworkInformation networkInformation = new NetworkInformation(
+ linkProperties.getInterfaceName(),
+ connectionType,
+ networkToNetId(network),
+ getIPAddresses(linkProperties));
+ return networkInformation;
+ }
+
+ /**
+ * Returns true if {@code network} can provide Internet access. Can be used to
+ * ignore specialized networks (e.g. IMS, FOTA).
+ */
+ @SuppressLint("NewApi")
+ boolean hasInternetCapability(Network network) {
+ if (connectivityManager == null) {
+ return false;
+ }
+ final NetworkCapabilities capabilities =
+ connectivityManager.getNetworkCapabilities(network);
+ return capabilities != null && capabilities.hasCapability(NET_CAPABILITY_INTERNET);
+ }
+
+ /** Only callable on Lollipop and newer releases. */
+ @SuppressLint("NewApi")
+ public void registerNetworkCallback(NetworkCallback networkCallback) {
+ connectivityManager.registerNetworkCallback(
+ new NetworkRequest.Builder().addCapability(NET_CAPABILITY_INTERNET).build(),
+ networkCallback);
+ }
+
+ /** Only callable on Lollipop and newer releases. */
+ @SuppressLint("NewApi")
+ public void requestMobileNetwork(NetworkCallback networkCallback) {
+ NetworkRequest.Builder builder = new NetworkRequest.Builder();
+ builder.addCapability(NET_CAPABILITY_INTERNET).addTransportType(TRANSPORT_CELLULAR);
+ connectivityManager.requestNetwork(builder.build(), networkCallback);
+ }
+
+ @SuppressLint("NewApi")
+ IPAddress[] getIPAddresses(LinkProperties linkProperties) {
+ IPAddress[] ipAddresses = new IPAddress[linkProperties.getLinkAddresses().size()];
+ int i = 0;
+ for (LinkAddress linkAddress : linkProperties.getLinkAddresses()) {
+ ipAddresses[i] = new IPAddress(linkAddress.getAddress().getAddress());
+ ++i;
+ }
+ return ipAddresses;
+ }
+
+ @SuppressLint("NewApi")
+ public void releaseCallback(NetworkCallback networkCallback) {
+ if (supportNetworkCallback()) {
+ Logging.d(TAG, "Unregister network callback");
+ connectivityManager.unregisterNetworkCallback(networkCallback);
+ }
+ }
+
+ public boolean supportNetworkCallback() {
+ return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && connectivityManager != null;
+ }
+ }
+
+
+ /** Queries the WifiManager for SSID of the current Wifi connection. */
+ static class WifiManagerDelegate {
+ private final Context context;
+ WifiManagerDelegate(Context context) {
+ this.context = context;
+ }
+
+ // For testing.
+ WifiManagerDelegate() {
+ // All the methods below should be overridden.
+ context = null;
+ }
+
+ String getWifiSSID() {
+ final Intent intent = context.registerReceiver(null,
+ new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
+ if (intent != null) {
+ final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
+ if (wifiInfo != null) {
+ final String ssid = wifiInfo.getSSID();
+ if (ssid != null) {
+ return ssid;
+ }
+ }
+ }
+ return "";
+ }
+
+ }
+
+ static final int INVALID_NET_ID = -1;
+ private static final String TAG = "NetworkMonitorAutoDetect";
+
+ // Observer for the connection type change.
+ private final Observer observer;
+ private final IntentFilter intentFilter;
+ private final Context context;
+ // Used to request mobile network. It does not do anything except for keeping
+ // the callback for releasing the request.
+ private final NetworkCallback mobileNetworkCallback;
+ // Used to receive updates on all networks.
+ private final NetworkCallback allNetworkCallback;
+ // connectivityManagerDelegate and wifiManagerDelegate are only non-final for testing.
+ private ConnectivityManagerDelegate connectivityManagerDelegate;
+ private WifiManagerDelegate wifiManagerDelegate;
+
+ private boolean isRegistered;
+ private ConnectionType connectionType;
+ private String wifiSSID;
+
+ /**
+ * Observer interface by which observer is notified of network changes.
+ */
+ public static interface Observer {
+ /**
+ * Called when default network changes.
+ */
+ public void onConnectionTypeChanged(ConnectionType newConnectionType);
+ public void onNetworkConnect(NetworkInformation networkInfo);
+ public void onNetworkDisconnect(int networkHandle);
+ }
+
+ /**
+ * Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread.
+ */
+ @SuppressLint("NewApi")
+ public NetworkMonitorAutoDetect(Observer observer, Context context) {
+ this.observer = observer;
+ this.context = context;
+ connectivityManagerDelegate = new ConnectivityManagerDelegate(context);
+ wifiManagerDelegate = new WifiManagerDelegate(context);
+
+ final NetworkState networkState = connectivityManagerDelegate.getNetworkState();
+ connectionType = getConnectionType(networkState);
+ wifiSSID = getWifiSSID(networkState);
+ intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
+
+ registerReceiver();
+ if (connectivityManagerDelegate.supportNetworkCallback()) {
+ // On Android 6.0.0, the WRITE_SETTINGS permission is necessary for
+ // requestNetwork, so it will fail. This was fixed in Android 6.0.1.
+ NetworkCallback tempNetworkCallback = new NetworkCallback();
+ try {
+ connectivityManagerDelegate.requestMobileNetwork(tempNetworkCallback);
+ } catch (java.lang.SecurityException e) {
+ Logging.w(TAG, "Unable to obtain permission to request a cellular network.");
+ tempNetworkCallback = null;
+ }
+ mobileNetworkCallback = tempNetworkCallback;
+ allNetworkCallback = new SimpleNetworkCallback();
+ connectivityManagerDelegate.registerNetworkCallback(allNetworkCallback);
+ } else {
+ mobileNetworkCallback = null;
+ allNetworkCallback = null;
+ }
+ }
+
+ /**
+ * Allows overriding the ConnectivityManagerDelegate for tests.
+ */
+ void setConnectivityManagerDelegateForTests(ConnectivityManagerDelegate delegate) {
+ connectivityManagerDelegate = delegate;
+ }
+
+ /**
+ * Allows overriding the WifiManagerDelegate for tests.
+ */
+ void setWifiManagerDelegateForTests(WifiManagerDelegate delegate) {
+ wifiManagerDelegate = delegate;
+ }
+
+ /**
+ * Returns whether the object has registered to receive network connectivity intents.
+ * Visible for testing.
+ */
+ boolean isReceiverRegisteredForTesting() {
+ return isRegistered;
+ }
+
+ List<NetworkInformation> getActiveNetworkList() {
+ return connectivityManagerDelegate.getActiveNetworkList();
+ }
+
+ public void destroy() {
+ if (allNetworkCallback != null) {
+ connectivityManagerDelegate.releaseCallback(allNetworkCallback);
+ }
+ if (mobileNetworkCallback != null) {
+ connectivityManagerDelegate.releaseCallback(mobileNetworkCallback);
+ }
+ unregisterReceiver();
+ }
+
+ /**
+ * Registers a BroadcastReceiver in the given context.
+ */
+ private void registerReceiver() {
+ if (isRegistered) return;
+
+ isRegistered = true;
+ context.registerReceiver(this, intentFilter);
+ }
+
+ /**
+ * Unregisters the BroadcastReceiver in the given context.
+ */
+ private void unregisterReceiver() {
+ if (!isRegistered) return;
+
+ isRegistered = false;
+ context.unregisterReceiver(this);
+ }
+
+ public NetworkState getCurrentNetworkState() {
+ return connectivityManagerDelegate.getNetworkState();
+ }
+
+ /**
+ * Returns the NetID of the device's current default network used for
+ * communication.
+ * Only implemented on Lollipop and newer releases; returns INVALID_NET_ID
+ * when not implemented.
+ */
+ public int getDefaultNetId() {
+ return connectivityManagerDelegate.getDefaultNetId();
+ }
+
+ public static ConnectionType getConnectionType(NetworkState networkState) {
+ if (!networkState.isConnected()) {
+ return ConnectionType.CONNECTION_NONE;
+ }
+
+ switch (networkState.getNetworkType()) {
+ case ConnectivityManager.TYPE_ETHERNET:
+ return ConnectionType.CONNECTION_ETHERNET;
+ case ConnectivityManager.TYPE_WIFI:
+ return ConnectionType.CONNECTION_WIFI;
+ case ConnectivityManager.TYPE_WIMAX:
+ return ConnectionType.CONNECTION_4G;
+ case ConnectivityManager.TYPE_BLUETOOTH:
+ return ConnectionType.CONNECTION_BLUETOOTH;
+ case ConnectivityManager.TYPE_MOBILE:
+ // Use information from TelephonyManager to classify the connection.
+ switch (networkState.getNetworkSubType()) {
+ case TelephonyManager.NETWORK_TYPE_GPRS:
+ case TelephonyManager.NETWORK_TYPE_EDGE:
+ case TelephonyManager.NETWORK_TYPE_CDMA:
+ case TelephonyManager.NETWORK_TYPE_1xRTT:
+ case TelephonyManager.NETWORK_TYPE_IDEN:
+ return ConnectionType.CONNECTION_2G;
+ case TelephonyManager.NETWORK_TYPE_UMTS:
+ case TelephonyManager.NETWORK_TYPE_EVDO_0:
+ case TelephonyManager.NETWORK_TYPE_EVDO_A:
+ case TelephonyManager.NETWORK_TYPE_HSDPA:
+ case TelephonyManager.NETWORK_TYPE_HSUPA:
+ case TelephonyManager.NETWORK_TYPE_HSPA:
+ case TelephonyManager.NETWORK_TYPE_EVDO_B:
+ case TelephonyManager.NETWORK_TYPE_EHRPD:
+ case TelephonyManager.NETWORK_TYPE_HSPAP:
+ return ConnectionType.CONNECTION_3G;
+ case TelephonyManager.NETWORK_TYPE_LTE:
+ return ConnectionType.CONNECTION_4G;
+ default:
+ return ConnectionType.CONNECTION_UNKNOWN_CELLULAR;
+ }
+ default:
+ return ConnectionType.CONNECTION_UNKNOWN;
+ }
+ }
+
+ private String getWifiSSID(NetworkState networkState) {
+ if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI) return "";
+ return wifiManagerDelegate.getWifiSSID();
+ }
+
+ // BroadcastReceiver
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ final NetworkState networkState = getCurrentNetworkState();
+ if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) {
+ connectionTypeChanged(networkState);
+ }
+ }
+
+ private void connectionTypeChanged(NetworkState networkState) {
+ ConnectionType newConnectionType = getConnectionType(networkState);
+ String newWifiSSID = getWifiSSID(networkState);
+ if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID)) return;
+
+ connectionType = newConnectionType;
+ wifiSSID = newWifiSSID;
+ Logging.d(TAG, "Network connectivity changed, type is: " + connectionType);
+ observer.onConnectionTypeChanged(newConnectionType);
+ }
+
+ /**
+ * Extracts NetID of network. Only available on Lollipop and newer releases.
+ */
+ @SuppressLint("NewApi")
+ private static int networkToNetId(Network network) {
+ // NOTE(pauljensen): This depends on Android framework implementation details.
+ // Fortunately this functionality is unlikely to ever change.
+ // TODO(honghaiz): When we update to Android M SDK, use Network.getNetworkHandle().
+ return Integer.parseInt(network.toString());
+ }
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/OWNERS b/webrtc/api/android/java/src/org/webrtc/OWNERS
new file mode 100644
index 0000000..4d31ffb
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/OWNERS
@@ -0,0 +1 @@
+magjed@webrtc.org
diff --git a/webrtc/api/android/java/src/org/webrtc/PeerConnection.java b/webrtc/api/android/java/src/org/webrtc/PeerConnection.java
new file mode 100644
index 0000000..ad8362d
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/PeerConnection.java
@@ -0,0 +1,306 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+package org.webrtc;
+
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Java-land version of the PeerConnection APIs; wraps the C++ API
+ * http://www.webrtc.org/reference/native-apis, which in turn is inspired by the
+ * JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and
+ * http://www.w3.org/TR/mediacapture-streams/
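+ *
+ * Illustrative offer sketch (assumes a PeerConnectionFactory, constraints and
+ * observer implementations created elsewhere; error paths omitted):
+ * <pre>
+ * PeerConnection pc =
+ *     factory.createPeerConnection(iceServers, constraints, pcObserver);
+ * pc.createOffer(sdpObserver, constraints);
+ * // From SdpObserver.onCreateSuccess(SessionDescription sdp):
+ * //   pc.setLocalDescription(sdpObserver, sdp);
+ * </pre>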
+ */
+public class PeerConnection {
+ static {
+ System.loadLibrary("jingle_peerconnection_so");
+ }
+
+ /** Tracks PeerConnectionInterface::IceGatheringState */
+ public enum IceGatheringState { NEW, GATHERING, COMPLETE };
+
+
+ /** Tracks PeerConnectionInterface::IceConnectionState */
+ public enum IceConnectionState {
+ NEW, CHECKING, CONNECTED, COMPLETED, FAILED, DISCONNECTED, CLOSED
+ };
+
+ /** Tracks PeerConnectionInterface::SignalingState */
+ public enum SignalingState {
+ STABLE, HAVE_LOCAL_OFFER, HAVE_LOCAL_PRANSWER, HAVE_REMOTE_OFFER,
+ HAVE_REMOTE_PRANSWER, CLOSED
+ };
+
+ /** Java version of PeerConnectionObserver. */
+ public static interface Observer {
+ /** Triggered when the SignalingState changes. */
+ public void onSignalingChange(SignalingState newState);
+
+ /** Triggered when the IceConnectionState changes. */
+ public void onIceConnectionChange(IceConnectionState newState);
+
+ /** Triggered when the ICE connection receiving status changes. */
+ public void onIceConnectionReceivingChange(boolean receiving);
+
+ /** Triggered when the IceGatheringState changes. */
+ public void onIceGatheringChange(IceGatheringState newState);
+
+ /** Triggered when a new ICE candidate has been found. */
+ public void onIceCandidate(IceCandidate candidate);
+
+ /** Triggered when some ICE candidates have been removed. */
+ public void onIceCandidatesRemoved(IceCandidate[] candidates);
+
+ /** Triggered when media is received on a new stream from a remote peer. */
+ public void onAddStream(MediaStream stream);
+
+ /** Triggered when a remote peer closes a stream. */
+ public void onRemoveStream(MediaStream stream);
+
+ /** Triggered when a remote peer opens a DataChannel. */
+ public void onDataChannel(DataChannel dataChannel);
+
+ /** Triggered when renegotiation is necessary. */
+ public void onRenegotiationNeeded();
+ }
+
+ /** Java version of PeerConnectionInterface.IceServer. */
+ public static class IceServer {
+ public final String uri;
+ public final String username;
+ public final String password;
+
+ /** Convenience constructor for STUN servers. */
+ public IceServer(String uri) {
+ this(uri, "", "");
+ }
+
+ public IceServer(String uri, String username, String password) {
+ this.uri = uri;
+ this.username = username;
+ this.password = password;
+ }
+
+ public String toString() {
+ return uri + "[" + username + ":" + password + "]";
+ }
+ }
+
+ /** Java version of PeerConnectionInterface.IceTransportsType */
+ public enum IceTransportsType {
+ NONE, RELAY, NOHOST, ALL
+ };
+
+ /** Java version of PeerConnectionInterface.BundlePolicy */
+ public enum BundlePolicy {
+ BALANCED, MAXBUNDLE, MAXCOMPAT
+ };
+
+ /** Java version of PeerConnectionInterface.RtcpMuxPolicy */
+ public enum RtcpMuxPolicy {
+ NEGOTIATE, REQUIRE
+ };
+
+ /** Java version of PeerConnectionInterface.TcpCandidatePolicy */
+ public enum TcpCandidatePolicy {
+ ENABLED, DISABLED
+ };
+
+ /** Java version of PeerConnectionInterface.CandidateNetworkPolicy */
+ public enum CandidateNetworkPolicy {
+ ALL, LOW_COST
+ };
+
+ /** Java version of rtc::KeyType */
+ public enum KeyType {
+ RSA, ECDSA
+ }
+
+ /** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
+ public enum ContinualGatheringPolicy {
+ GATHER_ONCE, GATHER_CONTINUALLY
+ }
+
+ /** Java version of PeerConnectionInterface.RTCConfiguration */
+ public static class RTCConfiguration {
+ public IceTransportsType iceTransportsType;
+ public List<IceServer> iceServers;
+ public BundlePolicy bundlePolicy;
+ public RtcpMuxPolicy rtcpMuxPolicy;
+ public TcpCandidatePolicy tcpCandidatePolicy;
+ public CandidateNetworkPolicy candidateNetworkPolicy;
+ public int audioJitterBufferMaxPackets;
+ public boolean audioJitterBufferFastAccelerate;
+ public int iceConnectionReceivingTimeout;
+ public int iceBackupCandidatePairPingInterval;
+ public KeyType keyType;
+ public ContinualGatheringPolicy continualGatheringPolicy;
+ public int iceCandidatePoolSize;
+
+ public RTCConfiguration(List<IceServer> iceServers) {
+ iceTransportsType = IceTransportsType.ALL;
+ bundlePolicy = BundlePolicy.BALANCED;
+ rtcpMuxPolicy = RtcpMuxPolicy.NEGOTIATE;
+ tcpCandidatePolicy = TcpCandidatePolicy.ENABLED;
+ candidateNetworkPolicy = CandidateNetworkPolicy.ALL;
+ this.iceServers = iceServers;
+ audioJitterBufferMaxPackets = 50;
+ audioJitterBufferFastAccelerate = false;
+ iceConnectionReceivingTimeout = -1;
+ iceBackupCandidatePairPingInterval = -1;
+ keyType = KeyType.ECDSA;
+ continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
+ iceCandidatePoolSize = 0;
+ }
+ };
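+
+ // Illustrative sketch: the defaults above plus one STUN server (the URI below
+ // is an assumption, not part of this CL):
+ //   List<IceServer> servers = new LinkedList<IceServer>();
+ //   servers.add(new IceServer("stun:stun.l.google.com:19302"));
+ //   RTCConfiguration config = new RTCConfiguration(servers);
+ //   config.continualGatheringPolicy = ContinualGatheringPolicy.GATHER_CONTINUALLY;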
+
+ private final List<MediaStream> localStreams;
+ private final long nativePeerConnection;
+ private final long nativeObserver;
+ private List<RtpSender> senders;
+ private List<RtpReceiver> receivers;
+
+ PeerConnection(long nativePeerConnection, long nativeObserver) {
+ this.nativePeerConnection = nativePeerConnection;
+ this.nativeObserver = nativeObserver;
+ localStreams = new LinkedList<MediaStream>();
+ senders = new LinkedList<RtpSender>();
+ receivers = new LinkedList<RtpReceiver>();
+ }
+
+ // JsepInterface.
+ public native SessionDescription getLocalDescription();
+
+ public native SessionDescription getRemoteDescription();
+
+ public native DataChannel createDataChannel(
+ String label, DataChannel.Init init);
+
+ public native void createOffer(
+ SdpObserver observer, MediaConstraints constraints);
+
+ public native void createAnswer(
+ SdpObserver observer, MediaConstraints constraints);
+
+ public native void setLocalDescription(
+ SdpObserver observer, SessionDescription sdp);
+
+ public native void setRemoteDescription(
+ SdpObserver observer, SessionDescription sdp);
+
+ public native boolean setConfiguration(RTCConfiguration config);
+
+ public boolean addIceCandidate(IceCandidate candidate) {
+ return nativeAddIceCandidate(
+ candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
+ }
+
+ public boolean removeIceCandidates(final IceCandidate[] candidates) {
+ return nativeRemoveIceCandidates(candidates);
+ }
+
+ public boolean addStream(MediaStream stream) {
+ boolean ret = nativeAddLocalStream(stream.nativeStream);
+ if (!ret) {
+ return false;
+ }
+ localStreams.add(stream);
+ return true;
+ }
+
+ public void removeStream(MediaStream stream) {
+ nativeRemoveLocalStream(stream.nativeStream);
+ localStreams.remove(stream);
+ }
+
+ public RtpSender createSender(String kind, String stream_id) {
+ RtpSender new_sender = nativeCreateSender(kind, stream_id);
+ if (new_sender != null) {
+ senders.add(new_sender);
+ }
+ return new_sender;
+ }
+
+ // Note that calling getSenders will dispose of the senders previously
+ // returned (and the same goes for getReceivers).
+ public List<RtpSender> getSenders() {
+ for (RtpSender sender : senders) {
+ sender.dispose();
+ }
+ senders = nativeGetSenders();
+ return Collections.unmodifiableList(senders);
+ }
+
+ public List<RtpReceiver> getReceivers() {
+ for (RtpReceiver receiver : receivers) {
+ receiver.dispose();
+ }
+ receivers = nativeGetReceivers();
+ return Collections.unmodifiableList(receivers);
+ }
+
+ public boolean getStats(StatsObserver observer, MediaStreamTrack track) {
+ return nativeGetStats(observer, (track == null) ? 0 : track.nativeTrack);
+ }
+
+ // TODO(fischman): add support for DTMF-related methods once that API
+ // stabilizes.
+ public native SignalingState signalingState();
+
+ public native IceConnectionState iceConnectionState();
+
+ public native IceGatheringState iceGatheringState();
+
+ public native void close();
+
+ public void dispose() {
+ close();
+ for (MediaStream stream : localStreams) {
+ nativeRemoveLocalStream(stream.nativeStream);
+ stream.dispose();
+ }
+ localStreams.clear();
+ for (RtpSender sender : senders) {
+ sender.dispose();
+ }
+ senders.clear();
+ for (RtpReceiver receiver : receivers) {
+ receiver.dispose();
+ }
+ receivers.clear();
+ freePeerConnection(nativePeerConnection);
+ freeObserver(nativeObserver);
+ }
+
+ private static native void freePeerConnection(long nativePeerConnection);
+
+ private static native void freeObserver(long nativeObserver);
+
+ private native boolean nativeAddIceCandidate(
+ String sdpMid, int sdpMLineIndex, String iceCandidateSdp);
+
+ private native boolean nativeRemoveIceCandidates(final IceCandidate[] candidates);
+
+ private native boolean nativeAddLocalStream(long nativeStream);
+
+ private native void nativeRemoveLocalStream(long nativeStream);
+
+ private native boolean nativeGetStats(
+ StatsObserver observer, long nativeTrack);
+
+ private native RtpSender nativeCreateSender(String kind, String stream_id);
+
+ private native List<RtpSender> nativeGetSenders();
+
+ private native List<RtpReceiver> nativeGetReceivers();
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/PeerConnectionFactory.java b/webrtc/api/android/java/src/org/webrtc/PeerConnectionFactory.java
new file mode 100644
index 0000000..0c1ef3c
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/PeerConnectionFactory.java
@@ -0,0 +1,293 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+package org.webrtc;
+
+import java.util.List;
+
+/**
+ * Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to
+ * the PeerConnection API for clients.
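+ *
+ * Illustrative bootstrap sketch (assumes an Android Context and that audio,
+ * video and HW video acceleration are all wanted):
+ * <pre>
+ * PeerConnectionFactory.initializeAndroidGlobals(context, true, true, true);
+ * PeerConnectionFactory factory =
+ *     new PeerConnectionFactory(new PeerConnectionFactory.Options());
+ * </pre>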
+ */
+public class PeerConnectionFactory {
+ static {
+ System.loadLibrary("jingle_peerconnection_so");
+ }
+
+ private static final String TAG = "PeerConnectionFactory";
+ private final long nativeFactory;
+ private static Thread networkThread;
+ private static Thread workerThread;
+ private static Thread signalingThread;
+ private EglBase localEglbase;
+ private EglBase remoteEglbase;
+
+ public static class Options {
+ // Keep in sync with webrtc/base/network.h!
+ static final int ADAPTER_TYPE_UNKNOWN = 0;
+ static final int ADAPTER_TYPE_ETHERNET = 1 << 0;
+ static final int ADAPTER_TYPE_WIFI = 1 << 1;
+ static final int ADAPTER_TYPE_CELLULAR = 1 << 2;
+ static final int ADAPTER_TYPE_VPN = 1 << 3;
+ static final int ADAPTER_TYPE_LOOPBACK = 1 << 4;
+
+ public int networkIgnoreMask;
+ public boolean disableEncryption;
+ public boolean disableNetworkMonitor;
+ }
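+
+ // Illustrative sketch (the ADAPTER_TYPE_* constants are package-private, so
+ // this only compiles from code in org.webrtc):
+ //   PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
+ //   options.networkIgnoreMask =
+ //       Options.ADAPTER_TYPE_VPN | Options.ADAPTER_TYPE_LOOPBACK;
+ //   PeerConnectionFactory factory = new PeerConnectionFactory(options);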
+
+ // |context| is an android.content.Context object, but we keep it untyped here
+ // to allow building on non-Android platforms.
+ // Callers may specify either |initializeAudio| or |initializeVideo| as false
+ // to skip initializing the respective engine (and avoid the need for the
+ // respective permissions).
+ // |renderEGLContext| can be provided to support HW video decoding to
+ // texture and will be used to create a shared EGL context on video
+ // decoding thread.
+ public static native boolean initializeAndroidGlobals(
+ Object context, boolean initializeAudio, boolean initializeVideo,
+ boolean videoHwAcceleration);
+
+ // Field trial initialization. Must be called before PeerConnectionFactory
+ // is created.
+ public static native void initializeFieldTrials(String fieldTrialsInitString);
+ // Internal tracing initialization. Must be called before PeerConnectionFactory is created to
+ // prevent racing with tracing code.
+ public static native void initializeInternalTracer();
+ // Internal tracing shutdown, called to prevent resource leaks. Must be called after
+ // PeerConnectionFactory is gone to prevent races with code performing tracing.
+ public static native void shutdownInternalTracer();
+ // Start/stop internal capturing of internal tracing.
+ public static native boolean startInternalTracingCapture(String tracing_filename);
+ public static native void stopInternalTracingCapture();
+
+ @Deprecated
+ public PeerConnectionFactory() {
+ this(null);
+ }
+
+ public PeerConnectionFactory(Options options) {
+ nativeFactory = nativeCreatePeerConnectionFactory(options);
+ if (nativeFactory == 0) {
+ throw new RuntimeException("Failed to initialize PeerConnectionFactory!");
+ }
+ }
+
+ public PeerConnection createPeerConnection(
+ PeerConnection.RTCConfiguration rtcConfig,
+ MediaConstraints constraints,
+ PeerConnection.Observer observer) {
+ long nativeObserver = nativeCreateObserver(observer);
+ if (nativeObserver == 0) {
+ return null;
+ }
+ long nativePeerConnection = nativeCreatePeerConnection(
+ nativeFactory, rtcConfig, constraints, nativeObserver);
+ if (nativePeerConnection == 0) {
+ return null;
+ }
+ return new PeerConnection(nativePeerConnection, nativeObserver);
+ }
+
+ public PeerConnection createPeerConnection(
+ List<PeerConnection.IceServer> iceServers,
+ MediaConstraints constraints,
+ PeerConnection.Observer observer) {
+ PeerConnection.RTCConfiguration rtcConfig =
+ new PeerConnection.RTCConfiguration(iceServers);
+ return createPeerConnection(rtcConfig, constraints, observer);
+ }
+
+ public MediaStream createLocalMediaStream(String label) {
+ return new MediaStream(
+ nativeCreateLocalMediaStream(nativeFactory, label));
+ }
+
+ // The VideoSource takes ownership of |capturer|, so capturer.release() should not be called
+ // manually after this.
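+ // Illustrative sketch (assumes a VideoCapturer obtained elsewhere; the source
+ // takes ownership of it):
+ //   VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
+ //   VideoTrack track = factory.createVideoTrack("ARDAMSv0", source);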
+ public VideoSource createVideoSource(
+ VideoCapturer capturer, MediaConstraints constraints) {
+ final EglBase.Context eglContext =
+ localEglbase == null ? null : localEglbase.getEglBaseContext();
+ return new VideoSource(nativeCreateVideoSource(nativeFactory,
+ eglContext, capturer, constraints));
+ }
+
+ public VideoTrack createVideoTrack(String id, VideoSource source) {
+ return new VideoTrack(nativeCreateVideoTrack(
+ nativeFactory, id, source.nativeSource));
+ }
+
+ public AudioSource createAudioSource(MediaConstraints constraints) {
+ return new AudioSource(nativeCreateAudioSource(nativeFactory, constraints));
+ }
+
+ public AudioTrack createAudioTrack(String id, AudioSource source) {
+ return new AudioTrack(nativeCreateAudioTrack(
+ nativeFactory, id, source.nativeSource));
+ }
+
+ // Starts recording an AEC dump. Ownership of the file is transferred to the
+ // native code. If an AEC dump is already in progress, it will be stopped and
+ // a new one will start using the provided file.
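+ //
+ // Illustrative sketch (assumes android.os.ParcelFileDescriptor and a writable
+ // dumpFile; exception handling omitted and the 64 MB limit is arbitrary):
+ //   int fd = ParcelFileDescriptor.open(dumpFile,
+ //       ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
+ //           | ParcelFileDescriptor.MODE_TRUNCATE).detachFd();
+ //   factory.startAecDump(fd, 64 * 1024 * 1024);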
+ public boolean startAecDump(int file_descriptor, int filesize_limit_bytes) {
+ return nativeStartAecDump(nativeFactory, file_descriptor, filesize_limit_bytes);
+ }
+
+ // Stops recording an AEC dump. If no AEC dump is currently being recorded,
+ // this call will have no effect.
+ public void stopAecDump() {
+ nativeStopAecDump(nativeFactory);
+ }
+
+ // Starts recording an RTC event log. Ownership of the file is transferred to
+ // the native code. If an RTC event log is already being recorded, it will be
+ // stopped and a new one will start using the provided file.
+ public boolean startRtcEventLog(int file_descriptor) {
+ return startRtcEventLog(file_descriptor, -1);
+ }
+
+ // Same as above, but allows setting an upper limit to the size of the
+ // generated logfile.
+ public boolean startRtcEventLog(int file_descriptor,
+ int filesize_limit_bytes) {
+ return nativeStartRtcEventLog(nativeFactory,
+ file_descriptor,
+ filesize_limit_bytes);
+ }
+
+ // Stops recording an RTC event log. If no RTC event log is currently being
+ // recorded, this call will have no effect.
+ public void stopRtcEventLog() {
+ nativeStopRtcEventLog(nativeFactory);
+ }
+
+ @Deprecated
+ public void setOptions(Options options) {
+ nativeSetOptions(nativeFactory, options);
+ }
+
+ /** Set the EGL context used by HW Video encoding and decoding.
+ *
+ * @param localEglContext Must be the same as used by VideoCapturerAndroid and any local video
+ * renderer.
+ * @param remoteEglContext Must be the same as used by any remote video renderer.
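+ *
+ * Illustrative sketch, assuming the app keeps one root EglBase for all video:
+ * <pre>
+ * EglBase rootEglBase = EglBase.create();
+ * factory.setVideoHwAccelerationOptions(
+ *     rootEglBase.getEglBaseContext(), rootEglBase.getEglBaseContext());
+ * </pre>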
+ */
+ public void setVideoHwAccelerationOptions(EglBase.Context localEglContext,
+ EglBase.Context remoteEglContext) {
+ if (localEglbase != null) {
+ Logging.w(TAG, "Egl context already set.");
+ localEglbase.release();
+ }
+ if (remoteEglbase != null) {
+ Logging.w(TAG, "Egl context already set.");
+ remoteEglbase.release();
+ }
+ localEglbase = EglBase.create(localEglContext);
+ remoteEglbase = EglBase.create(remoteEglContext);
+ nativeSetVideoHwAccelerationOptions(nativeFactory, localEglbase.getEglBaseContext(),
+ remoteEglbase.getEglBaseContext());
+ }
+
+ public void dispose() {
+ nativeFreeFactory(nativeFactory);
+ networkThread = null;
+ workerThread = null;
+ signalingThread = null;
+ if (localEglbase != null)
+ localEglbase.release();
+ if (remoteEglbase != null)
+ remoteEglbase.release();
+ }
+
+ public void threadsCallbacks() {
+ nativeThreadsCallbacks(nativeFactory);
+ }
+
+ private static void printStackTrace(Thread thread, String threadName) {
+ if (thread != null) {
+ StackTraceElement[] stackTraces = thread.getStackTrace();
+ if (stackTraces.length > 0) {
+ Logging.d(TAG, threadName + " stack trace:");
+ for (StackTraceElement stackTrace : stackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ public static void printStackTraces() {
+ printStackTrace(networkThread, "Network thread");
+ printStackTrace(workerThread, "Worker thread");
+ printStackTrace(signalingThread, "Signaling thread");
+ }
+
+ private static void onNetworkThreadReady() {
+ networkThread = Thread.currentThread();
+ Logging.d(TAG, "onNetworkThreadReady");
+ }
+
+ private static void onWorkerThreadReady() {
+ workerThread = Thread.currentThread();
+ Logging.d(TAG, "onWorkerThreadReady");
+ }
+
+ private static void onSignalingThreadReady() {
+ signalingThread = Thread.currentThread();
+ Logging.d(TAG, "onSignalingThreadReady");
+ }
+
+ private static native long nativeCreatePeerConnectionFactory(Options options);
+
+ private static native long nativeCreateObserver(
+ PeerConnection.Observer observer);
+
+ private static native long nativeCreatePeerConnection(
+ long nativeFactory, PeerConnection.RTCConfiguration rtcConfig,
+ MediaConstraints constraints, long nativeObserver);
+
+ private static native long nativeCreateLocalMediaStream(
+ long nativeFactory, String label);
+
+ private static native long nativeCreateVideoSource(
+ long nativeFactory, EglBase.Context eglContext, VideoCapturer videoCapturer,
+ MediaConstraints constraints);
+
+ private static native long nativeCreateVideoTrack(
+ long nativeFactory, String id, long nativeVideoSource);
+
+ private static native long nativeCreateAudioSource(
+ long nativeFactory, MediaConstraints constraints);
+
+ private static native long nativeCreateAudioTrack(
+ long nativeFactory, String id, long nativeSource);
+
+ private static native boolean nativeStartAecDump(
+ long nativeFactory, int file_descriptor, int filesize_limit_bytes);
+
+ private static native void nativeStopAecDump(long nativeFactory);
+
+ private static native boolean nativeStartRtcEventLog(long nativeFactory,
+ int file_descriptor,
+ int filesize_limit_bytes);
+
+ private static native void nativeStopRtcEventLog(long nativeFactory);
+
+ @Deprecated
+ public native void nativeSetOptions(long nativeFactory, Options options);
+
+ private static native void nativeSetVideoHwAccelerationOptions(
+ long nativeFactory, Object localEGLContext, Object remoteEGLContext);
+
+ private static native void nativeThreadsCallbacks(long nativeFactory);
+
+ private static native void nativeFreeFactory(long nativeFactory);
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/RendererCommon.java b/webrtc/api/android/java/src/org/webrtc/RendererCommon.java
new file mode 100644
index 0000000..55547eb
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/RendererCommon.java
@@ -0,0 +1,246 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.Point;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Static helper functions for renderer implementations.
+ */
+public class RendererCommon {
+ /** Interface for reporting rendering events. */
+ public static interface RendererEvents {
+ /**
+ * Callback fired once first frame is rendered.
+ */
+ public void onFirstFrameRendered();
+
+ /**
+ * Callback fired when rendered frame resolution or rotation has changed.
+ */
+ public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
+ }
+
+ /** Interface for rendering frames on an EGLSurface. */
+ public static interface GlDrawer {
+ /**
+ * Functions for drawing frames with different sources. The rendering surface target is
+ * implied by the current EGL context of the calling thread and requires no explicit argument.
+ * The coordinates specify the viewport location on the surface target.
+ */
+ void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+ void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+ void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
+ int viewportX, int viewportY, int viewportWidth, int viewportHeight);
+
+ /**
+ * Release all GL resources. This needs to be done manually, otherwise resources may leak.
+ */
+ void release();
+ }
+
+ /**
+   * Helper class for uploading YUV ByteBuffer frames to textures; it handles stride > width. The
+   * class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
+ */
+ public static class YuvUploader {
+ // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
+ // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
+ // that handles stride and compare performance with intermediate copy.
+ private ByteBuffer copyBuffer;
+
+ /**
+ * Upload |planes| into |outputYuvTextures|, taking stride into consideration.
+ * |outputYuvTextures| must have been generated in advance.
+ */
+ public void uploadYuvData(
+ int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
+ final int[] planeWidths = new int[] {width, width / 2, width / 2};
+ final int[] planeHeights = new int[] {height, height / 2, height / 2};
+ // Make a first pass to see if we need a temporary copy buffer.
+ int copyCapacityNeeded = 0;
+ for (int i = 0; i < 3; ++i) {
+ if (strides[i] > planeWidths[i]) {
+ copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
+ }
+ }
+ // Allocate copy buffer if necessary.
+ if (copyCapacityNeeded > 0
+ && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
+ copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
+ }
+ // Upload each plane.
+ for (int i = 0; i < 3; ++i) {
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+ GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
+ // GLES only accepts packed data, i.e. stride == planeWidth.
+ final ByteBuffer packedByteBuffer;
+ if (strides[i] == planeWidths[i]) {
+ // Input is packed already.
+ packedByteBuffer = planes[i];
+ } else {
+ VideoRenderer.nativeCopyPlane(
+ planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
+ packedByteBuffer = copyBuffer;
+ }
+ GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
+ planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+ }
+ }
+ }
+
+ // Types of video scaling:
+ // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
+ // maintaining the aspect ratio (black borders may be displayed).
+ // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
+ // maintaining the aspect ratio. Some portion of the video frame may be
+ // clipped.
+ // SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
+ // possible of the view while maintaining aspect ratio, under the constraint that at least
+ // |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
+ public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
+ // The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
+ // This limits excessive cropping when adjusting display size.
+  private static final float BALANCED_VISIBLE_FRACTION = 0.5625f;
+ public static final float[] identityMatrix() {
+ return new float[] {
+ 1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 0, 0, 1};
+ }
+ // Matrix with transform y' = 1 - y.
+ public static final float[] verticalFlipMatrix() {
+ return new float[] {
+ 1, 0, 0, 0,
+ 0, -1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 1, 0, 1};
+ }
+
+ // Matrix with transform x' = 1 - x.
+ public static final float[] horizontalFlipMatrix() {
+ return new float[] {
+ -1, 0, 0, 0,
+ 0, 1, 0, 0,
+ 0, 0, 1, 0,
+ 1, 0, 0, 1};
+ }
+
+ /**
+ * Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
+ * clockwise when rendered.
+ */
+ public static float[] rotateTextureMatrix(float[] textureMatrix, float rotationDegree) {
+ final float[] rotationMatrix = new float[16];
+ Matrix.setRotateM(rotationMatrix, 0, rotationDegree, 0, 0, 1);
+ adjustOrigin(rotationMatrix);
+ return multiplyMatrices(textureMatrix, rotationMatrix);
+ }
+
+ /**
+ * Returns new matrix with the result of a * b.
+ */
+ public static float[] multiplyMatrices(float[] a, float[] b) {
+ final float[] resultMatrix = new float[16];
+ Matrix.multiplyMM(resultMatrix, 0, a, 0, b, 0);
+ return resultMatrix;
+ }
+
+ /**
+ * Returns layout transformation matrix that applies an optional mirror effect and compensates
+ * for video vs display aspect ratio.
+ */
+ public static float[] getLayoutMatrix(
+ boolean mirror, float videoAspectRatio, float displayAspectRatio) {
+ float scaleX = 1;
+ float scaleY = 1;
+ // Scale X or Y dimension so that video and display size have same aspect ratio.
+ if (displayAspectRatio > videoAspectRatio) {
+ scaleY = videoAspectRatio / displayAspectRatio;
+ } else {
+ scaleX = displayAspectRatio / videoAspectRatio;
+ }
+ // Apply optional horizontal flip.
+ if (mirror) {
+ scaleX *= -1;
+ }
+    final float[] matrix = new float[16];
+ Matrix.setIdentityM(matrix, 0);
+ Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
+ adjustOrigin(matrix);
+ return matrix;
+ }
+
+ /**
+ * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
+ */
+ public static Point getDisplaySize(ScalingType scalingType, float videoAspectRatio,
+ int maxDisplayWidth, int maxDisplayHeight) {
+ return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
+ maxDisplayWidth, maxDisplayHeight);
+ }
+
+ /**
+ * Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
+ * that are in the range 0 to 1.
+ */
+ private static void adjustOrigin(float[] matrix) {
+ // Note that OpenGL is using column-major order.
+ // Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
+ matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
+ matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
+ // Post translate with 0.5 to move coordinates to range [0, 1].
+ matrix[12] += 0.5f;
+ matrix[13] += 0.5f;
+ }
+
+ /**
+ * Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
+ * that must remain visible.
+ */
+ private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
+ switch (scalingType) {
+ case SCALE_ASPECT_FIT:
+ return 1.0f;
+ case SCALE_ASPECT_FILL:
+ return 0.0f;
+ case SCALE_ASPECT_BALANCED:
+ return BALANCED_VISIBLE_FRACTION;
+ default:
+ throw new IllegalArgumentException();
+ }
+ }
+
+ /**
+ * Calculate display size based on minimum fraction of the video that must remain visible,
+ * video aspect ratio, and maximum display size.
+ */
+ private static Point getDisplaySize(float minVisibleFraction, float videoAspectRatio,
+ int maxDisplayWidth, int maxDisplayHeight) {
+ // If there is no constraint on the amount of cropping, fill the allowed display area.
+ if (minVisibleFraction == 0 || videoAspectRatio == 0) {
+ return new Point(maxDisplayWidth, maxDisplayHeight);
+ }
+ // Each dimension is constrained on max display size and how much we are allowed to crop.
+ final int width = Math.min(maxDisplayWidth,
+ Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
+ final int height = Math.min(maxDisplayHeight,
+ Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
+ return new Point(width, height);
+ }
+}
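Worked example (not part of this CL): for a 16:9 video in a 1080x1920 portrait view with
SCALE_ASPECT_BALANCED, getDisplaySize() evaluates to
min(1080, round(1920 / 0.5625 * 16/9)) x min(1920, round(1080 / 0.5625 / (16/9))) = 1080x1080:

    // The video fills the view width and is cropped horizontally; at least
    // 56.25% (BALANCED_VISIBLE_FRACTION) of the frame content stays visible.
    Point size = RendererCommon.getDisplaySize(
        RendererCommon.ScalingType.SCALE_ASPECT_BALANCED, 16.0f / 9.0f, 1080, 1920);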
diff --git a/webrtc/api/android/java/src/org/webrtc/RtpParameters.java b/webrtc/api/android/java/src/org/webrtc/RtpParameters.java
new file mode 100644
index 0000000..4aa10fb
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/RtpParameters.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.List;
+import java.util.LinkedList;
+
+/**
+ * The parameters for an {@code RtpSender}, as defined in
+ * http://w3c.github.io/webrtc-pc/#rtcrtpsender-interface.
+ */
+public class RtpParameters {
+ public static class Encoding {
+ public boolean active = true;
+ // A null value means "no maximum bitrate".
+ public Integer maxBitrateBps;
+ }
+
+ public static class Codec {
+ int payloadType;
+ String mimeType;
+ int clockRate;
+ int channels = 1;
+ }
+
+ public final LinkedList<Encoding> encodings;
+ public final LinkedList<Codec> codecs;
+
+ public RtpParameters() {
+ encodings = new LinkedList<Encoding>();
+ codecs = new LinkedList<Codec>();
+ }
+}
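Usage sketch (not part of this CL): the typical use of Encoding.maxBitrateBps is capping a
sender's bitrate. Assuming |sender| is an RtpSender obtained from a PeerConnection:

    RtpParameters parameters = sender.getParameters();
    if (!parameters.encodings.isEmpty()) {
      // null means "no maximum"; set an explicit cap of 500 kbps.
      parameters.encodings.get(0).maxBitrateBps = 500 * 1000;
      sender.setParameters(parameters);
    }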
diff --git a/webrtc/api/android/java/src/org/webrtc/RtpReceiver.java b/webrtc/api/android/java/src/org/webrtc/RtpReceiver.java
new file mode 100644
index 0000000..1c4eef3
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/RtpReceiver.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ RtpReceiverInterface. */
+public class RtpReceiver {
+ final long nativeRtpReceiver;
+
+ private MediaStreamTrack cachedTrack;
+
+ public RtpReceiver(long nativeRtpReceiver) {
+ this.nativeRtpReceiver = nativeRtpReceiver;
+ long track = nativeGetTrack(nativeRtpReceiver);
+ // We can assume that an RtpReceiver always has an associated track.
+ cachedTrack = new MediaStreamTrack(track);
+ }
+
+ public MediaStreamTrack track() {
+ return cachedTrack;
+ }
+
+ public boolean setParameters(RtpParameters parameters) {
+ return nativeSetParameters(nativeRtpReceiver, parameters);
+ }
+
+ public RtpParameters getParameters() {
+ return nativeGetParameters(nativeRtpReceiver);
+ }
+
+ public String id() {
+ return nativeId(nativeRtpReceiver);
+ }
+
+ public void dispose() {
+ cachedTrack.dispose();
+ free(nativeRtpReceiver);
+ }
+
+ // This should increment the reference count of the track.
+ // Will be released in dispose().
+ private static native long nativeGetTrack(long nativeRtpReceiver);
+
+ private static native boolean nativeSetParameters(long nativeRtpReceiver,
+ RtpParameters parameters);
+
+ private static native RtpParameters nativeGetParameters(long nativeRtpReceiver);
+
+ private static native String nativeId(long nativeRtpReceiver);
+
+ private static native void free(long nativeRtpReceiver);
+}
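Usage sketch (not part of this CL): since a receiver always has a track (see the constructor
above), remote media can be muted directly on it. Assuming |receiver| was obtained from the
PeerConnection and that MediaStreamTrack exposes setEnabled():

    MediaStreamTrack track = receiver.track();
    track.setEnabled(false); // mute without tearing down the receiver
    // When done with the receiver, dispose() also releases the cached track.
    receiver.dispose();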
diff --git a/webrtc/api/android/java/src/org/webrtc/RtpSender.java b/webrtc/api/android/java/src/org/webrtc/RtpSender.java
new file mode 100644
index 0000000..2c094ac
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/RtpSender.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java wrapper for a C++ RtpSenderInterface. */
+public class RtpSender {
+ final long nativeRtpSender;
+
+ private MediaStreamTrack cachedTrack;
+ private boolean ownsTrack = true;
+
+ public RtpSender(long nativeRtpSender) {
+ this.nativeRtpSender = nativeRtpSender;
+ long track = nativeGetTrack(nativeRtpSender);
+ // It may be possible for an RtpSender to be created without a track.
+ cachedTrack = (track == 0) ? null : new MediaStreamTrack(track);
+ }
+
+ // If |takeOwnership| is true, the RtpSender takes ownership of the track
+ // from the caller, and will auto-dispose of it when no longer needed.
+ // |takeOwnership| should only be used if the caller owns the track; it is
+ // not appropriate when the track is owned by, for example, another RtpSender
+ // or a MediaStream.
+ public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
+ if (!nativeSetTrack(nativeRtpSender,
+ (track == null) ? 0 : track.nativeTrack)) {
+ return false;
+ }
+ if (cachedTrack != null && ownsTrack) {
+ cachedTrack.dispose();
+ }
+ cachedTrack = track;
+ ownsTrack = takeOwnership;
+ return true;
+ }
+
+ public MediaStreamTrack track() {
+ return cachedTrack;
+ }
+
+ public boolean setParameters(RtpParameters parameters) {
+ return nativeSetParameters(nativeRtpSender, parameters);
+ }
+
+ public RtpParameters getParameters() {
+ return nativeGetParameters(nativeRtpSender);
+ }
+
+ public String id() {
+ return nativeId(nativeRtpSender);
+ }
+
+ public void dispose() {
+ if (cachedTrack != null && ownsTrack) {
+ cachedTrack.dispose();
+ }
+ free(nativeRtpSender);
+ }
+
+ private static native boolean nativeSetTrack(long nativeRtpSender,
+ long nativeTrack);
+
+ // This should increment the reference count of the track.
+ // Will be released in dispose() or setTrack().
+ private static native long nativeGetTrack(long nativeRtpSender);
+
+ private static native boolean nativeSetParameters(long nativeRtpSender,
+ RtpParameters parameters);
+
+ private static native RtpParameters nativeGetParameters(long nativeRtpSender);
+
+ private static native String nativeId(long nativeRtpSender);
+
+ private static native void free(long nativeRtpSender);
+}
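Usage sketch (not part of this CL): the |takeOwnership| flag decides who disposes the track.
Assuming |factory| and |videoSource| exist in app code:

    // The sender takes ownership and will auto-dispose the track, so the
    // caller must not also add this track to a MediaStream or another sender.
    VideoTrack track = factory.createVideoTrack("video0", videoSource);
    sender.setTrack(track, true /* takeOwnership */);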
diff --git a/webrtc/api/android/java/src/org/webrtc/SdpObserver.java b/webrtc/api/android/java/src/org/webrtc/SdpObserver.java
new file mode 100644
index 0000000..a9c03f7
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/SdpObserver.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface for observing SDP-related events. */
+public interface SdpObserver {
+ /** Called on success of Create{Offer,Answer}(). */
+ public void onCreateSuccess(SessionDescription sdp);
+
+ /** Called on success of Set{Local,Remote}Description(). */
+ public void onSetSuccess();
+
+ /** Called on error of Create{Offer,Answer}(). */
+ public void onCreateFailure(String error);
+
+ /** Called on error of Set{Local,Remote}Description(). */
+ public void onSetFailure(String error);
+}
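Usage sketch (not part of this CL): a single observer often drives both the create and set
steps of an offer. Assuming |peerConnection| exists; callbacks arrive on a thread owned by the
native layer:

    SdpObserver observer = new SdpObserver() {
      @Override
      public void onCreateSuccess(SessionDescription sdp) {
        peerConnection.setLocalDescription(this, sdp); // reuse this observer for the set step
      }
      @Override
      public void onSetSuccess() {}
      @Override
      public void onCreateFailure(String error) {
        Logging.e("AppSdp", "createOffer failed: " + error);
      }
      @Override
      public void onSetFailure(String error) {
        Logging.e("AppSdp", "setLocalDescription failed: " + error);
      }
    };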
diff --git a/webrtc/api/android/java/src/org/webrtc/SessionDescription.java b/webrtc/api/android/java/src/org/webrtc/SessionDescription.java
new file mode 100644
index 0000000..f6dd361
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/SessionDescription.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+package org.webrtc;
+
+/**
+ * Description of an RFC 4566 Session.
+ * SDPs are passed as serialized Strings in Java-land and are materialized
+ * to SessionDescriptionInterface as appropriate in the JNI layer.
+ */
+public class SessionDescription {
+ /** Java-land enum version of SessionDescriptionInterface's type() string. */
+ public static enum Type {
+ OFFER, PRANSWER, ANSWER;
+
+ public String canonicalForm() {
+ return name().toLowerCase();
+ }
+
+ public static Type fromCanonicalForm(String canonical) {
+ return Type.valueOf(Type.class, canonical.toUpperCase());
+ }
+ }
+
+ public final Type type;
+ public final String description;
+
+ public SessionDescription(Type type, String description) {
+ this.type = type;
+ this.description = description;
+ }
+}
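Usage sketch (not part of this CL): canonicalForm() matches the SDP "type" value on the wire,
so the type round-trips through its string form. With |sdpString| an assumed serialized SDP:

    SessionDescription offer = new SessionDescription(
        SessionDescription.Type.fromCanonicalForm("offer"), sdpString);
    // offer.type == Type.OFFER, and offer.type.canonicalForm() returns "offer".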
diff --git a/webrtc/api/android/java/src/org/webrtc/StatsObserver.java b/webrtc/api/android/java/src/org/webrtc/StatsObserver.java
new file mode 100644
index 0000000..b1ad0de
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/StatsObserver.java
@@ -0,0 +1,17 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Interface for observing Stats reports (see webrtc::StatsObservers). */
+public interface StatsObserver {
+  /** Called when the reports are ready. */
+ public void onComplete(StatsReport[] reports);
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/StatsReport.java b/webrtc/api/android/java/src/org/webrtc/StatsReport.java
new file mode 100644
index 0000000..0082b6e
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/StatsReport.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+/** Java version of webrtc::StatsReport. */
+public class StatsReport {
+
+ /** Java version of webrtc::StatsReport::Value. */
+ public static class Value {
+ public final String name;
+ public final String value;
+
+ public Value(String name, String value) {
+ this.name = name;
+ this.value = value;
+ }
+
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("[").append(name).append(": ").append(value).append("]");
+ return builder.toString();
+ }
+ }
+
+ public final String id;
+ public final String type;
+ // Time since 1970-01-01T00:00:00Z in milliseconds.
+ public final double timestamp;
+ public final Value[] values;
+
+ public StatsReport(String id, String type, double timestamp, Value[] values) {
+ this.id = id;
+ this.type = type;
+ this.timestamp = timestamp;
+ this.values = values;
+ }
+
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("id: ").append(id).append(", type: ").append(type)
+ .append(", timestamp: ").append(timestamp).append(", values: ");
+ for (int i = 0; i < values.length; ++i) {
+ builder.append(values[i].toString()).append(", ");
+ }
+ return builder.toString();
+ }
+}
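Usage sketch (not part of this CL): StatsObserver and StatsReport are consumed together;
onComplete() delivers a one-shot snapshot. Assuming |peerConnection| exists and exposes the
legacy getStats(observer, track) entry point (a null track selects all stats):

    peerConnection.getStats(new StatsObserver() {
      @Override
      public void onComplete(StatsReport[] reports) {
        for (StatsReport report : reports) {
          Logging.d("AppStats", report.toString());
        }
      }
    }, null);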
diff --git a/webrtc/api/android/java/src/org/webrtc/SurfaceTextureHelper.java b/webrtc/api/android/java/src/org/webrtc/SurfaceTextureHelper.java
new file mode 100644
index 0000000..5677209
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/SurfaceTextureHelper.java
@@ -0,0 +1,499 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.SystemClock;
+
+import java.nio.ByteBuffer;
+import java.nio.FloatBuffer;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
+ * of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
+ * the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
+ * called in order to receive a new frame. Call stopListening() to stop receiving new frames. Call
+ * dispose() to release all resources once the texture frame is returned.
+ * Note that there is a C++ counterpart of this class that can optionally be used. It is used for
+ * wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
+ * when the webrtc::VideoFrame is no longer used.
+ */
+class SurfaceTextureHelper {
+ private static final String TAG = "SurfaceTextureHelper";
+ /**
+ * Callback interface for being notified that a new texture frame is available. The calls will be
+ * made on a dedicated thread with a bound EGLContext. The thread will be the same throughout the
+ * lifetime of the SurfaceTextureHelper instance, but different from the thread calling the
+ * SurfaceTextureHelper constructor. The callee is not allowed to make another EGLContext current
+ * on the calling thread.
+ */
+ public interface OnTextureFrameAvailableListener {
+ abstract void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs);
+ }
+
+ /**
+ * Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. A dedicated
+ * thread and handler is created for handling the SurfaceTexture. May return null if EGL fails to
+ * initialize a pixel buffer surface and make it current.
+ */
+ public static SurfaceTextureHelper create(
+ final String threadName, final EglBase.Context sharedContext) {
+ final HandlerThread thread = new HandlerThread(threadName);
+ thread.start();
+ final Handler handler = new Handler(thread.getLooper());
+
+ // The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
+ // http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
+ // Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
+ // is constructed on the |handler| thread.
+ return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
+ @Override
+ public SurfaceTextureHelper call() {
+ try {
+ return new SurfaceTextureHelper(sharedContext, handler);
+ } catch (RuntimeException e) {
+ Logging.e(TAG, threadName + " create failure", e);
+ return null;
+ }
+ }
+ });
+ }
+
+ // State for YUV conversion, instantiated on demand.
+  private static class YuvConverter {
+ private final EglBase eglBase;
+ private final GlShader shader;
+ private boolean released = false;
+
+ // Vertex coordinates in Normalized Device Coordinates, i.e.
+ // (-1, -1) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer DEVICE_RECTANGLE =
+ GlUtil.createFloatBuffer(new float[] {
+ -1.0f, -1.0f, // Bottom left.
+ 1.0f, -1.0f, // Bottom right.
+ -1.0f, 1.0f, // Top left.
+ 1.0f, 1.0f, // Top right.
+ });
+
+ // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
+ private static final FloatBuffer TEXTURE_RECTANGLE =
+ GlUtil.createFloatBuffer(new float[] {
+ 0.0f, 0.0f, // Bottom left.
+ 1.0f, 0.0f, // Bottom right.
+ 0.0f, 1.0f, // Top left.
+ 1.0f, 1.0f // Top right.
+ });
+
+ private static final String VERTEX_SHADER =
+ "varying vec2 interp_tc;\n"
+ + "attribute vec4 in_pos;\n"
+ + "attribute vec4 in_tc;\n"
+ + "\n"
+ + "uniform mat4 texMatrix;\n"
+ + "\n"
+ + "void main() {\n"
+ + " gl_Position = in_pos;\n"
+ + " interp_tc = (texMatrix * in_tc).xy;\n"
+ + "}\n";
+
+ private static final String FRAGMENT_SHADER =
+ "#extension GL_OES_EGL_image_external : require\n"
+ + "precision mediump float;\n"
+ + "varying vec2 interp_tc;\n"
+ + "\n"
+ + "uniform samplerExternalOES oesTex;\n"
+ // Difference in texture coordinate corresponding to one
+ // sub-pixel in the x direction.
+ + "uniform vec2 xUnit;\n"
+ // Color conversion coefficients, including constant term
+ + "uniform vec4 coeffs;\n"
+ + "\n"
+ + "void main() {\n"
+ // Since the alpha read from the texture is always 1, this could
+ // be written as a mat4 x vec4 multiply. However, that seems to
+ // give a worse framerate, possibly because the additional
+ // multiplies by 1.0 consume resources. TODO(nisse): Could also
+ // try to do it as a vec3 x mat3x4, followed by an add in of a
+ // constant vector.
+ + " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+ + " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+ + " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ + " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+ + "}\n";
+
+ private int texMatrixLoc;
+ private int xUnitLoc;
+    private int coeffsLoc;
+
+    YuvConverter(EglBase.Context sharedContext) {
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_RGBA_BUFFER);
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+
+ shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
+ shader.useProgram();
+ texMatrixLoc = shader.getUniformLocation("texMatrix");
+ xUnitLoc = shader.getUniformLocation("xUnit");
+ coeffsLoc = shader.getUniformLocation("coeffs");
+ GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
+ GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
+ // Initialize vertex shader attributes.
+ shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
+ // If the width is not a multiple of 4 pixels, the texture
+ // will be scaled up slightly and clipped at the right border.
+ shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
+ eglBase.detachCurrent();
+ }
+
+ synchronized void convert(ByteBuffer buf,
+ int width, int height, int stride, int textureId, float [] transformMatrix) {
+ if (released) {
+ throw new IllegalStateException(
+ "YuvConverter.convert called on released object");
+ }
+
+ // We draw into a buffer laid out like
+ //
+ // +---------+
+ // | |
+ // | Y |
+ // | |
+ // | |
+ // +----+----+
+ // | U | V |
+ // | | |
+ // +----+----+
+ //
+ // In memory, we use the same stride for all of Y, U and V. The
+ // U data starts at offset |height| * |stride| from the Y data,
+      // and the V data starts at offset |stride/2| from the U
+ // data, with rows of U and V data alternating.
+ //
+ // Now, it would have made sense to allocate a pixel buffer with
+ // a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
+      // EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
+ // unsupported by devices. So do the following hack: Allocate an
+ // RGBA buffer, of width |stride|/4. To render each of these
+ // large pixels, sample the texture at 4 different x coordinates
+ // and store the results in the four components.
+ //
+ // Since the V data needs to start on a boundary of such a
+ // larger pixel, it is not sufficient that |stride| is even, it
+ // has to be a multiple of 8 pixels.
+
+ if (stride % 8 != 0) {
+ throw new IllegalArgumentException(
+ "Invalid stride, must be a multiple of 8");
+ }
+      if (stride < width) {
+        throw new IllegalArgumentException(
+          "Invalid stride, must be >= width");
+ }
+
+      int y_width = (width + 3) / 4;
+      int uv_width = (width + 7) / 8;
+      int uv_height = (height + 1) / 2;
+ int total_height = height + uv_height;
+ int size = stride * total_height;
+
+ if (buf.capacity() < size) {
+ throw new IllegalArgumentException("YuvConverter.convert called with too small buffer");
+ }
+ // Produce a frame buffer starting at top-left corner, not
+ // bottom-left.
+ transformMatrix =
+ RendererCommon.multiplyMatrices(transformMatrix,
+ RendererCommon.verticalFlipMatrix());
+
+      // Create a new pbuffer surface with the correct size if needed.
+      if (eglBase.hasSurface()) {
+        if (eglBase.surfaceWidth() != stride / 4 ||
+            eglBase.surfaceHeight() != total_height) {
+ eglBase.releaseSurface();
+ eglBase.createPbufferSurface(stride/4, total_height);
+ }
+ } else {
+ eglBase.createPbufferSurface(stride/4, total_height);
+ }
+
+ eglBase.makeCurrent();
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
+ GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
+
+ // Draw Y
+ GLES20.glViewport(0, 0, y_width, height);
+      // Matrix * (1;0;0;0) / width. Note that OpenGL uses column-major order.
+ GLES20.glUniform2f(xUnitLoc,
+ transformMatrix[0] / width,
+ transformMatrix[1] / width);
+ // Y'UV444 to RGB888, see
+ // https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
+      // We use the ITU-R coefficients for U and V.
+ GLES20.glUniform4f(coeffsLoc, 0.299f, 0.587f, 0.114f, 0.0f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ // Draw U
+ GLES20.glViewport(0, height, uv_width, uv_height);
+      // Matrix * (1;0;0;0) / (width / 2). Note that OpenGL uses column-major order.
+ GLES20.glUniform2f(xUnitLoc,
+ 2.0f * transformMatrix[0] / width,
+ 2.0f * transformMatrix[1] / width);
+ GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ // Draw V
+ GLES20.glViewport(stride/8, height, uv_width, uv_height);
+ GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+
+ GLES20.glReadPixels(0, 0, stride/4, total_height, GLES20.GL_RGBA,
+ GLES20.GL_UNSIGNED_BYTE, buf);
+
+ GlUtil.checkNoGLES2Error("YuvConverter.convert");
+
+ // Unbind texture. Reportedly needed on some devices to get
+ // the texture updated from the camera.
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
+ eglBase.detachCurrent();
+ }
+
+ synchronized void release() {
+ released = true;
+ eglBase.makeCurrent();
+ shader.release();
+ eglBase.release();
+ }
+ }
+
+ private final Handler handler;
+ private final EglBase eglBase;
+ private final SurfaceTexture surfaceTexture;
+ private final int oesTextureId;
+  private volatile YuvConverter yuvConverter;
+
+ // These variables are only accessed from the |handler| thread.
+ private OnTextureFrameAvailableListener listener;
+ // The possible states of this class.
+ private boolean hasPendingTexture = false;
+ private volatile boolean isTextureInUse = false;
+ private boolean isQuitting = false;
+ // |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
+ // setListener() is not allowed to be called again before stopListening(), so this is thread safe.
+ private OnTextureFrameAvailableListener pendingListener;
+ final Runnable setListenerRunnable = new Runnable() {
+ @Override
+ public void run() {
+ Logging.d(TAG, "Setting listener to " + pendingListener);
+ listener = pendingListener;
+ pendingListener = null;
+ // May have a pending frame from the previous capture session - drop it.
+ if (hasPendingTexture) {
+        // Calling updateTexImage() is necessary in order to receive new frames.
+ updateTexImage();
+ hasPendingTexture = false;
+ }
+ }
+ };
+
+ private SurfaceTextureHelper(EglBase.Context sharedContext, Handler handler) {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
+ }
+ this.handler = handler;
+
+ eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
+ try {
+ // Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682.
+ eglBase.createDummyPbufferSurface();
+ eglBase.makeCurrent();
+ } catch (RuntimeException e) {
+ // Clean up before rethrowing the exception.
+ eglBase.release();
+ handler.getLooper().quit();
+ throw e;
+ }
+
+ oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+ surfaceTexture = new SurfaceTexture(oesTextureId);
+ surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
+ @Override
+ public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+ hasPendingTexture = true;
+ tryDeliverTextureFrame();
+ }
+ });
+ }
+
+  private YuvConverter getYuvConverter() {
+    // |yuvConverter| is assigned at most once, using double-checked locking
+    // on the volatile field above.
+    if (yuvConverter != null)
+      return yuvConverter;
+    synchronized (this) {
+      if (yuvConverter == null)
+        yuvConverter = new YuvConverter(eglBase.getEglBaseContext());
+      return yuvConverter;
+    }
+  }
+
+ /**
+ * Start to stream textures to the given |listener|. If you need to change listener, you need to
+ * call stopListening() first.
+ */
+ public void startListening(final OnTextureFrameAvailableListener listener) {
+ if (this.listener != null || this.pendingListener != null) {
+ throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
+ }
+ this.pendingListener = listener;
+ handler.post(setListenerRunnable);
+ }
+
+ /**
+   * Stop listening. The listener set in startListening() is guaranteed not to receive any more
+ * onTextureFrameAvailable() callbacks after this function returns.
+ */
+ public void stopListening() {
+ Logging.d(TAG, "stopListening()");
+ handler.removeCallbacks(setListenerRunnable);
+ ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
+ @Override
+ public void run() {
+ listener = null;
+ pendingListener = null;
+ }
+ });
+ }
+
+ /**
+ * Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
+ * producer such as a camera or decoder.
+ */
+ public SurfaceTexture getSurfaceTexture() {
+ return surfaceTexture;
+ }
+
+ /**
+ * Retrieve the handler that calls onTextureFrameAvailable(). This handler is valid until
+ * dispose() is called.
+ */
+ public Handler getHandler() {
+ return handler;
+ }
+
+ /**
+ * Call this function to signal that you are done with the frame received in
+ * onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
+ * this function in order to receive a new frame.
+ */
+ public void returnTextureFrame() {
+ handler.post(new Runnable() {
+ @Override public void run() {
+ isTextureInUse = false;
+ if (isQuitting) {
+ release();
+ } else {
+ tryDeliverTextureFrame();
+ }
+ }
+ });
+ }
+
+ public boolean isTextureInUse() {
+ return isTextureInUse;
+ }
+
+ /**
+   * Call dispose() to stop receiving frames. OpenGL resources are released and the handler is
+   * stopped when the texture frame has been returned by a call to returnTextureFrame(). You are
+   * guaranteed not to receive any more onTextureFrameAvailable() callbacks after this returns.
+ */
+ public void dispose() {
+ Logging.d(TAG, "dispose()");
+ ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
+ @Override
+ public void run() {
+ isQuitting = true;
+ if (!isTextureInUse) {
+ release();
+ }
+ }
+ });
+ }
+
+ public void textureToYUV(ByteBuffer buf,
+ int width, int height, int stride, int textureId, float [] transformMatrix) {
+ if (textureId != oesTextureId)
+ throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
+
+ getYuvConverter().convert(buf, width, height, stride, textureId, transformMatrix);
+ }
+
+ private void updateTexImage() {
+ // SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
+ // as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
+ // See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
+ synchronized (EglBase.lock) {
+ surfaceTexture.updateTexImage();
+ }
+ }
+
+ private void tryDeliverTextureFrame() {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("Wrong thread.");
+ }
+ if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) {
+ return;
+ }
+ isTextureInUse = true;
+ hasPendingTexture = false;
+
+ updateTexImage();
+
+ final float[] transformMatrix = new float[16];
+ surfaceTexture.getTransformMatrix(transformMatrix);
+ final long timestampNs = (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH)
+ ? surfaceTexture.getTimestamp()
+ : TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+ listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
+ }
+
+ private void release() {
+ if (handler.getLooper().getThread() != Thread.currentThread()) {
+ throw new IllegalStateException("Wrong thread.");
+ }
+ if (isTextureInUse || !isQuitting) {
+ throw new IllegalStateException("Unexpected release.");
+ }
+ synchronized (this) {
+ if (yuvConverter != null)
+ yuvConverter.release();
+ }
+ GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+ surfaceTexture.release();
+ eglBase.release();
+ handler.getLooper().quit();
+ }
+}
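Usage sketch (not part of this CL): the one-frame-in-flight contract means each frame must be
returned before the next is delivered. Assuming |eglContext| is a shared EglBase.Context and
the code lives in org.webrtc (the class is package-private); create() may return null on EGL
failure, error handling omitted:

    final SurfaceTextureHelper helper = SurfaceTextureHelper.create("STHelper", eglContext);
    helper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
      @Override
      public void onTextureFrameAvailable(
          int oesTextureId, float[] transformMatrix, long timestampNs) {
        // ... consume the OES texture on this thread ...
        helper.returnTextureFrame(); // required, or no further frames arrive
      }
    });
    // Later: stop before switching listeners, dispose when done.
    helper.stopListening();
    helper.dispose();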
diff --git a/webrtc/api/android/java/src/org/webrtc/SurfaceViewRenderer.java b/webrtc/api/android/java/src/org/webrtc/SurfaceViewRenderer.java
new file mode 100644
index 0000000..c37d247
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/SurfaceViewRenderer.java
@@ -0,0 +1,565 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import android.content.res.Resources.NotFoundException;
+import android.graphics.Point;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.AttributeSet;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+import org.webrtc.Logging;
+
+import java.util.concurrent.CountDownLatch;
+
+import javax.microedition.khronos.egl.EGLContext;
+
+/**
+ * Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView.
+ * renderFrame() is asynchronous to avoid blocking the calling thread.
+ * This class is thread safe and handles access from potentially four different threads:
+ * Interaction from the main app in init, release, setMirror, and setScalingType.
+ * Interaction from C++ rtc::VideoSinkInterface in renderFrame.
+ * Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
+ * Interaction with the layout framework in onMeasure and onSizeChanged.
+ */
+public class SurfaceViewRenderer extends SurfaceView
+ implements SurfaceHolder.Callback, VideoRenderer.Callbacks {
+ private static final String TAG = "SurfaceViewRenderer";
+
+ // Dedicated render thread.
+ private HandlerThread renderThread;
+ // |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
+ // on |handlerLock|.
+ private final Object handlerLock = new Object();
+ private Handler renderThreadHandler;
+
+  // EGL and GL resources for drawing YUV/OES textures. After initialization, these are only accessed
+ // from the render thread.
+ private EglBase eglBase;
+ private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+ private RendererCommon.GlDrawer drawer;
+ // Texture ids for YUV frames. Allocated on first arrival of a YUV frame.
+ private int[] yuvTextures = null;
+
+ // Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
+ private final Object frameLock = new Object();
+ private VideoRenderer.I420Frame pendingFrame;
+
+ // These variables are synchronized on |layoutLock|.
+ private final Object layoutLock = new Object();
+ // These dimension values are used to keep track of the state in these functions: onMeasure(),
+ // onLayout(), and surfaceChanged(). A new layout is triggered with requestLayout(). This happens
+ // internally when the incoming frame size changes. requestLayout() can also be triggered
+ // externally. The layout change is a two pass process: first onMeasure() is called in a top-down
+ // traversal of the View tree, followed by an onLayout() pass that is also top-down. During the
+ // onLayout() pass, each parent is responsible for positioning its children using the sizes
+ // computed in the measure pass.
+  // |desiredLayoutSize| is the layout size we have requested in onMeasure(), and are waiting
+  // for it to take effect.
+ private Point desiredLayoutSize = new Point();
+ // |layoutSize|/|surfaceSize| is the actual current layout/surface size. They are updated in
+ // onLayout() and surfaceChanged() respectively.
+ private final Point layoutSize = new Point();
+ // TODO(magjed): Enable hardware scaler with SurfaceHolder.setFixedSize(). This will decouple
+ // layout and surface size.
+ private final Point surfaceSize = new Point();
+ // |isSurfaceCreated| keeps track of the current status in surfaceCreated()/surfaceDestroyed().
+ private boolean isSurfaceCreated;
+ // Last rendered frame dimensions, or 0 if no frame has been rendered yet.
+ private int frameWidth;
+ private int frameHeight;
+ private int frameRotation;
+ // |scalingType| determines how the video will fill the allowed layout area in onMeasure().
+ private RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_BALANCED;
+ // If true, mirrors the video stream horizontally.
+ private boolean mirror;
+ // Callback for reporting renderer events.
+ private RendererCommon.RendererEvents rendererEvents;
+
+ // These variables are synchronized on |statisticsLock|.
+ private final Object statisticsLock = new Object();
+ // Total number of video frames received in renderFrame() call.
+ private int framesReceived;
+ // Number of video frames dropped by renderFrame() because previous frame has not been rendered
+ // yet.
+ private int framesDropped;
+ // Number of rendered video frames.
+ private int framesRendered;
+ // Time in ns when the first video frame was rendered.
+ private long firstFrameTimeNs;
+ // Time in ns spent in renderFrameOnRenderThread() function.
+ private long renderTimeNs;
+
+ // Runnable for posting frames to render thread.
+ private final Runnable renderFrameRunnable = new Runnable() {
+ @Override public void run() {
+ renderFrameOnRenderThread();
+ }
+ };
+ // Runnable for clearing Surface to black.
+ private final Runnable makeBlackRunnable = new Runnable() {
+ @Override public void run() {
+ makeBlack();
+ }
+ };
+
+ /**
+ * Standard View constructor. In order to render something, you must first call init().
+ */
+ public SurfaceViewRenderer(Context context) {
+ super(context);
+ getHolder().addCallback(this);
+ }
+
+ /**
+ * Standard View constructor. In order to render something, you must first call init().
+ */
+ public SurfaceViewRenderer(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ getHolder().addCallback(this);
+ }
+
+ /**
+ * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
+ * reinitialize the renderer after a previous init()/release() cycle.
+ */
+ public void init(
+ EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
+ init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
+ }
+
+ /**
+ * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
+ * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+ * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
+ * init()/release() cycle.
+ */
+ public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
+ int[] configAttributes, RendererCommon.GlDrawer drawer) {
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ throw new IllegalStateException(getResourceName() + "Already initialized");
+ }
+ Logging.d(TAG, getResourceName() + "Initializing.");
+ this.rendererEvents = rendererEvents;
+ this.drawer = drawer;
+ renderThread = new HandlerThread(TAG);
+ renderThread.start();
+ eglBase = EglBase.create(sharedContext, configAttributes);
+ renderThreadHandler = new Handler(renderThread.getLooper());
+ }
+ tryCreateEglSurface();
+ }
+
+ /**
+ * Create and make an EGLSurface current if both init() and surfaceCreated() have been called.
+ */
+ public void tryCreateEglSurface() {
+ // |renderThreadHandler| is only created after |eglBase| is created in init(), so the
+ // following code will only execute if eglBase != null.
+ runOnRenderThread(new Runnable() {
+ @Override public void run() {
+ synchronized (layoutLock) {
+ if (isSurfaceCreated && !eglBase.hasSurface()) {
+ eglBase.createSurface(getHolder().getSurface());
+ eglBase.makeCurrent();
+ // Necessary for YUV frames with odd width.
+ GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+ }
+ }
+ }
+ });
+ }
+
+ /**
+ * Block until any pending frame is returned and all GL resources released, even if an interrupt
+ * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
+ * should be called before the Activity is destroyed and the EGLContext is still valid. If you
+ * don't call this function, the GL resources might leak.
+ */
+ public void release() {
+ final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ Logging.d(TAG, getResourceName() + "Already released");
+ return;
+ }
+ // Release EGL and GL resources on render thread.
+ // TODO(magjed): This might not be necessary - all OpenGL resources are automatically deleted
+ // when the EGL context is lost. It might be dangerous to delete them manually in
+ // Activity.onDestroy().
+ renderThreadHandler.postAtFrontOfQueue(new Runnable() {
+ @Override public void run() {
+ drawer.release();
+ drawer = null;
+ if (yuvTextures != null) {
+ GLES20.glDeleteTextures(3, yuvTextures, 0);
+ yuvTextures = null;
+ }
+ // Clear last rendered image to black.
+ makeBlack();
+ eglBase.release();
+ eglBase = null;
+ eglCleanupBarrier.countDown();
+ }
+ });
+ // Don't accept any more frames or messages to the render thread.
+ renderThreadHandler = null;
+ }
+ // Make sure the EGL/GL cleanup posted above is executed.
+ ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
+ renderThread.quit();
+ synchronized (frameLock) {
+ if (pendingFrame != null) {
+ VideoRenderer.renderFrameDone(pendingFrame);
+ pendingFrame = null;
+ }
+ }
+ // The |renderThread| cleanup is not safe to cancel and we need to wait until it's done.
+ ThreadUtils.joinUninterruptibly(renderThread);
+ renderThread = null;
+ // Reset statistics and event reporting.
+ synchronized (layoutLock) {
+ frameWidth = 0;
+ frameHeight = 0;
+ frameRotation = 0;
+ rendererEvents = null;
+ }
+ resetStatistics();
+ }
+
+ /**
+ * Reset statistics. This will reset the logged statistics in logStatistics(), and
+ * RendererEvents.onFirstFrameRendered() will be called for the next frame.
+ */
+ public void resetStatistics() {
+ synchronized (statisticsLock) {
+ framesReceived = 0;
+ framesDropped = 0;
+ framesRendered = 0;
+ firstFrameTimeNs = 0;
+ renderTimeNs = 0;
+ }
+ }
+
+ /**
+ * Set if the video stream should be mirrored or not.
+ */
+ public void setMirror(final boolean mirror) {
+ synchronized (layoutLock) {
+ this.mirror = mirror;
+ }
+ }
+
+ /**
+ * Set how the video will fill the allowed layout area.
+ */
+ public void setScalingType(RendererCommon.ScalingType scalingType) {
+ synchronized (layoutLock) {
+ this.scalingType = scalingType;
+ }
+ }
+
+ // VideoRenderer.Callbacks interface.
+ @Override
+ public void renderFrame(VideoRenderer.I420Frame frame) {
+ synchronized (statisticsLock) {
+ ++framesReceived;
+ }
+ synchronized (handlerLock) {
+ if (renderThreadHandler == null) {
+ Logging.d(TAG, getResourceName()
+ + "Dropping frame - Not initialized or already released.");
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ synchronized (frameLock) {
+ if (pendingFrame != null) {
+ // Drop old frame.
+ synchronized (statisticsLock) {
+ ++framesDropped;
+ }
+ VideoRenderer.renderFrameDone(pendingFrame);
+ }
+ pendingFrame = frame;
+ updateFrameDimensionsAndReportEvents(frame);
+ renderThreadHandler.post(renderFrameRunnable);
+ }
+ }
+ }
+
+ // Returns desired layout size given current measure specification and video aspect ratio.
+ private Point getDesiredLayoutSize(int widthSpec, int heightSpec) {
+ synchronized (layoutLock) {
+ final int maxWidth = getDefaultSize(Integer.MAX_VALUE, widthSpec);
+ final int maxHeight = getDefaultSize(Integer.MAX_VALUE, heightSpec);
+ final Point size =
+ RendererCommon.getDisplaySize(scalingType, frameAspectRatio(), maxWidth, maxHeight);
+ if (MeasureSpec.getMode(widthSpec) == MeasureSpec.EXACTLY) {
+ size.x = maxWidth;
+ }
+ if (MeasureSpec.getMode(heightSpec) == MeasureSpec.EXACTLY) {
+ size.y = maxHeight;
+ }
+ return size;
+ }
+ }
+
+ // View layout interface.
+ @Override
+ protected void onMeasure(int widthSpec, int heightSpec) {
+ synchronized (layoutLock) {
+ if (frameWidth == 0 || frameHeight == 0) {
+ super.onMeasure(widthSpec, heightSpec);
+ return;
+ }
+ desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec);
+ if (desiredLayoutSize.x != getMeasuredWidth() || desiredLayoutSize.y != getMeasuredHeight()) {
+        // Clear the surface ASAP before the layout change to avoid stretched video and other
+        // render artifacts. Don't wait for it to finish because the IO thread should never be
+ // blocked, so it's a best-effort attempt.
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.postAtFrontOfQueue(makeBlackRunnable);
+ }
+ }
+ }
+ setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y);
+ }
+ }
+
+ @Override
+ protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+ synchronized (layoutLock) {
+ layoutSize.x = right - left;
+ layoutSize.y = bottom - top;
+ }
+ // Might have a pending frame waiting for a layout of correct size.
+ runOnRenderThread(renderFrameRunnable);
+ }
+
+ // SurfaceHolder.Callback interface.
+ @Override
+ public void surfaceCreated(final SurfaceHolder holder) {
+ Logging.d(TAG, getResourceName() + "Surface created.");
+ synchronized (layoutLock) {
+ isSurfaceCreated = true;
+ }
+ tryCreateEglSurface();
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ Logging.d(TAG, getResourceName() + "Surface destroyed.");
+ synchronized (layoutLock) {
+ isSurfaceCreated = false;
+ surfaceSize.x = 0;
+ surfaceSize.y = 0;
+ }
+ runOnRenderThread(new Runnable() {
+ @Override public void run() {
+ eglBase.releaseSurface();
+ }
+ });
+ }
+
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+ Logging.d(TAG, getResourceName() + "Surface changed: " + width + "x" + height);
+ synchronized (layoutLock) {
+ surfaceSize.x = width;
+ surfaceSize.y = height;
+ }
+ // Might have a pending frame waiting for a surface of correct size.
+ runOnRenderThread(renderFrameRunnable);
+ }
+
+ /**
+ * Private helper function to post tasks safely.
+ */
+ private void runOnRenderThread(Runnable runnable) {
+ synchronized (handlerLock) {
+ if (renderThreadHandler != null) {
+ renderThreadHandler.post(runnable);
+ }
+ }
+ }
+
+ private String getResourceName() {
+ try {
+ return getResources().getResourceEntryName(getId()) + ": ";
+ } catch (NotFoundException e) {
+ return "";
+ }
+ }
+
+ private void makeBlack() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
+ if (eglBase != null && eglBase.hasSurface()) {
+ GLES20.glClearColor(0, 0, 0, 0);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ eglBase.swapBuffers();
+ }
+ }
+
+ /**
+ * Requests new layout if necessary. Returns true if layout and surface size are consistent.
+ */
+ private boolean checkConsistentLayout() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
+ synchronized (layoutLock) {
+ // Return false while we are in the middle of a layout change.
+ return layoutSize.equals(desiredLayoutSize) && surfaceSize.equals(layoutSize);
+ }
+ }
+
+ /**
+ * Renders and releases |pendingFrame|.
+ */
+ private void renderFrameOnRenderThread() {
+ if (Thread.currentThread() != renderThread) {
+ throw new IllegalStateException(getResourceName() + "Wrong thread.");
+ }
+ // Fetch and render |pendingFrame|.
+ final VideoRenderer.I420Frame frame;
+ synchronized (frameLock) {
+ if (pendingFrame == null) {
+ return;
+ }
+ frame = pendingFrame;
+ pendingFrame = null;
+ }
+ if (eglBase == null || !eglBase.hasSurface()) {
+ Logging.d(TAG, getResourceName() + "No surface to draw on");
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ if (!checkConsistentLayout()) {
+ // Output intermediate black frames while the layout is updated.
+ makeBlack();
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ // After a surface size change, the EGLSurface might still have a buffer of the old size in the
+ // pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet
+ // changed. Such a buffer will be rendered incorrectly, so flush it with a black frame.
+ synchronized (layoutLock) {
+ if (eglBase.surfaceWidth() != surfaceSize.x || eglBase.surfaceHeight() != surfaceSize.y) {
+ makeBlack();
+ }
+ }
+
+ final long startTimeNs = System.nanoTime();
+ final float[] texMatrix;
+ synchronized (layoutLock) {
+ final float[] rotatedSamplingMatrix =
+ RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
+ final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
+ mirror, frameAspectRatio(), (float) layoutSize.x / layoutSize.y);
+ texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
+ }
+
+ // TODO(magjed): glClear() shouldn't be necessary since every pixel is covered anyway, but it's
+ // a workaround for bug 5147. Performance will be slightly worse.
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ if (frame.yuvFrame) {
+ // Make sure YUV textures are allocated.
+ if (yuvTextures == null) {
+ yuvTextures = new int[3];
+ for (int i = 0; i < 3; i++) {
+ yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+ }
+ yuvUploader.uploadYuvData(
+ yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
+ drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
+ 0, 0, surfaceSize.x, surfaceSize.y);
+ } else {
+ drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
+ 0, 0, surfaceSize.x, surfaceSize.y);
+ }
+
+ eglBase.swapBuffers();
+ VideoRenderer.renderFrameDone(frame);
+ synchronized (statisticsLock) {
+ if (framesRendered == 0) {
+ firstFrameTimeNs = startTimeNs;
+ synchronized (layoutLock) {
+ Logging.d(TAG, getResourceName() + "Reporting first rendered frame.");
+ if (rendererEvents != null) {
+ rendererEvents.onFirstFrameRendered();
+ }
+ }
+ }
+ ++framesRendered;
+ renderTimeNs += (System.nanoTime() - startTimeNs);
+ if (framesRendered % 300 == 0) {
+ logStatistics();
+ }
+ }
+ }
+
+  // Returns the current frame aspect ratio, taking rotation into account.
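+  // Example: a 640x480 frame with rotation 90 yields 480.0f / 640 = 0.75f.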
+ private float frameAspectRatio() {
+ synchronized (layoutLock) {
+ if (frameWidth == 0 || frameHeight == 0) {
+ return 0.0f;
+ }
+ return (frameRotation % 180 == 0) ? (float) frameWidth / frameHeight
+ : (float) frameHeight / frameWidth;
+ }
+ }
+
+ // Update frame dimensions and report any changes to |rendererEvents|.
+ private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
+ synchronized (layoutLock) {
+ if (frameWidth != frame.width || frameHeight != frame.height
+ || frameRotation != frame.rotationDegree) {
+ Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
+ + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
+ if (rendererEvents != null) {
+ rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
+ }
+ frameWidth = frame.width;
+ frameHeight = frame.height;
+ frameRotation = frame.rotationDegree;
+ post(new Runnable() {
+ @Override public void run() {
+ requestLayout();
+ }
+ });
+ }
+ }
+ }
+
+ private void logStatistics() {
+ synchronized (statisticsLock) {
+ Logging.d(TAG, getResourceName() + "Frames received: "
+ + framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
+ if (framesReceived > 0 && framesRendered > 0) {
+ final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
+ Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
+ " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
+ Logging.d(TAG, getResourceName() + "Average render time: "
+ + (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
+ }
+ }
+ }
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java b/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java
new file mode 100644
index 0000000..452009b
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/VideoCapturer.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+
+import java.util.List;
+
+// Base interface for all VideoCapturers to implement.
+public interface VideoCapturer {
+ // Interface used for providing callbacks to an observer.
+ public interface CapturerObserver {
+    // Notifies whether the camera has been started successfully or not.
+ // Called on a Java thread owned by VideoCapturer.
+ void onCapturerStarted(boolean success);
+
+ // Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
+ void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
+ long timeStamp);
+
+ // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
+ // owned by VideoCapturer.
+ void onTextureFrameCaptured(
+ int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+ long timestamp);
+
+    // Requests an output format from the video capturer. Frames captured by
+    // the camera will be scaled and/or dropped by the video capturer.
+ // Called on a Java thread owned by VideoCapturer.
+ void onOutputFormatRequest(int width, int height, int framerate);
+ }
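+
+  // Illustrative sketch (not part of this interface): a minimal CapturerObserver
+  // implementation that only counts frames. The class and field names here are
+  // hypothetical and shown for documentation only.
+  //
+  //   class CountingObserver implements VideoCapturer.CapturerObserver {
+  //     private int frameCount;
+  //     @Override public void onCapturerStarted(boolean success) {}
+  //     @Override public void onByteBufferFrameCaptured(
+  //         byte[] data, int width, int height, int rotation, long timeStamp) {
+  //       ++frameCount;
+  //     }
+  //     @Override public void onTextureFrameCaptured(int width, int height, int oesTextureId,
+  //         float[] transformMatrix, int rotation, long timestamp) {
+  //       ++frameCount;
+  //     }
+  //     @Override public void onOutputFormatRequest(int width, int height, int framerate) {}
+  //   }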
+
+ // An implementation of CapturerObserver that forwards all calls from
+ // Java to the C layer.
+ static class NativeObserver implements CapturerObserver {
+ private final long nativeCapturer;
+
+ public NativeObserver(long nativeCapturer) {
+ this.nativeCapturer = nativeCapturer;
+ }
+
+ @Override
+ public void onCapturerStarted(boolean success) {
+ nativeCapturerStarted(nativeCapturer, success);
+ }
+
+ @Override
+ public void onByteBufferFrameCaptured(byte[] data, int width, int height,
+ int rotation, long timeStamp) {
+ nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
+ timeStamp);
+ }
+
+ @Override
+ public void onTextureFrameCaptured(
+ int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+ long timestamp) {
+ nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
+ rotation, timestamp);
+ }
+
+ @Override
+ public void onOutputFormatRequest(int width, int height, int framerate) {
+ nativeOnOutputFormatRequest(nativeCapturer, width, height, framerate);
+ }
+
+ private native void nativeCapturerStarted(long nativeCapturer,
+ boolean success);
+ private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
+ byte[] data, int length, int width, int height, int rotation, long timeStamp);
+ private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
+ int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
+ private native void nativeOnOutputFormatRequest(long nativeCapturer,
+ int width, int height, int framerate);
+ }
+
+ /**
+ * Returns a list with all the formats this VideoCapturer supports.
+ */
+ List<CameraEnumerationAndroid.CaptureFormat> getSupportedFormats();
+
+ /**
+ * Start capturing frames in a format that is as close as possible to |width| x |height| and
+ * |framerate|. If the VideoCapturer wants to deliver texture frames, it should do this by
+   * rendering on the SurfaceTexture in |surfaceTextureHelper|, registering itself as a listener,
+   * and forwarding the texture frames to CapturerObserver.onTextureFrameCaptured().
+ */
+ void startCapture(
+ int width, int height, int framerate, SurfaceTextureHelper surfaceTextureHelper,
+ Context applicationContext, CapturerObserver frameObserver);
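+
+  // Typical call sequence (illustrative sketch; |capturer|, |helper|, |context| and
+  // |observer| are assumed to be created by the application):
+  //   capturer.startCapture(640, 480, 30, helper, context, observer);
+  //   // Frames are now delivered to |observer|.
+  //   capturer.stopCapture();
+  //   capturer.dispose();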
+
+ /**
+ * Stop capturing. This function should block until capture is actually stopped.
+ */
+ void stopCapture() throws InterruptedException;
+
+ void onOutputFormatRequest(int width, int height, int framerate);
+
+ void changeCaptureFormat(int width, int height, int framerate);
+
+ /**
+ * Perform any final cleanup here. No more capturing will be done after this call.
+ */
+ void dispose();
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoCapturerAndroid.java b/webrtc/api/android/java/src/org/webrtc/VideoCapturerAndroid.java
new file mode 100644
index 0000000..8f6f911
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/VideoCapturerAndroid.java
@@ -0,0 +1,672 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.SystemClock;
+import android.view.Surface;
+import android.view.WindowManager;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+
+// Android specific implementation of VideoCapturer.
+// An instance of this class can be created by an application using
+// VideoCapturerAndroid.create();
+// This class extends VideoCapturer with a method to easily switch between the
+// front and back camera. It also provides methods for enumerating valid device
+// names.
+//
+// Threading notes: this class is called from C++ code, Android Camera callbacks, and possibly
+// arbitrary Java threads. All public entry points are thread safe, and delegate the work to the
+// camera thread. The internal *OnCameraThread() methods must check |camera| for null to determine
+// whether the camera has been stopped.
+// TODO(magjed): This class name is now confusing - rename to Camera1VideoCapturer.
+@SuppressWarnings("deprecation")
+public class VideoCapturerAndroid implements
+ CameraVideoCapturer,
+ android.hardware.Camera.PreviewCallback,
+ SurfaceTextureHelper.OnTextureFrameAvailableListener {
+ private final static String TAG = "VideoCapturerAndroid";
+ private static final int CAMERA_STOP_TIMEOUT_MS = 7000;
+
+ private android.hardware.Camera camera; // Only non-null while capturing.
+ private final Object handlerLock = new Object();
+ // |cameraThreadHandler| must be synchronized on |handlerLock| when not on the camera thread,
+ // or when modifying the reference. Use maybePostOnCameraThread() instead of posting directly to
+  // the handler - this way all callbacks with a specified token can be removed at once.
+ private Handler cameraThreadHandler;
+ private Context applicationContext;
+ // Synchronization lock for |id|.
+ private final Object cameraIdLock = new Object();
+ private int id;
+ private android.hardware.Camera.CameraInfo info;
+ private CameraStatistics cameraStatistics;
+ // Remember the requested format in case we want to switch cameras.
+ private int requestedWidth;
+ private int requestedHeight;
+ private int requestedFramerate;
+ // The capture format will be the closest supported format to the requested format.
+ private CaptureFormat captureFormat;
+ private final Object pendingCameraSwitchLock = new Object();
+ private volatile boolean pendingCameraSwitch;
+ private CapturerObserver frameObserver = null;
+ private final CameraEventsHandler eventsHandler;
+ private boolean firstFrameReported;
+ // Arbitrary queue depth. Higher number means more memory allocated & held,
+ // lower number means more sensitivity to processing time in the client (and
+ // potentially stalling the capturer if it runs out of buffers to write to).
+ private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
+ private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
+ private final boolean isCapturingToTexture;
+ private SurfaceTextureHelper surfaceHelper;
+ private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
+ private final static int OPEN_CAMERA_DELAY_MS = 500;
+ private int openCameraAttempts;
+
+ // Camera error callback.
+ private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
+ new android.hardware.Camera.ErrorCallback() {
+ @Override
+ public void onError(int error, android.hardware.Camera camera) {
+ String errorMessage;
+ if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
+ errorMessage = "Camera server died!";
+ } else {
+ errorMessage = "Camera error: " + error;
+ }
+ Logging.e(TAG, errorMessage);
+ if (eventsHandler != null) {
+ eventsHandler.onCameraError(errorMessage);
+ }
+ }
+ };
+
+ public static VideoCapturerAndroid create(String name,
+ CameraEventsHandler eventsHandler) {
+ return VideoCapturerAndroid.create(name, eventsHandler, false /* captureToTexture */);
+ }
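+
+  // Illustrative usage (sketch; |deviceName| would come from a camera enumerator):
+  //   VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName, null);
+  //   if (capturer == null) {
+  //     // No camera matching |deviceName| was found.
+  //   }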
+
+ // Use ctor directly instead.
+ @Deprecated
+ public static VideoCapturerAndroid create(String name,
+ CameraEventsHandler eventsHandler, boolean captureToTexture) {
+ try {
+ return new VideoCapturerAndroid(name, eventsHandler, captureToTexture);
+ } catch (RuntimeException e) {
+ Logging.e(TAG, "Couldn't create camera.", e);
+ return null;
+ }
+ }
+
+ public void printStackTrace() {
+ Thread cameraThread = null;
+ synchronized (handlerLock) {
+ if (cameraThreadHandler != null) {
+ cameraThread = cameraThreadHandler.getLooper().getThread();
+ }
+ }
+ if (cameraThread != null) {
+ StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
+ if (cameraStackTraces.length > 0) {
+ Logging.d(TAG, "VideoCapturerAndroid stacks trace:");
+ for (StackTraceElement stackTrace : cameraStackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ // Switch camera to the next valid camera id. This can only be called while
+ // the camera is running.
+ @Override
+ public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
+ if (android.hardware.Camera.getNumberOfCameras() < 2) {
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError("No camera to switch to.");
+ }
+ return;
+ }
+ synchronized (pendingCameraSwitchLock) {
+ if (pendingCameraSwitch) {
+        // Do not handle multiple camera switch requests, to avoid blocking the
+        // camera thread with a queue of switch requests.
+ Logging.w(TAG, "Ignoring camera switch request.");
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError("Pending camera switch already in progress.");
+ }
+ return;
+ }
+ pendingCameraSwitch = true;
+ }
+ final boolean didPost = maybePostOnCameraThread(new Runnable() {
+ @Override
+ public void run() {
+ switchCameraOnCameraThread();
+ synchronized (pendingCameraSwitchLock) {
+ pendingCameraSwitch = false;
+ }
+ if (switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchDone(
+ info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
+ }
+ }
+ });
+ if (!didPost && switchEventsHandler != null) {
+ switchEventsHandler.onCameraSwitchError("Camera is stopped.");
+ }
+ }
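+
+  // Illustrative sketch of a caller-side switch handler (hypothetical usage,
+  // shown for documentation only):
+  //   capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
+  //     @Override public void onCameraSwitchDone(boolean isFrontCamera) {
+  //       // Update UI, e.g. mirroring, based on |isFrontCamera|.
+  //     }
+  //     @Override public void onCameraSwitchError(String errorDescription) {
+  //       // Switching failed or a switch was already pending.
+  //     }
+  //   });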
+
+  // Requests a new output format from the video capturer. Frames captured by the camera will
+  // be scaled and/or dropped by the video capturer.
+  // It does not matter if width and height are flipped, i.e. |width| = 640, |height| = 480
+  // produces the same result as |width| = 480, |height| = 640.
+ // TODO(magjed/perkj): Document what this function does. Change name?
+ @Override
+ public void onOutputFormatRequest(final int width, final int height, final int framerate) {
+ maybePostOnCameraThread(new Runnable() {
+ @Override public void run() {
+ onOutputFormatRequestOnCameraThread(width, height, framerate);
+ }
+ });
+ }
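+
+  // Sketch of intent (illustrative): after onOutputFormatRequest(320, 240, 15), frames from a
+  // camera capturing 640x480@30 are scaled and/or dropped downstream to at most 320x240@15;
+  // the camera itself keeps its current capture format.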
+
+ // Reconfigure the camera to capture in a new format. This should only be called while the camera
+ // is running.
+ @Override
+ public void changeCaptureFormat(final int width, final int height, final int framerate) {
+ maybePostOnCameraThread(new Runnable() {
+ @Override public void run() {
+ startPreviewOnCameraThread(width, height, framerate);
+ }
+ });
+ }
+
+ // Helper function to retrieve the current camera id synchronously. Note that the camera id might
+  // change at any point due to switchCamera() calls.
+ private int getCurrentCameraId() {
+ synchronized (cameraIdLock) {
+ return id;
+ }
+ }
+
+ @Override
+ public List<CaptureFormat> getSupportedFormats() {
+ return Camera1Enumerator.getSupportedFormats(getCurrentCameraId());
+ }
+
+  // Returns true if this VideoCapturer is set up to capture video frames to a SurfaceTexture.
+ public boolean isCapturingToTexture() {
+ return isCapturingToTexture;
+ }
+
+ public VideoCapturerAndroid(String cameraName, CameraEventsHandler eventsHandler,
+ boolean captureToTexture) {
+ if (android.hardware.Camera.getNumberOfCameras() == 0) {
+ throw new RuntimeException("No cameras available");
+ }
+ if (cameraName == null || cameraName.equals("")) {
+ this.id = 0;
+ } else {
+ this.id = Camera1Enumerator.getCameraIndex(cameraName);
+ }
+ this.eventsHandler = eventsHandler;
+ isCapturingToTexture = captureToTexture;
+ Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
+ }
+
+ private void checkIsOnCameraThread() {
+ synchronized (handlerLock) {
+ if (cameraThreadHandler == null) {
+ Logging.e(TAG, "Camera is stopped - can't check thread.");
+ } else if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
+ throw new IllegalStateException("Wrong thread");
+ }
+ }
+ }
+
+ private boolean maybePostOnCameraThread(Runnable runnable) {
+ return maybePostDelayedOnCameraThread(0 /* delayMs */, runnable);
+ }
+
+ private boolean maybePostDelayedOnCameraThread(int delayMs, Runnable runnable) {
+ synchronized (handlerLock) {
+ return cameraThreadHandler != null
+ && cameraThreadHandler.postAtTime(
+ runnable, this /* token */, SystemClock.uptimeMillis() + delayMs);
+ }
+ }
+
+ @Override
+ public void dispose() {
+ Logging.d(TAG, "dispose");
+ }
+
+ // Note that this actually opens the camera, and Camera callbacks run on the
+  // thread that calls open(), so opening is done on the camera thread.
+ @Override
+ public void startCapture(
+ final int width, final int height, final int framerate,
+ final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
+ final CapturerObserver frameObserver) {
+ Logging.d(TAG, "startCapture requested: " + width + "x" + height + "@" + framerate);
+ if (surfaceTextureHelper == null) {
+ frameObserver.onCapturerStarted(false /* success */);
+ if (eventsHandler != null) {
+ eventsHandler.onCameraError("No SurfaceTexture created.");
+ }
+ return;
+ }
+ if (applicationContext == null) {
+ throw new IllegalArgumentException("applicationContext not set.");
+ }
+ if (frameObserver == null) {
+ throw new IllegalArgumentException("frameObserver not set.");
+ }
+ synchronized (handlerLock) {
+ if (this.cameraThreadHandler != null) {
+ throw new RuntimeException("Camera has already been started.");
+ }
+ this.cameraThreadHandler = surfaceTextureHelper.getHandler();
+ this.surfaceHelper = surfaceTextureHelper;
+ final boolean didPost = maybePostOnCameraThread(new Runnable() {
+ @Override
+ public void run() {
+ openCameraAttempts = 0;
+ startCaptureOnCameraThread(width, height, framerate, frameObserver,
+ applicationContext);
+ }
+ });
+ if (!didPost) {
+ frameObserver.onCapturerStarted(false);
+ if (eventsHandler != null) {
+ eventsHandler.onCameraError("Could not post task to camera thread.");
+ }
+ }
+ }
+ }
+
+ private void startCaptureOnCameraThread(
+ final int width, final int height, final int framerate, final CapturerObserver frameObserver,
+ final Context applicationContext) {
+ synchronized (handlerLock) {
+ if (cameraThreadHandler == null) {
+ Logging.e(TAG, "startCaptureOnCameraThread: Camera is stopped");
+ return;
+ } else {
+ checkIsOnCameraThread();
+ }
+ }
+ if (camera != null) {
+ Logging.e(TAG, "startCaptureOnCameraThread: Camera has already been started.");
+ return;
+ }
+ this.applicationContext = applicationContext;
+ this.frameObserver = frameObserver;
+ this.firstFrameReported = false;
+
+ try {
+ try {
+ synchronized (cameraIdLock) {
+ Logging.d(TAG, "Opening camera " + id);
+ if (eventsHandler != null) {
+ eventsHandler.onCameraOpening(id);
+ }
+ camera = android.hardware.Camera.open(id);
+ info = new android.hardware.Camera.CameraInfo();
+ android.hardware.Camera.getCameraInfo(id, info);
+ }
+ } catch (RuntimeException e) {
+ openCameraAttempts++;
+ if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
+ Logging.e(TAG, "Camera.open failed, retrying", e);
+ maybePostDelayedOnCameraThread(OPEN_CAMERA_DELAY_MS, new Runnable() {
+ @Override public void run() {
+ startCaptureOnCameraThread(width, height, framerate, frameObserver,
+ applicationContext);
+ }
+ });
+ return;
+ }
+ throw e;
+ }
+
+ camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
+
+ Logging.d(TAG, "Camera orientation: " + info.orientation +
+ " .Device orientation: " + getDeviceOrientation());
+ camera.setErrorCallback(cameraErrorCallback);
+ startPreviewOnCameraThread(width, height, framerate);
+ frameObserver.onCapturerStarted(true);
+ if (isCapturingToTexture) {
+ surfaceHelper.startListening(this);
+ }
+
+ // Start camera observer.
+ cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
+ } catch (IOException|RuntimeException e) {
+ Logging.e(TAG, "startCapture failed", e);
+ // Make sure the camera is released.
+ stopCaptureOnCameraThread(true /* stopHandler */);
+ frameObserver.onCapturerStarted(false);
+ if (eventsHandler != null) {
+ eventsHandler.onCameraError("Camera can not be started.");
+ }
+ }
+ }
+
+ // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
+ private void startPreviewOnCameraThread(int width, int height, int framerate) {
+ synchronized (handlerLock) {
+ if (cameraThreadHandler == null || camera == null) {
+ Logging.e(TAG, "startPreviewOnCameraThread: Camera is stopped");
+ return;
+ } else {
+ checkIsOnCameraThread();
+ }
+ }
+ Logging.d(
+ TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
+
+ requestedWidth = width;
+ requestedHeight = height;
+ requestedFramerate = framerate;
+
+ // Find closest supported format for |width| x |height| @ |framerate|.
+ final android.hardware.Camera.Parameters parameters = camera.getParameters();
+ final List<CaptureFormat.FramerateRange> supportedFramerates =
+ Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
+ Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
+
+ final CaptureFormat.FramerateRange fpsRange =
+ CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
+
+ final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
+ Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
+
+ final CaptureFormat captureFormat =
+ new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
+
+    // If we are already using this capture format, there is nothing to do.
+ if (captureFormat.equals(this.captureFormat)) {
+ return;
+ }
+
+ // Update camera parameters.
+ Logging.d(TAG, "isVideoStabilizationSupported: " +
+ parameters.isVideoStabilizationSupported());
+ if (parameters.isVideoStabilizationSupported()) {
+ parameters.setVideoStabilization(true);
+ }
+    // Note: setRecordingHint(true) actually decreases the frame rate on N5.
+ // parameters.setRecordingHint(true);
+ if (captureFormat.framerate.max > 0) {
+ parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
+ }
+ parameters.setPreviewSize(previewSize.width, previewSize.height);
+
+ if (!isCapturingToTexture) {
+ parameters.setPreviewFormat(captureFormat.imageFormat);
+ }
+ // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
+ // as a workaround for an aspect ratio problem on Nexus 7.
+ final Size pictureSize = CameraEnumerationAndroid.getClosestSupportedSize(
+ Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
+ parameters.setPictureSize(pictureSize.width, pictureSize.height);
+
+ // Temporarily stop preview if it's already running.
+ if (this.captureFormat != null) {
+ camera.stopPreview();
+ // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
+ // queue, but sometimes we receive a frame with the old resolution after this call anyway.
+ camera.setPreviewCallbackWithBuffer(null);
+ }
+
+ // (Re)start preview.
+ Logging.d(TAG, "Start capturing: " + captureFormat);
+ this.captureFormat = captureFormat;
+
+ List<String> focusModes = parameters.getSupportedFocusModes();
+ if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+ }
+
+ camera.setParameters(parameters);
+ // Calculate orientation manually and send it as CVO instead.
+ camera.setDisplayOrientation(0 /* degrees */);
+ if (!isCapturingToTexture) {
+ queuedBuffers.clear();
+ final int frameSize = captureFormat.frameSize();
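+      // Assumption for illustration: with the default NV21 preview format, frameSize()
+      // evaluates to width * height * 3 / 2 bytes (12 bits per pixel).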
+ for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
+ final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
+ queuedBuffers.add(buffer.array());
+ camera.addCallbackBuffer(buffer.array());
+ }
+ camera.setPreviewCallbackWithBuffer(this);
+ }
+ camera.startPreview();
+ }
+
+ // Blocks until camera is known to be stopped.
+ @Override
+ public void stopCapture() throws InterruptedException {
+ Logging.d(TAG, "stopCapture");
+ final CountDownLatch barrier = new CountDownLatch(1);
+ final boolean didPost = maybePostOnCameraThread(new Runnable() {
+ @Override public void run() {
+ stopCaptureOnCameraThread(true /* stopHandler */);
+ barrier.countDown();
+ }
+ });
+ if (!didPost) {
+ Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
+ return;
+ }
+ if (!barrier.await(CAMERA_STOP_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
+ Logging.e(TAG, "Camera stop timeout");
+ printStackTrace();
+ if (eventsHandler != null) {
+ eventsHandler.onCameraError("Camera stop timeout");
+ }
+ }
+ Logging.d(TAG, "stopCapture done");
+ }
+
+ private void stopCaptureOnCameraThread(boolean stopHandler) {
+ synchronized (handlerLock) {
+ if (cameraThreadHandler == null) {
+ Logging.e(TAG, "stopCaptureOnCameraThread: Camera is stopped");
+ } else {
+ checkIsOnCameraThread();
+ }
+ }
+ Logging.d(TAG, "stopCaptureOnCameraThread");
+ // Note that the camera might still not be started here if startCaptureOnCameraThread failed
+ // and we posted a retry.
+
+ // Make sure onTextureFrameAvailable() is not called anymore.
+ if (surfaceHelper != null) {
+ surfaceHelper.stopListening();
+ }
+ if (stopHandler) {
+ synchronized (handlerLock) {
+ // Clear the cameraThreadHandler first, in case stopPreview or
+ // other driver code deadlocks. Deadlock in
+ // android.hardware.Camera._stopPreview(Native Method) has
+ // been observed on Nexus 5 (hammerhead), OS version LMY48I.
+        // The camera might post another one or two preview frames
+        // before it is stopped, so we have to check for a null
+ // cameraThreadHandler in our handler. Remove all pending
+ // Runnables posted from |this|.
+ if (cameraThreadHandler != null) {
+ cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
+ cameraThreadHandler = null;
+ }
+ surfaceHelper = null;
+ }
+ }
+ if (cameraStatistics != null) {
+ cameraStatistics.release();
+ cameraStatistics = null;
+ }
+ Logging.d(TAG, "Stop preview.");
+ if (camera != null) {
+ camera.stopPreview();
+ camera.setPreviewCallbackWithBuffer(null);
+ }
+ queuedBuffers.clear();
+ captureFormat = null;
+
+ Logging.d(TAG, "Release camera.");
+ if (camera != null) {
+ camera.release();
+ camera = null;
+ }
+ if (eventsHandler != null) {
+ eventsHandler.onCameraClosed();
+ }
+ Logging.d(TAG, "stopCaptureOnCameraThread done");
+ }
+
+ private void switchCameraOnCameraThread() {
+ synchronized (handlerLock) {
+ if (cameraThreadHandler == null) {
+ Logging.e(TAG, "switchCameraOnCameraThread: Camera is stopped");
+ return;
+ } else {
+ checkIsOnCameraThread();
+ }
+ }
+ Logging.d(TAG, "switchCameraOnCameraThread");
+ stopCaptureOnCameraThread(false /* stopHandler */);
+ synchronized (cameraIdLock) {
+ id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
+ }
+ startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
+ applicationContext);
+ Logging.d(TAG, "switchCameraOnCameraThread done");
+ }
+
+ private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) {
+ synchronized (handlerLock) {
+ if (cameraThreadHandler == null || camera == null) {
+ Logging.e(TAG, "onOutputFormatRequestOnCameraThread: Camera is stopped");
+ return;
+ } else {
+ checkIsOnCameraThread();
+ }
+ }
+ Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
+ "@" + framerate);
+ frameObserver.onOutputFormatRequest(width, height, framerate);
+ }
+
+ private int getDeviceOrientation() {
+ int orientation = 0;
+
+ WindowManager wm = (WindowManager) applicationContext.getSystemService(
+ Context.WINDOW_SERVICE);
+ switch(wm.getDefaultDisplay().getRotation()) {
+ case Surface.ROTATION_90:
+ orientation = 90;
+ break;
+ case Surface.ROTATION_180:
+ orientation = 180;
+ break;
+ case Surface.ROTATION_270:
+ orientation = 270;
+ break;
+ case Surface.ROTATION_0:
+ default:
+ orientation = 0;
+ break;
+ }
+ return orientation;
+ }
+
+ private int getFrameOrientation() {
+ int rotation = getDeviceOrientation();
+ if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
+ rotation = 360 - rotation;
+ }
+ return (info.orientation + rotation) % 360;
+ }
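+
+  // Worked example: with a back-facing camera mounted at info.orientation = 90 and the
+  // device rotated to ROTATION_90 (getDeviceOrientation() = 90), the compensation is
+  // rotation = 360 - 90 = 270, and the frame orientation is (90 + 270) % 360 = 0.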
+
+  // Called on the camera thread, so this method must not be marked "synchronized".
+ @Override
+ public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
+ synchronized (handlerLock) {
+ if (cameraThreadHandler == null) {
+ Logging.e(TAG, "onPreviewFrame: Camera is stopped");
+ return;
+ } else {
+ checkIsOnCameraThread();
+ }
+ }
+ if (!queuedBuffers.contains(data)) {
+ // |data| is an old invalid buffer.
+ return;
+ }
+ if (camera != callbackCamera) {
+ throw new RuntimeException("Unexpected camera in callback!");
+ }
+
+ final long captureTimeNs =
+ TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
+
+ if (eventsHandler != null && !firstFrameReported) {
+ eventsHandler.onFirstFrameAvailable();
+ firstFrameReported = true;
+ }
+
+ cameraStatistics.addFrame();
+ frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
+ getFrameOrientation(), captureTimeNs);
+ camera.addCallbackBuffer(data);
+ }
+
+ @Override
+ public void onTextureFrameAvailable(
+ int oesTextureId, float[] transformMatrix, long timestampNs) {
+ synchronized (handlerLock) {
+ if (cameraThreadHandler == null) {
+ Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped");
+ surfaceHelper.returnTextureFrame();
+ return;
+ } else {
+ checkIsOnCameraThread();
+ }
+ }
+ if (eventsHandler != null && !firstFrameReported) {
+ eventsHandler.onFirstFrameAvailable();
+ firstFrameReported = true;
+ }
+
+ int rotation = getFrameOrientation();
+ if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
+ // Undo the mirror that the OS "helps" us with.
+ // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
+ transformMatrix =
+ RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
+ }
+ cameraStatistics.addFrame();
+ frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
+ transformMatrix, rotation, timestampNs);
+ }
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoRenderer.java b/webrtc/api/android/java/src/org/webrtc/VideoRenderer.java
new file mode 100644
index 0000000..819b77d
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/VideoRenderer.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Java version of VideoSinkInterface. In addition to allowing clients to
+ * define their own rendering behavior (by passing in a Callbacks object), this
+ * class is also used together with VideoRendererGui.createGui() for creating a
+ * GUI-rendering window on various platforms.
+ */
+public class VideoRenderer {
+ /**
+ * Java version of cricket::VideoFrame. Frames are only constructed from native code and test
+ * code.
+ */
+ public static class I420Frame {
+ public final int width;
+ public final int height;
+ public final int[] yuvStrides;
+ public ByteBuffer[] yuvPlanes;
+ public final boolean yuvFrame;
+ // Matrix that transforms standard coordinates to their proper sampling locations in
+ // the texture. This transform compensates for any properties of the video source that
+ // cause it to appear different from a normalized texture. This matrix does not take
+ // |rotationDegree| into account.
+ public final float[] samplingMatrix;
+ public int textureId;
+ // Frame pointer in C++.
+ private long nativeFramePointer;
+
+    // rotationDegree is the number of degrees that the frame must be rotated
+    // clockwise to be rendered correctly.
+ public int rotationDegree;
+
+ /**
+ * Construct a frame of the given dimensions with the specified planar data.
+ */
+ I420Frame(int width, int height, int rotationDegree, int[] yuvStrides, ByteBuffer[] yuvPlanes,
+ long nativeFramePointer) {
+ this.width = width;
+ this.height = height;
+ this.yuvStrides = yuvStrides;
+ this.yuvPlanes = yuvPlanes;
+ this.yuvFrame = true;
+ this.rotationDegree = rotationDegree;
+ this.nativeFramePointer = nativeFramePointer;
+ if (rotationDegree % 90 != 0) {
+ throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
+ }
+ // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+ // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+ // bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
+ // matrix.
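+      // In OpenGL's column-major convention, the matrix below maps (u, v) to (u, 1 - v),
+      // e.g. texture coordinate (0, 0) samples the texture at (0, 1).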
+ samplingMatrix = new float[] {
+ 1, 0, 0, 0,
+ 0, -1, 0, 0,
+ 0, 0, 1, 0,
+ 0, 1, 0, 1};
+ }
+
+ /**
+     * Construct a texture frame of the given dimensions with data in SurfaceTexture.
+ */
+ I420Frame(int width, int height, int rotationDegree, int textureId, float[] samplingMatrix,
+ long nativeFramePointer) {
+ this.width = width;
+ this.height = height;
+ this.yuvStrides = null;
+ this.yuvPlanes = null;
+ this.samplingMatrix = samplingMatrix;
+ this.textureId = textureId;
+ this.yuvFrame = false;
+ this.rotationDegree = rotationDegree;
+ this.nativeFramePointer = nativeFramePointer;
+ if (rotationDegree % 90 != 0) {
+ throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
+ }
+ }
+
+ public int rotatedWidth() {
+ return (rotationDegree % 180 == 0) ? width : height;
+ }
+
+ public int rotatedHeight() {
+ return (rotationDegree % 180 == 0) ? height : width;
+ }
+
+ @Override
+ public String toString() {
+      if (!yuvFrame) {
+        // |yuvStrides| is null for texture frames - avoid a NullPointerException.
+        return width + "x" + height + ":texture:" + textureId;
+      }
+      return width + "x" + height + ":" + yuvStrides[0] + ":" + yuvStrides[1] +
+          ":" + yuvStrides[2];
+ }
+ }
+
+ // Helper native function to do a video frame plane copying.
+ public static native void nativeCopyPlane(ByteBuffer src, int width,
+ int height, int srcStride, ByteBuffer dst, int dstStride);
+
+ /** The real meat of VideoSinkInterface. */
+ public static interface Callbacks {
+ // |frame| might have pending rotation and implementation of Callbacks
+ // should handle that by applying rotation during rendering. The callee
+ // is responsible for signaling when it is done with |frame| by calling
+ // renderFrameDone(frame).
+ public void renderFrame(I420Frame frame);
+ }
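+
+  // Illustrative sketch (hypothetical, for documentation only): an implementation
+  // must always release the frame, even if rendering fails.
+  //   VideoRenderer.Callbacks callbacks = new VideoRenderer.Callbacks() {
+  //     @Override
+  //     public void renderFrame(I420Frame frame) {
+  //       try {
+  //         // Render |frame| here, applying frame.rotationDegree.
+  //       } finally {
+  //         VideoRenderer.renderFrameDone(frame);
+  //       }
+  //     }
+  //   };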
+
+ /**
+ * This must be called after every renderFrame() to release the frame.
+ */
+ public static void renderFrameDone(I420Frame frame) {
+ frame.yuvPlanes = null;
+ frame.textureId = 0;
+ if (frame.nativeFramePointer != 0) {
+ releaseNativeFrame(frame.nativeFramePointer);
+ frame.nativeFramePointer = 0;
+ }
+ }
+
+ long nativeVideoRenderer;
+
+ public VideoRenderer(Callbacks callbacks) {
+ nativeVideoRenderer = nativeWrapVideoRenderer(callbacks);
+ }
+
+ public void dispose() {
+ if (nativeVideoRenderer == 0) {
+ // Already disposed.
+ return;
+ }
+
+ freeWrappedVideoRenderer(nativeVideoRenderer);
+ nativeVideoRenderer = 0;
+ }
+
+ private static native long nativeWrapVideoRenderer(Callbacks callbacks);
+ private static native void freeWrappedVideoRenderer(long nativeVideoRenderer);
+ private static native void releaseNativeFrame(long nativeFramePointer);
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoRendererGui.java b/webrtc/api/android/java/src/org/webrtc/VideoRendererGui.java
new file mode 100644
index 0000000..e292bac
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/VideoRendererGui.java
@@ -0,0 +1,650 @@
+/*
+ * Copyright 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.ArrayList;
+import java.util.concurrent.CountDownLatch;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.annotation.SuppressLint;
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.opengl.EGL14;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+
+import org.webrtc.Logging;
+import org.webrtc.VideoRenderer.I420Frame;
+
+/**
+ * Efficiently renders YUV frames using the GPU for CSC (color space conversion).
+ * Clients should first call setView() to pass in the GLSurfaceView, and then, for
+ * each video stream, create either a VideoRenderer instance using the createGui()
+ * call or a VideoRenderer.Callbacks instance using the create() call.
+ * Only one instance of the class can be created.
+ */
+public class VideoRendererGui implements GLSurfaceView.Renderer {
+ // |instance|, |instance.surface|, |eglContext|, and |eglContextReady| are synchronized on
+ // |VideoRendererGui.class|.
+ private static VideoRendererGui instance = null;
+ private static Runnable eglContextReady = null;
+ private static final String TAG = "VideoRendererGui";
+ private GLSurfaceView surface;
+ private static EglBase.Context eglContext = null;
+  // Indicates if GLSurfaceView.Renderer.onSurfaceCreated was called.
+ // If true then for every newly created yuv image renderer createTexture()
+ // should be called. The variable is accessed on multiple threads and
+ // all accesses are synchronized on yuvImageRenderers' object lock.
+ private boolean onSurfaceCreatedCalled;
+ private int screenWidth;
+ private int screenHeight;
+ // List of yuv renderers.
+ private final ArrayList<YuvImageRenderer> yuvImageRenderers;
+ // Render and draw threads.
+ private static Thread renderFrameThread;
+ private static Thread drawThread;
+
+ private VideoRendererGui(GLSurfaceView surface) {
+ this.surface = surface;
+ // Create an OpenGL ES 2.0 context.
+ surface.setPreserveEGLContextOnPause(true);
+ surface.setEGLContextClientVersion(2);
+ surface.setRenderer(this);
+ surface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+
+ yuvImageRenderers = new ArrayList<YuvImageRenderer>();
+ }
+
+ /**
+   * Class used to display a stream of YUV420 frames at a particular location on
+   * the screen. New video frames are sent to the display using the renderFrame()
+   * call.
+ */
+ private static class YuvImageRenderer implements VideoRenderer.Callbacks {
+ // |surface| is synchronized on |this|.
+ private GLSurfaceView surface;
+ private int id;
+ // TODO(magjed): Delete GL resources in release(). Must be synchronized with draw(). We are
+ // currently leaking resources to avoid a rare crash in release() where the EGLContext has
+ // become invalid beforehand.
+ private int[] yuvTextures = { 0, 0, 0 };
+ private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+ private final RendererCommon.GlDrawer drawer;
+ // Resources for making a deep copy of incoming OES texture frame.
+ private GlTextureFrameBuffer textureCopy;
+
+ // Pending frame to render. Serves as a queue with size 1. |pendingFrame| is accessed by two
+ // threads - frames are received in renderFrame() and consumed in draw(). Frames are dropped in
+ // renderFrame() if the previous frame has not been rendered yet.
+ private I420Frame pendingFrame;
+ private final Object pendingFrameLock = new Object();
+ // Type of video frame used for recent frame rendering.
+    private enum RendererType { RENDERER_YUV, RENDERER_TEXTURE }
+ private RendererType rendererType;
+ private RendererCommon.ScalingType scalingType;
+ private boolean mirror;
+ private RendererCommon.RendererEvents rendererEvents;
+ // Flag if renderFrame() was ever called.
+ boolean seenFrame;
+ // Total number of video frames received in renderFrame() call.
+ private int framesReceived;
+ // Number of video frames dropped by renderFrame() because previous
+ // frame has not been rendered yet.
+ private int framesDropped;
+ // Number of rendered video frames.
+ private int framesRendered;
+ // Time in ns when the first video frame was rendered.
+ private long startTimeNs = -1;
+ // Time in ns spent in draw() function.
+ private long drawTimeNs;
+ // Time in ns spent in draw() copying resources from |pendingFrame| - including uploading frame
+ // data to rendering planes.
+ private long copyTimeNs;
+ // The allowed view area in percentage of screen size.
+ private final Rect layoutInPercentage;
+ // The actual view area in pixels. It is a centered subrectangle of the rectangle defined by
+ // |layoutInPercentage|.
+ private final Rect displayLayout = new Rect();
+ // Cached layout transformation matrix, calculated from current layout parameters.
+ private float[] layoutMatrix;
+ // Flag if layout transformation matrix update is needed.
+ private boolean updateLayoutProperties;
+ // Layout properties update lock. Guards |updateLayoutProperties|, |screenWidth|,
+ // |screenHeight|, |videoWidth|, |videoHeight|, |rotationDegree|, |scalingType|, and |mirror|.
+ private final Object updateLayoutLock = new Object();
+ // Texture sampling matrix.
+ private float[] rotatedSamplingMatrix;
+ // Viewport dimensions.
+ private int screenWidth;
+ private int screenHeight;
+ // Video dimension.
+ private int videoWidth;
+ private int videoHeight;
+
+    // This is the number of degrees that the frame should be rotated clockwise
+    // to be rendered upright.
+ private int rotationDegree;
+
+ private YuvImageRenderer(
+ GLSurfaceView surface, int id,
+ int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
+ Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
+ this.surface = surface;
+ this.id = id;
+ this.scalingType = scalingType;
+ this.mirror = mirror;
+ this.drawer = drawer;
+ layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
+ updateLayoutProperties = false;
+ rotationDegree = 0;
+ }
+
+ public synchronized void reset() {
+ seenFrame = false;
+ }
+
+ private synchronized void release() {
+ surface = null;
+ drawer.release();
+ synchronized (pendingFrameLock) {
+ if (pendingFrame != null) {
+ VideoRenderer.renderFrameDone(pendingFrame);
+ pendingFrame = null;
+ }
+ }
+ }
+
+ private void createTextures() {
+ Logging.d(TAG, " YuvImageRenderer.createTextures " + id + " on GL thread:" +
+ Thread.currentThread().getId());
+
+ // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
+ for (int i = 0; i < 3; i++) {
+ yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
+ }
+ // Generate texture and framebuffer for offscreen texture copy.
+ textureCopy = new GlTextureFrameBuffer(GLES20.GL_RGB);
+ }
+
+ private void updateLayoutMatrix() {
+      synchronized (updateLayoutLock) {
+ if (!updateLayoutProperties) {
+ return;
+ }
+        // Initialize to the maximum allowed area. Round to integer coordinates inwards the
+        // layout bounding box (ceil left/top and floor right/bottom) so as not to break the
+        // constraints.
+ displayLayout.set(
+ (screenWidth * layoutInPercentage.left + 99) / 100,
+ (screenHeight * layoutInPercentage.top + 99) / 100,
+ (screenWidth * layoutInPercentage.right) / 100,
+ (screenHeight * layoutInPercentage.bottom) / 100);
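+        // Worked example (illustrative): with screenWidth = 1001 and layoutInPercentage.left
+        // = 25, the left edge is (1001 * 25 + 99) / 100 = 251 (rounded up from 250.25), while
+        // a right edge at 75 uses plain integer division: (1001 * 75) / 100 = 750 (rounded
+        // down from 750.75).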
+ Logging.d(TAG, "ID: " + id + ". AdjustTextureCoords. Allowed display size: "
+ + displayLayout.width() + " x " + displayLayout.height() + ". Video: " + videoWidth
+ + " x " + videoHeight + ". Rotation: " + rotationDegree + ". Mirror: " + mirror);
+ final float videoAspectRatio = (rotationDegree % 180 == 0)
+ ? (float) videoWidth / videoHeight
+ : (float) videoHeight / videoWidth;
+ // Adjust display size based on |scalingType|.
+ final Point displaySize = RendererCommon.getDisplaySize(scalingType,
+ videoAspectRatio, displayLayout.width(), displayLayout.height());
+ displayLayout.inset((displayLayout.width() - displaySize.x) / 2,
+ (displayLayout.height() - displaySize.y) / 2);
+ Logging.d(TAG, " Adjusted display size: " + displayLayout.width() + " x "
+ + displayLayout.height());
+ layoutMatrix = RendererCommon.getLayoutMatrix(
+ mirror, videoAspectRatio, (float) displayLayout.width() / displayLayout.height());
+ updateLayoutProperties = false;
+ Logging.d(TAG, " AdjustTextureCoords done");
+ }
+ }
+
+ private void draw() {
+ if (!seenFrame) {
+ // No frame received yet - nothing to render.
+ return;
+ }
+ long now = System.nanoTime();
+
+ final boolean isNewFrame;
+ synchronized (pendingFrameLock) {
+ isNewFrame = (pendingFrame != null);
+ if (isNewFrame && startTimeNs == -1) {
+ startTimeNs = now;
+ }
+
+ if (isNewFrame) {
+ rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
+ pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
+ if (pendingFrame.yuvFrame) {
+ rendererType = RendererType.RENDERER_YUV;
+ yuvUploader.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
+ pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
+ } else {
+ rendererType = RendererType.RENDERER_TEXTURE;
+ // External texture rendering. Make a deep copy of the external texture.
+ // Reallocate offscreen texture if necessary.
+ textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
+
+ // Bind our offscreen framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
+ GlUtil.checkNoGLES2Error("glBindFramebuffer");
+
+ // Copy the OES texture content. This will also normalize the sampling matrix.
+ drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
+ textureCopy.getWidth(), textureCopy.getHeight(),
+ 0, 0, textureCopy.getWidth(), textureCopy.getHeight());
+ rotatedSamplingMatrix = RendererCommon.identityMatrix();
+
+ // Restore normal framebuffer.
+ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
+ GLES20.glFinish();
+ }
+ copyTimeNs += (System.nanoTime() - now);
+ VideoRenderer.renderFrameDone(pendingFrame);
+ pendingFrame = null;
+ }
+ }
+
+ updateLayoutMatrix();
+ final float[] texMatrix =
+ RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
+ // OpenGL defaults to lower left origin - flip viewport position vertically.
+ final int viewportY = screenHeight - displayLayout.bottom;
+ if (rendererType == RendererType.RENDERER_YUV) {
+ drawer.drawYuv(yuvTextures, texMatrix, videoWidth, videoHeight,
+ displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
+ } else {
+ drawer.drawRgb(textureCopy.getTextureId(), texMatrix, videoWidth, videoHeight,
+ displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
+ }
+
+ if (isNewFrame) {
+ framesRendered++;
+ drawTimeNs += (System.nanoTime() - now);
+ if ((framesRendered % 300) == 0) {
+ logStatistics();
+ }
+ }
+ }
+
+ private void logStatistics() {
+ long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
+ Logging.d(TAG, "ID: " + id + ". Type: " + rendererType +
+ ". Frames received: " + framesReceived +
+ ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
+ if (framesReceived > 0 && framesRendered > 0) {
+ Logging.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
+ " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
+ Logging.d(TAG, "Draw time: " +
+ (int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
+ (int) (copyTimeNs / (1000 * framesReceived)) + " us");
+ }
+ }
+
+ public void setScreenSize(final int screenWidth, final int screenHeight) {
+      synchronized (updateLayoutLock) {
+ if (screenWidth == this.screenWidth && screenHeight == this.screenHeight) {
+ return;
+ }
+ Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setScreenSize: " +
+ screenWidth + " x " + screenHeight);
+ this.screenWidth = screenWidth;
+ this.screenHeight = screenHeight;
+ updateLayoutProperties = true;
+ }
+ }
+
+ public void setPosition(int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror) {
+ final Rect layoutInPercentage =
+ new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
+      synchronized (updateLayoutLock) {
+ if (layoutInPercentage.equals(this.layoutInPercentage) && scalingType == this.scalingType
+ && mirror == this.mirror) {
+ return;
+ }
+ Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setPosition: (" + x + ", " + y +
+ ") " + width + " x " + height + ". Scaling: " + scalingType +
+ ". Mirror: " + mirror);
+ this.layoutInPercentage.set(layoutInPercentage);
+ this.scalingType = scalingType;
+ this.mirror = mirror;
+ updateLayoutProperties = true;
+ }
+ }
+
+ private void setSize(final int videoWidth, final int videoHeight, final int rotation) {
+ if (videoWidth == this.videoWidth && videoHeight == this.videoHeight
+ && rotation == rotationDegree) {
+ return;
+ }
+ if (rendererEvents != null) {
+ Logging.d(TAG, "ID: " + id +
+ ". Reporting frame resolution changed to " + videoWidth + " x " + videoHeight);
+ rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation);
+ }
+
+ synchronized (updateLayoutLock) {
+ Logging.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
+ videoWidth + " x " + videoHeight + " rotation " + rotation);
+
+ this.videoWidth = videoWidth;
+ this.videoHeight = videoHeight;
+ rotationDegree = rotation;
+ updateLayoutProperties = true;
+ Logging.d(TAG, " YuvImageRenderer.setSize done.");
+ }
+ }
+
+ @Override
+ public synchronized void renderFrame(I420Frame frame) {
+ if (surface == null) {
+ // This object has been released.
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ if (renderFrameThread == null) {
+ renderFrameThread = Thread.currentThread();
+ }
+ if (!seenFrame && rendererEvents != null) {
+ Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
+ rendererEvents.onFirstFrameRendered();
+ }
+ framesReceived++;
+ synchronized (pendingFrameLock) {
+ // Check input frame parameters.
+ if (frame.yuvFrame) {
+ if (frame.yuvStrides[0] < frame.width ||
+ frame.yuvStrides[1] < frame.width / 2 ||
+ frame.yuvStrides[2] < frame.width / 2) {
+ Logging.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
+ frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
+ VideoRenderer.renderFrameDone(frame);
+ return;
+ }
+ }
+
+ if (pendingFrame != null) {
+ // Skip rendering of this frame if previous frame was not rendered yet.
+ framesDropped++;
+ VideoRenderer.renderFrameDone(frame);
+ seenFrame = true;
+ return;
+ }
+ pendingFrame = frame;
+ }
+ setSize(frame.width, frame.height, frame.rotationDegree);
+ seenFrame = true;
+
+ // Request rendering.
+ surface.requestRender();
+ }
+ }
+
+ /** Passes GLSurfaceView to video renderer. */
+ public static synchronized void setView(GLSurfaceView surface,
+ Runnable eglContextReadyCallback) {
+ Logging.d(TAG, "VideoRendererGui.setView");
+ instance = new VideoRendererGui(surface);
+ eglContextReady = eglContextReadyCallback;
+ }
+
+ public static synchronized EglBase.Context getEglBaseContext() {
+ return eglContext;
+ }
+
+ /** Releases GLSurfaceView video renderer. */
+ public static synchronized void dispose() {
+    if (instance == null) {
+ return;
+ }
+ Logging.d(TAG, "VideoRendererGui.dispose");
+ synchronized (instance.yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+ yuvImageRenderer.release();
+ }
+ instance.yuvImageRenderers.clear();
+ }
+ renderFrameThread = null;
+ drawThread = null;
+ instance.surface = null;
+ eglContext = null;
+ eglContextReady = null;
+ instance = null;
+ }
+
+ /**
+ * Creates VideoRenderer with top left corner at (x, y) and resolution
+ * (width, height). All parameters are in percentage of screen resolution.
+ */
+ public static VideoRenderer createGui(int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror) throws Exception {
+ YuvImageRenderer javaGuiRenderer = create(
+ x, y, width, height, scalingType, mirror);
+ return new VideoRenderer(javaGuiRenderer);
+ }
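+
+  // Illustrative usage (sketch; |glSurfaceView| and |videoTrack| are assumed to be
+  // created by the application, and createGui() may throw):
+  //   VideoRendererGui.setView(glSurfaceView, null /* eglContextReadyCallback */);
+  //   VideoRenderer fullscreenRender = VideoRendererGui.createGui(
+  //       0, 0, 100, 100, RendererCommon.ScalingType.SCALE_ASPECT_FIT, false /* mirror */);
+  //   videoTrack.addRenderer(fullscreenRender);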
+
+ public static VideoRenderer.Callbacks createGuiRenderer(
+ int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror) {
+ return create(x, y, width, height, scalingType, mirror);
+ }
+
+ /**
+ * Creates VideoRenderer.Callbacks with top left corner at (x, y) and
+ * resolution (width, height). All parameters are in percentage of
+ * screen resolution.
+ */
+ public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror) {
+ return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
+ }
+
+ /**
+ * Creates VideoRenderer.Callbacks with top left corner at (x, y) and resolution (width, height).
+ * All parameters are in percentage of screen resolution. The custom |drawer| will be used for
+ * drawing frames on the EGLSurface. This class is responsible for calling release() on |drawer|.
+ */
+ public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
+ // Check display region parameters.
+ if (x < 0 || x > 100 || y < 0 || y > 100 ||
+ width < 0 || width > 100 || height < 0 || height > 100 ||
+ x + width > 100 || y + height > 100) {
+ throw new RuntimeException("Incorrect window parameters.");
+ }
+
+ if (instance == null) {
+ throw new RuntimeException(
+ "Attempt to create yuv renderer before setting GLSurfaceView");
+ }
+ final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
+ instance.surface, instance.yuvImageRenderers.size(),
+ x, y, width, height, scalingType, mirror, drawer);
+ synchronized (instance.yuvImageRenderers) {
+ if (instance.onSurfaceCreatedCalled) {
+ // onSurfaceCreated has already been called for VideoRendererGui -
+ // need to create texture for new image and add image to the
+ // rendering list.
+ final CountDownLatch countDownLatch = new CountDownLatch(1);
+ instance.surface.queueEvent(new Runnable() {
+ @Override
+ public void run() {
+ yuvImageRenderer.createTextures();
+ yuvImageRenderer.setScreenSize(
+ instance.screenWidth, instance.screenHeight);
+ countDownLatch.countDown();
+ }
+ });
+ // Wait for task completion.
+ try {
+ countDownLatch.await();
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ // Add yuv renderer to rendering list.
+ instance.yuvImageRenderers.add(yuvImageRenderer);
+ }
+ return yuvImageRenderer;
+ }
+
+ public static synchronized void update(
+ VideoRenderer.Callbacks renderer, int x, int y, int width, int height,
+ RendererCommon.ScalingType scalingType, boolean mirror) {
+ Logging.d(TAG, "VideoRendererGui.update");
+ if (instance == null) {
+ throw new RuntimeException(
+ "Attempt to update yuv renderer before setting GLSurfaceView");
+ }
+ synchronized (instance.yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+ if (yuvImageRenderer == renderer) {
+ yuvImageRenderer.setPosition(x, y, width, height, scalingType, mirror);
+ }
+ }
+ }
+ }
+
+ public static synchronized void setRendererEvents(
+ VideoRenderer.Callbacks renderer, RendererCommon.RendererEvents rendererEvents) {
+ Logging.d(TAG, "VideoRendererGui.setRendererEvents");
+ if (instance == null) {
+ throw new RuntimeException(
+ "Attempt to set renderer events before setting GLSurfaceView");
+ }
+ synchronized (instance.yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+ if (yuvImageRenderer == renderer) {
+ yuvImageRenderer.rendererEvents = rendererEvents;
+ }
+ }
+ }
+ }
+
+ public static synchronized void remove(VideoRenderer.Callbacks renderer) {
+ Logging.d(TAG, "VideoRendererGui.remove");
+ if (instance == null) {
+ throw new RuntimeException(
+ "Attempt to remove renderer before setting GLSurfaceView");
+ }
+ synchronized (instance.yuvImageRenderers) {
+ final int index = instance.yuvImageRenderers.indexOf(renderer);
+ if (index == -1) {
+ Logging.w(TAG, "Couldn't remove renderer (not present in current list)");
+ } else {
+ instance.yuvImageRenderers.remove(index).release();
+ }
+ }
+ }
+
+ public static synchronized void reset(VideoRenderer.Callbacks renderer) {
+ Logging.d(TAG, "VideoRendererGui.reset");
+ if (instance == null) {
+ throw new RuntimeException(
+ "Attempt to reset renderer before setting GLSurfaceView");
+ }
+ synchronized (instance.yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : instance.yuvImageRenderers) {
+ if (yuvImageRenderer == renderer) {
+ yuvImageRenderer.reset();
+ }
+ }
+ }
+ }
+
+ private static void printStackTrace(Thread thread, String threadName) {
+ if (thread != null) {
+ StackTraceElement[] stackTraces = thread.getStackTrace();
+ if (stackTraces.length > 0) {
+      Logging.d(TAG, threadName + " stack trace:");
+ for (StackTraceElement stackTrace : stackTraces) {
+ Logging.d(TAG, stackTrace.toString());
+ }
+ }
+ }
+ }
+
+ public static synchronized void printStackTraces() {
+ if (instance == null) {
+ return;
+ }
+ printStackTrace(renderFrameThread, "Render frame thread");
+ printStackTrace(drawThread, "Draw thread");
+ }
+
+ @SuppressLint("NewApi")
+ @Override
+ public void onSurfaceCreated(GL10 unused, EGLConfig config) {
+ Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
+ // Store render EGL context.
+ synchronized (VideoRendererGui.class) {
+ if (EglBase14.isEGL14Supported()) {
+ eglContext = new EglBase14.Context(EGL14.eglGetCurrentContext());
+ } else {
+ eglContext = new EglBase10.Context(((EGL10) EGLContext.getEGL()).eglGetCurrentContext());
+ }
+
+ Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
+ }
+
+ synchronized (yuvImageRenderers) {
+ // Create textures for all images.
+ for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+ yuvImageRenderer.createTextures();
+ }
+ onSurfaceCreatedCalled = true;
+ }
+ GlUtil.checkNoGLES2Error("onSurfaceCreated done");
+ GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
+ GLES20.glClearColor(0.15f, 0.15f, 0.15f, 1.0f);
+
+ // Fire EGL context ready event.
+ synchronized (VideoRendererGui.class) {
+ if (eglContextReady != null) {
+ eglContextReady.run();
+ }
+ }
+ }
+
+ @Override
+ public void onSurfaceChanged(GL10 unused, int width, int height) {
+ Logging.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
+ width + " x " + height + " ");
+ screenWidth = width;
+ screenHeight = height;
+ synchronized (yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+ yuvImageRenderer.setScreenSize(screenWidth, screenHeight);
+ }
+ }
+ }
+
+ @Override
+ public void onDrawFrame(GL10 unused) {
+ if (drawThread == null) {
+ drawThread = Thread.currentThread();
+ }
+ GLES20.glViewport(0, 0, screenWidth, screenHeight);
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+ synchronized (yuvImageRenderers) {
+ for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
+ yuvImageRenderer.draw();
+ }
+ }
+ }
+
+}
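(For orientation, a caller-side sketch of the static API above; the create()
overload is defined earlier in this file and is assumed here to return the
VideoRenderer.Callbacks for the new rendering window. Coordinates are
percentages of the surface, and all values below are illustrative.)

    VideoRenderer.Callbacks callbacks = VideoRendererGui.create(
        0, 0, 50, 50, RendererCommon.ScalingType.SCALE_ASPECT_FIT, true);
    videoTrack.addRenderer(new VideoRenderer(callbacks));
    // Later: resize to full screen, then tear down.
    VideoRendererGui.update(callbacks, 0, 0, 100, 100,
        RendererCommon.ScalingType.SCALE_ASPECT_FILL, false);
    VideoRendererGui.remove(callbacks);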
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoSource.java b/webrtc/api/android/java/src/org/webrtc/VideoSource.java
new file mode 100644
index 0000000..3864ccb
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/VideoSource.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+package org.webrtc;
+
+/**
+ * Java version of VideoSourceInterface, extended with stop/restart
+ * functionality to allow explicit control of the camera device on Android,
+ * where there is no support for multiple open capture devices and the cost of
+ * holding a camera open (even if MediaStreamTrack.setEnabled(false) is muting
+ * its output to the encoder) can be too high to bear.
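+ *
+ * A caller-side sketch (illustrative, not part of the API contract):
+ * <pre>
+ * videoSource.stop();    // E.g. in Activity.onPause(): releases the camera.
+ * videoSource.restart(); // E.g. in Activity.onResume(); legal only after stop().
+ * </pre>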
+ */
+public class VideoSource extends MediaSource {
+
+ public VideoSource(long nativeSource) {
+ super(nativeSource);
+ }
+
+ // Stop capture feeding this source.
+ public void stop() {
+ stop(nativeSource);
+ }
+
+ // Restart capture feeding this source. stop() must have been called since
+ // the last call to restart() (if any). Note that this isn't "start()";
+ // sources are started by default at birth.
+ public void restart() {
+ restart(nativeSource);
+ }
+
+ @Override
+ public void dispose() {
+ super.dispose();
+ }
+
+ private static native void stop(long nativeSource);
+ private static native void restart(long nativeSource);
+}
diff --git a/webrtc/api/android/java/src/org/webrtc/VideoTrack.java b/webrtc/api/android/java/src/org/webrtc/VideoTrack.java
new file mode 100644
index 0000000..a4ec8ca
--- /dev/null
+++ b/webrtc/api/android/java/src/org/webrtc/VideoTrack.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import java.util.LinkedList;
+
+/** Java version of VideoTrackInterface. */
+public class VideoTrack extends MediaStreamTrack {
+ private final LinkedList<VideoRenderer> renderers =
+ new LinkedList<VideoRenderer>();
+
+ public VideoTrack(long nativeTrack) {
+ super(nativeTrack);
+ }
+
+ public void addRenderer(VideoRenderer renderer) {
+ renderers.add(renderer);
+ nativeAddRenderer(nativeTrack, renderer.nativeVideoRenderer);
+ }
+
+ public void removeRenderer(VideoRenderer renderer) {
+ if (!renderers.remove(renderer)) {
+ return;
+ }
+ nativeRemoveRenderer(nativeTrack, renderer.nativeVideoRenderer);
+ renderer.dispose();
+ }
+
+ public void dispose() {
+ while (!renderers.isEmpty()) {
+ removeRenderer(renderers.getFirst());
+ }
+ super.dispose();
+ }
+
+ private static native void free(long nativeTrack);
+
+ private static native void nativeAddRenderer(
+ long nativeTrack, long nativeRenderer);
+
+ private static native void nativeRemoveRenderer(
+ long nativeTrack, long nativeRenderer);
+}
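(Ownership note and caller-side sketch for the class above: removeRenderer()
also dispose()s the removed VideoRenderer, and dispose() on the track detaches
and releases any renderers still attached, so a renderer must not be reused
after either call. Names below are illustrative.)

    VideoRenderer renderer = new VideoRenderer(rendererCallbacks);
    videoTrack.addRenderer(renderer);
    // ...
    videoTrack.removeRenderer(renderer); // Also disposes |renderer|.
    videoTrack.dispose();                // Releases any remaining renderers.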
diff --git a/webrtc/api/android/jni/OWNERS b/webrtc/api/android/jni/OWNERS
new file mode 100644
index 0000000..4178fd6
--- /dev/null
+++ b/webrtc/api/android/jni/OWNERS
@@ -0,0 +1,8 @@
+per-file androidvideocapturer*=magjed@webrtc.org
+per-file androidmediaencoder*=magjed@webrtc.org
+per-file androidmediadecoder*=magjed@webrtc.org
+per-file androidmediacodeccommon.h=magjed@webrtc.org
+per-file surfacetexturehelper*=magjed@webrtc.org
+per-file native_handle_impl*=magjed@webrtc.org
+# Video related parts of peerconnection only.
+per-file peerconnection_jni.cc=magjed@webrtc.org
diff --git a/webrtc/api/android/jni/androidmediacodeccommon.h b/webrtc/api/android/jni/androidmediacodeccommon.h
new file mode 100644
index 0000000..4aa938d
--- /dev/null
+++ b/webrtc/api/android/jni/androidmediacodeccommon.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
+
+#include <android/log.h>
+#include <string>
+
+#include "webrtc/base/thread.h"
+#include "webrtc/api/android/jni/classreferenceholder.h"
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+
+namespace webrtc_jni {
+
+// Uncomment this define to enable verbose logging for every encoded/decoded
+// video frame.
+//#define TRACK_BUFFER_TIMING
+
+#define TAG_COMMON "MediaCodecVideo"
+
+// Color formats supported by encoder - should mirror supportedColorList
+// from MediaCodecVideoEncoder.java
+enum COLOR_FORMATTYPE {
+ COLOR_FormatYUV420Planar = 0x13,
+ COLOR_FormatYUV420SemiPlanar = 0x15,
+ COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00,
+ // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+ // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+ // This format is presumably similar to COLOR_FormatYUV420SemiPlanar,
+ // but requires some (16, 32?) byte alignment.
+ COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04
+};
+
+// Arbitrary interval to poll the codec for new outputs.
+enum { kMediaCodecPollMs = 10 };
+// Arbitrary interval to poll at when there should be no more frames.
+enum { kMediaCodecPollNoFramesMs = 100 };
+// Media codec maximum output buffer ready timeout.
+enum { kMediaCodecTimeoutMs = 1000 };
+// Interval to print codec statistics (bitrate, fps, encoding/decoding time).
+enum { kMediaCodecStatisticsIntervalMs = 3000 };
+// Maximum number of pending frames for the VP8 decoder.
+enum { kMaxPendingFramesVp8 = 1 };
+// Maximum number of pending frames for the VP9 decoder.
+enum { kMaxPendingFramesVp9 = 1 };
+// Maximum number of pending frames for the H.264 decoder.
+enum { kMaxPendingFramesH264 = 3 };
+// Maximum number of decoded frames for which per-frame logging is enabled.
+enum { kMaxDecodedLogFrames = 10 };
+// Maximum number of encoded frames for which per-frame logging is enabled.
+enum { kMaxEncodedLogFrames = 10 };
+
+static inline void AllowBlockingCalls() {
+ rtc::Thread* current_thread = rtc::Thread::Current();
+ if (current_thread != NULL)
+ current_thread->SetAllowBlockingCalls(true);
+}
+
+// Return the (singleton) Java Enum object corresponding to |index|;
+// |state_class_fragment| is something like "MediaSource$State".
+static inline jobject JavaEnumFromIndexAndClassName(
+ JNIEnv* jni, const std::string& state_class_fragment, int index) {
+ const std::string state_class = "org/webrtc/" + state_class_fragment;
+ return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
+ state_class, index);
+}
+
+// Checks for any Java exception, prints stack backtrace and clears
+// currently thrown exception.
+static inline bool CheckException(JNIEnv* jni) {
+ if (jni->ExceptionCheck()) {
+ LOG_TAG(rtc::LS_ERROR, TAG_COMMON) << "Java JNI exception.";
+ jni->ExceptionDescribe();
+ jni->ExceptionClear();
+ return true;
+ }
+ return false;
+}
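+
+// Typical call-site pattern for CheckException() (an illustrative sketch;
+// the method handle and error code below stand in for the real call sites
+// in the encoder/decoder implementations):
+//
+//   bool success = jni->CallBooleanMethod(j_codec, j_method);
+//   if (CheckException(jni) || !success) {
+//     // Any Java exception has been logged and cleared; fall back.
+//     return WEBRTC_VIDEO_CODEC_ERROR;
+//   }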
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
diff --git a/webrtc/api/android/jni/androidmediadecoder_jni.cc b/webrtc/api/android/jni/androidmediadecoder_jni.cc
new file mode 100644
index 0000000..2a7e689
--- /dev/null
+++ b/webrtc/api/android/jni/androidmediadecoder_jni.cc
@@ -0,0 +1,995 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <memory>
+#include <vector>
+
+// NOTICE: androidmediadecoder_jni.h must be included before
+// androidmediacodeccommon.h to avoid build errors.
+#include "webrtc/api/android/jni/androidmediadecoder_jni.h"
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
+#include "webrtc/api/android/jni/androidmediacodeccommon.h"
+#include "webrtc/api/android/jni/classreferenceholder.h"
+#include "webrtc/api/android/jni/native_handle_impl.h"
+#include "webrtc/api/android/jni/surfacetexturehelper_jni.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+
+using webrtc::CodecSpecificInfo;
+using webrtc::DecodedImageCallback;
+using webrtc::EncodedImage;
+using webrtc::VideoFrame;
+using webrtc::RTPFragmentationHeader;
+using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
+using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
+
+namespace webrtc_jni {
+
+// Logging macros.
+#define TAG_DECODER "MediaCodecVideoDecoder"
+#ifdef TRACK_BUFFER_TIMING
+#define ALOGV(...) \
+  __android_log_print(ANDROID_LOG_VERBOSE, TAG_DECODER, __VA_ARGS__)
+#else
+#define ALOGV(...)
+#endif
+#define ALOGD LOG_TAG(rtc::LS_INFO, TAG_DECODER)
+#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_DECODER)
+#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_DECODER)
+
+enum { kMaxWarningLogFrames = 2 };
+
+class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
+ public rtc::MessageHandler {
+ public:
+ explicit MediaCodecVideoDecoder(
+ JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context);
+ virtual ~MediaCodecVideoDecoder();
+
+ int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
+ override;
+
+ int32_t Decode(
+ const EncodedImage& inputImage, bool missingFrames,
+ const RTPFragmentationHeader* fragmentation,
+ const CodecSpecificInfo* codecSpecificInfo = NULL,
+ int64_t renderTimeMs = -1) override;
+
+ int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
+ override;
+
+ int32_t Release() override;
+
+ bool PrefersLateDecoding() const override { return true; }
+
+ // rtc::MessageHandler implementation.
+ void OnMessage(rtc::Message* msg) override;
+
+ const char* ImplementationName() const override;
+
+ private:
+ // CHECK-fail if not running on |codec_thread_|.
+ void CheckOnCodecThread();
+
+ int32_t InitDecodeOnCodecThread();
+ int32_t ResetDecodeOnCodecThread();
+ int32_t ReleaseOnCodecThread();
+ int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
+ // Deliver any outputs pending in the MediaCodec to our |callback_| and return
+ // true on success.
+  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_ms);
+ int32_t ProcessHWErrorOnCodecThread();
+ void EnableFrameLogOnWarning();
+ void ResetVariables();
+
+ // Type of video codec.
+ VideoCodecType codecType_;
+
+ // Render EGL context - owned by factory, should not be allocated/destroyed
+ // by VideoDecoder.
+ jobject render_egl_context_;
+
+ bool key_frame_required_;
+ bool inited_;
+ bool sw_fallback_required_;
+ bool use_surface_;
+ VideoCodec codec_;
+ webrtc::I420BufferPool decoded_frame_pool_;
+ rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+ DecodedImageCallback* callback_;
+ int frames_received_; // Number of frames received by decoder.
+ int frames_decoded_; // Number of frames decoded by decoder.
+ // Number of decoded frames for which log information is displayed.
+ int frames_decoded_logged_;
+ int64_t start_time_ms_; // Start time for statistics.
+ int current_frames_; // Number of frames in the current statistics interval.
+ int current_bytes_; // Encoded bytes in the current statistics interval.
+  int current_decoding_time_ms_;  // Overall decoding time in the current interval.
+ int current_delay_time_ms_; // Overall delay time in the current second.
+ uint32_t max_pending_frames_; // Maximum number of pending input frames.
+
+ // State that is constant for the lifetime of this object once the ctor
+ // returns.
+ std::unique_ptr<Thread>
+ codec_thread_; // Thread on which to operate MediaCodec.
+ ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
+ ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
+ jmethodID j_init_decode_method_;
+ jmethodID j_reset_method_;
+ jmethodID j_release_method_;
+ jmethodID j_dequeue_input_buffer_method_;
+ jmethodID j_queue_input_buffer_method_;
+ jmethodID j_dequeue_byte_buffer_method_;
+ jmethodID j_dequeue_texture_buffer_method_;
+ jmethodID j_return_decoded_byte_buffer_method_;
+ // MediaCodecVideoDecoder fields.
+ jfieldID j_input_buffers_field_;
+ jfieldID j_output_buffers_field_;
+ jfieldID j_color_format_field_;
+ jfieldID j_width_field_;
+ jfieldID j_height_field_;
+ jfieldID j_stride_field_;
+ jfieldID j_slice_height_field_;
+ // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
+ jfieldID j_texture_id_field_;
+ jfieldID j_transform_matrix_field_;
+ jfieldID j_texture_presentation_timestamp_ms_field_;
+ jfieldID j_texture_timestamp_ms_field_;
+ jfieldID j_texture_ntp_timestamp_ms_field_;
+ jfieldID j_texture_decode_time_ms_field_;
+ jfieldID j_texture_frame_delay_ms_field_;
+ // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
+ jfieldID j_info_index_field_;
+ jfieldID j_info_offset_field_;
+ jfieldID j_info_size_field_;
+ jfieldID j_presentation_timestamp_ms_field_;
+ jfieldID j_timestamp_ms_field_;
+ jfieldID j_ntp_timestamp_ms_field_;
+ jfieldID j_byte_buffer_decode_time_ms_field_;
+
+ // Global references; must be deleted in Release().
+ std::vector<jobject> input_buffers_;
+};
+
+MediaCodecVideoDecoder::MediaCodecVideoDecoder(
+ JNIEnv* jni, VideoCodecType codecType, jobject render_egl_context) :
+ codecType_(codecType),
+ render_egl_context_(render_egl_context),
+ key_frame_required_(true),
+ inited_(false),
+ sw_fallback_required_(false),
+ codec_thread_(new Thread()),
+ j_media_codec_video_decoder_class_(
+ jni,
+ FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
+ j_media_codec_video_decoder_(
+ jni,
+ jni->NewObject(*j_media_codec_video_decoder_class_,
+ GetMethodID(jni,
+ *j_media_codec_video_decoder_class_,
+ "<init>",
+ "()V"))) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
+ RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
+
+ j_init_decode_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "initDecode",
+ "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
+ "IILorg/webrtc/SurfaceTextureHelper;)Z");
+ j_reset_method_ =
+ GetMethodID(jni, *j_media_codec_video_decoder_class_, "reset", "(II)V");
+ j_release_method_ =
+ GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
+ j_dequeue_input_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
+ j_queue_input_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJJJ)Z");
+ j_dequeue_byte_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;");
+ j_dequeue_texture_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
+ "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
+ j_return_decoded_byte_buffer_method_ =
+ GetMethodID(jni, *j_media_codec_video_decoder_class_,
+ "returnDecodedOutputBuffer", "(I)V");
+
+ j_input_buffers_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_,
+ "inputBuffers", "[Ljava/nio/ByteBuffer;");
+ j_output_buffers_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_,
+ "outputBuffers", "[Ljava/nio/ByteBuffer;");
+ j_color_format_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
+ j_width_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "width", "I");
+ j_height_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "height", "I");
+ j_stride_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "stride", "I");
+ j_slice_height_field_ = GetFieldID(
+ jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
+
+ jclass j_decoded_texture_buffer_class = FindClass(jni,
+ "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+ j_texture_id_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "textureID", "I");
+ j_transform_matrix_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
+ j_texture_presentation_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "presentationTimeStampMs", "J");
+ j_texture_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "timeStampMs", "J");
+ j_texture_ntp_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J");
+ j_texture_decode_time_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
+ j_texture_frame_delay_ms_field_ = GetFieldID(
+ jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
+
+ jclass j_decoded_output_buffer_class = FindClass(jni,
+ "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
+ j_info_index_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "index", "I");
+ j_info_offset_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "offset", "I");
+ j_info_size_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "size", "I");
+ j_presentation_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "presentationTimeStampMs", "J");
+ j_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "timeStampMs", "J");
+ j_ntp_timestamp_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J");
+ j_byte_buffer_decode_time_ms_field_ = GetFieldID(
+ jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");
+
+ CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
+ use_surface_ = (render_egl_context_ != NULL);
+ ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
+ memset(&codec_, 0, sizeof(codec_));
+ AllowBlockingCalls();
+}
+
+MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
+ // Call Release() to ensure no more callbacks to us after we are deleted.
+ Release();
+}
+
+int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
+ int32_t numberOfCores) {
+ ALOGD << "InitDecode.";
+ if (inst == NULL) {
+ ALOGE << "NULL VideoCodec instance";
+ return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+ }
+ // Factory should guard against other codecs being used with us.
+ RTC_CHECK(inst->codecType == codecType_)
+ << "Unsupported codec " << inst->codecType << " for " << codecType_;
+
+ if (sw_fallback_required_) {
+ ALOGE << "InitDecode() - fallback to SW decoder";
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+ // Save VideoCodec instance for later.
+ if (&codec_ != inst) {
+ codec_ = *inst;
+ }
+ // If maxFramerate is not set then assume 30 fps.
+ codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 30;
+
+ // Call Java init.
+ return codec_thread_->Invoke<int32_t>(
+ RTC_FROM_HERE,
+ Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
+}
+
+void MediaCodecVideoDecoder::ResetVariables() {
+ CheckOnCodecThread();
+
+ key_frame_required_ = true;
+ frames_received_ = 0;
+ frames_decoded_ = 0;
+ frames_decoded_logged_ = kMaxDecodedLogFrames;
+ start_time_ms_ = rtc::TimeMillis();
+ current_frames_ = 0;
+ current_bytes_ = 0;
+ current_decoding_time_ms_ = 0;
+ current_delay_time_ms_ = 0;
+}
+
+int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ ALOGD << "InitDecodeOnCodecThread Type: " << (int)codecType_ << ". "
+ << codec_.width << " x " << codec_.height << ". Fps: " <<
+ (int)codec_.maxFramerate;
+
+ // Release previous codec first if it was allocated before.
+ int ret_val = ReleaseOnCodecThread();
+ if (ret_val < 0) {
+ ALOGE << "Release failure: " << ret_val << " - fallback to SW codec";
+ sw_fallback_required_ = true;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ ResetVariables();
+
+ if (use_surface_) {
+ surface_texture_helper_ = SurfaceTextureHelper::create(
+ jni, "Decoder SurfaceTextureHelper", render_egl_context_);
+ if (!surface_texture_helper_) {
+ ALOGE << "Couldn't create SurfaceTextureHelper - fallback to SW codec";
+ sw_fallback_required_ = true;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ }
+
+ jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
+ jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
+ bool success = jni->CallBooleanMethod(
+ *j_media_codec_video_decoder_,
+ j_init_decode_method_,
+ j_video_codec_enum,
+ codec_.width,
+ codec_.height,
+ use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
+ : nullptr);
+
+ if (CheckException(jni) || !success) {
+ ALOGE << "Codec initialization error - fallback to SW codec.";
+ sw_fallback_required_ = true;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ inited_ = true;
+
+ switch (codecType_) {
+ case kVideoCodecVP8:
+ max_pending_frames_ = kMaxPendingFramesVp8;
+ break;
+ case kVideoCodecVP9:
+ max_pending_frames_ = kMaxPendingFramesVp9;
+ break;
+ case kVideoCodecH264:
+ max_pending_frames_ = kMaxPendingFramesH264;
+ break;
+ default:
+ max_pending_frames_ = 0;
+ }
+ ALOGD << "Maximum amount of pending frames: " << max_pending_frames_;
+
+ jobjectArray input_buffers = (jobjectArray)GetObjectField(
+ jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
+ size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+ input_buffers_.resize(num_input_buffers);
+ for (size_t i = 0; i < num_input_buffers; ++i) {
+ input_buffers_[i] =
+ jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+ if (CheckException(jni)) {
+ ALOGE << "NewGlobalRef error - fallback to SW codec.";
+ sw_fallback_required_ = true;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ }
+
+ codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoDecoder::ResetDecodeOnCodecThread() {
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ ALOGD << "ResetDecodeOnCodecThread Type: " << (int)codecType_ << ". "
+ << codec_.width << " x " << codec_.height;
+ ALOGD << " Frames received: " << frames_received_ <<
+ ". Frames decoded: " << frames_decoded_;
+
+ inited_ = false;
+ rtc::MessageQueueManager::Clear(this);
+ ResetVariables();
+
+ jni->CallVoidMethod(
+ *j_media_codec_video_decoder_,
+ j_reset_method_,
+ codec_.width,
+ codec_.height);
+
+ if (CheckException(jni)) {
+ ALOGE << "Soft reset error - fallback to SW codec.";
+ sw_fallback_required_ = true;
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ inited_ = true;
+
+ codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoDecoder::Release() {
+ ALOGD << "DecoderRelease request";
+ return codec_thread_->Invoke<int32_t>(
+ RTC_FROM_HERE, Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
+ if (!inited_) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ALOGD << "DecoderReleaseOnCodecThread: Frames received: " <<
+ frames_received_ << ". Frames decoded: " << frames_decoded_;
+ ScopedLocalRefFrame local_ref_frame(jni);
+ for (size_t i = 0; i < input_buffers_.size(); i++) {
+ jni->DeleteGlobalRef(input_buffers_[i]);
+ }
+ input_buffers_.clear();
+ jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+ surface_texture_helper_ = nullptr;
+ inited_ = false;
+ rtc::MessageQueueManager::Clear(this);
+ if (CheckException(jni)) {
+ ALOGE << "Decoder release exception";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ ALOGD << "DecoderReleaseOnCodecThread done";
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+void MediaCodecVideoDecoder::CheckOnCodecThread() {
+ RTC_CHECK(codec_thread_.get() == ThreadManager::Instance()->CurrentThread())
+ << "Running on wrong thread!";
+}
+
+void MediaCodecVideoDecoder::EnableFrameLogOnWarning() {
+ // Log next 2 output frames.
+ frames_decoded_logged_ = std::max(
+ frames_decoded_logged_, frames_decoded_ + kMaxWarningLogFrames);
+}
+
+int32_t MediaCodecVideoDecoder::ProcessHWErrorOnCodecThread() {
+ CheckOnCodecThread();
+ int ret_val = ReleaseOnCodecThread();
+ if (ret_val < 0) {
+ ALOGE << "ProcessHWError: Release failure";
+ }
+ if (codecType_ == kVideoCodecH264) {
+    // There is currently no SW H.264 codec to fall back on,
+    // so try to restart the HW codec instead.
+ ret_val = InitDecodeOnCodecThread();
+ ALOGE << "Reset H.264 codec done. Status: " << ret_val;
+ if (ret_val == WEBRTC_VIDEO_CODEC_OK) {
+      // H.264 codec was successfully reset - return regular error code.
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ } else {
+ // Fail to restart H.264 codec - return error code which should stop the
+ // call.
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+ } else {
+ sw_fallback_required_ = true;
+ ALOGE << "Return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+}
+
+int32_t MediaCodecVideoDecoder::Decode(
+ const EncodedImage& inputImage,
+ bool missingFrames,
+ const RTPFragmentationHeader* fragmentation,
+ const CodecSpecificInfo* codecSpecificInfo,
+ int64_t renderTimeMs) {
+ if (sw_fallback_required_) {
+ ALOGE << "Decode() - fallback to SW codec";
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+ if (callback_ == NULL) {
+ ALOGE << "Decode() - callback_ is NULL";
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+ if (inputImage._buffer == NULL && inputImage._length > 0) {
+ ALOGE << "Decode() - inputImage is incorrect";
+ return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+ }
+ if (!inited_) {
+ ALOGE << "Decode() - decoder is not initialized";
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+
+ // Check if encoded frame dimension has changed.
+ if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
+ (inputImage._encodedWidth != codec_.width ||
+ inputImage._encodedHeight != codec_.height)) {
+ ALOGW << "Input resolution changed from " <<
+ codec_.width << " x " << codec_.height << " to " <<
+ inputImage._encodedWidth << " x " << inputImage._encodedHeight;
+ codec_.width = inputImage._encodedWidth;
+ codec_.height = inputImage._encodedHeight;
+ int32_t ret;
+ if (use_surface_ &&
+ (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecH264)) {
+ // Soft codec reset - only for surface decoding.
+ ret = codec_thread_->Invoke<int32_t>(
+ RTC_FROM_HERE,
+ Bind(&MediaCodecVideoDecoder::ResetDecodeOnCodecThread, this));
+ } else {
+ // Hard codec reset.
+ ret = InitDecode(&codec_, 1);
+ }
+ if (ret < 0) {
+ ALOGE << "InitDecode failure: " << ret << " - fallback to SW codec";
+ sw_fallback_required_ = true;
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+ }
+
+ // Always start with a complete key frame.
+ if (key_frame_required_) {
+ if (inputImage._frameType != webrtc::kVideoFrameKey) {
+ ALOGE << "Decode() - key frame is required";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ if (!inputImage._completeFrame) {
+ ALOGE << "Decode() - complete frame is required";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ key_frame_required_ = false;
+ }
+ if (inputImage._length == 0) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ return codec_thread_->Invoke<int32_t>(
+ RTC_FROM_HERE,
+ Bind(&MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
+}
+
+int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
+ const EncodedImage& inputImage) {
+ CheckOnCodecThread();
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ // Try to drain the decoder and wait until output is not too
+ // much behind the input.
+ if (codecType_ == kVideoCodecH264 &&
+ frames_received_ > frames_decoded_ + max_pending_frames_) {
+ // Print warning for H.264 only - for VP8/VP9 one frame delay is ok.
+ ALOGW << "Decoder is too far behind. Try to drain. Received: " <<
+ frames_received_ << ". Decoded: " << frames_decoded_;
+ EnableFrameLogOnWarning();
+ }
+  const int64_t drain_start = rtc::TimeMillis();
+ while ((frames_received_ > frames_decoded_ + max_pending_frames_) &&
+ (rtc::TimeMillis() - drain_start) < kMediaCodecTimeoutMs) {
+ if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
+ ALOGE << "DeliverPendingOutputs error. Frames received: " <<
+ frames_received_ << ". Frames decoded: " << frames_decoded_;
+ return ProcessHWErrorOnCodecThread();
+ }
+ }
+ if (frames_received_ > frames_decoded_ + max_pending_frames_) {
+ ALOGE << "Output buffer dequeue timeout. Frames received: " <<
+ frames_received_ << ". Frames decoded: " << frames_decoded_;
+ return ProcessHWErrorOnCodecThread();
+ }
+
+ // Get input buffer.
+ int j_input_buffer_index = jni->CallIntMethod(
+ *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
+ if (CheckException(jni) || j_input_buffer_index < 0) {
+ ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index <<
+ ". Retry DeliverPendingOutputs.";
+ EnableFrameLogOnWarning();
+ // Try to drain the decoder.
+ if (!DeliverPendingOutputs(jni, kMediaCodecPollMs)) {
+ ALOGE << "DeliverPendingOutputs error. Frames received: " <<
+ frames_received_ << ". Frames decoded: " << frames_decoded_;
+ return ProcessHWErrorOnCodecThread();
+ }
+ // Try dequeue input buffer one last time.
+ j_input_buffer_index = jni->CallIntMethod(
+ *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
+ if (CheckException(jni) || j_input_buffer_index < 0) {
+ ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index;
+ return ProcessHWErrorOnCodecThread();
+ }
+ }
+
+ // Copy encoded data to Java ByteBuffer.
+ jobject j_input_buffer = input_buffers_[j_input_buffer_index];
+ uint8_t* buffer =
+ reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
+ RTC_CHECK(buffer) << "Indirect buffer??";
+ int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
+ if (CheckException(jni) || buffer_capacity < inputImage._length) {
+ ALOGE << "Input frame size "<< inputImage._length <<
+ " is bigger than buffer size " << buffer_capacity;
+ return ProcessHWErrorOnCodecThread();
+ }
+ jlong presentation_timestamp_us = static_cast<jlong>(
+ static_cast<int64_t>(frames_received_) * 1000000 / codec_.maxFramerate);
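+  // I.e. a synthetic clock derived from the frame count: at maxFramerate 30,
+  // frame n is stamped n * 1000000 / 30 us, advancing in 1/30 s steps.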
+ memcpy(buffer, inputImage._buffer, inputImage._length);
+
+ if (frames_decoded_ < frames_decoded_logged_) {
+ ALOGD << "Decoder frame in # " << frames_received_ <<
+ ". Type: " << inputImage._frameType <<
+ ". Buffer # " << j_input_buffer_index <<
+ ". TS: " << presentation_timestamp_us / 1000 <<
+ ". Size: " << inputImage._length;
+ }
+
+ // Save input image timestamps for later output.
+ frames_received_++;
+ current_bytes_ += inputImage._length;
+
+ // Feed input to decoder.
+ bool success = jni->CallBooleanMethod(
+ *j_media_codec_video_decoder_,
+ j_queue_input_buffer_method_,
+ j_input_buffer_index,
+ inputImage._length,
+ presentation_timestamp_us,
+ static_cast<int64_t> (inputImage._timeStamp),
+ inputImage.ntp_time_ms_);
+ if (CheckException(jni) || !success) {
+ ALOGE << "queueInputBuffer error";
+ return ProcessHWErrorOnCodecThread();
+ }
+
+ // Try to drain the decoder
+ if (!DeliverPendingOutputs(jni, 0)) {
+ ALOGE << "DeliverPendingOutputs error";
+ return ProcessHWErrorOnCodecThread();
+ }
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoDecoder::DeliverPendingOutputs(
+ JNIEnv* jni, int dequeue_timeout_ms) {
+ if (frames_received_ <= frames_decoded_) {
+ // No need to query for output buffers - decoder is drained.
+ return true;
+ }
+ // Get decoder output.
+ jobject j_decoder_output_buffer =
+ jni->CallObjectMethod(*j_media_codec_video_decoder_,
+ use_surface_ ? j_dequeue_texture_buffer_method_
+ : j_dequeue_byte_buffer_method_,
+ dequeue_timeout_ms);
+
+ if (CheckException(jni)) {
+ ALOGE << "dequeueOutputBuffer() error";
+ return false;
+ }
+ if (IsNull(jni, j_decoder_output_buffer)) {
+ // No decoded frame ready.
+ return true;
+ }
+
+ // Get decoded video frame properties.
+ int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
+ j_color_format_field_);
+ int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
+ int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
+ int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
+ int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
+ j_slice_height_field_);
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
+ int64_t presentation_timestamps_ms = 0;
+ int64_t output_timestamps_ms = 0;
+ int64_t output_ntp_timestamps_ms = 0;
+ int decode_time_ms = 0;
+ int64_t frame_delayed_ms = 0;
+ if (use_surface_) {
+ // Extract data from Java DecodedTextureBuffer.
+ presentation_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer,
+ j_texture_presentation_timestamp_ms_field_);
+ output_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
+ output_ntp_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_);
+ decode_time_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_);
+
+ const int texture_id =
+ GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
+ if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame.
+ const jfloatArray j_transform_matrix =
+ reinterpret_cast<jfloatArray>(GetObjectField(
+ jni, j_decoder_output_buffer, j_transform_matrix_field_));
+ frame_delayed_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_);
+
+ // Create webrtc::VideoFrameBuffer with native texture handle.
+ frame_buffer = surface_texture_helper_->CreateTextureFrame(
+ width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
+ } else {
+ EnableFrameLogOnWarning();
+ }
+ } else {
+ // Extract data from Java ByteBuffer and create output yuv420 frame -
+ // for non surface decoding only.
+ const int output_buffer_index = GetIntField(
+ jni, j_decoder_output_buffer, j_info_index_field_);
+ const int output_buffer_offset = GetIntField(
+ jni, j_decoder_output_buffer, j_info_offset_field_);
+ const int output_buffer_size = GetIntField(
+ jni, j_decoder_output_buffer, j_info_size_field_);
+ presentation_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_presentation_timestamp_ms_field_);
+ output_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_timestamp_ms_field_);
+ output_ntp_timestamps_ms = GetLongField(
+ jni, j_decoder_output_buffer, j_ntp_timestamp_ms_field_);
+
+ decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+ j_byte_buffer_decode_time_ms_field_);
+
+ if (output_buffer_size < width * height * 3 / 2) {
+ ALOGE << "Insufficient output buffer size: " << output_buffer_size;
+ return false;
+ }
+ if (output_buffer_size < stride * height * 3 / 2 &&
+ slice_height == height && stride > width) {
+      // Some codecs (Exynos) incorrectly report the stride of the output
+      // byte buffer, so the actual stride value needs to be corrected.
+ stride = output_buffer_size * 2 / (height * 3);
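+      // E.g. a reported stride of 1280 for a 640x480 frame with a
+      // 640*480*3/2 byte buffer is corrected to 640*480*3/2*2/(480*3) = 640.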
+ }
+ jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
+ jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
+ jobject output_buffer =
+ jni->GetObjectArrayElement(output_buffers, output_buffer_index);
+ uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
+ output_buffer));
+ if (CheckException(jni)) {
+ return false;
+ }
+ payload += output_buffer_offset;
+
+ // Create yuv420 frame.
+ frame_buffer = decoded_frame_pool_.CreateBuffer(width, height);
+ if (color_format == COLOR_FormatYUV420Planar) {
+ RTC_CHECK_EQ(0, stride % 2);
+ RTC_CHECK_EQ(0, slice_height % 2);
+ const int uv_stride = stride / 2;
+ const int u_slice_height = slice_height / 2;
+ const uint8_t* y_ptr = payload;
+ const uint8_t* u_ptr = y_ptr + stride * slice_height;
+ const uint8_t* v_ptr = u_ptr + uv_stride * u_slice_height;
+ libyuv::I420Copy(y_ptr, stride,
+ u_ptr, uv_stride,
+ v_ptr, uv_stride,
+ frame_buffer->MutableDataY(),
+ frame_buffer->StrideY(),
+ frame_buffer->MutableDataU(),
+ frame_buffer->StrideU(),
+ frame_buffer->MutableDataV(),
+ frame_buffer->StrideV(),
+ width, height);
+ } else {
+ // All other supported formats are nv12.
+ const uint8_t* y_ptr = payload;
+ const uint8_t* uv_ptr = y_ptr + stride * slice_height;
+ libyuv::NV12ToI420(
+ y_ptr, stride,
+ uv_ptr, stride,
+ frame_buffer->MutableDataY(),
+ frame_buffer->StrideY(),
+ frame_buffer->MutableDataU(),
+ frame_buffer->StrideU(),
+ frame_buffer->MutableDataV(),
+ frame_buffer->StrideV(),
+ width, height);
+ }
+ // Return output byte buffer back to codec.
+ jni->CallVoidMethod(
+ *j_media_codec_video_decoder_,
+ j_return_decoded_byte_buffer_method_,
+ output_buffer_index);
+ if (CheckException(jni)) {
+ ALOGE << "returnDecodedOutputBuffer error";
+ return false;
+ }
+ }
+ if (frames_decoded_ < frames_decoded_logged_) {
+ ALOGD << "Decoder frame out # " << frames_decoded_ <<
+ ". " << width << " x " << height <<
+ ". " << stride << " x " << slice_height <<
+ ". Color: " << color_format <<
+ ". TS: " << presentation_timestamps_ms <<
+ ". DecTime: " << (int)decode_time_ms <<
+ ". DelayTime: " << (int)frame_delayed_ms;
+ }
+
+ // Calculate and print decoding statistics - every 3 seconds.
+ frames_decoded_++;
+ current_frames_++;
+ current_decoding_time_ms_ += decode_time_ms;
+ current_delay_time_ms_ += frame_delayed_ms;
+ int statistic_time_ms = rtc::TimeMillis() - start_time_ms_;
+ if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
+ current_frames_ > 0) {
+ int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
+ int current_fps =
+ (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
+ ALOGD << "Frames decoded: " << frames_decoded_ <<
+ ". Received: " << frames_received_ <<
+ ". Bitrate: " << current_bitrate << " kbps" <<
+ ". Fps: " << current_fps <<
+ ". DecTime: " << (current_decoding_time_ms_ / current_frames_) <<
+ ". DelayTime: " << (current_delay_time_ms_ / current_frames_) <<
+ " for last " << statistic_time_ms << " ms.";
+ start_time_ms_ = rtc::TimeMillis();
+ current_frames_ = 0;
+ current_bytes_ = 0;
+ current_decoding_time_ms_ = 0;
+ current_delay_time_ms_ = 0;
+ }
+
+ // If the frame was dropped, frame_buffer is left as nullptr.
+ if (frame_buffer) {
+ VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
+ decoded_frame.set_timestamp(output_timestamps_ms);
+ decoded_frame.set_ntp_time_ms(output_ntp_timestamps_ms);
+
+ const int32_t callback_status =
+ callback_->Decoded(decoded_frame, decode_time_ms);
+ if (callback_status > 0) {
+ ALOGE << "callback error";
+ }
+ }
+ return true;
+}
+
+int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) {
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ if (!inited_) {
+ return;
+ }
+ // We only ever send one message to |this| directly (not through a Bind()'d
+ // functor), so expect no ID/data.
+ RTC_CHECK(!msg->message_id) << "Unexpected message!";
+ RTC_CHECK(!msg->pdata) << "Unexpected message!";
+ CheckOnCodecThread();
+
+ if (!DeliverPendingOutputs(jni, 0)) {
+ ALOGE << "OnMessage: DeliverPendingOutputs error";
+ ProcessHWErrorOnCodecThread();
+ return;
+ }
+ codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
+}
+
+MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory()
+ : egl_context_(nullptr) {
+ ALOGD << "MediaCodecVideoDecoderFactory ctor";
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+ supported_codec_types_.clear();
+
+ bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
+ j_decoder_class,
+ GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
+ if (CheckException(jni)) {
+ is_vp8_hw_supported = false;
+ }
+ if (is_vp8_hw_supported) {
+ ALOGD << "VP8 HW Decoder supported.";
+ supported_codec_types_.push_back(kVideoCodecVP8);
+ }
+
+ bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+ j_decoder_class,
+ GetStaticMethodID(jni, j_decoder_class, "isVp9HwSupported", "()Z"));
+ if (CheckException(jni)) {
+ is_vp9_hw_supported = false;
+ }
+ if (is_vp9_hw_supported) {
+ ALOGD << "VP9 HW Decoder supported.";
+ supported_codec_types_.push_back(kVideoCodecVP9);
+ }
+
+ bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+ j_decoder_class,
+ GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
+ if (CheckException(jni)) {
+ is_h264_hw_supported = false;
+ }
+ if (is_h264_hw_supported) {
+ ALOGD << "H264 HW Decoder supported.";
+ supported_codec_types_.push_back(kVideoCodecH264);
+ }
+}
+
+MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {
+ ALOGD << "MediaCodecVideoDecoderFactory dtor";
+ if (egl_context_) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ jni->DeleteGlobalRef(egl_context_);
+ }
+}
+
+void MediaCodecVideoDecoderFactory::SetEGLContext(
+ JNIEnv* jni, jobject egl_context) {
+ ALOGD << "MediaCodecVideoDecoderFactory::SetEGLContext";
+ if (egl_context_) {
+ jni->DeleteGlobalRef(egl_context_);
+ egl_context_ = nullptr;
+ }
+ egl_context_ = jni->NewGlobalRef(egl_context);
+ if (CheckException(jni)) {
+ ALOGE << "error calling NewGlobalRef for EGL Context.";
+ }
+}
+
+webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
+ VideoCodecType type) {
+ if (supported_codec_types_.empty()) {
+ ALOGW << "No HW video decoder for type " << (int)type;
+ return nullptr;
+ }
+ for (VideoCodecType codec_type : supported_codec_types_) {
+ if (codec_type == type) {
+ ALOGD << "Create HW video decoder for type " << (int)type;
+ return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded(), type,
+ egl_context_);
+ }
+ }
+ ALOGW << "Can not find HW video decoder for type " << (int)type;
+ return nullptr;
+}
+
+void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
+ webrtc::VideoDecoder* decoder) {
+ ALOGD << "Destroy video decoder.";
+ delete decoder;
+}
+
+const char* MediaCodecVideoDecoder::ImplementationName() const {
+ return "MediaCodec";
+}
+
+} // namespace webrtc_jni
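(A caller-side sketch of the factory defined above; the actual wiring lives in
the peerconnection JNI code elsewhere in this CL, and the variables here are
illustrative.)

    webrtc_jni::MediaCodecVideoDecoderFactory factory;
    factory.SetEGLContext(jni, egl_context);  // Enables surface decoding.
    webrtc::VideoDecoder* decoder =
        factory.CreateVideoDecoder(webrtc::kVideoCodecVP8);
    if (decoder) {
      // Use it via the webrtc::VideoDecoder interface, then:
      factory.DestroyVideoDecoder(decoder);
    }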
diff --git a/webrtc/api/android/jni/androidmediadecoder_jni.h b/webrtc/api/android/jni/androidmediadecoder_jni.h
new file mode 100644
index 0000000..f8e8fa6
--- /dev/null
+++ b/webrtc/api/android/jni/androidmediadecoder_jni.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
+
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/media/engine/webrtcvideodecoderfactory.h"
+
+namespace webrtc_jni {
+
+// Implementation of Android MediaCodec based decoder factory.
+class MediaCodecVideoDecoderFactory
+ : public cricket::WebRtcVideoDecoderFactory {
+ public:
+ MediaCodecVideoDecoderFactory();
+ virtual ~MediaCodecVideoDecoderFactory();
+
+ void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
+ // WebRtcVideoDecoderFactory implementation.
+ webrtc::VideoDecoder* CreateVideoDecoder(webrtc::VideoCodecType type)
+ override;
+
+ void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) override;
+
+ private:
+ jobject egl_context_;
+ std::vector<webrtc::VideoCodecType> supported_codec_types_;
+};
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
diff --git a/webrtc/api/android/jni/androidmediaencoder_jni.cc b/webrtc/api/android/jni/androidmediaencoder_jni.cc
new file mode 100644
index 0000000..8d0d3b5
--- /dev/null
+++ b/webrtc/api/android/jni/androidmediaencoder_jni.cc
@@ -0,0 +1,1306 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// NOTICE: androidmediaencoder_jni.h must be included before
+// androidmediacodeccommon.h to avoid build errors.
+#include "webrtc/api/android/jni/androidmediaencoder_jni.h"
+
+#include <algorithm>
+#include <list>
+#include <memory>
+
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
+#include "webrtc/api/android/jni/androidmediacodeccommon.h"
+#include "webrtc/api/android/jni/classreferenceholder.h"
+#include "webrtc/api/android/jni/native_handle_impl.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/thread.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/base/timeutils.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
+#include "webrtc/modules/video_coding/utility/h264_bitstream_parser.h"
+#include "webrtc/modules/video_coding/utility/quality_scaler.h"
+#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
+#include "webrtc/system_wrappers/include/field_trial.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+
+using webrtc::CodecSpecificInfo;
+using webrtc::EncodedImage;
+using webrtc::VideoFrame;
+using webrtc::RTPFragmentationHeader;
+using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
+using webrtc::kVideoCodecVP8;
+using webrtc::kVideoCodecVP9;
+using webrtc::QualityScaler;
+
+namespace webrtc_jni {
+
+// H.264 start code length.
+#define H264_SC_LENGTH 4
+// Maximum allowed NALUs in one output frame.
+#define MAX_NALUS_PERFRAME 32
+// Maximum supported HW video encoder resolution.
+#define MAX_VIDEO_WIDTH 1280
+#define MAX_VIDEO_HEIGHT 1280
+// Maximum supported HW video encoder fps.
+#define MAX_VIDEO_FPS 30
+// Maximum allowed fps value in SetRates() call.
+#define MAX_ALLOWED_VIDEO_FPS 60
+// Maximum allowed frames in encoder input queue.
+#define MAX_ENCODER_Q_SIZE 2
+// Maximum number of dropped frames caused by a full encoder queue - exceeding
+// this threshold means the encoder has probably stalled and needs to be reset.
+#define ENCODER_STALL_FRAMEDROP_THRESHOLD 60
+
+// Logging macros.
+#define TAG_ENCODER "MediaCodecVideoEncoder"
+#ifdef TRACK_BUFFER_TIMING
+#define ALOGV(...) \
+  __android_log_print(ANDROID_LOG_VERBOSE, TAG_ENCODER, __VA_ARGS__)
+#else
+#define ALOGV(...)
+#endif
+#define ALOGD LOG_TAG(rtc::LS_INFO, TAG_ENCODER)
+#define ALOGW LOG_TAG(rtc::LS_WARNING, TAG_ENCODER)
+#define ALOGE LOG_TAG(rtc::LS_ERROR, TAG_ENCODER)
+
+namespace {
+// Maximum time limit between incoming frames before requesting a key frame.
+const size_t kFrameDiffThresholdMs = 1100;
+const int kMinKeyFrameInterval = 2;
+} // namespace
+
+// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
+// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
+// HW-backed video encode. This C++ class is implemented as a very thin shim,
+// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
+// MediaCodecVideoEncoder is created, operated, and destroyed on a single
+// thread, currently the libjingle Worker thread.
+class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
+ public rtc::MessageHandler {
+ public:
+ virtual ~MediaCodecVideoEncoder();
+ MediaCodecVideoEncoder(JNIEnv* jni,
+ VideoCodecType codecType,
+ jobject egl_context);
+
+ // webrtc::VideoEncoder implementation. Everything trampolines to
+ // |codec_thread_| for execution.
+ int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
+ int32_t /* number_of_cores */,
+ size_t /* max_payload_size */) override;
+ int32_t Encode(const webrtc::VideoFrame& input_image,
+ const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+ const std::vector<webrtc::FrameType>* frame_types) override;
+ int32_t RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) override;
+ int32_t Release() override;
+ int32_t SetChannelParameters(uint32_t /* packet_loss */,
+ int64_t /* rtt */) override;
+ int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;
+
+ // rtc::MessageHandler implementation.
+ void OnMessage(rtc::Message* msg) override;
+
+ void OnDroppedFrame() override;
+
+ bool SupportsNativeHandle() const override { return egl_context_ != nullptr; }
+ const char* ImplementationName() const override;
+
+ private:
+ // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
+ // InitEncodeOnCodecThread() in an attempt to restore the codec to an
+ // operable state. Necessary after all manner of OMX-layer errors.
+ bool ResetCodecOnCodecThread();
+
+ // Implementation of webrtc::VideoEncoder methods above, all running on the
+ // codec thread exclusively.
+ //
+ // If width==0 then this is assumed to be a re-initialization and the
+ // previously-current values are reused instead of the passed parameters
+ // (makes it easier to reason about thread-safety).
+ int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
+ bool use_surface);
+ // Reconfigure to match |frame| in width, height. Also reconfigures the
+ // encoder if |frame| is a texture/byte buffer and the encoder is initialized
+ // for byte buffer/texture. Returns false if reconfiguring fails.
+ bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
+ int32_t EncodeOnCodecThread(
+ const webrtc::VideoFrame& input_image,
+ const std::vector<webrtc::FrameType>* frame_types,
+ const int64_t frame_input_time_ms);
+ bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
+ bool EncodeTextureOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame);
+
+ int32_t RegisterEncodeCompleteCallbackOnCodecThread(
+ webrtc::EncodedImageCallback* callback);
+ int32_t ReleaseOnCodecThread();
+ int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);
+ void OnDroppedFrameOnCodecThread();
+
+ // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
+ int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
+ jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
+ bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
+ jlong GetOutputBufferInfoPresentationTimestampUs(
+ JNIEnv* jni, jobject j_output_buffer_info);
+
+ // Deliver any outputs pending in the MediaCodec to our |callback_| and return
+ // true on success.
+ bool DeliverPendingOutputs(JNIEnv* jni);
+
+ // Search for H.264 start codes.
+ int32_t NextNaluPosition(uint8_t *buffer, size_t buffer_size);
+
+ // Displays encoder statistics.
+ void LogStatistics(bool force_log);
+
+ // Type of video codec.
+ VideoCodecType codecType_;
+
+ // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
+ // |codec_thread_| synchronously.
+ webrtc::EncodedImageCallback* callback_;
+
+ // State that is constant for the lifetime of this object once the ctor
+ // returns.
+ std::unique_ptr<Thread>
+ codec_thread_; // Thread on which to operate MediaCodec.
+ rtc::ThreadChecker codec_thread_checker_;
+ ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
+ ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
+ jmethodID j_init_encode_method_;
+ jmethodID j_get_input_buffers_method_;
+ jmethodID j_dequeue_input_buffer_method_;
+ jmethodID j_encode_buffer_method_;
+ jmethodID j_encode_texture_method_;
+ jmethodID j_release_method_;
+ jmethodID j_set_rates_method_;
+ jmethodID j_dequeue_output_buffer_method_;
+ jmethodID j_release_output_buffer_method_;
+ jfieldID j_color_format_field_;
+ jfieldID j_info_index_field_;
+ jfieldID j_info_buffer_field_;
+ jfieldID j_info_is_key_frame_field_;
+ jfieldID j_info_presentation_timestamp_us_field_;
+
+ // State that is valid only between InitEncode() and the next Release().
+ // Touched only on codec_thread_ so no explicit synchronization necessary.
+ int width_; // Frame width in pixels.
+ int height_; // Frame height in pixels.
+ bool inited_;
+ bool use_surface_;
+ uint16_t picture_id_;
+ enum libyuv::FourCC encoder_fourcc_; // Encoder color space format.
+ int last_set_bitrate_kbps_; // Last-requested bitrate in kbps.
+ int last_set_fps_; // Last-requested frame rate.
+ int64_t current_timestamp_us_; // Current frame timestamps in us.
+ int frames_received_; // Number of frames received by encoder.
+ int frames_encoded_; // Number of frames encoded by encoder.
+ int frames_dropped_media_encoder_; // Number of frames dropped by encoder.
+ // Number of dropped frames caused by full queue.
+ int consecutive_full_queue_frame_drops_;
+ int64_t stat_start_time_ms_; // Start time for statistics.
+ int current_frames_; // Number of frames in the current statistics interval.
+ int current_bytes_; // Encoded bytes in the current statistics interval.
+ int current_acc_qp_; // Accumulated QP in the current statistics interval.
+  int current_encoding_time_ms_; // Overall encoding time in the current
+                                 // statistics interval.
+ int64_t last_input_timestamp_ms_; // Timestamp of last received yuv frame.
+ int64_t last_output_timestamp_ms_; // Timestamp of last encoded frame.
+
+ struct InputFrameInfo {
+ InputFrameInfo(int64_t encode_start_time,
+ int32_t frame_timestamp,
+ int64_t frame_render_time_ms,
+ webrtc::VideoRotation rotation)
+ : encode_start_time(encode_start_time),
+ frame_timestamp(frame_timestamp),
+ frame_render_time_ms(frame_render_time_ms),
+ rotation(rotation) {}
+ // Time when video frame is sent to encoder input.
+ const int64_t encode_start_time;
+
+ // Input frame information.
+ const int32_t frame_timestamp;
+ const int64_t frame_render_time_ms;
+ const webrtc::VideoRotation rotation;
+ };
+ std::list<InputFrameInfo> input_frame_infos_;
+ int32_t output_timestamp_; // Last output frame timestamp from
+ // |input_frame_infos_|.
+ int64_t output_render_time_ms_; // Last output frame render time from
+ // |input_frame_infos_|.
+ webrtc::VideoRotation output_rotation_; // Last output frame rotation from
+ // |input_frame_infos_|.
+ // Frame size in bytes fed to MediaCodec.
+ int yuv_size_;
+  // True only in the interval between a callback_->Encoded() call returning
+  // a positive value and the next Encode() call being ignored.
+ bool drop_next_input_frame_;
+ // Global references; must be deleted in Release().
+ std::vector<jobject> input_buffers_;
+ QualityScaler quality_scaler_;
+ // Dynamic resolution change, off by default.
+ bool scale_;
+
+ // H264 bitstream parser, used to extract QP from encoded bitstreams.
+ webrtc::H264BitstreamParser h264_bitstream_parser_;
+
+ // VP9 variables to populate codec specific structure.
+ webrtc::GofInfoVP9 gof_; // Contains each frame's temporal information for
+ // non-flexible VP9 mode.
+ uint8_t tl0_pic_idx_;
+ size_t gof_idx_;
+
+ // EGL context - owned by factory, should not be allocated/destroyed
+ // by MediaCodecVideoEncoder.
+ jobject egl_context_;
+
+ // Temporary fix for VP8.
+  // Sends a key frame if frames are widely spaced apart in time (possibly
+ // corresponding to a large image change).
+ int64_t last_frame_received_ms_;
+ int frames_received_since_last_key_;
+ webrtc::VideoCodecMode codec_mode_;
+};
+
+MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
+ // Call Release() to ensure no more callbacks to us after we are deleted.
+ Release();
+}
+
+MediaCodecVideoEncoder::MediaCodecVideoEncoder(
+ JNIEnv* jni, VideoCodecType codecType, jobject egl_context) :
+ codecType_(codecType),
+ callback_(NULL),
+ codec_thread_(new Thread()),
+ j_media_codec_video_encoder_class_(
+ jni,
+ FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
+ j_media_codec_video_encoder_(
+ jni,
+ jni->NewObject(*j_media_codec_video_encoder_class_,
+ GetMethodID(jni,
+ *j_media_codec_video_encoder_class_,
+ "<init>",
+ "()V"))),
+ inited_(false),
+ use_surface_(false),
+ picture_id_(0),
+ egl_context_(egl_context) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ // It would be nice to avoid spinning up a new thread per MediaCodec, and
+ // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
+ // 2732 means that deadlocks abound. This class synchronously trampolines
+ // to |codec_thread_|, so if anything else can be coming to _us_ from
+ // |codec_thread_|, or from any thread holding the |_sendCritSect| described
+ // in the bug, we have a problem. For now work around that with a dedicated
+ // thread.
+ codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
+ RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
+ codec_thread_checker_.DetachFromThread();
+ jclass j_output_buffer_info_class =
+ FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+ j_init_encode_method_ = GetMethodID(
+ jni,
+ *j_media_codec_video_encoder_class_,
+ "initEncode",
+ "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
+ "IIIILorg/webrtc/EglBase14$Context;)Z");
+ j_get_input_buffers_method_ = GetMethodID(
+ jni,
+ *j_media_codec_video_encoder_class_,
+ "getInputBuffers",
+ "()[Ljava/nio/ByteBuffer;");
+ j_dequeue_input_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
+ j_encode_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
+ j_encode_texture_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "encodeTexture",
+ "(ZI[FJ)Z");
+ j_release_method_ =
+ GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
+ j_set_rates_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
+ j_dequeue_output_buffer_method_ = GetMethodID(
+ jni,
+ *j_media_codec_video_encoder_class_,
+ "dequeueOutputBuffer",
+ "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
+ j_release_output_buffer_method_ = GetMethodID(
+ jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");
+
+ j_color_format_field_ =
+ GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
+ j_info_index_field_ =
+ GetFieldID(jni, j_output_buffer_info_class, "index", "I");
+ j_info_buffer_field_ = GetFieldID(
+ jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
+ j_info_is_key_frame_field_ =
+ GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
+ j_info_presentation_timestamp_us_field_ = GetFieldID(
+ jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
+ CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
+ srand(time(NULL));
+ AllowBlockingCalls();
+}
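+// A minimal sketch of the trampoline pattern described in the constructor
+// comment above (Foo/FooOnCodecThread are hypothetical names; the real
+// methods below all have this shape): every public entry point hops to
+// |codec_thread_| and blocks on the result.
+//
+//   int32_t MediaCodecVideoEncoder::Foo() {
+//     return codec_thread_->Invoke<int32_t>(
+//         RTC_FROM_HERE,
+//         Bind(&MediaCodecVideoEncoder::FooOnCodecThread, this));
+//   }
+//
+// AllowBlockingCalls() in the constructor opts in to these blocking
+// Invoke()s.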
+
+int32_t MediaCodecVideoEncoder::InitEncode(
+ const webrtc::VideoCodec* codec_settings,
+ int32_t /* number_of_cores */,
+ size_t /* max_payload_size */) {
+ if (codec_settings == NULL) {
+ ALOGE << "NULL VideoCodec instance";
+ return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+ }
+ // Factory should guard against other codecs being used with us.
+ RTC_CHECK(codec_settings->codecType == codecType_)
+ << "Unsupported codec " << codec_settings->codecType << " for "
+ << codecType_;
+
+ codec_mode_ = codec_settings->mode;
+ int init_width = codec_settings->width;
+ int init_height = codec_settings->height;
+ // Scaling is disabled for VP9, but optionally enabled for VP8.
+ // TODO(pbos): Extract automaticResizeOn out of VP8 settings.
+ scale_ = false;
+ if (codecType_ == kVideoCodecVP8) {
+ scale_ = codec_settings->codecSpecific.VP8.automaticResizeOn;
+ } else if (codecType_ != kVideoCodecVP9) {
+ scale_ = true;
+ }
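+  // Net effect: VP8 follows its automaticResizeOn setting, VP9 is always
+  // off, and any other codec type (in practice H.264) defaults to on.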
+
+ ALOGD << "InitEncode request: " << init_width << " x " << init_height;
+ ALOGD << "Encoder automatic resize " << (scale_ ? "enabled" : "disabled");
+
+ if (scale_) {
+ if (codecType_ == kVideoCodecVP8) {
+ quality_scaler_.Init(
+ QualityScaler::kLowVp8QpThreshold, QualityScaler::kBadVp8QpThreshold,
+ codec_settings->startBitrate, codec_settings->width,
+ codec_settings->height, codec_settings->maxFramerate);
+ } else if (codecType_ == kVideoCodecH264) {
+ quality_scaler_.Init(QualityScaler::kLowH264QpThreshold,
+ QualityScaler::kBadH264QpThreshold,
+ codec_settings->startBitrate, codec_settings->width,
+ codec_settings->height,
+ codec_settings->maxFramerate);
+ } else {
+      // When adding support for additional hardware codecs, also configure
+      // their QP thresholds for scaling.
+ RTC_NOTREACHED() << "Unsupported codec without configured QP thresholds.";
+ scale_ = false;
+ }
+ QualityScaler::Resolution res = quality_scaler_.GetScaledResolution();
+ init_width = res.width;
+ init_height = res.height;
+ ALOGD << "Scaled resolution: " << init_width << " x " << init_height;
+ }
+
+ return codec_thread_->Invoke<int32_t>(
+ RTC_FROM_HERE,
+ Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, init_width,
+ init_height, codec_settings->startBitrate,
+ codec_settings->maxFramerate,
+ codec_settings->expect_encode_from_texture));
+}
+
+int32_t MediaCodecVideoEncoder::Encode(
+ const webrtc::VideoFrame& frame,
+ const webrtc::CodecSpecificInfo* /* codec_specific_info */,
+ const std::vector<webrtc::FrameType>* frame_types) {
+ return codec_thread_->Invoke<int32_t>(
+ RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this,
+ frame, frame_types, rtc::TimeMillis()));
+}
+
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) {
+ return codec_thread_->Invoke<int32_t>(
+ RTC_FROM_HERE,
+ Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
+ this, callback));
+}
+
+int32_t MediaCodecVideoEncoder::Release() {
+ ALOGD << "EncoderRelease request";
+ return codec_thread_->Invoke<int32_t>(
+ RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
+ int64_t /* rtt */) {
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
+ uint32_t frame_rate) {
+ return codec_thread_->Invoke<int32_t>(
+ RTC_FROM_HERE, Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, this,
+ new_bit_rate, frame_rate));
+}
+
+void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ // We only ever send one message to |this| directly (not through a Bind()'d
+ // functor), so expect no ID/data.
+ RTC_CHECK(!msg->message_id) << "Unexpected message!";
+ RTC_CHECK(!msg->pdata) << "Unexpected message!";
+ if (!inited_) {
+ return;
+ }
+
+ // It would be nice to recover from a failure here if one happened, but it's
+ // unclear how to signal such a failure to the app, so instead we stay silent
+ // about it and let the next app-called API method reveal the borkedness.
+ DeliverPendingOutputs(jni);
+
+ // If there aren't more frames to deliver, we can start polling at lower rate.
+ if (input_frame_infos_.empty()) {
+ codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollNoFramesMs, this);
+ } else {
+ codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
+ }
+
+ // Call log statistics here so it's called even if no frames are being
+ // delivered.
+ LogStatistics(false);
+}
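+// The poll posted above is the only message |this| ever receives.
+// EncodeOnCodecThread() re-arms it with Clear() + PostDelayed(), so at most
+// one poll message is pending at any time.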
+
+bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ ALOGE << "ResetOnCodecThread";
+ if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
+ InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
+ WEBRTC_VIDEO_CODEC_OK) {
+ // TODO(fischman): wouldn't it be nice if there was a way to gracefully
+ // degrade to a SW encoder at this point? There isn't one AFAICT :(
+ // https://code.google.com/p/webrtc/issues/detail?id=2920
+ return false;
+ }
+ return true;
+}
+
+int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
+ int width, int height, int kbps, int fps, bool use_surface) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set.";
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ ALOGD << "InitEncodeOnCodecThread Type: " << (int)codecType_ << ", " <<
+ width << " x " << height << ". Bitrate: " << kbps <<
+ " kbps. Fps: " << fps;
+ if (kbps == 0) {
+ kbps = last_set_bitrate_kbps_;
+ }
+ if (fps == 0) {
+ fps = MAX_VIDEO_FPS;
+ }
+
+ width_ = width;
+ height_ = height;
+ last_set_bitrate_kbps_ = kbps;
+ last_set_fps_ = (fps < MAX_VIDEO_FPS) ? fps : MAX_VIDEO_FPS;
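+  // I420 layout: a full-resolution Y plane plus quarter-resolution U and V
+  // planes, so w * h + 2 * (w / 2) * (h / 2) = w * h * 3 / 2 bytes
+  // (e.g. 640x480 -> 460800 bytes).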
+ yuv_size_ = width_ * height_ * 3 / 2;
+ frames_received_ = 0;
+ frames_encoded_ = 0;
+ frames_dropped_media_encoder_ = 0;
+ consecutive_full_queue_frame_drops_ = 0;
+ current_timestamp_us_ = 0;
+ stat_start_time_ms_ = rtc::TimeMillis();
+ current_frames_ = 0;
+ current_bytes_ = 0;
+ current_acc_qp_ = 0;
+ current_encoding_time_ms_ = 0;
+ last_input_timestamp_ms_ = -1;
+ last_output_timestamp_ms_ = -1;
+ output_timestamp_ = 0;
+ output_render_time_ms_ = 0;
+ input_frame_infos_.clear();
+ drop_next_input_frame_ = false;
+ use_surface_ = use_surface;
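+  // VP8/VP9 RTP payload headers carry a 15-bit picture ID, so mask the
+  // random starting value into [0, 0x7FFF]; increments wrap with the same
+  // mask in DeliverPendingOutputs().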
+ picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+ gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
+ tl0_pic_idx_ = static_cast<uint8_t>(rand());
+ gof_idx_ = 0;
+ last_frame_received_ms_ = -1;
+ frames_received_since_last_key_ = kMinKeyFrameInterval;
+
+ // We enforce no extra stride/padding in the format creation step.
+ jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
+ jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
+ const bool encode_status = jni->CallBooleanMethod(
+ *j_media_codec_video_encoder_, j_init_encode_method_,
+ j_video_codec_enum, width, height, kbps, fps,
+ (use_surface ? egl_context_ : nullptr));
+ if (!encode_status) {
+ ALOGE << "Failed to configure encoder.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ CHECK_EXCEPTION(jni);
+
+ if (!use_surface) {
+ jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
+ jni->CallObjectMethod(*j_media_codec_video_encoder_,
+ j_get_input_buffers_method_));
+ CHECK_EXCEPTION(jni);
+ if (IsNull(jni, input_buffers)) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ switch (GetIntField(jni, *j_media_codec_video_encoder_,
+ j_color_format_field_)) {
+ case COLOR_FormatYUV420Planar:
+ encoder_fourcc_ = libyuv::FOURCC_YU12;
+ break;
+ case COLOR_FormatYUV420SemiPlanar:
+ case COLOR_QCOM_FormatYUV420SemiPlanar:
+ case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+ encoder_fourcc_ = libyuv::FOURCC_NV12;
+ break;
+ default:
+ LOG(LS_ERROR) << "Wrong color format.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+ RTC_CHECK(input_buffers_.empty())
+ << "Unexpected double InitEncode without Release";
+ input_buffers_.resize(num_input_buffers);
+ for (size_t i = 0; i < num_input_buffers; ++i) {
+ input_buffers_[i] =
+ jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+ int64_t yuv_buffer_capacity =
+ jni->GetDirectBufferCapacity(input_buffers_[i]);
+ CHECK_EXCEPTION(jni);
+ RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
+ }
+ }
+
+ inited_ = true;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
+ const webrtc::VideoFrame& frame,
+ const std::vector<webrtc::FrameType>* frame_types,
+ const int64_t frame_input_time_ms) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ if (!inited_) {
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+
+ bool send_key_frame = false;
+ if (codec_mode_ == webrtc::kRealtimeVideo) {
+ ++frames_received_since_last_key_;
+ int64_t now_ms = rtc::TimeMillis();
+ if (last_frame_received_ms_ != -1 &&
+ (now_ms - last_frame_received_ms_) > kFrameDiffThresholdMs) {
+      // Limit the key frame request rate, so that very low framerates (where
+      // every frame diff exceeds kFrameDiffThresholdMs) do not request a key
+      // frame for every frame.
+ if (frames_received_since_last_key_ > kMinKeyFrameInterval) {
+ ALOGD << "Send key, frame diff: " << (now_ms - last_frame_received_ms_);
+ send_key_frame = true;
+ }
+ frames_received_since_last_key_ = 0;
+ }
+ last_frame_received_ms_ = now_ms;
+ }
+
+ frames_received_++;
+ if (!DeliverPendingOutputs(jni)) {
+ if (!ResetCodecOnCodecThread())
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ if (frames_encoded_ < kMaxEncodedLogFrames) {
+ ALOGD << "Encoder frame in # " << (frames_received_ - 1)
+ << ". TS: " << (int)(current_timestamp_us_ / 1000)
+ << ". Q: " << input_frame_infos_.size() << ". Fps: " << last_set_fps_
+ << ". Kbps: " << last_set_bitrate_kbps_;
+ }
+
+ if (drop_next_input_frame_) {
+ ALOGW << "Encoder drop frame - failed callback.";
+ drop_next_input_frame_ = false;
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+ frames_dropped_media_encoder_++;
+ OnDroppedFrameOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ RTC_CHECK(frame_types->size() == 1) << "Unexpected stream count";
+
+ // Check if we accumulated too many frames in encoder input buffers and drop
+ // frame if so.
+ if (input_frame_infos_.size() > MAX_ENCODER_Q_SIZE) {
+ ALOGD << "Already " << input_frame_infos_.size()
+ << " frames in the queue, dropping"
+ << ". TS: " << (int)(current_timestamp_us_ / 1000)
+ << ". Fps: " << last_set_fps_
+ << ". Consecutive drops: " << consecutive_full_queue_frame_drops_;
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+ consecutive_full_queue_frame_drops_++;
+ if (consecutive_full_queue_frame_drops_ >=
+ ENCODER_STALL_FRAMEDROP_THRESHOLD) {
+ ALOGE << "Encoder got stuck. Reset.";
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ frames_dropped_media_encoder_++;
+ OnDroppedFrameOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+ consecutive_full_queue_frame_drops_ = 0;
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_buffer(
+ frame.video_frame_buffer());
+ if (scale_) {
+ // Check framerate before spatial resolution change.
+ quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
+ const webrtc::QualityScaler::Resolution scaled_resolution =
+ quality_scaler_.GetScaledResolution();
+ if (scaled_resolution.width != frame.width() ||
+ scaled_resolution.height != frame.height()) {
+ if (input_buffer->native_handle() != nullptr) {
+ input_buffer = static_cast<AndroidTextureBuffer*>(input_buffer.get())
+ ->CropScaleAndRotate(frame.width(), frame.height(),
+ 0, 0,
+ scaled_resolution.width,
+ scaled_resolution.height,
+ webrtc::kVideoRotation_0);
+ } else {
+ input_buffer = quality_scaler_.GetScaledBuffer(input_buffer);
+ }
+ }
+ }
+
+ VideoFrame input_frame(input_buffer, frame.timestamp(),
+ frame.render_time_ms(), frame.rotation());
+
+ if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
+ ALOGE << "Failed to reconfigure encoder.";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ const bool key_frame =
+ frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame;
+ bool encode_status = true;
+ if (!input_frame.video_frame_buffer()->native_handle()) {
+ int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
+ j_dequeue_input_buffer_method_);
+ CHECK_EXCEPTION(jni);
+ if (j_input_buffer_index == -1) {
+ // Video codec falls behind - no input buffer available.
+ ALOGW << "Encoder drop frame - no input buffers available";
+ if (frames_received_ > 1) {
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+ frames_dropped_media_encoder_++;
+ OnDroppedFrameOnCodecThread();
+ } else {
+        // Input buffers are not ready right after codec initialization; the
+        // HW is still allocating them. This is expected and should not be
+        // reported as a dropped frame.
+ frames_received_ = 0;
+ }
+ return WEBRTC_VIDEO_CODEC_OK; // TODO(fischman): see webrtc bug 2887.
+ } else if (j_input_buffer_index == -2) {
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
+ j_input_buffer_index);
+ } else {
+ encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
+ }
+
+ if (!encode_status) {
+ ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp();
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ // Save input image timestamps for later output.
+ input_frame_infos_.emplace_back(
+ frame_input_time_ms, input_frame.timestamp(),
+ input_frame.render_time_ms(), input_frame.rotation());
+
+ last_input_timestamp_ms_ =
+ current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
+
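+  // Advance the synthetic capture clock by one frame interval, e.g. at
+  // last_set_fps_ = 30 this adds 1000000 / 30 = 33333 us per frame.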
+ current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+
+ codec_thread_->Clear(this);
+ codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
+
+ if (!DeliverPendingOutputs(jni)) {
+ ALOGE << "Failed deliver pending outputs.";
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
+ const webrtc::VideoFrame& frame) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+
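+  // Two conditions force a full re-init below: the capturer switches between
+  // byte buffers and textures, or the frame resolution changes (e.g. on
+  // device rotation). Either way the codec is released and re-created.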
+ const bool is_texture_frame =
+ frame.video_frame_buffer()->native_handle() != nullptr;
+ const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
+ const bool reconfigure_due_to_size =
+ frame.width() != width_ || frame.height() != height_;
+
+ if (reconfigure_due_to_format) {
+ ALOGD << "Reconfigure encoder due to format change. "
+ << (use_surface_ ?
+ "Reconfiguring to encode from byte buffer." :
+ "Reconfiguring to encode from texture.");
+ LogStatistics(true);
+ }
+ if (reconfigure_due_to_size) {
+ ALOGW << "Reconfigure encoder due to frame resolution change from "
+ << width_ << " x " << height_ << " to " << frame.width() << " x "
+ << frame.height();
+ LogStatistics(true);
+ width_ = frame.width();
+ height_ = frame.height();
+ }
+
+ if (!reconfigure_due_to_format && !reconfigure_due_to_size)
+ return true;
+
+ ReleaseOnCodecThread();
+
+  return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) ==
+ WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(!use_surface_);
+
+ jobject j_input_buffer = input_buffers_[input_buffer_index];
+ uint8_t* yuv_buffer =
+ reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
+ CHECK_EXCEPTION(jni);
+ RTC_CHECK(yuv_buffer) << "Indirect buffer??";
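+  // libyuv packs the frame tightly (destination stride == width_) in the
+  // FourCC chosen during InitEncodeOnCodecThread() (YU12 or NV12).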
+ RTC_CHECK(!libyuv::ConvertFromI420(
+ frame.video_frame_buffer()->DataY(),
+ frame.video_frame_buffer()->StrideY(),
+ frame.video_frame_buffer()->DataU(),
+ frame.video_frame_buffer()->StrideU(),
+ frame.video_frame_buffer()->DataV(),
+ frame.video_frame_buffer()->StrideV(),
+ yuv_buffer, width_, width_, height_, encoder_fourcc_))
+ << "ConvertFromI420 failed";
+
+ bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_encode_buffer_method_,
+ key_frame,
+ input_buffer_index,
+ yuv_size_,
+ current_timestamp_us_);
+ CHECK_EXCEPTION(jni);
+ return encode_status;
+}
+
+bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
+ bool key_frame, const webrtc::VideoFrame& frame) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ RTC_CHECK(use_surface_);
+ NativeHandleImpl* handle = static_cast<NativeHandleImpl*>(
+ frame.video_frame_buffer()->native_handle());
+ jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni);
+ bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_encode_texture_method_,
+ key_frame,
+ handle->oes_texture_id,
+ sampling_matrix,
+ current_timestamp_us_);
+ CHECK_EXCEPTION(jni);
+ return encode_status;
+}
+
+int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
+ webrtc::EncodedImageCallback* callback) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ if (!inited_) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
+ frames_received_ << ". Encoded: " << frames_encoded_ <<
+ ". Dropped: " << frames_dropped_media_encoder_;
+ ScopedLocalRefFrame local_ref_frame(jni);
+ for (size_t i = 0; i < input_buffers_.size(); ++i)
+ jni->DeleteGlobalRef(input_buffers_[i]);
+ input_buffers_.clear();
+ jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
+ CHECK_EXCEPTION(jni);
+ rtc::MessageQueueManager::Clear(this);
+ inited_ = false;
+ use_surface_ = false;
+ ALOGD << "EncoderReleaseOnCodecThread done.";
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
+ uint32_t frame_rate) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ frame_rate = (frame_rate < MAX_ALLOWED_VIDEO_FPS) ?
+ frame_rate : MAX_ALLOWED_VIDEO_FPS;
+ if (last_set_bitrate_kbps_ == new_bit_rate &&
+ last_set_fps_ == frame_rate) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+ if (scale_) {
+ quality_scaler_.ReportFramerate(frame_rate);
+ }
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ if (new_bit_rate > 0) {
+ last_set_bitrate_kbps_ = new_bit_rate;
+ }
+ if (frame_rate > 0) {
+ last_set_fps_ = frame_rate;
+ }
+ bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_set_rates_method_,
+ last_set_bitrate_kbps_,
+ last_set_fps_);
+ CHECK_EXCEPTION(jni);
+ if (!ret) {
+ ResetCodecOnCodecThread();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
+}
+
+jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
+}
+
+bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
+}
+
+jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
+ JNIEnv* jni,
+ jobject j_output_buffer_info) {
+ return GetLongField(
+ jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
+}
+
+bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+
+ while (true) {
+ jobject j_output_buffer_info = jni->CallObjectMethod(
+ *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
+ CHECK_EXCEPTION(jni);
+ if (IsNull(jni, j_output_buffer_info)) {
+ break;
+ }
+
+ int output_buffer_index =
+ GetOutputBufferInfoIndex(jni, j_output_buffer_info);
+ if (output_buffer_index == -1) {
+ ResetCodecOnCodecThread();
+ return false;
+ }
+
+    // Get the output buffer and the key frame flag.
+ jobject j_output_buffer =
+ GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
+ bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
+
+    // Get frame timestamps from the queue - for non-config frames only.
+ int64_t encoding_start_time_ms = 0;
+ int64_t frame_encoding_time_ms = 0;
+ last_output_timestamp_ms_ =
+ GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
+ rtc::kNumMicrosecsPerMillisec;
+ if (!input_frame_infos_.empty()) {
+ const InputFrameInfo& frame_info = input_frame_infos_.front();
+ output_timestamp_ = frame_info.frame_timestamp;
+ output_render_time_ms_ = frame_info.frame_render_time_ms;
+ output_rotation_ = frame_info.rotation;
+ encoding_start_time_ms = frame_info.encode_start_time;
+ input_frame_infos_.pop_front();
+ }
+
+ // Extract payload.
+ size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
+ uint8_t* payload = reinterpret_cast<uint8_t*>(
+ jni->GetDirectBufferAddress(j_output_buffer));
+ CHECK_EXCEPTION(jni);
+
+ // Callback - return encoded frame.
+ int32_t callback_status = 0;
+ if (callback_) {
+ std::unique_ptr<webrtc::EncodedImage> image(
+ new webrtc::EncodedImage(payload, payload_size, payload_size));
+ image->_encodedWidth = width_;
+ image->_encodedHeight = height_;
+ image->_timeStamp = output_timestamp_;
+ image->capture_time_ms_ = output_render_time_ms_;
+ image->rotation_ = output_rotation_;
+ image->_frameType =
+ (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
+ image->_completeFrame = true;
+ image->adapt_reason_.quality_resolution_downscales =
+ scale_ ? quality_scaler_.downscale_shift() : -1;
+
+ webrtc::CodecSpecificInfo info;
+ memset(&info, 0, sizeof(info));
+ info.codecType = codecType_;
+ if (codecType_ == kVideoCodecVP8) {
+ info.codecSpecific.VP8.pictureId = picture_id_;
+ info.codecSpecific.VP8.nonReference = false;
+ info.codecSpecific.VP8.simulcastIdx = 0;
+ info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
+ info.codecSpecific.VP8.layerSync = false;
+ info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
+ info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
+ } else if (codecType_ == kVideoCodecVP9) {
+ if (key_frame) {
+ gof_idx_ = 0;
+ }
+ info.codecSpecific.VP9.picture_id = picture_id_;
+        info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
+        info.codecSpecific.VP9.flexible_mode = false;
+        info.codecSpecific.VP9.ss_data_available = key_frame;
+ info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
+ info.codecSpecific.VP9.temporal_idx = webrtc::kNoTemporalIdx;
+ info.codecSpecific.VP9.spatial_idx = webrtc::kNoSpatialIdx;
+ info.codecSpecific.VP9.temporal_up_switch = true;
+ info.codecSpecific.VP9.inter_layer_predicted = false;
+ info.codecSpecific.VP9.gof_idx =
+ static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
+ info.codecSpecific.VP9.num_spatial_layers = 1;
+ info.codecSpecific.VP9.spatial_layer_resolution_present = false;
+ if (info.codecSpecific.VP9.ss_data_available) {
+ info.codecSpecific.VP9.spatial_layer_resolution_present = true;
+ info.codecSpecific.VP9.width[0] = width_;
+ info.codecSpecific.VP9.height[0] = height_;
+ info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
+ }
+ }
+ picture_id_ = (picture_id_ + 1) & 0x7FFF;
+
+ // Generate a header describing a single fragment.
+ webrtc::RTPFragmentationHeader header;
+ memset(&header, 0, sizeof(header));
+ if (codecType_ == kVideoCodecVP8 || codecType_ == kVideoCodecVP9) {
+ header.VerifyAndAllocateFragmentationHeader(1);
+ header.fragmentationOffset[0] = 0;
+ header.fragmentationLength[0] = image->_length;
+ header.fragmentationPlType[0] = 0;
+ header.fragmentationTimeDiff[0] = 0;
+ if (codecType_ == kVideoCodecVP8 && scale_) {
+ int qp;
+ if (webrtc::vp8::GetQp(payload, payload_size, &qp)) {
+ current_acc_qp_ += qp;
+ quality_scaler_.ReportQP(qp);
+ image->qp_ = qp;
+ }
+ }
+ } else if (codecType_ == kVideoCodecH264) {
+ if (scale_) {
+ h264_bitstream_parser_.ParseBitstream(payload, payload_size);
+ int qp;
+ if (h264_bitstream_parser_.GetLastSliceQp(&qp)) {
+ current_acc_qp_ += qp;
+ quality_scaler_.ReportQP(qp);
+ }
+ }
+ // For H.264 search for start codes.
+ int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
+ int32_t scPositionsLength = 0;
+ int32_t scPosition = 0;
+ while (scPositionsLength < MAX_NALUS_PERFRAME) {
+ int32_t naluPosition = NextNaluPosition(
+ payload + scPosition, payload_size - scPosition);
+ if (naluPosition < 0) {
+ break;
+ }
+ scPosition += naluPosition;
+ scPositions[scPositionsLength++] = scPosition;
+ scPosition += H264_SC_LENGTH;
+ }
+ if (scPositionsLength == 0) {
+ ALOGE << "Start code is not found!";
+ ALOGE << "Data:" << image->_buffer[0] << " " << image->_buffer[1]
+ << " " << image->_buffer[2] << " " << image->_buffer[3]
+ << " " << image->_buffer[4] << " " << image->_buffer[5];
+ ResetCodecOnCodecThread();
+ return false;
+ }
+ scPositions[scPositionsLength] = payload_size;
+ header.VerifyAndAllocateFragmentationHeader(scPositionsLength);
+      for (int32_t i = 0; i < scPositionsLength; i++) {
+ header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH;
+ header.fragmentationLength[i] =
+ scPositions[i + 1] - header.fragmentationOffset[i];
+ header.fragmentationPlType[i] = 0;
+ header.fragmentationTimeDiff[i] = 0;
+ }
+ }
+
+ callback_status = callback_->Encoded(*image, &info, &header);
+ }
+
+ // Return output buffer back to the encoder.
+ bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+ j_release_output_buffer_method_,
+ output_buffer_index);
+ CHECK_EXCEPTION(jni);
+ if (!success) {
+ ResetCodecOnCodecThread();
+ return false;
+ }
+
+ // Print per frame statistics.
+ if (encoding_start_time_ms > 0) {
+ frame_encoding_time_ms = rtc::TimeMillis() - encoding_start_time_ms;
+ }
+ if (frames_encoded_ < kMaxEncodedLogFrames) {
+ int current_latency =
+ (int)(last_input_timestamp_ms_ - last_output_timestamp_ms_);
+ ALOGD << "Encoder frame out # " << frames_encoded_ <<
+ ". Key: " << key_frame <<
+ ". Size: " << payload_size <<
+ ". TS: " << (int)last_output_timestamp_ms_ <<
+ ". Latency: " << current_latency <<
+ ". EncTime: " << frame_encoding_time_ms;
+ }
+
+ // Calculate and print encoding statistics - every 3 seconds.
+ frames_encoded_++;
+ current_frames_++;
+ current_bytes_ += payload_size;
+ current_encoding_time_ms_ += frame_encoding_time_ms;
+ LogStatistics(false);
+
+ if (callback_status > 0) {
+ drop_next_input_frame_ = true;
+ // Theoretically could handle callback_status<0 here, but unclear what
+ // that would mean for us.
+ }
+ }
+ return true;
+}
+
+void MediaCodecVideoEncoder::LogStatistics(bool force_log) {
+ int statistic_time_ms = rtc::TimeMillis() - stat_start_time_ms_;
+ if ((statistic_time_ms >= kMediaCodecStatisticsIntervalMs || force_log)
+ && statistic_time_ms > 0) {
+ // Prevent division by zero.
+ int current_frames_divider = current_frames_ != 0 ? current_frames_ : 1;
+
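+    // Worked example with hypothetical numbers: 90 frames and 375000 bytes
+    // over a 3000 ms interval give
+    //   current_bitrate = 375000 * 8 / 3000 = 1000 kbps
+    //   current_fps = (90 * 1000 + 1500) / 3000 = 30 fps
+    // (bytes * 8 / ms is bits/ms, i.e. kbits/s; adding statistic_time_ms / 2
+    // before dividing rounds to the nearest integer).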
+ int current_bitrate = current_bytes_ * 8 / statistic_time_ms;
+ int current_fps =
+ (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms;
+ ALOGD << "Encoded frames: " << frames_encoded_ <<
+ ". Bitrate: " << current_bitrate <<
+ ", target: " << last_set_bitrate_kbps_ << " kbps" <<
+ ", fps: " << current_fps <<
+ ", encTime: " << (current_encoding_time_ms_ / current_frames_divider) <<
+ ". QP: " << (current_acc_qp_ / current_frames_divider) <<
+ " for last " << statistic_time_ms << " ms.";
+ stat_start_time_ms_ = rtc::TimeMillis();
+ current_frames_ = 0;
+ current_bytes_ = 0;
+ current_acc_qp_ = 0;
+ current_encoding_time_ms_ = 0;
+ }
+}
+
+int32_t MediaCodecVideoEncoder::NextNaluPosition(
+ uint8_t *buffer, size_t buffer_size) {
+ if (buffer_size < H264_SC_LENGTH) {
+ return -1;
+ }
+ uint8_t *head = buffer;
+ // Set end buffer pointer to 4 bytes before actual buffer end so we can
+ // access head[1], head[2] and head[3] in a loop without buffer overrun.
+ uint8_t *end = buffer + buffer_size - H264_SC_LENGTH;
+
+ while (head < end) {
+ if (head[0]) {
+ head++;
+ continue;
+ }
+ if (head[1]) { // got 00xx
+ head += 2;
+ continue;
+ }
+ if (head[2]) { // got 0000xx
+ head += 3;
+ continue;
+ }
+ if (head[3] != 0x01) { // got 000000xx
+ head++; // xx != 1, continue searching.
+ continue;
+ }
+ return (int32_t)(head - buffer);
+ }
+ return -1;
+}
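+// Example: for a payload beginning {0x00, 0x00, 0x00, 0x01, 0x67, ...},
+// NextNaluPosition() returns 0. DeliverPendingOutputs() then advances past
+// the H264_SC_LENGTH start code and searches again, collecting one
+// fragmentation entry per NALU.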
+
+void MediaCodecVideoEncoder::OnDroppedFrame() {
+ // Methods running on the codec thread should call OnDroppedFrameOnCodecThread
+ // directly.
+ RTC_DCHECK(!codec_thread_checker_.CalledOnValidThread());
+ codec_thread_->Invoke<void>(
+ RTC_FROM_HERE,
+ Bind(&MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread, this));
+}
+
+void MediaCodecVideoEncoder::OnDroppedFrameOnCodecThread() {
+ RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+ // Report dropped frame to quality_scaler_.
+ if (scale_)
+ quality_scaler_.ReportDroppedFrame();
+}
+
+const char* MediaCodecVideoEncoder::ImplementationName() const {
+ return "MediaCodec";
+}
+
+MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
+ : egl_context_(nullptr) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+ supported_codecs_.clear();
+
+ bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
+ j_encoder_class,
+ GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
+ CHECK_EXCEPTION(jni);
+ if (is_vp8_hw_supported) {
+ ALOGD << "VP8 HW Encoder supported.";
+ supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
+ MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+ }
+
+ bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
+ j_encoder_class,
+ GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z"));
+ CHECK_EXCEPTION(jni);
+ if (is_vp9_hw_supported) {
+ ALOGD << "VP9 HW Encoder supported.";
+ supported_codecs_.push_back(VideoCodec(kVideoCodecVP9, "VP9",
+ MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+ }
+
+ bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+ j_encoder_class,
+ GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
+ CHECK_EXCEPTION(jni);
+ if (is_h264_hw_supported) {
+ ALOGD << "H.264 HW Encoder supported.";
+ supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
+ MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+ }
+}
+
+MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {
+ ALOGD << "MediaCodecVideoEncoderFactory dtor";
+ if (egl_context_) {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ jni->DeleteGlobalRef(egl_context_);
+ }
+}
+
+void MediaCodecVideoEncoderFactory::SetEGLContext(
+ JNIEnv* jni, jobject egl_context) {
+ ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
+ if (egl_context_) {
+ jni->DeleteGlobalRef(egl_context_);
+ egl_context_ = nullptr;
+ }
+ egl_context_ = jni->NewGlobalRef(egl_context);
+ if (CheckException(jni)) {
+ ALOGE << "error calling NewGlobalRef for EGL Context.";
+ }
+}
+
+webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
+ VideoCodecType type) {
+ if (supported_codecs_.empty()) {
+ ALOGW << "No HW video encoder for type " << (int)type;
+ return nullptr;
+ }
+ for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
+ it != supported_codecs_.end(); ++it) {
+ if (it->type == type) {
+ ALOGD << "Create HW video encoder for type " << (int)type <<
+ " (" << it->name << ").";
+ return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type,
+ egl_context_);
+ }
+ }
+ ALOGW << "Can not find HW video encoder for type " << (int)type;
+ return nullptr;
+}
+
+const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
+MediaCodecVideoEncoderFactory::codecs() const {
+ return supported_codecs_;
+}
+
+void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
+ webrtc::VideoEncoder* encoder) {
+ ALOGD << "Destroy video encoder.";
+ delete encoder;
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/android/jni/androidmediaencoder_jni.h b/webrtc/api/android/jni/androidmediaencoder_jni.h
new file mode 100644
index 0000000..460eac3
--- /dev/null
+++ b/webrtc/api/android/jni/androidmediaencoder_jni.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
+
+#include <vector>
+
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/media/engine/webrtcvideoencoderfactory.h"
+
+namespace webrtc_jni {
+
+// Implementation of Android MediaCodec based encoder factory.
+class MediaCodecVideoEncoderFactory
+ : public cricket::WebRtcVideoEncoderFactory {
+ public:
+ MediaCodecVideoEncoderFactory();
+ virtual ~MediaCodecVideoEncoderFactory();
+
+ void SetEGLContext(JNIEnv* jni, jobject egl_context);
+
+ // WebRtcVideoEncoderFactory implementation.
+ webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
+ override;
+ const std::vector<VideoCodec>& codecs() const override;
+ void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override;
+
+ private:
+ jobject egl_context_;
+
+ // Empty if platform support is lacking, const after ctor returns.
+ std::vector<VideoCodec> supported_codecs_;
+};
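+// Typical use (sketch): construct the factory, optionally call
+// SetEGLContext() to enable texture input, and hand it to the
+// PeerConnectionFactory, which calls CreateVideoEncoder() /
+// DestroyVideoEncoder() as send streams come and go.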
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
diff --git a/webrtc/api/android/jni/androidmetrics_jni.cc b/webrtc/api/android/jni/androidmetrics_jni.cc
new file mode 100644
index 0000000..6e658c8
--- /dev/null
+++ b/webrtc/api/android/jni/androidmetrics_jni.cc
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <map>
+#include <memory>
+
+#include "webrtc/api/android/jni/classreferenceholder.h"
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/api/android/jni/native_handle_impl.h"
+#include "webrtc/system_wrappers/include/metrics_default.h"
+
+// Enables collection of native histograms.
+namespace webrtc_jni {
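+// JOW(ret, ClassName_methodName) (see jni_helpers.h) declares the exported
+// JNI entry point for a native method on an org.webrtc Java class; the two
+// below back Metrics.nativeEnable() and Metrics.nativeGetAndReset().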
+JOW(void, Metrics_nativeEnable)(JNIEnv* jni, jclass) {
+ webrtc::metrics::Enable();
+}
+
+// Gets and clears native histograms.
+JOW(jobject, Metrics_nativeGetAndReset)(JNIEnv* jni, jclass) {
+ jclass j_metrics_class = FindClass(jni, "org/webrtc/Metrics");
+ jmethodID j_add =
+ GetMethodID(jni, j_metrics_class, "add",
+ "(Ljava/lang/String;Lorg/webrtc/Metrics$HistogramInfo;)V");
+ jclass j_info_class = FindClass(jni, "org/webrtc/Metrics$HistogramInfo");
+ jmethodID j_add_sample = GetMethodID(jni, j_info_class, "addSample", "(II)V");
+
+ // Create |Metrics|.
+ jobject j_metrics = jni->NewObject(
+ j_metrics_class, GetMethodID(jni, j_metrics_class, "<init>", "()V"));
+
+ std::map<std::string, std::unique_ptr<webrtc::metrics::SampleInfo>>
+ histograms;
+ webrtc::metrics::GetAndReset(&histograms);
+ for (const auto& kv : histograms) {
+ // Create and add samples to |HistogramInfo|.
+ jobject j_info = jni->NewObject(
+ j_info_class, GetMethodID(jni, j_info_class, "<init>", "(III)V"),
+ kv.second->min, kv.second->max,
+ static_cast<int>(kv.second->bucket_count));
+ for (const auto& sample : kv.second->samples) {
+ jni->CallVoidMethod(j_info, j_add_sample, sample.first, sample.second);
+ }
+ // Add |HistogramInfo| to |Metrics|.
+ jstring j_name = jni->NewStringUTF(kv.first.c_str());
+ jni->CallVoidMethod(j_metrics, j_add, j_name, j_info);
+ jni->DeleteLocalRef(j_name);
+ jni->DeleteLocalRef(j_info);
+ }
+ CHECK_EXCEPTION(jni);
+ return j_metrics;
+}
+} // namespace webrtc_jni
diff --git a/webrtc/api/android/jni/androidnetworkmonitor_jni.cc b/webrtc/api/android/jni/androidnetworkmonitor_jni.cc
new file mode 100644
index 0000000..403badc
--- /dev/null
+++ b/webrtc/api/android/jni/androidnetworkmonitor_jni.cc
@@ -0,0 +1,379 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/api/android/jni/androidnetworkmonitor_jni.h"
+
+#include <dlfcn.h>
+
+#include "webrtc/api/android/jni/classreferenceholder.h"
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/common.h"
+#include "webrtc/base/ipaddress.h"
+
+namespace webrtc_jni {
+
+jobject AndroidNetworkMonitor::application_context_ = nullptr;
+
+static NetworkType GetNetworkTypeFromJava(JNIEnv* jni, jobject j_network_type) {
+ std::string enum_name =
+ GetJavaEnumName(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType",
+ j_network_type);
+ if (enum_name == "CONNECTION_UNKNOWN") {
+ return NetworkType::NETWORK_UNKNOWN;
+ }
+ if (enum_name == "CONNECTION_ETHERNET") {
+ return NetworkType::NETWORK_ETHERNET;
+ }
+ if (enum_name == "CONNECTION_WIFI") {
+ return NetworkType::NETWORK_WIFI;
+ }
+ if (enum_name == "CONNECTION_4G") {
+ return NetworkType::NETWORK_4G;
+ }
+ if (enum_name == "CONNECTION_3G") {
+ return NetworkType::NETWORK_3G;
+ }
+ if (enum_name == "CONNECTION_2G") {
+ return NetworkType::NETWORK_2G;
+ }
+ if (enum_name == "CONNECTION_UNKNOWN_CELLULAR") {
+ return NetworkType::NETWORK_UNKNOWN_CELLULAR;
+ }
+ if (enum_name == "CONNECTION_BLUETOOTH") {
+ return NetworkType::NETWORK_BLUETOOTH;
+ }
+ if (enum_name == "CONNECTION_NONE") {
+ return NetworkType::NETWORK_NONE;
+ }
+ ASSERT(false);
+ return NetworkType::NETWORK_UNKNOWN;
+}
+
+static rtc::AdapterType AdapterTypeFromNetworkType(NetworkType network_type) {
+ switch (network_type) {
+ case NETWORK_UNKNOWN:
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ case NETWORK_ETHERNET:
+ return rtc::ADAPTER_TYPE_ETHERNET;
+ case NETWORK_WIFI:
+ return rtc::ADAPTER_TYPE_WIFI;
+ case NETWORK_4G:
+ case NETWORK_3G:
+ case NETWORK_2G:
+ case NETWORK_UNKNOWN_CELLULAR:
+ return rtc::ADAPTER_TYPE_CELLULAR;
+ case NETWORK_BLUETOOTH:
+      // There is no corresponding mapping for Bluetooth networks.
+ // Map it to VPN for now.
+ return rtc::ADAPTER_TYPE_VPN;
+ default:
+ RTC_DCHECK(false) << "Invalid network type " << network_type;
+ return rtc::ADAPTER_TYPE_UNKNOWN;
+ }
+}
+
+static rtc::IPAddress GetIPAddressFromJava(JNIEnv* jni, jobject j_ip_address) {
+ jclass j_ip_address_class = GetObjectClass(jni, j_ip_address);
+ jfieldID j_address_id = GetFieldID(jni, j_ip_address_class, "address", "[B");
+ jbyteArray j_addresses =
+ static_cast<jbyteArray>(GetObjectField(jni, j_ip_address, j_address_id));
+ size_t address_length = jni->GetArrayLength(j_addresses);
+ jbyte* addr_array = jni->GetByteArrayElements(j_addresses, nullptr);
+ CHECK_EXCEPTION(jni) << "Error during GetIPAddressFromJava";
+ if (address_length == 4) {
+ // IP4
+ struct in_addr ip4_addr;
+ memcpy(&ip4_addr.s_addr, addr_array, 4);
+ jni->ReleaseByteArrayElements(j_addresses, addr_array, JNI_ABORT);
+ return rtc::IPAddress(ip4_addr);
+ }
+ // IP6
+ RTC_CHECK(address_length == 16);
+ struct in6_addr ip6_addr;
+ memcpy(ip6_addr.s6_addr, addr_array, address_length);
+ jni->ReleaseByteArrayElements(j_addresses, addr_array, JNI_ABORT);
+ return rtc::IPAddress(ip6_addr);
+}
+
+static void GetIPAddressesFromJava(JNIEnv* jni,
+ jobjectArray j_ip_addresses,
+ std::vector<rtc::IPAddress>* ip_addresses) {
+ ip_addresses->clear();
+ size_t num_addresses = jni->GetArrayLength(j_ip_addresses);
+ CHECK_EXCEPTION(jni) << "Error during GetArrayLength";
+ for (size_t i = 0; i < num_addresses; ++i) {
+ jobject j_ip_address = jni->GetObjectArrayElement(j_ip_addresses, i);
+ CHECK_EXCEPTION(jni) << "Error during GetObjectArrayElement";
+ rtc::IPAddress ip = GetIPAddressFromJava(jni, j_ip_address);
+ ip_addresses->push_back(ip);
+ }
+}
+
+static NetworkInformation GetNetworkInformationFromJava(
+ JNIEnv* jni,
+ jobject j_network_info) {
+ jclass j_network_info_class = GetObjectClass(jni, j_network_info);
+ jfieldID j_interface_name_id =
+ GetFieldID(jni, j_network_info_class, "name", "Ljava/lang/String;");
+ jfieldID j_handle_id = GetFieldID(jni, j_network_info_class, "handle", "I");
+ jfieldID j_type_id =
+ GetFieldID(jni, j_network_info_class, "type",
+ "Lorg/webrtc/NetworkMonitorAutoDetect$ConnectionType;");
+ jfieldID j_ip_addresses_id =
+ GetFieldID(jni, j_network_info_class, "ipAddresses",
+ "[Lorg/webrtc/NetworkMonitorAutoDetect$IPAddress;");
+
+ NetworkInformation network_info;
+ network_info.interface_name = JavaToStdString(
+ jni, GetStringField(jni, j_network_info, j_interface_name_id));
+ network_info.handle =
+ static_cast<NetworkHandle>(GetIntField(jni, j_network_info, j_handle_id));
+ network_info.type = GetNetworkTypeFromJava(
+ jni, GetObjectField(jni, j_network_info, j_type_id));
+ jobjectArray j_ip_addresses = static_cast<jobjectArray>(
+ GetObjectField(jni, j_network_info, j_ip_addresses_id));
+ GetIPAddressesFromJava(jni, j_ip_addresses, &network_info.ip_addresses);
+ return network_info;
+}
+
+std::string NetworkInformation::ToString() const {
+ std::stringstream ss;
+ ss << "NetInfo[name " << interface_name << "; handle " << handle << "; type "
+ << type << "; address";
+  for (const rtc::IPAddress& address : ip_addresses) {
+ ss << " " << address.ToString();
+ }
+ ss << "]";
+ return ss.str();
+}
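+// Example output (hypothetical values):
+//   NetInfo[name wlan0; handle 100; type 2; address 192.168.1.10 fe80::1]
+// where type 2 is NETWORK_WIFI in the NetworkType enum.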
+
+// static
+void AndroidNetworkMonitor::SetAndroidContext(JNIEnv* jni, jobject context) {
+ if (application_context_) {
+ jni->DeleteGlobalRef(application_context_);
+ }
+ application_context_ = NewGlobalRef(jni, context);
+}
+
+AndroidNetworkMonitor::AndroidNetworkMonitor()
+ : j_network_monitor_class_(jni(),
+ FindClass(jni(), "org/webrtc/NetworkMonitor")),
+ j_network_monitor_(
+ jni(),
+ jni()->CallStaticObjectMethod(
+ *j_network_monitor_class_,
+ GetStaticMethodID(
+ jni(),
+ *j_network_monitor_class_,
+ "init",
+ "(Landroid/content/Context;)Lorg/webrtc/NetworkMonitor;"),
+ application_context_)) {
+ ASSERT(application_context_ != nullptr);
+ CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.init";
+}
+
+void AndroidNetworkMonitor::Start() {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ if (started_) {
+ return;
+ }
+ started_ = true;
+
+  // This is somewhat magical: registering |this| as a NetworkBinder lets the
+  // SocketServer bind sockets on a particular network when it creates them.
+ worker_thread()->socketserver()->set_network_binder(this);
+
+ jmethodID m =
+ GetMethodID(jni(), *j_network_monitor_class_, "startMonitoring", "(J)V");
+ jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
+ CHECK_EXCEPTION(jni()) << "Error during CallVoidMethod";
+}
+
+void AndroidNetworkMonitor::Stop() {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ if (!started_) {
+ return;
+ }
+ started_ = false;
+
+ // Once the network monitor stops, it will clear all network information and
+ // it won't find the network handle to bind anyway.
+ if (worker_thread()->socketserver()->network_binder() == this) {
+ worker_thread()->socketserver()->set_network_binder(nullptr);
+ }
+
+ jmethodID m =
+ GetMethodID(jni(), *j_network_monitor_class_, "stopMonitoring", "(J)V");
+ jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
+ CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.stopMonitoring";
+
+ network_handle_by_address_.clear();
+ network_info_by_handle_.clear();
+}
+
+int AndroidNetworkMonitor::BindSocketToNetwork(int socket_fd,
+ const rtc::IPAddress& address) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ // Android prior to Lollipop didn't have support for binding sockets to
+ // networks. However, in that case it should not have reached here because
+ // |network_handle_by_address_| should only be populated in Android Lollipop
+ // and above.
+ // TODO(honghaiz): Add a check for Android version here so that it won't try
+ // to look for handle if the Android version is before Lollipop.
+ auto iter = network_handle_by_address_.find(address);
+ if (iter == network_handle_by_address_.end()) {
+ return rtc::NETWORK_BIND_ADDRESS_NOT_FOUND;
+ }
+ NetworkHandle network_handle = iter->second;
+
+ // NOTE: This does rely on Android implementation details, but
+ // these details are unlikely to change.
+ typedef int (*SetNetworkForSocket)(unsigned netId, int socketFd);
+ static SetNetworkForSocket setNetworkForSocket;
+ // This is not threadsafe, but we are running this only on the worker thread.
+ if (setNetworkForSocket == nullptr) {
+ // Android's netd client library should always be loaded in our address
+ // space as it shims libc functions like connect().
+ const std::string net_library_path = "libnetd_client.so";
+ void* lib = dlopen(net_library_path.c_str(), RTLD_LAZY);
+ if (lib == nullptr) {
+ LOG(LS_ERROR) << "Library " << net_library_path << " not found!";
+ return rtc::NETWORK_BIND_NOT_IMPLEMENTED;
+ }
+ setNetworkForSocket = reinterpret_cast<SetNetworkForSocket>(
+ dlsym(lib, "setNetworkForSocket"));
+ }
+ if (setNetworkForSocket == nullptr) {
+ LOG(LS_ERROR) << "Symbol setNetworkForSocket not found ";
+ return rtc::NETWORK_BIND_NOT_IMPLEMENTED;
+ }
+ int rv = setNetworkForSocket(network_handle, socket_fd);
+  // If the network has since disconnected, |rv| will be ENONET. Surface this
+  // as NETWORK_BIND_NETWORK_CHANGED rather than a generic failure, which
+  // would be less descriptive.
+ if (rv == 0) {
+ return rtc::NETWORK_BIND_SUCCESS;
+ }
+ if (rv == ENONET) {
+ return rtc::NETWORK_BIND_NETWORK_CHANGED;
+ }
+ return rtc::NETWORK_BIND_FAILURE;
+}
+
+void AndroidNetworkMonitor::OnNetworkConnected(
+ const NetworkInformation& network_info) {
+ worker_thread()->Invoke<void>(
+ RTC_FROM_HERE, rtc::Bind(&AndroidNetworkMonitor::OnNetworkConnected_w,
+ this, network_info));
+ // Fire SignalNetworksChanged to update the list of networks.
+ OnNetworksChanged();
+}
+
+void AndroidNetworkMonitor::OnNetworkConnected_w(
+ const NetworkInformation& network_info) {
+ LOG(LS_INFO) << "Network connected: " << network_info.ToString();
+ adapter_type_by_name_[network_info.interface_name] =
+ AdapterTypeFromNetworkType(network_info.type);
+ network_info_by_handle_[network_info.handle] = network_info;
+ for (const rtc::IPAddress& address : network_info.ip_addresses) {
+ network_handle_by_address_[address] = network_info.handle;
+ }
+}
+
+void AndroidNetworkMonitor::OnNetworkDisconnected(NetworkHandle handle) {
+ LOG(LS_INFO) << "Network disconnected for handle " << handle;
+ worker_thread()->Invoke<void>(
+ RTC_FROM_HERE,
+ rtc::Bind(&AndroidNetworkMonitor::OnNetworkDisconnected_w, this, handle));
+}
+
+void AndroidNetworkMonitor::OnNetworkDisconnected_w(NetworkHandle handle) {
+ auto iter = network_info_by_handle_.find(handle);
+ if (iter != network_info_by_handle_.end()) {
+ for (const rtc::IPAddress& address : iter->second.ip_addresses) {
+ network_handle_by_address_.erase(address);
+ }
+ network_info_by_handle_.erase(iter);
+ }
+}
+
+void AndroidNetworkMonitor::SetNetworkInfos(
+ const std::vector<NetworkInformation>& network_infos) {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ network_handle_by_address_.clear();
+ network_info_by_handle_.clear();
+ LOG(LS_INFO) << "Android network monitor found " << network_infos.size()
+ << " networks";
+  for (const NetworkInformation& network : network_infos) {
+ OnNetworkConnected_w(network);
+ }
+}
+
+rtc::AdapterType AndroidNetworkMonitor::GetAdapterType(
+ const std::string& if_name) {
+ auto iter = adapter_type_by_name_.find(if_name);
+ rtc::AdapterType type = (iter == adapter_type_by_name_.end())
+ ? rtc::ADAPTER_TYPE_UNKNOWN
+ : iter->second;
+ if (type == rtc::ADAPTER_TYPE_UNKNOWN) {
+ LOG(LS_WARNING) << "Get an unknown type for the interface " << if_name;
+ }
+ return type;
+}
+
+rtc::NetworkMonitorInterface*
+AndroidNetworkMonitorFactory::CreateNetworkMonitor() {
+ return new AndroidNetworkMonitor();
+}
+
+JOW(void, NetworkMonitor_nativeNotifyConnectionTypeChanged)(
+ JNIEnv* jni, jobject j_monitor, jlong j_native_monitor) {
+ rtc::NetworkMonitorInterface* network_monitor =
+ reinterpret_cast<rtc::NetworkMonitorInterface*>(j_native_monitor);
+ network_monitor->OnNetworksChanged();
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfActiveNetworkList)(
+ JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+ jobjectArray j_network_infos) {
+ AndroidNetworkMonitor* network_monitor =
+ reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+ std::vector<NetworkInformation> network_infos;
+ size_t num_networks = jni->GetArrayLength(j_network_infos);
+ for (size_t i = 0; i < num_networks; ++i) {
+ jobject j_network_info = jni->GetObjectArrayElement(j_network_infos, i);
+ CHECK_EXCEPTION(jni) << "Error during GetObjectArrayElement";
+ network_infos.push_back(GetNetworkInformationFromJava(jni, j_network_info));
+ }
+ network_monitor->SetNetworkInfos(network_infos);
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfNetworkConnect)(
+ JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+ jobject j_network_info) {
+ AndroidNetworkMonitor* network_monitor =
+ reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+ NetworkInformation network_info =
+ GetNetworkInformationFromJava(jni, j_network_info);
+ network_monitor->OnNetworkConnected(network_info);
+}
+
+JOW(void, NetworkMonitor_nativeNotifyOfNetworkDisconnect)(
+ JNIEnv* jni, jobject j_monitor, jlong j_native_monitor,
+ jint network_handle) {
+ AndroidNetworkMonitor* network_monitor =
+ reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
+ network_monitor->OnNetworkDisconnected(
+ static_cast<NetworkHandle>(network_handle));
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/android/jni/androidnetworkmonitor_jni.h b/webrtc/api/android/jni/androidnetworkmonitor_jni.h
new file mode 100644
index 0000000..c8d7fcd
--- /dev/null
+++ b/webrtc/api/android/jni/androidnetworkmonitor_jni.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
+
+#include "webrtc/base/networkmonitor.h"
+
+#include <map>
+
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/base/basictypes.h"
+#include "webrtc/base/thread_checker.h"
+
+namespace webrtc_jni {
+
+typedef uint32_t NetworkHandle;
+
+// C++ equivalent of the Java NetworkMonitorAutoDetect.ConnectionType.
+enum NetworkType {
+ NETWORK_UNKNOWN,
+ NETWORK_ETHERNET,
+ NETWORK_WIFI,
+ NETWORK_4G,
+ NETWORK_3G,
+ NETWORK_2G,
+ NETWORK_UNKNOWN_CELLULAR,
+ NETWORK_BLUETOOTH,
+ NETWORK_NONE
+};
+
+// The information is collected from Android OS so that the native code can get
+// the network type and handle (Android network ID) for each interface.
+struct NetworkInformation {
+ std::string interface_name;
+ NetworkHandle handle;
+ NetworkType type;
+ std::vector<rtc::IPAddress> ip_addresses;
+
+ std::string ToString() const;
+};
+
+class AndroidNetworkMonitor : public rtc::NetworkMonitorBase,
+ public rtc::NetworkBinderInterface {
+ public:
+ AndroidNetworkMonitor();
+
+ static void SetAndroidContext(JNIEnv* jni, jobject context);
+
+ void Start() override;
+ void Stop() override;
+
+ int BindSocketToNetwork(int socket_fd,
+ const rtc::IPAddress& address) override;
+ rtc::AdapterType GetAdapterType(const std::string& if_name) override;
+ void OnNetworkConnected(const NetworkInformation& network_info);
+ void OnNetworkDisconnected(NetworkHandle network_handle);
+ void SetNetworkInfos(const std::vector<NetworkInformation>& network_infos);
+
+ private:
+ JNIEnv* jni() { return AttachCurrentThreadIfNeeded(); }
+
+ void OnNetworkConnected_w(const NetworkInformation& network_info);
+ void OnNetworkDisconnected_w(NetworkHandle network_handle);
+
+ ScopedGlobalRef<jclass> j_network_monitor_class_;
+ ScopedGlobalRef<jobject> j_network_monitor_;
+ rtc::ThreadChecker thread_checker_;
+ static jobject application_context_;
+ bool started_ = false;
+ std::map<std::string, rtc::AdapterType> adapter_type_by_name_;
+ std::map<rtc::IPAddress, NetworkHandle> network_handle_by_address_;
+ std::map<NetworkHandle, NetworkInformation> network_info_by_handle_;
+};
+
+class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
+ public:
+ AndroidNetworkMonitorFactory() {}
+
+ rtc::NetworkMonitorInterface* CreateNetworkMonitor() override;
+};
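+
+// Illustrative wiring (a sketch only; assumes the static
+// rtc::NetworkMonitorFactory::SetFactory() registration hook): installing the
+// factory lets the network thread create an AndroidNetworkMonitor when it
+// starts monitoring:
+//
+//   rtc::NetworkMonitorFactory::SetFactory(new AndroidNetworkMonitorFactory());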
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_ANDROIDNETWORKMONITOR_JNI_H_
diff --git a/webrtc/api/android/jni/androidvideocapturer_jni.cc b/webrtc/api/android/jni/androidvideocapturer_jni.cc
new file mode 100644
index 0000000..150f1c4
--- /dev/null
+++ b/webrtc/api/android/jni/androidvideocapturer_jni.cc
@@ -0,0 +1,348 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/api/android/jni/androidvideocapturer_jni.h"
+#include "webrtc/api/android/jni/classreferenceholder.h"
+#include "webrtc/api/android/jni/native_handle_impl.h"
+#include "webrtc/api/android/jni/surfacetexturehelper_jni.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "webrtc/base/bind.h"
+
+namespace webrtc_jni {
+
+jobject AndroidVideoCapturerJni::application_context_ = nullptr;
+
+// static
+int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
+                                               jobject application_context) {
+  if (application_context_) {
+    jni->DeleteGlobalRef(application_context_);
+  }
+  application_context_ = NewGlobalRef(jni, application_context);
+
+ return 0;
+}
+
+AndroidVideoCapturerJni::AndroidVideoCapturerJni(JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_egl_context)
+ : j_video_capturer_(jni, j_video_capturer),
+ j_video_capturer_class_(jni, FindClass(jni, "org/webrtc/VideoCapturer")),
+ j_observer_class_(
+ jni,
+ FindClass(jni,
+ "org/webrtc/VideoCapturer$NativeObserver")),
+ surface_texture_helper_(SurfaceTextureHelper::create(
+ jni, "Camera SurfaceTextureHelper", j_egl_context)),
+ capturer_(nullptr) {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
+ thread_checker_.DetachFromThread();
+}
+
+AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni dtor";
+ jni()->CallVoidMethod(
+ *j_video_capturer_,
+ GetMethodID(jni(), *j_video_capturer_class_, "dispose", "()V"));
+ CHECK_EXCEPTION(jni()) << "error during VideoCapturer.dispose()";
+}
+
+void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
+ webrtc::AndroidVideoCapturer* capturer) {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni start";
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ {
+ rtc::CritScope cs(&capturer_lock_);
+ RTC_CHECK(capturer_ == nullptr);
+ RTC_CHECK(invoker_.get() == nullptr);
+ capturer_ = capturer;
+ invoker_.reset(new rtc::GuardedAsyncInvoker());
+ }
+ jobject j_frame_observer =
+ jni()->NewObject(*j_observer_class_,
+ GetMethodID(jni(), *j_observer_class_, "<init>", "(J)V"),
+ jlongFromPointer(this));
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+ jmethodID m = GetMethodID(
+ jni(), *j_video_capturer_class_, "startCapture",
+ "(IIILorg/webrtc/SurfaceTextureHelper;Landroid/content/Context;"
+ "Lorg/webrtc/VideoCapturer$CapturerObserver;)V");
+ jni()->CallVoidMethod(
+ *j_video_capturer_, m, width, height, framerate,
+ surface_texture_helper_
+ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
+ : nullptr,
+ application_context_, j_frame_observer);
+ CHECK_EXCEPTION(jni()) << "error during VideoCapturer.startCapture";
+}
+
+void AndroidVideoCapturerJni::Stop() {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni stop";
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ {
+    // TODO(nisse): Consider moving this block to *after* the call to
+    // stopCapture. stopCapture should ensure that we get no
+    // more frames, and then we shouldn't need the if (!capturer_)
+    // checks in OnMemoryBufferFrame and OnTextureFrame.
+ rtc::CritScope cs(&capturer_lock_);
+ // Destroying |invoker_| will cancel all pending calls to |capturer_|.
+ invoker_ = nullptr;
+ capturer_ = nullptr;
+ }
+ jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
+ "stopCapture", "()V");
+ jni()->CallVoidMethod(*j_video_capturer_, m);
+ CHECK_EXCEPTION(jni()) << "error during VideoCapturer.stopCapture";
+ LOG(LS_INFO) << "AndroidVideoCapturerJni stop done";
+}
+
+template <typename... Args>
+void AndroidVideoCapturerJni::AsyncCapturerInvoke(
+ const rtc::Location& posted_from,
+ void (webrtc::AndroidVideoCapturer::*method)(Args...),
+ typename Identity<Args>::type... args) {
+ rtc::CritScope cs(&capturer_lock_);
+ if (!invoker_) {
+ LOG(LS_WARNING) << posted_from.function_name()
+ << "() called for closed capturer.";
+ return;
+ }
+ invoker_->AsyncInvoke<void>(posted_from,
+ rtc::Bind(method, capturer_, args...));
+}
+
+std::vector<cricket::VideoFormat>
+AndroidVideoCapturerJni::GetSupportedFormats() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ jobject j_list_of_formats = jni->CallObjectMethod(
+ *j_video_capturer_,
+ GetMethodID(jni, *j_video_capturer_class_, "getSupportedFormats",
+ "()Ljava/util/List;"));
+ CHECK_EXCEPTION(jni) << "error during getSupportedFormats";
+
+ // Extract Java List<CaptureFormat> to std::vector<cricket::VideoFormat>.
+ jclass j_list_class = jni->FindClass("java/util/List");
+ jclass j_format_class =
+ jni->FindClass("org/webrtc/CameraEnumerationAndroid$CaptureFormat");
+ jclass j_framerate_class = jni->FindClass(
+ "org/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange");
+ const int size = jni->CallIntMethod(
+ j_list_of_formats, GetMethodID(jni, j_list_class, "size", "()I"));
+ jmethodID j_get =
+ GetMethodID(jni, j_list_class, "get", "(I)Ljava/lang/Object;");
+ jfieldID j_framerate_field = GetFieldID(
+ jni, j_format_class, "framerate",
+ "Lorg/webrtc/CameraEnumerationAndroid$CaptureFormat$FramerateRange;");
+ jfieldID j_width_field = GetFieldID(jni, j_format_class, "width", "I");
+ jfieldID j_height_field = GetFieldID(jni, j_format_class, "height", "I");
+ jfieldID j_max_framerate_field =
+ GetFieldID(jni, j_framerate_class, "max", "I");
+
+ std::vector<cricket::VideoFormat> formats;
+ formats.reserve(size);
+ for (int i = 0; i < size; ++i) {
+ jobject j_format = jni->CallObjectMethod(j_list_of_formats, j_get, i);
+ jobject j_framerate = GetObjectField(jni, j_format, j_framerate_field);
+    // CaptureFormat framerates are in frames per second multiplied by 1000;
+    // round up to whole frames per second.
+    const int frame_interval = cricket::VideoFormat::FpsToInterval(
+        (GetIntField(jni, j_framerate, j_max_framerate_field) + 999) / 1000);
+ formats.emplace_back(GetIntField(jni, j_format, j_width_field),
+ GetIntField(jni, j_format, j_height_field),
+ frame_interval, cricket::FOURCC_NV21);
+ }
+ CHECK_EXCEPTION(jni) << "error while extracting formats";
+ return formats;
+}
+
+void AndroidVideoCapturerJni::OnCapturerStarted(bool success) {
+ LOG(LS_INFO) << "AndroidVideoCapturerJni capture started: " << success;
+ AsyncCapturerInvoke(
+ RTC_FROM_HERE, &webrtc::AndroidVideoCapturer::OnCapturerStarted, success);
+}
+
+void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
+ int length,
+ int width,
+ int height,
+ int rotation,
+ int64_t timestamp_ns) {
+ RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+ rotation == 270);
+ rtc::CritScope cs(&capturer_lock_);
+ if (!capturer_) {
+ LOG(LS_WARNING) << "OnMemoryBufferFrame() called for closed capturer.";
+ return;
+ }
+ int adapted_width;
+ int adapted_height;
+ int crop_width;
+ int crop_height;
+ int crop_x;
+ int crop_y;
+ int64_t translated_camera_time_us;
+
+ if (!capturer_->AdaptFrame(width, height,
+ timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+ rtc::TimeMicros(),
+ &adapted_width, &adapted_height,
+ &crop_width, &crop_height, &crop_x, &crop_y,
+ &translated_camera_time_us)) {
+ return;
+ }
+
+ int rotated_width = crop_width;
+ int rotated_height = crop_height;
+
+ if (capturer_->apply_rotation() && (rotation == 90 || rotation == 270)) {
+ std::swap(adapted_width, adapted_height);
+ std::swap(rotated_width, rotated_height);
+ }
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+ pre_scale_pool_.CreateBuffer(rotated_width, rotated_height);
+
+ const uint8_t* y_plane = static_cast<const uint8_t*>(video_frame);
+ const uint8_t* uv_plane = y_plane + width * height;
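+  // NV21 layout: a full-resolution Y plane followed by a single
+  // half-resolution plane of interleaved V and U samples (2x2 subsampled),
+  // which is why the U and V outputs are swapped in the conversion below.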
+
+ // Can only crop at even pixels.
+ crop_x &= ~1;
+ crop_y &= ~1;
+ int uv_width = (width + 1) / 2;
+
+ libyuv::NV12ToI420Rotate(
+ y_plane + width * crop_y + crop_x, width,
+ uv_plane + uv_width * crop_y + crop_x, width,
+ buffer->MutableDataY(), buffer->StrideY(),
+ // Swap U and V, since we have NV21, not NV12.
+ buffer->MutableDataV(), buffer->StrideV(),
+ buffer->MutableDataU(), buffer->StrideU(),
+ crop_width, crop_height, static_cast<libyuv::RotationMode>(
+ capturer_->apply_rotation() ? rotation : 0));
+
+ if (adapted_width != buffer->width() || adapted_height != buffer->height()) {
+ rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
+ post_scale_pool_.CreateBuffer(adapted_width, adapted_height));
+ scaled_buffer->ScaleFrom(buffer);
+ buffer = scaled_buffer;
+ }
+ capturer_->OnFrame(cricket::WebRtcVideoFrame(
+ buffer,
+ capturer_->apply_rotation()
+ ? webrtc::kVideoRotation_0
+ : static_cast<webrtc::VideoRotation>(rotation),
+ translated_camera_time_us),
+ width, height);
+}
+
+void AndroidVideoCapturerJni::OnTextureFrame(int width,
+ int height,
+ int rotation,
+ int64_t timestamp_ns,
+ const NativeHandleImpl& handle) {
+ RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+ rotation == 270);
+ rtc::CritScope cs(&capturer_lock_);
+ if (!capturer_) {
+ LOG(LS_WARNING) << "OnTextureFrame() called for closed capturer.";
+ surface_texture_helper_->ReturnTextureFrame();
+ return;
+ }
+ int adapted_width;
+ int adapted_height;
+ int crop_width;
+ int crop_height;
+ int crop_x;
+ int crop_y;
+ int64_t translated_camera_time_us;
+
+ if (!capturer_->AdaptFrame(width, height,
+ timestamp_ns / rtc::kNumNanosecsPerMicrosec,
+ rtc::TimeMicros(),
+ &adapted_width, &adapted_height,
+ &crop_width, &crop_height, &crop_x, &crop_y,
+ &translated_camera_time_us)) {
+ surface_texture_helper_->ReturnTextureFrame();
+ return;
+ }
+
+ Matrix matrix = handle.sampling_matrix;
+
+ matrix.Crop(crop_width / static_cast<float>(width),
+ crop_height / static_cast<float>(height),
+ crop_x / static_cast<float>(width),
+ crop_y / static_cast<float>(height));
+
+ if (capturer_->apply_rotation()) {
+ if (rotation == webrtc::kVideoRotation_90 ||
+ rotation == webrtc::kVideoRotation_270) {
+ std::swap(adapted_width, adapted_height);
+ }
+ matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
+ }
+
+ capturer_->OnFrame(
+ cricket::WebRtcVideoFrame(
+ surface_texture_helper_->CreateTextureFrame(
+ adapted_width, adapted_height,
+ NativeHandleImpl(handle.oes_texture_id, matrix)),
+ capturer_->apply_rotation()
+ ? webrtc::kVideoRotation_0
+ : static_cast<webrtc::VideoRotation>(rotation),
+ translated_camera_time_us),
+ width, height);
+}
+
+void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
+ int height,
+ int fps) {
+ AsyncCapturerInvoke(RTC_FROM_HERE,
+ &webrtc::AndroidVideoCapturer::OnOutputFormatRequest,
+ width, height, fps);
+}
+
+JNIEnv* AndroidVideoCapturerJni::jni() { return AttachCurrentThreadIfNeeded(); }
+
+JOW(void,
+ VideoCapturer_00024NativeObserver_nativeOnByteBufferFrameCaptured)
+ (JNIEnv* jni, jclass, jlong j_capturer, jbyteArray j_frame, jint length,
+ jint width, jint height, jint rotation, jlong timestamp) {
+ jboolean is_copy = true;
+ jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+ ->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp);
+ jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
+}
+
+JOW(void, VideoCapturer_00024NativeObserver_nativeOnTextureFrameCaptured)
+ (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+ jint j_oes_texture_id, jfloatArray j_transform_matrix,
+ jint j_rotation, jlong j_timestamp) {
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
+ ->OnTextureFrame(j_width, j_height, j_rotation, j_timestamp,
+ NativeHandleImpl(jni, j_oes_texture_id,
+ j_transform_matrix));
+}
+
+JOW(void, VideoCapturer_00024NativeObserver_nativeCapturerStarted)
+ (JNIEnv* jni, jclass, jlong j_capturer, jboolean j_success) {
+ LOG(LS_INFO) << "NativeObserver_nativeCapturerStarted";
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnCapturerStarted(
+ j_success);
+}
+
+JOW(void, VideoCapturer_00024NativeObserver_nativeOnOutputFormatRequest)
+ (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
+ jint j_fps) {
+ LOG(LS_INFO) << "NativeObserver_nativeOnOutputFormatRequest";
+ reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)->OnOutputFormatRequest(
+ j_width, j_height, j_fps);
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/android/jni/androidvideocapturer_jni.h b/webrtc/api/android/jni/androidvideocapturer_jni.h
new file mode 100644
index 0000000..3d8db6e
--- /dev/null
+++ b/webrtc/api/android/jni/androidvideocapturer_jni.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+#define WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
+
+#include <memory>
+#include <string>
+
+#include "webrtc/api/androidvideocapturer.h"
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/base/asyncinvoker.h"
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/criticalsection.h"
+#include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+
+namespace webrtc_jni {
+
+struct NativeHandleImpl;
+class SurfaceTextureHelper;
+
+// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
+// The purpose of the delegate is to hide the JNI specifics from the C++ only
+// AndroidVideoCapturer.
+class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
+ public:
+  static int SetAndroidObjects(JNIEnv* jni, jobject application_context);
+
+ AndroidVideoCapturerJni(JNIEnv* jni,
+ jobject j_video_capturer,
+ jobject j_egl_context);
+
+ void Start(int width, int height, int framerate,
+ webrtc::AndroidVideoCapturer* capturer) override;
+ void Stop() override;
+
+ std::vector<cricket::VideoFormat> GetSupportedFormats() override;
+
+ // Called from VideoCapturer::NativeObserver on a Java thread.
+ void OnCapturerStarted(bool success);
+ void OnMemoryBufferFrame(void* video_frame, int length, int width,
+ int height, int rotation, int64_t timestamp_ns);
+ void OnTextureFrame(int width, int height, int rotation, int64_t timestamp_ns,
+ const NativeHandleImpl& handle);
+ void OnOutputFormatRequest(int width, int height, int fps);
+
+ protected:
+ ~AndroidVideoCapturerJni();
+
+ private:
+ JNIEnv* jni();
+
+ // To avoid deducing Args from the 3rd parameter of AsyncCapturerInvoke.
+ template <typename T>
+ struct Identity {
+ typedef T type;
+ };
+
+ // Helper function to make safe asynchronous calls to |capturer_|. The calls
+ // are not guaranteed to be delivered.
+ template <typename... Args>
+ void AsyncCapturerInvoke(
+ const rtc::Location& posted_from,
+ void (webrtc::AndroidVideoCapturer::*method)(Args...),
+ typename Identity<Args>::type... args);
+
+ const ScopedGlobalRef<jobject> j_video_capturer_;
+ const ScopedGlobalRef<jclass> j_video_capturer_class_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+
+ // Used on the Java thread running the camera.
+ webrtc::I420BufferPool pre_scale_pool_;
+ webrtc::I420BufferPool post_scale_pool_;
+ rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
+ rtc::ThreadChecker thread_checker_;
+
+  // |capturer_| is guaranteed to be a valid pointer from a call to
+  // AndroidVideoCapturerDelegate::Start() until
+  // AndroidVideoCapturerDelegate::Stop().
+ rtc::CriticalSection capturer_lock_;
+ webrtc::AndroidVideoCapturer* capturer_ GUARDED_BY(capturer_lock_);
+ // |invoker_| is used to communicate with |capturer_| on the thread Start() is
+ // called on.
+ std::unique_ptr<rtc::GuardedAsyncInvoker> invoker_ GUARDED_BY(capturer_lock_);
+
+ static jobject application_context_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(AndroidVideoCapturerJni);
+};
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_ANDROIDVIDEOCAPTURER_JNI_H_
diff --git a/webrtc/api/android/jni/classreferenceholder.cc b/webrtc/api/android/jni/classreferenceholder.cc
new file mode 100644
index 0000000..be5cde2
--- /dev/null
+++ b/webrtc/api/android/jni/classreferenceholder.cc
@@ -0,0 +1,137 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/api/android/jni/classreferenceholder.h"
+
+#include "webrtc/api/android/jni/jni_helpers.h"
+
+namespace webrtc_jni {
+
+// ClassReferenceHolder holds global references to Java classes in app/webrtc.
+class ClassReferenceHolder {
+ public:
+ explicit ClassReferenceHolder(JNIEnv* jni);
+ ~ClassReferenceHolder();
+
+ void FreeReferences(JNIEnv* jni);
+ jclass GetClass(const std::string& name);
+
+ private:
+ void LoadClass(JNIEnv* jni, const std::string& name);
+
+ std::map<std::string, jclass> classes_;
+};
+
+// Allocated in LoadGlobalClassReferenceHolder(),
+// freed in FreeGlobalClassReferenceHolder().
+static ClassReferenceHolder* g_class_reference_holder = nullptr;
+
+void LoadGlobalClassReferenceHolder() {
+ RTC_CHECK(g_class_reference_holder == nullptr);
+ g_class_reference_holder = new ClassReferenceHolder(GetEnv());
+}
+
+void FreeGlobalClassReferenceHolder() {
+ g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded());
+ delete g_class_reference_holder;
+ g_class_reference_holder = nullptr;
+}
+
+ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
+ LoadClass(jni, "android/graphics/SurfaceTexture");
+ LoadClass(jni, "java/nio/ByteBuffer");
+ LoadClass(jni, "java/util/ArrayList");
+ LoadClass(jni, "org/webrtc/AudioTrack");
+ LoadClass(jni, "org/webrtc/Camera1Enumerator");
+ LoadClass(jni, "org/webrtc/Camera2Enumerator");
+ LoadClass(jni, "org/webrtc/CameraEnumerationAndroid");
+ LoadClass(jni, "org/webrtc/DataChannel");
+ LoadClass(jni, "org/webrtc/DataChannel$Buffer");
+ LoadClass(jni, "org/webrtc/DataChannel$Init");
+ LoadClass(jni, "org/webrtc/DataChannel$State");
+ LoadClass(jni, "org/webrtc/EglBase");
+ LoadClass(jni, "org/webrtc/EglBase$Context");
+ LoadClass(jni, "org/webrtc/EglBase14$Context");
+ LoadClass(jni, "org/webrtc/IceCandidate");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
+ LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
+ LoadClass(jni, "org/webrtc/MediaSource$State");
+ LoadClass(jni, "org/webrtc/MediaStream");
+ LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
+ LoadClass(jni, "org/webrtc/Metrics");
+ LoadClass(jni, "org/webrtc/Metrics$HistogramInfo");
+ LoadClass(jni, "org/webrtc/NetworkMonitor");
+ LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType");
+ LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$IPAddress");
+ LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$NetworkInformation");
+ LoadClass(jni, "org/webrtc/PeerConnectionFactory");
+ LoadClass(jni, "org/webrtc/PeerConnection$BundlePolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$RtcpMuxPolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState");
+ LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState");
+ LoadClass(jni, "org/webrtc/PeerConnection$IceTransportsType");
+ LoadClass(jni, "org/webrtc/PeerConnection$TcpCandidatePolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$CandidateNetworkPolicy");
+ LoadClass(jni, "org/webrtc/PeerConnection$KeyType");
+ LoadClass(jni, "org/webrtc/PeerConnection$SignalingState");
+ LoadClass(jni, "org/webrtc/RtpReceiver");
+ LoadClass(jni, "org/webrtc/RtpSender");
+ LoadClass(jni, "org/webrtc/SessionDescription");
+ LoadClass(jni, "org/webrtc/SessionDescription$Type");
+ LoadClass(jni, "org/webrtc/StatsReport");
+ LoadClass(jni, "org/webrtc/StatsReport$Value");
+ LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
+ LoadClass(jni, "org/webrtc/VideoCapturer");
+ LoadClass(jni, "org/webrtc/VideoCapturer$NativeObserver");
+ LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
+ LoadClass(jni, "org/webrtc/VideoTrack");
+}
+
+ClassReferenceHolder::~ClassReferenceHolder() {
+ RTC_CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!";
+}
+
+void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
+  for (const auto& entry : classes_) {
+    jni->DeleteGlobalRef(entry.second);
+  }
+ classes_.clear();
+}
+
+jclass ClassReferenceHolder::GetClass(const std::string& name) {
+ std::map<std::string, jclass>::iterator it = classes_.find(name);
+ RTC_CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name;
+ return it->second;
+}
+
+void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
+ jclass localRef = jni->FindClass(name.c_str());
+ CHECK_EXCEPTION(jni) << "error during FindClass: " << name;
+ RTC_CHECK(localRef) << name;
+ jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
+ CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name;
+ RTC_CHECK(globalRef) << name;
+ bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
+ RTC_CHECK(inserted) << "Duplicate class name: " << name;
+}
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name) {
+ return g_class_reference_holder->GetClass(name);
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/android/jni/classreferenceholder.h b/webrtc/api/android/jni/classreferenceholder.h
new file mode 100644
index 0000000..4120278
--- /dev/null
+++ b/webrtc/api/android/jni/classreferenceholder.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Android's FindClass() is trickier than usual because the app-specific
+// ClassLoader is not consulted when there is no app-specific frame on the
+// stack. Consequently, we look up all classes once, at library load time,
+// and cache global references to them.
+// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
+
+#ifndef WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
+#define WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
+
+#include <jni.h>
+#include <map>
+#include <string>
+
+namespace webrtc_jni {
+
+// LoadGlobalClassReferenceHolder must be called in JNI_OnLoad.
+void LoadGlobalClassReferenceHolder();
+// FreeGlobalClassReferenceHolder must be called in JNI_OnUnload.
+void FreeGlobalClassReferenceHolder();
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name);
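+// For example:
+//   jclass j_dc_class = FindClass(jni, "org/webrtc/DataChannel");
+// The name must be one of the classes loaded by ClassReferenceHolder; any
+// other name dies with an RTC_CHECK.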
+
+// Convenience macro defining JNI-accessible methods in the org.webrtc package.
+// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
+#define JOW(rettype, name) extern "C" rettype JNIEXPORT JNICALL \
+ Java_org_webrtc_##name
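+// For example, JOW(void, NetworkMonitor_nativeNotifyConnectionTypeChanged)
+// expands to
+//   extern "C" void JNIEXPORT JNICALL
+//       Java_org_webrtc_NetworkMonitor_nativeNotifyConnectionTypeChanged
+// The "_00024" sequence seen in some uses is the JNI escape for '$' in
+// nested-class names such as VideoCapturer$NativeObserver.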
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_CLASSREFERENCEHOLDER_H_
diff --git a/webrtc/api/android/jni/jni_helpers.cc b/webrtc/api/android/jni/jni_helpers.cc
new file mode 100644
index 0000000..3bc0ff9
--- /dev/null
+++ b/webrtc/api/android/jni/jni_helpers.cc
@@ -0,0 +1,350 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "webrtc/api/android/jni/jni_helpers.h"
+
+#include "webrtc/api/android/jni/classreferenceholder.h"
+
+#include <asm/unistd.h>
+#include <sys/prctl.h>
+#include <sys/syscall.h>
+#include <unistd.h>
+
+namespace webrtc_jni {
+
+static JavaVM* g_jvm = nullptr;
+
+static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
+
+// Key for per-thread JNIEnv* data. Non-NULL in threads attached to |g_jvm| by
+// AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that
+// were attached by the JVM because of a Java->native call.
+static pthread_key_t g_jni_ptr;
+
+JavaVM *GetJVM() {
+ RTC_CHECK(g_jvm) << "JNI_OnLoad failed to run?";
+ return g_jvm;
+}
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv() {
+ void* env = nullptr;
+ jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6);
+ RTC_CHECK(((env != nullptr) && (status == JNI_OK)) ||
+ ((env == nullptr) && (status == JNI_EDETACHED)))
+ << "Unexpected GetEnv return: " << status << ":" << env;
+ return reinterpret_cast<JNIEnv*>(env);
+}
+
+static void ThreadDestructor(void* prev_jni_ptr) {
+ // This function only runs on threads where |g_jni_ptr| is non-NULL, meaning
+ // we were responsible for originally attaching the thread, so are responsible
+ // for detaching it now. However, because some JVM implementations (notably
+ // Oracle's http://goo.gl/eHApYT) also use the pthread_key_create mechanism,
+  // the JVM's accounting info for this thread may already be wiped out by the
+ // time this is called. Thus it may appear we are already detached even though
+ // it was our responsibility to detach! Oh well.
+ if (!GetEnv())
+ return;
+
+ RTC_CHECK(GetEnv() == prev_jni_ptr)
+ << "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv();
+ jint status = g_jvm->DetachCurrentThread();
+ RTC_CHECK(status == JNI_OK) << "Failed to detach thread: " << status;
+ RTC_CHECK(!GetEnv()) << "Detaching was a successful no-op???";
+}
+
+static void CreateJNIPtrKey() {
+ RTC_CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor))
+ << "pthread_key_create";
+}
+
+jint InitGlobalJniVariables(JavaVM *jvm) {
+ RTC_CHECK(!g_jvm) << "InitGlobalJniVariables!";
+ g_jvm = jvm;
+ RTC_CHECK(g_jvm) << "InitGlobalJniVariables handed NULL?";
+
+ RTC_CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once";
+
+ JNIEnv* jni = nullptr;
+ if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK)
+ return -1;
+
+ return JNI_VERSION_1_6;
+}
+
+// Return thread ID as a string.
+static std::string GetThreadId() {
+  char buf[21];  // Big enough to hold a kuint64max plus terminating NUL.
+ RTC_CHECK_LT(snprintf(buf, sizeof(buf), "%ld",
+ static_cast<long>(syscall(__NR_gettid))),
+ sizeof(buf))
+ << "Thread id is bigger than uint64??";
+ return std::string(buf);
+}
+
+// Return the current thread's name.
+static std::string GetThreadName() {
+ char name[17] = {0};
+ if (prctl(PR_GET_NAME, name) != 0)
+ return std::string("<noname>");
+ return std::string(name);
+}
+
+// Return a |JNIEnv*| usable on this thread. Attaches to |g_jvm| if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded() {
+ JNIEnv* jni = GetEnv();
+ if (jni)
+ return jni;
+ RTC_CHECK(!pthread_getspecific(g_jni_ptr))
+ << "TLS has a JNIEnv* but not attached?";
+
+ std::string name(GetThreadName() + " - " + GetThreadId());
+ JavaVMAttachArgs args;
+ args.version = JNI_VERSION_1_6;
+ args.name = &name[0];
+ args.group = nullptr;
+ // Deal with difference in signatures between Oracle's jni.h and Android's.
+#ifdef _JAVASOFT_JNI_H_ // Oracle's jni.h violates the JNI spec!
+ void* env = nullptr;
+#else
+ JNIEnv* env = nullptr;
+#endif
+ RTC_CHECK(!g_jvm->AttachCurrentThread(&env, &args))
+ << "Failed to attach thread";
+ RTC_CHECK(env) << "AttachCurrentThread handed back NULL!";
+ jni = reinterpret_cast<JNIEnv*>(env);
+ RTC_CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific";
+ return jni;
+}
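+
+// Illustrative use from a C++-owned thread (the function name is
+// hypothetical):
+//
+//   void PostStatsToJava() {
+//     JNIEnv* jni = AttachCurrentThreadIfNeeded();
+//     ScopedLocalRefFrame local_ref_frame(jni);
+//     // ... JNI calls; the thread is detached automatically at thread exit
+//     // via the |g_jni_ptr| TLS destructor above.
+//   }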
+
+// Return a |jlong| that will correctly convert back to |ptr|. This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong jlongFromPointer(void* ptr) {
+ static_assert(sizeof(intptr_t) <= sizeof(jlong),
+ "Time to rethink the use of jlongs");
+ // Going through intptr_t to be obvious about the definedness of the
+ // conversion from pointer to integral type. intptr_t to jlong is a standard
+ // widening by the static_assert above.
+ jlong ret = reinterpret_cast<intptr_t>(ptr);
+ RTC_DCHECK(reinterpret_cast<void*>(ret) == ptr);
+ return ret;
+}
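+
+// Illustrative round trip: Java stores the returned jlong and later hands it
+// back to a native method, which casts it back to the original pointer type,
+// e.g.
+//   jlong handle = jlongFromPointer(capturer);
+//   ...
+//   auto* capturer = reinterpret_cast<AndroidVideoCapturerJni*>(handle);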
+
+// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
+// found object/class/method/field is non-null.
+jmethodID GetMethodID(
+ JNIEnv* jni, jclass c, const std::string& name, const char* signature) {
+ jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
+ CHECK_EXCEPTION(jni) << "error during GetMethodID: " << name << ", "
+ << signature;
+ RTC_CHECK(m) << name << ", " << signature;
+ return m;
+}
+
+jmethodID GetStaticMethodID(
+ JNIEnv* jni, jclass c, const char* name, const char* signature) {
+ jmethodID m = jni->GetStaticMethodID(c, name, signature);
+ CHECK_EXCEPTION(jni) << "error during GetStaticMethodID: " << name << ", "
+ << signature;
+ RTC_CHECK(m) << name << ", " << signature;
+ return m;
+}
+
+jfieldID GetFieldID(
+ JNIEnv* jni, jclass c, const char* name, const char* signature) {
+ jfieldID f = jni->GetFieldID(c, name, signature);
+ CHECK_EXCEPTION(jni) << "error during GetFieldID";
+ RTC_CHECK(f) << name << ", " << signature;
+ return f;
+}
+
+jclass GetObjectClass(JNIEnv* jni, jobject object) {
+ jclass c = jni->GetObjectClass(object);
+ CHECK_EXCEPTION(jni) << "error during GetObjectClass";
+ RTC_CHECK(c) << "GetObjectClass returned NULL";
+ return c;
+}
+
+jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) {
+ jobject o = jni->GetObjectField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetObjectField";
+ RTC_CHECK(!IsNull(jni, o)) << "GetObjectField returned NULL";
+ return o;
+}
+
+jobject GetNullableObjectField(JNIEnv* jni, jobject object, jfieldID id) {
+ jobject o = jni->GetObjectField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetObjectField";
+ return o;
+}
+
+jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id) {
+ return static_cast<jstring>(GetObjectField(jni, object, id));
+}
+
+jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id) {
+ jlong l = jni->GetLongField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetLongField";
+ return l;
+}
+
+jint GetIntField(JNIEnv* jni, jobject object, jfieldID id) {
+ jint i = jni->GetIntField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetIntField";
+ return i;
+}
+
+bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id) {
+ jboolean b = jni->GetBooleanField(object, id);
+ CHECK_EXCEPTION(jni) << "error during GetBooleanField";
+ return b;
+}
+
+bool IsNull(JNIEnv* jni, jobject obj) {
+ return jni->IsSameObject(obj, nullptr);
+}
+
+// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
+jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) {
+ jstring jstr = jni->NewStringUTF(native.c_str());
+ CHECK_EXCEPTION(jni) << "error during NewStringUTF";
+ return jstr;
+}
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
+ const char* chars = jni->GetStringUTFChars(j_string, nullptr);
+ CHECK_EXCEPTION(jni) << "Error during GetStringUTFChars";
+ std::string str(chars, jni->GetStringUTFLength(j_string));
+ CHECK_EXCEPTION(jni) << "Error during GetStringUTFLength";
+ jni->ReleaseStringUTFChars(j_string, chars);
+ CHECK_EXCEPTION(jni) << "Error during ReleaseStringUTFChars";
+ return str;
+}
+
+// Return the (singleton) Java Enum object corresponding to |index|.
+jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
+ const std::string& state_class_name, int index) {
+ jmethodID state_values_id = GetStaticMethodID(
+ jni, state_class, "values", ("()[L" + state_class_name + ";").c_str());
+ jobjectArray state_values = static_cast<jobjectArray>(
+ jni->CallStaticObjectMethod(state_class, state_values_id));
+ CHECK_EXCEPTION(jni) << "error during CallStaticObjectMethod";
+ jobject ret = jni->GetObjectArrayElement(state_values, index);
+ CHECK_EXCEPTION(jni) << "error during GetObjectArrayElement";
+ return ret;
+}
+
+std::string GetJavaEnumName(JNIEnv* jni,
+ const std::string& className,
+ jobject j_enum) {
+ jclass enumClass = FindClass(jni, className.c_str());
+ jmethodID nameMethod =
+ GetMethodID(jni, enumClass, "name", "()Ljava/lang/String;");
+ jstring name =
+ reinterpret_cast<jstring>(jni->CallObjectMethod(j_enum, nameMethod));
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod for " << className
+ << ".name";
+ return JavaToStdString(jni, name);
+}
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o) {
+ jobject ret = jni->NewGlobalRef(o);
+ CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
+ RTC_CHECK(ret);
+ return ret;
+}
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o) {
+ jni->DeleteGlobalRef(o);
+ CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
+}
+
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+ScopedLocalRefFrame::ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
+ RTC_CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
+}
+ScopedLocalRefFrame::~ScopedLocalRefFrame() {
+ jni_->PopLocalFrame(nullptr);
+}
+
+// Creates an iterator representing the end of any collection.
+Iterable::Iterator::Iterator() : iterator_(nullptr) {}
+
+// Creates an iterator pointing to the beginning of the specified collection.
+Iterable::Iterator::Iterator(JNIEnv* jni, jobject iterable) : jni_(jni) {
+ jclass j_class = GetObjectClass(jni, iterable);
+ jmethodID iterator_id =
+ GetMethodID(jni, j_class, "iterator", "()Ljava/util/Iterator;");
+ iterator_ = jni->CallObjectMethod(iterable, iterator_id);
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+ RTC_CHECK(iterator_ != nullptr);
+
+ jclass iterator_class = GetObjectClass(jni, iterator_);
+ has_next_id_ = GetMethodID(jni, iterator_class, "hasNext", "()Z");
+ next_id_ = GetMethodID(jni, iterator_class, "next", "()Ljava/lang/Object;");
+
+ // Start at the first element in the collection.
+ ++(*this);
+}
+
+// Move constructor - necessary to be able to return iterator types from
+// functions.
+Iterable::Iterator::Iterator(Iterator&& other)
+ : jni_(std::move(other.jni_)),
+ iterator_(std::move(other.iterator_)),
+ value_(std::move(other.value_)),
+ has_next_id_(std::move(other.has_next_id_)),
+ next_id_(std::move(other.next_id_)),
+      thread_checker_(std::move(other.thread_checker_)) {}
+
+// Advances the iterator one step.
+Iterable::Iterator& Iterable::Iterator::operator++() {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ if (AtEnd()) {
+ // Can't move past the end.
+ return *this;
+ }
+ bool has_next = jni_->CallBooleanMethod(iterator_, has_next_id_);
+ CHECK_EXCEPTION(jni_) << "error during CallBooleanMethod";
+ if (!has_next) {
+ iterator_ = nullptr;
+ value_ = nullptr;
+ return *this;
+ }
+
+ value_ = jni_->CallObjectMethod(iterator_, next_id_);
+ CHECK_EXCEPTION(jni_) << "error during CallObjectMethod";
+ return *this;
+}
+
+// Provides a way to compare the iterator with itself and with the end iterator.
+// Note: all other comparison results are undefined, just like for C++ input
+// iterators.
+bool Iterable::Iterator::operator==(const Iterable::Iterator& other) {
+ // Two different active iterators should never be compared.
+ RTC_DCHECK(this == &other || AtEnd() || other.AtEnd());
+ return AtEnd() == other.AtEnd();
+}
+
+jobject Iterable::Iterator::operator*() {
+ RTC_CHECK(!AtEnd());
+ return value_;
+}
+
+bool Iterable::Iterator::AtEnd() const {
+ RTC_CHECK(thread_checker_.CalledOnValidThread());
+ return jni_ == nullptr || IsNull(jni_, iterator_);
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/android/jni/jni_helpers.h b/webrtc/api/android/jni/jni_helpers.h
new file mode 100644
index 0000000..2832df1
--- /dev/null
+++ b/webrtc/api/android/jni/jni_helpers.h
@@ -0,0 +1,191 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains convenience functions and classes for JNI.
+// Before using any of the methods, InitGlobalJniVariables must be called.
+
+#ifndef WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
+#define WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
+
+#include <jni.h>
+#include <string>
+
+#include "webrtc/base/constructormagic.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/thread_checker.h"
+
+// Abort the process if |jni| has a Java exception pending.
+// This macro uses the comma operator to execute ExceptionDescribe
+// and ExceptionClear ignoring their return values and sending ""
+// to the error stream.
+#define CHECK_EXCEPTION(jni) \
+ RTC_CHECK(!jni->ExceptionCheck()) \
+ << (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
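+
+// For example:
+//   jni->CallVoidMethod(j_object, m);
+//   CHECK_EXCEPTION(jni) << "error during CallVoidMethod";
+// prints and clears any pending Java exception, then aborts with the message.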
+
+// Helper that calls ptr->Release() and aborts the process with a useful
+// message if that didn't actually delete *ptr because of extra refcounts.
+#define CHECK_RELEASE(ptr) \
+ RTC_CHECK_EQ(0, (ptr)->Release()) << "Unexpected refcount."
+
+namespace webrtc_jni {
+
+jint InitGlobalJniVariables(JavaVM *jvm);
+
+// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
+JNIEnv* GetEnv();
+
+JavaVM *GetJVM();
+
+// Return a |JNIEnv*| usable on this thread. Attaches to |g_jvm| if necessary.
+JNIEnv* AttachCurrentThreadIfNeeded();
+
+// Return a |jlong| that will correctly convert back to |ptr|. This is needed
+// because the alternative (of silently passing a 32-bit pointer to a vararg
+// function expecting a 64-bit param) picks up garbage in the high 32 bits.
+jlong jlongFromPointer(void* ptr);
+
+// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
+// found object/class/method/field is non-null.
+jmethodID GetMethodID(
+ JNIEnv* jni, jclass c, const std::string& name, const char* signature);
+
+jmethodID GetStaticMethodID(
+ JNIEnv* jni, jclass c, const char* name, const char* signature);
+
+jfieldID GetFieldID(JNIEnv* jni, jclass c, const char* name,
+ const char* signature);
+
+jclass GetObjectClass(JNIEnv* jni, jobject object);
+
+// Dies with an RTC_CHECK if the object field is null. Use
+// GetNullableObjectField when null is a legitimate value.
+jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id);
+
+jobject GetNullableObjectField(JNIEnv* jni, jobject object, jfieldID id);
+
+jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id);
+
+jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id);
+
+jint GetIntField(JNIEnv* jni, jobject object, jfieldID id);
+
+bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id);
+
+// Returns true if |obj| == null in Java.
+bool IsNull(JNIEnv* jni, jobject obj);
+
+// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
+jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native);
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
+
+// Return the (singleton) Java Enum object corresponding to |index|.
+jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
+ const std::string& state_class_name, int index);
+
+// Returns the name of a Java enum.
+std::string GetJavaEnumName(JNIEnv* jni,
+ const std::string& className,
+ jobject j_enum);
+
+jobject NewGlobalRef(JNIEnv* jni, jobject o);
+
+void DeleteGlobalRef(JNIEnv* jni, jobject o);
+
+// Scope Java local references to the lifetime of this object. Use in all C++
+// callbacks (i.e. entry points that don't originate in a Java callstack
+// through a "native" method call).
+class ScopedLocalRefFrame {
+ public:
+ explicit ScopedLocalRefFrame(JNIEnv* jni);
+ ~ScopedLocalRefFrame();
+
+ private:
+ JNIEnv* jni_;
+};
+
+// Scoped holder for global Java refs.
+template<class T> // T is jclass, jobject, jintArray, etc.
+class ScopedGlobalRef {
+ public:
+ ScopedGlobalRef(JNIEnv* jni, T obj)
+ : obj_(static_cast<T>(jni->NewGlobalRef(obj))) {}
+ ~ScopedGlobalRef() {
+ DeleteGlobalRef(AttachCurrentThreadIfNeeded(), obj_);
+ }
+ T operator*() const {
+ return obj_;
+ }
+ private:
+ T obj_;
+};
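+
+// For example (|ctor_id| stands for a previously looked-up constructor id):
+//   ScopedGlobalRef<jclass> j_observer_class(
+//       jni, FindClass(jni, "org/webrtc/VideoCapturer$NativeObserver"));
+//   jobject observer = jni->NewObject(*j_observer_class, ctor_id, ...);
+// The global ref is released when |j_observer_class| goes out of scope.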
+
+// Provides a convenient way to iterate over a Java Iterable using the
+// C++ range-for loop.
+// E.g. for (jobject value : Iterable(jni, j_iterable)) { ... }
+// Note: Since Java iterators cannot be duplicated, the iterator class is not
+// copyable to prevent creating multiple C++ iterators that refer to the same
+// Java iterator.
+class Iterable {
+ public:
+ Iterable(JNIEnv* jni, jobject iterable) : jni_(jni), iterable_(iterable) {}
+
+ class Iterator {
+ public:
+ // Creates an iterator representing the end of any collection.
+ Iterator();
+ // Creates an iterator pointing to the beginning of the specified
+ // collection.
+ Iterator(JNIEnv* jni, jobject iterable);
+
+ // Move constructor - necessary to be able to return iterator types from
+ // functions.
+ Iterator(Iterator&& other);
+
+ // Move assignment should not be used.
+ Iterator& operator=(Iterator&&) = delete;
+
+ // Advances the iterator one step.
+ Iterator& operator++();
+
+ // Provides a way to compare the iterator with itself and with the end
+ // iterator.
+ // Note: all other comparison results are undefined, just like for C++ input
+ // iterators.
+ bool operator==(const Iterator& other);
+ bool operator!=(const Iterator& other) { return !(*this == other); }
+ jobject operator*();
+
+ private:
+ bool AtEnd() const;
+
+ JNIEnv* jni_ = nullptr;
+ jobject iterator_ = nullptr;
+ jobject value_ = nullptr;
+ jmethodID has_next_id_ = nullptr;
+ jmethodID next_id_ = nullptr;
+ rtc::ThreadChecker thread_checker_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(Iterator);
+ };
+
+ Iterable::Iterator begin() { return Iterable::Iterator(jni_, iterable_); }
+ Iterable::Iterator end() { return Iterable::Iterator(); }
+
+ private:
+ JNIEnv* jni_;
+ jobject iterable_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(Iterable);
+};
+
+} // namespace webrtc_jni
+
+#endif // WEBRTC_API_JAVA_JNI_JNI_HELPERS_H_
diff --git a/webrtc/api/android/jni/jni_onload.cc b/webrtc/api/android/jni/jni_onload.cc
new file mode 100644
index 0000000..092e5c7
--- /dev/null
+++ b/webrtc/api/android/jni/jni_onload.cc
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "webrtc/api/android/jni/classreferenceholder.h"
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/base/ssladapter.h"
+
+namespace webrtc_jni {
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
+ jint ret = InitGlobalJniVariables(jvm);
+ RTC_DCHECK_GE(ret, 0);
+ if (ret < 0)
+ return -1;
+
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+ LoadGlobalClassReferenceHolder();
+
+ return ret;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
+ FreeGlobalClassReferenceHolder();
+ RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/android/jni/native_handle_impl.cc b/webrtc/api/android/jni/native_handle_impl.cc
new file mode 100644
index 0000000..f5076d6
--- /dev/null
+++ b/webrtc/api/android/jni/native_handle_impl.cc
@@ -0,0 +1,215 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/api/android/jni/native_handle_impl.h"
+
+#include <memory>
+
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/keep_ref_until_done.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+
+using webrtc::NativeHandleBuffer;
+
+namespace webrtc_jni {
+
+Matrix::Matrix(JNIEnv* jni, jfloatArray a) {
+ RTC_CHECK_EQ(16, jni->GetArrayLength(a));
+ jfloat* ptr = jni->GetFloatArrayElements(a, nullptr);
+ for (int i = 0; i < 16; ++i) {
+ elem_[i] = ptr[i];
+ }
+ jni->ReleaseFloatArrayElements(a, ptr, 0);
+}
+
+jfloatArray Matrix::ToJava(JNIEnv* jni) {
+ jfloatArray matrix = jni->NewFloatArray(16);
+ jni->SetFloatArrayRegion(matrix, 0, 16, elem_);
+ return matrix;
+}
+
+void Matrix::Rotate(webrtc::VideoRotation rotation) {
+  // Texture coordinates are in the range 0 to 1. The transformation of the
+  // last row in each rotation matrix is needed for proper translation, e.g.,
+  // to mirror x, we don't replace x by -x, but by 1-x.
+ switch (rotation) {
+ case webrtc::kVideoRotation_0:
+ break;
+ case webrtc::kVideoRotation_90: {
+ const float ROTATE_90[16] =
+ { elem_[4], elem_[5], elem_[6], elem_[7],
+ -elem_[0], -elem_[1], -elem_[2], -elem_[3],
+ elem_[8], elem_[9], elem_[10], elem_[11],
+ elem_[0] + elem_[12], elem_[1] + elem_[13],
+ elem_[2] + elem_[14], elem_[3] + elem_[15]};
+ memcpy(elem_, ROTATE_90, sizeof(elem_));
+ } break;
+ case webrtc::kVideoRotation_180: {
+ const float ROTATE_180[16] =
+ { -elem_[0], -elem_[1], -elem_[2], -elem_[3],
+ -elem_[4], -elem_[5], -elem_[6], -elem_[7],
+ elem_[8], elem_[9], elem_[10], elem_[11],
+ elem_[0] + elem_[4] + elem_[12], elem_[1] + elem_[5] + elem_[13],
+          elem_[2] + elem_[6] + elem_[14], elem_[3] + elem_[7] + elem_[15]};
+ memcpy(elem_, ROTATE_180, sizeof(elem_));
+ } break;
+ case webrtc::kVideoRotation_270: {
+ const float ROTATE_270[16] =
+ { -elem_[4], -elem_[5], -elem_[6], -elem_[7],
+ elem_[0], elem_[1], elem_[2], elem_[3],
+ elem_[8], elem_[9], elem_[10], elem_[11],
+ elem_[4] + elem_[12], elem_[5] + elem_[13],
+ elem_[6] + elem_[14], elem_[7] + elem_[15]};
+ memcpy(elem_, ROTATE_270, sizeof(elem_));
+ } break;
+ }
+}
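+
+// Sanity check of the 90 degree case: starting from the identity matrix,
+// Rotate(webrtc::kVideoRotation_90) yields the column-major matrix
+//   [ 0 -1  0  1 ]
+//   [ 1  0  0  0 ]
+//   [ 0  0  1  0 ]
+//   [ 0  0  0  1 ]
+// which maps (u, v) to (1 - v, u): a quarter-turn of the unit texture square
+// that keeps coordinates in the range 0 to 1.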
+
+// Calculates result = a * b, in column-major order.
+void Matrix::Multiply(const float a[16], const float b[16], float result[16]) {
+ for (int i = 0; i < 4; ++i) {
+ for (int j = 0; j < 4; ++j) {
+ float sum = 0;
+ for (int k = 0; k < 4; ++k) {
+ sum += a[k * 4 + j] * b[i * 4 + k];
+ }
+ result[i * 4 + j] = sum;
+ }
+ }
+}
+
+// Center crop by keeping xFraction of the width and yFraction of the height,
+// so e.g. cropping from 640x480 to 640x360 would use
+// xFraction=1, yFraction=360/480.
+void Matrix::Crop(float xFraction,
+ float yFraction,
+ float xOffset,
+ float yOffset) {
+ const float crop_matrix[16] =
+ {xFraction, 0, 0, 0,
+ 0, yFraction, 0, 0,
+ 0, 0, 1, 0,
+ xOffset, yOffset, 0, 1};
+ const Matrix old = *this;
+ Multiply(crop_matrix, old.elem_, this->elem_);
+}
+
+// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
+static const int kBufferAlignment = 64;
+
+NativeHandleImpl::NativeHandleImpl(int id, const Matrix& matrix)
+ : oes_texture_id(id), sampling_matrix(matrix) {}
+
+NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
+ jint j_oes_texture_id,
+ jfloatArray j_transform_matrix)
+ : oes_texture_id(j_oes_texture_id),
+ sampling_matrix(jni, j_transform_matrix) {}
+
+AndroidTextureBuffer::AndroidTextureBuffer(
+ int width,
+ int height,
+ const NativeHandleImpl& native_handle,
+ jobject surface_texture_helper,
+ const rtc::Callback0<void>& no_longer_used)
+ : webrtc::NativeHandleBuffer(&native_handle_, width, height),
+ native_handle_(native_handle),
+ surface_texture_helper_(surface_texture_helper),
+ no_longer_used_cb_(no_longer_used) {}
+
+AndroidTextureBuffer::~AndroidTextureBuffer() {
+ no_longer_used_cb_();
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+AndroidTextureBuffer::NativeToI420Buffer() {
+ int uv_width = (width()+7) / 8;
+ int stride = 8 * uv_width;
+ int uv_height = (height()+1)/2;
+ size_t size = stride * (height() + uv_height);
+ // The data is owned by the frame, and the normal case is that the
+ // data is deleted by the frame's destructor callback.
+ //
+ // TODO(nisse): Use an I420BufferPool. We then need to extend that
+ // class, and I420Buffer, to support our memory layout.
+ std::unique_ptr<uint8_t, webrtc::AlignedFreeDeleter> yuv_data(
+ static_cast<uint8_t*>(webrtc::AlignedMalloc(size, kBufferAlignment)));
+ // See SurfaceTextureHelper.java for the required layout.
+ uint8_t* y_data = yuv_data.get();
+ uint8_t* u_data = y_data + height() * stride;
+ uint8_t* v_data = u_data + stride/2;
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> copy =
+ new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
+ width(), height(),
+ y_data, stride,
+ u_data, stride,
+ v_data, stride,
+ rtc::Bind(&webrtc::AlignedFree, yuv_data.release()));
+
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+
+ jmethodID transform_mid = GetMethodID(
+ jni,
+ GetObjectClass(jni, surface_texture_helper_),
+ "textureToYUV",
+ "(Ljava/nio/ByteBuffer;IIII[F)V");
+
+ jobject byte_buffer = jni->NewDirectByteBuffer(y_data, size);
+
+ jfloatArray sampling_matrix = native_handle_.sampling_matrix.ToJava(jni);
+ jni->CallVoidMethod(surface_texture_helper_,
+ transform_mid,
+ byte_buffer, width(), height(), stride,
+ native_handle_.oes_texture_id, sampling_matrix);
+ CHECK_EXCEPTION(jni) << "textureToYUV throwed an exception";
+
+ return copy;
+}
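+
+// The buffer filled in above is laid out as follows (stride is width rounded
+// up to a multiple of 8):
+//
+//   y_data: |height| rows of |stride| bytes (Y plane)
+//   u_data = y_data + height * stride
+//   v_data = u_data + stride / 2
+//
+// i.e. each of the (height + 1) / 2 chroma rows holds stride / 2 bytes of U
+// followed by stride / 2 bytes of V, with U and V sharing the same row stride.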
+
+rtc::scoped_refptr<AndroidTextureBuffer>
+AndroidTextureBuffer::CropScaleAndRotate(int cropped_width,
+ int cropped_height,
+ int crop_x,
+ int crop_y,
+ int dst_width,
+ int dst_height,
+ webrtc::VideoRotation rotation) {
+ if (cropped_width == dst_width && cropped_height == dst_height &&
+ width() == dst_width && height() == dst_height &&
+ rotation == webrtc::kVideoRotation_0) {
+ return this;
+ }
+ int rotated_width = (rotation % 180 == 0) ? dst_width : dst_height;
+ int rotated_height = (rotation % 180 == 0) ? dst_height : dst_width;
+
+  // Here we use Bind magic to keep a reference to |this| alive until the
+  // newly created AndroidTextureBuffer is destroyed.
+ rtc::scoped_refptr<AndroidTextureBuffer> buffer(
+ new rtc::RefCountedObject<AndroidTextureBuffer>(
+ rotated_width, rotated_height, native_handle_,
+ surface_texture_helper_, rtc::KeepRefUntilDone(this)));
+
+ if (cropped_width != width() || cropped_height != height()) {
+ buffer->native_handle_.sampling_matrix.Crop(
+ cropped_width / static_cast<float>(width()),
+ cropped_height / static_cast<float>(height()),
+ crop_x / static_cast<float>(width()),
+ crop_y / static_cast<float>(height()));
+ }
+ buffer->native_handle_.sampling_matrix.Rotate(rotation);
+ return buffer;
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/android/jni/native_handle_impl.h b/webrtc/api/android/jni/native_handle_impl.h
new file mode 100644
index 0000000..0d01532
--- /dev/null
+++ b/webrtc/api/android/jni/native_handle_impl.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
+#define WEBRTC_API_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
+
+#include <jni.h>
+
+#include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/common_video/rotation.h"
+
+namespace webrtc_jni {
+
+// OpenGL texture matrix, in column-major order. Operations are
+// in-place.
+class Matrix {
+ public:
+ Matrix(JNIEnv* jni, jfloatArray a);
+
+ jfloatArray ToJava(JNIEnv* jni);
+
+ // Crop arguments are relative to original size.
+ void Crop(float cropped_width,
+ float cropped_height,
+ float crop_x,
+ float crop_y);
+
+ void Rotate(webrtc::VideoRotation rotation);
+
+ private:
+ static void Multiply(const float a[16], const float b[16], float result[16]);
+ float elem_[16];
+};
+
+// Wrapper for texture object.
+struct NativeHandleImpl {
+ NativeHandleImpl(JNIEnv* jni,
+ jint j_oes_texture_id,
+ jfloatArray j_transform_matrix);
+
+ NativeHandleImpl(int id, const Matrix& matrix);
+
+ const int oes_texture_id;
+ Matrix sampling_matrix;
+};
+
+class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
+ public:
+ AndroidTextureBuffer(int width,
+ int height,
+ const NativeHandleImpl& native_handle,
+ jobject surface_texture_helper,
+ const rtc::Callback0<void>& no_longer_used);
+ ~AndroidTextureBuffer();
+ rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+
+ // First crop, then scale to dst resolution, and then rotate.
+ rtc::scoped_refptr<AndroidTextureBuffer> CropScaleAndRotate(
+ int cropped_width,
+ int cropped_height,
+ int crop_x,
+ int crop_y,
+ int dst_width,
+ int dst_height,
+ webrtc::VideoRotation rotation);
+
+ private:
+ NativeHandleImpl native_handle_;
+ // Raw object pointer, relying on the caller, i.e.,
+ // AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep
+ // a global reference. TODO(nisse): Make this a reference to the C++
+ // SurfaceTextureHelper instead, but that requires some refactoring
+ // of AndroidVideoCapturerJni.
+ jobject surface_texture_helper_;
+ rtc::Callback0<void> no_longer_used_cb_;
+};
+
+} // namespace webrtc_jni
+
+#endif  // WEBRTC_API_ANDROID_JNI_NATIVE_HANDLE_IMPL_H_
diff --git a/webrtc/api/android/jni/peerconnection_jni.cc b/webrtc/api/android/jni/peerconnection_jni.cc
new file mode 100644
index 0000000..b92e31a
--- /dev/null
+++ b/webrtc/api/android/jni/peerconnection_jni.cc
@@ -0,0 +1,2300 @@
+/*
+ * Copyright 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Hints for future visitors:
+// This entire file is an implementation detail of the org.webrtc Java package,
+// the most interesting bits of which are org.webrtc.PeerConnection{,Factory}.
+// The layout of this file is roughly:
+// - various helper C++ functions & classes that wrap Java counterparts and
+// expose a C++ interface that can be passed to the C++ PeerConnection APIs
+// - implementations of methods declared "static" in the Java package (named
+// things like Java_org_webrtc_OMG_Can_This_Name_Be_Any_Longer, prescribed by
+// the JNI spec).
+//
+// Lifecycle notes: objects are owned where they will be called; in other words,
+// FooObservers are owned by C++-land, and user-callable objects (e.g.
+// PeerConnection and VideoTrack) are owned by Java-land.
+// When this file allocates C++ RefCountInterfaces it AddRef()s an artificial
+// ref simulating the jlong held in Java-land, and then Release()s the ref in
+// the respective free call. Sometimes this AddRef is implicit in the
+// construction of a scoped_refptr<> which is then .release()d.
+// Any persistent (non-local) references from C++ to Java must be global or weak
+// (in which case they must be checked before use)!
+//
+// Exception notes: pretty much all JNI calls can throw Java exceptions, so each
+// call through a JNIEnv* pointer needs to be followed by an ExceptionCheck()
+// call. In this file this is done in CHECK_EXCEPTION, making for much easier
+// debugging in case of failure (the alternative is to wait for control to
+// return to the Java frame that called code in this file, at which point it's
+// impossible to tell which JNI call broke).
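+//
+// For example (illustrative; names hypothetical), a typical dispatch into Java
+// in this file looks like:
+//   jmethodID m = GetMethodID(jni, *j_observer_class_, "onEvent", "()V");
+//   jni->CallVoidMethod(*j_observer_global_, m);
+//   CHECK_EXCEPTION(jni) << "error during CallVoidMethod";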
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include <limits>
+#include <memory>
+#include <utility>
+
+#include "webrtc/api/androidvideocapturer.h"
+#include "webrtc/api/dtlsidentitystore.h"
+#include "webrtc/api/android/jni/androidmediadecoder_jni.h"
+#include "webrtc/api/android/jni/androidmediaencoder_jni.h"
+#include "webrtc/api/android/jni/androidnetworkmonitor_jni.h"
+#include "webrtc/api/android/jni/androidvideocapturer_jni.h"
+#include "webrtc/api/android/jni/classreferenceholder.h"
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/api/android/jni/native_handle_impl.h"
+#include "webrtc/api/mediaconstraintsinterface.h"
+#include "webrtc/api/peerconnectioninterface.h"
+#include "webrtc/api/rtpreceiverinterface.h"
+#include "webrtc/api/rtpsenderinterface.h"
+#include "webrtc/api/webrtcsdp.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/event_tracer.h"
+#include "webrtc/base/logging.h"
+#include "webrtc/base/logsinks.h"
+#include "webrtc/base/messagequeue.h"
+#include "webrtc/base/networkmonitor.h"
+#include "webrtc/base/rtccertificategenerator.h"
+#include "webrtc/base/ssladapter.h"
+#include "webrtc/base/stringutils.h"
+#include "webrtc/media/base/videocapturer.h"
+#include "webrtc/media/engine/webrtcvideodecoderfactory.h"
+#include "webrtc/media/engine/webrtcvideoencoderfactory.h"
+#include "webrtc/system_wrappers/include/field_trial_default.h"
+#include "webrtc/system_wrappers/include/logcat_trace_context.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+
+using cricket::WebRtcVideoDecoderFactory;
+using cricket::WebRtcVideoEncoderFactory;
+using rtc::Bind;
+using rtc::Thread;
+using rtc::ThreadManager;
+using webrtc::AudioSourceInterface;
+using webrtc::AudioTrackInterface;
+using webrtc::AudioTrackVector;
+using webrtc::CreateSessionDescriptionObserver;
+using webrtc::DataBuffer;
+using webrtc::DataChannelInit;
+using webrtc::DataChannelInterface;
+using webrtc::DataChannelObserver;
+using webrtc::IceCandidateInterface;
+using webrtc::LogcatTraceContext;
+using webrtc::MediaConstraintsInterface;
+using webrtc::MediaSourceInterface;
+using webrtc::MediaStreamInterface;
+using webrtc::MediaStreamTrackInterface;
+using webrtc::PeerConnectionFactoryInterface;
+using webrtc::PeerConnectionInterface;
+using webrtc::PeerConnectionObserver;
+using webrtc::RtpReceiverInterface;
+using webrtc::RtpSenderInterface;
+using webrtc::SessionDescriptionInterface;
+using webrtc::SetSessionDescriptionObserver;
+using webrtc::StatsObserver;
+using webrtc::StatsReport;
+using webrtc::StatsReports;
+using webrtc::VideoTrackInterface;
+using webrtc::VideoTrackSourceInterface;
+using webrtc::VideoTrackVector;
+using webrtc::kVideoCodecVP8;
+
+namespace webrtc_jni {
+
+// Field trials initialization string.
+static char* field_trials_init_string = NULL;
+
+// Set in PeerConnectionFactory_initializeAndroidGlobals().
+static bool factory_static_initialized = false;
+static bool video_hw_acceleration_enabled = true;
+
+// Return the (singleton) Java Enum object corresponding to |index|;
+// |state_class_fragment| is something like "MediaSource$State".
+static jobject JavaEnumFromIndex(
+ JNIEnv* jni, const std::string& state_class_fragment, int index) {
+ const std::string state_class = "org/webrtc/" + state_class_fragment;
+ return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
+ state_class, index);
+}
+
+static DataChannelInit JavaDataChannelInitToNative(
+ JNIEnv* jni, jobject j_init) {
+ DataChannelInit init;
+
+ jclass j_init_class = FindClass(jni, "org/webrtc/DataChannel$Init");
+ jfieldID ordered_id = GetFieldID(jni, j_init_class, "ordered", "Z");
+ jfieldID max_retransmit_time_id =
+ GetFieldID(jni, j_init_class, "maxRetransmitTimeMs", "I");
+ jfieldID max_retransmits_id =
+ GetFieldID(jni, j_init_class, "maxRetransmits", "I");
+ jfieldID protocol_id =
+ GetFieldID(jni, j_init_class, "protocol", "Ljava/lang/String;");
+ jfieldID negotiated_id = GetFieldID(jni, j_init_class, "negotiated", "Z");
+ jfieldID id_id = GetFieldID(jni, j_init_class, "id", "I");
+
+ init.ordered = GetBooleanField(jni, j_init, ordered_id);
+ init.maxRetransmitTime = GetIntField(jni, j_init, max_retransmit_time_id);
+ init.maxRetransmits = GetIntField(jni, j_init, max_retransmits_id);
+ init.protocol = JavaToStdString(
+ jni, GetStringField(jni, j_init, protocol_id));
+ init.negotiated = GetBooleanField(jni, j_init, negotiated_id);
+ init.id = GetIntField(jni, j_init, id_id);
+
+ return init;
+}
+
+class ConstraintsWrapper;
+
+// Adapter between the C++ PeerConnectionObserver interface and the Java
+// PeerConnection.Observer interface. Wraps an instance of the Java interface
+// and dispatches C++ callbacks to Java.
+class PCOJava : public PeerConnectionObserver {
+ public:
+ // We need these using declarations because there are two versions of each of
+ // the below methods and we only override one of them.
+ // TODO(deadbeef): Remove once there's only one version of the methods.
+ using PeerConnectionObserver::OnAddStream;
+ using PeerConnectionObserver::OnRemoveStream;
+ using PeerConnectionObserver::OnDataChannel;
+
+ PCOJava(JNIEnv* jni, jobject j_observer)
+ : j_observer_global_(jni, j_observer),
+ j_observer_class_(jni, GetObjectClass(jni, *j_observer_global_)),
+ j_media_stream_class_(jni, FindClass(jni, "org/webrtc/MediaStream")),
+ j_media_stream_ctor_(GetMethodID(
+ jni, *j_media_stream_class_, "<init>", "(J)V")),
+ j_audio_track_class_(jni, FindClass(jni, "org/webrtc/AudioTrack")),
+ j_audio_track_ctor_(GetMethodID(
+ jni, *j_audio_track_class_, "<init>", "(J)V")),
+ j_video_track_class_(jni, FindClass(jni, "org/webrtc/VideoTrack")),
+ j_video_track_ctor_(GetMethodID(
+ jni, *j_video_track_class_, "<init>", "(J)V")),
+ j_data_channel_class_(jni, FindClass(jni, "org/webrtc/DataChannel")),
+ j_data_channel_ctor_(GetMethodID(
+ jni, *j_data_channel_class_, "<init>", "(J)V")) {
+ }
+
+ virtual ~PCOJava() {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ while (!remote_streams_.empty())
+ DisposeRemoteStream(remote_streams_.begin());
+ }
+
+ void OnIceCandidate(const IceCandidateInterface* candidate) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ std::string sdp;
+ RTC_CHECK(candidate->ToString(&sdp)) << "got so far: " << sdp;
+ jclass candidate_class = FindClass(jni(), "org/webrtc/IceCandidate");
+ jmethodID ctor = GetMethodID(jni(), candidate_class,
+ "<init>", "(Ljava/lang/String;ILjava/lang/String;)V");
+ jstring j_mid = JavaStringFromStdString(jni(), candidate->sdp_mid());
+ jstring j_sdp = JavaStringFromStdString(jni(), sdp);
+ jobject j_candidate = jni()->NewObject(candidate_class, ctor, j_mid,
+ candidate->sdp_mline_index(), j_sdp);
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+ jmethodID m = GetMethodID(jni(), *j_observer_class_,
+ "onIceCandidate", "(Lorg/webrtc/IceCandidate;)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, j_candidate);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnIceCandidatesRemoved(
+ const std::vector<cricket::Candidate>& candidates) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobjectArray candidates_array = ToJavaCandidateArray(jni(), candidates);
+ jmethodID m =
+ GetMethodID(jni(), *j_observer_class_, "onIceCandidatesRemoved",
+ "([Lorg/webrtc/IceCandidate;)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, candidates_array);
+ CHECK_EXCEPTION(jni()) << "Error during CallVoidMethod";
+ }
+
+ void OnSignalingChange(
+ PeerConnectionInterface::SignalingState new_state) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(
+ jni(), *j_observer_class_, "onSignalingChange",
+ "(Lorg/webrtc/PeerConnection$SignalingState;)V");
+ jobject new_state_enum =
+ JavaEnumFromIndex(jni(), "PeerConnection$SignalingState", new_state);
+ jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnIceConnectionChange(
+ PeerConnectionInterface::IceConnectionState new_state) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(
+ jni(), *j_observer_class_, "onIceConnectionChange",
+ "(Lorg/webrtc/PeerConnection$IceConnectionState;)V");
+ jobject new_state_enum = JavaEnumFromIndex(
+ jni(), "PeerConnection$IceConnectionState", new_state);
+ jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnIceConnectionReceivingChange(bool receiving) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(
+ jni(), *j_observer_class_, "onIceConnectionReceivingChange", "(Z)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, receiving);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnIceGatheringChange(
+ PeerConnectionInterface::IceGatheringState new_state) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(
+ jni(), *j_observer_class_, "onIceGatheringChange",
+ "(Lorg/webrtc/PeerConnection$IceGatheringState;)V");
+ jobject new_state_enum = JavaEnumFromIndex(
+ jni(), "PeerConnection$IceGatheringState", new_state);
+ jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ // Java MediaStream holds one reference. Corresponding Release() is in
+ // MediaStream_free, triggered by MediaStream.dispose().
+ stream->AddRef();
+ jobject j_stream =
+ jni()->NewObject(*j_media_stream_class_, j_media_stream_ctor_,
+ reinterpret_cast<jlong>(stream.get()));
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+ for (const auto& track : stream->GetAudioTracks()) {
+ jstring id = JavaStringFromStdString(jni(), track->id());
+ // Java AudioTrack holds one reference. Corresponding Release() is in
+ // MediaStreamTrack_free, triggered by AudioTrack.dispose().
+ track->AddRef();
+ jobject j_track =
+ jni()->NewObject(*j_audio_track_class_, j_audio_track_ctor_,
+ reinterpret_cast<jlong>(track.get()), id);
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+ jfieldID audio_tracks_id = GetFieldID(jni(),
+ *j_media_stream_class_,
+ "audioTracks",
+ "Ljava/util/LinkedList;");
+ jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
+ jmethodID add = GetMethodID(jni(),
+ GetObjectClass(jni(), audio_tracks),
+ "add",
+ "(Ljava/lang/Object;)Z");
+ jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track);
+ CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
+ RTC_CHECK(added);
+ }
+
+ for (const auto& track : stream->GetVideoTracks()) {
+ jstring id = JavaStringFromStdString(jni(), track->id());
+ // Java VideoTrack holds one reference. Corresponding Release() is in
+ // MediaStreamTrack_free, triggered by VideoTrack.dispose().
+ track->AddRef();
+ jobject j_track =
+ jni()->NewObject(*j_video_track_class_, j_video_track_ctor_,
+ reinterpret_cast<jlong>(track.get()), id);
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+ jfieldID video_tracks_id = GetFieldID(jni(),
+ *j_media_stream_class_,
+ "videoTracks",
+ "Ljava/util/LinkedList;");
+ jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
+ jmethodID add = GetMethodID(jni(),
+ GetObjectClass(jni(), video_tracks),
+ "add",
+ "(Ljava/lang/Object;)Z");
+ jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track);
+ CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
+ RTC_CHECK(added);
+ }
+ remote_streams_[stream] = NewGlobalRef(jni(), j_stream);
+
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "onAddStream",
+ "(Lorg/webrtc/MediaStream;)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnRemoveStream(
+ rtc::scoped_refptr<MediaStreamInterface> stream) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream);
+ RTC_CHECK(it != remote_streams_.end()) << "unexpected stream: " << std::hex
+ << stream;
+ jobject j_stream = it->second;
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "onRemoveStream",
+ "(Lorg/webrtc/MediaStream;)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ // Release the refptr reference so that DisposeRemoteStream can assert
+ // it removes the final reference.
+ stream = nullptr;
+ DisposeRemoteStream(it);
+ }
+
+ void OnDataChannel(
+ rtc::scoped_refptr<DataChannelInterface> channel) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobject j_channel = jni()->NewObject(
+ *j_data_channel_class_, j_data_channel_ctor_, (jlong)channel.get());
+ CHECK_EXCEPTION(jni()) << "error during NewObject";
+
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "onDataChannel",
+ "(Lorg/webrtc/DataChannel;)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, j_channel);
+
+ // Channel is now owned by Java object, and will be freed from
+ // DataChannel.dispose(). Important that this be done _after_ the
+ // CallVoidMethod above as Java code might call back into native code and be
+ // surprised to see a refcount of 2.
+ int bumped_count = channel->AddRef();
+ RTC_CHECK(bumped_count == 2) << "Unexpected refcount OnDataChannel";
+
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnRenegotiationNeeded() override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m =
+ GetMethodID(jni(), *j_observer_class_, "onRenegotiationNeeded", "()V");
+ jni()->CallVoidMethod(*j_observer_global_, m);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void SetConstraints(ConstraintsWrapper* constraints) {
+ RTC_CHECK(!constraints_.get()) << "constraints already set!";
+ constraints_.reset(constraints);
+ }
+
+ const ConstraintsWrapper* constraints() { return constraints_.get(); }
+
+ private:
+ typedef std::map<MediaStreamInterface*, jobject> NativeToJavaStreamsMap;
+
+ void DisposeRemoteStream(const NativeToJavaStreamsMap::iterator& it) {
+ jobject j_stream = it->second;
+ remote_streams_.erase(it);
+ jni()->CallVoidMethod(
+ j_stream, GetMethodID(jni(), *j_media_stream_class_, "dispose", "()V"));
+ CHECK_EXCEPTION(jni()) << "error during MediaStream.dispose()";
+ DeleteGlobalRef(jni(), j_stream);
+ }
+
+ jobject ToJavaCandidate(JNIEnv* jni,
+ jclass* candidate_class,
+ const cricket::Candidate& candidate) {
+ std::string sdp = webrtc::SdpSerializeCandidate(candidate);
+ RTC_CHECK(!sdp.empty()) << "got an empty ICE candidate";
+ jmethodID ctor = GetMethodID(jni, *candidate_class, "<init>",
+ "(Ljava/lang/String;ILjava/lang/String;)V");
+ jstring j_mid = JavaStringFromStdString(jni, candidate.transport_name());
+ jstring j_sdp = JavaStringFromStdString(jni, sdp);
+ // sdp_mline_index is not used; pass an invalid value of -1.
+ jobject j_candidate =
+ jni->NewObject(*candidate_class, ctor, j_mid, -1, j_sdp);
+ CHECK_EXCEPTION(jni) << "error during Java Candidate NewObject";
+ return j_candidate;
+ }
+
+ jobjectArray ToJavaCandidateArray(
+ JNIEnv* jni,
+ const std::vector<cricket::Candidate>& candidates) {
+ jclass candidate_class = FindClass(jni, "org/webrtc/IceCandidate");
+ jobjectArray java_candidates =
+ jni->NewObjectArray(candidates.size(), candidate_class, NULL);
+ int i = 0;
+ for (const cricket::Candidate& candidate : candidates) {
+ jobject j_candidate = ToJavaCandidate(jni, &candidate_class, candidate);
+ jni->SetObjectArrayElement(java_candidates, i++, j_candidate);
+ }
+ return java_candidates;
+ }
+
+ JNIEnv* jni() {
+ return AttachCurrentThreadIfNeeded();
+ }
+
+ const ScopedGlobalRef<jobject> j_observer_global_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+ const ScopedGlobalRef<jclass> j_media_stream_class_;
+ const jmethodID j_media_stream_ctor_;
+ const ScopedGlobalRef<jclass> j_audio_track_class_;
+ const jmethodID j_audio_track_ctor_;
+ const ScopedGlobalRef<jclass> j_video_track_class_;
+ const jmethodID j_video_track_ctor_;
+ const ScopedGlobalRef<jclass> j_data_channel_class_;
+ const jmethodID j_data_channel_ctor_;
+ // C++ -> Java remote streams. The stored jobjects are global refs and must be
+ // manually deleted upon removal. Use DisposeRemoteStream().
+ NativeToJavaStreamsMap remote_streams_;
+ std::unique_ptr<ConstraintsWrapper> constraints_;
+};
+
+// Wrapper for a Java MediaConstraints object. Copies all needed data so when
+// the constructor returns the Java object is no longer needed.
+class ConstraintsWrapper : public MediaConstraintsInterface {
+ public:
+ ConstraintsWrapper(JNIEnv* jni, jobject j_constraints) {
+ PopulateConstraintsFromJavaPairList(
+ jni, j_constraints, "mandatory", &mandatory_);
+ PopulateConstraintsFromJavaPairList(
+ jni, j_constraints, "optional", &optional_);
+ }
+
+ virtual ~ConstraintsWrapper() {}
+
+ // MediaConstraintsInterface.
+ const Constraints& GetMandatory() const override { return mandatory_; }
+
+ const Constraints& GetOptional() const override { return optional_; }
+
+ private:
+ // Helper for translating a List<Pair<String, String>> to a Constraints.
+ static void PopulateConstraintsFromJavaPairList(
+ JNIEnv* jni, jobject j_constraints,
+ const char* field_name, Constraints* field) {
+ jfieldID j_id = GetFieldID(jni,
+ GetObjectClass(jni, j_constraints), field_name, "Ljava/util/List;");
+ jobject j_list = GetObjectField(jni, j_constraints, j_id);
+ for (jobject entry : Iterable(jni, j_list)) {
+ jmethodID get_key = GetMethodID(jni,
+ GetObjectClass(jni, entry), "getKey", "()Ljava/lang/String;");
+ jstring j_key = reinterpret_cast<jstring>(
+ jni->CallObjectMethod(entry, get_key));
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+ jmethodID get_value = GetMethodID(jni,
+ GetObjectClass(jni, entry), "getValue", "()Ljava/lang/String;");
+ jstring j_value = reinterpret_cast<jstring>(
+ jni->CallObjectMethod(entry, get_value));
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+ field->push_back(Constraint(JavaToStdString(jni, j_key),
+ JavaToStdString(jni, j_value)));
+ }
+ }
+
+ Constraints mandatory_;
+ Constraints optional_;
+};
+
+static jobject JavaSdpFromNativeSdp(
+ JNIEnv* jni, const SessionDescriptionInterface* desc) {
+ std::string sdp;
+ RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
+ jstring j_description = JavaStringFromStdString(jni, sdp);
+
+ jclass j_type_class = FindClass(
+ jni, "org/webrtc/SessionDescription$Type");
+ jmethodID j_type_from_canonical = GetStaticMethodID(
+ jni, j_type_class, "fromCanonicalForm",
+ "(Ljava/lang/String;)Lorg/webrtc/SessionDescription$Type;");
+ jstring j_type_string = JavaStringFromStdString(jni, desc->type());
+ jobject j_type = jni->CallStaticObjectMethod(
+ j_type_class, j_type_from_canonical, j_type_string);
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+
+ jclass j_sdp_class = FindClass(jni, "org/webrtc/SessionDescription");
+ jmethodID j_sdp_ctor = GetMethodID(
+ jni, j_sdp_class, "<init>",
+ "(Lorg/webrtc/SessionDescription$Type;Ljava/lang/String;)V");
+ jobject j_sdp = jni->NewObject(
+ j_sdp_class, j_sdp_ctor, j_type, j_description);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ return j_sdp;
+}
+
+template <class T> // T is one of {Create,Set}SessionDescriptionObserver.
+class SdpObserverWrapper : public T {
+ public:
+ SdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+ ConstraintsWrapper* constraints)
+ : constraints_(constraints),
+ j_observer_global_(jni, j_observer),
+ j_observer_class_(jni, GetObjectClass(jni, j_observer)) {
+ }
+
+ virtual ~SdpObserverWrapper() {}
+
+ // Can't mark override because of templating.
+ virtual void OnSuccess() {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSetSuccess", "()V");
+ jni()->CallVoidMethod(*j_observer_global_, m);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ // Can't mark override because of templating.
+ virtual void OnSuccess(SessionDescriptionInterface* desc) {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jmethodID m = GetMethodID(
+ jni(), *j_observer_class_, "onCreateSuccess",
+ "(Lorg/webrtc/SessionDescription;)V");
+ jobject j_sdp = JavaSdpFromNativeSdp(jni(), desc);
+ jni()->CallVoidMethod(*j_observer_global_, m, j_sdp);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ protected:
+ // Common implementation for failure of Set & Create types, distinguished by
+ // |op| being "Set" or "Create".
+ void DoOnFailure(const std::string& op, const std::string& error) {
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
+ "(Ljava/lang/String;)V");
+ jstring j_error_string = JavaStringFromStdString(jni(), error);
+ jni()->CallVoidMethod(*j_observer_global_, m, j_error_string);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ JNIEnv* jni() {
+ return AttachCurrentThreadIfNeeded();
+ }
+
+ private:
+ std::unique_ptr<ConstraintsWrapper> constraints_;
+ const ScopedGlobalRef<jobject> j_observer_global_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+};
+
+class CreateSdpObserverWrapper
+ : public SdpObserverWrapper<CreateSessionDescriptionObserver> {
+ public:
+ CreateSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+ ConstraintsWrapper* constraints)
+ : SdpObserverWrapper(jni, j_observer, constraints) {}
+
+ void OnFailure(const std::string& error) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ SdpObserverWrapper::DoOnFailure(std::string("Create"), error);
+ }
+};
+
+class SetSdpObserverWrapper
+ : public SdpObserverWrapper<SetSessionDescriptionObserver> {
+ public:
+ SetSdpObserverWrapper(JNIEnv* jni, jobject j_observer,
+ ConstraintsWrapper* constraints)
+ : SdpObserverWrapper(jni, j_observer, constraints) {}
+
+ void OnFailure(const std::string& error) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ SdpObserverWrapper::DoOnFailure(std::string("Set"), error);
+ }
+};
+
+// Adapter for a Java DataChannel$Observer presenting a C++ DataChannelObserver
+// and dispatching callbacks from C++ back to Java.
+class DataChannelObserverWrapper : public DataChannelObserver {
+ public:
+ DataChannelObserverWrapper(JNIEnv* jni, jobject j_observer)
+ : j_observer_global_(jni, j_observer),
+ j_observer_class_(jni, GetObjectClass(jni, j_observer)),
+ j_buffer_class_(jni, FindClass(jni, "org/webrtc/DataChannel$Buffer")),
+ j_on_buffered_amount_change_mid_(GetMethodID(
+ jni, *j_observer_class_, "onBufferedAmountChange", "(J)V")),
+ j_on_state_change_mid_(
+ GetMethodID(jni, *j_observer_class_, "onStateChange", "()V")),
+ j_on_message_mid_(GetMethodID(jni, *j_observer_class_, "onMessage",
+ "(Lorg/webrtc/DataChannel$Buffer;)V")),
+ j_buffer_ctor_(GetMethodID(jni, *j_buffer_class_, "<init>",
+ "(Ljava/nio/ByteBuffer;Z)V")) {}
+
+ virtual ~DataChannelObserverWrapper() {}
+
+ void OnBufferedAmountChange(uint64_t previous_amount) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jni()->CallVoidMethod(*j_observer_global_, j_on_buffered_amount_change_mid_,
+ previous_amount);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnStateChange() override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jni()->CallVoidMethod(*j_observer_global_, j_on_state_change_mid_);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ void OnMessage(const DataBuffer& buffer) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobject byte_buffer = jni()->NewDirectByteBuffer(
+ const_cast<char*>(buffer.data.data<char>()), buffer.data.size());
+ jobject j_buffer = jni()->NewObject(*j_buffer_class_, j_buffer_ctor_,
+ byte_buffer, buffer.binary);
+ jni()->CallVoidMethod(*j_observer_global_, j_on_message_mid_, j_buffer);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ private:
+ JNIEnv* jni() {
+ return AttachCurrentThreadIfNeeded();
+ }
+
+ const ScopedGlobalRef<jobject> j_observer_global_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+ const ScopedGlobalRef<jclass> j_buffer_class_;
+ const jmethodID j_on_buffered_amount_change_mid_;
+ const jmethodID j_on_state_change_mid_;
+ const jmethodID j_on_message_mid_;
+ const jmethodID j_buffer_ctor_;
+};
+
+// Adapter for a Java StatsObserver presenting a C++ StatsObserver and
+// dispatching callbacks from C++ back to Java.
+class StatsObserverWrapper : public StatsObserver {
+ public:
+ StatsObserverWrapper(JNIEnv* jni, jobject j_observer)
+ : j_observer_global_(jni, j_observer),
+ j_observer_class_(jni, GetObjectClass(jni, j_observer)),
+ j_stats_report_class_(jni, FindClass(jni, "org/webrtc/StatsReport")),
+ j_stats_report_ctor_(GetMethodID(
+ jni, *j_stats_report_class_, "<init>",
+ "(Ljava/lang/String;Ljava/lang/String;D"
+ "[Lorg/webrtc/StatsReport$Value;)V")),
+ j_value_class_(jni, FindClass(
+ jni, "org/webrtc/StatsReport$Value")),
+ j_value_ctor_(GetMethodID(
+ jni, *j_value_class_, "<init>",
+ "(Ljava/lang/String;Ljava/lang/String;)V")) {
+ }
+
+ virtual ~StatsObserverWrapper() {}
+
+ void OnComplete(const StatsReports& reports) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobjectArray j_reports = ReportsToJava(jni(), reports);
+ jmethodID m = GetMethodID(jni(), *j_observer_class_, "onComplete",
+ "([Lorg/webrtc/StatsReport;)V");
+ jni()->CallVoidMethod(*j_observer_global_, m, j_reports);
+ CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
+ }
+
+ private:
+ jobjectArray ReportsToJava(
+ JNIEnv* jni, const StatsReports& reports) {
+ jobjectArray reports_array = jni->NewObjectArray(
+ reports.size(), *j_stats_report_class_, NULL);
+ int i = 0;
+ for (const auto* report : reports) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ jstring j_id = JavaStringFromStdString(jni, report->id()->ToString());
+ jstring j_type = JavaStringFromStdString(jni, report->TypeToString());
+ jobjectArray j_values = ValuesToJava(jni, report->values());
+ jobject j_report = jni->NewObject(*j_stats_report_class_,
+ j_stats_report_ctor_,
+ j_id,
+ j_type,
+ report->timestamp(),
+ j_values);
+ jni->SetObjectArrayElement(reports_array, i++, j_report);
+ }
+ return reports_array;
+ }
+
+ jobjectArray ValuesToJava(JNIEnv* jni, const StatsReport::Values& values) {
+ jobjectArray j_values = jni->NewObjectArray(
+ values.size(), *j_value_class_, NULL);
+ int i = 0;
+ for (const auto& it : values) {
+ ScopedLocalRefFrame local_ref_frame(jni);
+ // Should we use the '.name' enum value here instead of converting the
+ // name to a string?
+ jstring j_name = JavaStringFromStdString(jni, it.second->display_name());
+ jstring j_value = JavaStringFromStdString(jni, it.second->ToString());
+ jobject j_element_value =
+ jni->NewObject(*j_value_class_, j_value_ctor_, j_name, j_value);
+ jni->SetObjectArrayElement(j_values, i++, j_element_value);
+ }
+ return j_values;
+ }
+
+ JNIEnv* jni() {
+ return AttachCurrentThreadIfNeeded();
+ }
+
+ const ScopedGlobalRef<jobject> j_observer_global_;
+ const ScopedGlobalRef<jclass> j_observer_class_;
+ const ScopedGlobalRef<jclass> j_stats_report_class_;
+ const jmethodID j_stats_report_ctor_;
+ const ScopedGlobalRef<jclass> j_value_class_;
+ const jmethodID j_value_ctor_;
+};
+
+// Wrapper dispatching rtc::VideoSinkInterface to a Java VideoRenderer
+// instance.
+class JavaVideoRendererWrapper
+ : public rtc::VideoSinkInterface<cricket::VideoFrame> {
+ public:
+ JavaVideoRendererWrapper(JNIEnv* jni, jobject j_callbacks)
+ : j_callbacks_(jni, j_callbacks),
+ j_render_frame_id_(GetMethodID(
+ jni, GetObjectClass(jni, j_callbacks), "renderFrame",
+ "(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
+ j_frame_class_(jni,
+ FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
+ j_i420_frame_ctor_id_(GetMethodID(
+ jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
+ j_texture_frame_ctor_id_(GetMethodID(
+ jni, *j_frame_class_, "<init>",
+ "(IIII[FJ)V")),
+ j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
+ CHECK_EXCEPTION(jni);
+ }
+
+ virtual ~JavaVideoRendererWrapper() {}
+
+ void OnFrame(const cricket::VideoFrame& video_frame) override {
+ ScopedLocalRefFrame local_ref_frame(jni());
+ jobject j_frame =
+ (video_frame.video_frame_buffer()->native_handle() != nullptr)
+ ? CricketToJavaTextureFrame(&video_frame)
+ : CricketToJavaI420Frame(&video_frame);
+ // |j_callbacks_| is responsible for releasing |j_frame| with
+ // VideoRenderer.renderFrameDone().
+ jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
+ CHECK_EXCEPTION(jni());
+ }
+
+ private:
+ // Make a shallow copy of |frame| to be used with Java. The Java side takes
+ // ownership of the copy, and the frame should be released with
+ // VideoRenderer.releaseNativeFrame().
+ static jlong javaShallowCopy(const cricket::VideoFrame* frame) {
+ return jlongFromPointer(frame->Copy());
+ }
+
+ // Return a VideoRenderer.I420Frame referring to the data in |frame|.
+ jobject CricketToJavaI420Frame(const cricket::VideoFrame* frame) {
+ jintArray strides = jni()->NewIntArray(3);
+ jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
+ strides_array[0] = frame->video_frame_buffer()->StrideY();
+ strides_array[1] = frame->video_frame_buffer()->StrideU();
+ strides_array[2] = frame->video_frame_buffer()->StrideV();
+ jni()->ReleaseIntArrayElements(strides, strides_array, 0);
+ jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
+ jobject y_buffer = jni()->NewDirectByteBuffer(
+ const_cast<uint8_t*>(frame->video_frame_buffer()->DataY()),
+ frame->video_frame_buffer()->StrideY() *
+ frame->video_frame_buffer()->height());
+ size_t chroma_height = (frame->height() + 1) / 2;
+ jobject u_buffer = jni()->NewDirectByteBuffer(
+ const_cast<uint8_t*>(frame->video_frame_buffer()->DataU()),
+ frame->video_frame_buffer()->StrideU() * chroma_height);
+ jobject v_buffer = jni()->NewDirectByteBuffer(
+ const_cast<uint8_t*>(frame->video_frame_buffer()->DataV()),
+ frame->video_frame_buffer()->StrideV() * chroma_height);
+
+ jni()->SetObjectArrayElement(planes, 0, y_buffer);
+ jni()->SetObjectArrayElement(planes, 1, u_buffer);
+ jni()->SetObjectArrayElement(planes, 2, v_buffer);
+ return jni()->NewObject(
+ *j_frame_class_, j_i420_frame_ctor_id_,
+ frame->width(), frame->height(),
+ static_cast<int>(frame->rotation()),
+ strides, planes, javaShallowCopy(frame));
+ }
+
+ // Return a VideoRenderer.I420Frame referring to the texture object in |frame|.
+ jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
+ NativeHandleImpl* handle = reinterpret_cast<NativeHandleImpl*>(
+ frame->video_frame_buffer()->native_handle());
+ jfloatArray sampling_matrix = handle->sampling_matrix.ToJava(jni());
+
+ return jni()->NewObject(
+ *j_frame_class_, j_texture_frame_ctor_id_,
+ frame->width(), frame->height(),
+ static_cast<int>(frame->rotation()),
+ handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
+ }
+
+ JNIEnv* jni() {
+ return AttachCurrentThreadIfNeeded();
+ }
+
+ ScopedGlobalRef<jobject> j_callbacks_;
+ jmethodID j_render_frame_id_;
+ ScopedGlobalRef<jclass> j_frame_class_;
+ jmethodID j_i420_frame_ctor_id_;
+ jmethodID j_texture_frame_ctor_id_;
+ ScopedGlobalRef<jclass> j_byte_buffer_class_;
+};
+
+static DataChannelInterface* ExtractNativeDC(JNIEnv* jni, jobject j_dc) {
+ jfieldID native_dc_id = GetFieldID(jni,
+ GetObjectClass(jni, j_dc), "nativeDataChannel", "J");
+ jlong j_d = GetLongField(jni, j_dc, native_dc_id);
+ return reinterpret_cast<DataChannelInterface*>(j_d);
+}
+
+JOW(jlong, DataChannel_registerObserverNative)(
+ JNIEnv* jni, jobject j_dc, jobject j_observer) {
+ std::unique_ptr<DataChannelObserverWrapper> observer(
+ new DataChannelObserverWrapper(jni, j_observer));
+ ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get());
+ return jlongFromPointer(observer.release());
+}
+
+JOW(void, DataChannel_unregisterObserverNative)(
+ JNIEnv* jni, jobject j_dc, jlong native_observer) {
+ ExtractNativeDC(jni, j_dc)->UnregisterObserver();
+ delete reinterpret_cast<DataChannelObserverWrapper*>(native_observer);
+}
+
+JOW(jstring, DataChannel_label)(JNIEnv* jni, jobject j_dc) {
+ return JavaStringFromStdString(jni, ExtractNativeDC(jni, j_dc)->label());
+}
+
+JOW(jobject, DataChannel_state)(JNIEnv* jni, jobject j_dc) {
+ return JavaEnumFromIndex(
+ jni, "DataChannel$State", ExtractNativeDC(jni, j_dc)->state());
+}
+
+JOW(jlong, DataChannel_bufferedAmount)(JNIEnv* jni, jobject j_dc) {
+ uint64_t buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
+ RTC_CHECK_LE(buffered_amount, std::numeric_limits<int64_t>::max())
+ << "buffered_amount overflowed jlong!";
+ return static_cast<jlong>(buffered_amount);
+}
+
+JOW(void, DataChannel_close)(JNIEnv* jni, jobject j_dc) {
+ ExtractNativeDC(jni, j_dc)->Close();
+}
+
+JOW(jboolean, DataChannel_sendNative)(JNIEnv* jni, jobject j_dc,
+ jbyteArray data, jboolean binary) {
+ jbyte* bytes = jni->GetByteArrayElements(data, NULL);
+ bool ret = ExtractNativeDC(jni, j_dc)->Send(DataBuffer(
+ rtc::CopyOnWriteBuffer(bytes, jni->GetArrayLength(data)),
+ binary));
+ jni->ReleaseByteArrayElements(data, bytes, JNI_ABORT);
+ return ret;
+}
+
+JOW(void, DataChannel_dispose)(JNIEnv* jni, jobject j_dc) {
+ CHECK_RELEASE(ExtractNativeDC(jni, j_dc));
+}
+
+JOW(void, Logging_nativeEnableTracing)(
+ JNIEnv* jni, jclass, jstring j_path, jint nativeLevels) {
+ std::string path = JavaToStdString(jni, j_path);
+ if (nativeLevels != webrtc::kTraceNone) {
+ webrtc::Trace::set_level_filter(nativeLevels);
+ if (path != "logcat:") {
+ RTC_CHECK_EQ(0, webrtc::Trace::SetTraceFile(path.c_str(), false))
+ << "SetTraceFile failed";
+ } else {
+ // Intentionally leak this to avoid needing to reason about its lifecycle.
+ // It keeps no state and functions only as a dispatch point.
+ static LogcatTraceContext* g_trace_callback = new LogcatTraceContext();
+ }
+ }
+}
+
+JOW(void, Logging_nativeEnableLogToDebugOutput)
+ (JNIEnv* jni, jclass, jint nativeSeverity) {
+ if (nativeSeverity >= rtc::LS_SENSITIVE && nativeSeverity <= rtc::LS_NONE) {
+ rtc::LogMessage::LogToDebug(
+ static_cast<rtc::LoggingSeverity>(nativeSeverity));
+ }
+}
+
+JOW(void, Logging_nativeEnableLogThreads)(JNIEnv* jni, jclass) {
+ rtc::LogMessage::LogThreads(true);
+}
+
+JOW(void, Logging_nativeEnableLogTimeStamps)(JNIEnv* jni, jclass) {
+ rtc::LogMessage::LogTimestamps(true);
+}
+
+JOW(void, Logging_nativeLog)(
+ JNIEnv* jni, jclass, jint j_severity, jstring j_tag, jstring j_message) {
+ std::string message = JavaToStdString(jni, j_message);
+ std::string tag = JavaToStdString(jni, j_tag);
+ LOG_TAG(static_cast<rtc::LoggingSeverity>(j_severity), tag) << message;
+}
+
+JOW(void, PeerConnection_freePeerConnection)(JNIEnv*, jclass, jlong j_p) {
+ CHECK_RELEASE(reinterpret_cast<PeerConnectionInterface*>(j_p));
+}
+
+JOW(void, PeerConnection_freeObserver)(JNIEnv*, jclass, jlong j_p) {
+ PCOJava* p = reinterpret_cast<PCOJava*>(j_p);
+ delete p;
+}
+
+JOW(void, MediaSource_free)(JNIEnv*, jclass, jlong j_p) {
+ CHECK_RELEASE(reinterpret_cast<MediaSourceInterface*>(j_p));
+}
+
+JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
+ delete reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
+}
+
+JOW(void, VideoRenderer_releaseNativeFrame)(
+ JNIEnv* jni, jclass, jlong j_frame_ptr) {
+ delete reinterpret_cast<const cricket::VideoFrame*>(j_frame_ptr);
+}
+
+JOW(void, MediaStreamTrack_free)(JNIEnv*, jclass, jlong j_p) {
+ reinterpret_cast<MediaStreamTrackInterface*>(j_p)->Release();
+}
+
+JOW(jboolean, MediaStream_nativeAddAudioTrack)(
+ JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
+ reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeAddVideoTrack)(
+ JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)
+ ->AddTrack(reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeRemoveAudioTrack)(
+ JNIEnv* jni, jclass, jlong pointer, jlong j_audio_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+ reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
+}
+
+JOW(jboolean, MediaStream_nativeRemoveVideoTrack)(
+ JNIEnv* jni, jclass, jlong pointer, jlong j_video_track_pointer) {
+ return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
+ reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
+}
+
+JOW(jstring, MediaStream_nativeLabel)(JNIEnv* jni, jclass, jlong j_p) {
+ return JavaStringFromStdString(
+ jni, reinterpret_cast<MediaStreamInterface*>(j_p)->label());
+}
+
+JOW(void, MediaStream_free)(JNIEnv*, jclass, jlong j_p) {
+ CHECK_RELEASE(reinterpret_cast<MediaStreamInterface*>(j_p));
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
+ JNIEnv* jni, jclass, jobject j_observer) {
+ return (jlong)new PCOJava(jni, j_observer);
+}
+
+JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
+ JNIEnv* jni, jclass, jobject context,
+ jboolean initialize_audio, jboolean initialize_video,
+ jboolean video_hw_acceleration) {
+ bool failure = false;
+ video_hw_acceleration_enabled = video_hw_acceleration;
+ AndroidNetworkMonitor::SetAndroidContext(jni, context);
+ if (!factory_static_initialized) {
+ if (initialize_video) {
+ failure |= AndroidVideoCapturerJni::SetAndroidObjects(jni, context);
+ }
+ if (initialize_audio)
+ failure |= webrtc::VoiceEngine::SetAndroidObjects(GetJVM(), context);
+ factory_static_initialized = true;
+ }
+ return !failure;
+}
+
+JOW(void, PeerConnectionFactory_initializeFieldTrials)(
+ JNIEnv* jni, jclass, jstring j_trials_init_string) {
+ field_trials_init_string = NULL;
+ if (j_trials_init_string != NULL) {
+ const char* init_string =
+ jni->GetStringUTFChars(j_trials_init_string, NULL);
+ int init_string_length = jni->GetStringUTFLength(j_trials_init_string);
+ field_trials_init_string = new char[init_string_length + 1];
+ rtc::strcpyn(field_trials_init_string, init_string_length + 1, init_string);
+ jni->ReleaseStringUTFChars(j_trials_init_string, init_string);
+ LOG(LS_INFO) << "initializeFieldTrials: " << field_trials_init_string;
+ }
+ webrtc::field_trial::InitFieldTrialsFromString(field_trials_init_string);
+}
+
+JOW(void, PeerConnectionFactory_initializeInternalTracer)(JNIEnv* jni, jclass) {
+ rtc::tracing::SetupInternalTracer();
+}
+
+JOW(jboolean, PeerConnectionFactory_startInternalTracingCapture)(
+ JNIEnv* jni, jclass, jstring j_event_tracing_filename) {
+ if (!j_event_tracing_filename)
+ return false;
+
+ const char* init_string =
+ jni->GetStringUTFChars(j_event_tracing_filename, NULL);
+ LOG(LS_INFO) << "Starting internal tracing to: " << init_string;
+ bool ret = rtc::tracing::StartInternalCapture(init_string);
+ jni->ReleaseStringUTFChars(j_event_tracing_filename, init_string);
+ return ret;
+}
+
+JOW(void, PeerConnectionFactory_stopInternalTracingCapture)(
+ JNIEnv* jni, jclass) {
+ rtc::tracing::StopInternalCapture();
+}
+
+JOW(void, PeerConnectionFactory_shutdownInternalTracer)(JNIEnv* jni, jclass) {
+ rtc::tracing::ShutdownInternalTracer();
+}
+
+// Helper struct for working around the fact that CreatePeerConnectionFactory()
+// comes in two flavors: either entirely automagical (constructing its own
+// threads and deleting them on teardown, but no external codec factory support)
+// or entirely manual (requires caller to delete threads after factory
+// teardown). This struct takes ownership of its ctor's arguments to present a
+// single thing for Java to hold and eventually free.
+class OwnedFactoryAndThreads {
+ public:
+ OwnedFactoryAndThreads(std::unique_ptr<Thread> network_thread,
+ std::unique_ptr<Thread> worker_thread,
+ std::unique_ptr<Thread> signaling_thread,
+ WebRtcVideoEncoderFactory* encoder_factory,
+ WebRtcVideoDecoderFactory* decoder_factory,
+ rtc::NetworkMonitorFactory* network_monitor_factory,
+ PeerConnectionFactoryInterface* factory)
+ : network_thread_(std::move(network_thread)),
+ worker_thread_(std::move(worker_thread)),
+ signaling_thread_(std::move(signaling_thread)),
+ encoder_factory_(encoder_factory),
+ decoder_factory_(decoder_factory),
+ network_monitor_factory_(network_monitor_factory),
+ factory_(factory) {}
+
+ ~OwnedFactoryAndThreads() {
+ CHECK_RELEASE(factory_);
+ if (network_monitor_factory_ != nullptr) {
+ rtc::NetworkMonitorFactory::ReleaseFactory(network_monitor_factory_);
+ }
+ }
+
+ PeerConnectionFactoryInterface* factory() { return factory_; }
+ WebRtcVideoEncoderFactory* encoder_factory() { return encoder_factory_; }
+ WebRtcVideoDecoderFactory* decoder_factory() { return decoder_factory_; }
+ rtc::NetworkMonitorFactory* network_monitor_factory() {
+ return network_monitor_factory_;
+ }
+ void clear_network_monitor_factory() { network_monitor_factory_ = nullptr; }
+ void InvokeJavaCallbacksOnFactoryThreads();
+
+ private:
+ void JavaCallbackOnFactoryThreads();
+
+ const std::unique_ptr<Thread> network_thread_;
+ const std::unique_ptr<Thread> worker_thread_;
+ const std::unique_ptr<Thread> signaling_thread_;
+ WebRtcVideoEncoderFactory* encoder_factory_;
+ WebRtcVideoDecoderFactory* decoder_factory_;
+ rtc::NetworkMonitorFactory* network_monitor_factory_;
+ PeerConnectionFactoryInterface* factory_; // Const after ctor except dtor.
+};
+
+void OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads() {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ ScopedLocalRefFrame local_ref_frame(jni);
+ jclass j_factory_class = FindClass(jni, "org/webrtc/PeerConnectionFactory");
+ jmethodID m = nullptr;
+ if (network_thread_->IsCurrent()) {
+ LOG(LS_INFO) << "Network thread JavaCallback";
+ m = GetStaticMethodID(jni, j_factory_class, "onNetworkThreadReady", "()V");
+ }
+ if (worker_thread_->IsCurrent()) {
+ LOG(LS_INFO) << "Worker thread JavaCallback";
+ m = GetStaticMethodID(jni, j_factory_class, "onWorkerThreadReady", "()V");
+ }
+ if (signaling_thread_->IsCurrent()) {
+ LOG(LS_INFO) << "Signaling thread JavaCallback";
+ m = GetStaticMethodID(
+ jni, j_factory_class, "onSignalingThreadReady", "()V");
+ }
+ if (m != nullptr) {
+ jni->CallStaticVoidMethod(j_factory_class, m);
+ CHECK_EXCEPTION(jni) << "error during JavaCallback::CallStaticVoidMethod";
+ }
+}
+
+void OwnedFactoryAndThreads::InvokeJavaCallbacksOnFactoryThreads() {
+ LOG(LS_INFO) << "InvokeJavaCallbacksOnFactoryThreads.";
+ network_thread_->Invoke<void>(RTC_FROM_HERE,
+ [this] { JavaCallbackOnFactoryThreads(); });
+ worker_thread_->Invoke<void>(RTC_FROM_HERE,
+ [this] { JavaCallbackOnFactoryThreads(); });
+ signaling_thread_->Invoke<void>(RTC_FROM_HERE,
+ [this] { JavaCallbackOnFactoryThreads(); });
+}
+
+PeerConnectionFactoryInterface::Options ParseOptionsFromJava(JNIEnv* jni,
+ jobject options) {
+ jclass options_class = jni->GetObjectClass(options);
+ jfieldID network_ignore_mask_field =
+ jni->GetFieldID(options_class, "networkIgnoreMask", "I");
+ int network_ignore_mask =
+ jni->GetIntField(options, network_ignore_mask_field);
+
+ jfieldID disable_encryption_field =
+ jni->GetFieldID(options_class, "disableEncryption", "Z");
+ bool disable_encryption =
+ jni->GetBooleanField(options, disable_encryption_field);
+
+ jfieldID disable_network_monitor_field =
+ jni->GetFieldID(options_class, "disableNetworkMonitor", "Z");
+ bool disable_network_monitor =
+ jni->GetBooleanField(options, disable_network_monitor_field);
+
+ PeerConnectionFactoryInterface::Options native_options;
+
+ // This doesn't necessarily match the C++ version of this struct; feel free
+ // to add more parameters as necessary.
+ native_options.network_ignore_mask = network_ignore_mask;
+ native_options.disable_encryption = disable_encryption;
+ native_options.disable_network_monitor = disable_network_monitor;
+ return native_options;
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
+ JNIEnv* jni, jclass, jobject joptions) {
+ // talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
+ // ThreadManager only WrapCurrentThread()s the thread where it is first
+ // created. Since the semantics around when auto-wrapping happens in
+ // webrtc/base/ are convoluted, we simply wrap here to avoid having to think
+ // about ramifications of auto-wrapping there.
+ rtc::ThreadManager::Instance()->WrapCurrentThread();
+ webrtc::Trace::CreateTrace();
+
+ std::unique_ptr<Thread> network_thread =
+ rtc::Thread::CreateWithSocketServer();
+ network_thread->SetName("network_thread", nullptr);
+ RTC_CHECK(network_thread->Start()) << "Failed to start thread";
+
+ std::unique_ptr<Thread> worker_thread = rtc::Thread::Create();
+ worker_thread->SetName("worker_thread", nullptr);
+ RTC_CHECK(worker_thread->Start()) << "Failed to start thread";
+
+ std::unique_ptr<Thread> signaling_thread = rtc::Thread::Create();
+ signaling_thread->SetName("signaling_thread", NULL);
+ RTC_CHECK(signaling_thread->Start()) << "Failed to start thread";
+
+ WebRtcVideoEncoderFactory* encoder_factory = nullptr;
+ WebRtcVideoDecoderFactory* decoder_factory = nullptr;
+ rtc::NetworkMonitorFactory* network_monitor_factory = nullptr;
+
+ PeerConnectionFactoryInterface::Options options;
+ bool has_options = joptions != NULL;
+ if (has_options) {
+ options = ParseOptionsFromJava(jni, joptions);
+ }
+
+ if (video_hw_acceleration_enabled) {
+ encoder_factory = new MediaCodecVideoEncoderFactory();
+ decoder_factory = new MediaCodecVideoDecoderFactory();
+ }
+ // Create the network_monitor_factory unless the options are provided and
+ // disable_network_monitor therein is set to true.
+ if (!(has_options && options.disable_network_monitor)) {
+ network_monitor_factory = new AndroidNetworkMonitorFactory();
+ rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory);
+ }
+
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ webrtc::CreatePeerConnectionFactory(
+ network_thread.get(), worker_thread.get(), signaling_thread.get(),
+ nullptr, encoder_factory, decoder_factory));
+ RTC_CHECK(factory) << "Failed to create the peer connection factory; "
+ << "WebRTC/libjingle init likely failed on this device";
+ // TODO(honghaiz): Maybe put the options as the argument of
+ // CreatePeerConnectionFactory.
+ if (has_options) {
+ factory->SetOptions(options);
+ }
+ OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
+ std::move(network_thread), std::move(worker_thread),
+ std::move(signaling_thread), encoder_factory, decoder_factory,
+ network_monitor_factory, factory.release());
+ owned_factory->InvokeJavaCallbacksOnFactoryThreads();
+ return jlongFromPointer(owned_factory);
+}
+
+JOW(void, PeerConnectionFactory_nativeFreeFactory)(JNIEnv*, jclass, jlong j_p) {
+ delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+ if (field_trials_init_string) {
+ webrtc::field_trial::InitFieldTrialsFromString(NULL);
+ delete[] field_trials_init_string;
+ field_trials_init_string = NULL;
+ }
+ webrtc::Trace::ReturnTrace();
+}
+
+static PeerConnectionFactoryInterface* factoryFromJava(jlong j_p) {
+ return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory();
+}
+
+JOW(void, PeerConnectionFactory_nativeThreadsCallbacks)(
+ JNIEnv*, jclass, jlong j_p) {
+ OwnedFactoryAndThreads* factory =
+ reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
+ factory->InvokeJavaCallbacksOnFactoryThreads();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
+ JNIEnv* jni, jclass, jlong native_factory, jstring label) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ rtc::scoped_refptr<MediaStreamInterface> stream(
+ factory->CreateLocalMediaStream(JavaToStdString(jni, label)));
+ return (jlong)stream.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
+ JNIEnv* jni, jclass, jlong native_factory, jobject j_egl_context,
+ jobject j_video_capturer, jobject j_constraints) {
+ // Create a cricket::VideoCapturer from |j_video_capturer|.
+ rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
+ new rtc::RefCountedObject<AndroidVideoCapturerJni>(
+ jni, j_video_capturer, j_egl_context);
+ std::unique_ptr<cricket::VideoCapturer> capturer(
+ new webrtc::AndroidVideoCapturer(delegate));
+ // Create a webrtc::VideoTrackSourceInterface from the cricket::VideoCapturer,
+ // native factory and constraints.
+ std::unique_ptr<ConstraintsWrapper> constraints(
+ new ConstraintsWrapper(jni, j_constraints));
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ rtc::scoped_refptr<VideoTrackSourceInterface> source(
+ factory->CreateVideoSource(capturer.release(), constraints.get()));
+ return (jlong)source.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateVideoTrack)(
+ JNIEnv* jni, jclass, jlong native_factory, jstring id,
+ jlong native_source) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ rtc::scoped_refptr<VideoTrackInterface> track(factory->CreateVideoTrack(
+ JavaToStdString(jni, id),
+ reinterpret_cast<VideoTrackSourceInterface*>(native_source)));
+ return (jlong)track.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateAudioSource)(
+ JNIEnv* jni, jclass, jlong native_factory, jobject j_constraints) {
+ std::unique_ptr<ConstraintsWrapper> constraints(
+ new ConstraintsWrapper(jni, j_constraints));
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ rtc::scoped_refptr<AudioSourceInterface> source(
+ factory->CreateAudioSource(constraints.get()));
+ return (jlong)source.release();
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreateAudioTrack)(
+ JNIEnv* jni, jclass, jlong native_factory, jstring id,
+ jlong native_source) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ rtc::scoped_refptr<AudioTrackInterface> track(factory->CreateAudioTrack(
+ JavaToStdString(jni, id),
+ reinterpret_cast<AudioSourceInterface*>(native_source)));
+ return (jlong)track.release();
+}
+
+JOW(jboolean, PeerConnectionFactory_nativeStartAecDump)(
+ JNIEnv* jni, jclass, jlong native_factory, jint file,
+ jint filesize_limit_bytes) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ return factory->StartAecDump(file, filesize_limit_bytes);
+}
+
+JOW(void, PeerConnectionFactory_nativeStopAecDump)(
+ JNIEnv* jni, jclass, jlong native_factory) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ factory->StopAecDump();
+}
+
+JOW(jboolean, PeerConnectionFactory_nativeStartRtcEventLog)
+(JNIEnv* jni,
+ jclass,
+ jlong native_factory,
+ jint file,
+ jint filesize_limit_bytes) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ return factory->StartRtcEventLog(file, filesize_limit_bytes);
+}
+
+JOW(void, PeerConnectionFactory_nativeStopRtcEventLog)(
+ JNIEnv* jni, jclass, jlong native_factory) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ factory->StopRtcEventLog();
+}
+
+JOW(void, PeerConnectionFactory_nativeSetOptions)(
+ JNIEnv* jni, jclass, jlong native_factory, jobject options) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
+ factoryFromJava(native_factory));
+ PeerConnectionFactoryInterface::Options options_to_set =
+ ParseOptionsFromJava(jni, options);
+ factory->SetOptions(options_to_set);
+
+ if (options_to_set.disable_network_monitor) {
+ OwnedFactoryAndThreads* owner =
+ reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+ if (owner->network_monitor_factory()) {
+ rtc::NetworkMonitorFactory::ReleaseFactory(
+ owner->network_monitor_factory());
+ owner->clear_network_monitor_factory();
+ }
+ }
+}
+
+JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
+ JNIEnv* jni, jclass, jlong native_factory, jobject local_egl_context,
+ jobject remote_egl_context) {
+ OwnedFactoryAndThreads* owned_factory =
+ reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+
+ jclass j_eglbase14_context_class =
+ FindClass(jni, "org/webrtc/EglBase14$Context");
+
+ MediaCodecVideoEncoderFactory* encoder_factory =
+ static_cast<MediaCodecVideoEncoderFactory*>
+ (owned_factory->encoder_factory());
+ if (encoder_factory &&
+ jni->IsInstanceOf(local_egl_context, j_eglbase14_context_class)) {
+ LOG(LS_INFO) << "Set EGL context for HW encoding.";
+ encoder_factory->SetEGLContext(jni, local_egl_context);
+ }
+
+ MediaCodecVideoDecoderFactory* decoder_factory =
+ static_cast<MediaCodecVideoDecoderFactory*>
+ (owned_factory->decoder_factory());
+ if (decoder_factory) {
+ LOG(LS_INFO) << "Set EGL context for HW decoding.";
+ decoder_factory->SetEGLContext(jni, remote_egl_context);
+ }
+}
+
+static PeerConnectionInterface::IceTransportsType
+JavaIceTransportsTypeToNativeType(JNIEnv* jni, jobject j_ice_transports_type) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$IceTransportsType",
+ j_ice_transports_type);
+
+ if (enum_name == "ALL")
+ return PeerConnectionInterface::kAll;
+
+ if (enum_name == "RELAY")
+ return PeerConnectionInterface::kRelay;
+
+ if (enum_name == "NOHOST")
+ return PeerConnectionInterface::kNoHost;
+
+ if (enum_name == "NONE")
+ return PeerConnectionInterface::kNone;
+
+ RTC_CHECK(false) << "Unexpected IceTransportsType enum_name " << enum_name;
+ return PeerConnectionInterface::kAll;
+}
+
+static PeerConnectionInterface::BundlePolicy
+JavaBundlePolicyToNativeType(JNIEnv* jni, jobject j_bundle_policy) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$BundlePolicy",
+ j_bundle_policy);
+
+ if (enum_name == "BALANCED")
+ return PeerConnectionInterface::kBundlePolicyBalanced;
+
+ if (enum_name == "MAXBUNDLE")
+ return PeerConnectionInterface::kBundlePolicyMaxBundle;
+
+ if (enum_name == "MAXCOMPAT")
+ return PeerConnectionInterface::kBundlePolicyMaxCompat;
+
+ RTC_CHECK(false) << "Unexpected BundlePolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kBundlePolicyBalanced;
+}
+
+static PeerConnectionInterface::RtcpMuxPolicy
+JavaRtcpMuxPolicyToNativeType(JNIEnv* jni, jobject j_rtcp_mux_policy) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$RtcpMuxPolicy",
+ j_rtcp_mux_policy);
+
+ if (enum_name == "NEGOTIATE")
+ return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+
+ if (enum_name == "REQUIRE")
+ return PeerConnectionInterface::kRtcpMuxPolicyRequire;
+
+ RTC_CHECK(false) << "Unexpected RtcpMuxPolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
+}
+
+static PeerConnectionInterface::TcpCandidatePolicy
+JavaTcpCandidatePolicyToNativeType(
+ JNIEnv* jni, jobject j_tcp_candidate_policy) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$TcpCandidatePolicy",
+ j_tcp_candidate_policy);
+
+ if (enum_name == "ENABLED")
+ return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+
+ if (enum_name == "DISABLED")
+ return PeerConnectionInterface::kTcpCandidatePolicyDisabled;
+
+ RTC_CHECK(false) << "Unexpected TcpCandidatePolicy enum_name " << enum_name;
+ return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
+}
+
+static PeerConnectionInterface::CandidateNetworkPolicy
+JavaCandidateNetworkPolicyToNativeType(JNIEnv* jni,
+ jobject j_candidate_network_policy) {
+ std::string enum_name =
+ GetJavaEnumName(jni, "org/webrtc/PeerConnection$CandidateNetworkPolicy",
+ j_candidate_network_policy);
+
+ if (enum_name == "ALL")
+ return PeerConnectionInterface::kCandidateNetworkPolicyAll;
+
+ if (enum_name == "LOW_COST")
+ return PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
+
+ RTC_CHECK(false) << "Unexpected CandidateNetworkPolicy enum_name "
+ << enum_name;
+ return PeerConnectionInterface::kCandidateNetworkPolicyAll;
+}
+
+static rtc::KeyType JavaKeyTypeToNativeType(JNIEnv* jni, jobject j_key_type) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$KeyType", j_key_type);
+
+ if (enum_name == "RSA")
+ return rtc::KT_RSA;
+ if (enum_name == "ECDSA")
+ return rtc::KT_ECDSA;
+
+ RTC_CHECK(false) << "Unexpected KeyType enum_name " << enum_name;
+ return rtc::KT_ECDSA;
+}
+
+static PeerConnectionInterface::ContinualGatheringPolicy
+ JavaContinualGatheringPolicyToNativeType(
+ JNIEnv* jni, jobject j_gathering_policy) {
+ std::string enum_name = GetJavaEnumName(
+ jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy",
+ j_gathering_policy);
+ if (enum_name == "GATHER_ONCE")
+ return PeerConnectionInterface::GATHER_ONCE;
+
+ if (enum_name == "GATHER_CONTINUALLY")
+ return PeerConnectionInterface::GATHER_CONTINUALLY;
+
+ RTC_CHECK(false) << "Unexpected ContinualGatheringPolicy enum name "
+ << enum_name;
+ return PeerConnectionInterface::GATHER_ONCE;
+}
+
+static void JavaIceServersToJsepIceServers(
+ JNIEnv* jni, jobject j_ice_servers,
+ PeerConnectionInterface::IceServers* ice_servers) {
+ for (jobject j_ice_server : Iterable(jni, j_ice_servers)) {
+ jclass j_ice_server_class = GetObjectClass(jni, j_ice_server);
+ jfieldID j_ice_server_uri_id =
+ GetFieldID(jni, j_ice_server_class, "uri", "Ljava/lang/String;");
+ jfieldID j_ice_server_username_id =
+ GetFieldID(jni, j_ice_server_class, "username", "Ljava/lang/String;");
+ jfieldID j_ice_server_password_id =
+ GetFieldID(jni, j_ice_server_class, "password", "Ljava/lang/String;");
+ jstring uri = reinterpret_cast<jstring>(
+ GetObjectField(jni, j_ice_server, j_ice_server_uri_id));
+ jstring username = reinterpret_cast<jstring>(
+ GetObjectField(jni, j_ice_server, j_ice_server_username_id));
+ jstring password = reinterpret_cast<jstring>(
+ GetObjectField(jni, j_ice_server, j_ice_server_password_id));
+ PeerConnectionInterface::IceServer server;
+ server.uri = JavaToStdString(jni, uri);
+ server.username = JavaToStdString(jni, username);
+ server.password = JavaToStdString(jni, password);
+ ice_servers->push_back(server);
+ }
+}
+
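+// Reads each field of org.webrtc.PeerConnection.RTCConfiguration via JNI
+// reflection; the field names and signatures used here must stay in sync with
+// the Java class definition.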
+static void JavaRTCConfigurationToJsepRTCConfiguration(
+ JNIEnv* jni,
+ jobject j_rtc_config,
+ PeerConnectionInterface::RTCConfiguration* rtc_config) {
+ jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
+
+ jfieldID j_ice_transports_type_id = GetFieldID(
+ jni, j_rtc_config_class, "iceTransportsType",
+ "Lorg/webrtc/PeerConnection$IceTransportsType;");
+ jobject j_ice_transports_type = GetObjectField(
+ jni, j_rtc_config, j_ice_transports_type_id);
+
+ jfieldID j_bundle_policy_id = GetFieldID(
+ jni, j_rtc_config_class, "bundlePolicy",
+ "Lorg/webrtc/PeerConnection$BundlePolicy;");
+ jobject j_bundle_policy = GetObjectField(
+ jni, j_rtc_config, j_bundle_policy_id);
+
+ jfieldID j_rtcp_mux_policy_id = GetFieldID(
+ jni, j_rtc_config_class, "rtcpMuxPolicy",
+ "Lorg/webrtc/PeerConnection$RtcpMuxPolicy;");
+ jobject j_rtcp_mux_policy = GetObjectField(
+ jni, j_rtc_config, j_rtcp_mux_policy_id);
+
+ jfieldID j_tcp_candidate_policy_id = GetFieldID(
+ jni, j_rtc_config_class, "tcpCandidatePolicy",
+ "Lorg/webrtc/PeerConnection$TcpCandidatePolicy;");
+ jobject j_tcp_candidate_policy = GetObjectField(
+ jni, j_rtc_config, j_tcp_candidate_policy_id);
+
+ jfieldID j_candidate_network_policy_id = GetFieldID(
+ jni, j_rtc_config_class, "candidateNetworkPolicy",
+ "Lorg/webrtc/PeerConnection$CandidateNetworkPolicy;");
+ jobject j_candidate_network_policy = GetObjectField(
+ jni, j_rtc_config, j_candidate_network_policy_id);
+
+ jfieldID j_ice_servers_id = GetFieldID(
+ jni, j_rtc_config_class, "iceServers", "Ljava/util/List;");
+ jobject j_ice_servers = GetObjectField(jni, j_rtc_config, j_ice_servers_id);
+
+ jfieldID j_audio_jitter_buffer_max_packets_id =
+ GetFieldID(jni, j_rtc_config_class, "audioJitterBufferMaxPackets", "I");
+ jfieldID j_audio_jitter_buffer_fast_accelerate_id = GetFieldID(
+ jni, j_rtc_config_class, "audioJitterBufferFastAccelerate", "Z");
+
+ jfieldID j_ice_connection_receiving_timeout_id =
+ GetFieldID(jni, j_rtc_config_class, "iceConnectionReceivingTimeout", "I");
+
+ jfieldID j_ice_backup_candidate_pair_ping_interval_id = GetFieldID(
+ jni, j_rtc_config_class, "iceBackupCandidatePairPingInterval", "I");
+
+ jfieldID j_continual_gathering_policy_id =
+ GetFieldID(jni, j_rtc_config_class, "continualGatheringPolicy",
+ "Lorg/webrtc/PeerConnection$ContinualGatheringPolicy;");
+ jobject j_continual_gathering_policy =
+ GetObjectField(jni, j_rtc_config, j_continual_gathering_policy_id);
+
+ jfieldID j_ice_candidate_pool_size_id =
+ GetFieldID(jni, j_rtc_config_class, "iceCandidatePoolSize", "I");
+
+ rtc_config->type =
+ JavaIceTransportsTypeToNativeType(jni, j_ice_transports_type);
+ rtc_config->bundle_policy =
+ JavaBundlePolicyToNativeType(jni, j_bundle_policy);
+ rtc_config->rtcp_mux_policy =
+ JavaRtcpMuxPolicyToNativeType(jni, j_rtcp_mux_policy);
+ rtc_config->tcp_candidate_policy =
+ JavaTcpCandidatePolicyToNativeType(jni, j_tcp_candidate_policy);
+ rtc_config->candidate_network_policy =
+ JavaCandidateNetworkPolicyToNativeType(jni, j_candidate_network_policy);
+ JavaIceServersToJsepIceServers(jni, j_ice_servers, &rtc_config->servers);
+ rtc_config->audio_jitter_buffer_max_packets =
+ GetIntField(jni, j_rtc_config, j_audio_jitter_buffer_max_packets_id);
+ rtc_config->audio_jitter_buffer_fast_accelerate = GetBooleanField(
+ jni, j_rtc_config, j_audio_jitter_buffer_fast_accelerate_id);
+ rtc_config->ice_connection_receiving_timeout =
+ GetIntField(jni, j_rtc_config, j_ice_connection_receiving_timeout_id);
+ rtc_config->ice_backup_candidate_pair_ping_interval = GetIntField(
+ jni, j_rtc_config, j_ice_backup_candidate_pair_ping_interval_id);
+ rtc_config->continual_gathering_policy =
+ JavaContinualGatheringPolicyToNativeType(
+ jni, j_continual_gathering_policy);
+ rtc_config->ice_candidate_pool_size =
+ GetIntField(jni, j_rtc_config, j_ice_candidate_pool_size_id);
+}
+
+JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnection)(
+ JNIEnv* jni, jclass, jlong factory, jobject j_rtc_config,
+ jobject j_constraints, jlong observer_p) {
+ rtc::scoped_refptr<PeerConnectionFactoryInterface> f(
+ factoryFromJava(factory));
+
+ PeerConnectionInterface::RTCConfiguration rtc_config;
+ JavaRTCConfigurationToJsepRTCConfiguration(jni, j_rtc_config, &rtc_config);
+
+ jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
+ jfieldID j_key_type_id = GetFieldID(jni, j_rtc_config_class, "keyType",
+ "Lorg/webrtc/PeerConnection$KeyType;");
+ jobject j_key_type = GetObjectField(jni, j_rtc_config, j_key_type_id);
+
+ // Generate a certificate up front when a non-default key type is requested.
+ // (rtc::KT_DEFAULT is an alias for rtc::KT_ECDSA, so the check below only
+ // skips generation when the default ECDSA key was asked for.)
+ rtc::KeyType key_type = JavaKeyTypeToNativeType(jni, j_key_type);
+ if (key_type != rtc::KT_DEFAULT) {
+ rtc::scoped_refptr<rtc::RTCCertificate> certificate =
+ rtc::RTCCertificateGenerator::GenerateCertificate(
+ rtc::KeyParams(key_type), rtc::Optional<uint64_t>());
+ if (!certificate) {
+ LOG(LS_ERROR) << "Failed to generate certificate. KeyType: " << key_type;
+ return 0;
+ }
+ rtc_config.certificates.push_back(certificate);
+ }
+
+ PCOJava* observer = reinterpret_cast<PCOJava*>(observer_p);
+ observer->SetConstraints(new ConstraintsWrapper(jni, j_constraints));
+ rtc::scoped_refptr<PeerConnectionInterface> pc(f->CreatePeerConnection(
+ rtc_config, observer->constraints(), NULL, NULL, observer));
+ return (jlong)pc.release();
+}
+
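+// Reads the native pointer stored in the Java PeerConnection's
+// nativePeerConnection field. Wrapping it in a scoped_refptr takes an extra
+// reference for the duration of the caller's use.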
+static rtc::scoped_refptr<PeerConnectionInterface> ExtractNativePC(
+ JNIEnv* jni, jobject j_pc) {
+ jfieldID native_pc_id = GetFieldID(jni,
+ GetObjectClass(jni, j_pc), "nativePeerConnection", "J");
+ jlong j_p = GetLongField(jni, j_pc, native_pc_id);
+ return rtc::scoped_refptr<PeerConnectionInterface>(
+ reinterpret_cast<PeerConnectionInterface*>(j_p));
+}
+
+JOW(jobject, PeerConnection_getLocalDescription)(JNIEnv* jni, jobject j_pc) {
+ const SessionDescriptionInterface* sdp =
+ ExtractNativePC(jni, j_pc)->local_description();
+ return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
+}
+
+JOW(jobject, PeerConnection_getRemoteDescription)(JNIEnv* jni, jobject j_pc) {
+ const SessionDescriptionInterface* sdp =
+ ExtractNativePC(jni, j_pc)->remote_description();
+ return sdp ? JavaSdpFromNativeSdp(jni, sdp) : NULL;
+}
+
+JOW(jobject, PeerConnection_createDataChannel)(
+ JNIEnv* jni, jobject j_pc, jstring j_label, jobject j_init) {
+ DataChannelInit init = JavaDataChannelInitToNative(jni, j_init);
+ rtc::scoped_refptr<DataChannelInterface> channel(
+ ExtractNativePC(jni, j_pc)->CreateDataChannel(
+ JavaToStdString(jni, j_label), &init));
+ // Don't pass channel.get() directly through NewObject: on 32-bit platforms
+ // the vararg would be read as 64 bits, pulling in memory beyond the 32-bit
+ // pointer. Convert to jlong explicitly instead.
+ jlong nativeChannelPtr = jlongFromPointer(channel.get());
+ RTC_CHECK(nativeChannelPtr) << "Failed to create DataChannel";
+ jclass j_data_channel_class = FindClass(jni, "org/webrtc/DataChannel");
+ jmethodID j_data_channel_ctor = GetMethodID(
+ jni, j_data_channel_class, "<init>", "(J)V");
+ jobject j_channel = jni->NewObject(
+ j_data_channel_class, j_data_channel_ctor, nativeChannelPtr);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ // Channel is now owned by Java object, and will be freed from there.
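+ // After the AddRef below there should be exactly two references: the local
+ // |channel| scoped_refptr (dropped at scope exit) and the Java-owned one.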
+ int bumped_count = channel->AddRef();
+ RTC_CHECK(bumped_count == 2) << "Unexpected refcount";
+ return j_channel;
+}
+
+JOW(void, PeerConnection_createOffer)(
+ JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
+ ConstraintsWrapper* constraints =
+ new ConstraintsWrapper(jni, j_constraints);
+ rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
+ new rtc::RefCountedObject<CreateSdpObserverWrapper>(
+ jni, j_observer, constraints));
+ ExtractNativePC(jni, j_pc)->CreateOffer(observer, constraints);
+}
+
+JOW(void, PeerConnection_createAnswer)(
+ JNIEnv* jni, jobject j_pc, jobject j_observer, jobject j_constraints) {
+ ConstraintsWrapper* constraints =
+ new ConstraintsWrapper(jni, j_constraints);
+ rtc::scoped_refptr<CreateSdpObserverWrapper> observer(
+ new rtc::RefCountedObject<CreateSdpObserverWrapper>(
+ jni, j_observer, constraints));
+ ExtractNativePC(jni, j_pc)->CreateAnswer(observer, constraints);
+}
+
+// Helper to create a SessionDescriptionInterface from a SessionDescription.
+static SessionDescriptionInterface* JavaSdpToNativeSdp(
+ JNIEnv* jni, jobject j_sdp) {
+ jfieldID j_type_id = GetFieldID(
+ jni, GetObjectClass(jni, j_sdp), "type",
+ "Lorg/webrtc/SessionDescription$Type;");
+ jobject j_type = GetObjectField(jni, j_sdp, j_type_id);
+ jmethodID j_canonical_form_id = GetMethodID(
+ jni, GetObjectClass(jni, j_type), "canonicalForm",
+ "()Ljava/lang/String;");
+ jstring j_type_string = (jstring)jni->CallObjectMethod(
+ j_type, j_canonical_form_id);
+ CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
+ std::string std_type = JavaToStdString(jni, j_type_string);
+
+ jfieldID j_description_id = GetFieldID(
+ jni, GetObjectClass(jni, j_sdp), "description", "Ljava/lang/String;");
+ jstring j_description = (jstring)GetObjectField(jni, j_sdp, j_description_id);
+ std::string std_description = JavaToStdString(jni, j_description);
+
+ return webrtc::CreateSessionDescription(
+ std_type, std_description, NULL);
+}
+
+JOW(void, PeerConnection_setLocalDescription)(
+ JNIEnv* jni, jobject j_pc,
+ jobject j_observer, jobject j_sdp) {
+ rtc::scoped_refptr<SetSdpObserverWrapper> observer(
+ new rtc::RefCountedObject<SetSdpObserverWrapper>(
+ jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
+ ExtractNativePC(jni, j_pc)->SetLocalDescription(
+ observer, JavaSdpToNativeSdp(jni, j_sdp));
+}
+
+JOW(void, PeerConnection_setRemoteDescription)(
+ JNIEnv* jni, jobject j_pc,
+ jobject j_observer, jobject j_sdp) {
+ rtc::scoped_refptr<SetSdpObserverWrapper> observer(
+ new rtc::RefCountedObject<SetSdpObserverWrapper>(
+ jni, j_observer, reinterpret_cast<ConstraintsWrapper*>(NULL)));
+ ExtractNativePC(jni, j_pc)->SetRemoteDescription(
+ observer, JavaSdpToNativeSdp(jni, j_sdp));
+}
+
+JOW(jboolean, PeerConnection_setConfiguration)(
+ JNIEnv* jni, jobject j_pc, jobject j_rtc_config) {
+ PeerConnectionInterface::RTCConfiguration rtc_config;
+ JavaRTCConfigurationToJsepRTCConfiguration(jni, j_rtc_config, &rtc_config);
+ return ExtractNativePC(jni, j_pc)->SetConfiguration(rtc_config);
+}
+
+JOW(jboolean, PeerConnection_nativeAddIceCandidate)(
+ JNIEnv* jni, jobject j_pc, jstring j_sdp_mid,
+ jint j_sdp_mline_index, jstring j_candidate_sdp) {
+ std::string sdp_mid = JavaToStdString(jni, j_sdp_mid);
+ std::string sdp = JavaToStdString(jni, j_candidate_sdp);
+ std::unique_ptr<IceCandidateInterface> candidate(
+ webrtc::CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, NULL));
+ return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
+}
+
+static cricket::Candidate GetCandidateFromJava(JNIEnv* jni,
+ jobject j_candidate) {
+ jclass j_candidate_class = GetObjectClass(jni, j_candidate);
+ jfieldID j_sdp_mid_id =
+ GetFieldID(jni, j_candidate_class, "sdpMid", "Ljava/lang/String;");
+ std::string sdp_mid =
+ JavaToStdString(jni, GetStringField(jni, j_candidate, j_sdp_mid_id));
+ jfieldID j_sdp_id =
+ GetFieldID(jni, j_candidate_class, "sdp", "Ljava/lang/String;");
+ std::string sdp =
+ JavaToStdString(jni, GetStringField(jni, j_candidate, j_sdp_id));
+ cricket::Candidate candidate;
+ if (!webrtc::SdpDeserializeCandidate(sdp_mid, sdp, &candidate, NULL)) {
+ LOG(LS_ERROR) << "SdpDescrializeCandidate failed with sdp " << sdp;
+ }
+ return candidate;
+}
+
+JOW(jboolean, PeerConnection_nativeRemoveIceCandidates)
+(JNIEnv* jni, jobject j_pc, jobjectArray j_candidates) {
+ std::vector<cricket::Candidate> candidates;
+ size_t num_candidates = jni->GetArrayLength(j_candidates);
+ for (size_t i = 0; i < num_candidates; ++i) {
+ jobject j_candidate = jni->GetObjectArrayElement(j_candidates, i);
+ candidates.push_back(GetCandidateFromJava(jni, j_candidate));
+ }
+ return ExtractNativePC(jni, j_pc)->RemoveIceCandidates(candidates);
+}
+
+JOW(jboolean, PeerConnection_nativeAddLocalStream)(
+ JNIEnv* jni, jobject j_pc, jlong native_stream) {
+ return ExtractNativePC(jni, j_pc)->AddStream(
+ reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+JOW(void, PeerConnection_nativeRemoveLocalStream)(
+ JNIEnv* jni, jobject j_pc, jlong native_stream) {
+ ExtractNativePC(jni, j_pc)->RemoveStream(
+ reinterpret_cast<MediaStreamInterface*>(native_stream));
+}
+
+JOW(jobject, PeerConnection_nativeCreateSender)(
+ JNIEnv* jni, jobject j_pc, jstring j_kind, jstring j_stream_id) {
+ jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
+ jmethodID j_rtp_sender_ctor =
+ GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
+
+ std::string kind = JavaToStdString(jni, j_kind);
+ std::string stream_id = JavaToStdString(jni, j_stream_id);
+ rtc::scoped_refptr<RtpSenderInterface> sender =
+ ExtractNativePC(jni, j_pc)->CreateSender(kind, stream_id);
+ if (!sender.get()) {
+ return nullptr;
+ }
+ jlong nativeSenderPtr = jlongFromPointer(sender.get());
+ jobject j_sender =
+ jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ // Sender is now owned by the Java object, and will be freed from
+ // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+ sender->AddRef();
+ return j_sender;
+}
+
+JOW(jobject, PeerConnection_nativeGetSenders)(JNIEnv* jni, jobject j_pc) {
+ jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
+ jmethodID j_array_list_ctor =
+ GetMethodID(jni, j_array_list_class, "<init>", "()V");
+ jmethodID j_array_list_add =
+ GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
+ jobject j_senders = jni->NewObject(j_array_list_class, j_array_list_ctor);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+
+ jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
+ jmethodID j_rtp_sender_ctor =
+ GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
+
+ auto senders = ExtractNativePC(jni, j_pc)->GetSenders();
+ for (const auto& sender : senders) {
+ jlong nativeSenderPtr = jlongFromPointer(sender.get());
+ jobject j_sender =
+ jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ // Sender is now owned by the Java object, and will be freed from
+ // RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
+ sender->AddRef();
+ jni->CallBooleanMethod(j_senders, j_array_list_add, j_sender);
+ CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+ }
+ return j_senders;
+}
+
+JOW(jobject, PeerConnection_nativeGetReceivers)(JNIEnv* jni, jobject j_pc) {
+ jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
+ jmethodID j_array_list_ctor =
+ GetMethodID(jni, j_array_list_class, "<init>", "()V");
+ jmethodID j_array_list_add =
+ GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
+ jobject j_receivers = jni->NewObject(j_array_list_class, j_array_list_ctor);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+
+ jclass j_rtp_receiver_class = FindClass(jni, "org/webrtc/RtpReceiver");
+ jmethodID j_rtp_receiver_ctor =
+ GetMethodID(jni, j_rtp_receiver_class, "<init>", "(J)V");
+
+ auto receivers = ExtractNativePC(jni, j_pc)->GetReceivers();
+ for (const auto& receiver : receivers) {
+ jlong nativeReceiverPtr = jlongFromPointer(receiver.get());
+ jobject j_receiver = jni->NewObject(j_rtp_receiver_class,
+ j_rtp_receiver_ctor, nativeReceiverPtr);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ // Receiver is now owned by Java object, and will be freed from there.
+ receiver->AddRef();
+ jni->CallBooleanMethod(j_receivers, j_array_list_add, j_receiver);
+ CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+ }
+ return j_receivers;
+}
+
+JOW(jboolean, PeerConnection_nativeGetStats)(
+ JNIEnv* jni, jobject j_pc, jobject j_observer, jlong native_track) {
+ rtc::scoped_refptr<StatsObserverWrapper> observer(
+ new rtc::RefCountedObject<StatsObserverWrapper>(jni, j_observer));
+ return ExtractNativePC(jni, j_pc)->GetStats(
+ observer,
+ reinterpret_cast<MediaStreamTrackInterface*>(native_track),
+ PeerConnectionInterface::kStatsOutputLevelStandard);
+}
+
+JOW(jobject, PeerConnection_signalingState)(JNIEnv* jni, jobject j_pc) {
+ PeerConnectionInterface::SignalingState state =
+ ExtractNativePC(jni, j_pc)->signaling_state();
+ return JavaEnumFromIndex(jni, "PeerConnection$SignalingState", state);
+}
+
+JOW(jobject, PeerConnection_iceConnectionState)(JNIEnv* jni, jobject j_pc) {
+ PeerConnectionInterface::IceConnectionState state =
+ ExtractNativePC(jni, j_pc)->ice_connection_state();
+ return JavaEnumFromIndex(jni, "PeerConnection$IceConnectionState", state);
+}
+
+JOW(jobject, PeerConnection_iceGatheringState)(JNIEnv* jni, jobject j_pc) {
+ PeerConnectionInterface::IceGatheringState state =
+ ExtractNativePC(jni, j_pc)->ice_gathering_state();
+ return JavaEnumFromIndex(jni, "PeerConnection$IceGatheringState", state);
+}
+
+JOW(void, PeerConnection_close)(JNIEnv* jni, jobject j_pc) {
+ ExtractNativePC(jni, j_pc)->Close();
+}
+
+JOW(jobject, MediaSource_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
+ rtc::scoped_refptr<MediaSourceInterface> p(
+ reinterpret_cast<MediaSourceInterface*>(j_p));
+ return JavaEnumFromIndex(jni, "MediaSource$State", p->state());
+}
+
+JOW(jlong, VideoRenderer_nativeWrapVideoRenderer)(
+ JNIEnv* jni, jclass, jobject j_callbacks) {
+ std::unique_ptr<JavaVideoRendererWrapper> renderer(
+ new JavaVideoRendererWrapper(jni, j_callbacks));
+ return (jlong)renderer.release();
+}
+
+JOW(void, VideoRenderer_nativeCopyPlane)(
+ JNIEnv* jni, jclass, jobject j_src_buffer, jint width, jint height,
+ jint src_stride, jobject j_dst_buffer, jint dst_stride) {
+ size_t src_size = jni->GetDirectBufferCapacity(j_src_buffer);
+ size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer);
+ RTC_CHECK(src_stride >= width) << "Wrong source stride " << src_stride;
+ RTC_CHECK(dst_stride >= width) << "Wrong destination stride " << dst_stride;
+ RTC_CHECK(src_size >= src_stride * height)
+ << "Insufficient source buffer capacity " << src_size;
+ RTC_CHECK(dst_size >= dst_stride * height)
+ << "Insufficient destination buffer capacity " << dst_size;
+ uint8_t* src =
+ reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer));
+ uint8_t* dst =
+ reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_buffer));
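+ // Matching strides mean both buffers share the same row layout (padding
+ // included), so the plane can be copied in one shot; otherwise copy row by
+ // row, skipping the per-row padding.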
+ if (src_stride == dst_stride) {
+ memcpy(dst, src, src_stride * height);
+ } else {
+ for (int i = 0; i < height; i++) {
+ memcpy(dst, src, width);
+ src += src_stride;
+ dst += dst_stride;
+ }
+ }
+}
+
+JOW(void, VideoSource_stop)(JNIEnv* jni, jclass, jlong j_p) {
+ reinterpret_cast<VideoTrackSourceInterface*>(j_p)->Stop();
+}
+
+JOW(void, VideoSource_restart)(
+ JNIEnv* jni, jclass, jlong j_p_source, jlong j_p_format) {
+ reinterpret_cast<VideoTrackSourceInterface*>(j_p_source)->Restart();
+}
+
+JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
+ return JavaStringFromStdString(
+ jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
+}
+
+JOW(jstring, MediaStreamTrack_nativeKind)(JNIEnv* jni, jclass, jlong j_p) {
+ return JavaStringFromStdString(
+ jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->kind());
+}
+
+JOW(jboolean, MediaStreamTrack_nativeEnabled)(JNIEnv* jni, jclass, jlong j_p) {
+ return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->enabled();
+}
+
+JOW(jobject, MediaStreamTrack_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
+ return JavaEnumFromIndex(
+ jni,
+ "MediaStreamTrack$State",
+ reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state());
+}
+
+JOW(jboolean, MediaStreamTrack_nativeSetEnabled)(
+ JNIEnv* jni, jclass, jlong j_p, jboolean enabled) {
+ return reinterpret_cast<MediaStreamTrackInterface*>(j_p)
+ ->set_enabled(enabled);
+}
+
+JOW(void, VideoTrack_nativeAddRenderer)(
+ JNIEnv* jni, jclass,
+ jlong j_video_track_pointer, jlong j_renderer_pointer) {
+ reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)
+ ->AddOrUpdateSink(
+ reinterpret_cast<rtc::VideoSinkInterface<cricket::VideoFrame>*>(
+ j_renderer_pointer),
+ rtc::VideoSinkWants());
+}
+
+JOW(void, VideoTrack_nativeRemoveRenderer)(
+ JNIEnv* jni, jclass,
+ jlong j_video_track_pointer, jlong j_renderer_pointer) {
+ reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)
+ ->RemoveSink(
+ reinterpret_cast<rtc::VideoSinkInterface<cricket::VideoFrame>*>(
+ j_renderer_pointer));
+}
+
+JOW(jlong, CallSessionFileRotatingLogSink_nativeAddSink)(
+ JNIEnv* jni, jclass,
+ jstring j_dirPath, jint j_maxFileSize, jint j_severity) {
+ std::string dir_path = JavaToStdString(jni, j_dirPath);
+ rtc::CallSessionFileRotatingLogSink* sink =
+ new rtc::CallSessionFileRotatingLogSink(dir_path, j_maxFileSize);
+ if (!sink->Init()) {
+ LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+ "Failed to init CallSessionFileRotatingLogSink for path " << dir_path;
+ delete sink;
+ return 0;
+ }
+ rtc::LogMessage::AddLogToStream(
+ sink, static_cast<rtc::LoggingSeverity>(j_severity));
+ return (jlong) sink;
+}
+
+JOW(void, CallSessionFileRotatingLogSink_nativeDeleteSink)(
+ JNIEnv* jni, jclass, jlong j_sink) {
+ rtc::CallSessionFileRotatingLogSink* sink =
+ reinterpret_cast<rtc::CallSessionFileRotatingLogSink*>(j_sink);
+ rtc::LogMessage::RemoveLogToStream(sink);
+ delete sink;
+}
+
+JOW(jbyteArray, CallSessionFileRotatingLogSink_nativeGetLogData)(
+ JNIEnv* jni, jclass, jstring j_dirPath) {
+ std::string dir_path = JavaToStdString(jni, j_dirPath);
+ std::unique_ptr<rtc::CallSessionFileRotatingStream> stream(
+ new rtc::CallSessionFileRotatingStream(dir_path));
+ if (!stream->Open()) {
+ LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+ "Failed to open CallSessionFileRotatingStream for path " << dir_path;
+ return jni->NewByteArray(0);
+ }
+ size_t log_size = 0;
+ if (!stream->GetSize(&log_size) || log_size == 0) {
+ LOG_V(rtc::LoggingSeverity::LS_WARNING) <<
+ "CallSessionFileRotatingStream returns 0 size for path " << dir_path;
+ return jni->NewByteArray(0);
+ }
+
+ size_t read = 0;
+ // Array form so that the deleter matches the array allocation.
+ std::unique_ptr<jbyte[]> buffer(new jbyte[log_size]);
+ stream->ReadAll(buffer.get(), log_size, &read, nullptr);
+
+ jbyteArray result = jni->NewByteArray(read);
+ jni->SetByteArrayRegion(result, 0, read, buffer.get());
+
+ return result;
+}
+
+JOW(jboolean, RtpSender_nativeSetTrack)(JNIEnv* jni,
+ jclass,
+ jlong j_rtp_sender_pointer,
+ jlong j_track_pointer) {
+ return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->SetTrack(reinterpret_cast<MediaStreamTrackInterface*>(j_track_pointer));
+}
+
+JOW(jlong, RtpSender_nativeGetTrack)(JNIEnv* jni,
+ jclass,
+ jlong j_rtp_sender_pointer,
+ jlong j_track_pointer) {
+ return jlongFromPointer(
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->track()
+ .release());
+}
+
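+// Mirrors org.webrtc.RtpParameters (with its nested Encoding and Codec
+// classes) into webrtc::RtpParameters. A null maxBitrateBps on the Java side
+// maps to "unlimited" (-1).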
+static void JavaRtpParametersToJsepRtpParameters(
+ JNIEnv* jni,
+ jobject j_parameters,
+ webrtc::RtpParameters* parameters) {
+ RTC_CHECK(parameters != nullptr);
+ jclass parameters_class = jni->FindClass("org/webrtc/RtpParameters");
+ jfieldID encodings_id =
+ GetFieldID(jni, parameters_class, "encodings", "Ljava/util/LinkedList;");
+ jfieldID codecs_id =
+ GetFieldID(jni, parameters_class, "codecs", "Ljava/util/LinkedList;");
+
+ // Convert encodings.
+ jobject j_encodings = GetObjectField(jni, j_parameters, encodings_id);
+ const int kBitrateUnlimited = -1;
+ jclass j_encoding_parameters_class =
+ jni->FindClass("org/webrtc/RtpParameters$Encoding");
+ jfieldID active_id =
+ GetFieldID(jni, j_encoding_parameters_class, "active", "Z");
+ jfieldID bitrate_id = GetFieldID(jni, j_encoding_parameters_class,
+ "maxBitrateBps", "Ljava/lang/Integer;");
+ jclass j_integer_class = jni->FindClass("java/lang/Integer");
+ jmethodID int_value_id = GetMethodID(jni, j_integer_class, "intValue", "()I");
+
+ for (jobject j_encoding_parameters : Iterable(jni, j_encodings)) {
+ webrtc::RtpEncodingParameters encoding;
+ encoding.active = GetBooleanField(jni, j_encoding_parameters, active_id);
+ jobject j_bitrate =
+ GetNullableObjectField(jni, j_encoding_parameters, bitrate_id);
+ if (!IsNull(jni, j_bitrate)) {
+ int bitrate_value = jni->CallIntMethod(j_bitrate, int_value_id);
+ CHECK_EXCEPTION(jni) << "error during CallIntMethod";
+ encoding.max_bitrate_bps = bitrate_value;
+ } else {
+ encoding.max_bitrate_bps = kBitrateUnlimited;
+ }
+ parameters->encodings.push_back(encoding);
+ }
+
+ // Convert codecs.
+ jobject j_codecs = GetObjectField(jni, j_parameters, codecs_id);
+ jclass codec_class = jni->FindClass("org/webrtc/RtpParameters$Codec");
+ jfieldID payload_type_id = GetFieldID(jni, codec_class, "payloadType", "I");
+ jfieldID mime_type_id =
+ GetFieldID(jni, codec_class, "mimeType", "Ljava/lang/String;");
+ jfieldID clock_rate_id = GetFieldID(jni, codec_class, "clockRate", "I");
+ jfieldID channels_id = GetFieldID(jni, codec_class, "channels", "I");
+
+ for (jobject j_codec : Iterable(jni, j_codecs)) {
+ webrtc::RtpCodecParameters codec;
+ codec.payload_type = GetIntField(jni, j_codec, payload_type_id);
+ codec.mime_type =
+ JavaToStdString(jni, GetStringField(jni, j_codec, mime_type_id));
+ codec.clock_rate = GetIntField(jni, j_codec, clock_rate_id);
+ codec.channels = GetIntField(jni, j_codec, channels_id);
+ parameters->codecs.push_back(codec);
+ }
+}
+
+static jobject JsepRtpParametersToJavaRtpParameters(
+ JNIEnv* jni,
+ const webrtc::RtpParameters& parameters) {
+ jclass parameters_class = jni->FindClass("org/webrtc/RtpParameters");
+ jmethodID parameters_ctor =
+ GetMethodID(jni, parameters_class, "<init>", "()V");
+ jobject j_parameters = jni->NewObject(parameters_class, parameters_ctor);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+
+ // Add encodings.
+ jclass encoding_class = jni->FindClass("org/webrtc/RtpParameters$Encoding");
+ jmethodID encoding_ctor = GetMethodID(jni, encoding_class, "<init>", "()V");
+ jfieldID encodings_id =
+ GetFieldID(jni, parameters_class, "encodings", "Ljava/util/LinkedList;");
+ jobject j_encodings = GetObjectField(jni, j_parameters, encodings_id);
+ jmethodID encodings_add = GetMethodID(jni, GetObjectClass(jni, j_encodings),
+ "add", "(Ljava/lang/Object;)Z");
+ jfieldID active_id =
+ GetFieldID(jni, encoding_class, "active", "Z");
+ jfieldID bitrate_id =
+ GetFieldID(jni, encoding_class, "maxBitrateBps", "Ljava/lang/Integer;");
+
+ jclass integer_class = jni->FindClass("java/lang/Integer");
+ jmethodID integer_ctor = GetMethodID(jni, integer_class, "<init>", "(I)V");
+
+ for (const webrtc::RtpEncodingParameters& encoding : parameters.encodings) {
+ jobject j_encoding_parameters =
+ jni->NewObject(encoding_class, encoding_ctor);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ jni->SetBooleanField(j_encoding_parameters, active_id, encoding.active);
+ CHECK_EXCEPTION(jni) << "error during SetBooleanField";
+ if (encoding.max_bitrate_bps > 0) {
+ jobject j_bitrate_value =
+ jni->NewObject(integer_class, integer_ctor, encoding.max_bitrate_bps);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ jni->SetObjectField(j_encoding_parameters, bitrate_id, j_bitrate_value);
+ CHECK_EXCEPTION(jni) << "error during SetObjectField";
+ }
+ jboolean added = jni->CallBooleanMethod(j_encodings, encodings_add,
+ j_encoding_parameters);
+ CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+ RTC_CHECK(added);
+ }
+
+ // Add codecs.
+ jclass codec_class = jni->FindClass("org/webrtc/RtpParameters$Codec");
+ jmethodID codec_ctor = GetMethodID(jni, codec_class, "<init>", "()V");
+ jfieldID codecs_id =
+ GetFieldID(jni, parameters_class, "codecs", "Ljava/util/LinkedList;");
+ jobject j_codecs = GetObjectField(jni, j_parameters, codecs_id);
+ jmethodID codecs_add = GetMethodID(jni, GetObjectClass(jni, j_codecs),
+ "add", "(Ljava/lang/Object;)Z");
+ jfieldID payload_type_id = GetFieldID(jni, codec_class, "payloadType", "I");
+ jfieldID mime_type_id =
+ GetFieldID(jni, codec_class, "mimeType", "Ljava/lang/String;");
+ jfieldID clock_rate_id = GetFieldID(jni, codec_class, "clockRate", "I");
+ jfieldID channels_id = GetFieldID(jni, codec_class, "channels", "I");
+
+ for (const webrtc::RtpCodecParameters& codec : parameters.codecs) {
+ jobject j_codec = jni->NewObject(codec_class, codec_ctor);
+ CHECK_EXCEPTION(jni) << "error during NewObject";
+ jni->SetIntField(j_codec, payload_type_id, codec.payload_type);
+ CHECK_EXCEPTION(jni) << "error during SetIntField";
+ jni->SetObjectField(j_codec, mime_type_id,
+ JavaStringFromStdString(jni, codec.mime_type));
+ CHECK_EXCEPTION(jni) << "error during SetObjectField";
+ jni->SetIntField(j_codec, clock_rate_id, codec.clock_rate);
+ CHECK_EXCEPTION(jni) << "error during SetIntField";
+ jni->SetIntField(j_codec, channels_id, codec.channels);
+ CHECK_EXCEPTION(jni) << "error during SetIntField";
+ jboolean added = jni->CallBooleanMethod(j_codecs, codecs_add, j_codec);
+ CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
+ RTC_CHECK(added);
+ }
+
+ return j_parameters;
+}
+
+JOW(jboolean, RtpSender_nativeSetParameters)
+(JNIEnv* jni, jclass, jlong j_rtp_sender_pointer, jobject j_parameters) {
+ if (IsNull(jni, j_parameters)) {
+ return false;
+ }
+ webrtc::RtpParameters parameters;
+ JavaRtpParametersToJsepRtpParameters(jni, j_parameters, &parameters);
+ return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->SetParameters(parameters);
+}
+
+JOW(jobject, RtpSender_nativeGetParameters)
+(JNIEnv* jni, jclass, jlong j_rtp_sender_pointer) {
+ webrtc::RtpParameters parameters =
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
+ ->GetParameters();
+ return JsepRtpParametersToJavaRtpParameters(jni, parameters);
+}
+
+JOW(jstring, RtpSender_nativeId)(
+ JNIEnv* jni, jclass, jlong j_rtp_sender_pointer) {
+ return JavaStringFromStdString(
+ jni, reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->id());
+}
+
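+// Releases the reference that was taken for the Java wrapper when the sender
+// was handed over (see nativeCreateSender / nativeGetSenders above).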
+JOW(void, RtpSender_free)(JNIEnv* jni, jclass, jlong j_rtp_sender_pointer) {
+ reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->Release();
+}
+
+JOW(jlong, RtpReceiver_nativeGetTrack)(JNIEnv* jni,
+ jclass,
+ jlong j_rtp_receiver_pointer,
+ jlong j_track_pointer) {
+ return jlongFromPointer(
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->track()
+ .release());
+}
+
+JOW(jboolean, RtpReceiver_nativeSetParameters)
+(JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer, jobject j_parameters) {
+ if (IsNull(jni, j_parameters)) {
+ return false;
+ }
+ webrtc::RtpParameters parameters;
+ JavaRtpParametersToJsepRtpParameters(jni, j_parameters, &parameters);
+ return reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->SetParameters(parameters);
+}
+
+JOW(jobject, RtpReceiver_nativeGetParameters)
+(JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer) {
+ webrtc::RtpParameters parameters =
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
+ ->GetParameters();
+ return JsepRtpParametersToJavaRtpParameters(jni, parameters);
+}
+
+JOW(jstring, RtpReceiver_nativeId)(
+ JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer) {
+ return JavaStringFromStdString(
+ jni,
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->id());
+}
+
+JOW(void, RtpReceiver_free)(JNIEnv* jni, jclass, jlong j_rtp_receiver_pointer) {
+ reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->Release();
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/android/jni/surfacetexturehelper_jni.cc b/webrtc/api/android/jni/surfacetexturehelper_jni.cc
new file mode 100644
index 0000000..29b20e1
--- /dev/null
+++ b/webrtc/api/android/jni/surfacetexturehelper_jni.cc
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "webrtc/api/android/jni/surfacetexturehelper_jni.h"
+
+#include "webrtc/api/android/jni/classreferenceholder.h"
+#include "webrtc/base/bind.h"
+#include "webrtc/base/checks.h"
+#include "webrtc/base/logging.h"
+
+namespace webrtc_jni {
+
+rtc::scoped_refptr<SurfaceTextureHelper> SurfaceTextureHelper::create(
+ JNIEnv* jni,
+ const char* thread_name,
+ jobject j_egl_context) {
+ jobject j_surface_texture_helper = jni->CallStaticObjectMethod(
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ GetStaticMethodID(jni, FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ "create",
+ "(Ljava/lang/String;Lorg/webrtc/EglBase$Context;)"
+ "Lorg/webrtc/SurfaceTextureHelper;"),
+ jni->NewStringUTF(thread_name), j_egl_context);
+ CHECK_EXCEPTION(jni)
+ << "error during initialization of Java SurfaceTextureHelper";
+ if (IsNull(jni, j_surface_texture_helper))
+ return nullptr;
+ return new rtc::RefCountedObject<SurfaceTextureHelper>(
+ jni, j_surface_texture_helper);
+}
+
+SurfaceTextureHelper::SurfaceTextureHelper(JNIEnv* jni,
+ jobject j_surface_texture_helper)
+ : j_surface_texture_helper_(jni, j_surface_texture_helper),
+ j_return_texture_method_(
+ GetMethodID(jni,
+ FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ "returnTextureFrame",
+ "()V")) {
+ CHECK_EXCEPTION(jni) << "error during initialization of SurfaceTextureHelper";
+}
+
+SurfaceTextureHelper::~SurfaceTextureHelper() {
+ LOG(LS_INFO) << "SurfaceTextureHelper dtor";
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ jni->CallVoidMethod(
+ *j_surface_texture_helper_,
+ GetMethodID(jni, FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+ "dispose", "()V"));
+
+ CHECK_EXCEPTION(jni) << "error during SurfaceTextureHelper.dispose()";
+}
+
+jobject SurfaceTextureHelper::GetJavaSurfaceTextureHelper() const {
+ return *j_surface_texture_helper_;
+}
+
+void SurfaceTextureHelper::ReturnTextureFrame() const {
+ JNIEnv* jni = AttachCurrentThreadIfNeeded();
+ jni->CallVoidMethod(*j_surface_texture_helper_, j_return_texture_method_);
+
+ CHECK_EXCEPTION(jni)
+ << "error during SurfaceTextureHelper.returnTextureFrame";
+}
+
+rtc::scoped_refptr<webrtc::VideoFrameBuffer>
+SurfaceTextureHelper::CreateTextureFrame(int width, int height,
+ const NativeHandleImpl& native_handle) {
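+ // rtc::Bind on a ref-counted object holds a reference, so this helper stays
+ // alive until the returned buffer has handed its texture back.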
+ return new rtc::RefCountedObject<AndroidTextureBuffer>(
+ width, height, native_handle, *j_surface_texture_helper_,
+ rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
+}
+
+} // namespace webrtc_jni
diff --git a/webrtc/api/android/jni/surfacetexturehelper_jni.h b/webrtc/api/android/jni/surfacetexturehelper_jni.h
new file mode 100644
index 0000000..5a28edc
--- /dev/null
+++ b/webrtc/api/android/jni/surfacetexturehelper_jni.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_API_ANDROID_JNI_SURFACETEXTUREHELPER_JNI_H_
+#define WEBRTC_API_ANDROID_JNI_SURFACETEXTUREHELPER_JNI_H_
+
+#include <jni.h>
+
+#include "webrtc/api/android/jni/jni_helpers.h"
+#include "webrtc/api/android/jni/native_handle_impl.h"
+#include "webrtc/base/refcount.h"
+#include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/common_video/include/video_frame_buffer.h"
+
+namespace webrtc_jni {
+
+// Helper class to create and synchronize access to an Android SurfaceTexture.
+// It is used for creating webrtc::VideoFrameBuffers from a SurfaceTexture when
+// the SurfaceTexture has been updated.
+// When the VideoFrameBuffer is released, this class returns the buffer to the
+// Java SurfaceTextureHelper so it can be updated safely. The VideoFrameBuffer
+// can be released on an arbitrary thread.
+// SurfaceTextureHelper is reference counted to make sure that it is not
+// destroyed while a VideoFrameBuffer is in use.
+// This class is the C++ counterpart of the Java class SurfaceTextureHelper.
+// It owns the corresponding Java object, and calls the Java dispose
+// method when destroyed.
+// Usage (a minimal sketch follows this list):
+// 1. Create an instance of this class.
+// 2. Get the Java SurfaceTextureHelper with GetJavaSurfaceTextureHelper().
+// 3. Register a listener on the Java SurfaceTextureHelper and start producing
+//    new buffers.
+// 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
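+// A minimal sketch of steps 1-4 (the variable names are illustrative, not
+// part of this API):
+//
+//   rtc::scoped_refptr<SurfaceTextureHelper> helper =
+//       SurfaceTextureHelper::create(jni, "CaptureThread", j_egl_context);
+//   if (helper) {
+//     jobject j_helper = helper->GetJavaSurfaceTextureHelper();
+//     // (step 3) Register a listener on j_helper from the Java side.
+//     rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+//         helper->CreateTextureFrame(width, height, native_handle);
+//   }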
+class SurfaceTextureHelper : public rtc::RefCountInterface {
+ public:
+ // Might return null if creating the Java SurfaceTextureHelper fails.
+ static rtc::scoped_refptr<SurfaceTextureHelper> create(
+ JNIEnv* jni, const char* thread_name, jobject j_egl_context);
+
+ jobject GetJavaSurfaceTextureHelper() const;
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
+ int width,
+ int height,
+ const NativeHandleImpl& native_handle);
+
+ // May be called on arbitrary thread.
+ void ReturnTextureFrame() const;
+
+ protected:
+ ~SurfaceTextureHelper();
+ SurfaceTextureHelper(JNIEnv* jni, jobject j_surface_texture_helper);
+
+ private:
+ const ScopedGlobalRef<jobject> j_surface_texture_helper_;
+ const jmethodID j_return_texture_method_;
+};
+
+} // namespace webrtc_jni
+
+#endif  // WEBRTC_API_ANDROID_JNI_SURFACETEXTUREHELPER_JNI_H_