Annotate libjingle_peerconnection_java with @Nullable.

Adds javax.annotation.Nullable annotations across the Android SDK Java
classes (nullable fields, parameters such as MediaRecorder and shared EGL
contexts, and factory/lookup methods that may return null), adds a
dependency on //third_party/jsr-305:jsr_305_javalib, and turns the
error-prone nullness checks ParameterNotNullable, FieldMissingNullable
and ReturnMissingNullable into javac errors for this target.

Bug: webrtc:8881
Change-Id: Ida2ef6c003567d19529c21629c916ed40e8de3a6
Reviewed-on: https://webrtc-review.googlesource.com/63380
Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Paulina Hensman <phensman@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22563}
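
For reference, a minimal sketch of the pattern these checks enforce (a
hypothetical CodecLookup class, not part of this CL; it only assumes
jsr-305 provides javax.annotation.Nullable on the classpath): fields that
are ever assigned null and methods that can return null must carry the
annotation, otherwise the -Xep flags above fail the build.

  import javax.annotation.Nullable;

  // Hypothetical example, not part of the patch below.
  class CodecLookup {
    // FieldMissingNullable: this field is assigned null, so it must be annotated.
    @Nullable private String lastCodecName;

    // ReturnMissingNullable: this method can return null, so the return is annotated.
    @Nullable
    String findCodecName(String mimeType) {
      if ("video/x-vnd.on2.vp8".equalsIgnoreCase(mimeType)) {
        lastCodecName = "VP8";
        return lastCodecName;
      }
      lastCodecName = null;
      return null; // Without @Nullable on the method, this would now be a javac error.
    }
  }
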
diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn
index a17c05b..ee6c1b9 100644
--- a/sdk/android/BUILD.gn
+++ b/sdk/android/BUILD.gn
@@ -844,9 +844,16 @@
     ]
   }
 
+  javac_args = [
+    "-Xep:ParameterNotNullable:ERROR",
+    "-Xep:FieldMissingNullable:ERROR",
+    "-Xep:ReturnMissingNullable:ERROR",
+  ]
+
   deps = [
     "../../modules/audio_device:audio_device_java",
     "../../rtc_base:base_java",
+    "//third_party/jsr-305:jsr_305_javalib",
   ]
 }
 
diff --git a/sdk/android/api/org/webrtc/Camera1Capturer.java b/sdk/android/api/org/webrtc/Camera1Capturer.java
index 41352ab..27b2df7 100644
--- a/sdk/android/api/org/webrtc/Camera1Capturer.java
+++ b/sdk/android/api/org/webrtc/Camera1Capturer.java
@@ -12,6 +12,7 @@
 
 import android.content.Context;
 import android.media.MediaRecorder;
+import javax.annotation.Nullable;
 
 public class Camera1Capturer extends CameraCapturer {
   private final boolean captureToTexture;
@@ -26,8 +27,8 @@
   @Override
   protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
       CameraSession.Events events, Context applicationContext,
-      SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraName,
-      int width, int height, int framerate) {
+      SurfaceTextureHelper surfaceTextureHelper, @Nullable MediaRecorder mediaRecorder,
+      String cameraName, int width, int height, int framerate) {
     Camera1Session.create(createSessionCallback, events,
         captureToTexture || (mediaRecorder != null), applicationContext, surfaceTextureHelper,
         mediaRecorder, Camera1Enumerator.getCameraIndex(cameraName), width, height, framerate);
diff --git a/sdk/android/api/org/webrtc/Camera1Enumerator.java b/sdk/android/api/org/webrtc/Camera1Enumerator.java
index 429daf5..b11da31 100644
--- a/sdk/android/api/org/webrtc/Camera1Enumerator.java
+++ b/sdk/android/api/org/webrtc/Camera1Enumerator.java
@@ -13,6 +13,7 @@
 import android.os.SystemClock;
 import java.util.ArrayList;
 import java.util.List;
+import javax.annotation.Nullable;
 import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
 
 @SuppressWarnings("deprecation")
@@ -73,7 +74,7 @@
     return new Camera1Capturer(deviceName, eventsHandler, captureToTexture);
   }
 
-  private static android.hardware.Camera.CameraInfo getCameraInfo(int index) {
+  private static @Nullable android.hardware.Camera.CameraInfo getCameraInfo(int index) {
     android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
     try {
       android.hardware.Camera.getCameraInfo(index, info);
@@ -171,7 +172,7 @@
 
   // Returns the name of the camera with camera index. Returns null if the
   // camera can not be used.
-  static String getDeviceName(int index) {
+  static @Nullable String getDeviceName(int index) {
     android.hardware.Camera.CameraInfo info = getCameraInfo(index);
     if (info == null) {
       return null;
diff --git a/sdk/android/api/org/webrtc/Camera2Capturer.java b/sdk/android/api/org/webrtc/Camera2Capturer.java
index 769a94a..423c621 100644
--- a/sdk/android/api/org/webrtc/Camera2Capturer.java
+++ b/sdk/android/api/org/webrtc/Camera2Capturer.java
@@ -14,11 +14,12 @@
 import android.content.Context;
 import android.hardware.camera2.CameraManager;
 import android.media.MediaRecorder;
+import javax.annotation.Nullable;
 
 @TargetApi(21)
 public class Camera2Capturer extends CameraCapturer {
   private final Context context;
-  private final CameraManager cameraManager;
+  @Nullable private final CameraManager cameraManager;
 
   public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
     super(cameraName, eventsHandler, new Camera2Enumerator(context));
diff --git a/sdk/android/api/org/webrtc/Camera2Enumerator.java b/sdk/android/api/org/webrtc/Camera2Enumerator.java
index fa5c55d..4c3fc9a 100644
--- a/sdk/android/api/org/webrtc/Camera2Enumerator.java
+++ b/sdk/android/api/org/webrtc/Camera2Enumerator.java
@@ -10,8 +10,6 @@
 
 package org.webrtc;
 
-import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
-
 import android.annotation.TargetApi;
 import android.content.Context;
 import android.graphics.Rect;
@@ -24,11 +22,12 @@
 import android.os.SystemClock;
 import android.util.AndroidException;
 import android.util.Range;
-
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import javax.annotation.Nullable;
+import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
 
 @TargetApi(21)
 public class Camera2Enumerator implements CameraEnumerator {
@@ -41,7 +40,7 @@
       new HashMap<String, List<CaptureFormat>>();
 
   final Context context;
-  final CameraManager cameraManager;
+  @Nullable final CameraManager cameraManager;
 
   public Camera2Enumerator(Context context) {
     this.context = context;
@@ -90,7 +89,7 @@
     return new Camera2Capturer(context, deviceName, eventsHandler);
   }
 
-  private CameraCharacteristics getCameraCharacteristics(String deviceName) {
+  private @Nullable CameraCharacteristics getCameraCharacteristics(String deviceName) {
     try {
       return cameraManager.getCameraCharacteristics(deviceName);
       // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
diff --git a/sdk/android/api/org/webrtc/CameraVideoCapturer.java b/sdk/android/api/org/webrtc/CameraVideoCapturer.java
index be9d669..3d9516d 100644
--- a/sdk/android/api/org/webrtc/CameraVideoCapturer.java
+++ b/sdk/android/api/org/webrtc/CameraVideoCapturer.java
@@ -11,6 +11,7 @@
 package org.webrtc;
 
 import android.media.MediaRecorder;
+import javax.annotation.Nullable;
 
 /**
  * Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
diff --git a/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java b/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java
index 561e7a5..b9a26d3 100644
--- a/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java
+++ b/sdk/android/api/org/webrtc/DefaultVideoDecoderFactory.java
@@ -10,6 +10,8 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
+
 public class DefaultVideoDecoderFactory implements VideoDecoderFactory {
   private final HardwareVideoDecoderFactory hardwareVideoDecoderFactory;
   private final SoftwareVideoDecoderFactory softwareVideoDecoderFactory;
@@ -21,7 +23,7 @@
   }
 
   @Override
-  public VideoDecoder createDecoder(String codecType) {
+  public @Nullable VideoDecoder createDecoder(String codecType) {
     VideoDecoder decoder = hardwareVideoDecoderFactory.createDecoder(codecType);
     if (decoder != null) {
       return decoder;
diff --git a/sdk/android/api/org/webrtc/DefaultVideoEncoderFactory.java b/sdk/android/api/org/webrtc/DefaultVideoEncoderFactory.java
index 309ae74..0650cb3 100644
--- a/sdk/android/api/org/webrtc/DefaultVideoEncoderFactory.java
+++ b/sdk/android/api/org/webrtc/DefaultVideoEncoderFactory.java
@@ -10,6 +10,7 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
 import java.util.Arrays;
 import java.util.LinkedHashSet;
 import java.util.List;
@@ -31,6 +32,7 @@
     softwareVideoEncoderFactory = new SoftwareVideoEncoderFactory();
   }
 
+  @Nullable
   @Override
   public VideoEncoder createEncoder(VideoCodecInfo info) {
     final VideoEncoder videoEncoder = hardwareVideoEncoderFactory.createEncoder(info);
diff --git a/sdk/android/api/org/webrtc/EglBase.java b/sdk/android/api/org/webrtc/EglBase.java
index e94037d..31ab678 100644
--- a/sdk/android/api/org/webrtc/EglBase.java
+++ b/sdk/android/api/org/webrtc/EglBase.java
@@ -11,6 +11,7 @@
 package org.webrtc;
 
 import android.graphics.SurfaceTexture;
+import javax.annotation.Nullable;
 import android.view.Surface;
 import javax.microedition.khronos.egl.EGL10;
 
@@ -84,7 +85,7 @@
    * If |sharedContext| is null, a root context is created. This function will try to create an EGL
    * 1.4 context if possible, and an EGL 1.0 context otherwise.
    */
-  public static EglBase create(Context sharedContext, int[] configAttributes) {
+  public static EglBase create(@Nullable Context sharedContext, int[] configAttributes) {
     return (EglBase14.isEGL14Supported()
                && (sharedContext == null || sharedContext instanceof EglBase14.Context))
         ? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
diff --git a/sdk/android/api/org/webrtc/EglRenderer.java b/sdk/android/api/org/webrtc/EglRenderer.java
index ba39971..c6e9812 100644
--- a/sdk/android/api/org/webrtc/EglRenderer.java
+++ b/sdk/android/api/org/webrtc/EglRenderer.java
@@ -24,6 +24,7 @@
 import java.util.Locale;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
+import javax.annotation.Nullable;
 
 /**
  * Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on an EGL Surface.
@@ -84,7 +85,7 @@
   // |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
   // on |handlerLock|.
   private final Object handlerLock = new Object();
-  private Handler renderThreadHandler;
+  @Nullable private Handler renderThreadHandler;
 
   private final ArrayList<FrameListenerAndParams> frameListeners = new ArrayList<>();
 
@@ -98,14 +99,14 @@
 
   // EGL and GL resources for drawing YUV/OES textures. After initilization, these are only accessed
   // from the render thread.
-  private EglBase eglBase;
+  @Nullable private EglBase eglBase;
   private final VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
-  private RendererCommon.GlDrawer drawer;
+  @Nullable private RendererCommon.GlDrawer drawer;
   private final Matrix drawMatrix = new Matrix();
 
   // Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
   private final Object frameLock = new Object();
-  private VideoFrame pendingFrame;
+  @Nullable private VideoFrame pendingFrame;
 
   // These variables are synchronized on |layoutLock|.
   private final Object layoutLock = new Object();
@@ -130,7 +131,7 @@
   private long renderSwapBufferTimeNs;
 
   // Used for bitmap capturing.
-  private GlTextureFrameBuffer bitmapTextureFramebuffer;
+  @Nullable private GlTextureFrameBuffer bitmapTextureFramebuffer;
 
   private final Runnable logStatisticsRunnable = new Runnable() {
     @Override
@@ -162,7 +163,7 @@
    * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
    * init()/release() cycle.
    */
-  public void init(final EglBase.Context sharedContext, final int[] configAttributes,
+  public void init(@Nullable final EglBase.Context sharedContext, final int[] configAttributes,
       RendererCommon.GlDrawer drawer) {
     synchronized (handlerLock) {
       if (renderThreadHandler != null) {
@@ -385,7 +386,7 @@
    *                          FPS reduction.
    */
   public void addFrameListener(final FrameListener listener, final float scale,
-      final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
+      @Nullable final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
     postToRenderThread(() -> {
       final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
       frameListeners.add(
diff --git a/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java b/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
index c14ec5f..2163a04 100644
--- a/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
+++ b/sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
@@ -19,6 +19,7 @@
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecList;
 import android.os.Build;
+import javax.annotation.Nullable;
 
 /** Factory for Android hardware VideoDecoders. */
 @SuppressWarnings("deprecation") // API level 16 requires use of deprecated methods.
@@ -47,6 +48,7 @@
     this.fallbackToSoftware = fallbackToSoftware;
   }
 
+  @Nullable
   @Override
   public VideoDecoder createDecoder(String codecType) {
     VideoCodecType type = VideoCodecType.valueOf(codecType);
@@ -70,7 +72,7 @@
         sharedContext);
   }
 
-  private MediaCodecInfo findCodecForType(VideoCodecType type) {
+  private @Nullable MediaCodecInfo findCodecForType(VideoCodecType type) {
     // HW decoding is not supported on builds before KITKAT.
     if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
       return null;
diff --git a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
index 6fc747e..70b7ebb 100644
--- a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
+++ b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
@@ -22,6 +22,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import javax.annotation.Nullable;
 
 /** Factory for android hardware video encoders. */
 @SuppressWarnings("deprecation") // API 16 requires the use of deprecated methods.
@@ -40,7 +41,7 @@
   private static final List<String> H264_HW_EXCEPTION_MODELS =
       Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4");
 
-  private final EglBase14.Context sharedContext;
+  @Nullable private final EglBase14.Context sharedContext;
   private final boolean enableIntelVp8Encoder;
   private final boolean enableH264HighProfile;
   private final boolean fallbackToSoftware;
@@ -70,6 +71,7 @@
     this(null, enableIntelVp8Encoder, enableH264HighProfile);
   }
 
+  @Nullable
   @Override
   public VideoEncoder createEncoder(VideoCodecInfo input) {
     VideoCodecType type = VideoCodecType.valueOf(input.name);
@@ -141,7 +143,7 @@
     return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
   }
 
-  private MediaCodecInfo findCodecForType(VideoCodecType type) {
+  private @Nullable MediaCodecInfo findCodecForType(VideoCodecType type) {
     for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
       MediaCodecInfo info = null;
       try {
diff --git a/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java b/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
index def4299..8ab033d 100644
--- a/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
+++ b/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
@@ -10,6 +10,7 @@
 
 package org.webrtc;
 
+import android.graphics.SurfaceTexture;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
@@ -28,6 +29,7 @@
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
+import javax.annotation.Nullable;
 
 // Java-side of peerconnection.cc:MediaCodecVideoDecoder.
 // This class is an implementation detail of the Java PeerConnection API.
@@ -69,14 +71,14 @@
   private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
   // Active running decoder instance. Set in initDecode() (called from native code)
   // and reset to null in release() call.
-  private static MediaCodecVideoDecoder runningInstance = null;
-  private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
+  @Nullable private static MediaCodecVideoDecoder runningInstance = null;
+  @Nullable private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
   private static int codecErrors = 0;
   // List of disabled codec types - can be set from application.
   private static Set<String> hwDecoderDisabledTypes = new HashSet<String>();
 
-  private Thread mediaCodecThread;
-  private MediaCodec mediaCodec;
+  @Nullable private Thread mediaCodecThread;
+  @Nullable private MediaCodec mediaCodec;
   private ByteBuffer[] inputBuffers;
   private ByteBuffer[] outputBuffers;
   private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
@@ -139,9 +141,9 @@
   private boolean useSurface;
 
   // The below variables are only used when decoding to a Surface.
-  private TextureListener textureListener;
+  @Nullable private TextureListener textureListener;
   private int droppedFrames;
-  private Surface surface = null;
+  @Nullable private Surface surface = null;
   private final Queue<DecodedOutputBuffer> dequeuedSurfaceOutputBuffers =
       new ArrayDeque<DecodedOutputBuffer>();
 
@@ -242,7 +244,8 @@
     public final int colorFormat; // Color format supported by codec.
   }
 
-  private static DecoderProperties findDecoder(String mime, String[] supportedCodecPrefixes) {
+  private static @Nullable DecoderProperties findDecoder(
+      String mime, String[] supportedCodecPrefixes) {
     if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
       return null; // MediaCodec.setParameters is missing.
     }
@@ -319,8 +322,8 @@
 
   // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
   @CalledByNativeUnchecked
-  private boolean initDecode(
-      VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
+  private boolean initDecode(VideoCodecType type, int width, int height,
+      @Nullable SurfaceTextureHelper surfaceTextureHelper) {
     if (mediaCodecThread != null) {
       throw new RuntimeException("initDecode: Forgot to release()?");
     }
@@ -356,7 +359,7 @@
       stride = width;
       sliceHeight = height;
 
-      if (useSurface) {
+      if (useSurface && surfaceTextureHelper != null) {
         textureListener = new TextureListener(surfaceTextureHelper);
         surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
       }
@@ -638,8 +641,8 @@
     private final Object newFrameLock = new Object();
     // |bufferToRender| is non-null when waiting for transition between addBufferToRender() to
     // onTextureFrameAvailable().
-    private DecodedOutputBuffer bufferToRender;
-    private DecodedTextureBuffer renderedBuffer;
+    @Nullable private DecodedOutputBuffer bufferToRender;
+    @Nullable private DecodedTextureBuffer renderedBuffer;
 
     public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
       this.surfaceTextureHelper = surfaceTextureHelper;
@@ -681,6 +684,7 @@
     }
 
     // Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
+    @Nullable
     @SuppressWarnings("WaitNotInLoop")
     public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
       synchronized (newFrameLock) {
@@ -717,7 +721,7 @@
   // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
   // upon codec error.
   @CalledByNativeUnchecked
-  private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
+  private @Nullable DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
     checkOnMediaCodecThread();
     if (decodeStartTimeMs.isEmpty()) {
       return null;
@@ -801,7 +805,7 @@
   // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if
   // a frame can't be returned.
   @CalledByNativeUnchecked
-  private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
+  private @Nullable DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
     checkOnMediaCodecThread();
     if (!useSurface) {
       throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
diff --git a/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java b/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
index efc5a59..0b0ff51 100644
--- a/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
+++ b/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
@@ -29,6 +29,7 @@
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
+import javax.annotation.Nullable;
 import org.webrtc.EglBase14;
 import org.webrtc.VideoFrame;
 
@@ -73,21 +74,21 @@
 
   // Active running encoder instance. Set in initEncode() (called from native code)
   // and reset to null in release() call.
-  private static MediaCodecVideoEncoder runningInstance = null;
-  private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
+  @Nullable private static MediaCodecVideoEncoder runningInstance = null;
+  @Nullable private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
   private static int codecErrors = 0;
   // List of disabled codec types - can be set from application.
   private static Set<String> hwEncoderDisabledTypes = new HashSet<String>();
 
-  private Thread mediaCodecThread;
-  private MediaCodec mediaCodec;
+  @Nullable private Thread mediaCodecThread;
+  @Nullable private MediaCodec mediaCodec;
   private ByteBuffer[] outputBuffers;
-  private EglBase14 eglBase;
+  @Nullable private EglBase14 eglBase;
   private int profile;
   private int width;
   private int height;
-  private Surface inputSurface;
-  private GlRectDrawer drawer;
+  @Nullable private Surface inputSurface;
+  @Nullable private GlRectDrawer drawer;
 
   private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
   private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
@@ -231,7 +232,7 @@
   private long lastKeyFrameMs;
 
   // SPS and PPS NALs (Config frame) for H.264.
-  private ByteBuffer configData = null;
+  @Nullable private ByteBuffer configData = null;
 
   // MediaCodec error handler - invoked when critical error happens which may prevent
   // further use of media codec API. Now it means that one of media codec instances
@@ -269,7 +270,7 @@
         && (findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedColorList) != null);
   }
 
-  public static EncoderProperties vp8HwEncoderProperties() {
+  public static @Nullable EncoderProperties vp8HwEncoderProperties() {
     if (hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)) {
       return null;
     } else {
@@ -322,7 +323,7 @@
     public final BitrateAdjustmentType bitrateAdjustmentType; // Bitrate adjustment type
   }
 
-  private static EncoderProperties findHwEncoder(
+  private static @Nullable EncoderProperties findHwEncoder(
       String mime, MediaCodecProperties[] supportedHwCodecProperties, int[] colorList) {
     // MediaCodec.setParameters is missing for JB and below, so bitrate
     // can not be adjusted dynamically.
@@ -433,7 +434,7 @@
     }
   }
 
-  static MediaCodec createByCodecName(String codecName) {
+  static @Nullable MediaCodec createByCodecName(String codecName) {
     try {
       // In the L-SDK this call can throw IOException so in order to work in
       // both cases catch an exception.
@@ -445,7 +446,7 @@
 
   @CalledByNativeUnchecked
   boolean initEncode(VideoCodecType type, int profile, int width, int height, int kbps, int fps,
-      EglBase14.Context sharedContext) {
+      @Nullable EglBase14.Context sharedContext) {
     final boolean useSurface = sharedContext != null;
     Logging.d(TAG,
         "Java initEncode: " + type + ". Profile: " + profile + " : " + width + " x " + height
@@ -857,6 +858,7 @@
 
   // Dequeue and return an output buffer, or null if no output is ready.  Return
   // a fake OutputBufferInfo with index -1 if the codec is no longer operable.
+  @Nullable
   @CalledByNativeUnchecked
   OutputBufferInfo dequeueOutputBuffer() {
     checkOnMediaCodecThread();
diff --git a/sdk/android/api/org/webrtc/MediaConstraints.java b/sdk/android/api/org/webrtc/MediaConstraints.java
index fc0c9c2..76f0483 100644
--- a/sdk/android/api/org/webrtc/MediaConstraints.java
+++ b/sdk/android/api/org/webrtc/MediaConstraints.java
@@ -10,6 +10,7 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -44,7 +45,7 @@
     }
 
     @Override
-    public boolean equals(Object other) {
+    public boolean equals(@Nullable Object other) {
       if (this == other) {
         return true;
       }
diff --git a/sdk/android/api/org/webrtc/MediaStreamTrack.java b/sdk/android/api/org/webrtc/MediaStreamTrack.java
index 0a020ae..65da734 100644
--- a/sdk/android/api/org/webrtc/MediaStreamTrack.java
+++ b/sdk/android/api/org/webrtc/MediaStreamTrack.java
@@ -10,6 +10,8 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
+
 /** Java wrapper for a C++ MediaStreamTrackInterface. */
 @JNINamespace("webrtc::jni")
 public class MediaStreamTrack {
@@ -55,7 +57,7 @@
   }
 
   /** Factory method to create an AudioTrack or VideoTrack subclass. */
-  static MediaStreamTrack createMediaStreamTrack(long nativeTrack) {
+  static @Nullable MediaStreamTrack createMediaStreamTrack(long nativeTrack) {
     if (nativeTrack == 0) {
       return null;
     }
diff --git a/sdk/android/api/org/webrtc/NetworkMonitor.java b/sdk/android/api/org/webrtc/NetworkMonitor.java
index 9a01882..ff3ea15 100644
--- a/sdk/android/api/org/webrtc/NetworkMonitor.java
+++ b/sdk/android/api/org/webrtc/NetworkMonitor.java
@@ -14,6 +14,7 @@
 
 import android.content.Context;
 import android.os.Build;
+import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.List;
 import org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
@@ -50,7 +51,7 @@
 
   private final Object autoDetectLock = new Object();
   // Object that detects the connection type changes and brings up mobile networks.
-  private NetworkMonitorAutoDetect autoDetect;
+  @Nullable private NetworkMonitorAutoDetect autoDetect;
   // Also guarded by autoDetectLock.
   private int numObservers;
 
@@ -107,7 +108,7 @@
    * CHANGE_NETWORK_STATE permission.
    */
   @CalledByNative
-  private void startMonitoring(Context applicationContext, long nativeObserver) {
+  private void startMonitoring(@Nullable Context applicationContext, long nativeObserver) {
     Logging.d(TAG, "Start monitoring with native observer " + nativeObserver);
 
     startMonitoring(
@@ -291,6 +292,7 @@
       long nativePtr, NetworkInformation[] networkInfos);
 
   // For testing only.
+  @Nullable
   NetworkMonitorAutoDetect getNetworkMonitorAutoDetect() {
     synchronized (autoDetectLock) {
       return autoDetect;
diff --git a/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java b/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java
index 876f720..55a0257 100644
--- a/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java
+++ b/sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java
@@ -35,6 +35,7 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import javax.annotation.Nullable;
 
 /**
  * Borrowed from Chromium's
@@ -188,7 +189,7 @@
      *  Note: In some rare Android systems connectivityManager is null.  We handle that
      *  gracefully below.
      */
-    private final ConnectivityManager connectivityManager;
+    @Nullable private final ConnectivityManager connectivityManager;
 
     ConnectivityManagerDelegate(Context context) {
       connectivityManager =
@@ -227,7 +228,7 @@
     /**
      * Returns connection type and status information gleaned from networkInfo.
      */
-    NetworkState getNetworkState(NetworkInfo networkInfo) {
+    NetworkState getNetworkState(@Nullable NetworkInfo networkInfo) {
       if (networkInfo == null || !networkInfo.isConnected()) {
         return new NetworkState(false, -1, -1);
       }
@@ -246,6 +247,7 @@
       return connectivityManager.getAllNetworks();
     }
 
+    @Nullable
     List<NetworkInformation> getActiveNetworkList() {
       if (!supportNetworkCallback()) {
         return null;
@@ -304,7 +306,7 @@
     }
 
     @SuppressLint("NewApi")
-    private NetworkInformation networkToInfo(Network network) {
+    private @Nullable NetworkInformation networkToInfo(Network network) {
       if (connectivityManager == null) {
         return null;
       }
@@ -408,7 +410,7 @@
 
   /** Queries the WifiManager for SSID of the current Wifi connection. */
   static class WifiManagerDelegate {
-    private final Context context;
+    @Nullable private final Context context;
     WifiManagerDelegate(Context context) {
       this.context = context;
     }
@@ -444,7 +446,7 @@
     private final Observer observer;
     // Network information about a WifiP2p (aka WiFi-Direct) network, or null if no such network is
     // connected.
-    private NetworkInformation wifiP2pNetworkInfo = null;
+    @Nullable private NetworkInformation wifiP2pNetworkInfo = null;
 
     WifiDirectManagerDelegate(Observer observer, Context context) {
       this.context = context;
@@ -482,7 +484,7 @@
     }
 
     /** Handle a change notification about the wifi p2p group. */
-    private void onWifiP2pGroupChange(WifiP2pGroup wifiP2pGroup) {
+    private void onWifiP2pGroupChange(@Nullable WifiP2pGroup wifiP2pGroup) {
       if (wifiP2pGroup == null || wifiP2pGroup.getInterface() == null) {
         return;
       }
@@ -528,9 +530,9 @@
   private final Context context;
   // Used to request mobile network. It does not do anything except for keeping
   // the callback for releasing the request.
-  private final NetworkCallback mobileNetworkCallback;
+  @Nullable private final NetworkCallback mobileNetworkCallback;
   // Used to receive updates on all networks.
-  private final NetworkCallback allNetworkCallback;
+  @Nullable private final NetworkCallback allNetworkCallback;
   // connectivityManagerDelegate and wifiManagerDelegate are only non-final for testing.
   private ConnectivityManagerDelegate connectivityManagerDelegate;
   private WifiManagerDelegate wifiManagerDelegate;
@@ -617,6 +619,7 @@
     return isRegistered;
   }
 
+  @Nullable
   List<NetworkInformation> getActiveNetworkList() {
     List<NetworkInformation> connectivityManagerList =
         connectivityManagerDelegate.getActiveNetworkList();
diff --git a/sdk/android/api/org/webrtc/PeerConnection.java b/sdk/android/api/org/webrtc/PeerConnection.java
index cf0eed5..dfe4b61 100644
--- a/sdk/android/api/org/webrtc/PeerConnection.java
+++ b/sdk/android/api/org/webrtc/PeerConnection.java
@@ -13,6 +13,7 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import javax.annotation.Nullable;
 
 /**
  * Java-land version of the PeerConnection APIs; wraps the C++ API
@@ -202,7 +203,7 @@
     }
 
     public static class Builder {
-      private final List<String> urls;
+      @Nullable private final List<String> urls;
       private String username = "";
       private String password = "";
       private TlsCertPolicy tlsCertPolicy = TlsCertPolicy.TLS_CERT_POLICY_SECURE;
@@ -253,16 +254,19 @@
       }
     }
 
+    @Nullable
     @CalledByNative("IceServer")
     List<String> getUrls() {
       return urls;
     }
 
+    @Nullable
     @CalledByNative("IceServer")
     String getUsername() {
       return username;
     }
 
+    @Nullable
     @CalledByNative("IceServer")
     String getPassword() {
       return password;
@@ -273,6 +277,7 @@
       return tlsCertPolicy;
     }
 
+    @Nullable
     @CalledByNative("IceServer")
     String getHostname() {
       return hostname;
@@ -406,19 +411,19 @@
     // 3) iceCheckMinInterval defines the minimal interval (equivalently the
     // maximum rate) that overrides the above two intervals when either of them
     // is less.
-    public Integer iceCheckIntervalStrongConnectivityMs;
-    public Integer iceCheckIntervalWeakConnectivityMs;
-    public Integer iceCheckMinInterval;
+    @Nullable public Integer iceCheckIntervalStrongConnectivityMs;
+    @Nullable public Integer iceCheckIntervalWeakConnectivityMs;
+    @Nullable public Integer iceCheckMinInterval;
     // The time period in milliseconds for which a candidate pair must wait for response to
     // connectivitiy checks before it becomes unwritable.
-    public Integer iceUnwritableTimeMs;
+    @Nullable public Integer iceUnwritableTimeMs;
     // The minimum number of connectivity checks that a candidate pair must sent without receiving
     // response before it becomes unwritable.
-    public Integer iceUnwritableMinChecks;
+    @Nullable public Integer iceUnwritableMinChecks;
     // The interval in milliseconds at which STUN candidates will resend STUN binding requests
     // to keep NAT bindings open.
     // The default value in the implementation is used if this field is null.
-    public Integer stunCandidateKeepaliveIntervalMs;
+    @Nullable public Integer stunCandidateKeepaliveIntervalMs;
     public boolean disableIPv6OnWifi;
     // By default, PeerConnection will use a limited number of IPv6 network
     // interfaces, in order to avoid too many ICE candidate pairs being created
@@ -426,7 +431,7 @@
     //
     // Can be set to Integer.MAX_VALUE to effectively disable the limit.
     public int maxIPv6Networks;
-    public IntervalRange iceRegatherIntervalRange;
+    @Nullable public IntervalRange iceRegatherIntervalRange;
 
     // These values will be overridden by MediaStream constraints if deprecated constraints-based
     // create peerconnection interface is used.
@@ -435,16 +440,16 @@
     public boolean enableCpuOveruseDetection;
     public boolean enableRtpDataChannel;
     public boolean suspendBelowMinBitrate;
-    public Integer screencastMinBitrate;
-    public Boolean combinedAudioVideoBwe;
-    public Boolean enableDtlsSrtp;
+    @Nullable public Integer screencastMinBitrate;
+    @Nullable public Boolean combinedAudioVideoBwe;
+    @Nullable public Boolean enableDtlsSrtp;
     // Use "Unknown" to represent no preference of adapter types, not the
     // preference of adapters of unknown types.
     public AdapterType networkPreference;
     public SdpSemantics sdpSemantics;
 
     // This is an optional wrapper for the C++ webrtc::TurnCustomizer.
-    public TurnCustomizer turnCustomizer;
+    @Nullable public TurnCustomizer turnCustomizer;
 
     // TODO(deadbeef): Instead of duplicating the defaults here, we should do
     // something to pick up the defaults from C++. The Objective-C equivalent
@@ -561,31 +566,37 @@
       return presumeWritableWhenFullyRelayed;
     }
 
+    @Nullable
     @CalledByNative("RTCConfiguration")
     Integer getIceCheckIntervalStrongConnectivity() {
       return iceCheckIntervalStrongConnectivityMs;
     }
 
+    @Nullable
     @CalledByNative("RTCConfiguration")
     Integer getIceCheckIntervalWeakConnectivity() {
       return iceCheckIntervalWeakConnectivityMs;
     }
 
+    @Nullable
     @CalledByNative("RTCConfiguration")
     Integer getIceCheckMinInterval() {
       return iceCheckMinInterval;
     }
 
+    @Nullable
     @CalledByNative("RTCConfiguration")
     Integer getIceUnwritableTimeout() {
       return iceUnwritableTimeMs;
     }
 
+    @Nullable
     @CalledByNative("RTCConfiguration")
     Integer getIceUnwritableMinChecks() {
       return iceUnwritableMinChecks;
     }
 
+    @Nullable
     @CalledByNative("RTCConfiguration")
     Integer getStunCandidateKeepaliveInterval() {
       return stunCandidateKeepaliveIntervalMs;
@@ -601,11 +612,13 @@
       return maxIPv6Networks;
     }
 
+    @Nullable
     @CalledByNative("RTCConfiguration")
     IntervalRange getIceRegatherIntervalRange() {
       return iceRegatherIntervalRange;
     }
 
+    @Nullable
     @CalledByNative("RTCConfiguration")
     TurnCustomizer getTurnCustomizer() {
       return turnCustomizer;
@@ -636,16 +649,19 @@
       return suspendBelowMinBitrate;
     }
 
+    @Nullable
     @CalledByNative("RTCConfiguration")
     Integer getScreencastMinBitrate() {
       return screencastMinBitrate;
     }
 
+    @Nullable
     @CalledByNative("RTCConfiguration")
     Boolean getCombinedAudioVideoBwe() {
       return combinedAudioVideoBwe;
     }
 
+    @Nullable
     @CalledByNative("RTCConfiguration")
     Boolean getEnableDtlsSrtp() {
       return enableDtlsSrtp;
@@ -923,7 +939,7 @@
   }
 
   public RtpTransceiver addTransceiver(
-      MediaStreamTrack track, RtpTransceiver.RtpTransceiverInit init) {
+      MediaStreamTrack track, @Nullable RtpTransceiver.RtpTransceiverInit init) {
     if (track == null) {
       throw new NullPointerException("No MediaStreamTrack specified for addTransceiver.");
     }
@@ -943,7 +959,7 @@
   }
 
   public RtpTransceiver addTransceiver(
-      MediaStreamTrack.MediaType mediaType, RtpTransceiver.RtpTransceiverInit init) {
+      MediaStreamTrack.MediaType mediaType, @Nullable RtpTransceiver.RtpTransceiverInit init) {
     if (mediaType == null) {
       throw new NullPointerException("No MediaType specified for addTransceiver.");
     }
@@ -960,7 +976,7 @@
 
   // Older, non-standard implementation of getStats.
   @Deprecated
-  public boolean getStats(StatsObserver observer, MediaStreamTrack track) {
+  public boolean getStats(StatsObserver observer, @Nullable MediaStreamTrack track) {
     return nativeOldGetStats(observer, (track == null) ? 0 : track.nativeTrack);
   }
 
diff --git a/sdk/android/api/org/webrtc/PeerConnectionFactory.java b/sdk/android/api/org/webrtc/PeerConnectionFactory.java
index 0aa49eb..03eb9e3 100644
--- a/sdk/android/api/org/webrtc/PeerConnectionFactory.java
+++ b/sdk/android/api/org/webrtc/PeerConnectionFactory.java
@@ -12,6 +12,7 @@
 
 import android.content.Context;
 import java.util.List;
+import javax.annotation.Nullable;
 
 /**
  * Java wrapper for a C++ PeerConnectionFactoryInterface.  Main entry point to
@@ -27,9 +28,9 @@
 
   private final long nativeFactory;
   private static volatile boolean internalTracerInitialized = false;
-  private static Thread networkThread;
-  private static Thread workerThread;
-  private static Thread signalingThread;
+  @Nullable private static Thread networkThread;
+  @Nullable private static Thread workerThread;
+  @Nullable private static Thread signalingThread;
   private EglBase localEglbase;
   private EglBase remoteEglbase;
 
@@ -124,11 +125,11 @@
   }
 
   public static class Builder {
-    private Options options;
-    private VideoEncoderFactory encoderFactory;
-    private VideoDecoderFactory decoderFactory;
-    private AudioProcessingFactory audioProcessingFactory;
-    private FecControllerFactoryFactoryInterface fecControllerFactoryFactory;
+    private @Nullable Options options;
+    private @Nullable VideoEncoderFactory encoderFactory;
+    private @Nullable VideoDecoderFactory decoderFactory;
+    private @Nullable AudioProcessingFactory audioProcessingFactory;
+    private @Nullable FecControllerFactoryFactoryInterface fecControllerFactoryFactory;
 
     private Builder() {}
 
@@ -262,9 +263,10 @@
         null /* fecControllerFactoryFactory */);
   }
 
-  private PeerConnectionFactory(Options options, VideoEncoderFactory encoderFactory,
-      VideoDecoderFactory decoderFactory, AudioProcessingFactory audioProcessingFactory,
-      FecControllerFactoryFactoryInterface fecControllerFactoryFactory) {
+  private PeerConnectionFactory(Options options, @Nullable VideoEncoderFactory encoderFactory,
+      @Nullable VideoDecoderFactory decoderFactory,
+      @Nullable AudioProcessingFactory audioProcessingFactory,
+      @Nullable FecControllerFactoryFactoryInterface fecControllerFactoryFactory) {
     checkInitializeHasBeenCalled();
     nativeFactory = nativeCreatePeerConnectionFactory(ContextUtils.getApplicationContext(), options,
         encoderFactory, decoderFactory,
@@ -279,6 +281,7 @@
    * Deprecated. PeerConnection constraints are deprecated. Supply values in rtcConfig struct
    * instead and use the method without constraints in the signature.
    */
+  @Nullable
   @Deprecated
   public PeerConnection createPeerConnection(PeerConnection.RTCConfiguration rtcConfig,
       MediaConstraints constraints, PeerConnection.Observer observer) {
@@ -298,6 +301,7 @@
    * Deprecated. PeerConnection constraints are deprecated. Supply values in rtcConfig struct
    * instead and use the method without constraints in the signature.
    */
+  @Nullable
   @Deprecated
   public PeerConnection createPeerConnection(List<PeerConnection.IceServer> iceServers,
       MediaConstraints constraints, PeerConnection.Observer observer) {
@@ -305,12 +309,14 @@
     return createPeerConnection(rtcConfig, constraints, observer);
   }
 
+  @Nullable
   public PeerConnection createPeerConnection(
       List<PeerConnection.IceServer> iceServers, PeerConnection.Observer observer) {
     PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
     return createPeerConnection(rtcConfig, observer);
   }
 
+  @Nullable
   public PeerConnection createPeerConnection(
       PeerConnection.RTCConfiguration rtcConfig, PeerConnection.Observer observer) {
     return createPeerConnection(rtcConfig, null /* constraints */, observer);
@@ -411,7 +417,7 @@
     return nativeFactory;
   }
 
-  private static void printStackTrace(Thread thread, String threadName) {
+  private static void printStackTrace(@Nullable Thread thread, String threadName) {
     if (thread != null) {
       StackTraceElement[] stackTraces = thread.getStackTrace();
       if (stackTraces.length > 0) {
diff --git a/sdk/android/api/org/webrtc/RtpParameters.java b/sdk/android/api/org/webrtc/RtpParameters.java
index 28ce6aa..634f3b3 100644
--- a/sdk/android/api/org/webrtc/RtpParameters.java
+++ b/sdk/android/api/org/webrtc/RtpParameters.java
@@ -10,6 +10,7 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
 import java.util.List;
 import java.util.ArrayList;
 import org.webrtc.MediaStreamTrack;
@@ -31,7 +32,7 @@
     // If non-null, this represents the Transport Independent Application
     // Specific maximum bandwidth defined in RFC3890. If null, there is no
     // maximum bitrate.
-    public Integer maxBitrateBps;
+    @Nullable public Integer maxBitrateBps;
     // SSRC to be used by this encoding.
     // Can't be changed between getParameters/setParameters.
     public Long ssrc;
@@ -48,6 +49,7 @@
       return active;
     }
 
+    @Nullable
     @CalledByNative("Encoding")
     Integer getMaxBitrateBps() {
       return maxBitrateBps;
diff --git a/sdk/android/api/org/webrtc/RtpReceiver.java b/sdk/android/api/org/webrtc/RtpReceiver.java
index f33fa58..b770519 100644
--- a/sdk/android/api/org/webrtc/RtpReceiver.java
+++ b/sdk/android/api/org/webrtc/RtpReceiver.java
@@ -10,6 +10,7 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
 import org.webrtc.MediaStreamTrack;
 
 /** Java wrapper for a C++ RtpReceiverInterface. */
@@ -25,7 +26,7 @@
   final long nativeRtpReceiver;
   private long nativeObserver;
 
-  private MediaStreamTrack cachedTrack;
+  @Nullable private MediaStreamTrack cachedTrack;
 
   @CalledByNative
   public RtpReceiver(long nativeRtpReceiver) {
@@ -34,11 +35,12 @@
     cachedTrack = MediaStreamTrack.createMediaStreamTrack(nativeTrack);
   }
 
+  @Nullable
   public MediaStreamTrack track() {
     return cachedTrack;
   }
 
-  public boolean setParameters(RtpParameters parameters) {
+  public boolean setParameters(@Nullable RtpParameters parameters) {
     return parameters == null ? false : nativeSetParameters(nativeRtpReceiver, parameters);
   }
 
diff --git a/sdk/android/api/org/webrtc/RtpSender.java b/sdk/android/api/org/webrtc/RtpSender.java
index 33ac3af..e20352b 100644
--- a/sdk/android/api/org/webrtc/RtpSender.java
+++ b/sdk/android/api/org/webrtc/RtpSender.java
@@ -10,15 +10,17 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
+
 /** Java wrapper for a C++ RtpSenderInterface. */
 @JNINamespace("webrtc::jni")
 public class RtpSender {
   final long nativeRtpSender;
 
-  private MediaStreamTrack cachedTrack;
+  @Nullable private MediaStreamTrack cachedTrack;
   private boolean ownsTrack = true;
 
-  private final DtmfSender dtmfSender;
+  private final @Nullable DtmfSender dtmfSender;
 
   @CalledByNative
   public RtpSender(long nativeRtpSender) {
@@ -44,7 +46,7 @@
    *                      or a MediaStream.
    * @return              true on success and false on failure.
    */
-  public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
+  public boolean setTrack(@Nullable MediaStreamTrack track, boolean takeOwnership) {
     if (!nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.nativeTrack)) {
       return false;
     }
@@ -56,6 +58,7 @@
     return true;
   }
 
+  @Nullable
   public MediaStreamTrack track() {
     return cachedTrack;
   }
@@ -72,6 +75,7 @@
     return nativeGetId(nativeRtpSender);
   }
 
+  @Nullable
   public DtmfSender dtmf() {
     return dtmfSender;
   }
diff --git a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
index 260d7a3..1b5dde7 100644
--- a/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
+++ b/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
@@ -19,6 +19,7 @@
 import android.media.projection.MediaProjection;
 import android.media.projection.MediaProjectionManager;
 import android.view.Surface;
+import javax.annotation.Nullable;
 
 /**
  * An implementation of VideoCapturer to capture the screen content as a video stream.
@@ -44,13 +45,13 @@
 
   private int width;
   private int height;
-  private VirtualDisplay virtualDisplay;
-  private SurfaceTextureHelper surfaceTextureHelper;
-  private CapturerObserver capturerObserver;
+  @Nullable private VirtualDisplay virtualDisplay;
+  @Nullable private SurfaceTextureHelper surfaceTextureHelper;
+  @Nullable private CapturerObserver capturerObserver;
   private long numCapturedFrames = 0;
-  private MediaProjection mediaProjection;
+  @Nullable private MediaProjection mediaProjection;
   private boolean isDisposed = false;
-  private MediaProjectionManager mediaProjectionManager;
+  @Nullable private MediaProjectionManager mediaProjectionManager;
 
   /**
    * Constructs a new Screen Capturer.
diff --git a/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java b/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
index cfe76a4..d3158ff 100644
--- a/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
+++ b/sdk/android/api/org/webrtc/SoftwareVideoDecoderFactory.java
@@ -10,7 +10,10 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
+
 public class SoftwareVideoDecoderFactory implements VideoDecoderFactory {
+  @Nullable
   @Override
   public VideoDecoder createDecoder(String codecType) {
     if (codecType.equalsIgnoreCase("VP8")) {
diff --git a/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java b/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
index 3481915..a2ddf36 100644
--- a/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
+++ b/sdk/android/api/org/webrtc/SoftwareVideoEncoderFactory.java
@@ -10,11 +10,13 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 
 public class SoftwareVideoEncoderFactory implements VideoEncoderFactory {
+  @Nullable
   @Override
   public VideoEncoder createEncoder(VideoCodecInfo info) {
     if (info.name.equalsIgnoreCase("VP8")) {
diff --git a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
index 3aaa4e2..eefc153 100644
--- a/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
+++ b/sdk/android/api/org/webrtc/SurfaceTextureHelper.java
@@ -20,6 +20,7 @@
 import android.os.HandlerThread;
 import java.nio.ByteBuffer;
 import java.util.concurrent.Callable;
+import javax.annotation.Nullable;
 import org.webrtc.EglBase;
 import org.webrtc.VideoFrame.TextureBuffer;
 
@@ -62,6 +63,7 @@
     // Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
     // is constructed on the |handler| thread.
     return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
+      @Nullable
       @Override
       public SurfaceTextureHelper call() {
         try {
@@ -81,14 +83,14 @@
   private YuvConverter yuvConverter;
 
   // These variables are only accessed from the |handler| thread.
-  private OnTextureFrameAvailableListener listener;
+  @Nullable private OnTextureFrameAvailableListener listener;
   // The possible states of this class.
   private boolean hasPendingTexture = false;
   private volatile boolean isTextureInUse = false;
   private boolean isQuitting = false;
   // |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
   // setListener() is not allowed to be called again before stopListening(), so this is thread safe.
-  private OnTextureFrameAvailableListener pendingListener;
+  @Nullable private OnTextureFrameAvailableListener pendingListener;
   final Runnable setListenerRunnable = new Runnable() {
     @Override
     public void run() {
diff --git a/sdk/android/api/org/webrtc/VideoCodecInfo.java b/sdk/android/api/org/webrtc/VideoCodecInfo.java
index f1f1145..3ee51f0 100644
--- a/sdk/android/api/org/webrtc/VideoCodecInfo.java
+++ b/sdk/android/api/org/webrtc/VideoCodecInfo.java
@@ -10,6 +10,7 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
 import java.util.Arrays;
 import java.util.Locale;
 import java.util.Map;
@@ -50,7 +51,7 @@
   }
 
   @Override
-  public boolean equals(Object obj) {
+  public boolean equals(@Nullable Object obj) {
     if (obj == null)
       return false;
     if (obj == this)
diff --git a/sdk/android/api/org/webrtc/VideoDecoderFactory.java b/sdk/android/api/org/webrtc/VideoDecoderFactory.java
index 9979e6c..ca903f8 100644
--- a/sdk/android/api/org/webrtc/VideoDecoderFactory.java
+++ b/sdk/android/api/org/webrtc/VideoDecoderFactory.java
@@ -10,11 +10,13 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
+
 /** Factory for creating VideoDecoders. */
 public interface VideoDecoderFactory {
   /**
    * Creates a VideoDecoder for the given codec. Supports the same codecs supported by
    * VideoEncoderFactory.
    */
-  @CalledByNative VideoDecoder createDecoder(String codecType);
+  @Nullable @CalledByNative VideoDecoder createDecoder(String codecType);
 }
diff --git a/sdk/android/api/org/webrtc/VideoEncoder.java b/sdk/android/api/org/webrtc/VideoEncoder.java
index f4836c9..af656bb 100644
--- a/sdk/android/api/org/webrtc/VideoEncoder.java
+++ b/sdk/android/api/org/webrtc/VideoEncoder.java
@@ -10,6 +10,7 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
 import org.webrtc.EncodedImage;
 
 /**
@@ -93,8 +94,8 @@
   /** Settings for WebRTC quality based scaling. */
   public class ScalingSettings {
     public final boolean on;
-    public final Integer low;
-    public final Integer high;
+    @Nullable public final Integer low;
+    @Nullable public final Integer high;
 
     /**
      * Settings to disable quality based scaling.
diff --git a/sdk/android/api/org/webrtc/VideoEncoderFactory.java b/sdk/android/api/org/webrtc/VideoEncoderFactory.java
index 5fe7af5..fdf1309 100644
--- a/sdk/android/api/org/webrtc/VideoEncoderFactory.java
+++ b/sdk/android/api/org/webrtc/VideoEncoderFactory.java
@@ -10,10 +10,12 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
+
 /** Factory for creating VideoEncoders. */
 public interface VideoEncoderFactory {
   /** Creates an encoder for the given video codec. */
-  @CalledByNative VideoEncoder createEncoder(VideoCodecInfo info);
+  @Nullable @CalledByNative VideoEncoder createEncoder(VideoCodecInfo info);
 
   /**
    * Enumerates the list of supported video codecs. This method will only be called once and the
diff --git a/sdk/android/api/org/webrtc/VideoFrame.java b/sdk/android/api/org/webrtc/VideoFrame.java
index 3b73543..30304a3 100644
--- a/sdk/android/api/org/webrtc/VideoFrame.java
+++ b/sdk/android/api/org/webrtc/VideoFrame.java
@@ -14,6 +14,7 @@
 import android.opengl.GLES11Ext;
 import android.opengl.GLES20;
 import java.nio.ByteBuffer;
+import javax.annotation.Nullable;
 
 /**
  * Java version of webrtc::VideoFrame and webrtc::VideoFrameBuffer. A difference from the C++
diff --git a/sdk/android/api/org/webrtc/VideoFrameDrawer.java b/sdk/android/api/org/webrtc/VideoFrameDrawer.java
index 491fd05..96292ec 100644
--- a/sdk/android/api/org/webrtc/VideoFrameDrawer.java
+++ b/sdk/android/api/org/webrtc/VideoFrameDrawer.java
@@ -13,6 +13,7 @@
 import android.graphics.Matrix;
 import android.graphics.Point;
 import android.opengl.GLES20;
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 
 /**
@@ -55,14 +56,15 @@
     // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
     // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
     // that handles stride and compare performance with intermediate copy.
-    private ByteBuffer copyBuffer;
-    private int[] yuvTextures;
+    @Nullable private ByteBuffer copyBuffer;
+    @Nullable private int[] yuvTextures;
 
     /**
      * Upload |planes| into OpenGL textures, taking stride into consideration.
      *
      * @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively.
      */
+    @Nullable
     public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) {
       final int[] planeWidths = new int[] {width, width / 2, width / 2};
       final int[] planeHeights = new int[] {height, height / 2, height / 2};
@@ -105,12 +107,14 @@
       return yuvTextures;
     }
 
+    @Nullable
     public int[] uploadFromBuffer(VideoFrame.I420Buffer buffer) {
       int[] strides = {buffer.getStrideY(), buffer.getStrideU(), buffer.getStrideV()};
       ByteBuffer[] planes = {buffer.getDataY(), buffer.getDataU(), buffer.getDataV()};
       return uploadYuvData(buffer.getWidth(), buffer.getHeight(), strides, planes);
     }
 
+    @Nullable
     public int[] getYuvTextures() {
       return yuvTextures;
     }
@@ -144,7 +148,7 @@
   // |renderWidth| and |renderHeight| to avoid allocations since this function is called for every
   // frame.
   private void calculateTransformedRenderSize(
-      int frameWidth, int frameHeight, Matrix renderMatrix) {
+      int frameWidth, int frameHeight, @Nullable Matrix renderMatrix) {
     if (renderMatrix == null) {
       renderWidth = frameWidth;
       renderHeight = frameHeight;
@@ -167,7 +171,7 @@
   private final YuvUploader yuvUploader = new YuvUploader();
   // This variable will only be used for checking reference equality and is used for caching I420
   // textures.
-  private VideoFrame lastI420Frame;
+  @Nullable private VideoFrame lastI420Frame;
   private final Matrix renderMatrix = new Matrix();
 
   public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) {
@@ -181,7 +185,7 @@
   }
 
   public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
-      Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
+      @Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
       int viewportHeight) {
     final int width = frame.getRotatedWidth();
     final int height = frame.getRotatedHeight();
diff --git a/sdk/android/api/org/webrtc/VideoRenderer.java b/sdk/android/api/org/webrtc/VideoRenderer.java
index 3dd3f1a..f10ad20 100644
--- a/sdk/android/api/org/webrtc/VideoRenderer.java
+++ b/sdk/android/api/org/webrtc/VideoRenderer.java
@@ -10,6 +10,7 @@
 
 package org.webrtc;
 
+import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import org.webrtc.VideoFrame;
 
@@ -28,14 +29,14 @@
   public static class I420Frame {
     public final int width;
     public final int height;
-    public final int[] yuvStrides;
-    public ByteBuffer[] yuvPlanes;
+    @Nullable public final int[] yuvStrides;
+    @Nullable public ByteBuffer[] yuvPlanes;
     public final boolean yuvFrame;
     // Matrix that transforms standard coordinates to their proper sampling locations in
     // the texture. This transform compensates for any properties of the video source that
     // cause it to appear different from a normalized texture. This matrix does not take
     // |rotationDegree| into account.
-    public final float[] samplingMatrix;
+    @Nullable public final float[] samplingMatrix;
     public int textureId;
     // Frame pointer in C++.
     private long nativeFramePointer;
@@ -46,7 +47,7 @@
 
     // If this I420Frame was constructed from VideoFrame.Buffer, this points to
     // the backing buffer.
-    private final VideoFrame.Buffer backingBuffer;
+    @Nullable private final VideoFrame.Buffer backingBuffer;
 
     /**
      * Construct a frame of the given dimensions with the specified planar data.
diff --git a/sdk/android/src/java/org/webrtc/Camera1Session.java b/sdk/android/src/java/org/webrtc/Camera1Session.java
index f76a74e..fff63a9 100644
--- a/sdk/android/src/java/org/webrtc/Camera1Session.java
+++ b/sdk/android/src/java/org/webrtc/Camera1Session.java
@@ -14,6 +14,7 @@
 import android.media.MediaRecorder;
 import android.os.Handler;
 import android.os.SystemClock;
+import javax.annotation.Nullable;
 import android.view.Surface;
 import android.view.WindowManager;
 import java.io.IOException;
@@ -152,8 +153,8 @@
   }
 
   private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
-      SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, int cameraId,
-      android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
+      SurfaceTextureHelper surfaceTextureHelper, @Nullable MediaRecorder mediaRecorder,
+      int cameraId, android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
       CaptureFormat captureFormat, long constructionTimeNs) {
     Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
 
diff --git a/sdk/android/src/java/org/webrtc/Camera2Session.java b/sdk/android/src/java/org/webrtc/Camera2Session.java
index 2cb15d4..b7bb5bd 100644
--- a/sdk/android/src/java/org/webrtc/Camera2Session.java
+++ b/sdk/android/src/java/org/webrtc/Camera2Session.java
@@ -24,6 +24,7 @@
 import android.hardware.camera2.CaptureRequest;
 import android.media.MediaRecorder;
 import android.os.Handler;
+import javax.annotation.Nullable;
 import android.util.Range;
 import android.view.Surface;
 import android.view.WindowManager;
@@ -51,7 +52,7 @@
   private final Context applicationContext;
   private final CameraManager cameraManager;
   private final SurfaceTextureHelper surfaceTextureHelper;
-  private final Surface mediaRecorderSurface;
+  @Nullable private final Surface mediaRecorderSurface;
   private final String cameraId;
   private final int width;
   private final int height;
@@ -65,11 +66,11 @@
   private CaptureFormat captureFormat;
 
   // Initialized when camera opens
-  private CameraDevice cameraDevice;
-  private Surface surface;
+  @Nullable private CameraDevice cameraDevice;
+  @Nullable private Surface surface;
 
   // Initialized when capture session is created
-  private CameraCaptureSession captureSession;
+  @Nullable private CameraCaptureSession captureSession;
 
   // State
   private SessionState state = SessionState.RUNNING;
@@ -304,7 +305,8 @@
 
   private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
       CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
-      MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) {
+      @Nullable MediaRecorder mediaRecorder, String cameraId, int width, int height,
+      int framerate) {
     Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
 
     constructionTimeNs = System.nanoTime();
diff --git a/sdk/android/src/java/org/webrtc/CameraCapturer.java b/sdk/android/src/java/org/webrtc/CameraCapturer.java
index c8b56ca..cc8cc01 100644
--- a/sdk/android/src/java/org/webrtc/CameraCapturer.java
+++ b/sdk/android/src/java/org/webrtc/CameraCapturer.java
@@ -14,6 +14,7 @@
 import android.media.MediaRecorder;
 import android.os.Handler;
 import android.os.Looper;
+import javax.annotation.Nullable;
 import java.util.Arrays;
 
 @SuppressWarnings("deprecation")
@@ -37,9 +38,10 @@
   private final static int OPEN_CAMERA_TIMEOUT = 10000;
 
   private final CameraEnumerator cameraEnumerator;
-  private final CameraEventsHandler eventsHandler;
+  @Nullable private final CameraEventsHandler eventsHandler;
   private final Handler uiThreadHandler;
 
+  @Nullable
   private final CameraSession.CreateSessionCallback createSessionCallback =
       new CameraSession.CreateSessionCallback() {
         @Override
@@ -125,6 +127,7 @@
         }
       };
 
+  @Nullable
   private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
     @Override
     public void onCameraOpening() {
@@ -203,31 +206,31 @@
 
   // Initialized on initialize
   // -------------------------
-  private Handler cameraThreadHandler;
+  @Nullable private Handler cameraThreadHandler;
   private Context applicationContext;
   private CapturerObserver capturerObserver;
-  private SurfaceTextureHelper surfaceHelper;
+  @Nullable private SurfaceTextureHelper surfaceHelper;
 
   private final Object stateLock = new Object();
   private boolean sessionOpening; /* guarded by stateLock */
-  private CameraSession currentSession; /* guarded by stateLock */
+  @Nullable private CameraSession currentSession; /* guarded by stateLock */
   private String cameraName; /* guarded by stateLock */
   private int width; /* guarded by stateLock */
   private int height; /* guarded by stateLock */
   private int framerate; /* guarded by stateLock */
   private int openAttemptsRemaining; /* guarded by stateLock */
   private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
-  private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
+  @Nullable private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
   // Valid from onDone call until stopCapture, otherwise null.
-  private CameraStatistics cameraStatistics; /* guarded by stateLock */
+  @Nullable private CameraStatistics cameraStatistics; /* guarded by stateLock */
   private boolean firstFrameObserved; /* guarded by stateLock */
 
   // Variables used on camera thread - do not require stateLock synchronization.
   private MediaRecorderState mediaRecorderState = MediaRecorderState.IDLE;
-  private MediaRecorderHandler mediaRecorderEventsHandler;
+  @Nullable private MediaRecorderHandler mediaRecorderEventsHandler;
 
-  public CameraCapturer(
-      String cameraName, CameraEventsHandler eventsHandler, CameraEnumerator cameraEnumerator) {
+  public CameraCapturer(String cameraName, @Nullable CameraEventsHandler eventsHandler,
+      CameraEnumerator cameraEnumerator) {
     if (eventsHandler == null) {
       eventsHandler = new CameraEventsHandler() {
         @Override
@@ -262,8 +265,8 @@
   }
 
   @Override
-  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
-      CapturerObserver capturerObserver) {
+  public void initialize(@Nullable SurfaceTextureHelper surfaceTextureHelper,
+      Context applicationContext, CapturerObserver capturerObserver) {
     this.applicationContext = applicationContext;
     this.capturerObserver = capturerObserver;
     this.surfaceHelper = surfaceTextureHelper;
@@ -412,14 +415,15 @@
     }
   }
 
-  private void reportCameraSwitchError(String error, CameraSwitchHandler switchEventsHandler) {
+  private void reportCameraSwitchError(
+      String error, @Nullable CameraSwitchHandler switchEventsHandler) {
     Logging.e(TAG, error);
     if (switchEventsHandler != null) {
       switchEventsHandler.onCameraSwitchError(error);
     }
   }
 
-  private void switchCameraInternal(final CameraSwitchHandler switchEventsHandler) {
+  private void switchCameraInternal(@Nullable final CameraSwitchHandler switchEventsHandler) {
     Logging.d(TAG, "switchCamera internal");
 
     final String[] deviceNames = cameraEnumerator.getDeviceNames();
@@ -476,7 +480,7 @@
   }
 
   private void reportUpdateMediaRecorderError(
-      String error, MediaRecorderHandler mediaRecoderEventsHandler) {
+      String error, @Nullable MediaRecorderHandler mediaRecoderEventsHandler) {
     checkIsOnCameraThread();
     Logging.e(TAG, error);
     if (mediaRecoderEventsHandler != null) {
@@ -485,7 +489,7 @@
   }
 
   private void updateMediaRecorderInternal(
-      MediaRecorder mediaRecorder, MediaRecorderHandler mediaRecoderEventsHandler) {
+      @Nullable MediaRecorder mediaRecorder, MediaRecorderHandler mediaRecoderEventsHandler) {
     checkIsOnCameraThread();
     boolean addMediaRecorder = (mediaRecorder != null);
     Logging.d(TAG,
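
(Illustrative note, not part of the patch.) CameraCapturer now takes a @Nullable CameraEventsHandler and, as the constructor hunk above shows, swaps in a do-nothing handler when null is passed, so the rest of the class never needs a null check. A compact sketch of that defaulting pattern; the interface and names here are simplified stand-ins:

    import javax.annotation.Nullable;

    final class EventsHandlerDefaultingSketch {
      interface EventsHandler {
        void onCameraOpening();
      }

      private final EventsHandler eventsHandler;

      EventsHandlerDefaultingSketch(@Nullable EventsHandler eventsHandler) {
        // Null is allowed at the API boundary; internally we always hold a non-null handler.
        this.eventsHandler = (eventsHandler != null) ? eventsHandler : new EventsHandler() {
          @Override
          public void onCameraOpening() { /* intentionally empty */ }
        };
      }

      void open() {
        // No null check needed here thanks to the defaulting in the constructor.
        eventsHandler.onCameraOpening();
      }

      public static void main(String[] args) {
        new EventsHandlerDefaultingSketch(null).open(); // safe: the no-op handler is used
      }
    }
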
diff --git a/sdk/android/src/java/org/webrtc/EglBase10.java b/sdk/android/src/java/org/webrtc/EglBase10.java
index d00388e..ab840f7 100644
--- a/sdk/android/src/java/org/webrtc/EglBase10.java
+++ b/sdk/android/src/java/org/webrtc/EglBase10.java
@@ -13,6 +13,7 @@
 import android.graphics.Canvas;
 import android.graphics.Rect;
 import android.graphics.SurfaceTexture;
+import javax.annotation.Nullable;
 import android.view.Surface;
 import android.view.SurfaceHolder;
 import javax.microedition.khronos.egl.EGL10;
@@ -31,7 +32,7 @@
 
   private final EGL10 egl;
   private EGLContext eglContext;
-  private EGLConfig eglConfig;
+  @Nullable private EGLConfig eglConfig;
   private EGLDisplay eglDisplay;
   private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
 
@@ -102,11 +103,13 @@
       @Override
       public void setKeepScreenOn(boolean b) {}
 
+      @Nullable
       @Override
       public Canvas lockCanvas() {
         return null;
       }
 
+      @Nullable
       @Override
       public Canvas lockCanvas(Rect rect) {
         return null;
@@ -115,6 +118,7 @@
       @Override
       public void unlockCanvasAndPost(Canvas canvas) {}
 
+      @Nullable
       @Override
       public Rect getSurfaceFrame() {
         return null;
@@ -301,7 +305,7 @@
 
   // Return an EGLConfig, or die trying.
   private EGLContext createEglContext(
-      Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+      @Nullable Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
     if (sharedContext != null && sharedContext.eglContext == EGL10.EGL_NO_CONTEXT) {
       throw new RuntimeException("Invalid sharedContext");
     }
diff --git a/sdk/android/src/java/org/webrtc/EglBase14.java b/sdk/android/src/java/org/webrtc/EglBase14.java
index b67eb6c..f578371 100644
--- a/sdk/android/src/java/org/webrtc/EglBase14.java
+++ b/sdk/android/src/java/org/webrtc/EglBase14.java
@@ -19,6 +19,7 @@
 import android.opengl.EGLExt;
 import android.opengl.EGLSurface;
 import android.os.Build;
+import javax.annotation.Nullable;
 import android.view.Surface;
 import org.webrtc.EglBase;
 
@@ -33,7 +34,7 @@
   private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
   private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
   private EGLContext eglContext;
-  private EGLConfig eglConfig;
+  @Nullable private EGLConfig eglConfig;
   private EGLDisplay eglDisplay;
   private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
 
@@ -262,7 +263,7 @@
 
   // Return an EGLConfig, or die trying.
   private static EGLContext createEglContext(
-      EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
+      @Nullable EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
     if (sharedContext != null && sharedContext.egl14Context == EGL14.EGL_NO_CONTEXT) {
       throw new RuntimeException("Invalid sharedContext");
     }
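
(Illustrative note, not part of the patch.) In both EGL helpers null is a meaningful argument for the shared-context parameter, roughly "create a standalone context", so the parameter is annotated @Nullable and the existing null check remains the guard. A rough sketch of that shape with made-up types, not the real EglBase API:

    import javax.annotation.Nullable;

    final class SharedContextSketch {
      static final class Context {
        final long handle;
        Context(long handle) { this.handle = handle; }
      }

      // Passing null means "no sharing"; a non-null context is validated before use.
      static Context createContext(@Nullable Context sharedContext) {
        if (sharedContext != null && sharedContext.handle == 0) {
          throw new RuntimeException("Invalid sharedContext");
        }
        long shareWith = (sharedContext == null) ? 0 : sharedContext.handle;
        return new Context(shareWith + 1); // placeholder for the real context creation call
      }

      public static void main(String[] args) {
        System.out.println(createContext(null).handle);
      }
    }
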
diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
index 85be9ba..0ca99af 100644
--- a/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
+++ b/sdk/android/src/java/org/webrtc/HardwareVideoDecoder.java
@@ -15,6 +15,7 @@
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaFormat;
 import android.os.SystemClock;
+import javax.annotation.Nullable;
 import android.view.Surface;
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -71,7 +72,7 @@
   // Output thread runs a loop which polls MediaCodec for decoded output buffers.  It reformats
   // those buffers into VideoFrames and delivers them to the callback.  Variable is set on decoder
   // thread and is immutable while the codec is running.
-  private Thread outputThread;
+  @Nullable private Thread outputThread;
 
   // Checker that ensures work is run on the output thread.
   private ThreadChecker outputThreadChecker;
@@ -81,7 +82,7 @@
   private ThreadChecker decoderThreadChecker;
 
   private volatile boolean running = false;
-  private volatile Exception shutdownException = null;
+  @Nullable private volatile Exception shutdownException = null;
 
   // Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decode thread
   // or the output thread.  Accesses should be protected with this lock.
@@ -101,8 +102,8 @@
 
   private final EglBase.Context sharedContext;
   // Valid and immutable while the decoder is running.
-  private SurfaceTextureHelper surfaceTextureHelper;
-  private Surface surface = null;
+  @Nullable private SurfaceTextureHelper surfaceTextureHelper;
+  @Nullable private Surface surface = null;
 
   private static class DecodedTextureMetadata {
     final int width;
@@ -123,14 +124,14 @@
 
   // Metadata for the last frame rendered to the texture.
   private final Object renderedTextureMetadataLock = new Object();
-  private DecodedTextureMetadata renderedTextureMetadata;
+  @Nullable private DecodedTextureMetadata renderedTextureMetadata;
 
   // Decoding proceeds asynchronously.  This callback returns decoded frames to the caller.  Valid
   // and immutable while the decoder is running.
-  private Callback callback;
+  @Nullable private Callback callback;
 
   // Valid and immutable while the decoder is running.
-  private MediaCodec codec = null;
+  @Nullable private MediaCodec codec = null;
 
   HardwareVideoDecoder(
       String codecName, VideoCodecType codecType, int colorFormat, EglBase.Context sharedContext) {
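
(Illustrative note, not part of the patch.) The decoder fields annotated here follow the comments already in the file: they are null outside a running session and non-null, effectively immutable, while it runs. A hedged sketch of that lifecycle; initSession/release are illustrative names, not the real decoder API:

    import javax.annotation.Nullable;

    final class SessionLifecycleSketch {
      // Valid and non-null only while a session is running; null otherwise.
      @Nullable private Thread outputThread;
      @Nullable private String codecName;

      void initSession(String codecName) {
        this.codecName = codecName;
        outputThread = new Thread(() -> { /* poll for output here */ });
        outputThread.start();
      }

      void release() throws InterruptedException {
        Thread thread = outputThread; // copy to a local so the null check stays valid
        if (thread != null) {
          thread.interrupt();
          thread.join();
        }
        outputThread = null;
        codecName = null;
      }

      public static void main(String[] args) throws InterruptedException {
        SessionLifecycleSketch s = new SessionLifecycleSketch();
        s.initSession("fake-codec");
        s.release();
        System.out.println("released; outputThread is now " + s.outputThread);
      }
    }
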
diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
index 0882c4b..7a05f37 100644
--- a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
+++ b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
@@ -17,6 +17,7 @@
 import android.media.MediaFormat;
 import android.opengl.GLES20;
 import android.os.Bundle;
+import javax.annotation.Nullable;
 import android.view.Surface;
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -81,16 +82,16 @@
   private boolean automaticResizeOn;
 
   // --- Valid and immutable while an encoding session is running.
-  private MediaCodec codec;
+  @Nullable private MediaCodec codec;
   // Thread that delivers encoded frames to the user callback.
-  private Thread outputThread;
+  @Nullable private Thread outputThread;
 
   // EGL base wrapping the shared texture context.  Holds hooks to both the shared context and the
   // input surface.  Making this base current allows textures from the context to be drawn onto the
   // surface.
-  private EglBase14 textureEglBase;
+  @Nullable private EglBase14 textureEglBase;
   // Input surface for the codec.  The encoder will draw input textures onto this surface.
-  private Surface textureInputSurface;
+  @Nullable private Surface textureInputSurface;
 
   private int width;
   private int height;
@@ -102,7 +103,7 @@
 
   // --- Only accessed on the output thread.
   // Contents of the last observed config frame output by the MediaCodec. Used by H.264.
-  private ByteBuffer configBuffer = null;
+  @Nullable private ByteBuffer configBuffer = null;
   private int adjustedBitrate;
 
   // Whether the encoder is running.  Volatile so that the output thread can watch this value and
@@ -110,7 +111,7 @@
   private volatile boolean running = false;
   // Any exception thrown during shutdown.  The output thread releases the MediaCodec and uses this
   // value to send exceptions thrown during release back to the encoder thread.
-  private volatile Exception shutdownException = null;
+  @Nullable private volatile Exception shutdownException = null;
 
   /**
    * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
diff --git a/sdk/android/src/java/org/webrtc/MediaCodecUtils.java b/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
index 8c829b8..2fee1c3 100644
--- a/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
+++ b/sdk/android/src/java/org/webrtc/MediaCodecUtils.java
@@ -14,6 +14,7 @@
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
+import javax.annotation.Nullable;
 
 /** Container class for static constants and helpers used with MediaCodec. */
 @TargetApi(18)
@@ -54,7 +55,8 @@
   // Color formats supported by texture mode encoding - in order of preference.
   static final int[] TEXTURE_COLOR_FORMATS = {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
 
-  static Integer selectColorFormat(int[] supportedColorFormats, CodecCapabilities capabilities) {
+  static @Nullable Integer selectColorFormat(
+      int[] supportedColorFormats, CodecCapabilities capabilities) {
     for (int supportedColorFormat : supportedColorFormats) {
       for (int codecColorFormat : capabilities.colorFormats) {
         if (codecColorFormat == supportedColorFormat) {
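
(Illustrative note, not part of the patch.) selectColorFormat returns a boxed Integer precisely so it can return null when none of the preferred formats is supported, hence the @Nullable on the return. A minimal stand-alone version of that lookup shape, not the actual MediaCodecUtils code; the sample values are arbitrary:

    import javax.annotation.Nullable;

    final class SelectFirstMatchSketch {
      // Returns the first value present in both arrays, or null if there is no match.
      static @Nullable Integer selectFirstMatch(int[] preferred, int[] supported) {
        for (int want : preferred) {
          for (int have : supported) {
            if (want == have) {
              return want;
            }
          }
        }
        return null;
      }

      public static void main(String[] args) {
        Integer match = selectFirstMatch(new int[] {21, 19}, new int[] {19, 39});
        // Callers must handle the null case explicitly.
        System.out.println(match == null ? "no supported format" : "chosen format: " + match);
      }
    }
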
diff --git a/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java b/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java
index bc9fcec..c8a48ff 100644
--- a/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java
+++ b/sdk/android/src/java/org/webrtc/VideoEncoderWrapper.java
@@ -11,6 +11,7 @@
 package org.webrtc;
 
 // Explicit imports necessary for JNI generation.
+import javax.annotation.Nullable;
 import org.webrtc.VideoEncoder;
 import java.nio.ByteBuffer;
 
@@ -23,11 +24,13 @@
     return scalingSettings.on;
   }
 
+  @Nullable
   @CalledByNative
   static Integer getScalingSettingsLow(VideoEncoder.ScalingSettings scalingSettings) {
     return scalingSettings.low;
   }
 
+  @Nullable
   @CalledByNative
   static Integer getScalingSettingsHigh(VideoEncoder.ScalingSettings scalingSettings) {
     return scalingSettings.high;
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
index 8273b7e..b87630e 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioEffects.java
@@ -19,6 +19,7 @@
 import android.os.Build;
 import java.util.List;
 import java.util.UUID;
+import javax.annotation.Nullable;
 import org.webrtc.Logging;
 
 // This class wraps control of three different platform effects. Supported
@@ -40,12 +41,12 @@
   // Contains the available effect descriptors returned from the
   // AudioEffect.getEffects() call. This result is cached to avoid doing the
   // slow OS call multiple times.
-  private static Descriptor[] cachedEffects = null;
+  private static @Nullable Descriptor[] cachedEffects = null;
 
   // Contains the audio effect objects. Created in enable() and destroyed
   // in release().
-  private AcousticEchoCanceler aec = null;
-  private NoiseSuppressor ns = null;
+  private @Nullable AcousticEchoCanceler aec = null;
+  private @Nullable NoiseSuppressor ns = null;
 
   // Affects the final state given to the setEnabled() method on each effect.
   // The default state is set to "disabled" but each effect can also be enabled
@@ -293,7 +294,7 @@
 
   // Returns the cached copy of the audio effects array, if available, or
   // queries the operating system for the list of effects.
-  private static Descriptor[] getAvailableEffects() {
+  private static @Nullable Descriptor[] getAvailableEffects() {
     if (cachedEffects != null) {
       return cachedEffects;
     }
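
(Illustrative note, not part of the patch.) cachedEffects is a lazily populated static cache: null until the first slow query, then reused, so both the field and getAvailableEffects() are annotated. A small sketch of that caching shape; the string array stands in for the platform's effect descriptors:

    import javax.annotation.Nullable;

    final class LazyStaticCacheSketch {
      // Null until the first query; populated once and then reused.
      private static @Nullable String[] cachedEffects;

      // Annotated to match the possibility that the underlying query yields nothing.
      private static @Nullable String[] getAvailableEffects() {
        if (cachedEffects != null) {
          return cachedEffects;
        }
        // Stand-in for the slow platform query that the real code caches.
        cachedEffects = new String[] {"AcousticEchoCanceler", "NoiseSuppressor"};
        return cachedEffects;
      }

      public static void main(String[] args) {
        String[] effects = getAvailableEffects();
        System.out.println(effects == null ? "no effects" : effects.length + " effects");
      }
    }
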
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
index 27e4356..aa83656 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
@@ -20,6 +20,7 @@
 import android.os.Build;
 import java.util.Timer;
 import java.util.TimerTask;
+import javax.annotation.Nullable;
 import org.webrtc.Logging;
 import org.webrtc.CalledByNative;
 
@@ -100,7 +101,7 @@
     private static final int TIMER_PERIOD_IN_SECONDS = 30;
 
     private final AudioManager audioManager;
-    private Timer timer;
+    private @Nullable Timer timer;
 
     public VolumeLogger(AudioManager audioManager) {
       this.audioManager = audioManager;
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
index fa4d179..d07330c 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java
@@ -19,12 +19,13 @@
 import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.concurrent.TimeUnit;
+import javax.annotation.Nullable;
+import org.webrtc.CalledByNative;
 import org.webrtc.Logging;
+import org.webrtc.NativeClassQualifiedName;
 import org.webrtc.ThreadUtils;
 import org.webrtc.audio.AudioDeviceModule.AudioRecordErrorCallback;
 import org.webrtc.audio.AudioDeviceModule.AudioRecordStartErrorCode;
-import org.webrtc.CalledByNative;
-import org.webrtc.NativeClassQualifiedName;
 import org.webrtc.audio.AudioDeviceModule.SamplesReadyCallback;
 
 class WebRtcAudioRecord {
@@ -56,24 +57,24 @@
 
   private final long nativeAudioRecord;
 
-  private WebRtcAudioEffects effects = null;
+  private @Nullable WebRtcAudioEffects effects = null;
 
-  private ByteBuffer byteBuffer;
+  private @Nullable ByteBuffer byteBuffer;
 
-  private AudioRecord audioRecord = null;
-  private AudioRecordThread audioThread = null;
+  private @Nullable AudioRecord audioRecord = null;
+  private @Nullable AudioRecordThread audioThread = null;
 
   private static volatile boolean microphoneMute = false;
   private byte[] emptyBytes;
 
-  private static AudioRecordErrorCallback errorCallback = null;
+  private static @Nullable AudioRecordErrorCallback errorCallback = null;
 
   public static void setErrorCallback(AudioRecordErrorCallback errorCallback) {
     Logging.d(TAG, "Set error callback");
     WebRtcAudioRecord.errorCallback = errorCallback;
   }
 
-  private static SamplesReadyCallback audioSamplesReadyCallback = null;
+  private static @Nullable SamplesReadyCallback audioSamplesReadyCallback = null;
 
   public static void setOnAudioSamplesReady(SamplesReadyCallback callback) {
     audioSamplesReadyCallback = callback;
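
(Illustrative note, not part of the patch.) The audio classes keep optional, process-wide callbacks in static fields: null until a client installs one via the static setter, with each report site expected to null-check before invoking. A hedged sketch of that setter-plus-guard pattern; ErrorCallback here is a made-up interface, not the AudioDeviceModule one:

    import javax.annotation.Nullable;

    final class StaticCallbackSketch {
      interface ErrorCallback {
        void onError(String message);
      }

      // Null until a client registers a callback via the static setter.
      private static @Nullable ErrorCallback errorCallback;

      static void setErrorCallback(ErrorCallback callback) {
        errorCallback = callback;
      }

      static void reportError(String message) {
        ErrorCallback callback = errorCallback; // snapshot so the null check stays valid
        if (callback != null) {
          callback.onError(message);
        }
      }

      public static void main(String[] args) {
        reportError("dropped before a callback is set"); // safely ignored
        setErrorCallback(msg -> System.out.println("error: " + msg));
        reportError("now delivered");
      }
    }
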
diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
index f0467c2..d3304ce 100644
--- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
+++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java
@@ -20,6 +20,7 @@
 import android.os.Process;
 import java.lang.Thread;
 import java.nio.ByteBuffer;
+import javax.annotation.Nullable;
 import org.webrtc.ContextUtils;
 import org.webrtc.Logging;
 import org.webrtc.ThreadUtils;
@@ -82,15 +83,15 @@
 
   private ByteBuffer byteBuffer;
 
-  private AudioTrack audioTrack = null;
-  private AudioTrackThread audioThread = null;
+  private @Nullable AudioTrack audioTrack = null;
+  private @Nullable AudioTrackThread audioThread = null;
 
   // Samples to be played are replaced by zeros if |speakerMute| is set to true.
   // Can be used to ensure that the speaker is fully muted.
   private static volatile boolean speakerMute = false;
   private byte[] emptyBytes;
 
-  private static AudioTrackErrorCallback errorCallback = null;
+  private static @Nullable AudioTrackErrorCallback errorCallback = null;
 
   public static void setErrorCallback(AudioTrackErrorCallback errorCallback) {
     Logging.d(TAG, "Set extended error callback");