* Update libjingle to 50389769.
* Together with "Add texture support for i420 video frame" from
wuchengli@chromium.org.
https://webrtc-codereview.appspot.com/1413004
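
A rough usage sketch (illustrative only; MyTextureHandle, texture, width,
height, rtp_timestamp and render_time_ms are placeholders, not part of this
change):

  // A capturer or decoder wraps its platform texture in a NativeHandle
  // subclass and hands WebRTC a TextureVideoFrame instead of pixel buffers.
  webrtc::NativeHandle* handle = new MyTextureHandle(texture);  // hypothetical
  webrtc::TextureVideoFrame frame(
      handle, width, height, rtp_timestamp, render_time_ms);
  // Downstream code distinguishes the two frame kinds via native_handle():
  if (frame.native_handle() != NULL) {
    // Texture-backed: pass the handle through; plane buffers are not used.
  }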

RISK=P1
TESTED=try bots
R=fischman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1967004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4489 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/webrtc/common_video/common_video.gyp b/webrtc/common_video/common_video.gyp
index 8be60cd..f8b7124 100644
--- a/webrtc/common_video/common_video.gyp
+++ b/webrtc/common_video/common_video.gyp
@@ -54,6 +54,7 @@
       ],
       'sources': [
         'interface/i420_video_frame.h',
+        'interface/texture_video_frame.h',
         'i420_video_frame.cc',
         'jpeg/include/jpeg.h',
         'jpeg/data_manager.cc',
@@ -65,6 +66,7 @@
         'libyuv/scaler.cc',
         'plane.h',
         'plane.cc',
+        'texture_video_frame.cc'
       ],
       # Silence jpeg struct padding warnings.
       'msvs_disabled_warnings': [ 4324, ],
@@ -88,6 +90,7 @@
             'libyuv/libyuv_unittest.cc',
             'libyuv/scaler_unittest.cc',
             'plane_unittest.cc',
+            'texture_video_frame_unittest.cc'
           ],
           # Disable warnings to enable Win64 build, issue 1323.
           'msvs_disabled_warnings': [
diff --git a/webrtc/common_video/i420_video_frame.cc b/webrtc/common_video/i420_video_frame.cc
index 77b7648..e369ffe 100644
--- a/webrtc/common_video/i420_video_frame.cc
+++ b/webrtc/common_video/i420_video_frame.cc
@@ -142,6 +142,8 @@
   v_plane_.ResetSize();
 }
 
+void* I420VideoFrame::native_handle() const { return NULL; }
+
 int I420VideoFrame::CheckDimensions(int width, int height,
                                     int stride_y, int stride_u, int stride_v) {
   int half_width = (width + 1) / 2;
@@ -179,5 +181,4 @@
   return NULL;
 }
 
-
 }  // namespace webrtc
diff --git a/webrtc/common_video/interface/i420_video_frame.h b/webrtc/common_video/interface/i420_video_frame.h
index 5aaf8c0..45f2ec3 100644
--- a/webrtc/common_video/interface/i420_video_frame.h
+++ b/webrtc/common_video/interface/i420_video_frame.h
@@ -16,6 +16,7 @@
 // Storing and handling of YUV (I420) video frames.
 
 #include "webrtc/common_video/plane.h"
+#include "webrtc/system_wrappers/interface/scoped_refptr.h"
 #include "webrtc/typedefs.h"
 
 /*
@@ -49,74 +50,81 @@
   // If required size is bigger than the allocated one, new buffers of adequate
   // size will be allocated.
   // Return value: 0 on success ,-1 on error.
-  int CreateEmptyFrame(int width, int height,
-                       int stride_y, int stride_u, int stride_v);
+  virtual int CreateEmptyFrame(int width, int height,
+                               int stride_y, int stride_u, int stride_v);
 
   // CreateFrame: Sets the frame's members and buffers. If required size is
   // bigger than allocated one, new buffers of adequate size will be allocated.
   // Return value: 0 on success ,-1 on error.
-  int CreateFrame(int size_y, const uint8_t* buffer_y,
-                  int size_u, const uint8_t* buffer_u,
-                  int size_v, const uint8_t* buffer_v,
-                  int width, int height,
-                  int stride_y, int stride_u, int stride_v);
+  virtual int CreateFrame(int size_y, const uint8_t* buffer_y,
+                          int size_u, const uint8_t* buffer_u,
+                          int size_v, const uint8_t* buffer_v,
+                          int width, int height,
+                          int stride_y, int stride_u, int stride_v);
 
   // Copy frame: If required size is bigger than allocated one, new buffers of
   // adequate size will be allocated.
   // Return value: 0 on success ,-1 on error.
-  int CopyFrame(const I420VideoFrame& videoFrame);
+  virtual int CopyFrame(const I420VideoFrame& videoFrame);
 
   // Swap Frame.
-  void SwapFrame(I420VideoFrame* videoFrame);
+  virtual void SwapFrame(I420VideoFrame* videoFrame);
 
   // Get pointer to buffer per plane.
-  uint8_t* buffer(PlaneType type);
+  virtual uint8_t* buffer(PlaneType type);
   // Overloading with const.
-  const uint8_t* buffer(PlaneType type) const;
+  virtual const uint8_t* buffer(PlaneType type) const;
 
   // Get allocated size per plane.
-  int allocated_size(PlaneType type) const;
+  virtual int allocated_size(PlaneType type) const;
 
   // Get allocated stride per plane.
-  int stride(PlaneType type) const;
+  virtual int stride(PlaneType type) const;
 
   // Set frame width.
-  int set_width(int width);
+  virtual int set_width(int width);
 
   // Set frame height.
-  int set_height(int height);
+  virtual int set_height(int height);
 
   // Get frame width.
-  int width() const {return width_;}
+  virtual int width() const {return width_;}
 
   // Get frame height.
-  int height() const {return height_;}
+  virtual int height() const {return height_;}
 
   // Set frame timestamp (90kHz).
-  void set_timestamp(uint32_t timestamp) {timestamp_ = timestamp;}
+  virtual void set_timestamp(uint32_t timestamp) {timestamp_ = timestamp;}
 
   // Get frame timestamp (90kHz).
-  uint32_t timestamp() const {return timestamp_;}
+  virtual uint32_t timestamp() const {return timestamp_;}
 
   // Set render time in miliseconds.
-  void set_render_time_ms(int64_t render_time_ms) {render_time_ms_ =
+  virtual void set_render_time_ms(int64_t render_time_ms) {render_time_ms_ =
                                                    render_time_ms;}
 
   // Get render time in miliseconds.
-  int64_t render_time_ms() const {return render_time_ms_;}
+  virtual int64_t render_time_ms() const {return render_time_ms_;}
 
   // Return true if underlying plane buffers are of zero size, false if not.
-  bool IsZeroSize() const;
+  virtual bool IsZeroSize() const;
 
   // Reset underlying plane buffers sizes to 0. This function doesn't
   // clear memory.
-  void ResetSize();
+  virtual void ResetSize();
+
+  // Return the handle of the underlying video frame. This is used when the
+  // frame is backed by a texture. The object should be destroyed when it is no
+  // longer in use, so the underlying resource can be freed.
+  virtual void* native_handle() const;
+
+ protected:
+  // Verifies legality of parameters.
+  // Return value: 0 on success, -1 on error.
+  virtual int CheckDimensions(int width, int height,
+                              int stride_y, int stride_u, int stride_v);
 
  private:
-  // Verifies legality of parameters.
-  // Return value: 0 on success ,-1 on error.
-  int CheckDimensions(int width, int height,
-                      int stride_y, int stride_u, int stride_v);
   // Get the pointer to a specific plane.
   const Plane* GetPlane(PlaneType type) const;
   // Overloading with non-const.
diff --git a/webrtc/common_video/interface/native_handle.h b/webrtc/common_video/interface/native_handle.h
new file mode 100644
index 0000000..d078d4c
--- /dev/null
+++ b/webrtc/common_video/interface/native_handle.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef COMMON_VIDEO_INTERFACE_NATIVEHANDLE_H_
+#define COMMON_VIDEO_INTERFACE_NATIVEHANDLE_H_
+
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+// A class to store an opaque handle of the underlying video frame. This is used
+// when the frame is backed by a texture. WebRTC carries the handle in
+// TextureVideoFrame. This object keeps a reference to the handle. The reference
+// is cleared when the object is destroyed. It is important to destroy the
+// object as soon as possible so the texture can be recycled.
+class NativeHandle {
+ public:
+  virtual ~NativeHandle() {}
+  // For scoped_refptr
+  virtual int32_t AddRef() = 0;
+  virtual int32_t Release() = 0;
+
+  // Gets the handle.
+  virtual void* GetHandle() = 0;
+};
+
+}  // namespace webrtc
+
+#endif  // COMMON_VIDEO_INTERFACE_NATIVEHANDLE_H_
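
A minimal embedder-side NativeHandle might look like the sketch below
(illustrative only, not part of this change; the plain int32_t ref count is a
simplification, and a real handle would use atomic ref counting and release or
recycle the texture in Release()):

class PlatformTextureHandle : public webrtc::NativeHandle {
 public:
  explicit PlatformTextureHandle(void* texture)
      : ref_count_(0), texture_(texture) {}
  virtual ~PlatformTextureHandle() {}
  // Called through the scoped_refptr<NativeHandle> held by TextureVideoFrame.
  virtual int32_t AddRef() { return ++ref_count_; }
  virtual int32_t Release() {
    int32_t count = --ref_count_;
    if (count == 0)
      delete this;  // Release or recycle the underlying texture here.
    return count;
  }
  virtual void* GetHandle() { return texture_; }

 private:
  int32_t ref_count_;  // Simplified; not thread-safe.
  void* texture_;      // Opaque platform texture object.
};
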
diff --git a/webrtc/common_video/interface/texture_video_frame.h b/webrtc/common_video/interface/texture_video_frame.h
new file mode 100644
index 0000000..e905ea7
--- /dev/null
+++ b/webrtc/common_video/interface/texture_video_frame.h
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef COMMON_VIDEO_INTERFACE_TEXTURE_VIDEO_FRAME_H
+#define COMMON_VIDEO_INTERFACE_TEXTURE_VIDEO_FRAME_H
+
+// TextureVideoFrame class
+//
+// Storing and handling of video frames backed by textures.
+
+#include "webrtc/common_video/interface/i420_video_frame.h"
+#include "webrtc/common_video/interface/native_handle.h"
+#include "webrtc/system_wrappers/interface/scoped_refptr.h"
+#include "webrtc/typedefs.h"
+
+namespace webrtc {
+
+class TextureVideoFrame : public I420VideoFrame {
+ public:
+  TextureVideoFrame(NativeHandle* handle,
+                    int width,
+                    int height,
+                    uint32_t timestamp,
+                    int64_t render_time_ms);
+  virtual ~TextureVideoFrame();
+
+  // I420VideoFrame implementation
+  virtual int CreateEmptyFrame(int width,
+                               int height,
+                               int stride_y,
+                               int stride_u,
+                               int stride_v) OVERRIDE;
+  virtual int CreateFrame(int size_y,
+                          const uint8_t* buffer_y,
+                          int size_u,
+                          const uint8_t* buffer_u,
+                          int size_v,
+                          const uint8_t* buffer_v,
+                          int width,
+                          int height,
+                          int stride_y,
+                          int stride_u,
+                          int stride_v) OVERRIDE;
+  virtual int CopyFrame(const I420VideoFrame& videoFrame) OVERRIDE;
+  virtual void SwapFrame(I420VideoFrame* videoFrame) OVERRIDE;
+  virtual uint8_t* buffer(PlaneType type) OVERRIDE;
+  virtual const uint8_t* buffer(PlaneType type) const OVERRIDE;
+  virtual int allocated_size(PlaneType type) const OVERRIDE;
+  virtual int stride(PlaneType type) const OVERRIDE;
+  virtual bool IsZeroSize() const OVERRIDE;
+  virtual void ResetSize() OVERRIDE;
+  virtual void* native_handle() const OVERRIDE;
+
+ protected:
+  virtual int CheckDimensions(
+      int width, int height, int stride_y, int stride_u, int stride_v) OVERRIDE;
+
+ private:
+  // An opaque handle that stores the underlying video frame.
+  scoped_refptr<NativeHandle> handle_;
+};
+
+}  // namespace webrtc
+
+#endif  // COMMON_VIDEO_INTERFACE_TEXTURE_VIDEO_FRAME_H
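
The queue and renderer changes further down all follow the same pattern: a
texture frame is never copied plane by plane; instead its ref-counted handle
is re-wrapped in a fresh TextureVideoFrame whenever a module needs to keep the
frame. A minimal sketch of that pattern (CloneIfTexture is an illustrative
helper, not part of this change):

// Returns a new texture frame sharing |frame|'s handle, or NULL if |frame| is
// an ordinary memory-backed I420VideoFrame.
webrtc::I420VideoFrame* CloneIfTexture(const webrtc::I420VideoFrame& frame) {
  if (frame.native_handle() == NULL)
    return NULL;  // Caller falls back to the usual buffer-copy path.
  // The scoped_refptr inside TextureVideoFrame keeps the texture alive.
  return new webrtc::TextureVideoFrame(
      static_cast<webrtc::NativeHandle*>(frame.native_handle()),
      frame.width(), frame.height(),
      frame.timestamp(), frame.render_time_ms());
}
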
diff --git a/webrtc/common_video/texture_video_frame.cc b/webrtc/common_video/texture_video_frame.cc
new file mode 100644
index 0000000..ea53dc2
--- /dev/null
+++ b/webrtc/common_video/texture_video_frame.cc
@@ -0,0 +1,108 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_video/interface/texture_video_frame.h"
+
+#include <assert.h>
+
+#include "webrtc/system_wrappers/interface/trace.h"
+
+#define NOTREACHED() \
+  do { \
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, "Not reached"); \
+    assert(false); \
+  } while (0)
+
+namespace webrtc {
+
+TextureVideoFrame::TextureVideoFrame(NativeHandle* handle,
+                                     int width,
+                                     int height,
+                                     uint32_t timestamp,
+                                     int64_t render_time_ms)
+    : handle_(handle) {
+  set_width(width);
+  set_height(height);
+  set_timestamp(timestamp);
+  set_render_time_ms(render_time_ms);
+}
+
+TextureVideoFrame::~TextureVideoFrame() {}
+
+int TextureVideoFrame::CreateEmptyFrame(int width,
+                                        int height,
+                                        int stride_y,
+                                        int stride_u,
+                                        int stride_v) {
+  NOTREACHED();
+  return -1;
+}
+
+int TextureVideoFrame::CreateFrame(int size_y,
+                                   const uint8_t* buffer_y,
+                                   int size_u,
+                                   const uint8_t* buffer_u,
+                                   int size_v,
+                                   const uint8_t* buffer_v,
+                                   int width,
+                                   int height,
+                                   int stride_y,
+                                   int stride_u,
+                                   int stride_v) {
+  NOTREACHED();
+  return -1;
+}
+
+int TextureVideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
+  NOTREACHED();
+  return -1;
+}
+
+void TextureVideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
+  NOTREACHED();
+}
+
+uint8_t* TextureVideoFrame::buffer(PlaneType type) {
+  NOTREACHED();
+  return NULL;
+}
+
+const uint8_t* TextureVideoFrame::buffer(PlaneType type) const {
+  NOTREACHED();
+  return NULL;
+}
+
+int TextureVideoFrame::allocated_size(PlaneType type) const {
+  NOTREACHED();
+  return -1;
+}
+
+int TextureVideoFrame::stride(PlaneType type) const {
+  NOTREACHED();
+  return -1;
+}
+
+bool TextureVideoFrame::IsZeroSize() const {
+  NOTREACHED();
+  return true;
+}
+
+void TextureVideoFrame::ResetSize() {
+  NOTREACHED();
+}
+
+void* TextureVideoFrame::native_handle() const { return handle_.get(); }
+
+int TextureVideoFrame::CheckDimensions(
+    int width, int height, int stride_y, int stride_u, int stride_v) {
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/common_video/texture_video_frame_unittest.cc b/webrtc/common_video/texture_video_frame_unittest.cc
new file mode 100644
index 0000000..04e09a6
--- /dev/null
+++ b/webrtc/common_video/texture_video_frame_unittest.cc
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common_video/interface/native_handle.h"
+#include "webrtc/common_video/interface/texture_video_frame.h"
+
+namespace webrtc {
+
+class NativeHandleImpl : public NativeHandle {
+ public:
+  NativeHandleImpl() : ref_count_(0) {}
+  virtual ~NativeHandleImpl() {}
+  virtual int32_t AddRef() { return ++ref_count_; }
+  virtual int32_t Release() { return --ref_count_; }
+  virtual void* GetHandle() { return NULL; }
+
+  int32_t ref_count() { return ref_count_; }
+ private:
+  int32_t ref_count_;
+};
+
+TEST(TestTextureVideoFrame, InitialValues) {
+  NativeHandleImpl handle;
+  TextureVideoFrame frame(&handle, 640, 480, 100, 10);
+  EXPECT_EQ(640, frame.width());
+  EXPECT_EQ(480, frame.height());
+  EXPECT_EQ(100u, frame.timestamp());
+  EXPECT_EQ(10, frame.render_time_ms());
+  EXPECT_EQ(&handle, frame.native_handle());
+
+  EXPECT_EQ(0, frame.set_width(320));
+  EXPECT_EQ(320, frame.width());
+  EXPECT_EQ(0, frame.set_height(240));
+  EXPECT_EQ(240, frame.height());
+  frame.set_timestamp(200);
+  EXPECT_EQ(200u, frame.timestamp());
+  frame.set_render_time_ms(20);
+  EXPECT_EQ(20, frame.render_time_ms());
+}
+
+TEST(TestTextureVideoFrame, RefCount) {
+  NativeHandleImpl handle;
+  EXPECT_EQ(0, handle.ref_count());
+  TextureVideoFrame *frame = new TextureVideoFrame(&handle, 640, 480, 100, 200);
+  EXPECT_EQ(1, handle.ref_count());
+  delete frame;
+  EXPECT_EQ(0, handle.ref_count());
+}
+
+}  // namespace webrtc
diff --git a/webrtc/modules/utility/source/video_frames_queue.cc b/webrtc/modules/utility/source/video_frames_queue.cc
index 535660c..d3d37be 100644
--- a/webrtc/modules/utility/source/video_frames_queue.cc
+++ b/webrtc/modules/utility/source/video_frames_queue.cc
@@ -14,6 +14,7 @@
 
 #include <assert.h>
 
+#include "webrtc/common_video/interface/texture_video_frame.h"
 #include "webrtc/modules/interface/module_common_types.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
 #include "webrtc/system_wrappers/interface/trace.h"
@@ -48,6 +49,16 @@
 }
 
 int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) {
+  if (newFrame.native_handle() != NULL) {
+    _incomingFrames.PushBack(new TextureVideoFrame(
+        static_cast<NativeHandle*>(newFrame.native_handle()),
+        newFrame.width(),
+        newFrame.height(),
+        newFrame.timestamp(),
+        newFrame.render_time_ms()));
+    return 0;
+  }
+
   I420VideoFrame* ptrFrameToAdd = NULL;
   // Try to re-use a VideoFrame. Only allocate new memory if it is necessary.
   if (!_emptyFrames.Empty()) {
@@ -113,12 +124,17 @@
 }
 
 int32_t VideoFramesQueue::ReturnFrame(I420VideoFrame* ptrOldFrame) {
-  ptrOldFrame->set_timestamp(0);
-  ptrOldFrame->set_width(0);
-  ptrOldFrame->set_height(0);
-  ptrOldFrame->set_render_time_ms(0);
-  ptrOldFrame->ResetSize();
-  _emptyFrames.PushBack(ptrOldFrame);
+  // No need to reuse texture frames because they do not allocate memory.
+  if (ptrOldFrame->native_handle() == NULL) {
+    ptrOldFrame->set_timestamp(0);
+    ptrOldFrame->set_width(0);
+    ptrOldFrame->set_height(0);
+    ptrOldFrame->set_render_time_ms(0);
+    ptrOldFrame->ResetSize();
+    _emptyFrames.PushBack(ptrOldFrame);
+  } else {
+    delete ptrOldFrame;
+  }
   return 0;
 }
 
diff --git a/webrtc/modules/video_render/incoming_video_stream.cc b/webrtc/modules/video_render/incoming_video_stream.cc
index eb602d1..39556d8 100644
--- a/webrtc/modules/video_render/incoming_video_stream.cc
+++ b/webrtc/modules/video_render/incoming_video_stream.cc
@@ -101,7 +101,8 @@
     return -1;
   }
 
-  if (true == mirror_frames_enabled_) {
+  // Mirroring is not supported if the frame is backed by a texture.
+  if (true == mirror_frames_enabled_ && video_frame.native_handle() == NULL) {
     transformed_video_frame_.CreateEmptyFrame(video_frame.width(),
                                               video_frame.height(),
                                               video_frame.stride(kYPlane),
diff --git a/webrtc/modules/video_render/video_render_frames.cc b/webrtc/modules/video_render/video_render_frames.cc
index 80b3d59..be5cac9 100644
--- a/webrtc/modules/video_render/video_render_frames.cc
+++ b/webrtc/modules/video_render/video_render_frames.cc
@@ -12,6 +12,7 @@
 
 #include <assert.h>
 
+#include "webrtc/common_video/interface/texture_video_frame.h"
 #include "webrtc/modules/interface/module_common_types.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
 #include "webrtc/system_wrappers/interface/trace.h"
@@ -47,6 +48,16 @@
     return -1;
   }
 
+  if (new_frame->native_handle() != NULL) {
+    incoming_frames_.PushBack(new TextureVideoFrame(
+        static_cast<NativeHandle*>(new_frame->native_handle()),
+        new_frame->width(),
+        new_frame->height(),
+        new_frame->timestamp(),
+        new_frame->render_time_ms()));
+    return incoming_frames_.GetSize();
+  }
+
   // Get an empty frame
   I420VideoFrame* frame_to_add = NULL;
   if (!empty_frames_.Empty()) {
@@ -103,10 +114,7 @@
         // This is the oldest one so far and it's OK to render.
         if (render_frame) {
           // This one is older than the newly found frame, remove this one.
-          render_frame->ResetSize();
-          render_frame->set_timestamp(0);
-          render_frame->set_render_time_ms(0);
-          empty_frames_.PushFront(render_frame);
+          ReturnFrame(render_frame);
         }
         render_frame = oldest_frame_in_list;
         incoming_frames_.Erase(item);
@@ -122,10 +130,15 @@
 }
 
 int32_t VideoRenderFrames::ReturnFrame(I420VideoFrame* old_frame) {
-  old_frame->ResetSize();
-  old_frame->set_timestamp(0);
-  old_frame->set_render_time_ms(0);
-  empty_frames_.PushBack(old_frame);
+  // No need to reuse texture frames because they do not allocate memory.
+  if (old_frame->native_handle() == NULL) {
+    old_frame->ResetSize();
+    old_frame->set_timestamp(0);
+    old_frame->set_render_time_ms(0);
+    empty_frames_.PushBack(old_frame);
+  } else {
+    delete old_frame;
+  }
   return 0;
 }
 
diff --git a/webrtc/video_engine/include/vie_render.h b/webrtc/video_engine/include/vie_render.h
index 24e5926..48afc1a 100644
--- a/webrtc/video_engine/include/vie_render.h
+++ b/webrtc/video_engine/include/vie_render.h
@@ -39,7 +39,13 @@
                            // RTP timestamp in 90kHz.
                            uint32_t time_stamp,
                            // Wallclock render time in miliseconds
-                           int64_t render_time) = 0;
+                           int64_t render_time,
+                           // Handle of the underlying video frame,
+                           void* handle) = 0;
+
+  // Returns true if the renderer supports textures. If true, DeliverFrame may
+  // be called with a NULL |buffer| and a non-NULL |handle|.
+  virtual bool IsTextureSupported() = 0;
 
  protected:
   virtual ~ExternalRenderer() {}
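
A renderer that opts in to textures would implement the extended interface
roughly as follows (a sketch; RenderTexture and RenderBuffer stand in for
whatever the embedder actually does with the data):

class TextureCapableRenderer : public webrtc::ExternalRenderer {
 public:
  virtual int FrameSizeChange(unsigned int width, unsigned int height,
                              unsigned int number_of_streams) {
    return 0;
  }
  virtual int DeliverFrame(unsigned char* buffer, int buffer_size,
                           uint32_t time_stamp, int64_t render_time,
                           void* handle) {
    if (handle != NULL) {
      // Texture path: |buffer| is NULL and |handle| is the frame's
      // native_handle().
      RenderTexture(handle, render_time);
      return 0;
    }
    return RenderBuffer(buffer, buffer_size, render_time);
  }
  virtual bool IsTextureSupported() { return true; }
  virtual ~TextureCapableRenderer() {}

 private:
  void RenderTexture(void* handle, int64_t render_time) { /* embedder code */ }
  int RenderBuffer(unsigned char* buffer, int size, int64_t render_time) {
    return 0;  // embedder code
  }
};
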
diff --git a/webrtc/video_engine/internal/video_receive_stream.cc b/webrtc/video_engine/internal/video_receive_stream.cc
index 50f4553..6f23e9c 100644
--- a/webrtc/video_engine/internal/video_receive_stream.cc
+++ b/webrtc/video_engine/internal/video_receive_stream.cc
@@ -117,7 +117,8 @@
 }
 
 int VideoReceiveStream::DeliverFrame(uint8_t* frame, int buffer_size,
-                                     uint32_t timestamp, int64_t render_time) {
+                                     uint32_t timestamp, int64_t render_time,
+                                     void* /*handle*/) {
   if (config_.renderer == NULL) {
     return 0;
   }
@@ -142,6 +143,8 @@
   return 0;
 }
 
+bool VideoReceiveStream::IsTextureSupported() { return false; }
+
 int VideoReceiveStream::SendPacket(int /*channel*/,
                                    const void* packet,
                                    int length) {
diff --git a/webrtc/video_engine/internal/video_receive_stream.h b/webrtc/video_engine/internal/video_receive_stream.h
index 932776a..cdac6fb 100644
--- a/webrtc/video_engine/internal/video_receive_stream.h
+++ b/webrtc/video_engine/internal/video_receive_stream.h
@@ -46,7 +46,9 @@
   virtual int FrameSizeChange(unsigned int width, unsigned int height,
                               unsigned int /*number_of_streams*/) OVERRIDE;
   virtual int DeliverFrame(uint8_t* frame, int buffer_size, uint32_t timestamp,
-                           int64_t render_time) OVERRIDE;
+                           int64_t render_time, void* /*handle*/) OVERRIDE;
+
+  virtual bool IsTextureSupported() OVERRIDE;
 
   virtual int SendPacket(int /*channel*/, const void* packet, int length)
       OVERRIDE;
diff --git a/webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.cc b/webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.cc
index e49be53..a79f7d5 100644
--- a/webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.cc
+++ b/webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.cc
@@ -588,7 +588,7 @@
 
 int FrameDropMonitoringRemoteFileRenderer::DeliverFrame(
     unsigned char *buffer, int buffer_size, uint32_t time_stamp,
-    int64_t render_time) {
+    int64_t render_time, void* /*handle*/) {
   // |render_time| provides the ideal render time for this frame. If that time
   // has already passed we will render it immediately.
   int64_t report_render_time_us = render_time * 1000;
@@ -600,7 +600,7 @@
   frame_drop_detector_->ReportFrameState(FrameDropDetector::kRendered,
                                          time_stamp, report_render_time_us);
   return ViEToFileRenderer::DeliverFrame(buffer, buffer_size,
-                                         time_stamp, render_time);
+                                         time_stamp, render_time, NULL);
 }
 
 int FrameDropMonitoringRemoteFileRenderer::FrameSizeChange(
diff --git a/webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.h b/webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.h
index 3a60520..b507784 100644
--- a/webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.h
+++ b/webrtc/video_engine/test/auto_test/primitives/framedrop_primitives.h
@@ -223,10 +223,11 @@
 
   // Implementation of ExternalRenderer:
   int FrameSizeChange(unsigned int width, unsigned int height,
-                      unsigned int number_of_streams);
+                      unsigned int number_of_streams) OVERRIDE;
   int DeliverFrame(unsigned char* buffer, int buffer_size,
                    uint32_t time_stamp,
-                   int64_t render_time);
+                   int64_t render_time,
+                   void* handle) OVERRIDE;
  private:
   FrameDropDetector* frame_drop_detector_;
 };
diff --git a/webrtc/video_engine/test/auto_test/source/vie_autotest_render.cc b/webrtc/video_engine/test/auto_test/source/vie_autotest_render.cc
index c4b8155..c1d2fac 100644
--- a/webrtc/video_engine/test/auto_test/source/vie_autotest_render.cc
+++ b/webrtc/video_engine/test/auto_test/source/vie_autotest_render.cc
@@ -58,7 +58,8 @@
 
     virtual int DeliverFrame(unsigned char* buffer, int bufferSize,
                              uint32_t time_stamp,
-                             int64_t render_time) {
+                             int64_t render_time,
+                             void* /*handle*/) {
       if (bufferSize != CalcBufferSize(webrtc::kI420, _width, _height)) {
         ViETest::Log("Incorrect render buffer received, of length = %d\n",
                      bufferSize);
@@ -67,6 +68,8 @@
       return 0;
     }
 
+    virtual bool IsTextureSupported() { return false; }
+
 public:
     virtual ~ViEAutoTestExternalRenderer()
     {
diff --git a/webrtc/video_engine/test/libvietest/helpers/vie_to_file_renderer.cc b/webrtc/video_engine/test/libvietest/helpers/vie_to_file_renderer.cc
index a70d6c9..245ddfe 100644
--- a/webrtc/video_engine/test/libvietest/helpers/vie_to_file_renderer.cc
+++ b/webrtc/video_engine/test/libvietest/helpers/vie_to_file_renderer.cc
@@ -123,7 +123,8 @@
 int ViEToFileRenderer::DeliverFrame(unsigned char *buffer,
                                     int buffer_size,
                                     uint32_t time_stamp,
-                                    int64_t render_time) {
+                                    int64_t render_time,
+                                    void* /*handle*/) {
   webrtc::CriticalSectionScoped lock(frame_queue_cs_.get());
   test::Frame* frame;
   if (free_frame_queue_.empty()) {
@@ -146,6 +147,8 @@
   return 0;
 }
 
+bool ViEToFileRenderer::IsTextureSupported() { return false; }
+
 int ViEToFileRenderer::FrameSizeChange(unsigned int width,
                                        unsigned int height,
                                        unsigned int number_of_streams) {
diff --git a/webrtc/video_engine/test/libvietest/include/vie_external_render_filter.h b/webrtc/video_engine/test/libvietest/include/vie_external_render_filter.h
index a1aeb4c..f337d17 100644
--- a/webrtc/video_engine/test/libvietest/include/vie_external_render_filter.h
+++ b/webrtc/video_engine/test/libvietest/include/vie_external_render_filter.h
@@ -35,7 +35,8 @@
     return renderer_->DeliverFrame(frame_buffer,
                                    size,
                                    time_stamp90KHz,
-                                   webrtc::TickTime::MillisecondTimestamp());
+                                   webrtc::TickTime::MillisecondTimestamp(),
+                                   NULL);
   }
 
  private:
diff --git a/webrtc/video_engine/test/libvietest/include/vie_to_file_renderer.h b/webrtc/video_engine/test/libvietest/include/vie_to_file_renderer.h
index 32ee1c1..393524a 100644
--- a/webrtc/video_engine/test/libvietest/include/vie_to_file_renderer.h
+++ b/webrtc/video_engine/test/libvietest/include/vie_to_file_renderer.h
@@ -55,12 +55,15 @@
 
   // Implementation of ExternalRenderer:
   int FrameSizeChange(unsigned int width, unsigned int height,
-                      unsigned int number_of_streams);
+                      unsigned int number_of_streams) OVERRIDE;
 
   int DeliverFrame(unsigned char* buffer,
                    int buffer_size,
                    uint32_t time_stamp,
-                   int64_t render_time);
+                   int64_t render_time,
+                   void* handle) OVERRIDE;
+
+  bool IsTextureSupported() OVERRIDE;
 
   const std::string GetFullOutputPath() const;
 
diff --git a/webrtc/video_engine/vie_channel.cc b/webrtc/video_engine/vie_channel.cc
index 78a9015..7184777 100644
--- a/webrtc/video_engine/vie_channel.cc
+++ b/webrtc/video_engine/vie_channel.cc
@@ -1684,18 +1684,21 @@
     }
     decoder_reset_ = false;
   }
-  if (effect_filter_) {
-    unsigned int length = CalcBufferSize(kI420,
-                                         video_frame.width(),
-                                         video_frame.height());
-    scoped_array<uint8_t> video_buffer(new uint8_t[length]);
-    ExtractBuffer(video_frame, length, video_buffer.get());
-    effect_filter_->Transform(length, video_buffer.get(),
-                              video_frame.timestamp(), video_frame.width(),
-                              video_frame.height());
-  }
-  if (color_enhancement_) {
-    VideoProcessingModule::ColorEnhancement(&video_frame);
+  // Post processing is not supported if the frame is backed by a texture.
+  if (video_frame.native_handle() == NULL) {
+    if (effect_filter_) {
+      unsigned int length = CalcBufferSize(kI420,
+                                           video_frame.width(),
+                                           video_frame.height());
+      scoped_array<uint8_t> video_buffer(new uint8_t[length]);
+      ExtractBuffer(video_frame, length, video_buffer.get());
+      effect_filter_->Transform(length, video_buffer.get(),
+                                video_frame.timestamp(), video_frame.width(),
+                                video_frame.height());
+    }
+    if (color_enhancement_) {
+      VideoProcessingModule::ColorEnhancement(&video_frame);
+    }
   }
 
   uint32_t arr_ofCSRC[kRtpCsrcSize];
diff --git a/webrtc/video_engine/vie_frame_provider_base.cc b/webrtc/video_engine/vie_frame_provider_base.cc
index ac05841..1ed0966 100644
--- a/webrtc/video_engine/vie_frame_provider_base.cc
+++ b/webrtc/video_engine/vie_frame_provider_base.cc
@@ -56,7 +56,7 @@
 
   // Deliver the frame to all registered callbacks.
   if (frame_callbacks_.size() > 0) {
-    if (frame_callbacks_.size() == 1) {
+    if (frame_callbacks_.size() == 1 || video_frame->native_handle() != NULL) {
       // We don't have to copy the frame.
       frame_callbacks_.front()->DeliverFrame(id_, video_frame, num_csrcs, CSRC);
     } else {
diff --git a/webrtc/video_engine/vie_renderer.cc b/webrtc/video_engine/vie_renderer.cc
index 597f49d..35c68aa 100644
--- a/webrtc/video_engine/vie_renderer.cc
+++ b/webrtc/video_engine/vie_renderer.cc
@@ -169,6 +169,21 @@
 int32_t ViEExternalRendererImpl::RenderFrame(
     const uint32_t stream_id,
     I420VideoFrame&   video_frame) {
+  if (video_frame.native_handle() != NULL) {
+    NotifyFrameSizeChange(stream_id, video_frame);
+
+    if (external_renderer_->IsTextureSupported()) {
+      external_renderer_->DeliverFrame(NULL,
+                                       0,
+                                       video_frame.timestamp(),
+                                       video_frame.render_time_ms(),
+                                       video_frame.native_handle());
+    } else {
+      // TODO(wuchengli): readback the pixels and deliver the frame.
+    }
+    return 0;
+  }
+
   VideoFrame* out_frame = converted_frame_.get();
 
   // Convert to requested format.
@@ -218,21 +233,28 @@
       break;
   }
 
-  if (external_renderer_width_ != video_frame.width() ||
-      external_renderer_height_ != video_frame.height()) {
-    external_renderer_width_ = video_frame.width();
-    external_renderer_height_ = video_frame.height();
-    external_renderer_->FrameSizeChange(external_renderer_width_,
-                                        external_renderer_height_, stream_id);
-  }
+  NotifyFrameSizeChange(stream_id, video_frame);
 
   if (out_frame) {
     external_renderer_->DeliverFrame(out_frame->Buffer(),
                                      out_frame->Length(),
                                      video_frame.timestamp(),
-                                     video_frame.render_time_ms());
+                                     video_frame.render_time_ms(),
+                                     NULL);
   }
   return 0;
 }
 
+void ViEExternalRendererImpl::NotifyFrameSizeChange(
+    const uint32_t stream_id,
+    I420VideoFrame& video_frame) {
+  if (external_renderer_width_ != video_frame.width() ||
+      external_renderer_height_ != video_frame.height()) {
+    external_renderer_width_ = video_frame.width();
+    external_renderer_height_ = video_frame.height();
+    external_renderer_->FrameSizeChange(
+        external_renderer_width_, external_renderer_height_, stream_id);
+  }
+}
+
 }  // namespace webrtc
diff --git a/webrtc/video_engine/vie_renderer.h b/webrtc/video_engine/vie_renderer.h
index a4a1e22..04295f7 100644
--- a/webrtc/video_engine/vie_renderer.h
+++ b/webrtc/video_engine/vie_renderer.h
@@ -36,6 +36,8 @@
                               I420VideoFrame& video_frame);
 
  private:
+  void NotifyFrameSizeChange(const uint32_t stream_id,
+                             I420VideoFrame& video_frame);
   ExternalRenderer* external_renderer_;
   RawVideoType external_renderer_format_;
   int external_renderer_width_;