Revert of Delete video_render module. (patchset #12 id:220001 of https://codereview.webrtc.org/1912143002/ )

Reason for revert:
This breaks every buildbot on chromium.webrtc.fyi, and I don't see any roll in progress to address it (nor do I see how such a roll would be possible).
Usage in Chrome: https://code.google.com/p/chromium/codesearch#search/&q=modules.gyp%3Avideo_render&sq=package:chromium&type=cs
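For reference, downstream gyp targets pull it in roughly like this (hypothetical consumer snippet; the real call sites are in the codesearch link above):

  'dependencies': [
    '<(DEPTH)/third_party/webrtc/modules/modules.gyp:video_render_module',
  ],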

Example failures:
https://build.chromium.org/p/chromium.webrtc.fyi/builders/Linux%20Builder/builds/5420
https://build.chromium.org/p/chromium.webrtc.fyi/builders/Win%20Builder/builds/4526

I think it's fine to delete our video_render_module_internal_impl target and its files, but the video_render target needs to remain.
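A follow-up could then keep just the wrapper target, roughly like this (sketch only; names taken from the BUILD.gn restored below):

  source_set("video_render") {
    sources = [ "video_render_impl.cc" ]
    deps = [
      ":video_render_module",
      "../../system_wrappers",
    ]
  }
  # video_render_internal_impl and its platform-specific sources can then be
  # removed without breaking downstream references to :video_render.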

Original issue's description:
> Delete video_render module.
>
> BUG=webrtc:5817
>
> Committed: https://crrev.com/97cfd1ec05d07ef233356e57f7aa4b028b74ffba
> Cr-Commit-Position: refs/heads/master@{#12526}

TBR=mflodman@webrtc.org,pbos@webrtc.org,nisse@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=webrtc:5817

Review-Url: https://codereview.webrtc.org/1923613003
Cr-Commit-Position: refs/heads/master@{#12534}
diff --git a/webrtc/modules/modules.gyp b/webrtc/modules/modules.gyp
index de47272..502454c 100644
--- a/webrtc/modules/modules.gyp
+++ b/webrtc/modules/modules.gyp
@@ -26,6 +26,7 @@
     'video_coding/video_coding.gypi',
     'video_capture/video_capture.gypi',
     'video_processing/video_processing.gypi',
+    'video_render/video_render.gypi',
   ],
   'conditions': [
     ['include_tests==1', {
@@ -780,6 +781,19 @@
                 'modules_unittests.isolate',
               ],
             },
+            {
+              'target_name': 'video_render_tests_run',
+              'type': 'none',
+              'dependencies': [
+                'video_render_tests',
+              ],
+              'includes': [
+                '../build/isolate.gypi',
+              ],
+              'sources': [
+                'video_render_tests.isolate',
+              ],
+            },
           ],
         }],
       ],
diff --git a/webrtc/modules/modules_java.gyp b/webrtc/modules/modules_java.gyp
index 2a72fb3..060de2a 100644
--- a/webrtc/modules/modules_java.gyp
+++ b/webrtc/modules/modules_java.gyp
@@ -18,5 +18,14 @@
 
       'includes': [ '../../build/java.gypi' ],
     }, # audio_device_module_java
+    {
+      'target_name': 'video_render_module_java',
+      'type': 'none',
+      'variables': {
+        'java_in_dir': 'video_render/android/java',
+        'additional_src_dirs': [ '../base/java/src', ],
+      },
+      'includes': [ '../../build/java.gypi' ],
+    }, # video_render_module_java
   ],
 }
diff --git a/webrtc/modules/modules_java_chromium.gyp b/webrtc/modules/modules_java_chromium.gyp
index ebc53d6..32d2d8d 100644
--- a/webrtc/modules/modules_java_chromium.gyp
+++ b/webrtc/modules/modules_java_chromium.gyp
@@ -16,5 +16,13 @@
       },
       'includes': [ '../../../build/java.gypi' ],
     }, # audio_device_module_java
+    {
+      'target_name': 'video_render_module_java',
+      'type': 'none',
+      'variables': {
+        'java_in_dir': 'video_render/android/java',
+      },
+      'includes': [ '../../../build/java.gypi' ],
+    }, # video_render_module_java
   ],
 }
diff --git a/webrtc/modules/video_render/BUILD.gn b/webrtc/modules/video_render/BUILD.gn
new file mode 100644
index 0000000..0771bd7
--- /dev/null
+++ b/webrtc/modules/video_render/BUILD.gn
@@ -0,0 +1,178 @@
+# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../build/webrtc.gni")
+
+source_set("video_render_module") {
+  sources = [
+    "external/video_render_external_impl.cc",
+    "external/video_render_external_impl.h",
+    "i_video_render.h",
+    "video_render.h",
+    "video_render_defines.h",
+    "video_render_impl.h",
+  ]
+
+  deps = [
+    "../..:webrtc_common",
+    "../../common_video",
+    "../../system_wrappers",
+    "../utility",
+  ]
+
+  configs += [ "../..:common_config" ]
+  public_configs = [ "../..:common_inherited_config" ]
+
+  if (is_clang) {
+    # Suppress warnings from Chrome's Clang plugins.
+    # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+    configs -= [ "//build/config/clang:find_bad_constructs" ]
+  }
+}
+
+source_set("video_render") {
+  sources = [
+    "video_render_impl.cc",
+  ]
+  deps = [
+    ":video_render_module",
+    "../../system_wrappers",
+  ]
+
+  configs += [ "../..:common_config" ]
+  public_configs = [ "../..:common_inherited_config" ]
+
+  if (is_clang) {
+    # Suppress warnings from Chrome's Clang plugins.
+    # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+    configs -= [ "//build/config/clang:find_bad_constructs" ]
+  }
+}
+
+if (!build_with_chromium) {
+  config("video_render_internal_impl_config") {
+    if (is_ios) {
+      libs = [
+        "OpenGLES.framework",
+        "QuartzCore.framework",
+      ]
+    }
+  }
+
+  source_set("video_render_internal_impl") {
+    libs = []
+    sources = [
+      "video_render_internal_impl.cc",
+    ]
+    deps = [
+      ":video_render_module",
+      "../../system_wrappers",
+    ]
+
+    if (is_linux) {
+      sources += [
+        "linux/video_render_linux_impl.cc",
+        "linux/video_render_linux_impl.h",
+        "linux/video_x11_channel.cc",
+        "linux/video_x11_channel.h",
+        "linux/video_x11_render.cc",
+        "linux/video_x11_render.h",
+      ]
+
+      deps += [ "../..:webrtc_common" ]
+
+      libs += [ "Xext" ]
+    }
+    if (is_mac) {
+      sources += [
+        "mac/cocoa_full_screen_window.h",
+        "mac/cocoa_full_screen_window.mm",
+        "mac/cocoa_render_view.h",
+        "mac/cocoa_render_view.mm",
+        "mac/video_render_agl.cc",
+        "mac/video_render_agl.h",
+        "mac/video_render_mac_carbon_impl.cc",
+        "mac/video_render_mac_carbon_impl.h",
+        "mac/video_render_mac_cocoa_impl.h",
+        "mac/video_render_mac_cocoa_impl.mm",
+        "mac/video_render_nsopengl.h",
+        "mac/video_render_nsopengl.mm",
+      ]
+
+      libs += [
+        "CoreVideo.framework",
+        "QTKit.framework",
+      ]
+    }
+    if (is_win) {
+      sources += [
+        "windows/i_video_render_win.h",
+        "windows/video_render_direct3d9.cc",
+        "windows/video_render_direct3d9.h",
+        "windows/video_render_windows_impl.cc",
+        "windows/video_render_windows_impl.h",
+      ]
+
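+      # Locate the DirectX SDK: prefer a copy checked in under //third_party,
+      # otherwise fall back to a locally installed SDK.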
+      directxsdk_exists =
+          exec_script("//build/dir_exists.py",
+                      [ rebase_path("//third_party/directxsdk/files",
+                                    root_build_dir) ],
+                      "trim string") == "True"
+      if (directxsdk_exists) {
+        directxsdk_path = "//third_party/directxsdk/files"
+      } else {
+        directxsdk_path =
+            exec_script("../../build/find_directx_sdk.py", [], "trim string")
+      }
+      include_dirs = [ directxsdk_path + "/Include" ]
+    }
+    if (is_android) {
+      sources += [
+        "android/video_render_android_impl.cc",
+        "android/video_render_android_impl.h",
+        "android/video_render_android_native_opengl2.cc",
+        "android/video_render_android_native_opengl2.h",
+        "android/video_render_android_surface_view.cc",
+        "android/video_render_android_surface_view.h",
+        "android/video_render_opengles20.cc",
+        "android/video_render_opengles20.h",
+      ]
+
+      libs += [ "GLESv2" ]
+    }
+    if (is_ios) {
+      sources += [
+        "ios/open_gles20.h",
+        "ios/open_gles20.mm",
+        "ios/video_render_ios_channel.h",
+        "ios/video_render_ios_channel.mm",
+        "ios/video_render_ios_gles20.h",
+        "ios/video_render_ios_gles20.mm",
+        "ios/video_render_ios_impl.h",
+        "ios/video_render_ios_impl.mm",
+        "ios/video_render_ios_view.h",
+        "ios/video_render_ios_view.mm",
+      ]
+
+      deps += [ "../..:webrtc_common" ]
+
+      cflags = [ "-fobjc-arc" ]  # CLANG_ENABLE_OBJC_ARC = YES.
+    }
+
+    all_dependent_configs = [ ":video_render_internal_impl_config" ]
+
+    configs += [ "../..:common_config" ]
+    public_configs = [ "../..:common_inherited_config" ]
+
+    if (is_clang) {
+      # Suppress warnings from Chrome's Clang plugins.
+      # See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
+      configs -= [ "//build/config/clang:find_bad_constructs" ]
+    }
+  }
+}
diff --git a/webrtc/modules/video_render/DEPS b/webrtc/modules/video_render/DEPS
new file mode 100644
index 0000000..58ae9fe
--- /dev/null
+++ b/webrtc/modules/video_render/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+  "+webrtc/base",
+  "+webrtc/common_video",
+  "+webrtc/system_wrappers",
+]
diff --git a/webrtc/modules/video_render/OWNERS b/webrtc/modules/video_render/OWNERS
new file mode 100644
index 0000000..3aaa532
--- /dev/null
+++ b/webrtc/modules/video_render/OWNERS
@@ -0,0 +1,12 @@
+mflodman@webrtc.org
+perkj@webrtc.org
+tkchin@webrtc.org
+
+per-file *.isolate=kjellander@webrtc.org
+
+# These are for the common case of adding or renaming files. If you're doing
+# structural changes, please get a review from a reviewer in this file.
+per-file *.gyp=*
+per-file *.gypi=*
+
+per-file BUILD.gn=kjellander@webrtc.org
diff --git a/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java
new file mode 100644
index 0000000..fa756ba
--- /dev/null
+++ b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java
@@ -0,0 +1,371 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import java.util.concurrent.locks.ReentrantLock;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.app.ActivityManager;
+import android.content.Context;
+import android.content.pm.ConfigurationInfo;
+import android.graphics.PixelFormat;
+import android.opengl.GLSurfaceView;
+
+import org.webrtc.Logging;
+
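+// GLSurfaceView-based renderer: native code registered through
+// RegisterNativeObject() draws each frame on the GL thread.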
+public class ViEAndroidGLES20 extends GLSurfaceView
+        implements GLSurfaceView.Renderer {
+    private static String TAG = "WEBRTC-JR";
+    private static final boolean DEBUG = false;
+    // True if onSurfaceChanged has been called.
+    private boolean surfaceCreated = false;
+    private boolean openGLCreated = false;
+    // True if RegisterNativeObject has been called.
+    private boolean nativeFunctionsRegisted = false;
+    private ReentrantLock nativeFunctionLock = new ReentrantLock();
+    // Address of Native object that will do the drawing.
+    private long nativeObject = 0;
+    private int viewWidth = 0;
+    private int viewHeight = 0;
+
+    public static boolean UseOpenGL2(Object renderWindow) {
+        return ViEAndroidGLES20.class.isInstance(renderWindow);
+    }
+
+    public ViEAndroidGLES20(Context context) {
+        super(context);
+        init(false, 0, 0);
+    }
+
+    public ViEAndroidGLES20(Context context, boolean translucent,
+            int depth, int stencil) {
+        super(context);
+        init(translucent, depth, stencil);
+    }
+
+    private void init(boolean translucent, int depth, int stencil) {
+
+        // By default, GLSurfaceView() creates an RGB_565 opaque surface.
+        // If we want a translucent one, we have to change the surface's
+        // format here; PixelFormat.TRANSLUCENT is interpreted by
+        // SurfaceFlinger as any 32-bit surface with alpha.
+        if (translucent) {
+            this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
+        }
+
+        // Set up the context factory for OpenGL ES 2.0 rendering.
+        // See the ContextFactory class definition below.
+        setEGLContextFactory(new ContextFactory());
+
+        // We need to choose an EGLConfig that matches the format of
+        // our surface exactly. This is going to be done in our
+        // custom config chooser. See ConfigChooser class definition
+        // below.
+        setEGLConfigChooser(translucent ?
+                            new ConfigChooser(8, 8, 8, 8, depth, stencil) :
+                            new ConfigChooser(5, 6, 5, 0, depth, stencil));
+
+        // Set the renderer responsible for frame rendering
+        this.setRenderer(this);
+        this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+    }
+
+    private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
+        private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+        public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
+            Logging.w(TAG, "creating OpenGL ES 2.0 context");
+            checkEglError("Before eglCreateContext", egl);
+            int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
+            EGLContext context = egl.eglCreateContext(display, eglConfig,
+                    EGL10.EGL_NO_CONTEXT, attrib_list);
+            checkEglError("After eglCreateContext", egl);
+            return context;
+        }
+
+        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
+            egl.eglDestroyContext(display, context);
+        }
+    }
+
+    private static void checkEglError(String prompt, EGL10 egl) {
+        int error;
+        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
+            Logging.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
+        }
+    }
+
+    private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
+
+        public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
+            mRedSize = r;
+            mGreenSize = g;
+            mBlueSize = b;
+            mAlphaSize = a;
+            mDepthSize = depth;
+            mStencilSize = stencil;
+        }
+
+        // This EGL config specification is used to specify 2.0 rendering.
+        // We use a minimum size of 4 bits for red/green/blue, but will
+        // perform actual matching in chooseConfig() below.
+        private static int EGL_OPENGL_ES2_BIT = 4;
+        private static int[] s_configAttribs2 =
+        {
+            EGL10.EGL_RED_SIZE, 4,
+            EGL10.EGL_GREEN_SIZE, 4,
+            EGL10.EGL_BLUE_SIZE, 4,
+            EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+            EGL10.EGL_NONE
+        };
+
+        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
+
+            // Get the number of minimally matching EGL configurations
+            int[] num_config = new int[1];
+            egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
+
+            int numConfigs = num_config[0];
+
+            if (numConfigs <= 0) {
+                throw new IllegalArgumentException("No configs match configSpec");
+            }
+
+            // Allocate then read the array of minimally matching EGL configs
+            EGLConfig[] configs = new EGLConfig[numConfigs];
+            egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
+
+            if (DEBUG) {
+                printConfigs(egl, display, configs);
+            }
+            // Now return the "best" one
+            return chooseConfig(egl, display, configs);
+        }
+
+        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
+                EGLConfig[] configs) {
+            for(EGLConfig config : configs) {
+                int d = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_DEPTH_SIZE, 0);
+                int s = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_STENCIL_SIZE, 0);
+
+                // We need at least mDepthSize and mStencilSize bits
+                if (d < mDepthSize || s < mStencilSize)
+                    continue;
+
+                // We want an *exact* match for red/green/blue/alpha
+                int r = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_RED_SIZE, 0);
+                int g = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_GREEN_SIZE, 0);
+                int b = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_BLUE_SIZE, 0);
+                int a = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_ALPHA_SIZE, 0);
+
+                if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
+                    return config;
+            }
+            return null;
+        }
+
+        private int findConfigAttrib(EGL10 egl, EGLDisplay display,
+                EGLConfig config, int attribute, int defaultValue) {
+
+            if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
+                return mValue[0];
+            }
+            return defaultValue;
+        }
+
+        private void printConfigs(EGL10 egl, EGLDisplay display,
+            EGLConfig[] configs) {
+            int numConfigs = configs.length;
+            Logging.w(TAG, String.format("%d configurations", numConfigs));
+            for (int i = 0; i < numConfigs; i++) {
+                Logging.w(TAG, String.format("Configuration %d:\n", i));
+                printConfig(egl, display, configs[i]);
+            }
+        }
+
+        private void printConfig(EGL10 egl, EGLDisplay display,
+                EGLConfig config) {
+            int[] attributes = {
+                    EGL10.EGL_BUFFER_SIZE,
+                    EGL10.EGL_ALPHA_SIZE,
+                    EGL10.EGL_BLUE_SIZE,
+                    EGL10.EGL_GREEN_SIZE,
+                    EGL10.EGL_RED_SIZE,
+                    EGL10.EGL_DEPTH_SIZE,
+                    EGL10.EGL_STENCIL_SIZE,
+                    EGL10.EGL_CONFIG_CAVEAT,
+                    EGL10.EGL_CONFIG_ID,
+                    EGL10.EGL_LEVEL,
+                    EGL10.EGL_MAX_PBUFFER_HEIGHT,
+                    EGL10.EGL_MAX_PBUFFER_PIXELS,
+                    EGL10.EGL_MAX_PBUFFER_WIDTH,
+                    EGL10.EGL_NATIVE_RENDERABLE,
+                    EGL10.EGL_NATIVE_VISUAL_ID,
+                    EGL10.EGL_NATIVE_VISUAL_TYPE,
+                    0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
+                    EGL10.EGL_SAMPLES,
+                    EGL10.EGL_SAMPLE_BUFFERS,
+                    EGL10.EGL_SURFACE_TYPE,
+                    EGL10.EGL_TRANSPARENT_TYPE,
+                    EGL10.EGL_TRANSPARENT_RED_VALUE,
+                    EGL10.EGL_TRANSPARENT_GREEN_VALUE,
+                    EGL10.EGL_TRANSPARENT_BLUE_VALUE,
+                    0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
+                    0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
+                    0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
+                    0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
+                    EGL10.EGL_LUMINANCE_SIZE,
+                    EGL10.EGL_ALPHA_MASK_SIZE,
+                    EGL10.EGL_COLOR_BUFFER_TYPE,
+                    EGL10.EGL_RENDERABLE_TYPE,
+                    0x3042 // EGL10.EGL_CONFORMANT
+            };
+            String[] names = {
+                    "EGL_BUFFER_SIZE",
+                    "EGL_ALPHA_SIZE",
+                    "EGL_BLUE_SIZE",
+                    "EGL_GREEN_SIZE",
+                    "EGL_RED_SIZE",
+                    "EGL_DEPTH_SIZE",
+                    "EGL_STENCIL_SIZE",
+                    "EGL_CONFIG_CAVEAT",
+                    "EGL_CONFIG_ID",
+                    "EGL_LEVEL",
+                    "EGL_MAX_PBUFFER_HEIGHT",
+                    "EGL_MAX_PBUFFER_PIXELS",
+                    "EGL_MAX_PBUFFER_WIDTH",
+                    "EGL_NATIVE_RENDERABLE",
+                    "EGL_NATIVE_VISUAL_ID",
+                    "EGL_NATIVE_VISUAL_TYPE",
+                    "EGL_PRESERVED_RESOURCES",
+                    "EGL_SAMPLES",
+                    "EGL_SAMPLE_BUFFERS",
+                    "EGL_SURFACE_TYPE",
+                    "EGL_TRANSPARENT_TYPE",
+                    "EGL_TRANSPARENT_RED_VALUE",
+                    "EGL_TRANSPARENT_GREEN_VALUE",
+                    "EGL_TRANSPARENT_BLUE_VALUE",
+                    "EGL_BIND_TO_TEXTURE_RGB",
+                    "EGL_BIND_TO_TEXTURE_RGBA",
+                    "EGL_MIN_SWAP_INTERVAL",
+                    "EGL_MAX_SWAP_INTERVAL",
+                    "EGL_LUMINANCE_SIZE",
+                    "EGL_ALPHA_MASK_SIZE",
+                    "EGL_COLOR_BUFFER_TYPE",
+                    "EGL_RENDERABLE_TYPE",
+                    "EGL_CONFORMANT"
+            };
+            int[] value = new int[1];
+            for (int i = 0; i < attributes.length; i++) {
+                int attribute = attributes[i];
+                String name = names[i];
+                if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
+                    Logging.w(TAG, String.format("  %s: %d\n", name, value[0]));
+                } else {
+                    // Logging.w(TAG, String.format("  %s: failed\n", name));
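+                    // Drain any queued EGL errors before continuing.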
+                    while (egl.eglGetError() != EGL10.EGL_SUCCESS);
+                }
+            }
+        }
+
+        // Subclasses can adjust these values:
+        protected int mRedSize;
+        protected int mGreenSize;
+        protected int mBlueSize;
+        protected int mAlphaSize;
+        protected int mDepthSize;
+        protected int mStencilSize;
+        private int[] mValue = new int[1];
+    }
+
+    // Returns true if this device supports OpenGL ES 2.0 rendering.
+    public static boolean IsSupported(Context context) {
+        ActivityManager am =
+                (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
+        ConfigurationInfo info = am.getDeviceConfigurationInfo();
+        // reqGlEsVersion encodes the major version in the upper 16 bits;
+        // 0x20000 corresponds to OpenGL ES 2.0.
+        return info.reqGlEsVersion >= 0x20000;
+    }
+
+    public void onDrawFrame(GL10 gl) {
+        nativeFunctionLock.lock();
+        if(!nativeFunctionsRegisted || !surfaceCreated) {
+            nativeFunctionLock.unlock();
+            return;
+        }
+
+        if (!openGLCreated) {
+            if (0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) {
+                // Failed to create OpenGL; release the lock before bailing.
+                nativeFunctionLock.unlock();
+                return;
+            }
+            openGLCreated = true; // Created OpenGL successfully.
+        }
+        DrawNative(nativeObject); // Draw the new frame
+        nativeFunctionLock.unlock();
+    }
+
+    public void onSurfaceChanged(GL10 gl, int width, int height) {
+        surfaceCreated = true;
+        viewWidth = width;
+        viewHeight = height;
+
+        nativeFunctionLock.lock();
+        if(nativeFunctionsRegisted) {
+            if (CreateOpenGLNative(nativeObject, width, height) == 0)
+                openGLCreated = true;
+        }
+        nativeFunctionLock.unlock();
+    }
+
+    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+    }
+
+    public void RegisterNativeObject(long nativeObject) {
+        nativeFunctionLock.lock();
+        this.nativeObject = nativeObject;
+        nativeFunctionsRegisted = true;
+        nativeFunctionLock.unlock();
+    }
+
+    public void DeRegisterNativeObject() {
+        nativeFunctionLock.lock();
+        nativeFunctionsRegisted = false;
+        openGLCreated = false;
+        this.nativeObject = 0;
+        nativeFunctionLock.unlock();
+    }
+
+    public void ReDraw() {
+        if(surfaceCreated) {
+            // Request the renderer to redraw using the render thread context.
+            this.requestRender();
+        }
+    }
+
+    private native int CreateOpenGLNative(long nativeObject,
+            int width, int height);
+    private native void DrawNative(long nativeObject);
+
+}
diff --git a/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java
new file mode 100644
index 0000000..50b1a59
--- /dev/null
+++ b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import android.content.Context;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+public class ViERenderer {
+    public static SurfaceView CreateRenderer(Context context) {
+        return CreateRenderer(context, false);
+    }
+
+    public static SurfaceView CreateRenderer(Context context,
+            boolean useOpenGLES2) {
+        if (useOpenGLES2 && ViEAndroidGLES20.IsSupported(context))
+            return new ViEAndroidGLES20(context);
+        else
+            return new SurfaceView(context);
+    }
+}
diff --git a/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java
new file mode 100644
index 0000000..71f26c2
--- /dev/null
+++ b/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java
@@ -0,0 +1,185 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+// The following imports are needed by saveBitmapToJPEG, which
+// is used for debugging only.
+import java.io.ByteArrayOutputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Rect;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.SurfaceHolder.Callback;
+
+import org.webrtc.Logging;
+
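+// Draws bitmaps delivered from native code onto a SurfaceView via the
+// SurfaceHolder/Canvas API.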
+public class ViESurfaceRenderer implements Callback {
+
+    private final static String TAG = "WEBRTC";
+
+    // the bitmap used for drawing.
+    private Bitmap bitmap = null;
+    private ByteBuffer byteBuffer = null;
+    private SurfaceHolder surfaceHolder;
+    // Rect of the source bitmap to draw
+    private Rect srcRect = new Rect();
+    // Rect of the destination canvas to draw to
+    private Rect dstRect = new Rect();
+    private float dstTopScale = 0;
+    private float dstBottomScale = 1;
+    private float dstLeftScale = 0;
+    private float dstRightScale = 1;
+
+    public ViESurfaceRenderer(SurfaceView view) {
+        surfaceHolder = view.getHolder();
+        if(surfaceHolder == null)
+            return;
+        surfaceHolder.addCallback(this);
+    }
+
+    // surfaceChanged and surfaceCreated share this function
+    private void changeDestRect(int dstWidth, int dstHeight) {
+        dstRect.right = (int)(dstRect.left + dstRightScale * dstWidth);
+        dstRect.bottom = (int)(dstRect.top + dstBottomScale * dstHeight);
+    }
+
+    public void surfaceChanged(SurfaceHolder holder, int format,
+            int in_width, int in_height) {
+        Logging.d(TAG, "ViESurfaceRender::surfaceChanged");
+
+        changeDestRect(in_width, in_height);
+
+        Logging.d(TAG, "ViESurfaceRender::surfaceChanged" +
+                " in_width:" + in_width + " in_height:" + in_height +
+                " srcRect.left:" + srcRect.left +
+                " srcRect.top:" + srcRect.top +
+                " srcRect.right:" + srcRect.right +
+                " srcRect.bottom:" + srcRect.bottom +
+                " dstRect.left:" + dstRect.left +
+                " dstRect.top:" + dstRect.top +
+                " dstRect.right:" + dstRect.right +
+                " dstRect.bottom:" + dstRect.bottom);
+    }
+
+    public void surfaceCreated(SurfaceHolder holder) {
+        Canvas canvas = surfaceHolder.lockCanvas();
+        if(canvas != null) {
+            Rect dst = surfaceHolder.getSurfaceFrame();
+            if(dst != null) {
+                changeDestRect(dst.right - dst.left, dst.bottom - dst.top);
+                Logging.d(TAG, "ViESurfaceRender::surfaceCreated" +
+                        " dst.left:" + dst.left +
+                        " dst.top:" + dst.top +
+                        " dst.right:" + dst.right +
+                        " dst.bottom:" + dst.bottom +
+                        " srcRect.left:" + srcRect.left +
+                        " srcRect.top:" + srcRect.top +
+                        " srcRect.right:" + srcRect.right +
+                        " srcRect.bottom:" + srcRect.bottom +
+                        " dstRect.left:" + dstRect.left +
+                        " dstRect.top:" + dstRect.top +
+                        " dstRect.right:" + dstRect.right +
+                        " dstRect.bottom:" + dstRect.bottom);
+            }
+            surfaceHolder.unlockCanvasAndPost(canvas);
+        }
+    }
+
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        Logging.d(TAG, "ViESurfaceRenderer::surfaceDestroyed");
+        bitmap = null;
+        byteBuffer = null;
+    }
+
+    public Bitmap CreateBitmap(int width, int height) {
+        Logging.d(TAG, "CreateByteBitmap " + width + ":" + height);
+        if (bitmap == null) {
+            try {
+                android.os.Process.setThreadPriority(
+                    android.os.Process.THREAD_PRIORITY_DISPLAY);
+            }
+            catch (Exception e) {
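+                // Best effort only; keep the default priority on failure.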
+            }
+        }
+        bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
+        srcRect.left = 0;
+        srcRect.top = 0;
+        srcRect.bottom = height;
+        srcRect.right = width;
+        return bitmap;
+    }
+
+    public ByteBuffer CreateByteBuffer(int width, int height) {
+        Logging.d(TAG, "CreateByteBuffer " + width + ":" + height);
+        if (bitmap == null) {
+            bitmap = CreateBitmap(width, height);
+            byteBuffer = ByteBuffer.allocateDirect(width * height * 2);
+        }
+        return byteBuffer;
+    }
+
+    public void SetCoordinates(float left, float top,
+            float right, float bottom) {
+        Logging.d(TAG, "SetCoordinates " + left + "," + top + ":" +
+                right + "," + bottom);
+        dstLeftScale = left;
+        dstTopScale = top;
+        dstRightScale = right;
+        dstBottomScale = bottom;
+    }
+
+    // Saves the bitmap data to a JPEG file; for debugging only.
+    private void saveBitmapToJPEG(int width, int height) {
+        ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
+        bitmap.compress(Bitmap.CompressFormat.JPEG, 100, byteOutStream);
+
+        try {
+            FileOutputStream output = new FileOutputStream(String.format(
+                "/sdcard/render_%d.jpg", System.currentTimeMillis()));
+            output.write(byteOutStream.toByteArray());
+            output.flush();
+            output.close();
+        } catch (IOException e) {
+            // Debug-only helper; ignore I/O failures.
+        }
+    }
+
+    public void DrawByteBuffer() {
+        if(byteBuffer == null)
+            return;
+        byteBuffer.rewind();
+        bitmap.copyPixelsFromBuffer(byteBuffer);
+        DrawBitmap();
+    }
+
+    public void DrawBitmap() {
+        if(bitmap == null)
+            return;
+
+        Canvas canvas = surfaceHolder.lockCanvas();
+        if(canvas != null) {
+            // The following call is for debugging only:
+            // saveBitmapToJPEG(srcRect.right - srcRect.left,
+            //                  srcRect.bottom - srcRect.top);
+            canvas.drawBitmap(bitmap, srcRect, dstRect, null);
+            surfaceHolder.unlockCanvasAndPost(canvas);
+        }
+    }
+
+}
diff --git a/webrtc/modules/video_render/android/video_render_android_impl.cc b/webrtc/modules/video_render/android/video_render_android_impl.cc
new file mode 100644
index 0000000..9affb23
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_android_impl.cc
@@ -0,0 +1,316 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_render/android/video_render_android_impl.h"
+
+#include "webrtc/modules/video_render/video_render_internal.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
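+// When ANDROID is defined, route WEBRTC_TRACE to the Android log;
+// otherwise use the regular trace implementation.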
+#ifdef ANDROID
+#include <android/log.h>
+#include <stdio.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+#else
+#include "webrtc/system_wrappers/include/trace.h"
+#endif
+
+namespace webrtc {
+
+JavaVM* VideoRenderAndroid::g_jvm = NULL;
+
+int32_t SetRenderAndroidVM(JavaVM* javaVM) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
+  VideoRenderAndroid::g_jvm = javaVM;
+  return 0;
+}
+
+VideoRenderAndroid::VideoRenderAndroid(
+    const int32_t id,
+    const VideoRenderType videoRenderType,
+    void* window,
+    const bool /*fullscreen*/):
+    _id(id),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _renderType(videoRenderType),
+    _ptrWindow((jobject)(window)),
+    _javaShutDownFlag(false),
+    _javaShutdownEvent(*EventWrapper::Create()),
+    _javaRenderEvent(*EventWrapper::Create()),
+    _lastJavaRenderEvent(0),
+    _javaRenderJniEnv(NULL) {
+}
+
+VideoRenderAndroid::~VideoRenderAndroid() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+               "VideoRenderAndroid dtor");
+
+  if (_javaRenderThread)
+    StopRender();
+
+  for (AndroidStreamMap::iterator it = _streamsMap.begin();
+       it != _streamsMap.end();
+       ++it) {
+    delete it->second;
+  }
+  delete &_javaShutdownEvent;
+  delete &_javaRenderEvent;
+  delete &_critSect;
+}
+
+int32_t VideoRenderAndroid::ChangeWindow(void* /*window*/) {
+  return -1;
+}
+
+VideoRenderCallback*
+VideoRenderAndroid::AddIncomingRenderStream(const uint32_t streamId,
+                                            const uint32_t zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom) {
+  CriticalSectionScoped cs(&_critSect);
+
+  AndroidStream* renderStream = NULL;
+  AndroidStreamMap::iterator item = _streamsMap.find(streamId);
+  if (item != _streamsMap.end() && item->second != NULL) {
+    WEBRTC_TRACE(kTraceInfo,
+                 kTraceVideoRenderer,
+                 -1,
+                 "%s: Render stream already exists",
+                 __FUNCTION__);
+    return renderStream;
+  }
+
+  renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
+                                            right, bottom, *this);
+  if (renderStream) {
+    _streamsMap[streamId] = renderStream;
+  } else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
+    return NULL;
+  }
+  return renderStream;
+}
+
+int32_t VideoRenderAndroid::DeleteIncomingRenderStream(
+    const uint32_t streamId) {
+  CriticalSectionScoped cs(&_critSect);
+
+  AndroidStreamMap::iterator item = _streamsMap.find(streamId);
+  if (item == _streamsMap.end()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
+    return -1;
+  }
+  delete item->second;
+  _streamsMap.erase(item);
+  return 0;
+}
+
+int32_t VideoRenderAndroid::GetIncomingRenderStreamProperties(
+    const uint32_t streamId,
+    uint32_t& zOrder,
+    float& left,
+    float& top,
+    float& right,
+    float& bottom) const {
+  return -1;
+}
+
+int32_t VideoRenderAndroid::StartRender() {
+  CriticalSectionScoped cs(&_critSect);
+
+  if (_javaRenderThread) {
+    // StartRender is called when this stream should start rendering.
+    // However, StopRender is not called when the streams stop rendering,
+    // so the thread is only deleted when the renderer is removed.
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+                 "%s, Render thread already exist", __FUNCTION__);
+    return 0;
+  }
+
+  _javaRenderThread.reset(new rtc::PlatformThread(JavaRenderThreadFun, this,
+                                                  "AndroidRenderThread"));
+
+  _javaRenderThread->Start();
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: thread started",
+               __FUNCTION__);
+  _javaRenderThread->SetPriority(rtc::kRealtimePriority);
+  return 0;
+}
+
+int32_t VideoRenderAndroid::StopRender() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
+  {
+    CriticalSectionScoped cs(&_critSect);
+    if (!_javaRenderThread) {
+      return -1;
+    }
+    _javaShutDownFlag = true;
+    _javaRenderEvent.Set();
+  }
+
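+  // Give the render thread up to three seconds to detach from the JVM and
+  // signal shutdown before joining it.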
+  _javaShutdownEvent.Wait(3000);
+  CriticalSectionScoped cs(&_critSect);
+  _javaRenderThread->Stop();
+  _javaRenderThread.reset();
+
+  return 0;
+}
+
+void VideoRenderAndroid::ReDraw() {
+  CriticalSectionScoped cs(&_critSect);
+  // Allow a redraw if more than 20 ms have passed since the last one.
+  if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) {
+    _lastJavaRenderEvent = TickTime::MillisecondTimestamp();
+    _javaRenderEvent.Set();
+  }
+}
+
+bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) {
+  return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess();
+}
+
+bool VideoRenderAndroid::JavaRenderThreadProcess() {
+  _javaRenderEvent.Wait(1000);
+
+  CriticalSectionScoped cs(&_critSect);
+  if (!_javaRenderJniEnv) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !_javaRenderJniEnv) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, _javaRenderJniEnv);
+      return false;
+    }
+  }
+
+  for (AndroidStreamMap::iterator it = _streamsMap.begin();
+       it != _streamsMap.end();
+       ++it) {
+    it->second->DeliverFrame(_javaRenderJniEnv);
+  }
+
+  if (_javaShutDownFlag) {
+    if (g_jvm->DetachCurrentThread() < 0)
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    else {
+      WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+                   "%s: Java thread detached", __FUNCTION__);
+    }
+    _javaRenderJniEnv = NULL;
+    _javaShutDownFlag = false;
+    _javaShutdownEvent.Set();
+    return false; // Do not run this thread again.
+  }
+  return true;
+}
+
+VideoRenderType VideoRenderAndroid::RenderType() {
+  return _renderType;
+}
+
+RawVideoType VideoRenderAndroid::PerferedVideoType() {
+  return kVideoI420;
+}
+
+bool VideoRenderAndroid::FullScreen() {
+  return false;
+}
+
+int32_t VideoRenderAndroid::GetGraphicsMemory(
+    uint64_t& /*totalGraphicsMemory*/,
+    uint64_t& /*availableGraphicsMemory*/) const {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+int32_t VideoRenderAndroid::GetScreenResolution(
+    uint32_t& /*screenWidth*/,
+    uint32_t& /*screenHeight*/) const {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+uint32_t VideoRenderAndroid::RenderFrameRate(
+    const uint32_t /*streamId*/) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+int32_t VideoRenderAndroid::SetStreamCropping(
+    const uint32_t /*streamId*/,
+    const float /*left*/,
+    const float /*top*/,
+    const float /*right*/,
+    const float /*bottom*/) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+int32_t VideoRenderAndroid::SetTransparentBackground(const bool enable) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+int32_t VideoRenderAndroid::ConfigureRenderer(
+    const uint32_t streamId,
+    const unsigned int zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+int32_t VideoRenderAndroid::SetText(
+    const uint8_t textId,
+    const uint8_t* text,
+    const int32_t textLength,
+    const uint32_t textColorRef,
+    const uint32_t backgroundColorRef,
+    const float left, const float top,
+    const float right, const float bottom) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+int32_t VideoRenderAndroid::SetBitmap(const void* bitMap,
+                                      const uint8_t pictureId,
+                                      const void* colorKey,
+                                      const float left, const float top,
+                                      const float right,
+                                      const float bottom) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/android/video_render_android_impl.h b/webrtc/modules/video_render/android/video_render_android_impl.h
new file mode 100644
index 0000000..06fd7a1
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_android_impl.h
@@ -0,0 +1,154 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
+
+#include <jni.h>
+
+#include <map>
+#include <memory>
+
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/modules/video_render/i_video_render.h"
+
+
+namespace webrtc {
+
+//#define ANDROID_LOG
+
+class CriticalSectionWrapper;
+class EventWrapper;
+
+// Base class for Android render streams: the object a module user uses
+// to send new frames to the Java renderer.
+
+class AndroidStream : public VideoRenderCallback {
+ public:
+  // DeliverFrame is called from a thread connected to the Java VM.
+  // Used to deliver a frame for rendering.
+  virtual void DeliverFrame(JNIEnv* jniEnv) = 0;
+
+  virtual ~AndroidStream() {}
+};
+
+class VideoRenderAndroid: IVideoRender {
+ public:
+  VideoRenderAndroid(const int32_t id,
+                     const VideoRenderType videoRenderType,
+                     void* window,
+                     const bool fullscreen);
+
+  virtual ~VideoRenderAndroid();
+
+  virtual int32_t Init() = 0;
+
+  virtual int32_t ChangeWindow(void* window);
+
+  virtual VideoRenderCallback* AddIncomingRenderStream(
+      const uint32_t streamId,
+      const uint32_t zOrder,
+      const float left, const float top,
+      const float right, const float bottom);
+
+  virtual int32_t DeleteIncomingRenderStream(
+      const uint32_t streamId);
+
+  virtual int32_t GetIncomingRenderStreamProperties(
+      const uint32_t streamId,
+      uint32_t& zOrder,
+      float& left, float& top,
+      float& right, float& bottom) const;
+
+  virtual int32_t StartRender();
+
+  virtual int32_t StopRender();
+
+  virtual void ReDraw();
+
+  // Properties
+
+  virtual VideoRenderType RenderType();
+
+  virtual RawVideoType PerferedVideoType();
+
+  virtual bool FullScreen();
+
+  virtual int32_t GetGraphicsMemory(
+      uint64_t& totalGraphicsMemory,
+      uint64_t& availableGraphicsMemory) const;
+
+  virtual int32_t GetScreenResolution(
+      uint32_t& screenWidth,
+      uint32_t& screenHeight) const;
+
+  virtual uint32_t RenderFrameRate(const uint32_t streamId);
+
+  virtual int32_t SetStreamCropping(const uint32_t streamId,
+                                    const float left, const float top,
+                                    const float right, const float bottom);
+
+  virtual int32_t SetTransparentBackground(const bool enable);
+
+  virtual int32_t ConfigureRenderer(const uint32_t streamId,
+                                    const unsigned int zOrder,
+                                    const float left, const float top,
+                                    const float right, const float bottom);
+
+  virtual int32_t SetText(const uint8_t textId,
+                          const uint8_t* text,
+                          const int32_t textLength,
+                          const uint32_t textColorRef,
+                          const uint32_t backgroundColorRef,
+                          const float left, const float top,
+                          const float right, const float bottom);
+
+  virtual int32_t SetBitmap(const void* bitMap,
+                            const uint8_t pictureId,
+                            const void* colorKey, const float left,
+                            const float top, const float right,
+                            const float bottom);
+  static JavaVM* g_jvm;
+
+ protected:
+  virtual AndroidStream* CreateAndroidRenderChannel(
+      int32_t streamId,
+      int32_t zOrder,
+      const float left,
+      const float top,
+      const float right,
+      const float bottom,
+      VideoRenderAndroid& renderer) = 0;
+
+  int32_t _id;
+  CriticalSectionWrapper& _critSect;
+  VideoRenderType _renderType;
+  jobject _ptrWindow;
+
+ private:
+  static bool JavaRenderThreadFun(void* obj);
+  bool JavaRenderThreadProcess();
+
+  // Map with streams to render.
+  typedef std::map<int32_t, AndroidStream*> AndroidStreamMap;
+  AndroidStreamMap _streamsMap;
+  // True if the _javaRenderThread thread shall be detached from the JVM.
+  bool _javaShutDownFlag;
+  EventWrapper& _javaShutdownEvent;
+  EventWrapper& _javaRenderEvent;
+  int64_t _lastJavaRenderEvent;
+  JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
+  // TODO(pbos): Remove unique_ptr and use the member directly.
+  std::unique_ptr<rtc::PlatformThread> _javaRenderThread;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
diff --git a/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc b/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
new file mode 100644
index 0000000..286776e
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_android_native_opengl2.cc
@@ -0,0 +1,450 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+#ifdef ANDROID_LOG
+#include <android/log.h>
+#include <stdio.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
+#else
+#include "webrtc/system_wrappers/include/trace.h"
+#endif
+
+namespace webrtc {
+
+AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
+    const int32_t id,
+    const VideoRenderType videoRenderType,
+    void* window,
+    const bool fullscreen) :
+    VideoRenderAndroid(id, videoRenderType, window, fullscreen),
+    _javaRenderObj(NULL),
+    _javaRenderClass(NULL) {
+}
+
+bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) {
+  if (!g_jvm) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "RendererAndroid():UseOpenGL No JVM set.");
+    return false;
+  }
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(
+          kTraceError,
+          kTraceVideoRenderer,
+          -1,
+          "RendererAndroid(): Could not attach thread to JVM (%d, %p)",
+          res, env);
+      return false;
+    }
+    isAttached = true;
+  }
+
+  // get the renderer class
+  jclass javaRenderClassLocal =
+      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
+  if (!javaRenderClassLocal) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "%s: could not find ViEAndroidRenderer class",
+                 __FUNCTION__);
+    return false;
+  }
+
+  // get the method ID for UseOpenGL2
+  jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal,
+                                                  "UseOpenGL2",
+                                                  "(Ljava/lang/Object;)Z");
+  if (cidUseOpenGL == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "%s: could not get UseOpenGL ID", __FUNCTION__);
+    return false;
+  }
+  jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal,
+                                              cidUseOpenGL, (jobject) window);
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (g_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+  return res;
+}
+
+AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+               "AndroidNativeOpenGl2Renderer dtor");
+  if (g_jvm) {
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+      // Get the JNI env for this thread
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__, res, env);
+        env = NULL;
+      } else {
+        isAttached = true;
+      }
+    }
+
+    // env is NULL if the attach above failed; skip cleanup in that case.
+    if (env) {
+      env->DeleteGlobalRef(_javaRenderObj);
+      env->DeleteGlobalRef(_javaRenderClass);
+    }
+
+    if (isAttached) {
+      if (g_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                     "%s: Could not detach thread from JVM",
+                     __FUNCTION__);
+      }
+    }
+  }
+}
+
+int32_t AndroidNativeOpenGl2Renderer::Init() {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+  if (!g_jvm) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "(%s): Not a valid Java VM pointer.", __FUNCTION__);
+    return -1;
+  }
+  if (!_ptrWindow) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                 "(%s): No window have been provided.", __FUNCTION__);
+    return -1;
+  }
+
+  // get the JNI env for this thread
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, env);
+      return -1;
+    }
+    isAttached = true;
+  }
+
+  // get the ViEAndroidGLES20 class
+  jclass javaRenderClassLocal =
+      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
+  if (!javaRenderClassLocal) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not find ViEAndroidGLES20", __FUNCTION__);
+    return -1;
+  }
+
+  // create a global reference to the class (to tell JNI that
+  // we are referencing it after this function has returned)
+  _javaRenderClass =
+      reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
+  if (!_javaRenderClass) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not create Java SurfaceHolder class reference",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // Delete local class ref, we only use the global ref
+  env->DeleteLocalRef(javaRenderClassLocal);
+
+  // create a reference to the object (to tell JNI that we are referencing it
+  // after this function has returned)
+  _javaRenderObj = env->NewGlobalRef(_ptrWindow);
+  if (!_javaRenderObj) {
+    WEBRTC_TRACE(
+        kTraceError,
+        kTraceVideoRenderer,
+        _id,
+        "%s: could not create Java SurfaceRender object reference",
+        __FUNCTION__);
+    return -1;
+  }
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (g_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done",
+               __FUNCTION__);
+  return 0;
+}
+
+AndroidStream*
+AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
+    int32_t streamId,
+    int32_t zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom,
+    VideoRenderAndroid& renderer) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
+               __FUNCTION__, streamId);
+  AndroidNativeOpenGl2Channel* stream =
+      new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer,
+                                      _javaRenderObj);
+  if (stream && stream->Init(zOrder, left, top, right, bottom) == 0) {
+    return stream;
+  } else {
+    delete stream;
+  }
+  return NULL;
+}
+
+AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
+    uint32_t streamId,
+    JavaVM* jvm,
+    VideoRenderAndroid& renderer, jobject javaRenderObj) :
+    _id(streamId),
+    _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
+    _registerNativeCID(NULL), _deRegisterNativeCID(NULL),
+    _openGLRenderer(streamId) {
+}
+
+AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+               "AndroidNativeOpenGl2Channel dtor");
+  if (_jvm) {
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+      // Get the JNI env for this thread
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__, res, env);
+        env = NULL;
+      } else {
+        isAttached = true;
+      }
+    }
+    if (env && _deRegisterNativeCID) {
+      env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID);
+    }
+
+    if (isAttached) {
+      if (_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                     "%s: Could not detach thread from JVM",
+                     __FUNCTION__);
+      }
+    }
+  }
+
+  delete &_renderCritSect;
+}
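+
+// A minimal RAII sketch of the attach/detach idiom that this file
+// repeats in every JNI entry point (hypothetical helper, shown only
+// for illustration; the module itself inlines the pattern):
+class ScopedJvmAttach {
+ public:
+  explicit ScopedJvmAttach(JavaVM* jvm)
+      : jvm_(jvm), env_(NULL), attached_(false) {
+    if (jvm_->GetEnv(reinterpret_cast<void**>(&env_),
+                     JNI_VERSION_1_4) != JNI_OK) {
+      // Not attached yet: attach now and detach again on scope exit.
+      attached_ = (jvm_->AttachCurrentThread(&env_, NULL) == 0);
+      if (!attached_)
+        env_ = NULL;
+    }
+  }
+  ~ScopedJvmAttach() {
+    if (attached_)
+      jvm_->DetachCurrentThread();
+  }
+  JNIEnv* env() const { return env_; }  // NULL if attach failed.
+
+ private:
+  JavaVM* jvm_;
+  JNIEnv* env_;
+  bool attached_;
+};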
+
+int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
+                                          const float left,
+                                          const float top,
+                                          const float right,
+                                          const float bottom) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: AndroidNativeOpenGl2Channel", __FUNCTION__);
+  if (!_jvm) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Not a valid Java VM pointer", __FUNCTION__);
+    return -1;
+  }
+
+  // get the JNI env for this thread
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, env);
+      return -1;
+    }
+    isAttached = true;
+  }
+
+  jclass javaRenderClass =
+      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
+  if (!javaRenderClass) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not find ViESurfaceRenderer", __FUNCTION__);
+    return -1;
+  }
+
+  // get the method ID for the ReDraw function
+  _redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V");
+  if (_redrawCid == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not get ReDraw ID", __FUNCTION__);
+    return -1;
+  }
+
+  _registerNativeCID = env->GetMethodID(javaRenderClass,
+                                        "RegisterNativeObject", "(J)V");
+  if (_registerNativeCID == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not get RegisterNativeObject ID", __FUNCTION__);
+    return -1;
+  }
+
+  _deRegisterNativeCID = env->GetMethodID(javaRenderClass,
+                                          "DeRegisterNativeObject", "()V");
+  if (_deRegisterNativeCID == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not get DeRegisterNativeObject ID",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  JNINativeMethod nativeFunctions[2] = {
+    { "DrawNative",
+      "(J)V",
+      (void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic, },
+    { "CreateOpenGLNative",
+      "(JII)I",
+      (void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic },
+  };
+  if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0) {
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1,
+                 "%s: Registered native functions", __FUNCTION__);
+  } else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "%s: Failed to register native functions", __FUNCTION__);
+    return -1;
+  }
+
+  env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this);
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+
+  if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) {
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__);
+  return 0;
+}
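+
+// For reference, the RegisterNatives call above implies that the Java
+// class declares roughly the following (a sketch of the expected
+// ViEAndroidGLES20 side, which is not shown in this patch):
+//   private native void DrawNative(long context);              // "(J)V"
+//   private native int CreateOpenGLNative(long context,
+//                                         int width, int height);  // "(JII)I"
+// In JNI descriptors, "J" is long, "I" is int, and "V" is void.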
+
+int32_t AndroidNativeOpenGl2Channel::RenderFrame(const uint32_t /*streamId*/,
+                                                 const VideoFrame& videoFrame) {
+  //   WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
+  _renderCritSect.Enter();
+  _bufferToRender = videoFrame;
+  _renderCritSect.Leave();
+  _renderer.ReDraw();
+  return 0;
+}
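+
+// Render path, for orientation: RenderFrame() runs on the incoming
+// frame thread and only copies the frame under _renderCritSect;
+// _renderer.ReDraw() then signals the Java GL thread, which calls back
+// through DrawNativeStatic() -> DrawNative(), where the copied frame
+// is drawn under the same lock.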
+
+/* Implements AndroidStream.
+ * Calls the Java object and renders the buffer in _bufferToRender.
+ */
+void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) {
+  //TickTime timeNow=TickTime::Now();
+
+  // Draw the Surface
+  jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
+
+  // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id,
+  // "%s: time to deliver %lld" ,__FUNCTION__,
+  // (TickTime::Now()-timeNow).Milliseconds());
+}
+
+/*
+ * JNI callback from the Java class. Called when the renderer
+ * wants to render a frame. Called from the GLRenderThread.
+ * Method:    DrawNative
+ * Signature: (J)V
+ */
+void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic(
+    JNIEnv * env, jobject, jlong context) {
+  AndroidNativeOpenGl2Channel* renderChannel =
+      reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
+  renderChannel->DrawNative();
+}
+
+void AndroidNativeOpenGl2Channel::DrawNative() {
+  _renderCritSect.Enter();
+  _openGLRenderer.Render(_bufferToRender);
+  _renderCritSect.Leave();
+}
+
+/*
+ * JNI callback from the Java class. Called when the GLSurfaceView
+ * has created a surface. Called from the GLRenderThread.
+ * Method:    CreateOpenGLNativeStatic
+ * Signature: (JII)I
+ */
+jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(
+    JNIEnv * env,
+    jobject,
+    jlong context,
+    jint width,
+    jint height) {
+  AndroidNativeOpenGl2Channel* renderChannel =
+      reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
+  return renderChannel->CreateOpenGLNative(width, height);
+}
+
+jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(
+    int width, int height) {
+  return _openGLRenderer.Setup(width, height);
+}
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/android/video_render_android_native_opengl2.h b/webrtc/modules/video_render/android/video_render_android_native_opengl2.h
new file mode 100644
index 0000000..8be247b
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_android_native_opengl2.h
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
+
+#include <jni.h>
+
+#include "webrtc/modules/video_render/android/video_render_android_impl.h"
+#include "webrtc/modules/video_render/android/video_render_opengles20.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class AndroidNativeOpenGl2Channel : public AndroidStream {
+ public:
+  AndroidNativeOpenGl2Channel(
+      uint32_t streamId,
+      JavaVM* jvm,
+      VideoRenderAndroid& renderer, jobject javaRenderObj);
+  ~AndroidNativeOpenGl2Channel();
+
+  int32_t Init(int32_t zOrder, const float left, const float top,
+               const float right, const float bottom);
+
+  // Implements VideoRenderCallback
+  virtual int32_t RenderFrame(const uint32_t streamId,
+                              const VideoFrame& videoFrame);
+
+  // Implements AndroidStream
+  virtual void DeliverFrame(JNIEnv* jniEnv);
+
+ private:
+  static jint JNICALL CreateOpenGLNativeStatic(
+      JNIEnv * env,
+      jobject,
+      jlong context,
+      jint width,
+      jint height);
+  jint CreateOpenGLNative(int width, int height);
+
+  static void JNICALL DrawNativeStatic(JNIEnv* env, jobject, jlong context);
+  void DrawNative();
+  uint32_t _id;
+  CriticalSectionWrapper& _renderCritSect;
+
+  VideoFrame _bufferToRender;
+  VideoRenderAndroid& _renderer;
+  JavaVM*     _jvm;
+  jobject     _javaRenderObj;
+
+  jmethodID      _redrawCid;
+  jmethodID      _registerNativeCID;
+  jmethodID      _deRegisterNativeCID;
+  VideoRenderOpenGles20 _openGLRenderer;
+};
+
+
+class AndroidNativeOpenGl2Renderer : private VideoRenderAndroid {
+ public:
+  AndroidNativeOpenGl2Renderer(const int32_t id,
+                               const VideoRenderType videoRenderType,
+                               void* window,
+                               const bool fullscreen);
+
+  ~AndroidNativeOpenGl2Renderer();
+  static bool UseOpenGL2(void* window);
+
+  int32_t Init();
+  virtual AndroidStream* CreateAndroidRenderChannel(
+      int32_t streamId,
+      int32_t zOrder,
+      const float left,
+      const float top,
+      const float right,
+      const float bottom,
+      VideoRenderAndroid& renderer);
+
+ private:
+  jobject _javaRenderObj;
+  jclass _javaRenderClass;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
diff --git a/webrtc/modules/video_render/android/video_render_android_surface_view.cc b/webrtc/modules/video_render/android/video_render_android_surface_view.cc
new file mode 100644
index 0000000..ea3b106
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_android_surface_view.cc
@@ -0,0 +1,474 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+
+#ifdef ANDROID_LOG
+#include <android/log.h>
+#include <stdio.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
+#else
+#include "webrtc/system_wrappers/include/trace.h"
+#endif
+
+namespace webrtc {
+
+AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(
+    const int32_t id,
+    const VideoRenderType videoRenderType,
+    void* window,
+    const bool fullscreen) :
+    VideoRenderAndroid(id,videoRenderType,window,fullscreen),
+    _javaRenderObj(NULL),
+    _javaRenderClass(NULL) {
+}
+
+AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+               "AndroidSurfaceViewRenderer dtor");
+  if (g_jvm) {
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+      // Get the JNI env for this thread
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(kTraceError,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__,
+                     res,
+                     env);
+        env = NULL;
+      } else {
+        isAttached = true;
+      }
+    }
+    if (env) {
+      env->DeleteGlobalRef(_javaRenderObj);
+      env->DeleteGlobalRef(_javaRenderClass);
+    }
+
+    if (isAttached) {
+      if (g_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(kTraceWarning,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: Could not detach thread from JVM",
+                     __FUNCTION__);
+      }
+    }
+  }
+}
+
+int32_t AndroidSurfaceViewRenderer::Init() {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+  if (!g_jvm) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "(%s): Not a valid Java VM pointer.",
+                 __FUNCTION__);
+    return -1;
+  }
+  if (!_ptrWindow) {
+    WEBRTC_TRACE(kTraceWarning,
+                 kTraceVideoRenderer,
+                 _id,
+                 "(%s): No window has been provided.",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // get the JNI env for this thread
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(kTraceError,
+                   kTraceVideoRenderer,
+                   _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__,
+                   res,
+                   env);
+      return -1;
+    }
+    isAttached = true;
+  }
+
+  // get the ViESurfaceRender class
+  jclass javaRenderClassLocal =
+      env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
+  if (!javaRenderClassLocal) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not find ViESurfaceRenderer",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // create a global reference to the class (to tell JNI that
+  // we are referencing it after this function has returned)
+  _javaRenderClass =
+      reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
+  if (!_javaRenderClass) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not create Java ViESurfaceRenderer class reference",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // Delete local class ref, we only use the global ref
+  env->DeleteLocalRef(javaRenderClassLocal);
+
+  // get the method ID for the constructor
+  jmethodID cid = env->GetMethodID(_javaRenderClass,
+                                   "<init>",
+                                   "(Landroid/view/SurfaceView;)V");
+  if (cid == NULL) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not get constructor ID",
+                 __FUNCTION__);
+    return -1; /* exception thrown */
+  }
+
+  // construct the object
+  jobject javaRenderObjLocal = env->NewObject(_javaRenderClass,
+                                              cid,
+                                              _ptrWindow);
+  if (!javaRenderObjLocal) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not create Java Render",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // create a reference to the object (to tell JNI that we are referencing it
+  // after this function has returned)
+  _javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
+  if (!_javaRenderObj) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not create Java SurfaceRender object reference",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (g_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning,
+                   kTraceVideoRenderer,
+                   _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
+  return 0;
+}
+
+AndroidStream*
+AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(
+    int32_t streamId,
+    int32_t zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom,
+    VideoRenderAndroid& renderer) {
+  WEBRTC_TRACE(kTraceDebug,
+               kTraceVideoRenderer,
+               _id,
+               "%s: Id %d",
+               __FUNCTION__,
+               streamId);
+  AndroidSurfaceViewChannel* stream =
+      new AndroidSurfaceViewChannel(streamId, g_jvm, renderer, _javaRenderObj);
+  if (stream && stream->Init(zOrder, left, top, right, bottom) == 0) {
+    return stream;
+  } else {
+    delete stream;
+  }
+  return NULL;
+}
+
+AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(
+    uint32_t streamId,
+    JavaVM* jvm,
+    VideoRenderAndroid& renderer,
+    jobject javaRenderObj) :
+    _id(streamId),
+    _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _renderer(renderer),
+    _jvm(jvm),
+    _javaRenderObj(javaRenderObj),
+#ifndef ANDROID_NDK_8_OR_ABOVE
+    _javaByteBufferObj(NULL),
+    _directBuffer(NULL),
+#endif
+    _bitmapWidth(0),
+    _bitmapHeight(0) {
+}
+
+AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() {
+  WEBRTC_TRACE(kTraceInfo,
+               kTraceVideoRenderer,
+               _id,
+               "AndroidSurfaceViewChannel dtor");
+  delete &_renderCritSect;
+  if (_jvm) {
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+      // Get the JNI env for this thread
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(kTraceError,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__,
+                     res,
+                     env);
+        env = NULL;
+      } else {
+        isAttached = true;
+      }
+    }
+
+    if (env) {
+      env->DeleteGlobalRef(_javaByteBufferObj);
+    }
+    if (isAttached) {
+      if (_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(kTraceWarning,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: Could not detach thread from JVM",
+                     __FUNCTION__);
+      }
+    }
+  }
+}
+
+int32_t AndroidSurfaceViewChannel::Init(
+    int32_t /*zOrder*/,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom) {
+
+  WEBRTC_TRACE(kTraceDebug,
+               kTraceVideoRenderer,
+               _id,
+               "%s: AndroidSurfaceViewChannel",
+               __FUNCTION__);
+  if (!_jvm) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: Not a valid Java VM pointer",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  if ((top > 1 || top < 0) || (right > 1 || right < 0) ||
+      (bottom > 1 || bottom < 0) || (left > 1 || left < 0)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Wrong coordinates", __FUNCTION__);
+    return -1;
+  }
+
+  // get the JNI env for this thread
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(kTraceError,
+                   kTraceVideoRenderer,
+                   _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__,
+                   res,
+                   env);
+      return -1;
+    }
+    isAttached = true;
+  }
+
+  jclass javaRenderClass =
+      env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
+  if (!javaRenderClass) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not find ViESurfaceRenderer",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // get the method ID for the CreateIntArray
+  _createByteBufferCid =
+      env->GetMethodID(javaRenderClass,
+                       "CreateByteBuffer",
+                       "(II)Ljava/nio/ByteBuffer;");
+  if (_createByteBufferCid == NULL) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not get CreateByteBuffer ID",
+                 __FUNCTION__);
+    return -1; /* exception thrown */
+  }
+
+  // get the method ID for the DrawByteBuffer function
+  _drawByteBufferCid = env->GetMethodID(javaRenderClass,
+                                        "DrawByteBuffer",
+                                        "()V");
+  if (_drawByteBufferCid == NULL) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not get DrawByteBuffer ID",
+                 __FUNCTION__);
+    return -1; /* exception thrown */
+  }
+
+  // get the method ID for the SetCoordinates function
+  _setCoordinatesCid = env->GetMethodID(javaRenderClass,
+                                        "SetCoordinates",
+                                        "(FFFF)V");
+  if (_setCoordinatesCid == NULL) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not get SetCoordinates ID",
+                 __FUNCTION__);
+    return -1; /* exception thrown */
+  }
+
+  env->CallVoidMethod(_javaRenderObj, _setCoordinatesCid,
+                      left, top, right, bottom);
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning,
+                   kTraceVideoRenderer,
+                   _id,
+                   "%s: Could not detach thread from JVM",
+                   __FUNCTION__);
+    }
+  }
+
+  WEBRTC_TRACE(kTraceDebug,
+               kTraceVideoRenderer,
+               _id,
+               "%s: AndroidSurfaceViewChannel done",
+               __FUNCTION__);
+  return 0;
+}
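+
+// JNI method descriptors looked up above, decoded for reference:
+//   "(II)Ljava/nio/ByteBuffer;"  -- (int, int) -> java.nio.ByteBuffer
+//   "()V"                        -- no arguments -> void
+//   "(FFFF)V"                    -- (float, float, float, float) -> void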
+
+int32_t AndroidSurfaceViewChannel::RenderFrame(const uint32_t /*streamId*/,
+                                               const VideoFrame& videoFrame) {
+  // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
+  _renderCritSect.Enter();
+  _bufferToRender = videoFrame;
+  _renderCritSect.Leave();
+  _renderer.ReDraw();
+  return 0;
+}
+
+
+/* Implements AndroidStream.
+ * Calls the Java object and renders the buffer in _bufferToRender.
+ */
+void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
+  _renderCritSect.Enter();
+
+  if (_bitmapWidth != _bufferToRender.width() ||
+      _bitmapHeight != _bufferToRender.height()) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
+                 "%d",__FUNCTION__,
+                 _bufferToRender.width(), _bufferToRender.height());
+    if (_javaByteBufferObj) {
+      jniEnv->DeleteGlobalRef(_javaByteBufferObj);
+      _javaByteBufferObj = NULL;
+      _directBuffer = NULL;
+    }
+
+    jobject javaByteBufferObj =
+        jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
+                                 _bufferToRender.width(),
+                                 _bufferToRender.height());
+    _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
+    if (!_javaByteBufferObj) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not "
+                   "create Java ByteBuffer object reference", __FUNCTION__);
+      _renderCritSect.Leave();
+      return;
+    } else {
+      _directBuffer = static_cast<unsigned char*>(
+          jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
+      _bitmapWidth = _bufferToRender.width();
+      _bitmapHeight = _bufferToRender.height();
+    }
+  }
+
+  if (_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
+    const int conversionResult =
+        ConvertFromI420(_bufferToRender, kRGB565, 0, _directBuffer);
+
+    if (conversionResult < 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
+                   " failed.", __FUNCTION__);
+      _renderCritSect.Leave();
+      return;
+    }
+  }
+  _renderCritSect.Leave();
+  // Draw the Surface
+  jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
+}
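+
+// The direct ByteBuffer is the pixel transport between the two sides:
+// Java allocates it (CreateByteBuffer), the native side caches its
+// backing store via GetDirectBufferAddress and fills it with RGB565
+// pixels, and DrawByteBuffer blits it to the SurfaceView. In outline:
+//
+//   jobject buf = env->CallObjectMethod(obj, createByteBufferCid, w, h);
+//   uint8_t* pixels =
+//       static_cast<uint8_t*>(env->GetDirectBufferAddress(buf));
+//   // ... write w * h * 2 bytes of RGB565 into |pixels|, then:
+//   env->CallVoidMethod(obj, drawByteBufferCid);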
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/android/video_render_android_surface_view.h b/webrtc/modules/video_render/android/video_render_android_surface_view.h
new file mode 100644
index 0000000..0f029b5
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_android_surface_view.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
+
+#include <jni.h>
+
+#include "webrtc/modules/video_render/android/video_render_android_impl.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class AndroidSurfaceViewChannel : public AndroidStream {
+ public:
+  AndroidSurfaceViewChannel(uint32_t streamId,
+                            JavaVM* jvm,
+                            VideoRenderAndroid& renderer,
+                            jobject javaRenderObj);
+  ~AndroidSurfaceViewChannel();
+
+  int32_t Init(int32_t zOrder, const float left, const float top,
+               const float right, const float bottom);
+
+  // Implements VideoRenderCallback
+  virtual int32_t RenderFrame(const uint32_t streamId,
+                              const VideoFrame& videoFrame);
+
+  // Implements AndroidStream
+  virtual void DeliverFrame(JNIEnv* jniEnv);
+
+ private:
+  uint32_t _id;
+  CriticalSectionWrapper& _renderCritSect;
+
+  VideoFrame _bufferToRender;
+  VideoRenderAndroid& _renderer;
+  JavaVM* _jvm;
+  jobject _javaRenderObj;
+
+  jobject _javaByteBufferObj;
+  unsigned char* _directBuffer;
+  jmethodID _createByteBufferCid;
+  jmethodID _drawByteBufferCid;
+
+  jmethodID _setCoordinatesCid;
+  int _bitmapWidth;
+  int _bitmapHeight;
+};
+
+class AndroidSurfaceViewRenderer : private VideoRenderAndroid {
+ public:
+  AndroidSurfaceViewRenderer(const int32_t id,
+                             const VideoRenderType videoRenderType,
+                             void* window,
+                             const bool fullscreen);
+  ~AndroidSurfaceViewRenderer();
+  int32_t Init();
+  virtual AndroidStream* CreateAndroidRenderChannel(
+      int32_t streamId,
+      int32_t zOrder,
+      const float left,
+      const float top,
+      const float right,
+      const float bottom,
+      VideoRenderAndroid& renderer);
+ private:
+  jobject _javaRenderObj;
+  jclass _javaRenderClass;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
diff --git a/webrtc/modules/video_render/android/video_render_opengles20.cc b/webrtc/modules/video_render/android/video_render_opengles20.cc
new file mode 100644
index 0000000..45db56a
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_opengles20.cc
@@ -0,0 +1,397 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "webrtc/modules/video_render/android/video_render_opengles20.h"
+
+//#define ANDROID_LOG
+
+#ifdef ANDROID_LOG
+#include <android/log.h>
+#include <stdio.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+#else
+#include "webrtc/system_wrappers/include/trace.h"
+#endif
+
+namespace webrtc {
+
+const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };
+
+const char VideoRenderOpenGles20::g_vertexShader[] = {
+  "attribute vec4 aPosition;\n"
+  "attribute vec2 aTextureCoord;\n"
+  "varying vec2 vTextureCoord;\n"
+  "void main() {\n"
+  "  gl_Position = aPosition;\n"
+  "  vTextureCoord = aTextureCoord;\n"
+  "}\n" };
+
+// The fragment shader.
+// Does YUV to RGB conversion.
+const char VideoRenderOpenGles20::g_fragmentShader[] = {
+  "precision mediump float;\n"
+  "uniform sampler2D Ytex;\n"
+  "uniform sampler2D Utex,Vtex;\n"
+  "varying vec2 vTextureCoord;\n"
+  "void main(void) {\n"
+  "  float nx,ny,r,g,b,y,u,v;\n"
+  "  mediump vec4 txl,ux,vx;"
+  "  nx=vTextureCoord[0];\n"
+  "  ny=vTextureCoord[1];\n"
+  "  y=texture2D(Ytex,vec2(nx,ny)).r;\n"
+  "  u=texture2D(Utex,vec2(nx,ny)).r;\n"
+  "  v=texture2D(Vtex,vec2(nx,ny)).r;\n"
+
+  //"  y = v;\n"+
+  "  y=1.1643*(y-0.0625);\n"
+  "  u=u-0.5;\n"
+  "  v=v-0.5;\n"
+
+  "  r=y+1.5958*v;\n"
+  "  g=y-0.39173*u-0.81290*v;\n"
+  "  b=y+2.017*u;\n"
+  "  gl_FragColor=vec4(r,g,b,1.0);\n"
+  "}\n" };
+
+VideoRenderOpenGles20::VideoRenderOpenGles20(int32_t id) :
+    _id(id),
+    _textureWidth(-1),
+    _textureHeight(-1) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
+               __FUNCTION__, (int) _id);
+
+  const GLfloat vertices[20] = {
+    // X, Y, Z, U, V
+    -1, -1, 0, 0, 1, // Bottom Left
+    1, -1, 0, 1, 1, //Bottom Right
+    1, 1, 0, 1, 0, //Top Right
+    -1, 1, 0, 0, 0 }; //Top Left
+
+  memcpy(_vertices, vertices, sizeof(_vertices));
+}
+
+VideoRenderOpenGles20::~VideoRenderOpenGles20() {
+}
+
+int32_t VideoRenderOpenGles20::Setup(int32_t width, int32_t height) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: width %d, height %d", __FUNCTION__, (int) width,
+               (int) height);
+
+  printGLString("Version", GL_VERSION);
+  printGLString("Vendor", GL_VENDOR);
+  printGLString("Renderer", GL_RENDERER);
+  printGLString("Extensions", GL_EXTENSIONS);
+
+  int maxTextureImageUnits[2];
+  int maxTextureSize[2];
+  glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits);
+  glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize);
+
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: number of textures %d, size %d", __FUNCTION__,
+               (int) maxTextureImageUnits[0], (int) maxTextureSize[0]);
+
+  _program = createProgram(g_vertexShader, g_fragmentShader);
+  if (!_program) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Could not create program", __FUNCTION__);
+    return -1;
+  }
+
+  int positionHandle = glGetAttribLocation(_program, "aPosition");
+  checkGlError("glGetAttribLocation aPosition");
+  if (positionHandle == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Could not get aPosition handle", __FUNCTION__);
+    return -1;
+  }
+
+  int textureHandle = glGetAttribLocation(_program, "aTextureCoord");
+  checkGlError("glGetAttribLocation aTextureCoord");
+  if (textureHandle == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Could not get aTextureCoord handle", __FUNCTION__);
+    return -1;
+  }
+
+  // set the vertices array in the shader
+  // _vertices contains 4 vertices with 5 coordinates.
+  // 3 for (xyz) for the vertices and 2 for the texture
+  glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false,
+                        5 * sizeof(GLfloat), _vertices);
+  checkGlError("glVertexAttribPointer aPosition");
+
+  glEnableVertexAttribArray(positionHandle);
+  checkGlError("glEnableVertexAttribArray positionHandle");
+
+  // set the texture coordinate array in the shader
+  // _vertices contains 4 vertices with 5 coordinates.
+  // 3 for (xyz) for the vertices and 2 for the texture
+  glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5
+                        * sizeof(GLfloat), &_vertices[3]);
+  checkGlError("glVertexAttribPointer maTextureHandle");
+  glEnableVertexAttribArray(textureHandle);
+  checkGlError("glEnableVertexAttribArray textureHandle");
+
+  glUseProgram(_program);
+  int i = glGetUniformLocation(_program, "Ytex");
+  checkGlError("glGetUniformLocation");
+  glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
+  checkGlError("glUniform1i Ytex");
+
+  i = glGetUniformLocation(_program, "Utex");
+  checkGlError("glGetUniformLocation Utex");
+  glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
+  checkGlError("glUniform1i Utex");
+
+  i = glGetUniformLocation(_program, "Vtex");
+  checkGlError("glGetUniformLocation");
+  glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
+  checkGlError("glUniform1i");
+
+  glViewport(0, 0, width, height);
+  checkGlError("glViewport");
+  return 0;
+}
+
+// SetCoordinates
+// Sets the coordinates where the stream shall be rendered.
+// Values must be between 0 and 1.
+int32_t VideoRenderOpenGles20::SetCoordinates(int32_t zOrder,
+                                              const float left,
+                                              const float top,
+                                              const float right,
+                                              const float bottom) {
+  if ((top > 1 || top < 0) || (right > 1 || right < 0) ||
+      (bottom > 1 || bottom < 0) || (left > 1 || left < 0)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Wrong coordinates", __FUNCTION__);
+    return -1;
+  }
+
+  //  X, Y, Z, U, V
+  // -1, -1, 0, 0, 1, // Bottom Left
+  //  1, -1, 0, 1, 1, //Bottom Right
+  //  1,  1, 0, 1, 0, //Top Right
+  // -1,  1, 0, 0, 0  //Top Left
+
+  // Bottom Left
+  _vertices[0] = (left * 2) - 1;
+  _vertices[1] = -1 * (2 * bottom) + 1;
+  _vertices[2] = zOrder;
+
+  //Bottom Right
+  _vertices[5] = (right * 2) - 1;
+  _vertices[6] = -1 * (2 * bottom) + 1;
+  _vertices[7] = zOrder;
+
+  //Top Right
+  _vertices[10] = (right * 2) - 1;
+  _vertices[11] = -1 * (2 * top) + 1;
+  _vertices[12] = zOrder;
+
+  //Top Left
+  _vertices[15] = (left * 2) - 1;
+  _vertices[16] = -1 * (2 * top) + 1;
+  _vertices[17] = zOrder;
+
+  return 0;
+}
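+
+// Worked example of the mapping above: a stream meant to fill the
+// right half of the window (left = 0.5, top = 0, right = 1, bottom = 1)
+// maps into normalized device coordinates as
+//   bottom-left x: (0.5 * 2) - 1 = 0,   bottom-left y: -(2 * 1) + 1 = -1,
+//   top-right  x: (1 * 2) - 1 = 1,      top-right  y: -(2 * 0) + 1 =  1,
+// i.e. x in [0, 1] and y in [-1, 1].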
+
+int32_t VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) {
+  if (frameToRender.IsZeroSize()) {
+    return -1;
+  }
+
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
+               __FUNCTION__, (int) _id);
+
+  glUseProgram(_program);
+  checkGlError("glUseProgram");
+
+  if (_textureWidth != (GLsizei) frameToRender.width() ||
+      _textureHeight != (GLsizei) frameToRender.height()) {
+    SetupTextures(frameToRender);
+  }
+  UpdateTextures(frameToRender);
+
+  glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices);
+  checkGlError("glDrawArrays");
+
+  return 0;
+}
+
+GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType,
+                                         const char* pSource) {
+  GLuint shader = glCreateShader(shaderType);
+  if (shader) {
+    glShaderSource(shader, 1, &pSource, NULL);
+    glCompileShader(shader);
+    GLint compiled = 0;
+    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
+    if (!compiled) {
+      GLint infoLen = 0;
+      glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
+      if (infoLen) {
+        char* buf = (char*) malloc(infoLen);
+        if (buf) {
+          glGetShaderInfoLog(shader, infoLen, NULL, buf);
+          WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                       "%s: Could not compile shader %d: %s",
+                       __FUNCTION__, shaderType, buf);
+          free(buf);
+        }
+        glDeleteShader(shader);
+        shader = 0;
+      }
+    }
+  }
+  return shader;
+}
+
+GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource,
+                                            const char* pFragmentSource) {
+  GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
+  if (!vertexShader) {
+    return 0;
+  }
+
+  GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
+  if (!pixelShader) {
+    return 0;
+  }
+
+  GLuint program = glCreateProgram();
+  if (program) {
+    glAttachShader(program, vertexShader);
+    checkGlError("glAttachShader");
+    glAttachShader(program, pixelShader);
+    checkGlError("glAttachShader");
+    glLinkProgram(program);
+    GLint linkStatus = GL_FALSE;
+    glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+    if (linkStatus != GL_TRUE) {
+      GLint bufLength = 0;
+      glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
+      if (bufLength) {
+        char* buf = (char*) malloc(bufLength);
+        if (buf) {
+          glGetProgramInfoLog(program, bufLength, NULL, buf);
+          WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                       "%s: Could not link program: %s",
+                       __FUNCTION__, buf);
+          free(buf);
+        }
+      }
+      glDeleteProgram(program);
+      program = 0;
+    }
+  }
+  return program;
+}
+
+void VideoRenderOpenGles20::printGLString(const char *name, GLenum s) {
+  const char *v = (const char *) glGetString(s);
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n",
+               name, v);
+}
+
+void VideoRenderOpenGles20::checkGlError(const char* op) {
+#ifdef ANDROID_LOG
+  for (GLint error = glGetError(); error; error = glGetError()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "after %s() glError (0x%x)\n", op, error);
+  }
+#else
+  return;
+#endif
+}
+
+static void InitializeTexture(int name, int id, int width, int height) {
+  glActiveTexture(name);
+  glBindTexture(GL_TEXTURE_2D, id);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
+               GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
+}
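+
+// GL_LUMINANCE yields single-channel textures, which is why the
+// fragment shader reads each plane through the ".r" component.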
+
+void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: width %d, height %d", __FUNCTION__,
+               frameToRender.width(), frameToRender.height());
+
+  const GLsizei width = frameToRender.width();
+  const GLsizei height = frameToRender.height();
+
+  glGenTextures(3, _textureIds);  // Generate the Y, U and V textures.
+  InitializeTexture(GL_TEXTURE0, _textureIds[0], width, height);
+  InitializeTexture(GL_TEXTURE1, _textureIds[1], width / 2, height / 2);
+  InitializeTexture(GL_TEXTURE2, _textureIds[2], width / 2, height / 2);
+
+  checkGlError("SetupTextures");
+
+  _textureWidth = width;
+  _textureHeight = height;
+}
+
+// Uploads a plane of pixel data, accounting for stride != width*bpp.
+static void GlTexSubImage2D(GLsizei width, GLsizei height, int stride,
+                            const uint8_t* plane) {
+  if (stride == width) {
+    // Yay!  We can upload the entire plane in a single GL call.
+    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
+                    GL_UNSIGNED_BYTE,
+                    static_cast<const GLvoid*>(plane));
+  } else {
+    // Boo!  Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and Android doesn't
+    // have GL_EXT_unpack_subimage we have to upload a row at a time.  Ick.
+    for (int row = 0; row < height; ++row) {
+      glTexSubImage2D(GL_TEXTURE_2D, 0, 0, row, width, 1, GL_LUMINANCE,
+                      GL_UNSIGNED_BYTE,
+                      static_cast<const GLvoid*>(plane + (row * stride)));
+    }
+  }
+}
+
+void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender) {
+  const GLsizei width = frameToRender.width();
+  const GLsizei height = frameToRender.height();
+
+  glActiveTexture(GL_TEXTURE0);
+  glBindTexture(GL_TEXTURE_2D, _textureIds[0]);
+  GlTexSubImage2D(width, height, frameToRender.stride(kYPlane),
+                  frameToRender.buffer(kYPlane));
+
+  glActiveTexture(GL_TEXTURE1);
+  glBindTexture(GL_TEXTURE_2D, _textureIds[1]);
+  GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kUPlane),
+                  frameToRender.buffer(kUPlane));
+
+  glActiveTexture(GL_TEXTURE2);
+  glBindTexture(GL_TEXTURE_2D, _textureIds[2]);
+  GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kVPlane),
+                  frameToRender.buffer(kVPlane));
+
+  checkGlError("UpdateTextures");
+}
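+
+// The half-size uploads for U and V follow I420's 4:2:0 layout: each
+// chroma plane holds width/2 x height/2 samples, i.e. one chroma
+// sample per 2x2 block of luma pixels.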
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/android/video_render_opengles20.h b/webrtc/modules/video_render/android/video_render_opengles20.h
new file mode 100644
index 0000000..57e2a10
--- /dev/null
+++ b/webrtc/modules/video_render/android/video_render_opengles20.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
+
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+namespace webrtc {
+
+class VideoRenderOpenGles20 {
+ public:
+  VideoRenderOpenGles20(int32_t id);
+  ~VideoRenderOpenGles20();
+
+  int32_t Setup(int32_t width, int32_t height);
+  int32_t Render(const VideoFrame& frameToRender);
+  int32_t SetCoordinates(int32_t zOrder, const float left, const float top,
+                         const float right, const float bottom);
+
+ private:
+  void printGLString(const char *name, GLenum s);
+  void checkGlError(const char* op);
+  GLuint loadShader(GLenum shaderType, const char* pSource);
+  GLuint createProgram(const char* pVertexSource,
+                       const char* pFragmentSource);
+  void SetupTextures(const VideoFrame& frameToRender);
+  void UpdateTextures(const VideoFrame& frameToRender);
+
+  int32_t _id;
+  GLuint _textureIds[3]; // Texture id of Y,U and V texture.
+  GLuint _program;
+  GLsizei _textureWidth;
+  GLsizei _textureHeight;
+
+  GLfloat _vertices[20];
+  static const char g_indices[];
+
+  static const char g_vertexShader[];
+  static const char g_fragmentShader[];
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
diff --git a/webrtc/modules/video_render/external/video_render_external_impl.cc b/webrtc/modules/video_render/external/video_render_external_impl.cc
new file mode 100644
index 0000000..58df078
--- /dev/null
+++ b/webrtc/modules/video_render/external/video_render_external_impl.cc
@@ -0,0 +1,195 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_render/external/video_render_external_impl.h"
+
+namespace webrtc {
+
+VideoRenderExternalImpl::VideoRenderExternalImpl(
+                                                 const int32_t id,
+                                                 const VideoRenderType videoRenderType,
+                                                 void* window,
+                                                 const bool fullscreen) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _fullscreen(fullscreen)
+{
+}
+
+VideoRenderExternalImpl::~VideoRenderExternalImpl()
+{
+    delete &_critSect;
+}
+
+int32_t VideoRenderExternalImpl::Init()
+{
+    return 0;
+}
+
+int32_t VideoRenderExternalImpl::ChangeWindow(void* window)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+VideoRenderCallback*
+VideoRenderExternalImpl::AddIncomingRenderStream(const uint32_t streamId,
+                                                 const uint32_t zOrder,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return this;
+}
+
+int32_t VideoRenderExternalImpl::DeleteIncomingRenderStream(
+                                                                  const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+int32_t VideoRenderExternalImpl::GetIncomingRenderStreamProperties(
+                                                                         const uint32_t streamId,
+                                                                         uint32_t& zOrder,
+                                                                         float& left,
+                                                                         float& top,
+                                                                         float& right,
+                                                                         float& bottom) const
+{
+    CriticalSectionScoped cs(&_critSect);
+
+    zOrder = 0;
+    left = 0;
+    top = 0;
+    right = 0;
+    bottom = 0;
+
+    return 0;
+}
+
+int32_t VideoRenderExternalImpl::StartRender()
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+int32_t VideoRenderExternalImpl::StopRender()
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+VideoRenderType VideoRenderExternalImpl::RenderType()
+{
+    return kRenderExternal;
+}
+
+RawVideoType VideoRenderExternalImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool VideoRenderExternalImpl::FullScreen()
+{
+    CriticalSectionScoped cs(&_critSect);
+    return _fullscreen;
+}
+
+int32_t VideoRenderExternalImpl::GetGraphicsMemory(
+                                                         uint64_t& totalGraphicsMemory,
+                                                         uint64_t& availableGraphicsMemory) const
+{
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return -1;
+}
+
+int32_t VideoRenderExternalImpl::GetScreenResolution(
+                                                           uint32_t& screenWidth,
+                                                           uint32_t& screenHeight) const
+{
+    CriticalSectionScoped cs(&_critSect);
+    screenWidth = 0;
+    screenHeight = 0;
+    return 0;
+}
+
+uint32_t VideoRenderExternalImpl::RenderFrameRate(
+                                                        const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+int32_t VideoRenderExternalImpl::SetStreamCropping(
+                                                         const uint32_t streamId,
+                                                         const float left,
+                                                         const float top,
+                                                         const float right,
+                                                         const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+int32_t VideoRenderExternalImpl::ConfigureRenderer(
+                                                         const uint32_t streamId,
+                                                         const unsigned int zOrder,
+                                                         const float left,
+                                                         const float top,
+                                                         const float right,
+                                                         const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+int32_t VideoRenderExternalImpl::SetTransparentBackground(
+                                                                const bool enable)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+int32_t VideoRenderExternalImpl::SetText(
+                                               const uint8_t textId,
+                                               const uint8_t* text,
+                                               const int32_t textLength,
+                                               const uint32_t textColorRef,
+                                               const uint32_t backgroundColorRef,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+int32_t VideoRenderExternalImpl::SetBitmap(const void* bitMap,
+                                           const uint8_t pictureId,
+                                           const void* colorKey,
+                                           const float left,
+                                           const float top,
+                                           const float right,
+                                           const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+// VideoRenderCallback
+int32_t VideoRenderExternalImpl::RenderFrame(const uint32_t streamId,
+                                             const VideoFrame& videoFrame) {
+    return 0;
+}
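+
+// With the external render type the module draws nothing itself --
+// RenderFrame() above discards the frame. A minimal sketch of how an
+// application might consume frames instead (hypothetical caller code,
+// not part of this module):
+//
+//   class AppRenderer : public VideoRenderCallback {
+//    public:
+//     virtual int32_t RenderFrame(const uint32_t streamId,
+//                                 const VideoFrame& videoFrame) {
+//       // Hand the decoded frame to the application's own drawing code.
+//       return 0;
+//     }
+//   };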
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/external/video_render_external_impl.h b/webrtc/modules/video_render/external/video_render_external_impl.h
new file mode 100644
index 0000000..a8b663f
--- /dev/null
+++ b/webrtc/modules/video_render/external/video_render_external_impl.h
@@ -0,0 +1,128 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
+
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/video_render/i_video_render.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+
+namespace webrtc {
+
+// Class definitions
+class VideoRenderExternalImpl : IVideoRender, public VideoRenderCallback
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderExternalImpl(const int32_t id,
+                            const VideoRenderType videoRenderType,
+                            void* window, const bool fullscreen);
+
+    virtual ~VideoRenderExternalImpl();
+
+    virtual int32_t Init();
+
+    virtual int32_t ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const uint32_t streamId,
+                                      const uint32_t zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t
+            DeleteIncomingRenderStream(const uint32_t streamId);
+
+    virtual int32_t
+            GetIncomingRenderStreamProperties(const uint32_t streamId,
+                                              uint32_t& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual int32_t StartRender();
+
+    virtual int32_t StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual int32_t
+            GetGraphicsMemory(uint64_t& totalGraphicsMemory,
+                              uint64_t& availableGraphicsMemory) const;
+
+    virtual int32_t
+            GetScreenResolution(uint32_t& screenWidth,
+                                uint32_t& screenHeight) const;
+
+    virtual uint32_t RenderFrameRate(const uint32_t streamId);
+
+    virtual int32_t SetStreamCropping(const uint32_t streamId,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t ConfigureRenderer(const uint32_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t SetTransparentBackground(const bool enable);
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t textColorRef,
+                            const uint32_t backgroundColorRef,
+                            const float left, const float top,
+                            const float right, const float bottom);
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey, const float left,
+                              const float top, const float right,
+                              const float bottom);
+
+    // VideoRenderCallback
+    virtual int32_t RenderFrame(const uint32_t streamId,
+                                const VideoFrame& videoFrame);
+
+private:
+    CriticalSectionWrapper& _critSect;
+    bool _fullscreen;
+};
+
+}  // namespace webrtc
+
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
diff --git a/webrtc/modules/video_render/i_video_render.h b/webrtc/modules/video_render/i_video_render.h
new file mode 100644
index 0000000..e6ec7a4
--- /dev/null
+++ b/webrtc/modules/video_render/i_video_render.h
@@ -0,0 +1,131 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
+
+#include "webrtc/modules/video_render/video_render.h"
+
+namespace webrtc {
+
+// Platform-independent interface implemented by each platform renderer.
+class IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    virtual ~IVideoRender() {}
+
+    virtual int32_t Init() = 0;
+
+    virtual int32_t ChangeWindow(void* window) = 0;
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
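+    // The returned callback is owned by the renderer; it must not be used
+    // after DeleteIncomingRenderStream() is called for the same stream.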
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const uint32_t streamId,
+                                      const uint32_t zOrder,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) = 0;
+
+    virtual int32_t
+            DeleteIncomingRenderStream(const uint32_t streamId) = 0;
+
+    virtual int32_t
+            GetIncomingRenderStreamProperties(const uint32_t streamId,
+                                              uint32_t& zOrder,
+                                              float& left,
+                                              float& top,
+                                              float& right,
+                                              float& bottom) const = 0;
+    // Implemented in common code?
+    //virtual uint32_t GetNumIncomingRenderStreams() const = 0;
+    //virtual bool HasIncomingRenderStream(const uint16_t streamId) const = 0;
+
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual int32_t StartRender() = 0;
+
+    virtual int32_t StopRender() = 0;
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+    virtual VideoRenderType RenderType() = 0;
+
+    virtual RawVideoType PerferedVideoType() = 0;
+
+    virtual bool FullScreen() = 0;
+
+    // TODO: This should be handled in platform-specific code only.
+    virtual int32_t
+            GetGraphicsMemory(uint64_t& totalGraphicsMemory,
+                              uint64_t& availableGraphicsMemory) const = 0;
+
+    virtual int32_t
+            GetScreenResolution(uint32_t& screenWidth,
+                                uint32_t& screenHeight) const = 0;
+
+    virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0;
+
+    virtual int32_t SetStreamCropping(const uint32_t streamId,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) = 0;
+
+    virtual int32_t ConfigureRenderer(const uint32_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) = 0;
+
+    virtual int32_t SetTransparentBackground(const bool enable) = 0;
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t textColorRef,
+                            const uint32_t backgroundColorRef,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom) = 0;
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left,
+                              const float top,
+                              const float right,
+                              const float bottom) = 0;
+
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
diff --git a/webrtc/modules/video_render/ios/open_gles20.h b/webrtc/modules/video_render/ios/open_gles20.h
new file mode 100644
index 0000000..880ddb5
--- /dev/null
+++ b/webrtc/modules/video_render/ios/open_gles20.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
+
+#include <OpenGLES/ES2/glext.h>
+
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+/*
+ * OpenGles20 renders VideoFrames into a GLES 2.0 window; it is used by the
+ * VideoRenderIosView class.
+ */
+namespace webrtc {
+class OpenGles20 {
+ public:
+  OpenGles20();
+  ~OpenGles20();
+
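+  // Typical call sequence (illustrative): Setup() once with the drawable
+  // size, SetCoordinates() as needed, then Render() once per decoded frame.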
+  bool Setup(int32_t width, int32_t height);
+  bool Render(const VideoFrame& frame);
+
+  // SetCoordinates
+  // Sets the coordinates where the stream shall be rendered.
+  // Values must be between 0 and 1.
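+  // Illustrative example (hypothetical values): place the stream in the
+  // left half of the view at depth 0:
+  //   SetCoordinates(0.0f, 0.0f, 0.0f, 0.5f, 1.0f);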
+  bool SetCoordinates(const float z_order,
+                      const float left,
+                      const float top,
+                      const float right,
+                      const float bottom);
+
+ private:
+  // Compile and load the vertex and fragment shaders defined at the top of
+  // open_gles20.mm
+  GLuint LoadShader(GLenum shader_type, const char* shader_source);
+
+  GLuint CreateProgram(const char* vertex_source, const char* fragment_source);
+
+  // Initialize the textures by the frame width and height
+  void SetupTextures(const VideoFrame& frame);
+
+  // Update the textures by the YUV data from the frame
+  void UpdateTextures(const VideoFrame& frame);
+
+  GLuint texture_ids_[3];  // Texture ids of the Y, U and V planes.
+  GLuint program_;
+  GLsizei texture_width_;
+  GLsizei texture_height_;
+
+  GLfloat vertices_[20];
+  static const char indices_[];
+  static const char vertext_shader_[];
+  static const char fragment_shader_[];
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_IOS_OPEN_GLES20_H_
diff --git a/webrtc/modules/video_render/ios/open_gles20.mm b/webrtc/modules/video_render/ios/open_gles20.mm
new file mode 100644
index 0000000..d173528
--- /dev/null
+++ b/webrtc/modules/video_render/ios/open_gles20.mm
@@ -0,0 +1,333 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+// This file is mostly copied from
+// webrtc/modules/video_render/android/video_render_opengles20.h
+
+// TODO(sjlee): unify this copy with the android one.
+#include "webrtc/modules/video_render/ios/open_gles20.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+const char OpenGles20::indices_[] = {0, 3, 2, 0, 2, 1};
+
+const char OpenGles20::vertext_shader_[] = {
+    "attribute vec4 aPosition;\n"
+    "attribute vec2 aTextureCoord;\n"
+    "varying vec2 vTextureCoord;\n"
+    "void main() {\n"
+    "  gl_Position = aPosition;\n"
+    "  vTextureCoord = aTextureCoord;\n"
+    "}\n"};
+
+// The fragment shader.
+// Does YUV (BT.601, video range) to RGB conversion.
+const char OpenGles20::fragment_shader_[] = {
+    "precision mediump float;\n"
+    "uniform sampler2D Ytex;\n"
+    "uniform sampler2D Utex,Vtex;\n"
+    "varying vec2 vTextureCoord;\n"
+    "void main(void) {\n"
+    "  float nx,ny,r,g,b,y,u,v;\n"
+    "  mediump vec4 txl,ux,vx;\n"
+    "  nx=vTextureCoord[0];\n"
+    "  ny=vTextureCoord[1];\n"
+    "  y=texture2D(Ytex,vec2(nx,ny)).r;\n"
+    "  u=texture2D(Utex,vec2(nx,ny)).r;\n"
+    "  v=texture2D(Vtex,vec2(nx,ny)).r;\n"
+    "  y=1.1643*(y-0.0625);\n"
+    "  u=u-0.5;\n"
+    "  v=v-0.5;\n"
+    "  r=y+1.5958*v;\n"
+    "  g=y-0.39173*u-0.81290*v;\n"
+    "  b=y+2.017*u;\n"
+    "  gl_FragColor=vec4(r,g,b,1.0);\n"
+    "}\n"};
+
+OpenGles20::OpenGles20() : texture_width_(-1), texture_height_(-1) {
+  texture_ids_[0] = 0;
+  texture_ids_[1] = 0;
+  texture_ids_[2] = 0;
+
+  program_ = 0;
+
+  const GLfloat vertices[20] = {
+      // X, Y, Z, U, V
+      -1, -1, 0, 0, 1,   // Bottom Left
+      1,  -1, 0, 1, 1,   // Bottom Right
+      1,  1,  0, 1, 0,   // Top Right
+      -1, 1,  0, 0, 0};  // Top Left
+
+  memcpy(vertices_, vertices, sizeof(vertices_));
+}
+
+OpenGles20::~OpenGles20() {
+  if (program_) {
+    glDeleteTextures(3, texture_ids_);
+    glDeleteProgram(program_);
+  }
+}
+
+bool OpenGles20::Setup(int32_t width, int32_t height) {
+  program_ = CreateProgram(vertext_shader_, fragment_shader_);
+  if (!program_) {
+    return false;
+  }
+
+  int position_handle = glGetAttribLocation(program_, "aPosition");
+  int texture_handle = glGetAttribLocation(program_, "aTextureCoord");
+
+  // Set the vertex position array in the shader. vertices_ holds 4 vertices
+  // with 5 components each: 3 position (x, y, z) and 2 texture (u, v).
+  glVertexAttribPointer(
+      position_handle, 3, GL_FLOAT, false, 5 * sizeof(GLfloat), vertices_);
+
+  glEnableVertexAttribArray(position_handle);
+
+  // Set the texture coordinate array in the shader, interleaved after the
+  // three position components of each vertex.
+  glVertexAttribPointer(
+      texture_handle, 2, GL_FLOAT, false, 5 * sizeof(GLfloat), &vertices_[3]);
+  glEnableVertexAttribArray(texture_handle);
+
+  glUseProgram(program_);
+  int i = glGetUniformLocation(program_, "Ytex");
+  glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
+
+  i = glGetUniformLocation(program_, "Utex");
+  glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
+
+  i = glGetUniformLocation(program_, "Vtex");
+  glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
+
+  glViewport(0, 0, width, height);
+  return true;
+}
+
+bool OpenGles20::SetCoordinates(const float z_order,
+                                const float left,
+                                const float top,
+                                const float right,
+                                const float bottom) {
+  if (top > 1 || top < 0 || right > 1 || right < 0 || bottom > 1 ||
+      bottom < 0 || left > 1 || left < 0) {
+    return false;
+  }
+
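+  // Map the normalized [0,1] view coordinates to GL clip space [-1,1],
+  // flipping the y axis (view origin is top-left, GL origin is bottom-left).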
+  // Bottom Left
+  vertices_[0] = (left * 2) - 1;
+  vertices_[1] = -1 * (2 * bottom) + 1;
+  vertices_[2] = z_order;
+
+  // Bottom Right
+  vertices_[5] = (right * 2) - 1;
+  vertices_[6] = -1 * (2 * bottom) + 1;
+  vertices_[7] = z_order;
+
+  // Top Right
+  vertices_[10] = (right * 2) - 1;
+  vertices_[11] = -1 * (2 * top) + 1;
+  vertices_[12] = z_order;
+
+  // Top Left
+  vertices_[15] = (left * 2) - 1;
+  vertices_[16] = -1 * (2 * top) + 1;
+  vertices_[17] = z_order;
+
+  return true;
+}
+
+bool OpenGles20::Render(const VideoFrame& frame) {
+  if (texture_width_ != (GLsizei)frame.width() ||
+      texture_height_ != (GLsizei)frame.height()) {
+    SetupTextures(frame);
+  }
+  UpdateTextures(frame);
+
+  glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, indices_);
+
+  return true;
+}
+
+GLuint OpenGles20::LoadShader(GLenum shader_type, const char* shader_source) {
+  GLuint shader = glCreateShader(shader_type);
+  if (shader) {
+    glShaderSource(shader, 1, &shader_source, NULL);
+    glCompileShader(shader);
+
+    GLint compiled = 0;
+    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
+    if (!compiled) {
+      GLint info_len = 0;
+      glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &info_len);
+      if (info_len) {
+        char* buf = static_cast<char*>(malloc(info_len));
+        glGetShaderInfoLog(shader, info_len, NULL, buf);
+        WEBRTC_TRACE(kTraceError,
+                     kTraceVideoRenderer,
+                     0,
+                     "%s: Could not compile shader %d: %s",
+                     __FUNCTION__,
+                     shader_type,
+                     buf);
+        free(buf);
+      }
+      glDeleteShader(shader);
+      shader = 0;
+    }
+  }
+  return shader;
+}
+
+GLuint OpenGles20::CreateProgram(const char* vertex_source,
+                                 const char* fragment_source) {
+  GLuint vertex_shader = LoadShader(GL_VERTEX_SHADER, vertex_source);
+  if (!vertex_shader) {
+    return 0;  // 0 is the failure value callers check (e.g. !program_).
+  }
+
+  GLuint fragment_shader = LoadShader(GL_FRAGMENT_SHADER, fragment_source);
+  if (!fragment_shader) {
+    glDeleteShader(vertex_shader);  // Don't leak the vertex shader.
+    return 0;
+  }
+
+  GLuint program = glCreateProgram();
+  if (program) {
+    glAttachShader(program, vertex_shader);
+    glAttachShader(program, fragment_shader);
+    glLinkProgram(program);
+    GLint link_status = GL_FALSE;
+    glGetProgramiv(program, GL_LINK_STATUS, &link_status);
+    if (link_status != GL_TRUE) {
+      GLint info_len = 0;
+      glGetProgramiv(program, GL_INFO_LOG_LENGTH, &info_len);
+      if (info_len) {
+        char* buf = static_cast<char*>(malloc(info_len));
+        glGetProgramInfoLog(program, info_len, NULL, buf);
+        WEBRTC_TRACE(kTraceError,
+                     kTraceVideoRenderer,
+                     0,
+                     "%s: Could not link program: %s",
+                     __FUNCTION__,
+                     buf);
+        free(buf);
+      }
+      glDeleteProgram(program);
+      program = 0;
+    }
+  }
+
+  if (vertex_shader) {
+    glDeleteShader(vertex_shader);
+  }
+
+  if (fragment_shader) {
+    glDeleteShader(fragment_shader);
+  }
+
+  return program;
+}
+
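+// Activates texture unit |name|, binds texture |id| to it, and allocates a
+// width x height single-channel (GL_LUMINANCE) plane for it.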
+static void InitializeTexture(int name, int id, int width, int height) {
+  glActiveTexture(name);
+  glBindTexture(GL_TEXTURE_2D, id);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+  glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+  glTexImage2D(GL_TEXTURE_2D,
+               0,
+               GL_LUMINANCE,
+               width,
+               height,
+               0,
+               GL_LUMINANCE,
+               GL_UNSIGNED_BYTE,
+               NULL);
+}
+
+void OpenGles20::SetupTextures(const VideoFrame& frame) {
+  const GLsizei width = frame.width();
+  const GLsizei height = frame.height();
+
+  if (!texture_ids_[0]) {
+    glGenTextures(3, texture_ids_);  // Generate  the Y, U and V texture
+  }
+
+  InitializeTexture(GL_TEXTURE0, texture_ids_[0], width, height);
+  InitializeTexture(GL_TEXTURE1, texture_ids_[1], width / 2, height / 2);
+  InitializeTexture(GL_TEXTURE2, texture_ids_[2], width / 2, height / 2);
+
+  texture_width_ = width;
+  texture_height_ = height;
+}
+
+// Uploads a plane of pixel data, accounting for stride != width*bpp.
+static void GlTexSubImage2D(GLsizei width,
+                            GLsizei height,
+                            int stride,
+                            const uint8_t* plane) {
+  if (stride == width) {
+    // Yay!  We can upload the entire plane in a single GL call.
+    glTexSubImage2D(GL_TEXTURE_2D,
+                    0,
+                    0,
+                    0,
+                    width,
+                    height,
+                    GL_LUMINANCE,
+                    GL_UNSIGNED_BYTE,
+                    static_cast<const GLvoid*>(plane));
+  } else {
+    // Boo!  Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and iOS doesn't
+    // have GL_EXT_unpack_subimage we have to upload a row at a time.  Ick.
+    for (int row = 0; row < height; ++row) {
+      glTexSubImage2D(GL_TEXTURE_2D,
+                      0,
+                      0,
+                      row,
+                      width,
+                      1,
+                      GL_LUMINANCE,
+                      GL_UNSIGNED_BYTE,
+                      static_cast<const GLvoid*>(plane + (row * stride)));
+    }
+  }
+}
+
+void OpenGles20::UpdateTextures(const VideoFrame& frame) {
+  const GLsizei width = frame.width();
+  const GLsizei height = frame.height();
+
+  glActiveTexture(GL_TEXTURE0);
+  glBindTexture(GL_TEXTURE_2D, texture_ids_[0]);
+  GlTexSubImage2D(width, height, frame.stride(kYPlane), frame.buffer(kYPlane));
+
+  glActiveTexture(GL_TEXTURE1);
+  glBindTexture(GL_TEXTURE_2D, texture_ids_[1]);
+  GlTexSubImage2D(
+      width / 2, height / 2, frame.stride(kUPlane), frame.buffer(kUPlane));
+
+  glActiveTexture(GL_TEXTURE2);
+  glBindTexture(GL_TEXTURE_2D, texture_ids_[2]);
+  GlTexSubImage2D(
+      width / 2, height / 2, frame.stride(kVPlane), frame.buffer(kVPlane));
+}
diff --git a/webrtc/modules/video_render/ios/video_render_ios_channel.h b/webrtc/modules/video_render/ios/video_render_ios_channel.h
new file mode 100644
index 0000000..a15ba39
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_channel.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
+
+#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
+
+namespace webrtc {
+
+class VideoRenderIosGles20;
+
+class VideoRenderIosChannel : public VideoRenderCallback {
+ public:
+  explicit VideoRenderIosChannel(VideoRenderIosView* view);
+  virtual ~VideoRenderIosChannel();
+
+  // Implementation of VideoRenderCallback.
+  int32_t RenderFrame(const uint32_t stream_id,
+                      const VideoFrame& video_frame) override;
+
+  int SetStreamSettings(const float z_order,
+                        const float left,
+                        const float top,
+                        const float right,
+                        const float bottom);
+  bool IsUpdated();
+  bool RenderOffScreenBuffer();
+
+ private:
+  VideoRenderIosView* view_;
+  VideoFrame* current_frame_;
+  bool buffer_is_updated_;
+};
+
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_CHANNEL_H_
diff --git a/webrtc/modules/video_render/ios/video_render_ios_channel.mm b/webrtc/modules/video_render/ios/video_render_ios_channel.mm
new file mode 100644
index 0000000..b2b1585
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_channel.mm
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
+
+using namespace webrtc;
+
+VideoRenderIosChannel::VideoRenderIosChannel(VideoRenderIosView* view)
+    : view_(view), current_frame_(new VideoFrame()), buffer_is_updated_(false) {
+}
+
+VideoRenderIosChannel::~VideoRenderIosChannel() { delete current_frame_; }
+
+int32_t VideoRenderIosChannel::RenderFrame(const uint32_t stream_id,
+                                           const VideoFrame& video_frame) {
+  current_frame_->CopyFrame(video_frame);
+  current_frame_->set_render_time_ms(0);
+  buffer_is_updated_ = true;
+
+  return 0;
+}
+
+bool VideoRenderIosChannel::RenderOffScreenBuffer() {
+  if (![view_ renderFrame:current_frame_]) {
+    return false;
+  }
+
+  buffer_is_updated_ = false;
+
+  return true;
+}
+
+bool VideoRenderIosChannel::IsUpdated() { return buffer_is_updated_; }
+
+int VideoRenderIosChannel::SetStreamSettings(const float z_order,
+                                             const float left,
+                                             const float top,
+                                             const float right,
+                                             const float bottom) {
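+  // Note: top and bottom are swapped when forwarded to the view, presumably
+  // converting from top-left-origin coordinates to GL's bottom-left origin.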
+  if (![view_ setCoordinatesForZOrder:z_order
+                                 Left:left
+                                  Top:bottom
+                                Right:right
+                               Bottom:top]) {
+    return -1;
+  }
+
+  return 0;
+}
diff --git a/webrtc/modules/video_render/ios/video_render_ios_gles20.h b/webrtc/modules/video_render/ios/video_render_ios_gles20.h
new file mode 100644
index 0000000..d4e04e7
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_gles20.h
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
+
+#include <list>
+#include <map>
+#include <memory>
+
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/modules/video_render/ios/video_render_ios_channel.h"
+#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class EventTimerWrapper;
+
+class VideoRenderIosGles20 {
+ public:
+  VideoRenderIosGles20(VideoRenderIosView* view,
+                       bool full_screen,
+                       int render_id);
+  virtual ~VideoRenderIosGles20();
+
+  int Init();
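+  // The returned channel is owned by this object and is freed by
+  // DeleteEaglChannel() or the destructor.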
+  VideoRenderIosChannel* CreateEaglChannel(int channel,
+                                           int z_order,
+                                           float left,
+                                           float top,
+                                           float right,
+                                           float bottom);
+  int DeleteEaglChannel(int channel);
+  bool HasChannel(int channel);
+  bool ScreenUpdateProcess();
+  int GetWindowRect(Rect& rect);  // NOLINT
+
+  int GetScreenResolution(uint& screen_width, uint& screen_height);  // NOLINT
+  int SetStreamCropping(const uint stream_id,
+                        const float left,
+                        const float top,
+                        const float right,
+                        const float bottom);
+
+  int ChangeWindow(void* new_window);
+  int StartRender();
+  int StopRender();
+
+ protected:
+  static bool ScreenUpdateThreadProc(void* obj);
+
+ private:
+  bool RenderOffScreenBuffers();
+  int SwapAndDisplayBuffers();
+
+  std::unique_ptr<CriticalSectionWrapper> gles_crit_sec_;
+  EventTimerWrapper* screen_update_event_;
+  // TODO(pbos): Remove unique_ptr and use member directly.
+  std::unique_ptr<rtc::PlatformThread> screen_update_thread_;
+
+  VideoRenderIosView* view_;
+  Rect window_rect_;
+  int window_width_;
+  int window_height_;
+  bool is_full_screen_;
+  GLint backing_width_;
+  GLint backing_height_;
+  GLuint view_renderbuffer_;
+  GLuint view_framebuffer_;
+  GLuint depth_renderbuffer_;
+  std::map<int, VideoRenderIosChannel*> agl_channels_;
+  std::multimap<int, int> z_order_to_channel_;
+  EAGLContext* gles_context_;
+  bool is_rendering_;
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_GLES20_H_
diff --git a/webrtc/modules/video_render/ios/video_render_ios_gles20.mm b/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
new file mode 100644
index 0000000..6ad5db8
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_gles20.mm
@@ -0,0 +1,272 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+
+using namespace webrtc;
+
+VideoRenderIosGles20::VideoRenderIosGles20(VideoRenderIosView* view,
+                                           bool full_screen,
+                                           int render_id)
+    : gles_crit_sec_(CriticalSectionWrapper::CreateCriticalSection()),
+      screen_update_event_(0),
+      view_(view),
+      window_rect_(),
+      window_width_(0),
+      window_height_(0),
+      is_full_screen_(full_screen),
+      agl_channels_(),
+      z_order_to_channel_(),
+      gles_context_([view context]),
+      is_rendering_(true) {
+  screen_update_thread_.reset(new rtc::PlatformThread(
+      ScreenUpdateThreadProc, this, "ScreenUpdateGles20"));
+  screen_update_event_ = EventTimerWrapper::Create();
+  GetWindowRect(window_rect_);
+}
+
+VideoRenderIosGles20::~VideoRenderIosGles20() {
+  // Signal event to exit thread, then delete it
+  rtc::PlatformThread* thread_wrapper = screen_update_thread_.release();
+
+  if (thread_wrapper) {
+    screen_update_event_->Set();
+    screen_update_event_->StopTimer();
+
+    thread_wrapper->Stop();
+    delete thread_wrapper;
+    delete screen_update_event_;
+    screen_update_event_ = NULL;
+    is_rendering_ = false;
+  }
+
+  // Delete all channels.
+  for (auto& entry : agl_channels_) {
+    delete entry.second;
+  }
+  agl_channels_.clear();
+
+  // Clean the zOrder map.
+  z_order_to_channel_.clear();
+}
+
+int VideoRenderIosGles20::Init() {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  if (!view_) {
+    view_ = [[VideoRenderIosView alloc] init];
+  }
+
+  if (![view_ createContext]) {
+    return -1;
+  }
+
+  screen_update_thread_->Start();
+  screen_update_thread_->SetPriority(rtc::kRealtimePriority);
+
+  // Start the event triggering the render process
+  unsigned int monitor_freq = 60;
+  screen_update_event_->StartTimer(true, 1000 / monitor_freq);
+
+  window_width_ = window_rect_.right - window_rect_.left;
+  window_height_ = window_rect_.bottom - window_rect_.top;
+
+  return 0;
+}
+
+VideoRenderIosChannel* VideoRenderIosGles20::CreateEaglChannel(int channel,
+                                                               int z_order,
+                                                               float left,
+                                                               float top,
+                                                               float right,
+                                                               float bottom) {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  if (HasChannel(channel)) {
+    return NULL;
+  }
+
+  VideoRenderIosChannel* new_eagl_channel = new VideoRenderIosChannel(view_);
+
+  if (new_eagl_channel->SetStreamSettings(z_order, left, top, right, bottom) ==
+      -1) {
+    delete new_eagl_channel;  // Avoid leaking the channel on failure.
+    return NULL;
+  }
+
+  agl_channels_[channel] = new_eagl_channel;
+  z_order_to_channel_.insert(std::pair<int, int>(z_order, channel));
+
+  return new_eagl_channel;
+}
+
+int VideoRenderIosGles20::DeleteEaglChannel(int channel) {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  std::map<int, VideoRenderIosChannel*>::iterator it;
+  it = agl_channels_.find(channel);
+  if (it != agl_channels_.end()) {
+    delete it->second;
+    agl_channels_.erase(it);
+  } else {
+    return -1;
+  }
+
+  std::multimap<int, int>::iterator z_it = z_order_to_channel_.begin();
+  while (z_it != z_order_to_channel_.end()) {
+    if (z_it->second == channel) {
+      z_order_to_channel_.erase(z_it);
+      break;
+    }
+    z_it++;
+  }
+
+  return 0;
+}
+
+bool VideoRenderIosGles20::HasChannel(int channel) {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+  return agl_channels_.find(channel) != agl_channels_.end();
+}
+
+// Rendering process
+bool VideoRenderIosGles20::ScreenUpdateThreadProc(void* obj) {
+  return static_cast<VideoRenderIosGles20*>(obj)->ScreenUpdateProcess();
+}
+
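+// Runs once per thread iteration: waits for the ~60 Hz timer event armed in
+// Init() and, when at least one channel has a new frame, repaints every
+// channel in descending z order and presents the framebuffer.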
+bool VideoRenderIosGles20::ScreenUpdateProcess() {
+  screen_update_event_->Wait(100);
+
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  if (!is_rendering_) {
+    return false;
+  }
+
+  if (!screen_update_thread_) {
+    return false;
+  }
+
+  if (GetWindowRect(window_rect_) == -1) {
+    return true;
+  }
+
+  if (window_width_ != (window_rect_.right - window_rect_.left) ||
+      window_height_ != (window_rect_.bottom - window_rect_.top)) {
+    window_width_ = window_rect_.right - window_rect_.left;
+    window_height_ = window_rect_.bottom - window_rect_.top;
+  }
+
+  // Check if there are any updated buffers
+  bool updated = false;
+
+  std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
+  while (it != agl_channels_.end()) {
+    VideoRenderIosChannel* agl_channel = it->second;
+
+    updated = agl_channel->IsUpdated();
+    if (updated) {
+      break;
+    }
+    it++;
+  }
+
+  if (updated) {
+    // At least one buffer has been updated, we need to repaint the texture
+    // Loop through all channels starting highest zOrder ending with lowest.
+    for (std::multimap<int, int>::reverse_iterator r_it =
+             z_order_to_channel_.rbegin();
+         r_it != z_order_to_channel_.rend();
+         r_it++) {
+      int channel_id = r_it->second;
+      std::map<int, VideoRenderIosChannel*>::iterator it =
+          agl_channels_.find(channel_id);
+
+      VideoRenderIosChannel* agl_channel = it->second;
+
+      agl_channel->RenderOffScreenBuffer();
+    }
+
+    [view_ presentFramebuffer];
+  }
+
+  return true;
+}
+
+int VideoRenderIosGles20::GetWindowRect(Rect& rect) {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  if (!view_) {
+    return -1;
+  }
+
+  CGRect bounds = [view_ bounds];
+  rect.top = bounds.origin.y;
+  rect.left = bounds.origin.x;
+  rect.bottom = bounds.size.height + bounds.origin.y;
+  rect.right = bounds.size.width + bounds.origin.x;
+
+  return 0;
+}
+
+int VideoRenderIosGles20::ChangeWindow(void* new_window) {
+  CriticalSectionScoped cs(gles_crit_sec_.get());
+
+  view_ = (__bridge VideoRenderIosView*)new_window;
+
+  return 0;
+}
+
+int VideoRenderIosGles20::StartRender() {
+  is_rendering_ = true;
+  return 0;
+}
+
+int VideoRenderIosGles20::StopRender() {
+  is_rendering_ = false;
+  return 0;
+}
+
+int VideoRenderIosGles20::GetScreenResolution(uint& screen_width,
+                                              uint& screen_height) {
+  screen_width = [view_ bounds].size.width;
+  screen_height = [view_ bounds].size.height;
+  return 0;
+}
+
+int VideoRenderIosGles20::SetStreamCropping(const uint stream_id,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom) {
+  // |stream_id| is interpreted as an index into the channel map.
+  uint counter = 0;
+
+  std::map<int, VideoRenderIosChannel*>::iterator it = agl_channels_.begin();
+  while (it != agl_channels_.end()) {
+    if (counter == stream_id) {
+      VideoRenderIosChannel* agl_channel = it->second;
+      agl_channel->SetStreamSettings(0, left, top, right, bottom);
+    }
+    counter++;
+    it++;
+  }
+
+  return 0;
+}
diff --git a/webrtc/modules/video_render/ios/video_render_ios_impl.h b/webrtc/modules/video_render/ios/video_render_ios_impl.h
new file mode 100644
index 0000000..04a7493
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_impl.h
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
+
+#include <list>
+#include <map>
+#include <memory>
+
+#include "webrtc/modules/video_render/i_video_render.h"
+
+namespace webrtc {
+
+class VideoRenderIosGles20;
+class CriticalSectionWrapper;
+
+class VideoRenderIosImpl : IVideoRender {
+ public:
+  explicit VideoRenderIosImpl(const int32_t id,
+                              void* window,
+                              const bool full_screen);
+
+  ~VideoRenderIosImpl();
+
+  // Implementation of IVideoRender.
+  int32_t Init() override;
+  int32_t ChangeWindow(void* window) override;
+
+  VideoRenderCallback* AddIncomingRenderStream(const uint32_t stream_id,
+                                               const uint32_t z_order,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom) override;
+
+  int32_t DeleteIncomingRenderStream(const uint32_t stream_id) override;
+
+  int32_t GetIncomingRenderStreamProperties(const uint32_t stream_id,
+                                            uint32_t& z_order,
+                                            float& left,
+                                            float& top,
+                                            float& right,
+                                            float& bottom) const override;
+
+  int32_t StartRender() override;
+  int32_t StopRender() override;
+
+  VideoRenderType RenderType() override;
+  RawVideoType PerferedVideoType() override;
+  bool FullScreen() override;
+  int32_t GetGraphicsMemory(
+      uint64_t& total_graphics_memory,
+      uint64_t& available_graphics_memory) const override;  // NOLINT
+  int32_t GetScreenResolution(
+      uint32_t& screen_width,
+      uint32_t& screen_height) const override;  // NOLINT
+  uint32_t RenderFrameRate(const uint32_t stream_id) override;
+  int32_t SetStreamCropping(const uint32_t stream_id,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom) override;
+  int32_t ConfigureRenderer(const uint32_t stream_id,
+                            const unsigned int z_order,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom) override;
+  int32_t SetTransparentBackground(const bool enable) override;
+  int32_t SetText(const uint8_t text_id,
+                  const uint8_t* text,
+                  const int32_t text_length,
+                  const uint32_t text_color_ref,
+                  const uint32_t background_color_ref,
+                  const float left,
+                  const float top,
+                  const float right,
+                  const float bottom) override;
+  int32_t SetBitmap(const void* bit_map,
+                    const uint8_t picture_id,
+                    const void* color_key,
+                    const float left,
+                    const float top,
+                    const float right,
+                    const float bottom) override;
+  int32_t FullScreenRender(void* window, const bool enable);
+
+ private:
+  int32_t id_;
+  void* ptr_window_;
+  bool full_screen_;
+
+  CriticalSectionWrapper* crit_sec_;
+  std::unique_ptr<VideoRenderIosGles20> ptr_ios_render_;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_IOS_VIDEO_RENDER_IOS_IMPL_H_
diff --git a/webrtc/modules/video_render/ios/video_render_ios_impl.mm b/webrtc/modules/video_render/ios/video_render_ios_impl.mm
new file mode 100644
index 0000000..0ef411d
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_impl.mm
@@ -0,0 +1,171 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
+#include "webrtc/modules/video_render/ios/video_render_ios_gles20.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
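+// Logs an error and returns -1 from the calling function; used to stub out
+// IVideoRender methods that have no iOS implementation.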
+#define IOS_UNSUPPORTED()                                  \
+  WEBRTC_TRACE(kTraceError,                                \
+               kTraceVideoRenderer,                        \
+               id_,                                        \
+               "%s is not supported on the iOS platform.", \
+               __FUNCTION__);                              \
+  return -1;
+
+VideoRenderIosImpl::VideoRenderIosImpl(const int32_t id,
+                                       void* window,
+                                       const bool full_screen)
+    : id_(id),
+      ptr_window_(window),
+      full_screen_(full_screen),
+      crit_sec_(CriticalSectionWrapper::CreateCriticalSection()) {}
+
+VideoRenderIosImpl::~VideoRenderIosImpl() {
+  delete crit_sec_;
+}
+
+int32_t VideoRenderIosImpl::Init() {
+  CriticalSectionScoped cs(crit_sec_);
+
+  ptr_ios_render_.reset(new VideoRenderIosGles20(
+      (__bridge VideoRenderIosView*)ptr_window_, full_screen_, id_));
+
+  return ptr_ios_render_->Init();
+}
+
+int32_t VideoRenderIosImpl::ChangeWindow(void* window) {
+  CriticalSectionScoped cs(crit_sec_);
+  if (window == NULL) {
+    return -1;
+  }
+
+  ptr_window_ = window;
+
+  return ptr_ios_render_->ChangeWindow(ptr_window_);
+}
+
+VideoRenderCallback* VideoRenderIosImpl::AddIncomingRenderStream(
+    const uint32_t stream_id,
+    const uint32_t z_order,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom) {
+  CriticalSectionScoped cs(crit_sec_);
+  if (!ptr_window_) {
+    return NULL;
+  }
+
+  return ptr_ios_render_->CreateEaglChannel(
+      stream_id, z_order, left, top, right, bottom);
+}
+
+int32_t VideoRenderIosImpl::DeleteIncomingRenderStream(
+    const uint32_t stream_id) {
+  CriticalSectionScoped cs(crit_sec_);
+
+  return ptr_ios_render_->DeleteEaglChannel(stream_id);
+}
+
+int32_t VideoRenderIosImpl::GetIncomingRenderStreamProperties(
+    const uint32_t stream_id,
+    uint32_t& z_order,
+    float& left,
+    float& top,
+    float& right,
+    float& bottom) const {
+  IOS_UNSUPPORTED();
+}
+
+int32_t VideoRenderIosImpl::StartRender() {
+  return ptr_ios_render_->StartRender();
+}
+
+int32_t VideoRenderIosImpl::StopRender() {
+  return ptr_ios_render_->StopRender();
+}
+
+VideoRenderType VideoRenderIosImpl::RenderType() { return kRenderiOS; }
+
+RawVideoType VideoRenderIosImpl::PerferedVideoType() { return kVideoI420; }
+
+bool VideoRenderIosImpl::FullScreen() { return full_screen_; }
+
+int32_t VideoRenderIosImpl::GetGraphicsMemory(
+    uint64_t& totalGraphicsMemory,
+    uint64_t& availableGraphicsMemory) const {
+  IOS_UNSUPPORTED();
+}
+
+int32_t VideoRenderIosImpl::GetScreenResolution(uint32_t& screenWidth,
+                                                uint32_t& screenHeight) const {
+  return ptr_ios_render_->GetScreenResolution(screenWidth, screenHeight);
+}
+
+uint32_t VideoRenderIosImpl::RenderFrameRate(const uint32_t streamId) {
+  IOS_UNSUPPORTED();
+}
+
+int32_t VideoRenderIosImpl::SetStreamCropping(const uint32_t streamId,
+                                              const float left,
+                                              const float top,
+                                              const float right,
+                                              const float bottom) {
+  return ptr_ios_render_->SetStreamCropping(streamId, left, top, right, bottom);
+}
+
+int32_t VideoRenderIosImpl::ConfigureRenderer(const uint32_t streamId,
+                                              const unsigned int zOrder,
+                                              const float left,
+                                              const float top,
+                                              const float right,
+                                              const float bottom) {
+  IOS_UNSUPPORTED();
+}
+
+int32_t VideoRenderIosImpl::SetTransparentBackground(const bool enable) {
+  IOS_UNSUPPORTED();
+}
+
+int32_t VideoRenderIosImpl::SetText(const uint8_t textId,
+                                    const uint8_t* text,
+                                    const int32_t textLength,
+                                    const uint32_t textColorRef,
+                                    const uint32_t backgroundColorRef,
+                                    const float left,
+                                    const float top,
+                                    const float right,
+                                    const float bottom) {
+  IOS_UNSUPPORTED();
+}
+
+int32_t VideoRenderIosImpl::SetBitmap(const void* bitMap,
+                                      const uint8_t pictureId,
+                                      const void* colorKey,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) {
+  IOS_UNSUPPORTED();
+}
+
+int32_t VideoRenderIosImpl::FullScreenRender(void* window, const bool enable) {
+  IOS_UNSUPPORTED();
+}
diff --git a/webrtc/modules/video_render/ios/video_render_ios_view.h b/webrtc/modules/video_render/ios/video_render_ios_view.h
new file mode 100644
index 0000000..d110bc7
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_view.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
+
+#import <UIKit/UIKit.h>
+#import <QuartzCore/QuartzCore.h>
+
+#include "webrtc/modules/video_render/ios/open_gles20.h"
+
+@interface VideoRenderIosView : UIView
+
+- (BOOL)createContext;
+- (BOOL)presentFramebuffer;
+- (BOOL)renderFrame:(webrtc::VideoFrame*)frameToRender;
+- (BOOL)setCoordinatesForZOrder:(const float)zOrder
+                           Left:(const float)left
+                            Top:(const float)top
+                          Right:(const float)right
+                         Bottom:(const float)bottom;
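+
+// Illustrative setup (hypothetical |frame|): the view can be created by the
+// renderer itself or supplied by the application, e.g.
+//   VideoRenderIosView* view =
+//       [[VideoRenderIosView alloc] initWithFrame:frame];
+//   [view createContext];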
+
+@property(nonatomic, retain) EAGLContext* context;
+
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_IOS_RENDER_VIEW_H_
diff --git a/webrtc/modules/video_render/ios/video_render_ios_view.mm b/webrtc/modules/video_render/ios/video_render_ios_view.mm
new file mode 100644
index 0000000..b106ffa
--- /dev/null
+++ b/webrtc/modules/video_render/ios/video_render_ios_view.mm
@@ -0,0 +1,163 @@
+/*
+ *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#include <memory>
+
+#include "webrtc/modules/video_render/ios/video_render_ios_view.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+@implementation VideoRenderIosView {
+  EAGLContext* _context;
+  std::unique_ptr<webrtc::OpenGles20> _gles_renderer20;
+  int _frameBufferWidth;
+  int _frameBufferHeight;
+  unsigned int _defaultFrameBuffer;
+  unsigned int _colorRenderBuffer;
+}
+
+@synthesize context = _context;
+
++ (Class)layerClass {
+  return [CAEAGLLayer class];
+}
+
+- (id)initWithCoder:(NSCoder*)coder {
+  // init super class
+  self = [super initWithCoder:coder];
+  if (self) {
+    _gles_renderer20.reset(new OpenGles20());
+  }
+  return self;
+}
+
+- (id)init {
+  // init super class
+  self = [super init];
+  if (self) {
+    _gles_renderer20.reset(new OpenGles20());
+  }
+  return self;
+}
+
+- (id)initWithFrame:(CGRect)frame {
+  // init super class
+  self = [super initWithFrame:frame];
+  if (self) {
+    _gles_renderer20.reset(new OpenGles20());
+  }
+  return self;
+}
+
+- (void)dealloc {
+  if (_defaultFrameBuffer) {
+    glDeleteFramebuffers(1, &_defaultFrameBuffer);
+    _defaultFrameBuffer = 0;
+  }
+
+  if (_colorRenderBuffer) {
+    glDeleteRenderbuffers(1, &_colorRenderBuffer);
+    _colorRenderBuffer = 0;
+  }
+
+  [EAGLContext setCurrentContext:nil];
+}
+
+- (NSString*)description {
+  return [NSString stringWithFormat:
+          @"A WebRTC-implemented subclass of UIView; +layerClass is "
+          "overridden, along with custom rendering methods."];
+}
+
+- (BOOL)createContext {
+  // create OpenGLES context from self layer class
+  CAEAGLLayer* eagl_layer = (CAEAGLLayer*)self.layer;
+  eagl_layer.opaque = YES;
+  eagl_layer.drawableProperties =
+      [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO],
+          kEAGLDrawablePropertyRetainedBacking,
+          kEAGLColorFormatRGBA8,
+          kEAGLDrawablePropertyColorFormat,
+          nil];
+  _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+
+  if (!_context) {
+    return NO;
+  }
+
+  if (![EAGLContext setCurrentContext:_context]) {
+    return NO;
+  }
+
+  // generates and binds the OpenGLES buffers
+  glGenFramebuffers(1, &_defaultFrameBuffer);
+  glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
+
+  // Create color render buffer and allocate backing store.
+  glGenRenderbuffers(1, &_colorRenderBuffer);
+  glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
+  [_context renderbufferStorage:GL_RENDERBUFFER
+                   fromDrawable:(CAEAGLLayer*)self.layer];
+  glGetRenderbufferParameteriv(
+      GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_frameBufferWidth);
+  glGetRenderbufferParameteriv(
+      GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_frameBufferHeight);
+  glFramebufferRenderbuffer(GL_FRAMEBUFFER,
+                            GL_COLOR_ATTACHMENT0,
+                            GL_RENDERBUFFER,
+                            _colorRenderBuffer);
+
+  if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
+    return NO;
+  }
+
+  // set the frame buffer
+  glBindFramebuffer(GL_FRAMEBUFFER, _defaultFrameBuffer);
+  glViewport(0, 0, self.frame.size.width, self.frame.size.height);
+
+  return _gles_renderer20->Setup([self bounds].size.width,
+                                 [self bounds].size.height);
+}
+
+- (BOOL)presentFramebuffer {
+  if (![_context presentRenderbuffer:GL_RENDERBUFFER]) {
+    WEBRTC_TRACE(kTraceWarning,
+                 kTraceVideoRenderer,
+                 0,
+                 "%s:%d [context present_renderbuffer] "
+                 "returned false",
+                 __FUNCTION__,
+                 __LINE__);
+  }
+  return YES;
+}
+
+- (BOOL)renderFrame:(VideoFrame*)frameToRender {
+  if (![EAGLContext setCurrentContext:_context]) {
+    return NO;
+  }
+
+  return _gles_renderer20->Render(*frameToRender);
+}
+
+- (BOOL)setCoordinatesForZOrder:(const float)zOrder
+                           Left:(const float)left
+                            Top:(const float)top
+                          Right:(const float)right
+                         Bottom:(const float)bottom {
+  return _gles_renderer20->SetCoordinates(zOrder, left, top, right, bottom);
+}
+
+@end
diff --git a/webrtc/modules/video_render/linux/video_render_linux_impl.cc b/webrtc/modules/video_render/linux/video_render_linux_impl.cc
new file mode 100644
index 0000000..7e53dfd
--- /dev/null
+++ b/webrtc/modules/video_render/linux/video_render_linux_impl.cc
@@ -0,0 +1,260 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
+
+#include "webrtc/modules/video_render/linux/video_x11_render.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+#include <X11/Xlib.h>
+
+namespace webrtc {
+
+VideoRenderLinuxImpl::VideoRenderLinuxImpl(
+                                           const int32_t id,
+                                           const VideoRenderType videoRenderType,
+                                           void* window, const bool fullscreen) :
+            _id(id),
+            _renderLinuxCritsect(
+                                 *CriticalSectionWrapper::CreateCriticalSection()),
+            _ptrWindow(window), _ptrX11Render(NULL)
+{
+}
+
+VideoRenderLinuxImpl::~VideoRenderLinuxImpl()
+{
+    if (_ptrX11Render)
+        delete _ptrX11Render;
+
+    delete &_renderLinuxCritsect;
+}
+
+int32_t VideoRenderLinuxImpl::Init()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+    _ptrX11Render = new VideoX11Render((Window) _ptrWindow);
+    if (!_ptrX11Render)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s",
+                     "Failed to create instance of VideoX11Render object");
+        return -1;
+    }
+    int retVal = _ptrX11Render->Init();
+    if (retVal == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t VideoRenderLinuxImpl::ChangeWindow(void* window)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+    _ptrWindow = window;
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->ChangeWindow((Window) window);
+    }
+
+    return -1;
+}
+
+VideoRenderCallback* VideoRenderLinuxImpl::AddIncomingRenderStream(
+                                                                       const uint32_t streamId,
+                                                                       const uint32_t zOrder,
+                                                                       const float left,
+                                                                       const float top,
+                                                                       const float right,
+                                                                       const float bottom)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+
+    VideoRenderCallback* renderCallback = NULL;
+    if (_ptrX11Render)
+    {
+        VideoX11Channel* renderChannel =
+                _ptrX11Render->CreateX11RenderChannel(streamId, zOrder, left,
+                                                      top, right, bottom);
+        if (!renderChannel)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                         "Render channel creation failed for stream id: %d",
+                         streamId);
+            return NULL;
+        }
+        renderCallback = (VideoRenderCallback *) renderChannel;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "_ptrX11Render is NULL");
+        return NULL;
+    }
+    return renderCallback;
+}
+
+int32_t VideoRenderLinuxImpl::DeleteIncomingRenderStream(
+                                                               const uint32_t streamId)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->DeleteX11RenderChannel(streamId);
+    }
+    return -1;
+}
+
+int32_t VideoRenderLinuxImpl::GetIncomingRenderStreamProperties(
+                                                                      const uint32_t streamId,
+                                                                      uint32_t& zOrder,
+                                                                      float& left,
+                                                                      float& top,
+                                                                      float& right,
+                                                                      float& bottom) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->GetIncomingStreamProperties(streamId, zOrder,
+                                                          left, top, right,
+                                                          bottom);
+    }
+    return -1;
+}
+
+int32_t VideoRenderLinuxImpl::StartRender()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    return 0;
+}
+
+int32_t VideoRenderLinuxImpl::StopRender()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    return 0;
+}
+
+VideoRenderType VideoRenderLinuxImpl::RenderType()
+{
+    return kRenderX11;
+}
+
+RawVideoType VideoRenderLinuxImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool VideoRenderLinuxImpl::FullScreen()
+{
+    return false;
+}
+
+int32_t VideoRenderLinuxImpl::GetGraphicsMemory(
+                                                      uint64_t& /*totalGraphicsMemory*/,
+                                                      uint64_t& /*availableGraphicsMemory*/) const
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+int32_t VideoRenderLinuxImpl::GetScreenResolution(
+                                                        uint32_t& /*screenWidth*/,
+                                                        uint32_t& /*screenHeight*/) const
+{
+    return -1;
+}
+
+uint32_t VideoRenderLinuxImpl::RenderFrameRate(const uint32_t /*streamId*/)
+{
+    return -1;
+}
+
+int32_t VideoRenderLinuxImpl::SetStreamCropping(
+                                                      const uint32_t /*streamId*/,
+                                                      const float /*left*/,
+                                                      const float /*top*/,
+                                                      const float /*right*/,
+                                                      const float /*bottom*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+int32_t VideoRenderLinuxImpl::SetTransparentBackground(const bool /*enable*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+int32_t VideoRenderLinuxImpl::ConfigureRenderer(
+                                                      const uint32_t streamId,
+                                                      const unsigned int zOrder,
+                                                      const float left,
+                                                      const float top,
+                                                      const float right,
+                                                      const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+int32_t VideoRenderLinuxImpl::SetText(
+                                            const uint8_t textId,
+                                            const uint8_t* text,
+                                            const int32_t textLength,
+                                            const uint32_t textColorRef,
+                                            const uint32_t backgroundColorRef,
+                                            const float left, const float top,
+                                            const float rigth,
+                                            const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+int32_t VideoRenderLinuxImpl::SetBitmap(const void* bitMap,
+                                        const uint8_t pictureId,
+                                        const void* colorKey,
+                                        const float left,
+                                        const float top,
+                                        const float right,
+                                        const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+}  // namespace webrtc
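Reviewer note: for context when re-reviewing this restored file, a minimal sketch of how the Linux renderer is typically driven. All types and methods below are declared in this patch; CreateNativeWindow() is a hypothetical helper standing in for however the embedder obtains an X11 window handle.

    // Sketch only -- not part of the patch.
    void* window = CreateNativeWindow();  // hypothetical; embedder-supplied
    webrtc::VideoRenderLinuxImpl renderer(0 /* id */, webrtc::kRenderX11,
                                          window, false /* fullscreen */);
    if (renderer.Init() != 0)
      return;  // XOpenDisplay or VideoX11Render setup failed
    // Register one stream covering the whole window (left/top/right/bottom
    // are fractions of the window, so 0..1 means full size).
    webrtc::VideoRenderCallback* sink = renderer.AddIncomingRenderStream(
        0 /* streamId */, 0 /* zOrder */, 0.0f, 0.0f, 1.0f, 1.0f);
    // Decoded frames are then pushed via sink->RenderFrame(0, frame).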
diff --git a/webrtc/modules/video_render/linux/video_render_linux_impl.h b/webrtc/modules/video_render/linux/video_render_linux_impl.h
new file mode 100644
index 0000000..0e9ae54
--- /dev/null
+++ b/webrtc/modules/video_render/linux/video_render_linux_impl.h
@@ -0,0 +1,128 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
+
+#include "webrtc/modules/video_render/i_video_render.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class VideoX11Render;
+
+// Class definitions
+class VideoRenderLinuxImpl: IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderLinuxImpl(const int32_t id,
+                         const VideoRenderType videoRenderType,
+                         void* window, const bool fullscreen);
+
+    virtual ~VideoRenderLinuxImpl();
+
+    virtual int32_t Init();
+
+    virtual int32_t ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const uint32_t streamId,
+                                      const uint32_t zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t
+            DeleteIncomingRenderStream(const uint32_t streamId);
+
+    virtual int32_t
+            GetIncomingRenderStreamProperties(const uint32_t streamId,
+                                              uint32_t& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual int32_t StartRender();
+
+    virtual int32_t StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual int32_t
+            GetGraphicsMemory(uint64_t& totalGraphicsMemory,
+                              uint64_t& availableGraphicsMemory) const;
+
+    virtual int32_t
+            GetScreenResolution(uint32_t& screenWidth,
+                                uint32_t& screenHeight) const;
+
+    virtual uint32_t RenderFrameRate(const uint32_t streamId);
+
+    virtual int32_t SetStreamCropping(const uint32_t streamId,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t SetTransparentBackground(const bool enable);
+
+    virtual int32_t ConfigureRenderer(const uint32_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t textColorRef,
+                            const uint32_t backgroundColorRef,
+                            const float left, const float top,
+                            const float rigth, const float bottom);
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left, const float top,
+                              const float right, const float bottom);
+
+private:
+    int32_t _id;
+    CriticalSectionWrapper& _renderLinuxCritsect;
+
+    void* _ptrWindow;
+
+    // X11 Render
+    VideoX11Render* _ptrX11Render;
+};
+
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
diff --git a/webrtc/modules/video_render/linux/video_x11_channel.cc b/webrtc/modules/video_render/linux/video_x11_channel.cc
new file mode 100644
index 0000000..8d86b7c
--- /dev/null
+++ b/webrtc/modules/video_render/linux/video_x11_channel.cc
@@ -0,0 +1,315 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_render/linux/video_x11_channel.h"
+
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+#define DISP_MAX 128
+
+static Display *dispArray[DISP_MAX];
+static int dispCount = 0;
+
+
+VideoX11Channel::VideoX11Channel(int32_t id) :
+    _crit(*CriticalSectionWrapper::CreateCriticalSection()), _display(NULL),
+          _shminfo(), _image(NULL), _window(0L), _gc(NULL),
+          _width(DEFAULT_RENDER_FRAME_WIDTH),
+          _height(DEFAULT_RENDER_FRAME_HEIGHT), _outWidth(0), _outHeight(0),
+          _xPos(0), _yPos(0), _prepared(false), _dispCount(0), _buffer(NULL),
+          _top(0.0), _left(0.0), _right(0.0), _bottom(0.0),
+          _Id(id)
+{
+}
+
+VideoX11Channel::~VideoX11Channel()
+{
+    if (_prepared)
+    {
+        _crit.Enter();
+        ReleaseWindow();
+        _crit.Leave();
+    }
+    delete &_crit;
+}
+
+int32_t VideoX11Channel::RenderFrame(const uint32_t streamId,
+                                     const VideoFrame& videoFrame) {
+  CriticalSectionScoped cs(&_crit);
+  if (_width != videoFrame.width() || _height != videoFrame.height()) {
+    if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
+      return -1;
+    }
+  }
+  return DeliverFrame(videoFrame);
+}
+
+int32_t VideoX11Channel::FrameSizeChange(int32_t width,
+                                         int32_t height,
+                                         int32_t /*numberOfStreams */)
+{
+    CriticalSectionScoped cs(&_crit);
+    if (_prepared)
+    {
+        RemoveRenderer();
+    }
+    if (CreateLocalRenderer(width, height) == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t VideoX11Channel::DeliverFrame(const VideoFrame& videoFrame) {
+  CriticalSectionScoped cs(&_crit);
+  if (!_prepared) {
+    return 0;
+  }
+
+  if (!dispArray[_dispCount]) {
+    return -1;
+  }
+
+  ConvertFromI420(videoFrame, kARGB, 0, _buffer);
+
+  // Put image in window.
+  XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
+               _height, True);
+
+  // Very important for the image to update properly!
+  XSync(_display, False);
+  return 0;
+}
+
+int32_t VideoX11Channel::GetFrameSize(int32_t& width, int32_t& height)
+{
+    width = _width;
+    height = _height;
+
+    return 0;
+}
+
+int32_t VideoX11Channel::Init(Window window, float left, float top,
+                              float right, float bottom)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_crit);
+
+    _window = window;
+    _left = left;
+    _right = right;
+    _top = top;
+    _bottom = bottom;
+
+    _display = XOpenDisplay(NULL); // Use default display
+    if (!_window || !_display)
+    {
+        return -1;
+    }
+
+    if (dispCount < DISP_MAX)
+    {
+        dispArray[dispCount] = _display;
+        _dispCount = dispCount;
+        dispCount++;
+    }
+    else
+    {
+        return -1;
+    }
+
+    if ((1 < left || left < 0) || (1 < top || top < 0) ||
+        (1 < right || right < 0) || (1 < bottom || bottom < 0))
+    {
+        return -1;
+    }
+
+    // calculate position and size of rendered video
+    int x, y;
+    unsigned int winWidth, winHeight, borderwidth, depth;
+    Window rootret;
+    if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
+                     &winHeight, &borderwidth, &depth) == 0)
+    {
+        return -1;
+    }
+
+    _xPos = (int32_t) (winWidth * left);
+    _yPos = (int32_t) (winHeight * top);
+    _outWidth = (int32_t) (winWidth * (right - left));
+    _outHeight = (int32_t) (winHeight * (bottom - top));
+    if (_outWidth % 2)
+        _outWidth++; // the renderer wants sizes that are multiples of two
+    if (_outHeight % 2)
+        _outHeight++;
+
+    _gc = XCreateGC(_display, _window, 0, 0);
+    if (!_gc) {
+      // Failed to create the graphics context.
+      assert(false);
+      return -1;
+    }
+
+    if (CreateLocalRenderer(winWidth, winHeight) == -1)
+    {
+        return -1;
+    }
+    return 0;
+
+}
+
+int32_t VideoX11Channel::ChangeWindow(Window window)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_crit);
+
+    // Stop the rendering, if we are rendering...
+    RemoveRenderer();
+    _window = window;
+
+    // calculate position and size of rendered video
+    int x, y;
+    unsigned int winWidth, winHeight, borderwidth, depth;
+    Window rootret;
+    if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
+                     &winHeight, &borderwidth, &depth) == -1)
+    {
+        return -1;
+    }
+    _xPos = (int) (winWidth * _left);
+    _yPos = (int) (winHeight * _top);
+    _outWidth = (int) (winWidth * (_right - _left));
+    _outHeight = (int) (winHeight * (_bottom - _top));
+    if (_outWidth % 2)
+        _outWidth++; // the renderer wants sizes that are multiples of two
+    if (_outHeight % 2)
+        _outHeight++;
+
+    // Prepare rendering using the new window.
+    if (CreateLocalRenderer(_width, _height) == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+int32_t VideoX11Channel::ReleaseWindow()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_crit);
+
+    RemoveRenderer();
+    if (_gc) {
+      XFreeGC(_display, _gc);
+      _gc = NULL;
+    }
+    if (_display)
+    {
+        XCloseDisplay(_display);
+        _display = NULL;
+    }
+    return 0;
+}
+
+int32_t VideoX11Channel::CreateLocalRenderer(int32_t width, int32_t height)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_crit);
+
+    if (!_window || !_display)
+    {
+        return -1;
+    }
+
+    if (_prepared)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _Id,
+                     "Renderer already prepared, exits.");
+        return -1;
+    }
+
+    _width = width;
+    _height = height;
+
+    // create shared memory image
+    _image = XShmCreateImage(_display, CopyFromParent, 24, ZPixmap, NULL,
+                             &_shminfo, _width, _height); // this parameter needs to be the same for some reason.
+    _shminfo.shmid = shmget(IPC_PRIVATE, (_image->bytes_per_line
+            * _image->height), IPC_CREAT | 0777);
+    _shminfo.shmaddr = _image->data = (char*) shmat(_shminfo.shmid, 0, 0);
+    if (_image->data == reinterpret_cast<char*>(-1))
+    {
+        return -1;
+    }
+    _buffer = (unsigned char*) _image->data;
+    _shminfo.readOnly = False;
+
+    // attach image to display
+    if (!XShmAttach(_display, &_shminfo))
+    {
+        //printf("XShmAttach failed !\n");
+        return -1;
+    }
+    XSync(_display, False);
+
+    _prepared = true;
+    return 0;
+}
+
+int32_t VideoX11Channel::RemoveRenderer()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+
+    if (!_prepared)
+    {
+        return 0;
+    }
+    _prepared = false;
+
+    // Free the memory.
+    XShmDetach(_display, &_shminfo);
+    XDestroyImage( _image );
+    _image = NULL;
+    shmdt(_shminfo.shmaddr);
+    _shminfo.shmaddr = NULL;
+    _buffer = NULL;
+    shmctl(_shminfo.shmid, IPC_RMID, 0);
+    _shminfo.shmid = 0;
+    return 0;
+}
+
+int32_t VideoX11Channel::GetStreamProperties(uint32_t& zOrder,
+                                             float& left, float& top,
+                                             float& right, float& bottom) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+
+    zOrder = 0; // no z-order support yet
+    left = _left;
+    top = _top;
+    right = _right;
+    bottom = _bottom;
+
+    return 0;
+}
+
+
+}  // namespace webrtc
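Reviewer note: CreateLocalRenderer() and RemoveRenderer() above must stay strictly paired, since the X server holds its own reference to the shared-memory segment. For reviewers less familiar with MIT-SHM, a condensed sketch of the lifecycle this file implements, assuming an already-open Display* d, a Window w, a GC gc, and a frame size (error handling elided):

    XShmSegmentInfo shminfo;
    XImage* image = XShmCreateImage(d, CopyFromParent, 24, ZPixmap,
                                    NULL, &shminfo, width, height);
    shminfo.shmid = shmget(IPC_PRIVATE,
                           image->bytes_per_line * image->height,
                           IPC_CREAT | 0777);
    shminfo.shmaddr = image->data = (char*) shmat(shminfo.shmid, NULL, 0);
    shminfo.readOnly = False;
    XShmAttach(d, &shminfo);   // the server maps the segment
    XSync(d, False);
    // Per frame: fill image->data, then
    XShmPutImage(d, w, gc, image, 0, 0, 0, 0, width, height, True);
    // Teardown, mirroring RemoveRenderer():
    XShmDetach(d, &shminfo);
    XDestroyImage(image);      // frees the XImage, not the segment
    shmdt(shminfo.shmaddr);
    shmctl(shminfo.shmid, IPC_RMID, NULL);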
diff --git a/webrtc/modules/video_render/linux/video_x11_channel.h b/webrtc/modules/video_render/linux/video_x11_channel.h
new file mode 100644
index 0000000..6eb402e
--- /dev/null
+++ b/webrtc/modules/video_render/linux/video_x11_channel.h
@@ -0,0 +1,96 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
+
+#include <sys/shm.h>
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <X11/extensions/XShm.h>
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+#define DEFAULT_RENDER_FRAME_WIDTH 352
+#define DEFAULT_RENDER_FRAME_HEIGHT 288
+
+
+class VideoX11Channel: public VideoRenderCallback
+{
+public:
+    VideoX11Channel(int32_t id);
+
+    virtual ~VideoX11Channel();
+
+    virtual int32_t RenderFrame(const uint32_t streamId,
+                                const VideoFrame& videoFrame);
+
+    int32_t FrameSizeChange(int32_t width, int32_t height,
+                            int32_t numberOfStreams);
+    int32_t DeliverFrame(const VideoFrame& videoFrame);
+    int32_t GetFrameSize(int32_t& width, int32_t& height);
+    int32_t Init(Window window, float left, float top, float right,
+                 float bottom);
+    int32_t ChangeWindow(Window window);
+    int32_t
+            GetStreamProperties(uint32_t& zOrder, float& left,
+                                float& top, float& right, float& bottom) const;
+    int32_t ReleaseWindow();
+
+    bool IsPrepared()
+    {
+        return _prepared;
+    }
+
+private:
+
+    int32_t
+            CreateLocalRenderer(int32_t width, int32_t height);
+    int32_t RemoveRenderer();
+
+    // FIXME: Find a better place for this method? GetWidthHeight is no
+    // longer supported by common_video.
+    int GetWidthHeight(VideoType type, int bufferSize, int& width,
+                       int& height);
+
+    CriticalSectionWrapper& _crit;
+
+    Display* _display;
+    XShmSegmentInfo _shminfo;
+    XImage* _image;
+    Window _window;
+    GC _gc;
+    int32_t _width; // incoming frame width
+    int32_t _height; // incoming frame height
+    int32_t _outWidth; // render frame width
+    int32_t _outHeight; // render frame height
+    int32_t _xPos; // position within window
+    int32_t _yPos;
+    bool _prepared; // true if ready to use
+    int32_t _dispCount;
+
+    unsigned char* _buffer;
+    float _top;
+    float _left;
+    float _right;
+    float _bottom;
+
+    int32_t _Id;
+
+};
+
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
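Reviewer note: VideoX11Channel is only ever handed back to callers as a VideoRenderCallback (see AddIncomingRenderStream() in video_render_linux_impl.cc), so the contract the rest of the pipeline depends on is just RenderFrame(). A minimal sketch of an alternative sink implementing the same contract; the signature is taken from this patch, while the body and LogFrame() are illustrative only:

    class NullRenderSink : public webrtc::VideoRenderCallback {
     public:
      virtual int32_t RenderFrame(const uint32_t streamId,
                                  const webrtc::VideoFrame& videoFrame) {
        // Hypothetical logging hook; a real sink would draw the frame.
        LogFrame(streamId, videoFrame.width(), videoFrame.height());
        return 0;  // 0 on success, -1 on render failure, per this module
      }
    };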
diff --git a/webrtc/modules/video_render/linux/video_x11_render.cc b/webrtc/modules/video_render/linux/video_x11_render.cc
new file mode 100644
index 0000000..5eb4f36
--- /dev/null
+++ b/webrtc/modules/video_render/linux/video_x11_render.cc
@@ -0,0 +1,153 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_render/linux/video_x11_channel.h"
+#include "webrtc/modules/video_render/linux/video_x11_render.h"
+
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+VideoX11Render::VideoX11Render(Window window) :
+    _window(window),
+            _critSect(*CriticalSectionWrapper::CreateCriticalSection())
+{
+}
+
+VideoX11Render::~VideoX11Render()
+{
+    delete &_critSect;
+}
+
+int32_t VideoX11Render::Init()
+{
+    CriticalSectionScoped cs(&_critSect);
+
+    _streamIdToX11ChannelMap.clear();
+
+    return 0;
+}
+
+int32_t VideoX11Render::ChangeWindow(Window window)
+{
+    CriticalSectionScoped cs(&_critSect);
+    VideoX11Channel* renderChannel = NULL;
+
+    std::map<int, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.begin();
+
+    while (iter != _streamIdToX11ChannelMap.end())
+    {
+        renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->ChangeWindow(window);
+        }
+        iter++;
+    }
+
+    _window = window;
+
+    return 0;
+}
+
+VideoX11Channel* VideoX11Render::CreateX11RenderChannel(
+                                                                int32_t streamId,
+                                                                int32_t zOrder,
+                                                                const float left,
+                                                                const float top,
+                                                                const float right,
+                                                                const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    VideoX11Channel* renderChannel = NULL;
+
+    std::map<int, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+
+    if (iter == _streamIdToX11ChannelMap.end())
+    {
+        renderChannel = new VideoX11Channel(streamId);
+        if (!renderChannel)
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideoRenderer,
+                         -1,
+                         "Failed to create VideoX11Channel for streamId : %d",
+                         streamId);
+            return NULL;
+        }
+        renderChannel->Init(_window, left, top, right, bottom);
+        _streamIdToX11ChannelMap[streamId] = renderChannel;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
+                     "Render Channel already exists for streamId: %d", streamId);
+        renderChannel = iter->second;
+    }
+
+    return renderChannel;
+}
+
+int32_t VideoX11Render::DeleteX11RenderChannel(int32_t streamId)
+{
+    CriticalSectionScoped cs(&_critSect);
+
+    std::map<int, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+    if (iter != _streamIdToX11ChannelMap.end())
+    {
+        VideoX11Channel *renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->ReleaseWindow();
+            delete renderChannel;
+            renderChannel = NULL;
+        }
+        _streamIdToX11ChannelMap.erase(iter);
+        return 0;
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "No VideoX11Channel object exists for stream id: %d",
+                 streamId);
+    return -1;
+}
+
+int32_t VideoX11Render::GetIncomingStreamProperties(
+                                                              int32_t streamId,
+                                                              uint32_t& zOrder,
+                                                              float& left,
+                                                              float& top,
+                                                              float& right,
+                                                              float& bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+
+    std::map<int, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+    if (iter != _streamIdToX11ChannelMap.end())
+    {
+        VideoX11Channel *renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->GetStreamProperties(zOrder, left, top, right, bottom);
+            return 0;
+        }
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "No VideoX11Channel object exists for stream id: %d",
+                 streamId);
+    return -1;
+}
+
+}  // namespace webrtc
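Reviewer note: a short sketch of the create/delete round trip through the stream-id map above; all names come from this patch, and w is assumed to be a valid X11 Window:

    webrtc::VideoX11Render render(w);
    render.Init();
    webrtc::VideoX11Channel* channel = render.CreateX11RenderChannel(
        1 /* streamId */, 0 /* zOrder */, 0.0f, 0.0f, 1.0f, 1.0f);
    // Creating the same streamId again returns the existing channel.
    // ...
    render.DeleteX11RenderChannel(1);  // releases the window resources

Note that zOrder is accepted but not yet honored; GetStreamProperties() always reports 0.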
diff --git a/webrtc/modules/video_render/linux/video_x11_render.h b/webrtc/modules/video_render/linux/video_x11_render.h
new file mode 100644
index 0000000..23b83bd
--- /dev/null
+++ b/webrtc/modules/video_render/linux/video_x11_render.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
+
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+#include <X11/Xlib.h>
+#include <map>
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class VideoX11Channel;
+
+class VideoX11Render
+{
+
+public:
+    VideoX11Render(Window window);
+    ~VideoX11Render();
+
+    int32_t Init();
+    int32_t ChangeWindow(Window window);
+
+    VideoX11Channel* CreateX11RenderChannel(int32_t streamId,
+                                            int32_t zOrder,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom);
+
+    int32_t DeleteX11RenderChannel(int32_t streamId);
+
+    int32_t GetIncomingStreamProperties(int32_t streamId,
+                                        uint32_t& zOrder,
+                                        float& left, float& top,
+                                        float& right, float& bottom);
+
+private:
+    Window _window;
+    CriticalSectionWrapper& _critSect;
+    std::map<int, VideoX11Channel*> _streamIdToX11ChannelMap;
+
+};
+
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
diff --git a/webrtc/modules/video_render/mac/cocoa_full_screen_window.h b/webrtc/modules/video_render/mac/cocoa_full_screen_window.h
new file mode 100644
index 0000000..c8e98bb
--- /dev/null
+++ b/webrtc/modules/video_render/mac/cocoa_full_screen_window.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  cocoa_full_screen_window.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
+
+#import <Cocoa/Cocoa.h>
+//#define GRAB_ALL_SCREENS 1
+
+@interface CocoaFullScreenWindow : NSObject {
+	NSWindow*			_window;
+}
+
+-(id)init;
+-(void)grabFullScreen;
+-(void)releaseFullScreen;
+-(NSWindow*)window;
+
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
diff --git a/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm b/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm
new file mode 100644
index 0000000..b57223b
--- /dev/null
+++ b/webrtc/modules/video_render/mac/cocoa_full_screen_window.mm
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_render/mac/cocoa_full_screen_window.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+@implementation CocoaFullScreenWindow
+
+-(id)init{	
+	
+	self = [super init];
+	if(!self){
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d COULD NOT CREATE INSTANCE", __FUNCTION__, __LINE__); 
+		return nil;
+	}
+	
+	
+	WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, 0, "%s:%d Created instance", __FUNCTION__, __LINE__); 
+	return self;
+}
+
+-(void)grabFullScreen{
+	
+#ifdef GRAB_ALL_SCREENS
+	if(CGCaptureAllDisplays() != kCGErrorSuccess)
+#else
+	if(CGDisplayCapture(kCGDirectMainDisplay) != kCGErrorSuccess)
+#endif
+	{
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not capture main level", __FUNCTION__, __LINE__); 
+	}
+	
+	// get the shielding window level
+	int windowLevel = CGShieldingWindowLevel();
+	
+	// get the screen rect of main display
+	NSRect screenRect = [[NSScreen mainScreen]frame];
+	
+	_window = [[NSWindow alloc]initWithContentRect:screenRect 
+										   styleMask:NSBorderlessWindowMask
+											 backing:NSBackingStoreBuffered
+											   defer:NO
+											  screen:[NSScreen mainScreen]];
+	
+	[_window setLevel:windowLevel];
+	[_window setBackgroundColor:[NSColor blackColor]];
+	[_window makeKeyAndOrderFront:nil];
+
+}
+ 
+-(void)releaseFullScreen
+{
+	[_window orderOut:self];
+	
+#ifdef GRAB_ALL_SCREENS
+	if(CGReleaseAllDisplays() != kCGErrorSuccess)
+#else
+	if(CGDisplayRelease(kCGDirectMainDisplay) != kCGErrorSuccess)
+#endif
+	{
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not release the displays", __FUNCTION__, __LINE__); 
+	}		
+}
+
+- (NSWindow*)window
+{
+  return _window;
+}
+
+- (void) dealloc
+{
+	[self releaseFullScreen];
+	[super dealloc];
+}	
+
+
+	
+@end
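Reviewer note: the grab/release pair above is built on the CoreGraphics display-capture API, which is plain C and equally callable from C++. A stripped-down sketch of the same sequence without the NSWindow, showing only the capture and the shielding level the window is layered at:

    #include <ApplicationServices/ApplicationServices.h>

    // Sketch only: capture the main display, query the shielding level,
    // then release. GRAB_ALL_SCREENS swaps in the all-display variants.
    if (CGDisplayCapture(kCGDirectMainDisplay) == kCGErrorSuccess) {
      CGWindowLevel level = CGShieldingWindowLevel();
      // ... create a borderless, black window at 'level' and render ...
      CGDisplayRelease(kCGDirectMainDisplay);
    }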
diff --git a/webrtc/modules/video_render/mac/cocoa_render_view.h b/webrtc/modules/video_render/mac/cocoa_render_view.h
new file mode 100644
index 0000000..15a8108
--- /dev/null
+++ b/webrtc/modules/video_render/mac/cocoa_render_view.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  cocoa_render_view.h
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
+
+#import <Cocoa/Cocoa.h>
+#import <OpenGL/gl.h>
+#import <OpenGL/glu.h>
+#import <OpenGL/OpenGL.h>
+
+@interface CocoaRenderView : NSOpenGLView {
+  NSOpenGLContext* _nsOpenGLContext;
+}
+
+-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt;
+-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt;
+-(NSOpenGLContext*)nsOpenGLContext;
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
diff --git a/webrtc/modules/video_render/mac/cocoa_render_view.mm b/webrtc/modules/video_render/mac/cocoa_render_view.mm
new file mode 100644
index 0000000..4631ff3
--- /dev/null
+++ b/webrtc/modules/video_render/mac/cocoa_render_view.mm
@@ -0,0 +1,55 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Cocoa/Cocoa.h>
+#import <AppKit/AppKit.h>
+
+#include "webrtc/modules/video_render/mac/cocoa_render_view.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+@implementation CocoaRenderView
+
+-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt{
+	
+    self = [super initWithFrame:[self frame] pixelFormat:fmt];
+	if (self == nil){
+		
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__); 
+	}
+	
+	
+	_nsOpenGLContext = [self openGLContext];
+
+}
+
+-(NSOpenGLContext*)nsOpenGLContext {
+    return _nsOpenGLContext;
+}
+
+-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt{
+	
+	NSRect screenRect = [[NSScreen mainScreen]frame];
+//	[_windowRef setFrame:screenRect];
+//	[_windowRef setBounds:screenRect];
+    self = [super initWithFrame:screenRect pixelFormat:fmt];
+	if (self == nil){
+		
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__); 
+	}
+	
+	_nsOpenGLContext = [self openGLContext];
+
+}
+
+@end
+
+
diff --git a/webrtc/modules/video_render/mac/video_render_agl.cc b/webrtc/modules/video_render/mac/video_render_agl.cc
new file mode 100644
index 0000000..3243563
--- /dev/null
+++ b/webrtc/modules/video_render/mac/video_render_agl.cc
@@ -0,0 +1,1987 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+
+#if defined(CARBON_RENDERING)
+
+#include "webrtc/modules/video_render/mac/video_render_agl.h"
+
+//  includes
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+/*
+ *
+ *    VideoChannelAGL
+ *
+ */
+
+#pragma mark VideoChannelAGL constructor
+
+VideoChannelAGL::VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner) :
+    _aglContext( aglContext),
+    _id( iId),
+    _owner( owner),
+    _width( 0),
+    _height( 0),
+    _stretchedWidth( 0),
+    _stretchedHeight( 0),
+    _startWidth( 0.0f),
+    _startHeight( 0.0f),
+    _stopWidth( 0.0f),
+    _stopHeight( 0.0f),
+    _xOldWidth( 0),
+    _yOldHeight( 0),
+    _oldStretchedHeight(0),
+    _oldStretchedWidth( 0),
+    _buffer( 0),
+    _bufferSize( 0),
+    _incomingBufferSize(0),
+    _bufferIsUpdated( false),
+    _sizeInitialized( false),
+    _numberOfStreams( 0),
+    _bVideoSizeStartedChanging(false),
+    _pixelFormat( GL_RGBA),
+    _pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
+    _texture( 0)
+
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Constructor", __FUNCTION__, __LINE__);
+}
+
+VideoChannelAGL::~VideoChannelAGL()
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Destructor", __FUNCTION__, __LINE__);
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _buffer = NULL;
+    }
+
+    aglSetCurrentContext(_aglContext);
+
+    if (_texture != 0)
+    {
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+}
+
+int32_t VideoChannelAGL::RenderFrame(const uint32_t streamId,
+                                     VideoFrame& videoFrame) {
+  _owner->LockAGLCntx();
+  if (_width != videoFrame.width() ||
+      _height != videoFrame.height()) {
+    if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d FrameSize
+                   Change returned an error", __FUNCTION__, __LINE__);
+      _owner->UnlockAGLCntx();
+      return -1;
+    }
+  }
+
+  _owner->UnlockAGLCntx();
+  return DeliverFrame(videoFrame);
+}
+
+int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
+{
+    _owner->LockAGLCntx();
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelAGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
+{
+
+    _owner->LockAGLCntx();
+    _stretchedHeight = stretchHeight;
+    _stretchedWidth = stretchWidth;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelAGL::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    //  We'll get a new frame size from VideoAPI, prepare the buffer
+
+    _owner->LockAGLCntx();
+
+    if (width == _width && _height == height)
+    {
+        // We already have a correct buffer size
+        _numberOfStreams = numberOfStreams;
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    _width = width;
+    _height = height;
+
+    // Delete the old buffer, create a new one with correct size.
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _bufferSize = 0;
+    }
+
+    _incomingBufferSize = CalcBufferSize(kI420, _width, _height);
+    _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
+    _buffer = new unsigned char [_bufferSize];
+    memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
+
+    if (aglSetCurrentContext(_aglContext) == false)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Delete a possible old texture
+    if (_texture != 0)
+    {
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+
+    // Create a new texture
+    glGenTextures(1, (GLuint *) &_texture);
+
+    GLenum glErr = glGetError();
+
+    if (glErr != GL_NO_ERROR)
+    {
+    }
+
+    // Do the setup for both textures
+    // Note: we setup two textures even if we're not running full screen
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+
+    // Set texture parameters
+    glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
+
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+    //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+
+    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
+
+    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
+
+    // Maximum width/height for a texture
+    GLint texSize;
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
+
+    if (texSize < _width || texSize < _height)
+    {
+        // Image too big for memory
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Set up the texture type and size
+    glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
+            0, // level
+            GL_RGBA, // internal format
+            _width, // width
+            _height, // height
+            0, // border 0/1 = off/on
+            _pixelFormat, // format, GL_BGRA
+            _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
+            _buffer); // pixel data
+
+    glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// Called from video engine when a new frame should be rendered.
+int VideoChannelAGL::DeliverFrame(const VideoFrame& videoFrame) {
+  _owner->LockAGLCntx();
+
+  if (_texture == 0) {
+    _owner->UnlockAGLCntx();
+    return 0;
+  }
+
+  if (CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()) !=
+      _incomingBufferSize) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  // Setting stride = width.
+  int rgbret = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer);
+  if (rgbret < 0) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  aglSetCurrentContext(_aglContext);
+
+  // Put the new frame into the graphic card texture.
+  // Make sure this texture is the active one
+  glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+  GLenum glErr = glGetError();
+  if (glErr != GL_NO_ERROR) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  // Copy buffer to texture
+  glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+                  0, // Level, not used
+                  0, // start point x (lower left of pic)
+                  0, // start point y
+                  _width, // width
+                  _height, // height
+                  _pixelFormat, // picture format for _buffer
+                  _pixelDataType, // data type of _buffer
+                  (const GLvoid*) _buffer); // the pixel data
+
+  if (glGetError() != GL_NO_ERROR) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  _bufferIsUpdated = true;
+  _owner->UnlockAGLCntx();
+
+  return 0;
+}
+
+int VideoChannelAGL::RenderOffScreenBuffer()
+{
+
+    _owner->LockAGLCntx();
+
+    if (_texture == 0)
+    {
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    GLfloat xStart = 2.0f * _startWidth - 1.0f;
+    GLfloat xStop = 2.0f * _stopWidth - 1.0f;
+    GLfloat yStart = 1.0f - 2.0f * _stopHeight;
+    GLfloat yStop = 1.0f - 2.0f * _startHeight;
+
+    aglSetCurrentContext(_aglContext);
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+
+    if(_stretchedWidth != _oldStretchedWidth || _stretchedHeight != _oldStretchedHeight)
+    {
+        glViewport(0, 0, _stretchedWidth, _stretchedHeight);
+    }
+    _oldStretchedHeight = _stretchedHeight;
+    _oldStretchedWidth = _stretchedWidth;
+
+    // Now really put the texture into the framebuffer
+    glLoadIdentity();
+
+    glEnable(GL_TEXTURE_RECTANGLE_EXT);
+
+    glBegin(GL_POLYGON);
+    {
+        glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
+        glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
+        glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
+        glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
+    }
+    glEnd();
+
+    glDisable(GL_TEXTURE_RECTANGLE_EXT);
+
+    _bufferIsUpdated = false;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelAGL::IsUpdated(bool& isUpdated)
+{
+    _owner->LockAGLCntx();
+    isUpdated = _bufferIsUpdated;
+    _owner->UnlockAGLCntx();
+
+    return 0;
+}
+
+int VideoChannelAGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    _owner->LockAGLCntx();
+
+    _startWidth = startWidth;
+    _stopWidth = stopWidth;
+    _startHeight = startHeight;
+    _stopHeight = stopHeight;
+
+    int oldWidth = _width;
+    int oldHeight = _height;
+    int oldNumberOfStreams = _numberOfStreams;
+
+    _width = 0;
+    _height = 0;
+
+    int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
+
+    _owner->UnlockAGLCntx();
+
+    return retVal;
+}
+
+int VideoChannelAGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
+{
+    return -1;
+}
+
+#pragma mark VideoRenderAGL WindowRef constructor
+
+VideoRenderAGL::VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId) :
+_hiviewRef( 0),
+_windowRef( windowRef),
+_fullScreen( fullscreen),
+_id( iId),
+_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
+_screenUpdateEvent( 0),
+_isHIViewRef( false),
+_aglContext( 0),
+_windowWidth( 0),
+_windowHeight( 0),
+_lastWindowWidth( -1),
+_lastWindowHeight( -1),
+_lastHiViewWidth( -1),
+_lastHiViewHeight( -1),
+_currentParentWindowHeight( 0),
+_currentParentWindowWidth( 0),
+_currentParentWindowBounds( ),
+_windowHasResized( false),
+_lastParentWindowBounds( ),
+_currentHIViewBounds( ),
+_lastHIViewBounds( ),
+_windowRect( ),
+_aglChannels( ),
+_zOrderToChannel( ),
+_hiviewEventHandlerRef( NULL),
+_windowEventHandlerRef( NULL),
+_currentViewBounds( ),
+_lastViewBounds( ),
+_renderingIsPaused( false)
+
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s");
+
+    _screenUpdateThread.reset(
+        new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
+    _screenUpdateEvent = EventWrapper::Create();
+
+    if(!IsValidWindowPtr(_windowRef))
+    {
+        //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef:0x%x", __FUNCTION__, __LINE__, _windowRef);
+    }
+    else
+    {
+        //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d WindowRef 0x%x is valid", __FUNCTION__, __LINE__, _windowRef);
+    }
+
+    GetWindowRect(_windowRect);
+
+    _lastViewBounds.origin.x = 0;
+    _lastViewBounds.origin.y = 0;
+    _lastViewBounds.size.width = 0;
+    _lastViewBounds.size.height = 0;
+
+}
+
+// This is a static function registered (in the class constructor) to be called on various window redrawing or resizing events.
+// Since it is a static method, "this" is passed in as the userData parameter (the one and only allowed), and member methods are called through it.
+#pragma mark WindowRef Event Handler
+pascal OSStatus VideoRenderAGL::sHandleWindowResized (EventHandlerCallRef /*nextHandler*/,
+        EventRef theEvent,
+        void* userData)
+{
+    WindowRef windowRef = NULL;
+
+    int eventType = GetEventKind(theEvent);
+
+    // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
+    GetEventParameter (theEvent,
+            kEventParamDirectObject,
+            typeWindowRef,
+            NULL,
+            sizeof (WindowRef),
+            NULL,
+            &windowRef);
+
+    VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
+
+    bool updateUI = true;
+    if(kEventWindowBoundsChanged == eventType)
+    {
+    }
+    else if(kEventWindowBoundsChanging == eventType)
+    {
+    }
+    else if(kEventWindowZoomed == eventType)
+    {
+    }
+    else if(kEventWindowExpanding == eventType)
+    {
+    }
+    else if(kEventWindowExpanded == eventType)
+    {
+    }
+    else if(kEventWindowClickResizeRgn == eventType)
+    {
+    }
+    else if(kEventWindowClickDragRgn == eventType)
+    {
+    }
+    else
+    {
+        updateUI = false;
+    }
+
+    if(true == updateUI)
+    {
+        obj->ParentWindowResized(windowRef);
+        obj->UpdateClipping();
+        obj->RenderOffScreenBuffers();
+    }
+
+    return noErr;
+}
+
+#pragma mark VideoRenderAGL HIViewRef constructor
+
+VideoRenderAGL::VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId) :
+_hiviewRef( windowRef),
+_windowRef( 0),
+_fullScreen( fullscreen),
+_id( iId),
+_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
+_screenUpdateEvent( 0),
+_isHIViewRef( false),
+_aglContext( 0),
+_windowWidth( 0),
+_windowHeight( 0),
+_lastWindowWidth( -1),
+_lastWindowHeight( -1),
+_lastHiViewWidth( -1),
+_lastHiViewHeight( -1),
+_currentParentWindowHeight( 0),
+_currentParentWindowWidth( 0),
+_currentParentWindowBounds( ),
+_windowHasResized( false),
+_lastParentWindowBounds( ),
+_currentHIViewBounds( ),
+_lastHIViewBounds( ),
+_windowRect( ),
+_aglChannels( ),
+_zOrderToChannel( ),
+_hiviewEventHandlerRef( NULL),
+_windowEventHandlerRef( NULL),
+_currentViewBounds( ),
+_lastViewBounds( ),
+_renderingIsPaused( false)
+{
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Constructor", __FUNCTION__, __LINE__);
+    //    _renderCritSec = CriticalSectionWrapper::CreateCriticalSection();
+
+    _screenUpdateThread.reset(new rtc::PlatformThread(
+        ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
+    _screenUpdateEvent = EventWrapper::Create();
+
+    GetWindowRect(_windowRect);
+
+    _lastViewBounds.origin.x = 0;
+    _lastViewBounds.origin.y = 0;
+    _lastViewBounds.size.width = 0;
+    _lastViewBounds.size.height = 0;
+
+#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
+    // This gets the parent window of the HIViewRef that's passed in and installs a WindowRef event handler on it
+    // The event handler looks for window resize events and adjusts the offset of the controls.
+
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef's parent window", __FUNCTION__, __LINE__);
+
+
+    static const EventTypeSpec windowEventTypes[] =
+    {
+        kEventClassWindow, kEventWindowBoundsChanged,
+        kEventClassWindow, kEventWindowBoundsChanging,
+        kEventClassWindow, kEventWindowZoomed,
+        kEventClassWindow, kEventWindowExpanded,
+        kEventClassWindow, kEventWindowClickResizeRgn,
+        kEventClassWindow, kEventWindowClickDragRgn
+    };
+
+    WindowRef parentWindow = HIViewGetWindow(windowRef);
+
+    InstallWindowEventHandler (parentWindow,
+            NewEventHandlerUPP (sHandleWindowResized),
+            GetEventTypeCount(windowEventTypes),
+            windowEventTypes,
+            (void *) this, // this is an arbitrary parameter that will be passed on to your event handler when it is called later
+            &_windowEventHandlerRef);
+
+#endif
+
+#ifdef NEW_HIVIEW_EVENT_HANDLER
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef", __FUNCTION__, __LINE__);
+
+    static const EventTypeSpec hiviewEventTypes[] =
+    {
+        kEventClassControl, kEventControlBoundsChanged,
+        kEventClassControl, kEventControlDraw
+        //			kEventControlDragLeave
+        //			kEventControlDragReceive
+        //			kEventControlGetFocusPart
+        //			kEventControlApplyBackground
+        //			kEventControlDraw
+        //			kEventControlHit
+
+    };
+
+    HIViewInstallEventHandler(_hiviewRef,
+            NewEventHandlerUPP(sHandleHiViewResized),
+            GetEventTypeCount(hiviewEventTypes),
+            hiviewEventTypes,
+            (void *) this,
+            &_hiviewEventHandlerRef);
+
+#endif
+}
+
+// This is a static function registered (in the constructor) to be called on various window redrawing or resizing events.
+// Since it is a static method, "this" is passed in as the userData parameter (the one and only allowed), and member methods are called through it.
+#pragma mark HIViewRef Event Handler
+pascal OSStatus VideoRenderAGL::sHandleHiViewResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData)
+{
+    //static int      callbackCounter = 1;
+    HIViewRef hiviewRef = NULL;
+
+    // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
+    int eventType = GetEventKind(theEvent);
+    OSStatus status = noErr;
+    status = GetEventParameter (theEvent,
+            kEventParamDirectObject,
+            typeControlRef,
+            NULL,
+            sizeof (ControlRef),
+            NULL,
+            &hiviewRef);
+
+    VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
+    WindowRef parentWindow = HIViewGetWindow(hiviewRef);
+    bool updateUI = true;
+
+    if(kEventControlBoundsChanged == eventType)
+    {
+    }
+    else if(kEventControlDraw == eventType)
+    {
+    }
+    else
+    {
+        updateUI = false;
+    }
+
+    if(true == updateUI)
+    {
+        obj->ParentWindowResized(parentWindow);
+        obj->UpdateClipping();
+        obj->RenderOffScreenBuffers();
+    }
+
+    return status;
+}
+
+VideoRenderAGL::~VideoRenderAGL()
+{
+
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Destructor", __FUNCTION__, __LINE__);
+
+
+#ifdef USE_EVENT_HANDLERS
+    // remove event handlers
+    OSStatus status;
+    if(_isHIViewRef)
+    {
+        status = RemoveEventHandler(_hiviewEventHandlerRef);
+    }
+    else
+    {
+        status = RemoveEventHandler(_windowEventHandlerRef);
+    }
+    if(noErr != status)
+    {
+        if(_isHIViewRef)
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
+        }
+        else
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
+        }
+    }
+
+#endif
+
+    OSStatus status;
+#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
+    if(_windowEventHandlerRef)
+    {
+        status = RemoveEventHandler(_windowEventHandlerRef);
+        if(status != noErr)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
+        }
+    }
+#endif
+
+#ifdef NEW_HIVIEW_EVENT_HANDLER
+    if(_hiviewEventHandlerRef)
+    {
+        status = RemoveEventHandler(_hiviewEventHandlerRef);
+        if(status != noErr)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
+        }
+    }
+#endif
+
+    // Signal event to exit thread, then delete it
+    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
+
+    if (tmpPtr)
+    {
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        tmpPtr->Stop();
+        delete tmpPtr;
+        delete _screenUpdateEvent;
+        _screenUpdateEvent = NULL;
+    }
+
+    if (_aglContext != 0)
+    {
+        aglSetCurrentContext(_aglContext);
+        aglDestroyContext(_aglContext);
+        _aglContext = 0;
+    }
+
+    // Delete all channels
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+    while (it!= _aglChannels.end())
+    {
+        delete it->second;
+        _aglChannels.erase(it);
+        it = _aglChannels.begin();
+    }
+    _aglChannels.clear();
+
+    // Clean the zOrder map
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while(zIt != _zOrderToChannel.end())
+    {
+        _zOrderToChannel.erase(zIt);
+        zIt = _zOrderToChannel.begin();
+    }
+    _zOrderToChannel.clear();
+
+    //delete _renderCritSec;
+
+
+}
+
+int VideoRenderAGL::GetOpenGLVersion(int& aglMajor, int& aglMinor)
+{
+    aglGetVersion((GLint *) &aglMajor, (GLint *) &aglMinor);
+    return 0;
+}
+
+int VideoRenderAGL::Init()
+{
+    LockAGLCntx();
+
+    // Start rendering thread...
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        //WEBRTC_TRACE(kTraceError, "%s:%d Thread not created", __FUNCTION__, __LINE__);
+        return -1;
+    }
+    _screenUpdateThread->Start();
+    _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
+
+    // Start the event triggering the render process
+    unsigned int monitorFreq = 60;
+    _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
+
+    // Create mixing textures
+    if (CreateMixingContext() == -1)
+    {
+        //WEBRTC_TRACE(kTraceError, "%s:%d Could not create a mixing context", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+VideoChannelAGL* VideoRenderAGL::CreateAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    LockAGLCntx();
+
+    //WEBRTC_TRACE(kTraceInfo, "%s:%d Creating AGL channel: %d", __FUNCTION__, __LINE__, channel);
+
+    if (HasChannel(channel))
+    {
+        //WEBRTC_TRACE(kTraceError, "%s:%d Channel already exists", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return NULL;
+    }
+
+    if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
+    {
+        // There is already a channel using this zOrder
+        // TODO: Allow multiple channels with the same zOrder
+    }
+
+    VideoChannelAGL* newAGLChannel = new VideoChannelAGL(_aglContext, _id, this);
+
+    if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+    {
+        if (newAGLChannel)
+        {
+            delete newAGLChannel;
+            newAGLChannel = NULL;
+        }
+        //WEBRTC_LOG(kTraceError, "Could not create AGL channel");
+        //WEBRTC_TRACE(kTraceError, "%s:%d Could not create AGL channel", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return NULL;
+    }
+
+    _aglChannels[channel] = newAGLChannel;
+    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+
+    UnlockAGLCntx();
+    return newAGLChannel;
+}
+
+int VideoRenderAGL::DeleteAllAGLChannels()
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    //WEBRTC_TRACE(kTraceInfo, "%s:%d Deleting all AGL channels", __FUNCTION__, __LINE__);
+    //int i = 0 ;
+    std::map<int, VideoChannelAGL*>::iterator it;
+    it = _aglChannels.begin();
+
+    while (it != _aglChannels.end())
+    {
+        VideoChannelAGL* channel = it->second;
+        if (channel)
+        {
+            delete channel;
+        }
+
+        _aglChannels.erase(it);
+        it = _aglChannels.begin();
+    }
+    _aglChannels.clear();
+    return 0;
+}
+
+int VideoRenderAGL::DeleteAGLChannel(int channel)
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Deleting AGL channel %d", __FUNCTION__, __LINE__, channel);
+
+    std::map<int, VideoChannelAGL*>::iterator it;
+    it = _aglChannels.find(channel);
+    if (it != _aglChannels.end())
+    {
+        delete it->second;
+        _aglChannels.erase(it);
+    }
+    else
+    {
+        //WEBRTC_TRACE(kTraceWarning, "%s:%d Channel not found", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while( zIt != _zOrderToChannel.end())
+    {
+        if (zIt->second == channel)
+        {
+            _zOrderToChannel.erase(zIt);
+            break;
+        }
+        zIt++;
+    }
+
+    return 0;
+}
+
+int VideoRenderAGL::StopThread()
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
+
+    if (tmpPtr)
+    {
+        _screenUpdateEvent->Set();
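+        // Drop the render lock around Stop(): the update thread acquires the
+        // same lock in ScreenUpdateProcess(), so joining while holding it
+        // would deadlock.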
+        _renderCritSec.Leave();
+        tmpPtr->Stop();
+        delete tmpPtr;
+        _renderCritSec.Enter();
+    }
+
+    delete _screenUpdateEvent;
+    _screenUpdateEvent = NULL;
+
+    return 0;
+}
+
+bool VideoRenderAGL::IsFullScreen()
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+    return _fullScreen;
+}
+
+bool VideoRenderAGL::HasChannels()
+{
+
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    if (_aglChannels.begin() != _aglChannels.end())
+    {
+        return true;
+    }
+
+    return false;
+}
+
+bool VideoRenderAGL::HasChannel(int channel)
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
+    if (it != _aglChannels.end())
+    {
+        return true;
+    }
+
+    return false;
+}
+
+int VideoRenderAGL::GetChannels(std::list<int>& channelList)
+{
+
+    CriticalSectionScoped cs(&_renderCritSec);
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+
+    while (it != _aglChannels.end())
+    {
+        channelList.push_back(it->first);
+        it++;
+    }
+
+    return 0;
+}
+
+VideoChannelAGL* VideoRenderAGL::ConfigureAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
+
+    if (it != _aglChannels.end())
+    {
+        VideoChannelAGL* aglChannel = it->second;
+        if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+        {
+            return NULL;
+        }
+
+        std::multimap<int, int>::iterator it = _zOrderToChannel.begin();
+        while(it != _zOrderToChannel.end())
+        {
+            if (it->second == channel)
+            {
+                if (it->first != zOrder)
+                {
+                    _zOrderToChannel.erase(it);
+                    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+                }
+                break;
+            }
+            it++;
+        }
+        return aglChannel;
+    }
+
+    return NULL;
+}
+
+bool VideoRenderAGL::ScreenUpdateThreadProc(void* obj)
+{
+    return static_cast<VideoRenderAGL*>(obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderAGL::ScreenUpdateProcess()
+{
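+    // Woken either by the periodic render timer (see Init()/StartRender())
+    // or by Set() during shutdown; the 100 ms cap bounds shutdown latency.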
+    _screenUpdateEvent->Wait(100);
+
+    LockAGLCntx();
+
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        return false;
+    }
+
+    if (aglSetCurrentContext(_aglContext) == GL_FALSE)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+            || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        // We have a new window size, update the context.
+        if (aglUpdateContext(_aglContext) == GL_FALSE)
+        {
+            UnlockAGLCntx();
+            return true;
+        }
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+    }
+
+    // This section polls for window size changes. Polling could hit an
+    // invalid windowRef, so the logic was moved to the window event handler;
+    // this fallback is only compiled when that handler is disabled.
+#ifndef NEW_HIVIEW_PARENT_EVENT_HANDLER
+    if (_isHIViewRef)
+    {
+
+        if(FALSE == HIViewIsValid(_hiviewRef))
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalid windowRef", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+        WindowRef window = HIViewGetWindow(_hiviewRef);
+
+        if(FALSE == IsValidWindowPtr(window))
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalid WindowRef", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+        if (window == NULL)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d WindowRef = NULL", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+
+        if(FALSE == MacIsWindowVisible(window))
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d MacIsWindowVisible == FALSE. Returning early", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+
+        HIRect viewBounds; // Placement and size for HIView
+        int windowWidth = 0; // Parent window width
+        int windowHeight = 0; // Parent window height
+
+        // NOTE: Calling GetWindowBounds with kWindowStructureRgn will crash intermittently if the OS decides it needs to push the window into the background for a moment.
+        // To counter this, we get the titlebar height on class construction and then add it to the content region here. Content regions seem not to crash.
+        Rect contentBounds = {0, 0, 0, 0}; // The bounds for the parent window
+
+#if		defined(USE_CONTENT_RGN)
+        GetWindowBounds(window, kWindowContentRgn, &contentBounds);
+#elif	defined(USE_STRUCT_RGN)
+        GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
+#endif
+
+        Rect globalBounds = {0, 0, 0, 0}; // Global copy of the parent window bounds
+        globalBounds.top = contentBounds.top;
+        globalBounds.right = contentBounds.right;
+        globalBounds.bottom = contentBounds.bottom;
+        globalBounds.left = contentBounds.left;
+
+        windowHeight = globalBounds.bottom - globalBounds.top;
+        windowWidth = globalBounds.right - globalBounds.left;
+
+        // Get the size of the HIViewRef
+        HIViewGetBounds(_hiviewRef, &viewBounds);
+        HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
+
+        // Check if this is the first call..
+        if (_lastWindowHeight == -1 &&
+                _lastWindowWidth == -1)
+        {
+            _lastWindowWidth = windowWidth;
+            _lastWindowHeight = windowHeight;
+
+            _lastViewBounds.origin.x = viewBounds.origin.x;
+            _lastViewBounds.origin.y = viewBounds.origin.y;
+            _lastViewBounds.size.width = viewBounds.size.width;
+            _lastViewBounds.size.height = viewBounds.size.height;
+        }
+
+        bool resized = false;
+
+        // Check if parent window size has changed
+        if (windowHeight != _lastWindowHeight ||
+                windowWidth != _lastWindowWidth)
+        {
+            resized = true;
+        }
+
+        // Check if the HIView has new size or is moved in the parent window
+        if (_lastViewBounds.origin.x != viewBounds.origin.x ||
+                _lastViewBounds.origin.y != viewBounds.origin.y ||
+                _lastViewBounds.size.width != viewBounds.size.width ||
+                _lastViewBounds.size.height != viewBounds.size.height)
+        {
+            // The HiView is resized or has moved.
+            resized = true;
+        }
+
+        if (resized)
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Window has resized", __FUNCTION__, __LINE__);
+
+            // Calculate offset between the windows
+            // {x, y, width, height}, x,y = lower left corner
+            const GLint offs[4] =
+            {   (int)(0.5f + viewBounds.origin.x),
+                (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
+                viewBounds.size.width, viewBounds.size.height};
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d contentBounds	t:%d r:%d b:%d l:%d", __FUNCTION__, __LINE__,
+            //             contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d windowHeight=%d", __FUNCTION__, __LINE__, windowHeight);
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d offs[4] = %d, %d, %d, %d", __FUNCTION__, __LINE__, offs[0], offs[1], offs[2], offs[3]);
+
+            aglSetDrawable (_aglContext, GetWindowPort(window));
+            aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
+            aglEnable(_aglContext, AGL_BUFFER_RECT);
+
+            // We need to change the viewport too if the HIView size has changed
+            glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
+
+        }
+        _lastWindowWidth = windowWidth;
+        _lastWindowHeight = windowHeight;
+
+        _lastViewBounds.origin.x = viewBounds.origin.x;
+        _lastViewBounds.origin.y = viewBounds.origin.y;
+        _lastViewBounds.size.width = viewBounds.size.width;
+        _lastViewBounds.size.height = viewBounds.size.height;
+
+    }
+#endif
+    if (_fullScreen)
+    {
+        // TODO
+        // We use double buffers, must always update
+        //RenderOffScreenBuffersToBackBuffer();
+    }
+    else
+    {
+        // Check if there are any updated buffers
+        bool updated = false;
+
+        // TODO: check if window size is updated!
+        // TODO Improvement: Walk through the zOrder Map to only render the ones in need of update
+        std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+        while (it != _aglChannels.end())
+        {
+
+            VideoChannelAGL* aglChannel = it->second;
+            aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
+            aglChannel->IsUpdated(updated);
+            if (updated)
+            {
+                break;
+            }
+            it++;
+        }
+
+        if (updated)
+        {
+            // At least one buffer is updated; we need to repaint the texture
+            if (RenderOffScreenBuffers() != -1)
+            {
+                // MF
+                //SwapAndDisplayBuffers();
+            }
+            else
+            {
+                // Error updating the mixing texture, don't swap.
+            }
+        }
+    }
+
+    UnlockAGLCntx();
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving ScreenUpdateProcess()");
+    return true;
+}
+
+void VideoRenderAGL::ParentWindowResized(WindowRef window)
+{
+    //WEBRTC_LOG(kTraceDebug, "%s HIViewRef:%d owner window has resized", __FUNCTION__, (int)_hiviewRef);
+
+    LockAGLCntx();
+
+    // set flag
+    _windowHasResized = false;
+
+    if(FALSE == HIViewIsValid(_hiviewRef))
+    {
+        //WEBRTC_LOG(kTraceDebug, "invalid windowRef");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if(FALSE == IsValidWindowPtr(window))
+    {
+        //WEBRTC_LOG(kTraceError, "invalid windowRef");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if (window == NULL)
+    {
+        //WEBRTC_LOG(kTraceError, "windowRef = NULL");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if(FALSE == MacIsWindowVisible(window))
+    {
+        //WEBRTC_LOG(kTraceDebug, "MacIsWindowVisible = FALSE. Returning early.");
+        UnlockAGLCntx();
+        return;
+    }
+
+    Rect contentBounds =
+    {   0, 0, 0, 0};
+
+#if		defined(USE_CONTENT_RGN)
+    GetWindowBounds(window, kWindowContentRgn, &contentBounds);
+#elif	defined(USE_STRUCT_RGN)
+    GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
+#endif
+
+    //WEBRTC_LOG(kTraceDebug, "%s contentBounds	t:%d r:%d b:%d l:%d", __FUNCTION__, contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
+
+    // update global vars
+    _currentParentWindowBounds.top = contentBounds.top;
+    _currentParentWindowBounds.left = contentBounds.left;
+    _currentParentWindowBounds.bottom = contentBounds.bottom;
+    _currentParentWindowBounds.right = contentBounds.right;
+
+    _currentParentWindowWidth = _currentParentWindowBounds.right - _currentParentWindowBounds.left;
+    _currentParentWindowHeight = _currentParentWindowBounds.bottom - _currentParentWindowBounds.top;
+
+    _windowHasResized = true;
+
+    // ********* update AGL offsets
+    HIRect viewBounds;
+    HIViewGetBounds(_hiviewRef, &viewBounds);
+    HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
+
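+    // AGL_BUFFER_RECT takes {x, y, width, height} with the origin in the
+    // window's lower-left corner, so the HIView's top-left-based origin is
+    // flipped using the parent window height.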
+    const GLint offs[4] =
+    {   (int)(0.5f + viewBounds.origin.x),
+        (int)(0.5f + _currentParentWindowHeight - (viewBounds.origin.y + viewBounds.size.height)),
+        viewBounds.size.width, viewBounds.size.height};
+    //WEBRTC_LOG(kTraceDebug, "%s _currentParentWindowHeight=%d", __FUNCTION__, _currentParentWindowHeight);
+    //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
+
+    aglSetCurrentContext(_aglContext);
+    aglSetDrawable (_aglContext, GetWindowPort(window));
+    aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
+    aglEnable(_aglContext, AGL_BUFFER_RECT);
+
+    // We need to change the viewport too if the HIView size has changed
+    glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
+
+    UnlockAGLCntx();
+
+    return;
+}
+
+int VideoRenderAGL::CreateMixingContext()
+{
+
+    LockAGLCntx();
+
+    //WEBRTC_LOG(kTraceDebug, "Entering CreateMixingContext()");
+
+    // Use both AGL_ACCELERATED and AGL_NO_RECOVERY to make sure
+    // a hardware renderer is used and not a software renderer.
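+    // (AGL_NO_RECOVERY disables the fallback to a software renderer when no
+    // matching hardware-accelerated pixel format is available.)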
+
+    GLint attributes[] =
+    {
+        AGL_DOUBLEBUFFER,
+        AGL_WINDOW,
+        AGL_RGBA,
+        AGL_NO_RECOVERY,
+        AGL_ACCELERATED,
+        AGL_RED_SIZE, 8,
+        AGL_GREEN_SIZE, 8,
+        AGL_BLUE_SIZE, 8,
+        AGL_ALPHA_SIZE, 8,
+        AGL_DEPTH_SIZE, 24,
+        AGL_NONE,
+    };
+
+    AGLPixelFormat aglPixelFormat;
+
+    // ***** Set up the OpenGL Context *****
+
+    // Get a pixel format for the attributes above
+    aglPixelFormat = aglChoosePixelFormat(NULL, 0, attributes);
+    if (NULL == aglPixelFormat)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not create pixel format");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Create an AGL context
+    _aglContext = aglCreateContext(aglPixelFormat, NULL);
+    if (_aglContext == NULL)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not create AGL context");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Release the pixel format memory
+    aglDestroyPixelFormat(aglPixelFormat);
+
+    // Set the current AGL context for the rest of the settings
+    if (aglSetCurrentContext(_aglContext) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set current context: %d", aglGetError());
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    if (_isHIViewRef)
+    {
+        //---------------------------
+        // BEGIN: new test code
+#if 0
+        // Don't use this one!
+        // There seems to be an OS X bug that can't handle
+        // movements and resizing of the parent window
+        // and or the HIView
+        if (aglSetHIViewRef(_aglContext,_hiviewRef) == false)
+        {
+            //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
+            UnlockAGLCntx();
+            return -1;
+        }
+#else
+
+        // Get the parent window for this control
+        WindowRef window = GetControlOwner(_hiviewRef);
+
+        Rect globalBounds =
+        {   0,0,0,0}; // The bounds for the parent window
+        HIRect viewBounds; // Placement in the parent window and size.
+        int windowHeight = 0;
+
+        //		Rect titleBounds = {0,0,0,0};
+        //		GetWindowBounds(window, kWindowTitleBarRgn, &titleBounds);
+        //		_titleBarHeight = titleBounds.top - titleBounds.bottom;
+        //		if(0 == _titleBarHeight)
+        //		{
+        //            //WEBRTC_LOG(kTraceError, "Titlebar height = 0");
+        //            //return -1;
+        //		}
+
+
+        // Get the bounds for the parent window
+#if		defined(USE_CONTENT_RGN)
+        GetWindowBounds(window, kWindowContentRgn, &globalBounds);
+#elif	defined(USE_STRUCT_RGN)
+        GetWindowBounds(window, kWindowStructureRgn, &globalBounds);
+#endif
+        windowHeight = globalBounds.bottom - globalBounds.top;
+
+        // Get the bounds for the HIView
+        HIViewGetBounds(_hiviewRef, &viewBounds);
+
+        HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
+
+        const GLint offs[4] =
+        {   (int)(0.5f + viewBounds.origin.x),
+            (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
+            viewBounds.size.width, viewBounds.size.height};
+
+        //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
+
+
+        aglSetDrawable (_aglContext, GetWindowPort(window));
+        aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
+        aglEnable(_aglContext, AGL_BUFFER_RECT);
+
+        GLint surfaceOrder = 1; // 1: above window, -1 below.
+        //OSStatus status = aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
+        aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
+
+        glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
+#endif
+
+    }
+    else
+    {
+        if(GL_FALSE == aglSetDrawable (_aglContext, GetWindowPort(_windowRef)))
+        {
+            //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
+            UnlockAGLCntx();
+            return -1;
+        }
+    }
+
+    _windowWidth = _windowRect.right - _windowRect.left;
+    _windowHeight = _windowRect.bottom - _windowRect.top;
+
+    // opaque surface
+    int surfaceOpacity = 1;
+    if (aglSetInteger(_aglContext, AGL_SURFACE_OPACITY, (const GLint *) &surfaceOpacity) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set surface opacity: %d", aglGetError());
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Swap interval: 1 -> sync with the vertical retrace (throttles rendering
+    // to the screen refresh rate); 0 -> don't sync.
+    int swapInterval = 0;
+    if (aglSetInteger(_aglContext, AGL_SWAP_INTERVAL, (const GLint *) &swapInterval) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set swap interval: %d", aglGetError());
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Update the rect with the current size
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not get window size");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Disable not needed functionality to increase performance
+    glDisable(GL_DITHER);
+    glDisable(GL_ALPHA_TEST);
+    glDisable(GL_STENCIL_TEST);
+    glDisable(GL_FOG);
+    glDisable(GL_TEXTURE_2D);
+    glPixelZoom(1.0, 1.0);
+
+    glDisable(GL_BLEND);
+    glDisable(GL_DEPTH_TEST);
+    glDepthMask(GL_FALSE);
+    glDisable(GL_CULL_FACE);
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    GLenum glErr = glGetError();
+
+    if (glErr)
+    {
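+        // GL errors during setup are treated as non-fatal and ignored here.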
+    }
+
+    UpdateClipping();
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving CreateMixingContext()");
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderAGL::RenderOffScreenBuffers()
+{
+    LockAGLCntx();
+
+    // Get the current window size, it might have changed since last render.
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not get window rect");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    if (aglSetCurrentContext(_aglContext) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set current context for rendering");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // TODO: clear only if a buffer was actually updated.
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    // Loop through all channels starting highest zOrder ending with lowest.
+    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
+    rIt != _zOrderToChannel.rend();
+    rIt++)
+    {
+        int channelId = rIt->second;
+        std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channelId);
+        if (it == _aglChannels.end())
+        {
+            // Stale zOrder entry with no matching channel; skip it.
+            continue;
+        }
+        VideoChannelAGL* aglChannel = it->second;
+
+        aglChannel->RenderOffScreenBuffer();
+    }
+
+    SwapAndDisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderAGL::SwapAndDisplayBuffers()
+{
+
+    LockAGLCntx();
+    if (_fullScreen)
+    {
+        // TODO:
+        // Swap front and back buffers, rendering taking care of in the same call
+        //aglSwapBuffers(_aglContext);
+        // Update buffer index to the idx for the next rendering!
+        //_textureIdx = (_textureIdx + 1) & 1;
+    }
+    else
+    {
+        // Double-buffered context: flush and swap, then mark the view dirty.
+        glFlush();
+        aglSwapBuffers(_aglContext);
+        HIViewSetNeedsDisplay(_hiviewRef, true);
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderAGL::GetWindowRect(Rect& rect)
+{
+
+    LockAGLCntx();
+
+    if (_isHIViewRef)
+    {
+        if (_hiviewRef)
+        {
+            HIRect HIViewRect1;
+            if(FALSE == HIViewIsValid(_hiviewRef))
+            {
+                rect.top = 0;
+                rect.left = 0;
+                rect.right = 0;
+                rect.bottom = 0;
+                //WEBRTC_LOG(kTraceError,"GetWindowRect() HIViewIsValid() returned false");
+                UnlockAGLCntx();
+                return -1;
+            }
+            HIViewGetBounds(_hiviewRef,&HIViewRect1);
+            HIRectConvert(&HIViewRect1, 1, NULL, 2, NULL);
+            if(HIViewRect1.origin.x < 0)
+            {
+                rect.top = 0;
+                //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.top = 0");
+            }
+            else
+            {
+                rect.top = HIViewRect1.origin.x;
+            }
+
+            if(HIViewRect1.origin.y < 0)
+            {
+                rect.left = 0;
+                //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.left = 0");
+            }
+            else
+            {
+                rect.left = HIViewRect1.origin.y;
+            }
+
+            if(HIViewRect1.size.width < 0)
+            {
+                rect.right = 0;
+                //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.right = 0");
+            }
+            else
+            {
+                rect.right = HIViewRect1.size.width;
+            }
+
+            if(HIViewRect1.size.height < 0)
+            {
+                rect.bottom = 0;
+                //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.bottom = 0");
+            }
+            else
+            {
+                rect.bottom = HIViewRect1.size.height;
+            }
+
+            ////WEBRTC_LOG(kTraceDebug,"GetWindowRect() HIViewRef: rect.top = %d, rect.left = %d, rect.right = %d, rect.bottom =%d in GetWindowRect", rect.top,rect.left,rect.right,rect.bottom);
+            UnlockAGLCntx();
+            return 0;
+        }
+        else
+        {
+            //WEBRTC_LOG(kTraceError, "invalid HIViewRef");
+            UnlockAGLCntx();
+            return -1;
+        }
+    }
+    else
+    {
+        if (_windowRef)
+        {
+            GetWindowBounds(_windowRef, kWindowContentRgn, &rect);
+            UnlockAGLCntx();
+            return 0;
+        }
+        else
+        {
+            //WEBRTC_LOG(kTraceError, "No WindowRef");
+            UnlockAGLCntx();
+            return -1;
+        }
+    }
+}
+
+int VideoRenderAGL::UpdateClipping()
+{
+    //WEBRTC_LOG(kTraceDebug, "Entering UpdateClipping()");
+    LockAGLCntx();
+
+    if(_isHIViewRef)
+    {
+        if(FALSE == HIViewIsValid(_hiviewRef))
+        {
+            //WEBRTC_LOG(kTraceError, "UpdateClipping() _isHIViewRef is invalid. Returning -1");
+            UnlockAGLCntx();
+            return -1;
+        }
+
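+        // Clip the GL surface to the part of the HIView that is actually
+        // visible, so overlapping sibling views are not painted over.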
+        RgnHandle visibleRgn = NewRgn();
+        SetEmptyRgn (visibleRgn);
+
+        if(-1 == CalculateVisibleRegion((ControlRef)_hiviewRef, visibleRgn, true))
+        {
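+            // Fall through and clip with whatever region was computed;
+            // clipping with a partial region beats not clipping at all.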
+        }
+
+        if(GL_FALSE == aglSetCurrentContext(_aglContext))
+        {
+            GLenum glErr = aglGetError();
+            //WEBRTC_LOG(kTraceError, "aglSetCurrentContext returned FALSE with error code %d at line %d", glErr, __LINE__);
+        }
+
+        if(GL_FALSE == aglEnable(_aglContext, AGL_CLIP_REGION))
+        {
+            GLenum glErr = aglGetError();
+            //WEBRTC_LOG(kTraceError, "aglEnable returned FALSE with error code %d at line %d\n", glErr, __LINE__);
+        }
+
+        if(GL_FALSE == aglSetInteger(_aglContext, AGL_CLIP_REGION, (const GLint*)visibleRgn))
+        {
+            GLenum glErr = aglGetError();
+            //WEBRTC_LOG(kTraceError, "aglSetInteger returned FALSE with error code %d at line %d\n", glErr, __LINE__);
+        }
+
+        DisposeRgn(visibleRgn);
+    }
+    else
+    {
+        //WEBRTC_LOG(kTraceDebug, "Not using a hiviewref!\n");
+    }
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving UpdateClipping()");
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderAGL::CalculateVisibleRegion(ControlRef control, RgnHandle &visibleRgn, bool clipChildren)
+{
+
+    //	LockAGLCntx();
+
+    //WEBRTC_LOG(kTraceDebug, "Entering CalculateVisibleRegion()");
+    OSStatus osStatus = 0;
+    OSErr osErr = 0;
+
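+    // Walk from |control| up to the root control, intersecting each
+    // ancestor's structure region into |visibleRgn| and subtracting the
+    // regions of child/sibling controls drawn above it; what remains is the
+    // portion of the control that is actually visible on screen.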
+    RgnHandle tempRgn = NewRgn();
+    if (IsControlVisible(control))
+    {
+        RgnHandle childRgn = NewRgn();
+        WindowRef window = GetControlOwner(control);
+        ControlRef rootControl;
+        GetRootControl(window, &rootControl); // 'wvnc'
+        ControlRef masterControl;
+        osStatus = GetSuperControl(rootControl, &masterControl);
+        // //WEBRTC_LOG(kTraceDebug, "IBM GetSuperControl=%d", osStatus);
+
+        if (masterControl != NULL)
+        {
+            CheckValidRegion(visibleRgn);
+            // init visibleRgn with region of 'wvnc'
+            osStatus = GetControlRegion(rootControl, kControlStructureMetaPart, visibleRgn);
+            // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
+            //GetSuperControl(rootControl, &rootControl);
+            ControlRef tempControl = control, lastControl = 0;
+            while (tempControl != masterControl) // current control != master
+            {
+                CheckValidRegion(tempRgn);
+
+                // //WEBRTC_LOG(kTraceDebug, "IBM tempControl=%d masterControl=%d", tempControl, masterControl);
+                ControlRef subControl;
+
+                osStatus = GetControlRegion(tempControl, kControlStructureMetaPart, tempRgn); // intersect the region of the current control with visibleRgn
+                // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
+                CheckValidRegion(tempRgn);
+
+                osErr = HIViewConvertRegion(tempRgn, tempControl, rootControl);
+                // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d : %d", osErr, __LINE__);
+                CheckValidRegion(tempRgn);
+
+                SectRgn(tempRgn, visibleRgn, visibleRgn);
+                CheckValidRegion(tempRgn);
+                CheckValidRegion(visibleRgn);
+                if (EmptyRgn(visibleRgn)) // if the region is empty, bail
+                {
+                    break;
+                }
+
+                if (clipChildren || tempControl != control) // clip children if true; cut out tempControl if it's not the one passed to this function
+                {
+                    UInt16 numChildren;
+                    osStatus = CountSubControls(tempControl, &numChildren); // count the subcontrols
+                    // //WEBRTC_LOG(kTraceDebug, "IBM CountSubControls=%d : %d", osStatus, __LINE__);
+
+                    // //WEBRTC_LOG(kTraceDebug, "IBM numChildren=%d", numChildren);
+                    for (int i = 0; i < numChildren; i++)
+                    {
+                        osErr = GetIndexedSubControl(tempControl, numChildren - i, &subControl); // retrieve the subcontrol in order by zorder
+                        // //WEBRTC_LOG(kTraceDebug, "IBM GetIndexedSubControls=%d : %d", osErr, __LINE__);
+                        if ( subControl == lastControl ) // break because of zorder
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM breaking because of zorder %d", __LINE__);
+                            break;
+                        }
+
+                        if(!subControl) continue; // guard before use
+
+                        if (!IsControlVisible(subControl)) // don't clip invisible controls
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM continue. Control is not visible %d", __LINE__);
+                            continue;
+                        }
+
+                        osStatus = GetControlRegion(subControl, kControlStructureMetaPart, tempRgn); //get the region of the current control and union to childrg
+                        // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d %d", osStatus, __LINE__);
+                        CheckValidRegion(tempRgn);
+                        if(osStatus != 0)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
+                            continue;
+                        }
+                        if(!tempRgn)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !tempRgn %d", osStatus, __LINE__);
+                            continue;
+                        }
+
+                        osStatus = HIViewConvertRegion(tempRgn, subControl, rootControl);
+                        CheckValidRegion(tempRgn);
+                        // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d %d", osStatus, __LINE__);
+                        if(osStatus != 0)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
+                            continue;
+                        }
+                        if(!rootControl)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !rootControl %d", osStatus, __LINE__);
+                            continue;
+                        }
+
+                        UnionRgn(tempRgn, childRgn, childRgn);
+                        CheckValidRegion(tempRgn);
+                        CheckValidRegion(childRgn);
+                        CheckValidRegion(visibleRgn);
+                        if(!childRgn)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !childRgn %d", osStatus, __LINE__);
+                            continue;
+                        }
+
+                    }  // next child control
+                }
+                lastControl = tempControl;
+                GetSuperControl(tempControl, &subControl);
+                tempControl = subControl;
+            }
+
+            DiffRgn(visibleRgn, childRgn, visibleRgn);
+            CheckValidRegion(visibleRgn);
+            CheckValidRegion(childRgn);
+            DisposeRgn(childRgn);
+        }
+        else
+        {
+            CopyRgn(tempRgn, visibleRgn);
+            CheckValidRegion(tempRgn);
+            CheckValidRegion(visibleRgn);
+        }
+        DisposeRgn(tempRgn);
+    }
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving CalculateVisibleRegion()");
+    //_aglCritPtr->Leave();
+    return 0;
+}
+
+bool VideoRenderAGL::CheckValidRegion(RgnHandle rHandle)
+{
+
+    Handle hndSize = (Handle)rHandle;
+    long size = GetHandleSize(hndSize);
+    if(0 == size)
+    {
+
+        OSErr memErr = MemError();
+        if(noErr != memErr)
+        {
+            // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle. MemError() returned %d", memErr);
+        }
+        else
+        {
+            // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle yet MemError() returned noErr");
+        }
+
+    }
+    else
+    {
+        // //WEBRTC_LOG(kTraceDebug, "IBM handleSize = %d", size);
+    }
+
+    if(false == IsValidRgnHandle(rHandle))
+    {
+        // //WEBRTC_LOG(kTraceError, "IBM ERROR Invalid Region found : $%d", rHandle);
+        assert(false);
+    }
+
+    int err = QDError();
+    switch(err)
+    {
+        case 0:
+            break;
+        case -147:
+            //WEBRTC_LOG(kTraceError, "ERROR region too big");
+            assert(false);
+            break;
+        case -149:
+            //WEBRTC_LOG(kTraceError, "ERROR not enough stack");
+            assert(false);
+            break;
+        default:
+            //WEBRTC_LOG(kTraceError, "ERROR Unknown QDError %d", err);
+            assert(false);
+            break;
+    }
+
+    return true;
+}
+
+int VideoRenderAGL::ChangeWindow(void* newWindowRef)
+{
+
+    LockAGLCntx();
+
+    UnlockAGLCntx();
+    return -1;
+}
+
+int32_t VideoRenderAGL::StartRender()
+{
+
+    LockAGLCntx();
+    const unsigned int MONITOR_FREQ = 60;
+    if(TRUE == _renderingIsPaused)
+    {
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Rendering is paused. Restarting now", __FUNCTION__, __LINE__);
+
+        // we already have the thread. Most likely StopRender() was called and they were paused
+        if(FALSE == _screenUpdateThread->Start())
+        {
+            //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return -1;
+        }
+        _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
+        if(FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
+        {
+            //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateEvent", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return -1;
+        }
+
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    _screenUpdateThread.reset(
+        new rtc::PlatformThread(ScreenUpdateThreadProc, this, "ScreenUpdate"));
+    _screenUpdateEvent = EventWrapper::Create();
+
+    if (!_screenUpdateThread)
+    {
+        //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _screenUpdateThread->Start();
+    _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
+    _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ);
+
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Started screenUpdateThread", __FUNCTION__, __LINE__);
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int32_t VideoRenderAGL::StopRender()
+{
+    LockAGLCntx();
+
+    if(!_screenUpdateThread || !_screenUpdateEvent)
+    {
+        _renderingIsPaused = TRUE;
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    if(FALSE == _screenUpdateThread->Stop() || FALSE == _screenUpdateEvent->StopTimer())
+    {
+        _renderingIsPaused = FALSE;
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Could not stop either: screenUpdateThread or screenUpdateEvent", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _renderingIsPaused = TRUE;
+
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Stopped screenUpdateThread", __FUNCTION__, __LINE__);
+    UnlockAGLCntx();
+    return 0;
+}
+
+int32_t VideoRenderAGL::DeleteAGLChannel(const uint32_t streamID)
+{
+
+    LockAGLCntx();
+
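+    // Note: despite the |streamID| argument, this overload tears down every
+    // channel; single-channel deletion is handled by DeleteAGLChannel(int).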
+    std::map<int, VideoChannelAGL*>::iterator it;
+    it = _aglChannels.begin();
+
+    while (it != _aglChannels.end())
+    {
+        VideoChannelAGL* channel = it->second;
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Deleting channel %d", __FUNCTION__, __LINE__, streamID);
+        delete channel;
+        it++;
+    }
+    _aglChannels.clear();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int32_t VideoRenderAGL::GetChannelProperties(const uint16_t streamId,
+                                             uint32_t& zOrder,
+                                             float& left,
+                                             float& top,
+                                             float& right,
+                                             float& bottom)
+{
+
+    LockAGLCntx();
+    UnlockAGLCntx();
+    return -1;
+
+}
+
+void VideoRenderAGL::LockAGLCntx()
+{
+    _renderCritSec.Enter();
+}
+void VideoRenderAGL::UnlockAGLCntx()
+{
+    _renderCritSec.Leave();
+}
+
+}  // namespace webrtc
+
+#endif   // CARBON_RENDERING
diff --git a/webrtc/modules/video_render/mac/video_render_agl.h b/webrtc/modules/video_render/mac/video_render_agl.h
new file mode 100644
index 0000000..c0a6059
--- /dev/null
+++ b/webrtc/modules/video_render/mac/video_render_agl.h
@@ -0,0 +1,178 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+
+#if defined(CARBON_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
+
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
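+// Compile-time switches selecting how window/HIView resize events are
+// tracked; see video_render_agl.cc for the corresponding handlers.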
+#define NEW_HIVIEW_PARENT_EVENT_HANDLER 1
+#define NEW_HIVIEW_EVENT_HANDLER 1
+#define USE_STRUCT_RGN
+
+#include <AGL/agl.h>
+#include <Carbon/Carbon.h>
+#include <OpenGL/OpenGL.h>
+#include <OpenGL/glext.h>
+#include <OpenGL/glu.h>
+#include <list>
+#include <map>
+#include <memory>
+
+class VideoRenderAGL;
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+
+class VideoChannelAGL : public VideoRenderCallback {
+ public:
+
+  VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner);
+  virtual ~VideoChannelAGL();
+  virtual int FrameSizeChange(int width, int height, int numberOfStreams);
+  virtual int DeliverFrame(const VideoFrame& videoFrame);
+  virtual int UpdateSize(int width, int height);
+  int SetStreamSettings(int streamId, float startWidth, float startHeight,
+                        float stopWidth, float stopHeight);
+  int SetStreamCropSettings(int streamId, float startWidth, float startHeight,
+                            float stopWidth, float stopHeight);
+  int RenderOffScreenBuffer();
+  int IsUpdated(bool& isUpdated);
+  virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
+  virtual int32_t RenderFrame(const uint32_t streamId, VideoFrame& videoFrame);
+
+ private:
+
+  AGLContext _aglContext;
+  int _id;
+  VideoRenderAGL* _owner;
+  int _width;
+  int _height;
+  int _stretchedWidth;
+  int _stretchedHeight;
+  float _startHeight;
+  float _startWidth;
+  float _stopWidth;
+  float _stopHeight;
+  int _xOldWidth;
+  int _yOldHeight;
+  int _oldStretchedHeight;
+  int _oldStretchedWidth;
+  unsigned char* _buffer;
+  size_t _bufferSize;
+  size_t _incomingBufferSize;
+  bool _bufferIsUpdated;
+  bool _sizeInitialized;
+  int _numberOfStreams;
+  bool _bVideoSizeStartedChanging;
+  GLenum _pixelFormat;
+  GLenum _pixelDataType;
+  unsigned int _texture;
+};
+
+class VideoRenderAGL {
+ public:
+  VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId);
+  VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId);
+  ~VideoRenderAGL();
+
+  int Init();
+  VideoChannelAGL* CreateAGLChannel(int channel, int zOrder, float startWidth,
+                                    float startHeight, float stopWidth,
+                                    float stopHeight);
+  VideoChannelAGL* ConfigureAGLChannel(int channel, int zOrder,
+                                       float startWidth, float startHeight,
+                                       float stopWidth, float stopHeight);
+  int DeleteAGLChannel(int channel);
+  int DeleteAllAGLChannels();
+  int StopThread();
+  bool IsFullScreen();
+  bool HasChannels();
+  bool HasChannel(int channel);
+  int GetChannels(std::list<int>& channelList);
+  void LockAGLCntx();
+  void UnlockAGLCntx();
+
+  static int GetOpenGLVersion(int& aglMajor, int& aglMinor);
+
+  // ********** new module functions ************ //
+  int ChangeWindow(void* newWindowRef);
+  int32_t StartRender();
+  int32_t StopRender();
+  int32_t DeleteAGLChannel(const uint32_t streamID);
+  int32_t GetChannelProperties(const uint16_t streamId, uint32_t& zOrder,
+                               float& left, float& top, float& right,
+                               float& bottom);
+
+ protected:
+  static bool ScreenUpdateThreadProc(void* obj);
+  bool ScreenUpdateProcess();
+  int GetWindowRect(Rect& rect);
+
+ private:
+  int CreateMixingContext();
+  int RenderOffScreenBuffers();
+  int SwapAndDisplayBuffers();
+  int UpdateClipping();
+  int CalculateVisibleRegion(ControlRef control, RgnHandle& visibleRgn,
+                             bool clipChildren);
+  bool CheckValidRegion(RgnHandle rHandle);
+  void ParentWindowResized(WindowRef window);
+
+  // Carbon GUI event handlers
+  static pascal OSStatus sHandleWindowResized(
+      EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
+  static pascal OSStatus sHandleHiViewResized(
+      EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
+
+  HIViewRef _hiviewRef;
+  WindowRef _windowRef;
+  bool _fullScreen;
+  int _id;
+  webrtc::CriticalSectionWrapper& _renderCritSec;
+  // TODO(pbos): Remove unique_ptr and use PlatformThread directly.
+  std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
+  webrtc::EventWrapper* _screenUpdateEvent;
+  bool _isHIViewRef;
+  AGLContext _aglContext;
+  int _windowWidth;
+  int _windowHeight;
+  int _lastWindowWidth;
+  int _lastWindowHeight;
+  int _lastHiViewWidth;
+  int _lastHiViewHeight;
+  int _currentParentWindowHeight;
+  int _currentParentWindowWidth;
+  Rect _currentParentWindowBounds;
+  bool _windowHasResized;
+  Rect _lastParentWindowBounds;
+  Rect _currentHIViewBounds;
+  Rect _lastHIViewBounds;
+  Rect _windowRect;
+  std::map<int, VideoChannelAGL*> _aglChannels;
+  std::multimap<int, int> _zOrderToChannel;
+  EventHandlerRef _hiviewEventHandlerRef;
+  EventHandlerRef _windowEventHandlerRef;
+  HIRect _currentViewBounds;
+  HIRect _lastViewBounds;
+  bool _renderingIsPaused;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
+
+#endif  // CARBON_RENDERING
diff --git a/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc b/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc
new file mode 100644
index 0000000..f85be5f
--- /dev/null
+++ b/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.cc
@@ -0,0 +1,280 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+#if defined(CARBON_RENDERING)
+
+#include <AGL/agl.h>
+#include "webrtc/modules/video_render/mac/video_render_agl.h"
+#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+VideoRenderMacCarbonImpl::VideoRenderMacCarbonImpl(const int32_t id,
+        const VideoRenderType videoRenderType,
+        void* window,
+        const bool fullscreen) :
+_id(id),
+_renderMacCarbonCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+_fullScreen(fullscreen),
+_ptrWindow(window)
+{
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
+
+}
+
+VideoRenderMacCarbonImpl::~VideoRenderMacCarbonImpl()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
+    delete &_renderMacCarbonCritsect;
+}
+
+int32_t
+VideoRenderMacCarbonImpl::Init()
+{
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+
+    if (!_ptrWindow)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s:%d No window pointer provided", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    // We don't know if the user passed us a WindowRef or a HIViewRef, so test.
+    bool referenceIsValid = false;
+
+    // Check if it's a valid WindowRef
+    //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef before WindowRef cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
+    WindowRef* windowRef = static_cast<WindowRef*>(_ptrWindow);
+    //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef after cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
+    if (IsValidWindowPtr(*windowRef))
+    {
+        _ptrCarbonRender = new VideoRenderAGL(*windowRef, _fullScreen, _id);
+        referenceIsValid = true;
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with WindowRef:%x", __FUNCTION__, __LINE__, *windowRef);
+    }
+    else
+    {
+        HIViewRef* hiviewRef = static_cast<HIViewRef*>(_ptrWindow);
+        if (HIViewIsValid(*hiviewRef))
+        {
+            _ptrCarbonRender = new VideoRenderAGL(*hiviewRef, _fullScreen, _id);
+            referenceIsValid = true;
+            WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with HIViewRef:%x", __FUNCTION__, __LINE__, *hiviewRef);
+        }
+    }
+
+    if(!referenceIsValid)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef/HIViewRef Returning -1", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    if(!_ptrCarbonRender)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to create an instance of VideoRenderAGL. Returning -1", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    int retVal = _ptrCarbonRender->Init();
+    if (retVal == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to init CarbonRenderer", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t
+VideoRenderMacCarbonImpl::ChangeWindow(void* window)
+{
+    // Changing the window is not supported by the Carbon renderer; bail out
+    // before the (currently dead) implementation below.
+    return -1;
+
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing window to %p", __FUNCTION__, window);
+
+    if (window == NULL)
+    {
+        return -1;
+    }
+    _ptrWindow = window;
+
+    return 0;
+}
+
+VideoRenderCallback*
+VideoRenderMacCarbonImpl::AddIncomingRenderStream(const uint32_t streamId,
+        const uint32_t zOrder,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+    VideoChannelAGL* AGLChannel = NULL;
+
+    if(!_ptrWindow)
+    {
+        // No window to render into.
+        return NULL;
+    }
+
+    if(!AGLChannel)
+    {
+        // The Carbon implementation has no Cocoa renderer member; channels
+        // are created through the AGL renderer.
+        AGLChannel = _ptrCarbonRender->CreateAGLChannel(streamId, zOrder, left, top, right, bottom);
+    }
+
+    return AGLChannel;
+
+}
+
+int32_t
+VideoRenderMacCarbonImpl::DeleteIncomingRenderStream(const uint32_t streamId)
+{
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    _ptrCarbonRender->DeleteAGLChannel(streamId);
+
+    return 0;
+}
+
+int32_t
+VideoRenderMacCarbonImpl::GetIncomingRenderStreamProperties(const uint32_t streamId,
+        uint32_t& zOrder,
+        float& left,
+        float& top,
+        float& right,
+        float& bottom) const
+{
+    // VideoRenderAGL::GetChannelProperties() is unimplemented and always
+    // returns -1, so report failure directly.
+    return -1;
+}
+
+int32_t
+VideoRenderMacCarbonImpl::StartRender()
+{
+    return _ptrCarbonRender->StartRender();
+}
+
+int32_t
+VideoRenderMacCarbonImpl::StopRender()
+{
+    return _ptrCarbonRender->StopRender();
+}
+
+VideoRenderType
+VideoRenderMacCarbonImpl::RenderType()
+{
+    return kRenderCarbon;
+}
+
+RawVideoType
+VideoRenderMacCarbonImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool
+VideoRenderMacCarbonImpl::FullScreen()
+{
+    return false;
+}
+
+int32_t
+VideoRenderMacCarbonImpl::GetGraphicsMemory(uint64_t& totalGraphicsMemory,
+        uint64_t& availableGraphicsMemory) const
+{
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return 0;
+}
+
+int32_t
+VideoRenderMacCarbonImpl::GetScreenResolution(uint32_t& screenWidth,
+        uint32_t& screenHeight) const
+{
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    // Not implemented for the Carbon renderer (the commented-out code this
+    // replaces was a Cocoa leftover); report a zero-sized screen so the
+    // out-parameters are at least well defined.
+    screenWidth = 0;
+    screenHeight = 0;
+    return 0;
+}
+
+uint32_t
+VideoRenderMacCarbonImpl::RenderFrameRate(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    return 0;
+}
+
+int32_t
+VideoRenderMacCarbonImpl::SetStreamCropping(const uint32_t streamId,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+int32_t VideoRenderMacCarbonImpl::ConfigureRenderer(const uint32_t streamId,
+                                                    const unsigned int zOrder,
+                                                    const float left,
+                                                    const float top,
+                                                    const float right,
+                                                    const float bottom)
+{
+    return 0;
+}
+
+int32_t
+VideoRenderMacCarbonImpl::SetTransparentBackground(const bool enable)
+{
+    return 0;
+}
+
+int32_t VideoRenderMacCarbonImpl::SetText(const uint8_t textId,
+                                          const uint8_t* text,
+                                          const int32_t textLength,
+                                          const uint32_t textColorRef,
+                                          const uint32_t backgroundColorRef,
+                                          const float left,
+                                          const float top,
+                                          const float right,
+                                          const float bottom)
+{
+    return 0;
+}
+
+int32_t VideoRenderMacCarbonImpl::SetBitmap(const void* bitMap,
+                                            const uint8_t pictureId,
+                                            const void* colorKey,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom)
+{
+    return 0;
+}
+
+
+}  // namespace webrtc
+
+#endif // CARBON_RENDERING
diff --git a/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h b/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h
new file mode 100644
index 0000000..9ad3a6c
--- /dev/null
+++ b/webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h
@@ -0,0 +1,146 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+#if defined(CARBON_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
+
+#include "webrtc/modules/video_render/i_video_render.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class VideoRenderAGL;
+
+// Class definitions
+class VideoRenderMacCarbonImpl : IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderMacCarbonImpl(const int32_t id,
+            const VideoRenderType videoRenderType,
+            void* window,
+            const bool fullscreen);
+
+    virtual ~VideoRenderMacCarbonImpl();
+
+    virtual int32_t Init();
+
+    virtual int32_t ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+    virtual VideoRenderCallback* AddIncomingRenderStream(const uint32_t streamId,
+            const uint32_t zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId);
+
+    virtual int32_t GetIncomingRenderStreamProperties(const uint32_t streamId,
+                                                      uint32_t& zOrder,
+                                                      float& left,
+                                                      float& top,
+                                                      float& right,
+                                                      float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual int32_t StartRender();
+
+    virtual int32_t StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual int32_t GetGraphicsMemory(uint64_t& totalGraphicsMemory,
+            uint64_t& availableGraphicsMemory) const;
+
+    virtual int32_t GetScreenResolution(uint32_t& screenWidth,
+            uint32_t& screenHeight) const;
+
+    virtual uint32_t RenderFrameRate(const uint32_t streamId);
+
+    virtual int32_t SetStreamCropping(const uint32_t streamId,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual int32_t ConfigureRenderer(const uint32_t streamId,
+            const unsigned int zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual int32_t SetTransparentBackground(const bool enable);
+
+    virtual int32_t SetText(const uint8_t textId,
+            const uint8_t* text,
+            const int32_t textLength,
+            const uint32_t textColorRef,
+            const uint32_t backgroundColorRef,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual int32_t SetBitmap(const void* bitMap,
+            const uint8_t pictureId,
+            const void* colorKey,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual int32_t FullScreenRender(void* window, const bool enable)
+    {
+        // not supported in Carbon at this time
+        return -1;
+    }
+
+private:
+    int32_t _id;
+    CriticalSectionWrapper& _renderMacCarbonCritsect;
+    bool _fullScreen;
+    void* _ptrWindow;
+    VideoRenderAGL* _ptrCarbonRender;
+
+};
+
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
+#endif // CARBON_RENDERING
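For context, every platform implementation restored by this revert exposes the same IVideoRender lifecycle: construct with a native window handle, Init(), register one VideoRenderCallback per incoming stream, then StartRender(). A minimal usage sketch (illustrative only, not part of this CL; assumes a valid CocoaRenderView* named view):

    webrtc::VideoRenderMacCocoaImpl renderer(0 /* id */, webrtc::kRenderCocoa,
                                             view, false /* fullscreen */);
    if (renderer.Init() == 0) {
      // One callback per stream; the rectangle is in normalized [0,1]
      // window coordinates (left, top, right, bottom).
      webrtc::VideoRenderCallback* callback =
          renderer.AddIncomingRenderStream(0 /* streamId */, 0 /* zOrder */,
                                           0.0f, 0.0f, 1.0f, 1.0f);
      renderer.StartRender();
      // The decode pipeline then delivers frames via
      // callback->RenderFrame(0, frame);
    }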
diff --git a/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h b/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h
new file mode 100644
index 0000000..21add27
--- /dev/null
+++ b/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h
@@ -0,0 +1,141 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+
+#if defined(COCOA_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
+
+#include "webrtc/modules/video_render/i_video_render.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class VideoRenderNSOpenGL;
+
+// Class definitions
+class VideoRenderMacCocoaImpl : IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderMacCocoaImpl(const int32_t id,
+            const VideoRenderType videoRenderType,
+            void* window,
+            const bool fullscreen);
+
+    virtual ~VideoRenderMacCocoaImpl();
+
+    virtual int32_t Init();
+
+    virtual int32_t ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+    virtual VideoRenderCallback* AddIncomingRenderStream(const uint32_t streamId,
+            const uint32_t zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId);
+
+    virtual int32_t GetIncomingRenderStreamProperties(const uint32_t streamId,
+                                                      uint32_t& zOrder,
+                                                      float& left,
+                                                      float& top,
+                                                      float& right,
+                                                      float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual int32_t StartRender();
+
+    virtual int32_t StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual int32_t GetGraphicsMemory(uint64_t& totalGraphicsMemory,
+                                      uint64_t& availableGraphicsMemory) const;
+
+    virtual int32_t GetScreenResolution(uint32_t& screenWidth,
+                                        uint32_t& screenHeight) const;
+
+    virtual uint32_t RenderFrameRate(const uint32_t streamId);
+
+    virtual int32_t SetStreamCropping(const uint32_t streamId,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom);
+
+    virtual int32_t ConfigureRenderer(const uint32_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom);
+
+    virtual int32_t SetTransparentBackground(const bool enable);
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t textColorRef,
+                            const uint32_t backgroundColorRef,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom);
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left,
+                              const float top,
+                              const float right,
+                              const float bottom);
+
+    virtual int32_t FullScreenRender(void* window, const bool enable);
+
+private:
+    int32_t _id;
+    CriticalSectionWrapper& _renderMacCocoaCritsect;
+    bool _fullScreen;
+    void* _ptrWindow;
+    VideoRenderNSOpenGL* _ptrCocoaRender;
+
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
+#endif  // COCOA_RENDERING
diff --git a/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm b/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm
new file mode 100644
index 0000000..5b017fe
--- /dev/null
+++ b/webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.mm
@@ -0,0 +1,253 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+#if defined(COCOA_RENDERING)
+
+#include "webrtc/modules/video_render/mac/cocoa_render_view.h"
+#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h"
+#include "webrtc/modules/video_render/mac/video_render_nsopengl.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+VideoRenderMacCocoaImpl::VideoRenderMacCocoaImpl(const int32_t id,
+        const VideoRenderType videoRenderType,
+        void* window,
+        const bool fullscreen) :
+_id(id),
+_renderMacCocoaCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+_fullScreen(fullscreen),
+_ptrWindow(window),
+_ptrCocoaRender(NULL)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
+}
+
+VideoRenderMacCocoaImpl::~VideoRenderMacCocoaImpl()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
+    delete &_renderMacCocoaCritsect;
+    if (_ptrCocoaRender)
+    {
+        delete _ptrCocoaRender;
+        _ptrCocoaRender = NULL;
+    }
+}
+
+int32_t
+VideoRenderMacCocoaImpl::Init()
+{
+
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+
+    // The void* window handle is a CocoaRenderView (an NSOpenGLView
+    // subclass), so cast it before handing it to the OpenGL renderer.
+    if (!_ptrWindow)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "No window handle %s:%d", __FUNCTION__, __LINE__);
+        return -1;
+    }
+    _ptrCocoaRender = new VideoRenderNSOpenGL((CocoaRenderView*)_ptrWindow, _fullScreen, _id);
+    int retVal = _ptrCocoaRender->Init();
+    if (retVal == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "Failed to init %s:%d", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t
+VideoRenderMacCocoaImpl::ChangeWindow(void* window)
+{
+
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing ID to ", __FUNCTION__, window);
+
+    if (window == NULL)
+    {
+        return -1;
+    }
+    _ptrWindow = window;
+
+
+    _ptrWindow = window;
+    _ptrCocoaRender->ChangeWindow((CocoaRenderView*)_ptrWindow);
+
+    return 0;
+}
+
+VideoRenderCallback*
+VideoRenderMacCocoaImpl::AddIncomingRenderStream(const uint32_t streamId,
+        const uint32_t zOrder,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+    VideoChannelNSOpenGL* nsOpenGLChannel =
+        _ptrCocoaRender->CreateNSGLChannel(streamId, zOrder, left, top, right, bottom);
+
+    return nsOpenGLChannel;
+}
+
+int32_t
+VideoRenderMacCocoaImpl::DeleteIncomingRenderStream(const uint32_t streamId)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    _ptrCocoaRender->DeleteNSGLChannel(streamId);
+
+    return 0;
+}
+
+int32_t
+VideoRenderMacCocoaImpl::GetIncomingRenderStreamProperties(const uint32_t streamId,
+        uint32_t& zOrder,
+        float& left,
+        float& top,
+        float& right,
+        float& bottom) const
+{
+    return _ptrCocoaRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
+}
+
+int32_t
+VideoRenderMacCocoaImpl::StartRender()
+{
+    return _ptrCocoaRender->StartRender();
+}
+
+int32_t
+VideoRenderMacCocoaImpl::StopRender()
+{
+    return _ptrCocoaRender->StopRender();
+}
+
+VideoRenderType
+VideoRenderMacCocoaImpl::RenderType()
+{
+    return kRenderCocoa;
+}
+
+RawVideoType
+VideoRenderMacCocoaImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool
+VideoRenderMacCocoaImpl::FullScreen()
+{
+    return false;
+}
+
+int32_t
+VideoRenderMacCocoaImpl::GetGraphicsMemory(uint64_t& totalGraphicsMemory,
+        uint64_t& availableGraphicsMemory) const
+{
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return 0;
+}
+
+int32_t
+VideoRenderMacCocoaImpl::GetScreenResolution(uint32_t& screenWidth,
+        uint32_t& screenHeight) const
+{
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    NSScreen* mainScreen = [NSScreen mainScreen];
+
+    NSRect frame = [mainScreen frame];
+
+    screenWidth = frame.size.width;
+    screenHeight = frame.size.height;
+    return 0;
+}
+
+uint32_t
+VideoRenderMacCocoaImpl::RenderFrameRate(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    return 0;
+}
+
+int32_t
+VideoRenderMacCocoaImpl::SetStreamCropping(const uint32_t streamId,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+int32_t VideoRenderMacCocoaImpl::ConfigureRenderer(const uint32_t streamId,
+                                                   const unsigned int zOrder,
+                                                   const float left,
+                                                   const float top,
+                                                   const float right,
+                                                   const float bottom)
+{
+    return 0;
+}
+
+int32_t
+VideoRenderMacCocoaImpl::SetTransparentBackground(const bool enable)
+{
+    return 0;
+}
+
+int32_t VideoRenderMacCocoaImpl::SetText(const uint8_t textId,
+                                         const uint8_t* text,
+                                         const int32_t textLength,
+                                         const uint32_t textColorRef,
+                                         const uint32_t backgroundColorRef,
+                                         const float left,
+                                         const float top,
+                                         const float right,
+                                         const float bottom)
+{
+    return _ptrCocoaRender->SetText(textId, text, textLength, textColorRef, backgroundColorRef, left, top, right, bottom);
+}
+
+int32_t VideoRenderMacCocoaImpl::SetBitmap(const void* bitMap,
+                                           const uint8_t pictureId,
+                                           const void* colorKey,
+                                           const float left,
+                                           const float top,
+                                           const float right,
+                                           const float bottom)
+{
+    return 0;
+}
+
+int32_t VideoRenderMacCocoaImpl::FullScreenRender(void* window, const bool enable)
+{
+    return -1;
+}
+
+}  // namespace webrtc
+
+#endif // COCOA_RENDERING
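Nearly every method in the implementation above takes a CriticalSectionScoped on entry and then delegates to VideoRenderNSOpenGL. A standalone sketch of that RAII locking pattern (illustrative only; std::mutex stands in for CriticalSectionWrapper, and the class name is hypothetical):

    #include <mutex>

    class Renderer {
     public:
      int ChangeWindow(void* window) {
        // Scoped lock, released automatically on every return path,
        // mirroring CriticalSectionScoped in the code above.
        std::lock_guard<std::mutex> cs(lock_);
        if (window == nullptr)
          return -1;
        window_ = window;
        return 0;
      }

     private:
      std::mutex lock_;
      void* window_ = nullptr;
    };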
diff --git a/webrtc/modules/video_render/mac/video_render_nsopengl.h b/webrtc/modules/video_render/mac/video_render_nsopengl.h
new file mode 100644
index 0000000..457557d
--- /dev/null
+++ b/webrtc/modules/video_render/mac/video_render_nsopengl.h
@@ -0,0 +1,192 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+#if defined(COCOA_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
+
+#import <Cocoa/Cocoa.h>
+#import <OpenGL/OpenGL.h>
+#import <OpenGL/glext.h>
+#import <OpenGL/glu.h>
+#include <QuickTime/QuickTime.h>
+#include <list>
+#include <map>
+#include <memory>
+
+#include "webrtc/base/thread_annotations.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+#import "webrtc/modules/video_render/mac/cocoa_full_screen_window.h"
+#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
+
+class Trace;
+
+namespace rtc {
+class PlatformThread;
+}  // namespace rtc
+
+namespace webrtc {
+class EventTimerWrapper;
+class VideoRenderNSOpenGL;
+class CriticalSectionWrapper;
+
+class VideoChannelNSOpenGL : public VideoRenderCallback {
+public:
+    VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner);
+    virtual ~VideoChannelNSOpenGL();
+
+    // A new frame is delivered
+    virtual int DeliverFrame(const VideoFrame& videoFrame);
+
+    // Called when the incoming frame size and/or number of streams in mix
+    // changes.
+    virtual int FrameSizeChange(int width, int height, int numberOfStreams);
+
+    virtual int UpdateSize(int width, int height);
+
+    // Setup
+    int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
+
+    // Called when it's time to render the last frame for the channel
+    int RenderOffScreenBuffer();
+
+    // Returns true if a new buffer has been delivered to the texture
+    int IsUpdated(bool& isUpdated);
+    virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
+
+    // ********** new module functions ************ //
+    virtual int32_t RenderFrame(const uint32_t streamId,
+                                const VideoFrame& videoFrame);
+
+    // ********** new module helper functions ***** //
+    int ChangeContext(NSOpenGLContext *nsglContext);
+    int32_t GetChannelProperties(float& left,
+                                 float& top,
+                                 float& right,
+                                 float& bottom);
+
+private:
+
+    NSOpenGLContext* _nsglContext;
+    const int _id;
+    VideoRenderNSOpenGL* _owner;
+    int32_t _width;
+    int32_t _height;
+    float _startWidth;
+    float _startHeight;
+    float _stopWidth;
+    float _stopHeight;
+    int _stretchedWidth;
+    int _stretchedHeight;
+    int _oldStretchedHeight;
+    int _oldStretchedWidth;
+    unsigned char* _buffer;
+    size_t _bufferSize;
+    size_t _incomingBufferSize;
+    bool _bufferIsUpdated;
+    int _numberOfStreams;
+    GLenum _pixelFormat;
+    GLenum _pixelDataType;
+    unsigned int _texture;
+};
+
+class VideoRenderNSOpenGL
+{
+
+public: // methods
+    VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId);
+    ~VideoRenderNSOpenGL();
+
+    static int GetOpenGLVersion(int& nsglMajor, int& nsglMinor);
+
+    // Allocates textures
+    int Init();
+    VideoChannelNSOpenGL* CreateNSGLChannel(int streamID, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    VideoChannelNSOpenGL* ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    int DeleteNSGLChannel(int channel);
+    int DeleteAllNSGLChannels();
+    int StopThread();
+    bool IsFullScreen();
+    bool HasChannels();
+    bool HasChannel(int channel);
+    int GetChannels(std::list<int>& channelList);
+    void LockAGLCntx() EXCLUSIVE_LOCK_FUNCTION(_nsglContextCritSec);
+    void UnlockAGLCntx() UNLOCK_FUNCTION(_nsglContextCritSec);
+
+    // ********** new module functions ************ //
+    int ChangeWindow(CocoaRenderView* newWindowRef);
+    int32_t StartRender();
+    int32_t StopRender();
+    int32_t DeleteNSGLChannel(const uint32_t streamID);
+    int32_t GetChannelProperties(const uint16_t streamId,
+                                 uint32_t& zOrder,
+                                 float& left,
+                                 float& top,
+                                 float& right,
+                                 float& bottom);
+
+    int32_t SetText(const uint8_t textId,
+                    const uint8_t* text,
+                    const int32_t textLength,
+                    const uint32_t textColorRef,
+                    const uint32_t backgroundColorRef,
+                    const float left,
+                    const float top,
+                    const float right,
+                    const float bottom);
+
+    // ********** new module helper functions ***** //
+    int configureNSOpenGLEngine();
+    int configureNSOpenGLView();
+    int setRenderTargetWindow();
+    int setRenderTargetFullScreen();
+
+protected: // methods
+    static bool ScreenUpdateThreadProc(void* obj);
+    bool ScreenUpdateProcess();
+    int GetWindowRect(Rect& rect);
+
+private: // methods
+
+    int CreateMixingContext();
+    int RenderOffScreenBuffers();
+    int DisplayBuffers();
+
+private: // variables
+
+
+    CocoaRenderView* _windowRef;
+    bool _fullScreen;
+    int _id;
+    CriticalSectionWrapper& _nsglContextCritSec;
+    // TODO(pbos): Remove unique_ptr and use PlatformThread directly.
+    std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
+    EventTimerWrapper* _screenUpdateEvent;
+    NSOpenGLContext* _nsglContext;
+    NSOpenGLContext* _nsglFullScreenContext;
+    CocoaFullScreenWindow* _fullScreenWindow;
+    Rect _windowRect; // The size of the window
+    int _windowWidth;
+    int _windowHeight;
+    std::map<int, VideoChannelNSOpenGL*> _nsglChannels;
+    std::multimap<int, int> _zOrderToChannel;
+    bool _renderingIsPaused;
+    NSView* _windowRefSuperView;
+    NSRect _windowRefSuperViewFrame;
+};
+
+}  // namespace webrtc
+
+#endif   // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
+#endif  // COCOA_RENDERING
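The _zOrderToChannel member declared above (std::multimap<int, int>, mapping zOrder to channel id) determines paint order: RenderOffScreenBuffers() walks it with a reverse_iterator so the highest zOrder is visited first. A self-contained sketch of that traversal, with hypothetical channel ids:

    #include <cstdio>
    #include <map>
    #include <utility>

    int main() {
      // zOrder -> channel id; a multimap allows several channels to
      // share the same zOrder.
      std::multimap<int, int> zOrderToChannel;
      zOrderToChannel.insert(std::make_pair(0, 10));
      zOrderToChannel.insert(std::make_pair(2, 11));
      zOrderToChannel.insert(std::make_pair(1, 12));

      // Reverse iteration visits the highest zOrder first: 11, 12, 10.
      for (std::multimap<int, int>::reverse_iterator rIt =
               zOrderToChannel.rbegin();
           rIt != zOrderToChannel.rend(); ++rIt) {
        printf("channel %d (zOrder %d)\n", rIt->second, rIt->first);
      }
      return 0;
    }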
diff --git a/webrtc/modules/video_render/mac/video_render_nsopengl.mm b/webrtc/modules/video_render/mac/video_render_nsopengl.mm
new file mode 100644
index 0000000..b7683a9
--- /dev/null
+++ b/webrtc/modules/video_render/mac/video_render_nsopengl.mm
@@ -0,0 +1,1247 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+#if defined(COCOA_RENDERING)
+
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/modules/video_render/mac/video_render_nsopengl.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+VideoChannelNSOpenGL::VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner) :
+_nsglContext( nsglContext),
+_id( iId),
+_owner( owner),
+_width( 0),
+_height( 0),
+_startWidth( 0.0f),
+_startHeight( 0.0f),
+_stopWidth( 0.0f),
+_stopHeight( 0.0f),
+_stretchedWidth( 0),
+_stretchedHeight( 0),
+_oldStretchedHeight( 0),
+_oldStretchedWidth( 0),
+_buffer( 0),
+_bufferSize( 0),
+_incomingBufferSize( 0),
+_bufferIsUpdated( false),
+_numberOfStreams( 0),
+_pixelFormat( GL_RGBA),
+_pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
+_texture( 0)
+{
+
+}
+
+VideoChannelNSOpenGL::~VideoChannelNSOpenGL()
+{
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _buffer = NULL;
+    }
+
+    if (_texture != 0)
+    {
+        [_nsglContext makeCurrentContext];
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+}
+
+int VideoChannelNSOpenGL::ChangeContext(NSOpenGLContext *nsglContext)
+{
+    _owner->LockAGLCntx();
+
+    _nsglContext = nsglContext;
+    [_nsglContext makeCurrentContext];
+
+    _owner->UnlockAGLCntx();
+    return 0;
+
+}
+
+int32_t VideoChannelNSOpenGL::GetChannelProperties(float& left, float& top,
+                                                   float& right, float& bottom)
+{
+
+    _owner->LockAGLCntx();
+
+    left = _startWidth;
+    top = _startHeight;
+    right = _stopWidth;
+    bottom = _stopHeight;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int32_t VideoChannelNSOpenGL::RenderFrame(const uint32_t /*streamId*/,
+                                          const VideoFrame& videoFrame) {
+  _owner->LockAGLCntx();
+
+  if(_width != videoFrame.width() ||
+     _height != videoFrame.height()) {
+      if(FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
+        _owner->UnlockAGLCntx();
+        return -1;
+      }
+  }
+  int ret = DeliverFrame(videoFrame);
+
+  _owner->UnlockAGLCntx();
+  return ret;
+}
+
+int VideoChannelNSOpenGL::UpdateSize(int width, int height)
+{
+    _owner->LockAGLCntx();
+    _width = width;
+    _height = height;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
+{
+
+    _owner->LockAGLCntx();
+    _stretchedHeight = stretchHeight;
+    _stretchedWidth = stretchWidth;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    //  We got a new frame size from VideoAPI, prepare the buffer
+
+    _owner->LockAGLCntx();
+
+    if (width == _width && _height == height)
+    {
+        // We already have a correct buffer size
+        _numberOfStreams = numberOfStreams;
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    _width = width;
+    _height = height;
+
+    // Delete the old buffer, create a new one with correct size.
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _bufferSize = 0;
+    }
+
+    _incomingBufferSize = CalcBufferSize(kI420, _width, _height);
+    _bufferSize = CalcBufferSize(kARGB, _width, _height);
+    _buffer = new unsigned char [_bufferSize];
+    memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
+
+    [_nsglContext makeCurrentContext];
+
+    if(glIsTexture(_texture))
+    {
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+
+    // Create a new texture
+    glGenTextures(1, (GLuint *) &_texture);
+
+    GLenum glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                     "%s:%d glGenTextures failed, error %d",
+                     __FUNCTION__, __LINE__, glErr);
+    }
+
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+
+    GLint texSize;
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
+
+    if (texSize < _width || texSize < _height)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Set up the texture type and size
+    glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
+            0, // level
+            GL_RGBA, // internal format
+            _width, // width
+            _height, // height
+            0, // border 0/1 = off/on
+            _pixelFormat, // format, GL_RGBA
+            _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
+            _buffer); // pixel data
+
+    glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::DeliverFrame(const VideoFrame& videoFrame) {
+  _owner->LockAGLCntx();
+
+  if (_texture == 0) {
+    _owner->UnlockAGLCntx();
+    return 0;
+  }
+
+  if (CalcBufferSize(kI420, videoFrame.width(), videoFrame.height()) !=
+      _incomingBufferSize) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  // The VideoFrame is treated as YV12 here: YV12 is YVU ordered, while
+  // I420 assumes YUV, so the U and V planes end up swapped.
+  // TODO(mikhal) : Use appropriate functionality.
+  // TODO(wu): See if we are using glTexSubImage2D correctly.
+  int rgbRet = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer);
+  if (rgbRet < 0) {
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  [_nsglContext makeCurrentContext];
+
+  // Make sure this texture is the active one
+  glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+  GLenum glErr = glGetError();
+  if (glErr != GL_NO_ERROR) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+    "ERROR %d while calling glBindTexture", glErr);
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+                  0, // mipmap level, not used
+                  0, // x offset (lower left of picture)
+                  0, // y offset
+                  _width, // width
+                  _height, // height
+                  _pixelFormat, // pixel format of _buffer
+                  _pixelDataType, // data type of _buffer
+                  (const GLvoid*) _buffer); // the pixel data
+
+  glErr = glGetError();
+  if (glErr != GL_NO_ERROR) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+    "ERROR %d while calling glTexSubImage2d", glErr);
+    _owner->UnlockAGLCntx();
+    return -1;
+  }
+
+  _bufferIsUpdated = true;
+
+  _owner->UnlockAGLCntx();
+  return 0;
+}
+
+int VideoChannelNSOpenGL::RenderOffScreenBuffer()
+{
+
+    _owner->LockAGLCntx();
+
+    if (_texture == 0)
+    {
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    // Convert from normalized window coordinates (0.0 <= v <= 1.0) to
+    // OpenGL clip space (-1.0 <= v <= 1.0).
+    GLfloat xStart = 2.0f * _startWidth - 1.0f;
+    GLfloat xStop = 2.0f * _stopWidth - 1.0f;
+    GLfloat yStart = 1.0f - 2.0f * _stopHeight;
+    GLfloat yStop = 1.0f - 2.0f * _startHeight;
+
+    [_nsglContext makeCurrentContext];
+
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+    _oldStretchedHeight = _stretchedHeight;
+    _oldStretchedWidth = _stretchedWidth;
+
+    glLoadIdentity();
+    glEnable(GL_TEXTURE_RECTANGLE_EXT);
+    glBegin(GL_POLYGON);
+    {
+        glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
+        glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
+        glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
+        glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
+    }
+    glEnd();
+
+    glDisable(GL_TEXTURE_RECTANGLE_EXT);
+
+    _bufferIsUpdated = false;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::IsUpdated(bool& isUpdated)
+{
+    _owner->LockAGLCntx();
+
+    isUpdated = _bufferIsUpdated;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+    _owner->LockAGLCntx();
+
+    _startWidth = startWidth;
+    _stopWidth = stopWidth;
+    _startHeight = startHeight;
+    _stopHeight = stopHeight;
+
+    int oldWidth = _width;
+    int oldHeight = _height;
+    int oldNumberOfStreams = _numberOfStreams;
+
+    _width = 0;
+    _height = 0;
+
+    int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
+
+    _owner->UnlockAGLCntx();
+    return retVal;
+}
+
+int VideoChannelNSOpenGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
+{
+    return -1;
+}
+
+/*
+ *
+ *    VideoRenderNSOpenGL
+ *
+ */
+
+VideoRenderNSOpenGL::VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId) :
+_windowRef( (CocoaRenderView*)windowRef),
+_fullScreen( fullScreen),
+_id( iId),
+_nsglContextCritSec( *CriticalSectionWrapper::CreateCriticalSection()),
+_screenUpdateEvent(EventTimerWrapper::Create()),
+_nsglContext( 0),
+_nsglFullScreenContext( 0),
+_fullScreenWindow( nil),
+_windowRect( ),
+_windowWidth( 0),
+_windowHeight( 0),
+_nsglChannels( ),
+_zOrderToChannel( ),
+_renderingIsPaused (FALSE),
+_windowRefSuperView(NULL),
+_windowRefSuperViewFrame(NSMakeRect(0,0,0,0))
+{
+  _screenUpdateThread.reset(new rtc::PlatformThread(
+      ScreenUpdateThreadProc, this, "ScreenUpdateNSOpenGL"));
+}
+
+int VideoRenderNSOpenGL::ChangeWindow(CocoaRenderView* newWindowRef)
+{
+
+    LockAGLCntx();
+
+    _windowRef = newWindowRef;
+
+    if(CreateMixingContext() == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    int error = 0;
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+    while (it!= _nsglChannels.end())
+    {
+        error |= (it->second)->ChangeContext(_nsglContext);
+        it++;
+    }
+    if(error != 0)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+/* Check if the thread and event already exist.
+ * If so, they are simply restarted. If the thread no longer exists
+ * (it is created in Init()), fail.
+ */
+int32_t VideoRenderNSOpenGL::StartRender()
+{
+
+    LockAGLCntx();
+
+    const unsigned int MONITOR_FREQ = 60;
+    if(TRUE == _renderingIsPaused)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Restarting screenUpdateThread");
+
+        // We already have the thread; most likely StopRender() was called and it was paused.
+        _screenUpdateThread->Start();
+        if (FALSE ==
+            _screenUpdateEvent->StartTimer(true, 1000 / MONITOR_FREQ)) {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "Failed to restart screenUpdateThread or screenUpdateEvent");
+            UnlockAGLCntx();
+            return -1;
+        }
+
+        _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
+
+        UnlockAGLCntx();
+        return 0;
+    }
+
+
+    if (!_screenUpdateThread)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "failed start screenUpdateThread");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+
+    UnlockAGLCntx();
+    return 0;
+}
+int32_t VideoRenderNSOpenGL::StopRender()
+{
+
+    LockAGLCntx();
+
+    /* The code below is functional,
+     * but stopping can pause for several seconds.
+     */
+
+    // pause the update thread and the event timer
+    if(!_screenUpdateThread || !_screenUpdateEvent)
+    {
+        _renderingIsPaused = TRUE;
+
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    _screenUpdateThread->Stop();
+    if (FALSE == _screenUpdateEvent->StopTimer()) {
+        _renderingIsPaused = FALSE;
+
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _renderingIsPaused = TRUE;
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderNSOpenGL::configureNSOpenGLView()
+{
+    return 0;
+
+}
+
+int VideoRenderNSOpenGL::configureNSOpenGLEngine()
+{
+
+    LockAGLCntx();
+
+    // Disable not needed functionality to increase performance
+    glDisable(GL_DITHER);
+    glDisable(GL_ALPHA_TEST);
+    glDisable(GL_STENCIL_TEST);
+    glDisable(GL_FOG);
+    glDisable(GL_TEXTURE_2D);
+    glPixelZoom(1.0, 1.0);
+    glDisable(GL_BLEND);
+    glDisable(GL_DEPTH_TEST);
+    glDepthMask(GL_FALSE);
+    glDisable(GL_CULL_FACE);
+
+    // Set texture parameters
+    glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
+    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+            || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+    }
+    glViewport(0, 0, _windowWidth, _windowHeight);
+
+    // Synchronize buffer swaps with vertical refresh rate
+    GLint swapInt = 1;
+    [_nsglContext setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderNSOpenGL::setRenderTargetWindow()
+{
+    LockAGLCntx();
+
+
+    GLuint attribs[] =
+    {
+        NSOpenGLPFAColorSize, 24,
+        NSOpenGLPFAAlphaSize, 8,
+        NSOpenGLPFADepthSize, 16,
+        NSOpenGLPFAAccelerated,
+        0
+    };
+
+    NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
+                          (NSOpenGLPixelFormatAttribute*) attribs] autorelease];
+
+    if(_windowRef)
+    {
+        [_windowRef initCocoaRenderView:fmt];
+    }
+    else
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _nsglContext = [_windowRef nsOpenGLContext];
+    [_nsglContext makeCurrentContext];
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+
+    DisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderNSOpenGL::setRenderTargetFullScreen()
+{
+    LockAGLCntx();
+
+
+    GLuint attribs[] =
+    {
+        NSOpenGLPFAColorSize, 24,
+        NSOpenGLPFAAlphaSize, 8,
+        NSOpenGLPFADepthSize, 16,
+        NSOpenGLPFAAccelerated,
+        0
+    };
+
+    NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
+                          (NSOpenGLPixelFormatAttribute*) attribs] autorelease];
+
+    // Store the original superview and frame for use when exiting full screen
+    _windowRefSuperViewFrame = [_windowRef frame];
+    _windowRefSuperView = [_windowRef superview];
+
+
+    // create new fullscreen window
+    NSRect screenRect = [[NSScreen mainScreen]frame];
+    [_windowRef setFrame:screenRect];
+    [_windowRef setBounds:screenRect];
+
+
+    _fullScreenWindow = [[CocoaFullScreenWindow alloc]init];
+    [_fullScreenWindow grabFullScreen];
+    [[[_fullScreenWindow window] contentView] addSubview:_windowRef];
+
+    if(_windowRef)
+    {
+        [_windowRef initCocoaRenderViewFullScreen:fmt];
+    }
+    else
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _nsglContext = [_windowRef nsOpenGLContext];
+    [_nsglContext makeCurrentContext];
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    DisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+VideoRenderNSOpenGL::~VideoRenderNSOpenGL()
+{
+
+    if(_fullScreen)
+    {
+        if(_fullScreenWindow)
+        {
+            // Detach CocoaRenderView from the full screen view, back to
+            // its original parent.
+            [_windowRef removeFromSuperview];
+            if(_windowRefSuperView)
+            {
+              [_windowRefSuperView addSubview:_windowRef];
+              [_windowRef setFrame:_windowRefSuperViewFrame];
+            }
+
+            WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, 0, "%s:%d Attempting to release fullscreen window", __FUNCTION__, __LINE__);
+            [_fullScreenWindow releaseFullScreen];
+
+        }
+    }
+
+    // Signal event to exit thread, then delete it
+    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
+
+    if (tmpPtr)
+    {
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        tmpPtr->Stop();
+        delete tmpPtr;
+        delete _screenUpdateEvent;
+        _screenUpdateEvent = NULL;
+    }
+
+    if (_nsglContext != 0)
+    {
+        [_nsglContext makeCurrentContext];
+        _nsglContext = nil;
+    }
+
+    // Delete all channels.
+    for (std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+         it != _nsglChannels.end(); ++it)
+    {
+        delete it->second;
+    }
+    _nsglChannels.clear();
+
+    // Clean the zOrder map.
+    _zOrderToChannel.clear();
+
+}
+
+/* static */
+int VideoRenderNSOpenGL::GetOpenGLVersion(int& /*nsglMajor*/, int& /*nsglMinor*/)
+{
+    return -1;
+}
+
+int VideoRenderNSOpenGL::Init()
+{
+
+    LockAGLCntx();
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _screenUpdateThread->Start();
+    _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
+
+    // Start the event triggering the render process
+    unsigned int monitorFreq = 60;
+    _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
+
+    if (CreateMixingContext() == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+VideoChannelNSOpenGL* VideoRenderNSOpenGL::CreateNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    if (HasChannel(channel))
+    {
+        return NULL;
+    }
+
+    // Multiple channels may share a zOrder; the multimap keeps all of
+    // them, so no duplicate-zOrder check is needed here.
+
+    VideoChannelNSOpenGL* newAGLChannel = new VideoChannelNSOpenGL(_nsglContext, _id, this);
+    if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+    {
+        delete newAGLChannel;
+        return NULL;
+    }
+
+    _nsglChannels[channel] = newAGLChannel;
+    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s successfully created NSGL channel number %d", __FUNCTION__, channel);
+
+    return newAGLChannel;
+}
+
+int VideoRenderNSOpenGL::DeleteAllNSGLChannels()
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it;
+    it = _nsglChannels.begin();
+
+    while (it != _nsglChannels.end())
+    {
+        VideoChannelNSOpenGL* channel = it->second;
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Deleting channel %d", __FUNCTION__, it->first);
+        delete channel;
+        it++;
+    }
+    _nsglChannels.clear();
+    return 0;
+}
+
+int32_t VideoRenderNSOpenGL::DeleteNSGLChannel(const uint32_t channel)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it;
+    it = _nsglChannels.find(channel);
+    if (it != _nsglChannels.end())
+    {
+        delete it->second;
+        _nsglChannels.erase(it);
+    }
+    else
+    {
+        return -1;
+    }
+
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while( zIt != _zOrderToChannel.end())
+    {
+        if (zIt->second == (int)channel)
+        {
+            _zOrderToChannel.erase(zIt);
+            break;
+        }
+        zIt++;
+    }
+
+    return 0;
+}
+
+int32_t VideoRenderNSOpenGL::GetChannelProperties(const uint16_t streamId,
+                                                  uint32_t& zOrder,
+                                                  float& left,
+                                                  float& top,
+                                                  float& right,
+                                                  float& bottom)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    bool channelFound = false;
+
+    // Loop through all channels until we find a match.
+    // From that, get zorder.
+    // From that, get T, L, R, B
+    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
+            rIt != _zOrderToChannel.rend();
+            rIt++)
+    {
+        if(streamId == rIt->second)
+        {
+            channelFound = true;
+
+            // The multimap maps zOrder -> channel id, so the zOrder is
+            // the key, not the value.
+            zOrder = rIt->first;
+
+            std::map<int, VideoChannelNSOpenGL*>::iterator cIt = _nsglChannels.find(streamId);
+            VideoChannelNSOpenGL* tempChannel = cIt->second;
+
+            if(-1 == tempChannel->GetChannelProperties(left, top, right, bottom) )
+            {
+                return -1;
+            }
+            break;
+        }
+    }
+
+    if (false == channelFound)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+int VideoRenderNSOpenGL::StopThread()
+{
+
+    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+                 "%s Stopping thread", __FUNCTION__);
+
+    if (tmpPtr)
+    {
+        _screenUpdateEvent->Set();
+        tmpPtr->Stop();
+        delete tmpPtr;
+    }
+
+    delete _screenUpdateEvent;
+    _screenUpdateEvent = NULL;
+
+    return 0;
+}
+
+bool VideoRenderNSOpenGL::IsFullScreen()
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+    return _fullScreen;
+}
+
+bool VideoRenderNSOpenGL::HasChannels()
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    if (_nsglChannels.begin() != _nsglChannels.end())
+    {
+        return true;
+    }
+    return false;
+}
+
+bool VideoRenderNSOpenGL::HasChannel(int channel)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
+
+    if (it != _nsglChannels.end())
+    {
+        return true;
+    }
+    return false;
+}
+
+int VideoRenderNSOpenGL::GetChannels(std::list<int>& channelList)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+
+    while (it != _nsglChannels.end())
+    {
+        channelList.push_back(it->first);
+        it++;
+    }
+
+    return 0;
+}
+
+VideoChannelNSOpenGL* VideoRenderNSOpenGL::ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
+
+    if (it != _nsglChannels.end())
+    {
+        VideoChannelNSOpenGL* aglChannel = it->second;
+        if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s failed to set stream settings: channel=%d zOrder=%d startWidth=%f startHeight=%f stopWidth=%f stopHeight=%f",
+                    __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
+            return NULL;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Configuring channel=%d zOrder=%d startWidth=%f startHeight=%f stopWidth=%f stopHeight=%f",
+                __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
+
+        std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+        while(zIt != _zOrderToChannel.end())
+        {
+            if (zIt->second == channel)
+            {
+                if (zIt->first != zOrder)
+                {
+                    _zOrderToChannel.erase(zIt);
+                    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+                }
+                break;
+            }
+            zIt++;
+        }
+        return aglChannel;
+    }
+
+    return NULL;
+}
+
+/*
+ *
+ *    Rendering process
+ *
+ */
+
+bool VideoRenderNSOpenGL::ScreenUpdateThreadProc(void* obj)
+{
+    return static_cast<VideoRenderNSOpenGL*>(obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderNSOpenGL::ScreenUpdateProcess()
+{
+
+    _screenUpdateEvent->Wait(10);
+    LockAGLCntx();
+
+    if (!_screenUpdateThread)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s no screen update thread", __FUNCTION__);
+        UnlockAGLCntx();
+        return false;
+    }
+
+    [_nsglContext makeCurrentContext];
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+            || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+        glViewport(0, 0, _windowWidth, _windowHeight);
+    }
+
+    // Check if there are any updated buffers
+    bool updated = false;
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+    while (it != _nsglChannels.end())
+    {
+
+        VideoChannelNSOpenGL* aglChannel = it->second;
+        aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
+        aglChannel->IsUpdated(updated);
+        if (updated)
+        {
+            break;
+        }
+        it++;
+    }
+
+    if (updated)
+    {
+
+        // At least one buffer is updated; we need to repaint the texture.
+        if (RenderOffScreenBuffers() != -1)
+        {
+            UnlockAGLCntx();
+            return true;
+        }
+    }
+    UnlockAGLCntx();
+    return true;
+}
+
+/*
+ *
+ *    Functions for creating mixing buffers and screen settings
+ *
+ */
+
+int VideoRenderNSOpenGL::CreateMixingContext()
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    if(_fullScreen)
+    {
+        if(-1 == setRenderTargetFullScreen())
+        {
+            return -1;
+        }
+    }
+    else
+    {
+
+        if(-1 == setRenderTargetWindow())
+        {
+            return -1;
+        }
+    }
+
+    configureNSOpenGLEngine();
+
+    DisplayBuffers();
+
+    GLenum glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                     "%s:%d OpenGL error %d after creating mixing context",
+                     __FUNCTION__, __LINE__, glErr);
+    }
+
+    return 0;
+}
+
+/*
+ *
+ *    Rendering functions
+ *
+ */
+
+int VideoRenderNSOpenGL::RenderOffScreenBuffers()
+{
+    LockAGLCntx();
+
+    // Get the current window size, it might have changed since last render.
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    [_nsglContext makeCurrentContext];
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    // Loop through all channels starting highest zOrder ending with lowest.
+    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
+            rIt != _zOrderToChannel.rend();
+            rIt++)
+    {
+        int channelId = rIt->second;
+        std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channelId);
+
+        VideoChannelNSOpenGL* aglChannel = it->second;
+
+        aglChannel->RenderOffScreenBuffer();
+    }
+
+    DisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+/*
+ *
+ * Helper functions
+ *
+ * All helper functions assume external protection
+ *
+ */
+
+int VideoRenderNSOpenGL::DisplayBuffers()
+{
+
+    LockAGLCntx();
+
+    glFinish();
+    [_nsglContext flushBuffer];
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s glFinish and [_nsglContext flushBuffer]", __FUNCTION__);
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderNSOpenGL::GetWindowRect(Rect& rect)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    if (_windowRef)
+    {
+        if(_fullScreen)
+        {
+            NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
+            rect.bottom = 0;
+            rect.left = 0;
+            rect.right = mainDisplayRect.size.width;
+            rect.top = mainDisplayRect.size.height;
+        }
+        else
+        {
+            rect.top = [_windowRef frame].origin.y;
+            rect.left = [_windowRef frame].origin.x;
+            rect.bottom = [_windowRef frame].origin.y + [_windowRef frame].size.height;
+            rect.right = [_windowRef frame].origin.x + [_windowRef frame].size.width;
+        }
+
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+int32_t VideoRenderNSOpenGL::SetText(const uint8_t /*textId*/,
+                                     const uint8_t* /*text*/,
+                                     const int32_t /*textLength*/,
+                                     const uint32_t /*textColorRef*/,
+                                     const uint32_t /*backgroundColorRef*/,
+                                     const float /*left*/,
+                                     const float /*top*/,
+                                     const float /*right*/,
+                                     const float /*bottom*/)
+{
+
+    return 0;
+
+}
+
+void VideoRenderNSOpenGL::LockAGLCntx()
+{
+    _nsglContextCritSec.Enter();
+}
+void VideoRenderNSOpenGL::UnlockAGLCntx()
+{
+    _nsglContextCritSec.Leave();
+}
+
+}  // namespace webrtc
+
+#endif // COCOA_RENDERING
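RenderOffScreenBuffer() above maps each stream rectangle from normalized window coordinates (0..1, y growing downwards) into OpenGL clip space (-1..1, y growing upwards). The same arithmetic as a standalone sketch, with example values:

    #include <cstdio>

    int main() {
      // Normalized stream rectangle, as passed to SetStreamSettings().
      float startWidth = 0.25f, stopWidth = 0.75f;    // left, right
      float startHeight = 0.25f, stopHeight = 0.75f;  // top, bottom

      // x: [0,1] -> [-1,1]; y is additionally flipped.
      float xStart = 2.0f * startWidth - 1.0f;   // -0.5
      float xStop = 2.0f * stopWidth - 1.0f;     //  0.5
      float yStart = 1.0f - 2.0f * stopHeight;   // -0.5
      float yStop = 1.0f - 2.0f * startHeight;   //  0.5

      printf("quad x: [%.2f, %.2f]  y: [%.2f, %.2f]\n",
             xStart, xStop, yStart, yStop);
      return 0;
    }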
diff --git a/webrtc/modules/video_render/test/testAPI/renderStartImage.bmp b/webrtc/modules/video_render/test/testAPI/renderStartImage.bmp
new file mode 100644
index 0000000..c443a58
--- /dev/null
+++ b/webrtc/modules/video_render/test/testAPI/renderStartImage.bmp
Binary files differ
diff --git a/webrtc/modules/video_render/test/testAPI/testAPI.cc b/webrtc/modules/video_render/test/testAPI/testAPI.cc
new file mode 100644
index 0000000..cea2f6b
--- /dev/null
+++ b/webrtc/modules/video_render/test/testAPI/testAPI.cc
@@ -0,0 +1,645 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/video_render/test/testAPI/testAPI.h"
+
+#include <stdio.h>
+
+#if defined(_WIN32)
+#include <tchar.h>
+#include <windows.h>
+#include <assert.h>
+#include <fstream>
+#include <iostream>
+#include <string>
+#include <ddraw.h>
+
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <iostream>
+#include <sys/time.h>
+
+#endif
+
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_render/video_render.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/system_wrappers/include/sleep.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+void GetTestVideoFrame(VideoFrame* frame, uint8_t startColor);
+int TestSingleStream(VideoRender* renderModule);
+int TestFullscreenStream(VideoRender* &renderModule,
+                         void* window,
+                         const VideoRenderType videoRenderType);
+int TestBitmapText(VideoRender* renderModule);
+int TestMultipleStreams(VideoRender* renderModule);
+int TestExternalRender(VideoRender* renderModule);
+
+#define TEST_FRAME_RATE 30
+#define TEST_TIME_SECOND 5
+#define TEST_FRAME_NUM (TEST_FRAME_RATE*TEST_TIME_SECOND)
+#define TEST_STREAM0_START_COLOR 0
+#define TEST_STREAM1_START_COLOR 64
+#define TEST_STREAM2_START_COLOR 128
+#define TEST_STREAM3_START_COLOR 192
+
+#if defined(WEBRTC_LINUX)
+
+#define GET_TIME_IN_MS timeGetTime()
+
+unsigned long timeGetTime()
+{
+    struct timeval tv;
+    struct timezone tz;
+    unsigned long val;
+
+    gettimeofday(&tv, &tz);
+    val = tv.tv_sec * 1000 + tv.tv_usec / 1000;
+    return val;
+}
+
+#elif defined(WEBRTC_MAC)
+
+#include <unistd.h>
+
+#define GET_TIME_IN_MS timeGetTime()
+
+unsigned long timeGetTime()
+{
+    // Not implemented on Mac; always returns 0.
+    return 0;
+}
+
+#else
+
+#define GET_TIME_IN_MS ::timeGetTime()
+
+#endif
+
+using namespace std;
+
+#if defined(_WIN32)
+LRESULT CALLBACK WebRtcWinProc( HWND hWnd,UINT uMsg,WPARAM wParam,LPARAM lParam)
+{
+    switch(uMsg)
+    {
+        case WM_DESTROY:
+        break;
+        case WM_COMMAND:
+        break;
+    }
+    return DefWindowProc(hWnd,uMsg,wParam,lParam);
+}
+
+int WebRtcCreateWindow(HWND &hwndMain,int winNum, int width, int height)
+{
+    HINSTANCE hinst = GetModuleHandle(0);
+    WNDCLASSEX wcx;
+    wcx.hInstance = hinst;
+    wcx.lpszClassName = TEXT("VideoRenderTest");
+    wcx.lpfnWndProc = (WNDPROC)WebRtcWinProc;
+    wcx.style = CS_DBLCLKS;
+    wcx.hIcon = LoadIcon (NULL, IDI_APPLICATION);
+    wcx.hIconSm = LoadIcon (NULL, IDI_APPLICATION);
+    wcx.hCursor = LoadCursor (NULL, IDC_ARROW);
+    wcx.lpszMenuName = NULL;
+    wcx.cbSize = sizeof (WNDCLASSEX);
+    wcx.cbClsExtra = 0;
+    wcx.cbWndExtra = 0;
+    wcx.hbrBackground = GetSysColorBrush(COLOR_3DFACE);
+
+    // Register our window class with the operating system.
+    // If there is an error, exit program.
+    if ( !RegisterClassEx (&wcx) )
+    {
+        MessageBox( 0, TEXT("Failed to register window class!"),TEXT("Error!"), MB_OK|MB_ICONERROR );
+        return 0;
+    }
+
+    // Create the main window.
+    hwndMain = CreateWindowEx(
+            0, // no extended styles
+            TEXT("VideoRenderTest"), // class name
+            TEXT("VideoRenderTest Window"), // window name
+            WS_OVERLAPPED |WS_THICKFRAME, // overlapped window
+            800, // horizontal position
+            0, // vertical position
+            width, // width
+            height, // height
+            (HWND) NULL, // no parent or owner window
+            (HMENU) NULL, // class menu used
+            hinst, // instance handle
+            NULL); // no window creation data
+
+    if (!hwndMain)
+        return -1;
+
+    // Show the window using the flag specified by the program
+    // that started the application, and send the application
+    // a WM_PAINT message.
+
+    ShowWindow(hwndMain, SW_SHOWDEFAULT);
+    UpdateWindow(hwndMain);
+    return 0;
+}
+
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+
+int WebRtcCreateWindow(Window *outWindow, Display **outDisplay, int winNum,
+                       int width, int height)
+{
+    int screen, xpos = 10, ypos = 10;
+    XEvent evnt;
+    XSetWindowAttributes xswa; // window attribute struct
+    XVisualInfo vinfo; // screen visual info struct
+    unsigned long mask; // attribute mask
+
+    // get connection handle to xserver
+    Display* _display = XOpenDisplay( NULL );
+
+    // get screen number
+    screen = DefaultScreen(_display);
+
+    // put desired visual info for the screen in vinfo
+    if( XMatchVisualInfo(_display, screen, 24, TrueColor, &vinfo) != 0 )
+    {
+        //printf( "Screen visual info match!\n" );
+    }
+
+    // set window attributes
+    xswa.colormap = XCreateColormap(_display, DefaultRootWindow(_display), vinfo.visual, AllocNone);
+    xswa.event_mask = StructureNotifyMask | ExposureMask;
+    xswa.background_pixel = 0;
+    xswa.border_pixel = 0;
+
+    // value mask for attributes
+    mask = CWBackPixel | CWBorderPixel | CWColormap | CWEventMask;
+
+    switch( winNum )
+    {
+        case 0:
+        xpos = 200;
+        ypos = 200;
+        break;
+        case 1:
+        xpos = 300;
+        ypos = 200;
+        break;
+        default:
+        break;
+    }
+
+    // create a subwindow for parent (defroot)
+    Window _window = XCreateWindow(_display, DefaultRootWindow(_display),
+            xpos, ypos,
+            width,
+            height,
+            0, vinfo.depth,
+            InputOutput,
+            vinfo.visual,
+            mask, &xswa);
+
+    // Set window name
+    if( winNum == 0 )
+    {
+        XStoreName(_display, _window, "VE MM Local Window");
+        XSetIconName(_display, _window, "VE MM Local Window");
+    }
+    else if( winNum == 1 )
+    {
+        XStoreName(_display, _window, "VE MM Remote Window");
+        XSetIconName(_display, _window, "VE MM Remote Window");
+    }
+
+    // make x report events for mask
+    XSelectInput(_display, _window, StructureNotifyMask);
+
+    // map the window to the display
+    XMapWindow(_display, _window);
+
+    // wait for map event
+    do
+    {
+        XNextEvent(_display, &evnt);
+    }
+    while (evnt.type != MapNotify || evnt.xmap.event != _window);
+
+    *outWindow = _window;
+    *outDisplay = _display;
+
+    return 0;
+}
+#endif  // WEBRTC_LINUX && !WEBRTC_ANDROID
+
+// Note: Mac code is in testApi_mac.mm.
+
+class MyRenderCallback: public VideoRenderCallback
+{
+public:
+    MyRenderCallback() : _cnt(0) {}
+    ~MyRenderCallback() {}
+    virtual int32_t RenderFrame(const uint32_t streamId,
+                                const VideoFrame& videoFrame) {
+        _cnt++;
+        if (_cnt % 100 == 0)
+        {
+            printf("Render callback %d \n",_cnt);
+        }
+        return 0;
+    }
+    int32_t _cnt;
+};
+
+void GetTestVideoFrame(VideoFrame* frame, uint8_t startColor) {
+    // Fill the frame with a solid color that advances on each call; the
+    // static counter is seeded by startColor only on the first call.
+    static uint8_t color = startColor;
+
+    memset(frame->buffer(kYPlane), color, frame->allocated_size(kYPlane));
+    memset(frame->buffer(kUPlane), color, frame->allocated_size(kUPlane));
+    memset(frame->buffer(kVPlane), color, frame->allocated_size(kVPlane));
+
+    ++color;
+}
+
+int TestSingleStream(VideoRender* renderModule) {
+    int error = 0;
+    // Add settings for a stream to render
+    printf("Add stream 0 to entire window\n");
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+
+    printf("Start render\n");
+    error = renderModule->StartRender(streamId0);
+    if (error != 0) {
+      // TODO(phoglund): This test will not work if compiled in release mode.
+      // This rather silly construct here is to avoid compilation errors when
+      // compiling in release. Release => no asserts => unused 'error' variable.
+      assert(false);
+    }
+
+    // Loop through an I420 file and render each frame
+    const int width = 352;
+    const int half_width = (width + 1) / 2;
+    const int height = 288;
+
+    VideoFrame videoFrame0;
+    videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
+
+    const uint32_t renderDelayMs = 500;
+
+    for (int i=0; i<TEST_FRAME_NUM; i++) {
+        GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
+        // Render this frame with the specified delay
+        videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp()
+                                       + renderDelayMs);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        SleepMs(1000/TEST_FRAME_RATE);
+    }
+
+
+    // Shut down
+    printf("Closing...\n");
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+
+    return 0;
+}
+
+int TestFullscreenStream(VideoRender* &renderModule,
+                         void* window,
+                         const VideoRenderType videoRenderType) {
+    VideoRender::DestroyVideoRender(renderModule);
+    renderModule = VideoRender::CreateVideoRender(12345, window, true, videoRenderType);
+
+    TestSingleStream(renderModule);
+
+    VideoRender::DestroyVideoRender(renderModule);
+    renderModule = VideoRender::CreateVideoRender(12345, window, false, videoRenderType);
+
+    return 0;
+}
+
+int TestBitmapText(VideoRender* renderModule) {
+#if defined(_WIN32)
+
+    int error = 0;
+    // Add settings for a stream to render
+    printf("Add stream 0 to entire window\n");
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+
+    printf("Adding Bitmap\n");
+    DDCOLORKEY ColorKey; // black
+    ColorKey.dwColorSpaceHighValue = RGB(0, 0, 0);
+    ColorKey.dwColorSpaceLowValue = RGB(0, 0, 0);
+    HBITMAP hbm = (HBITMAP)LoadImage(NULL,
+                                     (LPCTSTR)_T("renderStartImage.bmp"),
+                                     IMAGE_BITMAP, 0, 0, LR_LOADFROMFILE);
+    renderModule->SetBitmap(hbm, 0, &ColorKey, 0.0f, 0.0f, 0.3f,
+                             0.3f);
+
+    printf("Adding Text\n");
+    renderModule->SetText(1, (uint8_t*) "WebRtc Render Demo App", 20,
+                           RGB(255, 0, 0), RGB(0, 0, 0), 0.25f, 0.1f, 1.0f,
+                           1.0f);
+
+    printf("Start render\n");
+    error = renderModule->StartRender(streamId0);
+    assert(error == 0);
+
+    // Loop through an I420 file and render each frame
+    const int width = 352;
+    const int half_width = (width + 1) / 2;
+    const int height = 288;
+
+    VideoFrame videoFrame0;
+    videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
+
+    const uint32_t renderDelayMs = 500;
+
+    for (int i=0; i<TEST_FRAME_NUM; i++) {
+        GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
+        // Render this frame with the specified delay
+        videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                       renderDelayMs);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        SleepMs(1000/TEST_FRAME_RATE);
+    }
+    // Sleep and let all frames be rendered before closing
+    SleepMs(renderDelayMs*2);
+
+
+    // Shut down
+    printf("Closing...\n");
+    ColorKey.dwColorSpaceHighValue = RGB(0,0,0);
+    ColorKey.dwColorSpaceLowValue = RGB(0,0,0);
+    renderModule->SetBitmap(NULL, 0, &ColorKey, 0.0f, 0.0f, 0.0f, 0.0f);
+    renderModule->SetText(1, NULL, 20, RGB(255,255,255),
+                    RGB(0,0,0), 0.0f, 0.0f, 0.0f, 0.0f);
+
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+#endif  // _WIN32
+
+    return 0;
+}
+
+int TestMultipleStreams(VideoRender* renderModule) {
+    int error = 0;
+
+    // Add settings for a stream to render
+    printf("Add stream 0\n");
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 =
+        renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 0.45f, 0.45f);
+    assert(renderCallback0 != NULL);
+    printf("Add stream 1\n");
+    const int streamId1 = 1;
+    VideoRenderCallback* renderCallback1 =
+        renderModule->AddIncomingRenderStream(streamId1, 0, 0.55f, 0.0f, 1.0f, 0.45f);
+    assert(renderCallback1 != NULL);
+    printf("Add stream 2\n");
+    const int streamId2 = 2;
+    VideoRenderCallback* renderCallback2 =
+        renderModule->AddIncomingRenderStream(streamId2, 0, 0.0f, 0.55f, 0.45f, 1.0f);
+    assert(renderCallback2 != NULL);
+    printf("Add stream 3\n");
+    const int streamId3 = 3;
+    VideoRenderCallback* renderCallback3 =
+        renderModule->AddIncomingRenderStream(streamId3, 0, 0.55f, 0.55f, 1.0f, 1.0f);
+    assert(renderCallback3 != NULL);
+    error = renderModule->StartRender(streamId0);
+    if (error != 0) {
+      // TODO(phoglund): This test will not work if compiled in release mode.
+      // This rather silly construct here is to avoid compilation errors when
+      // compiling in release. Release => no asserts => unused 'error' variable.
+      assert(false);
+    }
+    error = renderModule->StartRender(streamId1);
+    assert(error == 0);
+    error = renderModule->StartRender(streamId2);
+    assert(error == 0);
+    error = renderModule->StartRender(streamId3);
+    assert(error == 0);
+
+    // Loop through an I420 file and render each frame
+    const int width = 352;
+    const int half_width = (width + 1) / 2;
+    const int height = 288;
+
+    VideoFrame videoFrame0;
+    videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
+    VideoFrame videoFrame1;
+    videoFrame1.CreateEmptyFrame(width, height, width, half_width, half_width);
+    VideoFrame videoFrame2;
+    videoFrame2.CreateEmptyFrame(width, height, width, half_width, half_width);
+    VideoFrame videoFrame3;
+    videoFrame3.CreateEmptyFrame(width, height, width, half_width, half_width);
+
+    const uint32_t renderDelayMs = 500;
+
+    // Render frames with the specified delay.
+    for (int i=0; i<TEST_FRAME_NUM; i++) {
+      GetTestVideoFrame(&videoFrame0, TEST_STREAM0_START_COLOR);
+
+      videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                     renderDelayMs);
+      renderCallback0->RenderFrame(streamId0, videoFrame0);
+
+      GetTestVideoFrame(&videoFrame1, TEST_STREAM1_START_COLOR);
+      videoFrame1.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                     renderDelayMs);
+      renderCallback1->RenderFrame(streamId1, videoFrame1);
+
+      GetTestVideoFrame(&videoFrame2,  TEST_STREAM2_START_COLOR);
+      videoFrame2.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                     renderDelayMs);
+      renderCallback2->RenderFrame(streamId2, videoFrame2);
+
+      GetTestVideoFrame(&videoFrame3, TEST_STREAM3_START_COLOR);
+      videoFrame3.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                     renderDelayMs);
+      renderCallback3->RenderFrame(streamId3, videoFrame3);
+
+      SleepMs(1000/TEST_FRAME_RATE);
+    }
+
+    // Shut down
+    printf("Closing...\n");
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+    error = renderModule->StopRender(streamId1);
+    assert(error == 0);
+    error = renderModule->DeleteIncomingRenderStream(streamId1);
+    assert(error == 0);
+    error = renderModule->StopRender(streamId2);
+    assert(error == 0);
+    error = renderModule->DeleteIncomingRenderStream(streamId2);
+    assert(error == 0);
+    error = renderModule->StopRender(streamId3);
+    assert(error == 0);
+    error = renderModule->DeleteIncomingRenderStream(streamId3);
+    assert(error == 0);
+
+    return 0;
+}
+
+int TestExternalRender(VideoRender* renderModule) {
+    int error = 0;
+    MyRenderCallback *externalRender = new MyRenderCallback();
+
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 =
+        renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f,
+                                                   1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+    error = renderModule->AddExternalRenderCallback(streamId0, externalRender);
+    if (error != 0) {
+      // TODO(phoglund): This test will not work if compiled in release mode.
+      // This rather silly construct here is to avoid compilation errors when
+      // compiling in release. Release => no asserts => unused 'error' variable.
+      assert(false);
+    }
+
+    error = renderModule->StartRender(streamId0);
+    assert(error == 0);
+
+    const int width = 352;
+    const int half_width = (width + 1) / 2;
+    const int height = 288;
+    VideoFrame videoFrame0;
+    videoFrame0.CreateEmptyFrame(width, height, width, half_width, half_width);
+
+    const uint32_t renderDelayMs = 500;
+    int frameCount = TEST_FRAME_NUM;
+    for (int i=0; i<frameCount; i++) {
+        videoFrame0.set_render_time_ms(TickTime::MillisecondTimestamp() +
+                                       renderDelayMs);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        SleepMs(33);
+    }
+
+    // Sleep and let all frames be rendered before closing
+    SleepMs(2*renderDelayMs);
+
+    // Shut down
+    printf("Closing...\n");
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+    assert(frameCount == externalRender->_cnt);
+
+    delete externalRender;
+    externalRender = NULL;
+
+    return 0;
+}
+
+void RunVideoRenderTests(void* window, VideoRenderType windowType) {
+    int myId = 12345;
+
+    // Create the render module
+    printf("Create render module\n");
+    VideoRender* renderModule = NULL;
+    renderModule = VideoRender::CreateVideoRender(myId,
+                                                  window,
+                                                  false,
+                                                  windowType);
+    assert(renderModule != NULL);
+
+    // ##### Test single stream rendering ####
+    printf("#### TestSingleStream ####\n");
+    if (TestSingleStream(renderModule) != 0) {
+        printf ("TestSingleStream failed\n");
+    }
+
+    // ##### Test fullscreen rendering ####
+    printf("#### TestFullscreenStream ####\n");
+    if (TestFullscreenStream(renderModule, window, windowType) != 0) {
+        printf ("TestFullscreenStream failed\n");
+    }
+
+    // ##### Test bitmap and text ####
+    printf("#### TestBitmapText ####\n");
+    if (TestBitmapText(renderModule) != 0) {
+        printf ("TestBitmapText failed\n");
+    }
+
+    // ##### Test multiple streams ####
+    printf("#### TestMultipleStreams ####\n");
+    if (TestMultipleStreams(renderModule) != 0) {
+        printf ("TestMultipleStreams failed\n");
+    }
+
+    // ##### Test external render ####
+    printf("#### TestExternalRender ####\n");
+    if (TestExternalRender(renderModule) != 0) {
+        printf ("TestExternalRender failed\n");
+    }
+
+    delete renderModule;
+    renderModule = NULL;
+
+    printf("VideoRender unit tests passed.\n");
+}
+
+// Note: The Mac main is implemented in testApi_mac.mm.
+#if defined(_WIN32)
+int _tmain(int argc, _TCHAR* argv[])
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+int main(int argc, char* argv[])
+#endif
+#if !defined(WEBRTC_MAC) && !defined(WEBRTC_ANDROID)
+{
+    // Create a window for testing.
+    void* window = NULL;
+#if defined(_WIN32)
+    HWND testHwnd;
+    WebRtcCreateWindow(testHwnd, 0, 352, 288);
+    window = (void*)testHwnd;
+    VideoRenderType windowType = kRenderWindows;
+#elif defined(WEBRTC_LINUX)
+    Window testWindow;
+    Display* display;
+    WebRtcCreateWindow(&testWindow, &display, 0, 352, 288);
+    VideoRenderType windowType = kRenderX11;
+    window = (void*)testWindow;
+#endif // WEBRTC_LINUX
+
+    RunVideoRenderTests(window, windowType);
+    return 0;
+}
+#endif  // !WEBRTC_MAC && !WEBRTC_ANDROID
diff --git a/webrtc/modules/video_render/test/testAPI/testAPI.h b/webrtc/modules/video_render/test/testAPI/testAPI.h
new file mode 100644
index 0000000..0655a5b
--- /dev/null
+++ b/webrtc/modules/video_render/test/testAPI/testAPI.h
@@ -0,0 +1,18 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
+
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+void RunVideoRenderTests(void* window, webrtc::VideoRenderType windowType);
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
diff --git a/webrtc/modules/video_render/test/testAPI/testAPI_android.cc b/webrtc/modules/video_render/test/testAPI/testAPI_android.cc
new file mode 100644
index 0000000..c62a62f
--- /dev/null
+++ b/webrtc/modules/video_render/test/testAPI/testAPI_android.cc
@@ -0,0 +1,15 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+int main(int argc, char* argv[]) {
+  // TODO(leozwang): The video render test app is not ready on Android yet;
+  // keep this as a dummy test for now and add Android-specific tests later.
+  return 0;
+}
diff --git a/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm b/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm
new file mode 100644
index 0000000..dfee4c7
--- /dev/null
+++ b/webrtc/modules/video_render/test/testAPI/testAPI_mac.mm
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testAPI.h"
+
+#include <iostream>
+
+#import <Foundation/Foundation.h>
+#import <Cocoa/Cocoa.h>
+#import <AppKit/AppKit.h>
+#import <QTKit/QTKit.h>
+#include <sys/time.h>
+
+#import "webrtc/modules/video_render/mac/cocoa_render_view.h"
+#include "webrtc/common_types.h"
+#include "webrtc/modules/include/module_common_types.h"
+#include "webrtc/modules/utility/include/process_thread.h"
+#include "webrtc/modules/video_render/video_render.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/system_wrappers/include/tick_util.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+using namespace webrtc;
+
+int WebRtcCreateWindow(CocoaRenderView*& cocoaRenderer, int winNum, int width, int height)
+{
+    // In Cocoa, rendering is not done directly to a window as it is on
+    // Windows and Linux. Instead it is rendered to a subclass of NSOpenGLView.
+
+    // create cocoa container window
+    NSRect outWindowFrame = NSMakeRect(200, 800, width + 20, height + 20);
+    NSWindow* outWindow = [[NSWindow alloc] initWithContentRect:outWindowFrame 
+                                                      styleMask:NSTitledWindowMask 
+                                                        backing:NSBackingStoreBuffered 
+                                                          defer:NO];
+    [outWindow orderOut:nil];
+    [outWindow setTitle:@"Cocoa Renderer"];
+    [outWindow setBackgroundColor:[NSColor blueColor]];
+
+    // create renderer and attach to window
+    NSRect cocoaRendererFrame = NSMakeRect(10, 10, width, height);
+    cocoaRenderer = [[CocoaRenderView alloc] initWithFrame:cocoaRendererFrame];
+    [[outWindow contentView] addSubview:(NSView*)cocoaRenderer];
+
+    [outWindow makeKeyAndOrderFront:NSApp];
+
+    return 0;
+}
+
+int main(int argc, const char* argv[]) {
+    NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
+    [NSApplication sharedApplication];
+
+    CocoaRenderView* testWindow;
+    WebRtcCreateWindow(testWindow, 0, 352, 288);
+    VideoRenderType windowType = kRenderCocoa;
+    void* window = (void*)testWindow;
+
+    RunVideoRenderTests(window, windowType);
+
+    [pool release];
+    return 0;
+}
diff --git a/webrtc/modules/video_render/video_render.gypi b/webrtc/modules/video_render/video_render.gypi
new file mode 100644
index 0000000..e8cc03a
--- /dev/null
+++ b/webrtc/modules/video_render/video_render.gypi
@@ -0,0 +1,218 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      # Note: this library is missing an implementation for the video render
+      # module. Targets must also link with either 'video_render' or
+      # 'video_render_module_internal_impl' if they want to compile and use
+      # the internal renderer as the default renderer.
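+      #
+      # For example (a sketch; the exact gyp path depends on the including
+      # project), an executable using the platform renderers would add:
+      #   'dependencies': [
+      #     '<(webrtc_root)/modules/modules.gyp:video_render_module_internal_impl',
+      #   ],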
+      'target_name': 'video_render_module',
+      'type': 'static_library',
+      'dependencies': [
+        'webrtc_utility',
+        '<(webrtc_root)/common.gyp:webrtc_common',
+        '<(webrtc_root)/common_video/common_video.gyp:common_video',
+        '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+      ],
+      'sources': [
+        'external/video_render_external_impl.cc',
+        'external/video_render_external_impl.h',
+        'i_video_render.h',
+        'video_render.h',
+        'video_render_defines.h',
+        'video_render_impl.h',
+      ],
+    },
+    {
+      # Default video_render_module implementation that only supports
+      # external renderers.
+      'target_name': 'video_render',
+      'type': 'static_library',
+      'dependencies': [
+        'video_render_module',
+      ],
+      'sources': [
+        'video_render_impl.cc',
+      ],
+    },
+  ], # targets
+
+  'conditions': [
+    ['build_with_chromium==0', {
+      'targets': [
+        {
+          # video_render_module implementation that supports the internal
+          # video_render implementation.
+          'target_name': 'video_render_module_internal_impl',
+          'type': 'static_library',
+          'dependencies': [
+            '<(webrtc_root)/common.gyp:webrtc_common',
+            'video_render_module',
+          ],
+          'sources': [
+            'video_render_internal_impl.cc',
+          ],
+           # TODO(andrew): with the proper suffix, these files will be excluded
+           # automatically.
+          'conditions': [
+            ['OS=="android"', {
+              'sources': [
+                'android/video_render_android_impl.h',
+                'android/video_render_android_native_opengl2.h',
+                'android/video_render_android_surface_view.h',
+                'android/video_render_opengles20.h',
+                'android/video_render_android_impl.cc',
+                'android/video_render_android_native_opengl2.cc',
+                'android/video_render_android_surface_view.cc',
+                'android/video_render_opengles20.cc',
+              ],
+              'link_settings': {
+                'libraries': [
+                  '-lGLESv2',
+                ],
+              },
+            }],
+            ['OS=="ios"', {
+              'sources': [
+                # iOS
+                'ios/open_gles20.h',
+                'ios/open_gles20.mm',
+                'ios/video_render_ios_channel.h',
+                'ios/video_render_ios_channel.mm',
+                'ios/video_render_ios_gles20.h',
+                'ios/video_render_ios_gles20.mm',
+                'ios/video_render_ios_impl.h',
+                'ios/video_render_ios_impl.mm',
+                'ios/video_render_ios_view.h',
+                'ios/video_render_ios_view.mm',
+              ],
+              'xcode_settings': {
+                'CLANG_ENABLE_OBJC_ARC': 'YES',
+              },
+              'all_dependent_settings': {
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-framework OpenGLES',
+                    '-framework QuartzCore',
+                    '-framework UIKit',
+                  ],
+                },
+              },
+            }],
+            ['OS=="linux"', {
+              'sources': [
+                'linux/video_render_linux_impl.h',
+                'linux/video_x11_channel.h',
+                'linux/video_x11_render.h',
+                'linux/video_render_linux_impl.cc',
+                'linux/video_x11_channel.cc',
+                'linux/video_x11_render.cc',
+              ],
+              'link_settings': {
+                'libraries': [
+                  '-lXext',
+                ],
+              },
+            }],
+            ['OS=="mac"', {
+              'sources': [
+                'mac/cocoa_full_screen_window.h',
+                'mac/cocoa_render_view.h',
+                'mac/video_render_agl.h',
+                'mac/video_render_mac_carbon_impl.h',
+                'mac/video_render_mac_cocoa_impl.h',
+                'mac/video_render_nsopengl.h',
+                'mac/video_render_nsopengl.mm',
+                'mac/video_render_mac_cocoa_impl.mm',
+                'mac/video_render_agl.cc',
+                'mac/video_render_mac_carbon_impl.cc',
+                'mac/cocoa_render_view.mm',
+                'mac/cocoa_full_screen_window.mm',
+              ],
+            }],
+            ['OS=="win"', {
+              'sources': [
+                'windows/i_video_render_win.h',
+                'windows/video_render_direct3d9.h',
+                'windows/video_render_windows_impl.h',
+                'windows/video_render_direct3d9.cc',
+                'windows/video_render_windows_impl.cc',
+              ],
+              'include_dirs': [
+                '<(directx_sdk_path)/Include',
+              ],
+            }],
+            ['OS=="win" and clang==1', {
+              'msvs_settings': {
+                'VCCLCompilerTool': {
+                  'AdditionalOptions': [
+                    # Disable warnings failing when compiling with Clang on Windows.
+                    # https://bugs.chromium.org/p/webrtc/issues/detail?id=5366
+                    '-Wno-comment',
+                    '-Wno-reorder',
+                    '-Wno-unused-value',
+                    '-Wno-unused-private-field',
+                  ],
+                },
+              },
+            }],
+          ] # conditions
+        },
+      ],
+    }], # build_with_chromium==0
+    ['include_tests==1 and OS!="ios"', {
+      'targets': [
+        {
+          # Does not compile on iOS: webrtc:4755.
+          'target_name': 'video_render_tests',
+          'type': 'executable',
+          'dependencies': [
+            'video_render_module_internal_impl',
+            'webrtc_utility',
+            '<(webrtc_root)/common.gyp:webrtc_common',
+            '<(webrtc_root)/system_wrappers/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/common_video/common_video.gyp:common_video',
+          ],
+          'sources': [
+            'test/testAPI/testAPI.cc',
+            'test/testAPI/testAPI.h',
+            'test/testAPI/testAPI_android.cc',
+            'test/testAPI/testAPI_mac.mm',
+          ],
+          'conditions': [
+            ['OS=="mac" or OS=="linux"', {
+              'cflags': [
+                '-Wno-write-strings',
+              ],
+              'ldflags': [
+                '-lpthread -lm',
+              ],
+            }],
+            ['OS=="linux"', {
+              'link_settings': {
+                'libraries': [
+                  '-lX11',
+                ],
+              },
+            }],
+            ['OS=="mac"', {
+              'xcode_settings': {
+                'OTHER_LDFLAGS': [
+                  '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL',
+                ],
+              },
+            }],
+          ] # conditions
+        }, # video_render_tests
+      ], # targets
+    }], # include_tests==1 and OS!=ios
+  ], # conditions
+}
+
diff --git a/webrtc/modules/video_render/video_render.h b/webrtc/modules/video_render/video_render.h
new file mode 100644
index 0000000..a24acb9
--- /dev/null
+++ b/webrtc/modules/video_render/video_render.h
@@ -0,0 +1,255 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
+
+/*
+ * video_render.h
+ *
+ * This header file together with module.h and module_common_types.h
+ * contains all of the APIs that are needed for using the video render
+ * module class.
+ *
+ */
+
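+/*
+ * A minimal usage sketch (illustrative only; error handling omitted):
+ *
+ *   VideoRender* module = VideoRender::CreateVideoRender(0, window, false);
+ *   VideoRenderCallback* cb =
+ *       module->AddIncomingRenderStream(0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+ *   module->StartRender(0);
+ *   // Deliver frames through cb->RenderFrame(0, frame).
+ *   module->StopRender(0);
+ *   module->DeleteIncomingRenderStream(0);
+ *   VideoRender::DestroyVideoRender(module);
+ */
+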
+#include "webrtc/modules/include/module.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+namespace webrtc {
+
+// Class definitions
+class VideoRender: public Module
+{
+public:
+    /*
+     *   Create a video render module object
+     *
+     *   id              - unique identifier of this video render module object
+     *   window          - pointer to the window to render to
+     *   fullscreen      - true if this is a fullscreen renderer
+     *   videoRenderType - type of renderer to create
+     */
+    static VideoRender* CreateVideoRender(
+        const int32_t id,
+        void* window,
+        const bool fullscreen,
+        const VideoRenderType videoRenderType = kRenderDefault);
+
+    /*
+     *   Destroy a video render module object
+     *
+     *   module  - object to destroy
+     */
+    static void DestroyVideoRender(VideoRender* module);
+
+    int64_t TimeUntilNextProcess() override = 0;
+    void Process() override = 0;
+
+    /**************************************************************************
+     *
+     *   Window functions
+     *
+     ***************************************************************************/
+
+    /*
+     *   Get window for this renderer
+     */
+    virtual void* Window() = 0;
+
+    /*
+     *   Change render window
+     *
+     *   window      - the new render window, assuming same type as originally created.
+     */
+    virtual int32_t ChangeWindow(void* window) = 0;
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    /*
+     *   Add incoming render stream
+     *
+     *   streamID    - id of the stream to add
+     *   zOrder      - relative render order for the streams, 0 = on top
+     *   left        - position of the stream in the window, [0.0f, 1.0f]
+     *   top         - position of the stream in the window, [0.0f, 1.0f]
+     *   right       - position of the stream in the window, [0.0f, 1.0f]
+     *   bottom      - position of the stream in the window, [0.0f, 1.0f]
+     *
+     *   Return      - callback class to use for delivering new frames to render.
+     */
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const uint32_t streamId,
+                                      const uint32_t zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom) = 0;
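+    //
+    // Example (illustrative): render a stream on top of any others, covering
+    // the top-left quadrant of the window:
+    //   AddIncomingRenderStream(0, 0, 0.0f, 0.0f, 0.5f, 0.5f);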
+    /*
+     *   Delete incoming render stream
+     *
+     *   streamID    - id of the stream to delete
+     */
+    virtual int32_t
+            DeleteIncomingRenderStream(const uint32_t streamId) = 0;
+
+    /*
+     *   Add incoming render callback, used for external rendering
+     *
+     *   streamID     - id of the stream the callback is used for
+     *   renderObject - the VideoRenderCallback to use for this stream, NULL to remove
+     *
+     *   Return      - 0 on success, -1 on failure.
+     */
+    virtual int32_t
+            AddExternalRenderCallback(const uint32_t streamId,
+                                      VideoRenderCallback* renderObject) = 0;
+
+    /*
+     *   Get the properties for an incoming render stream
+     *
+     *   streamID    - [in] id of the stream to get properties for
+     *   zOrder      - [out] relative render order for the streams, 0 = on top
+     *   left        - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   top         - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   right       - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   bottom      - [out] position of the stream in the window, [0.0f, 1.0f]
+     */
+    virtual int32_t
+            GetIncomingRenderStreamProperties(const uint32_t streamId,
+                                              uint32_t& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const = 0;
+    /*
+     *   The incoming frame rate to the module, not the rate rendered in the window.
+     */
+    virtual uint32_t
+            GetIncomingFrameRate(const uint32_t streamId) = 0;
+
+    /*
+     *   Returns the number of incoming streams added to this render module
+     */
+    virtual uint32_t GetNumIncomingRenderStreams() const = 0;
+
+    /*
+     *   Returns true if this render module has the streamId added, false otherwise.
+     */
+    virtual bool
+            HasIncomingRenderStream(const uint32_t streamId) const = 0;
+
+    /*
+     *   Registers a callback to get raw images at the same time as they are
+     *   sent to the renderer. To be used for external rendering.
+     */
+    virtual int32_t
+            RegisterRawFrameCallback(const uint32_t streamId,
+                                     VideoRenderCallback* callbackObj) = 0;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    /*
+     *   Starts rendering the specified stream
+     */
+    virtual int32_t StartRender(const uint32_t streamId) = 0;
+
+    /*
+     *   Stops the renderer
+     */
+    virtual int32_t StopRender(const uint32_t streamId) = 0;
+
+    /*
+     *   Resets the renderer
+     *   No streams are removed. The state should be as if AddStream had just
+     *   been called.
+     */
+    virtual int32_t ResetRender() = 0;
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    /*
+     *   Returns the preferred render video type
+     */
+    virtual RawVideoType PreferredVideoType() const = 0;
+
+    /*
+     *   Returns true if the renderer is in fullscreen mode, otherwise false.
+     */
+    virtual bool IsFullScreen() = 0;
+
+    /*
+     *   Gets screen resolution in pixels
+     */
+    virtual int32_t
+            GetScreenResolution(uint32_t& screenWidth,
+                                uint32_t& screenHeight) const = 0;
+
+    /*
+     *   Get the actual render rate for this stream, i.e. the rendered frame
+     *   rate, not the rate of frames delivered to the renderer.
+     */
+    virtual uint32_t RenderFrameRate(const uint32_t streamId) = 0;
+
+    /*
+     *   Set cropping of incoming stream
+     */
+    virtual int32_t SetStreamCropping(const uint32_t streamId,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) = 0;
+
+    /*
+     *   Re-configure the renderer
+     */
+
+    // Set the expected time needed by the graphics card or external renderer,
+    // i.e. frames will be released for rendering |delay_ms| before the render
+    // time set in the video frame.
+    virtual int32_t SetExpectedRenderDelay(uint32_t stream_id,
+                                           int32_t delay_ms) = 0;
+
+    virtual int32_t ConfigureRenderer(const uint32_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) = 0;
+
+    virtual int32_t SetTransparentBackground(const bool enable) = 0;
+
+    virtual int32_t FullScreenRender(void* window, const bool enable) = 0;
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left, const float top,
+                              const float right, const float bottom) = 0;
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t textColorRef,
+                            const uint32_t backgroundColorRef,
+                            const float left, const float top,
+                            const float right, const float bottom) = 0;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_H_
diff --git a/webrtc/modules/video_render/video_render_defines.h b/webrtc/modules/video_render/video_render_defines.h
new file mode 100644
index 0000000..999707c
--- /dev/null
+++ b/webrtc/modules/video_render/video_render_defines.h
@@ -0,0 +1,70 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
+
+#include "webrtc/common_types.h"
+#include "webrtc/common_video/include/incoming_video_stream.h"
+#include "webrtc/modules/include/module_common_types.h"
+
+namespace webrtc
+{
+// Defines
+#ifndef NULL
+#define NULL    0
+#endif
+
+// Enums
+enum VideoRenderType
+{
+    kRenderExternal = 0, // External
+    kRenderWindows = 1, // Windows
+    kRenderCocoa = 2, // Mac
+    kRenderCarbon = 3,
+    kRenderiOS = 4, // iPhone
+    kRenderAndroid = 5, // Android
+    kRenderX11 = 6, // Linux
+    kRenderDefault
+};
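+// Example (illustrative, mirroring the test app): a Windows client passes
+// kRenderWindows, Linux passes kRenderX11 and Mac passes kRenderCocoa;
+// kRenderDefault lets the module choose (the external-only implementation in
+// video_render_impl.cc maps it to kRenderExternal).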
+
+// Runtime errors
+enum VideoRenderError
+{
+    kRenderShutDown = 0,
+    kRenderPerformanceAlarm = 1
+};
+
+// Feedback class to be implemented by module user
+class VideoRenderFeedback
+{
+public:
+    virtual void OnRenderError(const int32_t streamId,
+                               const VideoRenderError error) = 0;
+
+protected:
+    virtual ~VideoRenderFeedback()
+    {
+    }
+};
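+
+// Illustrative user implementation (a sketch, not part of the API):
+//
+//   class RenderErrorLogger : public VideoRenderFeedback {
+//    public:
+//     void OnRenderError(const int32_t streamId,
+//                        const VideoRenderError error) override {
+//       printf("Render error %d on stream %d\n", static_cast<int>(error),
+//              static_cast<int>(streamId));
+//     }
+//   };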
+
+// Mobile enums
+enum StretchMode
+{
+    kStretchToInsideEdge = 1,
+    kStretchToOutsideEdge = 2,
+    kStretchMatchWidth = 3,
+    kStretchMatchHeight = 4,
+    kStretchNone = 5
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_DEFINES_H_
diff --git a/webrtc/modules/video_render/video_render_impl.cc b/webrtc/modules/video_render/video_render_impl.cc
new file mode 100644
index 0000000..f3d12dc
--- /dev/null
+++ b/webrtc/modules/video_render/video_render_impl.cc
@@ -0,0 +1,550 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include "webrtc/common_video/include/incoming_video_stream.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/video_render/external/video_render_external_impl.h"
+#include "webrtc/modules/video_render/i_video_render.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_impl.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+VideoRender*
+VideoRender::CreateVideoRender(const int32_t id,
+                               void* window,
+                               const bool fullscreen,
+                               const VideoRenderType videoRenderType/*=kRenderDefault*/)
+{
+    VideoRenderType resultVideoRenderType = videoRenderType;
+    if (videoRenderType == kRenderDefault)
+    {
+        resultVideoRenderType = kRenderExternal;
+    }
+    return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
+                                     fullscreen);
+}
+
+void VideoRender::DestroyVideoRender(VideoRender* module)
+{
+    if (module)
+    {
+        delete module;
+    }
+}
+
+ModuleVideoRenderImpl::ModuleVideoRenderImpl(
+                                             const int32_t id,
+                                             const VideoRenderType videoRenderType,
+                                             void* window,
+                                             const bool fullscreen) :
+    _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
+{
+
+    // Create platform specific renderer
+    switch (videoRenderType)
+    {
+        case kRenderExternal:
+        {
+            VideoRenderExternalImpl* ptrRenderer(NULL);
+            ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
+                                                      window, _fullScreen);
+            if (ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+            }
+        }
+            break;
+        default:
+            // Error...
+            break;
+    }
+    if (_ptrRenderer)
+    {
+        if (_ptrRenderer->Init() == -1)
+        {
+            // Init failure is ignored here; the renderer object is kept but
+            // may not be functional.
+        }
+    }
+}
+
+ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
+{
+    delete &_moduleCrit;
+
+    for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+         it != _streamRenderMap.end();
+         ++it) {
+      delete it->second;
+    }
+
+    // Delete platform specific renderer
+    if (_ptrRenderer)
+    {
+        VideoRenderType videoRenderType = _ptrRenderer->RenderType();
+
+        switch (videoRenderType)
+        {
+            case kRenderExternal:
+            {
+                VideoRenderExternalImpl* ptrRenderer =
+                    reinterpret_cast<VideoRenderExternalImpl*>(_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+
+            default:
+                // Error...
+                break;
+        }
+    }
+}
+
+int64_t ModuleVideoRenderImpl::TimeUntilNextProcess()
+{
+    // Not used.
+    return 50;
+}
+
+// No periodic work; frame delivery is handled per stream by the
+// IncomingVideoStream objects created in AddIncomingRenderStream.
+void ModuleVideoRenderImpl::Process() {}
+
+void*
+ModuleVideoRenderImpl::Window()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+    return _ptrWindow;
+}
+
+int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
+{
+    return -1;
+}
+
+int32_t ModuleVideoRenderImpl::Id()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+    return _id;
+}
+
+uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
+  CriticalSectionScoped cs(&_moduleCrit);
+
+  IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
+
+  if (it == _streamRenderMap.end()) {
+    // This stream doesn't exist
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: stream doesn't exist",
+                 __FUNCTION__);
+    return 0;
+  }
+  assert(it->second != NULL);
+  return it->second->IncomingRate();
+}
+
+VideoRenderCallback*
+ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
+                                               const uint32_t zOrder,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return NULL;
+    }
+
+    if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
+        // The stream already exists...
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream already exists", __FUNCTION__);
+        return NULL;
+    }
+
+    VideoRenderCallback* ptrRenderCallback =
+            _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
+                                                  right, bottom);
+    if (ptrRenderCallback == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Can't create incoming stream in renderer",
+                     __FUNCTION__);
+        return NULL;
+    }
+
+    // Create the platform-independent incoming stream handler.
+    IncomingVideoStream* ptrIncomingStream =
+        new IncomingVideoStream(streamId, false);
+    ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
+    VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
+
+    // Store the stream
+    _streamRenderMap[streamId] = ptrIncomingStream;
+
+    return moduleCallback;
+}
+
+int32_t ModuleVideoRenderImpl::DeleteIncomingRenderStream(
+    const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+    if (item == _streamRenderMap.end())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    delete item->second;
+
+    _ptrRenderer->DeleteIncomingRenderStream(streamId);
+
+    _streamRenderMap.erase(item);
+
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
+    const uint32_t streamId,
+    VideoRenderCallback* renderObject) {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    if (item->second == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not get stream", __FUNCTION__);
+        return -1;
+    }
+    item->second->SetExternalCallback(renderObject);
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
+    const uint32_t streamId,
+    uint32_t& zOrder,
+    float& left,
+    float& top,
+    float& right,
+    float& bottom) const {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
+                                                           left, top, right,
+                                                           bottom);
+}
+
+uint32_t ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    return static_cast<uint32_t>(_streamRenderMap.size());
+}
+
+bool ModuleVideoRenderImpl::HasIncomingRenderStream(
+    const uint32_t streamId) const {
+  CriticalSectionScoped cs(&_moduleCrit);
+
+  return _streamRenderMap.find(streamId) != _streamRenderMap.end();
+}
+
+int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
+    const uint32_t streamId,
+    VideoRenderCallback* callbackObj) {
+  return -1;
+}
+
+int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    // Start the stream
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        return -1;
+    }
+
+    if (item->second->Start() == -1)
+    {
+        return -1;
+    }
+
+    // Start the HW renderer
+    if (_ptrRenderer->StartRender() == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::StopRender(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s(%d): No renderer", __FUNCTION__, streamId);
+        return -1;
+    }
+
+    // Stop the incoming stream
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        return -1;
+    }
+
+    if (item->second->Stop() == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::ResetRender()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    int32_t ret = 0;
+    // Loop through all incoming streams and reset them
+    for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+         it != _streamRenderMap.end();
+         ++it) {
+      if (it->second->Reset() == -1)
+        ret = -1;
+    }
+    return ret;
+}
+
+RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        return kVideoI420;
+    }
+
+    return _ptrRenderer->PerferedVideoType();
+}
+
+bool ModuleVideoRenderImpl::IsFullScreen()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->FullScreen();
+}
+
+int32_t ModuleVideoRenderImpl::GetScreenResolution(uint32_t& screenWidth,
+                                                   uint32_t& screenHeight) const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
+}
+
+uint32_t ModuleVideoRenderImpl::RenderFrameRate(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return 0;
+    }
+    return _ptrRenderer->RenderFrameRate(streamId);
+}
+
+int32_t ModuleVideoRenderImpl::SetStreamCropping(const uint32_t streamId,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetTransparentBackground(enable);
+}
+
+int32_t ModuleVideoRenderImpl::FullScreenRender(void* window, const bool enable)
+{
+    // Not supported by this module.
+    return -1;
+}
+
+int32_t ModuleVideoRenderImpl::SetText(const uint8_t textId,
+                                       const uint8_t* text,
+                                       const int32_t textLength,
+                                       const uint32_t textColorRef,
+                                       const uint32_t backgroundColorRef,
+                                       const float left, const float top,
+                                       const float right, const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
+                                 backgroundColorRef, left, top, right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
+                                         const uint8_t pictureId,
+                                         const void* colorKey,
+                                         const float left,
+                                         const float top,
+                                         const float right,
+                                         const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
+                                   right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
+    uint32_t stream_id, int32_t delay_ms) {
+  CriticalSectionScoped cs(&_moduleCrit);
+
+  if (!_ptrRenderer) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: No renderer", __FUNCTION__);
+    return -1;
+  }
+
+  IncomingVideoStreamMap::const_iterator item =
+      _streamRenderMap.find(stream_id);
+  if (item == _streamRenderMap.end()) {
+    // This stream doesn't exist
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s(%u, %d): stream doesn't exist", __FUNCTION__, stream_id,
+                 delay_ms);
+    return -1;
+  }
+
+  assert(item->second != NULL);
+  return item->second->SetExpectedRenderDelay(delay_ms);
+}
+
+int32_t ModuleVideoRenderImpl::ConfigureRenderer(const uint32_t streamId,
+                                                 const unsigned int zOrder,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
+                                           bottom);
+}
+
+}  // namespace webrtc
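For reviewers unfamiliar with this module, a minimal usage sketch of the API
restored above; the module id, window handle and stream id are placeholder
values, and error handling is omitted:

    #include "webrtc/modules/video_render/video_render.h"

    void RenderExample(void* platformWindow) {
      // The default last argument (kRenderDefault) picks the platform renderer.
      webrtc::VideoRender* render = webrtc::VideoRender::CreateVideoRender(
          0 /* id */, platformWindow, false /* fullscreen */);
      // Map stream 1 to the whole window; frames delivered to the returned
      // callback end up on screen.
      webrtc::VideoRenderCallback* sink = render->AddIncomingRenderStream(
          1 /* streamId */, 0 /* zOrder */, 0.0f, 0.0f, 1.0f, 1.0f);
      render->StartRender(1);
      // ... the decoder calls sink->RenderFrame(1, videoFrame) per frame ...
      render->StopRender(1);
      render->DeleteIncomingRenderStream(1);
      webrtc::VideoRender::DestroyVideoRender(render);
    }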
diff --git a/webrtc/modules/video_render/video_render_impl.h b/webrtc/modules/video_render/video_render_impl.h
new file mode 100644
index 0000000..8dfa57d
--- /dev/null
+++ b/webrtc/modules/video_render/video_render_impl.h
@@ -0,0 +1,208 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
+
+#include <map>
+
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/video_render/video_render.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class IncomingVideoStream;
+class IVideoRender;
+
+// Class definitions
+class ModuleVideoRenderImpl: public VideoRender
+{
+public:
+    /*
+     *   VideoRenderer constructor/destructor
+     */
+    ModuleVideoRenderImpl(const int32_t id,
+                          const VideoRenderType videoRenderType,
+                          void* window, const bool fullscreen);
+
+    virtual ~ModuleVideoRenderImpl();
+
+    virtual int64_t TimeUntilNextProcess();
+    virtual void Process();
+
+    /*
+     *   Returns the render window
+     */
+    virtual void* Window();
+
+    /*
+     *   Change render window
+     */
+    virtual int32_t ChangeWindow(void* window);
+
+    /*
+     *   Returns module id
+     */
+    int32_t Id();
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    /*
+     *   Add incoming render stream
+     */
+    virtual VideoRenderCallback* AddIncomingRenderStream(
+        const uint32_t streamId,
+        const uint32_t zOrder,
+        const float left, const float top,
+        const float right, const float bottom);
+    /*
+     *   Delete incoming render stream
+     */
+    virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId);
+
+    /*
+     *   Add incoming render callback, used for external rendering
+     */
+    virtual int32_t AddExternalRenderCallback(
+        const uint32_t streamId,
+        VideoRenderCallback* renderObject);
+
+    /*
+     *   Get the properties for an incoming render stream
+     */
+    virtual int32_t GetIncomingRenderStreamProperties(
+        const uint32_t streamId,
+        uint32_t& zOrder,
+        float& left, float& top,
+        float& right, float& bottom) const;
+    /*
+     *   Incoming frame rate for the specified stream.
+     */
+    virtual uint32_t GetIncomingFrameRate(const uint32_t streamId);
+
+    /*
+     *   Returns the number of incoming streams added to this render module
+     */
+    virtual uint32_t GetNumIncomingRenderStreams() const;
+
+    /*
+     *   Returns true if this render module has the streamId added, false otherwise.
+     */
+    virtual bool HasIncomingRenderStream(const uint32_t streamId) const;
+
+    /*
+     *   Register a callback for raw video frames. Not supported by this
+     *   module; the implementation always returns -1.
+     */
+    virtual int32_t RegisterRawFrameCallback(const uint32_t streamId,
+                                             VideoRenderCallback* callbackObj);
+
+    virtual int32_t SetExpectedRenderDelay(uint32_t stream_id,
+                                           int32_t delay_ms);
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    /*
+     *   Starts rendering the specified stream
+     */
+    virtual int32_t StartRender(const uint32_t streamId);
+
+    /*
+     *   Stops the renderer
+     */
+    virtual int32_t StopRender(const uint32_t streamId);
+
+    /*
+     *   Resets the renderer to its start state. No streams are removed.
+     */
+    virtual int32_t ResetRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    /*
+     *   Returns the preferred render video type
+     */
+    virtual RawVideoType PreferredVideoType() const;
+
+    /*
+     *   Returns true if the renderer is in fullscreen mode, otherwise false.
+     */
+    virtual bool IsFullScreen();
+
+    /*
+     *   Gets screen resolution in pixels
+     */
+    virtual int32_t GetScreenResolution(uint32_t& screenWidth,
+                                        uint32_t& screenHeight) const;
+
+    /*
+     *   Get the actual render rate for this stream, i.e. the rendered frame
+     *   rate, not the rate of frames delivered to the renderer.
+     */
+    virtual uint32_t RenderFrameRate(const uint32_t streamId);
+
+    /*
+     *   Set cropping of incoming stream
+     */
+    virtual int32_t SetStreamCropping(const uint32_t streamId,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t ConfigureRenderer(const uint32_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t SetTransparentBackground(const bool enable);
+
+    virtual int32_t FullScreenRender(void* window, const bool enable);
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left, const float top,
+                              const float right, const float bottom);
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t textColorRef,
+                            const uint32_t backgroundColorRef,
+                            const float left, const float top,
+                            const float right, const float bottom);
+
+private:
+    int32_t _id;
+    CriticalSectionWrapper& _moduleCrit;
+    void* _ptrWindow;
+    bool _fullScreen;
+
+    IVideoRender* _ptrRenderer;
+    typedef std::map<uint32_t, IncomingVideoStream*> IncomingVideoStreamMap;
+    IncomingVideoStreamMap _streamRenderMap;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
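Note for reviewers: AddExternalRenderCallback() declared above bypasses the
platform renderer and hands incoming frames to a caller-supplied sink. A
minimal sketch of such a sink, assuming the VideoRenderCallback interface
used elsewhere in this CL (the class name is hypothetical):

    #include "webrtc/modules/video_render/video_render_defines.h"

    // Hypothetical sink that forwards decoded frames to application code.
    class FrameSink : public webrtc::VideoRenderCallback {
     public:
      int32_t RenderFrame(const uint32_t streamId,
                          const webrtc::VideoFrame& videoFrame) override {
        // Hand the frame to the UI layer here.
        return 0;
      }
    };

    // Usage: module->AddExternalRenderCallback(streamId, &sink);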
diff --git a/webrtc/modules/video_render/video_render_internal.h b/webrtc/modules/video_render/video_render_internal.h
new file mode 100644
index 0000000..0508c1a
--- /dev/null
+++ b/webrtc/modules/video_render/video_render_internal.h
@@ -0,0 +1,27 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
+
+#ifdef ANDROID
+#include <jni.h>
+
+namespace webrtc {
+
+// To use the internal WebRTC video renderer on Android, the JVM object
+// must be set via this method.
+int32_t SetRenderAndroidVM(JavaVM* javaVM);
+
+}  // namespace webrtc
+
+#endif  // ANDROID
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_VIDEO_RENDER_INTERNAL_H_
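A short sketch of how an Android embedder would typically call this,
assuming the standard JNI_OnLoad entry point (illustrative only):

    #include <jni.h>

    #include "webrtc/modules/video_render/video_render_internal.h"

    // Called by the Android runtime when the native library is loaded.
    jint JNI_OnLoad(JavaVM* vm, void* /* reserved */) {
      // Hand the JVM to the internal renderer so it can attach threads and
      // reach the Java rendering classes.
      webrtc::SetRenderAndroidVM(vm);
      return JNI_VERSION_1_6;
    }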
diff --git a/webrtc/modules/video_render/video_render_internal_impl.cc b/webrtc/modules/video_render/video_render_internal_impl.cc
new file mode 100644
index 0000000..ac89e7f
--- /dev/null
+++ b/webrtc/modules/video_render/video_render_internal_impl.cc
@@ -0,0 +1,773 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include "webrtc/common_video/include/incoming_video_stream.h"
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/video_render/i_video_render.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+#include "webrtc/modules/video_render/video_render_impl.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+#if defined(_WIN32)
+#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
+#define STANDARD_RENDERING kRenderWindows
+
+// WEBRTC_IOS should go before WEBRTC_MAC because WEBRTC_MAC
+// gets defined if WEBRTC_IOS is defined
+#elif defined(WEBRTC_IOS)
+#define STANDARD_RENDERING kRenderiOS
+#include "webrtc/modules/video_render/ios/video_render_ios_impl.h"
+#elif defined(WEBRTC_MAC)
+#if defined(COCOA_RENDERING)
+#define STANDARD_RENDERING kRenderCocoa
+#include "webrtc/modules/video_render/mac/video_render_mac_cocoa_impl.h"
+#elif defined(CARBON_RENDERING)
+#define STANDARD_RENDERING kRenderCarbon
+#include "webrtc/modules/video_render/mac/video_render_mac_carbon_impl.h"
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+#include "webrtc/modules/video_render/android/video_render_android_impl.h"
+#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
+#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
+#define STANDARD_RENDERING kRenderAndroid
+
+#elif defined(WEBRTC_LINUX)
+#include "webrtc/modules/video_render/linux/video_render_linux_impl.h"
+#define STANDARD_RENDERING kRenderX11
+
+#else
+// Other platforms
+#endif
+
+// For external rendering
+#include "webrtc/modules/video_render/external/video_render_external_impl.h"
+#ifndef STANDARD_RENDERING
+#define STANDARD_RENDERING kRenderExternal
+#endif  // STANDARD_RENDERING
+
+namespace webrtc {
+
+VideoRender*
+VideoRender::CreateVideoRender(const int32_t id,
+                               void* window,
+                               const bool fullscreen,
+                               const VideoRenderType videoRenderType/*=kRenderDefault*/)
+{
+    VideoRenderType resultVideoRenderType = videoRenderType;
+    if (videoRenderType == kRenderDefault)
+    {
+        resultVideoRenderType = STANDARD_RENDERING;
+    }
+    return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
+                                     fullscreen);
+}
+
+void VideoRender::DestroyVideoRender(VideoRender* module)
+{
+    if (module)
+    {
+        delete module;
+    }
+}
+
+ModuleVideoRenderImpl::ModuleVideoRenderImpl(
+    const int32_t id,
+    const VideoRenderType videoRenderType,
+    void* window,
+    const bool fullscreen) :
+    _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL)
+{
+    // Create the platform-specific renderer.
+    switch (videoRenderType)
+    {
+#if defined(_WIN32)
+        case kRenderWindows:
+        {
+            VideoRenderWindowsImpl* ptrRenderer;
+            ptrRenderer = new VideoRenderWindowsImpl(_id, videoRenderType, window, _fullScreen);
+            if (ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+
+#elif defined(WEBRTC_IOS)
+        case kRenderiOS:
+        {
+            VideoRenderIosImpl* ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+
+#elif defined(WEBRTC_MAC)
+
+#if defined(COCOA_RENDERING)
+        case kRenderCocoa:
+        {
+            VideoRenderMacCocoaImpl* ptrRenderer = new VideoRenderMacCocoaImpl(_id, videoRenderType, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+
+        break;
+#elif defined(CARBON_RENDERING)
+        case kRenderCarbon:
+        {
+            VideoRenderMacCarbonImpl* ptrRenderer = new VideoRenderMacCarbonImpl(_id, videoRenderType, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+        case kRenderAndroid:
+        {
+            if(AndroidNativeOpenGl2Renderer::UseOpenGL2(window))
+            {
+                AndroidNativeOpenGl2Renderer* ptrRenderer = NULL;
+                ptrRenderer = new AndroidNativeOpenGl2Renderer(_id, videoRenderType, window, _fullScreen);
+                if (ptrRenderer)
+                {
+                    _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+                }
+            }
+            else
+            {
+                AndroidSurfaceViewRenderer* ptrRenderer = NULL;
+                ptrRenderer = new AndroidSurfaceViewRenderer(_id, videoRenderType, window, _fullScreen);
+                if (ptrRenderer)
+                {
+                    _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+                }
+            }
+
+        }
+        break;
+#elif defined(WEBRTC_LINUX)
+        case kRenderX11:
+        {
+            VideoRenderLinuxImpl* ptrRenderer = NULL;
+            ptrRenderer = new VideoRenderLinuxImpl(_id, videoRenderType, window, _fullScreen);
+            if ( ptrRenderer )
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+            }
+        }
+        break;
+
+#else
+        // Other platforms
+#endif
+        case kRenderExternal:
+        {
+            VideoRenderExternalImpl* ptrRenderer(NULL);
+            ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
+                                                      window, _fullScreen);
+            if (ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+            }
+        }
+            break;
+        default:
+            // Error...
+            break;
+    }
+    if (_ptrRenderer)
+    {
+        if (_ptrRenderer->Init() == -1)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                         "%s: Failed to initialize renderer", __FUNCTION__);
+        }
+    }
+}
+
+ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
+{
+    delete &_moduleCrit;
+
+    for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+         it != _streamRenderMap.end();
+         ++it) {
+      delete it->second;
+    }
+
+    // Delete platform specific renderer
+    if (_ptrRenderer)
+    {
+        VideoRenderType videoRenderType = _ptrRenderer->RenderType();
+
+        switch (videoRenderType)
+        {
+            case kRenderExternal:
+            {
+                VideoRenderExternalImpl
+                        * ptrRenderer =
+                                reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#if defined(_WIN32)
+            case kRenderWindows:
+            {
+                VideoRenderWindowsImpl* ptrRenderer = reinterpret_cast<VideoRenderWindowsImpl*>(_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#elif defined(WEBRTC_IOS)
+            case kRenderiOS:
+            {
+              VideoRenderIosImpl* ptrRenderer = reinterpret_cast<VideoRenderIosImpl*> (_ptrRenderer);
+              _ptrRenderer = NULL;
+              delete ptrRenderer;
+            }
+            break;
+#elif defined(WEBRTC_MAC)
+
+#if defined(COCOA_RENDERING)
+            case kRenderCocoa:
+            {
+                VideoRenderMacCocoaImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCocoaImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#elif defined(CARBON_RENDERING)
+            case kRenderCarbon:
+            {
+                VideoRenderMacCarbonImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCarbonImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+            case kRenderAndroid:
+            {
+                VideoRenderAndroid* ptrRenderer = reinterpret_cast<VideoRenderAndroid*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+
+#elif defined(WEBRTC_LINUX)
+            case kRenderX11:
+            {
+                VideoRenderLinuxImpl* ptrRenderer = reinterpret_cast<VideoRenderLinuxImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#else
+            //other platforms
+#endif
+
+            default:
+                // Error...
+                break;
+        }
+    }
+}
+
+int64_t ModuleVideoRenderImpl::TimeUntilNextProcess()
+{
+    // Not used
+    return 50;
+}
+void ModuleVideoRenderImpl::Process() {}
+
+void* ModuleVideoRenderImpl::Window()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+    return _ptrWindow;
+}
+
+int32_t ModuleVideoRenderImpl::ChangeWindow(void* window)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+#if defined(WEBRTC_IOS) // WEBRTC_IOS must go before WEBRTC_MAC
+    // Release the previous renderer before creating a new one.
+    delete _ptrRenderer;
+    _ptrRenderer = NULL;
+
+    VideoRenderIosImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderIosImpl(_id, window, _fullScreen);
+    if (!ptrRenderer)
+    {
+        return -1;
+    }
+    _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+    return _ptrRenderer->ChangeWindow(window);
+#elif defined(WEBRTC_MAC)
+
+    // Release the previous renderer before creating a new one.
+    delete _ptrRenderer;
+    _ptrRenderer = NULL;
+
+#if defined(COCOA_RENDERING)
+    VideoRenderMacCocoaImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderMacCocoaImpl(_id, kRenderCocoa, window, _fullScreen);
+#elif defined(CARBON_RENDERING)
+    VideoRenderMacCarbonImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderMacCarbonImpl(_id, kRenderCarbon, window, _fullScreen);
+#endif
+    if (!ptrRenderer)
+    {
+        return -1;
+    }
+    _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+    return _ptrRenderer->ChangeWindow(window);
+
+#else
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->ChangeWindow(window);
+
+#endif
+}
+
+int32_t ModuleVideoRenderImpl::Id()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+    return _id;
+}
+
+uint32_t ModuleVideoRenderImpl::GetIncomingFrameRate(const uint32_t streamId) {
+  CriticalSectionScoped cs(&_moduleCrit);
+
+  IncomingVideoStreamMap::iterator it = _streamRenderMap.find(streamId);
+
+  if (it == _streamRenderMap.end()) {
+    // This stream doesn't exist
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: stream doesn't exist",
+                 __FUNCTION__);
+    return 0;
+  }
+  assert(it->second != NULL);
+  return it->second->IncomingRate();
+}
+
+VideoRenderCallback*
+ModuleVideoRenderImpl::AddIncomingRenderStream(const uint32_t streamId,
+                                               const uint32_t zOrder,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return NULL;
+    }
+
+    if (_streamRenderMap.find(streamId) != _streamRenderMap.end()) {
+        // The stream already exists...
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream already exists", __FUNCTION__);
+        return NULL;
+    }
+
+    VideoRenderCallback* ptrRenderCallback =
+            _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
+                                                  right, bottom);
+    if (ptrRenderCallback == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Can't create incoming stream in renderer",
+                     __FUNCTION__);
+        return NULL;
+    }
+
+    // Create the platform-independent stream wrapper.
+    IncomingVideoStream* ptrIncomingStream =
+        new IncomingVideoStream(streamId, false);
+    ptrIncomingStream->SetRenderCallback(ptrRenderCallback);
+    VideoRenderCallback* moduleCallback = ptrIncomingStream->ModuleCallback();
+
+    // Store the stream
+    _streamRenderMap[streamId] = ptrIncomingStream;
+
+    return moduleCallback;
+}
+
+int32_t ModuleVideoRenderImpl::DeleteIncomingRenderStream(
+    const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+    if (item == _streamRenderMap.end())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    delete item->second;
+
+    _ptrRenderer->DeleteIncomingRenderStream(streamId);
+
+    _streamRenderMap.erase(item);
+
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::AddExternalRenderCallback(
+    const uint32_t streamId,
+    VideoRenderCallback* renderObject) {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    if (item->second == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not get stream", __FUNCTION__);
+        return -1;
+    }
+    item->second->SetExternalCallback(renderObject);
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
+    const uint32_t streamId,
+    uint32_t& zOrder,
+    float& left,
+    float& top,
+    float& right,
+    float& bottom) const {
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
+                                                           left, top, right,
+                                                           bottom);
+}
+
+uint32_t ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    return static_cast<uint32_t>(_streamRenderMap.size());
+}
+
+bool ModuleVideoRenderImpl::HasIncomingRenderStream(
+    const uint32_t streamId) const {
+  CriticalSectionScoped cs(&_moduleCrit);
+
+  return _streamRenderMap.find(streamId) != _streamRenderMap.end();
+}
+
+int32_t ModuleVideoRenderImpl::RegisterRawFrameCallback(
+    const uint32_t streamId,
+    VideoRenderCallback* callbackObj) {
+  // Not supported by this module.
+  return -1;
+}
+
+int32_t ModuleVideoRenderImpl::StartRender(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    // Start the stream
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        return -1;
+    }
+
+    if (item->second->Start() == -1)
+    {
+        return -1;
+    }
+
+    // Start the HW renderer
+    if (_ptrRenderer->StartRender() == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::StopRender(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s(%d): No renderer", __FUNCTION__, streamId);
+        return -1;
+    }
+
+    // Stop the incoming stream
+    IncomingVideoStreamMap::iterator item = _streamRenderMap.find(streamId);
+
+    if (item == _streamRenderMap.end())
+    {
+        return -1;
+    }
+
+    if (item->second->Stop() == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t ModuleVideoRenderImpl::ResetRender()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    int32_t ret = 0;
+    // Loop through all incoming streams and reset them
+    for (IncomingVideoStreamMap::iterator it = _streamRenderMap.begin();
+         it != _streamRenderMap.end();
+         ++it) {
+      if (it->second->Reset() == -1)
+        ret = -1;
+    }
+    return ret;
+}
+
+RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        return kVideoI420;
+    }
+
+    // Note: "PerferedVideoType" matches the (misspelled) method name in the
+    // IVideoRender interface.
+    return _ptrRenderer->PerferedVideoType();
+}
+
+bool ModuleVideoRenderImpl::IsFullScreen()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->FullScreen();
+}
+
+int32_t ModuleVideoRenderImpl::GetScreenResolution(uint32_t& screenWidth,
+                                                   uint32_t& screenHeight) const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
+}
+
+uint32_t ModuleVideoRenderImpl::RenderFrameRate(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return 0;
+    }
+    return _ptrRenderer->RenderFrameRate(streamId);
+}
+
+int32_t ModuleVideoRenderImpl::SetStreamCropping(const uint32_t streamId,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetTransparentBackground(enable);
+}
+
+int32_t ModuleVideoRenderImpl::FullScreenRender(void* window, const bool enable)
+{
+    // Not supported by this module.
+    return -1;
+}
+
+int32_t ModuleVideoRenderImpl::SetText(const uint8_t textId,
+                                       const uint8_t* text,
+                                       const int32_t textLength,
+                                       const uint32_t textColorRef,
+                                       const uint32_t backgroundColorRef,
+                                       const float left, const float top,
+                                       const float right, const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
+                                 backgroundColorRef, left, top, right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
+                                         const uint8_t pictureId,
+                                         const void* colorKey,
+                                         const float left,
+                                         const float top,
+                                         const float right,
+                                         const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
+                                   right, bottom);
+}
+
+int32_t ModuleVideoRenderImpl::SetExpectedRenderDelay(
+    uint32_t stream_id, int32_t delay_ms) {
+  CriticalSectionScoped cs(&_moduleCrit);
+
+  if (!_ptrRenderer) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: No renderer", __FUNCTION__);
+    return -1;
+  }
+
+  IncomingVideoStreamMap::const_iterator item =
+      _streamRenderMap.find(stream_id);
+  if (item == _streamRenderMap.end()) {
+    // This stream doesn't exist
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s(%u, %d): stream doesn't exist", __FUNCTION__, stream_id,
+                 delay_ms);
+    return -1;
+  }
+
+  assert(item->second != NULL);
+  return item->second->SetExpectedRenderDelay(delay_ms);
+}
+
+int32_t ModuleVideoRenderImpl::ConfigureRenderer(const uint32_t streamId,
+                                                 const unsigned int zOrder,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
+                                           bottom);
+}
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/windows/i_video_render_win.h b/webrtc/modules/video_render/windows/i_video_render_win.h
new file mode 100644
index 0000000..6dbb4fd
--- /dev/null
+++ b/webrtc/modules/video_render/windows/i_video_render_win.h
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
+
+#include "webrtc/modules/video_render/video_render.h"
+
+namespace webrtc {
+
+// Class definitions
+class IVideoRenderWin
+{
+public:
+    /**************************************************************************
+     *
+     *   Constructor/destructor
+     *
+     ***************************************************************************/
+    virtual ~IVideoRenderWin() {}
+
+    virtual int32_t Init() = 0;
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback* CreateChannel(const uint32_t streamId,
+                                               const uint32_t zOrder,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom) = 0;
+
+    virtual int32_t DeleteChannel(const uint32_t streamId) = 0;
+
+    virtual int32_t GetStreamSettings(const uint32_t channel,
+                                      const uint16_t streamId,
+                                      uint32_t& zOrder,
+                                      float& left, float& top,
+                                      float& right, float& bottom) = 0;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual int32_t StartRender() = 0;
+
+    virtual int32_t StopRender() = 0;
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual bool IsFullScreen() = 0;
+
+    virtual int32_t SetCropping(const uint32_t channel,
+                                const uint16_t streamId,
+                                const float left, const float top,
+                                const float right, const float bottom) = 0;
+
+    virtual int32_t ConfigureRenderer(const uint32_t channel,
+                                      const uint16_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) = 0;
+
+    virtual int32_t SetTransparentBackground(const bool enable) = 0;
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t colorText,
+                            const uint32_t colorBg,
+                            const float left, const float top,
+                            const float right, const float bottom) = 0;
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left, const float top,
+                              const float right, const float bottom) = 0;
+
+    virtual int32_t ChangeWindow(void* window) = 0;
+
+    virtual int32_t GetGraphicsMemory(uint64_t& totalMemory,
+                                      uint64_t& availableMemory) = 0;
+
+};
+
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
diff --git a/webrtc/modules/video_render/windows/video_render_direct3d9.cc b/webrtc/modules/video_render/windows/video_render_direct3d9.cc
new file mode 100644
index 0000000..b59b944
--- /dev/null
+++ b/webrtc/modules/video_render/windows/video_render_direct3d9.cc
@@ -0,0 +1,1160 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Own include file
+#include "webrtc/modules/video_render/windows/video_render_direct3d9.h"
+
+// System include files
+#include <windows.h>
+
+// WebRtc include files
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/event_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+
+namespace webrtc {
+
+// A structure for our custom vertex type
+struct CUSTOMVERTEX
+{
+    FLOAT x, y, z;
+    DWORD color; // The vertex color
+    FLOAT u, v;
+};
+
+// Our custom FVF, which describes our custom vertex structure
+#define D3DFVF_CUSTOMVERTEX (D3DFVF_XYZ|D3DFVF_DIFFUSE|D3DFVF_TEX1)
+
+/*
+ *
+ *    D3D9Channel
+ *
+ */
+D3D9Channel::D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
+                         CriticalSectionWrapper* critSect,
+                         Trace* trace) :
+    _width(0),
+    _height(0),
+    _pd3dDevice(pd3DDevice),
+    _pTexture(NULL),
+    _bufferIsUpdated(false),
+    _critSect(critSect),
+    _streamId(0),
+    _zOrder(0),
+    _startWidth(0),
+    _startHeight(0),
+    _stopWidth(0),
+    _stopHeight(0)
+{
+
+}
+
+D3D9Channel::~D3D9Channel()
+{
+    //release the texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+}
+
+void D3D9Channel::SetStreamSettings(uint16_t streamId,
+                                    uint32_t zOrder,
+                                    float startWidth,
+                                    float startHeight,
+                                    float stopWidth,
+                                    float stopHeight)
+{
+    _streamId = streamId;
+    _zOrder = zOrder;
+    _startWidth = startWidth;
+    _startHeight = startHeight;
+    _stopWidth = stopWidth;
+    _stopHeight = stopHeight;
+}
+
+int D3D9Channel::GetStreamSettings(uint16_t streamId,
+                                   uint32_t& zOrder,
+                                   float& startWidth,
+                                   float& startHeight,
+                                   float& stopWidth,
+                                   float& stopHeight)
+{
+    streamId = _streamId;
+    zOrder = _zOrder;
+    startWidth = _startWidth;
+    startHeight = _startHeight;
+    stopWidth = _stopWidth;
+    stopHeight = _stopHeight;
+    return 0;
+}
+
+int D3D9Channel::GetTextureWidth()
+{
+    return _width;
+}
+
+int D3D9Channel::GetTextureHeight()
+{
+    return _height;
+}
+
+// Called from the video engine when the frame size has changed.
+int D3D9Channel::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "FrameSizeChange, width: %d, height: %d, streams: %d", width,
+                 height, numberOfStreams);
+
+    CriticalSectionScoped cs(_critSect);
+    _width = width;
+    _height = height;
+
+    //clean the previous texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+
+    HRESULT ret = E_POINTER;
+
+    if (_pd3dDevice)
+      ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
+                                       D3DPOOL_MANAGED, &_pTexture, NULL);
+
+    if (FAILED(ret))
+    {
+        _pTexture = NULL;
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t D3D9Channel::RenderFrame(const uint32_t streamId,
+                                 const VideoFrame& videoFrame) {
+    CriticalSectionScoped cs(_critSect);
+    if (_width != videoFrame.width() || _height != videoFrame.height())
+    {
+        if (FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1)
+        {
+            return -1;
+        }
+    }
+    return DeliverFrame(videoFrame);
+}
+
+// Called from video engine when a new frame should be rendered.
+int D3D9Channel::DeliverFrame(const VideoFrame& videoFrame) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+               "DeliverFrame to D3D9Channel");
+
+  CriticalSectionScoped cs(_critSect);
+
+  // FIXME: if _bufferIsUpdated is still true (the previous frame has not
+  // been rendered yet), do we want to update the texture? Probably not.
+  if (_bufferIsUpdated) {
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                 "Last frame hasn't been rendered yet. Drop this frame.");
+    return -1;
+  }
+
+  if (!_pd3dDevice) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "D3D for rendering not initialized.");
+    return -1;
+  }
+
+  if (!_pTexture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Texture for rendering not initialized.");
+    return -1;
+  }
+
+  D3DLOCKED_RECT lr;
+
+  if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0))) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Failed to lock a texture in D3D9 Channel.");
+    return -1;
+  }
+  UCHAR* pRect = static_cast<UCHAR*>(lr.pBits);
+
+  ConvertFromI420(videoFrame, kARGB, 0, pRect);
+
+  if (FAILED(_pTexture->UnlockRect(0))) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Failed to unlock a texture in D3D9 Channel.");
+    return -1;
+  }
+
+  _bufferIsUpdated = true;
+  return 0;
+}
+
+// Called by the D3D channel owner to indicate that the frame/texture has
+// been rendered on screen, making room for the next frame.
+int D3D9Channel::RenderOffFrame()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                 "Frame has been rendered to the screen.");
+    CriticalSectionScoped cs(_critSect);
+    _bufferIsUpdated = false;
+    return 0;
+}
+
+// Called by d3d channel owner to check if the texture is updated
+int D3D9Channel::IsUpdated(bool& isUpdated)
+{
+    CriticalSectionScoped cs(_critSect);
+    isUpdated = _bufferIsUpdated;
+    return 0;
+}
+
+// Called by d3d channel owner to get the texture
+LPDIRECT3DTEXTURE9 D3D9Channel::GetTexture()
+{
+    CriticalSectionScoped cs(_critSect);
+    return _pTexture;
+}
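DeliverFrame(), IsUpdated(), RenderOffFrame() and GetTexture() together form
a single-slot handshake: DeliverFrame() fills the texture and sets
_bufferIsUpdated, and the render loop consumes it. A sketch of the consumer
side, mirroring what UpdateRenderSurface() does later in this file
(illustrative only):

    // Render-loop side: present each delivered frame exactly once.
    bool updated = false;
    channel->IsUpdated(updated);
    if (updated) {
      // ... draw channel->GetTexture() to the back buffer ...
      channel->RenderOffFrame();  // Allows the next DeliverFrame() to land.
    }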
+
+int D3D9Channel::ReleaseTexture()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    //release the texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+    _pd3dDevice = NULL;
+    return 0;
+}
+
+int D3D9Channel::RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _pd3dDevice = pd3DDevice;
+
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+
+    HRESULT ret;
+
+    ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
+                                     D3DPOOL_MANAGED, &_pTexture, NULL);
+
+    if (FAILED(ret))
+    {
+        _pTexture = NULL;
+        return -1;
+    }
+
+    return 0;
+}
+
+/*
+ *
+ *    VideoRenderDirect3D9
+ *
+ */
+VideoRenderDirect3D9::VideoRenderDirect3D9(Trace* trace,
+                                           HWND hWnd,
+                                           bool fullScreen) :
+    _refD3DCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _trace(trace),
+    _hWnd(hWnd),
+    _fullScreen(fullScreen),
+    _pTextureLogo(NULL),
+    _pVB(NULL),
+    _pd3dDevice(NULL),
+    _pD3D(NULL),
+    _d3dChannels(),
+    _d3dZorder(),
+    _screenUpdateEvent(NULL),
+    _logoLeft(0),
+    _logoTop(0),
+    _logoRight(0),
+    _logoBottom(0),
+    _pd3dSurface(NULL),
+    _totalMemory(0),
+    _availableMemory(0)
+{
+    _screenUpdateThread.reset(new rtc::PlatformThread(
+        ScreenUpdateThreadProc, this, "ScreenUpdateThread"));
+    _screenUpdateEvent = EventTimerWrapper::Create();
+    SetRect(&_originalHwndRect, 0, 0, 0, 0);
+}
+
+VideoRenderDirect3D9::~VideoRenderDirect3D9()
+{
+    // NOTE: we must not enter the critical section here!
+
+    // Signal event to exit thread, then delete it
+    rtc::PlatformThread* tmpPtr = _screenUpdateThread.release();
+    if (tmpPtr)
+    {
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        tmpPtr->Stop();
+        delete tmpPtr;
+    }
+    delete _screenUpdateEvent;
+
+    //close d3d device
+    CloseDevice();
+
+    // Delete all channels
+    std::map<int, D3D9Channel*>::iterator it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        delete it->second;
+        it = _d3dChannels.erase(it);
+    }
+    // Clean the zOrder map
+    _d3dZorder.clear();
+
+    if (_fullScreen)
+    {
+        // restore hwnd to original size and position
+        ::SetWindowPos(_hWnd, HWND_NOTOPMOST, _originalHwndRect.left,
+                       _originalHwndRect.top, _originalHwndRect.right
+                               - _originalHwndRect.left,
+                       _originalHwndRect.bottom - _originalHwndRect.top,
+                       SWP_FRAMECHANGED);
+        ::RedrawWindow(_hWnd, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                | RDW_ERASE);
+        ::RedrawWindow(NULL, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                | RDW_ERASE);
+    }
+
+    delete &_refD3DCritsect;
+}
+
+DWORD VideoRenderDirect3D9::GetVertexProcessingCaps()
+{
+    D3DCAPS9 caps;
+    DWORD dwVertexProcessing = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
+    if (SUCCEEDED(_pD3D->GetDeviceCaps(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL,
+                                       &caps)))
+    {
+        if ((caps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+                == D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+        {
+            dwVertexProcessing = D3DCREATE_HARDWARE_VERTEXPROCESSING;
+        }
+    }
+    return dwVertexProcessing;
+}
+
+int VideoRenderDirect3D9::InitializeD3D(HWND hWnd,
+                                            D3DPRESENT_PARAMETERS* pd3dpp)
+{
+    // initialize Direct3D
+    if (NULL == (_pD3D = Direct3DCreate9(D3D_SDK_VERSION)))
+    {
+        return -1;
+    }
+
+    // determine what type of vertex processing to use based on the device capabilities
+    DWORD dwVertexProcessing = GetVertexProcessingCaps();
+
+    // get the display mode
+    D3DDISPLAYMODE d3ddm;
+    _pD3D->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &d3ddm);
+    pd3dpp->BackBufferFormat = d3ddm.Format;
+
+    // create the D3D device
+    if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hWnd,
+                                   dwVertexProcessing | D3DCREATE_MULTITHREADED
+                                           | D3DCREATE_FPU_PRESERVE, pd3dpp,
+                                   &_pd3dDevice)))
+    {
+        // Fall back to the reference device.
+        if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_REF,
+                                       hWnd, dwVertexProcessing
+                                               | D3DCREATE_MULTITHREADED
+                                               | D3DCREATE_FPU_PRESERVE,
+                                       pd3dpp, &_pd3dDevice)))
+        {
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+int VideoRenderDirect3D9::ResetDevice()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "VideoRenderDirect3D9::ResetDevice");
+
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    //release the channel texture
+    std::map<int, D3D9Channel*>::iterator it;
+    it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        if (it->second)
+        {
+            it->second->ReleaseTexture();
+        }
+        it++;
+    }
+
+    //close d3d device
+    if (CloseDevice() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::ResetDevice failed to CloseDevice");
+        return -1;
+    }
+
+    //reinit d3d device
+    if (InitDevice() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::ResetDevice failed to InitDevice");
+        return -1;
+    }
+
+    //recreate channel texture
+    it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        if (it->second)
+        {
+            it->second->RecreateTexture(_pd3dDevice);
+        }
+        it++;
+    }
+
+    return 0;
+}
+
+int VideoRenderDirect3D9::InitDevice()
+{
+    // Set up the structure used to create the D3DDevice
+    ZeroMemory(&_d3dpp, sizeof(_d3dpp));
+    _d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+    _d3dpp.BackBufferFormat = D3DFMT_A8R8G8B8;
+    if (GetWindowRect(_hWnd, &_originalHwndRect) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::InitDevice Could not get window size");
+        return -1;
+    }
+    if (!_fullScreen)
+    {
+        _winWidth = _originalHwndRect.right - _originalHwndRect.left;
+        _winHeight = _originalHwndRect.bottom - _originalHwndRect.top;
+        _d3dpp.Windowed = TRUE;
+        _d3dpp.BackBufferHeight = 0;
+        _d3dpp.BackBufferWidth = 0;
+    }
+    else
+    {
+        _winWidth = (LONG) ::GetSystemMetrics(SM_CXSCREEN);
+        _winHeight = (LONG) ::GetSystemMetrics(SM_CYSCREEN);
+        _d3dpp.Windowed = FALSE;
+        _d3dpp.BackBufferWidth = _winWidth;
+        _d3dpp.BackBufferHeight = _winHeight;
+        _d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
+    }
+
+    if (InitializeD3D(_hWnd, &_d3dpp) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::InitDevice failed in InitializeD3D");
+        return -1;
+    }
+
+    // Turn off culling, so we see the front and back of the triangle
+    _pd3dDevice->SetRenderState(D3DRS_CULLMODE, D3DCULL_NONE);
+
+    // Turn off D3D lighting, since we are providing our own vertex colors
+    _pd3dDevice->SetRenderState(D3DRS_LIGHTING, FALSE);
+
+    // Settings for alpha blending
+    _pd3dDevice->SetRenderState(D3DRS_ALPHABLENDENABLE, TRUE);
+    _pd3dDevice->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_SRCALPHA);
+    _pd3dDevice->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
+
+    _pd3dDevice->SetSamplerState( 0, D3DSAMP_MINFILTER, D3DTEXF_LINEAR );
+    _pd3dDevice->SetSamplerState( 0, D3DSAMP_MAGFILTER, D3DTEXF_LINEAR );
+    _pd3dDevice->SetSamplerState( 0, D3DSAMP_MIPFILTER, D3DTEXF_LINEAR );
+
+    // Initialize Vertices
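+    // Each CUSTOMVERTEX holds x, y, z, an ARGB diffuse color and u, v
+    // texture coordinates. Drawn as a two-triangle strip, the four vertices
+    // below cover the whole clip space (-1..1 in x and y).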
+    CUSTOMVERTEX Vertices[] = {
+            //front
+            { -1.0f, -1.0f, 0.0f, 0xffffffff, 0, 1 }, { -1.0f, 1.0f, 0.0f,
+                    0xffffffff, 0, 0 },
+            { 1.0f, -1.0f, 0.0f, 0xffffffff, 1, 1 }, { 1.0f, 1.0f, 0.0f,
+                    0xffffffff, 1, 0 } };
+
+    // Create the vertex buffer.
+    if (FAILED(_pd3dDevice->CreateVertexBuffer(sizeof(Vertices), 0,
+                                               D3DFVF_CUSTOMVERTEX,
+                                               D3DPOOL_DEFAULT, &_pVB, NULL )))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to create the vertex buffer.");
+        return -1;
+    }
+
+    // Now we fill the vertex buffer.
+    VOID* pVertices;
+    if (FAILED(_pVB->Lock(0, sizeof(Vertices), (void**) &pVertices, 0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to lock the vertex buffer.");
+        return -1;
+    }
+    memcpy(pVertices, Vertices, sizeof(Vertices));
+    _pVB->Unlock();
+
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::Init()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "VideoRenderDirect3D9::Init");
+
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    // Start rendering thread...
+    if (!_screenUpdateThread)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Thread not created");
+        return -1;
+    }
+    _screenUpdateThread->Start();
+    _screenUpdateThread->SetPriority(rtc::kRealtimePriority);
+
+    // Start the event triggering the render process
+    unsigned int monitorFreq = 60;
+    DEVMODE dm;
+    // initialize the DEVMODE structure
+    ZeroMemory(&dm, sizeof(dm));
+    dm.dmSize = sizeof(dm);
+    if (0 != EnumDisplaySettings(NULL, ENUM_CURRENT_SETTINGS, &dm) &&
+        dm.dmDisplayFrequency > 1)
+    {
+        // A dmDisplayFrequency of 0 or 1 means "hardware default".
+        monitorFreq = dm.dmDisplayFrequency;
+    }
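+    // Trigger one render pass per display refresh, e.g. a 60 Hz monitor
+    // gives a timer period of 1000 / 60 = 16 ms.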
+    _screenUpdateEvent->StartTimer(true, 1000 / monitorFreq);
+
+    return InitDevice();
+}
+
+int32_t VideoRenderDirect3D9::ChangeWindow(void* window)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return -1;
+}
+
+int VideoRenderDirect3D9::UpdateRenderSurface()
+{
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    // Check if there are any updated buffers
+    bool updated = false;
+    std::map<int, D3D9Channel*>::iterator it;
+    it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        D3D9Channel* channel = it->second;
+        channel->IsUpdated(updated);
+        if (updated)
+        {
+            break;
+        }
+        it++;
+    }
+    // Nothing was updated; nothing to render this time.
+    if (!updated)
+        return -1;
+
+    // Clear the backbuffer to a black color
+    _pd3dDevice->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 1.0f,
+                       0);
+
+    // Begin the scene
+    if (SUCCEEDED(_pd3dDevice->BeginScene()))
+    {
+        _pd3dDevice->SetStreamSource(0, _pVB, 0, sizeof(CUSTOMVERTEX));
+        _pd3dDevice->SetFVF(D3DFVF_CUSTOMVERTEX);
+
+        //draw all the channels
+        //get texture from the channels
+        LPDIRECT3DTEXTURE9 textureFromChannel = NULL;
+        DWORD textureWidth, textureHeight;
+
+        std::multimap<int, unsigned int>::reverse_iterator zIt;
+        zIt = _d3dZorder.rbegin();
+        while (zIt != _d3dZorder.rend())
+        {
+            // loop through all channels and streams in Z order
+            int channel = zIt->second & 0x0000ffff;
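+            // Only the low 16 bits of the value carry the channel id; the
+            // high 16 bits appear to be reserved for a stream id (a single
+            // stream per channel is assumed throughout this file).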
+
+            std::map<int, D3D9Channel*>::iterator ddIt;
+            ddIt = _d3dChannels.find(channel);
+            if (ddIt != _d3dChannels.end())
+            {
+                // found the channel
+                D3D9Channel* channelObj = ddIt->second;
+                if (channelObj)
+                {
+                    textureFromChannel = channelObj->GetTexture();
+                    textureWidth = channelObj->GetTextureWidth();
+                    textureHeight = channelObj->GetTextureHeight();
+
+                    uint32_t zOrder;
+                    float startWidth, startHeight, stopWidth, stopHeight;
+                    channelObj->GetStreamSettings(0, zOrder, startWidth,
+                                                  startHeight, stopWidth,
+                                                  stopHeight);
+
+                    //draw the video stream
+                    UpdateVerticeBuffer(_pVB, 0, startWidth, startHeight,
+                                        stopWidth, stopHeight);
+                    _pd3dDevice->SetTexture(0, textureFromChannel);
+                    _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
+
+                    //Notify the channel that this frame has been rendered
+                    channelObj->RenderOffFrame();
+                }
+            }
+            zIt++;
+        }
+
+        //draw the logo
+        if (_pTextureLogo)
+        {
+            UpdateVerticeBuffer(_pVB, 0, _logoLeft, _logoTop, _logoRight,
+                                _logoBottom);
+            _pd3dDevice->SetTexture(0, _pTextureLogo);
+            _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
+        }
+
+        // End the scene
+        _pd3dDevice->EndScene();
+    }
+
+    // Present the backbuffer contents to the display
+    _pd3dDevice->Present(NULL, NULL, NULL, NULL );
+
+    return 0;
+}
+
+// Set the alpha value to 0 for every pixel matching the transparent color key.
+int VideoRenderDirect3D9::SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
+                                              DDCOLORKEY* transparentColorKey,
+                                              DWORD width,
+                                              DWORD height)
+{
+    D3DLOCKED_RECT lr;
+    if (!pTexture)
+        return -1;
+
+    CriticalSectionScoped cs(&_refD3DCritsect);
+    if (SUCCEEDED(pTexture->LockRect(0, &lr, NULL, D3DLOCK_DISCARD)))
+    {
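+        // The texture is 32-bit ARGB: clearing the top byte makes a pixel
+        // fully transparent, setting it to 0xFF makes it fully opaque.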
+        for (DWORD y = 0; y < height; y++)
+        {
+            DWORD dwOffset = y * width;
+
+            for (DWORD x = 0; x < width; x++)
+            {
+                DWORD temp = ((DWORD*) lr.pBits)[dwOffset + x];
+                if ((temp & 0x00FFFFFF)
+                        == transparentColorKey->dwColorSpaceLowValue)
+                {
+                    temp &= 0x00FFFFFF;
+                }
+                else
+                {
+                    temp |= 0xFF000000;
+                }
+                ((DWORD*) lr.pBits)[dwOffset + x] = temp;
+            }
+        }
+        pTexture->UnlockRect(0);
+        return 0;
+    }
+    return -1;
+}
+
+/*
+ *
+ *    Rendering process
+ *
+ */
+bool VideoRenderDirect3D9::ScreenUpdateThreadProc(void* obj)
+{
+    return static_cast<VideoRenderDirect3D9*> (obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderDirect3D9::ScreenUpdateProcess()
+{
+    _screenUpdateEvent->Wait(100);
+
+    if (!_screenUpdateThread)
+    {
+        //stop the thread
+        return false;
+    }
+    if (!_pd3dDevice)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "d3dDevice not created.");
+        return true;
+    }
+
+    HRESULT hr = _pd3dDevice->TestCooperativeLevel();
+
+    if (SUCCEEDED(hr))
+    {
+        UpdateRenderSurface();
+    }
+
+    if (hr == D3DERR_DEVICELOST)
+    {
+        //Device is lost and cannot be reset yet
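+            // TestCooperativeLevel keeps returning D3DERR_DEVICELOST until
+            // the device can be restored, so just keep polling.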
+
+    }
+    else if (hr == D3DERR_DEVICENOTRESET)
+    {
+        //Lost but we can reset it now
+        //Note: the standard way is to call Reset(), but for some reason that
+        //doesn't work here, so we release the device and create it again.
+        ResetDevice();
+    }
+
+    return true;
+}
+
+int VideoRenderDirect3D9::CloseDevice()
+{
+    CriticalSectionScoped cs(&_refD3DCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "VideoRenderDirect3D9::CloseDevice");
+
+    if (_pTextureLogo != NULL)
+    {
+        _pTextureLogo->Release();
+        _pTextureLogo = NULL;
+    }
+
+    if (_pVB != NULL)
+    {
+        _pVB->Release();
+        _pVB = NULL;
+    }
+
+    if (_pd3dDevice != NULL)
+    {
+        _pd3dDevice->Release();
+        _pd3dDevice = NULL;
+    }
+
+    if (_pD3D != NULL)
+    {
+        _pD3D->Release();
+        _pD3D = NULL;
+    }
+
+    if (_pd3dSurface != NULL)
+    {
+        _pd3dSurface->Release();
+        _pd3dSurface = NULL;
+    }
+    return 0;
+}
+
+D3D9Channel* VideoRenderDirect3D9::GetD3DChannel(int channel)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return NULL;
+    }
+    return ddobj;
+}
+
+int32_t VideoRenderDirect3D9::DeleteChannel(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    std::multimap<int, unsigned int>::iterator it;
+    it = _d3dZorder.begin();
+    while (it != _d3dZorder.end())
+    {
+        if ((streamId & 0x0000ffff) == (it->second & 0x0000ffff))
+        {
+            it = _d3dZorder.erase(it);
+            break;
+        }
+        it++;
+    }
+
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(streamId & 0x0000ffff);
+    if (ddIt != _d3dChannels.end())
+    {
+        delete ddIt->second;
+        _d3dChannels.erase(ddIt);
+        return 0;
+    }
+    return -1;
+}
+
+VideoRenderCallback* VideoRenderDirect3D9::CreateChannel(const uint32_t channel,
+                                                         const uint32_t zOrder,
+                                                         const float left,
+                                                         const float top,
+                                                         const float right,
+                                                         const float bottom)
+{
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    // FIXME: this should probably be done in VideoAPIWindows; stop the frame
+    // delivery first, then remove the old channel.
+    DeleteChannel(channel);
+
+    D3D9Channel* d3dChannel = new D3D9Channel(_pd3dDevice,
+                                              &_refD3DCritsect, _trace);
+    d3dChannel->SetStreamSettings(0, zOrder, left, top, right, bottom);
+
+    // store channel
+    _d3dChannels[channel & 0x0000ffff] = d3dChannel;
+
+    // store Z order
+    // default streamID is 0
+    _d3dZorder.insert(
+        std::pair<int, unsigned int>(zOrder, channel & 0x0000ffff));
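+    // _d3dZorder is keyed on zOrder, so UpdateRenderSurface can walk it with
+    // a reverse iterator to visit the channels from highest to lowest Z.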
+
+    return d3dChannel;
+}
+
+int32_t VideoRenderDirect3D9::GetStreamSettings(const uint32_t channel,
+                                                const uint16_t streamId,
+                                                uint32_t& zOrder,
+                                                float& left, float& top,
+                                                float& right, float& bottom)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return -1;
+    }
+    // Only allow one stream per channel; demuxing is not supported.
+    return ddobj->GetStreamSettings(0, zOrder, left, top, right, bottom);
+}
+
+int VideoRenderDirect3D9::UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB,
+                                              int offset,
+                                              float startWidth,
+                                              float startHeight,
+                                              float stopWidth,
+                                              float stopHeight)
+{
+    if (pVB == NULL)
+        return -1;
+
+    float left, right, top, bottom;
+
+    //update the vertex buffer
+    //0,1 => -1,1
+    left = startWidth * 2 - 1;
+    right = stopWidth * 2 - 1;
+
+    //0,1 => 1,-1
+    top = 1 - startHeight * 2;
+    bottom = 1 - stopHeight * 2;
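+    // Worked example: a stream covering the middle half of the window,
+    // startWidth = 0.25 and stopWidth = 0.75, maps to clip-space
+    // left = -0.5 and right = 0.5.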
+
+    CUSTOMVERTEX newVertices[] = {
+            // One quad as a triangle strip:
+            // (left,bottom), (left,top), (right,bottom), (right,top).
+            { left, bottom, 0.0f, 0xffffffff, 0, 1 }, { left, top, 0.0f,
+                    0xffffffff, 0, 0 },
+            { right, bottom, 0.0f, 0xffffffff, 1, 1 }, { right, top, 0.0f,
+                    0xffffffff, 1, 0 }, };
+    // Now we fill the vertex buffer.
+    VOID* pVertices;
+    if (FAILED(pVB->Lock(sizeof(CUSTOMVERTEX) * offset, sizeof(newVertices),
+                         (void**) &pVertices, 0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to lock the vertex buffer.");
+        return -1;
+    }
+    memcpy(pVertices, newVertices, sizeof(newVertices));
+    pVB->Unlock();
+
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::StartRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::StopRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+bool VideoRenderDirect3D9::IsFullScreen()
+{
+    return _fullScreen;
+}
+
+int32_t VideoRenderDirect3D9::SetCropping(const uint32_t channel,
+                                          const uint16_t streamId,
+                                          const float left, const float top,
+                                          const float right, const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::SetTransparentBackground(const bool enable)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::SetText(const uint8_t textId,
+                                      const uint8_t* text,
+                                      const int32_t textLength,
+                                      const uint32_t colorText,
+                                      const uint32_t colorBg,
+                                      const float left, const float top,
+                                      const float right, const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::SetBitmap(const void* bitMap,
+                                        const uint8_t pictureId,
+                                        const void* colorKey,
+                                        const float left, const float top,
+                                        const float right, const float bottom)
+{
+    if (!bitMap)
+    {
+        if (_pTextureLogo != NULL)
+        {
+            _pTextureLogo->Release();
+            _pTextureLogo = NULL;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "Remove bitmap.");
+        return 0;
+    }
+
+    // sanity
+    if (left > 1.0f || left < 0.0f ||
+        top > 1.0f || top < 0.0f ||
+        right > 1.0f || right < 0.0f ||
+        bottom > 1.0f || bottom < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D SetBitmap invalid parameter");
+        return -1;
+    }
+
+    if ((bottom <= top) || (right <= left))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D SetBitmap invalid parameter");
+        return -1;
+    }
+
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    unsigned char* srcPtr;
+    HGDIOBJ oldhand;
+    BITMAPINFO pbi;
+    BITMAP bmap;
+    HDC hdcNew;
+    hdcNew = CreateCompatibleDC(0);
+    // Fill out the BITMAP structure.
+    GetObject((HBITMAP)bitMap, sizeof(bmap), &bmap);
+    //Select the bitmap handle into the new device context.
+    oldhand = SelectObject(hdcNew, (HGDIOBJ) bitMap);
+    // we are done with this object
+    DeleteObject(oldhand);
+    pbi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+    pbi.bmiHeader.biWidth = bmap.bmWidth;
+    pbi.bmiHeader.biHeight = bmap.bmHeight;
+    pbi.bmiHeader.biPlanes = 1;
+    pbi.bmiHeader.biBitCount = bmap.bmBitsPixel;
+    pbi.bmiHeader.biCompression = BI_RGB;
+    pbi.bmiHeader.biSizeImage = bmap.bmWidth * bmap.bmHeight * 3;
+    srcPtr = new unsigned char[bmap.bmWidth * bmap.bmHeight * 4];
+    // Read back the original, un-stretched pixels (24 or 32 bpp).
+    int pixelHeight = GetDIBits(hdcNew, (HBITMAP)bitMap, 0, bmap.bmHeight, srcPtr, &pbi,
+                                DIB_RGB_COLORS);
+    DeleteDC(hdcNew);  // The DC is no longer needed, even on the error path.
+    if (pixelHeight == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D failed to GetDIBits in SetBitmap");
+        delete[] srcPtr;
+        return -1;
+    }
+    if (pbi.bmiHeader.biBitCount != 24 && pbi.bmiHeader.biBitCount != 32)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D failed to SetBitmap invalid bit depth");
+        delete[] srcPtr;
+        return -1;
+    }
+
+    HRESULT ret;
+    //release the previous logo texture
+    if (_pTextureLogo != NULL)
+    {
+        _pTextureLogo->Release();
+        _pTextureLogo = NULL;
+    }
+    ret = _pd3dDevice->CreateTexture(bmap.bmWidth, bmap.bmHeight, 1, 0,
+                                     D3DFMT_A8R8G8B8, D3DPOOL_MANAGED,
+                                     &_pTextureLogo, NULL);
+    if (FAILED(ret))
+    {
+        _pTextureLogo = NULL;
+        delete[] srcPtr;
+        return -1;
+    }
+    if (!_pTextureLogo)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Texture for rendering not initialized.");
+        delete[] srcPtr;
+        return -1;
+    }
+
+    D3DLOCKED_RECT lr;
+    if (FAILED(_pTextureLogo->LockRect(0, &lr, NULL, 0)))
+    {
+        delete[] srcPtr;
+        return -1;
+    }
+    unsigned char* dstPtr = (UCHAR*) lr.pBits;
+    // Use the row pitch reported by LockRect; it may exceed bmWidth * 4.
+    int pitch = lr.Pitch;
+
+    if (pbi.bmiHeader.biBitCount == 24)
+    {
+        ConvertRGB24ToARGB(srcPtr, dstPtr, bmap.bmWidth, bmap.bmHeight, 0);
+    }
+    else
+    {
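+        // GetDIBits returns a bottom-up DIB (positive biHeight), so copy the
+        // rows in reverse to get the top-down layout the texture expects.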
+        unsigned char* srcTmp = srcPtr + (bmap.bmWidth * 4) * (bmap.bmHeight - 1);
+        for (int i = 0; i < bmap.bmHeight; ++i)
+        {
+            memcpy(dstPtr, srcTmp, bmap.bmWidth * 4);
+            srcTmp -= bmap.bmWidth * 4;
+            dstPtr += pitch;
+        }
+    }
+
+    delete[] srcPtr;
+    if (FAILED(_pTextureLogo->UnlockRect(0)))
+    {
+        return -1;
+    }
+
+    if (colorKey)
+    {
+        DDCOLORKEY* ddColorKey =
+                static_cast<DDCOLORKEY*> (const_cast<void*> (colorKey));
+        SetTransparentColor(_pTextureLogo, ddColorKey, bmap.bmWidth,
+                            bmap.bmHeight);
+    }
+
+    // Store the normalized (0..1) logo placement; UpdateRenderSurface()
+    // maps it to clip space when the logo quad is drawn.
+    _logoLeft = left;
+    _logoRight = right;
+    _logoTop = top;
+    _logoBottom = bottom;
+
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::GetGraphicsMemory(uint64_t& totalMemory,
+                                                uint64_t& availableMemory)
+{
+    totalMemory = _totalMemory;
+    availableMemory = _availableMemory;
+    return 0;
+}
+
+int32_t VideoRenderDirect3D9::ConfigureRenderer(const uint32_t channel,
+                                                const uint16_t streamId,
+                                                const unsigned int zOrder,
+                                                const float left,
+                                                const float top,
+                                                const float right,
+                                                const float bottom)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return -1;
+    }
+    // Only allow one stream per channel, demuxing is
+    ddobj->SetStreamSettings(0, zOrder, left, top, right, bottom);
+
+    return 0;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/windows/video_render_direct3d9.h b/webrtc/modules/video_render/windows/video_render_direct3d9.h
new file mode 100644
index 0000000..eaa8c14
--- /dev/null
+++ b/webrtc/modules/video_render/windows/video_render_direct3d9.h
@@ -0,0 +1,256 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
+
+#include <memory>
+
+#include "webrtc/modules/video_render/windows/i_video_render_win.h"
+
+#include <d3d9.h>
+#include <ddraw.h>
+
+#include <map>
+
+#include "webrtc/base/platform_thread.h"
+#include "webrtc/modules/video_render/video_render_defines.h"
+
+#pragma comment(lib, "d3d9.lib")       // located in DirectX SDK
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventTimerWrapper;
+class Trace;
+
+class D3D9Channel: public VideoRenderCallback
+{
+public:
+    D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
+                    CriticalSectionWrapper* critSect, Trace* trace);
+
+    virtual ~D3D9Channel();
+
+    // Inherited from VideoRenderCallback, called from the VideoAPI class.
+    // Called when the incoming frame size and/or number of streams in the mix changes.
+    virtual int FrameSizeChange(int width, int height, int numberOfStreams);
+
+    // A new frame is delivered.
+    virtual int DeliverFrame(const VideoFrame& videoFrame);
+    virtual int32_t RenderFrame(const uint32_t streamId,
+                                const VideoFrame& videoFrame);
+
+    // Called to check if the video frame is updated.
+    int IsUpdated(bool& isUpdated);
+    // Called after the video frame has been rendered to the screen
+    int RenderOffFrame();
+    // Called to get the texture that contains the video frame
+    LPDIRECT3DTEXTURE9 GetTexture();
+    // Called to get the texture (video frame) size
+    int GetTextureWidth();
+    int GetTextureHeight();
+    // Set and get the placement of the stream within the render window.
+    void SetStreamSettings(uint16_t streamId,
+                           uint32_t zOrder,
+                           float startWidth,
+                           float startHeight,
+                           float stopWidth,
+                           float stopHeight);
+    int GetStreamSettings(uint16_t streamId,
+                          uint32_t& zOrder,
+                          float& startWidth,
+                          float& startHeight,
+                          float& stopWidth,
+                          float& stopHeight);
+
+    int ReleaseTexture();
+    int RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice);
+
+private:
+    //critical section passed from the owner
+    CriticalSectionWrapper* _critSect;
+    LPDIRECT3DDEVICE9 _pd3dDevice;
+    LPDIRECT3DTEXTURE9 _pTexture;
+
+    bool _bufferIsUpdated;
+    // the frame size
+    int _width;
+    int _height;
+    // Stream settings.
+    // TODO: support multiple streams in one channel.
+    uint16_t _streamId;
+    uint32_t _zOrder;
+    float _startWidth;
+    float _startHeight;
+    float _stopWidth;
+    float _stopHeight;
+};
+
+class VideoRenderDirect3D9 : public IVideoRenderWin
+{
+public:
+    VideoRenderDirect3D9(Trace* trace, HWND hWnd, bool fullScreen);
+    ~VideoRenderDirect3D9();
+
+public:
+    //IVideoRenderWin
+
+    /**************************************************************************
+     *
+     *   Init
+     *
+     ***************************************************************************/
+    virtual int32_t Init();
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+    virtual VideoRenderCallback* CreateChannel(const uint32_t streamId,
+                                               const uint32_t zOrder,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom);
+
+    virtual int32_t DeleteChannel(const uint32_t streamId);
+
+    virtual int32_t GetStreamSettings(const uint32_t channel,
+                                      const uint16_t streamId,
+                                      uint32_t& zOrder,
+                                      float& left, float& top,
+                                      float& right, float& bottom);
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual int32_t StartRender();
+    virtual int32_t StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual bool IsFullScreen();
+
+    virtual int32_t SetCropping(const uint32_t channel,
+                                const uint16_t streamId,
+                                const float left, const float top,
+                                const float right, const float bottom);
+
+    virtual int32_t ConfigureRenderer(const uint32_t channel,
+                                      const uint16_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t SetTransparentBackground(const bool enable);
+
+    virtual int32_t ChangeWindow(void* window);
+
+    virtual int32_t GetGraphicsMemory(uint64_t& totalMemory,
+                                      uint64_t& availableMemory);
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t colorText,
+                            const uint32_t colorBg,
+                            const float left, const float top,
+                            const float right, const float bottom);
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left, const float top,
+                              const float right, const float bottom);
+
+public:
+    // Get a channel by channel id
+    D3D9Channel* GetD3DChannel(int channel);
+    int UpdateRenderSurface();
+
+protected:
+    // The thread rendering the screen
+    static bool ScreenUpdateThreadProc(void* obj);
+    bool ScreenUpdateProcess();
+
+private:
+    // Init/close the d3d device
+    int InitDevice();
+    int CloseDevice();
+
+    // Transparent related functions
+    int SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
+                            DDCOLORKEY* transparentColorKey,
+                            DWORD width,
+                            DWORD height);
+
+    CriticalSectionWrapper& _refD3DCritsect;
+    Trace* _trace;
+    // TODO(pbos): Remove unique_ptr and use PlatformThread directly.
+    std::unique_ptr<rtc::PlatformThread> _screenUpdateThread;
+    EventTimerWrapper* _screenUpdateEvent;
+
+    HWND _hWnd;
+    bool _fullScreen;
+    RECT _originalHwndRect;
+    // FIXME: we probably don't need this since all the information can be obtained from _d3dChannels.
+    int _channel;
+    //Window size
+    UINT _winWidth;
+    UINT _winHeight;
+
+    // Device
+    LPDIRECT3D9 _pD3D; // Used to create the D3DDevice
+    LPDIRECT3DDEVICE9 _pd3dDevice; // Our rendering device
+    LPDIRECT3DVERTEXBUFFER9 _pVB; // Buffer to hold Vertices
+    LPDIRECT3DTEXTURE9 _pTextureLogo;
+
+    std::map<int, D3D9Channel*> _d3dChannels;
+    std::multimap<int, unsigned int> _d3dZorder;
+
+    // The position where the logo will be placed
+    float _logoLeft;
+    float _logoTop;
+    float _logoRight;
+    float _logoBottom;
+
+    typedef HRESULT (WINAPI *DIRECT3DCREATE9EX)(UINT SDKVersion, IDirect3D9Ex**);
+    LPDIRECT3DSURFACE9 _pd3dSurface;
+
+    DWORD GetVertexProcessingCaps();
+    int InitializeD3D(HWND hWnd, D3DPRESENT_PARAMETERS* pd3dpp);
+
+    D3DPRESENT_PARAMETERS _d3dpp;
+    int ResetDevice();
+
+    int UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB, int offset,
+                            float startWidth, float startHeight,
+                            float stopWidth, float stopHeight);
+
+    // Graphics memory figures reported through GetGraphicsMemory().
+    DWORD _totalMemory;
+    DWORD _availableMemory;
+};
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
diff --git a/webrtc/modules/video_render/windows/video_render_windows_impl.cc b/webrtc/modules/video_render/windows/video_render_windows_impl.cc
new file mode 100644
index 0000000..042d7fd
--- /dev/null
+++ b/webrtc/modules/video_render/windows/video_render_windows_impl.cc
@@ -0,0 +1,337 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/engine_configurations.h"
+#include "webrtc/modules/video_render/windows/video_render_windows_impl.h"
+
+#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/include/trace.h"
+#ifdef DIRECT3D9_RENDERING
+#include "webrtc/modules/video_render/windows/video_render_direct3d9.h"
+#endif
+
+#include <tchar.h>
+
+namespace webrtc {
+
+VideoRenderWindowsImpl::VideoRenderWindowsImpl(const int32_t id,
+    const VideoRenderType videoRenderType, void* window, const bool fullscreen)
+    : _renderWindowsCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+      _ptrWindow(window),
+      _fullscreen(fullscreen),
+      _renderMethod(kVideoRenderWinD3D9),
+      _ptrRendererWin(NULL) {
+}
+
+VideoRenderWindowsImpl::~VideoRenderWindowsImpl()
+{
+    delete &_renderWindowsCritsect;
+    if (_ptrRendererWin)
+    {
+        delete _ptrRendererWin;
+        _ptrRendererWin = NULL;
+    }
+}
+
+int32_t VideoRenderWindowsImpl::Init()
+{
+    // Create the win renderer
+    switch (_renderMethod)
+    {
+        case kVideoRenderWinD3D9:
+        {
+#ifdef DIRECT3D9_RENDERING
+            // operator new throws on failure, so no NULL check is needed.
+            _ptrRendererWin = new VideoRenderDirect3D9(NULL, (HWND) _ptrWindow,
+                                                       _fullscreen);
+            break;
+#else
+            // No renderer is available without D3D9 support.
+            return -1;
+#endif  // DIRECT3D9_RENDERING
+        }
+        default:
+            break;
+    }
+
+    //Init renderer
+    if (_ptrRendererWin)
+        return _ptrRendererWin->Init();
+    else
+        return -1;
+}
+
+int32_t VideoRenderWindowsImpl::ChangeWindow(void* window)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    if (!_ptrRendererWin)
+    {
+        return -1;
+    }
+    else
+    {
+        return _ptrRendererWin->ChangeWindow(window);
+    }
+}
+
+VideoRenderCallback*
+VideoRenderWindowsImpl::AddIncomingRenderStream(const uint32_t streamId,
+                                                const uint32_t zOrder,
+                                                const float left,
+                                                const float top,
+                                                const float right,
+                                                const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    VideoRenderCallback* renderCallback = NULL;
+
+    if (_ptrRendererWin)
+    {
+        renderCallback = _ptrRendererWin->CreateChannel(streamId, zOrder, left,
+                                                        top, right, bottom);
+    }
+
+    return renderCallback;
+}
+
+int32_t VideoRenderWindowsImpl::DeleteIncomingRenderStream(
+    const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->DeleteChannel(streamId);
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::GetIncomingRenderStreamProperties(
+    const uint32_t streamId, uint32_t& zOrder, float& left, float& top,
+    float& right, float& bottom) const
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    zOrder = 0;
+    left = 0;
+    top = 0;
+    right = 0;
+    bottom = 0;
+
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->GetStreamSettings(streamId, 0, zOrder, left,
+                                                   top, right, bottom);
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::StartRender()
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->StartRender();
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::StopRender()
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->StopRender();
+    }
+    return error;
+}
+
+VideoRenderType VideoRenderWindowsImpl::RenderType()
+{
+    return kRenderWindows;
+}
+
+RawVideoType VideoRenderWindowsImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool VideoRenderWindowsImpl::FullScreen()
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    bool fullscreen = false;
+    if (_ptrRendererWin)
+    {
+        fullscreen = _ptrRendererWin->IsFullScreen();
+    }
+    return fullscreen;
+}
+
+int32_t VideoRenderWindowsImpl::GetGraphicsMemory(
+    uint64_t& totalGraphicsMemory, uint64_t& availableGraphicsMemory) const
+{
+    if (_ptrRendererWin)
+    {
+        return _ptrRendererWin->GetGraphicsMemory(totalGraphicsMemory,
+                                                  availableGraphicsMemory);
+    }
+
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return -1;
+}
+
+int32_t VideoRenderWindowsImpl::GetScreenResolution(uint32_t& screenWidth,
+                                                    uint32_t& screenHeight) const
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    screenWidth = 0;
+    screenHeight = 0;
+    return 0;
+}
+
+uint32_t VideoRenderWindowsImpl::RenderFrameRate(const uint32_t streamId)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    return 0;
+}
+
+int32_t VideoRenderWindowsImpl::SetStreamCropping(const uint32_t streamId,
+                                                  const float left,
+                                                  const float top,
+                                                  const float right,
+                                                  const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->SetCropping(streamId, 0, left, top, right,
+                                             bottom);
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::ConfigureRenderer(const uint32_t streamId,
+                                                  const unsigned int zOrder,
+                                                  const float left,
+                                                  const float top,
+                                                  const float right,
+                                                  const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->ConfigureRenderer(streamId, 0, zOrder, left,
+                                                   top, right, bottom);
+    }
+
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::SetTransparentBackground(const bool enable)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->SetTransparentBackground(enable);
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::SetText(const uint8_t textId,
+                                        const uint8_t* text,
+                                        const int32_t textLength,
+                                        const uint32_t textColorRef,
+                                        const uint32_t backgroundColorRef,
+                                        const float left,
+                                        const float top,
+                                        const float right,
+                                        const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->SetText(textId, text, textLength,
+                                         textColorRef, backgroundColorRef,
+                                         left, top, right, bottom);
+    }
+    return error;
+}
+
+int32_t VideoRenderWindowsImpl::SetBitmap(const void* bitMap,
+                                          const uint8_t pictureId,
+                                          const void* colorKey,
+                                          const float left, const float top,
+                                          const float right, const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    int32_t error = -1;
+    if (_ptrRendererWin)
+    {
+        error = _ptrRendererWin->SetBitmap(bitMap, pictureId, colorKey, left,
+                                           top, right, bottom);
+    }
+    return error;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/modules/video_render/windows/video_render_windows_impl.h b/webrtc/modules/video_render/windows/video_render_windows_impl.h
new file mode 100644
index 0000000..aaa3f81
--- /dev/null
+++ b/webrtc/modules/video_render/windows/video_render_windows_impl.h
@@ -0,0 +1,137 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
+
+#include <Winerror.h>
+#include <dxdiag.h>
+
+#include "webrtc/modules/video_render/i_video_render.h"
+#include "webrtc/modules/video_render/windows/i_video_render_win.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
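+// Expands a static TCHAR buffer into the (buffer, element count) argument
+// pair that several Win32/DirectX string helpers take; presumably kept for
+// the dxdiag queries this header pulls in.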
+#define EXPAND(x)            x, sizeof(x)/sizeof(TCHAR)
+
+enum VideoRenderWinMethod {
+  kVideoRenderWinD3D9 = 0,
+};
+
+// Class definitions
+class VideoRenderWindowsImpl : public IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderWindowsImpl(const int32_t id,
+                           const VideoRenderType videoRenderType,
+                           void* window, const bool fullscreen);
+
+    virtual ~VideoRenderWindowsImpl();
+
+    virtual int32_t Init();
+
+    virtual int32_t ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback* AddIncomingRenderStream(
+        const uint32_t streamId, const uint32_t zOrder,
+        const float left, const float top,
+        const float right, const float bottom);
+
+    virtual int32_t DeleteIncomingRenderStream(const uint32_t streamId);
+
+    virtual int32_t GetIncomingRenderStreamProperties(
+        const uint32_t streamId, uint32_t& zOrder,
+        float& left, float& top, float& right, float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual int32_t StartRender();
+
+    virtual int32_t StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual int32_t GetGraphicsMemory(
+        uint64_t& totalGraphicsMemory,
+        uint64_t& availableGraphicsMemory) const;
+
+    virtual int32_t GetScreenResolution(uint32_t& screenWidth,
+                                        uint32_t& screenHeight) const;
+
+    virtual uint32_t RenderFrameRate(const uint32_t streamId);
+
+    virtual int32_t SetStreamCropping(const uint32_t streamId,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t ConfigureRenderer(const uint32_t streamId,
+                                      const unsigned int zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual int32_t SetTransparentBackground(const bool enable);
+
+    virtual int32_t SetText(const uint8_t textId,
+                            const uint8_t* text,
+                            const int32_t textLength,
+                            const uint32_t textColorRef,
+                            const uint32_t backgroundColorRef,
+                            const float left, const float top,
+                            const float right, const float bottom);
+
+    virtual int32_t SetBitmap(const void* bitMap,
+                              const uint8_t pictureId,
+                              const void* colorKey,
+                              const float left, const float top,
+                              const float right, const float bottom);
+
+private:
+    CriticalSectionWrapper& _renderWindowsCritsect;
+
+    void* _ptrWindow;
+    bool _fullscreen;
+
+    VideoRenderWinMethod _renderMethod;
+    IVideoRenderWin* _ptrRendererWin;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_