Revert "gerrit_util: Use httplib2 for communication instead of httplib."

This reverts commit d75686454bb0ad8d4718ff6bf9613772ca8027b5.

Reason for revert:
third_party.httplib2.ServerNotFoundError: Unable to find the server at metadata.google.internal
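
The likely mechanism (inferred from the GCE-detection hunk in the diff below, not stated in the traceback): the probe only catches socket.error around the metadata request, while httplib2 reports an unresolvable host with its own ServerNotFoundError, so off GCE the probe raises instead of returning False. A minimal sketch of that mismatch, assuming the bundled third_party.httplib2 is importable and the metadata host does not resolve; the URL and helper name below are illustrative, not copied from gerrit_util:

  # Sketch only: reproduces the exception-type mismatch behind this revert.
  import socket

  from third_party import httplib2  # assumes depot_tools' bundled copy is on sys.path

  def _probe_gce_metadata():
    # Mirrors the reverted logic: only socket.error is handled.
    try:
      http = httplib2.Http()
      resp, _ = http.request(
          'http://metadata.google.internal/computeMetadata/v1/', 'GET')
    except socket.error:
      return False
    # httplib2 responses behave like dicts with lower-cased header names.
    return resp.get('metadata-flavor') == 'Google'

  try:
    _probe_gce_metadata()
  except httplib2.ServerNotFoundError as e:
    # ServerNotFoundError is not a socket.error subclass, so it escapes the
    # handler above and surfaces as the error quoted in this CL.
    print('not caught by the socket.error handler: %s' % e)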

Original change's description:
> gerrit_util: Use httplib2 for communication instead of httplib.
> 
> Retain the httplib import to continue using its constants, but actually make
> the http(s) connections using httplib2. The latter has built-in support for
> proxy settings, which allows people behind proxies to interact with Gerrit.
> 
> Compared to httplib, the biggest changes are:
> - There's only one Http class instead of HTTPConnection and HTTPSConnection.
> - Http.request() returns a tuple (response, contents).
> - Http.request() expects a full URI instead of just a path, as Http's
>   constructor does not take a host parameter.
> - The response object inherits from dict.
> - All headers in a response are lower-cased.
> 
> All in all, it is apparent that httplib2 support was retrofitted into the
> code, but that should not worsen its readability overall.
> 
> Patch written in collaboration with Alexis Menard <alexis.menard@intel.com>.
> 
> BUG=672729
> R=alexis.menard@intel.com,agable@chromium.org,tandrii@chromium.org
> 
> Change-Id: Ic40e804064e74e89bc2ad979572628f1bd78c19a
> Reviewed-on: https://chromium-review.googlesource.com/458221
> Reviewed-by: Aaron Gable <agable@chromium.org>
> Commit-Queue: Raphael Kubo da Costa (rakuco) <raphael.kubo.da.costa@intel.com>
> 
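
For reference against the differences listed in the original description above, a side-by-side sketch of the two calling conventions; the host, path, and proxy values are placeholders, and the ProxyInfo usage assumes the bundled copy exposes httplib2's standard proxy helpers:

  # Sketch only: not code from gerrit_util.
  import httplib

  from third_party import httplib2

  # httplib: per-protocol connection classes bound to a host, path-only URLs,
  # header lookup via a method, body read from the response object.
  conn = httplib.HTTPSConnection('gerrit.example.com')
  conn.request('GET', '/changes/?q=status:open')
  resp = conn.getresponse()
  content_type = resp.getheader('Content-Type')
  body = resp.read()

  # httplib2: one Http class for both protocols, full URIs, a (response,
  # contents) tuple, a dict-like response with lower-cased header names, and
  # built-in proxy support.
  proxy = httplib2.ProxyInfo(
      httplib2.socks.PROXY_TYPE_HTTP, 'proxy.example.com', 8080)
  http = httplib2.Http(proxy_info=proxy)
  resp2, contents = http.request(
      'https://gerrit.example.com/changes/?q=status:open', 'GET')
  content_type2 = resp2.get('content-type')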

TBR=agable@chromium.org,alexis.menard@intel.com,raphael.kubo.da.costa@intel.com,tandrii@chromium.org,chromium-reviews@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=672729

Change-Id: Idfacd314b381232733bb92a02ec2fb85f016effd
Reviewed-on: https://chromium-review.googlesource.com/457792
Reviewed-by: Aaron Gable <agable@chromium.org>
Commit-Queue: Aaron Gable <agable@chromium.org>
diff --git a/gerrit_util.py b/gerrit_util.py
index 2c0267e..7d42835 100755
--- a/gerrit_util.py
+++ b/gerrit_util.py
@@ -11,7 +11,7 @@
 import base64
 import contextlib
 import cookielib
-import httplib  # Still used for its constants.
+import httplib
 import json
 import logging
 import netrc
@@ -27,7 +27,6 @@
 from cStringIO import StringIO
 
 import gclient_utils
-from third_party import httplib2
 
 LOGGER = logging.getLogger()
 TRY_LIMIT = 5
@@ -63,8 +62,10 @@
 def GetConnectionClass(protocol=None):
   if protocol is None:
     protocol = GERRIT_PROTOCOL
-  if protocol in ('http', 'https'):
-    return httplib2.Http
+  if protocol == 'https':
+    return httplib.HTTPSConnection
+  elif protocol == 'http':
+    return httplib.HTTPConnection
   else:
     raise RuntimeError(
         "Don't know how to work with protocol '%s'" % protocol)
@@ -228,11 +229,11 @@
   def _test_is_gce(cls):
     # Based on https://cloud.google.com/compute/docs/metadata#runninggce
     try:
-      resp, _ = cls._get(cls._INFO_URL)
+      resp = cls._get(cls._INFO_URL)
     except socket.error:
       # Could not resolve URL.
       return False
-    return resp.get('metadata-flavor') == 'Google'
+    return resp.getheader('Metadata-Flavor', None) == 'Google'
 
   @staticmethod
   def _get(url, **kwargs):
@@ -246,11 +247,12 @@
         next_delay_sec *= 2
 
       p = urlparse.urlparse(url)
-      c = GetConnectionClass(protocol=p.scheme)()
-      resp, contents = c.request(url, 'GET', **kwargs)
+      c = GetConnectionClass(protocol=p.scheme)(p.netloc)
+      c.request('GET', url, **kwargs)
+      resp = c.getresponse()
       LOGGER.debug('GET [%s] #%d/%d (%d)', url, i+1, TRY_LIMIT, resp.status)
       if resp.status < httplib.INTERNAL_SERVER_ERROR:
-        return (resp, contents)
+        return resp
 
   @classmethod
   def _get_token_dict(cls):
@@ -259,10 +261,10 @@
       if cls._token_expiration < time.time() - 25:
         return cls._token_cache
 
-    resp, contents = cls._get(cls._ACQUIRE_URL, headers=cls._ACQUIRE_HEADERS)
+    resp = cls._get(cls._ACQUIRE_URL, headers=cls._ACQUIRE_HEADERS)
     if resp.status != httplib.OK:
       return None
-    cls._token_cache = json.loads(contents)
+    cls._token_cache = json.load(resp)
     cls._token_expiration = cls._token_cache['expires_in'] + time.time()
     return cls._token_cache
 
@@ -304,11 +306,12 @@
   conn = GetConnectionClass()(host)
   conn.req_host = host
   conn.req_params = {
-      'uri': urlparse.urljoin('%s://%s' % (GERRIT_PROTOCOL, host), url),
+      'url': url,
       'method': reqtype,
       'headers': headers,
       'body': body,
   }
+  conn.request(**conn.req_params)
   return conn
 
 
@@ -316,7 +319,7 @@
   """Reads an http response from a connection into a string buffer.
 
   Args:
-    conn: An Http object created by CreateHttpConn above.
+    conn: An HTTPSConnection or HTTPConnection created by CreateHttpConn, above.
     expect_status: Success is indicated by this status in the response.
     ignore_404: For many requests, gerrit-on-borg will return 404 if the request
                 doesn't match the database contents.  In most such cases, we
@@ -326,10 +329,10 @@
 
   sleep_time = 0.5
   for idx in range(TRY_LIMIT):
-    response, contents = conn.request(**conn.req_params)
+    response = conn.getresponse()
 
     # Check if this is an authentication issue.
-    www_authenticate = response.get('www-authenticate')
+    www_authenticate = response.getheader('www-authenticate')
     if (response.status in (httplib.UNAUTHORIZED, httplib.FOUND) and
         www_authenticate):
       auth_match = re.search('realm="([^"]+)"', www_authenticate, re.I)
@@ -341,25 +344,31 @@
     # If response.status < 500 then the result is final; break retry loop.
     if response.status < 500:
       LOGGER.debug('got response %d for %s %s', response.status,
-                   conn.req_params['method'], conn.req_params['uri'])
+                   conn.req_params['method'], conn.req_params['url'])
       break
     # A status >=500 is assumed to be a possible transient error; retry.
     http_version = 'HTTP/%s' % ('1.1' if response.version == 11 else '1.0')
     LOGGER.warn('A transient error occurred while querying %s:\n'
                 '%s %s %s\n'
                 '%s %d %s',
-                conn.host, conn.req_params['method'], conn.req_params['uri'],
+                conn.host, conn.req_params['method'], conn.req_params['url'],
                 http_version, http_version, response.status, response.reason)
     if TRY_LIMIT - idx > 1:
       LOGGER.warn('... will retry %d more times.', TRY_LIMIT - idx - 1)
       time.sleep(sleep_time)
       sleep_time = sleep_time * 2
+      req_host = conn.req_host
+      req_params = conn.req_params
+      conn = GetConnectionClass()(req_host)
+      conn.req_host = req_host
+      conn.req_params = req_params
+      conn.request(**req_params)
   if ignore_404 and response.status == 404:
     return StringIO()
   if response.status != expect_status:
-    reason = '%s: %s' % (response.reason, contents)
+    reason = '%s: %s' % (response.reason, response.read())
     raise GerritError(response.status, reason)
-  return StringIO(contents)
+  return StringIO(response.read())
 
 
 def ReadHttpJsonResponse(conn, expect_status=200, ignore_404=True):