[gs_archive_server] convert gs_archive_server source files to py3
BUG=chromium:1123908
TEST=Manually tested on chromeos2-devservertest
Change-Id: Idd5c9bcc142a6f11a7a95e8f1feb9b253adc78b8
Reviewed-on: https://chromium-review.googlesource.com/c/chromiumos/platform/dev-util/+/2720705
Tested-by: Sanika Kulkarni <sanikak@chromium.org>
Auto-Submit: Sanika Kulkarni <sanikak@chromium.org>
Commit-Queue: Sanika Kulkarni <sanikak@chromium.org>
Reviewed-by: Congbin Guo <guocb@chromium.org>
diff --git a/gs_cache/.vpython b/gs_cache/.vpython
index d6f8482..7379de9 100644
--- a/gs_cache/.vpython
+++ b/gs_cache/.vpython
@@ -1,4 +1,4 @@
-python_version: "2.7"
+python_version: "3.8"
wheel: <
name: "infra/python/wheels/attrs-py2_py3"
@@ -21,11 +21,6 @@
>
wheel: <
- name: "infra/python/wheels/coverage/${vpython_platform}"
- version: "version:4.5.1"
->
-
-wheel: <
name: "infra/python/wheels/funcsigs-py2_py3"
version: "version:1.0.2"
>
@@ -76,11 +71,6 @@
>
wheel: <
- name: "infra/python/wheels/pytest-cov-py2_py3"
- version: "version:2.5.1"
->
-
-wheel: <
name: "infra/python/wheels/pytz-py2_py3"
version: "version:2018.4"
>
diff --git a/gs_cache/fake_omaha.py b/gs_cache/fake_omaha.py
index 57ca934..c0968d0 100644
--- a/gs_cache/fake_omaha.py
+++ b/gs_cache/fake_omaha.py
@@ -7,8 +7,6 @@
This is a short-term solution in order to deprecate devserver.py from labs.
"""
-from __future__ import absolute_import
-from __future__ import division
from __future__ import print_function
import cherrypy # pylint: disable=import-error
diff --git a/gs_cache/fake_telemetry.py b/gs_cache/fake_telemetry.py
index 9cd38a2..c673098 100644
--- a/gs_cache/fake_telemetry.py
+++ b/gs_cache/fake_telemetry.py
@@ -7,8 +7,6 @@
This is a short-term solution in order to deprecate devserver.py from labs.
"""
-from __future__ import absolute_import
-from __future__ import division
from __future__ import print_function
import cherrypy # pylint: disable=import-error
diff --git a/gs_cache/gs_archive_server.py b/gs_cache/gs_archive_server.py
index a7095b4..c5ceccc 100644
--- a/gs_cache/gs_archive_server.py
+++ b/gs_cache/gs_archive_server.py
@@ -16,19 +16,17 @@
Extract a file from a compressed/uncompressed TAR archive.
"""
-from __future__ import absolute_import
-from __future__ import division
from __future__ import print_function
import argparse
import functools
-import httplib # pylint: disable=deprecated-module, bad-python3-import
import os
import subprocess
import sys
import tempfile
-import urllib
-import urlparse # pylint: disable=deprecated-module, bad-python3-import
+
+from six.moves import http_client
+from six.moves import urllib
import requests
import cherrypy # pylint: disable=import-error
@@ -113,7 +111,7 @@
try:
value = all_params[param_name]
except KeyError:
- raise cherrypy.HTTPError(httplib.BAD_REQUEST,
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST,
'Parameter "%s" is required!' % param_name)
return set(value) if isinstance(value, list) else {value}
@@ -134,7 +132,7 @@
except ValueError as err:
# The exception message is just a plain text, so wrap it with
# cherrypy.HTTPError to have necessary HTML tags
- raise cherrypy.HTTPError(httplib.BAD_REQUEST, err.message) # pylint: disable=exception-message-attribute
+ raise cherrypy.HTTPError(http_client.BAD_REQUEST, err.message) # pylint: disable=exception-message-attribute
return func_wrapper
@@ -170,8 +168,10 @@
def _call(self, action, path, args=None, headers=None):
"""Helper function to generate all RPC calls to the proxy server."""
- url = urlparse.urlunsplit(self._url + ('%s/%s' % (action, path),
- urllib.urlencode(args or {}), None))
+ url = urllib.parse.urlunsplit(
+ self._url + ('%s/%s' % (action, path),
+ urllib.parse.urlencode(args or {}),
+ None))
_log('Sending request to caching server: %s', url)
# The header to control using or bypass cache.
_log_filtered_headers(headers, ('Range', 'X-No-Cache',
@@ -211,7 +211,7 @@
return self._call('download', path, headers=headers)
-_SERVICE_ACCOUNT_BOTO_FILE = os.path.expanduser("~/.boto.service_account")
+_SERVICE_ACCOUNT_BOTO_FILE = os.path.expanduser('~/.boto.service_account')
class _GSContext(object):
@@ -221,7 +221,7 @@
self._ctx_default = gs.GSContext()
if os.path.isfile(_SERVICE_ACCOUNT_BOTO_FILE):
self._ctx_service_account = gs.GSContext(
- boto_file=_SERVICE_ACCOUNT_BOTO_FILE)
+ boto_file=_SERVICE_ACCOUNT_BOTO_FILE)
else:
self._ctx_service_account = None
@@ -231,6 +231,7 @@
Args:
path: The GS path of the file to fetch.
want_content: A boolean of whether fetch the content (as an iterator).
+
Returns:
A tuple of (stat, content) which is the GS file stat and content
iterator (or None).
@@ -243,13 +244,13 @@
raise
if not err.stderr.startswith("You aren't authorized to read "):
raise
- _log("Not authorized by default. Trying service account.")
+ _log('Not authorized by default. Trying service account.')
stat = self._ctx_service_account.Stat(path)
ctx = self._ctx_service_account
if want_content:
- _log('Downloading %s', path, level=logging.INFO)
- return stat, ctx.StreamingCat(path)
+ _log('Downloading %s', path, level=logging.INFO)
+ return stat, ctx.StreamingCat(path)
return stat, None
@@ -276,7 +277,7 @@
stderr=subprocess.PIPE)
content, _ = proc.communicate()
except subprocess.CalledProcessError as e:
- raise cherrypy.HTTPError(httplib.NOT_FOUND, e.output)
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, e.output)
return content
@@ -300,12 +301,12 @@
want_content = cherrypy.request.method != 'HEAD'
stat, content = self._gsutil.fetch_file(path, want_content)
except gs.GSNoSuchKey as err:
- raise cherrypy.HTTPError(httplib.NOT_FOUND, err.message) # pylint: disable=exception-message-attribute
+ raise cherrypy.HTTPError(http_client.NOT_FOUND, err.message) # pylint: disable=exception-message-attribute
except gs.GSCommandError as err:
if "You aren't authorized to read" in err.result.error:
- status = httplib.UNAUTHORIZED
+ status = http_client.UNAUTHORIZED
else:
- status = httplib.SERVICE_UNAVAILABLE
+ status = http_client.SERVICE_UNAVAILABLE
raise cherrypy.HTTPError(status, '%s: %s' % (err.message, # pylint: disable=exception-message-attribute
err.result.error))
@@ -476,11 +477,11 @@
The target type is a tuple of (scheme, netloc).
"""
- split_result = urlparse.urlsplit(input_string)
+ split_result = urllib.parse.urlsplit(input_string)
if not split_result.scheme:
input_string = 'http://%s' % input_string
- split_result = urlparse.urlsplit(input_string)
+ split_result = urllib.parse.urlsplit(input_string)
if not split_result.scheme or not split_result.netloc:
raise argparse.ArgumentTypeError('Wrong URL format: %s' % input_string)
@@ -536,7 +537,7 @@
if args.socket:
# in order to allow group user writing to domain socket, the directory
# should have GID bit set, i.e. g+s
- os.umask(0002) # pylint: disable=old-octal-literal
+ os.umask(0o002)
cherrypy.server.socket_port = args.port
cherrypy.server.socket_file = args.socket
diff --git a/gs_cache/nebraska_wrapper.py b/gs_cache/nebraska_wrapper.py
index 8b8c0cd..db97ae3 100644
--- a/gs_cache/nebraska_wrapper.py
+++ b/gs_cache/nebraska_wrapper.py
@@ -50,8 +50,6 @@
class NebraskaWrapperError(Exception):
"""Exception class used by this module."""
- # pylint: disable=unnecessary-pass
- pass
class NebraskaWrapper(object):
@@ -212,7 +210,7 @@
# We are not worried about multiple threads writing to the same file as
# we are creating a different directory for each initialization of this
# class anyway.
- with open(file_path, 'w') as f:
+ with open(file_path, 'wb') as f:
f.write(resp.content)
except Exception as e:
raise NebraskaWrapperError('An error occurred while trying to complete '
diff --git a/gs_cache/nginx_access_log_metrics.py b/gs_cache/nginx_access_log_metrics.py
index 769fa66..8b9878b 100644
--- a/gs_cache/nginx_access_log_metrics.py
+++ b/gs_cache/nginx_access_log_metrics.py
@@ -10,8 +10,6 @@
Cache performance.
"""
-from __future__ import absolute_import
-from __future__ import division
from __future__ import print_function
import argparse
diff --git a/gs_cache/pytest.ini b/gs_cache/pytest.ini
index 3cf8346..31fb6c2 100644
--- a/gs_cache/pytest.ini
+++ b/gs_cache/pytest.ini
@@ -1,4 +1,3 @@
[pytest]
addopts =
--doctest-modules
- --cov gs_cache
diff --git a/gs_cache/tarfile_utils.py b/gs_cache/tarfile_utils.py
deleted file mode 100644
index 200ceb5..0000000
--- a/gs_cache/tarfile_utils.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Copyright 2018 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Utils for manipulating tar format archives.
-
-We use tar command to manipulate tar file other than using Python tarfile module
-because that module is very slow in the case of large file.
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-import collections
-import re
-
-from chromite.lib import cros_logging as logging
-
-_logger = logging.getLogger(__name__)
-
-
-def _round_up_to_512(number):
- """Up round the given |number| to smallest multiple of 512.
-
- Examples:
- >>> for n in (0, 1, 512, 1025):
- ... _round_up_to_512(n)
- 0
- 512
- 512
- 1536
-
- Args:
- number: Zero or positive integer.
-
- Returns:
- The smallest multiple of 512.
- """
- return (number + 511) & -512
-
-
-def _get_command_result_from_tar_tvR(an_output_line):
- """Get an object of _TarListCommandResult from one line of `tar tvR` output.
-
- Args:
- an_output_line: One line of `tar tvR` output. Trailing '\n' is acceptable.
- The last line of `tar tvR` is acceptable.
-
- Returns:
- An object of _TarListCommandResult.
- """
- separators = re.compile('[ \t:]+')
- fields_num = len(_TarListCommandResult._fields)
- fields = re.split(separators, an_output_line.rstrip('\n'),
- maxsplit=fields_num - 1)
- try:
- return _TarListCommandResult._make(fields)
- except TypeError:
- # The last line of `tar tvR` hasn't enough fields. Fill with fake data.
- _logger.debug('This should be the last line of `tar tvR`: %s',
- an_output_line)
- fields.extend(_TarListCommandResult._fields[len(fields):])
- return _TarListCommandResult._make(fields)
-
-
-def _block_to_bytes(block_num):
- """Get offset of the block |block_num| in bytes, i.e. times 512"""
- return block_num << 9 # * 512
-
-
-# The tuple of tar member information to be returned to caller.
-# Fields:
-# filename: The file name of the tar member.
-# record_start: The zero-based start offset of the file record, in bytes.
-# record_size: The size of the file record, in bytes.
-# content_start: The zero-based start offset of the file content, in bytes.
-# size: The size of the file content, in bytes.
-TarMemberInfo = collections.namedtuple(
- 'TarMemberInfo', ('filename', 'record_start', 'record_size',
- 'content_start', 'size'))
-
-
-class _TarListCommandResult(collections.namedtuple(
- '_TarListCommandResult', ('block', 'block_num', 'mode', 'ownership',
- 'size_str', 'date', 'hour', 'min', 'filename'))):
- """Information of each member in a Tar archive.
-
- This class using the output of command `tar tvR` to compute more information
- we need, e.g. file content start offset, etc.
-
- The output of `tar tvR` is like:
- block 0: -rw-r--r-- user/group <size> <date> <time> <file name>
- ...
- block 7: ** Block of NULs **
- """
-
- @property
- def record_start(self):
- """Start offset of the file record, in bytes."""
- return _block_to_bytes(int(self.block_num))
-
- @property
- def size(self):
- return int(self.size_str)
-
-
-def _get_prev_content_start(cur_record_start, prev_file):
- """Deduct prev file content information from current file record information.
-
- In tar format, each file record has a header and followed by file content.
- Both header and file content are rounded up to 512 Bytes. The header length is
- variable, but we can get the current file content starting offset by
- subtracting up rounded file size from next file header starting offset, i.e.
-
- current_offset = block(next_file) * 512 - round_up_to_512(current_size)
-
- |********|************************.......|********|****
- | header | content | header |
- | |<----- prev_size ----->|
- | |<- prev_size round up to 512 ->|
- ^prev_content_start ^cur_record_start
-
- Args:
- cur_record_start: The zero-based start position of current file record, in
- bytes.
- prev_file: An instance of _TarListCommandResult which has size of the
- previous file.
-
- Returns:
- The zero-based start position of previous file content, in bytes.
- """
- return cur_record_start - _round_up_to_512(prev_file.size)
-
-
-def list_tar_members(tar_tvR_output):
- """List the members of a tar with information.
-
- Yield each member of the tar archive with information of record start/size,
- content start/size, etc.
-
- Args:
- tar_tvR_output: The output of command 'tar tvR'. Option 'R' print out the
- starting block number of the file record.
-
- Yields:
- A tuple of data described above in the same order.
- """
- prev_file = _get_command_result_from_tar_tvR(tar_tvR_output.readline())
-
- for line in tar_tvR_output:
- cur_file = _get_command_result_from_tar_tvR(line)
-
- prev_content_start = _get_prev_content_start(cur_file.record_start,
- prev_file)
- prev_record_size = cur_file.record_start - prev_file.record_start
-
- yield TarMemberInfo(prev_file.filename,
- prev_file.record_start, prev_record_size,
- prev_content_start, prev_file.size)
-
- prev_file = cur_file
diff --git a/gs_cache/telemetry_setup.py b/gs_cache/telemetry_setup.py
index 1f795cf..642a74e 100644
--- a/gs_cache/telemetry_setup.py
+++ b/gs_cache/telemetry_setup.py
@@ -5,9 +5,6 @@
"""A class that sets up the environment for telemetry testing."""
-
-from __future__ import absolute_import
-from __future__ import division
from __future__ import print_function
import contextlib
@@ -88,7 +85,6 @@
class TelemetrySetupError(Exception):
"""Exception class used by this module."""
- pass
class TelemetrySetup(object):
@@ -195,7 +191,7 @@
resp = requests.get(url)
try:
resp.raise_for_status()
- with open(dep_path, 'w') as f:
+ with open(dep_path, 'wb') as f:
for content in resp.iter_content(constants.READ_BUFFER_SIZE_BYTES):
f.write(content)
except Exception as e: