switch to 4-space indent

Leave the recipes/ code at 2-space indent to match the rest of the recipes
project in other repos.

Reformatted using:
files=( $(
	git ls-tree -r --name-only HEAD | \
		grep -Ev -e '^(third_party|recipes)/' | \
		grep '\.py$';
	git grep -l '#!/usr/bin/env.*python' | grep -v '\.py$'
) )
parallel ./yapf -i -- "${files[@]}"
~/chromiumos/chromite/contrib/reflow_overlong_comments "${files[@]}"
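yapf picks up the 4-space width from its style configuration; a roughly
equivalent explicit invocation (a sketch, assuming the pep8 base style)
would be:

	./yapf --style='{based_on_style: pep8, indent_width: 4, column_limit: 80}' -i -- "${files[@]}"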

The following files still had strings that were too long; they were
reformatted by hand because the fixes were easy and there were only a few
issues (see the wrapping example after the list):
autoninja.py
clang_format.py
download_from_google_storage.py
fix_encoding.py
gclient_utils.py
git_cache.py
git_common.py
git_map_branches.py
git_reparent_branch.py
gn.py
my_activity.py
owners_finder.py
presubmit_canned_checks.py
reclient_helper.py
reclientreport.py
roll_dep.py
rustfmt.py
siso.py
split_cl.py
subcommand.py
subprocess2.py
swift_format.py
upload_to_google_storage.py
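
The typical manual fix was to split an over-long literal using implicit
string concatenation, e.g. (an illustrative sketch, not taken from any
specific file):

	# Before: over the 80-column limit after reindenting.
	msg = 'a long diagnostic message that no longer fit within the column limit'
	# After: implicit string concatenation keeps each line short.
	msg = ('a long diagnostic message that no longer fit within the '
	       'column limit')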

These files still had lines (strings) that were too long, so the pylint
warnings were suppressed with a TODO (see the sketch after the list):
auth.py
gclient.py
gclient_eval.py
gclient_paths.py
gclient_scm.py
gerrit_util.py
git_cl.py
presubmit_canned_checks.py
presubmit_support.py
scm.py
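
The suppression looks roughly like this (the real TODO text varies per
file; this is an illustrative sketch):

	# TODO: Wrap the remaining long strings, then drop this suppression.
	# pylint: disable=line-too-long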

Change-Id: Ia6535c4f2c48d46b589ec1e791dde6c6b2ea858f
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/4836379
Commit-Queue: Josip Sokcevic <sokcevic@chromium.org>
Auto-Submit: Mike Frysinger <vapier@chromium.org>
Reviewed-by: Josip Sokcevic <sokcevic@chromium.org>
diff --git a/git_common.py b/git_common.py
index e439465..592e931 100644
--- a/git_common.py
+++ b/git_common.py
@@ -15,15 +15,16 @@
 
 
 def wrapper(func):
-  def wrap(self, timeout=None):
-    return func(self, timeout=timeout or threading.TIMEOUT_MAX)
+    def wrap(self, timeout=None):
+        return func(self, timeout=timeout or threading.TIMEOUT_MAX)
 
-  return wrap
+    return wrap
+
+
 IMapIterator.next = wrapper(IMapIterator.next)
 IMapIterator.__next__ = IMapIterator.next
 # TODO(iannucci): Monkeypatch all other 'wait' methods too.
 
-
 import binascii
 import collections
 import contextlib
@@ -41,32 +42,26 @@
 
 from io import BytesIO
 
-
 ROOT = os.path.abspath(os.path.dirname(__file__))
 IS_WIN = sys.platform == 'win32'
 TEST_MODE = False
 
 
 def win_find_git():
-  for elem in os.environ.get('PATH', '').split(os.pathsep):
-    for candidate in ('git.exe', 'git.bat'):
-      path = os.path.join(elem, candidate)
-      if os.path.isfile(path):
-        return path
-  raise ValueError('Could not find Git on PATH.')
+    for elem in os.environ.get('PATH', '').split(os.pathsep):
+        for candidate in ('git.exe', 'git.bat'):
+            path = os.path.join(elem, candidate)
+            if os.path.isfile(path):
+                return path
+    raise ValueError('Could not find Git on PATH.')
 
 
 GIT_EXE = 'git' if not IS_WIN else win_find_git()
 
-
 FREEZE = 'FREEZE'
-FREEZE_SECTIONS = {
-  'indexed': 'soft',
-  'unindexed': 'mixed'
-}
+FREEZE_SECTIONS = {'indexed': 'soft', 'unindexed': 'mixed'}
 FREEZE_MATCHER = re.compile(r'%s.(%s)' % (FREEZE, '|'.join(FREEZE_SECTIONS)))
 
-
 # NOTE: This list is DEPRECATED in favor of the Infra Git wrapper:
 # https://chromium.googlesource.com/infra/infra/+/HEAD/go/src/infra/tools/git
 #
@@ -119,11 +114,8 @@
     # crbug.com/430343
     # TODO(dnj): Resync with Chromite.
     r'The requested URL returned error: 5\d+',
-
     r'Connection reset by peer',
-
     r'Unable to look up',
-
     r'Couldn\'t resolve host',
 )
 
@@ -135,15 +127,15 @@
 # See git commit b6160d95 for more information.
 MIN_UPSTREAM_TRACK_GIT_VERSION = (2, 3)
 
+
 class BadCommitRefException(Exception):
-  def __init__(self, refs):
-    msg = ('one of %s does not seem to be a valid commitref.' %
-           str(refs))
-    super(BadCommitRefException, self).__init__(msg)
+    def __init__(self, refs):
+        msg = ('one of %s does not seem to be a valid commitref.' % str(refs))
+        super(BadCommitRefException, self).__init__(msg)
 
 
 def memoize_one(**kwargs):
-  """Memoizes a single-argument pure function.
+    """Memoizes a single-argument pure function.
 
   Values of None are not cached.
 
@@ -158,64 +150,69 @@
     * clear() - Drops the entire contents of the cache.  Useful for unittests.
     * update(other) - Updates the contents of the cache from another dict.
   """
-  assert 'threadsafe' in kwargs, 'Must specify threadsafe={True,False}'
-  threadsafe = kwargs['threadsafe']
+    assert 'threadsafe' in kwargs, 'Must specify threadsafe={True,False}'
+    threadsafe = kwargs['threadsafe']
 
-  if threadsafe:
-    def withlock(lock, f):
-      def inner(*args, **kwargs):
-        with lock:
-          return f(*args, **kwargs)
-      return inner
-  else:
-    def withlock(_lock, f):
-      return f
+    if threadsafe:
 
-  def decorator(f):
-    # Instantiate the lock in decorator, in case users of memoize_one do:
-    #
-    # memoizer = memoize_one(threadsafe=True)
-    #
-    # @memoizer
-    # def fn1(val): ...
-    #
-    # @memoizer
-    # def fn2(val): ...
+        def withlock(lock, f):
+            def inner(*args, **kwargs):
+                with lock:
+                    return f(*args, **kwargs)
 
-    lock = threading.Lock() if threadsafe else None
-    cache = {}
-    _get = withlock(lock, cache.get)
-    _set = withlock(lock, cache.__setitem__)
+            return inner
+    else:
 
-    @functools.wraps(f)
-    def inner(arg):
-      ret = _get(arg)
-      if ret is None:
-        ret = f(arg)
-        if ret is not None:
-          _set(arg, ret)
-      return ret
-    inner.get = _get
-    inner.set = _set
-    inner.clear = withlock(lock, cache.clear)
-    inner.update = withlock(lock, cache.update)
-    return inner
-  return decorator
+        def withlock(_lock, f):
+            return f
+
+    def decorator(f):
+        # Instantiate the lock in decorator, in case users of memoize_one do:
+        #
+        # memoizer = memoize_one(threadsafe=True)
+        #
+        # @memoizer
+        # def fn1(val): ...
+        #
+        # @memoizer
+        # def fn2(val): ...
+
+        lock = threading.Lock() if threadsafe else None
+        cache = {}
+        _get = withlock(lock, cache.get)
+        _set = withlock(lock, cache.__setitem__)
+
+        @functools.wraps(f)
+        def inner(arg):
+            ret = _get(arg)
+            if ret is None:
+                ret = f(arg)
+                if ret is not None:
+                    _set(arg, ret)
+            return ret
+
+        inner.get = _get
+        inner.set = _set
+        inner.clear = withlock(lock, cache.clear)
+        inner.update = withlock(lock, cache.update)
+        return inner
+
+    return decorator
 
 
 def _ScopedPool_initer(orig, orig_args):  # pragma: no cover
-  """Initializer method for ScopedPool's subprocesses.
+    """Initializer method for ScopedPool's subprocesses.
 
   This helps ScopedPool handle Ctrl-C's correctly.
   """
-  signal.signal(signal.SIGINT, signal.SIG_IGN)
-  if orig:
-    orig(*orig_args)
+    signal.signal(signal.SIGINT, signal.SIG_IGN)
+    if orig:
+        orig(*orig_args)
 
 
 @contextlib.contextmanager
 def ScopedPool(*args, **kwargs):
-  """Context Manager which returns a multiprocessing.pool instance which
+    """Context Manager which returns a multiprocessing.pool instance which
   correctly deals with thrown exceptions.
 
   *args - Arguments to multiprocessing.pool
@@ -224,28 +221,28 @@
     kind ('threads', 'procs') - The type of underlying coprocess to use.
     **etc - Arguments to multiprocessing.pool
   """
-  if kwargs.pop('kind', None) == 'threads':
-    pool = multiprocessing.pool.ThreadPool(*args, **kwargs)
-  else:
-    orig, orig_args = kwargs.get('initializer'), kwargs.get('initargs', ())
-    kwargs['initializer'] = _ScopedPool_initer
-    kwargs['initargs'] = orig, orig_args
-    pool = multiprocessing.pool.Pool(*args, **kwargs)
+    if kwargs.pop('kind', None) == 'threads':
+        pool = multiprocessing.pool.ThreadPool(*args, **kwargs)
+    else:
+        orig, orig_args = kwargs.get('initializer'), kwargs.get('initargs', ())
+        kwargs['initializer'] = _ScopedPool_initer
+        kwargs['initargs'] = orig, orig_args
+        pool = multiprocessing.pool.Pool(*args, **kwargs)
 
-  try:
-    yield pool
-    pool.close()
-  except:
-    pool.terminate()
-    raise
-  finally:
-    pool.join()
+    try:
+        yield pool
+        pool.close()
+    except:
+        pool.terminate()
+        raise
+    finally:
+        pool.join()
 
 
 class ProgressPrinter(object):
-  """Threaded single-stat status message printer."""
-  def __init__(self, fmt, enabled=None, fout=sys.stderr, period=0.5):
-    """Create a ProgressPrinter.
+    """Threaded single-stat status message printer."""
+    def __init__(self, fmt, enabled=None, fout=sys.stderr, period=0.5):
+        """Create a ProgressPrinter.
 
     Use it as a context manager which produces a simple 'increment' method:
 
@@ -264,107 +261,111 @@
       period (float) - The time in seconds for the printer thread to wait
         between printing.
     """
-    self.fmt = fmt
-    if enabled is None:  # pragma: no cover
-      self.enabled = logging.getLogger().isEnabledFor(logging.INFO)
-    else:
-      self.enabled = enabled
+        self.fmt = fmt
+        if enabled is None:  # pragma: no cover
+            self.enabled = logging.getLogger().isEnabledFor(logging.INFO)
+        else:
+            self.enabled = enabled
 
-    self._count = 0
-    self._dead = False
-    self._dead_cond = threading.Condition()
-    self._stream = fout
-    self._thread = threading.Thread(target=self._run)
-    self._period = period
+        self._count = 0
+        self._dead = False
+        self._dead_cond = threading.Condition()
+        self._stream = fout
+        self._thread = threading.Thread(target=self._run)
+        self._period = period
 
-  def _emit(self, s):
-    if self.enabled:
-      self._stream.write('\r' + s)
-      self._stream.flush()
+    def _emit(self, s):
+        if self.enabled:
+            self._stream.write('\r' + s)
+            self._stream.flush()
 
-  def _run(self):
-    with self._dead_cond:
-      while not self._dead:
-        self._emit(self.fmt % {'count': self._count})
-        self._dead_cond.wait(self._period)
-        self._emit((self.fmt + '\n') % {'count': self._count})
+    def _run(self):
+        with self._dead_cond:
+            while not self._dead:
+                self._emit(self.fmt % {'count': self._count})
+                self._dead_cond.wait(self._period)
+                self._emit((self.fmt + '\n') % {'count': self._count})
 
-  def inc(self, amount=1):
-    self._count += amount
+    def inc(self, amount=1):
+        self._count += amount
 
-  def __enter__(self):
-    self._thread.start()
-    return self.inc
+    def __enter__(self):
+        self._thread.start()
+        return self.inc
 
-  def __exit__(self, _exc_type, _exc_value, _traceback):
-    self._dead = True
-    with self._dead_cond:
-      self._dead_cond.notifyAll()
-    self._thread.join()
-    del self._thread
+    def __exit__(self, _exc_type, _exc_value, _traceback):
+        self._dead = True
+        with self._dead_cond:
+            self._dead_cond.notifyAll()
+        self._thread.join()
+        del self._thread
 
 
 def once(function):
-  """@Decorates |function| so that it only performs its action once, no matter
+    """@Decorates |function| so that it only performs its action once, no matter
   how many times the decorated |function| is called."""
-  has_run = [False]
-  def _wrapper(*args, **kwargs):
-    if not has_run[0]:
-      has_run[0] = True
-      function(*args, **kwargs)
-  return _wrapper
+    has_run = [False]
+
+    def _wrapper(*args, **kwargs):
+        if not has_run[0]:
+            has_run[0] = True
+            function(*args, **kwargs)
+
+    return _wrapper
 
 
 def unicode_repr(s):
-  result = repr(s)
-  return result[1:] if result.startswith('u') else result
+    result = repr(s)
+    return result[1:] if result.startswith('u') else result
 
 
 ## Git functions
 
+
 def die(message, *args):
-  print(textwrap.dedent(message % args), file=sys.stderr)
-  sys.exit(1)
+    print(textwrap.dedent(message % args), file=sys.stderr)
+    sys.exit(1)
 
 
 def blame(filename, revision=None, porcelain=False, abbrev=None, *_args):
-  command = ['blame']
-  if porcelain:
-    command.append('-p')
-  if revision is not None:
-    command.append(revision)
-  if abbrev is not None:
-    command.append('--abbrev=%d' % abbrev)
-  command.extend(['--', filename])
-  return run(*command)
+    command = ['blame']
+    if porcelain:
+        command.append('-p')
+    if revision is not None:
+        command.append(revision)
+    if abbrev is not None:
+        command.append('--abbrev=%d' % abbrev)
+    command.extend(['--', filename])
+    return run(*command)
 
 
 def branch_config(branch, option, default=None):
-  return get_config('branch.%s.%s' % (branch, option), default=default)
+    return get_config('branch.%s.%s' % (branch, option), default=default)
 
 
 def branch_config_map(option):
-  """Return {branch: <|option| value>} for all branches."""
-  try:
-    reg = re.compile(r'^branch\.(.*)\.%s$' % option)
-    lines = get_config_regexp(reg.pattern)
-    return {reg.match(k).group(1): v for k, v in (l.split() for l in lines)}
-  except subprocess2.CalledProcessError:
-    return {}
+    """Return {branch: <|option| value>} for all branches."""
+    try:
+        reg = re.compile(r'^branch\.(.*)\.%s$' % option)
+        lines = get_config_regexp(reg.pattern)
+        return {reg.match(k).group(1): v for k, v in (l.split() for l in lines)}
+    except subprocess2.CalledProcessError:
+        return {}
 
 
 def branches(use_limit=True, *args):
-  NO_BRANCH = ('* (no branch', '* (detached', '* (HEAD detached')
+    NO_BRANCH = ('* (no branch', '* (detached', '* (HEAD detached')
 
-  key = 'depot-tools.branch-limit'
-  limit = get_config_int(key, 20)
+    key = 'depot-tools.branch-limit'
+    limit = get_config_int(key, 20)
 
-  raw_branches = run('branch', *args).splitlines()
+    raw_branches = run('branch', *args).splitlines()
 
-  num = len(raw_branches)
+    num = len(raw_branches)
 
-  if use_limit and num > limit:
-    die("""\
+    if use_limit and num > limit:
+        die(
+            """\
       Your git repo has too many branches (%d/%d) for this tool to work well.
 
       You may adjust this limit by running:
@@ -374,126 +375,126 @@
       git cl archive
       """, num, limit, key)
 
-  for line in raw_branches:
-    if line.startswith(NO_BRANCH):
-      continue
-    yield line.split()[-1]
+    for line in raw_branches:
+        if line.startswith(NO_BRANCH):
+            continue
+        yield line.split()[-1]
 
 
 def get_config(option, default=None):
-  try:
-    return run('config', '--get', option) or default
-  except subprocess2.CalledProcessError:
-    return default
+    try:
+        return run('config', '--get', option) or default
+    except subprocess2.CalledProcessError:
+        return default
 
 
 def get_config_int(option, default=0):
-  assert isinstance(default, int)
-  try:
-    return int(get_config(option, default))
-  except ValueError:
-    return default
+    assert isinstance(default, int)
+    try:
+        return int(get_config(option, default))
+    except ValueError:
+        return default
 
 
 def get_config_list(option):
-  try:
-    return run('config', '--get-all', option).split()
-  except subprocess2.CalledProcessError:
-    return []
+    try:
+        return run('config', '--get-all', option).split()
+    except subprocess2.CalledProcessError:
+        return []
 
 
 def get_config_regexp(pattern):
-  if IS_WIN: # pragma: no cover
-    # this madness is because we call git.bat which calls git.exe which calls
-    # bash.exe (or something to that effect). Each layer divides the number of
-    # ^'s by 2.
-    pattern = pattern.replace('^', '^' * 8)
-  return run('config', '--get-regexp', pattern).splitlines()
+    if IS_WIN:  # pragma: no cover
+        # this madness is because we call git.bat which calls git.exe which
+        # calls bash.exe (or something to that effect). Each layer divides the
+        # number of ^'s by 2.
+        pattern = pattern.replace('^', '^' * 8)
+    return run('config', '--get-regexp', pattern).splitlines()
 
 
 def is_fsmonitor_enabled():
-  """Returns true if core.fsmonitor is enabled in git config."""
-  fsmonitor = get_config('core.fsmonitor', 'False')
-  return fsmonitor.strip().lower() == 'true'
+    """Returns true if core.fsmonitor is enabled in git config."""
+    fsmonitor = get_config('core.fsmonitor', 'False')
+    return fsmonitor.strip().lower() == 'true'
 
 
 def warn_submodule():
-  """Print warnings for submodules."""
-  # TODO(crbug.com/1475405): Warn users if the project uses submodules and
-  # they have fsmonitor enabled.
-  if sys.platform.startswith('darwin') and is_fsmonitor_enabled():
-    print(colorama.Fore.RED)
-    print('WARNING: You have fsmonitor enabled. There is a major issue '
-          'resulting in git diff-index returning wrong results. Please '
-          'disable it by running:')
-    print('    git config core.fsmonitor false')
-    print('We will remove this warning once https://crbug.com/1475405 is '
-          'fixed.')
-    print(colorama.Style.RESET_ALL)
+    """Print warnings for submodules."""
+    # TODO(crbug.com/1475405): Warn users if the project uses submodules and
+    # they have fsmonitor enabled.
+    if sys.platform.startswith('darwin') and is_fsmonitor_enabled():
+        print(colorama.Fore.RED)
+        print('WARNING: You have fsmonitor enabled. There is a major issue '
+              'resulting in git diff-index returning wrong results. Please '
+              'disable it by running:')
+        print('    git config core.fsmonitor false')
+        print('We will remove this warning once https://crbug.com/1475405 is '
+              'fixed.')
+        print(colorama.Style.RESET_ALL)
 
 
 def current_branch():
-  try:
-    return run('rev-parse', '--abbrev-ref', 'HEAD')
-  except subprocess2.CalledProcessError:
-    return None
+    try:
+        return run('rev-parse', '--abbrev-ref', 'HEAD')
+    except subprocess2.CalledProcessError:
+        return None
 
 
 def del_branch_config(branch, option, scope='local'):
-  del_config('branch.%s.%s' % (branch, option), scope=scope)
+    del_config('branch.%s.%s' % (branch, option), scope=scope)
 
 
 def del_config(option, scope='local'):
-  try:
-    run('config', '--' + scope, '--unset', option)
-  except subprocess2.CalledProcessError:
-    pass
+    try:
+        run('config', '--' + scope, '--unset', option)
+    except subprocess2.CalledProcessError:
+        pass
 
 
 def diff(oldrev, newrev, *args):
-  return run('diff', oldrev, newrev, *args)
+    return run('diff', oldrev, newrev, *args)
 
 
 def freeze():
-  took_action = False
-  key = 'depot-tools.freeze-size-limit'
-  MB = 2**20
-  limit_mb = get_config_int(key, 100)
-  untracked_bytes = 0
+    took_action = False
+    key = 'depot-tools.freeze-size-limit'
+    MB = 2**20
+    limit_mb = get_config_int(key, 100)
+    untracked_bytes = 0
 
-  root_path = repo_root()
+    root_path = repo_root()
 
-  # unindexed tracks all the files which are unindexed but we want to add to
-  # the `FREEZE.unindexed` commit.
-  unindexed = []
+    # unindexed tracks all the files which are unindexed but we want to add to
+    # the `FREEZE.unindexed` commit.
+    unindexed = []
 
-  # will be set to true if there are any indexed files to commit.
-  have_indexed_files = False
+    # will be set to true if there are any indexed files to commit.
+    have_indexed_files = False
 
-  for f, s in status(ignore_submodules='all'):
-    if is_unmerged(s):
-      die("Cannot freeze unmerged changes!")
-    if s.lstat not in ' ?':
-      # This covers all changes to indexed files.
-      # lstat = ' ' means that the file is tracked and modified, but wasn't
-      # added yet.
-      # lstat = '?' means that the file is untracked.
-      have_indexed_files = True
+    for f, s in status(ignore_submodules='all'):
+        if is_unmerged(s):
+            die("Cannot freeze unmerged changes!")
+        if s.lstat not in ' ?':
+            # This covers all changes to indexed files.
+            # lstat = ' ' means that the file is tracked and modified, but
+            # wasn't added yet. lstat = '?' means that the file is untracked.
+            have_indexed_files = True
 
-      # If the file has both indexed and unindexed changes.
-      # rstat shows the status of the working tree. If the file also has changes
-      # in the working tree, it should be tracked both in indexed and unindexed
-      # changes.
-      if s.rstat != ' ':
-        unindexed.append(f.encode('utf-8'))
-    else:
-      unindexed.append(f.encode('utf-8'))
+            # If the file has both indexed and unindexed changes.
+            # rstat shows the status of the working tree. If the file also has
+            # changes in the working tree, it should be tracked both in indexed
+            # and unindexed changes.
+            if s.rstat != ' ':
+                unindexed.append(f.encode('utf-8'))
+        else:
+            unindexed.append(f.encode('utf-8'))
 
-    if s.lstat == '?' and limit_mb > 0:
-      untracked_bytes += os.lstat(os.path.join(root_path, f)).st_size
+        if s.lstat == '?' and limit_mb > 0:
+            untracked_bytes += os.lstat(os.path.join(root_path, f)).st_size
 
-  if limit_mb > 0 and untracked_bytes > limit_mb * MB:
-    die("""\
+    if limit_mb > 0 and untracked_bytes > limit_mb * MB:
+        die(
+            """\
       You appear to have too much untracked+unignored data in your git
       checkout: %.1f / %d MB.
 
@@ -510,116 +511,117 @@
       freeze limit by running:
         git config %s <new_limit>
       Where <new_limit> is an integer threshold in megabytes.""",
-      untracked_bytes / (MB * 1.0), limit_mb, key)
+            untracked_bytes / (MB * 1.0), limit_mb, key)
 
-  if have_indexed_files:
-    try:
-      run('commit', '--no-verify', '-m', f'{FREEZE}.indexed')
-      took_action = True
-    except subprocess2.CalledProcessError:
-      pass
+    if have_indexed_files:
+        try:
+            run('commit', '--no-verify', '-m', f'{FREEZE}.indexed')
+            took_action = True
+        except subprocess2.CalledProcessError:
+            pass
 
-  add_errors = False
-  if unindexed:
-    try:
-      run('add',
-          '--pathspec-from-file',
-          '-',
-          '--ignore-errors',
-          indata=b'\n'.join(unindexed),
-          cwd=root_path)
-    except subprocess2.CalledProcessError:
-      add_errors = True
+    add_errors = False
+    if unindexed:
+        try:
+            run('add',
+                '--pathspec-from-file',
+                '-',
+                '--ignore-errors',
+                indata=b'\n'.join(unindexed),
+                cwd=root_path)
+        except subprocess2.CalledProcessError:
+            add_errors = True
 
-    try:
-      run('commit', '--no-verify', '-m', f'{FREEZE}.unindexed')
-      took_action = True
-    except subprocess2.CalledProcessError:
-      pass
+        try:
+            run('commit', '--no-verify', '-m', f'{FREEZE}.unindexed')
+            took_action = True
+        except subprocess2.CalledProcessError:
+            pass
 
-  ret = []
-  if add_errors:
-    ret.append('Failed to index some unindexed files.')
-  if not took_action:
-    ret.append('Nothing to freeze.')
-  return ' '.join(ret) or None
+    ret = []
+    if add_errors:
+        ret.append('Failed to index some unindexed files.')
+    if not took_action:
+        ret.append('Nothing to freeze.')
+    return ' '.join(ret) or None
 
 
 def get_branch_tree(use_limit=False):
-  """Get the dictionary of {branch: parent}, compatible with topo_iter.
+    """Get the dictionary of {branch: parent}, compatible with topo_iter.
 
   Returns a tuple of (skipped, <branch_tree dict>) where skipped is a set of
   branches without upstream branches defined.
   """
-  skipped = set()
-  branch_tree = {}
+    skipped = set()
+    branch_tree = {}
 
-  for branch in branches(use_limit=use_limit):
-    parent = upstream(branch)
-    if not parent:
-      skipped.add(branch)
-      continue
-    branch_tree[branch] = parent
+    for branch in branches(use_limit=use_limit):
+        parent = upstream(branch)
+        if not parent:
+            skipped.add(branch)
+            continue
+        branch_tree[branch] = parent
 
-  return skipped, branch_tree
+    return skipped, branch_tree
 
 
 def get_or_create_merge_base(branch, parent=None):
-  """Finds the configured merge base for branch.
+    """Finds the configured merge base for branch.
 
   If parent is supplied, it's used instead of calling upstream(branch).
   """
-  base = branch_config(branch, 'base')
-  base_upstream = branch_config(branch, 'base-upstream')
-  parent = parent or upstream(branch)
-  if parent is None or branch is None:
-    return None
-  actual_merge_base = run('merge-base', parent, branch)
+    base = branch_config(branch, 'base')
+    base_upstream = branch_config(branch, 'base-upstream')
+    parent = parent or upstream(branch)
+    if parent is None or branch is None:
+        return None
+    actual_merge_base = run('merge-base', parent, branch)
 
-  if base_upstream != parent:
-    base = None
-    base_upstream = None
+    if base_upstream != parent:
+        base = None
+        base_upstream = None
 
-  def is_ancestor(a, b):
-    return run_with_retcode('merge-base', '--is-ancestor', a, b) == 0
+    def is_ancestor(a, b):
+        return run_with_retcode('merge-base', '--is-ancestor', a, b) == 0
 
-  if base and base != actual_merge_base:
-    if not is_ancestor(base, branch):
-      logging.debug('Found WRONG pre-set merge-base for %s: %s', branch, base)
-      base = None
-    elif is_ancestor(base, actual_merge_base):
-      logging.debug('Found OLD pre-set merge-base for %s: %s', branch, base)
-      base = None
-    else:
-      logging.debug('Found pre-set merge-base for %s: %s', branch, base)
+    if base and base != actual_merge_base:
+        if not is_ancestor(base, branch):
+            logging.debug('Found WRONG pre-set merge-base for %s: %s', branch,
+                          base)
+            base = None
+        elif is_ancestor(base, actual_merge_base):
+            logging.debug('Found OLD pre-set merge-base for %s: %s', branch,
+                          base)
+            base = None
+        else:
+            logging.debug('Found pre-set merge-base for %s: %s', branch, base)
 
-  if not base:
-    base = actual_merge_base
-    manual_merge_base(branch, base, parent)
+    if not base:
+        base = actual_merge_base
+        manual_merge_base(branch, base, parent)
 
-  return base
+    return base
 
 
 def hash_multi(*reflike):
-  return run('rev-parse', *reflike).splitlines()
+    return run('rev-parse', *reflike).splitlines()
 
 
 def hash_one(reflike, short=False):
-  args = ['rev-parse', reflike]
-  if short:
-    args.insert(1, '--short')
-  return run(*args)
+    args = ['rev-parse', reflike]
+    if short:
+        args.insert(1, '--short')
+    return run(*args)
 
 
 def in_rebase():
-  git_dir = run('rev-parse', '--git-dir')
-  return (
-    os.path.exists(os.path.join(git_dir, 'rebase-merge')) or
-    os.path.exists(os.path.join(git_dir, 'rebase-apply')))
+    git_dir = run('rev-parse', '--git-dir')
+    return (os.path.exists(os.path.join(git_dir, 'rebase-merge'))
+            or os.path.exists(os.path.join(git_dir, 'rebase-apply')))
 
 
 def intern_f(f, kind='blob'):
-  """Interns a file object into the git object store.
+    """Interns a file object into the git object store.
 
   Args:
     f (file-like object) - The file-like object to intern
@@ -627,62 +629,61 @@
 
   Returns the git hash of the interned object (hex encoded).
   """
-  ret = run('hash-object', '-t', kind, '-w', '--stdin', stdin=f)
-  f.close()
-  return ret
+    ret = run('hash-object', '-t', kind, '-w', '--stdin', stdin=f)
+    f.close()
+    return ret
 
 
 def is_dormant(branch):
-  # TODO(iannucci): Do an oldness check?
-  return branch_config(branch, 'dormant', 'false') != 'false'
+    # TODO(iannucci): Do an oldness check?
+    return branch_config(branch, 'dormant', 'false') != 'false'
 
 
 def is_unmerged(stat_value):
-  return (
-      'U' in (stat_value.lstat, stat_value.rstat) or
-      ((stat_value.lstat == stat_value.rstat) and stat_value.lstat in 'AD')
-  )
+    return ('U' in (stat_value.lstat, stat_value.rstat)
+            or ((stat_value.lstat == stat_value.rstat)
+                and stat_value.lstat in 'AD'))
 
 
 def manual_merge_base(branch, base, parent):
-  set_branch_config(branch, 'base', base)
-  set_branch_config(branch, 'base-upstream', parent)
+    set_branch_config(branch, 'base', base)
+    set_branch_config(branch, 'base-upstream', parent)
 
 
 def mktree(treedict):
-  """Makes a git tree object and returns its hash.
+    """Makes a git tree object and returns its hash.
 
   See |tree()| for the values of mode, type, and ref.
 
   Args:
     treedict - { name: (mode, type, ref) }
   """
-  with tempfile.TemporaryFile() as f:
-    for name, (mode, typ, ref) in treedict.items():
-      f.write(('%s %s %s\t%s\0' % (mode, typ, ref, name)).encode('utf-8'))
-    f.seek(0)
-    return run('mktree', '-z', stdin=f)
+    with tempfile.TemporaryFile() as f:
+        for name, (mode, typ, ref) in treedict.items():
+            f.write(('%s %s %s\t%s\0' % (mode, typ, ref, name)).encode('utf-8'))
+        f.seek(0)
+        return run('mktree', '-z', stdin=f)
 
 
 def parse_commitrefs(*commitrefs):
-  """Returns binary encoded commit hashes for one or more commitrefs.
+    """Returns binary encoded commit hashes for one or more commitrefs.
 
   A commitref is anything which can resolve to a commit. Popular examples:
     * 'HEAD'
     * 'origin/main'
     * 'cool_branch~2'
   """
-  try:
-    return [binascii.unhexlify(h) for h in hash_multi(*commitrefs)]
-  except subprocess2.CalledProcessError:
-    raise BadCommitRefException(commitrefs)
+    try:
+        return [binascii.unhexlify(h) for h in hash_multi(*commitrefs)]
+    except subprocess2.CalledProcessError:
+        raise BadCommitRefException(commitrefs)
 
 
 RebaseRet = collections.namedtuple('RebaseRet', 'success stdout stderr')
 
 
 def rebase(parent, start, branch, abort=False, allow_gc=False):
-  """Rebases |start|..|branch| onto the branch |parent|.
+    """Rebases |start|..|branch| onto the branch |parent|.
 
   Sets 'gc.auto=0' for the duration of this call to prevent the rebase from
   running a potentially slow garbage collection cycle.
@@ -704,140 +705,145 @@
     message - if the rebase failed, this contains the stdout of the failed
               rebase.
   """
-  try:
-    args = [
-      '-c', 'gc.auto={}'.format('1' if allow_gc else '0'),
-      'rebase',
-    ]
-    if TEST_MODE:
-      args.append('--committer-date-is-author-date')
-    args += [
-      '--onto', parent, start, branch,
-    ]
-    run(*args)
-    return RebaseRet(True, '', '')
-  except subprocess2.CalledProcessError as cpe:
-    if abort:
-      run_with_retcode('rebase', '--abort')  # ignore failure
-    return RebaseRet(False, cpe.stdout.decode('utf-8', 'replace'),
-                     cpe.stderr.decode('utf-8', 'replace'))
+    try:
+        args = [
+            '-c',
+            'gc.auto={}'.format('1' if allow_gc else '0'),
+            'rebase',
+        ]
+        if TEST_MODE:
+            args.append('--committer-date-is-author-date')
+        args += [
+            '--onto',
+            parent,
+            start,
+            branch,
+        ]
+        run(*args)
+        return RebaseRet(True, '', '')
+    except subprocess2.CalledProcessError as cpe:
+        if abort:
+            run_with_retcode('rebase', '--abort')  # ignore failure
+        return RebaseRet(False, cpe.stdout.decode('utf-8', 'replace'),
+                         cpe.stderr.decode('utf-8', 'replace'))
 
 
 def remove_merge_base(branch):
-  del_branch_config(branch, 'base')
-  del_branch_config(branch, 'base-upstream')
+    del_branch_config(branch, 'base')
+    del_branch_config(branch, 'base-upstream')
 
 
 def repo_root():
-  """Returns the absolute path to the repository root."""
-  return run('rev-parse', '--show-toplevel')
+    """Returns the absolute path to the repository root."""
+    return run('rev-parse', '--show-toplevel')
 
 
 def upstream_default():
-  """Returns the default branch name of the origin repository."""
-  try:
-    ret = run('rev-parse', '--abbrev-ref', 'origin/HEAD')
-    # Detect if the repository migrated to main branch
-    if ret == 'origin/master':
-      try:
-        ret = run('rev-parse', '--abbrev-ref', 'origin/main')
-        run('remote', 'set-head', '-a', 'origin')
+    """Returns the default branch name of the origin repository."""
+    try:
         ret = run('rev-parse', '--abbrev-ref', 'origin/HEAD')
-      except subprocess2.CalledProcessError:
-        pass
-    return ret
-  except subprocess2.CalledProcessError:
-    return 'origin/main'
+        # Detect if the repository migrated to main branch
+        if ret == 'origin/master':
+            try:
+                ret = run('rev-parse', '--abbrev-ref', 'origin/main')
+                run('remote', 'set-head', '-a', 'origin')
+                ret = run('rev-parse', '--abbrev-ref', 'origin/HEAD')
+            except subprocess2.CalledProcessError:
+                pass
+        return ret
+    except subprocess2.CalledProcessError:
+        return 'origin/main'
 
 
 def root():
-  return get_config('depot-tools.upstream', upstream_default())
+    return get_config('depot-tools.upstream', upstream_default())
 
 
 @contextlib.contextmanager
 def less():  # pragma: no cover
-  """Runs 'less' as context manager yielding its stdin as a PIPE.
+    """Runs 'less' as context manager yielding its stdin as a PIPE.
 
   Automatically checks if sys.stdout is a non-TTY stream. If so, it avoids
   running less and just yields sys.stdout.
 
   The returned PIPE is opened on binary mode.
   """
-  if not setup_color.IS_TTY:
-    # On Python 3, sys.stdout doesn't accept bytes, and sys.stdout.buffer must
-    # be used.
-    yield getattr(sys.stdout, 'buffer', sys.stdout)
-    return
+    if not setup_color.IS_TTY:
+        # On Python 3, sys.stdout doesn't accept bytes, and sys.stdout.buffer
+        # must be used.
+        yield getattr(sys.stdout, 'buffer', sys.stdout)
+        return
 
-  # Run with the same options that git uses (see setup_pager in git repo).
-  # -F: Automatically quit if the output is less than one screen.
-  # -R: Don't escape ANSI color codes.
-  # -X: Don't clear the screen before starting.
-  cmd = ('less', '-FRX')
-  try:
-    proc = subprocess2.Popen(cmd, stdin=subprocess2.PIPE)
-    yield proc.stdin
-  finally:
+    # Run with the same options that git uses (see setup_pager in git repo).
+    # -F: Automatically quit if the output is less than one screen.
+    # -R: Don't escape ANSI color codes.
+    # -X: Don't clear the screen before starting.
+    cmd = ('less', '-FRX')
     try:
-      proc.stdin.close()
-    except BrokenPipeError:
-      # BrokenPipeError is raised if proc has already completed,
-      pass
-    proc.wait()
+        proc = subprocess2.Popen(cmd, stdin=subprocess2.PIPE)
+        yield proc.stdin
+    finally:
+        try:
+            proc.stdin.close()
+        except BrokenPipeError:
+            # BrokenPipeError is raised if proc has already completed,
+            pass
+        proc.wait()
 
 
 def run(*cmd, **kwargs):
-  """The same as run_with_stderr, except it only returns stdout."""
-  return run_with_stderr(*cmd, **kwargs)[0]
+    """The same as run_with_stderr, except it only returns stdout."""
+    return run_with_stderr(*cmd, **kwargs)[0]
 
 
 def run_with_retcode(*cmd, **kwargs):
-  """Run a command but only return the status code."""
-  try:
-    run(*cmd, **kwargs)
-    return 0
-  except subprocess2.CalledProcessError as cpe:
-    return cpe.returncode
+    """Run a command but only return the status code."""
+    try:
+        run(*cmd, **kwargs)
+        return 0
+    except subprocess2.CalledProcessError as cpe:
+        return cpe.returncode
+
 
 def run_stream(*cmd, **kwargs):
-  """Runs a git command. Returns stdout as a PIPE (file-like object).
+    """Runs a git command. Returns stdout as a PIPE (file-like object).
 
   stderr is dropped to avoid races if the process outputs to both stdout and
   stderr.
   """
-  kwargs.setdefault('stderr', subprocess2.DEVNULL)
-  kwargs.setdefault('stdout', subprocess2.PIPE)
-  kwargs.setdefault('shell', False)
-  cmd = (GIT_EXE, '-c', 'color.ui=never') + cmd
-  proc = subprocess2.Popen(cmd, **kwargs)
-  return proc.stdout
+    kwargs.setdefault('stderr', subprocess2.DEVNULL)
+    kwargs.setdefault('stdout', subprocess2.PIPE)
+    kwargs.setdefault('shell', False)
+    cmd = (GIT_EXE, '-c', 'color.ui=never') + cmd
+    proc = subprocess2.Popen(cmd, **kwargs)
+    return proc.stdout
 
 
 @contextlib.contextmanager
 def run_stream_with_retcode(*cmd, **kwargs):
-  """Runs a git command as context manager yielding stdout as a PIPE.
+    """Runs a git command as context manager yielding stdout as a PIPE.
 
   stderr is dropped to avoid races if the process outputs to both stdout and
   stderr.
 
   Raises subprocess2.CalledProcessError on nonzero return code.
   """
-  kwargs.setdefault('stderr', subprocess2.DEVNULL)
-  kwargs.setdefault('stdout', subprocess2.PIPE)
-  kwargs.setdefault('shell', False)
-  cmd = (GIT_EXE, '-c', 'color.ui=never') + cmd
-  try:
-    proc = subprocess2.Popen(cmd, **kwargs)
-    yield proc.stdout
-  finally:
-    retcode = proc.wait()
-    if retcode != 0:
-      raise subprocess2.CalledProcessError(retcode, cmd, os.getcwd(),
-                                           b'', b'')
+    kwargs.setdefault('stderr', subprocess2.DEVNULL)
+    kwargs.setdefault('stdout', subprocess2.PIPE)
+    kwargs.setdefault('shell', False)
+    cmd = (GIT_EXE, '-c', 'color.ui=never') + cmd
+    try:
+        proc = subprocess2.Popen(cmd, **kwargs)
+        yield proc.stdout
+    finally:
+        retcode = proc.wait()
+        if retcode != 0:
+            raise subprocess2.CalledProcessError(retcode, cmd, os.getcwd(), b'',
+                                                 b'')
 
 
 def run_with_stderr(*cmd, **kwargs):
-  """Runs a git command.
+    """Runs a git command.
 
   Returns (stdout, stderr) as a pair of strings.
 
@@ -845,64 +851,66 @@
     autostrip (bool) - Strip the output. Defaults to True.
     indata (str) - Specifies stdin data for the process.
   """
-  kwargs.setdefault('stdin', subprocess2.PIPE)
-  kwargs.setdefault('stdout', subprocess2.PIPE)
-  kwargs.setdefault('stderr', subprocess2.PIPE)
-  kwargs.setdefault('shell', False)
-  autostrip = kwargs.pop('autostrip', True)
-  indata = kwargs.pop('indata', None)
-  decode = kwargs.pop('decode', True)
-  accepted_retcodes = kwargs.pop('accepted_retcodes', [0])
+    kwargs.setdefault('stdin', subprocess2.PIPE)
+    kwargs.setdefault('stdout', subprocess2.PIPE)
+    kwargs.setdefault('stderr', subprocess2.PIPE)
+    kwargs.setdefault('shell', False)
+    autostrip = kwargs.pop('autostrip', True)
+    indata = kwargs.pop('indata', None)
+    decode = kwargs.pop('decode', True)
+    accepted_retcodes = kwargs.pop('accepted_retcodes', [0])
 
-  cmd = (GIT_EXE, '-c', 'color.ui=never') + cmd
-  proc = subprocess2.Popen(cmd, **kwargs)
-  ret, err = proc.communicate(indata)
-  retcode = proc.wait()
-  if retcode not in accepted_retcodes:
-    raise subprocess2.CalledProcessError(retcode, cmd, os.getcwd(), ret, err)
+    cmd = (GIT_EXE, '-c', 'color.ui=never') + cmd
+    proc = subprocess2.Popen(cmd, **kwargs)
+    ret, err = proc.communicate(indata)
+    retcode = proc.wait()
+    if retcode not in accepted_retcodes:
+        raise subprocess2.CalledProcessError(retcode, cmd, os.getcwd(), ret,
+                                             err)
 
-  if autostrip:
-    ret = (ret or b'').strip()
-    err = (err or b'').strip()
+    if autostrip:
+        ret = (ret or b'').strip()
+        err = (err or b'').strip()
 
-  if decode:
-    ret = ret.decode('utf-8', 'replace')
-    err = err.decode('utf-8', 'replace')
+    if decode:
+        ret = ret.decode('utf-8', 'replace')
+        err = err.decode('utf-8', 'replace')
 
-  return ret, err
+    return ret, err
 
 
 def set_branch_config(branch, option, value, scope='local'):
-  set_config('branch.%s.%s' % (branch, option), value, scope=scope)
+    set_config('branch.%s.%s' % (branch, option), value, scope=scope)
 
 
 def set_config(option, value, scope='local'):
-  run('config', '--' + scope, option, value)
+    run('config', '--' + scope, option, value)
 
 
 def get_dirty_files():
-  # Make sure index is up-to-date before running diff-index.
-  run_with_retcode('update-index', '--refresh', '-q')
-  return run('diff-index', '--ignore-submodules', '--name-status', 'HEAD')
+    # Make sure index is up-to-date before running diff-index.
+    run_with_retcode('update-index', '--refresh', '-q')
+    return run('diff-index', '--ignore-submodules', '--name-status', 'HEAD')
 
 
 def is_dirty_git_tree(cmd):
-  w = lambda s: sys.stderr.write(s+"\n")
+    w = lambda s: sys.stderr.write(s + "\n")
 
-  dirty = get_dirty_files()
-  if dirty:
-    w('Cannot %s with a dirty tree. Commit%s or stash your changes first.' %
-      (cmd, '' if cmd == 'upload' else ', freeze'))
-    w('Uncommitted files: (git diff-index --name-status HEAD)')
-    w(dirty[:4096])
-    if len(dirty) > 4096: # pragma: no cover
-      w('... (run "git diff-index --name-status HEAD" to see full output).')
-    return True
-  return False
+    dirty = get_dirty_files()
+    if dirty:
+        w('Cannot %s with a dirty tree. Commit%s or stash your changes first.' %
+          (cmd, '' if cmd == 'upload' else ', freeze'))
+        w('Uncommitted files: (git diff-index --name-status HEAD)')
+        w(dirty[:4096])
+        if len(dirty) > 4096:  # pragma: no cover
+            w('... (run "git diff-index --name-status HEAD" to see full '
+              'output).')
+        return True
+    return False
 
 
 def status(ignore_submodules=None):
-  """Returns a parsed version of git-status.
+    """Returns a parsed version of git-status.
 
   Args:
    ignore_submodules (str|None): "all", "none", or None.
@@ -916,86 +924,93 @@
       if lstat == 'R'
   """
 
-  ignore_submodules = ignore_submodules or 'none'
-  assert ignore_submodules in (
-      'all', 'none'), f'ignore_submodules value {ignore_submodules} is invalid'
+    ignore_submodules = ignore_submodules or 'none'
+    assert ignore_submodules in (
+        'all',
+        'none'), f'ignore_submodules value {ignore_submodules} is invalid'
 
-  stat_entry = collections.namedtuple('stat_entry', 'lstat rstat src')
+    stat_entry = collections.namedtuple('stat_entry', 'lstat rstat src')
 
-  def tokenizer(stream):
-    acc = BytesIO()
-    c = None
-    while c != b'':
-      c = stream.read(1)
-      if c in (None, b'', b'\0'):
-        if len(acc.getvalue()) > 0:
-          yield acc.getvalue()
-          acc = BytesIO()
-      else:
-        acc.write(c)
+    def tokenizer(stream):
+        acc = BytesIO()
+        c = None
+        while c != b'':
+            c = stream.read(1)
+            if c in (None, b'', b'\0'):
+                if len(acc.getvalue()) > 0:
+                    yield acc.getvalue()
+                    acc = BytesIO()
+            else:
+                acc.write(c)
 
-  def parser(tokens):
-    while True:
-      try:
-        status_dest = next(tokens).decode('utf-8')
-      except StopIteration:
-        return
-      stat, dest = status_dest[:2], status_dest[3:]
-      lstat, rstat = stat
-      if lstat == 'R':
-        src = next(tokens).decode('utf-8')
-      else:
-        src = dest
-      yield (dest, stat_entry(lstat, rstat, src))
+    def parser(tokens):
+        while True:
+            try:
+                status_dest = next(tokens).decode('utf-8')
+            except StopIteration:
+                return
+            stat, dest = status_dest[:2], status_dest[3:]
+            lstat, rstat = stat
+            if lstat == 'R':
+                src = next(tokens).decode('utf-8')
+            else:
+                src = dest
+            yield (dest, stat_entry(lstat, rstat, src))
 
-  return parser(
-      tokenizer(
-          run_stream('status',
-                     '-z',
-                     f'--ignore-submodules={ignore_submodules}',
-                     bufsize=-1)))
+    return parser(
+        tokenizer(
+            run_stream('status',
+                       '-z',
+                       f'--ignore-submodules={ignore_submodules}',
+                       bufsize=-1)))
 
 
 def squash_current_branch(header=None, merge_base=None):
-  header = header or 'git squash commit for %s.' % current_branch()
-  merge_base = merge_base or get_or_create_merge_base(current_branch())
-  log_msg = header + '\n'
-  if log_msg:
-    log_msg += '\n'
-  log_msg += run('log', '--reverse', '--format=%H%n%B', '%s..HEAD' % merge_base)
-  run('reset', '--soft', merge_base)
+    header = header or 'git squash commit for %s.' % current_branch()
+    merge_base = merge_base or get_or_create_merge_base(current_branch())
+    log_msg = header + '\n'
+    if log_msg:
+        log_msg += '\n'
+    log_msg += run('log', '--reverse', '--format=%H%n%B',
+                   '%s..HEAD' % merge_base)
+    run('reset', '--soft', merge_base)
 
-  if not get_dirty_files():
-    # Sometimes the squash can result in the same tree, meaning that there is
-    # nothing to commit at this point.
-    print('Nothing to commit; squashed branch is empty')
-    return False
-  run('commit', '--no-verify', '-a', '-F', '-', indata=log_msg.encode('utf-8'))
-  return True
+    if not get_dirty_files():
+        # Sometimes the squash can result in the same tree, meaning that there
+        # is nothing to commit at this point.
+        print('Nothing to commit; squashed branch is empty')
+        return False
+    run('commit',
+        '--no-verify',
+        '-a',
+        '-F',
+        '-',
+        indata=log_msg.encode('utf-8'))
+    return True
 
 
 def tags(*args):
-  return run('tag', *args).splitlines()
+    return run('tag', *args).splitlines()
 
 
 def thaw():
-  took_action = False
-  with run_stream('rev-list', 'HEAD') as stream:
-    for sha in stream:
-      sha = sha.strip().decode('utf-8')
-      msg = run('show', '--format=%f%b', '-s', 'HEAD')
-      match = FREEZE_MATCHER.match(msg)
-      if not match:
-        if not took_action:
-          return 'Nothing to thaw.'
-        break
+    took_action = False
+    with run_stream('rev-list', 'HEAD') as stream:
+        for sha in stream:
+            sha = sha.strip().decode('utf-8')
+            msg = run('show', '--format=%f%b', '-s', 'HEAD')
+            match = FREEZE_MATCHER.match(msg)
+            if not match:
+                if not took_action:
+                    return 'Nothing to thaw.'
+                break
 
-      run('reset', '--' + FREEZE_SECTIONS[match.group(1)], sha)
-      took_action = True
+            run('reset', '--' + FREEZE_SECTIONS[match.group(1)], sha)
+            took_action = True
 
 
 def topo_iter(branch_tree, top_down=True):
-  """Generates (branch, parent) in topographical order for a branch tree.
+    """Generates (branch, parent) in topographical order for a branch tree.
 
   Given a tree:
 
@@ -1018,34 +1033,34 @@
   if top_down is True, items are yielded from A->D. Otherwise they're yielded
   from D->A. Within a layer the branches will be yielded in sorted order.
   """
-  branch_tree = branch_tree.copy()
+    branch_tree = branch_tree.copy()
 
-  # TODO(iannucci): There is probably a more efficient way to do these.
-  if top_down:
-    while branch_tree:
-      this_pass = [(b, p) for b, p in branch_tree.items()
-                   if p not in branch_tree]
-      assert this_pass, "Branch tree has cycles: %r" % branch_tree
-      for branch, parent in sorted(this_pass):
-        yield branch, parent
-        del branch_tree[branch]
-  else:
-    parent_to_branches = collections.defaultdict(set)
-    for branch, parent in branch_tree.items():
-      parent_to_branches[parent].add(branch)
+    # TODO(iannucci): There is probably a more efficient way to do these.
+    if top_down:
+        while branch_tree:
+            this_pass = [(b, p) for b, p in branch_tree.items()
+                         if p not in branch_tree]
+            assert this_pass, "Branch tree has cycles: %r" % branch_tree
+            for branch, parent in sorted(this_pass):
+                yield branch, parent
+                del branch_tree[branch]
+    else:
+        parent_to_branches = collections.defaultdict(set)
+        for branch, parent in branch_tree.items():
+            parent_to_branches[parent].add(branch)
 
-    while branch_tree:
-      this_pass = [(b, p) for b, p in branch_tree.items()
-                   if not parent_to_branches[b]]
-      assert this_pass, "Branch tree has cycles: %r" % branch_tree
-      for branch, parent in sorted(this_pass):
-        yield branch, parent
-        parent_to_branches[parent].discard(branch)
-        del branch_tree[branch]
+        while branch_tree:
+            this_pass = [(b, p) for b, p in branch_tree.items()
+                         if not parent_to_branches[b]]
+            assert this_pass, "Branch tree has cycles: %r" % branch_tree
+            for branch, parent in sorted(this_pass):
+                yield branch, parent
+                parent_to_branches[parent].discard(branch)
+                del branch_tree[branch]
 
 
 def tree(treeref, recurse=False):
-  """Returns a dict representation of a git tree object.
+    """Returns a dict representation of a git tree object.
 
   Args:
     treeref (str) - a git ref which resolves to a tree (commits count as trees).
@@ -1067,122 +1082,129 @@
 
     ref is the hex encoded hash of the entry.
   """
-  ret = {}
-  opts = ['ls-tree', '--full-tree']
-  if recurse:
-    opts.append('-r')
-  opts.append(treeref)
-  try:
-    for line in run(*opts).splitlines():
-      mode, typ, ref, name = line.split(None, 3)
-      ret[name] = (mode, typ, ref)
-  except subprocess2.CalledProcessError:
-    return None
-  return ret
+    ret = {}
+    opts = ['ls-tree', '--full-tree']
+    if recurse:
+        opts.append('-r')
+    opts.append(treeref)
+    try:
+        for line in run(*opts).splitlines():
+            mode, typ, ref, name = line.split(None, 3)
+            ret[name] = (mode, typ, ref)
+    except subprocess2.CalledProcessError:
+        return None
+    return ret
 
 
 def get_remote_url(remote='origin'):
-  try:
-    return run('config', 'remote.%s.url' % remote)
-  except subprocess2.CalledProcessError:
-    return None
+    try:
+        return run('config', 'remote.%s.url' % remote)
+    except subprocess2.CalledProcessError:
+        return None
 
 
 def upstream(branch):
-  try:
-    return run('rev-parse', '--abbrev-ref', '--symbolic-full-name',
-               branch+'@{upstream}')
-  except subprocess2.CalledProcessError:
-    return None
+    try:
+        return run('rev-parse', '--abbrev-ref', '--symbolic-full-name',
+                   branch + '@{upstream}')
+    except subprocess2.CalledProcessError:
+        return None
 
 
 def get_git_version():
-  """Returns a tuple that contains the numeric components of the current git
+    """Returns a tuple that contains the numeric components of the current git
   version."""
-  version_string = run('--version')
-  version_match = re.search(r'(\d+.)+(\d+)', version_string)
-  version = version_match.group() if version_match else ''
+    version_string = run('--version')
+    version_match = re.search(r'(\d+.)+(\d+)', version_string)
+    version = version_match.group() if version_match else ''
 
-  return tuple(int(x) for x in version.split('.'))
+    return tuple(int(x) for x in version.split('.'))
 
 
 def get_branches_info(include_tracking_status):
-  format_string = (
-      '--format=%(refname:short):%(objectname:short):%(upstream:short):')
+    format_string = (
+        '--format=%(refname:short):%(objectname:short):%(upstream:short):')
 
-  # This is not covered by the depot_tools CQ which only has git version 1.8.
-  if (include_tracking_status and
-      get_git_version() >= MIN_UPSTREAM_TRACK_GIT_VERSION):  # pragma: no cover
-    format_string += '%(upstream:track)'
+    # This is not covered by the depot_tools CQ which only has git version 1.8.
+    if (include_tracking_status and get_git_version() >=
+            MIN_UPSTREAM_TRACK_GIT_VERSION):  # pragma: no cover
+        format_string += '%(upstream:track)'
 
-  info_map = {}
-  data = run('for-each-ref', format_string, 'refs/heads')
-  BranchesInfo = collections.namedtuple(
-      'BranchesInfo', 'hash upstream commits behind')
-  for line in data.splitlines():
-    (branch, branch_hash, upstream_branch, tracking_status) = line.split(':')
+    info_map = {}
+    data = run('for-each-ref', format_string, 'refs/heads')
+    BranchesInfo = collections.namedtuple('BranchesInfo',
+                                          'hash upstream commits behind')
+    for line in data.splitlines():
+        (branch, branch_hash, upstream_branch,
+         tracking_status) = line.split(':')
 
-    commits = None
-    if include_tracking_status:
-      base = get_or_create_merge_base(branch)
-      if base:
-        commits_list = run('rev-list', '--count', branch, '^%s' % base, '--')
-        commits = int(commits_list) or None
+        commits = None
+        if include_tracking_status:
+            base = get_or_create_merge_base(branch)
+            if base:
+                commits_list = run('rev-list', '--count', branch, '^%s' % base,
+                                   '--')
+                commits = int(commits_list) or None
 
-    behind_match = re.search(r'behind (\d+)', tracking_status)
-    behind = int(behind_match.group(1)) if behind_match else None
+        behind_match = re.search(r'behind (\d+)', tracking_status)
+        behind = int(behind_match.group(1)) if behind_match else None
 
-    info_map[branch] = BranchesInfo(
-        hash=branch_hash, upstream=upstream_branch, commits=commits,
-        behind=behind)
+        info_map[branch] = BranchesInfo(hash=branch_hash,
+                                        upstream=upstream_branch,
+                                        commits=commits,
+                                        behind=behind)
 
-  # Set None for upstreams which are not branches (e.g empty upstream, remotes
-  # and deleted upstream branches).
-  missing_upstreams = {}
-  for info in info_map.values():
-    if info.upstream not in info_map and info.upstream not in missing_upstreams:
-      missing_upstreams[info.upstream] = None
+    # Set None for upstreams which are not branches (e.g empty upstream, remotes
+    # and deleted upstream branches).
+    missing_upstreams = {}
+    for info in info_map.values():
+        if (info.upstream not in info_map
+                and info.upstream not in missing_upstreams):
+            missing_upstreams[info.upstream] = None
 
-  result = info_map.copy()
-  result.update(missing_upstreams)
-  return result
+    result = info_map.copy()
+    result.update(missing_upstreams)
+    return result
 
 
-def make_workdir_common(repository, new_workdir, files_to_symlink,
-                        files_to_copy, symlink=None):
-  if not symlink:
-    symlink = os.symlink
-  os.makedirs(new_workdir)
-  for entry in files_to_symlink:
-    clone_file(repository, new_workdir, entry, symlink)
-  for entry in files_to_copy:
-    clone_file(repository, new_workdir, entry, shutil.copy)
+def make_workdir_common(repository,
+                        new_workdir,
+                        files_to_symlink,
+                        files_to_copy,
+                        symlink=None):
+    if not symlink:
+        symlink = os.symlink
+    os.makedirs(new_workdir)
+    for entry in files_to_symlink:
+        clone_file(repository, new_workdir, entry, symlink)
+    for entry in files_to_copy:
+        clone_file(repository, new_workdir, entry, shutil.copy)
 
 
 def make_workdir(repository, new_workdir):
-  GIT_DIRECTORY_WHITELIST = [
-    'config',
-    'info',
-    'hooks',
-    'logs/refs',
-    'objects',
-    'packed-refs',
-    'refs',
-    'remotes',
-    'rr-cache',
-    'shallow',
-  ]
-  make_workdir_common(repository, new_workdir, GIT_DIRECTORY_WHITELIST,
-                      ['HEAD'])
+    GIT_DIRECTORY_WHITELIST = [
+        'config',
+        'info',
+        'hooks',
+        'logs/refs',
+        'objects',
+        'packed-refs',
+        'refs',
+        'remotes',
+        'rr-cache',
+        'shallow',
+    ]
+    make_workdir_common(repository, new_workdir, GIT_DIRECTORY_WHITELIST,
+                        ['HEAD'])
 
 
 def clone_file(repository, new_workdir, link, operation):
-  if not os.path.exists(os.path.join(repository, link)):
-    return
-  link_dir = os.path.dirname(os.path.join(new_workdir, link))
-  if not os.path.exists(link_dir):
-    os.makedirs(link_dir)
-  src = os.path.join(repository, link)
-  if os.path.islink(src):
-    src = os.path.realpath(src)
-  operation(src, os.path.join(new_workdir, link))
+    if not os.path.exists(os.path.join(repository, link)):
+        return
+    link_dir = os.path.dirname(os.path.join(new_workdir, link))
+    if not os.path.exists(link_dir):
+        os.makedirs(link_dir)
+    src = os.path.join(repository, link)
+    if os.path.islink(src):
+        src = os.path.realpath(src)
+    operation(src, os.path.join(new_workdir, link))