Re-land "Split up artifacts to download some in the foreground/background.""

This refactors the devserver to place artifacts into their own classes,
letting us download some artifacts in the foreground and others in the
background. I also add wait_for_status, a blocking RPC that can be called
with the same archive_url to determine whether the background artifacts have
finished downloading.
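
As a minimal sketch (the static_dir path and archive_url below are only
placeholders), the new Downloader API is used like this:

  import downloader

  d = downloader.Downloader('/var/lib/devserver/static')
  # Stages the synchronous (foreground) artifacts before returning; the rest
  # keep downloading in a background process.
  d.Download('gs://chromeos-image-archive/x86-generic-release/R18-1660.0.0',
             background=True)
  # Blocks until the background artifacts are done (re-raises any failure).
  d.GetStatusOfBackgroundDownloads()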

My next change is to modify the dynamic_suite code to do the following
(sketched below):

Download()

Process DUTs

Wait_For_Status

Run tests.
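
A rough sketch of that flow, assuming the devserver exposes these RPCs as
HTTP endpoints named download and wait_for_status that take archive_url as a
query parameter (the host, port, and the two placeholder steps below are
assumptions, not part of this change):

  import urllib
  import urllib2

  DEVSERVER = 'http://localhost:8080'
  ARCHIVE_URL = 'gs://chromeos-image-archive/x86-generic-release/R18-1660.0.0'

  def call_devserver(rpc):
    # Issue an RPC against the devserver with the shared archive_url.
    query = urllib.urlencode({'archive_url': ARCHIVE_URL})
    return urllib2.urlopen('%s/%s?%s' % (DEVSERVER, rpc, query)).read()

  def process_duts():
    """Placeholder for the DUT-processing step in dynamic_suite."""

  def run_tests():
    """Placeholder for kicking off the test suite."""

  call_devserver('download')          # Returns once foreground staging is done.
  process_duts()
  call_devserver('wait_for_status')   # Blocks until background downloads finish.
  run_tests()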

BUG=chromium-os:27285
TEST=Ran all unittests + pylint -- added a whole bunch of tests.
Ran downloader from localhost?downloader and saw artifacts staged correctly.

Change-Id: I14c888dcc08cf4df2719915fe2fe88d52fbcdc62
Reviewed-on: https://gerrit.chromium.org/gerrit/19244
Tested-by: Chris Sosa <sosa@chromium.org>
Reviewed-by: Scott Zawalski <scottz@chromium.org>
Commit-Ready: Chris Sosa <sosa@chromium.org>
diff --git a/downloader.py b/downloader.py
index a775250..3c23c84 100755
--- a/downloader.py
+++ b/downloader.py
@@ -1,10 +1,11 @@
 #!/usr/bin/python
 #
-# Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 import cherrypy
+import multiprocessing
 import shutil
 import tempfile
 
@@ -24,50 +25,122 @@
 
   def __init__(self, static_dir):
     self._static_dir = static_dir
+    self._build_dir = None
+    self._staging_dir = None
+    self._status_queue = multiprocessing.Queue()
+    self._lock_tag = None
+    self._archive_url = None
 
-  def Download(self, archive_url):
+  def Download(self, archive_url, background=False):
+    """Downloads the given build artifacts defined by the |archive_url|.
+
+    If background is set to True, will return back early before all artifacts
+    have been downloaded. The artifacts that can be backgrounded are all those
+    that are not set as synchronous.
+    """
     # Parse archive_url into board and build.
     # e.g. gs://chromeos-image-archive/{board}/{build}
-    archive_url = archive_url.strip('/')
-    board, build = archive_url.rsplit('/', 2)[-2:]
+    self._archive_url = archive_url.strip('/')
+    board, build = self._archive_url.rsplit('/', 2)[-2:]
 
     # Bind build_dir and staging_dir here so we can tell if we need to do any
     # cleanup after an exception occurs before build_dir is set.
-    build_dir = staging_dir = None
-    lock_tag = '/'.join([board, build])
+    self._lock_tag = '/'.join([board, build])
     try:
       # Create Dev Server directory for this build and tell other Downloader
       # instances we have processed this build.
       try:
-        build_dir = devserver_util.AcquireLock(static_dir=self._static_dir,
-                                               tag=lock_tag)
+        self._build_dir = devserver_util.AcquireLock(
+            static_dir=self._static_dir, tag=self._lock_tag)
       except devserver_util.DevServerUtilError, e:
         cherrypy.log('Refused lock "%s". Assuming build has already been'
-                     'processed: %s' % (lock_tag, str(e)), 'DOWNLOAD')
+                     ' processed: %s' % (self._lock_tag, str(e)), 'DOWNLOAD')
+        self._status_queue.put('Success')
         return 'Success'
 
-      cherrypy.log('Downloading build from %s' % archive_url, 'DOWNLOAD')
-      staging_dir = tempfile.mkdtemp(suffix='_'.join([board, build]))
-      devserver_util.DownloadBuildFromGS(
-          staging_dir=staging_dir, archive_url=archive_url, build=build)
+      self._staging_dir = tempfile.mkdtemp(suffix='_'.join([board, build]))
+      cherrypy.log('Gathering download requirements for %s' % self._archive_url,
+                   'DOWNLOAD')
+      artifacts = devserver_util.GatherArtifactDownloads(
+          self._staging_dir, self._archive_url, build, self._build_dir)
+      devserver_util.PrepareBuildDirectory(self._build_dir)
 
-      cherrypy.log('Packaging autotest tests.', 'DOWNLOAD')
-      devserver_util.PrepareAutotestPkgs(staging_dir)
+      cherrypy.log('Downloading foreground artifacts from %s' % archive_url,
+                   'DOWNLOAD')
+      background_artifacts = []
+      for artifact in artifacts:
+        if artifact.Synchronous():
+          artifact.Download()
+          artifact.Stage()
+        else:
+          background_artifacts.append(artifact)
 
-      cherrypy.log('Installing build components.', 'DOWNLOAD')
-      devserver_util.InstallBuild(
-          staging_dir=staging_dir, build_dir=build_dir)
-    except Exception:
+      # In both cases the final status ('Success' or the exception raised) is
+      # reported on the status queue by _DownloadArtifactsSerially.
+      if background:
+        self._DownloadArtifactsInBackground(background_artifacts)
+      else:
+        self._DownloadArtifactsSerially(background_artifacts)
+    except Exception, e:
       # Release processing lock, which will remove build components directory
       # so future runs can retry.
-      if build_dir:
-        devserver_util.ReleaseLock(static_dir=self._static_dir, tag=lock_tag)
+      if self._build_dir:
+        devserver_util.ReleaseLock(static_dir=self._static_dir,
+                                   tag=self._lock_tag)
+
+      self._status_queue.put(e)
+      self._Cleanup()
       raise
-    finally:
-      # Always cleanup after ourselves.
-      if staging_dir:
-        cherrypy.log('Cleaning up staging directory %s' % staging_dir,
-                     'DOWNLOAD')
-        shutil.rmtree(staging_dir)
 
     return 'Success'
+
+  def _Cleanup(self):
+    """Cleans up the staging dir for this downloader instanfce."""
+    if self._staging_dir:
+      cherrypy.log('Cleaning up staging directory %s' % self._staging_dir,
+                   'DOWNLOAD')
+      shutil.rmtree(self._staging_dir)
+
+    self._staging_dir = None
+
+  def _DownloadArtifactsSerially(self, artifacts):
+    """Simple function to download all the given artifacts serially."""
+    cherrypy.log('Downloading background artifacts for %s' % self._archive_url,
+                 'DOWNLOAD')
+    try:
+      for artifact in artifacts:
+        artifact.Download()
+        artifact.Stage()
+    except Exception, e:
+      self._status_queue.put(e)
+
+      # Release processing lock, which will remove build components directory
+      # so future runs can retry.
+      if self._build_dir:
+        devserver_util.ReleaseLock(static_dir=self._static_dir,
+                                   tag=self._lock_tag)
+    else:
+      self._status_queue.put('Success')
+    finally:
+      self._Cleanup()
+
+  def _DownloadArtifactsInBackground(self, artifacts):
+    """Downloads |artifacts| in the background and signals when complete."""
+    proc = multiprocessing.Process(target=self._DownloadArtifactsSerially,
+                                   args=(artifacts,))
+    proc.start()
+
+  def GetStatusOfBackgroundDownloads(self):
+    """Returns the status of the background downloads.
+
+    This call blocks until a status for the background downloads is
+    available and then returns it.
+    """
+    status = self._status_queue.get()
+    # Re-queue the status in case this method is called again.
+    self._status_queue.put(status)
+    # If we received an exception, re-raise it here.
+    if isinstance(status, Exception):
+      raise status
+
+    return status
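
For illustration only: the artifact classes that Download() iterates over are
defined elsewhere in this change. The stub below is an assumption showing just
the interface the downloader relies on, not the real implementation.

  class ExampleArtifact(object):
    """Hypothetical artifact exposing the interface used by Downloader."""

    def __init__(self, synchronous=False):
      self._synchronous = synchronous

    def Synchronous(self):
      # Synchronous artifacts are downloaded and staged before Download()
      # returns; everything else can be deferred to the background.
      return self._synchronous

    def Download(self):
      # Fetch the artifact from the archive into the staging directory.
      pass

    def Stage(self):
      # Install the downloaded artifact into the build directory.
      pass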