Update devserver to support downloaders other than Google Storage

Main changes:
1. Restructure artifact wrappers to support both CrOS and Android artifacts.
2. Support different downloaders in devserver.py.
3. Add LaunchControlDownloader class; its functions are to be implemented.

BUG=chromium:512668
TEST=run_unittests, devserver_integration_test.py, guado_moblab (au and dummy)
cros flash and cros stage to guado moblab

Change-Id: Ia350b00a2a5ceaeff6d922600dc84c8fc7295ef9
Reviewed-on: https://chromium-review.googlesource.com/301992
Commit-Ready: Dan Shi <dshi@chromium.org>
Tested-by: Dan Shi <dshi@chromium.org>
Reviewed-by: Dan Shi <dshi@chromium.org>
diff --git a/artifact_info.py b/artifact_info.py
index c1052d2..3e63c49 100644
--- a/artifact_info.py
+++ b/artifact_info.py
@@ -70,6 +70,27 @@
 # The factory test image.
 FACTORY_IMAGE = 'factory_image'
 
+#### Android artifacts. These are in a different namespace from the above. ####
+
+# Various android images stored in a zip file (including boot and system).
+# For example, shamu-img-2284311.zip contains boot.img, cache.img, recovery.img,
+# system.img and userdata.img. fastboot can use the zip file to update the dut
+# in a single command. Therefore, devserver does not unzip the zip file to avoid
+# unnecessary load on the devserver.
+ANDROID_ZIP_IMAGES = 'zip_images'
+
+# Radio image.
+ANDROID_RADIO_IMAGE = 'radio_image'
+
+# Bootloader image.
+ANDROID_BOOTLOADER_IMAGE = 'bootloader_image'
+
+# fastboot, utility to flash image to Android device.
+ANDROID_FASTBOOT = 'fastboot'
+
+# Test zip file for Android build, e.g., shamu-tests-2284311.zip
+ANDROID_TEST_ZIP = 'test_zip'
+
 # In general, downloading one artifact usually indicates that the caller will
 # want to download other artifacts later. The following map explicitly defines
 # this relationship. Specifically:
diff --git a/build_artifact.py b/build_artifact.py
index 5a77ff0..884cd75 100755
--- a/build_artifact.py
+++ b/build_artifact.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python2
 
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -6,7 +6,9 @@
 
 """Module containing classes that wrap artifact downloads."""
 
-import glob
+from __future__ import print_function
+
+import itertools
 import os
 import pickle
 import re
@@ -16,7 +18,6 @@
 import artifact_info
 import common_util
 import devserver_constants
-import gsutil_util
 import log_util
 
 
@@ -42,6 +43,13 @@
 TEST_IMAGE_FILE = 'chromiumos_test_image.tar.xz'
 RECOVERY_IMAGE_FILE = 'recovery_image.tar.xz'
 
+############ Actual filenames of Android build artifacts ############
+
+ANDROID_IMAGE_ZIP = '*-img-*.zip'
+ANDROID_RADIO_IMAGE = 'radio.img'
+ANDROID_BOOTLOADER_IMAGE = 'bootloader.img'
+ANDROID_FASTBOOT = 'fastboot'
+ANDROID_TEST_ZIP = r'[^-]*-tests-.*\.zip'
 
 _build_artifact_locks = common_util.LockDict()
 
@@ -51,8 +59,26 @@
   pass
 
 
-class BuildArtifact(log_util.Loggable):
-  """Wrapper around an artifact to download from gsutil.
+class ArtifactMeta(type):
+  """metaclass for an artifact type.
+
+  This metaclass is for class Artifact and its subclasses to have a meaningful
+  string composed of class name and the corresponding artifact name, e.g.,
+  `Artifact_full_payload`. This helps to better logging, refer to logging in
+  method Downloader.Download.
+  """
+
+  ARTIFACT_NAME = None
+
+  def __str__(cls):
+    return '%s_%s' % (cls.__name__, cls.ARTIFACT_NAME)
+
+  def __repr__(cls):
+    return str(cls)
+
+
+class Artifact(log_util.Loggable):
+  """Wrapper around an artifact to download using a fetcher.
 
   The purpose of this class is to download objects from Google Storage
   and install them to a local directory. There are two main functions, one to
@@ -64,7 +90,7 @@
   between a glob (full name string match) and a regex (partial match).
 
   Class members:
-    archive_url: An archive URL.
+    fetcher: An object which knows how to fetch the artifact.
     name: Name given for artifact; in fact, it is a pattern that captures the
           names of files contained in the artifact. This can either be an
           ordinary shell-style glob (the default), or a regular expression (if
@@ -80,6 +106,8 @@
                          install_dir will be deleted if the build does not
                          existed.
     install_path: Path to artifact.
+    install_subdir: Directory within install_path where the artifact is actually
+                    stored.
     install_dir: The final location where the artifact should be staged to.
     single_name: If True the name given should only match one item. Note, if not
                  True, self.name will become a list of items returned.
@@ -89,28 +117,30 @@
                            marker file.
   """
 
-  def __init__(self, install_dir, archive_url, name, build,
+  __metaclass__ = ArtifactMeta
+
+  def __init__(self, name, install_dir, build, install_subdir='',
                is_regex_name=False, optional_name=None):
     """Constructor.
 
     Args:
       install_dir: Where to install the artifact.
-      archive_url: The Google Storage URL or local path to find the artifact.
       name: Identifying name to be used to find/store the artifact.
       build: The name of the build e.g. board/release.
+      install_subdir: Directory within install_path where the artifact is
+                      actually stored.
       is_regex_name: Whether the name pattern is a regex (default: glob).
       optional_name: An alternative name to find the artifact, which can lead
         to faster download. Unlike |name|, there is no guarantee that an
         artifact named |optional_name| is/will be on Google Storage. If it
         exists, we download it. Otherwise, we fall back to wait for |name|.
     """
-    super(BuildArtifact, self).__init__()
+    super(Artifact, self).__init__()
 
     # In-memory lock to keep the devserver from colliding with itself while
     # attempting to stage the same artifact.
     self._process_lock = None
 
-    self.archive_url = archive_url
     self.name = name
     self.optional_name = optional_name
     self.is_regex_name = is_regex_name
@@ -128,6 +158,7 @@
     self.install_path = None
 
     self.install_dir = install_dir
+    self.install_subdir = install_subdir
 
     self.single_name = True
 
@@ -155,7 +186,7 @@
     presence of each installed file listed in this marker. Both must hold for
     the artifact to be considered staged. Note that this method is safe for use
     even if the artifacts were not stageed by this instance, as it is assumed
-    that any BuildArtifact instance that did the staging wrote the list of
+    that any Artifact instance that did the staging wrote the list of
     files actually installed into the marker.
     """
     marker_file = os.path.join(self.install_dir, self.marker_name)
@@ -186,87 +217,12 @@
     with open(os.path.join(self.install_dir, self.marker_name), 'w') as f:
       f.write('\n'.join(self.installed_files))
 
-  def _WaitForArtifactToExist(self, name, timeout):
-    """Waits for artifact to exist and returns the appropriate names.
-
-    Args:
-      name: Name to look at.
-      timeout: How long to wait for artifact to become available. Only matters
-               if self.archive_url is a Google Storage URL.
-
-    Returns:
-      A list of names that match.
-
-    Raises:
-      ArtifactDownloadError: An error occurred when obtaining artifact.
-    """
-    if self.archive_url.startswith('gs://'):
-      return self._WaitForGSArtifactToExist(name, timeout)
-    return self._VerifyLocalArtifactExists(name)
-
-  def _WaitForGSArtifactToExist(self, name, timeout):
-    """Waits for artifact to exist and returns the appropriate names.
-
-    Args:
-      name: Name to look at.
-      timeout: How long to wait for the artifact to become available.
-
-    Returns:
-      A list of names that match.
-
-    Raises:
-      ArtifactDownloadError: An error occurred when obtaining artifact.
-    """
-    names = gsutil_util.GetGSNamesWithWait(
-        name, self.archive_url, str(self), timeout=timeout,
-        is_regex_pattern=self.is_regex_name)
-    if not names:
-      raise ArtifactDownloadError('Could not find %s in Google Storage at %s' %
-                                  (name, self.archive_url))
-    return names
-
-  def _VerifyLocalArtifactExists(self, name):
-    """Verifies the local artifact exists and returns the appropriate names.
-
-    Args:
-      name: Name to look at.
-
-    Returns:
-      A list of names that match.
-
-    Raises:
-      ArtifactDownloadError: An error occurred when obtaining artifact.
-    """
-    local_path = os.path.join(self.archive_url, name)
-    if self.is_regex_name:
-      filter_re = re.compile(name)
-      for filename in os.listdir(self.archive_url):
-        if filter_re.match(filename):
-          return [filename]
-    else:
-      glob_search = glob.glob(local_path)
-      if glob_search and len(glob_search) == 1:
-        return [os.path.basename(glob_search[0])]
-    raise ArtifactDownloadError('Artifact not found.')
-
   def _UpdateName(self, names):
     if self.single_name and len(names) > 1:
       raise ArtifactDownloadError('Too many artifacts match %s' % self.name)
 
     self.name = names[0]
 
-  def _Download(self):
-    """Downloads artifact from Google Storage to a local directory."""
-    self.install_path = os.path.join(self.install_dir, self.name)
-    if self.archive_url.startswith('gs://'):
-      gs_path = '/'.join([self.archive_url, self.name])
-      gsutil_util.DownloadFromGS(gs_path, self.install_path)
-    else:
-      # It's a local path so just copy it into the staged directory.
-      shutil.copyfile(os.path.join(self.archive_url, self.name),
-                      self.install_path)
-
-
   def _Setup(self):
     """Process the downloaded content, update the list of installed files."""
     # In this primitive case, what was downloaded (has to be a single file) is
@@ -299,7 +255,7 @@
     with open(self.exception_file_path, 'r') as f:
       return pickle.load(f)
 
-  def Process(self, no_wait):
+  def Process(self, downloader, no_wait):
     """Main call point to all artifacts. Downloads and Stages artifact.
 
     Downloads and Stages artifact from Google Storage to the install directory
@@ -315,6 +271,8 @@
     process of being staged.
 
     Args:
+      downloader: A downloader instance containing the logic to download
+                  artifacts.
       no_wait: If True, don't block waiting for artifact to exist if we fail to
                immediately find it.
 
@@ -327,19 +285,20 @@
       self._process_lock = _build_artifact_locks.lock(
           os.path.join(self.install_dir, self.name))
 
+    real_install_dir = os.path.join(self.install_dir, self.install_subdir)
     with self._process_lock:
-      common_util.MkDirP(self.install_dir)
+      common_util.MkDirP(real_install_dir)
       if not self.ArtifactStaged():
         # Delete any existing exception saved for this artifact.
         self._ClearException()
         found_artifact = False
         if self.optional_name:
           try:
-            # Check if the artifact named |optional_name| exists on GS.
+            # Check if the artifact named |optional_name| exists.
             # Because this artifact may not always exist, don't bother
             # to wait for it (set timeout=1).
-            new_names = self._WaitForArtifactToExist(
-                self.optional_name, timeout=1)
+            new_names = downloader.Wait(
+                self.optional_name, self.is_regex_name, timeout=1)
             self._UpdateName(new_names)
 
           except ArtifactDownloadError:
@@ -353,11 +312,12 @@
           # cycles waiting around for it to exist.
           if not found_artifact:
             timeout = 1 if no_wait else 10
-            new_names = self._WaitForArtifactToExist(self.name, timeout)
+            new_names = downloader.Wait(
+                self.name, self.is_regex_name, timeout)
             self._UpdateName(new_names)
 
           self._Log('Downloading file %s', self.name)
-          self._Download()
+          self.install_path = downloader.Fetch(self.name, real_install_dir)
           self._Setup()
           self._MarkArtifactStaged()
         except Exception as e:
@@ -374,22 +334,22 @@
 
   def __str__(self):
     """String representation for the download."""
-    return '->'.join(['%s/%s' % (self.archive_url, self.name),
-                      self.install_dir])
+    return '%s->%s' % (self.name, self.install_dir)
 
   def __repr__(self):
     return str(self)
 
 
-class AUTestPayloadBuildArtifact(BuildArtifact):
+class AUTestPayload(Artifact):
   """Wrapper for AUTest delta payloads which need additional setup."""
 
   def _Setup(self):
-    super(AUTestPayloadBuildArtifact, self)._Setup()
+    super(AUTestPayload, self)._Setup()
 
     # Rename to update.gz.
-    install_path = os.path.join(self.install_dir, self.name)
-    new_install_path = os.path.join(self.install_dir,
+    install_path = os.path.join(self.install_dir, self.install_subdir,
+                                self.name)
+    new_install_path = os.path.join(self.install_dir, self.install_subdir,
                                     devserver_constants.UPDATE_FILE)
     shutil.move(install_path, new_install_path)
 
@@ -398,85 +358,47 @@
     self.installed_files = [new_install_path]
 
 
-# TODO(sosa): Change callers to make this artifact more sane.
-class DeltaPayloadsArtifact(BuildArtifact):
+class DeltaPayloadBase(AUTestPayload):
   """Delta payloads from the archive_url.
 
-  This artifact is super strange. It custom handles directories and
-  pulls in all delta payloads. We can't specify exactly what we want
+  These artifacts are super strange. They custom handle directories and
+  pull in all delta payloads. We can't specify exactly what we want
   because unlike other artifacts, this one does not conform to something a
   client might know. The client doesn't know the version of n-1 or whether it
   was even generated.
 
   IMPORTANT! Note that this artifact simply ignores the `name' argument because
-  that name is derived internally in accordance with sub-artifacts. Also note
-  the different types of names (in fact, file name patterns) used for the
-  different sub-artifacts.
+  that name is derived internally.
   """
 
-  def __init__(self, *args):
-    super(DeltaPayloadsArtifact, self).__init__(*args)
-    # Override the name field, we know what it should be.
-    self.name = '*_delta_*'
-    self.is_regex_name = False
-    self.single_name = False  # Expect multiple deltas
-
-    # We use a regular glob for the N-to-N delta payload.
-    nton_name = 'chromeos_%s*_delta_*' % self.build
-    # We use a regular expression for the M-to-N delta payload.
-    mton_name = ('chromeos_(?!%s).*_delta_.*' % re.escape(self.build))
-
-    nton_install_dir = os.path.join(self.install_dir, _AU_BASE,
-                                    self.build + _NTON_DIR_SUFFIX)
-    mton_install_dir = os.path.join(self.install_dir, _AU_BASE,
-                                    self.build + _MTON_DIR_SUFFIX)
-    self._sub_artifacts = [
-        AUTestPayloadBuildArtifact(mton_install_dir, self.archive_url,
-                                   mton_name, self.build, is_regex_name=True),
-        AUTestPayloadBuildArtifact(nton_install_dir, self.archive_url,
-                                   nton_name, self.build)]
-
-  def _Download(self):
-    """With sub-artifacts we do everything in _Setup()."""
-    pass
-
   def _Setup(self):
-    """Process each sub-artifact. Only error out if none can be found."""
-    for artifact in self._sub_artifacts:
-      try:
-        artifact.Process(no_wait=True)
-        # Setup symlink so that AU will work for this payload.
-        stateful_update_symlink = os.path.join(
-            artifact.install_dir, devserver_constants.STATEFUL_FILE)
-        os.symlink(
-            os.path.join(os.pardir, os.pardir,
-                         devserver_constants.STATEFUL_FILE),
-            stateful_update_symlink)
-
-        # Aggregate sub-artifact file lists, including stateful symlink.
-        self.installed_files += artifact.installed_files
-        self.installed_files.append(stateful_update_symlink)
-      except ArtifactDownloadError as e:
-        self._Log('Could not process %s: %s', artifact, e)
-        raise
+    super(DeltaPayloadBase, self)._Setup()
+    # Setup symlink so that AU will work for this payload.
+    stateful_update_symlink = os.path.join(
+        self.install_dir, self.install_subdir,
+        devserver_constants.STATEFUL_FILE)
+    os.symlink(os.path.join(os.pardir, os.pardir,
+                            devserver_constants.STATEFUL_FILE),
+               stateful_update_symlink)
+    self.installed_files.append(stateful_update_symlink)
 
 
-class BundledBuildArtifact(BuildArtifact):
+class BundledArtifact(Artifact):
   """A single build artifact bundle e.g. zip file or tar file."""
 
   def __init__(self, *args, **kwargs):
-    """Takes BuildArtifact args with some additional ones.
+    """Takes Artifact args with some additional ones.
 
     Args:
-      *args: See BuildArtifact documentation.
-      **kwargs: See BuildArtifact documentation.
+      *args: See Artifact documentation.
+      **kwargs: See Artifact documentation.
       files_to_extract: A list of files to extract. If set to None, extract
                         all files.
       exclude: A list of files to exclude. If None, no files are excluded.
     """
     self._files_to_extract = kwargs.pop('files_to_extract', None)
     self._exclude = kwargs.pop('exclude', None)
-    super(BundledBuildArtifact, self).__init__(*args, **kwargs)
+    super(BundledArtifact, self).__init__(*args, **kwargs)
 
     # We modify the marker so that it is unique to what was staged.
     if self._files_to_extract:
@@ -543,18 +465,18 @@
       raise ArtifactDownloadError(str(e))
 
 
-class AutotestTarballBuildArtifact(BundledBuildArtifact):
+class AutotestTarball(BundledArtifact):
   """Wrapper around the autotest tarball to download from gsutil."""
 
   def __init__(self, *args, **kwargs):
-    super(AutotestTarballBuildArtifact, self).__init__(*args, **kwargs)
+    super(AutotestTarball, self).__init__(*args, **kwargs)
     # We don't store/check explicit file lists in Autotest tarball markers;
     # this can get huge and unwieldy, and generally make little sense.
     self.store_installed_files = False
 
   def _Setup(self):
     """Extracts the tarball into the install path excluding test suites."""
-    super(AutotestTarballBuildArtifact, self)._Setup()
+    super(AutotestTarball, self)._Setup()
 
     # Deal with older autotest packages that may not be bundled.
     autotest_dir = os.path.join(self.install_dir,
@@ -575,101 +497,150 @@
       self._Log('Using pre-generated packages from autotest')
 
 
-class ImplDescription(object):
-  """Data wrapper that describes an artifact's implementation."""
+def _CreateNewArtifact(tag, base, name, *fixed_args, **fixed_kwargs):
+  """Get a data wrapper that describes an artifact's implementation.
 
-  def __init__(self, artifact_class, name, *additional_args,
-               **additional_dargs):
-    """Constructor.
+  Args:
+    tag: Tag of the artifact, defined in artifact_info.
+    base: Class of the artifact, e.g., BundledArtifact.
+    name: Name of the artifact, e.g., image.zip.
+    *fixed_args: Fixed arguments that are additional to the one used in base
+                 class.
+    **fixed_kwargs: Fixed keyword arguments that are additional to the one used
+                    in base class.
 
-    Args:
-      artifact_class: BuildArtifact class to use for the artifact.
-      name: name to use to identify artifact (see BuildArtifact.name)
-      *additional_args: Additional arguments to pass to artifact_class.
-      **additional_dargs: Additional named arguments to pass to artifact_class.
-    """
-    self.artifact_class = artifact_class
-    self.name = name
-    self.additional_args = additional_args
-    self.additional_dargs = additional_dargs
+  Returns:
+    A data wrapper that describes an artifact's implementation.
 
-  def __repr__(self):
-    return '%s_%s' % (self.artifact_class, self.name)
+  """
+  class NewArtifact(base):
+    """A data wrapper that describes an artifact's implementation."""
+    ARTIFACT_TAG = tag
+    ARTIFACT_NAME = name
+
+    def __init__(self, *args, **kwargs):
+      all_args = fixed_args + args
+      all_kwargs = {}
+      all_kwargs.update(fixed_kwargs)
+      all_kwargs.update(kwargs)
+      super(NewArtifact, self).__init__(self.ARTIFACT_NAME,
+                                        *all_args, **all_kwargs)
+
+  NewArtifact.__name__ = base.__name__
+  return NewArtifact
 
 
-# Maps artifact names to their implementation description.
-# Please note, it is good practice to use constants for these names if you're
-# going to re-use the names ANYWHERE else in the devserver code.
-ARTIFACT_IMPLEMENTATION_MAP = {
-    artifact_info.FULL_PAYLOAD:
-    ImplDescription(AUTestPayloadBuildArtifact, ('*_full_*')),
-    artifact_info.DELTA_PAYLOADS:
-    ImplDescription(DeltaPayloadsArtifact, ('DONTCARE')),
-    artifact_info.STATEFUL_PAYLOAD:
-    ImplDescription(BuildArtifact, (devserver_constants.STATEFUL_FILE)),
+# TODO(dshi): Refactor the code here to split out the logic of creating the
+# artifacts mapping to a different module.
+chromeos_artifact_map = {}
 
-    artifact_info.BASE_IMAGE:
-    ImplDescription(BundledBuildArtifact, IMAGE_FILE,
-                    optional_name=BASE_IMAGE_FILE,
-                    files_to_extract=[devserver_constants.BASE_IMAGE_FILE]),
-    artifact_info.RECOVERY_IMAGE:
-    ImplDescription(BundledBuildArtifact, IMAGE_FILE,
-                    optional_name=RECOVERY_IMAGE_FILE,
-                    files_to_extract=[devserver_constants.RECOVERY_IMAGE_FILE]),
-    artifact_info.DEV_IMAGE:
-    ImplDescription(BundledBuildArtifact, IMAGE_FILE,
-                    files_to_extract=[devserver_constants.IMAGE_FILE]),
-    artifact_info.TEST_IMAGE:
-    ImplDescription(BundledBuildArtifact, IMAGE_FILE,
-                    optional_name=TEST_IMAGE_FILE,
-                    files_to_extract=[devserver_constants.TEST_IMAGE_FILE]),
 
-    artifact_info.AUTOTEST:
-    ImplDescription(AutotestTarballBuildArtifact, AUTOTEST_FILE,
-                    files_to_extract=None,
-                    exclude=['autotest/test_suites']),
-    artifact_info.CONTROL_FILES:
-    ImplDescription(BundledBuildArtifact, CONTROL_FILES_FILE),
-    artifact_info.AUTOTEST_PACKAGES:
-    ImplDescription(AutotestTarballBuildArtifact, AUTOTEST_PACKAGES_FILE),
-    artifact_info.TEST_SUITES:
-    ImplDescription(BundledBuildArtifact, TEST_SUITES_FILE),
-    artifact_info.AU_SUITE:
-    ImplDescription(BundledBuildArtifact, AU_SUITE_FILE),
-    artifact_info.AUTOTEST_SERVER_PACKAGE:
-    ImplDescription(BuildArtifact, AUTOTEST_SERVER_PACKAGE_FILE),
+def _AddCrOSArtifact(tag, base, name, *fixed_args, **fixed_kwargs):
+  """Add a data wrapper that describes a ChromeOS artifact's implementation to
+  chromeos_artifact_map.
+  """
+  artifact = _CreateNewArtifact(tag, base, name, *fixed_args, **fixed_kwargs)
+  chromeos_artifact_map.setdefault(tag, []).append(artifact)
 
-    artifact_info.FIRMWARE:
-    ImplDescription(BuildArtifact, FIRMWARE_FILE),
-    artifact_info.SYMBOLS:
-    ImplDescription(BundledBuildArtifact, DEBUG_SYMBOLS_FILE,
-                    files_to_extract=['debug/breakpad']),
 
-    artifact_info.FACTORY_IMAGE:
-    ImplDescription(BundledBuildArtifact, FACTORY_FILE,
-                    files_to_extract=[devserver_constants.FACTORY_IMAGE_FILE])
-}
+_AddCrOSArtifact(artifact_info.FULL_PAYLOAD, AUTestPayload, '*_full_*')
+
+
+class DeltaPayloadNtoN(DeltaPayloadBase):
+  """ChromeOS Delta payload artifact for updating from version N to N."""
+  ARTIFACT_TAG = artifact_info.DELTA_PAYLOADS
+  ARTIFACT_NAME = 'NOT_APPLICABLE'
+
+  def __init__(self, install_dir, build, *args, **kwargs):
+    name = 'chromeos_%s*_delta_*' % build
+    install_subdir = os.path.join(_AU_BASE, build + _NTON_DIR_SUFFIX)
+    super(DeltaPayloadNtoN, self).__init__(name, install_dir, build, *args,
+                                           install_subdir=install_subdir,
+                                           **kwargs)
+
+
+class DeltaPayloadMtoN(DeltaPayloadBase):
+  """ChromeOS Delta payload artifact for updating from version M to N."""
+  ARTIFACT_TAG = artifact_info.DELTA_PAYLOADS
+  ARTIFACT_NAME = 'NOT_APPLICABLE'
+
+  def __init__(self, install_dir, build, *args, **kwargs):
+    name = ('chromeos_(?!%s).*_delta_.*' % re.escape(build))
+    install_subdir = os.path.join(_AU_BASE, build + _MTON_DIR_SUFFIX)
+    super(DeltaPayloadMtoN, self).__init__(name, install_dir, build, *args,
+                                           install_subdir=install_subdir,
+                                           is_regex_name=True, **kwargs)
+
+
+chromeos_artifact_map[artifact_info.DELTA_PAYLOADS] = [DeltaPayloadNtoN,
+                                                       DeltaPayloadMtoN]
+
+
+_AddCrOSArtifact(artifact_info.STATEFUL_PAYLOAD, Artifact,
+                 devserver_constants.STATEFUL_FILE)
+_AddCrOSArtifact(artifact_info.BASE_IMAGE, BundledArtifact, IMAGE_FILE,
+                 optional_name=BASE_IMAGE_FILE,
+                 files_to_extract=[devserver_constants.BASE_IMAGE_FILE])
+_AddCrOSArtifact(artifact_info.RECOVERY_IMAGE, BundledArtifact, IMAGE_FILE,
+                 optional_name=RECOVERY_IMAGE_FILE,
+                 files_to_extract=[devserver_constants.RECOVERY_IMAGE_FILE])
+_AddCrOSArtifact(artifact_info.DEV_IMAGE, BundledArtifact, IMAGE_FILE,
+                 files_to_extract=[devserver_constants.IMAGE_FILE])
+_AddCrOSArtifact(artifact_info.TEST_IMAGE, BundledArtifact, IMAGE_FILE,
+                 optional_name=TEST_IMAGE_FILE,
+                 files_to_extract=[devserver_constants.TEST_IMAGE_FILE])
+_AddCrOSArtifact(artifact_info.AUTOTEST, AutotestTarball, AUTOTEST_FILE,
+                 files_to_extract=None, exclude=['autotest/test_suites'])
+_AddCrOSArtifact(artifact_info.CONTROL_FILES, BundledArtifact,
+                 CONTROL_FILES_FILE)
+_AddCrOSArtifact(artifact_info.AUTOTEST_PACKAGES, AutotestTarball,
+                 AUTOTEST_PACKAGES_FILE)
+_AddCrOSArtifact(artifact_info.TEST_SUITES, BundledArtifact, TEST_SUITES_FILE)
+_AddCrOSArtifact(artifact_info.AU_SUITE, BundledArtifact, AU_SUITE_FILE)
+_AddCrOSArtifact(artifact_info.AUTOTEST_SERVER_PACKAGE, Artifact,
+                 AUTOTEST_SERVER_PACKAGE_FILE)
+_AddCrOSArtifact(artifact_info.FIRMWARE, Artifact, FIRMWARE_FILE)
+_AddCrOSArtifact(artifact_info.SYMBOLS, BundledArtifact, DEBUG_SYMBOLS_FILE,
+                 files_to_extract=['debug/breakpad'])
+_AddCrOSArtifact(artifact_info.FACTORY_IMAGE, BundledArtifact, FACTORY_FILE,
+                 files_to_extract=[devserver_constants.FACTORY_IMAGE_FILE])
 
 # Add all the paygen_au artifacts in one go.
-ARTIFACT_IMPLEMENTATION_MAP.update({
-    artifact_info.PAYGEN_AU_SUITE_TEMPLATE % {'channel': c}:
-    ImplDescription(
-        BundledBuildArtifact, PAYGEN_AU_SUITE_FILE_TEMPLATE % {'channel': c})
-    for c in devserver_constants.CHANNELS
-})
+for c in devserver_constants.CHANNELS:
+  _AddCrOSArtifact(artifact_info.PAYGEN_AU_SUITE_TEMPLATE % {'channel': c},
+                   BundledArtifact,
+                   PAYGEN_AU_SUITE_FILE_TEMPLATE % {'channel': c})
+
+android_artifact_map = {}
 
 
-class ArtifactFactory(object):
+def _AddAndroidArtifact(tag, base, name, *fixed_args, **fixed_kwargs):
+  """Add a data wrapper that describes an Android artifact's implementation to
+  android_artifact_map.
+  """
+  artifact = _CreateNewArtifact(tag, base, name, *fixed_args, **fixed_kwargs)
+  android_artifact_map.setdefault(tag, []).append(artifact)
+
+
+_AddAndroidArtifact(artifact_info.ANDROID_ZIP_IMAGES, BundledArtifact,
+                    ANDROID_IMAGE_ZIP, is_regex_name=True)
+_AddAndroidArtifact(artifact_info.ANDROID_RADIO_IMAGE, Artifact,
+                    ANDROID_RADIO_IMAGE)
+_AddAndroidArtifact(artifact_info.ANDROID_BOOTLOADER_IMAGE, Artifact,
+                    ANDROID_BOOTLOADER_IMAGE)
+_AddAndroidArtifact(artifact_info.ANDROID_FASTBOOT, Artifact, ANDROID_FASTBOOT)
+_AddAndroidArtifact(artifact_info.ANDROID_TEST_ZIP, BundledArtifact,
+                    ANDROID_TEST_ZIP, is_regex_name=True)
+
+class BaseArtifactFactory(object):
   """A factory class that generates build artifacts from artifact names."""
 
-  def __init__(self, download_dir, archive_url, artifacts, files,
-               build):
+  def __init__(self, artifact_map, download_dir, artifacts, files, build):
     """Initalizes the member variables for the factory.
 
     Args:
+      artifact_map: A map from artifact names to lists of Artifact classes.
       download_dir: A directory to which artifacts are downloaded.
-      archive_url: the Google Storage url of the bucket where the debug
-                   symbols for the desired build are stored.
       artifacts: List of artifacts to stage. These artifacts must be
                  defined in artifact_info.py and have a mapping in the
                  ARTIFACT_IMPLEMENTATION_MAP.
@@ -677,38 +648,14 @@
              as files into the download_dir.
       build: The name of the build.
     """
+    self.artifact_map = artifact_map
     self.download_dir = download_dir
-    self.archive_url = archive_url
     self.artifacts = artifacts
     self.files = files
     self.build = build
 
-  @staticmethod
-  def _GetDescriptionComponents(name, is_artifact):
-    """Returns components for constructing a BuildArtifact.
-
-    Args:
-      name: The artifact name / file pattern.
-      is_artifact: Whether this is a named (True) or file (False) artifact.
-
-    Returns:
-      A tuple consisting of the BuildArtifact subclass, name, and additional
-      list- and named-arguments.
-
-    Raises:
-      KeyError: if artifact doesn't exist in ARTIFACT_IMPLEMENTATION_MAP.
-    """
-
-    if is_artifact:
-      description = ARTIFACT_IMPLEMENTATION_MAP[name]
-    else:
-      description = ImplDescription(BuildArtifact, name)
-
-    return (description.artifact_class, description.name,
-            description.additional_args, description.additional_dargs)
-
   def _Artifacts(self, names, is_artifact):
-    """Returns the BuildArtifacts from |names|.
+    """Returns the Artifacts from |names|.
 
     If is_artifact is true, then these names define artifacts that must exist in
     the ARTIFACT_IMPLEMENTATION_MAP. Otherwise, treat as filenames to stage as
@@ -719,19 +666,17 @@
       is_artifact: Whether this is a named (True) or file (False) artifact.
 
     Returns:
-      An iterable of BuildArtifacts.
+      An iterable of Artifacts.
 
     Raises:
-      KeyError: if artifact doesn't exist in ARTIFACT_IMPLEMENTATION_MAP.
+      KeyError: if artifact doesn't exist in the artifact map.
     """
-    artifacts = []
-    for name in names:
-      artifact_class, path, args, dargs = self._GetDescriptionComponents(
-          name, is_artifact)
-      artifacts.append(artifact_class(self.download_dir, self.archive_url, path,
-                                      self.build, *args, **dargs))
-
-    return artifacts
+    if is_artifact:
+      classes = itertools.chain(*(self.artifact_map[name] for name in names))
+      return list(cls(self.download_dir, self.build) for cls in classes)
+    else:
+      return list(Artifact(name, self.download_dir, self.build)
+                  for name in names)
 
   def RequiredArtifacts(self):
     """Returns BuildArtifacts for the factory's artifacts.
@@ -771,10 +716,31 @@
     return self._Artifacts(optional_names - set(self.artifacts), True)
 
 
+class ChromeOSArtifactFactory(BaseArtifactFactory):
+  """A factory class that generates ChromeOS build artifacts from names."""
+
+  def __init__(self, download_dir, artifacts, files, build):
+    """Pass the ChromeOS artifact map to the base class."""
+    super(ChromeOSArtifactFactory, self).__init__(
+        chromeos_artifact_map, download_dir, artifacts, files, build)
+
+
+class AndroidArtifactFactory(BaseArtifactFactory):
+  """A factory class that generates Android build artifacts from names."""
+
+  def __init__(self, download_dir, artifacts, files, build):
+    """Pass the Android artifact map to the base class."""
+    super(AndroidArtifactFactory, self).__init__(
+        android_artifact_map, download_dir, artifacts, files, build)
+
+
 # A simple main to verify correctness of the artifact map when making simple
 # name changes.
 if __name__ == '__main__':
-  print 'ARTIFACT IMPLEMENTATION MAP (for debugging)'
-  print 'FORMAT: ARTIFACT -> IMPLEMENTATION (<class>_file)'
-  for key, value in sorted(ARTIFACT_IMPLEMENTATION_MAP.items()):
-    print '%s -> %s' % (key, value)
+  print('ARTIFACT IMPLEMENTATION MAPs (for debugging)')
+  print('FORMAT: ARTIFACT -> IMPLEMENTATION (<type>_file)')
+  for label, mapping in (('CHROMEOS', chromeos_artifact_map),
+                         ('ANDROID', android_artifact_map)):
+    print('%s:' % label)
+    for key, value in sorted(mapping.items()):
+      print('  %s -> %s' % (key, ', '.join(str(val) for val in value)))
diff --git a/build_artifact_unittest.py b/build_artifact_unittest.py
index 4470825..fa32e79 100755
--- a/build_artifact_unittest.py
+++ b/build_artifact_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python2
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -10,6 +10,9 @@
 the artifact download process. Please make sure to set up your boto file.
 """
 
+from __future__ import print_function
+
+import itertools
 import os
 import random
 import shutil
@@ -21,6 +24,7 @@
 
 import build_artifact
 import devserver_constants
+import downloader
 
 
 _VERSION = 'R26-3646.0.0-rc1'
@@ -100,6 +104,7 @@
 
 # pylint: disable=W0212
 class BuildArtifactTest(mox.MoxTestBase):
+  """Test different BuildArtifact operations."""
 
   def setUp(self):
     mox.MoxTestBase.setUp(self)
@@ -115,20 +120,17 @@
   def testBundledArtifactTypes(self):
     """Tests that all known bundled artifacts are either zip or tar files."""
     known_names = ['zip', '.tgz', '.tar', 'tar.bz2', 'tar.xz', 'tar.gz']
-    for d in build_artifact.ARTIFACT_IMPLEMENTATION_MAP.values():
-      if d.artifact_class == build_artifact.BundledBuildArtifact:
-        for name in known_names:
-          if d.name.endswith(name):
-            break
-        else:
-          self.assertTrue('False')
+    for d in itertools.chain(*build_artifact.chromeos_artifact_map.values()):
+      if issubclass(d, build_artifact.BundledArtifact):
+        self.assertTrue(any(d.ARTIFACT_NAME.endswith(name)
+                            for name in known_names))
 
   def testProcessBuildArtifact(self):
     """Processes a real tarball from GSUtil and stages it."""
-    artifact = build_artifact.BuildArtifact(
-        self.work_dir,
-        _TEST_GOLO_ARCHIVE, build_artifact.TEST_SUITES_FILE, _VERSION)
-    artifact.Process(False)
+    artifact = build_artifact.Artifact(
+        build_artifact.TEST_SUITES_FILE, self.work_dir, _VERSION)
+    dl = downloader.GoogleStorageDownloader(self.work_dir, _TEST_GOLO_ARCHIVE)
+    artifact.Process(dl, False)
     self.assertItemsEqual(
         artifact.installed_files,
         [os.path.join(self.work_dir, build_artifact.TEST_SUITES_FILE)])
@@ -138,14 +140,14 @@
 
   def testProcessTarball(self):
     """Downloads a real tarball and untars it."""
-    artifact = build_artifact.BundledBuildArtifact(
-        self.work_dir, _TEST_GOLO_ARCHIVE, build_artifact.TEST_SUITES_FILE,
-        _VERSION)
+    artifact = build_artifact.BundledArtifact(
+        build_artifact.TEST_SUITES_FILE, self.work_dir, _VERSION)
     expected_installed_files = [
         os.path.join(self.work_dir, filename)
         for filename in ([build_artifact.TEST_SUITES_FILE] +
                          _TEST_GOLO_ARCHIVE_TEST_TARBALL_CONTENT)]
-    artifact.Process(False)
+    dl = downloader.GoogleStorageDownloader(self.work_dir, _TEST_GOLO_ARCHIVE)
+    artifact.Process(dl, False)
     self.assertItemsEqual(artifact.installed_files, expected_installed_files)
     self.assertTrue(os.path.isdir(os.path.join(
         self.work_dir, 'autotest', 'test_suites')))
@@ -154,13 +156,14 @@
   def testProcessTarballWithFile(self):
     """Downloads a real tarball and only untars one file from it."""
     file_to_download = 'autotest/test_suites/control.au'
-    artifact = build_artifact.BundledBuildArtifact(
-        self.work_dir, _TEST_GOLO_ARCHIVE, build_artifact.TEST_SUITES_FILE,
-        _VERSION, files_to_extract=[file_to_download])
+    artifact = build_artifact.BundledArtifact(
+        build_artifact.TEST_SUITES_FILE, self.work_dir, _VERSION,
+        files_to_extract=[file_to_download])
     expected_installed_files = [
         os.path.join(self.work_dir, filename)
         for filename in [build_artifact.TEST_SUITES_FILE] + [file_to_download]]
-    artifact.Process(False)
+    dl = downloader.GoogleStorageDownloader(self.work_dir, _TEST_GOLO_ARCHIVE)
+    artifact.Process(dl, False)
     self.assertItemsEqual(artifact.installed_files, expected_installed_files)
     self.assertTrue(os.path.exists(os.path.join(
         self.work_dir, file_to_download)))
@@ -168,24 +171,24 @@
 
   def testDownloadAutotest(self):
     """Downloads a real autotest tarball for test."""
-    self.mox.StubOutWithMock(build_artifact.AutotestTarballBuildArtifact,
-                             '_Extract')
-    artifact = build_artifact.AutotestTarballBuildArtifact(
-        self.work_dir, _TEST_GOLO_ARCHIVE, build_artifact.AUTOTEST_FILE,
-        _VERSION, files_to_extract=None, exclude=['autotest/test_suites'])
+    self.mox.StubOutWithMock(build_artifact.AutotestTarball, '_Extract')
+    artifact = build_artifact.AutotestTarball(
+        build_artifact.AUTOTEST_FILE, self.work_dir, _VERSION,
+        files_to_extract=None, exclude=['autotest/test_suites'])
 
     install_dir = self.work_dir
     artifact.staging_dir = install_dir
     self.mox.StubOutWithMock(subprocess, 'check_call')
     subprocess.check_call(mox.In('autotest/utils/packager.py'), cwd=install_dir)
-    self.mox.StubOutWithMock(artifact, '_WaitForArtifactToExist')
+    self.mox.StubOutWithMock(downloader.GoogleStorageDownloader, 'Wait')
     self.mox.StubOutWithMock(artifact, '_UpdateName')
-    artifact._WaitForArtifactToExist(artifact.name, 1)
+    dl = downloader.GoogleStorageDownloader(self.work_dir, _TEST_GOLO_ARCHIVE)
+    dl.Wait(artifact.name, False, 1)
     artifact._UpdateName(mox.IgnoreArg())
-    artifact._Download()
+    dl.Fetch(artifact.name, install_dir)
     artifact._Extract()
     self.mox.ReplayAll()
-    artifact.Process(True)
+    artifact.Process(dl, True)
     self.mox.VerifyAll()
     self.assertItemsEqual(artifact.installed_files, [])
     self.assertTrue(os.path.isdir(
@@ -194,12 +197,12 @@
 
   def testAUTestPayloadBuildArtifact(self):
     """Downloads a real tarball and treats it like an AU payload."""
-    artifact = build_artifact.AUTestPayloadBuildArtifact(
-        self.work_dir, _TEST_GOLO_ARCHIVE, build_artifact.TEST_SUITES_FILE,
-        _VERSION)
+    artifact = build_artifact.AUTestPayload(
+        build_artifact.TEST_SUITES_FILE, self.work_dir, _VERSION)
     expected_installed_files = [
         os.path.join(self.work_dir, devserver_constants.UPDATE_FILE)]
-    artifact.Process(False)
+    dl = downloader.GoogleStorageDownloader(self.work_dir, _TEST_GOLO_ARCHIVE)
+    artifact.Process(dl, False)
     self.assertItemsEqual(artifact.installed_files, expected_installed_files)
     self.assertTrue(os.path.exists(os.path.join(
         self.work_dir, devserver_constants.UPDATE_FILE)))
@@ -207,31 +210,37 @@
 
   def testDeltaPayloadsArtifact(self):
     """Downloads delta paylaods from test bucket."""
-    artifact = build_artifact.DeltaPayloadsArtifact(
-        self.work_dir, _TEST_GOLO_FOR_DELTAS, 'DONTCARE', _DELTA_VERSION)
+    nton = build_artifact.DeltaPayloadNtoN(self.work_dir, _DELTA_VERSION)
+    mton = build_artifact.DeltaPayloadMtoN(self.work_dir, _DELTA_VERSION)
     delta_installed_files = ('update.gz', 'stateful.tgz')
     nton_dir = os.path.join(self.work_dir, 'au', '%s_nton' % _DELTA_VERSION)
     mton_dir = os.path.join(self.work_dir, 'au', '%s_mton' % _DELTA_VERSION)
-    expected_installed_files = ([os.path.join(nton_dir, filename)
-                                 for filename in delta_installed_files] +
-                                [os.path.join(mton_dir, filename)
-                                 for filename in delta_installed_files])
-    artifact.Process(False)
-    self.assertItemsEqual(artifact.installed_files, expected_installed_files)
+    dl = downloader.GoogleStorageDownloader(self.work_dir,
+                                            _TEST_GOLO_FOR_DELTAS)
+    nton.Process(dl, False)
+    mton.Process(dl, False)
+    self.assertItemsEqual(nton.installed_files,
+                          [os.path.join(nton_dir, filename)
+                           for filename in delta_installed_files])
+    self.assertItemsEqual(mton.installed_files,
+                          [os.path.join(mton_dir, filename)
+                           for filename in delta_installed_files])
     self.assertTrue(os.path.exists(os.path.join(nton_dir, 'update.gz')))
     self.assertTrue(os.path.exists(os.path.join(mton_dir, 'update.gz')))
-    self._CheckMarker(artifact.marker_name, artifact.installed_files)
+    self._CheckMarker(nton.marker_name, nton.installed_files)
+    self._CheckMarker(mton.marker_name, mton.installed_files)
 
   def testImageUnzip(self):
     """Downloads and stages a zip file and extracts a test image."""
     files_to_extract = ['chromiumos_test_image.bin']
-    artifact = build_artifact.BundledBuildArtifact(
-        self.work_dir, _TEST_GOLO_ARCHIVE, build_artifact.IMAGE_FILE,
-        _VERSION, files_to_extract=files_to_extract)
+    artifact = build_artifact.BundledArtifact(
+        build_artifact.IMAGE_FILE, self.work_dir, _VERSION,
+        files_to_extract=files_to_extract)
     expected_installed_files = [
         os.path.join(self.work_dir, filename)
         for filename in [build_artifact.IMAGE_FILE] + files_to_extract]
-    artifact.Process(False)
+    dl = downloader.GoogleStorageDownloader(self.work_dir, _TEST_GOLO_ARCHIVE)
+    artifact.Process(dl, False)
     self.assertItemsEqual(expected_installed_files, artifact.installed_files)
     self.assertTrue(os.path.exists(os.path.join(
         self.work_dir, 'chromiumos_test_image.bin')))
@@ -239,16 +248,16 @@
 
   def testImageUnzipWithExcludes(self):
     """Downloads and stages a zip file while excluding all large files."""
-    artifact = build_artifact.BundledBuildArtifact(
-        self.work_dir, _TEST_GOLO_ARCHIVE, build_artifact.IMAGE_FILE,
-        _VERSION, exclude=['*.bin'])
+    artifact = build_artifact.BundledArtifact(
+        build_artifact.IMAGE_FILE, self.work_dir, _VERSION, exclude=['*.bin'])
     expected_extracted_files = [
         filename for filename in _TEST_GOLO_ARCHIVE_IMAGE_ZIPFILE_CONTENT
         if not filename.endswith('.bin')]
     expected_installed_files = [
         os.path.join(self.work_dir, filename)
         for filename in [build_artifact.IMAGE_FILE] + expected_extracted_files]
-    artifact.Process(False)
+    dl = downloader.GoogleStorageDownloader(self.work_dir, _TEST_GOLO_ARCHIVE)
+    artifact.Process(dl, False)
     self.assertItemsEqual(expected_installed_files, artifact.installed_files)
     self.assertFalse(os.path.exists(os.path.join(
         self.work_dir, 'chromiumos_test_image.bin')))
@@ -256,12 +265,12 @@
 
   def testArtifactFactory(self):
     """Tests that BuildArtifact logic works for both named and file artifacts.
+
     """
     name_artifact = 'test_suites' # This file is in every real GS dir.
     file_artifact = 'metadata.json' # This file is in every real GS dir.
-    factory = build_artifact.ArtifactFactory(self.work_dir, _TEST_GOLO_ARCHIVE,
-                                             [name_artifact], [file_artifact],
-                                             _VERSION)
+    factory = build_artifact.ChromeOSArtifactFactory(
+        self.work_dir, [name_artifact], [file_artifact], _VERSION)
     artifacts = factory.RequiredArtifacts()
     self.assertEqual(len(artifacts), 2)
     expected_installed_files_0 = [
@@ -269,8 +278,9 @@
         in ([build_artifact.TEST_SUITES_FILE] +
             _TEST_GOLO_ARCHIVE_TEST_TARBALL_CONTENT)]
     expected_installed_files_1 = [os.path.join(self.work_dir, file_artifact)]
-    artifacts[0].Process(False)
-    artifacts[1].Process(False)
+    dl = downloader.GoogleStorageDownloader(self.work_dir, _TEST_GOLO_ARCHIVE)
+    artifacts[0].Process(dl, False)
+    artifacts[1].Process(dl, False)
     self.assertItemsEqual(artifacts[0].installed_files,
                           expected_installed_files_0)
     self.assertItemsEqual(artifacts[1].installed_files,
@@ -286,11 +296,12 @@
 
   def testProcessBuildArtifactWithException(self):
     """Test processing a non-existing artifact from GSUtil."""
-    artifact = build_artifact.BuildArtifact(
-        self.work_dir, _TEST_NON_EXISTING_GOLO_ARCHIVE,
-        build_artifact.TEST_SUITES_FILE, _VERSION)
+    artifact = build_artifact.Artifact(
+        build_artifact.TEST_SUITES_FILE, self.work_dir, _VERSION)
     try:
-      artifact.Process(False)
+      dl = downloader.GoogleStorageDownloader(self.work_dir,
+                                              _TEST_NON_EXISTING_GOLO_ARCHIVE)
+      artifact.Process(dl, False)
     except Exception as e:
       expected_exception = e
     exception = artifact.GetException()
@@ -298,14 +309,14 @@
 
   def testArtifactStaged(self):
     """Tests the artifact staging verification logic."""
-    artifact = build_artifact.BundledBuildArtifact(
-        self.work_dir, _TEST_GOLO_ARCHIVE, build_artifact.TEST_SUITES_FILE,
-        _VERSION)
+    artifact = build_artifact.BundledArtifact(
+        build_artifact.TEST_SUITES_FILE, self.work_dir, _VERSION)
     expected_installed_files = [
         os.path.join(self.work_dir, filename)
         for filename in ([build_artifact.TEST_SUITES_FILE] +
                          _TEST_GOLO_ARCHIVE_TEST_TARBALL_CONTENT)]
-    artifact.Process(False)
+    dl = downloader.GoogleStorageDownloader(self.work_dir, _TEST_GOLO_ARCHIVE)
+    artifact.Process(dl, False)
 
     # Check that it works when all files are there.
     self.assertTrue(artifact.ArtifactStaged())
diff --git a/devserver.py b/devserver.py
index b4bb063..5d627dd 100755
--- a/devserver.py
+++ b/devserver.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python2
 
 # Copyright (c) 2009-2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -39,6 +39,7 @@
 how to update and which payload to give to a requester.
 """
 
+from __future__ import print_function
 
 import json
 import optparse
@@ -119,20 +120,21 @@
 
 
 def require_psutil():
-  """Decorator for functions require psutil to run.
-  """
+  """Decorator for functions that require psutil to run."""
   def deco_require_psutil(func):
     """Wrapper of the decorator function.
 
-    @param func: function to be called.
+    Args:
+      func: function to be called.
     """
     def func_require_psutil(*args, **kwargs):
       """Decorator for functions require psutil to run.
 
       If psutil is not installed, skip calling the function.
 
-      @param args: arguments for function to be called.
-      @param kwargs: keyword arguments for function to be called.
+      Args:
+        *args: arguments for function to be called.
+        **kwargs: keyword arguments for function to be called.
       """
       if psutil:
         return func(*args, **kwargs)
@@ -143,6 +145,116 @@
   return deco_require_psutil
 
 
+def _canonicalize_archive_url(archive_url):
+  """Canonicalizes archive_url strings.
+
+  Raises:
+    DevServerError: if archive_url is not set.
+  """
+  if archive_url:
+    if not archive_url.startswith('gs://'):
+      raise DevServerError("Archive URL isn't from Google Storage (%s) ." %
+                           archive_url)
+
+    return archive_url.rstrip('/')
+  else:
+    raise DevServerError("Must specify an archive_url in the request")
+
+
+def _canonicalize_local_path(local_path):
+  """Canonicalizes |local_path| strings.
+
+  Raises:
+    DevServerError: if |local_path| is not set.
+  """
+  # Restrict staging of local content to only files within the static
+  # directory.
+  local_path = os.path.abspath(local_path)
+  if not local_path.startswith(updater.static_dir):
+    raise DevServerError('Local path %s must be a subdirectory of the static'
+                         ' directory: %s' % (local_path, updater.static_dir))
+
+  return local_path.rstrip('/')
+
+
+def _get_artifacts(kwargs):
+  """Returns a tuple of named and file artifacts given the stage rpc kwargs.
+
+  Raises:
+    DevServerError: if no artifacts would be returned.
+  """
+  artifacts = kwargs.get('artifacts')
+  files = kwargs.get('files')
+  if not artifacts and not files:
+    raise DevServerError('No artifacts specified.')
+
+  # Note we NEED to coerce files to a string as we get raw unicode from
+  # cherrypy and we treat files as strings elsewhere in the code.
+  return (str(artifacts).split(',') if artifacts else [],
+          str(files).split(',') if files else [])
+
+
+def _get_downloader(kwargs):
+  """Returns the downloader based on passed in arguments.
+
+  Args:
+    kwargs: Keyword arguments for the request.
+  """
+  local_path = kwargs.get('local_path')
+  if local_path:
+    local_path = _canonicalize_local_path(local_path)
+
+  dl = None
+  if local_path:
+    dl = downloader.LocalDownloader(updater.static_dir, local_path)
+
+  # Only Android build requires argument build_id. If it's not set, assume
+  # the download request is for ChromeOS.
+  build_id = kwargs.get('build_id', None)
+  if not build_id:
+    archive_url = kwargs.get('archive_url')
+    if not archive_url and not local_path:
+      raise DevServerError('Requires archive_url or local_path to be '
+                           'specified.')
+    if archive_url and local_path:
+      raise DevServerError('archive_url and local_path can not both be '
+                           'specified.')
+    if not dl:
+      archive_url = _canonicalize_archive_url(archive_url)
+      dl = downloader.GoogleStorageDownloader(updater.static_dir, archive_url)
+  elif not dl:
+    target = kwargs.get('target', None)
+    if not target:
+      raise DevServerError('target must be specified for Android build.')
+    dl = downloader.LaunchControlDownloader(updater.static_dir, build_id,
+                                            target)
+
+  return dl
+
+
+def _get_downloader_and_factory(kwargs):
+  """Returns the downloader and artifact factory based on passed in arguments.
+
+  Args:
+    kwargs: Keyword arguments for the request.
+  """
+  artifacts, files = _get_artifacts(kwargs)
+  dl = _get_downloader(kwargs)
+
+  if (isinstance(dl, downloader.GoogleStorageDownloader) or
+      isinstance(dl, downloader.LocalDownloader)):
+    factory_class = build_artifact.ChromeOSArtifactFactory
+  elif isinstance(dl, downloader.LaunchControlDownloader):
+    factory_class = build_artifact.AndroidArtifactFactory
+  else:
+    raise DevServerError('Unrecognized value for downloader type: %s' %
+                         type(dl))
+
+  factory = factory_class(dl.GetBuildDir(), artifacts, files, dl.GetBuild())
+
+  return dl, factory
+
+
 def _LeadingWhiteSpaceCount(string):
   """Count the amount of leading whitespace in a string.
 
@@ -152,7 +264,7 @@
   Returns:
     number of white space chars before characters start.
   """
-  matched = re.match('^\s+', string)
+  matched = re.match(r'^\s+', string)
   if matched:
     return len(matched.group())
 
@@ -216,40 +328,39 @@
   cherrypy.tools.update_timestamp = cherrypy.Tool(
       'on_end_resource', _GetUpdateTimestampHandler(options.static_dir))
 
-  base_config = { 'global':
-                  { 'server.log_request_headers': True,
-                    'server.protocol_version': 'HTTP/1.1',
-                    'server.socket_host': socket_host,
-                    'server.socket_port': int(options.port),
-                    'response.timeout': 6000,
-                    'request.show_tracebacks': True,
-                    'server.socket_timeout': 60,
-                    'server.thread_pool': 2,
-                    'engine.autoreload.on': False,
-                  },
-                  '/api':
-                  {
-                    # Gets rid of cherrypy parsing post file for args.
-                    'request.process_request_body': False,
-                  },
-                  '/build':
-                  {
-                    'response.timeout': 100000,
-                  },
-                  '/update':
-                  {
-                    # Gets rid of cherrypy parsing post file for args.
-                    'request.process_request_body': False,
-                    'response.timeout': 10000,
-                  },
-                  # Sets up the static dir for file hosting.
-                  '/static':
-                  { 'tools.staticdir.dir': options.static_dir,
-                    'tools.staticdir.on': True,
-                    'response.timeout': 10000,
-                    'tools.update_timestamp.on': True,
-                  },
-                }
+  base_config = {'global':
+                 {'server.log_request_headers': True,
+                  'server.protocol_version': 'HTTP/1.1',
+                  'server.socket_host': socket_host,
+                  'server.socket_port': int(options.port),
+                  'response.timeout': 6000,
+                  'request.show_tracebacks': True,
+                  'server.socket_timeout': 60,
+                  'server.thread_pool': 2,
+                  'engine.autoreload.on': False,
+                 },
+                 '/api':
+                 {
+                  # Gets rid of cherrypy parsing post file for args.
+                  'request.process_request_body': False,
+                 },
+                 '/build':
+                 {'response.timeout': 100000,
+                 },
+                 '/update':
+                 {
+                  # Gets rid of cherrypy parsing post file for args.
+                  'request.process_request_body': False,
+                  'response.timeout': 10000,
+                 },
+                 # Sets up the static dir for file hosting.
+                 '/static':
+                 {'tools.staticdir.dir': options.static_dir,
+                  'tools.staticdir.on': True,
+                  'response.timeout': 10000,
+                  'tools.update_timestamp.on': True,
+                 },
+               }
   if options.production:
     base_config['global'].update({'server.thread_pool': 150})
     # TODO(sosa): Do this more cleanly.
@@ -296,7 +407,7 @@
   """
   method = (not (ignored and nested_member in ignored) and
             _GetRecursiveMemberObject(root, nested_member.split('/')))
-  if (method and type(method) == types.FunctionType and _IsExposed(method)):
+  if method and type(method) == types.FunctionType and _IsExposed(method):
     return method
 
 
@@ -479,8 +590,7 @@
 
   @require_psutil()
   def _start_io_stat_thread(self):
-    """Start the thread to collect IO stats.
-    """
+    """Start the thread to collect IO stats."""
     thread = threading.Thread(target=self._refresh_io_stats)
     thread.daemon = True
     thread.start()
@@ -500,23 +610,6 @@
     self.network_recv_bytes_per_sec = 0
     self._start_io_stat_thread()
 
-  @staticmethod
-  def _get_artifacts(kwargs):
-    """Returns a tuple of named and file artifacts given the stage rpc kwargs.
-
-    Raises:
-      DevserverError if no artifacts would be returned.
-    """
-    artifacts = kwargs.get('artifacts')
-    files = kwargs.get('files')
-    if not artifacts and not files:
-      raise DevServerError('No artifacts specified.')
-
-    # Note we NEED to coerce files to a string as we get raw unicode from
-    # cherrypy and we treat files as strings elsewhere in the code.
-    return (str(artifacts).split(',') if artifacts else [],
-            str(files).split(',') if files else [])
-
   @cherrypy.expose
   def build(self, board, pkg, **kwargs):
     """Builds the package specified."""
@@ -525,38 +618,6 @@
       self._builder = builder.Builder()
     return self._builder.Build(board, pkg, kwargs)
 
-  @staticmethod
-  def _canonicalize_archive_url(archive_url):
-    """Canonicalizes archive_url strings.
-
-    Raises:
-      DevserverError: if archive_url is not set.
-    """
-    if archive_url:
-      if not archive_url.startswith('gs://'):
-        raise DevServerError("Archive URL isn't from Google Storage (%s) ." %
-                             archive_url)
-
-      return archive_url.rstrip('/')
-    else:
-      raise DevServerError("Must specify an archive_url in the request")
-
-  @staticmethod
-  def _canonicalize_local_path(local_path):
-    """Canonicalizes |local_path| strings.
-
-    Raises:
-      DevserverError: if |local_path| is not set.
-    """
-    # Restrict staging of local content to only files within the static
-    # directory.
-    local_path = os.path.abspath(local_path)
-    if not local_path.startswith(updater.static_dir):
-      raise DevServerError('Local path %s must be a subdirectory of the static'
-                           ' directory: %s' % (local_path, updater.static_dir))
-
-    return local_path.rstrip('/')
-
   @cherrypy.expose
   def is_staged(self, **kwargs):
     """Check if artifacts have been downloaded.
@@ -576,10 +637,8 @@
         http://devserver_url:<port>/is_staged?archive_url=gs://your_url/path&
             artifacts=autotest,test_suites
     """
-    archive_url = self._canonicalize_archive_url(kwargs.get('archive_url'))
-    artifacts, files = self._get_artifacts(kwargs)
-    return str(downloader.Downloader(updater.static_dir, archive_url).IsStaged(
-        artifacts, files))
+    dl, factory = _get_downloader_and_factory(kwargs)
+    return str(dl.IsStaged(factory))
 
   @cherrypy.expose
   def list_image_dir(self, **kwargs):
@@ -597,24 +656,24 @@
     Returns:
       A string with information about the contents of the image directory.
     """
-    archive_url = self._canonicalize_archive_url(kwargs.get('archive_url'))
-    download_helper = downloader.Downloader(updater.static_dir, archive_url)
+    dl = _get_downloader(kwargs)
     try:
-      image_dir_contents = download_helper.ListBuildDir()
+      image_dir_contents = dl.ListBuildDir()
     except build_artifact.ArtifactDownloadError as e:
       return 'Cannot list the contents of staged artifacts. %s' % e
     if not image_dir_contents:
-      return '%s has not been staged on this devserver.' % archive_url
+      return '%s has not been staged on this devserver.' % dl.DescribeSource()
     return image_dir_contents
 
   @cherrypy.expose
   def stage(self, **kwargs):
-    """Downloads and caches the artifacts from Google Storage URL.
+    """Downloads and caches build artifacts.
 
-    Downloads and caches the artifacts Google Storage URL. Returns once these
-    have been downloaded on the devserver. A call to this will attempt to cache
-    non-specified artifacts in the background for the given from the given URL
-    following the principle of spatial locality. Spatial locality of different
+    Downloads and caches build artifacts, possibly from a Google Storage URL,
+    or from Android's LaunchControl. Returns once these have been downloaded
+    on the devserver. A call to this will attempt to cache non-specified
+    artifacts in the background from the given URL, following
+    the principle of spatial locality. Spatial locality of different
     artifacts is explicitly defined in the build_artifact module.
 
     These artifacts will then be available from the static/ sub-directory of
@@ -654,26 +713,13 @@
 
       http://devserver_url:<port>/static/x86-mario-release/R26-3920.0.0
     """
-    archive_url = kwargs.get('archive_url')
-    local_path = kwargs.get('local_path')
-    if not archive_url and not local_path:
-      raise DevServerError('Requires archive_url or local_path to be '
-                           'specified.')
-    if archive_url and local_path:
-      raise DevServerError('archive_url and local_path can not both be '
-                           'specified.')
-    if archive_url:
-      archive_url = self._canonicalize_archive_url(archive_url)
-    if local_path:
-      local_path = self._canonicalize_local_path(local_path)
-    async = kwargs.get('async', False)
-    artifacts, files = self._get_artifacts(kwargs)
+    dl, factory = _get_downloader_and_factory(kwargs)
+
     with DevServerRoot._staging_thread_count_lock:
       DevServerRoot._staging_thread_count += 1
     try:
-      downloader.Downloader(
-          updater.static_dir, (archive_url or local_path)).Download(
-              artifacts, files, async=async)
+      async = kwargs.get('async', False)
+      dl.Download(factory, async=async)
     finally:
       with DevServerRoot._staging_thread_count_lock:
         DevServerRoot._staging_thread_count -= 1
@@ -692,10 +738,9 @@
     Returns:
       Path to the source folder for the telemetry codebase once it is staged.
     """
-    archive_url = kwargs.get('archive_url')
+    dl = _get_downloader(kwargs)
 
-    build = '/'.join(downloader.Downloader.ParseUrl(archive_url))
-    build_path = os.path.join(updater.static_dir, build)
+    build_path = dl.GetBuildDir()
     deps_path = os.path.join(build_path, 'autotest/packages')
     telemetry_path = os.path.join(build_path, TELEMETRY_FOLDER)
     src_folder = os.path.join(telemetry_path, 'src')
@@ -727,8 +772,9 @@
       except shutil.Error:
         # This can occur if src_folder already exists. Remove and retry move.
         shutil.rmtree(src_folder)
-        raise DevServerError('Failure in telemetry setup for build %s. Appears'
-                             ' that the test_src to src move failed.' % build)
+        raise DevServerError(
+            'Failure in telemetry setup for build %s. Appears that the '
+            'test_src to src move failed.' % dl.GetBuild())
 
       return src_folder
 
@@ -745,10 +791,13 @@
       archive_url: Google Storage URL for the build.
       minidump: The binary minidump file to symbolicate.
     """
+    kwargs['artifacts'] = 'symbols'
+    dl = _get_downloader(kwargs)
+
     # Ensure the symbols have been staged.
-    archive_url = self._canonicalize_archive_url(kwargs.get('archive_url'))
-    if self.stage(archive_url=archive_url, artifacts='symbols') != 'Success':
-      raise DevServerError('Failed to stage symbols for %s' % archive_url)
+    if self.stage(**kwargs) != 'Success':
+      raise DevServerError('Failed to stage symbols for %s' %
+                           dl.DescribeSource())
 
     to_return = ''
     with tempfile.NamedTemporaryFile() as local:
@@ -760,8 +809,7 @@
 
       local.flush()
 
-      symbols_directory = os.path.join(downloader.Downloader.GetBuildDir(
-          updater.static_dir, archive_url), 'debug', 'breakpad')
+      symbols_directory = os.path.join(dl.GetBuildDir(), 'debug', 'breakpad')
 
       stackwalk = subprocess.Popen(
           ['minidump_stackwalk', local.name, symbols_directory],
@@ -862,7 +910,7 @@
       or in Google Storage.
     """
     build_id, filename = self._xbuddy.Translate(
-          args, image_dir=kwargs.get('image_dir'))
+        args, image_dir=kwargs.get('image_dir'))
     response = os.path.join(build_id, filename)
     _Log('Path translation requested, returning: %s', response)
     return response
@@ -1069,7 +1117,8 @@
   def _get_io_stats(self):
     """Get the IO stats as a dictionary.
 
-    @return: A dictionary of IO stats collected by psutil.
+    Returns:
+      A dictionary of IO stats collected by psutil.
 
     """
     return {'disk_read_bytes_per_second': self.disk_read_bytes_per_sec,
@@ -1139,7 +1188,7 @@
                    action='store_true', default=False,
                    help='record history of host update events (/api/hostlog)')
   group.add_option('--max_updates',
-                   metavar='NUM', default= -1, type='int',
+                   metavar='NUM', default=-1, type='int',
                    help='maximum number of update checks handled positively '
                         '(default: unlimited)')
   group.add_option('--private_key',
@@ -1173,8 +1222,8 @@
                    'protocol, such as hardware class, being sent.')
   group.add_option('-u', '--urlbase',
                    metavar='URL',
-                     help='base URL for update images, other than the '
-                     'devserver. Use in conjunction with remote_payload.')
+                   help='base URL for update images, other than the '
+                   'devserver. Use in conjunction with remote_payload.')
   parser.add_option_group(group)
 
 
@@ -1333,8 +1382,8 @@
       board=options.board,
       copy_to_static_root=not options.exit,
       private_key=options.private_key,
-      private_key_for_metadata_hash_signature=
-        options.private_key_for_metadata_hash_signature,
+      private_key_for_metadata_hash_signature=(
+          options.private_key_for_metadata_hash_signature),
       public_key=options.public_key,
       critical_update=options.critical_update,
       remote_payload=options.remote_payload,
diff --git a/devserver_constants.py b/devserver_constants.py
index 35ac8b9..84ef53a 100644
--- a/devserver_constants.py
+++ b/devserver_constants.py
@@ -43,3 +43,8 @@
 METADATA_HASH_FILE = 'metadata_hash'
 STATEFUL_FILE = 'stateful.tgz'
 UPDATE_FILE = 'update.gz'
+
+#### Android files
+ANDROID_BOOT_IMAGE_FILE = 'boot.img'
+ANDROID_SYSTEM_IMAGE_FILE = 'system.img'
+ANDROID_FASTBOOT = 'fastboot'
diff --git a/devserver_integration_test.py b/devserver_integration_test.py
index 49eea74..d37d62d 100755
--- a/devserver_integration_test.py
+++ b/devserver_integration_test.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python2
 
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -18,6 +18,8 @@
   ./devserver_integration_tests.py DevserverIntegrationTests
 """
 
+from __future__ import print_function
+
 import devserver_constants
 import json
 import logging
@@ -176,7 +178,7 @@
       try:
         self.port = self._ReadIntValueFromFile(self.portfile, 'portfile')
         self.devserver_url = 'http://127.0.0.1:%d' % self.port
-        self._MakeRPC(CHECK_HEALTH, timeout=0.1)
+        self._MakeRPC(CHECK_HEALTH, timeout=1)
         break
       except Exception:
         time.sleep(DEVSERVER_START_SLEEP)
@@ -434,6 +436,7 @@
 
   def testStageAndUpdate(self):
     """Tests core autotest workflow where we stage/update with a test payload.
+
     """
     build_id = 'x86-mario-release/R32-4810.0.0'
     archive_url = 'gs://chromeos-image-archive/%s' % build_id
@@ -463,6 +466,7 @@
 
   def testStageAutotestAndGetPackages(self):
     """Another autotest workflow test where we stage/update with a test payload.
+
     """
     build_id = 'x86-mario-release/R32-4810.0.0'
     archive_url = 'gs://chromeos-image-archive/%s' % build_id
@@ -491,6 +495,7 @@
 
   def testRemoteXBuddyAlias(self):
     """Another autotest workflow test where we stage/update with a test payload.
+
     """
     build_id = 'x86-mario-release/R32-4810.0.0'
     xbuddy_path = 'remote/x86-mario/R32-4810.0.0/full_payload'
diff --git a/downloader.py b/downloader.py
index 2a319d6..554ce33 100755
--- a/downloader.py
+++ b/downloader.py
@@ -1,14 +1,24 @@
+#!/usr/bin/python2
+#
 # Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+"""Downloaders used to download artifacts and files from a given source."""
+
+from __future__ import print_function
+
 import collections
+import glob
 import os
+import re
+import shutil
 import threading
 from datetime import datetime
 
 import build_artifact
 import common_util
+import gsutil_util
 import log_util
 
 
@@ -23,7 +33,8 @@
   def __init__(self, exceptions):
     """Initialize a DownloaderException instance with a list of exceptions.
 
-    @param exceptions: Exceptions raised when downloading artifacts.
+    Args:
+      exceptions: Exceptions raised when downloading artifacts.
     """
     message = 'Exceptions were raised when downloading artifacts.'
     Exception.__init__(self, message)
@@ -39,92 +50,40 @@
 class Downloader(log_util.Loggable):
   """Downloader of images to the devsever.
 
+  This is the base class for different types of downloaders, including
+  GoogleStorageDownloader, LocalDownloader and LaunchControlDownloader.
+
   Given a URL to a build on the archive server:
     - Caches that build and the given artifacts onto the devserver.
     - May also initiate caching of related artifacts in the background.
 
   Private class members:
-    archive_url: a URL where to download build artifacts from.
     static_dir: local filesystem directory to store all artifacts.
     build_dir: the local filesystem directory to store artifacts for the given
-      build defined by the archive_url.
+      build based on the remote source.
+
+  Public methods must be overridden:
+    Wait: Verifies the local artifact exists and returns the appropriate names.
+    Fetch: Downloads artifact from given source to a local directory.
+    DescribeSource: Gets the source of the download, e.g., a url to GS.
   """
 
   # This filename must be kept in sync with clean_staged_images.py
   _TIMESTAMP_FILENAME = 'staged.timestamp'
 
-  def __init__(self, static_dir, archive_url):
+  def __init__(self, static_dir, build_dir, build):
     super(Downloader, self).__init__()
-    self._archive_url = archive_url
     self._static_dir = static_dir
-    self._build_dir = Downloader.GetBuildDir(static_dir, archive_url)
+    self._build_dir = build_dir
+    self._build = build
 
-  @staticmethod
-  def ParseUrl(path_or_url):
-    """Parses |path_or_url| into build relative path and the shorter build name.
+  def GetBuildDir(self):
+    """Returns the path to where the artifacts will be staged."""
+    return self._build_dir
 
-    Args:
-      path_or_url: a local path or URL at which build artifacts are archived.
-
-    Returns:
-      A tuple of (build relative path, short build name)
-    """
-    if path_or_url.startswith('gs://'):
-      return Downloader.ParseGSUrl(path_or_url)
-    return Downloader.ParseLocalPath(path_or_url)
-
-  @staticmethod
-  def ParseGSUrl(archive_url):
-    """Parses |path_or_url| into build relative path and the shorter build name.
-
-    Parses archive_url into rel_path and build e.g.
-    gs://chromeos-image-archive/{rel_path}/{build}.
-
-    Args:
-      archive_url: a URL at which build artifacts are archived.
-
-    Returns:
-      A tuple of (build relative path, short build name)
-    """
-    # The archive_url is of the form gs://server/[some_path/target]/...]/build
-    # This function discards 'gs://server/' and extracts the [some_path/target]
-    # as rel_path and the build as build.
-    sub_url = archive_url.partition('://')[2]
-    split_sub_url = sub_url.split('/')
-    rel_path = '/'.join(split_sub_url[1:-1])
-    build = split_sub_url[-1]
-    return rel_path, build
-
-  @staticmethod
-  def ParseLocalPath(local_path):
-    """Parses local_path into rel_path and build.
-
-    Parses a local path into rel_path and build e.g.
-    /{path to static dir}/{rel_path}/{build}.
-
-    Args:
-      local_path: a local path that the build artifacts are stored. Must be a
-                  subpath of the static directory.
-
-    Returns:
-      A tuple of (build relative path, short build name)
-    """
-    rel_path = os.path.basename(os.path.dirname(local_path))
-    build = os.path.basename(local_path)
-    return rel_path, build
-
-  @staticmethod
-  def GetBuildDir(static_dir, archive_url):
-    """Returns the path to where the artifacts will be staged.
-
-    Args:
-      static_dir: The base static dir that will be used.
-      archive_url: The gs path to the archive url.
-    """
-    # Parse archive_url into rel_path (contains the build target) and
-    # build e.g. gs://chromeos-image-archive/{rel_path}/{build}.
-    rel_path, build = Downloader.ParseUrl(archive_url)
-    return os.path.join(static_dir, rel_path, build)
+  def GetBuild(self):
+    """Returns the name of the build, e.g., R46-7390.0.0 or shamu-userdebug/123456."""
+    return self._build
 
   @staticmethod
   def TouchTimestampForStaged(directory_path):
@@ -141,8 +100,8 @@
     is the only file in that directory. The build could be non-existing, and
     the directory should be removed.
 
-    @param directory_path: directory used to stage the image.
-
+    Args:
+      directory_path: directory used to stage the image.
     """
     file_name = os.path.join(directory_path, Downloader._TIMESTAMP_FILENAME)
     if os.path.exists(file_name) and len(os.listdir(directory_path)) == 1:
@@ -171,9 +130,9 @@
           'not a directory.' % (self._archive_url, self._build_dir))
 
     ls_format = collections.namedtuple(
-            'ls', ['name', 'accessed', 'modified', 'size'])
+        'ls', ['name', 'accessed', 'modified', 'size'])
     output_format = ('Name: %(name)s Accessed: %(accessed)s '
-            'Modified: %(modified)s Size: %(size)s bytes.\n')
+                     'Modified: %(modified)s Size: %(size)s bytes.\n')
 
     build_dir_info = 'Listing contents of :%s \n' % self._build_dir
     for file_name in os.listdir(self._build_dir):
@@ -186,18 +145,15 @@
       build_dir_info += output_format % ls_info._asdict()
     return build_dir_info
 
-  def Download(self, artifacts, files, async=False):
+  def Download(self, factory, async=False):
     """Downloads and caches the |artifacts|.
 
-    Downloads and caches the |artifacts|. Returns once these
-    are present on the devserver. A call to this will attempt to cache
-    non-specified artifacts in the background following the principle of
-    spatial locality.
+    Downloads and caches the |artifacts|. Returns once these are present on the
+    devserver. A call to this will attempt to cache non-specified artifacts in
+    the background following the principle of spatial locality.
 
     Args:
-      artifacts: A list of artifact names that correspond to
-                 artifacts defined in artifact_info.py to stage.
-     files: A list of filenames to stage from an archive_url.
+     factory: The artifact factory.
      async: If True, return without waiting for download to complete.
 
     Raises:
@@ -211,10 +167,6 @@
     Downloader.TouchTimestampForStaged(self._build_dir)
 
     # Create factory to create build_artifacts from artifact names.
-    build = self.ParseUrl(self._archive_url)[1]
-    factory = build_artifact.ArtifactFactory(
-        self._build_dir, self._archive_url, artifacts, files,
-        build)
     background_artifacts = factory.OptionalArtifacts()
     if background_artifacts:
       self._DownloadArtifactsInBackground(background_artifacts)
@@ -228,20 +180,21 @@
     else:
       self._DownloadArtifactsSerially(required_artifacts, no_wait=True)
 
-  def IsStaged(self, artifacts, files):
+  def IsStaged(self, factory):
     """Check if all artifacts have been downloaded.
 
-    artifacts: A list of artifact names that correspond to
-               artifacts defined in artifact_info.py to stage.
-    files: A list of filenames to stage from an archive_url.
-    @returns: True if all artifacts are staged.
-    @raise exception: that was raised by any artifact when calling Process.
+    Args:
+      factory: An instance of BaseArtifactFactory to be used to check if desired
+               artifacts or files are staged.
+
+    Returns:
+      True if all artifacts are staged.
+
+    Raises:
+      DownloaderException: A wrapper for exceptions raised by any artifact when
+                           calling Process.
 
     """
-    # Create factory to create build_artifacts from artifact names.
-    build = self.ParseUrl(self._archive_url)[1]
-    factory = build_artifact.ArtifactFactory(
-        self._build_dir, self._archive_url, artifacts, files, build)
     required_artifacts = factory.RequiredArtifacts()
     exceptions = [artifact.GetException() for artifact in required_artifacts if
                   artifact.GetException()]
@@ -266,7 +219,7 @@
     """
     try:
       for artifact in artifacts:
-        artifact.Process(no_wait)
+        artifact.Process(self, no_wait)
     except build_artifact.ArtifactDownloadError:
       Downloader._TryRemoveStageDir(self._build_dir)
       raise
@@ -284,3 +237,196 @@
     thread = threading.Thread(target=self._DownloadArtifactsSerially,
                               args=(artifacts, False))
     thread.start()
+
+  def Wait(self, name, is_regex_name, timeout):
+    """Waits for artifact to exist and returns the appropriate names.
+
+    Args:
+      name: Name to look at.
+      is_regex_name: True if the name is a regex pattern.
+      timeout: How long to wait for the artifact to become available.
+
+    Returns:
+      A list of names that match.
+    """
+    raise NotImplementedError()
+
+  def Fetch(self, remote_name, local_path):
+    """Downloads artifact from given source to a local directory.
+
+    Args:
+      remote_name: Remote name of the file to fetch.
+      local_path: Local path to the folder to store fetched file.
+
+    Returns:
+      The path to fetched file.
+    """
+    raise NotImplementedError()
+
+  def DescribeSource(self):
+    """Gets the source of the download, e.g., a url to GS."""
+    raise NotImplementedError()
+
+
+class GoogleStorageDownloader(Downloader):
+  """Downloader of images to the devserver from Google Storage.
+
+  Given a URL to a build on the archive server:
+    - Caches that build and the given artifacts onto the devserver.
+    - May also initiate caching of related artifacts in the background.
+
+  This is intended to be used with ChromeOS.
+
+  Private class members:
+    archive_url: Google Storage URL to download build artifacts from.
+  """
+
+  def __init__(self, static_dir, archive_url):
+    # The archive_url is of the form gs://server/[some_path/target]/...]/build
+    # This function discards 'gs://server/' and extracts the [some_path/target]
+    # as rel_path and the build as build.
+    sub_url = archive_url.partition('://')[2]
+    split_sub_url = sub_url.split('/')
+    rel_path = '/'.join(split_sub_url[1:-1])
+    build = split_sub_url[-1]
+    build_dir = os.path.join(static_dir, rel_path, build)
+
+    super(GoogleStorageDownloader, self).__init__(static_dir, build_dir, build)
+
+    self._archive_url = archive_url
+
+  def Wait(self, name, is_regex_name, timeout):
+    """Waits for artifact to exist and returns the appropriate names.
+
+    Args:
+      name: Name to look at.
+      is_regex_name: True if the name is a regex pattern.
+      timeout: How long to wait for the artifact to become available.
+
+    Returns:
+      A list of names that match.
+
+    Raises:
+      ArtifactDownloadError: An error occurred when obtaining artifact.
+    """
+    names = gsutil_util.GetGSNamesWithWait(
+        name, self._archive_url, str(self), timeout=timeout,
+        is_regex_pattern=is_regex_name)
+    if not names:
+      raise build_artifact.ArtifactDownloadError(
+          'Could not find %s in Google Storage at %s' %
+          (name, self._archive_url))
+    return names
+
+  def Fetch(self, remote_name, local_path):
+    """Downloads artifact from Google Storage to a local directory."""
+    install_path = os.path.join(local_path, remote_name)
+    gs_path = '/'.join([self._archive_url, remote_name])
+    gsutil_util.DownloadFromGS(gs_path, local_path)
+    return install_path
+
+  def DescribeSource(self):
+    return self._archive_url
+
+
+class LocalDownloader(Downloader):
+  """Downloader of images to the devserver from local storage.
+
+  Given a local path:
+    - Caches that build and the given artifacts onto the devserver.
+    - May also initiate caching of related artifacts in the background.
+
+  Private class members:
+    source_path: local path to the directory storing the build artifacts.
+  """
+
+  def __init__(self, static_dir, source_path):
+    # The local path is of the form /{path to static dir}/{rel_path}/{build}.
+    # local_path must be a subpath of the static directory.
+    self.source_path = source_path
+    rel_path = os.path.basename(os.path.dirname(source_path))
+    build = os.path.basename(source_path)
+    build_dir = os.path.join(static_dir, rel_path, build)
+
+    super(LocalDownloader, self).__init__(static_dir, build_dir, build)
+
+  def Wait(self, name, is_regex_name, timeout):
+    """Verifies the local artifact exists and returns the appropriate names.
+
+    Args:
+      name: Name to look at.
+      is_regex_name: True if the name is a regex pattern.
+      timeout: How long to wait for the artifact to become available.
+
+    Returns:
+      A list of names that match.
+
+    Raises:
+      ArtifactDownloadError: An error occurred when obtaining artifact.
+    """
+    local_path = os.path.join(self.source_path, name)
+    if is_regex_name:
+      filter_re = re.compile(name)
+      for filename in os.listdir(self.source_path):
+        if filter_re.match(filename):
+          return [filename]
+    else:
+      glob_search = glob.glob(local_path)
+      if glob_search and len(glob_search) == 1:
+        return [os.path.basename(glob_search[0])]
+    raise build_artifact.ArtifactDownloadError('Artifact not found.')
+
+  def Fetch(self, remote_name, local_path):
+    """Copies artifact from the local source path into the staging directory."""
+    install_path = os.path.join(local_path, remote_name)
+    # It's a local path so just copy it into the staged directory.
+    shutil.copyfile(os.path.join(self.source_path, remote_name),
+                    install_path)
+    return install_path
+
+  def DescribeSource(self):
+    return self.source_path
+
+
+class LaunchControlDownloader(Downloader):
+  """Downloader of images to the devserver from launch control."""
+
+  def __init__(self, static_dir, build_id, target):
+    """Initialize LaunchControlDownloader.
+
+    Args:
+      static_dir: Root directory to store the build.
+      build_id: Build id of the Android build, e.g., 2155602.
+      target: Target of the Android build, e.g., shamu-userdebug.
+    """
+    build = '%s/%s' % (target, build_id)
+    build_dir = os.path.join(static_dir, '', build)
+
+    self.build_id = build_id
+    self.target = target
+
+    super(LaunchControlDownloader, self).__init__(static_dir, build_dir, build)
+
+  def Wait(self, name, is_regex_name, timeout):
+    """Waits for artifact to exist in LaunchControl and returns matching names.
+
+    Args:
+      name: Name to look at.
+      is_regex_name: True if the name is a regex pattern.
+      timeout: How long to wait for the artifact to become available.
+
+    Returns:
+      A list of names that match.
+
+    Raises:
+      ArtifactDownloadError: An error occurred when obtaining artifact.
+    """
+    raise NotImplementedError()
+
+  def Fetch(self, remote_name, local_path):
+    """Downloads artifact from LaunchControl to a local directory."""
+    install_path = os.path.join(local_path, remote_name)
+    return install_path
+
+  def DescribeSource(self):
+    raise NotImplementedError()
diff --git a/downloader_unittest.py b/downloader_unittest.py
index 64ca12e..16909da 100755
--- a/downloader_unittest.py
+++ b/downloader_unittest.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python2
 #
 # Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -6,6 +6,8 @@
 
 """Unit tests for downloader module."""
 
+from __future__ import print_function
+
 import mox
 import os
 import shutil
@@ -32,26 +34,26 @@
   def tearDown(self):
     shutil.rmtree(self._work_dir, ignore_errors=True)
 
-  def _SimpleDownloadOfTestSuites(self, archive_path):
+  def _SimpleDownloadOfTestSuites(self, downloader_instance):
     """Helper to verify test_suites are downloaded correctly.
 
     Args:
-      archive_path: Archive url or local path to test with.
+      downloader_instance: Downloader object to test with.
     """
-    downloader_instance = downloader.Downloader(self._work_dir,
-                                                archive_path)
+    factory = build_artifact.ChromeOSArtifactFactory(
+        downloader_instance.GetBuildDir(), ['test_suites'],
+        None, downloader_instance.GetBuild())
     self.mox.StubOutWithMock(downloader.Downloader,
                              '_DownloadArtifactsSerially')
     self.mox.StubOutWithMock(downloader.Downloader,
                              '_DownloadArtifactsInBackground')
 
     downloader.Downloader._DownloadArtifactsInBackground(mox.In(mox.IsA(
-        build_artifact.AutotestTarballBuildArtifact)))
+        build_artifact.AutotestTarball)))
     downloader.Downloader._DownloadArtifactsSerially(
-        [mox.IsA(build_artifact.BundledBuildArtifact)], no_wait=True)
+        [mox.IsA(build_artifact.BundledArtifact)], no_wait=True)
     self.mox.ReplayAll()
-    downloader_instance.Download(artifacts=['test_suites'],
-                                 files=None)
+    downloader_instance.Download(factory)
     # Sanity check the timestamp file exists.
     self.assertTrue(os.path.exists(
         os.path.join(self._work_dir, self.board, self.build,
@@ -64,7 +66,8 @@
     Verifies that if we request the test_suites from Google Storage, it gets
     downloaded and the autotest tarball is attempted in the background.
     """
-    self._SimpleDownloadOfTestSuites(self.archive_url)
+    self._SimpleDownloadOfTestSuites(
+        downloader.GoogleStorageDownloader(self._work_dir, self.archive_url))
 
   def testSimpleDownloadOfTestSuitesFromLocal(self):
     """Basic test_suites test.
@@ -72,30 +75,74 @@
     Verifies that if we request the test_suites from a local path, it gets
     downloaded and the autotest tarball is attempted in the background.
     """
-    self._SimpleDownloadOfTestSuites(self.local_path)
+    self._SimpleDownloadOfTestSuites(
+        downloader.LocalDownloader(self._work_dir, self.local_path))
 
-  def _DownloadSymbolsHelper(self, archive_path):
+  def _DownloadSymbolsHelper(self, downloader_instance):
     """Basic symbols download."""
-    downloader_instance = downloader.Downloader(self._work_dir, archive_path)
+    factory = build_artifact.ChromeOSArtifactFactory(
+        downloader_instance.GetBuildDir(), ['symbols'],
+        None, downloader_instance.GetBuild())
+
     self.mox.StubOutWithMock(downloader.Downloader,
                              '_DownloadArtifactsSerially')
     # Should not get called but mocking so that we know it wasn't called.
     self.mox.StubOutWithMock(downloader.Downloader,
                              '_DownloadArtifactsInBackground')
     downloader.Downloader._DownloadArtifactsSerially(
-        [mox.IsA(build_artifact.BundledBuildArtifact)], no_wait=True)
+        [mox.IsA(build_artifact.BundledArtifact)], no_wait=True)
     self.mox.ReplayAll()
-    downloader_instance.Download(artifacts=['symbols'],
-                                 files=None)
+    downloader_instance.Download(factory)
     self.mox.VerifyAll()
 
   def testDownloadSymbolsFromGS(self):
     """Basic symbols download from Google Storage."""
-    self._DownloadSymbolsHelper(self.archive_url)
+    self._DownloadSymbolsHelper(
+        downloader.GoogleStorageDownloader(self._work_dir, self.archive_url))
 
   def testDownloadSymbolsFromLocal(self):
     """Basic symbols download from a Local Path."""
-    self._DownloadSymbolsHelper(self.local_path)
+    self._DownloadSymbolsHelper(
+        downloader.LocalDownloader(self._work_dir, self.local_path))
+
+
+class AndroidDownloaderTestBase(mox.MoxTestBase):
+  """Android Downloader Unittests."""
+
+  def setUp(self):
+    mox.MoxTestBase.setUp(self)
+    self._work_dir = tempfile.mkdtemp('downloader-test')
+    self.target = 'shamu-userdebug'
+    self.build_id = '123456'
+
+  def tearDown(self):
+    shutil.rmtree(self._work_dir, ignore_errors=True)
+
+  def testDownloadFromLaunchControl(self):
+    """Basic test to check download from LaunchControl works."""
+    downloader_instance = downloader.LaunchControlDownloader(
+        self._work_dir, self.build_id, self.target)
+    factory = build_artifact.AndroidArtifactFactory(
+        downloader_instance.GetBuildDir(), ['fastboot'],
+        None, downloader_instance.GetBuild())
+    self.mox.StubOutWithMock(downloader.Downloader,
+                             '_DownloadArtifactsSerially')
+    self.mox.StubOutWithMock(downloader.Downloader,
+                             '_DownloadArtifactsInBackground')
+
+    # TODO(dshi): Uncomment following line after Fetch method is implemented in
+    # LaunchControlDownloader.
+    # downloader.Downloader._DownloadArtifactsInBackground(mox.In(mox.IsA(
+    #     build_artifact.ANDROID_FASTBOOT)))
+    downloader.Downloader._DownloadArtifactsSerially(
+        [mox.IsA(build_artifact.Artifact)], no_wait=True)
+    self.mox.ReplayAll()
+    downloader_instance.Download(factory)
+    # Sanity check the timestamp file exists.
+    self.assertTrue(os.path.exists(
+        os.path.join(self._work_dir, self.target, self.build_id,
+                     downloader.Downloader._TIMESTAMP_FILENAME)))
+    self.mox.VerifyAll()
 
 
 if __name__ == '__main__':
diff --git a/xbuddy.py b/xbuddy.py
index 384de49..9846558 100644
--- a/xbuddy.py
+++ b/xbuddy.py
@@ -16,8 +16,9 @@
 import time
 import threading
 
-import build_util
 import artifact_info
+import build_artifact
+import build_util
 import common_util
 import devserver_constants
 import downloader
@@ -643,7 +644,10 @@
       XBuddy._staging_thread_count += 1
     try:
       _Log("Downloading %s from %s", artifacts, gs_url)
-      downloader.Downloader(self.static_dir, gs_url).Download(artifacts, [])
+      dl = downloader.GoogleStorageDownloader(self.static_dir, gs_url)
+      factory = build_artifact.ChromeOSArtifactFactory(
+          dl.GetBuildDir(), artifacts, [], dl.GetBuild())
+      dl.Download(factory)
     finally:
       with XBuddy._staging_thread_count_lock:
         XBuddy._staging_thread_count -= 1