[dev-util] Add stage_debug, symbolicate_dump endpoints to dev server

Add an endpoint to the dev server that will synchronously download
and stage the debug symbols for a given build.
Add an endpoint to the dev server that will symbolicate a minidump.

BUG=chromium-os:29850,chromium-os:30399
TEST=unit
TEST=run dev server, use curl to make it download some artifacts; ensure that
TEST=debug.tgz is _not_ downloaded immediately, and that the rest of the build
TEST=is staged.

TEST=run dev server, use curl to hit the stage_debug endpoint; check to
TEST=see that debug symbols are staged in static/archive
TEST=once symbols are staged, run the dev server in your
TEST=chroot and use curl with a minidump file like this:
TEST=  curl -F minidump=@/home/cmasone/chromeos/phooey/powerd.20120424.141235.1005.dmp http://localhost:8080/symbolicate_dump

Change-Id: Ie460526396d2b9999137142c723b87793bc23aaa
Reviewed-on: https://gerrit.chromium.org/gerrit/21696
Reviewed-by: Chris Sosa <sosa@chromium.org>
Commit-Ready: Chris Masone <cmasone@chromium.org>
Tested-by: Chris Masone <cmasone@chromium.org>
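
For reference, a minimal client-side sketch of the two new endpoints
(illustrative only; it assumes the requests library is available, and the
host, port and dump path below are placeholders):

  import requests

  DEVSERVER = 'http://localhost:8080'
  ARCHIVE_URL = ('gs://chromeos-image-archive/'
                 'x86-generic/R17-1208.0.0-a1-b338')

  # Synchronously download and stage the debug symbols for the build.
  resp = requests.get(DEVSERVER + '/stage_debug',
                      params={'archive_url': ARCHIVE_URL})
  print resp.text  # 'Success' once the symbols are staged.

  # POST a minidump as multipart/form-data; the response body is the
  # symbolicated stack trace.
  with open('powerd.dmp', 'rb') as dump:
    resp = requests.post(DEVSERVER + '/symbolicate_dump',
                         files={'minidump': dump})
  print resp.text
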
diff --git a/devserver.py b/devserver.py
index bb22e22..a9c9cd0 100755
--- a/devserver.py
+++ b/devserver.py
@@ -7,11 +7,14 @@
 """A CherryPy-based webserver to host images and build packages."""
 
 import cherrypy
+import cStringIO
 import logging
 import optparse
 import os
 import re
+import subprocess
 import sys
+import tempfile
 
 import autoupdate
 import devserver_util
@@ -240,6 +243,61 @@
     return return_obj
 
   @cherrypy.expose
+  def stage_debug(self, **kwargs):
+    """Downloads and stages debug symbol payloads from Google Storage.
+
+    This method downloads the debug symbol build artifact synchronously,
+    and then stages it for use by symbolicate_dump/.
+
+    Args:
+      archive_url: Google Storage URL for the build.
+
+    Example URL:
+      'http://myhost/stage_debug?archive_url=gs://chromeos-image-archive/'
+      'x86-generic/R17-1208.0.0-a1-b338'
+    """
+    archive_url = kwargs.get('archive_url')
+    if not archive_url:
+      raise DevServerError("Didn't specify the archive_url in request")
+
+    return downloader.SymbolDownloader(updater.static_dir).Download(archive_url)
+
+  @cherrypy.expose
+  def symbolicate_dump(self, minidump):
+    """Symbolicates a minidump using pre-downloaded symbols, returns it.
+
+    Callers will need to POST to this URL with a body of MIME-type
+    "multipart/form-data".
+    The body should include a single argument, 'minidump', containing the
+    binary-formatted minidump to symbolicate.
+
+    It is up to the caller to ensure that the symbols they want are currently
+    staged.
+
+    Args:
+      minidump: The binary minidump file to symbolicate.
+    """
+    to_return = ''
+    with tempfile.NamedTemporaryFile() as local:
+      while True:
+        data = minidump.file.read(8192)
+        if not data:
+          break
+        local.write(data)
+      local.flush()
+      stackwalk = subprocess.Popen(['minidump_stackwalk',
+                                    local.name,
+                                    updater.static_dir + '/debug/breakpad'],
+                                   stdout=subprocess.PIPE,
+                                   stderr=subprocess.PIPE)
+      to_return, error_text = stackwalk.communicate()
+      if stackwalk.returncode != 0:
+        raise DevServerError("Can't generate stack trace: %s (rc=%d)" % (
+            error_text, stackwalk.returncode))
+
+    return to_return
+
+  @cherrypy.expose
   def wait_for_status(self, **kwargs):
     """Waits for background artifacts to be downloaded from Google Storage.
 
diff --git a/devserver_util.py b/devserver_util.py
index eb498f4..c2a659b 100644
--- a/devserver_util.py
+++ b/devserver_util.py
@@ -8,7 +8,9 @@
 import distutils.version
 import errno
 import os
+import random
 import shutil
+import time
 
 import downloadable_artifact
 import gsutil_util
@@ -61,8 +63,10 @@
   """Generates artifacts that we mean to download and install for autotest.
 
   This method generates the list of artifacts we will need for autotest. These
-  artifacts are instances of downloadable_artifact.DownloadableArtifact.Note,
-  these artifacts can be downloaded asynchronously iff !artifact.Synchronous().
+  artifacts are instances of downloadable_artifact.DownloadableArtifact.
+
+  Note, these artifacts can be downloaded asynchronously iff
+  !artifact.Synchronous().
   """
   cmd = 'gsutil ls %s/*.bin' % archive_url
   msg = 'Failed to get a list of payloads.'
@@ -104,6 +108,51 @@
   return artifacts
 
 
+def GatherSymbolArtifactDownloads(temp_download_dir, archive_url, staging_dir,
+                                  timeout=600, delay=10):
+  """Generates debug symbol artifacts that we mean to download and stage.
+
+  This method generates the list of artifacts we will need to
+  symbolicate crash dumps that occur during autotest runs.  These
+  artifacts are instances of downloadable_artifact.DownloadableArtifact.
+
+  This will poll google storage until the debug symbol artifact becomes
+  available, or until the timeout expires (10 minutes by default).
+
+  @param temp_download_dir: the tempdir into which we're downloading artifacts
+                            prior to staging them.
+  @param archive_url: the google storage url of the bucket where the debug
+                      symbols for the desired build are stored.
+  @param staging_dir: the dir into which to stage the symbols
+  @param timeout: number of seconds to keep polling before giving up.
+  @param delay: base number of seconds to sleep between polls (jittered).
+
+  @return an iterable of one DebugTarball pointing to the right debug symbols.
+          This is an iterable so that it's similar to GatherArtifactDownloads.
+          Also, it's possible that someday we might have more than one.
+  """
+  symbol_url = archive_url + '/' + downloadable_artifact.DEBUG_SYMBOLS
+  cmd = 'gsutil ls %s' % symbol_url
+  msg = 'Debug symbols for %s not archived.' % archive_url
+
+  deadline = time.time() + timeout
+  while time.time() < deadline:
+    to_delay = delay + random.choice([-1, 1]) * random.random() * .5 * delay
+    try:
+      gsutil_util.GSUtilRun(cmd, msg)
+      break
+    except gsutil_util.GSUtilError as e:
+      cherrypy.log('%s, Retrying in %f seconds...' % (e, to_delay),
+                   'SYMBOL_DOWNLOAD')
+      time.sleep(to_delay)
+  else:
+    # On the last try, run and allow exceptions to escape.
+    gsutil_util.GSUtilRun(cmd, msg)
+
+  return [downloadable_artifact.DebugTarball(symbol_url, temp_download_dir,
+                                             staging_dir)]
+
+
 def PrepareBuildDirectory(build_dir):
   """Preliminary staging of installation directory for build.
 
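
A distilled sketch of the retry pattern used by GatherSymbolArtifactDownloads()
above (illustrative only, not part of the change): poll with a jittered delay,
uniform in [0.5 * delay, 1.5 * delay], until success or until the deadline
passes, then make one final attempt that is allowed to raise.

  import random
  import time

  def poll_with_jitter(operation, timeout=600, delay=10):
    """Polls |operation| until it succeeds or |timeout| seconds elapse."""
    deadline = time.time() + timeout
    while time.time() < deadline:
      try:
        return operation()
      except Exception as e:
        to_delay = delay + (random.choice([-1, 1]) *
                            random.random() * .5 * delay)
        print '%s, retrying in %f seconds...' % (e, to_delay)
        time.sleep(to_delay)
    # Equivalent to the while/else in the code above: once the deadline has
    # passed, run the operation one last time and let any exception escape.
    return operation()
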
diff --git a/devserver_util_unittest.py b/devserver_util_unittest.py
index 15b41ff..90f05b3 100755
--- a/devserver_util_unittest.py
+++ b/devserver_util_unittest.py
@@ -339,6 +339,78 @@
 
     self.mox.VerifyAll()
 
+  def testGatherSymbolArtifactDownloads(self):
+    """Tests that we can find debug symbol artifacts to download."""
+    build = 'R17-1413.0.0-a1-b1346'
+    archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/' +
+                          build)
+    symbol_url = archive_url_prefix + '/' + downloadable_artifact.DEBUG_SYMBOLS
+    mock_data = 'mock data\nmock_data\nmock_data'
+    self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
+    self.mox.StubOutWithMock(devserver_util, 'ParsePayloadList')
+
+    # GSUtil ls.
+    gsutil_util.GSUtilRun(mox.StrContains(symbol_url),
+                          mox.IgnoreArg()).AndReturn(mock_data)
+
+    self.mox.ReplayAll()
+    artifacts = devserver_util.GatherSymbolArtifactDownloads(
+        self._static_dir, archive_url_prefix, self._install_dir)
+    for index, artifact in enumerate(artifacts):
+      self.assertEqual(artifact._gs_path, symbol_url)
+      self.assertTrue(artifact._tmp_staging_dir.startswith(self._static_dir))
+
+    self.mox.VerifyAll()
+
+  def testGatherSymbolArtifactDownloadsWithRetry(self):
+    """Tests that we can poll for debug symbol artifacts to download."""
+    build = 'R17-1413.0.0-a1-b1346'
+    archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/' +
+                          build)
+    symbol_url = archive_url_prefix + '/' + downloadable_artifact.DEBUG_SYMBOLS
+    mock_data = 'mock data\nmock_data\nmock_data'
+    self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
+    self.mox.StubOutWithMock(devserver_util, 'ParsePayloadList')
+
+    # GSUtil ls.
+    gsutil_util.GSUtilRun(mox.StrContains(symbol_url),
+                          mox.IgnoreArg()).AndRaise(gsutil_util.GSUtilError())
+    gsutil_util.GSUtilRun(mox.StrContains(symbol_url),
+                          mox.IgnoreArg()).AndReturn(mock_data)
+
+    self.mox.ReplayAll()
+    artifacts = devserver_util.GatherSymbolArtifactDownloads(
+        self._static_dir, archive_url_prefix, self._install_dir, delay=1)
+    for index, artifact in enumerate(artifacts):
+      self.assertEqual(artifact._gs_path, symbol_url)
+      self.assertTrue(artifact._tmp_staging_dir.startswith(self._static_dir))
+
+    self.mox.VerifyAll()
+
+  def testGatherSymbolArtifactDownloadsFailAfterRetry(self):
+    """Tests that we can poll for debug symbol artifacts to download."""
+    build = 'R17-1413.0.0-a1-b1346'
+    archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/' +
+                          build)
+    symbol_url = archive_url_prefix + '/' + downloadable_artifact.DEBUG_SYMBOLS
+    self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
+    self.mox.StubOutWithMock(devserver_util, 'ParsePayloadList')
+
+    # GSUtil ls.
+    gsutil_util.GSUtilRun(mox.StrContains(symbol_url),
+                          mox.IgnoreArg()
+                          ).MultipleTimes().AndRaise(gsutil_util.GSUtilError())
+
+    self.mox.ReplayAll()
+    self.assertRaises(gsutil_util.GSUtilError,
+                      devserver_util.GatherSymbolArtifactDownloads,
+                      self._static_dir,
+                      archive_url_prefix,
+                      self._install_dir,
+                      timeout=1,
+                      delay=1)
+    self.mox.VerifyAll()
+
 
 if __name__ == '__main__':
   unittest.main()
diff --git a/downloadable_artifact.py b/downloadable_artifact.py
index f4a3f82..bf16885 100644
--- a/downloadable_artifact.py
+++ b/downloadable_artifact.py
@@ -13,6 +13,7 @@
 
 
 # Names of artifacts we care about.
+DEBUG_SYMBOLS = 'debug.tgz'
 STATEFUL_UPDATE = 'stateful.tgz'
 TEST_IMAGE = 'chromiumos_test_image.bin'
 ROOT_UPDATE = 'update.gz'
@@ -137,3 +138,17 @@
     # code.
     cmd = 'cp %s/* %s' % (autotest_pkgs_dir, autotest_dir)
     subprocess.check_call(cmd, shell=True)
+
+
+class DebugTarball(Tarball):
+  """Wrapper around the debug symbols tarball to download from gsutil."""
+
+  def _ExtractTarball(self):
+    """Extracts debug/breakpad from the tarball into the install_path."""
+    cmd = 'tar xzf %s --directory=%s debug/breakpad' % (
+        self._tmp_stage_path, self._install_path)
+    msg = 'An error occurred when attempting to untar %s' % self._tmp_stage_path
+    try:
+      subprocess.check_call(cmd, shell=True)
+    except subprocess.CalledProcessError, e:
+      raise ArtifactDownloadError('%s %s' % (msg, e))
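
Usage sketch for the new DebugTarball (illustrative only; the directory paths
are placeholders, and Stage() is assumed to drive _ExtractTarball() the same
way it does for the other Tarball artifacts):

  import downloadable_artifact

  symbol_url = ('gs://chromeos-image-archive/x86-generic/'
                'R17-1208.0.0-a1-b338/' + downloadable_artifact.DEBUG_SYMBOLS)
  tarball = downloadable_artifact.DebugTarball(
      symbol_url, '/tmp/symbol_staging', '/path/to/static')
  tarball.Download()  # Fetches debug.tgz from Google Storage.
  tarball.Stage()     # Extracts only debug/breakpad under /path/to/static.
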
diff --git a/downloader.py b/downloader.py
index 99988e1..dc22ad5 100755
--- a/downloader.py
+++ b/downloader.py
@@ -30,13 +30,33 @@
     self._staging_dir = None
     self._status_queue = multiprocessing.Queue()
     self._lock_tag = None
-    self._archive_url = None
+
+  @staticmethod
+  def CanonicalizeAndParse(archive_url):
+    """Canonicalize archive_url and parse it into its component parts.
+
+    @param archive_url: a URL at which build artifacts are archived.
+    @return a tuple of (canonicalized URL, build target, short build name)
+    """
+    archive_url = archive_url.rstrip('/')
+    target, short_build = archive_url.rsplit('/', 2)[-2:]
+    return archive_url, target, short_build
+
+  @staticmethod
+  def GenerateLockTag(target, short_build):
+    """Generate a name for a lock scoped to this target/build pair.
+
+    @param target: the target the build was for.
+    @param short_build: short build name
+    @return a name to use with AcquireLock that will scope the lock.
+    """
+    return '/'.join([target, short_build])
 
   @staticmethod
   def BuildStaged(archive_url, static_dir):
     """Returns True if the build is already staged."""
-    target, short_build = archive_url.rsplit('/', 2)[-2:]
-    sub_directory = '/'.join([target, short_build])
+    _, target, short_build = Downloader.CanonicalizeAndParse(archive_url)
+    sub_directory = Downloader.GenerateLockTag(target, short_build)
     return os.path.isdir(os.path.join(static_dir, sub_directory))
 
   def Download(self, archive_url, background=False):
@@ -45,37 +65,36 @@
     If background is set to True, will return back early before all artifacts
     have been downloaded. The artifacts that can be backgrounded are all those
     that are not set as synchronous.
+
+    TODO: refactor this into a common Download method, once unit tests are
+    fixed up to make iterating on the code easier.
     """
     # Parse archive_url into target and short_build.
     # e.g. gs://chromeos-image-archive/{target}/{short_build}
-    self._archive_url = archive_url.strip('/')
-    target, short_build = self._archive_url.rsplit('/', 2)[-2:]
+    archive_url, target, short_build = self.CanonicalizeAndParse(archive_url)
 
     # Bind build_dir and staging_dir here so we can tell if we need to do any
     # cleanup after an exception occurs before build_dir is set.
-    self._lock_tag = '/'.join([target, short_build])
+    self._lock_tag = self.GenerateLockTag(target, short_build)
+
+    if Downloader.BuildStaged(archive_url, self._static_dir):
+      cherrypy.log('Build %s has already been processed.' % self._lock_tag,
+                   'DOWNLOAD')
+      self._status_queue.put('Success')
+      return 'Success'
+
     try:
       # Create Dev Server directory for this build and tell other Downloader
       # instances we have processed this build.
-      try:
-        self._build_dir = devserver_util.AcquireLock(
-            static_dir=self._static_dir, tag=self._lock_tag)
-      except devserver_util.DevServerUtilError, e:
-        if Downloader.BuildStaged(archive_url, self._static_dir):
-          cherrypy.log(
-              'Build %s has already been processed.' % self._lock_tag,
-              'DOWNLOAD')
-          self._status_queue.put('Success')
-          return 'Success'
-        else:
-          raise
+      self._build_dir = devserver_util.AcquireLock(
+          static_dir=self._static_dir, tag=self._lock_tag)
 
       self._staging_dir = tempfile.mkdtemp(suffix='_'.join([target,
                                                             short_build]))
-      cherrypy.log('Gathering download requirements %s' % self._archive_url,
+      cherrypy.log('Gathering download requirements %s' % archive_url,
                    'DOWNLOAD')
-      artifacts = devserver_util.GatherArtifactDownloads(
-          self._staging_dir, self._archive_url, short_build, self._build_dir)
+      artifacts = self.GatherArtifactDownloads(
+          self._staging_dir, archive_url, short_build, self._build_dir)
       devserver_util.PrepareBuildDirectory(self._build_dir)
 
       cherrypy.log('Downloading foreground artifacts from %s' % archive_url,
@@ -89,7 +108,7 @@
           background_artifacts.append(artifact)
 
       if background:
-        self._DownloadArtifactsInBackground(background_artifacts)
+        self._DownloadArtifactsInBackground(background_artifacts, archive_url)
       else:
         self._DownloadArtifactsSerially(background_artifacts)
 
@@ -117,8 +136,7 @@
 
   def _DownloadArtifactsSerially(self, artifacts):
     """Simple function to download all the given artifacts serially."""
-    cherrypy.log('Downloading background artifacts for %s' % self._archive_url,
-                 'DOWNLOAD')
+    cherrypy.log('Downloading background artifacts serially.', 'DOWNLOAD')
     try:
       for artifact in artifacts:
         artifact.Download()
@@ -136,12 +154,21 @@
     finally:
       self._Cleanup()
 
-  def _DownloadArtifactsInBackground(self, artifacts):
+  def _DownloadArtifactsInBackground(self, artifacts, archive_url):
     """Downloads |artifacts| in the background and signals when complete."""
     proc = multiprocessing.Process(target=self._DownloadArtifactsSerially,
                                    args=(artifacts,))
     proc.start()
 
+  def GatherArtifactDownloads(self, main_staging_dir, archive_url, short_build,
+                              build_dir):
+    """Wrapper around devserver_util.GatherArtifactDownloads().
+
+    The wrapper allows mocking and overriding in derived classes.
+    """
+    return devserver_util.GatherArtifactDownloads(main_staging_dir, archive_url,
+                                                  short_build, build_dir)
+
   def GetStatusOfBackgroundDownloads(self):
     """Returns the status of the background downloads.
 
@@ -156,3 +183,98 @@
       raise status
 
     return status
+
+
+class SymbolDownloader(Downloader):
+  """Download and stage debug symbols for a build on the devsever.
+
+  Given a URL to a build on the archive server:
+
+    - Determine if the build already exists.
+    - Download and extract the debug symbols to a staging directory.
+    - Install symbols to static dir.
+  """
+
+  _DONE_FLAG = 'done'
+
+  @staticmethod
+  def GenerateLockTag(target, short_build):
+    return '/'.join([target, short_build, 'symbols'])
+
+  def Download(self, archive_url):
+    """Downloads debug symbols for the build defined by the |archive_url|.
+
+    The symbols will be downloaded synchronously.
+    """
+    # Parse archive_url into target and short_build.
+    # e.g. gs://chromeos-image-archive/{target}/{short_build}
+    archive_url, target, short_build = self.CanonicalizeAndParse(archive_url)
+
+    # Bind build_dir and staging_dir here so we can tell if we need to do any
+    # cleanup after an exception occurs before build_dir is set.
+    self._lock_tag = self.GenerateLockTag(target, short_build)
+    if self.SymbolsStaged(archive_url, self._static_dir):
+      cherrypy.log(
+          'Symbols for build %s have already been staged.' % self._lock_tag,
+          'SYMBOL_DOWNLOAD')
+      return 'Success'
+
+    try:
+      # Create Dev Server directory for this build and tell other Downloader
+      # instances we have processed this build.
+      self._build_dir = devserver_util.AcquireLock(
+          static_dir=self._static_dir, tag=self._lock_tag)
+
+      self._staging_dir = tempfile.mkdtemp(suffix='_'.join([target,
+                                                            short_build]))
+      cherrypy.log('Downloading debug symbols from %s' % archive_url,
+                   'SYMBOL_DOWNLOAD')
+
+      [symbol_artifact] = self.GatherArtifactDownloads(
+          self._staging_dir, archive_url, '', self._static_dir)
+      symbol_artifact.Download()
+      symbol_artifact.Stage()
+
+    except Exception:
+      # Release processing "lock", which will indicate to future runs that we
+      # did not succeed, and so they should try again.
+      if self._build_dir:
+        devserver_util.ReleaseLock(static_dir=self._static_dir,
+                                   tag=self._lock_tag)
+      self._Cleanup()
+      raise
+
+    self.MarkSymbolsStaged()
+    return 'Success'
+
+  def GatherArtifactDownloads(self, temp_download_dir, archive_url, short_build,
+                              static_dir):
+    """Call SymbolDownloader-appropriate artifact gathering method.
+
+    @param temp_download_dir: the tempdir into which we're downloading artifacts
+                              prior to staging them.
+    @param archive_url: the google storage url of the bucket where the debug
+                        symbols for the desired build are stored.
+    @param short_build: IGNORED
+    @param static_dir: the dir into which to stage the symbols
+
+    @return an iterable of one DebugTarball pointing to the right debug symbols.
+            This is an iterable so that it's similar to GatherArtifactDownloads.
+            Also, it's possible that someday we might have more than one.
+    """
+    return devserver_util.GatherSymbolArtifactDownloads(temp_download_dir,
+                                                        archive_url,
+                                                        static_dir)
+
+  def MarkSymbolsStaged(self):
+    """Puts a flag file on disk to signal that symbols are staged."""
+    with open(os.path.join(self._build_dir, self._DONE_FLAG), 'w') as flag:
+      flag.write(self._DONE_FLAG)
+
+  def SymbolsStaged(self, archive_url, static_dir):
+    """Returns True if the build is already staged."""
+    _, target, short_build = self.CanonicalizeAndParse(archive_url)
+    sub_directory = self.GenerateLockTag(target, short_build)
+    return os.path.isfile(os.path.join(static_dir,
+                                       sub_directory,
+                                       self._DONE_FLAG))
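
Worked example of the refactored helpers (values follow from the code above;
the static dir path is a placeholder):

  import downloader

  url = 'gs://chromeos-image-archive/x86-generic/R17-1208.0.0-a1-b338/'
  canonical, target, short_build = (
      downloader.Downloader.CanonicalizeAndParse(url))
  # canonical drops the trailing '/', target == 'x86-generic',
  # short_build == 'R17-1208.0.0-a1-b338'.

  # Lock tags scope concurrent downloads of the same build:
  downloader.Downloader.GenerateLockTag(target, short_build)
  #   -> 'x86-generic/R17-1208.0.0-a1-b338'
  downloader.SymbolDownloader.GenerateLockTag(target, short_build)
  #   -> 'x86-generic/R17-1208.0.0-a1-b338/symbols'

  # The stage_debug endpoint boils down to:
  downloader.SymbolDownloader('/path/to/static').Download(canonical)
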
diff --git a/downloader_unittest.py b/downloader_unittest.py
index 693689a..a470f84 100755
--- a/downloader_unittest.py
+++ b/downloader_unittest.py
@@ -12,7 +12,7 @@
 import tempfile
 import unittest
 
-import artifact_download
+import downloadable_artifact
 import devserver
 import devserver_util
 import downloader
@@ -26,7 +26,7 @@
 }
 
 
-class DownloaderTest(mox.MoxTestBase):
+class DownloaderTestBase(mox.MoxTestBase):
 
   def setUp(self):
     mox.MoxTestBase.setUp(self)
@@ -42,10 +42,10 @@
   def _CommonDownloaderSetup(self):
     """Common code to downloader tests.
 
-    Sets up artifacts and sets up expectations for synchronous artifacts to
-    be downloaded first.
+    Mocks out key devserver_util module methods, creates mock artifacts
+    and sets appropriate expectations.
 
-    Returns the artifacts to use in the test.
+    @return iterable of artifact objects with appropriate expectations.
     """
     board = 'x86-mario-release'
     self.mox.StubOutWithMock(devserver_util, 'AcquireLock')
@@ -53,32 +53,78 @@
     self.mox.StubOutWithMock(devserver_util, 'ReleaseLock')
     self.mox.StubOutWithMock(tempfile, 'mkdtemp')
 
-    artifacts = []
+    devserver_util.AcquireLock(
+        static_dir=self._work_dir,
+        tag=self._ClassUnderTest().GenerateLockTag(board, self.build)
+        ).AndReturn(self._work_dir)
 
+    tempfile.mkdtemp(suffix=mox.IgnoreArg()).AndReturn(self._work_dir)
+    return self._GenerateArtifacts()
+
+  def _CreateArtifactDownloader(self, artifacts):
+    """Create and return a Downloader of the appropriate type.
+
+    The returned downloader will expect to download and stage the
+    DownloadableArtifacts listed in |artifacts|.
+
+    @param artifacts: iterable of DownloadableArtifacts.
+    @return instance of downloader.Downloader or subclass.
+    """
+    raise NotImplementedError()
+
+  def _ClassUnderTest(self):
+    """Return class object of the type being tested.
+
+    @return downloader.Downloader class object, or subclass.
+    """
+    raise NotImplementedError()
+
+  def _GenerateArtifacts(self):
+    """Instantiate artifact mocks and set expectations on them.
+
+    @return iterable of artifact objects with appropriate expectations.
+    """
+    raise NotImplementedError()
+
+
+class DownloaderTest(DownloaderTestBase):
+  """Unit tests for downloader.Downloader.
+
+  setUp() and tearDown() inherited from DownloaderTestBase.
+  """
+
+  def _CreateArtifactDownloader(self, artifacts):
+    d = downloader.Downloader(self._work_dir)
+    self.mox.StubOutWithMock(d, 'GatherArtifactDownloads')
+    d.GatherArtifactDownloads(
+        self._work_dir, self.archive_url_prefix, self.build,
+        self._work_dir).AndReturn(artifacts)
+    return d
+
+  def _ClassUnderTest(self):
+    return downloader.Downloader
+
+  def _GenerateArtifacts(self):
+    """Instantiate artifact mocks and set expectations on them.
+
+    Sets up artifacts and sets up expectations for synchronous artifacts to
+    be downloaded first.
+
+    @return iterable of artifact objects with appropriate expectations.
+    """
+    artifacts = []
     for index in range(5):
-      artifact = self.mox.CreateMock(artifact_download.DownloadableArtifact)
+      artifact = self.mox.CreateMock(downloadable_artifact.DownloadableArtifact)
       # Make every other artifact synchronous.
       if index % 2 == 0:
         artifact.Synchronous = lambda: True
+        artifact.Download()
+        artifact.Stage()
       else:
         artifact.Synchronous = lambda: False
 
       artifacts.append(artifact)
 
-    devserver_util.AcquireLock(
-        static_dir=self._work_dir,
-        tag='/'.join([board, self.build])).AndReturn(self._work_dir)
-
-    tempfile.mkdtemp(suffix=mox.IgnoreArg()).AndReturn(self._work_dir)
-    devserver_util.GatherArtifactDownloads(
-        self._work_dir, self.archive_url_prefix, self.build,
-        self._work_dir).AndReturn(artifacts)
-
-    for index, artifact in enumerate(artifacts):
-      if index % 2 == 0:
-        artifact.Download()
-        artifact.Stage()
-
     return artifacts
 
   def testDownloaderSerially(self):
@@ -91,9 +137,10 @@
         artifact.Download()
         artifact.Stage()
 
+    d = self._CreateArtifactDownloader(artifacts)
     self.mox.ReplayAll()
-    self.assertEqual(downloader.Downloader(self._work_dir).Download(
-        self.archive_url_prefix, background=False), 'Success')
+    self.assertEqual(d.Download(self.archive_url_prefix, background=False),
+                     'Success')
     self.mox.VerifyAll()
 
   def testDownloaderInBackground(self):
@@ -106,8 +153,8 @@
         artifact.Download()
         artifact.Stage()
 
+    d = self._CreateArtifactDownloader(artifacts)
     self.mox.ReplayAll()
-    d = downloader.Downloader(self._work_dir)
     d.Download(self.archive_url_prefix, background=True)
     self.assertEqual(d.GetStatusOfBackgroundDownloads(), 'Success')
     self.mox.VerifyAll()
@@ -115,6 +162,10 @@
   def testInteractionWithDevserver(self):
     """Tests interaction between the downloader and devserver methods."""
     artifacts = self._CommonDownloaderSetup()
+    devserver_util.GatherArtifactDownloads(
+        self._work_dir, self.archive_url_prefix, self.build,
+        self._work_dir).AndReturn(artifacts)
+
     class FakeUpdater():
       static_dir = self._work_dir
 
@@ -147,5 +198,45 @@
                                                        self._work_dir))
 
 
+class SymbolDownloaderTest(DownloaderTestBase):
+  """Unit tests for downloader.SymbolDownloader.
+
+  setUp() and tearDown() inherited from DownloaderTestBase.
+  """
+
+  def _CreateArtifactDownloader(self, artifacts):
+    d = downloader.SymbolDownloader(self._work_dir)
+    self.mox.StubOutWithMock(d, 'GatherArtifactDownloads')
+    d.GatherArtifactDownloads(
+        self._work_dir, self.archive_url_prefix, '',
+        self._work_dir).AndReturn(artifacts)
+    return d
+
+  def _ClassUnderTest(self):
+    return downloader.SymbolDownloader
+
+  def _GenerateArtifacts(self):
+    """Instantiate artifact mocks and set expectations on them.
+
+    Sets up a DebugTarball and sets up expectation that it will be
+    downloaded and staged.
+
+    @return iterable of one artifact object with appropriate expectations.
+    """
+    artifact = self.mox.CreateMock(downloadable_artifact.DownloadableArtifact)
+    artifact.Synchronous = lambda: True
+    artifact.Download()
+    artifact.Stage()
+    return [artifact]
+
+  def testDownloaderSerially(self):
+    """Runs through the symbol downloader workflow."""
+    d = self._CreateArtifactDownloader(self._CommonDownloaderSetup())
+
+    self.mox.ReplayAll()
+    self.assertEqual(d.Download(self.archive_url_prefix), 'Success')
+    self.mox.VerifyAll()
+
+
 if __name__ == '__main__':
   unittest.main()