Use "UPLOADED" to determine what artifacts are available on Google Storage

This change determines whether a file is available on Google Storage
(GS) by looking up the filenames listed in the "UPLOADED" file in the
same bucket.

Currently the dev server uses "gsutil ls" to determine whether a file
is available on GS. This approach is flaky for the following two
reasons.

1. GS provides strong consistency for object reads/writes, but only
(non-time-bound) eventual consistency for bucket listings performed by
"ls". This means that the dev server may be waiting for an object to
show up in the listing indefinitely.

2. GS does not guarantee causal consistency, which means that the dev
server may see the new bucket listing once but get an older listing
afterwards. This is caused by the multiple replicas used in the
distributed caching system in GS.

Instead of relying on "gsutil ls", I have patched cbuildbot to maintain
a manifest of uploaded artifacts and store the manifest (named UPLOADED)
in the same bucket. This change allows the dev server to poll UPLOADED
for the available artifacts and to check whether a specific artifact is
available (e.g. the full payload) by using a pattern to match names in
UPLOADED.

BUG=chromium-os:32361
TEST=unittest + local dev server

Change-Id: Ic43a5be20840fb9f70a65ca848fe22f2dc701a3d
Reviewed-on: https://gerrit.chromium.org/gerrit/30573
Commit-Ready: Yu-Ju Hong <yjhong@chromium.org>
Reviewed-by: Yu-Ju Hong <yjhong@chromium.org>
Tested-by: Yu-Ju Hong <yjhong@chromium.org>
diff --git a/devserver_util.py b/devserver_util.py
index 4edfafa..afba671 100644
--- a/devserver_util.py
+++ b/devserver_util.py
@@ -9,6 +9,7 @@
 import errno
 import os
 import random
+import re
 import shutil
 import time
 
@@ -19,18 +20,19 @@
 NTON_DIR_SUFFIX = '_nton'
 MTON_DIR_SUFFIX = '_mton'
 DEV_BUILD_PREFIX = 'dev'
-
+UPLOADED_LIST = 'UPLOADED'
 
 class DevServerUtilError(Exception):
   """Exception classes used by this module."""
   pass
 
 
-def ParsePayloadList(payload_list):
+def ParsePayloadList(archive_url, payload_list):
   """Parse and return the full/delta payload URLs.
 
   Args:
-    payload_list: A list of Google Storage URLs.
+    archive_url: The URL of the Google Storage bucket.
+    payload_list: A list filenames.
 
   Returns:
     Tuple of 3 payloads URLs: (full, nton, mton).
@@ -43,14 +45,14 @@
   nton_payload_url = None
   for payload in payload_list:
     if '_full_' in payload:
-      full_payload_url = payload
+      full_payload_url = '/'.join([archive_url, payload])
     elif '_delta_' in payload:
       # e.g. chromeos_{from_version}_{to_version}_x86-generic_delta_dev.bin
-      from_version, to_version = payload.rsplit('/', 1)[1].split('_')[1:3]
+      from_version, to_version = payload.split('_')[1:3]
       if from_version == to_version:
-        nton_payload_url = payload
+        nton_payload_url = '/'.join([archive_url, payload])
       else:
-        mton_payload_url = payload
+        mton_payload_url = '/'.join([archive_url, payload])
 
   if not full_payload_url:
     raise DevServerUtilError(
@@ -59,24 +61,83 @@
   return full_payload_url, nton_payload_url, mton_payload_url
 
 
-def _GetAutotestURL(archive_url):
-  """Find out what type of autotest tarball is available and return the
-  coresponding URL."""
+def IsAvailable(pattern_list, uploaded_list):
+  """Checks whether the target artifacts we wait for are available.
 
-  cmd = 'gsutil ls %s/autotest.*' % archive_url
-  msg = 'Failed to retrieve the list of autotest tarballs.'
-  autotest_tarballs = gsutil_util.GSUtilRun(cmd, msg).splitlines()
+  This method searches the uploaded_list for a match for every pattern
+  in the pattern_list. It aborts and returns false if no filename
+  matches a given pattern.
 
-  # Use autotest.tar if it is available.
-  for tarball in autotest_tarballs:
-    if os.path.basename(tarball) == downloadable_artifact.AUTOTEST_PACKAGE:
-     return '%s/%s' % (archive_url, downloadable_artifact.AUTOTEST_PACKAGE)
+  Args:
+    pattern_list: List of regular expression patterns to identify
+        the target artifacts.
+    uploaded_list: List of all uploaded files.
 
-  # Use autotest.tar.bz2 by default for backward compatibility.
-  return '%s/%s' % (archive_url,
-                    downloadable_artifact.AUTOTEST_ZIPPED_PACKAGE)
+  Returns:
+    True if there is a match for every pattern; false otherwise.
+  """
 
-def GatherArtifactDownloads(main_staging_dir, archive_url, build, build_dir):
+  # Pre-compile the regular expression patterns
+  compiled_patterns = []
+  for p in pattern_list:
+    compiled_patterns.append(re.compile(p))
+
+  for pattern in compiled_patterns:
+    found = False
+    for filename in uploaded_list:
+      if re.search(pattern, filename):
+        found = True
+        break
+    if not found:
+      return False
+
+  return True
+
+
+def WaitUntilAvailable(to_wait_list, archive_url, err_str, timeout=600,
+                       delay=10):
+  """Waits until all target artifacts are available in Google Storage or
+  until the request times out.
+
+  This method polls Google Storage until all target artifacts are
+  available or until the timeout occurs. Because we may not know the
+  exact name of the target artifacts, the method accepts to_wait_list, a
+  list of filename patterns, to identify whether an artifact whose name
+  matches the pattern exists (e.g. use pattern '_full_' to search for
+  the full payload 'chromeos_R17-1413.0.0-a1_x86-mario_full_dev.bin').
+
+  Args:
+    to_wait_list: List of regular expression patterns to identify
+        the target artifacts.
+    archive_url: URL of the Google Storage bucket.
+    err_str: String to display in the error message.
+
+  Returns:
+    The list of artifacts in the Google Storage bucket.
+
+  Raises:
+    DevServerUtilError: If timeout occurs.
+  """
+
+  cmd = 'gsutil cat %s/%s' % (archive_url, UPLOADED_LIST)
+  msg = 'Failed to get a list of uploaded files.'
+
+  deadline = time.time() + timeout
+  while time.time() < deadline:
+    to_delay = delay + random.uniform(.5 * delay, 1.5 * delay)
+    # Run "gsutil cat" to retrieve the list
+    uploaded_list = gsutil_util.GSUtilRun(cmd, msg).splitlines()
+    # Check if all target artifacts are available
+    if IsAvailable(to_wait_list, uploaded_list):
+      return uploaded_list
+    cherrypy.log('Retrying in %f seconds...%s' % (to_delay, err_str))
+    time.sleep(to_delay)
+
+  raise DevServerUtilError('Missing %s for %s.' % (err_str, archive_url))
+
+
+def GatherArtifactDownloads(main_staging_dir, archive_url, build, build_dir,
+                            timeout=600, delay=10):
   """Generates artifacts that we mean to download and install for autotest.
 
   This method generates the list of artifacts we will need for autotest. These
@@ -85,12 +146,21 @@
   Note, these artifacts can be downloaded asynchronously iff
   !artifact.Synchronous().
   """
-  cmd = 'gsutil ls %s/*.bin' % archive_url
-  msg = 'Failed to get a list of payloads.'
-  payload_list = gsutil_util.GSUtilRun(cmd, msg).splitlines()
+
+  # Wait up to 10 minutes for the full payload to be uploaded because we
+  # do not know the exact name of the full payload.
+
+  # We also wait for 'autotest.tar' because we do not know what type of
+  # autotest tarballs (tar or tar.bz2) is available
+  # (crosbug.com/32312). This dependency can be removed once all
+  # branches move to the new 'tar' format.
+  to_wait_list = ['_full_', 'autotest.tar']
+  err_str = 'full payload or autotest tarball'
+  uploaded_list = WaitUntilAvailable(to_wait_list, archive_url, err_str,
+                                     timeout=600)
 
   # First we gather the urls/paths for the update payloads.
-  full_url, nton_url, mton_url = ParsePayloadList(payload_list)
+  full_url, nton_url, mton_url = ParsePayloadList(archive_url, uploaded_list)
 
   full_payload = os.path.join(build_dir, downloadable_artifact.ROOT_UPDATE)
 
@@ -111,8 +181,17 @@
         mton_url, main_staging_dir, mton_payload))
 
 
+  # Gather information about autotest tarballs. Use autotest.tar if available.
+  if downloadable_artifact.AUTOTEST_PACKAGE in uploaded_list:
+    autotest_url = '%s/%s' % (archive_url,
+                              downloadable_artifact.AUTOTEST_PACKAGE)
+  else:
+    # Use autotest.tar.bz for backward compatibility. This can be
+    # removed once all branches start using "autotest.tar"
+    autotest_url = '%s/%s' % (archive_url,
+                              downloadable_artifact.AUTOTEST_ZIPPED_PACKAGE)
+
   # Next we gather the miscellaneous payloads.
-  autotest_url = _GetAutotestURL(archive_url)
   stateful_url = archive_url + '/' + downloadable_artifact.STATEFUL_UPDATE
   test_suites_url = (archive_url + '/' +
                      downloadable_artifact.TEST_SUITES_PACKAGE)
@@ -150,24 +229,14 @@
           This is an iterable so that it's similar to GatherArtifactDownloads.
           Also, it's possible that someday we might have more than one.
   """
+
+  # Wait up to 10 minutes for the debug symbols to be uploaded.
+  to_wait_list = [downloadable_artifact.DEBUG_SYMBOLS]
+  err_str = 'debug symbols'
+  WaitUntilAvailable(to_wait_list, archive_url, err_str, timeout=timeout,
+                     delay=delay)
+
   symbol_url = archive_url + '/' + downloadable_artifact.DEBUG_SYMBOLS
-  cmd = 'gsutil ls %s' % symbol_url
-  msg = 'Debug symbols for %s not archived.' % archive_url
-
-  deadline = time.time() + timeout
-  while time.time() < deadline:
-    to_delay = delay + random.choice([-1, 1]) * random.random() * .5 * delay
-    try:
-      gsutil_util.GSUtilRun(cmd, msg)
-      break
-    except gsutil_util.GSUtilError as e:
-      cherrypy.log('%s, Retrying in %f seconds...' % (e, to_delay),
-                   'SYMBOL_DOWNLOAD')
-      time.sleep(to_delay)
-  else:
-    # On the last try, run and allow exceptions to escape.
-    gsutil_util.GSUtilRun(cmd, msg)
-
   return [downloadable_artifact.DebugTarball(symbol_url, temp_download_dir,
                                              staging_dir)]
 
diff --git a/devserver_util_unittest.py b/devserver_util_unittest.py
index 3864522..aac0f81 100755
--- a/devserver_util_unittest.py
+++ b/devserver_util_unittest.py
@@ -75,39 +75,53 @@
   def testParsePayloadList(self):
     """Tests we can parse the payload list into urls."""
     archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/'
-                          'R17-1413.0.0-a1-b1346/')
-    mton_url = (archive_url_prefix + 'chromeos_R17-1412.0.0-a1-b1345_'
-                'R17-1413.0.0-a1_x86-mario_delta_dev.bin')
-    nton_url = (archive_url_prefix + 'chromeos_R17-1413.0.0-a1_'
-                'R17-1413.0.0-a1_x86-mario_delta_dev.bin')
-    full_url = (archive_url_prefix + 'chromeos_R17-1413.0.0-a1_'
-                'x86-mario_full_dev.bin')
+                          'R17-1413.0.0-a1-b1346')
+    mton_basename = ('chromeos_R17-1412.0.0-a1-b1345_R17-1413.0.0-a1_'
+                     'x86-mario_delta_dev.bin')
+    nton_basename = ('chromeos_R17-1413.0.0-a1_R17-1413.0.0-a1_'
+                     'x86-mario_delta_dev.bin')
+    full_basename = ('chromeos_R17-1413.0.0-a1_x86-mario_full_dev.bin')
+
+    mton_url = '/'.join([archive_url_prefix, mton_basename])
+    nton_url = '/'.join([archive_url_prefix, nton_basename])
+    full_url = '/'.join([archive_url_prefix, full_basename])
+
     full_url_out, nton_url_out, mton_url_out = (
-        devserver_util.ParsePayloadList([full_url, nton_url, mton_url]))
+        devserver_util.ParsePayloadList(archive_url_prefix,
+                                        [full_basename, nton_basename,
+                                         mton_basename]))
     self.assertEqual([full_url, nton_url, mton_url],
                      [full_url_out, nton_url_out, mton_url_out])
 
     archive_url_prefix = ('gs://chromeos-image-archive/x86-alex_he-release/'
                           'R18-1420.0.0-a1-b541')
-    mton_url = (archive_url_prefix + 'chromeos_R18-1418.0.0-a1-b540_'
-                'R18-1420.0.0-a1_x86-alex_he_delta_dev.bin')
-    nton_url = (archive_url_prefix + 'chromeos_R18-1420.0.0-a1_'
-                'R18-1420.0.0-a1_x86-alex_he_delta_dev.bin')
-    full_url = (archive_url_prefix + 'chromeos_R18-1420.0.0-a1_'
-                'x86-alex_he_full_dev.bin')
+
+    mton_basename = ('chromeos_R18-1418.0.0-a1-b54a0_R18-1420.0.0-a1'
+                     '_x86-alex_he_delta_dev.bin')
+    nton_basename = ('chromeos_R18-1420.0.0-a1_R18-1420.0.0-a1_'
+                     'x86-alex_he_delta_dev.bin')
+    full_basename = ('chromeos_R18-1420.0.0-a1_x86-alex_he_full_dev.bin')
+
+    mton_url = '/'.join([archive_url_prefix, mton_basename])
+    nton_url = '/'.join([archive_url_prefix, nton_basename])
+    full_url = '/'.join([archive_url_prefix, full_basename])
+
     full_url_out, nton_url_out, mton_url_out = (
-        devserver_util.ParsePayloadList([full_url, nton_url, mton_url]))
+        devserver_util.ParsePayloadList(archive_url_prefix,
+                                        [full_basename, nton_basename,
+                                         mton_basename]))
     self.assertEqual([full_url, nton_url, mton_url],
                      [full_url_out, nton_url_out, mton_url_out])
 
   def testParsePayloadListWithoutDeltas(self):
     """Tests we can parse the payload list when no delta updates exist."""
     archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/'
-                          'R17-1413.0.0-a1-b1346/')
-    full_url = (archive_url_prefix + 'chromeos_R17-1413.0.0-a1_'
-                'x86-mario_full_dev.bin')
+                          'R17-1413.0.0-a1-b1346')
+    full_basename = ('chromeos_R17-1413.0.0-a1_x86-mario_full_dev.bin')
+    full_url = '/'.join([archive_url_prefix, full_basename])
     full_url_out, nton_url_out, mton_url_out = (
-        devserver_util.ParsePayloadList([full_url, '', '']))
+        devserver_util.ParsePayloadList(archive_url_prefix,
+                                        [full_basename, '', '']))
     self.assertEqual([full_url, None, None],
                      [full_url_out, nton_url_out, mton_url_out])
 
@@ -115,12 +129,16 @@
     """Tests that we can parse a payload list with missing optional payload."""
     archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/'
                           'R17-1413.0.0-a1-b1346/')
-    nton_url = (archive_url_prefix + 'chromeos_R17-1413.0.0-a1_'
-                'R17-1413.0.0-a1_x86-mario_delta_dev.bin')
-    full_url = (archive_url_prefix + 'chromeos_R17-1413.0.0-a1_'
-                'x86-mario_full_dev.bin')
+    nton_basename = ('chromeos_R17-1413.0.0-a1_R17-1413.0.0-a1_x86-'
+                     'mario_delta_dev.bin')
+    full_basename = ('chromeos_R17-1413.0.0-a1_x86-mario_full_dev.bin')
+
+    nton_url = '/'.join([archive_url_prefix, nton_basename])
+    full_url = '/'.join([archive_url_prefix, full_basename])
+
     full_url_out, nton_url_out, mton_url_out = (
-        devserver_util.ParsePayloadList([full_url, nton_url]))
+        devserver_util.ParsePayloadList(archive_url_prefix,
+                                        [full_basename, nton_basename]))
     self.assertEqual([full_url, nton_url, None],
                      [full_url_out, nton_url_out, mton_url_out])
 
@@ -293,21 +311,25 @@
     build = 'R17-1413.0.0-a1-b1346'
     archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/' +
                           build)
-    mock_data = 'mock data\nmock_data\nmock_data'
+    mock_data = 'mock data\nmock_data'
     payloads = map(lambda x: '/'.join([archive_url_prefix, x]),
                    ['p1', 'p2', 'p3'])
     expected_payloads = payloads + map(
         lambda x: '/'.join([archive_url_prefix, x]),
             [downloadable_artifact.STATEFUL_UPDATE,
-             downloadable_artifact.AUTOTEST_PACKAGE,
+             downloadable_artifact.AUTOTEST_ZIPPED_PACKAGE,
              downloadable_artifact.TEST_SUITES_PACKAGE])
     self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
+    self.mox.StubOutWithMock(devserver_util, 'IsAvailable')
     self.mox.StubOutWithMock(devserver_util, 'ParsePayloadList')
 
-    # GSUtil ls.
-    gsutil_util.GSUtilRun(mox.StrContains(archive_url_prefix),
+    # GSUtil cat gs://archive_url_prefix/UPLOADED.
+    gsutil_util.GSUtilRun(mox.StrContains(devserver_util.UPLOADED_LIST),
                           mox.IgnoreArg()).AndReturn(mock_data)
-    devserver_util.ParsePayloadList(mock_data.splitlines()).AndReturn(payloads)
+    devserver_util.IsAvailable(mox.IgnoreArg(),
+                               mock_data.splitlines()).AndReturn(True)
+    devserver_util.ParsePayloadList(archive_url_prefix,
+                                    mock_data.splitlines()).AndReturn(payloads)
 
     self.mox.ReplayAll()
     artifacts = devserver_util.GatherArtifactDownloads(
@@ -323,22 +345,26 @@
     build = 'R17-1413.0.0-a1-b1346'
     archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/' +
                           build)
-    mock_data = 'mock data\nmock_data'
+    mock_data = 'mock data\nmock_data\nmock_data'
     payloads = map(lambda x: '/'.join([archive_url_prefix, x]),
                    ['p1', 'p2'])
     expected_payloads = payloads + map(
         lambda x: '/'.join([archive_url_prefix, x]),
             [downloadable_artifact.STATEFUL_UPDATE,
-             downloadable_artifact.AUTOTEST_PACKAGE,
+             downloadable_artifact.AUTOTEST_ZIPPED_PACKAGE,
              downloadable_artifact.TEST_SUITES_PACKAGE])
     self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
+    self.mox.StubOutWithMock(devserver_util, 'IsAvailable')
     self.mox.StubOutWithMock(devserver_util, 'ParsePayloadList')
 
-    # GSUtil ls.
-    gsutil_util.GSUtilRun(mox.StrContains(archive_url_prefix),
+    # GSUtil cat gs://archive_url_prefix/UPLOADED.
+    gsutil_util.GSUtilRun(mox.StrContains(devserver_util.UPLOADED_LIST),
                           mox.IgnoreArg()).AndReturn(mock_data)
-    devserver_util.ParsePayloadList(mock_data.splitlines()).AndReturn(
-        payloads + [None])
+    devserver_util.IsAvailable(mox.IgnoreArg(),
+                               mock_data.splitlines()).AndReturn(True)
+    devserver_util.ParsePayloadList(archive_url_prefix,
+                                    mock_data.splitlines()
+                                    ).AndReturn(payloads + [None])
 
     self.mox.ReplayAll()
     artifacts = devserver_util.GatherArtifactDownloads(
@@ -355,21 +381,26 @@
     archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/' +
                           build)
     mock_data = 'mock data\nmock_data'
+
     payloads = map(lambda x: '/'.join([archive_url_prefix, x]),
                    ['p1'])
     expected_payloads = payloads + map(
         lambda x: '/'.join([archive_url_prefix, x]),
             [downloadable_artifact.STATEFUL_UPDATE,
-             downloadable_artifact.AUTOTEST_PACKAGE,
+             downloadable_artifact.AUTOTEST_ZIPPED_PACKAGE,
              downloadable_artifact.TEST_SUITES_PACKAGE])
     self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
+    self.mox.StubOutWithMock(devserver_util, 'IsAvailable')
     self.mox.StubOutWithMock(devserver_util, 'ParsePayloadList')
 
-    # GSUtil ls.
-    gsutil_util.GSUtilRun(mox.StrContains(archive_url_prefix),
+    # GSUtil cat gs://archive_url_prefix/UPLOADED.
+    gsutil_util.GSUtilRun(mox.StrContains(devserver_util.UPLOADED_LIST),
                           mox.IgnoreArg()).AndReturn(mock_data)
-    devserver_util.ParsePayloadList(mock_data.splitlines()).AndReturn(
-        payloads + [None, None])
+    devserver_util.IsAvailable(mox.IgnoreArg(),
+                               mock_data.splitlines()).AndReturn(True)
+    devserver_util.ParsePayloadList(archive_url_prefix,
+                                    mock_data.splitlines()
+                                    ).AndReturn(payloads + [None, None])
 
     self.mox.ReplayAll()
     artifacts = devserver_util.GatherArtifactDownloads(
@@ -385,13 +416,15 @@
     build = 'R17-1413.0.0-a1-b1346'
     archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/' +
                           build)
-    symbol_url = archive_url_prefix + '/' + downloadable_artifact.DEBUG_SYMBOLS
-    mock_data = 'mock data\nmock_data\nmock_data'
+    symbol_url = '/'.join([archive_url_prefix,
+                           downloadable_artifact.DEBUG_SYMBOLS])
+    uploaded_list_url = '/'.join([archive_url_prefix,
+                                  devserver_util.UPLOADED_LIST])
+    mock_data = 'mock-tarball.tgz\nmock-debug.tgz'
     self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
-    self.mox.StubOutWithMock(devserver_util, 'ParsePayloadList')
 
-    # GSUtil ls.
-    gsutil_util.GSUtilRun(mox.StrContains(symbol_url),
+    # GSUtil cat gs://archive_url_prefix/UPLOADED.
+    gsutil_util.GSUtilRun(mox.StrContains(devserver_util.UPLOADED_LIST),
                           mox.IgnoreArg()).AndReturn(mock_data)
 
     self.mox.ReplayAll()
@@ -403,55 +436,100 @@
 
     self.mox.VerifyAll()
 
-  def testGatherSymbolArtifactDownloadsWithRetry(self):
-    """Tests that we can poll for debug symbol artifacts to download."""
+  def testIsAvailable(self):
+    """Test that we can detect whether the target artifacts are avaialble."""
+    # Test when the all target files are available
+    pattern_list = ['_full_', 'autotest.tar']
+    uploaded_list = ['chromeos_R17-1413.0.0-a1_x86-mario_full_dev.bin',
+                     'debug.tgz',
+                     'autotest.tar.bz2']
+
+    available = devserver_util.IsAvailable(pattern_list, uploaded_list)
+    self.assertTrue(available)
+
+    # Test when some target files are missing
+    pattern_list = ['_full_', 'autotest.tar']
+    uploaded_list = ['chromeos_R17-1413.0.0-a1_x86-mario_full_dev.bin',
+                     'debug.tgz']
+
+    available = devserver_util.IsAvailable(pattern_list, uploaded_list)
+    self.assertFalse(available)
+
+  def testWaitUntilAvailable(self):
+    """Test that we can poll until all target artifacts are available."""
     build = 'R17-1413.0.0-a1-b1346'
-    archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/' +
-                          build)
-    symbol_url = archive_url_prefix + '/' + downloadable_artifact.DEBUG_SYMBOLS
+    archive_url = ('gs://chromeos-image-archive/x86-mario-release/'
+                   'R17-1413.0.0-a1-b1346')
+    to_wait_list = ['_full_']
     mock_data = 'mock data\nmock_data\nmock_data'
-    self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
-    self.mox.StubOutWithMock(devserver_util, 'ParsePayloadList')
 
-    # GSUtil ls.
-    gsutil_util.GSUtilRun(mox.StrContains(symbol_url),
-                          mox.IgnoreArg()).AndRaise(gsutil_util.GSUtilError())
-    gsutil_util.GSUtilRun(mox.StrContains(symbol_url),
+    self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
+    self.mox.StubOutWithMock(devserver_util, 'IsAvailable')
+
+    # GSUtil cat gs://archive_url_prefix/UPLOADED.
+    gsutil_util.GSUtilRun(mox.StrContains(devserver_util.UPLOADED_LIST),
                           mox.IgnoreArg()).AndReturn(mock_data)
+    devserver_util.IsAvailable(mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(True)
 
     self.mox.ReplayAll()
-    artifacts = devserver_util.GatherSymbolArtifactDownloads(
-        self._static_dir, archive_url_prefix, self._install_dir, delay=1)
-    for index, artifact in enumerate(artifacts):
-      self.assertEqual(artifact._gs_path, symbol_url)
-      self.assertTrue(artifact._tmp_staging_dir.startswith(self._static_dir))
-
+    uploaded_list = devserver_util.WaitUntilAvailable(to_wait_list, archive_url,
+                                                      'UNIT TEST', delay=1)
+    self.assertEqual(uploaded_list, mock_data.splitlines())
     self.mox.VerifyAll()
 
-  def testGatherSymbolArtifactDownloadsFailAfterRetry(self):
-    """Tests that we can poll for debug symbol artifacts to download."""
+  def testWaitUntilAvailableWithRetry(self):
+    """Test that we can poll until all target artifacts are available."""
     build = 'R17-1413.0.0-a1-b1346'
-    archive_url_prefix = ('gs://chromeos-image-archive/x86-mario-release/' +
-                          build)
-    symbol_url = archive_url_prefix + '/' + downloadable_artifact.DEBUG_SYMBOLS
-    self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
-    self.mox.StubOutWithMock(devserver_util, 'ParsePayloadList')
+    archive_url = ('gs://chromeos-image-archive/x86-mario-release/'
+                   'R17-1413.0.0-a1-b1346')
+    to_wait_list = ['_full_']
+    mock_data = 'mock data\nmock_data\nmock_data'
 
-    # GSUtil ls.
-    gsutil_util.GSUtilRun(mox.StrContains(symbol_url),
-                          mox.IgnoreArg()
-                          ).MultipleTimes().AndRaise(gsutil_util.GSUtilError())
+    self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
+    self.mox.StubOutWithMock(devserver_util, 'IsAvailable')
+
+    # GSUtil cat gs://archive_url_prefix/UPLOADED.
+    gsutil_util.GSUtilRun(mox.StrContains(devserver_util.UPLOADED_LIST),
+                          mox.IgnoreArg()).AndReturn(mock_data)
+    devserver_util.IsAvailable(mox.IgnoreArg(),
+                               mox.IgnoreArg()).AndReturn(False)
+
+    gsutil_util.GSUtilRun(mox.StrContains(devserver_util.UPLOADED_LIST),
+                          mox.IgnoreArg()).AndReturn(mock_data)
+    devserver_util.IsAvailable(mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(True)
 
     self.mox.ReplayAll()
-    self.assertRaises(gsutil_util.GSUtilError,
-                      devserver_util.GatherSymbolArtifactDownloads,
-                      self._static_dir,
-                      archive_url_prefix,
-                      self._install_dir,
-                      timeout=1,
-                      delay=1)
+    uploaded_list = devserver_util.WaitUntilAvailable(to_wait_list, archive_url,
+                                                      'UNIT TEST', delay=1)
+    self.assertEqual(uploaded_list, mock_data.splitlines())
     self.mox.VerifyAll()
 
+  def testWaitUntilAvailableTimeout(self):
+    """Test that we wait for the target artifacts until timeout occurs."""
+    build = 'R17-1413.0.0-a1-b1346'
+    archive_url = ('gs://chromeos-image-archive/x86-mario-release/'
+                   'R17-1413.0.0-a1-b1346')
+    to_wait_list = ['_full_']
+    mock_data = 'mock data\nmock_data\nmock_data'
+
+    self.mox.StubOutWithMock(gsutil_util, 'GSUtilRun')
+    self.mox.StubOutWithMock(devserver_util, 'IsAvailable')
+
+    # GSUtil cat gs://archive_url_prefix/UPLOADED.
+    gsutil_util.GSUtilRun(mox.StrContains(devserver_util.UPLOADED_LIST),
+                          mox.IgnoreArg()).AndReturn(mock_data)
+    devserver_util.IsAvailable(mox.IgnoreArg(),
+                               mox.IgnoreArg()).AndReturn(False)
+
+    self.mox.ReplayAll()
+    self.assertRaises(devserver_util.DevServerUtilError,
+                      devserver_util.WaitUntilAvailable,
+                      to_wait_list,
+                      archive_url,
+                      'UNIT TEST',
+                      delay=2,
+                      timeout=1)
+    self.mox.VerifyAll()
 
 if __name__ == '__main__':
   unittest.main()