devserver: Add functionality to stage artifacts from a local directory.

This change updates the devserver so that local build and test
artifacts can be staged onto it.

The devserver stage function now also accepts a local_path
argument. local_path specifies a subdirectory of the devserver's
static directory that contains the build artifacts to stage. It is
restricted to a subdirectory of the static directory so that callers
cannot access the rest of the devserver's file system.

This functionality will be used to support custom image staging for
the new 'cros stage-on-moblab' command.
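
A rough usage sketch (the host, port, and staged build path below are
hypothetical examples, not part of this change): a caller could exercise
the new parameter against a running devserver with something like:

  import urllib2

  # Stage test_suites from a build already sitting under the static dir.
  params = ('local_path=/home/moblab/static/x86-mario-release/R41-6680.0.0'
            '&artifacts=test_suites')
  urllib2.urlopen('http://moblab:8080/stage?' + params).read()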

BUG=chromium:370909
TEST=devserver_integration_test, unittests, & local moblab test run.
Change-Id: I617a61066e644657cdbfddee9762691c495d26b5
Reviewed-on: https://chromium-review.googlesource.com/239321
Reviewed-by: Simran Basi <sbasi@chromium.org>
Tested-by: Simran Basi <sbasi@chromium.org>
Commit-Queue: Simran Basi <sbasi@chromium.org>
diff --git a/artifact_info.py b/artifact_info.py
index 0a5c674..712b287 100644
--- a/artifact_info.py
+++ b/artifact_info.py
@@ -72,5 +72,5 @@
 # this relationship. Specifically:
 # If X is requested, all items in Y should also get triggered for download.
 REQUESTED_TO_OPTIONAL_MAP = {
-  TEST_SUITES: [AUTOTEST],
+    TEST_SUITES: [CONTROL_FILES, AUTOTEST_PACKAGES],
 }
diff --git a/build_artifact.py b/build_artifact.py
index ae55848..c80fe7d 100755
--- a/build_artifact.py
+++ b/build_artifact.py
@@ -6,6 +6,7 @@
 
 """Module containing classes that wrap artifact downloads."""
 
+import glob
 import os
 import pickle
 import re
@@ -93,7 +94,7 @@
 
     Args:
       install_dir: Where to install the artifact.
-      archive_url: The Google Storage path to find the artifact.
+      archive_url: The Google Storage URL or local path to find the artifact.
       name: Identifying name to be used to find/store the artifact.
       build: The name of the build e.g. board/release.
       is_regex_name: Whether the name pattern is a regex (default: glob).
@@ -185,11 +186,32 @@
       f.write('\n'.join(self.installed_files))
 
   def _WaitForArtifactToExist(self, name, timeout):
-    """Waits for artifact to exist and sets self.name to appropriate name.
+    """Waits for artifact to exist and returns the appropriate names.
 
     Args:
       name: Name to look at.
-      timeout: How long to wait for artifact to become available.
+      timeout: How long to wait for artifact to become available. Only matters
+               if self.archive_url is a Google Storage URL.
+
+    Returns:
+      A list of names that match.
+
+    Raises:
+      ArtifactDownloadError: An error occurred when obtaining artifact.
+    """
+    if self.archive_url.startswith('gs://'):
+      return self._WaitForGSArtifactToExist(name, timeout)
+    return self._VerifyLocalArtifactExists(name)
+
+  def _WaitForGSArtifactToExist(self, name, timeout):
+    """Waits for artifact to exist and returns the appropriate names.
+
+    Args:
+      name: Name to look at.
+      timeout: How long to wait for the artifact to become available.
+
+    Returns:
+      A list of names that match.
 
     Raises:
       ArtifactDownloadError: An error occurred when obtaining artifact.
@@ -202,6 +224,30 @@
                                   (name, self.archive_url))
     return names
 
+  def _VerifyLocalArtifactExists(self, name):
+    """Verifies the local artifact exists and returns the appropriate names.
+
+    Args:
+      name: Name to look at.
+
+    Returns:
+      A list of names that match.
+
+    Raises:
+      ArtifactDownloadError: An error occurred when obtaining artifact.
+    """
+    local_path = os.path.join(self.archive_url, name)
+    if self.is_regex_name:
+      filter_re = re.compile(name)
+      for filename in os.listdir(self.archive_url):
+        if filter_re.match(filename):
+          return [filename]
+    else:
+      glob_search = glob.glob(local_path)
+      if glob_search and len(glob_search) == 1:
+        return [os.path.basename(glob_search[0])]
+    raise ArtifactDownloadError('Artifact not found.')
+
   def _UpdateName(self, names):
     if self.single_name and len(names) > 1:
       raise ArtifactDownloadError('Too many artifacts match %s' % self.name)
@@ -210,9 +256,15 @@
 
   def _Download(self):
     """Downloads artifact from Google Storage to a local directory."""
-    gs_path = '/'.join([self.archive_url, self.name])
     self.install_path = os.path.join(self.install_dir, self.name)
-    gsutil_util.DownloadFromGS(gs_path, self.install_path)
+    if self.archive_url.startswith('gs://'):
+      gs_path = '/'.join([self.archive_url, self.name])
+      gsutil_util.DownloadFromGS(gs_path, self.install_path)
+    else:
+      # It's a local path so just copy it into the staged directory.
+      shutil.copyfile(os.path.join(self.archive_url, self.name),
+                      self.install_path)
+
 
   def _Setup(self):
     """Process the downloaded content, update the list of installed files."""
diff --git a/devserver.py b/devserver.py
index c398c9e..16e895e 100755
--- a/devserver.py
+++ b/devserver.py
@@ -446,6 +446,22 @@
     else:
       raise DevServerError("Must specify an archive_url in the request")
 
+  @staticmethod
+  def _canonicalize_local_path(local_path):
+    """Canonicalizes |local_path| strings.
+
+    Raises:
+      DevServerError: if |local_path| is outside the static directory.
+    """
+    # Restrict staging of local content to only files within the static
+    # directory.
+    local_path = os.path.abspath(local_path)
+    if not local_path.startswith(updater.static_dir):
+      raise DevServerError('Local path %s must be a subdirectory of the static'
+                           ' directory: %s' % (local_path, updater.static_dir))
+
+    return local_path.rstrip('/')
+
   @cherrypy.expose
   def is_staged(self, **kwargs):
     """Check if artifacts have been downloaded.
@@ -511,6 +527,7 @@
 
     Args:
       archive_url: Google Storage URL for the build.
+      local_path: Local path to the build under the static directory.
       async: True to return without waiting for download to complete.
       artifacts: Comma separated list of named artifacts to download.
         These are defined in artifact_info and have their implementation
@@ -542,14 +559,26 @@
 
       http://devserver_url:<port>/static/x86-mario-release/R26-3920.0.0
     """
-    archive_url = self._canonicalize_archive_url(kwargs.get('archive_url'))
+    archive_url = kwargs.get('archive_url')
+    local_path = kwargs.get('local_path')
+    if not archive_url and not local_path:
+      raise DevServerError('Requires archive_url or local_path to be '
+                           'specified.')
+    if archive_url and local_path:
+      raise DevServerError('archive_url and local_path cannot both be '
+                           'specified.')
+    if archive_url:
+      archive_url = self._canonicalize_archive_url(archive_url)
+    if local_path:
+      local_path = self._canonicalize_local_path(local_path)
     async = kwargs.get('async', False)
     artifacts, files = self._get_artifacts(kwargs)
     with DevServerRoot._staging_thread_count_lock:
       DevServerRoot._staging_thread_count += 1
     try:
-      downloader.Downloader(updater.static_dir, archive_url).Download(
-          artifacts, files, async=async)
+      downloader.Downloader(
+          updater.static_dir, (archive_url or local_path)).Download(
+              artifacts, files, async=async)
     finally:
       with DevServerRoot._staging_thread_count_lock:
         DevServerRoot._staging_thread_count -= 1
diff --git a/downloader.py b/downloader.py
index 9174f10..2a319d6 100755
--- a/downloader.py
+++ b/downloader.py
@@ -60,8 +60,22 @@
     self._build_dir = Downloader.GetBuildDir(static_dir, archive_url)
 
   @staticmethod
-  def ParseUrl(archive_url):
-    """Parses archive_url into rel_path and build.
+  def ParseUrl(path_or_url):
+    """Parses |path_or_url| into build relative path and the shorter build name.
+
+    Args:
+      path_or_url: a local path or URL at which build artifacts are archived.
+
+    Returns:
+      A tuple of (build relative path, short build name)
+    """
+    if path_or_url.startswith('gs://'):
+      return Downloader.ParseGSUrl(path_or_url)
+    return Downloader.ParseLocalPath(path_or_url)
+
+  @staticmethod
+  def ParseGSUrl(archive_url):
+    """Parses |path_or_url| into build relative path and the shorter build name.
 
     Parses archive_url into rel_path and build e.g.
     gs://chromeos-image-archive/{rel_path}/{build}.
@@ -82,6 +96,24 @@
     return rel_path, build
 
   @staticmethod
+  def ParseLocalPath(local_path):
+    """Parses local_path into rel_path and build.
+
+    Parses a local path into rel_path and build e.g.
+    /{path to static dir}/{rel_path}/{build}.
+
+    Args:
+      local_path: a local path where the build artifacts are stored. Must be a
+                  subpath of the static directory.
+
+    Returns:
+      A tuple of (build relative path, short build name)
+    """
+    rel_path = os.path.basename(os.path.dirname(local_path))
+    build = os.path.basename(local_path)
+    return rel_path, build
+
+  @staticmethod
   def GetBuildDir(static_dir, archive_url):
     """Returns the path to where the artifacts will be staged.
 
diff --git a/downloader_unittest.py b/downloader_unittest.py
index 3c8083a..64ca12e 100755
--- a/downloader_unittest.py
+++ b/downloader_unittest.py
@@ -18,6 +18,7 @@
 
 # pylint: disable=W0212,E1120
 class DownloaderTestBase(mox.MoxTestBase):
+  """Downloader Unittests."""
 
   def setUp(self):
     mox.MoxTestBase.setUp(self)
@@ -26,18 +27,19 @@
     self.build = 'R17-1413.0.0-a1-b1346'
     self.archive_url = (
         'gs://chromeos-image-archive/%s/%s' % (self.board, self.build))
+    self.local_path = '/local/path/x86-mario-release/R17-1413.0.0-a1-b1346'
 
   def tearDown(self):
     shutil.rmtree(self._work_dir, ignore_errors=True)
 
-  def testSimpleDownloadOfTestSuites(self):
-    """Basic test_suites test.
+  def _SimpleDownloadOfTestSuites(self, archive_path):
+    """Helper to verify test_suites are downloaded correctly.
 
-    Verifies that if we request the test_suites, it gets downloaded and
-    the autotest tarball is attempted in the background.
+    Args:
+      archive_path: Archive URL or local path to test with.
     """
     downloader_instance = downloader.Downloader(self._work_dir,
-                                                self.archive_url)
+                                                archive_path)
     self.mox.StubOutWithMock(downloader.Downloader,
                              '_DownloadArtifactsSerially')
     self.mox.StubOutWithMock(downloader.Downloader,
@@ -46,7 +48,7 @@
     downloader.Downloader._DownloadArtifactsInBackground(mox.In(mox.IsA(
         build_artifact.AutotestTarballBuildArtifact)))
     downloader.Downloader._DownloadArtifactsSerially(
-        [mox.IsA(build_artifact.TarballBuildArtifact)], no_wait=True)
+        [mox.IsA(build_artifact.BundledBuildArtifact)], no_wait=True)
     self.mox.ReplayAll()
     downloader_instance.Download(artifacts=['test_suites'],
                                  files=None)
@@ -56,22 +58,45 @@
                      downloader.Downloader._TIMESTAMP_FILENAME)))
     self.mox.VerifyAll()
 
-  def testDownloadSymbols(self):
+  def testSimpleDownloadOfTestSuitesFromGS(self):
+    """Basic test_suites test.
+
+    Verifies that if we request the test_suites from Google Storage, it gets
+    downloaded and the autotest tarball is attempted in the background.
+    """
+    self._SimpleDownloadOfTestSuites(self.archive_url)
+
+  def testSimpleDownloadOfTestSuitesFromLocal(self):
+    """Basic test_suites test.
+
+    Verifies that if we request the test_suites from a local path, it gets
+    downloaded and the autotest tarball is attempted in the background.
+    """
+    self._SimpleDownloadOfTestSuites(self.local_path)
+
+  def _DownloadSymbolsHelper(self, archive_path):
     """Basic symbols download."""
-    downloader_instance = downloader.Downloader(self._work_dir,
-                                                self.archive_url)
+    downloader_instance = downloader.Downloader(self._work_dir, archive_path)
     self.mox.StubOutWithMock(downloader.Downloader,
                              '_DownloadArtifactsSerially')
     # Should not get called but mocking so that we know it wasn't called.
     self.mox.StubOutWithMock(downloader.Downloader,
                              '_DownloadArtifactsInBackground')
     downloader.Downloader._DownloadArtifactsSerially(
-        [mox.IsA(build_artifact.TarballBuildArtifact)], no_wait=True)
+        [mox.IsA(build_artifact.BundledBuildArtifact)], no_wait=True)
     self.mox.ReplayAll()
     downloader_instance.Download(artifacts=['symbols'],
                                  files=None)
     self.mox.VerifyAll()
 
+  def testDownloadSymbolsFromGS(self):
+    """Basic symbols download from Google Storage."""
+    self._DownloadSymbolsHelper(self.archive_url)
+
+  def testDownloadSymbolsFromLocal(self):
+    """Basic symbols download from a Local Path."""
+    self._DownloadSymbolsHelper(self.local_path)
+
 
 if __name__ == '__main__':
   unittest.main()