cros_generate_test_payload: Do not use devserver to generate payloads

There is no need to run a devserver just to create an update
payload. This patch removes this use case and replaces it with direct
calls to cros_generate_update_payload.
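
In effect, each payload is now generated in-process along these lines
(simplified sketch; the real call sites are in the diff below):

  from chromite.lib.paygen import paygen_payload_lib
  from chromite.lib.paygen import paygen_stateful_payload_lib

  # Full or delta update payload, optionally signed. A src_image of
  # None means a full payload.
  paygen_payload_lib.GenerateUpdatePayload(
      target_image, payload_file, src_image=base_image, private_key=key)
  # Stateful payload, generated the same way devserver used to do it.
  paygen_stateful_payload_lib.GenerateStatefulPayload(
      target_image, payload_dir)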

The --nplus1, --nplus1_archive_dir, --full_payload, and
--base_latest_from_dir flags, along with the code for generating the
stateful partition payload, appear to be dead code. This patch removes
them as well.
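
The payloads still land in devserver's static cache directory, keyed by
the same MD5-based label devserver would have computed, so the test
harness keeps finding them at update/cache/<label>. Roughly (md5()
abbreviates the MD5 of the file's contents):

  label = ''
  if base:
    label += md5(base) + '_'
  label += md5(target)
  if key:
    label += '+' + md5(key)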

BUG=chromium:872441
TEST=betty-chrome-pfq-tryjob at https://cros-goldeneye.corp.google.com/chromeos/healthmonitoring/buildDetails?buildbucketId=8915197919992284560

Change-Id: I8fa87642c016687843fe0c553e846a9db399a2d0
Reviewed-on: https://chromium-review.googlesource.com/1168126
Commit-Ready: ChromeOS CL Exonerator Bot <chromiumos-cl-exonerator@appspot.gserviceaccount.com>
Tested-by: Amin Hassani <ahassani@chromium.org>
Reviewed-by: Amin Hassani <ahassani@chromium.org>
diff --git a/ctest/ctest.py b/ctest/ctest.py
index c68da93..c308462 100755
--- a/ctest/ctest.py
+++ b/ctest/ctest.py
@@ -37,7 +37,6 @@
     crosutils_root: Location of crosutils.
     jobs: Numbers of threads to run in parallel.
     no_graphics: boolean: If True, disable graphics during vm test.
-    nplus1_archive_dir: Archive directory to store nplus1 payloads.
     payload_signing_key: Signs payloads with this key.
     public_key: Loads key to verify signed payloads.
     remote: ip address for real test harness run.
@@ -77,7 +76,6 @@
       self.payload_signing_key = None
 
     self.jobs = opts.jobs
-    self.nplus1_archive_dir = opts.nplus1_archive_dir
 
     # An optional ssh private key used for testing.
     self.ssh_private_key = opts.ssh_private_key
@@ -133,9 +131,6 @@
     cmd.append('--base=%s' % self.base)
     cmd.append('--board=%s' % self.board)
     cmd.append('--jobs=%d' % self.jobs)
-    if self.nplus1_archive_dir:
-      cmd.append('--nplus1')
-      cmd.append('--nplus1_archive_dir=%s' % self.nplus1_archive_dir)
 
     if full:
       cmd.append('--full_suite')
@@ -237,8 +232,6 @@
                       help='Run a quick update test. This will run a subset of '
                       'test suite after running autoupdate from target '
                       'image to itself.')
-  parser.add_argument('--nplus1_archive_dir', default=None,
-                      help='If set, directory to archive nplus1 payloads.')
   parser.add_argument('--remote', default='0.0.0.0',
                       help='For real tests, ip address of the target machine.')
   parser.add_argument('--target_image', default=None,
@@ -275,7 +268,7 @@
 
   # force absolute path for these opts, since a chdir occurs deeper in the
   # codebase.
-  for x in ('nplus1_archive_dir', 'target_image', 'test_results_root'):
+  for x in ('target_image', 'test_results_root'):
     if x == 'target_image' and opts.type == 'gce':
       # In this case |target_image| is a Google Storage path.
       continue
diff --git a/generate_test_payloads/cros_generate_test_payloads.py b/generate_test_payloads/cros_generate_test_payloads.py
index bf8b418..95eb929 100755
--- a/generate_test_payloads/cros_generate_test_payloads.py
+++ b/generate_test_payloads/cros_generate_test_payloads.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2
+#!/usr/bin/env python2
 # -*- coding: utf-8 -*-
 # Copyright (c) 2011 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
@@ -19,37 +19,38 @@
 
 from __future__ import print_function
 
-import functools
+import hashlib
 import optparse
 import os
 import pickle
-import re
 import shutil
 import sys
-import tempfile
 
 import constants
 sys.path.append(constants.CROS_PLATFORM_ROOT)
 sys.path.append(constants.SOURCE_ROOT)
 
-from chromite.lib import cros_build_lib
 from chromite.lib import cros_logging as logging
 from chromite.lib import dev_server_wrapper
 from chromite.lib import locking
-from chromite.lib import osutils
-from chromite.lib import parallel
-from chromite.lib import path_util
 from chromite.lib import sudo
-from chromite.lib import timeout_util
+
+from chromite.lib.paygen import paygen_payload_lib
+from chromite.lib.paygen import paygen_stateful_payload_lib
+
 from crostestutils.au_test_harness import cros_au_test_harness
-from crostestutils.generate_test_payloads import payload_generation_exception
-from crostestutils.lib import image_extractor
+
 from crostestutils.lib import public_key_manager
 from crostestutils.lib import test_helper
 
 
-class InvalidDevserverOutput(Exception):
-  """If we are unable to parse devserver output, this is raised."""
+def _GetFileMd5(path):
+  """Calculates the Md5 hash of a file."""
+  hash_md5 = hashlib.md5()
+  with open(path, "rb") as f:
+    for chunk in iter(lambda: f.read(4096 * 10), b""):
+      hash_md5.update(chunk)
+  return hash_md5.hexdigest()
 
 
 class UpdatePayload(object):
@@ -60,45 +61,40 @@
   this defines a full update payload to the target image.
 
   Variables:
-    target: Payload to this image.
-    base: If not None, a delta payload from this image.
+    target: Create payload for this image.
+    base: If not None, a delta payload with this image as the source.
     key: If set, signed payload using this private key.
-    archive: If set, this payload should be archived.
-    archive_stateful: If set and archive is set, archive the stateful tarball
-      for the target image.
+    for_vm: Whether we want the payload for a VM image.
   """
   NAME_SPLITTER = '_'
 
-  def __init__(self, target, base, key=None, archive=False,
-               archive_stateful=False, for_vm=False):
+  def __init__(self, target, base, key=None, for_vm=False):
     self.base = base
     self.target = target
     self.key = key
-    self.archive = archive
-    self.archive_stateful = archive_stateful
     self.for_vm = for_vm
 
-  def GetNameForBin(self):
-    """Returns the path we should name an archived payload."""
-    real_target = os.path.realpath(self.target)
-    board, target_os_version, _ = real_target.split('/')[-3:]
-    prefix = 'chromeos'
-    suffix = 'dev.bin'
-    if self.base:
-      real_base = os.path.realpath(self.base)
-      base_os_version, _ = real_base.split('/')[-2:]
-      name = self.NAME_SPLITTER.join([base_os_version, target_os_version, board,
-                                      'delta'])
-    else:
-      name = self.NAME_SPLITTER.join([target_os_version, board, 'full'])
+    self.payload_dir = None
 
-    return self.NAME_SPLITTER.join([prefix, name, suffix])
+    self._CalculateUpdateCacheLabel()
 
   def UpdateId(self):
     """Generates a unique update id the test harness can understand."""
     return dev_server_wrapper.GenerateUpdateId(self.target, self.base,
                                                self.key, self.for_vm)
 
+  def _CalculateUpdateCacheLabel(self):
+    """Calculates the label associated with this payload.
+
+    It is exactly what devserver does.
+    """
+    self.label = ''
+    if self.base:
+      self.label += _GetFileMd5(self.base) + '_'
+    self.label += _GetFileMd5(self.target)
+    if self.key:
+      self.label += '+' + _GetFileMd5(self.key)
+
   def __str__(self):
     my_repr = self.target
     if self.base:
@@ -121,7 +117,9 @@
 
 class UpdatePayloadGenerator(object):
   """Class responsible for generating update payloads."""
-  CHROOT_PATH_TO_DEVSERVER_CACHE = 'var/lib/devserver/static/cache'
+
+  PATH_TO_CACHE_DIR = os.path.join(
+      constants.SOURCE_ROOT, 'chroot/var/lib/devserver/static/cache')
 
   def __init__(self, options):
     """Initializes a generator object from parsed options.
@@ -147,19 +145,19 @@
     self.basic_suite = options.basic_suite
     self.full_suite = options.full_suite
     self.payloads = set([])
-    self.full_payload = options.full_payload
-    self.nplus1_archive_dir = options.nplus1_archive_dir
 
     self.jobs = options.jobs
-    self.nplus1 = options.nplus1
 
     self.vm = _ShouldGenerateVM(options)
 
-  def _AddUpdatePayload(self, target, base, key=None, archive=False,
-                        archive_stateful=False, for_vm=False):
+
+  def _AddUpdatePayload(self, target, base, key=None, for_vm=False):
     """Adds a new required update payload.  If base is None, a full payload."""
-    self.payloads.add(UpdatePayload(target, base, key, archive,
-                                    archive_stateful, for_vm))
+    payload = UpdatePayload(target, base, key, for_vm)
+
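+    # Place the payload under devserver's static cache so the test
+    # harness can still fetch it at update/cache/<label>.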
+    payload.payload_dir = os.path.join(self.PATH_TO_CACHE_DIR, payload.label)
+
+    self.payloads.add(payload)
 
   def GenerateImagesForTesting(self):
     # All vm testing requires a VM'ized target.
@@ -183,15 +181,13 @@
   def GeneratePayloadRequirements(self):
     """Generate Payload Requirements for AUTestHarness and NPlus1 Testing."""
     if self.full_suite:
-      # N-1->N.
-      self._AddUpdatePayload(self.target, self.base, for_vm=self.vm)
+      if self.target != self.base:
+        # N-1->N. But only add it if the base is different from the target.
+        self._AddUpdatePayload(self.target, self.base, for_vm=self.vm)
 
       # N->N after N-1->N.
       self._AddUpdatePayload(self.target, self.target, for_vm=self.vm)
 
-      # N->N From VM base.
-      self._AddUpdatePayload(self.target, self.target, for_vm=self.vm)
-
       # Need a signed payload for the signed payload test.
       if self.target_signed:
         self._AddUpdatePayload(self.target_signed, self.target_signed,
@@ -201,18 +197,7 @@
       # Update image to itself from VM base.
       self._AddUpdatePayload(self.target, self.target, for_vm=self.vm)
 
-    # Add deltas for m minus 1 to n and n to n.
-    if self.nplus1:
-      self._AddUpdatePayload(self.target_no_vm, self.base_no_vm, archive=True)
-      self._AddUpdatePayload(self.target_no_vm, self.target_no_vm, archive=True)
-
-    # Add the full payload.
-    if self.nplus1 or self.full_payload:
-      self._AddUpdatePayload(self.target_no_vm, None, archive=True,
-                             archive_stateful=True)
-
-
-  def GeneratePayloads(self):
+  def GenerateUpdatePayloads(self):
     """Iterates through payload requirements and generates them.
 
     This is the main method of this class.  It iterates through payloads
@@ -223,143 +208,36 @@
       The cache as a Python dict.
     """
 
-    def GeneratePayload(payload, log_file):
-      """Returns the error code from generating an update with the devserver."""
-      # Base command.
-      command = ['start_devserver', '--pregenerate_update', '--exit']
-
-      in_chroot_key = in_chroot_base = None
-      in_chroot_target = path_util.ToChrootPath(payload.target)
-      if payload.base:
-        in_chroot_base = path_util.ToChrootPath(payload.base)
-
-      if payload.key:
-        in_chroot_key = path_util.ToChrootPath(payload.key)
-
-      command.append('--image=%s' % in_chroot_target)
-      if payload.base:
-        command.append('--src_image=%s' % in_chroot_base)
-      if payload.key:
-        command.append('--private_key=%s' % in_chroot_key)
-
-      if payload.base:
-        debug_message = 'delta payload from %s to %s' % (payload.base,
-                                                         payload.target)
-      else:
-        debug_message = 'full payload to %s' % payload.target
-
-      if payload.for_vm:
-        debug_message += ' and not patching the kernel.'
-
-      if in_chroot_key:
-        debug_message = 'Generating a signed %s' % debug_message
-      else:
-        debug_message = 'Generating an unsigned %s' % debug_message
-
-      logging.info(debug_message)
-      try:
-        with timeout_util.Timeout(constants.MAX_TIMEOUT_SECONDS):
-          cros_build_lib.RunCommand(command, log_stdout_to_file=log_file,
-                                    combine_stdout_stderr=True,
-                                    enter_chroot=True, print_cmd=False,
-                                    cwd=constants.SOURCE_ROOT)
-      except (timeout_util.TimeoutError, cros_build_lib.RunCommandError):
-        # Print output first, then re-raise the exception.
-        if os.path.isfile(log_file):
-          logging.error(osutils.ReadFile(log_file))
-        raise
-
-    def ProcessOutput(log_files):
-      """Processes results from the log files of GeneratePayload invocations.
+    def GenerateUpdatePayload(payload):
+      """Generates an update payload and writes it into payload file.
 
       Args:
-        log_files: A list of filename strings with stored logs.
-
-      Returns:
-        An array of cache entries from the log files.
-
-      Raises:
-        payload_generation_exception.PayloadGenerationException: Raises this
-          exception if we failed to parse the devserver output to find the
-          location of the update path.
+        payload: An UpdatePayload object that defines the parameters of the
+                 payload.
       """
-      # Looking for this line in the output.
-      key_line_re = re.compile(r'^PREGENERATED_UPDATE=([\w/./+]+)')
-      return_array = []
-      for log_file in log_files:
-        with open(log_file) as f:
-          for line in f:
-            match = key_line_re.search(line)
-            if match:
-              # Convert cache/label/update.gz -> update/cache/label.
-              path_to_update_gz = match.group(1).rstrip()
-              path_to_update_dir = path_to_update_gz.rpartition(
-                  '/update.gz')[0]
+      payload_file = os.path.join(payload.payload_dir, 'update.gz')
 
-              # Check that we could actually parse the directory correctly.
-              if not path_to_update_dir:
-                raise payload_generation_exception.PayloadGenerationException(
-                    'Payload generated but failed to parse cache directory.')
+      logging.info('Generating a%s %s payload %s to %s %s',
+                   ' signed' if payload.key else 'n unsigned',
+                   'delta' if payload.base else 'full',
+                   ('from %s' % payload.base) if payload.base else '',
+                   payload.target,
+                   'and not patching the kernel.' if payload.for_vm else '')
 
-              return_array.append('/'.join(['update', path_to_update_dir]))
-              break
-          else:
-            logging.error('Could not find PREGENERATED_UPDATE in log:')
-            f.seek(0)
-            for line in f:
-              logging.error('  log: %s', line)
-            # This is not a recoverable error.
-            raise InvalidDevserverOutput('Could not parse devserver log')
+      paygen_payload_lib.GenerateUpdatePayload(payload.target, payload_file,
+                                               src_image=payload.base,
+                                               private_key=payload.key)
 
-      return return_array
+      # Generate the stateful payload just as devserver would have done.
+      paygen_stateful_payload_lib.GenerateStatefulPayload(payload.target,
+                                                          payload.payload_dir)
 
-    jobs = []
-    log_files = []
-    # Generate list of paylods and list of log files.
-    for payload in self.payloads:
-      fd, log_file = tempfile.mkstemp('GenerateVMUpdate')
-      os.close(fd)  # Just want filename so close file immediately.
-
-      jobs.append(functools.partial(GeneratePayload, payload, log_file))
-      log_files.append(log_file)
-
-    # Run update generation code and wait for output.
-    logging.info('Generating updates required for this test suite in parallel.')
-    try:
-      parallel.RunParallelSteps(jobs, max_parallel=self.jobs)
-    except parallel.BackgroundFailure as ex:
-      logging.error(ex)
-      raise payload_generation_exception.PayloadGenerationException(
-          'Failed to generate a required update.')
-
-    results = ProcessOutput(log_files)
+    for p in self.payloads:
+      GenerateUpdatePayload(p)
 
     # Build the dictionary from our id's and returned cache paths.
-    cache_dictionary = {}
-    for index, payload in enumerate(self.payloads):
-      # Path return is of the form update/cache/directory.
-      update_path = results[index]
-      cache_dictionary[payload.UpdateId()] = update_path
-      # Archive payload to payload directory.
-      if payload.archive and self.nplus1_archive_dir:
-        # Only need directory as we know the rest.
-        path_to_payload_dir = os.path.join(
-            constants.SOURCE_ROOT, 'chroot',
-            self.CHROOT_PATH_TO_DEVSERVER_CACHE, os.path.basename(update_path))
-        payload_path = os.path.join(path_to_payload_dir, 'update.gz')
-        archive_path = os.path.join(self.nplus1_archive_dir,
-                                    payload.GetNameForBin())
-        logging.info('Archiving %s to %s.', payload.GetNameForBin(),
-                     archive_path)
-        shutil.copyfile(payload_path, archive_path)
-        if payload.archive_stateful:
-          stateful_path = os.path.join(path_to_payload_dir, 'stateful.tgz')
-          archive_path = os.path.join(self.nplus1_archive_dir, 'stateful.tgz')
-          logging.info('Archiving stateful payload from %s to %s',
-                       payload.GetNameForBin(), archive_path)
-          shutil.copyfile(stateful_path, archive_path)
-
-    return cache_dictionary
+    return {p.UpdateId(): os.path.join('update', 'cache', p.label)
+            for p in self.payloads}
 
   def DumpCacheToDisk(self, cache):
     """Dumps the cache to the same folder as the images."""
@@ -367,8 +245,8 @@
       logging.info('Not dumping payload cache to disk as payloads for the '
                    'test harness were not requested.')
     else:
-      path_to_dump = os.path.dirname(self.target)
-      cache_file = os.path.join(path_to_dump, cros_au_test_harness.CACHE_FILE)
+      cache_file = os.path.join(os.path.dirname(self.target),
+                                cros_au_test_harness.CACHE_FILE)
 
       logging.info('Dumping %s', cache_file)
       with open(cache_file, 'w') as file_handle:
@@ -392,19 +270,6 @@
   if not options.target or not os.path.isfile(options.target):
     parser.error('Target image must exist.')
 
-  # Determine the base image. If latest_from_config specified, find the latest
-  # image from the given config. If it doesn't exist, use the target image.
-  target_version = os.path.realpath(options.target).rsplit('/', 2)[-2]
-  if options.base_latest_from_dir:
-    # Extract the latest build.
-    extractor = image_extractor.ImageExtractor(options.base_latest_from_dir,
-                                               os.path.basename(options.target))
-    latest_image_dir = extractor.GetLatestImage(target_version)
-    if latest_image_dir:
-      options.base = extractor.UnzipImage(latest_image_dir)
-    else:
-      logging.warning('No previous image.zip found in local archive.')
-
   if not options.base:
     logging.info('Using target image as base image.')
     options.base = options.target
@@ -424,11 +289,6 @@
       parser.error('Board must be set to generate update '
                    'payloads for vm.')
 
-  if options.full_payload or options.nplus1:
-    if not options.nplus1_archive_dir:
-      parser.error('Must specify an archive directory if nplus1 or '
-                   'full payload are specified.')
-
 
 def main():
   test_helper.SetupCommonLoggingFormat()
@@ -439,14 +299,6 @@
                     help='Prepare to run the basic au test suite.')
   parser.add_option('--full_suite', default=False, action='store_true',
                     help='Prepare to run the full au test suite.')
-  parser.add_option('--full_payload', default=False, action='store_true',
-                    help='Generate the full update payload and store it in '
-                    'the nplus1 archive dir.')
-  parser.add_option('--nplus1', default=False, action='store_true',
-                    help='Produce nplus1 updates for testing in lab and store '
-                    'them in the nplus1 archive dir.')
-  parser.add_option('--nplus1_archive_dir', default=None,
-                    help='Archive nplus1 updates into this directory.')
 
   # Options related to how to generate test payloads for the test harness.
   parser.add_option('--novm', default=True, action='store_false', dest='vm',
@@ -459,10 +311,6 @@
   # Options related to the images to test.
   parser.add_option('--board', help='Board used for the images.')
   parser.add_option('--base', help='Image we want to test updates from.')
-  parser.add_option('--base_latest_from_dir', help='Ignore the base '
-                    'option and use the latest image from the specified '
-                    'directory as the base image. If none exists, default to '
-                    'target image.')
   parser.add_option('--target', help='Image we want to test updates to.')
 
   # Miscellaneous options.
@@ -472,9 +320,6 @@
 
   options = parser.parse_args()[0]
   CheckOptions(parser, options)
-  if options.nplus1_archive_dir and not os.path.exists(
-      options.nplus1_archive_dir):
-    os.makedirs(options.nplus1_archive_dir)
 
   # Don't allow this code to be run more than once at a time.
   lock_path = os.path.join(os.path.dirname(__file__), '.lock_file')
@@ -484,7 +329,7 @@
       generator = UpdatePayloadGenerator(options)
       generator.GenerateImagesForTesting()
       generator.GeneratePayloadRequirements()
-      cache = generator.GeneratePayloads()
+      cache = generator.GenerateUpdatePayloads()
       generator.DumpCacheToDisk(cache)