paycheck: unit tests + fixes to checker module

This adds missing unit tests for the checker module, bundled with fixes
to some bugs that surfaced due to unit tests. This includes:

* A fake extent (signified by start_block == UINT64_MAX) that
  accompanies a signature data blob bears different requirements than
  previously implemented.  Specifically, the extent sequence must have
  exactly one extent; and the number of blocks is not necessarily one,
  rather it is the correct number that corresponds to the actual length
  of the signature blob.

* REPLACE/REPLACE_BZ operations must contain data.

* MOVE operation validation must ensure that all of the actual message
  extents are being used.

* BSDIFF operation must contain data (the diff).

* Signature pseudo-operation should be a REPLACE.

BUG=chromium-os:34911,chromium-os:33607,chromium-os:7597
TEST=Passes unittests (upcoming); works with actual payloads.

Change-Id: I4d839d1d4da1fbb4a493b208958a139368e2c8ca
Reviewed-on: https://gerrit.chromium.org/gerrit/45429
Tested-by: Gilad Arnold <garnold@chromium.org>
Reviewed-by: Chris Sosa <sosa@chromium.org>
Commit-Queue: Gilad Arnold <garnold@chromium.org>
diff --git a/host/lib/update_payload/checker.py b/host/lib/update_payload/checker.py
index e47456d..b85e2b8 100644
--- a/host/lib/update_payload/checker.py
+++ b/host/lib/update_payload/checker.py
@@ -28,12 +28,6 @@
 #
 # Constants / helper functions.
 #
-_SIG_ASN1_HEADER = (
-    '\x30\x31\x30\x0d\x06\x09\x60\x86'
-    '\x48\x01\x65\x03\x04\x02\x01\x05'
-    '\x00\x04\x20'
-)
-
 _TYPE_FULL = 'full'
 _TYPE_DELTA = 'delta'
 
@@ -398,14 +392,14 @@
         ['openssl', 'rsautl', '-verify', '-pubin', '-inkey', pubkey_file_name],
         send_data=sig_data)
 
-    if len(signed_data) != len(_SIG_ASN1_HEADER) + 32:
+    if len(signed_data) != len(common.SIG_ASN1_HEADER) + 32:
       raise PayloadError('%s: unexpected signed data length (%d)' %
                          (sig_name, len(signed_data)))
 
-    if not signed_data.startswith(_SIG_ASN1_HEADER):
+    if not signed_data.startswith(common.SIG_ASN1_HEADER):
       raise PayloadError('%s: not containing standard ASN.1 prefix' % sig_name)
 
-    signed_hash = signed_data[len(_SIG_ASN1_HEADER):]
+    signed_hash = signed_data[len(common.SIG_ASN1_HEADER):]
     if signed_hash != actual_hash:
       raise PayloadError('%s: signed hash (%s) different from actual (%s)' %
                          (sig_name, signed_hash.encode('hex'),
@@ -560,10 +554,7 @@
 
     """
     total_num_blocks = 0
-    num_extents = 0
     for ex, ex_name in common.ExtentIter(extents, name):
-      num_extents += 1
-
       # Check: mandatory fields.
       start_block = PayloadChecker._CheckMandatoryField(ex, 'start_block',
                                                         None, ex_name)
@@ -585,9 +576,9 @@
         # Record block usage.
         for i in range(start_block, end_block):
           block_counters[i] += 1
-      elif not (allow_pseudo or
-                (allow_signature and
-                 (num_extents == len(extents) and num_blocks == 1))):
+      elif not (allow_pseudo or (allow_signature and len(extents) == 1)):
+        # Pseudo-extents must be allowed explicitly, or otherwise be part of a
+        # signature operation (in which case there has to be exactly one).
         raise PayloadError('%s: unexpected pseudo-extent' % ex_name)
 
       total_num_blocks += num_blocks
@@ -606,9 +597,14 @@
       PayloadError if any check fails.
 
     """
+    # Check: does not contain src extents.
     if op.src_extents:
       raise PayloadError('%s: contains src_extents' % op_name)
 
+    # Check: contains data.
+    if data_length is None:
+      raise PayloadError('%s: missing data_{offset,length}' % op_name)
+
     if op.type == common.OpType.REPLACE:
       PayloadChecker._CheckBlocksFitLength(data_length, total_dst_blocks,
                                            self.block_size,
@@ -673,7 +669,7 @@
         dst_num = dst_extent.num_blocks
 
       if src_idx == dst_idx:
-        raise PayloadError('%s: src/dst blocks %d are the same (%d)' %
+        raise PayloadError('%s: src/dst block number %d is the same (%d)' %
                            (op_name, i, src_idx))
 
       advance = min(src_num, dst_num)
@@ -689,6 +685,12 @@
       if dst_num == 0:
         dst_extent = None
 
+    # Make sure we've exhausted all src/dst extents.
+    if src_extent:
+      raise PayloadError('%s: excess src blocks' % op_name)
+    if dst_extent:
+      raise PayloadError('%s: excess dst blocks' % op_name)
+
   def _CheckBsdiffOperation(self, data_length, total_dst_blocks, op_name):
     """Specific checks for BSDIFF operations.
 
@@ -700,12 +702,17 @@
       PayloadError if any check fails.
 
     """
+    # Check: data_{offset,length} present.
+    if data_length is None:
+      raise PayloadError('%s: missing data_{offset,length}' % op_name)
+
     # Check: data_length is strictly smaller than the alotted dst blocks.
     if data_length >= total_dst_blocks * self.block_size:
       raise PayloadError(
-          '%s: data_length (%d) must be smaller than num dst blocks (%d) * '
-          'block_size (%d)' %
-          (op_name, data_length, total_dst_blocks, self.block_size))
+          '%s: data_length (%d) must be smaller than allotted dst space '
+          '(%d * %d = %d)' %
+          (op_name, data_length, total_dst_blocks, self.block_size,
+           total_dst_blocks * self.block_size))
 
   def _CheckOperation(self, op, op_name, is_last, old_block_counters,
                       new_block_counters, old_part_size, new_part_size,
@@ -806,7 +813,7 @@
 
     return data_length if data_length is not None else 0
 
-  def _AllocBlockCounterss(self, part_size):
+  def _AllocBlockCounters(self, part_size):
     """Returns a freshly initialized array of block counters.
 
     Args:
@@ -834,8 +841,7 @@
       allow_unhashed: allow operations with unhashed data blobs
       allow_signature: whether this sequence may contain signature operations
     Returns:
-      A pair consisting of the number of operations and the total data blob
-      size used.
+      The total data blob size used.
     Raises:
       PayloadError if any of the checks fails.
 
@@ -865,9 +871,9 @@
       blob_hash_counts['signature'] = 0
 
     # Allocate old and new block counters.
-    old_block_counters = (self._AllocBlockCounterss(old_part_size)
+    old_block_counters = (self._AllocBlockCounters(old_part_size)
                           if old_part_size else None)
-    new_block_counters = self._AllocBlockCounterss(new_part_size)
+    new_block_counters = self._AllocBlockCounters(new_part_size)
 
     # Process and verify each operation.
     op_num = 0
@@ -937,11 +943,12 @@
     if not sigs.signatures:
       raise PayloadError('signature block is empty')
 
-    # Check: signatures_{offset,size} must match the last (fake) operation.
     last_ops_section = (self.payload.manifest.kernel_install_operations or
                         self.payload.manifest.install_operations)
     fake_sig_op = last_ops_section[-1]
-    if not (self.sigs_offset == fake_sig_op.data_offset and
+    # Check: signatures_{offset,size} must match the last (fake) operation.
+    if not (fake_sig_op.type == common.OpType.REPLACE and
+            self.sigs_offset == fake_sig_op.data_offset and
             self.sigs_size == fake_sig_op.data_length):
       raise PayloadError(
           'signatures_{offset,size} (%d+%d) does not match last operation '
diff --git a/host/lib/update_payload/checker_unittest.py b/host/lib/update_payload/checker_unittest.py
new file mode 100755
index 0000000..681a920
--- /dev/null
+++ b/host/lib/update_payload/checker_unittest.py
@@ -0,0 +1,1194 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit testing checker.py."""
+
+import array
+import collections
+import cStringIO
+import hashlib
+import itertools
+import os
+import unittest
+
+# Pylint cannot find mox.
+# pylint: disable=F0401
+import mox
+
+import checker
+import common
+import payload as update_payload  # avoid name conflicts later.
+import test_utils
+import update_metadata_pb2
+
+
+_PRIVKEY_FILE_NAME = 'payload-test-key.pem'
+_PUBKEY_FILE_NAME = 'payload-test-key.pub'
+
+
+def _OpTypeByName(op_name):
+  op_name_to_type = {
+      'REPLACE': common.OpType.REPLACE,
+      'REPLACE_BZ': common.OpType.REPLACE_BZ,
+      'MOVE': common.OpType.MOVE,
+      'BSDIFF': common.OpType.BSDIFF,
+  }
+  return op_name_to_type[op_name]
+
+
+def _KiB(count):
+  """Return the byte size of a given number of (binary) kilobytes."""
+  return count << 10
+
+
+def _MiB(count):
+  """Return the byte size of a given number of (binary) megabytes."""
+  return count << 20
+
+
+def _GiB(count):
+  """Return the byte size of a given number of (binary) gigabytes."""
+  return count << 30
+
+
+def _GetPayloadChecker(payload_gen_write_to_file_func, *largs, **dargs):
+  """Returns a payload checker from a given payload generator."""
+  payload_file = cStringIO.StringIO()
+  payload_gen_write_to_file_func(payload_file, *largs, **dargs)
+  payload_file.seek(0)
+  payload = update_payload.Payload(payload_file)
+  payload.Init()
+  return checker.PayloadChecker(payload)
+
+
+def _GetPayloadCheckerWithData(payload_gen):
+  """Returns a payload checker from a given payload generator."""
+  payload_file = cStringIO.StringIO()
+  payload_gen.WriteToFile(payload_file)
+  payload_file.seek(0)
+  payload = update_payload.Payload(payload_file)
+  payload.Init()
+  return checker.PayloadChecker(payload)
+
+
+# (i) this class doesn't need an __init__();  (ii) unit testing is all about
+# running protected methods;  (iii) don't bark about missing members of classes
+# you cannot import.
+# pylint: disable=W0232
+# pylint: disable=W0212
+# pylint: disable=E1101
+class PayloadCheckerTest(mox.MoxTestBase):
+  """Tests the PayloadChecker class.
+
+  In addition to ordinary testFoo() methods, which are automatically invoked by
+  the unittest framework, in this class we make use of DoBarTest() calls that
+  implement parametric tests of certain features. In order to invoke each test,
+  which embodies a unique combination of parameter values, as a complete unit
+  test, we perform explicit enumeration of the parameter space and create
+  individual invocation contexts for each, which are then bound as
+  testBar__param1=val1__param2=val2(). The enumeration of parameter spaces for
+  all such tests is done in AddAllParametricTests().
+
+  """
+
+  def MockPayload(self):
+    """Create a mock payload object, complete with a mock manifest."""
+    payload = self.mox.CreateMock(update_payload.Payload)
+    payload.is_init = True
+    payload.manifest = self.mox.CreateMock(
+        update_metadata_pb2.DeltaArchiveManifest)
+    return payload
+
+  @staticmethod
+  def NewExtent(start_block, num_blocks):
+    """Returns an Extent message.
+
+    Each of the provided fields is set iff it is >= 0; otherwise, it's left at
+    its default state.
+
+    Args:
+      start_block: the starting block of the extent
+      num_blocks: the number of blocks in the extent
+    Returns:
+      An Extent message.
+
+    """
+    ex = update_metadata_pb2.Extent()
+    if start_block >= 0:
+      ex.start_block = start_block
+    if num_blocks >= 0:
+      ex.num_blocks = num_blocks
+    return ex
+
+  @staticmethod
+  def NewExtentList(*args):
+    """Returns a list of extents.
+
+    Args:
+      *args: (start_block, num_blocks) pairs defining the extents
+    Returns:
+      A list of Extent objects.
+
+    """
+    ex_list = []
+    for start_block, num_blocks in args:
+      ex_list.append(PayloadCheckerTest.NewExtent(start_block, num_blocks))
+    return ex_list
+
+  @staticmethod
+  def AddToMessage(repeated_field, field_vals):
+    for field_val in field_vals:
+      new_field = repeated_field.add()
+      new_field.CopyFrom(field_val)
+
+  def assertIsNone(self, val):
+    """Asserts that val is None (TODO remove once we upgrade to Python 2.7).
+
+    Note that we're using assertEqual so as for it to show us the actual
+    non-None value.
+
+    Args:
+      val: value/object to be equated to None
+
+    """
+    self.assertEqual(val, None)
+
+  def SetupAddElemTest(self, is_present, is_submsg, convert=str,
+                       linebreak=False, indent=0):
+    """Setup for testing of _CheckElem() and its derivatives.
+
+    Args:
+      is_present: whether or not the element is found in the message
+      is_submsg: whether the element is a sub-message itself
+      convert: a representation conversion function
+      linebreak: whether or not a linebreak is to be used in the report
+      indent: indentation used for the report
+    Returns:
+      msg: a mock message object
+      report: a mock report object
+      subreport: a mock sub-report object
+      name: an element name to check
+      val: expected element value
+
+    """
+    name = 'foo'
+    val = 'fake submsg' if is_submsg else 'fake field'
+    subreport = 'fake subreport'
+
+    # Create a mock message.
+    msg = self.mox.CreateMock(update_metadata_pb2.message.Message)
+    msg.HasField(name).AndReturn(is_present)
+    setattr(msg, name, val)
+
+    # Create a mock report.
+    report = self.mox.CreateMock(checker._PayloadReport)
+    if is_present:
+      if is_submsg:
+        report.AddSubReport(name).AndReturn(subreport)
+      else:
+        report.AddField(name, convert(val), linebreak=linebreak, indent=indent)
+
+    self.mox.ReplayAll()
+    return (msg, report, subreport, name, val)
+
+  def DoAddElemTest(self, is_present, is_mandatory, is_submsg, convert,
+                    linebreak, indent):
+    """Parametric testing of _CheckElem().
+
+    Args:
+      is_present: whether or not the element is found in the message
+      is_mandatory: whether or not it's a mandatory element
+      is_submsg: whether the element is a sub-message itself
+      convert: a representation conversion function
+      linebreak: whether or not a linebreak is to be used in the report
+      indent: indentation used for the report
+
+    """
+    msg, report, subreport, name, val = self.SetupAddElemTest(
+        is_present, is_submsg, convert, linebreak, indent)
+
+    largs = [msg, name, report, is_mandatory, is_submsg]
+    dargs = {'convert': convert, 'linebreak': linebreak, 'indent': indent}
+    if is_mandatory and not is_present:
+      self.assertRaises(update_payload.PayloadError,
+                        checker.PayloadChecker._CheckElem, *largs, **dargs)
+    else:
+      ret_val, ret_subreport = checker.PayloadChecker._CheckElem(*largs,
+                                                                 **dargs)
+      self.assertEquals(ret_val, val if is_present else None)
+      self.assertEquals(ret_subreport,
+                        subreport if is_present and is_submsg else None)
+
+  def DoAddFieldTest(self, is_mandatory, is_present, convert, linebreak,
+                     indent):
+    """Parametric testing of _Check{Mandatory,Optional}Field().
+
+    Args:
+      is_mandatory: whether we're testing a mandatory call
+      is_present: whether or not the element is found in the message
+      convert: a representation conversion function
+      linebreak: whether or not a linebreak is to be used in the report
+      indent: indentation used for the report
+
+    """
+    msg, report, _, name, val = self.SetupAddElemTest(
+        is_present, False, convert, linebreak, indent)
+
+    # Prepare for invocation of the tested method.
+    largs = [msg, name, report]
+    dargs = {'convert': convert, 'linebreak': linebreak, 'indent': indent}
+    if is_mandatory:
+      largs.append('bar')
+      tested_func = checker.PayloadChecker._CheckMandatoryField
+    else:
+      tested_func = checker.PayloadChecker._CheckOptionalField
+
+    # Test the method call.
+    if is_mandatory and not is_present:
+      self.assertRaises(update_payload.PayloadError, tested_func, *largs,
+                        **dargs)
+    else:
+      ret_val = tested_func(*largs, **dargs)
+      self.assertEquals(ret_val, val if is_present else None)
+
+  def DoAddSubMsgTest(self, is_mandatory, is_present):
+    """Parametrized testing of _Check{Mandatory,Optional}SubMsg().
+
+    Args:
+      is_mandatory: whether we're testing a mandatory call
+      is_present: whether or not the element is found in the message
+
+    """
+    msg, report, subreport, name, val = self.SetupAddElemTest(is_present, True)
+
+    # Prepare for invocation of the tested method.
+    largs = [msg, name, report]
+    if is_mandatory:
+      largs.append('bar')
+      tested_func = checker.PayloadChecker._CheckMandatorySubMsg
+    else:
+      tested_func = checker.PayloadChecker._CheckOptionalSubMsg
+
+    # Test the method call.
+    if is_mandatory and not is_present:
+      self.assertRaises(update_payload.PayloadError, tested_func, *largs)
+    else:
+      ret_val, ret_subreport = tested_func(*largs)
+      self.assertEquals(ret_val, val if is_present else None)
+      self.assertEquals(ret_subreport, subreport if is_present else None)
+
+  def testCheckPresentIff(self):
+    """Tests _CheckPresentIff()."""
+    self.assertIsNone(checker.PayloadChecker._CheckPresentIff(
+        None, None, 'foo', 'bar', 'baz'))
+    self.assertIsNone(checker.PayloadChecker._CheckPresentIff(
+        'a', 'b', 'foo', 'bar', 'baz'))
+    self.assertRaises(update_payload.PayloadError,
+                      checker.PayloadChecker._CheckPresentIff,
+                      'a', None, 'foo', 'bar', 'baz')
+    self.assertRaises(update_payload.PayloadError,
+                      checker.PayloadChecker._CheckPresentIff,
+                      None, 'b', 'foo', 'bar', 'baz')
+
+  def DoCheckSha256SignatureTest(self, expect_pass, expect_subprocess_call,
+                                 sig_data, sig_asn1_header,
+                                 returned_signed_hash, expected_signed_hash):
+    """Parametric testing of _CheckSha256SignatureTest().
+
+    Args:
+      expect_pass: whether or not it should pass
+      expect_subprocess_call: whether to expect the openssl call to happen
+      sig_data: the signature raw data
+      sig_asn1_header: the ASN1 header
+      returned_signed_hash: the signed hash data returned by openssl
+      expected_signed_hash: the signed hash data to compare against
+
+    """
+    # Stub out the subprocess invocation.
+    self.mox.StubOutWithMock(checker.PayloadChecker, '_Run')
+    if expect_subprocess_call:
+      checker.PayloadChecker._Run(mox.IsA(list), send_data=sig_data).AndReturn(
+          (sig_asn1_header + returned_signed_hash, None))
+
+    self.mox.ReplayAll()
+    if expect_pass:
+      self.assertIsNone(checker.PayloadChecker._CheckSha256Signature(
+          sig_data, 'foo', expected_signed_hash, 'bar'))
+    else:
+      self.assertRaises(update_payload.PayloadError,
+                        checker.PayloadChecker._CheckSha256Signature,
+                        sig_data, 'foo', expected_signed_hash, 'bar')
+
+    self.mox.UnsetStubs()
+
+  def testCheckSha256Signature_Pass(self):
+    """Tests _CheckSha256Signature(); pass case."""
+    sig_data = 'fake-signature'.ljust(256)
+    signed_hash = hashlib.sha256('fake-data').digest()
+    self.DoCheckSha256SignatureTest(True, True, sig_data,
+                                    common.SIG_ASN1_HEADER, signed_hash,
+                                    signed_hash)
+
+  def testCheckSha256Signature_FailBadSignature(self):
+    """Tests _CheckSha256Signature(); fails due to malformed signature."""
+    sig_data = 'fake-signature'  # malformed (not 256 bytes in length)
+    signed_hash = hashlib.sha256('fake-data').digest()
+    self.DoCheckSha256SignatureTest(False, False, sig_data,
+                                    common.SIG_ASN1_HEADER, signed_hash,
+                                    signed_hash)
+
+  def testCheckSha256Signature_FailBadOutputLength(self):
+    """Tests _CheckSha256Signature(); fails due to unexpected output length."""
+    sig_data = 'fake-signature'.ljust(256)
+    signed_hash = 'fake-hash'  # malformed (not 32 bytes in length)
+    self.DoCheckSha256SignatureTest(False, True, sig_data,
+                                    common.SIG_ASN1_HEADER, signed_hash,
+                                    signed_hash)
+
+  def testCheckSha256Signature_FailBadAsnHeader(self):
+    """Tests _CheckSha256Signature(); fails due to bad ASN1 header."""
+    sig_data = 'fake-signature'.ljust(256)
+    signed_hash = hashlib.sha256('fake-data').digest()
+    bad_asn1_header = 'bad-asn-header'.ljust(len(common.SIG_ASN1_HEADER))
+    self.DoCheckSha256SignatureTest(False, True, sig_data, bad_asn1_header,
+                                    signed_hash, signed_hash)
+
+  def testCheckSha256Signature_FailBadHash(self):
+    """Tests _CheckSha256Signature(); fails due to bad hash returned."""
+    sig_data = 'fake-signature'.ljust(256)
+    expected_signed_hash = hashlib.sha256('fake-data').digest()
+    returned_signed_hash = hashlib.sha256('bad-fake-data').digest()
+    self.DoCheckSha256SignatureTest(False, True, sig_data,
+                                    common.SIG_ASN1_HEADER,
+                                    expected_signed_hash, returned_signed_hash)
+
+  def testCheckBlocksFitLength_Pass(self):
+    """Tests _CheckBlocksFitLength(); pass case."""
+    self.assertIsNone(checker.PayloadChecker._CheckBlocksFitLength(
+        64, 4, 16, 'foo'))
+    self.assertIsNone(checker.PayloadChecker._CheckBlocksFitLength(
+        60, 4, 16, 'foo'))
+    self.assertIsNone(checker.PayloadChecker._CheckBlocksFitLength(
+        49, 4, 16, 'foo'))
+    self.assertIsNone(checker.PayloadChecker._CheckBlocksFitLength(
+        48, 3, 16, 'foo'))
+
+  def testCheckBlocksFitLength_TooManyBlocks(self):
+    """Tests _CheckBlocksFitLength(); fails due to excess blocks."""
+    self.assertRaises(update_payload.PayloadError,
+                      checker.PayloadChecker._CheckBlocksFitLength,
+                      64, 5, 16, 'foo')
+    self.assertRaises(update_payload.PayloadError,
+                      checker.PayloadChecker._CheckBlocksFitLength,
+                      60, 5, 16, 'foo')
+    self.assertRaises(update_payload.PayloadError,
+                      checker.PayloadChecker._CheckBlocksFitLength,
+                      49, 5, 16, 'foo')
+    self.assertRaises(update_payload.PayloadError,
+                      checker.PayloadChecker._CheckBlocksFitLength,
+                      48, 4, 16, 'foo')
+
+  def testCheckBlocksFitLength_TooFewBlocks(self):
+    """Tests _CheckBlocksFitLength(); fails due to insufficient blocks."""
+    self.assertRaises(update_payload.PayloadError,
+                      checker.PayloadChecker._CheckBlocksFitLength,
+                      64, 3, 16, 'foo')
+    self.assertRaises(update_payload.PayloadError,
+                      checker.PayloadChecker._CheckBlocksFitLength,
+                      60, 3, 16, 'foo')
+    self.assertRaises(update_payload.PayloadError,
+                      checker.PayloadChecker._CheckBlocksFitLength,
+                      49, 3, 16, 'foo')
+    self.assertRaises(update_payload.PayloadError,
+                      checker.PayloadChecker._CheckBlocksFitLength,
+                      48, 2, 16, 'foo')
+
+  def DoCheckManifestTest(self, fail_mismatched_block_size, fail_bad_sigs,
+                          fail_mismatched_oki_ori, fail_bad_oki, fail_bad_ori,
+                          fail_bad_nki, fail_bad_nri, fail_missing_ops):
+    """Parametric testing of _CheckManifest().
+
+    Args:
+      fail_mismatched_block_size: simulate a missing block_size field
+      fail_bad_sigs: make signatures descriptor inconsistent
+      fail_mismatched_oki_ori: make old rootfs/kernel info partially present
+      fail_bad_oki: tamper with old kernel info
+      fail_bad_ori: tamper with old rootfs info
+      fail_bad_nki: tamper with new kernel info
+      fail_bad_nri: tamper with new rootfs info
+      fail_missing_ops: simulate a manifest without any operations
+
+    """
+    # Generate a test payload. For this test, we only care about the manifest
+    # and don't need any data blobs, hence we can use a plain payload generator
+    # (which also gives us more control on things that can be screwed up).
+    payload_gen = test_utils.PayloadGenerator()
+
+    # Tamper with block size, if required.
+    if fail_mismatched_block_size:
+      payload_gen.SetBlockSize(_KiB(1))
+    else:
+      payload_gen.SetBlockSize(_KiB(4))
+
+    # Add some operations.
+    if not fail_missing_ops:
+      payload_gen.AddOperation(False, common.OpType.MOVE,
+                               src_extents=[(0, 16), (16, 497)],
+                               dst_extents=[(16, 496), (0, 16)])
+      payload_gen.AddOperation(True, common.OpType.MOVE,
+                               src_extents=[(0, 8), (8, 8)],
+                               dst_extents=[(8, 8), (0, 8)])
+
+    # Set an invalid signatures block (offset but no size), if required.
+    if fail_bad_sigs:
+      payload_gen.SetSignatures(32, None)
+
+    # Add old kernel/rootfs partition info, as required.
+    if fail_mismatched_oki_ori or fail_bad_oki:
+      oki_hash = (None if fail_bad_oki
+                  else hashlib.sha256('fake-oki-content').digest())
+      payload_gen.SetPartInfo(True, False, _KiB(512), oki_hash)
+    if not fail_mismatched_oki_ori and fail_bad_ori:
+      payload_gen.SetPartInfo(False, False, _MiB(8), None)
+
+    # Add new kernel/rootfs partition info.
+    payload_gen.SetPartInfo(
+        True, True, _KiB(512),
+        None if fail_bad_nki else hashlib.sha256('fake-nki-content').digest())
+    payload_gen.SetPartInfo(
+        False, True, _MiB(8),
+        None if fail_bad_nri else hashlib.sha256('fake-nri-content').digest())
+
+    # Create the test object.
+    payload_checker = _GetPayloadChecker(payload_gen.WriteToFile)
+    report = checker._PayloadReport()
+
+    should_fail = (fail_mismatched_block_size or fail_bad_sigs or
+                   fail_mismatched_oki_ori or fail_bad_oki or fail_bad_ori or
+                   fail_bad_nki or fail_bad_nri or fail_missing_ops)
+    if should_fail:
+      self.assertRaises(update_payload.PayloadError,
+                        payload_checker._CheckManifest, report)
+    else:
+      self.assertIsNone(payload_checker._CheckManifest(report))
+
+  def testCheckLength(self):
+    """Tests _CheckLength()."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    block_size = payload_checker.block_size
+
+    # Passes.
+    self.assertIsNone(payload_checker._CheckLength(
+        int(3.5 * block_size), 4, 'foo', 'bar'))
+    # Fails, too few blocks.
+    self.assertRaises(update_payload.PayloadError,
+                      payload_checker._CheckLength,
+                      int(3.5 * block_size), 3, 'foo', 'bar')
+    # Fails, too many blocks.
+    self.assertRaises(update_payload.PayloadError,
+                      payload_checker._CheckLength,
+                      int(3.5 * block_size), 5, 'foo', 'bar')
+
+  def testCheckExtents(self):
+    """Tests _CheckExtents()."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    block_size = payload_checker.block_size
+
+    # Passes w/ all real extents.
+    extents = self.NewExtentList((0, 4), (8, 3), (1024, 16))
+    self.assertEquals(
+        payload_checker._CheckExtents(extents, (1024 + 16) * block_size,
+                                      collections.defaultdict(int), 'foo'),
+        23)
+
+    # Passes w/ pseudo-extents (aka sparse holes).
+    extents = self.NewExtentList((0, 4), (common.PSEUDO_EXTENT_MARKER, 5),
+                                 (8, 3))
+    self.assertEquals(
+        payload_checker._CheckExtents(extents, (1024 + 16) * block_size,
+                                      collections.defaultdict(int), 'foo',
+                                      allow_pseudo=True),
+        12)
+
+    # Passes w/ pseudo-extent due to a signature.
+    extents = self.NewExtentList((common.PSEUDO_EXTENT_MARKER, 2))
+    self.assertEquals(
+        payload_checker._CheckExtents(extents, (1024 + 16) * block_size,
+                                      collections.defaultdict(int), 'foo',
+                                      allow_signature=True),
+        2)
+
+    # Fails, extent missing a start block.
+    extents = self.NewExtentList((-1, 4), (8, 3), (1024, 16))
+    self.assertRaises(
+        update_payload.PayloadError, payload_checker._CheckExtents,
+        extents, (1024 + 16) * block_size, collections.defaultdict(int),
+        'foo')
+
+    # Fails, extent missing block count.
+    extents = self.NewExtentList((0, -1), (8, 3), (1024, 16))
+    self.assertRaises(
+        update_payload.PayloadError, payload_checker._CheckExtents,
+        extents, (1024 + 16) * block_size, collections.defaultdict(int),
+        'foo')
+
+    # Fails, extent has zero blocks.
+    extents = self.NewExtentList((0, 4), (8, 3), (1024, 0))
+    self.assertRaises(
+        update_payload.PayloadError, payload_checker._CheckExtents,
+        extents, (1024 + 16) * block_size, collections.defaultdict(int),
+        'foo')
+
+    # Fails, extent exceeds partition boundaries.
+    extents = self.NewExtentList((0, 4), (8, 3), (1024, 16))
+    self.assertRaises(
+        update_payload.PayloadError, payload_checker._CheckExtents,
+        extents, (1024 + 15) * block_size, collections.defaultdict(int),
+        'foo')
+
+  def testCheckReplaceOperation(self):
+    """Tests _CheckReplaceOperation() where op.type == REPLACE."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    block_size = payload_checker.block_size
+    data_length = 10000
+
+    op = self.mox.CreateMock(
+        update_metadata_pb2.DeltaArchiveManifest.InstallOperation)
+    op.type = common.OpType.REPLACE
+
+    # Pass.
+    op.src_extents = []
+    self.assertIsNone(
+        payload_checker._CheckReplaceOperation(
+            op, data_length, (data_length + block_size - 1) / block_size,
+            'foo'))
+
+    # Fail, src extents found.
+    op.src_extents = ['bar']
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckReplaceOperation,
+        op, data_length, (data_length + block_size - 1) / block_size, 'foo')
+
+    # Fail, missing data.
+    op.src_extents = []
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckReplaceOperation,
+        op, None, (data_length + block_size - 1) / block_size, 'foo')
+
+    # Fail, length / block number mismatch.
+    op.src_extents = ['bar']
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckReplaceOperation,
+        op, data_length, (data_length + block_size - 1) / block_size + 1, 'foo')
+
+  def testCheckReplaceBzOperation(self):
+    """Tests _CheckReplaceOperation() where op.type == REPLACE_BZ."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    block_size = payload_checker.block_size
+    data_length = block_size * 3
+
+    op = self.mox.CreateMock(
+        update_metadata_pb2.DeltaArchiveManifest.InstallOperation)
+    op.type = common.OpType.REPLACE_BZ
+
+    # Pass.
+    op.src_extents = []
+    self.assertIsNone(
+        payload_checker._CheckReplaceOperation(
+            op, data_length, (data_length + block_size - 1) / block_size + 5,
+            'foo'))
+
+    # Fail, src extents found.
+    op.src_extents = ['bar']
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckReplaceOperation,
+        op, data_length, (data_length + block_size - 1) / block_size + 5, 'foo')
+
+    # Fail, missing data.
+    op.src_extents = []
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckReplaceOperation,
+        op, None, (data_length + block_size - 1) / block_size, 'foo')
+
+    # Fail, too few blocks to justify BZ.
+    op.src_extents = []
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckReplaceOperation,
+        op, data_length, (data_length + block_size - 1) / block_size, 'foo')
+
+  def testCheckMoveOperation_Pass(self):
+    """Tests _CheckMoveOperation(); pass case."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    op = update_metadata_pb2.DeltaArchiveManifest.InstallOperation()
+    op.type = common.OpType.MOVE
+
+    self.AddToMessage(op.src_extents,
+                      self.NewExtentList((0, 4), (12, 2), (1024, 128)))
+    self.AddToMessage(op.dst_extents,
+                      self.NewExtentList((16, 128), (512, 6)))
+    self.assertIsNone(
+        payload_checker._CheckMoveOperation(op, None, 134, 134, 'foo'))
+
+  def testCheckMoveOperation_FailContainsData(self):
+    """Tests _CheckMoveOperation(); fails, message contains data."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    op = update_metadata_pb2.DeltaArchiveManifest.InstallOperation()
+    op.type = common.OpType.MOVE
+
+    self.AddToMessage(op.src_extents,
+                      self.NewExtentList((0, 4), (12, 2), (1024, 128)))
+    self.AddToMessage(op.dst_extents,
+                      self.NewExtentList((16, 128), (512, 6)))
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckMoveOperation,
+        op, 1024, 134, 134, 'foo')
+
+  def testCheckMoveOperation_FailInsufficientSrcBlocks(self):
+    """Tests _CheckMoveOperation(); fails, not enough actual src blocks."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    op = update_metadata_pb2.DeltaArchiveManifest.InstallOperation()
+    op.type = common.OpType.MOVE
+
+    self.AddToMessage(op.src_extents,
+                      self.NewExtentList((0, 4), (12, 2), (1024, 127)))
+    self.AddToMessage(op.dst_extents,
+                      self.NewExtentList((16, 128), (512, 6)))
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckMoveOperation,
+        op, None, 134, 134, 'foo')
+
+  def testCheckMoveOperation_FailInsufficientDstBlocks(self):
+    """Tests _CheckMoveOperation(); fails, not enough actual dst blocks."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    op = update_metadata_pb2.DeltaArchiveManifest.InstallOperation()
+    op.type = common.OpType.MOVE
+
+    self.AddToMessage(op.src_extents,
+                      self.NewExtentList((0, 4), (12, 2), (1024, 128)))
+    self.AddToMessage(op.dst_extents,
+                      self.NewExtentList((16, 128), (512, 5)))
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckMoveOperation,
+        op, None, 134, 134, 'foo')
+
+  def testCheckMoveOperation_FailExcessSrcBlocks(self):
+    """Tests _CheckMoveOperation(); fails, too many actual src blocks."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    op = update_metadata_pb2.DeltaArchiveManifest.InstallOperation()
+    op.type = common.OpType.MOVE
+
+    self.AddToMessage(op.src_extents,
+                      self.NewExtentList((0, 4), (12, 2), (1024, 128)))
+    self.AddToMessage(op.dst_extents,
+                      self.NewExtentList((16, 128), (512, 5)))
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckMoveOperation,
+        op, None, 134, 134, 'foo')
+    self.AddToMessage(op.src_extents,
+                      self.NewExtentList((0, 4), (12, 2), (1024, 129)))
+    self.AddToMessage(op.dst_extents,
+                      self.NewExtentList((16, 128), (512, 6)))
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckMoveOperation,
+        op, None, 134, 134, 'foo')
+
+  def testCheckMoveOperation_FailExcessDstBlocks(self):
+    """Tests _CheckMoveOperation(); fails, too many actual dst blocks."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    op = update_metadata_pb2.DeltaArchiveManifest.InstallOperation()
+    op.type = common.OpType.MOVE
+
+    self.AddToMessage(op.src_extents,
+                      self.NewExtentList((0, 4), (12, 2), (1024, 128)))
+    self.AddToMessage(op.dst_extents,
+                      self.NewExtentList((16, 128), (512, 7)))
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckMoveOperation,
+        op, None, 134, 134, 'foo')
+
+  def testCheckMoveOperation_FailStagnantBlocks(self):
+    """Tests _CheckMoveOperation(); fails, there are blocks that do not move."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    op = update_metadata_pb2.DeltaArchiveManifest.InstallOperation()
+    op.type = common.OpType.MOVE
+
+    self.AddToMessage(op.src_extents,
+                      self.NewExtentList((0, 4), (12, 2), (1024, 128)))
+    self.AddToMessage(op.dst_extents,
+                      self.NewExtentList((8, 128), (512, 6)))
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckMoveOperation,
+        op, None, 134, 134, 'foo')
+
+  def testCheckBsdiff(self):
+    """Tests _CheckMoveOperation()."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+
+    # Pass.
+    self.assertIsNone(
+        payload_checker._CheckBsdiffOperation(10000, 3, 'foo'))
+
+    # Fail, missing data blob.
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckBsdiffOperation,
+        None, 3, 'foo')
+
+    # Fail, too big of a diff blob (unjustified).
+    self.assertRaises(
+        update_payload.PayloadError,
+        payload_checker._CheckBsdiffOperation,
+        10000, 2, 'foo')
+
+  def DoCheckOperationTest(self, op_type_name, is_last, allow_signature,
+                           allow_unhashed, fail_src_extents, fail_dst_extents,
+                           fail_mismatched_data_offset_length,
+                           fail_missing_dst_extents, fail_src_length,
+                           fail_dst_length, fail_data_hash,
+                           fail_prev_data_offset):
+    """Parametric testing of _CheckOperation().
+
+    Args:
+      op_type_name: 'REPLACE', 'REPLACE_BZ', 'MOVE' or 'BSDIFF'
+      is_last: whether we're testing the last operation in a sequence
+      allow_signature: whether we're testing a signature-capable operation
+      allow_unhashed: whether we're allowing to not hash the data
+      fail_src_extents: tamper with src extents
+      fail_dst_extents: tamper with dst extents
+      fail_mismatched_data_offset_length: make data_{offset,length} inconsistent
+      fail_missing_dst_extents: do not include dst extents
+      fail_src_length: make src length inconsistent
+      fail_dst_length: make dst length inconsistent
+      fail_data_hash: tamper with the data blob hash
+      fail_prev_data_offset: make blob space usage non-contiguous
+
+    """
+    op_type = _OpTypeByName(op_type_name)
+
+    # Create the test object.
+    payload = self.MockPayload()
+    payload_checker = checker.PayloadChecker(payload)
+    block_size = payload_checker.block_size
+
+    # Create auxiliary arguments.
+    old_part_size = _MiB(4)
+    new_part_size = _MiB(8)
+    old_block_counters = array.array(
+        'B', [0] * ((old_part_size + block_size - 1) / block_size))
+    new_block_counters = array.array(
+        'B', [0] * ((new_part_size + block_size - 1) / block_size))
+    prev_data_offset = 1876
+    blob_hash_counts = collections.defaultdict(int)
+
+    # Create the operation object for the test.
+    op = update_metadata_pb2.DeltaArchiveManifest.InstallOperation()
+    op.type = op_type
+
+    total_src_blocks = 0
+    if op_type in (common.OpType.MOVE, common.OpType.BSDIFF):
+      if fail_src_extents:
+        self.AddToMessage(op.src_extents,
+                          self.NewExtentList((0, 0)))
+      else:
+        self.AddToMessage(op.src_extents,
+                          self.NewExtentList((0, 16)))
+        total_src_blocks = 16
+
+    if op_type != common.OpType.MOVE:
+      if not fail_mismatched_data_offset_length:
+        op.data_length = 16 * block_size - 8
+      if fail_prev_data_offset:
+        op.data_offset = prev_data_offset + 16
+      else:
+        op.data_offset = prev_data_offset
+
+      fake_data = 'fake-data'.ljust(op.data_length)
+      if not (allow_unhashed or (is_last and allow_signature and
+                                 op_type == common.OpType.REPLACE)):
+        if not fail_data_hash:
+          # Create a valid data blob hash.
+          op.data_sha256_hash = hashlib.sha256(fake_data).digest()
+          payload.ReadDataBlob(op.data_offset, op.data_length).AndReturn(
+              fake_data)
+      elif fail_data_hash:
+        # Create an invalid data blob hash.
+        op.data_sha256_hash = hashlib.sha256(
+            fake_data.replace(' ', '-')).digest()
+        payload.ReadDataBlob(op.data_offset, op.data_length).AndReturn(
+            fake_data)
+
+    total_dst_blocks = 0
+    if not fail_missing_dst_extents:
+      total_dst_blocks = 16
+      if fail_dst_extents:
+        self.AddToMessage(op.dst_extents,
+                          self.NewExtentList((4, 16), (32, 0)))
+      else:
+        self.AddToMessage(op.dst_extents,
+                          self.NewExtentList((4, 8), (64, 8)))
+
+    if total_src_blocks:
+      if fail_src_length:
+        op.src_length = total_src_blocks * block_size + 8
+      else:
+        op.src_length = total_src_blocks * block_size
+    elif fail_src_length:
+      # Add an orphaned src_length.
+      op.src_length = 16
+
+    if total_dst_blocks:
+      if fail_dst_length:
+        op.dst_length = total_dst_blocks * block_size + 8
+      else:
+        op.dst_length = total_dst_blocks * block_size
+
+    self.mox.ReplayAll()
+    should_fail = (fail_src_extents or fail_dst_extents or
+                   fail_mismatched_data_offset_length or
+                   fail_missing_dst_extents or fail_src_length or
+                   fail_dst_length or fail_data_hash or fail_prev_data_offset)
+    largs = [op, 'foo', is_last, old_block_counters, new_block_counters,
+             old_part_size, new_part_size, prev_data_offset, allow_signature,
+             allow_unhashed, blob_hash_counts]
+    if should_fail:
+      self.assertRaises(update_payload.PayloadError,
+                        payload_checker._CheckOperation, *largs)
+    else:
+      self.assertEqual(payload_checker._CheckOperation(*largs),
+                       op.data_length if op.HasField('data_length') else 0)
+
+  def testAllocBlockCounters(self):
+    """Tests _CheckMoveOperation()."""
+    payload_checker = checker.PayloadChecker(self.MockPayload())
+    block_size = payload_checker.block_size
+
+    # Check allocation for block-aligned partition size, ensure it's integers.
+    result = payload_checker._AllocBlockCounters(16 * block_size)
+    self.assertEqual(len(result), 16)
+    self.assertEqual(type(result[0]), int)
+
+    # Check allocation of unaligned partition sizes.
+    result = payload_checker._AllocBlockCounters(16 * block_size - 1)
+    self.assertEqual(len(result), 16)
+    result = payload_checker._AllocBlockCounters(16 * block_size + 1)
+    self.assertEqual(len(result), 17)
+
+  def DoCheckOperationsTest(self, fail_bad_type,
+                            fail_nonexhaustive_full_update):
+    # Generate a test payload. For this test, we only care about one
+    # (arbitrary) set of operations, so we'll only be generating rootfs
+    # operations and testing with them.
+    payload_gen = test_utils.PayloadGenerator()
+
+    block_size = _KiB(4)
+    payload_gen.SetBlockSize(block_size)
+
+    rootfs_part_size = _MiB(8)
+
+    # Fake rootfs operations in a full update, tampered with as required.
+    rootfs_op_type = common.OpType.REPLACE
+    if fail_bad_type:
+      # Choose a type value that's bigger than the highest valid value.
+      for valid_op_type in common.OpType.ALL:
+        rootfs_op_type = max(rootfs_op_type, valid_op_type)
+      rootfs_op_type += 1
+
+    rootfs_data_length = rootfs_part_size
+    if fail_nonexhaustive_full_update:
+      rootfs_data_length -= block_size
+
+    payload_gen.AddOperation(False, rootfs_op_type,
+                             dst_extents=[(0, rootfs_data_length / block_size)],
+                             data_offset=0,
+                             data_length=rootfs_data_length)
+
+    # Create the test object.
+    payload_checker = _GetPayloadChecker(payload_gen.WriteToFile)
+    payload_checker.payload_type = checker._TYPE_FULL
+    report = checker._PayloadReport()
+
+    should_fail = (fail_bad_type or fail_nonexhaustive_full_update)
+    largs = (payload_checker.payload.manifest.install_operations, report,
+             'foo', 0, rootfs_part_size, 0, True, False)
+    if should_fail:
+      self.assertRaises(update_payload.PayloadError,
+                        payload_checker._CheckOperations, *largs)
+    else:
+      self.assertEqual(payload_checker._CheckOperations(*largs),
+                       rootfs_data_length)
+
+  def DoCheckSignaturesTest(self, fail_empty_sigs_blob, fail_missing_pseudo_op,
+                            fail_mismatched_pseudo_op, fail_sig_missing_fields,
+                            fail_unknown_sig_version, fail_incorrect_sig):
+    # Generate a test payload. For this test, we only care about the signature
+    # block and how it relates to the payload hash. Therefore, we're generating
+    # a random (otherwise useless) payload for this purpose.
+    payload_gen = test_utils.EnhancedPayloadGenerator()
+    block_size = _KiB(4)
+    payload_gen.SetBlockSize(block_size)
+    rootfs_part_size = _MiB(2)
+    payload_gen.SetPartInfo(False, True, rootfs_part_size,
+                            hashlib.sha256('fake-new-rootfs-content').digest())
+    payload_gen.SetPartInfo(True, True, _KiB(16),
+                            hashlib.sha256('fake-new-kernel-content').digest())
+    payload_gen.AddOperationWithData(
+        False, common.OpType.REPLACE,
+        dst_extents=[(0, rootfs_part_size / block_size)],
+        data_blob=os.urandom(rootfs_part_size))
+
+    do_forge_pseudo_op = (fail_missing_pseudo_op or fail_mismatched_pseudo_op)
+    do_forge_sigs_data = (do_forge_pseudo_op or fail_empty_sigs_blob or
+                          fail_sig_missing_fields or fail_unknown_sig_version
+                          or fail_incorrect_sig)
+
+    sigs_data = None
+    if do_forge_sigs_data:
+      sigs_gen = test_utils.SignaturesGenerator()
+      if not fail_empty_sigs_blob:
+        if fail_sig_missing_fields:
+          sig_data = None
+        else:
+          sig_data = test_utils.SignSha256('fake-payload-content',
+                                           _PRIVKEY_FILE_NAME)
+        sigs_gen.AddSig(5 if fail_unknown_sig_version else 1, sig_data)
+
+      sigs_data = sigs_gen.ToBinary()
+      payload_gen.SetSignatures(payload_gen.curr_offset, len(sigs_data))
+
+    if do_forge_pseudo_op:
+      assert sigs_data is not None, 'should have forged signatures blob by now'
+      sigs_len = len(sigs_data)
+      payload_gen.AddOperation(
+          False, common.OpType.REPLACE,
+          data_offset=payload_gen.curr_offset / 2,
+          data_length=sigs_len / 2,
+          dst_extents=[(0, (sigs_len / 2 + block_size - 1) / block_size)])
+
+    # Generate payload (complete w/ signature) and create the test object.
+    payload_checker = _GetPayloadChecker(
+        payload_gen.WriteToFileWithData, sigs_data=sigs_data,
+        privkey_file_name=_PRIVKEY_FILE_NAME,
+        do_add_pseudo_operation=(not do_forge_pseudo_op))
+    payload_checker.payload_type = checker._TYPE_FULL
+    report = checker._PayloadReport()
+
+    # We have to check the manifest first in order to set signature attributes.
+    payload_checker._CheckManifest(report)
+
+    should_fail = (fail_empty_sigs_blob or fail_missing_pseudo_op or
+                   fail_mismatched_pseudo_op or fail_sig_missing_fields or
+                   fail_unknown_sig_version or fail_incorrect_sig)
+    largs = (report, _PUBKEY_FILE_NAME)
+    if should_fail:
+      self.assertRaises(update_payload.PayloadError,
+                        payload_checker._CheckSignatures, *largs)
+    else:
+      self.assertIsNone(payload_checker._CheckSignatures(*largs))
+
+  def DoRunTest(self, fail_wrong_payload_type, fail_invalid_block_size,
+                fail_mismatched_block_size, fail_excess_data):
+    # Generate a test payload. For this test, we generate a full update that
+    # has sample kernel and rootfs operations. Since most testing is done with
+    # internal PayloadChecker methods that are tested elsewhere, here we only
+    # tamper with what's actually being manipulated and/or tested in the Run()
+    # method itself. Note that the checker doesn't verify partition hashes, so
+    # they're safe to fake.
+    payload_gen = test_utils.EnhancedPayloadGenerator()
+    block_size = _KiB(4)
+    payload_gen.SetBlockSize(block_size)
+    kernel_part_size = _KiB(16)
+    rootfs_part_size = _MiB(2)
+    payload_gen.SetPartInfo(False, True, rootfs_part_size,
+                            hashlib.sha256('fake-new-rootfs-content').digest())
+    payload_gen.SetPartInfo(True, True, kernel_part_size,
+                            hashlib.sha256('fake-new-kernel-content').digest())
+    payload_gen.AddOperationWithData(
+        False, common.OpType.REPLACE,
+        dst_extents=[(0, rootfs_part_size / block_size)],
+        data_blob=os.urandom(rootfs_part_size))
+    payload_gen.AddOperationWithData(
+        True, common.OpType.REPLACE,
+        dst_extents=[(0, kernel_part_size / block_size)],
+        data_blob=os.urandom(kernel_part_size))
+
+    # Generate payload (complete w/ signature) and create the test object.
+    payload_checker = _GetPayloadChecker(
+        payload_gen.WriteToFileWithData,
+        privkey_file_name=_PRIVKEY_FILE_NAME,
+        do_add_pseudo_operation=True, is_pseudo_in_kernel=True,
+        padding=os.urandom(1024) if fail_excess_data else None)
+
+    if fail_invalid_block_size:
+      use_block_size = block_size + 5  # not a power of two
+    elif fail_mismatched_block_size:
+      use_block_size = block_size * 2  # different than what the payload states
+    else:
+      use_block_size = block_size
+    dargs = {
+        'pubkey_file_name': _PUBKEY_FILE_NAME,
+        'assert_type': 'delta' if fail_wrong_payload_type else 'full',
+        'block_size': use_block_size}
+
+    should_fail = (fail_wrong_payload_type or fail_invalid_block_size or
+                   fail_mismatched_block_size or fail_excess_data)
+    if should_fail:
+      self.assertRaises(update_payload.PayloadError,
+                        payload_checker.Run, **dargs)
+    else:
+      self.assertIsNone(payload_checker.Run(**dargs))
+
+
+# This implements a generic API, hence the occasional unused args.
+# pylint: disable=W0613
+def ValidateCheckOperationTest(op_type_name, is_last, allow_signature,
+                               allow_unhashed, fail_src_extents,
+                               fail_dst_extents,
+                               fail_mismatched_data_offset_length,
+                               fail_missing_dst_extents, fail_src_length,
+                               fail_dst_length, fail_data_hash,
+                               fail_prev_data_offset):
+  """Returns True iff the combination of arguments represents a valid test."""
+  op_type = _OpTypeByName(op_type_name)
+
+  # REPLACE/REPLACE_BZ operations don't read data from src partition.
+  if (op_type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ) and (
+      fail_src_extents or fail_src_length)):
+    return False
+
+  # MOVE operations don't carry data.
+  if (op_type == common.OpType.MOVE and (
+      fail_mismatched_data_offset_length or fail_data_hash or
+      fail_prev_data_offset)):
+    return False
+
+  return True
+
+
+def TestMethodBody(run_method_name, run_dargs):
+  """Returns a function that invokes a named method with named arguments."""
+  return lambda self: getattr(self, run_method_name)(**run_dargs)
+
+
+def AddParametricTests(tested_method_name, arg_space, validate_func=None):
+  """Enumerates and adds specific parametric tests to PayloadCheckerTest.
+
+  This function enumerates a space of test parameters (defined by arg_space),
+  then binds a new, unique method name in PayloadCheckerTest to a test function
+  that gets handed the said parameters. This is a preferable approach to doing
+  the enumeration and invocation during the tests because this way each test is
+  treated as a complete run by the unittest framework, and so benefits from the
+  usual setUp/tearDown mechanics.
+
+  Args:
+    tested_method_name: name of the tested PayloadChecker method
+    arg_space: a dictionary containing variables (keys) and lists of values
+               (values) associated with them
+    validate_func: a function used for validating test argument combinations
+
+  """
+  for value_tuple in itertools.product(*arg_space.itervalues()):
+    run_dargs = dict(zip(arg_space.iterkeys(), value_tuple))
+    if validate_func and not validate_func(**run_dargs):
+      continue
+    run_method_name = 'Do%sTest' % tested_method_name
+    test_method_name = 'test%s' % tested_method_name
+    for arg_key, arg_val in run_dargs.iteritems():
+      if arg_val or type(arg_val) is int:
+        test_method_name += '__%s=%s' % (arg_key, arg_val)
+    setattr(PayloadCheckerTest, test_method_name,
+            TestMethodBody(run_method_name, run_dargs))
+
+
+def AddAllParametricTests():
+  """Enumerates and adds all parametric tests to PayloadCheckerTest."""
+  # Add all _CheckElem() test cases.
+  AddParametricTests('AddElem',
+                     {'linebreak': (True, False),
+                      'indent': (0, 1, 2),
+                      'convert': (str, lambda s: s[::-1]),
+                      'is_present': (True, False),
+                      'is_mandatory': (True, False),
+                      'is_submsg': (True, False)})
+
+  # Add all _Add{Mandatory,Optional}Field tests.
+  AddParametricTests('AddField',
+                     {'is_mandatory': (True, False),
+                      'linebreak': (True, False),
+                      'indent': (0, 1, 2),
+                      'convert': (str, lambda s: s[::-1]),
+                      'is_present': (True, False)})
+
+  # Add all _Add{Mandatory,Optional}SubMsg tests.
+  AddParametricTests('AddSubMsg',
+                     {'is_mandatory': (True, False),
+                      'is_present': (True, False)})
+
+  # Add all _CheckManifest() test cases.
+  AddParametricTests('CheckManifest',
+                     {'fail_mismatched_block_size': (True, False),
+                      'fail_bad_sigs': (True, False),
+                      'fail_mismatched_oki_ori': (True, False),
+                      'fail_bad_oki': (True, False),
+                      'fail_bad_ori': (True, False),
+                      'fail_bad_nki': (True, False),
+                      'fail_bad_nri': (True, False),
+                      'fail_missing_ops': (True, False)})
+
+  # Add all _CheckOperation() test cases.
+  AddParametricTests('CheckOperation',
+                     {'op_type_name': ('REPLACE', 'REPLACE_BZ', 'MOVE',
+                                       'BSDIFF'),
+                      'is_last': (True, False),
+                      'allow_signature': (True, False),
+                      'allow_unhashed': (True, False),
+                      'fail_src_extents': (True, False),
+                      'fail_dst_extents': (True, False),
+                      'fail_mismatched_data_offset_length': (True, False),
+                      'fail_missing_dst_extents': (True, False),
+                      'fail_src_length': (True, False),
+                      'fail_dst_length': (True, False),
+                      'fail_data_hash': (True, False),
+                      'fail_prev_data_offset': (True, False)},
+                     validate_func=ValidateCheckOperationTest)
+
+  # Add all _CheckOperations() test cases.
+  AddParametricTests('CheckOperations',
+                     {'fail_bad_type': (True, False),
+                      'fail_nonexhaustive_full_update': (True, False)})
+
+  # Add all _CheckSignatures() test cases.
+  AddParametricTests('CheckSignatures',
+                     {'fail_empty_sigs_blob': (True, False),
+                      'fail_missing_pseudo_op': (True, False),
+                      'fail_mismatched_pseudo_op': (True, False),
+                      'fail_sig_missing_fields': (True, False),
+                      'fail_unknown_sig_version': (True, False),
+                      'fail_incorrect_sig': (True, False)})
+
+  # Add all Run() test cases.
+  AddParametricTests('Run',
+                     {'fail_wrong_payload_type': (True, False),
+                      'fail_invalid_block_size': (True, False),
+                      'fail_mismatched_block_size': (True, False),
+                      'fail_excess_data': (True, False)})
+
+
+if __name__ == '__main__':
+  AddAllParametricTests()
+  unittest.main()
diff --git a/host/lib/update_payload/common.py b/host/lib/update_payload/common.py
index 5e0087b..6b5dbad 100644
--- a/host/lib/update_payload/common.py
+++ b/host/lib/update_payload/common.py
@@ -15,6 +15,12 @@
 #
 PSEUDO_EXTENT_MARKER = ctypes.c_uint64(-1).value
 
+SIG_ASN1_HEADER = (
+    '\x30\x31\x30\x0d\x06\x09\x60\x86'
+    '\x48\x01\x65\x03\x04\x02\x01\x05'
+    '\x00\x04\x20'
+)
+
 
 #
 # Payload operation types.
@@ -27,6 +33,7 @@
   REPLACE_BZ = _CLASS.REPLACE_BZ
   MOVE = _CLASS.MOVE
   BSDIFF = _CLASS.BSDIFF
+  ALL = (REPLACE, REPLACE_BZ, MOVE, BSDIFF)
   NAMES = {
       REPLACE: 'REPLACE',
       REPLACE_BZ: 'REPLACE_BZ',
@@ -41,6 +48,39 @@
 #
 # Checker and hashed reading of data.
 #
+def IntPackingFmtStr(size, is_unsigned):
+  """Returns an integer format string for use by the struct module.
+
+  Args:
+    size: the integer size in bytes (2, 4 or 8)
+    is_unsigned: whether the integer is unsigned
+  Returns:
+    A format string for packing/unpacking integer values; assumes network byte
+    order (big-endian).
+  Raises:
+    PayloadError if something is wrong with the arguments.
+
+  """
+  # Determine the base conversion format.
+  if size == 2:
+    fmt = 'h'
+  elif size == 4:
+    fmt = 'i'
+  elif size == 8:
+    fmt = 'q'
+  else:
+    raise PayloadError('unsupport numeric field size (%s)' % size)
+
+  # Signed or unsigned?
+  if is_unsigned:
+    fmt = fmt.upper()
+
+  # Make it network byte order (big-endian).
+  fmt = '!' + fmt
+
+  return fmt
+
+
 def Read(file_obj, length, offset=None, hasher=None):
   """Reads binary data from a file.
 
diff --git a/host/lib/update_payload/payload-test-key.pem b/host/lib/update_payload/payload-test-key.pem
new file mode 100644
index 0000000..342e923
--- /dev/null
+++ b/host/lib/update_payload/payload-test-key.pem
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEAvtGHtqO21Uhy2wGz9fluIpIUR8G7dZoCZhZukGkm4mlfgL71
+xPSArjx02/w/FhYxOusV6/XQeKgL3i8cni3HCkCOurZLpi2L5Ver6qrxKFh6WBVZ
+0Dj7N6P/Mf5jZdhfvVyweLlsNK8Ypeb+RazfrsXhd4cy3dBMxouGwH7R7QQXTFCo
+Cc8kgJBTxILl3jfvY8OrNKgYiCETa7tQdFkP0bfPwH9cAXuMjHXiZatim0tF+ivp
+kM2v/6LTxtD6Rq1wks/N6CHi8efrRaviFp7c0mNmBNFaV54cHEUW2SlNIiRun7L0
+1nAz/D8kuoHfx4E3Mtj0DbvngZJMX/X+rJQ5cQIDAQABAoIBADmE2X7hbJxwAUcp
+BUExFdTP6dMTf9lcOjrhqiRXvgPjtYkOhvD+rsdWq/cf2zhiKibTdEEzUMr+BM3N
+r7eyntvlR+DaUIVgF1pjigvryVPbD837aZ5NftRv194PC5FInttq1Dsf0ZEz8p8X
+uS/xg1+ggG1SUK/yOSJkLpNZ5xelbclQJ9bnJST8PR8XbEieA83xt5M2DcooPzq0
+/99m/daA5hmSWs6n8sFrIZDQxDhLyyW4J72jjoNTE87eCpwK855yXMelpEPDZNQi
+nB3x5Y/bGbl81PInqL2q14lekrVYdYZ7bOBVlsmyvz6f1e4OOE1aaAM+w6ArA4az
+6elZQE0CgYEA4GOU6BBu9jLqFdqV9jIkWsgz5ZWINz8PLJPtZzk5I9KO1m+GAUy2
+h/1IGGR6qRQR49hMtq4C0lUifxquq0xivzJ87U9oxKC9yEeTxkmDe5csVHsnAtqT
+xRgVM7Ysrut5NLU1zm0q3jBmkDu7d99LvscM/3n7eJ6RiYpnA54O6I8CgYEA2bNA
+34PTvxBS2deRoxKQNlVU14FtirE+q0+k0wcE85wr7wIMpR13al8T1TpE8J1yvvZM
+92HMGFGfYNDB46b8VfJ5AxEUFwdruec6sTVVfkMZMOqM/A08yiaLzQ1exDxNwaja
+fLuG5FAVRD/2g7fLBcsmosyNgcgNr1XA8Q/nvf8CgYEAwaSOg7py19rWcqehlMZu
+4z00tCNYWzz7LmA2l0clzYlPJTU3MvXt6+ujhRFpXXJpgfRPN7Nx0ewQihoPtNqF
+uTSr5OwLoOyK+0Tx/UPByS2L3xgscWUJ8yQ2X9sOMqIZhmf/mDZTsU2ZpU03GlrE
+dk43JF4zq0NEm6qp/dAwU3cCgYEAvECl+KKmmLIk8vvWlI2Y52Mi2rixYR2kc7+L
+aHDJd1+1HhlHlgDFItbU765Trz5322phZArN0rnCeJYNFC9yRWBIBL7gAIoKPdgW
+iOb15xlez04EXHGV/7kVa1wEdu0u0CiTxwjivMwDl+E36u8kQP5LirwYIgI800H0
+doCqhUECgYEAjvA38OS7hy56Q4LQtmHFBuRIn4E5SrIGMwNIH6TGbEKQix3ajTCQ
+0fSoLDGTkU6dH+T4v0WheveN2a2Kofqm0UQx5V2rfnY/Ut1fAAWgL/lsHLDnzPUZ
+bvTOANl8TbT49xAfNXTaGWe7F7nYz+bK0UDif1tJNDLQw7USD5I8lbQ=
+-----END RSA PRIVATE KEY-----
diff --git a/host/lib/update_payload/payload-test-key.pub b/host/lib/update_payload/payload-test-key.pub
new file mode 100644
index 0000000..fdae963
--- /dev/null
+++ b/host/lib/update_payload/payload-test-key.pub
@@ -0,0 +1,9 @@
+-----BEGIN PUBLIC KEY-----
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvtGHtqO21Uhy2wGz9flu
+IpIUR8G7dZoCZhZukGkm4mlfgL71xPSArjx02/w/FhYxOusV6/XQeKgL3i8cni3H
+CkCOurZLpi2L5Ver6qrxKFh6WBVZ0Dj7N6P/Mf5jZdhfvVyweLlsNK8Ypeb+Razf
+rsXhd4cy3dBMxouGwH7R7QQXTFCoCc8kgJBTxILl3jfvY8OrNKgYiCETa7tQdFkP
+0bfPwH9cAXuMjHXiZatim0tF+ivpkM2v/6LTxtD6Rq1wks/N6CHi8efrRaviFp7c
+0mNmBNFaV54cHEUW2SlNIiRun7L01nAz/D8kuoHfx4E3Mtj0DbvngZJMX/X+rJQ5
+cQIDAQAB
+-----END PUBLIC KEY-----
diff --git a/host/lib/update_payload/payload.py b/host/lib/update_payload/payload.py
index 6dda644..dbb385a 100644
--- a/host/lib/update_payload/payload.py
+++ b/host/lib/update_payload/payload.py
@@ -19,7 +19,7 @@
 # Helper functions.
 #
 def _ReadInt(file_obj, size, is_unsigned, hasher=None):
-  """Read a binary-encoded integer from a file.
+  """Reads a binary-encoded integer from a file.
 
   It will do the correct conversion based on the reported size and whether or
   not a signed number is expected. Assumes a network (big-endian) byte
@@ -36,24 +36,8 @@
     PayloadError if an read error occurred.
 
   """
-  # Determine the base conversion format.
-  if size == 2:
-    fmt = 'h'
-  elif size == 4:
-    fmt = 'i'
-  elif size == 8:
-    fmt = 'q'
-  else:
-    raise PayloadError('unsupport numeric field size (%s)' % size)
-
-  # Signed or unsigned?
-  if is_unsigned:
-    fmt = fmt.upper()
-
-  # Our numeric values are in network byte order (big-endian).
-  fmt = '!' + fmt
-
-  return struct.unpack(fmt, common.Read(file_obj, size, hasher=hasher))[0]
+  return struct.unpack(common.IntPackingFmtStr(size, is_unsigned),
+                       common.Read(file_obj, size, hasher=hasher))[0]
 
 
 #
diff --git a/host/lib/update_payload/test_utils.py b/host/lib/update_payload/test_utils.py
new file mode 100644
index 0000000..d05aafd
--- /dev/null
+++ b/host/lib/update_payload/test_utils.py
@@ -0,0 +1,340 @@
+# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for unit testing."""
+
+import cStringIO
+import hashlib
+import struct
+import subprocess
+
+import common
+import payload
+import update_metadata_pb2
+
+
+class TestError(Exception):
+  """An error during testing of update payload code."""
+
+
def _WriteInt(file_obj, size, is_unsigned, val):
  """Writes a binary-encoded integer to a file.

  Performs the inverse of payload._ReadInt: encodes the value using the
  packing format dictated by the requested size and signedness, in network
  (big-endian) byte ordering, and dumps it to the given file.

  Args:
    file_obj: a file object
    size: the integer size in bytes (2, 4 or 8)
    is_unsigned: whether it is signed or not
    val: integer value to encode
  Raises:
    PayloadError if a write error occurred.

  """
  packing_fmt = common.IntPackingFmtStr(size, is_unsigned)
  try:
    encoded_val = struct.pack(packing_fmt, val)
    file_obj.write(encoded_val)
  except IOError as e:
    raise payload.PayloadError('error writing to file (%s): %s' %
                               (file_obj.name, e))
+
+
+def _SetMsgField(msg, field_name, val):
+  """Sets or clears a field in a protobuf message."""
+  if val is None:
+    msg.ClearField(field_name)
+  else:
+    setattr(msg, field_name, val)
+
+
def SignSha256(data, privkey_file_name):
  """Signs the data's SHA256 hash with an RSA private key.

  Prepends the standard ASN1 header to the SHA256 digest (so verification can
  strip/compare it) and signs the result via the openssl command-line tool.

  Args:
    data: the data whose SHA256 hash we want to sign
    privkey_file_name: private key used for signing data
  Returns:
    The signature string, prepended with an ASN1 header.
  Raises:
    TestError if something goes wrong.

  """
  # pylint: disable=E1101
  data_sha256_hash = common.SIG_ASN1_HEADER + hashlib.sha256(data).digest()
  sign_cmd = ['openssl', 'rsautl', '-sign', '-inkey', privkey_file_name]
  try:
    sign_process = subprocess.Popen(sign_cmd, stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE)
    sig, _ = sign_process.communicate(input=data_sha256_hash)
  except Exception as e:
    raise TestError('signing subprocess failed: %s' % e)

  # A failing openssl invocation (e.g. missing/malformed key) exits nonzero
  # without raising above; detect it rather than returning a bogus signature.
  if sign_process.returncode != 0:
    raise TestError('signing subprocess failed with exit status %d' %
                    sign_process.returncode)

  return sig
+
+
class SignaturesGenerator(object):
  """Generates a payload signatures data block."""

  def __init__(self):
    self.sigs = update_metadata_pb2.Signatures()

  def AddSig(self, version, data):
    """Appends a signature to the signature sequence.

    Args:
      version: signature version (None means do not assign)
      data: signature binary data (None means do not assign)

    """
    # Pylint fails to identify a member of the Signatures message.
    # pylint: disable=E1101
    new_sig = self.sigs.signatures.add()
    # Only assign fields that were actually provided; None leaves them unset.
    for field_name, field_val in (('version', version), ('data', data)):
      if field_val is not None:
        setattr(new_sig, field_name, field_val)

  def ToBinary(self):
    """Serializes the signature block and returns its binary form."""
    return self.sigs.SerializeToString()
+
+
class PayloadGenerator(object):
  """Generates an update payload allowing low-level control.

  Used by unit tests to construct payloads with arbitrary (including
  deliberately invalid) field combinations, which a normal payload writer
  would not allow.

  Attributes:
    manifest: the protobuf containing the payload manifest
    version: the payload version identifier
    block_size: the block size pertaining to update operations

  """

  def __init__(self, version=1):
    self.manifest = update_metadata_pb2.DeltaArchiveManifest()
    self.version = version
    # Zero means "not set yet"; callers must invoke SetBlockSize() before
    # relying on block arithmetic.
    self.block_size = 0

  @staticmethod
  def _WriteExtent(ex, val):
    """Populates an Extent message in place from a (start, length) pair."""
    start_block, num_blocks = val
    _SetMsgField(ex, 'start_block', start_block)
    _SetMsgField(ex, 'num_blocks', num_blocks)

  @staticmethod
  def _AddValuesToRepeatedField(repeated_field, values, write_func):
    """Adds values to a repeated message field.

    A falsy values argument (None or empty) leaves the field untouched.
    """
    if values:
      for val in values:
        new_item = repeated_field.add()
        write_func(new_item, val)

  @staticmethod
  def _AddExtents(extents_field, values):
    """Adds extents to an extents field."""
    PayloadGenerator._AddValuesToRepeatedField(
        extents_field, values, PayloadGenerator._WriteExtent)

  def SetBlockSize(self, block_size):
    """Sets the payload's block size."""
    self.block_size = block_size
    _SetMsgField(self.manifest, 'block_size', block_size)

  def SetPartInfo(self, is_kernel, is_new, part_size, part_hash):
    """Sets the partition info entry.

    Args:
      is_kernel: whether this is kernel partition info
      is_new: whether to set old (False) or new (True) info
      part_size: the partition size (in fact, filesystem size)
      part_hash: the partition hash

    """
    if is_kernel:
      # pylint: disable=E1101
      part_info = (self.manifest.new_kernel_info if is_new
                   else self.manifest.old_kernel_info)
    else:
      # pylint: disable=E1101
      part_info = (self.manifest.new_rootfs_info if is_new
                   else self.manifest.old_rootfs_info)
    _SetMsgField(part_info, 'size', part_size)
    _SetMsgField(part_info, 'hash', part_hash)

  def AddOperation(self, is_kernel, op_type, data_offset=None,
                   data_length=None, src_extents=None, src_length=None,
                   dst_extents=None, dst_length=None, data_sha256_hash=None):
    """Adds an InstallOperation entry.

    Args:
      is_kernel: whether to add to kernel (True) or rootfs (False) operations
      op_type: the InstallOperation type value
      data_offset: offset of the operation's data blob (None leaves it unset)
      data_length: length of the operation's data blob (None leaves it unset)
      src_extents: list of (start, length) source block ranges
      src_length: total source data length in bytes
      dst_extents: list of (start, length) destination block ranges
      dst_length: total destination data length in bytes
      data_sha256_hash: hash of the operation's data blob

    """
    # pylint: disable=E1101
    operations = (self.manifest.kernel_install_operations if is_kernel
                  else self.manifest.install_operations)

    op = operations.add()
    op.type = op_type

    _SetMsgField(op, 'data_offset', data_offset)
    _SetMsgField(op, 'data_length', data_length)

    self._AddExtents(op.src_extents, src_extents)
    _SetMsgField(op, 'src_length', src_length)

    self._AddExtents(op.dst_extents, dst_extents)
    _SetMsgField(op, 'dst_length', dst_length)

    _SetMsgField(op, 'data_sha256_hash', data_sha256_hash)

  def SetSignatures(self, sigs_offset, sigs_size):
    """Sets the payload's signature block descriptors."""
    _SetMsgField(self.manifest, 'signatures_offset', sigs_offset)
    _SetMsgField(self.manifest, 'signatures_size', sigs_size)

  def _WriteHeaderToFile(self, file_obj, manifest_len):
    """Writes a payload header (magic, version, manifest length) to a file."""
    # We need to access protected members in Payload for writing the header.
    # pylint: disable=W0212
    file_obj.write(payload.Payload._MAGIC)
    _WriteInt(file_obj, payload.Payload._VERSION_SIZE, True, self.version)
    _WriteInt(file_obj, payload.Payload._MANIFEST_LEN_SIZE, True, manifest_len)

  def WriteToFile(self, file_obj, manifest_len=-1, data_blobs=None,
                  sigs_data=None, padding=None):
    """Writes the payload content to a file.

    Args:
      file_obj: a file object open for writing
      manifest_len: manifest len to dump (otherwise computed automatically);
                    allows tests to forge an inconsistent length on purpose
      data_blobs: a list of data blobs to be concatenated to the payload
      sigs_data: a binary Signatures message to be concatenated to the payload
      padding: stuff to dump past the normal data blobs provided (optional)

    """
    manifest = self.manifest.SerializeToString()
    if manifest_len < 0:
      manifest_len = len(manifest)
    self._WriteHeaderToFile(file_obj, manifest_len)
    file_obj.write(manifest)
    if data_blobs:
      for data_blob in data_blobs:
        file_obj.write(data_blob)
    if sigs_data:
      file_obj.write(sigs_data)
    if padding:
      file_obj.write(padding)
+
+
class EnhancedPayloadGenerator(PayloadGenerator):
  """Payload generator with automatic handling of data blobs.

  Attributes:
    data_blobs: a list of blobs, in the order they were added
    curr_offset: the currently consumed offset of blobs added to the payload

  """

  def __init__(self):
    super(EnhancedPayloadGenerator, self).__init__()
    self.data_blobs = []
    self.curr_offset = 0

  def AddData(self, data_blob):
    """Adds a (possibly orphan) data blob.

    Appends the blob to the payload data area and advances the running
    offset accordingly.

    Returns:
      A (length, offset) pair locating the blob within the payload data.

    """
    data_length = len(data_blob)
    data_offset = self.curr_offset
    self.curr_offset += data_length
    self.data_blobs.append(data_blob)
    return data_length, data_offset

  def AddOperationWithData(self, is_kernel, op_type, src_extents=None,
                           src_length=None, dst_extents=None, dst_length=None,
                           data_blob=None, do_hash_data_blob=True):
    """Adds an install operation and associated data blob.

    This takes care of obtaining a hash of the data blob (if so instructed)
    and appending it to the internally maintained list of blobs, including the
    necessary offset/length accounting.

    Args:
      is_kernel: whether this is a kernel (True) or rootfs (False) operation
      op_type: one of REPLACE, REPLACE_BZ, MOVE or BSDIFF
      src_extents: list of (start, length) pairs indicating src block ranges
      src_length: size of the src data in bytes (needed for BSDIFF)
      dst_extents: list of (start, length) pairs indicating dst block ranges
      dst_length: size of the dst data in bytes (needed for BSDIFF)
      data_blob: a data blob associated with this operation
      do_hash_data_blob: whether or not to compute and add a data blob hash

    """
    data_offset = data_length = data_sha256_hash = None
    if data_blob is not None:
      if do_hash_data_blob:
        # pylint: disable=E1101
        data_sha256_hash = hashlib.sha256(data_blob).digest()
      data_length, data_offset = self.AddData(data_blob)

    self.AddOperation(is_kernel, op_type, data_offset=data_offset,
                      data_length=data_length, src_extents=src_extents,
                      src_length=src_length, dst_extents=dst_extents,
                      dst_length=dst_length, data_sha256_hash=data_sha256_hash)

  def WriteToFileWithData(self, file_obj, sigs_data=None,
                          privkey_file_name=None,
                          do_add_pseudo_operation=False,
                          is_pseudo_in_kernel=False, padding=None):
    """Writes the payload content to a file, optionally signing the content.

    When generating signatures, this performs a two-pass write: the payload is
    first dumped to a temporary buffer (with its signature descriptors already
    set), signed, and then dumped for real with the signature blob appended.

    Args:
      file_obj: a file object open for writing
      sigs_data: signatures blob to be appended to the payload (optional;
                 payload signature fields assumed to be preset by the caller)
      privkey_file_name: key used for signing the payload (optional; used only
                         if explicit signatures blob not provided)
      do_add_pseudo_operation: whether a pseudo-operation should be added to
                               account for the signature blob
      is_pseudo_in_kernel: whether the pseudo-operation should be added to
                           kernel (True) or rootfs (False) operations
      padding: stuff to dump past the normal data blobs provided (optional)
    Raises:
      TestError: if arguments are inconsistent or something goes wrong.

    """
    sigs_len = len(sigs_data) if sigs_data else 0

    # Do we need to generate a genuine signatures blob?
    do_generate_sigs_data = sigs_data is None and privkey_file_name

    if do_generate_sigs_data:
      # First, sign some arbitrary data to obtain the size of a signature blob.
      # The signature length is key-size dependent, so the real blob (computed
      # below) is assumed to be exactly this long.
      fake_sig = SignSha256('fake-payload-data', privkey_file_name)
      fake_sigs_gen = SignaturesGenerator()
      fake_sigs_gen.AddSig(1, fake_sig)
      sigs_len = len(fake_sigs_gen.ToBinary())

      # Update the payload with proper signature attributes.
      self.SetSignatures(self.curr_offset, sigs_len)

    # Add a pseudo-operation to account for the signature blob, if requested.
    if do_add_pseudo_operation:
      if not self.block_size:
        raise TestError('cannot add pseudo-operation without knowing the '
                        'payload block size')
      # The dst extent covers ceil(sigs_len / block_size) blocks (Python 2
      # integer division), anchored at the pseudo-extent marker.
      self.AddOperation(
          is_pseudo_in_kernel, common.OpType.REPLACE,
          data_offset=self.curr_offset, data_length=sigs_len,
          dst_extents=[(common.PSEUDO_EXTENT_MARKER,
                        (sigs_len + self.block_size - 1) / self.block_size)])

    if do_generate_sigs_data:
      # Once all payload fields are updated, dump and sign it.
      temp_payload_file = cStringIO.StringIO()
      self.WriteToFile(temp_payload_file, data_blobs=self.data_blobs)
      sig = SignSha256(temp_payload_file.getvalue(), privkey_file_name)
      sigs_gen = SignaturesGenerator()
      sigs_gen.AddSig(1, sig)
      sigs_data = sigs_gen.ToBinary()
      assert len(sigs_data) == sigs_len, 'signature blob lengths mismatch'

    # Dump the whole thing, complete with data and signature blob, to a file.
    self.WriteToFile(file_obj, data_blobs=self.data_blobs, sigs_data=sigs_data,
                     padding=padding)