paycheck: fixes to checker module (readability review)

BUG=None
TEST=Passes unit tests
TEST=Passes integration tests

Change-Id: Ifd502af9d2755b2c23805cd03857ebbf0e633732
Reviewed-on: https://chromium-review.googlesource.com/59752
Reviewed-by: Gilad Arnold <garnold@chromium.org>
Tested-by: Gilad Arnold <garnold@chromium.org>
Reviewed-by: Chris Sosa <sosa@chromium.org>
Commit-Queue: Gilad Arnold <garnold@chromium.org>
diff --git a/host/lib/update_payload/checker.py b/host/lib/update_payload/checker.py
index 2a1dd31..549ffcb 100644
--- a/host/lib/update_payload/checker.py
+++ b/host/lib/update_payload/checker.py
@@ -10,17 +10,17 @@
 
   checker = PayloadChecker(payload)
   checker.Run(...)
-
 """
 
 import array
 import base64
 import hashlib
+import itertools
 import os
 import subprocess
 
 import common
-from error import PayloadError
+import error
 import format_utils
 import histogram
 import update_metadata_pb2
@@ -29,6 +29,7 @@
 #
 # Constants.
 #
+
 _CHECK_DST_PSEUDO_EXTENTS = 'dst-pseudo-extents'
 _CHECK_MOVE_SAME_SRC_DST_BLOCK = 'move-same-src-dst-block'
 _CHECK_PAYLOAD_SIG = 'payload-sig'
@@ -51,6 +52,7 @@
 #
 # Helper functions.
 #
+
 def _IsPowerOfTwo(val):
   """Returns True iff val is a power of two."""
   return val > 0 and (val & (val - 1)) == 0
@@ -60,11 +62,10 @@
   """Adds a custom formatted representation to ordinary string representation.
 
   Args:
-    format_func: a value formatter
-    value: value to be formatted and returned
+    format_func: A value formatter.
+    value: Value to be formatted and returned.
   Returns:
     A string 'x (y)' where x = str(value) and y = format_func(value).
-
   """
   return '%s (%s)' % (value, format_func(value))
 
@@ -77,16 +78,16 @@
 #
 # Payload report generator.
 #
+
 class _PayloadReport(object):
   """A payload report generator.
 
   A report is essentially a sequence of nodes, which represent data points. It
   is initialized to have a "global", untitled section. A node may be a
   sub-report itself.
-
   """
 
-  # Report nodes: field, sub-report, section.
+  # Report nodes: Field, sub-report, section.
   class Node(object):
     """A report node interface."""
 
@@ -95,11 +96,10 @@
       """Indents a line by a given indentation amount.
 
       Args:
-        indent: the indentation amount
-        line: the line content (string)
+        indent: The indentation amount.
+        line: The line content (string).
       Returns:
         The properly indented line (string).
-
       """
       return '%*s%s' % (indent, '', line)
 
@@ -107,15 +107,14 @@
       """Generates the report lines for this node.
 
       Args:
-        base_indent: base indentation for each line
-        sub_indent: additional indentation for sub-nodes
-        curr_section: the current report section object
+        base_indent: Base indentation for each line.
+        sub_indent: Additional indentation for sub-nodes.
+        curr_section: The current report section object.
       Returns:
         A pair consisting of a list of properly indented report lines and a new
         current section object.
-
       """
-      raise NotImplementedError()
+      raise NotImplementedError
 
   class FieldNode(Node):
     """A field report node, representing a (name, value) pair."""
@@ -190,11 +189,10 @@
     """Generates the lines in the report, properly indented.
 
     Args:
-      base_indent: the indentation used for root-level report lines
-      sub_indent: the indentation offset used for sub-reports
+      base_indent: The indentation used for root-level report lines.
+      sub_indent: The indentation offset used for sub-reports.
     Returns:
       A list of indented report lines.
-
     """
     report_lines = []
     curr_section = self.global_section
@@ -209,12 +207,10 @@
     """Dumps the report to a file.
 
     Args:
-      out_file: file object to output the content to
-      base_indent: base indentation for report lines
-      sub_indent: added indentation for sub-reports
-
+      out_file: File object to output the content to.
+      base_indent: Base indentation for report lines.
+      sub_indent: Added indentation for sub-reports.
     """
-
     report_lines = self.GenerateLines(base_indent, sub_indent)
     if report_lines and not self.is_finalized:
       report_lines.append('(incomplete report)\n')
@@ -226,11 +222,10 @@
     """Adds a field/value pair to the payload report.
 
     Args:
-      name: the field's name
-      value: the field's value
-      linebreak: whether the value should be printed on a new line
-      indent: amount of extra indent for each line of the value
-
+      name: The field's name.
+      value: The field's value.
+      linebreak: Whether the value should be printed on a new line.
+      indent: Amount of extra indent for each line of the value.
     """
     assert not self.is_finalized
     if name and self.last_section.max_field_name_len < len(name):
@@ -258,12 +253,12 @@
 #
 # Payload verification.
 #
+
 class PayloadChecker(object):
   """Checking the integrity of an update payload.
 
   This is a short-lived object whose purpose is to isolate the logic used for
   verifying the integrity of an update payload.
-
   """
 
   def __init__(self, payload, assert_type=None, block_size=0,
@@ -271,23 +266,24 @@
     """Initialize the checker.
 
     Args:
-      payload: the payload object to check
-      assert_type: assert that payload is either 'full' or 'delta' (optional)
-      block_size: expected filesystem / payload block size (optional)
-      allow_unhashed: allow operations with unhashed data blobs
-      disabled_tests: list of tests to disable
-
+      payload: The payload object to check.
+      assert_type: Assert that payload is either 'full' or 'delta' (optional).
+      block_size: Expected filesystem / payload block size (optional).
+      allow_unhashed: Allow operations with unhashed data blobs.
+      disabled_tests: Sequence of tests to disable.
     """
-    assert payload.is_init, 'uninitialized update payload'
+    if not payload.is_init:
+      raise ValueError('Uninitialized update payload.')
 
     # Set checker configuration.
     self.payload = payload
     self.block_size = block_size if block_size else _DEFAULT_BLOCK_SIZE
     if not _IsPowerOfTwo(self.block_size):
-      raise PayloadError('expected block (%d) size is not a power of two' %
-                         self.block_size)
+      raise error.PayloadError(
+          'Expected block (%d) size is not a power of two.' % self.block_size)
     if assert_type not in (None, _TYPE_FULL, _TYPE_DELTA):
-      raise PayloadError("invalid assert_type value (`%s')" % assert_type)
+      raise error.PayloadError('Invalid assert_type value (%r).' %
+                               assert_type)
     self.payload_type = assert_type
     self.allow_unhashed = allow_unhashed
 
@@ -316,39 +312,38 @@
     causes an exception to be raised.
 
     Args:
-      msg: the message containing the element
-      name: the name of the element
-      report: a report object to add the element name/value to
-      is_mandatory: whether or not this element must be present
-      is_submsg: whether this element is itself a message
-      convert: a function for converting the element value for reporting
-      msg_name: the name of the message object (for error reporting)
-      linebreak: whether the value report should induce a line break
-      indent: amount of indent used for reporting the value
+      msg: The message containing the element.
+      name: The name of the element.
+      report: A report object to add the element name/value to.
+      is_mandatory: Whether or not this element must be present.
+      is_submsg: Whether this element is itself a message.
+      convert: A function for converting the element value for reporting.
+      msg_name: The name of the message object (for error reporting).
+      linebreak: Whether the value report should induce a line break.
+      indent: Amount of indent used for reporting the value.
     Returns:
       A pair consisting of the element value and the generated sub-report for
       it (if the element is a sub-message, None otherwise). If the element is
       missing, returns (None, None).
     Raises:
-      PayloadError if a mandatory element is missing.
-
+      error.PayloadError if a mandatory element is missing.
     """
     if not msg.HasField(name):
       if is_mandatory:
-        raise PayloadError("%smissing mandatory %s '%s'" %
-                           (msg_name + ' ' if msg_name else '',
-                            'sub-message' if is_submsg else 'field',
-                            name))
-      return (None, None)
+        raise error.PayloadError('%smissing mandatory %s %r.' %
+                                 (msg_name + ' ' if msg_name else '',
+                                  'sub-message' if is_submsg else 'field',
+                                  name))
+      return None, None
 
     value = getattr(msg, name)
     if is_submsg:
-      return (value, report and report.AddSubReport(name))
+      return value, report and report.AddSubReport(name)
     else:
       if report:
         report.AddField(name, convert(value), linebreak=linebreak,
                         indent=indent)
-      return (value, None)
+      return value, None
 
   @staticmethod
   def _CheckMandatoryField(msg, field_name, report, msg_name, convert=str,
@@ -382,68 +377,77 @@
     """Checks that val1 is None iff val2 is None.
 
     Args:
-      val1: first value to be compared
-      val2: second value to be compared
-      name1: name of object holding the first value
-      name2: name of object holding the second value
-      obj_name: name of the object containing these values
+      val1: First value to be compared.
+      val2: Second value to be compared.
+      name1: Name of object holding the first value.
+      name2: Name of object holding the second value.
+      obj_name: Name of the object containing these values.
     Raises:
-      PayloadError if assertion does not hold.
-
+      error.PayloadError if assertion does not hold.
     """
     if None in (val1, val2) and val1 is not val2:
       present, missing = (name1, name2) if val2 is None else (name2, name1)
-      raise PayloadError("'%s' present without '%s'%s" %
-                         (present, missing,
-                          ' in ' + obj_name if obj_name else ''))
+      raise error.PayloadError('%r present without %r%s.' %
+                               (present, missing,
+                                ' in ' + obj_name if obj_name else ''))
 
   @staticmethod
   def _Run(cmd, send_data=None):
     """Runs a subprocess, returns its output.
 
     Args:
-      cmd: list of command-line argument for invoking the subprocess
-      send_data: data to feed to the process via its stdin
+      cmd: Sequence of command-line arguments for invoking the subprocess.
+      send_data: Data to feed to the process via its stdin.
     Returns:
       A tuple containing the stdout and stderr output of the process.
-
     """
     run_process = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE)
-    return run_process.communicate(input=send_data)
+    try:
+      result = run_process.communicate(input=send_data)
+    finally:
+      exit_code = run_process.wait()
+
+    if exit_code:
+      raise RuntimeError('Subprocess %r failed with code %r.' %
+                         (cmd, exit_code))
+
+    return result
 
   @staticmethod
   def _CheckSha256Signature(sig_data, pubkey_file_name, actual_hash, sig_name):
     """Verifies an actual hash against a signed one.
 
     Args:
-      sig_data: the raw signature data
-      pubkey_file_name: public key used for verifying signature
-      actual_hash: the actual hash digest
-      sig_name: signature name for error reporting
+      sig_data: The raw signature data.
+      pubkey_file_name: Public key used for verifying signature.
+      actual_hash: The actual hash digest.
+      sig_name: Signature name for error reporting.
     Raises:
-      PayloadError if signature could not be verified.
-
+      error.PayloadError if signature could not be verified.
     """
     if len(sig_data) != 256:
-      raise PayloadError('%s: signature size (%d) not as expected (256)' %
-                         (sig_name, len(sig_data)))
+      raise error.PayloadError(
+          '%s: signature size (%d) not as expected (256).' %
+          (sig_name, len(sig_data)))
     signed_data, _ = PayloadChecker._Run(
         ['openssl', 'rsautl', '-verify', '-pubin', '-inkey', pubkey_file_name],
         send_data=sig_data)
 
     if len(signed_data) != len(common.SIG_ASN1_HEADER) + 32:
-      raise PayloadError('%s: unexpected signed data length (%d)' %
-                         (sig_name, len(signed_data)))
+      raise error.PayloadError('%s: unexpected signed data length (%d).' %
+                               (sig_name, len(signed_data)))
 
     if not signed_data.startswith(common.SIG_ASN1_HEADER):
-      raise PayloadError('%s: not containing standard ASN.1 prefix' % sig_name)
+      raise error.PayloadError('%s: not containing standard ASN.1 prefix.' %
+                               sig_name)
 
     signed_hash = signed_data[len(common.SIG_ASN1_HEADER):]
     if signed_hash != actual_hash:
-      raise PayloadError('%s: signed hash (%s) different from actual (%s)' %
-                         (sig_name, common.FormatSha256(signed_hash),
-                          common.FormatSha256(actual_hash)))
+      raise error.PayloadError(
+          '%s: signed hash (%s) different from actual (%s).' %
+          (sig_name, common.FormatSha256(signed_hash),
+           common.FormatSha256(actual_hash)))
 
   @staticmethod
   def _CheckBlocksFitLength(length, num_blocks, block_size, length_name,
@@ -454,40 +458,38 @@
     length of the data residing in these blocks.
 
     Args:
-      length: the actual length of the data
-      num_blocks: the number of blocks allocated for it
-      block_size: the size of each block in bytes
-      length_name: name of length (used for error reporting)
-      block_name: name of block (used for error reporting)
+      length: The actual length of the data.
+      num_blocks: The number of blocks allocated for it.
+      block_size: The size of each block in bytes.
+      length_name: Name of length (used for error reporting).
+      block_name: Name of block (used for error reporting).
     Raises:
-      PayloadError if the aforementioned invariant is not satisfied.
-
+      error.PayloadError if the aforementioned invariant is not satisfied.
     """
     # Check: length <= num_blocks * block_size.
     if length > num_blocks * block_size:
-      raise PayloadError(
-          '%s (%d) > num %sblocks (%d) * block_size (%d)' %
+      raise error.PayloadError(
+          '%s (%d) > num %sblocks (%d) * block_size (%d).' %
           (length_name, length, block_name or '', num_blocks, block_size))
 
     # Check: length > (num_blocks - 1) * block_size.
     if length <= (num_blocks - 1) * block_size:
-      raise PayloadError(
-          '%s (%d) <= (num %sblocks - 1 (%d)) * block_size (%d)' %
+      raise error.PayloadError(
+          '%s (%d) <= (num %sblocks - 1 (%d)) * block_size (%d).' %
           (length_name, length, block_name or '', num_blocks - 1, block_size))
 
   def _CheckManifest(self, report, rootfs_part_size=0, kernel_part_size=0):
     """Checks the payload manifest.
 
     Args:
-      report: a report object to add to
-      rootfs_part_size: size of the rootfs partition in bytes
-      kernel_part_size: size of the kernel partition in bytes
+      report: A report object to add to.
+      rootfs_part_size: Size of the rootfs partition in bytes.
+      kernel_part_size: Size of the kernel partition in bytes.
     Returns:
       A tuple consisting of the partition block size used during the update
       (integer), the signatures block offset and size.
     Raises:
-      PayloadError if any of the checks fail.
-
+      error.PayloadError if any of the checks fail.
     """
     manifest = self.payload.manifest
     report.AddSection('manifest')
@@ -496,8 +498,8 @@
     actual_block_size = self._CheckMandatoryField(manifest, 'block_size',
                                                   report, 'manifest')
     if actual_block_size != self.block_size:
-      raise PayloadError('block_size (%d) not as expected (%d)' %
-                         (actual_block_size, self.block_size))
+      raise error.PayloadError('Block_size (%d) not as expected (%d).' %
+                               (actual_block_size, self.block_size))
 
     # Check: signatures_offset <==> signatures_size.
     self.sigs_offset = self._CheckOptionalField(manifest, 'signatures_offset',
@@ -517,8 +519,8 @@
     if oki_msg:  # equivalently, ori_msg
       # Assert/mark delta payload.
       if self.payload_type == _TYPE_FULL:
-        raise PayloadError(
-            'apparent full payload contains old_{kernel,rootfs}_info')
+        raise error.PayloadError(
+            'Apparent full payload contains old_{kernel,rootfs}_info.')
       self.payload_type = _TYPE_DELTA
 
       # Check: {size, hash} present in old_{kernel,rootfs}_info.
@@ -533,18 +535,18 @@
 
       # Check: old_{kernel,rootfs} size must fit in respective partition.
       if kernel_part_size and self.old_kernel_fs_size > kernel_part_size:
-        raise PayloadError(
-            'old kernel content (%d) exceed partition size (%d)' %
+        raise error.PayloadError(
+            'Old kernel content (%d) exceed partition size (%d).' %
             (self.old_kernel_fs_size, kernel_part_size))
       if rootfs_part_size and self.old_rootfs_fs_size > rootfs_part_size:
-        raise PayloadError(
-            'old rootfs content (%d) exceed partition size (%d)' %
+        raise error.PayloadError(
+            'Old rootfs content (%d) exceed partition size (%d).' %
             (self.old_rootfs_fs_size, rootfs_part_size))
     else:
       # Assert/mark full payload.
       if self.payload_type == _TYPE_DELTA:
-        raise PayloadError(
-            'apparent delta payload missing old_{kernel,rootfs}_info')
+        raise error.PayloadError(
+            'Apparent delta payload missing old_{kernel,rootfs}_info.')
       self.payload_type = _TYPE_FULL
 
     # Check: new_kernel_info present; contains {size, hash}.
@@ -565,34 +567,33 @@
 
     # Check: new_{kernel,rootfs} size must fit in respective partition.
     if kernel_part_size and self.new_kernel_fs_size > kernel_part_size:
-      raise PayloadError(
-          'new kernel content (%d) exceed partition size (%d)' %
+      raise error.PayloadError(
+          'New kernel content (%d) exceed partition size (%d).' %
           (self.new_kernel_fs_size, kernel_part_size))
     if rootfs_part_size and self.new_rootfs_fs_size > rootfs_part_size:
-      raise PayloadError(
-          'new rootfs content (%d) exceed partition size (%d)' %
+      raise error.PayloadError(
+          'New rootfs content (%d) exceed partition size (%d).' %
           (self.new_rootfs_fs_size, rootfs_part_size))
 
-    # Check: payload must contain at least one operation.
+    # Check: Payload must contain at least one operation.
     if not(len(manifest.install_operations) or
            len(manifest.kernel_install_operations)):
-      raise PayloadError('payload contains no operations')
+      raise error.PayloadError('Payload contains no operations.')
 
   def _CheckLength(self, length, total_blocks, op_name, length_name):
     """Checks whether a length matches the space designated in extents.
 
     Args:
-      length: the total length of the data
-      total_blocks: the total number of blocks in extents
-      op_name: operation name (for error reporting)
-      length_name: length name (for error reporting)
+      length: The total length of the data.
+      total_blocks: The total number of blocks in extents.
+      op_name: Operation name (for error reporting).
+      length_name: Length name (for error reporting).
     Raises:
-      PayloadError is there a problem with the length.
-
+      error.PayloadError if there is a problem with the length.
     """
     # Check: length is non-zero.
     if length == 0:
-      raise PayloadError('%s: %s is zero' % (op_name, length_name))
+      raise error.PayloadError('%s: %s is zero.' % (op_name, length_name))
 
     # Check that length matches number of blocks.
     self._CheckBlocksFitLength(length, total_blocks, self.block_size,
@@ -603,21 +604,20 @@
     """Checks a sequence of extents.
 
     Args:
-      extents: the sequence of extents to check
-      usable_size: the usable size of the partition to which the extents apply
-      block_counters: an array of counters corresponding to the number of blocks
-      name: the name of the extent block
-      allow_pseudo: whether or not pseudo block numbers are allowed
-      allow_signature: whether or not the extents are used for a signature
+      extents: The sequence of extents to check.
+      usable_size: The usable size of the partition to which the extents apply.
+      block_counters: Array of counters corresponding to the number of blocks.
+      name: The name of the extent block.
+      allow_pseudo: Whether or not pseudo block numbers are allowed.
+      allow_signature: Whether or not the extents are used for a signature.
     Returns:
       The total number of blocks in the extents.
     Raises:
-      PayloadError if any of the entailed checks fails.
-
+      error.PayloadError if any of the entailed checks fails.
     """
     total_num_blocks = 0
     for ex, ex_name in common.ExtentIter(extents, name):
-      # Check: mandatory fields.
+      # Check: Mandatory fields.
       start_block = PayloadChecker._CheckMandatoryField(ex, 'start_block',
                                                         None, ex_name)
       num_blocks = PayloadChecker._CheckMandatoryField(ex, 'num_blocks', None,
@@ -626,22 +626,22 @@
 
       # Check: num_blocks > 0.
       if num_blocks == 0:
-        raise PayloadError('%s: extent length is zero' % ex_name)
+        raise error.PayloadError('%s: extent length is zero.' % ex_name)
 
       if start_block != common.PSEUDO_EXTENT_MARKER:
-        # Check: make sure we're within the partition limit.
+        # Check: Make sure we're within the partition limit.
         if usable_size and end_block * self.block_size > usable_size:
-          raise PayloadError(
-              '%s: extent (%s) exceeds usable partition size (%d)' %
+          raise error.PayloadError(
+              '%s: extent (%s) exceeds usable partition size (%d).' %
               (ex_name, common.FormatExtent(ex, self.block_size), usable_size))
 
         # Record block usage.
-        for i in range(start_block, end_block):
+        for i in xrange(start_block, end_block):
           block_counters[i] += 1
       elif not (allow_pseudo or (allow_signature and len(extents) == 1)):
         # Pseudo-extents must be allowed explicitly, or otherwise be part of a
         # signature operation (in which case there has to be exactly one).
-        raise PayloadError('%s: unexpected pseudo-extent' % ex_name)
+        raise error.PayloadError('%s: unexpected pseudo-extent.' % ex_name)
 
       total_num_blocks += num_blocks
 
@@ -651,21 +651,20 @@
     """Specific checks for REPLACE/REPLACE_BZ operations.
 
     Args:
-      op: the operation object from the manifest
-      data_length: the length of the data blob associated with the operation
-      total_dst_blocks: total number of blocks in dst_extents
-      op_name: operation name for error reporting
+      op: The operation object from the manifest.
+      data_length: The length of the data blob associated with the operation.
+      total_dst_blocks: Total number of blocks in dst_extents.
+      op_name: Operation name for error reporting.
     Raises:
-      PayloadError if any check fails.
-
+      error.PayloadError if any check fails.
     """
-    # Check: does not contain src extents.
+    # Check: Does not contain src extents.
     if op.src_extents:
-      raise PayloadError('%s: contains src_extents' % op_name)
+      raise error.PayloadError('%s: contains src_extents.' % op_name)
 
-    # Check: contains data.
+    # Check: Contains data.
     if data_length is None:
-      raise PayloadError('%s: missing data_{offset,length}' % op_name)
+      raise error.PayloadError('%s: missing data_{offset,length}.' % op_name)
 
     if op.type == common.OpType.REPLACE:
       PayloadChecker._CheckBlocksFitLength(data_length, total_dst_blocks,
@@ -674,9 +673,9 @@
     else:
       # Check: data_length must be smaller than the alotted dst blocks.
       if data_length >= total_dst_blocks * self.block_size:
-        raise PayloadError(
+        raise error.PayloadError(
             '%s: data_length (%d) must be less than allotted dst block '
-            'space (%d * %d)' %
+            'space (%d * %d).' %
             (op_name, data_length, total_dst_blocks, self.block_size))
 
   def _CheckMoveOperation(self, op, data_offset, total_src_blocks,
@@ -684,26 +683,25 @@
     """Specific checks for MOVE operations.
 
     Args:
-      op: the operation object from the manifest
-      data_offset: the offset of a data blob for the operation
-      total_src_blocks: total number of blocks in src_extents
-      total_dst_blocks: total number of blocks in dst_extents
-      op_name: operation name for error reporting
+      op: The operation object from the manifest.
+      data_offset: The offset of a data blob for the operation.
+      total_src_blocks: Total number of blocks in src_extents.
+      total_dst_blocks: Total number of blocks in dst_extents.
+      op_name: Operation name for error reporting.
     Raises:
-      PayloadError if any check fails.
-
+      error.PayloadError if any check fails.
     """
-    # Check: no data_{offset,length}.
+    # Check: No data_{offset,length}.
     if data_offset is not None:
-      raise PayloadError('%s: contains data_{offset,length}' % op_name)
+      raise error.PayloadError('%s: contains data_{offset,length}.' % op_name)
 
-    # Check: total src blocks == total dst blocks.
+    # Check: total_src_blocks == total_dst_blocks.
     if total_src_blocks != total_dst_blocks:
-      raise PayloadError(
-          '%s: total src blocks (%d) != total dst blocks (%d)' %
+      raise error.PayloadError(
+          '%s: total src blocks (%d) != total dst blocks (%d).' %
           (op_name, total_src_blocks, total_dst_blocks))
 
-    # Check: for all i, i-th src block index != i-th dst block index.
+    # Check: For all i, i-th src block index != i-th dst block index.
     i = 0
     src_extent_iter = iter(op.src_extents)
     dst_extent_iter = iter(op.dst_extents)
@@ -715,8 +713,8 @@
         try:
           src_extent = src_extent_iter.next()
         except StopIteration:
-          raise PayloadError('%s: ran out of src extents (%d/%d)' %
-                             (op_name, i, total_src_blocks))
+          raise error.PayloadError('%s: ran out of src extents (%d/%d).' %
+                                   (op_name, i, total_src_blocks))
         src_idx = src_extent.start_block
         src_num = src_extent.num_blocks
 
@@ -725,14 +723,15 @@
         try:
           dst_extent = dst_extent_iter.next()
         except StopIteration:
-          raise PayloadError('%s: ran out of dst extents (%d/%d)' %
-                             (op_name, i, total_dst_blocks))
+          raise error.PayloadError('%s: ran out of dst extents (%d/%d).' %
+                                   (op_name, i, total_dst_blocks))
         dst_idx = dst_extent.start_block
         dst_num = dst_extent.num_blocks
 
       if self.check_move_same_src_dst_block and src_idx == dst_idx:
-        raise PayloadError('%s: src/dst block number %d is the same (%d)' %
-                           (op_name, i, src_idx))
+        raise error.PayloadError(
+            '%s: src/dst block number %d is the same (%d).' %
+            (op_name, i, src_idx))
 
       advance = min(src_num, dst_num)
       i += advance
@@ -749,30 +748,29 @@
 
     # Make sure we've exhausted all src/dst extents.
     if src_extent:
-      raise PayloadError('%s: excess src blocks' % op_name)
+      raise error.PayloadError('%s: excess src blocks.' % op_name)
     if dst_extent:
-      raise PayloadError('%s: excess dst blocks' % op_name)
+      raise error.PayloadError('%s: excess dst blocks.' % op_name)
 
   def _CheckBsdiffOperation(self, data_length, total_dst_blocks, op_name):
     """Specific checks for BSDIFF operations.
 
     Args:
-      data_length: the length of the data blob associated with the operation
-      total_dst_blocks: total number of blocks in dst_extents
-      op_name: operation name for error reporting
+      data_length: The length of the data blob associated with the operation.
+      total_dst_blocks: Total number of blocks in dst_extents.
+      op_name: Operation name for error reporting.
     Raises:
-      PayloadError if any check fails.
-
+      error.PayloadError if any check fails.
     """
     # Check: data_{offset,length} present.
     if data_length is None:
-      raise PayloadError('%s: missing data_{offset,length}' % op_name)
+      raise error.PayloadError('%s: missing data_{offset,length}.' % op_name)
 
     # Check: data_length is strictly smaller than the alotted dst blocks.
     if data_length >= total_dst_blocks * self.block_size:
-      raise PayloadError(
+      raise error.PayloadError(
           '%s: data_length (%d) must be smaller than allotted dst space '
-          '(%d * %d = %d)' %
+          '(%d * %d = %d).' %
           (op_name, data_length, total_dst_blocks, self.block_size,
            total_dst_blocks * self.block_size))
 
@@ -782,21 +780,20 @@
     """Checks a single update operation.
 
     Args:
-      op: the operation object
-      op_name: operation name string for error reporting
-      is_last: whether this is the last operation in the sequence
-      old_block_counters: arrays of block read counters
-      new_block_counters: arrays of block write counters
-      old_usable_size: the overall usable size for src data in bytes
-      new_usable_size: the overall usable size for dst data in bytes
-      prev_data_offset: offset of last used data bytes
-      allow_signature: whether this may be a signature operation
-      blob_hash_counts: counters for hashed/unhashed blobs
+      op: The operation object.
+      op_name: Operation name string for error reporting.
+      is_last: Whether this is the last operation in the sequence.
+      old_block_counters: Arrays of block read counters.
+      new_block_counters: Arrays of block write counters.
+      old_usable_size: The overall usable size for src data in bytes.
+      new_usable_size: The overall usable size for dst data in bytes.
+      prev_data_offset: Offset of last used data bytes.
+      allow_signature: Whether this may be a signature operation.
+      blob_hash_counts: Counters for hashed/unhashed blobs.
     Returns:
       The amount of data blob associated with the operation.
     Raises:
-      PayloadError if any check has failed.
-
+      error.PayloadError if any check has failed.
     """
     # Check extents.
     total_src_blocks = self._CheckExtents(
@@ -816,9 +813,9 @@
     self._CheckPresentIff(data_offset, data_length, 'data_offset',
                           'data_length', op_name)
 
-    # Check: at least one dst_extent.
+    # Check: At least one dst_extent.
     if not op.dst_extents:
-      raise PayloadError('%s: dst_extents is empty' % op_name)
+      raise error.PayloadError('%s: dst_extents is empty.' % op_name)
 
     # Check {src,dst}_length, if present.
     if op.HasField('src_length'):
@@ -829,19 +826,20 @@
     if op.HasField('data_sha256_hash'):
       blob_hash_counts['hashed'] += 1
 
-      # Check: operation carries data.
+      # Check: Operation carries data.
       if data_offset is None:
-        raise PayloadError(
-            '%s: data_sha256_hash present but no data_{offset,length}' %
+        raise error.PayloadError(
+            '%s: data_sha256_hash present but no data_{offset,length}.' %
             op_name)
 
-      # Check: hash verifies correctly.
+      # Check: Hash verifies correctly.
+      # pylint cannot find the method in hashlib, for some reason.
       # pylint: disable=E1101
       actual_hash = hashlib.sha256(self.payload.ReadDataBlob(data_offset,
                                                              data_length))
       if op.data_sha256_hash != actual_hash.digest():
-        raise PayloadError(
-            '%s: data_sha256_hash (%s) does not match actual hash (%s)' %
+        raise error.PayloadError(
+            '%s: data_sha256_hash (%s) does not match actual hash (%s).' %
             (op_name, common.FormatSha256(op.data_sha256_hash),
              common.FormatSha256(actual_hash.digest())))
     elif data_offset is not None:
@@ -850,21 +848,22 @@
       elif self.allow_unhashed:
         blob_hash_counts['unhashed'] += 1
       else:
-        raise PayloadError('%s: unhashed operation not allowed' % op_name)
+        raise error.PayloadError('%s: unhashed operation not allowed.' %
+                                 op_name)
 
     if data_offset is not None:
-      # Check: contiguous use of data section.
+      # Check: Contiguous use of data section.
       if data_offset != prev_data_offset:
-        raise PayloadError(
-            '%s: data offset (%d) not matching amount used so far (%d)' %
+        raise error.PayloadError(
+            '%s: data offset (%d) not matching amount used so far (%d).' %
             (op_name, data_offset, prev_data_offset))
 
     # Type-specific checks.
     if op.type in (common.OpType.REPLACE, common.OpType.REPLACE_BZ):
       self._CheckReplaceOperation(op, data_length, total_dst_blocks, op_name)
     elif self.payload_type == _TYPE_FULL:
-      raise PayloadError('%s: non-REPLACE operation in a full payload' %
-                         op_name)
+      raise error.PayloadError('%s: non-REPLACE operation in a full payload.' %
+                               op_name)
     elif op.type == common.OpType.MOVE:
       self._CheckMoveOperation(op, data_offset, total_src_blocks,
                                total_dst_blocks, op_name)
@@ -882,14 +881,17 @@
   def _AllocBlockCounters(self, total_size):
     """Returns a freshly initialized array of block counters.
 
+    Note that the generated array is not portable as is due to byte-ordering
+    issues, hence it should not be serialized.
+
     Args:
-      total_size: the total block size in bytes
+      total_size: The total block size in bytes.
     Returns:
       An array of unsigned short elements initialized to zero, one for each of
       the blocks necessary for containing the partition.
-
     """
-    return array.array('H', [0] * self._SizeToNumBlocks(total_size))
+    return array.array('H',
+                       itertools.repeat(0, self._SizeToNumBlocks(total_size)))
 
   def _CheckOperations(self, operations, report, base_name, old_fs_size,
                        new_fs_size, new_usable_size, prev_data_offset,
@@ -897,19 +899,18 @@
     """Checks a sequence of update operations.
 
     Args:
-      operations: the sequence of operations to check
-      report: the report object to add to
-      base_name: the name of the operation block
-      old_fs_size: the old filesystem size in bytes
-      new_fs_size: the new filesystem size in bytes
-      new_usable_size: the overall usable size of the new partition in bytes
-      prev_data_offset: offset of last used data bytes
-      allow_signature: whether this sequence may contain signature operations
+      operations: The sequence of operations to check.
+      report: The report object to add to.
+      base_name: The name of the operation block.
+      old_fs_size: The old filesystem size in bytes.
+      new_fs_size: The new filesystem size in bytes.
+      new_usable_size: The overall usable size of the new partition in bytes.
+      prev_data_offset: Offset of last used data bytes.
+      allow_signature: Whether this sequence may contain signature operations.
     Returns:
       The total data blob size used.
     Raises:
-      PayloadError if any of the checks fails.
-
+      error.PayloadError if any of the checks fails.
     """
     # The total size of data blobs used by operations scanned thus far.
     total_data_used = 0
@@ -924,7 +925,7 @@
     op_blob_totals = {
         common.OpType.REPLACE: 0,
         common.OpType.REPLACE_BZ: 0,
-        # MOVE operations don't have blobs
+        # MOVE operations don't have blobs.
         common.OpType.BSDIFF: 0,
     }
     # Counts of hashed vs unhashed operations.
@@ -945,9 +946,9 @@
     for op, op_name in common.OperationIter(operations, base_name):
       op_num += 1
 
-      # Check: type is valid.
+      # Check: Type is valid.
       if op.type not in op_counts.keys():
-        raise PayloadError('%s: invalid type (%d)' % (op_name, op.type))
+        raise error.PayloadError('%s: invalid type (%d).' % (op_name, op.type))
       op_counts[op.type] += 1
 
       is_last = op_num == len(operations)
@@ -990,10 +991,10 @@
     report.AddField('block write hist', new_write_hist, linebreak=True,
                     indent=1)
 
-    # Check: full update must write each dst block once.
+    # Check: Full update must write each dst block once.
     if self.payload_type == _TYPE_FULL and new_write_hist.GetKeys() != [1]:
-      raise PayloadError(
-          '%s: not all blocks written exactly once during full update' %
+      raise error.PayloadError(
+          '%s: not all blocks written exactly once during full update.' %
           base_name)
 
     return total_data_used
@@ -1005,10 +1006,11 @@
     sigs.ParseFromString(sigs_raw)
     report.AddSection('signatures')
 
-    # Check: at least one signature present.
+    # Check: At least one signature present.
+    # pylint cannot see through the protobuf object, it seems.
     # pylint: disable=E1101
     if not sigs.signatures:
-      raise PayloadError('signature block is empty')
+      raise error.PayloadError('Signature block is empty.')
 
     last_ops_section = (self.payload.manifest.kernel_install_operations or
                         self.payload.manifest.install_operations)
@@ -1017,9 +1019,9 @@
     if not (fake_sig_op.type == common.OpType.REPLACE and
             self.sigs_offset == fake_sig_op.data_offset and
             self.sigs_size == fake_sig_op.data_length):
-      raise PayloadError(
-          'signatures_{offset,size} (%d+%d) does not match last operation '
-          '(%d+%d)' %
+      raise error.PayloadError(
+          'signatures_{offset,size} (%d+%d) does not match last operation '
+          '(%d+%d).' %
           (self.sigs_offset, self.sigs_size, fake_sig_op.data_offset,
            fake_sig_op.data_length))
 
@@ -1035,33 +1037,33 @@
     for sig, sig_name in common.SignatureIter(sigs.signatures, 'signatures'):
       sig_report = report.AddSubReport(sig_name)
 
-      # Check: signature contains mandatory fields.
+      # Check: Signature contains mandatory fields.
       self._CheckMandatoryField(sig, 'version', sig_report, sig_name)
       self._CheckMandatoryField(sig, 'data', None, sig_name)
       sig_report.AddField('data len', len(sig.data))
 
-      # Check: signatures pertains to actual payload hash.
+      # Check: Signature pertains to actual payload hash.
       if sig.version == 1:
         self._CheckSha256Signature(sig.data, pubkey_file_name,
                                    payload_hasher.digest(), sig_name)
       else:
-        raise PayloadError('unknown signature version (%d)' % sig.version)
+        raise error.PayloadError('Unknown signature version (%d).' %
+                                 sig.version)
 
   def Run(self, pubkey_file_name=None, metadata_sig_file=None,
           rootfs_part_size=0, kernel_part_size=0, report_out_file=None):
     """Checker entry point, invoking all checks.
 
     Args:
-      pubkey_file_name: public key used for signature verification
-      metadata_sig_file: metadata signature, if verification is desired
-      rootfs_part_size: the size of rootfs partitions in bytes (default: use
-                        reported filesystem size)
-      kernel_part_size: the size of kernel partitions in bytes (default: use
-                        reported filesystem size)
-      report_out_file: file object to dump the report to
+      pubkey_file_name: Public key used for signature verification.
+      metadata_sig_file: Metadata signature, if verification is desired.
+      rootfs_part_size: The size of rootfs partitions in bytes (default: use
+                        reported filesystem size).
+      kernel_part_size: The size of kernel partitions in bytes (default: use
+                        reported filesystem size).
+      report_out_file: File object to dump the report to.
     Raises:
-      PayloadError if payload verification failed.
-
+      error.PayloadError if payload verification failed.
     """
     if not pubkey_file_name:
       pubkey_file_name = _DEFAULT_PUBKEY_FILE_NAME
@@ -1081,20 +1083,20 @@
                                    self.payload.manifest_hasher.digest(),
                                    'metadata signature')
 
-      # Part 1: check the file header.
+      # Part 1: Check the file header.
       report.AddSection('header')
-      # Check: payload version is valid.
+      # Check: Payload version is valid.
       if self.payload.header.version != 1:
-        raise PayloadError('unknown payload version (%d)' %
-                           self.payload.header.version)
+        raise error.PayloadError('Unknown payload version (%d).' %
+                                 self.payload.header.version)
       report.AddField('version', self.payload.header.version)
       report.AddField('manifest len', self.payload.header.manifest_len)
 
-      # Part 2: check the manifest.
+      # Part 2: Check the manifest.
       self._CheckManifest(report, rootfs_part_size, kernel_part_size)
       assert self.payload_type, 'payload type should be known by now'
 
-      # Part 3: examine rootfs operations.
+      # Part 3: Examine rootfs operations.
       # TODO(garnold)(chromium:243559) only default to the filesystem size if
       # no explicit size provided *and* the partition size is not embedded in
       # the payload; see issue for more details.
@@ -1106,7 +1108,7 @@
           rootfs_part_size if rootfs_part_size else self.new_rootfs_fs_size,
           0, False)
 
-      # Part 4: examine kernel operations.
+      # Part 4: Examine kernel operations.
       # TODO(garnold)(chromium:243559) as above.
       report.AddSection('kernel operations')
       total_blob_size += self._CheckOperations(
@@ -1116,18 +1118,18 @@
           kernel_part_size if kernel_part_size else self.new_kernel_fs_size,
           total_blob_size, True)
 
-      # Check: operations data reach the end of the payload file.
+      # Check: Operations data reach the end of the payload file.
       used_payload_size = self.payload.data_offset + total_blob_size
       if used_payload_size != payload_file_size:
-        raise PayloadError(
-            'used payload size (%d) different from actual file size (%d)' %
+        raise error.PayloadError(
+            'Used payload size (%d) different from actual file size (%d).' %
             (used_payload_size, payload_file_size))
 
-      # Part 5: handle payload signatures message.
+      # Part 5: Handle payload signatures message.
       if self.check_payload_sig and self.sigs_size:
         self._CheckSignatures(report, pubkey_file_name)
 
-      # Part 6: summary.
+      # Part 6: Summary.
       report.AddSection('summary')
       report.AddField('update type', self.payload_type)
 
diff --git a/host/lib/update_payload/checker_unittest.py b/host/lib/update_payload/checker_unittest.py
index 4b23bd8..2c68637 100755
--- a/host/lib/update_payload/checker_unittest.py
+++ b/host/lib/update_payload/checker_unittest.py
@@ -14,13 +14,13 @@
 import os
 import unittest
 
-# Pylint cannot find mox.
+# pylint cannot find mox.
 # pylint: disable=F0401
 import mox
 
 import checker
 import common
-import payload as update_payload  # avoid name conflicts later.
+import payload as update_payload  # Avoid name conflicts later.
 import test_utils
 import update_metadata_pb2
 
@@ -61,11 +61,11 @@
   return checker.PayloadChecker(payload)
 
 
-# (i) this class doesn't need an __init__();  (ii) unit testing is all about
-# running protected methods;  (iii) don't bark about missing members of classes
-# you cannot import.
+# This class doesn't need an __init__().
 # pylint: disable=W0232
+# Unit testing is all about running protected methods.
 # pylint: disable=W0212
+# Don't bark about missing members of classes you cannot import.
 # pylint: disable=E1101
 class PayloadCheckerTest(mox.MoxTestBase):
   """Tests the PayloadChecker class.
@@ -78,7 +78,6 @@
   individual invocation contexts for each, which are then bound as
   testBar__param1=val1__param2=val2(). The enumeration of parameter spaces for
   all such tests is done in AddAllParametricTests().
-
   """
 
   def MockPayload(self):
@@ -97,11 +96,10 @@
     its default state.
 
     Args:
-      start_block: the starting block of the extent
-      num_blocks: the number of blocks in the extent
+      start_block: The starting block of the extent.
+      num_blocks: The number of blocks in the extent.
     Returns:
       An Extent message.
-
     """
     ex = update_metadata_pb2.Extent()
     if start_block >= 0:
@@ -115,10 +113,9 @@
     """Returns an list of extents.
 
     Args:
-      *args: (start_block, num_blocks) pairs defining the extents
+      *args: (start_block, num_blocks) pairs defining the extents.
     Returns:
       A list of Extent objects.
-
     """
     ex_list = []
     for start_block, num_blocks in args:
@@ -131,6 +128,8 @@
       new_field = repeated_field.add()
       new_field.CopyFrom(field_val)
 
+  # The production environment uses an older Python, this isn't an override.
+  # pylint: disable=W0221
   def assertIsNone(self, val):
     """Asserts that val is None (TODO remove once we upgrade to Python 2.7).
 
@@ -138,28 +137,26 @@
     non-None value.
 
     Args:
-      val: value/object to be equated to None
-
+      val: Value/object to be equated to None.
     """
-    self.assertEqual(val, None)
+    self.assertIs(None, val)
 
   def SetupAddElemTest(self, is_present, is_submsg, convert=str,
                        linebreak=False, indent=0):
     """Setup for testing of _CheckElem() and its derivatives.
 
     Args:
-      is_present: whether or not the element is found in the message
-      is_submsg: whether the element is a sub-message itself
-      convert: a representation conversion function
-      linebreak: whether or not a linebreak is to be used in the report
-      indent: indentation used for the report
+      is_present: Whether or not the element is found in the message.
+      is_submsg: Whether the element is a sub-message itself.
+      convert: A representation conversion function.
+      linebreak: Whether or not a linebreak is to be used in the report.
+      indent: Indentation used for the report.
     Returns:
-      msg: a mock message object
-      report: a mock report object
-      subreport: a mock sub-report object
-      name: an element name to check
-      val: expected element value
-
+      msg: A mock message object.
+      report: A mock report object.
+      subreport: A mock sub-report object.
+      name: An element name to check.
+      val: Expected element value.
     """
     name = 'foo'
     val = 'fake submsg' if is_submsg else 'fake field'
@@ -186,86 +183,83 @@
     """Parametric testing of _CheckElem().
 
     Args:
-      is_present: whether or not the element is found in the message
-      is_mandatory: whether or not it's a mandatory element
-      is_submsg: whether the element is a sub-message itself
-      convert: a representation conversion function
-      linebreak: whether or not a linebreak is to be used in the report
-      indent: indentation used for the report
-
+      is_present: Whether or not the element is found in the message.
+      is_mandatory: Whether or not it's a mandatory element.
+      is_submsg: Whether the element is a sub-message itself.
+      convert: A representation conversion function.
+      linebreak: Whether or not a linebreak is to be used in the report.
+      indent: Indentation used for the report.
     """
     msg, report, subreport, name, val = self.SetupAddElemTest(
         is_present, is_submsg, convert, linebreak, indent)
 
-    largs = [msg, name, report, is_mandatory, is_submsg]
-    dargs = {'convert': convert, 'linebreak': linebreak, 'indent': indent}
+    args = (msg, name, report, is_mandatory, is_submsg)
+    kwargs = {'convert': convert, 'linebreak': linebreak, 'indent': indent}
     if is_mandatory and not is_present:
       self.assertRaises(update_payload.PayloadError,
-                        checker.PayloadChecker._CheckElem, *largs, **dargs)
+                        checker.PayloadChecker._CheckElem, *args, **kwargs)
     else:
-      ret_val, ret_subreport = checker.PayloadChecker._CheckElem(*largs,
-                                                                 **dargs)
-      self.assertEquals(ret_val, val if is_present else None)
-      self.assertEquals(ret_subreport,
-                        subreport if is_present and is_submsg else None)
+      ret_val, ret_subreport = checker.PayloadChecker._CheckElem(*args,
+                                                                 **kwargs)
+      self.assertEquals(val if is_present else None, ret_val)
+      self.assertEquals(subreport if is_present and is_submsg else None,
+                        ret_subreport)
 
   def DoAddFieldTest(self, is_mandatory, is_present, convert, linebreak,
                      indent):
     """Parametric testing of _Check{Mandatory,Optional}Field().
 
     Args:
-      is_mandatory: whether we're testing a mandatory call
-      is_present: whether or not the element is found in the message
-      convert: a representation conversion function
-      linebreak: whether or not a linebreak is to be used in the report
-      indent: indentation used for the report
-
+      is_mandatory: Whether we're testing a mandatory call.
+      is_present: Whether or not the element is found in the message.
+      convert: A representation conversion function.
+      linebreak: Whether or not a linebreak is to be used in the report.
+      indent: Indentation used for the report.
     """
     msg, report, _, name, val = self.SetupAddElemTest(
         is_present, False, convert, linebreak, indent)
 
     # Prepare for invocation of the tested method.
-    largs = [msg, name, report]
-    dargs = {'convert': convert, 'linebreak': linebreak, 'indent': indent}
+    args = [msg, name, report]
+    kwargs = {'convert': convert, 'linebreak': linebreak, 'indent': indent}
     if is_mandatory:
-      largs.append('bar')
+      args.append('bar')
       tested_func = checker.PayloadChecker._CheckMandatoryField
     else:
       tested_func = checker.PayloadChecker._CheckOptionalField
 
     # Test the method call.
     if is_mandatory and not is_present:
-      self.assertRaises(update_payload.PayloadError, tested_func, *largs,
-                        **dargs)
+      self.assertRaises(update_payload.PayloadError, tested_func, *args,
+                        **kwargs)
     else:
-      ret_val = tested_func(*largs, **dargs)
-      self.assertEquals(ret_val, val if is_present else None)
+      ret_val = tested_func(*args, **kwargs)
+      self.assertEquals(val if is_present else None, ret_val)
 
   def DoAddSubMsgTest(self, is_mandatory, is_present):
     """Parametrized testing of _Check{Mandatory,Optional}SubMsg().
 
     Args:
-      is_mandatory: whether we're testing a mandatory call
-      is_present: whether or not the element is found in the message
-
+      is_mandatory: Whether we're testing a mandatory call.
+      is_present: Whether or not the element is found in the message.
     """
     msg, report, subreport, name, val = self.SetupAddElemTest(is_present, True)
 
     # Prepare for invocation of the tested method.
-    largs = [msg, name, report]
+    args = [msg, name, report]
     if is_mandatory:
-      largs.append('bar')
+      args.append('bar')
       tested_func = checker.PayloadChecker._CheckMandatorySubMsg
     else:
       tested_func = checker.PayloadChecker._CheckOptionalSubMsg
 
     # Test the method call.
     if is_mandatory and not is_present:
-      self.assertRaises(update_payload.PayloadError, tested_func, *largs)
+      self.assertRaises(update_payload.PayloadError, tested_func, *args)
     else:
-      ret_val, ret_subreport = tested_func(*largs)
-      self.assertEquals(ret_val, val if is_present else None)
-      self.assertEquals(ret_subreport, subreport if is_present else None)
+      ret_val, ret_subreport = tested_func(*args)
+      self.assertEquals(val if is_present else None, ret_val)
+      self.assertEquals(subreport if is_present else None, ret_subreport)
 
   def testCheckPresentIff(self):
     """Tests _CheckPresentIff()."""
@@ -286,30 +280,31 @@
     """Parametric testing of _CheckSha256SignatureTest().
 
     Args:
-      expect_pass: whether or not it should pass
-      expect_subprocess_call: whether to expect the openssl call to happen
-      sig_data: the signature raw data
-      sig_asn1_header: the ASN1 header
-      returned_signed_hash: the signed hash data retuned by openssl
-      expected_signed_hash: the signed hash data to compare against
-
+      expect_pass: Whether or not it should pass.
+      expect_subprocess_call: Whether to expect the openssl call to happen.
+      sig_data: The signature raw data.
+      sig_asn1_header: The ASN1 header.
+      returned_signed_hash: The signed hash data returned by openssl.
+      expected_signed_hash: The signed hash data to compare against.
     """
-    # Stub out the subprocess invocation.
-    self.mox.StubOutWithMock(checker.PayloadChecker, '_Run')
-    if expect_subprocess_call:
-      checker.PayloadChecker._Run(mox.IsA(list), send_data=sig_data).AndReturn(
-          (sig_asn1_header + returned_signed_hash, None))
+    try:
+      # Stub out the subprocess invocation.
+      self.mox.StubOutWithMock(checker.PayloadChecker, '_Run')
+      if expect_subprocess_call:
+        checker.PayloadChecker._Run(
+            mox.IsA(list), send_data=sig_data).AndReturn(
+                (sig_asn1_header + returned_signed_hash, None))
 
-    self.mox.ReplayAll()
-    if expect_pass:
-      self.assertIsNone(checker.PayloadChecker._CheckSha256Signature(
-          sig_data, 'foo', expected_signed_hash, 'bar'))
-    else:
-      self.assertRaises(update_payload.PayloadError,
-                        checker.PayloadChecker._CheckSha256Signature,
-                        sig_data, 'foo', expected_signed_hash, 'bar')
-
-    self.mox.UnsetStubs()
+      self.mox.ReplayAll()
+      if expect_pass:
+        self.assertIsNone(checker.PayloadChecker._CheckSha256Signature(
+            sig_data, 'foo', expected_signed_hash, 'bar'))
+      else:
+        self.assertRaises(update_payload.PayloadError,
+                          checker.PayloadChecker._CheckSha256Signature,
+                          sig_data, 'foo', expected_signed_hash, 'bar')
+    finally:
+      self.mox.UnsetStubs()
 
   def testCheckSha256Signature_Pass(self):
     """Tests _CheckSha256Signature(); pass case."""
@@ -321,7 +316,7 @@
 
   def testCheckSha256Signature_FailBadSignature(self):
     """Tests _CheckSha256Signature(); fails due to malformed signature."""
-    sig_data = 'fake-signature'  # malformed (not 256 bytes in length)
+    sig_data = 'fake-signature'  # Malformed (not 256 bytes in length).
     signed_hash = hashlib.sha256('fake-data').digest()
     self.DoCheckSha256SignatureTest(False, False, sig_data,
                                     common.SIG_ASN1_HEADER, signed_hash,
@@ -330,7 +325,7 @@
   def testCheckSha256Signature_FailBadOutputLength(self):
     """Tests _CheckSha256Signature(); fails due to unexpected output length."""
     sig_data = 'fake-signature'.ljust(256)
-    signed_hash = 'fake-hash'  # malformed (not 32 bytes in length)
+    signed_hash = 'fake-hash'  # Malformed (not 32 bytes in length).
     self.DoCheckSha256SignatureTest(False, True, sig_data,
                                     common.SIG_ASN1_HEADER, signed_hash,
                                     signed_hash)
@@ -401,19 +396,18 @@
     """Parametric testing of _CheckManifest().
 
     Args:
-      fail_mismatched_block_size: simulate a missing block_size field
-      fail_bad_sigs: make signatures descriptor inconsistent
-      fail_mismatched_oki_ori: make old rootfs/kernel info partially present
-      fail_bad_oki: tamper with old kernel info
-      fail_bad_ori: tamper with old rootfs info
-      fail_bad_nki: tamper with new kernel info
-      fail_bad_nri: tamper with new rootfs info
-      fail_missing_ops: simulate a manifest without any operations
-      fail_old_kernel_fs_size: make old kernel fs size too big
-      fail_old_rootfs_fs_size: make old rootfs fs size too big
-      fail_new_kernel_fs_size: make new kernel fs size too big
-      fail_new_rootfs_fs_size: make new rootfs fs size too big
-
+      fail_mismatched_block_size: Simulate a missing block_size field.
+      fail_bad_sigs: Make signatures descriptor inconsistent.
+      fail_mismatched_oki_ori: Make old rootfs/kernel info partially present.
+      fail_bad_oki: Tamper with old kernel info.
+      fail_bad_ori: Tamper with old rootfs info.
+      fail_bad_nki: Tamper with new kernel info.
+      fail_bad_nri: Tamper with new rootfs info.
+      fail_missing_ops: Simulate a manifest without any operations.
+      fail_old_kernel_fs_size: Make old kernel fs size too big.
+      fail_old_rootfs_fs_size: Make old rootfs fs size too big.
+      fail_new_kernel_fs_size: Make new kernel fs size too big.
+      fail_new_rootfs_fs_size: Make new rootfs fs size too big.
     """
     # Generate a test payload. For this test, we only care about the manifest
     # and don't need any data blobs, hence we can use a plain paylaod generator
@@ -515,26 +509,26 @@
     # Passes w/ all real extents.
     extents = self.NewExtentList((0, 4), (8, 3), (1024, 16))
     self.assertEquals(
+        23,
         payload_checker._CheckExtents(extents, (1024 + 16) * block_size,
-                                      collections.defaultdict(int), 'foo'),
-        23)
+                                      collections.defaultdict(int), 'foo'))
 
     # Passes w/ pseudo-extents (aka sparse holes).
     extents = self.NewExtentList((0, 4), (common.PSEUDO_EXTENT_MARKER, 5),
                                  (8, 3))
     self.assertEquals(
+        12,
         payload_checker._CheckExtents(extents, (1024 + 16) * block_size,
                                       collections.defaultdict(int), 'foo',
-                                      allow_pseudo=True),
-        12)
+                                      allow_pseudo=True))
 
     # Passes w/ pseudo-extent due to a signature.
     extents = self.NewExtentList((common.PSEUDO_EXTENT_MARKER, 2))
     self.assertEquals(
+        2,
         payload_checker._CheckExtents(extents, (1024 + 16) * block_size,
                                       collections.defaultdict(int), 'foo',
-                                      allow_signature=True),
-        2)
+                                      allow_signature=True))
 
     # Fails, extent missing a start block.
     extents = self.NewExtentList((-1, 4), (8, 3), (1024, 16))
@@ -780,19 +774,19 @@
     """Parametric testing of _CheckOperation().
 
     Args:
-      op_type_name: 'REPLACE', 'REPLACE_BZ', 'MOVE' or 'BSDIFF'
-      is_last: whether we're testing the last operation in a sequence
-      allow_signature: whether we're testing a signature-capable operation
-      allow_unhashed: whether we're allowing to not hash the data
-      fail_src_extents: tamper with src extents
-      fail_dst_extents: tamper with dst extents
-      fail_mismatched_data_offset_length: make data_{offset,length} inconsistent
-      fail_missing_dst_extents: do not include dst extents
-      fail_src_length: make src length inconsistent
-      fail_dst_length: make dst length inconsistent
-      fail_data_hash: tamper with the data blob hash
-      fail_prev_data_offset: make data space uses incontiguous
-
+      op_type_name: 'REPLACE', 'REPLACE_BZ', 'MOVE' or 'BSDIFF'.
+      is_last: Whether we're testing the last operation in a sequence.
+      allow_signature: Whether we're testing a signature-capable operation.
+      allow_unhashed: Whether we're allowing to not hash the data.
+      fail_src_extents: Tamper with src extents.
+      fail_dst_extents: Tamper with dst extents.
+      fail_mismatched_data_offset_length: Make data_{offset,length}
+        inconsistent.
+      fail_missing_dst_extents: Do not include dst extents.
+      fail_src_length: Make src length inconsistent.
+      fail_dst_length: Make dst length inconsistent.
+      fail_data_hash: Tamper with the data blob hash.
+      fail_prev_data_offset: Make data space usage incontiguous.
     """
     op_type = _OpTypeByName(op_type_name)
 
@@ -879,15 +873,15 @@
                    fail_mismatched_data_offset_length or
                    fail_missing_dst_extents or fail_src_length or
                    fail_dst_length or fail_data_hash or fail_prev_data_offset)
-    largs = [op, 'foo', is_last, old_block_counters, new_block_counters,
-             old_part_size, new_part_size, prev_data_offset, allow_signature,
-             blob_hash_counts]
+    args = (op, 'foo', is_last, old_block_counters, new_block_counters,
+            old_part_size, new_part_size, prev_data_offset, allow_signature,
+            blob_hash_counts)
     if should_fail:
       self.assertRaises(update_payload.PayloadError,
-                        payload_checker._CheckOperation, *largs)
+                        payload_checker._CheckOperation, *args)
     else:
-      self.assertEqual(payload_checker._CheckOperation(*largs),
-                       op.data_length if op.HasField('data_length') else 0)
+      self.assertEqual(op.data_length if op.HasField('data_length') else 0,
+                       payload_checker._CheckOperation(*args))
 
   def testAllocBlockCounters(self):
     """Tests _CheckMoveOperation()."""
@@ -896,14 +890,14 @@
 
     # Check allocation for block-aligned partition size, ensure it's integers.
     result = payload_checker._AllocBlockCounters(16 * block_size)
-    self.assertEqual(len(result), 16)
-    self.assertEqual(type(result[0]), int)
+    self.assertEqual(16, len(result))
+    self.assertEqual(int, type(result[0]))
 
     # Check allocation of unaligned partition sizes.
     result = payload_checker._AllocBlockCounters(16 * block_size - 1)
-    self.assertEqual(len(result), 16)
+    self.assertEqual(16, len(result))
     result = payload_checker._AllocBlockCounters(16 * block_size + 1)
-    self.assertEqual(len(result), 17)
+    self.assertEqual(17, len(result))
 
   def DoCheckOperationsTest(self, fail_bad_type,
                             fail_nonexhaustive_full_update):
@@ -942,14 +936,14 @@
     report = checker._PayloadReport()
 
     should_fail = (fail_bad_type or fail_nonexhaustive_full_update)
-    largs = (payload_checker.payload.manifest.install_operations, report,
-             'foo', 0, rootfs_part_size, rootfs_part_size, 0, False)
+    args = (payload_checker.payload.manifest.install_operations, report,
+            'foo', 0, rootfs_part_size, rootfs_part_size, 0, False)
     if should_fail:
       self.assertRaises(update_payload.PayloadError,
-                        payload_checker._CheckOperations, *largs)
+                        payload_checker._CheckOperations, *args)
     else:
-      self.assertEqual(payload_checker._CheckOperations(*largs),
-                       rootfs_data_length)
+      self.assertEqual(rootfs_data_length,
+                       payload_checker._CheckOperations(*args))
 
   def DoCheckSignaturesTest(self, fail_empty_sigs_blob, fail_missing_pseudo_op,
                             fail_mismatched_pseudo_op, fail_sig_missing_fields,
@@ -1015,12 +1009,12 @@
     should_fail = (fail_empty_sigs_blob or fail_missing_pseudo_op or
                    fail_mismatched_pseudo_op or fail_sig_missing_fields or
                    fail_unknown_sig_version or fail_incorrect_sig)
-    largs = (report, test_utils._PUBKEY_FILE_NAME)
+    args = (report, test_utils._PUBKEY_FILE_NAME)
     if should_fail:
       self.assertRaises(update_payload.PayloadError,
-                        payload_checker._CheckSignatures, *largs)
+                        payload_checker._CheckSignatures, *args)
     else:
-      self.assertIsNone(payload_checker._CheckSignatures(*largs))
+      self.assertIsNone(payload_checker._CheckSignatures(*args))
 
   def DoRunTest(self, fail_wrong_payload_type, fail_invalid_block_size,
                 fail_mismatched_block_size, fail_excess_data):
@@ -1050,13 +1044,13 @@
 
     # Generate payload (complete w/ signature) and create the test object.
     if fail_invalid_block_size:
-      use_block_size = block_size + 5  # not a power of two
+      use_block_size = block_size + 5  # Not a power of two.
     elif fail_mismatched_block_size:
-      use_block_size = block_size * 2  # different that payload stated
+      use_block_size = block_size * 2  # Different than payload stated.
     else:
       use_block_size = block_size
 
-    dargs = {
+    kwargs = {
         'payload_gen_dargs': {
             'privkey_file_name': test_utils._PRIVKEY_FILE_NAME,
             'do_add_pseudo_operation': True,
@@ -1067,18 +1061,18 @@
             'block_size': use_block_size}}
     if fail_invalid_block_size:
       self.assertRaises(update_payload.PayloadError, _GetPayloadChecker,
-                        payload_gen.WriteToFileWithData, **dargs)
+                        payload_gen.WriteToFileWithData, **kwargs)
     else:
       payload_checker = _GetPayloadChecker(payload_gen.WriteToFileWithData,
-                                           **dargs)
-      dargs = {'pubkey_file_name': test_utils._PUBKEY_FILE_NAME}
+                                           **kwargs)
+      kwargs = {'pubkey_file_name': test_utils._PUBKEY_FILE_NAME}
       should_fail = (fail_wrong_payload_type or fail_mismatched_block_size or
                      fail_excess_data)
       if should_fail:
-        self.assertRaises(update_payload.PayloadError,
-                          payload_checker.Run, **dargs)
+        self.assertRaises(update_payload.PayloadError, payload_checker.Run,
+                          **kwargs)
       else:
-        self.assertIsNone(payload_checker.Run(**dargs))
+        self.assertIsNone(payload_checker.Run(**kwargs))
 
 
 # This implements a generic API, hence the occasional unused args.
@@ -1123,11 +1117,10 @@
   usual setUp/tearDown mechanics.
 
   Args:
-    tested_method_name: name of the tested PayloadChecker method
-    arg_space: a dictionary containing variables (keys) and lists of values
-               (values) associated with them
-    validate_func: a function used for validating test argument combinations
-
+    tested_method_name: Name of the tested PayloadChecker method.
+    arg_space: A dictionary containing variables (keys) and lists of values
+               (values) associated with them.
+    validate_func: A function used for validating test argument combinations.
   """
   for value_tuple in itertools.product(*arg_space.itervalues()):
     run_dargs = dict(zip(arg_space.iterkeys(), value_tuple))