Update crosutils to CrOS 15617.0.0

BUG=b/307995090
TEST=presubmit
RELEASE_NOTE=None

Change-Id: If94ecbef22c6525d732d6d31b8598616ba251673
diff --git a/bin/cros_make_image_bootable b/bin/cros_make_image_bootable
index 1fd4127..a6cd9e2 100755
--- a/bin/cros_make_image_bootable
+++ b/bin/cros_make_image_bootable
@@ -149,7 +149,7 @@
   local base_pattern="$3"
 
   [[ -f "${file}" ]] || return "${FLAGS_TRUE}"
-  grep -wq "${pattern}" "${file}" && return "${FLAGS_TRUE}"
+  sudo grep -wq "${pattern}" "${file}" && return "${FLAGS_TRUE}"
   sudo sed -i "s/\b${base_pattern}\b/& ${pattern}/g" "${file}"
 }
 
@@ -423,8 +423,7 @@
     kernel_part=("--kernel_partition='${FLAGS_output_dir}/${kern_a_image}'")
     # Install syslinux on the EFI System Partition.
     kernel_part+=(--install_syslinux)
-  elif [[ "${FLAGS_arch}" == "arm64" ]] && \
-       [[ -d "/build/${FLAGS_board}/lib64/grub/arm64-efi/" ]]; then
+  elif [[ "${FLAGS_arch}" == "arm64" ]]; then
     # Use the kernel partition to acquire configuration flags.
     kernel_part=("--kernel_partition='${FLAGS_output_dir}/${kern_a_image}'")
   elif [[ "${FLAGS_arch}" == "arm" || "${FLAGS_arch}" == "mips" ]]; then
diff --git a/build_kernel_image.sh b/build_kernel_image.sh
index bca2914..a1cc67b 100755
--- a/build_kernel_image.sh
+++ b/build_kernel_image.sh
@@ -178,13 +178,23 @@
   info "rootfs is ${root_fs_blocks} blocks of 4096 bytes."
 
   info "Generating root fs hash tree (salt '${FLAGS_verity_salt}')."
+  info "sudo verity mode=create \
+                      alg=${FLAGS_verity_hash_alg} \
+                      payload=${FLAGS_rootfs_image} \
+                      payload_blocks=${root_fs_blocks} \
+                      hashtree=${FLAGS_rootfs_hash} \
+                      salt=${FLAGS_verity_salt} \
+                      version=1"
+
   # Runs as sudo in case the image is a block device.
   table=$(sudo verity mode=create \
-                      alg="${FLAGS_verity_hash_alg}" \
-                      payload="${FLAGS_rootfs_image}" \
-                      payload_blocks="${root_fs_blocks}" \
-                      hashtree="${FLAGS_rootfs_hash}" \
-                      salt="${FLAGS_verity_salt}")
+                      alg="${FLAGS_verity_hash_alg}" \
+                      payload="${FLAGS_rootfs_image}" \
+                      payload_blocks="${root_fs_blocks}" \
+                      hashtree="${FLAGS_rootfs_hash}" \
+                      salt="${FLAGS_verity_salt}" \
+                      version=1)
+  info "dm-verity table ${table}"
   if [[ -f "${FLAGS_rootfs_hash}" ]]; then
     sudo chmod a+r "${FLAGS_rootfs_hash}"
   fi
@@ -194,7 +204,7 @@
     table=${table//HASH_DEV/${base_root}}
     table=${table//ROOT_DEV/${base_root}}
   fi
-  device_mapper_args="dm=\"1 vroot none ro 1,${table}\""
+  device_mapper_args="dm-mod.create=\"vroot,,,ro,${table}\""
   info "device mapper configuration: ${device_mapper_args}"
 fi
 
diff --git a/build_library/base_image_util.sh b/build_library/base_image_util.sh
index e2091c2..b3949f3 100755
--- a/build_library/base_image_util.sh
+++ b/build_library/base_image_util.sh
@@ -666,7 +666,7 @@
     sudo "${cpmv}" "${boot_dir}"/Image-* "${BUILD_DIR}/boot_images"
   [ -L "${boot_dir}"/zImage-* ] && \
     sudo "${cpmv}" "${boot_dir}"/zImage-* "${BUILD_DIR}/boot_images"
-  [ -e "${boot_dir}"/vmlinuz-* ] && \
+  find "${boot_dir}"/vmlinuz-* >/dev/null 2>&1 && \
     sudo "${cpmv}" "${boot_dir}"/vmlinuz-* "${BUILD_DIR}/boot_images"
   [ -L "${boot_dir}"/vmlinuz ] && \
     sudo "${cpmv}" "${boot_dir}"/vmlinuz "${BUILD_DIR}/boot_images"
diff --git a/build_library/create_legacy_bootloader_templates.sh b/build_library/create_legacy_bootloader_templates.sh
index 1435735..8b248de 100755
--- a/build_library/create_legacy_bootloader_templates.sh
+++ b/build_library/create_legacy_bootloader_templates.sh
@@ -171,12 +171,12 @@
 
 menuentry "verified image A" {
   linux /syslinux/vmlinuz.A ${common_args} ${verity_common} \
-      i915.modeset=1 cros_efi root=${ROOTDEV} dm="DMTABLEA"
+      i915.modeset=1 cros_efi root=${ROOTDEV} dm-mod.create="DMTABLEA"
 }
 
 menuentry "verified image B" {
   linux /syslinux/vmlinuz.B ${common_args} ${verity_common} \
-      i915.modeset=1 cros_efi root=${ROOTDEV} dm="DMTABLEB"
+      i915.modeset=1 cros_efi root=${ROOTDEV} dm-mod.create="DMTABLEB"
 }
 
 # FIXME: usb doesn't support verified boot for now
@@ -246,7 +246,7 @@
   menu label chromeos-vusb.A
   kernel vmlinuz.A
   append ${common_args} ${verity_common} root=${ROOTDEV} \
-      i915.modeset=1 cros_legacy dm="DMTABLEA"
+      i915.modeset=1 cros_legacy dm-mod.create="DMTABLEA"
 EOF
   info "Emitted ${SYSLINUX_DIR}/usb.A.cfg"
 
@@ -263,7 +263,7 @@
   menu label chromeos-vhd.A
   kernel vmlinuz.A
   append ${common_args} ${verity_common} root=${ROOTDEV} \
-      i915.modeset=1 cros_legacy dm="DMTABLEA"
+      i915.modeset=1 cros_legacy dm-mod.create="DMTABLEA"
 EOF
   info "Emitted ${SYSLINUX_DIR}/root.A.cfg"
 
@@ -277,7 +277,7 @@
   menu label chromeos-vhd.B
   kernel vmlinuz.B
   append ${common_args} ${verity_common} root=${ROOTDEV} \
-      i915.modeset=1 cros_legacy dm="DMTABLEB"
+      i915.modeset=1 cros_legacy dm-mod.create="DMTABLEB"
 EOF
   info "Emitted ${SYSLINUX_DIR}/root.B.cfg"
 
@@ -294,6 +294,57 @@
      [[ -d "/build/${FLAGS_board}/lib64/grub/arm64-efi/" ]]; then
   install_grub_efi_template
   exit 0
+elif [[ "${FLAGS_arch}" = "arm64" ]]; then
+  sudo mkdir -p "${FLAGS_to}"/efi/boot
+
+  # Templated variables:
+  #  DMTABLEA, DMTABLEB -> '0 xxxx verity ... '
+  # This should be replaced during postinst when updating the ESP.
+  cat <<EOF | sudo dd of="${FLAGS_to}/efi/boot/grub.cfg" 2>/dev/null
+defaultA=0
+defaultB=1
+gptpriority \$grubdisk ${partition_num_kern_a} prioA
+gptpriority \$grubdisk ${partition_num_kern_b} prioB
+
+if [ \$prioA -lt \$prioB ]; then
+  set default=\$defaultB
+else
+  set default=\$defaultA
+fi
+
+set timeout=0
+
+# NOTE: These magic grub variables are a Chrome OS hack. They are not portable.
+
+menuentry "local image A" {
+  linux /syslinux/vmlinuz.A ${common_args} cros_efi \
+      root=/dev/\$linuxpartA
+}
+
+menuentry "local image B" {
+  linux /syslinux/vmlinuz.B ${common_args} cros_efi \
+      root=/dev/\$linuxpartB
+}
+
+menuentry "verified image A" {
+  linux /syslinux/vmlinuz.A ${common_args} ${verity_common} \
+      cros_efi root=${ROOTDEV} dm-mod.create="DMTABLEA"
+}
+
+menuentry "verified image B" {
+  linux /syslinux/vmlinuz.B ${common_args} ${verity_common} \
+      cros_efi root=${ROOTDEV} dm-mod.create="DMTABLEB"
+}
+
+EOF
+  if [[ ${FLAGS_enable_rootfs_verification} -eq ${FLAGS_TRUE} ]]; then
+    sudo sed -i \
+      -e '/^defaultA=/s:=.*:=2:' \
+      -e '/^defaultB=/s:=.*:=3:' \
+      "${FLAGS_to}/efi/boot/grub.cfg"
+  fi
+  info "Emitted ${FLAGS_to}/efi/boot/grub.cfg"
+  exit 0
 fi
 
 info "The target platform does not use bootloader templates."
diff --git a/build_library/disk_layout_anthos.json b/build_library/disk_layout_anthos.json
new file mode 100644
index 0000000..db4cf1c
--- /dev/null
+++ b/build_library/disk_layout_anthos.json
@@ -0,0 +1,255 @@
+{
+  # See README_disk_layout
+  "parent": "common_disk_layout.json",
+
+  "metadata": {
+    "block_size": 512,
+    "fs_block_size": 4096,
+    "fs_align": "2 MiB"
+  },
+  "layouts": {
+    # common is the standard layout template.
+    "common": [
+      {
+        # Reserve space for RW firmware. Not used on modern boards.
+        # BIOS Boot Partition for COS.
+        "num": 11,
+        "label": "RWFW",
+        "type": "firmware",
+        "size": "8 MiB"
+      },
+      {
+        # Unused partition, reserved for software slot C.
+        "num": 6,
+        "label": "KERN-C",
+        "type": "kernel"
+      },
+      {
+        # Unused partition, reserved for software slot C.
+        "num": 7,
+        "label": "ROOT-C",
+        "type": "rootfs"
+      },
+      {
+        # Unused partition, reserved for future changes.
+        "num": 9,
+        "type": "reserved",
+        "label": "reserved"
+      },
+      {
+        # Unused partition, reserved for future changes.
+        "num": 10,
+        "type": "reserved",
+        "label": "reserved"
+      },
+      {
+        # Kernel for Slot A, no file system.
+        # Not used by COS.
+        "num": 2,
+        "label": "KERN-A",
+        "type": "kernel",
+        "size": "16 MiB"
+      },
+      {
+        # Kernel for Slot B, no file system.
+        # Not used by COS.
+        "num": 4,
+        "label": "KERN-B",
+        "type": "kernel",
+        "size": "16 MiB"
+      },
+      {
+        # Board specific files, OEM partition for COS/Anthos.
+        # Used for installing application binaries like Anthos.
+        "num": 8,
+        "label": "OEM",
+        "type": "data",
+        "fs_format": "ext4",
+        "size": "2048 MiB",
+        "uuid": "random"
+      },
+      {
+        # Used for Legacy Bios, and EFI Bios, not ChromeOS hardware
+        "num": 12,
+        "label": "EFI-SYSTEM",
+        "type": "efi",
+        "fs_format": "vfat",
+        "size": "64 MiB",
+        "uuid": "clear"
+      },
+      {
+        # Slot B rootfs. Must match Root A in size for normal
+        # updates. Will contain copy of Root A after install, and
+        # contain new rootfs's after runtime updates.
+        # Shrink to 16 MiB since we disable update-engine.
+        "num": 5,
+        "label": "ROOT-B",
+        "type": "rootfs",
+        "size": "16 MiB"
+      },
+      {
+        # Slot A rootfs. Rootfs + extras (AKA verity) must fit, AKA:
+        #   size <= FS size + Verity size
+        #
+        # Verity's size can be found by:
+        #   verity_bytes = div_round_up(fs_bytes, 128) +
+        #                  div_round_up(fs_bytes, 16384) + 4096
+        #
+        # That means that the FS MiB should be:
+        #   ((total_MiB * 1024 * 1024 - 4096) * 16384 / 16513) / (1024 * 1024)
+        #
+        # The reason to not set your fs_size to be exactly what is specified
+        # by the formula above is to make builds start failing a little bit
+        # before we're truly out of space, allowing a quick release valve to
+        # relieve some of the pressure while we try to find other ways to save
+        # space.
+        #
+        # Note that in the past it was observed that updates would fail if the
+        # rootfs size shrunk (crbug.com/192136).  There are no known reasons to
+        # shrink the rootfs size, but if you come up with one you should
+        # revisit that bug and make sure it won't affect you.
+        #
+        # Changes to the offset of this partition may require
+        # changes in cros-signing/security_test_baselines/
+        # ensure_secure_kernelparams.config to allow secure boot.
+        "num": 3,
+        "label": "ROOT-A",
+        "type": "rootfs",
+        "fs_format": "ext2",
+        "fs_options": {
+          "squashfs": "-noI -comp lzo -Xalgorithm lzo1x_999 -Xcompression-level 9",
+          "btrfs": "skinny-metadata"
+        },
+        "size": "2048 MiB",
+        "fs_size": "2000 MiB",
+        "uuid": "clear"
+      },
+      {
+        # User data, stateful partition.
+        # User data, fills all remaining space on drive.
+        "num": 1,
+        "label": "STATE",
+        "type": "data",
+        "fs_format": "ext4",
+        "fs_options": {
+          # A consequence of this option is that some file system
+          # structures are initialized lazily when resizing,
+          # resulting in much faster resize2fs invocations.
+          "ext4": "-O metadata_csum"
+         },
+        "size": "5000 MiB",
+        "features": ["expand"],
+        "uuid": "random"
+      }
+    ],
+    # Used for installs on main device
+    # by default, same as 'common'.
+    "base": [
+    ],
+    # Used for bootable USB installs (not recovery).
+    "usb": [
+      {
+        # Slot B rootfs, unused on USB, but pad to 2M.
+        # installation will expand this to size from base.
+        "num": 5,
+        "size": "2 MiB"
+      }
+    ],
+    # Used for factory install images.
+    "factory_install": [
+      {
+        "num": 12,
+        "size": "32 MiB"
+      },
+      {
+        "num": 5,
+        "size": "2 MiB"
+      },
+      {
+        "num": 3,
+        "size": "420 MiB",
+        "fs_size": "400 MiB"
+      },
+      {
+        "num": 1,
+        "size": "140 MiB"
+      }
+    ],
+    # Used for recovery images.
+    "recovery": [
+      {
+        # Slot B rootfs, unused on USB, but pad to 2M.
+        # installation will expand this to size from base.
+        "num": 5,
+        "size": "2 MiB"
+      },
+      {
+        # Stateful on recovery is dynamically resized larger.
+        "num": 1,
+        "size": "2 MiB"
+      }
+    ],
+    # Larger rootfs, suitable for development with symbols, etc.
+    # Cannot apply updates when running from USB (no slot B).
+    "2gb-rootfs": [
+      {
+        # Will be grown to size from base on install.
+        "num": 5,
+        "size": "2 MiB"
+      },
+      {
+        # Will be shrunk to size from base on install.
+        "num": 3,
+        "size": "2048 MiB",
+        "fs_size": "2000 MiB"
+      }
+    ],
+    # Larger rootfs, suitable for development with symbols, etc.
+    # CAN apply updates when running from USB.
+    "2gb-rootfs-updatable": [
+      {
+        # The partition size matches base, so it's installable.
+        "num": 5,
+        "size": "2048 MiB"
+      },
+      {
+        # The partition size matches base, so it's installable.
+        "num": 3,
+        "size": "2048 MiB",
+        "fs_size": "2000 MiB"
+      },
+      {
+        "num": 1,
+        "size": "4096 MiB"
+      }
+    ],
+    # Very large rootfs, suitable for development with symbols,
+    # etc. Cannot apply updates when running from USB (no slot B)
+    "4gb-rootfs": [
+      {
+        "num": 5,
+        "size": "2 MiB"
+      },
+      {
+        # This partition is larger than the base partition, so the
+        # installer will corrupt the disk during installation.
+        "num": 3,
+        "size": "4096 MiB",
+        "fs_size": "4000 MiB"
+      }
+    ],
+    # Huge rootfs, suitable for VM only images, should not be used
+    # for actual hardware devices.
+    "16gb-rootfs": [
+      {
+        "num": 5,
+        "size": "2 MiB"
+      },
+      {
+        "num": 3,
+        "size": "16384 MiB",
+        "fs_size": "16000 MiB"
+      }
+    ]
+  }
+}
diff --git a/build_library/legacy_disk_layout.json b/build_library/legacy_disk_layout.json
index a989fa2..18d4b24 100644
--- a/build_library/legacy_disk_layout.json
+++ b/build_library/legacy_disk_layout.json
@@ -123,7 +123,8 @@
         "fs_format": "ext2",
         "fs_options": {
           "squashfs": "-noI -no-exports -comp lzo -Xalgorithm lzo1x_999 -Xcompression-level 9",
-          "ext2": "-i 65536",
+          # lakitu: set bytes per inode at 32768 because we run out of inodes with 65536
+          "ext2": "-i 32768",
           "btrfs": "skinny-metadata"
         },
         "size": "2 GiB",
diff --git a/hooks/filesystem-layout.py b/hooks/filesystem-layout.py
index 7425646..9e09712 100755
--- a/hooks/filesystem-layout.py
+++ b/hooks/filesystem-layout.py
@@ -50,6 +50,14 @@
     "postinst",
 }
 
+# Paths that are allowed in the / dir for EdgeOS.
+VALID_EDGEOS_ROOT = {
+    "data",
+    "export",
+    "logs",
+    "user",
+}
+
 # Paths that are allowed in the / dir for the SDK chroot.
 VALID_HOST_ROOT = set()
 
@@ -84,6 +92,12 @@
 # Paths that are allowed in the /usr dir for the SDK chroot.
 VALID_HOST_USR = set()
 
+# Paths that are allowed in the /usr dir for the EdgeOS boards
+VALID_EDGEOS_USR = {
+    "crosstool",
+    "grte",
+}
+
 # Paths under /usr that should not have any subdirs.
 NOSUBDIRS_USR = {
     "bin",
@@ -117,6 +131,7 @@
     "app-admin/sudo",
     "app-admin/sysstat",
     "app-admin/webapp-config",
+    "app-containers/containerd",
     "app-crypt/mit-krb5",
     "app-crypt/trousers",
     "app-emulation/containerd",
@@ -153,6 +168,12 @@
 }
 
 
+def is_edgeos():
+    """Check if the ebuild has declared itself an EdgeOS ebuild."""
+    # True if $USE_EDGEOS_FS_LAYOUT is set to any non empty string.
+    return bool(os.environ.get("USE_EDGEOS_FS_LAYOUT"))
+
+
 def is_known_bad(allowlist):
     """See if the current package is allowed."""
     atom = get_current_package()
@@ -187,7 +208,7 @@
     return False
 
 
-def check_usr(usr, host=False):
+def check_usr(usr, host=False, edgeos=False):
     """Check the /usr filesystem at |usr|."""
     ret = True
 
@@ -208,6 +229,9 @@
     else:
         unknown -= VALID_BOARD_USR
 
+    if edgeos:
+        unknown -= VALID_EDGEOS_USR
+
     if unknown:
         logging.error(
             "Paths are not allowed in the /usr dir: %s", sorted(unknown)
@@ -224,7 +248,7 @@
     return ret
 
 
-def check_root(root, host=False):
+def check_root(root, host=False, edgeos=False):
     """Check the filesystem |root|."""
     ret = True
 
@@ -235,6 +259,9 @@
     else:
         unknown -= VALID_BOARD_ROOT
 
+    if edgeos:
+        unknown -= VALID_EDGEOS_ROOT
+
     if unknown:
         logging.error(
             "Paths are not allowed in the root dir:\n  %s\n  |-- %s",
@@ -290,7 +317,7 @@
                 "Package has improved; please update BAD_RUN_PACKAGES"
             )
 
-    if not check_usr(os.path.join(root, "usr"), host):
+    if not check_usr(os.path.join(root, "usr"), host, edgeos):
         ret = False
 
     return ret
@@ -344,7 +371,7 @@
         else:
             opts.host = not bool(os.getenv("SYSROOT"))
 
-    if not check_root(opts.root, opts.host):
+    if not check_root(opts.root, opts.host, is_edgeos()):
         logging.critical(
             "Package '%s' does not conform to CrOS's filesystem conventions. "
             "Please review the paths flagged above and adjust its layout.",
diff --git a/hooks/install/gen-package-licenses.sh b/hooks/install/gen-package-licenses.sh
index f793f07..062f57b 100755
--- a/hooks/install/gen-package-licenses.sh
+++ b/hooks/install/gen-package-licenses.sh
@@ -12,7 +12,7 @@
   # Run FEATURES='noclean' emerge-x86-alex libc-bench to prevent having the
   # directory cleaned up if you are debugging.
   einfo "Generating license for ${PKG} in ${PORTAGE_BUILDDIR:-}"
-  python3.8 \
+  python3 \
   /mnt/host/source/chromite/licensing/ebuild_license_hook \
       --builddir "${PORTAGE_BUILDDIR}" || die "
 Failed Generating Licensing for ${PKG}
diff --git a/hooks/install/gen-sbom-package-info.py b/hooks/install/gen-sbom-package-info.py
new file mode 100755
index 0000000..989197f
--- /dev/null
+++ b/hooks/install/gen-sbom-package-info.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python3
+#
+# Copyright 2022 Google LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# version 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+
+# This script is used to automatically generate package
+# information for SBOM of COS image bundled dependencies.
+
+import json
+import os
+import sys
+from sbom_info_lib import download_url
+from sbom_info_lib import go_dep
+from sbom_info_lib import licenses
+from chromite.lib import osutils
+from chromite.lib import portage_util
+
+
+SBOM_INFO_FILE_NAME = "sbom-pkg-info.json"
+SPDX_NOASSERTION = "NOASSERTION"
+
+
+class SbomPackageInfo:
+    def __init__(self):
+        self.download_url = ""
+        self.licenses = ""
+        self.go_dep = []
+        # Format: [{
+        #     "license_name": <license_name>,
+        #     "file_names": [file_name],
+        #     "license_txt": <license_txt>
+        # },{......}]
+        self.other_license_list = []
+        self.private = False
+        self.err = ""
+
+    def write_to_build_info(self, build_info_dir):
+        content = {
+            "download-url": self.download_url,
+            "licenses": self.licenses,
+            "go-dep": self.go_dep,
+            "other_licenses": self.other_license_list,
+            "private": self.private,
+            "err": self.err,
+        }
+        json_content = json.dumps(content, indent=4)
+        osutils.WriteFile(
+            f"{build_info_dir}/{SBOM_INFO_FILE_NAME}", json_content, makedirs=True
+        )
+
+
+class SBOMPkgInfoError(Exception):
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+def read_build_info(build_info_dir):
+    with open(os.path.join(build_info_dir, "repository"), "r") as f:
+        repository = f.read().strip()
+    with open(os.path.join(build_info_dir, "CATEGORY"), "r") as f:
+        category = f.read().strip()
+    with open(os.path.join(build_info_dir, "PF"), "r") as f:
+        pf = f.read().strip()
+    license_path = os.path.join(build_info_dir, "LICENSE")
+    license = ""
+    if os.path.exists(license_path):
+        with open(license_path, "r") as f:
+            license = f.read().strip()
+    return repository, category, pf, license
+
+
+def get_src_path(sysroot, fullname):
+    # Package source code has been fetched by gen-package-licenses.sh.
+    tmpdir = portage_util.PortageqEnvvar("PORTAGE_TMPDIR", sysroot=sysroot)
+    src_dir = os.path.join(tmpdir, "portage", fullname, "work")
+
+    if not os.path.exists(src_dir):
+        raise AssertionError(
+            "Unpack of %s didn't create %s. Version mismatch" % (fullname, src_dir)
+        )
+    return src_dir
+
+
+def main():
+    sbom_pkg_info = SbomPackageInfo()
+    package_dir = os.getenv("PORTAGE_BUILDDIR")
+    sysroot = "/".join(package_dir.split("/")[:3])
+    build_info_dir = os.path.join(package_dir, "build-info")
+    try:
+        package_name = os.path.basename(package_dir)
+        ebuild = os.path.join(build_info_dir, package_name + ".ebuild")
+        repository, category, pf, license = read_build_info(build_info_dir)
+        if "private" in repository:
+            sbom_pkg_info.private = True
+        sbom_pkg_info.download_url = download_url.get_download_url(
+            ebuild, repository, category, pf, license
+        )
+        src_path = get_src_path(sysroot, os.path.join(category, pf))
+        (
+            sbom_pkg_info.licenses,
+            sbom_pkg_info.other_license_list,
+        ) = licenses.get_licenses(build_info_dir, src_path, package_name)
+        sbom_pkg_info.go_dep = go_dep.get_go_dep(src_path)
+
+        # Since download location is not required by the EO, failure to
+        # find download location shouldn't be fatal.
+        if not sbom_pkg_info.download_url and not sbom_pkg_info.private:
+            sbom_pkg_info.download_url = SPDX_NOASSERTION
+        if not sbom_pkg_info.licenses:
+            sbom_pkg_info.licenses = "NONE"
+    except Exception as e:
+        sbom_pkg_info.err = repr(e)
+    finally:
+        sbom_pkg_info.write_to_build_info(build_info_dir)
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/hooks/install/sbom_info_lib/download_url.py b/hooks/install/sbom_info_lib/download_url.py
new file mode 100644
index 0000000..74ecce9
--- /dev/null
+++ b/hooks/install/sbom_info_lib/download_url.py
@@ -0,0 +1,409 @@
+# Copyright 2022 Google LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# version 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+
+# get_download_url() in this script is used to
+# find download location for a COS package.
+
+import subprocess
+import re
+import os
+import requests
+
+
+CROS_GCS_MIRRORS = [
+    "gs://chromeos-mirror/gentoo/distfiles/",
+    "gs://chromeos-localmirror/distfiles/",
+]
+GCS_FILE_NOT_FOUND = "One or more URLs matched no objects"
+# An allow-list for variables parsed in an ebuild file.
+EBUILD_VARS = {
+    "MY_P",
+    "MY_PV",
+    "MY_PN",
+    "PARCH",
+    "SRC_PV",
+    "code_ver",
+    "RE2_VER",
+    "MODULE_VERSION",
+    "GIT_COMMIT",
+    "SRC_URI",
+    "EGIT_REPO_URI",
+    "EGIT_COMMIT",
+    "CROS_WORKON_COMMIT",
+    "CROS_WORKON_PROJECT",
+    "CROS_WORKON_SUBTREE",
+    "HOMEPAGE",
+    "CROS_GO_SOURCE",
+    "GN_X64_SHA1",
+    "LLVM_HASH",
+    "CROS_WORKON_REPO",
+    "GNOME_ORG_MODULE",
+    "GRUB2_COMMIT",
+    "MY_CODE_VER",
+    "MY_DATA_VER",
+}
+# For packages whose package names are hard to parse or not defined in ebuilds.
+PN_REPLACE_DICT = {
+    "Locale-gettext": lambda x: "gettext",
+    "systemd": lambda x: "systemd-stable" if "." in x else "systemd",
+    "perf": lambda x: "linux",
+    "gcc-libs": lambda x: "gcc",
+}
+SRC_URI_VARS = ["SRC_URI", "EGIT_REPO_URI"]
+COMMIT_VARS = ["GIT_COMMIT", "EGIT_COMMIT", "LLVM_HASH"]
+# REGEX_STRING_VAR finds `var_name=var_value` and `var_name="var_value"` (no new line) in ebuilds.
+REGEX_STRING_VAR = '([^\n]*?)="?([^\n]*?)"?\n'
+# REGEX_ARRAY_VAR finds `var_name=("var_value1" "var_value2" ...)` (allow new lines) in ebuilds.
+REGEX_ARRAY_VAR = "([^\n]*?)=(\(.*?\))"
+# REGEX_SRC_URI finds `SRC_URI="uri1 uri2 ..."` (allow new lines) in ebuilds.
+REGEX_SRC_URI = 'SRC_URI="(.*?)"'
+# REGEX_SRC_URI_PLUS finds `SRC_URI+="uri1 uri2 ..."` (allow new lines) in ebuilds.
+REGEX_SRC_URI_PLUS = 'SRC_URI\+="(.*?)"'
+# REGEX_PKG_REVISION finds package revision like `-r12` in package full name.
+REGEX_PKG_REVISION = "-r[0-9]+$"
+# REGEX_PKG_VERSION finds package version like `-1` or `-1.2.3.4` in package full name.
+REGEX_PKG_VERSION = "-[0-9]+(\.[0-9]*)*"
+# REGEX_FIND_STRING finds string inside double quotes like "string1".
+REGEX_FIND_STRING = '"(.*?)"'
+# REGEX_EBUILD_REPLACE finds ebuild replacement string `(ver_rs 1- some_string)`.
+REGEX_EBUILD_REPLACE = "\$\(ver_rs 1- (.*?)\)"
+REGEX_INLINE_COMMENT = "( #[^\n]*?)\n"
+GNOME_PN = "GNOME_ORG_MODULE"
+GO_SOURCE = "CROS_GO_SOURCE"
+CROS_REPO = "CROS_WORKON_REPO"
+CROS_COMMIT = "CROS_WORKON_COMMIT"
+LLVM_COMMIT = "LLVM_HASH"
+CROS_PROJECT = "CROS_WORKON_PROJECT"
+CROS_SUBTREE = "CROS_WORKON_SUBTREE"
+CROS_GIT_HOST_URL = "https://chromium.googlesource.com"
+CROS_GIT_AOSP_URL = "https://android.googlesource.com"
+CROS_HOMEPAGE = "HOMEPAGE"
+GOB_REPO_DICT = {
+    "project-lakitu": "https://cos.googlesource.com/cos/overlays/board-overlays/+/master/project-lakitu/",
+    "chromiumos": "https://cos.googlesource.com/third_party/overlays/chromiumos-overlay/+/master/",
+    "portage-stable": "https://cos.googlesource.com/third_party/overlays/portage-stable/+/master/",
+    "eclass-overlay": "https://cos.googlesource.com/third_party/overlays/eclass-overlay/+/master/",
+}
+# Packages that use `MODULE_VERSION` as package version.
+KEYWORDS_FOR_MODULE_VERSION = ["dev-perl", "perl-core"]
+PACKAGES_FROM_GOB = {
+    # portage-stable
+    "dev-util/meson-format-array",
+    "sys-devel/autoconf-wrapper",
+    "sys-devel/automake-wrapper",
+    "dev-python/namespace-zope",
+    "chromeos-base/chromeos-bsp-test-root-lakitu",
+    "dev-python/namespace-jaraco",
+    "dev-python/namespace-google",
+    # project-lakitu
+    "app-admin/cgroup-helper",
+    "app-admin/extensions-manager",
+    "app-admin/kdump-helper",
+    "app-admin/stackdriver",
+    "app-admin/toolbox-config",
+    "app-emulation/cloud-init-config",
+    "chromeos-base/chromeos-auth-config-lakitu",
+    "chromeos-base/chromeos-base",
+    "chromeos-base/chromeos-bsp-lakitu-common",
+    "chromeos-base/chromeos-firewall-init-lakitu",
+    "chromeos-base/chromeos-init-systemd",
+    "chromeos-base/chromeos-init-systemd-dev",
+    "chromeos-base/cloud-audit-config",
+    "chromeos-base/cloud-filesystem-init",
+    "chromeos-base/cloud-network-init",
+    "dev-util/cos-dev-tools",
+    "net-misc/chrony-config",
+    "sys-apps/loadpin-trigger",
+    "sys-apps/system-sysdaemons",
+    "sys-libs/lakitu-custom-locales",
+    "sys-boot/platform-key",
+    # chromiumos-overlay
+    "chromeos-base/chromeos-ca-certificates",
+    "chromeos-base/chromeos-sshd-init",
+    "chromeos-base/tty",
+    "chromeos-base/update-policy-embedded",
+    "dev-util/glib-utils",
+    "chromeos-base/openssh-server-init",
+    "chromeos-base/autotest-all",
+    "chromeos-base/autotest-client",
+    "chromeos-base/chromeos-ssh-testkeys",
+    "app-admin/lakitu-boot",
+}
+
+
+def is_uri_valid(uri):
+    if not uri.strip().startswith("http"):
+        return False
+    try:
+        request = requests.get(uri, stream=True)
+    except:
+        return False
+    if request.status_code == 200:
+        return True
+    return False
+
+
+def parse_var(s):
+    # avoid downloading packages.
+    parts = s.split("->")
+    if len(parts) > 1:
+        s = parts[0]
+    # do not evaluate commands.
+    if s.startswith("("):
+        s = f"'{s}'"
+    cmd = f"echo {s}"
+    res = subprocess.run(
+        ["bash", "-c", cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE
+    )
+    if res.stderr:
+        return ""
+    return res.stdout.decode("utf-8").rstrip()
+
+
+# Parse an environment variable and return a list.
+def parse_var_from_env(key):
+    val = os.getenv(key)
+    if not val:
+        return []
+    if val.startswith("("):
+        res = []
+        match = re.findall(REGEX_FIND_STRING, val, re.DOTALL)
+        # in some cases, go src version cannot be parsed in array
+        # e.g. chromiumos-overlay/dev-go/protobuf
+        for m in match:
+            res.append(parse_var(m))
+        return res
+    return [val]
+
+
+def find_var_and_set_env(regex, content):
+    env_set = set()
+    match = re.findall(regex, content, re.DOTALL)
+    for m in match:
+        key = m[0].strip()
+        if key not in EBUILD_VARS:
+            continue
+        val = parse_var(m[1]).strip()
+        if val:
+            os.environ[key] = val
+            env_set.add(key)
+    return env_set
+
+
+def parse_vars_in_ebuild(content):
+    env_set = set()
+    # Replace ebuild replacement gramma with bash format.
+    match = re.findall(REGEX_EBUILD_REPLACE, content, re.DOTALL)
+    if match:
+        for m in match:
+            content = content.replace(f"$(ver_rs 1- {m})", f"${{PV//./{m}}}")
+    env_set.update(find_var_and_set_env(REGEX_STRING_VAR, content))
+    env_set.update(find_var_and_set_env(REGEX_ARRAY_VAR, content))
+    return env_set
+
+
+def parse_pkg_name(pf):
+    match = re.search(REGEX_PKG_REVISION, pf)
+    if match:
+        p = pf[: match.start()]
+    else:
+        p = pf
+    match = re.search(REGEX_PKG_VERSION, p)
+    pn = p[: match.start()]
+    p_name = pn
+    pv = p[match.start() + 1 :]
+    if pn in PN_REPLACE_DICT:
+        pn = PN_REPLACE_DICT[pn](pv)
+        p = f"{pn}-{pv}"
+    os.environ["PN"] = pn
+    os.environ["PV"] = pv
+    os.environ["P"] = p
+    # possible package names in CROS GCS mirror buckets.
+    return p_name, {f"{p}.tar.gz", f"{p}.tar.xz", f"{p}.tgz", f"{p}.xz"}
+
+
+def search_pkg_from_gob(repository, category, p_name, license):
+    pkg = f"{category}/{p_name}"
+    if (
+        pkg in PACKAGES_FROM_GOB
+        or category == "virtual"
+        or repository == "eclass-overlay"
+        or license == "metapackage"
+    ):
+        if repository not in GOB_REPO_DICT:
+            return ""
+        uri = os.path.join(GOB_REPO_DICT[repository], pkg)
+        if is_uri_valid(uri):
+            return uri
+        return ""
+    return ""
+
+
+def find_cros_uri():
+    """Build a comma-separated list of CrOS source URIs from CROS_* env vars.
+
+    Combines repo base, project, optional commit and optional subtree(s)
+    into "repo/project[@commit[#subtree]]" entries, keeping only URIs
+    that pass is_uri_valid().
+    """
+    res = []
+    cros_repo = parse_var_from_env(CROS_REPO)
+    cros_proj = parse_var_from_env(CROS_PROJECT)
+    cros_subtree = parse_var_from_env(CROS_SUBTREE)
+    cros_commit = parse_var_from_env(CROS_COMMIT)
+    # Default every project to the public CrOS git host when no repo set.
+    if not cros_repo:
+        cros_repo = [CROS_GIT_HOST_URL] * len(cros_proj)
+    # NOTE(review): assumes cros_repo/cros_commit/cros_subtree parallel
+    # cros_proj in length when non-empty -- confirm against ebuild data.
+    for i in range(len(cros_proj)):
+        uri = os.path.join(cros_repo[i], cros_proj[i])
+        if not is_uri_valid(uri):
+            continue
+        if not cros_commit:
+            res.append(uri)
+        elif cros_subtree and cros_subtree[i]:
+            # One entry per subtree, all pinned to the same commit.
+            subtrees = cros_subtree[i].split(" ")
+            for subtree in subtrees:
+                res.append(f"{uri}@{cros_commit[i]}#{subtree}")
+        else:
+            res.append(f"{uri}@{cros_commit[i]}")
+    return ",".join(res)
+
+
+def get_gcs_name_from_src_uri(regex, content):
+    """Collect candidate tarball base names from SRC_URI blocks.
+
+    Splits each matched SRC_URI group by line and by space, skipping the
+    "->" rename operator, and keeps the basename of each URI after
+    variable expansion via parse_var().
+    """
+    gcs_names = set()
+    match = re.findall(regex, content, re.DOTALL)
+    if match:
+        for src_uri_group in match:
+            for uri_line in src_uri_group.split("\n"):
+                for uri in uri_line.split(" "):
+                    if uri == "->":
+                        continue
+                    gcs_names.add(os.path.basename(parse_var(uri)))
+    return gcs_names
+
+
+# Parse ebuild and set environment variables.
+# Find possible CROS gcs mirror package names,
+# and cros download url.
+# Parse ebuild and set environment variables.
+# Find possible CROS gcs mirror package names,
+# and cros download url.
+def parse_ebuild(ebuild):
+    """Parse an ebuild file.
+
+    Returns (env_set, cros_uri, gcs_names):
+      env_set   - env var names exported while parsing (for later cleanup),
+      cros_uri  - comma-separated CrOS git URIs from CROS_WORKON vars,
+      gcs_names - candidate tarball names for the GCS mirror search.
+    """
+    gcs_names = set()
+    with open(ebuild) as eb:
+        content = eb.read()
+        # remove inline comments.
+        match = re.findall(REGEX_INLINE_COMMENT, content, re.DOTALL)
+        for m in match:
+            content = content.replace(m, "")
+        env_set = parse_vars_in_ebuild(content)
+        cros_uri = find_cros_uri()
+        # Kernel-module-style ebuilds name their tarballs after
+        # MODULE_VERSION rather than PV.
+        for keyword in KEYWORDS_FOR_MODULE_VERSION:
+            if keyword in ebuild:
+                gcs_names.add(f'{os.getenv("PN")}-{os.getenv("MODULE_VERSION")}.tar.gz')
+                break
+        # GNOME packages publish under GNOME_ORG_MODULE when it is set.
+        gnome_pn = os.getenv(GNOME_PN)
+        if gnome_pn:
+            gcs_names.add(f'{gnome_pn}-{os.getenv("PV")}.tar.xz')
+        # Prefer the plain SRC_URI form; fall back to the "+=" form.
+        gcs_names_src = get_gcs_name_from_src_uri(REGEX_SRC_URI, content)
+        if gcs_names_src:
+            gcs_names.update(gcs_names_src)
+        else:
+            gcs_names.update(get_gcs_name_from_src_uri(REGEX_SRC_URI_PLUS, content))
+        return env_set, cros_uri, gcs_names
+
+
+def search_mirror_gcs(gcs_names):
+    """Look up candidate tarball names in the CrOS GCS mirror buckets.
+
+    Returns the first matching gs:// listing (stdout of `gsutil ls`,
+    stripped), or "" if no candidate exists in any bucket.
+    """
+    for name in gcs_names:
+        # Escape "?" for the bucket lookup.
+        # NOTE(review): lowercase "%3f" -- confirm gsutil treats the
+        # percent-encoding case-insensitively as expected.
+        name = name.replace("?", "%3f")
+        for bucket in CROS_GCS_MIRRORS:
+            link = os.path.join(bucket, name)
+            res = subprocess.run(
+                ["gsutil", "ls", link], stdout=subprocess.PIPE, stderr=subprocess.PIPE
+            )
+            # A "not found" error means try the next bucket; any other
+            # result is treated as a hit.
+            if res.stderr and GCS_FILE_NOT_FOUND in res.stderr.decode("utf-8"):
+                continue
+            else:
+                return res.stdout.decode("utf-8").rstrip()
+    return ""
+
+
+def search_src_uri():
+    """Return the first valid URI from the SRC_URI-related env vars.
+
+    If any commit env var is also set, the URI is pinned with "@<commit>".
+    Returns "" when no valid URI is found.
+    """
+    for uri_name in SRC_URI_VARS:
+        uri = os.getenv(uri_name)
+        if uri and is_uri_valid(uri):
+            for commit_name in COMMIT_VARS:
+                commit = os.getenv(commit_name)
+                if commit:
+                    return f"{uri}@{commit}"
+            return uri
+    return ""
+
+
+def search_go_source():
+    """Build download URIs from the ebuild's Go source list.
+
+    Each entry is expected to look like "<host1>:<host2>... <version>";
+    the first mirror host that forms a valid https URI is used, pinned
+    with "@<version>". Returns a comma-separated string (may be empty).
+    """
+    res = []
+    go_src = parse_var_from_env(GO_SOURCE)
+    for src in go_src:
+        parts = src.split(" ")
+        # Entries without exactly "sources version" are skipped.
+        if len(parts) == 2:
+            version = parts[1]
+            sources = parts[0].split(":")
+            for uri in sources:
+                uri = "https://" + uri
+                if is_uri_valid(uri):
+                    res.append(f"{uri}@{version}")
+                    break
+    return ",".join(res)
+
+
+def search_homepage():
+    """Use the package HOMEPAGE as a download location when possible.
+
+    Only chromium.googlesource.com homepages are accepted (pinned with
+    "@<commit>" when CROS_COMMIT is set). Returns "" otherwise.
+    """
+    homepage = os.getenv(CROS_HOMEPAGE)
+    if homepage and "chromium.googlesource.com" in homepage and is_uri_valid(homepage):
+        commit = os.getenv(CROS_COMMIT)
+        if commit:
+            return f"{homepage}@{commit}"
+        return homepage
+    # Special case for sys-libs/libcxxabi.
+    # (Reached only because the libcxxabi homepage is not a
+    # chromium.googlesource.com URL.)
+    if homepage == "http://libcxxabi.llvm.org/":
+        commit = os.getenv(LLVM_COMMIT)
+        return f"https://chromium.googlesource.com/external/github.com/llvm/llvm-project@{commit}"
+    return ""
+
+
+def search_download_location(gcs_names, cros_uri):
+    """Find a download location, trying sources in priority order.
+
+    Order: GCS mirrors, SRC_URI env vars, CrOS git URIs, Go sources,
+    then HOMEPAGE. Returns "" when every source comes up empty.
+    """
+    res = search_mirror_gcs(gcs_names)
+    if res:
+        return res
+    res = search_src_uri()
+    if res:
+        return res
+    if cros_uri:
+        return cros_uri
+    res = search_go_source()
+    if res:
+        return res
+    res = search_homepage()
+    if res:
+        return res
+    return ""
+
+
+def unset_env(env_set):
+    """Clear the environment variables exported during ebuild parsing.
+
+    NOTE(review): this sets each variable to "" rather than deleting the
+    key (os.environ.pop), so the keys remain present with empty values.
+    Callers appear to treat "" as unset -- confirm that is intentional.
+    """
+    for var in env_set:
+        os.environ[var] = ""
+
+
+def get_download_url(ebuild, repository, category, pf, license):
+    """Find the upstream download URL for a package (entry point).
+
+    Private overlays are skipped. Otherwise tries the GoB repositories
+    first, then parses the ebuild and searches mirrors/SRC_URI/CrOS
+    git/Go sources/homepage. Env vars set while parsing are cleared
+    before returning. Returns "" when nothing is found.
+    """
+    if repository == "private-overlays":
+        return ""
+    # These are referenced by ebuild variable expansion during parsing.
+    os.environ["CROS_GIT_HOST_URL"] = CROS_GIT_HOST_URL
+    os.environ["CROS_GIT_AOSP_URL"] = CROS_GIT_AOSP_URL
+    p_name, gcs_names = parse_pkg_name(pf)
+    gob_res = search_pkg_from_gob(repository, category, p_name, license)
+    if gob_res:
+        return gob_res
+    env_set, cros_uri, gcs_names_ebuild = parse_ebuild(ebuild)
+    gcs_names.update(gcs_names_ebuild)
+    gcs_names.discard("")
+    res = search_download_location(gcs_names, cros_uri)
+    unset_env(env_set)
+    return res
diff --git a/hooks/install/sbom_info_lib/go-licenses-template b/hooks/install/sbom_info_lib/go-licenses-template
new file mode 100644
index 0000000..402dbc0
--- /dev/null
+++ b/hooks/install/sbom_info_lib/go-licenses-template
@@ -0,0 +1,3 @@
+{{ range . }}
+{{ .Name }}@{{ .Version }}@{{ .LicenseName }}
+{{ end }}
\ No newline at end of file
diff --git a/hooks/install/sbom_info_lib/go_dep.py b/hooks/install/sbom_info_lib/go_dep.py
new file mode 100644
index 0000000..cc4a324
--- /dev/null
+++ b/hooks/install/sbom_info_lib/go_dep.py
@@ -0,0 +1,104 @@
+# Copyright 2022 Google LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# version 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+
+# This script uses https://github.com/google/go-licenses to find go dependencies.
+
+import os
+import subprocess
+from sbom_info_lib import licenses
+from sbom_info_lib import license_data
+
+# Max number of processes running in parallel for go-licenses.
+PROCESS_LIMIT = 32
+GO_LICENSES_TEMPLATE = (
+    "/mnt/host/source/src/scripts/hooks/install/sbom_info_lib/go-licenses-template"
+)
+GO_LICENSES_BIN = "/usr/bin/go-licenses"
+
+
+def shell_find(path, name):
+    """Run `find <path> -type f -name <name>`; returns the CompletedProcess."""
+    return subprocess.run(
+        ["find", path, "-type", "f", "-name", name],
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+    )
+
+
+def find_go_file(path):
+    """Return raw `find` stdout (bytes) listing *.go files under path."""
+    return shell_find(path, "*\\.go").stdout
+
+
+def get_go_dep(src_path):
+    """Find Go dependencies of a package via go-licenses.
+
+    Locates Go main modules under src_path, runs go-licenses on each
+    (up to PROCESS_LIMIT processes in parallel), and returns a list of
+    unique "<pkg>@<version>@<license>" strings with licenses normalized
+    to SPDX identifiers.
+
+    Raises UnknownLicenseError for licenses not in LICENSE_MAP or
+    SPDX_LICENSES.
+    """
+    # Check if package contains go file.
+    go_files = find_go_file(src_path)
+    if not go_files:
+        return []
+
+    res = set()
+    target_path_list = []
+    go_files_list = go_files.decode("utf-8").split("\n")
+    for go_file_path in go_files_list:
+        if not go_file_path:
+            continue
+        # go-licenses can fetch collection of libraries
+        # used by the package, directly or transitively.
+        # So only need to run it on main module.
+        with open(go_file_path, "r") as f:
+            if "func main()" not in f.read():
+                continue
+        target_path_list.append(os.path.dirname(go_file_path))
+    # Process targets in batches of PROCESS_LIMIT parallel subprocesses.
+    start_idx = 0
+    while start_idx < len(target_path_list):
+        p_list = [
+            subprocess.Popen(
+                [
+                    GO_LICENSES_BIN,
+                    "report",
+                    f"{dir_path}/...",
+                    "--template",
+                    GO_LICENSES_TEMPLATE,
+                ],
+                cwd=dir_path,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.DEVNULL,
+            )
+            for dir_path in target_path_list[
+                start_idx : min(start_idx + PROCESS_LIMIT, len(target_path_list))
+            ]
+        ]
+        start_idx += PROCESS_LIMIT
+        # Wait for the whole batch before launching the next one.
+        for p in p_list:
+            output_bytes, _ = p.communicate()
+            output = output_bytes.decode("utf-8")
+            if not output:
+                continue
+            for line in output.splitlines():
+                if not line:
+                    continue
+                # Output format: [<pkg_name>@<pkg_version>@<license_name>]
+                parts = line.split("@")
+                version = parts[1]
+                license = parts[2]
+                # Top level package e.g. google-guest-agent and packages in
+                # vendor/ have version/license "Unknown". They are not needed.
+                if version == "Unknown" or license == "Unknown":
+                    continue
+                # Normalize Gentoo-style license names to SPDX.
+                if license in licenses.LICENSE_MAP:
+                    license = licenses.LICENSE_MAP[license]
+                    parts[2] = license
+                    line = "@".join(parts)
+                elif license not in license_data.SPDX_LICENSES:
+                    raise licenses.UnknownLicenseError(
+                        f"unknown Go dependency license: {license} in {line}"
+                    )
+                res.add(line)
+    return list(res)
diff --git a/hooks/install/sbom_info_lib/license_data.py b/hooks/install/sbom_info_lib/license_data.py
new file mode 100644
index 0000000..d4a4a86
--- /dev/null
+++ b/hooks/install/sbom_info_lib/license_data.py
@@ -0,0 +1,543 @@
+# Copyright 2023 Google LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# version 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+
+SPDX_LICENSES = {
+    "0BSD",
+    "AAL",
+    "Abstyles",
+    "Adobe-2006",
+    "Adobe-Glyph",
+    "ADSL",
+    "AFL-1.1",
+    "AFL-1.2",
+    "AFL-2.0",
+    "AFL-2.1",
+    "AFL-3.0",
+    "Afmparse",
+    "AGPL-1.0-only",
+    "AGPL-1.0-or-later",
+    "AGPL-3.0-only",
+    "AGPL-3.0-or-later",
+    "Aladdin",
+    "AMDPLPA",
+    "AML",
+    "AMPAS",
+    "ANTLR-PD",
+    "ANTLR-PD-fallback",
+    "Apache-1.0",
+    "Apache-1.1",
+    "Apache-2.0",
+    "APAFML",
+    "APL-1.0",
+    "App-s2p",
+    "APSL-1.0",
+    "APSL-1.1",
+    "APSL-1.2",
+    "APSL-2.0",
+    "Arphic-1999",
+    "Artistic-1.0",
+    "Artistic-1.0-cl8",
+    "Artistic-1.0-Perl",
+    "Artistic-2.0",
+    "Baekmuk",
+    "Bahyph",
+    "Barr",
+    "Beerware",
+    "Bitstream-Vera",
+    "BitTorrent-1.0",
+    "BitTorrent-1.1",
+    "blessing",
+    "BlueOak-1.0.0",
+    "Borceux",
+    "BSD-1-Clause",
+    "BSD-2-Clause",
+    "BSD-2-Clause-Patent",
+    "BSD-2-Clause-Views",
+    "BSD-3-Clause",
+    "BSD-3-Clause-Attribution",
+    "BSD-3-Clause-Clear",
+    "BSD-3-Clause-LBNL",
+    "BSD-3-Clause-Modification",
+    "BSD-3-Clause-No-Military-License",
+    "BSD-3-Clause-No-Nuclear-License",
+    "BSD-3-Clause-No-Nuclear-License-2014",
+    "BSD-3-Clause-No-Nuclear-Warranty",
+    "BSD-3-Clause-Open-MPI",
+    "BSD-4-Clause",
+    "BSD-4-Clause-Shortened",
+    "BSD-4-Clause-UC",
+    "BSD-Protection",
+    "BSD-Source-Code",
+    "BSL-1.0",
+    "BUSL-1.1",
+    "bzip2-1.0.6",
+    "C-UDA-1.0",
+    "CAL-1.0",
+    "CAL-1.0-Combined-Work-Exception",
+    "Caldera",
+    "CATOSL-1.1",
+    "CC-BY-1.0",
+    "CC-BY-2.0",
+    "CC-BY-2.5",
+    "CC-BY-2.5-AU",
+    "CC-BY-3.0",
+    "CC-BY-3.0-AT",
+    "CC-BY-3.0-DE",
+    "CC-BY-3.0-IGO",
+    "CC-BY-3.0-NL",
+    "CC-BY-3.0-US",
+    "CC-BY-4.0",
+    "CC-BY-NC-1.0",
+    "CC-BY-NC-2.0",
+    "CC-BY-NC-2.5",
+    "CC-BY-NC-3.0",
+    "CC-BY-NC-3.0-DE",
+    "CC-BY-NC-4.0",
+    "CC-BY-NC-ND-1.0",
+    "CC-BY-NC-ND-2.0",
+    "CC-BY-NC-ND-2.5",
+    "CC-BY-NC-ND-3.0",
+    "CC-BY-NC-ND-3.0-DE",
+    "CC-BY-NC-ND-3.0-IGO",
+    "CC-BY-NC-ND-4.0",
+    "CC-BY-NC-SA-1.0",
+    "CC-BY-NC-SA-2.0",
+    "CC-BY-NC-SA-2.0-FR",
+    "CC-BY-NC-SA-2.0-UK",
+    "CC-BY-NC-SA-2.5",
+    "CC-BY-NC-SA-3.0",
+    "CC-BY-NC-SA-3.0-DE",
+    "CC-BY-NC-SA-3.0-IGO",
+    "CC-BY-NC-SA-4.0",
+    "CC-BY-ND-1.0",
+    "CC-BY-ND-2.0",
+    "CC-BY-ND-2.5",
+    "CC-BY-ND-3.0",
+    "CC-BY-ND-3.0-DE",
+    "CC-BY-ND-4.0",
+    "CC-BY-SA-1.0",
+    "CC-BY-SA-2.0",
+    "CC-BY-SA-2.0-UK",
+    "CC-BY-SA-2.1-JP",
+    "CC-BY-SA-2.5",
+    "CC-BY-SA-3.0",
+    "CC-BY-SA-3.0-AT",
+    "CC-BY-SA-3.0-DE",
+    "CC-BY-SA-4.0",
+    "CC-PDDC",
+    "CC0-1.0",
+    "CDDL-1.0",
+    "CDDL-1.1",
+    "CDL-1.0",
+    "CDLA-Permissive-1.0",
+    "CDLA-Permissive-2.0",
+    "CDLA-Sharing-1.0",
+    "CECILL-1.0",
+    "CECILL-1.1",
+    "CECILL-2.0",
+    "CECILL-2.1",
+    "CECILL-B",
+    "CECILL-C",
+    "CERN-OHL-1.1",
+    "CERN-OHL-1.2",
+    "CERN-OHL-P-2.0",
+    "CERN-OHL-S-2.0",
+    "CERN-OHL-W-2.0",
+    "checkmk",
+    "ClArtistic",
+    "CNRI-Jython",
+    "CNRI-Python",
+    "CNRI-Python-GPL-Compatible",
+    "COIL-1.0",
+    "Community-Spec-1.0",
+    "Condor-1.1",
+    "copyleft-next-0.3.0",
+    "copyleft-next-0.3.1",
+    "CPAL-1.0",
+    "CPL-1.0",
+    "CPOL-1.02",
+    "Crossword",
+    "CrystalStacker",
+    "CUA-OPL-1.0",
+    "Cube",
+    "curl",
+    "D-FSL-1.0",
+    "diffmark",
+    "DL-DE-BY-2.0",
+    "DOC",
+    "Dotseqn",
+    "DRL-1.0",
+    "DSDP",
+    "dvipdfm",
+    "ECL-1.0",
+    "ECL-2.0",
+    "EFL-1.0",
+    "EFL-2.0",
+    "eGenix",
+    "Elastic-2.0",
+    "Entessa",
+    "EPICS",
+    "EPL-1.0",
+    "EPL-2.0",
+    "ErlPL-1.1",
+    "etalab-2.0",
+    "EUDatagrid",
+    "EUPL-1.0",
+    "EUPL-1.1",
+    "EUPL-1.2",
+    "Eurosym",
+    "Fair",
+    "FDK-AAC",
+    "Frameworx-1.0",
+    "FreeBSD-DOC",
+    "FreeImage",
+    "FSFAP",
+    "FSFUL",
+    "FSFULLR",
+    "FSFULLRWD",
+    "FTL",
+    "GD",
+    "GFDL-1.1-invariants-only",
+    "GFDL-1.1-invariants-or-later",
+    "GFDL-1.1-no-invariants-only",
+    "GFDL-1.1-no-invariants-or-later",
+    "GFDL-1.1-only",
+    "GFDL-1.1-or-later",
+    "GFDL-1.2-invariants-only",
+    "GFDL-1.2-invariants-or-later",
+    "GFDL-1.2-no-invariants-only",
+    "GFDL-1.2-no-invariants-or-later",
+    "GFDL-1.2-only",
+    "GFDL-1.2-or-later",
+    "GFDL-1.3-invariants-only",
+    "GFDL-1.3-invariants-or-later",
+    "GFDL-1.3-no-invariants-only",
+    "GFDL-1.3-no-invariants-or-later",
+    "GFDL-1.3-only",
+    "GFDL-1.3-or-later",
+    "Giftware",
+    "GL2PS",
+    "Glide",
+    "Glulxe",
+    "GLWTPL",
+    "gnuplot",
+    "GPL-1.0-only",
+    "GPL-1.0-or-later",
+    "GPL-2.0-only",
+    "GPL-2.0-or-later",
+    "GPL-3.0-only",
+    "GPL-3.0-or-later",
+    "gSOAP-1.3b",
+    "HaskellReport",
+    "Hippocratic-2.1",
+    "HPND",
+    "HPND-sell-variant",
+    "HTMLTIDY",
+    "IBM-pibs",
+    "ICU",
+    "IJG",
+    "ImageMagick",
+    "iMatix",
+    "Imlib2",
+    "Info-ZIP",
+    "Intel",
+    "Intel-ACPI",
+    "Interbase-1.0",
+    "IPA",
+    "IPL-1.0",
+    "ISC",
+    "Jam",
+    "JasPer-2.0",
+    "JPNIC",
+    "JSON",
+    "Knuth-CTAN",
+    "LAL-1.2",
+    "LAL-1.3",
+    "Latex2e",
+    "Leptonica",
+    "LGPL-2.0-only",
+    "LGPL-2.0-or-later",
+    "LGPL-2.1-only",
+    "LGPL-2.1-or-later",
+    "LGPL-3.0-only",
+    "LGPL-3.0-or-later",
+    "LGPLLR",
+    "Libpng",
+    "libpng-2.0",
+    "libselinux-1.0",
+    "libtiff",
+    "libutil-David-Nugent",
+    "LiLiQ-P-1.1",
+    "LiLiQ-R-1.1",
+    "LiLiQ-Rplus-1.1",
+    "Linux-man-pages-copyleft",
+    "Linux-OpenIB",
+    "LPL-1.0",
+    "LPL-1.02",
+    "LPPL-1.0",
+    "LPPL-1.1",
+    "LPPL-1.2",
+    "LPPL-1.3a",
+    "LPPL-1.3c",
+    "LZMA-SDK-9.11-to-9.20",
+    "LZMA-SDK-9.22",
+    "MakeIndex",
+    "Minpack",
+    "MirOS",
+    "MIT",
+    "MIT-0",
+    "MIT-advertising",
+    "MIT-CMU",
+    "MIT-enna",
+    "MIT-feh",
+    "MIT-Modern-Variant",
+    "MIT-open-group",
+    "MITNFA",
+    "Motosoto",
+    "mpi-permissive",
+    "mpich2",
+    "MPL-1.0",
+    "MPL-1.1",
+    "MPL-2.0",
+    "MPL-2.0-no-copyleft-exception",
+    "mplus",
+    "MS-LPL",
+    "MS-PL",
+    "MS-RL",
+    "MTLL",
+    "MulanPSL-1.0",
+    "MulanPSL-2.0",
+    "Multics",
+    "Mup",
+    "NAIST-2003",
+    "NASA-1.3",
+    "Naumen",
+    "NBPL-1.0",
+    "NCGL-UK-2.0",
+    "NCSA",
+    "Net-SNMP",
+    "NetCDF",
+    "Newsletr",
+    "NGPL",
+    "NICTA-1.0",
+    "NIST-PD",
+    "NIST-PD-fallback",
+    "NLOD-1.0",
+    "NLOD-2.0",
+    "NLPL",
+    "Nokia",
+    "NOSL",
+    "Noweb",
+    "NPL-1.0",
+    "NPL-1.1",
+    "NPOSL-3.0",
+    "NRL",
+    "NTP",
+    "NTP-0",
+    "O-UDA-1.0",
+    "OCCT-PL",
+    "OCLC-2.0",
+    "ODbL-1.0",
+    "ODC-By-1.0",
+    "OFL-1.0",
+    "OFL-1.0-no-RFN",
+    "OFL-1.0-RFN",
+    "OFL-1.1",
+    "OFL-1.1-no-RFN",
+    "OFL-1.1-RFN",
+    "OGC-1.0",
+    "OGDL-Taiwan-1.0",
+    "OGL-Canada-2.0",
+    "OGL-UK-1.0",
+    "OGL-UK-2.0",
+    "OGL-UK-3.0",
+    "OGTSL",
+    "OLDAP-1.1",
+    "OLDAP-1.2",
+    "OLDAP-1.3",
+    "OLDAP-1.4",
+    "OLDAP-2.0",
+    "OLDAP-2.0.1",
+    "OLDAP-2.1",
+    "OLDAP-2.2",
+    "OLDAP-2.2.1",
+    "OLDAP-2.2.2",
+    "OLDAP-2.3",
+    "OLDAP-2.4",
+    "OLDAP-2.5",
+    "OLDAP-2.6",
+    "OLDAP-2.7",
+    "OLDAP-2.8",
+    "OML",
+    "OpenSSL",
+    "OPL-1.0",
+    "OPUBL-1.0",
+    "OSET-PL-2.1",
+    "OSL-1.0",
+    "OSL-1.1",
+    "OSL-2.0",
+    "OSL-2.1",
+    "OSL-3.0",
+    "Parity-6.0.0",
+    "Parity-7.0.0",
+    "PDDL-1.0",
+    "PHP-3.0",
+    "PHP-3.01",
+    "Plexus",
+    "PolyForm-Noncommercial-1.0.0",
+    "PolyForm-Small-Business-1.0.0",
+    "PostgreSQL",
+    "PSF-2.0",
+    "psfrag",
+    "psutils",
+    "Python-2.0",
+    "Python-2.0.1",
+    "Qhull",
+    "QPL-1.0",
+    "Rdisc",
+    "RHeCos-1.1",
+    "RPL-1.1",
+    "RPL-1.5",
+    "RPSL-1.0",
+    "RSA-MD",
+    "RSCPL",
+    "Ruby",
+    "SAX-PD",
+    "Saxpath",
+    "SCEA",
+    "SchemeReport",
+    "Sendmail",
+    "Sendmail-8.23",
+    "SGI-B-1.0",
+    "SGI-B-1.1",
+    "SGI-B-2.0",
+    "SHL-0.5",
+    "SHL-0.51",
+    "SimPL-2.0",
+    "SISSL",
+    "SISSL-1.2",
+    "Sleepycat",
+    "SMLNJ",
+    "SMPPL",
+    "SNIA",
+    "Spencer-86",
+    "Spencer-94",
+    "Spencer-99",
+    "SPL-1.0",
+    "SSH-OpenSSH",
+    "SSH-short",
+    "SSPL-1.0",
+    "SugarCRM-1.1.3",
+    "SWL",
+    "TAPR-OHL-1.0",
+    "TCL",
+    "TCP-wrappers",
+    "TMate",
+    "TORQUE-1.1",
+    "TOSL",
+    "TU-Berlin-1.0",
+    "TU-Berlin-2.0",
+    "UCL-1.0",
+    "Unicode-DFS-2015",
+    "Unicode-DFS-2016",
+    "Unicode-TOU",
+    "Unlicense",
+    "UPL-1.0",
+    "Vim",
+    "VOSTROM",
+    "VSL-1.0",
+    "W3C",
+    "W3C-19980720",
+    "W3C-20150513",
+    "Watcom-1.0",
+    "Wsuipa",
+    "WTFPL",
+    "X11",
+    "X11-distribute-modifications-variant",
+    "Xerox",
+    "XFree86-1.1",
+    "xinetd",
+    "Xnet",
+    "xpp",
+    "XSkat",
+    "YPL-1.0",
+    "YPL-1.1",
+    "Zed",
+    "Zend-2.0",
+    "Zimbra-1.3",
+    "Zimbra-1.4",
+    "Zlib",
+    "zlib-acknowledgement",
+    "ZPL-1.1",
+    "ZPL-2.0",
+    "ZPL-2.1",
+    "BSD",  # Need to identify license version.
+    "OPENLDAP",  # Need to identify license version.
+    "lsof",  # COPYING file added to source code on Jan 13 2023.
+    # One paragraph inside the 955-line README
+    # describes its license.
+    "netcat",
+}
+
+
+LSOF_LICENSE_TXT = """Copyright 2002 Purdue Research Foundation,
+West Lafayette,
+Indiana 47907.  All rights reserved.
+
+Written by Victor A. Abell
+
+This software is not subject to any license of the American
+Telephone and Telegraph Company or the Regents of the
+University of California.
+
+Permission is granted to anyone to use this software for
+any purpose on any computer system, and to alter it and
+redistribute it freely, subject to the following
+restrictions:
+
+1. Neither the authors nor Purdue University are responsible
+   for any consequences of the use of this software.
+
+2. The origin of this software must not be misrepresented,
+   either by explicit claim or by omission.  Credit to the
+   authors and Purdue University must appear in documentation
+   and sources.
+
+3. Altered versions must be plainly marked as such, and must
+   not be misrepresented as being the original software.
+
+4. This notice may not be removed or altered."""
+
+
+OTHER_LICENSE_LSOF = {
+    "license_name": "LicenseRef-lsof",
+    "file_names": ["lsof/COPYING"],
+    "license_txt": LSOF_LICENSE_TXT,
+}
+
+
+NETCAT_LICENSE_TXT = """Netcat is entirely my own creation, although
+plenty of other code was used as
+examples.  It is freely given away to the Internet community in the hope that
+it will be useful, with no restrictions except giving credit where it is due.
+No GPLs, Berkeley copyrights or any of that nonsense.  The author assumes NO
+responsibility for how anyone uses it.  If netcat makes you rich somehow and
+you're feeling generous, mail me a check.  If you are affiliated in any way
+with Microsoft Network, get a life.  Always ski in control.  Comments,
+questions, and patches to nc110-devel@lists.sourceforge.net."""
+
+
+OTHER_LICENSE_NETCAT = {
+    "license_name": "LicenseRef-netcat",
+    "file_names": ["nc110/README"],
+    "license_txt": NETCAT_LICENSE_TXT,
+}
diff --git a/hooks/install/sbom_info_lib/licenses.py b/hooks/install/sbom_info_lib/licenses.py
new file mode 100644
index 0000000..d430a0a
--- /dev/null
+++ b/hooks/install/sbom_info_lib/licenses.py
@@ -0,0 +1,275 @@
+# Copyright 2022 Google LLC
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# version 2 as published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+
+# This script is used to parse licenses of a package.
+
+import re
+import os
+from chromite.lib import cros_build_lib
+from chromite.licensing import licenses_lib
+from sbom_info_lib import license_data
+
+
+LICENSE_MAP = {
+    "Artistic-2": "Artistic-2.0",
+    "BSD-2": "BSD-2-Clause",
+    "BSD-4": "BSD-4-Clause",
+    "BSD-Google": "BSD-3-Clause",
+    "BZIP2": "bzip2-1.0.6",
+    "Boost-1.0": "BSL-1.0",
+    "FDL-1.1": "GFDL-1.1-only",
+    "FDL-1.2": "GFDL-1.2-only",
+    "GPL-2": "GPL-2.0-only",
+    "GPL-2.0": "GPL-2.0-only",
+    "GPL-2+": "GPL-2.0-or-later",
+    "GPL-3": "GPL-3.0-only",
+    "GPL-3.0": "GPL-3.0-only",
+    "GPL-3+": "GPL-3.0-or-later",
+    "LGPL-2": "LGPL-2.0-only",
+    "LGPL-2.1": "LGPL-2.1-only",
+    "LGPL-2.1+": "LGPL-2.1-or-later",
+    "LGPL-3": "LGPL-3.0-only",
+    "LGPL-3+": "LGPL-3.0-or-later",
+    "PSF-2": "PSF-2.0",
+    "RSA": "RSA-MD",
+    "UoI-NCSA": "NCSA",
+    "ZLIB": "Zlib",
+    "ZPL": "ZPL-2.1",
+    "openssl": "OpenSSL",
+    "vim": "Vim",
+    "LICENSE.Cavium_Networks": "BSD-3-Clause",  # Used by sys-apps/cavium-n3fips-tools in private board.
+}
+
+
+LICENSE_NAMES_REGEX = [
+    r"^copyright$",
+    r"^copyright[.]txt$",
+    r"^copyright[.]regex$",  # llvm
+    r"^copying.*$",
+    r"^licen[cs]e.*$",
+    r"^licensing.*$",  # libatomic_ops
+    r"^ipa_font_license_agreement_v1[.]0[.]txt$",  # ja-ipafonts
+    r"^MIT-LICENSE$",  # rake
+    r"^PKG-INFO$",  # copyright assignment for
+    # some python packages
+    # (netifaces, unittest2)
+    r"^NOTICE$",  # mit-krb5
+]
+
+LICENSE_REF = "LicenseRef-{}"
+
+
+BSD_VERSION_IDENTIFIER = [
+    "",
+    "Redistributions of source code must retain",
+    "Redistributions in binary form must reproduce",
+    "endorse or promote products derived",
+    "All advertising materials mentioning features",
+]
+
+REGEX_LICENSE_NAME = "- license_names\n  - !!set \{(.*?)\}"
+REGEX_LICENSE_TEXT = "- license_text_scanned\n  - \[(.*?)\]\n- !!python/tuple"
+REGEX_LICENSE_FILE_NAME = "Scanned Source License ([^\n]*?):"
+
+
+SPECIAL_LICENSE_MAP = {
+    "lsof": license_data.OTHER_LICENSE_LSOF,
+    "netcat": license_data.OTHER_LICENSE_NETCAT,
+}
+
+
+class UnknownLicenseError(Exception):
+    """Raised when a license cannot be resolved to a known SPDX identifier."""
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+# Parse license.yaml.
+def parse_license_yaml(yaml, pkg_name):
+    # Try to find scanned license text in license.yaml.
+    saved_scanned_txt = ""
+    saved_license_files = []
+    scanned_txt_match = re.findall(REGEX_LICENSE_TEXT, yaml, re.DOTALL)
+    if scanned_txt_match:
+        for m in scanned_txt_match:
+            if not m.strip():
+                continue
+            saved_scanned_txt = m.strip()
+            license_file_match = re.findall(REGEX_LICENSE_FILE_NAME, m, re.DOTALL)
+            for n in license_file_match:
+                saved_license_files.append(n.strip())
+
+    # Try to find scanned license names in license.yaml.
+    found = []
+    license_match = re.findall(REGEX_LICENSE_NAME, yaml, re.DOTALL)
+    if license_match:
+        for m in license_match:
+            for part in m.replace("\n", " ").split(","):
+                license = part.split(":")[0].strip()
+                # Being in the public domain is not a license.
+                if (
+                    not license
+                    or license == "public-domain"
+                    or license == "metapackage"
+                ):
+                    continue
+                if license in LICENSE_MAP:
+                    license = LICENSE_MAP[license]
+                found.append(license)
+    # There are cases where license.yaml contains no license
+    # but only sanned license text e.g. dev-libs/libpcre.
+    if not found and saved_license_files:
+        found.append(pkg_name)
+    return found, saved_scanned_txt, saved_license_files
+
+
+def extract_other_licenses(licenses, src_path, saved_scanned_txt, saved_license_files):
+    """Resolve non-SPDX licenses into LicenseRef entries with license text.
+
+    Mutates `licenses` in place (replacing entries with LicenseRef-* or
+    versioned identifiers) and returns a list of dicts describing each
+    non-SPDX license. Raises UnknownLicenseError when no license text
+    can be located.
+    """
+    # other_license_list format: [
+    # {
+    #     "license_name": <license_name>,
+    #     "file_names": [file_name],
+    #     "license_txt": <license_txt>
+    # },{
+    #     ......
+    # }]
+    other_license_list = []
+    all_license_files = list_all_license_files(src_path)
+    for license in licenses:
+        if license not in license_data.SPDX_LICENSES:
+            license_file_names = []
+            # Find license files same as license names
+            # e.g. LICENCE.ralink-firmware.txt.
+            license_txt, license_file_names = find_license_file(
+                src_path, all_license_files, license
+            )
+            if not license_txt:
+                if not saved_scanned_txt:
+                    # Find general license files e.g. COPYING.
+                    (
+                        saved_scanned_txt,
+                        saved_license_files,
+                    ) = find_general_license_txt(src_path, all_license_files)
+                    if not saved_scanned_txt:
+                        raise UnknownLicenseError(
+                            f"unknown license without scanned text: {license}"
+                        )
+                license_txt = saved_scanned_txt
+                license_file_names = saved_license_files
+
+            # Mark licenses not listed by SPDX spec as license reference.
+            license_ref = LICENSE_REF.format(license)
+            other_license_list.append(
+                {
+                    "license_name": license_ref,
+                    "file_names": license_file_names,
+                    "license_txt": license_txt,
+                }
+            )
+            licenses[licenses.index(license)] = license_ref
+        # NOTE(review): the BSD/OPENLDAP/special-license handling below is
+        # inside the per-license loop, so it re-runs on every iteration;
+        # after the first replacement the later passes are no-ops.
+        # Identify BSD version.
+        if "BSD" in licenses:
+            if not saved_scanned_txt:
+                # NOTE(review): find_general_license_txt takes
+                # (src_path, license_files); this one-arg call would raise
+                # TypeError if ever reached -- confirm and fix upstream.
+                saved_scanned_txt, _ = find_general_license_txt(src_path)
+                if not saved_scanned_txt:
+                    raise UnknownLicenseError(
+                        f"no license file found, cannot identify BSD version"
+                    )
+            bsd_v = find_bsd_version(saved_scanned_txt)
+            licenses[licenses.index("BSD")] = bsd_v
+        # Identify OPENLDAP version.
+        if "OPENLDAP" in licenses:
+            # Currently, OPENLDAP is only used in mit-krb5.
+            if "krb5" in saved_scanned_txt:
+                licenses[licenses.index("OPENLDAP")] = "OLDAP-2.8"
+            else:
+                raise UnknownLicenseError(f"license OPENLDAP doesn't have a version")
+        # Handle special licenses if present.
+        for license, license_content in SPECIAL_LICENSE_MAP.items():
+            if license in licenses:
+                license_ref = LICENSE_REF.format(license)
+                licenses[licenses.index(license)] = license_ref
+                other_license_list.append(license_content)
+    return other_license_list
+
+
+def find_bsd_version(saved_scanned_txt):
+    """Identify the BSD clause count from scanned license text.
+
+    Walks BSD_VERSION_IDENTIFIER from the highest clause (4) down; the
+    index of the first phrase found in the text is the clause count.
+    Raises UnknownLicenseError when no identifying phrase matches.
+    """
+    i = len(BSD_VERSION_IDENTIFIER) - 1
+    while i > 0:
+        if BSD_VERSION_IDENTIFIER[i] in saved_scanned_txt:
+            return f"BSD-{i}-Clause"
+        i -= 1
+    raise UnknownLicenseError(f"failed to identify BSD version")
+
+
+def list_all_license_files(src_path):
+    """List license-like files under src_path, relative to src_path.
+
+    Files inside .git/, files with GPL in the name, and .py files are
+    excluded; the remainder are matched against LICENSE_NAMES_REGEX.
+    """
+    args = ["find", src_path, "-type", "f"]
+    result = cros_build_lib.run(args, stdout=True, encoding="utf-8")
+    # Truncate results to look like this: swig-2.0.4/COPYRIGHT
+    files = [x[len(src_path) :].lstrip("/") for x in result.stdout.splitlines()]
+    license_files = []
+    for name in files:
+        if ".git/" in name:
+            continue
+        basename = os.path.basename(name)
+        # Looking for license.* brings up things like license.gpl, and we
+        # never want a GPL license when looking for copyright attribution,
+        # so we skip them here. We also skip regexes that can return
+        # license.py (seen in some code).
+        if re.search(r".*GPL.*", basename) or re.search(r"\.py$", basename):
+            continue
+        for regex in LICENSE_NAMES_REGEX:
+            if re.search(regex, basename, re.IGNORECASE):
+                license_files.append(name)
+                break
+    return license_files
+
+
+# Find license files same as license names
+# e.g. LICENCE.ralink-firmware.txt
+# in sys-kernel/linux-firmware.
+def find_license_file(src_path, license_files, license):
+    for name in license_files:
+        basename = os.path.basename(name)
+        if os.path.splitext(basename)[0] == license or basename == license:
+            license_path = os.path.join(src_path, name)
+            return licenses_lib.ReadUnknownEncodedFile(
+                license_path, "Adding License"
+            ), [name]
+    return "", []
+
+
+# Find general license files e.g. COPYING.
+# Partially copy-pasted from chromite.licensing.licenses_lib._ExtractLicenses().
+def find_general_license_txt(src_path, license_files):
+    res = ""
+    for license_file in sorted(license_files):
+        # Joy and pink ponies. Some license_files are encoded as latin1 while
+        # others are utf-8 and of course you can't know but only guess.
+        license_path = os.path.join(src_path, license_file)
+        license_txt = licenses_lib.ReadUnknownEncodedFile(
+            license_path, "Adding License"
+        )
+        res += "\n\nScanned Source License %s:\n\n%s" % (license_file, license_txt)
+    return res, license_files
+
+
+def get_licenses(build_info_dir, src_path, pkg_name):
+    """Parse a package's license.yaml into an SPDX license expression.
+
+    Returns ("<lic> AND <lic>...", other_license_list) normally.
+    NOTE(review): when license.yaml is missing this returns a bare ""
+    instead of a 2-tuple -- callers that unpack two values will raise
+    ValueError on that path; confirm the intended contract.
+    """
+    if not os.path.exists(os.path.join(build_info_dir, "license.yaml")):
+        return ""
+    with open(os.path.join(build_info_dir, "license.yaml"), "r") as l:
+        licenses, saved_scanned_txt, saved_license_files = parse_license_yaml(
+            l.read(), pkg_name
+        )
+
+    other_license_list = extract_other_licenses(
+        licenses, src_path, saved_scanned_txt, saved_license_files
+    )
+    return " AND ".join(licenses), other_license_list
diff --git a/sdk_lib/make_chroot.sh b/sdk_lib/make_chroot.sh
index 64aa733..43b05ae 100755
--- a/sdk_lib/make_chroot.sh
+++ b/sdk_lib/make_chroot.sh
@@ -211,6 +211,11 @@
 if [[ "${FLAGS_skip_chroot_upgrade}" -eq "${FLAGS_FALSE}" ]]; then
   info "Updating portage"
   early_enter_chroot emerge -uNv --quiet --ignore-world portage
+
+  # XXX(ovt): reconfigure python-exec to use python3.8 for scripts
+  # that have python3 in their shebang.
+  early_enter_chroot sed -i s/python3.6/python3.8/ \
+    /etc/python-exec/python-exec.conf
 fi
 # Add chromite into python path.
 for python_path in "${FLAGS_chroot}/usr/lib/"python*.*; do
@@ -244,6 +249,11 @@
   early_enter_chroot ${EMERGE_CMD} -uN --nodeps ${USEPKG} \
     sys-libs/llvm-libunwind sys-libs/libcxx
 
+  # XXX(ovt): ebuild_license_hook requires the yaml module, which is
+  # only available for python3.6 in the existing SDK.
+  early_enter_chroot ${EMERGE_CMD} -uNv ${USEPKG} ${USEPKGONLY} ${EMERGE_JOBS} \
+    dev-python/pyyaml
+
   # Now we can let the rest of the compiler packages build in parallel as they
   # don't generally rely on each other.
   # Note: early_enter_chroot executes as root.
diff --git a/update_bootloaders.sh b/update_bootloaders.sh
index af5b97f..5d37ed6 100755
--- a/update_bootloaders.sh
+++ b/update_bootloaders.sh
@@ -79,10 +79,10 @@
   local template_dir="$4"
   local to="$5"
 
-  # Pull out the dm="" values
+  # Pull out the dm-mod.create="" values
   dm_table=
-  if echo "$kernel_cmdline" | grep -q 'dm="'; then
-    dm_table=$(echo "$kernel_cmdline" | sed -s 's/.*dm="\([^"]*\)".*/\1/')
+  if echo "$kernel_cmdline" | grep -q 'dm-mod.create="'; then
+    dm_table=$(echo "$kernel_cmdline" | sed -s 's/.*dm-mod.create="\([^"]*\)".*/\1/')
   fi
 
   # Discover last known partition numbers.
@@ -96,7 +96,7 @@
   # Rewrite grub table
   grub_dm_table_a=${dm_table//${old_root}/${root_a_uuid}}
   grub_dm_table_b=${dm_table//${old_root}/${root_b_uuid}}
-  sed -e "s|DMTABLEA|${grub_dm_table_a}|g" \
+  sudo sed -e "s|DMTABLEA|${grub_dm_table_a}|g" \
       -e "s|DMTABLEB|${grub_dm_table_b}|g" \
       -e "s|/dev/\\\$linuxpartA|${root_a_uuid}|g" \
       -e "s|/dev/\\\$linuxpartB|${root_b_uuid}|g" \
@@ -122,6 +122,40 @@
   # and replace HDROOTA and HDROOTB with the correct /dev/sd%D%P/%U+1
 }
 
+if ! type -p update_arm64_bootloaders; then
+  update_arm64_bootloaders() {
+    local old_root="$1"  # e.g., /dev/sd%D%P or %U+1
+    local kernel_cmdline="$2"
+    local esp_fs_dir="$3"
+    local template_dir="$4"
+    local to="$5"
+
+    # Pull out the dm-mod.create="" values
+    dm_table=
+    if echo "$kernel_cmdline" | grep -q 'dm-mod.create="'; then
+      dm_table=$(echo "$kernel_cmdline" | sed -s 's/.*dm-mod.create="\([^"]*\)".*/\1/')
+    fi
+
+    # Discover last known partition numbers.
+    local partition_num_root_a="$(get_layout_partition_number \
+      "${FLAGS_image_type}" ROOT-A)"
+    local partition_num_root_b="$(get_layout_partition_number \
+      "${FLAGS_image_type}" ROOT-B)"
+    root_a_uuid="PARTUUID=$(part_index_to_uuid "$to" ${partition_num_root_a})"
+    root_b_uuid="PARTUUID=$(part_index_to_uuid "$to" ${partition_num_root_b})"
+
+    # Rewrite grub table
+    grub_dm_table_a=${dm_table//${old_root}/${root_a_uuid}}
+    grub_dm_table_b=${dm_table//${old_root}/${root_b_uuid}}
+    sudo sed -e "s|DMTABLEA|${grub_dm_table_a}|g" \
+        -e "s|DMTABLEB|${grub_dm_table_b}|g" \
+        -e "s|/dev/\\\$linuxpartA|${root_a_uuid}|g" \
+        -e "s|/dev/\\\$linuxpartB|${root_b_uuid}|g" \
+        "${template_dir}"/efi/boot/grub.cfg |
+        sudo dd of="${esp_fs_dir}"/efi/boot/grub.cfg status=none
+  }
+fi
+
 ESP_DEV_OURS=
 ESP_DEV=
 if [[ ! -e "${FLAGS_to}" ]]; then
@@ -174,9 +208,7 @@
 trap cleanup EXIT
 sudo mount "${ESP_DEV}" "${ESP_FS_DIR}"
 
-if [[ "${FLAGS_arch}" == "x86" || "${FLAGS_arch}" == "amd64" || \
-      ( "${FLAGS_arch}" == "arm64" && \
-        -d "/build/${FLAGS_board}/lib64/grub/arm64-efi/" ) ]]; then
+if [[ "${FLAGS_arch}" == "x86" || "${FLAGS_arch}" == "amd64" ]]; then
   # Populate the EFI bootloader configuration
   sudo mkdir -p "${ESP_FS_DIR}/efi/boot"
   sudo cp -r "${FLAGS_from}"/efi/boot/. "${ESP_FS_DIR}"/efi/boot
@@ -215,6 +247,27 @@
     # mount again for cleanup to free resource gracefully
     sudo mount -o ro "${ESP_DEV}" "${ESP_FS_DIR}"
   fi
+elif [[ "${FLAGS_arch}" == "arm64" ]]; then
+  # Populate the EFI bootloader configuration
+  sudo mkdir -p "${ESP_FS_DIR}/efi/boot"
+  sudo cp -r "${FLAGS_from}"/efi/boot/. "${ESP_FS_DIR}"/efi/boot
+
+  # Extract kernel flags
+  kernel_cfg=
+  old_root='PARTUUID=%U/PARTNROFF=1'
+  if [[ -n "${FLAGS_kernel_cmdline}" ]]; then
+    info "Using supplied kernel_cmdline to update templates."
+    kernel_cfg="${FLAGS_kernel_cmdline}"
+  elif [[ -n "${FLAGS_kernel_partition}" ]]; then
+    info "Extracting the kernel command line from ${FLAGS_kernel_partition}"
+    kernel_cfg=$(dump_kernel_config "${FLAGS_kernel_partition}")
+  fi
+
+  # Call the arm64 helper defined above (overridable via 'type -p' guard);
+  # plain 'update_bootloaders' is not defined in this script.
+  update_arm64_bootloaders "${old_root}" \
+                           "${kernel_cfg}" \
+                           "${ESP_FS_DIR}" \
+                           "${FLAGS_from}" \
+                           "${FLAGS_to}"
 elif [[ "${FLAGS_arch}" == "arm" || "${FLAGS_arch}" == "mips" ]]; then
   # Copy u-boot script to ESP partition
   if [ -r "${FLAGS_from}/boot-A.scr.uimg" ]; then