#!/bin/bash
# Copyright 1999-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2

source "${PORTAGE_BIN_PATH:-/usr/lib/portage/bin}"/helper-functions.sh
| |
# A target directory (or option) is mandatory.
[[ -n $1 ]] || { __helpers_die "${0##*/}: at least one argument needed"; exit 1; }

# Older EAPIs have no offset-prefix support: fall back to the
# non-prefixed image root and an empty prefix.
___eapi_has_prefix_variables || { ED=${D}; EPREFIX=; }
| |
case $1 in
--ignore)
	# Drop a ".ecompress.skip" marker next to each existing path so the
	# compression pass leaves it alone.
	shift
	for skip in "$@" ; do
		if [[ -d ${ED}${skip} || -f ${ED}${skip} ]] ; then
			>> "${ED}${skip}.ecompress.skip"
		fi
	done
	exit 0
	;;
--queue)
	# Record each directory with a ".ecompress.dir" marker; the actual
	# work happens later via --dequeue.
	shift
	ret=0
	for queued in "$@" ; do
		>> "${ED}${queued}.ecompress.dir"
		((ret|=$?))
	done
	[[ ${ret} -ne 0 ]] && __helpers_die "${0##*/} failed"
	exit ${ret}
	;;
--dequeue)
	[[ -n $2 ]] && __vecho "${0##*/}: --dequeue takes no additional arguments" 1>&2
	# Turn every queued marker back into an image-relative path and
	# re-invoke ourselves on it, then clear all skip markers.
	find "${ED}" -name '*.ecompress.dir' -print0 \
		| sed -e 's:\.ecompress\.dir::g' -e "s:${ED}:/:g" \
		| ${XARGS} -0 ecompressdir
	find "${ED}" -name '*.ecompress.skip' -print0 | ${XARGS} -0 rm -f
	exit 0
	;;
--*)
	__helpers_die "${0##*/}: unknown arguments '$*'"
	exit 1
	;;
esac
| |
# figure out the new suffix the active compressor produces; empty when
# compression is disabled (the compress pass below is guarded on this)
suffix=$(ecompress --suffix)
| |
# funk_up_dir(action, suffix, binary)
# - action: compress or decompress
# - suffix: the compression suffix to work with
# - binary: the program to execute that'll compress/decompress
# The directory we act on is implied in the ${dir} variable
funk_up_dir() {
	local act=$1 suffix=$2 binary=$3

	# When compressing, operate on files that do NOT yet carry the
	# suffix; when decompressing, on exactly those that do.
	local negate=""
	[[ ${act} == "compress" ]] && negate="!"

	local ret=0
	# first we act on all the files
	# NOTE: relies on globals ${dir} (working dir) and ${XARGS}.
	find "${dir}" -type f ${negate} -iname '*'${suffix} -print0 | ${XARGS} -0 ${binary}
	((ret|=$?))

	# Then repair symlinks broken by the rename above: repoint each
	# dangling link at its target's new (suffixed/unsuffixed) name.
	while read -r -d $'\0' brokenlink ; do
		# Still resolves -> not broken by us; leave it alone.
		[[ -e ${brokenlink} ]] && continue
		olddest=$(readlink "${brokenlink}")
		# Ignore temporarily broken symlinks due to
		# _relocate_skip_dirs (bug #399595), and handle
		# absolute symlinks to files that aren't merged
		# yet (bug #405327).
		if [[ ${olddest} == /* ]] ; then
			[ -e "${D}${olddest}" ] && continue
			skip_dir_dest=${T}/ecompress-skip/${olddest#${EPREFIX}}
		else
			skip_dir_dest=${T}/ecompress-skip/${actual_dir#${ED}}/${brokenlink%/*}/${olddest}
		fi
		# Target was stashed by hide_skip_dirs; it will come back intact.
		[[ -e ${skip_dir_dest} ]] && continue
		if [[ ${act} == "compress" ]] ; then
			newdest=${olddest}${suffix}
		else
			# Only rewrite links whose target actually carried the suffix.
			[[ ${olddest} == *${suffix} ]] || continue
			newdest=${olddest%${suffix}}
		fi
		# Only touch the link if the renamed target really exists.
		if [[ "${newdest}" == /* ]] ; then
			[[ -f "${D}${newdest}" ]] || continue
		else
			[[ -f "${dir}/${brokenlink%/*}/${newdest}" ]] || continue
		fi
		rm -f "${brokenlink}"
		# Rename the link itself to match its new target's name.
		[[ ${act} == "compress" ]] \
			&& ln -snf "${newdest}" "${brokenlink}${suffix}" \
			|| ln -snf "${newdest}" "${brokenlink%${suffix}}"
		((ret|=$?))
	done < <(find "${dir}" -type l -print0)
	return ${ret}
}
| |
# _relocate_skip_dirs(srctree, dsttree)
# Move all files and directories we want to skip running compression
# on from srctree to dsttree.
# _relocate_skip_dirs(srctree, dsttree)
# For every "<path>.ecompress.skip" marker below srctree, move both
# <path> and its marker to the same relative location under dsttree.
_relocate_skip_dirs() {
	local from=$1 to=$2

	# Nothing to relocate if the source tree is absent.
	[[ -d ${from} ]] || return 0

	local marker target dest
	while read -r -d $'\0' marker ; do
		target=${marker%.ecompress.skip}
		dest="${to}${target#${from}}"
		mkdir -p "${dest%/*}"
		mv "${target}" "${dest}"
		mv "${marker}" "${dest}.ecompress.skip"
	done < <(find "${from}" -name '*.ecompress.skip' -print0)
}
# Stash everything flagged with .ecompress.skip markers out of the image
# into ${T}/ecompress-skip so the compression passes never touch it.
# NOTE(review): the caller passes a directory argument to hide_skip_dirs
# but it is ignored; both helpers always walk the full ${ED} tree.
hide_skip_dirs() { _relocate_skip_dirs "${ED}" "${T}"/ecompress-skip/ ; }
# Move the stashed files back into the image once compression is done.
restore_skip_dirs() { _relocate_skip_dirs "${T}"/ecompress-skip/ "${ED}" ; }
| |
# accumulated exit status for the whole run
ret=0

# start each run with a clean skip-dir staging area under ${T}
rm -rf "${T}"/ecompress-skip

# Flat pair-list of (suffix, decompression command), consumed two
# entries at a time by the main loop below.
decompressors=(
	".Z" "gunzip -f"
	".gz" "gunzip -f"
	".bz2" "bunzip2 -f"
	".xz" "unxz -f"
	".lzma" "unxz -f"
)
| |
__multijob_init

# For each requested directory: stash skip-flagged paths, undo any
# pre-existing compression, break hard links, compress, then restore
# the stashed paths.
for dir in "$@" ; do
	# Normalize the argument into an absolute path inside the image.
	dir=${dir#/}
	dir="${ED}${dir}"
	if [[ ! -d ${dir} ]] ; then
		__vecho "${0##*/}: /${dir#${ED}} does not exist!"
		continue
	fi
	cd "${dir}"
	actual_dir=${dir}
	dir=. # use relative path to avoid 'Argument list too long' errors

	# hide all the stuff we want to skip
	hide_skip_dirs "${dir}"

	# since we've been requested to compress the whole dir,
	# delete any individual queued requests
	rm -f "${actual_dir}.ecompress.dir"
	find "${dir}" -type f -name '*.ecompress.file' -print0 | ${XARGS} -0 rm -f

	# not uncommon for packages to compress doc files themselves
	for (( d = 0; d < ${#decompressors[@]}; d += 2 )) ; do
		# It's faster to parallelize at this stage than to try to
		# parallelize the compressors. This is because the find|xargs
		# ends up launching less compressors overall, so the overhead
		# of forking children ends up dominating.
		(
			__multijob_child_init
			# BUGFIX: index with the loop counter ${d}; the old code used
			# an unset variable 'i', so every iteration expanded to index
			# 0 and only the ".Z"/gunzip pair ever ran — .gz/.bz2/.xz/
			# .lzma files were never decompressed.
			funk_up_dir "decompress" "${decompressors[d]}" "${decompressors[d+1]}"
		) &
		__multijob_post_fork
		: $(( ret |= $? ))
	done

	__multijob_finish
	: $(( ret |= $? ))

	# forcibly break all hard links as some compressors whine about it
	find "${dir}" -type f -links +1 -exec env file="{}" sh -c \
		'cp -p "${file}" "${file}.ecompress.break" ; mv -f "${file}.ecompress.break" "${file}"' \;

	# now lets do our work
	if [[ -n ${suffix} ]] ; then
		__vecho "${0##*/}: $(ecompress --bin) /${actual_dir#${ED}}"
		funk_up_dir "compress" "${suffix}" "ecompress"
		: $(( ret |= $? ))
	fi

	# finally, restore the skipped stuff
	restore_skip_dirs
done

[[ ${ret} -ne 0 ]] && __helpers_die "${0##*/} failed"
exit ${ret}