| #!/usr/bin/python -O |
| # Copyright 1999-2011 Gentoo Foundation |
| # Distributed under the terms of the GNU General Public License v2 |
| |
| # Next to do: dep syntax checking in mask files |
| # Then, check to make sure deps are satisfiable (to avoid "can't find match for" problems) |
| # that last one is tricky because multiple profiles need to be checked. |
| |
| from __future__ import print_function |
| |
| import calendar |
| import copy |
| import errno |
| import formatter |
| import io |
| import logging |
| import optparse |
| import re |
| import signal |
| import stat |
| import subprocess |
| import sys |
| import tempfile |
| import textwrap |
| import time |
| import platform |
| |
| try: |
| from urllib.request import urlopen as urllib_request_urlopen |
| except ImportError: |
| from urllib import urlopen as urllib_request_urlopen |
| |
| from itertools import chain |
| from stat import S_ISDIR |
| |
| try: |
| import portage |
| except ImportError: |
| from os import path as osp |
| sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")) |
| import portage |
| portage._disable_legacy_globals() |
| portage.dep._internal_warnings = True |
| |
| try: |
| import xml.etree.ElementTree |
| from xml.parsers.expat import ExpatError |
| except ImportError: |
| msg = ["Please enable python's \"xml\" USE flag in order to use repoman."] |
| from portage.output import EOutput |
| out = EOutput() |
| for line in msg: |
| out.eerror(line) |
| sys.exit(1) |
| |
| from portage import os |
| from portage import subprocess_getstatusoutput |
| from portage import _encodings |
| from portage import _unicode_encode |
| from repoman.checks import run_checks |
| from repoman import utilities |
| from repoman.herdbase import make_herd_base |
| from _emerge.Package import Package |
| from _emerge.RootConfig import RootConfig |
| from _emerge.userquery import userquery |
| import portage.checksum |
| import portage.const |
| from portage import cvstree, normalize_path |
| from portage import util |
| from portage.exception import (FileNotFound, MissingParameter, |
| ParseError, PermissionDenied) |
| from portage.process import find_binary, spawn |
| from portage.output import bold, create_color_func, \ |
| green, nocolor, red |
| from portage.output import ConsoleStyleFile, StyleWriter |
| from portage.util import cmp_sort_key, writemsg_level |
| from portage.package.ebuild.digestgen import digestgen |
| from portage.eapi import eapi_has_iuse_defaults, eapi_has_required_use |
| |
# Python 3 removed the ``basestring`` type; alias it to ``str`` so that
# isinstance() checks against basestring work on both Python 2 and 3.
if sys.hexversion >= 0x3000000:
	basestring = str

util.initialize_logger()

# Maximum DESCRIPTION length permitted before DESCRIPTION.toolong is reported.
max_desc_len = 100
allowed_filename_chars="a-zA-Z0-9._-+:"
disallowed_filename_chars_re = re.compile(r'[^a-zA-Z0-9._\-+:]')
# Flags suspiciously long runs of digits (19+) in package versions.
pv_toolong_re = re.compile(r'[0-9]{19,}')
bad = create_color_func("BAD")

# A sane umask is needed for files that portage creates.
os.umask(0o22)
# Repoman sets its own ACCEPT_KEYWORDS and we don't want it to
# behave incrementally.
repoman_incrementals = tuple(x for x in \
	portage.const.INCREMENTALS if x != 'ACCEPT_KEYWORDS')
config_root = os.environ.get("PORTAGE_CONFIGROOT")
repoman_settings = portage.config(config_root=config_root, local_config=False)

# Disable colorized output when the user requested it, the terminal cannot
# render it, or stdout is not a tty (e.g. output is being piped).
if repoman_settings.get("NOCOLOR", "").lower() in ("yes", "true") or \
	repoman_settings.get('TERM') == 'dumb' or \
	not sys.stdout.isatty():
	nocolor()
| |
def warn(txt):
	"""Print *txt* to stdout prefixed with "repoman: "."""
	message = "repoman: " + txt
	print(message)
| |
def err(txt):
	"""Print *txt* as a repoman warning, then abort with exit status 1."""
	warn(txt)
	raise SystemExit(1)
| |
def exithandler(signum=None, frame=None):
	"""SIGINT handler: log a fatal message and terminate.

	Exits with status 128 + signum when invoked as a signal handler
	(the conventional "killed by signal" exit code), or with status 1
	when called directly without a signal number.
	"""
	logging.fatal("Interrupted; exiting...")
	if signum is None:
		sys.exit(1)
	sys.exit(128 + signum)

# Install the handler so Ctrl-C exits cleanly instead of tracebacking.
signal.signal(signal.SIGINT, exithandler)
| |
class RepomanHelpFormatter(optparse.IndentedHelpFormatter):
	"""Repoman needs its own HelpFormatter for now, because the default ones
	murder the help text.

	Widens output to 150 columns, tightens indentation, and returns
	description text verbatim instead of re-wrapping it.
	"""

	def __init__(self, indent_increment=1, max_help_position=24, width=150, short_first=1):
		# Call the direct base class.  The original invoked the grandparent
		# optparse.HelpFormatter.__init__ directly, which only worked
		# because IndentedHelpFormatter.__init__ happens to just forward.
		optparse.IndentedHelpFormatter.__init__(self, indent_increment, max_help_position, width, short_first)

	def format_description(self, description):
		"""Return *description* unchanged (no re-wrapping)."""
		return description
| |
class RepomanOptionParser(optparse.OptionParser):
	"""Add the on_tail function, ruby has it, optionParser should too

	Text queued via on_tail() is appended verbatim after the standard
	option help in format_help().
	"""

	def __init__(self, *args, **kwargs):
		optparse.OptionParser.__init__(self, *args, **kwargs)
		self.tail = ""

	def on_tail(self, description):
		"""Queue *description* for output after the generated help text."""
		self.tail = self.tail + description

	def format_help(self, formatter=None):
		"""Return the standard help followed by the accumulated tail."""
		return optparse.OptionParser.format_help(self, formatter) + self.tail
| |
| |
def ParseArgs(argv, qahelp):
	"""This function uses a customized optionParser to parse command line arguments for repoman
	Args:
		argv - a sequence of command line arguments (argv[0] is the program
			name; the remainder are options and an optional mode word)
		qahelp - a dict of qa warning to help message
	Returns:
		(opts, args), just like a call to parser.parse_args()
	"""

	if argv and isinstance(argv[0], bytes):
		argv = [portage._unicode_decode(x) for x in argv]

	modes = {
		'commit' : 'Run a scan then commit changes',
		'ci' : 'Run a scan then commit changes',
		'fix' : 'Fix simple QA issues (stray digests, missing digests)',
		'full' : 'Scan directory tree and print all issues (not a summary)',
		'help' : 'Show this screen',
		'manifest' : 'Generate a Manifest (fetches files if necessary)',
		'manifest-check' : 'Check Manifests for missing or incorrect digests',
		'scan' : 'Scan directory tree for QA issues'
	}

	mode_keys = list(modes)
	mode_keys.sort()

	parser = RepomanOptionParser(formatter=RepomanHelpFormatter(), usage="%prog [options] [mode]")
	parser.description = green(" ".join((os.path.basename(argv[0]), "1.2")))
	parser.description += "\nCopyright 1999-2007 Gentoo Foundation"
	parser.description += "\nDistributed under the terms of the GNU General Public License v2"
	parser.description += "\nmodes: " + " | ".join(map(green,mode_keys))

	parser.add_option('-a', '--ask', dest='ask', action='store_true', default=False,
		help='Request a confirmation before commiting')

	parser.add_option('-m', '--commitmsg', dest='commitmsg',
		help='specify a commit message on the command line')

	parser.add_option('-M', '--commitmsgfile', dest='commitmsgfile',
		help='specify a path to a file that contains a commit message')

	parser.add_option('-p', '--pretend', dest='pretend', default=False,
		action='store_true', help='don\'t commit or fix anything; just show what would be done')

	parser.add_option('-q', '--quiet', dest="quiet", action="count", default=0,
		help='do not print unnecessary messages')

	parser.add_option(
		'--echangelog', type='choice', choices=('y', 'n', 'force'), metavar="<y|n|force>",
		help='for commit mode, call echangelog if ChangeLog is unmodified (or '
		'regardless of modification if \'force\' is specified)')

	parser.add_option('-f', '--force', dest='force', default=False, action='store_true',
		help='Commit with QA violations')

	parser.add_option('--vcs', dest='vcs',
		help='Force using specific VCS instead of autodetection')

	parser.add_option('-v', '--verbose', dest="verbosity", action='count',
		help='be very verbose in output', default=0)

	parser.add_option('-V', '--version', dest='version', action='store_true',
		help='show version info')

	parser.add_option('-x', '--xmlparse', dest='xml_parse', action='store_true',
		default=False, help='forces the metadata.xml parse check to be carried out')

	parser.add_option(
		'--if-modified', type='choice', choices=('y', 'n'), default='n',
		metavar="<y|n>",
		help='only check packages that have uncommitted modifications')

	parser.add_option('-i', '--ignore-arches', dest='ignore_arches', action='store_true',
		default=False, help='ignore arch-specific failures (where arch != host)')

	parser.add_option("--ignore-default-opts",
		action="store_true",
		help="do not use the REPOMAN_DEFAULT_OPTS environment variable")

	parser.add_option('-I', '--ignore-masked', dest='ignore_masked', action='store_true',
		default=False, help='ignore masked packages (not allowed with commit mode)')

	parser.add_option('-d', '--include-dev', dest='include_dev', action='store_true',
		default=False, help='include dev profiles in dependency checks')

	parser.add_option('--unmatched-removal', dest='unmatched_removal', action='store_true',
		default=False, help='enable strict checking of package.mask and package.unmask files for unmatched removal atoms')

	parser.add_option('--without-mask', dest='without_mask', action='store_true',
		default=False, help='behave as if no package.mask entries exist (not allowed with commit mode)')

	parser.add_option('--mode', type='choice', dest='mode', choices=list(modes),
		help='specify which mode repoman will run in (default=full)')

	parser.on_tail("\n " + green("Modes".ljust(20) + " Description\n"))

	for k in mode_keys:
		parser.on_tail(" %s %s\n" % (k.ljust(20), modes[k]))

	parser.on_tail("\n " + green("QA keyword".ljust(20) + " Description\n"))

	sorted_qa = list(qahelp)
	sorted_qa.sort()
	for k in sorted_qa:
		parser.on_tail(" %s %s\n" % (k.ljust(20), qahelp[k]))

	opts, args = parser.parse_args(argv[1:])

	if not opts.ignore_default_opts:
		default_opts = repoman_settings.get("REPOMAN_DEFAULT_OPTS", "").split()
		if default_opts:
			# Prepend REPOMAN_DEFAULT_OPTS and re-parse.  Use the argv
			# parameter: the original read sys.argv here, silently
			# ignoring the argument list passed by the caller.
			opts, args = parser.parse_args(default_opts + argv[1:])

	if opts.mode == 'help':
		# optparse's print_help() only accepts a 'file' keyword; the
		# original passed short=False, which raised TypeError.  Help
		# mode prints usage and exits successfully.
		parser.print_help()
		sys.exit(0)

	# The first positional argument may name a mode; anything else is an
	# error.  NOTE(review): a positional 'help' lands here, after the
	# opts.mode check above — presumably handled downstream; confirm.
	for arg in args:
		if arg in modes:
			if not opts.mode:
				opts.mode = arg
				break
		else:
			parser.error("invalid mode: %s" % arg)

	if not opts.mode:
		opts.mode = 'full'

	if opts.mode == 'ci':
		opts.mode = 'commit' # backwards compat shortcut

	if opts.mode == 'commit' and not (opts.force or opts.pretend):
		if opts.ignore_masked:
			parser.error('Commit mode and --ignore-masked are not compatible')
		if opts.without_mask:
			parser.error('Commit mode and --without-mask are not compatible')

	# Use the verbosity and quiet options to fiddle with the loglevel
	# appropriately (each -v lowers the threshold, each -q raises it).
	logger = logging.getLogger()
	for _ in range(opts.verbosity):
		logger.setLevel(logger.getEffectiveLevel() - 10)

	for _ in range(opts.quiet):
		logger.setLevel(logger.getEffectiveLevel() + 10)

	return (opts, args)
| |
# Map of QA check keyword -> one-line description, used for --help output
# and for explaining reported problems.
qahelp={
	"CVS/Entries.IO_error":"Attempting to commit, and an IO error was encountered access the Entries file",
	"desktop.invalid":"desktop-file-validate reports errors in a *.desktop file",
	"ebuild.invalidname":"Ebuild files with a non-parseable or syntactically incorrect name (or using 2.1 versioning extensions)",
	"ebuild.namenomatch":"Ebuild files that do not have the same name as their parent directory",
	"changelog.ebuildadded":"An ebuild was added but the ChangeLog was not modified",
	"changelog.missing":"Missing ChangeLog files",
	"ebuild.notadded":"Ebuilds that exist but have not been added to cvs",
	"ebuild.patches":"PATCHES variable should be a bash array to ensure white space safety",
	"changelog.notadded":"ChangeLogs that exist but have not been added to cvs",
	"dependency.unknown" : "Ebuild has a dependency that refers to an unknown package (which may be valid if it is a blocker for a renamed/removed package, or is an alternative choice provided by an overlay)",
	"file.executable":"Ebuilds, digests, metadata.xml, Manifest, and ChangeLog do not need the executable bit",
	"file.size":"Files in the files directory must be under 20 KiB",
	"file.size.fatal":"Files in the files directory must be under 60 KiB",
	"file.name":"File/dir name must be composed of only the following chars: %s " % allowed_filename_chars,
	"file.UTF8":"File is not UTF8 compliant",
	"inherit.autotools":"Ebuild inherits autotools but does not call eautomake, eautoconf or eautoreconf",
	"inherit.deprecated":"Ebuild inherits a deprecated eclass",
	"java.eclassesnotused":"With virtual/jdk in DEPEND you must inherit a java eclass",
	"wxwidgets.eclassnotused":"Ebuild DEPENDs on x11-libs/wxGTK without inheriting wxwidgets.eclass",
	"KEYWORDS.dropped":"Ebuilds that appear to have dropped KEYWORDS for some arch",
	"KEYWORDS.missing":"Ebuilds that have a missing or empty KEYWORDS variable",
	"KEYWORDS.stable":"Ebuilds that have been added directly with stable KEYWORDS",
	"KEYWORDS.stupid":"Ebuilds that use KEYWORDS=-* instead of package.mask",
	"LICENSE.missing":"Ebuilds that have a missing or empty LICENSE variable",
	"LICENSE.virtual":"Virtuals that have a non-empty LICENSE variable",
	"DESCRIPTION.missing":"Ebuilds that have a missing or empty DESCRIPTION variable",
	"DESCRIPTION.toolong":"DESCRIPTION is over %d characters" % max_desc_len,
	"EAPI.definition":"EAPI is defined after an inherit call (must be defined before)",
	"EAPI.deprecated":"Ebuilds that use features that are deprecated in the current EAPI",
	"EAPI.incompatible":"Ebuilds that use features that are only available with a different EAPI",
	"EAPI.unsupported":"Ebuilds that have an unsupported EAPI version (you must upgrade portage)",
	"SLOT.invalid":"Ebuilds that have a missing or invalid SLOT variable value",
	"HOMEPAGE.missing":"Ebuilds that have a missing or empty HOMEPAGE variable",
	"HOMEPAGE.virtual":"Virtuals that have a non-empty HOMEPAGE variable",
	"DEPEND.bad":"User-visible ebuilds with bad DEPEND settings (matched against *visible* ebuilds)",
	"RDEPEND.bad":"User-visible ebuilds with bad RDEPEND settings (matched against *visible* ebuilds)",
	"PDEPEND.bad":"User-visible ebuilds with bad PDEPEND settings (matched against *visible* ebuilds)",
	"DEPEND.badmasked":"Masked ebuilds with bad DEPEND settings (matched against *all* ebuilds)",
	"RDEPEND.badmasked":"Masked ebuilds with RDEPEND settings (matched against *all* ebuilds)",
	"PDEPEND.badmasked":"Masked ebuilds with PDEPEND settings (matched against *all* ebuilds)",
	"DEPEND.badindev":"User-visible ebuilds with bad DEPEND settings (matched against *visible* ebuilds) in developing arch",
	"RDEPEND.badindev":"User-visible ebuilds with bad RDEPEND settings (matched against *visible* ebuilds) in developing arch",
	"PDEPEND.badindev":"User-visible ebuilds with bad PDEPEND settings (matched against *visible* ebuilds) in developing arch",
	"DEPEND.badmaskedindev":"Masked ebuilds with bad DEPEND settings (matched against *all* ebuilds) in developing arch",
	"RDEPEND.badmaskedindev":"Masked ebuilds with RDEPEND settings (matched against *all* ebuilds) in developing arch",
	"PDEPEND.badmaskedindev":"Masked ebuilds with PDEPEND settings (matched against *all* ebuilds) in developing arch",
	"PDEPEND.suspect":"PDEPEND contains a package that usually only belongs in DEPEND.",
	"DEPEND.syntax":"Syntax error in DEPEND (usually an extra/missing space/parenthesis)",
	"RDEPEND.syntax":"Syntax error in RDEPEND (usually an extra/missing space/parenthesis)",
	"PDEPEND.syntax":"Syntax error in PDEPEND (usually an extra/missing space/parenthesis)",
	"DEPEND.badtilde":"DEPEND uses the ~ dep operator with a non-zero revision part, which is useless (the revision is ignored)",
	"RDEPEND.badtilde":"RDEPEND uses the ~ dep operator with a non-zero revision part, which is useless (the revision is ignored)",
	"PDEPEND.badtilde":"PDEPEND uses the ~ dep operator with a non-zero revision part, which is useless (the revision is ignored)",
	"LICENSE.syntax":"Syntax error in LICENSE (usually an extra/missing space/parenthesis)",
	"PROVIDE.syntax":"Syntax error in PROVIDE (usually an extra/missing space/parenthesis)",
	"PROPERTIES.syntax":"Syntax error in PROPERTIES (usually an extra/missing space/parenthesis)",
	"RESTRICT.syntax":"Syntax error in RESTRICT (usually an extra/missing space/parenthesis)",
	"REQUIRED_USE.syntax":"Syntax error in REQUIRED_USE (usually an extra/missing space/parenthesis)",
	"SRC_URI.syntax":"Syntax error in SRC_URI (usually an extra/missing space/parenthesis)",
	"SRC_URI.mirror":"A uri listed in profiles/thirdpartymirrors is found in SRC_URI",
	"ebuild.syntax":"Error generating cache entry for ebuild; typically caused by ebuild syntax error or digest verification failure",
	"ebuild.output":"A simple sourcing of the ebuild produces output; this breaks ebuild policy.",
	"ebuild.nesteddie":"Placing 'die' inside ( ) prints an error, but doesn't stop the ebuild.",
	"variable.invalidchar":"A variable contains an invalid character that is not part of the ASCII character set",
	"variable.readonly":"Assigning a readonly variable",
	"variable.usedwithhelpers":"Ebuild uses D, ROOT, ED, EROOT or EPREFIX with helpers",
	"LIVEVCS.stable":"This ebuild is a live checkout from a VCS but has stable keywords.",
	"LIVEVCS.unmasked":"This ebuild is a live checkout from a VCS but has keywords and is not masked in the global package.mask.",
	"IUSE.invalid":"This ebuild has a variable in IUSE that is not in the use.desc or its metadata.xml file",
	"IUSE.missing":"This ebuild has a USE conditional which references a flag that is not listed in IUSE",
	"IUSE.undefined":"This ebuild does not define IUSE (style guideline says to define IUSE even when empty)",
	"LICENSE.invalid":"This ebuild is listing a license that doesnt exist in portages license/ dir.",
	"KEYWORDS.invalid":"This ebuild contains KEYWORDS that are not listed in profiles/arch.list or for which no valid profile was found",
	"RDEPEND.implicit":"RDEPEND is unset in the ebuild which triggers implicit RDEPEND=$DEPEND assignment (prior to EAPI 4)",
	"RDEPEND.suspect":"RDEPEND contains a package that usually only belongs in DEPEND.",
	"RESTRICT.invalid":"This ebuild contains invalid RESTRICT values.",
	"digest.assumed":"Existing digest must be assumed correct (Package level only)",
	"digest.missing":"Some files listed in SRC_URI aren't referenced in the Manifest",
	"digest.unused":"Some files listed in the Manifest aren't referenced in SRC_URI",
	"ebuild.majorsyn":"This ebuild has a major syntax error that may cause the ebuild to fail partially or fully",
	"ebuild.minorsyn":"This ebuild has a minor syntax error that contravenes gentoo coding style",
	"ebuild.badheader":"This ebuild has a malformed header",
	"eprefixify.defined":"The ebuild uses eprefixify, but does not inherit the prefix eclass",
	"manifest.bad":"Manifest has missing or incorrect digests",
	"metadata.missing":"Missing metadata.xml files",
	"metadata.bad":"Bad metadata.xml files",
	"metadata.warning":"Warnings in metadata.xml files",
	"portage.internal":"The ebuild uses an internal Portage function",
	"virtual.oldstyle":"The ebuild PROVIDEs an old-style virtual (see GLEP 37)",
	"usage.obsolete":"The ebuild makes use of an obsolete construct",
	"upstream.workaround":"The ebuild works around an upstream bug, an upstream bug should be filed and tracked in bugs.gentoo.org"
}

# All known QA categories, sorted for stable output.
qacats = list(qahelp)
qacats.sort()

# Categories that are reported as warnings rather than fatal errors;
# everything in qacats but not in this set blocks a commit.
qawarnings = set((
"changelog.missing",
"changelog.notadded",
"dependency.unknown",
"digest.assumed",
"digest.unused",
"ebuild.notadded",
"ebuild.nesteddie",
"desktop.invalid",
"DEPEND.badmasked","RDEPEND.badmasked","PDEPEND.badmasked",
"DEPEND.badindev","RDEPEND.badindev","PDEPEND.badindev",
"DEPEND.badmaskedindev","RDEPEND.badmaskedindev","PDEPEND.badmaskedindev",
"DEPEND.badtilde", "RDEPEND.badtilde", "PDEPEND.badtilde",
"DESCRIPTION.toolong",
"EAPI.deprecated",
"HOMEPAGE.virtual",
"LICENSE.virtual",
"KEYWORDS.dropped",
"KEYWORDS.stupid",
"KEYWORDS.missing",
"IUSE.undefined",
"PDEPEND.suspect",
"RDEPEND.implicit",
"RDEPEND.suspect",
"RESTRICT.invalid",
"ebuild.minorsyn",
"ebuild.badheader",
"ebuild.patches",
"file.size",
"inherit.autotools",
"inherit.deprecated",
"java.eclassesnotused",
"wxwidgets.eclassnotused",
"metadata.warning",
"portage.internal",
"usage.obsolete",
"upstream.workaround",
"virtual.oldstyle",
"LIVEVCS.stable",
"LIVEVCS.unmasked",
))
| |
# Matches any character outside the 7-bit ASCII range (variable.invalidchar).
non_ascii_re = re.compile(r'[^\x00-\x7f]')

# Variables that must be non-empty in every ebuild; each one gains a
# corresponding "<VAR>.missing" QA category below.
missingvars = ["KEYWORDS", "LICENSE", "DESCRIPTION", "HOMEPAGE"]
allvars = set(x for x in portage.auxdbkeys if not x.startswith("UNUSED_"))
allvars.update(Package.metadata_keys)
allvars = sorted(allvars)
commitmessage = None
for x in missingvars:
	x += ".missing"
	if x not in qacats:
		# logging.warn() is a deprecated alias for warning(); also pass
		# the argument lazily instead of eagerly interpolating it.
		logging.warning('* missingvars values need to be added to qahelp ("%s")', x)
		qacats.append(x)
		qawarnings.add(x)
| |
# Recognized RESTRICT tokens; anything else triggers RESTRICT.invalid.
valid_restrict = frozenset(["binchecks", "bindist",
	"fetch", "installsources", "mirror",
	"primaryuri", "strip", "test", "userpriv"])

# Eclasses whose inheritance marks an ebuild as a live VCS checkout
# (used by the LIVEVCS.* checks).
live_eclasses = frozenset([
	"bzr",
	"cvs",
	"darcs",
	"git",
	"git-2",
	"mercurial",
	"subversion",
	"tla",
])

# Build-time tools that almost never belong in RDEPEND; their presence
# there triggers RDEPEND.suspect.
suspect_rdepend = frozenset([
	"app-arch/cabextract",
	"app-arch/rpm2targz",
	"app-doc/doxygen",
	"dev-lang/nasm",
	"dev-lang/swig",
	"dev-lang/yasm",
	"dev-perl/extutils-pkgconfig",
	"dev-util/byacc",
	"dev-util/cmake",
	"dev-util/ftjam",
	"dev-util/gperf",
	"dev-util/gtk-doc",
	"dev-util/gtk-doc-am",
	"dev-util/intltool",
	"dev-util/jam",
	"dev-util/pkgconfig",
	"dev-util/scons",
	"dev-util/unifdef",
	"dev-util/yacc",
	"media-gfx/ebdftopcf",
	"sys-apps/help2man",
	"sys-devel/autoconf",
	"sys-devel/automake",
	"sys-devel/bin86",
	"sys-devel/bison",
	"sys-devel/dev86",
	"sys-devel/flex",
	"sys-devel/m4",
	"sys-devel/pmake",
	"virtual/linux-sources",
	"x11-misc/bdftopcf",
	"x11-misc/imake",
])

# Canonical location of the metadata.xml DTD used for validation.
metadata_dtd_uri = 'http://www.gentoo.org/dtd/metadata.dtd'
# force refetch if the local copy creation time is older than this
metadata_dtd_ctime_interval = 60 * 60 * 24 * 7 # 7 days

# file.executable: filenames that must never carry the executable bit.
no_exec = frozenset(["Manifest","ChangeLog","metadata.xml"])
| |
options, arguments = ParseArgs(sys.argv, qahelp)

if options.version:
	print("Portage", portage.VERSION)
	sys.exit(0)

# Set this to False when an extraordinary issue (generally
# something other than a QA issue) makes it impossible to
# commit (like if Manifest generation fails).
can_force = True

# Locate the repository (and the overlay/package dir we were invoked from).
portdir, portdir_overlay, mydir = utilities.FindPortdir(repoman_settings)
if portdir is None:
	sys.exit(1)

# Path of the current working dir relative to the repository root,
# prefixed with the repository's basename.
myreporoot = os.path.basename(portdir_overlay)
myreporoot += mydir[len(portdir_overlay):]

# Determine the VCS in use: honor --vcs if given (unknown values fall
# back to None), otherwise autodetect from the working directory.
if options.vcs:
	if options.vcs in ('cvs', 'svn', 'git', 'bzr', 'hg'):
		vcs = options.vcs
	else:
		vcs = None
else:
	vcses = utilities.FindVCS()
	if len(vcses) > 1:
		print(red('*** Ambiguous workdir -- more than one VCS found at the same depth: %s.' % ', '.join(vcses)))
		print(red('*** Please either clean up your workdir or specify --vcs option.'))
		sys.exit(1)
	elif vcses:
		vcs = vcses[0]
	else:
		vcs = None

# --if-modified needs a VCS to ask what changed.
if options.if_modified == "y" and vcs is None:
	logging.info("Not in a version controlled repository; "
		"disabling --if-modified.")
	options.if_modified = "n"

# Disable copyright/mtime check if vcs does not preserve mtime (bug #324075).
vcs_preserves_mtime = vcs not in ('git',)

vcs_local_opts = repoman_settings.get("REPOMAN_VCS_LOCAL_OPTS", "").split()
vcs_global_opts = repoman_settings.get("REPOMAN_VCS_GLOBAL_OPTS")
if vcs_global_opts is None:
	# Default to quiet operation for cvs/svn when the user set nothing.
	if vcs in ('cvs', 'svn'):
		vcs_global_opts = "-q"
	else:
		vcs_global_opts = ""
vcs_global_opts = vcs_global_opts.split()

if options.mode == 'commit' and not options.pretend and not vcs:
	logging.info("Not in a version controlled repository; enabling pretend mode.")
	options.pretend = True

# Ensure that PORTDIR_OVERLAY contains the repository corresponding to $PWD.
repoman_settings['PORTDIR_OVERLAY'] = "%s %s" % \
	(repoman_settings.get('PORTDIR_OVERLAY', ''),
	portage._shell_quote(portdir_overlay))
# We have to call the config constructor again so
# that config.repositories is initialized correctly.
repoman_settings = portage.config(config_root=config_root, local_config=False,
	env=dict(os.environ, PORTDIR_OVERLAY=repoman_settings['PORTDIR_OVERLAY']))

root = repoman_settings['EROOT']
trees = {
	root : {'porttree' : portage.portagetree(settings=repoman_settings)}
}
portdb = trees[root]['porttree'].dbapi

# Constrain dependency resolution to the master(s)
# that are specified in layout.conf.
repodir = os.path.realpath(portdir_overlay)
repo_config = repoman_settings.repositories.get_repo_for_location(repodir)
portdb.porttrees = list(repo_config.eclass_db.porttrees)
portdir = portdb.porttrees[0]
| |
# GPG-sign commits when the repository's layout.conf requests it (git only).
if repo_config.sign_commit:
	if vcs == 'git':
		# NOTE: It's possible to use --gpg-sign=key_id to specify the key in
		# the commit arguments. If key_id is unspecified, then it must be
		# configured by `git config user.signingkey key_id`.
		vcs_local_opts.append("--gpg-sign")

# In order to disable manifest signatures, repos may set
# "sign-manifests = false" in metadata/layout.conf. This
# can be used to prevent merge conflicts like those that
# thin-manifests is designed to prevent.
sign_manifests = "sign" in repoman_settings.features and \
	repo_config.sign_manifest

# Hash functions to use when generating Manifests; fall back to this
# portage version's defaults when layout.conf does not specify any.
manifest_hashes = repo_config.manifest_hashes
if manifest_hashes is None:
	manifest_hashes = portage.const.MANIFEST2_HASH_DEFAULTS

# Modes that (re)generate Manifests need hash settings this portage
# version can actually produce; bail out early otherwise.
if options.mode in ("commit", "fix", "manifest"):
	if portage.const.MANIFEST2_REQUIRED_HASH not in manifest_hashes:
		msg = ("The 'manifest-hashes' setting in the '%s' repository's "
			"metadata/layout.conf does not contain the '%s' hash which "
			"is required by this portage version. You will have to "
			"upgrade portage if you want to generate valid manifests for "
			"this repository.") % \
			(repo_config.name, portage.const.MANIFEST2_REQUIRED_HASH)
		for line in textwrap.wrap(msg, 70):
			logging.error(line)
		sys.exit(1)

	unsupported_hashes = manifest_hashes.difference(
		portage.const.MANIFEST2_HASH_FUNCTIONS)
	if unsupported_hashes:
		msg = ("The 'manifest-hashes' setting in the '%s' repository's "
			"metadata/layout.conf contains one or more hash types '%s' "
			"which are not supported by this portage version. You will "
			"have to upgrade portage if you want to generate valid "
			"manifests for this repository.") % \
			(repo_config.name, " ".join(sorted(unsupported_hashes)))
		for line in textwrap.wrap(msg, 70):
			logging.error(line)
		sys.exit(1)

# The gentoo repo requires RMD160 support; refuse to commit without it.
if "commit" == options.mode and \
	repo_config.name == "gentoo" and \
	"RMD160" in manifest_hashes and \
	"RMD160" not in portage.checksum.hashorigin_map:
	msg = "Please install " \
		"pycrypto or enable python's ssl USE flag in order " \
		"to enable RMD160 hash support. See bug #198398 for " \
		"more information."
	prefix = bad(" * ")
	for line in textwrap.wrap(msg, 70):
		print(prefix + line)
	sys.exit(1)

# Default --echangelog from layout.conf; no VCS means nothing to log.
if options.echangelog is None and repo_config.update_changelog:
	options.echangelog = 'y'

if vcs is None:
	options.echangelog = 'n'

# The --echangelog option causes automatic ChangeLog generation,
# which invalidates changelog.ebuildadded and changelog.missing
# checks.
# Note: Some don't use ChangeLogs in distributed SCMs.
# It will be generated on server side from scm log,
# before package moves to the rsync server.
# This is needed because they try to avoid merge collisions.
# Gentoo's Council decided to always use the ChangeLog file.
# TODO: shouldn't this just be switched on the repo, iso the VCS?
check_changelog = options.echangelog not in ('y', 'force') and vcs in ('cvs', 'svn')

logging.debug("vcs: %s" % (vcs,))
logging.debug("repo config: %s" % (repo_config,))
logging.debug("options: %s" % (options,))
| |
# Generate an appropriate PORTDIR_OVERLAY value for passing into the
# profile-specific config constructor calls.
env = os.environ.copy()
env['PORTDIR'] = portdir
env['PORTDIR_OVERLAY'] = ' '.join(portdb.porttrees[1:])

logging.info('Setting paths:')
logging.info('PORTDIR = "' + portdir + '"')
logging.info('PORTDIR_OVERLAY = "%s"' % env['PORTDIR_OVERLAY'])

# It's confusing if these warnings are displayed without the user
# being told which profile they come from, so disable them.
env['FEATURES'] = env.get('FEATURES', '') + ' -unknown-features-warn'

# Collect the valid category names from every tree in the master chain.
categories = []
for path in repo_config.eclass_db.porttrees:
	categories.extend(portage.util.grabfile(
		os.path.join(path, 'profiles', 'categories')))
repoman_settings.categories = frozenset(
	portage.util.stack_lists([categories], incremental=1))
categories = repoman_settings.categories

portdb.settings = repoman_settings
root_config = RootConfig(repoman_settings, trees[root], None)
# We really only need to cache the metadata that's necessary for visibility
# filtering. Anything else can be discarded to reduce memory consumption.
portdb._aux_cache_keys.clear()
portdb._aux_cache_keys.update(["EAPI", "IUSE", "KEYWORDS", "repository", "SLOT"])

# Depth of the invocation dir inside the repo: 1 = repo root,
# 2 = category, 3 = package directory.
reposplit = myreporoot.split(os.path.sep)
repolevel = len(reposplit)

# check if it's in $PORTDIR/$CATEGORY/$PN , otherwise bail if committing.
# Reason for this is if they're trying to commit in just $FILESDIR/*, the Manifest needs updating.
# this check ensures that repoman knows where it is, and the manifest recommit is at least possible.
if options.mode == 'commit' and repolevel not in [1,2,3]:
	print(red("***")+" Commit attempts *must* be from within a vcs co, category, or package directory.")
	print(red("***")+" Attempting to commit from a packages files directory will be blocked for instance.")
	print(red("***")+" This is intended behaviour, to ensure the manifest is recommitted for a package.")
	print(red("***"))
	err("Unable to identify level we're commiting from for %s" % '/'.join(reposplit))

# Make startdir relative to the canonical repodir, so that we can pass
# it to digestgen and it won't have to be canonicalized again.
if repolevel == 1:
	startdir = repodir
else:
	startdir = normalize_path(mydir)
	startdir = os.path.join(repodir, *startdir.split(os.sep)[-2-repolevel+3:])
| |
def caterror(mycat):
	"""Abort repoman with a message that *mycat* is not a listed category."""
	err("%s is not an official category. Skipping QA checks in this directory.\nPlease ensure that you add %s to %s/profiles/categories\nif it is a new category." % (mycat, catdir, repodir))
| |
class ProfileDesc(object):
	"""One row of a profiles.desc file: an arch / status / profile-path
	triple plus the tree it was read from."""
	__slots__ = ('abs_path', 'arch', 'status', 'sub_path', 'tree_path',)

	def __init__(self, arch, status, sub_path, tree_path):
		self.arch = arch
		self.status = status
		# Normalize the relative profile path, stripping any leading slash.
		if sub_path:
			sub_path = normalize_path(sub_path.lstrip(os.sep))
		self.sub_path = sub_path
		self.tree_path = tree_path
		# Absolute profile location; an empty tree_path propagates as-is.
		if tree_path:
			self.abs_path = os.path.join(tree_path, 'profiles', self.sub_path)
		else:
			self.abs_path = tree_path

	def __str__(self):
		return self.sub_path if self.sub_path else 'empty profile'
| |
# Profiles parsed from each tree's profiles.desc (filled in below).
profile_list = []
# Allowed values for the third profiles.desc column.
valid_profile_types = frozenset(['dev', 'exp', 'stable'])

# get lists of valid keywords, licenses, and use
kwlist = set()
liclist = set()
uselist = set()
global_pmasklines = []
| |
# Walk every configured tree and harvest the data the later QA checks
# rely on: license names, KEYWORDS arches, global and USE_EXPAND flags,
# global package.mask atoms, and the profiles listed in profiles.desc.
for path in portdb.porttrees:
	try:
		liclist.update(os.listdir(os.path.join(path, "licenses")))
	except OSError:
		# Tree without a licenses/ directory; nothing to collect.
		pass
	kwlist.update(portage.grabfile(os.path.join(path,
		"profiles", "arch.list")))

	use_desc = portage.grabfile(os.path.join(path, 'profiles', 'use.desc'))
	for x in use_desc:
		x = x.split()
		if x:
			# First field is the flag name; the rest is its description.
			uselist.add(x[0])

	# USE_EXPAND descriptions: each profiles/desc/FOO.desc file lists
	# values that expand to flags of the form foo_<value>.
	expand_desc_dir = os.path.join(path, 'profiles', 'desc')
	try:
		expand_list = os.listdir(expand_desc_dir)
	except OSError:
		pass
	else:
		for fn in expand_list:
			if not fn[-5:] == '.desc':
				continue
			use_prefix = fn[:-5].lower() + '_'
			for x in portage.grabfile(os.path.join(expand_desc_dir, fn)):
				x = x.split()
				if x:
					uselist.add(use_prefix + x[0])

	global_pmasklines.append(portage.util.grabfile_package(
		os.path.join(path, 'profiles', 'package.mask'), recursive=1, verify_eapi=True))

	desc_path = os.path.join(path, 'profiles', 'profiles.desc')
	try:
		desc_file = io.open(_unicode_encode(desc_path,
			encoding=_encodings['fs'], errors='strict'),
			mode='r', encoding=_encodings['repo.content'], errors='replace')
	except EnvironmentError:
		# Tree without a profiles.desc; skip profile collection for it.
		pass
	else:
		for i, x in enumerate(desc_file):
			if x[0] == "#":
				continue
			# Expected line format: <arch> <profile-path> <status>
			arch = x.split()
			if len(arch) == 0:
				continue
			if len(arch) != 3:
				err("wrong format: \"" + bad(x.strip()) + "\" in " + \
					desc_path + " line %d" % (i+1, ))
			elif arch[0] not in kwlist:
				err("invalid arch: \"" + bad(arch[0]) + "\" in " + \
					desc_path + " line %d" % (i+1, ))
			elif arch[2] not in valid_profile_types:
				err("invalid profile type: \"" + bad(arch[2]) + "\" in " + \
					desc_path + " line %d" % (i+1, ))
			# NOTE(review): assumes err() aborts the program — otherwise
			# an invalid line would still fall through and construct a
			# ProfileDesc below.  Confirm against err()'s definition.
			profile_desc = ProfileDesc(arch[0], arch[2], arch[1], path)
			if not os.path.isdir(profile_desc.abs_path):
				logging.error(
					"Invalid %s profile (%s) for arch %s in %s line %d",
					arch[2], arch[1], arch[0], desc_path, i+1)
				continue
			if os.path.exists(
				os.path.join(profile_desc.abs_path, 'deprecated')):
				# Deprecated profiles are intentionally skipped.
				continue
			profile_list.append(profile_desc)
		desc_file.close()
| |
# Publish the full arch list through the config so that later checks
# (e.g. KEYWORDS validation) can consult it.
repoman_settings['PORTAGE_ARCHLIST'] = ' '.join(sorted(kwlist))
repoman_settings.backup_changes('PORTAGE_ARCHLIST')

# Flatten the per-tree mask lines into one incremental list, then index
# the mask atoms by category/package for fast lookup.
global_pmasklines = portage.util.stack_lists(global_pmasklines, incremental=1)
global_pmaskdict = {}
for mask_atom in global_pmasklines:
	global_pmaskdict.setdefault(mask_atom.cp, []).append(mask_atom)
del global_pmasklines
| |
def has_global_mask(pkg):
	"""Return the first global package.mask atom matching pkg, or None."""
	candidate_atoms = global_pmaskdict.get(pkg.cp)
	if not candidate_atoms:
		return None
	pkg_as_list = [pkg]
	for atom in candidate_atoms:
		if portage.dep.match_from_list(atom, pkg_as_list):
			return atom
	return None
| |
# Ensure that profile sub_path attributes are unique. Process in reverse order
# so that profiles with duplicate sub_path from overlays will override
# profiles with the same sub_path from parent repos.
profiles = {}
profile_list.reverse()
profile_sub_paths = set()
for prof in profile_list:
	if prof.sub_path not in profile_sub_paths:
		profile_sub_paths.add(prof.sub_path)
		profiles.setdefault(prof.arch, []).append(prof)

# Use an empty profile for checking dependencies of
# packages that have empty KEYWORDS.
prof = ProfileDesc('**', 'stable', '', '')
profiles.setdefault(prof.arch, []).append(prof)
| |
# Warn about stable arches that have no profile listed at all.
for arch_key in repoman_settings.archlist():
	if arch_key[0] == "~":
		continue
	if arch_key not in profiles:
		print(red("\""+arch_key+"\" doesn't have a valid profile listed in profiles.desc."))
		print(red("You need to either \"cvs update\" your profiles dir or follow this"))
		print(red("up with the "+arch_key+" team."))
		print()

# Any of these being empty means the tree data above failed to load;
# bail out rather than produce bogus QA results.
for collected, fatal_msg in (
		(liclist, "Couldn't find licenses?"),
		(kwlist, "Couldn't read KEYWORDS from arch.list"),
		(uselist, "Couldn't find use.desc?")):
	if not collected:
		logging.fatal(fatal_msg)
		sys.exit(1)
| |
# Build the list of category/package entries to scan, based on where
# in the tree repoman was invoked (repolevel).
scanlist = []
if repolevel == 2:
	# Inside a category directory: scan every package subdirectory.
	catdir = reposplit[-1]
	if catdir not in categories:
		caterror(catdir)
	for pkg_name in os.listdir(startdir):
		if pkg_name == "CVS" or pkg_name.startswith("."):
			continue
		if os.path.isdir(startdir + "/" + pkg_name):
			scanlist.append(catdir + "/" + pkg_name)
	repo_subdir = catdir + os.sep
elif repolevel == 1:
	# At the repository root: scan every category/package pair.
	for cat_name in categories:
		cat_path = startdir + "/" + cat_name
		if not os.path.isdir(cat_path):
			continue
		for pkg_name in os.listdir(cat_path):
			if pkg_name == "CVS" or pkg_name.startswith("."):
				continue
			if os.path.isdir(cat_path + "/" + pkg_name):
				scanlist.append(cat_name + "/" + pkg_name)
	repo_subdir = ""
elif repolevel == 3:
	# Inside a package directory: scan only this package.
	catdir = reposplit[-2]
	if catdir not in categories:
		caterror(catdir)
	scanlist.append(catdir + "/" + reposplit[-1])
	repo_subdir = scanlist[-1] + os.sep
else:
	msg = 'Repoman is unable to determine PORTDIR or PORTDIR_OVERLAY' + \
		' from the current working directory'
	logging.critical(msg)
	sys.exit(1)

repo_subdir_len = len(repo_subdir)
scanlist.sort()

logging.debug("Found the following packages to scan:\n%s" % '\n'.join(scanlist))
| |
def vcs_files_to_cps(vcs_file_iter):
	"""
	Iterate over the given modified file paths returned from the vcs,
	and return a frozenset containing category/pn strings for each
	modified package.
	"""
	modified_cps = set()

	if repolevel == 3:
		# Single-package checkout: any modified file belongs to it.
		if reposplit[-2] in categories and \
			next(vcs_file_iter, None) is not None:
			modified_cps.add("/".join(reposplit[-2:]))

	elif repolevel == 2:
		category = reposplit[-1]
		if category in categories:
			for filename in vcs_file_iter:
				parts = filename.split(os.sep)
				# parts looks like ['.', pn, ...]
				if len(parts) > 2:
					modified_cps.add(category + "/" + parts[1])

	else:
		# repolevel == 1
		for filename in vcs_file_iter:
			parts = filename.split(os.sep)
			# parts looks like ['.', category, pn, ...]
			if len(parts) > 3 and parts[1] in categories:
				modified_cps.add("/".join(parts[1:3]))

	return frozenset(modified_cps)
| |
def dev_keywords(profiles):
	"""
	Create a set of KEYWORDS values that exist in 'dev'
	profiles. These are used
	to trigger a message notifying the user when they might
	want to add the --include-dev option.
	"""
	status_to_arches = {}
	for arch, arch_profiles in profiles.items():
		for prof in arch_profiles:
			status_to_arches.setdefault(prof.status, set()).add(arch)

	result = status_to_arches.get('dev', set())
	# Build the ~arch list first so we don't mutate while iterating.
	result.update(['~' + arch for arch in result])
	return frozenset(result)
| |
# Rebind the name to the computed keyword set; the helper function is
# not needed again by name after this point.
dev_keywords = dev_keywords(profiles)

# Per-QA-category counters and failure detail lists.
stats = {}
fails = {}
for qa_cat in qacats:
	stats[qa_cat] = 0
	fails[qa_cat] = []

# provided by the desktop-file-utils package
desktop_file_validate = find_binary("desktop-file-validate")
desktop_pattern = re.compile(r'.*\.desktop$')

xmllint_capable = False
metadata_dtd = os.path.join(repoman_settings["DISTDIR"], 'metadata.dtd')
| |
def parsedate(s):
	"""Parse an RFC 822 date and time string.
	This is required for python3 compatibility, since the
	rfc822.parsedate() function is not available.

	Returns a time.struct_time on success, or None when the string
	does not have the expected six fields or a known month name.
	"""
	# Tokenize on whitespace and commas, dropping empty pieces, so that
	# "Wed, 21 Oct 2015 07:28:00 GMT" yields exactly six tokens.
	tokens = [token
		for field in s.upper().split()
		for token in field.split(',') if token]

	if len(tokens) != 6:
		return None

	# %a, %d %b %Y %H:%M:%S %Z
	weekday, day, month_name, year, hms, zone = tokens

	# Convert the month name ourselves, since strptime's %b directive
	# is locale-dependent.
	month_map = {'JAN':1, 'FEB':2, 'MAR':3, 'APR':4, 'MAY':5, 'JUN':6,
		'JUL':7, 'AUG':8, 'SEP':9, 'OCT':10, 'NOV':11, 'DEC':12}
	month = month_map.get(month_name)
	if month is None:
		return None

	# weekday and zone are ignored; callers treat the result as GMT.
	return time.strptime('%s:%02d:%s:%s' % (year, month, day, hms),
		'%Y:%m:%d:%H:%M:%S')
| |
def fetch_metadata_dtd():
	"""
	Fetch metadata.dtd if it doesn't exist or the ctime is older than
	metadata_dtd_ctime_interval.
	@rtype: bool
	@returns: True if successful, otherwise False
	"""

	must_fetch = True
	metadata_dtd_st = None
	current_time = int(time.time())
	try:
		metadata_dtd_st = os.stat(metadata_dtd)
	except EnvironmentError as e:
		if e.errno not in (errno.ENOENT, errno.ESTALE):
			raise
		del e
	else:
		# Trigger fetch if metadata.dtd mtime is old or clock is wrong.
		if abs(current_time - metadata_dtd_st.st_ctime) \
			< metadata_dtd_ctime_interval:
			must_fetch = False

	if must_fetch:
		print()
		print(green("***") + " the local copy of metadata.dtd " + \
			"needs to be refetched, doing that now")
		print()
		try:
			url_f = urllib_request_urlopen(metadata_dtd_uri)
			msg_info = url_f.info()
			last_modified = msg_info.get('last-modified')
			if last_modified is not None:
				last_modified = parsedate(last_modified)
				if last_modified is not None:
					last_modified = calendar.timegm(last_modified)

			# Download to a temp file and atomically rename into place,
			# so a concurrent reader never observes a partial file.
			metadata_dtd_tmp = "%s.%s" % (metadata_dtd, os.getpid())
			try:
				# Use a context manager so the file handle is closed even
				# if the write raises (previously it leaked in that case).
				with open(metadata_dtd_tmp, mode='wb') as local_f:
					local_f.write(url_f.read())
				if last_modified is not None:
					try:
						os.utime(metadata_dtd_tmp,
							(int(last_modified), int(last_modified)))
					except OSError:
						# This fails on some odd non-unix-like filesystems.
						# We don't really need the mtime to be preserved
						# anyway here (currently we use ctime to trigger
						# fetch), so just ignore it.
						pass
				os.rename(metadata_dtd_tmp, metadata_dtd)
			finally:
				# Clean up the temp file; after a successful rename the
				# unlink fails with ENOENT, which is harmless.
				try:
					os.unlink(metadata_dtd_tmp)
				except OSError:
					pass

			url_f.close()

		except EnvironmentError as e:
			print()
			print(red("!!!")+" attempting to fetch '%s', caught" % metadata_dtd_uri)
			print(red("!!!")+" exception '%s' though." % (e,))
			print(red("!!!")+" fetching new metadata.dtd failed, aborting")
			return False

	return True
| |
# Manifest-only mode needs no xmllint; otherwise require it (or at
# least warn), and refresh the local metadata.dtd when available.
if options.mode != "manifest":
	if not find_binary('xmllint'):
		print(red("!!! xmllint not found. Can't check metadata.xml.\n"))
		if options.xml_parse or repolevel == 3:
			print(red("!!!")+" sorry, xmllint is needed. failing\n")
			sys.exit(1)
	else:
		if not fetch_metadata_dtd():
			sys.exit(1)
		# this can be problematic if xmllint changes their output
		xmllint_capable = True

if options.mode == 'commit' and vcs:
	utilities.detect_vcs_conflicts(options, vcs)

if options.mode != "manifest":
	if options.pretend:
		print(green("\nRepoMan does a once-over of the neighborhood..."))
	else:
		print(green("\nRepoMan scours the neighborhood..."))

# Collections filled from the vcs status queries below.
new_ebuilds = set()
modified_ebuilds = set()
modified_changelogs = set()
mychanged = []
mynew = []
myremoved = []
| |
# Query the active vcs for changed/new/removed files.  Each branch
# normalizes paths to the "./relative" form used by later checks.
if vcs == "cvs":
	mycvstree = cvstree.getentries("./", recursive=1)
	mychanged = cvstree.findchanged(mycvstree, recursive=1, basedir="./")
	mynew = cvstree.findnew(mycvstree, recursive=1, basedir="./")
	if options.if_modified == "y":
		myremoved = cvstree.findremoved(mycvstree, recursive=1, basedir="./")

elif vcs == "svn":
	with os.popen("svn status") as f:
		svnstatus = f.readlines()
	mychanged = [ "./" + elem.split()[-1:][0] for elem in svnstatus if elem and elem[:1] in "MR" ]
	mynew = [ "./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("A") ]
	if options.if_modified == "y":
		myremoved = [ "./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("D")]

elif vcs == "git":
	with os.popen("git diff-index --name-only "
		"--relative --diff-filter=M HEAD") as f:
		mychanged = f.readlines()
	mychanged = ["./" + elem[:-1] for elem in mychanged]

	with os.popen("git diff-index --name-only "
		"--relative --diff-filter=A HEAD") as f:
		mynew = f.readlines()
	mynew = ["./" + elem[:-1] for elem in mynew]
	if options.if_modified == "y":
		with os.popen("git diff-index --name-only "
			"--relative --diff-filter=D HEAD") as f:
			myremoved = f.readlines()
		myremoved = ["./" + elem[:-1] for elem in myremoved]

elif vcs == "bzr":
	with os.popen("bzr status -S .") as f:
		bzrstatus = f.readlines()
	mychanged = [ "./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and elem[1:2] == "M" ]
	# BUG FIX: elem[1:2] is a single character, so comparing it with
	# the two-character string "NK" was always False and N/K entries
	# were never detected as new.  Test membership instead.
	mynew = [ "./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and ( elem[1:2] in ("N", "K") or elem[0:1] == "R" ) ]
	if options.if_modified == "y":
		myremoved = [ "./" + elem.split()[-3:-2][0].split('/')[-1:][0] for elem in bzrstatus if elem and ( elem[1:2] == "K" or elem[0:1] == "R" ) ]

elif vcs == "hg":
	with os.popen("hg status --no-status --modified .") as f:
		mychanged = f.readlines()
	mychanged = ["./" + elem.rstrip() for elem in mychanged]
	# Use a context manager here like the sibling hg queries, so the
	# pipe is closed deterministically instead of leaking.
	with os.popen("hg status --no-status --added .") as f:
		mynew = f.readlines()
	mynew = ["./" + elem.rstrip() for elem in mynew]
	if options.if_modified == "y":
		with os.popen("hg status --no-status --removed .") as f:
			myremoved = f.readlines()
		myremoved = ["./" + elem.rstrip() for elem in myremoved]

if vcs:
	new_ebuilds.update(x for x in mynew if x.endswith(".ebuild"))
	modified_ebuilds.update(x for x in mychanged if x.endswith(".ebuild"))
	modified_changelogs.update(x for x in chain(mychanged, mynew) \
		if os.path.basename(x) == "ChangeLog")
| |
have_pmasked = False
have_dev_keywords = False
dofail = 0

# Per-arch caches reused across packages to speed up dependency checks.
arch_caches = {}
arch_xmatch_caches = {}
shared_xmatch_caches = {"cp-list": {}}

# Disable the "ebuild.notadded" check when not in commit mode and
# running `svn status` in every package dir will be too expensive.

check_ebuild_notadded = not \
	(vcs == "svn" and repolevel < 3 and options.mode != "commit")

# Collect normalized (trailing-slash) third-party mirror URL prefixes
# for the SRC_URI.mirror check.
thirdpartymirrors = []
for mirror_group in repoman_settings.thirdpartymirrors().values():
	for mirror in mirror_group:
		if not mirror.endswith("/"):
			mirror += "/"
		thirdpartymirrors.append(mirror)
| |
| class _MetadataTreeBuilder(xml.etree.ElementTree.TreeBuilder): |
| """ |
| Implements doctype() as required to avoid deprecation warnings with |
| >=python-2.7. |
| """ |
| def doctype(self, name, pubid, system): |
| pass |
| |
# Load the herds database for metadata.xml herd validation.
try:
	herd_base = make_herd_base(os.path.join(repoman_settings["PORTDIR"], "metadata/herds.xml"))
except (EnvironmentError, ParseError, PermissionDenied) as exc:
	err(str(exc))
except FileNotFound:
	# TODO: Download as we do for metadata.dtd, but add a way to
	# disable for non-gentoo repoman users who may not have herds.
	herd_base = None

# With --if-modified, restrict the scan to packages the vcs reports as
# touched; otherwise scan everything collected earlier.
if options.if_modified == "y":
	effective_scanlist = sorted(vcs_files_to_cps(
		chain(mychanged, mynew, myremoved)))
else:
	effective_scanlist = scanlist
| |
| for x in effective_scanlist: |
| #ebuilds and digests added to cvs respectively. |
| logging.info("checking package %s" % x) |
| # save memory by discarding xmatch caches from previous package(s) |
| arch_xmatch_caches.clear() |
| eadded=[] |
| catdir,pkgdir=x.split("/") |
| checkdir=repodir+"/"+x |
| checkdir_relative = "" |
| if repolevel < 3: |
| checkdir_relative = os.path.join(pkgdir, checkdir_relative) |
| if repolevel < 2: |
| checkdir_relative = os.path.join(catdir, checkdir_relative) |
| checkdir_relative = os.path.join(".", checkdir_relative) |
| generated_manifest = False |
| |
| if options.mode == "manifest" or \ |
| (options.mode != 'manifest-check' and \ |
| 'digest' in repoman_settings.features) or \ |
| options.mode in ('commit', 'fix') and not options.pretend: |
| auto_assumed = set() |
| fetchlist_dict = portage.FetchlistDict(checkdir, |
| repoman_settings, portdb) |
| if options.mode == 'manifest' and options.force: |
| portage._doebuild_manifest_exempt_depend += 1 |
| try: |
| distdir = repoman_settings['DISTDIR'] |
| mf = repoman_settings.repositories.get_repo_for_location( |
| os.path.dirname(os.path.dirname(checkdir))) |
| mf = mf.load_manifest(checkdir, distdir, |
| fetchlist_dict=fetchlist_dict) |
| mf.create(requiredDistfiles=None, |
| assumeDistHashesAlways=True) |
| for distfiles in fetchlist_dict.values(): |
| for distfile in distfiles: |
| if os.path.isfile(os.path.join(distdir, distfile)): |
| mf.fhashdict['DIST'].pop(distfile, None) |
| else: |
| auto_assumed.add(distfile) |
| mf.write() |
| finally: |
| portage._doebuild_manifest_exempt_depend -= 1 |
| |
| repoman_settings["O"] = checkdir |
| try: |
| generated_manifest = digestgen( |
| mysettings=repoman_settings, myportdb=portdb) |
| except portage.exception.PermissionDenied as e: |
| generated_manifest = False |
| writemsg_level("!!! Permission denied: '%s'\n" % (e,), |
| level=logging.ERROR, noiselevel=-1) |
| |
| if not generated_manifest: |
| print("Unable to generate manifest.") |
| dofail = 1 |
| |
| if options.mode == "manifest": |
| if not dofail and options.force and auto_assumed and \ |
| 'assume-digests' in repoman_settings.features: |
| # Show which digests were assumed despite the --force option |
| # being given. This output will already have been shown by |
| # digestgen() if assume-digests is not enabled, so only show |
| # it here if assume-digests is enabled. |
| pkgs = list(fetchlist_dict) |
| pkgs.sort() |
| portage.writemsg_stdout(" digest.assumed" + \ |
| portage.output.colorize("WARN", |
| str(len(auto_assumed)).rjust(18)) + "\n") |
| for cpv in pkgs: |
| fetchmap = fetchlist_dict[cpv] |
| pf = portage.catsplit(cpv)[1] |
| for distfile in sorted(fetchmap): |
| if distfile in auto_assumed: |
| portage.writemsg_stdout( |
| " %s::%s\n" % (pf, distfile)) |
| continue |
| elif dofail: |
| sys.exit(1) |
| |
| if not generated_manifest: |
| repoman_settings['O'] = checkdir |
| repoman_settings['PORTAGE_QUIET'] = '1' |
| if not portage.digestcheck([], repoman_settings, strict=1): |
| stats["manifest.bad"] += 1 |
| fails["manifest.bad"].append(os.path.join(x, 'Manifest')) |
| repoman_settings.pop('PORTAGE_QUIET', None) |
| |
| if options.mode == 'manifest-check': |
| continue |
| |
| checkdirlist=os.listdir(checkdir) |
| ebuildlist=[] |
| pkgs = {} |
| allvalid = True |
| for y in checkdirlist: |
| if (y in no_exec or y.endswith(".ebuild")) and \ |
| stat.S_IMODE(os.stat(os.path.join(checkdir, y)).st_mode) & 0o111: |
| stats["file.executable"] += 1 |
| fails["file.executable"].append(os.path.join(checkdir, y)) |
| if y.endswith(".ebuild"): |
| pf = y[:-7] |
| ebuildlist.append(pf) |
| cpv = "%s/%s" % (catdir, pf) |
| try: |
| myaux = dict(zip(allvars, portdb.aux_get(cpv, allvars))) |
| except KeyError: |
| allvalid = False |
| stats["ebuild.syntax"] += 1 |
| fails["ebuild.syntax"].append(os.path.join(x, y)) |
| continue |
| except IOError: |
| allvalid = False |
| stats["ebuild.output"] += 1 |
| fails["ebuild.output"].append(os.path.join(x, y)) |
| continue |
| if not portage.eapi_is_supported(myaux["EAPI"]): |
| allvalid = False |
| stats["EAPI.unsupported"] += 1 |
| fails["EAPI.unsupported"].append(os.path.join(x, y)) |
| continue |
| pkgs[pf] = Package(cpv=cpv, metadata=myaux, |
| root_config=root_config, type_name="ebuild") |
| |
| # Sort ebuilds in ascending order for the KEYWORDS.dropped check. |
| pkgsplits = {} |
| for i in range(len(ebuildlist)): |
| ebuild_split = portage.pkgsplit(ebuildlist[i]) |
| pkgsplits[ebuild_split] = ebuildlist[i] |
| ebuildlist[i] = ebuild_split |
| ebuildlist.sort(key=cmp_sort_key(portage.pkgcmp)) |
| for i in range(len(ebuildlist)): |
| ebuildlist[i] = pkgsplits[ebuildlist[i]] |
| del pkgsplits |
| |
| slot_keywords = {} |
| |
| if len(pkgs) != len(ebuildlist): |
| # If we can't access all the metadata then it's totally unsafe to |
| # commit since there's no way to generate a correct Manifest. |
| # Do not try to do any more QA checks on this package since missing |
| # metadata leads to false positives for several checks, and false |
| # positives confuse users. |
| can_force = False |
| continue |
| |
| for y in checkdirlist: |
| m = disallowed_filename_chars_re.search(y.strip(os.sep)) |
| if m is not None: |
| stats["file.name"] += 1 |
| fails["file.name"].append("%s/%s: char '%s'" % \ |
| (checkdir, y, m.group(0))) |
| |
| if not (y in ("ChangeLog", "metadata.xml") or y.endswith(".ebuild")): |
| continue |
| f = None |
| try: |
| line = 1 |
| f = io.open(_unicode_encode(os.path.join(checkdir, y), |
| encoding=_encodings['fs'], errors='strict'), |
| mode='r', encoding=_encodings['repo.content']) |
| for l in f: |
| line +=1 |
| except UnicodeDecodeError as ue: |
| stats["file.UTF8"] += 1 |
| s = ue.object[:ue.start] |
| l2 = s.count("\n") |
| line += l2 |
| if l2 != 0: |
| s = s[s.rfind("\n") + 1:] |
| fails["file.UTF8"].append("%s/%s: line %i, just after: '%s'" % (checkdir, y, line, s)) |
| finally: |
| if f is not None: |
| f.close() |
| |
| if vcs in ("git", "hg") and check_ebuild_notadded: |
| if vcs == "git": |
| myf = os.popen("git ls-files --others %s" % \ |
| (portage._shell_quote(checkdir_relative),)) |
| if vcs == "hg": |
| myf = os.popen("hg status --no-status --unknown %s" % \ |
| (portage._shell_quote(checkdir_relative),)) |
| for l in myf: |
| if l[:-1][-7:] == ".ebuild": |
| stats["ebuild.notadded"] += 1 |
| fails["ebuild.notadded"].append( |
| os.path.join(x, os.path.basename(l[:-1]))) |
| myf.close() |
| |
| if vcs in ("cvs", "svn", "bzr") and check_ebuild_notadded: |
| try: |
| if vcs == "cvs": |
| myf=open(checkdir+"/CVS/Entries","r") |
| if vcs == "svn": |
| myf = os.popen("svn status --depth=files --verbose " + checkdir) |
| if vcs == "bzr": |
| myf = os.popen("bzr ls -v --kind=file " + checkdir) |
| myl = myf.readlines() |
| myf.close() |
| for l in myl: |
| if vcs == "cvs": |
| if l[0]!="/": |
| continue |
| splitl=l[1:].split("/") |
| if not len(splitl): |
| continue |
| if splitl[0][-7:]==".ebuild": |
| eadded.append(splitl[0][:-7]) |
| if vcs == "svn": |
| if l[:1] == "?": |
| continue |
| if l[:7] == ' >': |
| # tree conflict, new in subversion 1.6 |
| continue |
| l = l.split()[-1] |
| if l[-7:] == ".ebuild": |
| eadded.append(os.path.basename(l[:-7])) |
| if vcs == "bzr": |
| if l[1:2] == "?": |
| continue |
| l = l.split()[-1] |
| if l[-7:] == ".ebuild": |
| eadded.append(os.path.basename(l[:-7])) |
| if vcs == "svn": |
| myf = os.popen("svn status " + checkdir) |
| myl=myf.readlines() |
| myf.close() |
| for l in myl: |
| if l[0] == "A": |
| l = l.rstrip().split(' ')[-1] |
| if l[-7:] == ".ebuild": |
| eadded.append(os.path.basename(l[:-7])) |
| except IOError: |
| if vcs == "cvs": |
| stats["CVS/Entries.IO_error"] += 1 |
| fails["CVS/Entries.IO_error"].append(checkdir+"/CVS/Entries") |
| else: |
| raise |
| continue |
| |
| mf = repoman_settings.repositories.get_repo_for_location( |
| os.path.dirname(os.path.dirname(checkdir))) |
| mf = mf.load_manifest(checkdir, repoman_settings["DISTDIR"]) |
| mydigests=mf.getTypeDigests("DIST") |
| |
| fetchlist_dict = portage.FetchlistDict(checkdir, repoman_settings, portdb) |
| myfiles_all = [] |
| src_uri_error = False |
| for mykey in fetchlist_dict: |
| try: |
| myfiles_all.extend(fetchlist_dict[mykey]) |
| except portage.exception.InvalidDependString as e: |
| src_uri_error = True |
| try: |
| portdb.aux_get(mykey, ["SRC_URI"]) |
| except KeyError: |
| # This will be reported as an "ebuild.syntax" error. |
| pass |
| else: |
| stats["SRC_URI.syntax"] = stats["SRC_URI.syntax"] + 1 |
| fails["SRC_URI.syntax"].append( |
| "%s.ebuild SRC_URI: %s" % (mykey, e)) |
| del fetchlist_dict |
| if not src_uri_error: |
| # This test can produce false positives if SRC_URI could not |
| # be parsed for one or more ebuilds. There's no point in |
| # producing a false error here since the root cause will |
| # produce a valid error elsewhere, such as "SRC_URI.syntax" |
| # or "ebuild.sytax". |
| myfiles_all = set(myfiles_all) |
| for entry in mydigests: |
| if entry not in myfiles_all: |
| stats["digest.unused"] += 1 |
| fails["digest.unused"].append(checkdir+"::"+entry) |
| for entry in myfiles_all: |
| if entry not in mydigests: |
| stats["digest.missing"] += 1 |
| fails["digest.missing"].append(checkdir+"::"+entry) |
| del myfiles_all |
| |
| if os.path.exists(checkdir+"/files"): |
| filesdirlist=os.listdir(checkdir+"/files") |
| |
| # recurse through files directory |
| # use filesdirlist as a stack, appending directories as needed so people can't hide > 20k files in a subdirectory. |
| while filesdirlist: |
| y = filesdirlist.pop(0) |
| relative_path = os.path.join(x, "files", y) |
| full_path = os.path.join(repodir, relative_path) |
| try: |
| mystat = os.stat(full_path) |
| except OSError as oe: |
| if oe.errno == 2: |
| # don't worry about it. it likely was removed via fix above. |
| continue |
| else: |
| raise oe |
| if S_ISDIR(mystat.st_mode): |
| # !!! VCS "portability" alert! Need some function isVcsDir() or alike !!! |
| if y == "CVS" or y == ".svn": |
| continue |
| for z in os.listdir(checkdir+"/files/"+y): |
| if z == "CVS" or z == ".svn": |
| continue |
| filesdirlist.append(y+"/"+z) |
| # Current policy is no files over 20 KiB, these are the checks. File size between |
| # 20 KiB and 60 KiB causes a warning, while file size over 60 KiB causes an error. |
| elif mystat.st_size > 61440: |
| stats["file.size.fatal"] += 1 |
| fails["file.size.fatal"].append("("+ str(mystat.st_size//1024) + " KiB) "+x+"/files/"+y) |
| elif mystat.st_size > 20480: |
| stats["file.size"] += 1 |
| fails["file.size"].append("("+ str(mystat.st_size//1024) + " KiB) "+x+"/files/"+y) |
| |
| m = disallowed_filename_chars_re.search( |
| os.path.basename(y.rstrip(os.sep))) |
| if m is not None: |
| stats["file.name"] += 1 |
| fails["file.name"].append("%s/files/%s: char '%s'" % \ |
| (checkdir, y, m.group(0))) |
| |
| if desktop_file_validate and desktop_pattern.match(y): |
| status, cmd_output = subprocess_getstatusoutput( |
| "'%s' '%s'" % (desktop_file_validate, full_path)) |
| if os.WIFEXITED(status) and os.WEXITSTATUS(status) != os.EX_OK: |
| # Note: in the future we may want to grab the |
| # warnings in addition to the errors. We're |
| # just doing errors now since we don't want |
| # to generate too much noise at first. |
| error_re = re.compile(r'.*\s*error:\s*(.*)') |
| for line in cmd_output.splitlines(): |
| error_match = error_re.match(line) |
| if error_match is None: |
| continue |
| stats["desktop.invalid"] += 1 |
| fails["desktop.invalid"].append( |
| relative_path + ': %s' % error_match.group(1)) |
| |
| del mydigests |
| |
| if check_changelog and "ChangeLog" not in checkdirlist: |
| stats["changelog.missing"]+=1 |
| fails["changelog.missing"].append(x+"/ChangeLog") |
| |
| musedict = {} |
| #metadata.xml file check |
| if "metadata.xml" not in checkdirlist: |
| stats["metadata.missing"]+=1 |
| fails["metadata.missing"].append(x+"/metadata.xml") |
| #metadata.xml parse check |
| else: |
| metadata_bad = False |
| |
| # read metadata.xml into memory |
| try: |
| _metadata_xml = xml.etree.ElementTree.parse( |
| os.path.join(checkdir, "metadata.xml"), |
| parser=xml.etree.ElementTree.XMLParser( |
| target=_MetadataTreeBuilder())) |
| except (ExpatError, SyntaxError, EnvironmentError) as e: |
| metadata_bad = True |
| stats["metadata.bad"] += 1 |
| fails["metadata.bad"].append("%s/metadata.xml: %s" % (x, e)) |
| del e |
| else: |
| # load USE flags from metadata.xml |
| try: |
| musedict = utilities.parse_metadata_use(_metadata_xml) |
| except portage.exception.ParseError as e: |
| metadata_bad = True |
| stats["metadata.bad"] += 1 |
| fails["metadata.bad"].append("%s/metadata.xml: %s" % (x, e)) |
| |
| # Run other metadata.xml checkers |
| try: |
| utilities.check_metadata(_metadata_xml, herd_base) |
| except (utilities.UnknownHerdsError, ) as e: |
| metadata_bad = True |
| stats["metadata.bad"] += 1 |
| fails["metadata.bad"].append("%s/metadata.xml: %s" % (x, e)) |
| del e |
| |
| #Only carry out if in package directory or check forced |
| if xmllint_capable and not metadata_bad: |
| # xmlint can produce garbage output even on success, so only dump |
| # the ouput when it fails. |
| st, out = subprocess_getstatusoutput( |
| "xmllint --nonet --noout --dtdvalid '%s' '%s'" % \ |
| (metadata_dtd, os.path.join(checkdir, "metadata.xml"))) |
| if st != os.EX_OK: |
| print(red("!!!") + " metadata.xml is invalid:") |
| for z in out.splitlines(): |
| print(red("!!! ")+z) |
| stats["metadata.bad"]+=1 |
| fails["metadata.bad"].append(x+"/metadata.xml") |
| |
| del metadata_bad |
| muselist = frozenset(musedict) |
| |
| changelog_path = os.path.join(checkdir_relative, "ChangeLog") |
| changelog_modified = changelog_path in modified_changelogs |
| |
| # detect unused local USE-descriptions |
| used_useflags = set() |
| |
| for y in ebuildlist: |
| relative_path = os.path.join(x, y + ".ebuild") |
| full_path = os.path.join(repodir, relative_path) |
| ebuild_path = y + ".ebuild" |
| if repolevel < 3: |
| ebuild_path = os.path.join(pkgdir, ebuild_path) |
| if repolevel < 2: |
| ebuild_path = os.path.join(catdir, ebuild_path) |
| ebuild_path = os.path.join(".", ebuild_path) |
| if check_changelog and not changelog_modified \ |
| and ebuild_path in new_ebuilds: |
| stats['changelog.ebuildadded'] += 1 |
| fails['changelog.ebuildadded'].append(relative_path) |
| |
| if vcs in ("cvs", "svn", "bzr") and check_ebuild_notadded and y not in eadded: |
| #ebuild not added to vcs |
| stats["ebuild.notadded"]=stats["ebuild.notadded"]+1 |
| fails["ebuild.notadded"].append(x+"/"+y+".ebuild") |
| myesplit=portage.pkgsplit(y) |
| if myesplit is None or myesplit[0] != x.split("/")[-1] \ |
| or pv_toolong_re.search(myesplit[1]) \ |
| or pv_toolong_re.search(myesplit[2]): |
| stats["ebuild.invalidname"]=stats["ebuild.invalidname"]+1 |
| fails["ebuild.invalidname"].append(x+"/"+y+".ebuild") |
| continue |
# --- per-ebuild QA checks (interior of the per-package / per-ebuild scan loop) ---
# Ebuild file name must match the package directory name.
elif myesplit[0]!=pkgdir:
	print(pkgdir,myesplit[0])
	stats["ebuild.namenomatch"]=stats["ebuild.namenomatch"]+1
	fails["ebuild.namenomatch"].append(x+"/"+y+".ebuild")
	continue

pkg = pkgs[y]

# Metadata could not be sourced/parsed: record every error for this
# ebuild and skip the remaining checks, which depend on valid metadata.
if pkg.invalid:
	allvalid = False
	for k, msgs in pkg.invalid.items():
		for msg in msgs:
			stats[k] = stats[k] + 1
			fails[k].append("%s %s" % (relative_path, msg))
	continue

myaux = pkg.metadata
eapi = myaux["EAPI"]
inherited = pkg.inherited
# Non-empty when the ebuild inherits a live-VCS eclass (git, cvs, svn, ...).
live_ebuild = live_eclasses.intersection(inherited)

# All string metadata values must be pure ASCII.
for k, v in myaux.items():
	if not isinstance(v, basestring):
		continue
	m = non_ascii_re.search(v)
	if m is not None:
		stats["variable.invalidchar"] += 1
		fails["variable.invalidchar"].append(
			("%s: %s variable contains non-ASCII " + \
			"character at position %s") % \
			(relative_path, k, m.start() + 1))

if not src_uri_error:
	# Check that URIs don't reference a server from thirdpartymirrors.
	for uri in portage.dep.use_reduce( \
		myaux["SRC_URI"], matchall=True, is_src_uri=True, eapi=eapi, flat=True):
		contains_mirror = False
		for mirror in thirdpartymirrors:
			if uri.startswith(mirror):
				contains_mirror = True
				break
		if not contains_mirror:
			continue

		# Such URIs should use the mirror:// syntax instead.
		stats["SRC_URI.mirror"] += 1
		fails["SRC_URI.mirror"].append(
			"%s: '%s' found in thirdpartymirrors" % \
			(relative_path, mirror))

# Old-style (PROVIDE-based) virtuals are deprecated.
if myaux.get("PROVIDE"):
	stats["virtual.oldstyle"]+=1
	fails["virtual.oldstyle"].append(relative_path)

# Required variables must not be empty, with exceptions for virtuals
# (no HOMEPAGE/LICENSE wanted) and live ebuilds (KEYWORDS may be empty).
for pos, missing_var in enumerate(missingvars):
	if not myaux.get(missing_var):
		if catdir == "virtual" and \
			missing_var in ("HOMEPAGE", "LICENSE"):
			continue
		if live_ebuild and missing_var == "KEYWORDS":
			continue
		myqakey=missingvars[pos]+".missing"
		stats[myqakey]=stats[myqakey]+1
		fails[myqakey].append(x+"/"+y+".ebuild")

# Conversely, virtuals must NOT define HOMEPAGE or LICENSE.
if catdir == "virtual":
	for var in ("HOMEPAGE", "LICENSE"):
		if myaux.get(var):
			myqakey = var + ".virtual"
			stats[myqakey] = stats[myqakey] + 1
			fails[myqakey].append(relative_path)

# 14 is the length of DESCRIPTION=""
if len(myaux['DESCRIPTION']) > max_desc_len:
	stats['DESCRIPTION.toolong'] += 1
	fails['DESCRIPTION.toolong'].append(
		"%s: DESCRIPTION is %d characters (max %d)" % \
		(relative_path, len(myaux['DESCRIPTION']), max_desc_len))

keywords = myaux["KEYWORDS"].split()
# Stable keywords carry neither the "~" (testing) nor "-" (disabled) prefix.
stable_keywords = []
for keyword in keywords:
	if not keyword.startswith("~") and \
		not keyword.startswith("-"):
		stable_keywords.append(keyword)
if stable_keywords:
	# Newly added ebuilds must not be committed straight to stable.
	if ebuild_path in new_ebuilds:
		stable_keywords.sort()
		stats["KEYWORDS.stable"] += 1
		fails["KEYWORDS.stable"].append(
			x + "/" + y + ".ebuild added with stable keywords: %s" % \
			" ".join(stable_keywords))

ebuild_archs = set(kw.lstrip("~") for kw in keywords \
	if not kw.startswith("-"))

# Detect arch keywords dropped relative to an older ebuild sharing the
# same SLOT (live ebuilds are exempt).
previous_keywords = slot_keywords.get(myaux["SLOT"])
if previous_keywords is None:
	slot_keywords[myaux["SLOT"]] = set()
elif ebuild_archs and not live_ebuild:
	dropped_keywords = previous_keywords.difference(ebuild_archs)
	if dropped_keywords:
		stats["KEYWORDS.dropped"] += 1
		fails["KEYWORDS.dropped"].append(
			relative_path + ": %s" % \
			" ".join(sorted(dropped_keywords)))

slot_keywords[myaux["SLOT"]].update(ebuild_archs)

# KEYWORDS="-*" is a stupid replacement for package.mask and screws general KEYWORDS semantics
if "-*" in keywords:
	haskeyword = False
	for kw in keywords:
		if kw[0] == "~":
			kw = kw[1:]
		if kw in kwlist:
			haskeyword = True
	if not haskeyword:
		stats["KEYWORDS.stupid"] += 1
		fails["KEYWORDS.stupid"].append(x+"/"+y+".ebuild")

"""
Ebuilds that inherit a "Live" eclass (darcs,subversion,git,cvs,etc..) should
not be allowed to be marked stable
"""
if live_ebuild:
	bad_stable_keywords = []
	for keyword in keywords:
		if not keyword.startswith("~") and \
			not keyword.startswith("-"):
			bad_stable_keywords.append(keyword)
	del keyword
	if bad_stable_keywords:
		stats["LIVEVCS.stable"] += 1
		fails["LIVEVCS.stable"].append(
			x + "/" + y + ".ebuild with stable keywords:%s " % \
			bad_stable_keywords)
	del bad_stable_keywords

	# A keyworded live ebuild should be globally masked somewhere.
	if keywords and not has_global_mask(pkg):
		stats["LIVEVCS.unmasked"] += 1
		fails["LIVEVCS.unmasked"].append(relative_path)

# Build [display-keyword, arch, ACCEPT_KEYWORDS-group] triples used by
# the per-profile dependency checks further down.
if options.ignore_arches:
	arches = [[repoman_settings["ARCH"], repoman_settings["ARCH"],
		repoman_settings["ACCEPT_KEYWORDS"].split()]]
else:
	arches=[]
	for keyword in myaux["KEYWORDS"].split():
		if (keyword[0]=="-"):
			continue
		elif (keyword[0]=="~"):
			arches.append([keyword, keyword[1:], [keyword[1:], keyword]])
		else:
			arches.append([keyword, keyword, [keyword]])
	if not arches:
		# Use an empty profile for checking dependencies of
		# packages that have empty KEYWORDS.
		arches.append(['**', '**', ['**']])
| |
# --- syntax checks for the dependency-like metadata variables ---
unknown_pkgs = set()
baddepsyntax = False
badlicsyntax = False
badprovsyntax = False
catpkg = catdir+"/"+y

inherited_java_eclass = "java-pkg-2" in inherited or \
	"java-pkg-opt-2" in inherited
inherited_wxwidgets_eclass = "wxwidgets" in inherited
operator_tokens = set(["||", "(", ")"])
# type_list[i] names the variable that produced the error badsyntax[i].
type_list, badsyntax = [], []
for mytype in ("DEPEND", "RDEPEND", "PDEPEND",
	"LICENSE", "PROPERTIES", "PROVIDE"):
	mydepstr = myaux[mytype]

	# Only the *DEPEND variables contain package atoms; the others are
	# parsed as plain tokens.
	token_class = None
	if mytype in ("DEPEND", "RDEPEND", "PDEPEND"):
		token_class=portage.dep.Atom

	try:
		atoms = portage.dep.use_reduce(mydepstr, matchall=1, flat=True, \
			is_valid_flag=pkg.iuse.is_valid_flag, token_class=token_class)
	except portage.exception.InvalidDependString as e:
		atoms = None
		badsyntax.append(str(e))

	if atoms and mytype in ("DEPEND", "RDEPEND", "PDEPEND"):
		# Runtime deps guarded by USE=test are suspect: test is normally
		# only relevant at build time.
		if mytype in ("RDEPEND", "PDEPEND") and \
			"test?" in mydepstr.split():
			stats[mytype + '.suspect'] += 1
			fails[mytype + '.suspect'].append(relative_path + \
				": 'test?' USE conditional in %s" % mytype)

		for atom in atoms:
			if atom == "||":
				continue

			# virtual/ atoms may be satisfied by old-style PROVIDE
			# virtuals which never match xmatch, so skip them here.
			if not portdb.xmatch("match-all", atom) and \
				not atom.cp.startswith("virtual/"):
				unknown_pkgs.add((mytype, atom.unevaluated_atom))

			is_blocker = atom.blocker

			if mytype == "DEPEND" and \
				not is_blocker and \
				not inherited_java_eclass and \
				atom.cp == "virtual/jdk":
				stats['java.eclassesnotused'] += 1
				fails['java.eclassesnotused'].append(relative_path)
			elif mytype == "DEPEND" and \
				not is_blocker and \
				not inherited_wxwidgets_eclass and \
				atom.cp == "x11-libs/wxGTK":
				stats['wxwidgets.eclassnotused'] += 1
				fails['wxwidgets.eclassnotused'].append(
					relative_path + ": DEPENDs on x11-libs/wxGTK"
					" without inheriting wxwidgets.eclass")
			elif mytype in ("PDEPEND", "RDEPEND"):
				if not is_blocker and \
					atom.cp in suspect_rdepend:
					stats[mytype + '.suspect'] += 1
					fails[mytype + '.suspect'].append(
						relative_path + ": '%s'" % atom)

			# "~cat/pkg-1.2-r1" is pointless: the ~ operator already
			# matches any revision, so a non-zero revision is a typo.
			if atom.operator == "~" and \
				portage.versions.catpkgsplit(atom.cpv)[3] != "r0":
				stats[mytype + '.badtilde'] += 1
				fails[mytype + '.badtilde'].append(
					(relative_path + ": %s uses the ~ operator"
					" with a non-zero revision:" + \
					" '%s'") % (mytype, atom))

	# Pad type_list so it stays parallel with badsyntax.
	type_list.extend([mytype] * (len(badsyntax) - len(type_list)))

for m,b in zip(type_list, badsyntax):
	stats[m+".syntax"] += 1
	fails[m+".syntax"].append(catpkg+".ebuild "+m+": "+b)

badlicsyntax = len([z for z in type_list if z == "LICENSE"])
badprovsyntax = len([z for z in type_list if z == "PROVIDE"])
# True when anything other than LICENSE/PROVIDE had a syntax error.
baddepsyntax = len(type_list) != badlicsyntax + badprovsyntax
badlicsyntax = badlicsyntax > 0
badprovsyntax = badprovsyntax > 0
| |
# uselist checks - global
myuse = []
default_use = []
for myflag in myaux["IUSE"].split():
	flag_name = myflag.lstrip("+-")
	used_useflags.add(flag_name)
	if myflag != flag_name:
		default_use.append(myflag)
	if flag_name not in uselist:
		myuse.append(flag_name)

# uselist checks - metadata
# (iterate backwards so deletions don't disturb pending indices)
for mypos in range(len(myuse)-1,-1,-1):
	if myuse[mypos] and (myuse[mypos] in muselist):
		del myuse[mypos]

# IUSE defaults ("+flag"/"-flag") require a newer EAPI.
if default_use and not eapi_has_iuse_defaults(eapi):
	for myflag in default_use:
		stats['EAPI.incompatible'] += 1
		fails['EAPI.incompatible'].append(
			(relative_path + ": IUSE defaults" + \
			" not supported with EAPI='%s':" + \
			" '%s'") % (eapi, myflag))

# Whatever survives in myuse is described neither globally nor locally.
for mypos in range(len(myuse)):
	stats["IUSE.invalid"]=stats["IUSE.invalid"]+1
	fails["IUSE.invalid"].append(x+"/"+y+".ebuild: %s" % myuse[mypos])

# license checks
if not badlicsyntax:
	# Parse the LICENSE variable, remove USE conditions and
	# flatten it.
	licenses = portage.dep.use_reduce(myaux["LICENSE"], matchall=1, flat=True)
	# Check each entry to ensure that it exists in PORTDIR's
	# license directory.
	for lic in licenses:
		# Need to check for "||" manually as no portage
		# function will remove it without removing values.
		if lic not in liclist and lic != "||":
			stats["LICENSE.invalid"]=stats["LICENSE.invalid"]+1
			fails["LICENSE.invalid"].append(x+"/"+y+".ebuild: %s" % lic)

#keyword checks
myuse = myaux["KEYWORDS"].split()
for mykey in myuse:
	myskey=mykey[:]
	# Strip the "-"/"~" prefixes to get the bare arch name.
	if myskey[0]=="-":
		myskey=myskey[1:]
	if myskey[0]=="~":
		myskey=myskey[1:]
	if mykey!="-*":
		if myskey not in kwlist:
			stats["KEYWORDS.invalid"] += 1
			fails["KEYWORDS.invalid"].append(x+"/"+y+".ebuild: %s" % mykey)
		elif myskey not in profiles:
			stats["KEYWORDS.invalid"] += 1
			fails["KEYWORDS.invalid"].append(x+"/"+y+".ebuild: %s (profile invalid)" % mykey)

#restrict checks
myrestrict = None
try:
	myrestrict = portage.dep.use_reduce(myaux["RESTRICT"], matchall=1, flat=True)
except portage.exception.InvalidDependString as e:
	stats["RESTRICT.syntax"] = stats["RESTRICT.syntax"] + 1
	fails["RESTRICT.syntax"].append(
		"%s: RESTRICT: %s" % (relative_path, e))
	del e
if myrestrict:
	myrestrict = set(myrestrict)
	mybadrestrict = myrestrict.difference(valid_restrict)
	if mybadrestrict:
		stats["RESTRICT.invalid"] += len(mybadrestrict)
		for mybad in mybadrestrict:
			fails["RESTRICT.invalid"].append(x+"/"+y+".ebuild: %s" % mybad)
#REQUIRED_USE check
required_use = myaux["REQUIRED_USE"]
if required_use:
	# REQUIRED_USE needs a newer EAPI.
	if not eapi_has_required_use(eapi):
		stats['EAPI.incompatible'] += 1
		fails['EAPI.incompatible'].append(
			relative_path + ": REQUIRED_USE" + \
			" not supported with EAPI='%s'" % (eapi,))
	try:
		portage.dep.check_required_use(required_use, (),
			pkg.iuse.is_valid_flag)
	except portage.exception.InvalidDependString as e:
		stats["REQUIRED_USE.syntax"] = stats["REQUIRED_USE.syntax"] + 1
		fails["REQUIRED_USE.syntax"].append(
			"%s: REQUIRED_USE: %s" % (relative_path, e))
		del e
| |
# Syntax Checks
relative_path = os.path.join(x, y + ".ebuild")
full_path = os.path.join(repodir, relative_path)
# Without VCS-preserved mtimes, only freshly added/modified ebuilds have
# trustworthy mtimes; clear the rest so stale values aren't used.
if not vcs_preserves_mtime:
	if ebuild_path not in new_ebuilds and \
		ebuild_path not in modified_ebuilds:
		pkg.mtime = None
try:
	# All ebuilds should have utf_8 encoding.
	f = io.open(_unicode_encode(full_path,
		encoding=_encodings['fs'], errors='strict'),
		mode='r', encoding=_encodings['repo.content'])
	try:
		for check_name, e in run_checks(f, pkg):
			stats[check_name] += 1
			fails[check_name].append(relative_path + ': %s' % e)
	finally:
		f.close()
except UnicodeDecodeError:
	# A file.UTF8 failure will have already been recorded above.
	pass

if options.force:
	# The dep_check() calls are the most expensive QA test. If --force
	# is enabled, there's no point in wasting time on these since the
	# user is intent on forcing the commit anyway.
	continue

for keyword,arch,groups in arches:

	if arch not in profiles:
		# A missing profile will create an error further down
		# during the KEYWORDS verification.
		continue

	for prof in profiles[arch]:

		if prof.status not in ("stable", "dev") or \
			prof.status == "dev" and not options.include_dev:
			continue

		# Per-profile config objects are expensive to build; cache them.
		dep_settings = arch_caches.get(prof.sub_path)
		if dep_settings is None:
			dep_settings = portage.config(
				config_profile_path=prof.abs_path,
				config_incrementals=repoman_incrementals,
				config_root=config_root,
				local_config=False,
				_unmatched_removal=options.unmatched_removal,
				env=env)
			dep_settings.categories = repoman_settings.categories
			if options.without_mask:
				dep_settings._mask_manager = \
					copy.deepcopy(dep_settings._mask_manager)
				dep_settings._mask_manager._pmaskdict.clear()
			arch_caches[prof.sub_path] = dep_settings

		# xmatch results depend on (profile, ACCEPT_KEYWORDS), so keep
		# one xcache per such pair.
		xmatch_cache_key = (prof.sub_path, tuple(groups))
		xcache = arch_xmatch_caches.get(xmatch_cache_key)
		if xcache is None:
			# melt/freeze resets portdb's internal xcache.
			portdb.melt()
			portdb.freeze()
			xcache = portdb.xcache
			xcache.update(shared_xmatch_caches)
			arch_xmatch_caches[xmatch_cache_key] = xcache

		trees[root]["porttree"].settings = dep_settings
		portdb.settings = dep_settings
		portdb.xcache = xcache
		# for package.use.mask support inside dep_check
		dep_settings.setcpv(pkg)
		dep_settings["ACCEPT_KEYWORDS"] = " ".join(groups)
		# just in case, prevent config.reset() from nuking these.
		dep_settings.backup_changes("ACCEPT_KEYWORDS")

		if not baddepsyntax:
			ismasked = not ebuild_archs or \
				pkg.cpv not in portdb.xmatch("match-visible", pkg.cp)
			if ismasked:
				if not have_pmasked:
					have_pmasked = bool(dep_settings._getMaskAtom(
						pkg.cpv, pkg.metadata))
				if options.ignore_masked:
					continue
				#we are testing deps for a masked package; give it some lee-way
				suffix="masked"
				matchmode = "minimum-all"
			else:
				suffix=""
				matchmode = "minimum-visible"

			if not have_dev_keywords:
				have_dev_keywords = \
					bool(dev_keywords.intersection(keywords))

			if prof.status == "dev":
				suffix=suffix+"indev"

			for mytype,mypos in [["DEPEND",len(missingvars)],["RDEPEND",len(missingvars)+1],["PDEPEND",len(missingvars)+2]]:

				mykey=mytype+".bad"+suffix
				myvalue = myaux[mytype]
				if not myvalue:
					continue

				success, atoms = portage.dep_check(myvalue, portdb,
					dep_settings, use="all", mode=matchmode,
					trees=trees)

				if success:
					if atoms:

						# Don't bother with dependency.unknown for
						# cases in which *DEPEND.bad is triggered.
						for atom in atoms:
							# dep_check returns all blockers and they
							# aren't counted for *DEPEND.bad, so we
							# ignore them here.
							if not atom.blocker:
								unknown_pkgs.discard(
									(mytype, atom.unevaluated_atom))

						if not prof.sub_path:
							# old-style virtuals currently aren't
							# resolvable with empty profile, since
							# 'virtuals' mappings are unavailable
							# (it would be expensive to search
							# for PROVIDE in all ebuilds)
							atoms = [atom for atom in atoms if not \
								(atom.cp.startswith('virtual/') and \
								not portdb.cp_list(atom.cp))]

						#we have some unsolvable deps
						#remove ! deps, which always show up as unsatisfiable
						atoms = [str(atom.unevaluated_atom) \
							for atom in atoms if not atom.blocker]

						#if we emptied out our list, continue:
						if not atoms:
							continue
						stats[mykey]=stats[mykey]+1
						fails[mykey].append("%s: %s(%s) %s" % \
							(relative_path, keyword,
							prof, repr(atoms)))
				else:
					stats[mykey]=stats[mykey]+1
					fails[mykey].append("%s: %s(%s) %s" % \
						(relative_path, keyword,
						prof, repr(atoms)))

# Report atoms that matched nothing anywhere, grouped by variable.
if not baddepsyntax and unknown_pkgs:
	type_map = {}
	for mytype, atom in unknown_pkgs:
		type_map.setdefault(mytype, set()).add(atom)
	for mytype, atoms in type_map.items():
		stats["dependency.unknown"] += 1
		fails["dependency.unknown"].append("%s: %s: %s" %
			(relative_path, mytype, ", ".join(sorted(atoms))))
| |
# check if there are unused local USE-descriptions in metadata.xml
# (unless there are any invalids, to avoid noise)
if allvalid:
	for myflag in muselist.difference(used_useflags):
		stats["metadata.warning"] += 1
		fails["metadata.warning"].append(
			"%s/metadata.xml: unused local USE-description: '%s'" % \
			(x, myflag))
| |
if options.if_modified == "y" and len(effective_scanlist) < 1:
	logging.warn("--if-modified is enabled, but no modified packages were found!")

# In manifest mode we are done once the scan has regenerated Manifests.
if options.mode == "manifest":
	sys.exit(dofail)

#dofail will be set to 1 if we have failed in at least one non-warning category
dofail=0
#dowarn will be set to 1 if we tripped any warnings
dowarn=0
#dofull will be set if we should print a "repoman full" informational message
dofull = options.mode != 'full'

for x in qacats:
	if not stats[x]:
		continue
	dowarn = 1
	if x not in qawarnings:
		dofail = 1

if dofail or \
	(dowarn and not (options.quiet or options.mode == "scan")):
	dofull = 0

# Save QA output so that it can be conveniently displayed
# in $EDITOR while the user creates a commit message.
# Otherwise, the user would not be able to see this output
# once the editor has taken over the screen.
qa_output = io.StringIO()
style_file = ConsoleStyleFile(sys.stdout)
if options.mode == 'commit' and \
	(not commitmessage or not commitmessage.strip()):
	style_file.write_listener = qa_output
console_writer = StyleWriter(file=style_file, maxcol=9999)
console_writer.style_listener = style_file.new_styles

f = formatter.AbstractFormatter(console_writer)

utilities.format_qa_output(f, stats, fails, dofull, dofail, options, qawarnings)

style_file.flush()
del console_writer, f, style_file
qa_output = qa_output.getvalue()
qa_output = qa_output.splitlines(True)
| qa_output = qa_output.splitlines(True) |
| |
def grouplist(mylist, seperator="/"):
	"""Group a list of separator-joined paths by their leading component.

	A leading "." component is dropped before grouping, so "./a/b" is
	treated the same as "a/b".

	From: ["blah/foo", "spork/spatula", "blah/weee/splat"]
	To:   {"blah": ["foo", "weee/splat"], "spork": ["spatula"]}
	"""
	grouped = {}
	for item in mylist:
		parts = item.split(seperator)
		# Normalize away a leading "." path component.
		if parts[0] == ".":
			del parts[0]
		grouped.setdefault(parts[0], []).append(seperator.join(parts[1:]))
	return grouped
| |
suggest_ignore_masked = False
suggest_include_dev = False

if have_pmasked and not (options.without_mask or options.ignore_masked):
	suggest_ignore_masked = True
if have_dev_keywords and not options.include_dev:
	suggest_include_dev = True

# Hint at options that would have widened the dependency checks.
if suggest_ignore_masked or suggest_include_dev:
	print()
	if suggest_ignore_masked:
		print(bold("Note: use --without-mask to check " + \
			"KEYWORDS on dependencies of masked packages"))

	if suggest_include_dev:
		print(bold("Note: use --include-dev (-d) to check " + \
			"dependencies for 'dev' profiles"))
	print()

# Non-commit modes just print a verdict (exiting non-zero on failure);
# commit mode may proceed below, possibly forced.
if options.mode != 'commit':
	if dofull:
		print(bold("Note: type \"repoman full\" for a complete listing."))
	if dowarn and not dofail:
		print(green("RepoMan sez:"),"\"You're only giving me a partial QA payment?\n I'll take it this time, but I'm not happy.\"")
	elif not dofail:
		print(green("RepoMan sez:"),"\"If everyone were like you, I'd be out of business!\"")
	elif dofail:
		print(bad("Please fix these important QA issues first."))
		print(green("RepoMan sez:"),"\"Make your QA payment on time and you'll never see the likes of me.\"\n")
		sys.exit(1)
else:
	if dofail and can_force and options.force and not options.pretend:
		print(green("RepoMan sez:") + \
			" \"You want to commit even with these QA issues?\n" + \
			" I'll take it this time, but I'm not happy.\"\n")
	elif dofail:
		if options.force and not can_force:
			print(bad("The --force option has been disabled due to extraordinary issues."))
		print(bad("Please fix these important QA issues first."))
		print(green("RepoMan sez:"),"\"Make your QA payment on time and you'll never see the likes of me.\"\n")
		sys.exit(1)

if options.pretend:
	print(green("RepoMan sez:"), "\"So, you want to play it safe. Good call.\"\n")
| |
# Collect files present locally but unknown to the VCS; abort the commit
# if any remain after auto-adding Manifests.
myunadded = []
if vcs == "cvs":
	try:
		myvcstree=portage.cvstree.getentries("./",recursive=1)
		myunadded=portage.cvstree.findunadded(myvcstree,recursive=1,basedir="./")
	except SystemExit as e:
		raise # TODO propagate this
	except:
		err("Error retrieving CVS tree; exiting.")
if vcs == "svn":
	try:
		with os.popen("svn status --no-ignore") as f:
			svnstatus = f.readlines()
		# "?" = untracked, "I" = ignored.
		myunadded = [ "./"+elem.rstrip().split()[1] for elem in svnstatus if elem.startswith("?") or elem.startswith("I") ]
	except SystemExit as e:
		raise # TODO propagate this
	except:
		err("Error retrieving SVN info; exiting.")
if vcs == "git":
	# get list of files not under version control or missing
	myf = os.popen("git ls-files --others")
	myunadded = [ "./" + elem[:-1] for elem in myf ]
	myf.close()
if vcs == "bzr":
	try:
		with os.popen("bzr status -S .") as f:
			bzrstatus = f.readlines()
		myunadded = [ "./"+elem.rstrip().split()[1].split('/')[-1:][0] for elem in bzrstatus if elem.startswith("?") or elem[0:2] == " D" ]
	except SystemExit as e:
		raise # TODO propagate this
	except:
		err("Error retrieving bzr info; exiting.")
if vcs == "hg":
	with os.popen("hg status --no-status --unknown .") as f:
		myunadded = f.readlines()
	myunadded = ["./" + elem.rstrip() for elem in myunadded]

	# Mercurial doesn't handle manually deleted files as removed from
	# the repository, so the user need to remove them before commit,
	# using "hg remove [FILES]"
	with os.popen("hg status --no-status --deleted .") as f:
		mydeleted = f.readlines()
	mydeleted = ["./" + elem.rstrip() for elem in mydeleted]


# Manifests are expected to be regenerated; queue them for "vcs add".
myautoadd=[]
if myunadded:
	for x in range(len(myunadded)-1,-1,-1):
		xs=myunadded[x].split("/")
		if xs[-1]=="files":
			print("!!! files dir is not added! Please correct this.")
			sys.exit(-1)
		elif xs[-1]=="Manifest":
			# It's a manifest... auto add
			myautoadd+=[myunadded[x]]
			del myunadded[x]

if myunadded:
	print(red("!!! The following files are in your local tree but are not added to the master"))
	print(red("!!! tree. Please remove them from the local tree or add them to the master tree."))
	for x in myunadded:
		print(" ",x)
	print()
	print()
	sys.exit(1)

if vcs == "hg" and mydeleted:
	print(red("!!! The following files are removed manually from your local tree but are not"))
	print(red("!!! removed from the repository. Please remove them, using \"hg remove [FILES]\"."))
	for x in mydeleted:
		print(" ",x)
	print()
	print()
	sys.exit(1)
| |
| if vcs == "cvs": |
| mycvstree = cvstree.getentries("./", recursive=1) |
| mychanged = cvstree.findchanged(mycvstree, recursive=1, basedir="./") |
| mynew = cvstree.findnew(mycvstree, recursive=1, basedir="./") |
| myremoved=portage.cvstree.findremoved(mycvstree,recursive=1,basedir="./") |
| bin_blob_pattern = re.compile("^-kb$") |
| no_expansion = set(portage.cvstree.findoption(mycvstree, bin_blob_pattern, |
| recursive=1, basedir="./")) |
| |
| |
| if vcs == "svn": |
| with os.popen("svn status") as f: |
| svnstatus = f.readlines() |
| mychanged = [ "./" + elem.split()[-1:][0] for elem in svnstatus if (elem[:1] in "MR" or elem[1:2] in "M")] |
| mynew = [ "./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("A")] |
| myremoved = [ "./" + elem.split()[-1:][0] for elem in svnstatus if elem.startswith("D")] |
| |
| # Subversion expands keywords specified in svn:keywords properties. |
| with os.popen("svn propget -R svn:keywords") as f: |
| props = f.readlines() |
| expansion = dict(("./" + prop.split(" - ")[0], prop.split(" - ")[1].split()) \ |
| for prop in props if " - " in prop) |
| |
| elif vcs == "git": |
| with os.popen("git diff-index --name-only " |
| "--relative --diff-filter=M HEAD") as f: |
| mychanged = f.readlines() |
| mychanged = ["./" + elem[:-1] for elem in mychanged] |
| |
| with os.popen("git diff-index --name-only " |
| "--relative --diff-filter=A HEAD") as f: |
| mynew = f.readlines() |
| mynew = ["./" + elem[:-1] for elem in mynew] |
| |
| with os.popen("git diff-index --name-only " |
| "--relative --diff-filter=D HEAD") as f: |
| myremoved = f.readlines() |
| myremoved = ["./" + elem[:-1] for elem in myremoved] |
| |
| if vcs == "bzr": |
| with os.popen("bzr status -S .") as f: |
| bzrstatus = f.readlines() |
| mychanged = [ "./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and elem[1:2] == "M" ] |
| mynew = [ "./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem and ( elem[1:2] in "NK" or elem[0:1] == "R" ) ] |
| myremoved = [ "./" + elem.split()[-1:][0].split('/')[-1:][0] for elem in bzrstatus if elem.startswith("-") ] |
| myremoved = [ "./" + elem.split()[-3:-2][0].split('/')[-1:][0] for elem in bzrstatus if elem and ( elem[1:2] == "K" or elem[0:1] == "R" ) ] |
| # Bazaar expands nothing. |
| |
| if vcs == "hg": |
| with os.popen("hg status --no-status --modified .") as f: |
| mychanged = f.readlines() |
| mychanged = ["./" + elem.rstrip() for elem in mychanged] |
| |
| with os.popen("hg status --no-status --added .") as f: |
| mynew = f.readlines() |
| mynew = ["./" + elem.rstrip() for elem in mynew] |
| |
| with os.popen("hg status --no-status --removed .") as f: |
| myremoved = f.readlines() |
| myremoved = ["./" + elem.rstrip() for elem in myremoved] |
| |
| if vcs: |
| if not (mychanged or mynew or myremoved or (vcs == "hg" and mydeleted)): |
| print(green("RepoMan sez:"), "\"Doing nothing is not always good for QA.\"") |
| print() |
| print("(Didn't find any changed files...)") |
| print() |
| sys.exit(1) |
| |
| # Manifests need to be regenerated after all other commits, so don't commit |
| # them now even if they have changed. |
| mymanifests = set() |
| myupdates = set() |
| for f in mychanged + mynew: |
| if "Manifest" == os.path.basename(f): |
| mymanifests.add(f) |
| else: |
| myupdates.add(f) |
| myupdates.difference_update(myremoved) |
| myupdates = list(myupdates) |
| mymanifests = list(mymanifests) |
| myheaders = [] |
| mydirty = [] |
| |
# Resolve the commit message: --commitmsg, then --commitmsgfile, then
# interactively via $EDITOR (with the saved QA output shown) or stdin.
commitmessage = options.commitmsg
if options.commitmsgfile:
	try:
		f = io.open(_unicode_encode(options.commitmsgfile,
			encoding=_encodings['fs'], errors='strict'),
			mode='r', encoding=_encodings['content'], errors='replace')
		commitmessage = f.read()
		f.close()
		del f
	except (IOError, OSError) as e:
		if e.errno == errno.ENOENT:
			portage.writemsg("!!! File Not Found: --commitmsgfile='%s'\n" % options.commitmsgfile)
		else:
			raise
	# We've read the content so the file is no longer needed.
	commitmessagefile = None
if not commitmessage or not commitmessage.strip():
	try:
		editor = os.environ.get("EDITOR")
		if editor and utilities.editor_is_executable(editor):
			commitmessage = utilities.get_commit_message_with_editor(
				editor, message=qa_output)
		else:
			commitmessage = utilities.get_commit_message_with_stdin()
	except KeyboardInterrupt:
		exithandler()
	if not commitmessage or not commitmessage.strip():
		print("* no commit message? aborting commit.")
		sys.exit(1)
commitmessage = commitmessage.rstrip()
# ChangeLog entries get the message without the Portage version footer.
changelog_msg = commitmessage
portage_version = getattr(portage, "VERSION", None)
if portage_version is None:
	sys.stderr.write("Failed to insert portage version in message!\n")
	sys.stderr.flush()
	portage_version = "Unknown"
unameout = platform.system() + " "
if platform.system() in ["Darwin", "SunOS"]:
	unameout += platform.processor()
else:
	unameout += platform.machine()
commitmessage += "\n\n(Portage version: %s/%s/%s" % \
	(portage_version, vcs, unameout)
if options.force:
	commitmessage += ", RepoMan options: --force"
commitmessage += ")"
| |
# Optionally auto-update the ChangeLog of every touched package.
if options.echangelog in ('y', 'force'):
	logging.info("checking for unmodified ChangeLog files")
	committer_name = utilities.get_committer_name(env=repoman_settings)
	for x in sorted(vcs_files_to_cps(
		chain(myupdates, mymanifests, myremoved))):
		catdir, pkgdir = x.split("/")
		checkdir = repodir + "/" + x
		# Build the path relative to the current working directory,
		# which depends on how deep inside the repo we were invoked.
		checkdir_relative = ""
		if repolevel < 3:
			checkdir_relative = os.path.join(pkgdir, checkdir_relative)
		if repolevel < 2:
			checkdir_relative = os.path.join(catdir, checkdir_relative)
		checkdir_relative = os.path.join(".", checkdir_relative)

		changelog_path = os.path.join(checkdir_relative, "ChangeLog")
		changelog_modified = changelog_path in modified_changelogs
		# A hand-edited ChangeLog wins unless --echangelog=force.
		if changelog_modified and options.echangelog != 'force':
			continue

		# get changes for this package
		cdrlen = len(checkdir_relative)
		clnew = [elem[cdrlen:] for elem in mynew if elem.startswith(checkdir_relative)]
		clremoved = [elem[cdrlen:] for elem in myremoved if elem.startswith(checkdir_relative)]
		clchanged = [elem[cdrlen:] for elem in mychanged if elem.startswith(checkdir_relative)]

		# Skip ChangeLog generation if only the Manifest was modified,
		# as discussed in bug #398009.
		nontrivial_cl_files = set()
		nontrivial_cl_files.update(clnew, clremoved, clchanged)
		nontrivial_cl_files.difference_update(['Manifest'])
		if not nontrivial_cl_files and options.echangelog != 'force':
			continue

		new_changelog = utilities.UpdateChangeLog(checkdir_relative,
			committer_name, changelog_msg,
			os.path.join(repodir, 'skel.ChangeLog'),
			catdir, pkgdir,
			new=clnew, removed=clremoved, changed=clchanged,
			pretend=options.pretend)
		if new_changelog is None:
			writemsg_level("!!! Updating the ChangeLog failed\n", \
				level=logging.ERROR, noiselevel=-1)
			sys.exit(1)

		# if the ChangeLog was just created, add it to vcs
		if new_changelog:
			myautoadd.append(changelog_path)
			# myautoadd is appended to myupdates below
		else:
			myupdates.append(changelog_path)

if myautoadd:
	print(">>> Auto-Adding missing Manifest/ChangeLog file(s)...")
	add_cmd = [vcs, "add"]
	add_cmd += myautoadd
	if options.pretend:
		portage.writemsg_stdout("(%s)\n" % " ".join(add_cmd),
			noiselevel=-1)
	else:
		if not (sys.hexversion >= 0x3000000 and sys.hexversion < 0x3020000):
			# Python 3.1 produces the following TypeError if raw bytes are
			# passed to subprocess.call():
			#   File "/usr/lib/python3.1/subprocess.py", line 646, in __init__
			#     errread, errwrite)
			#   File "/usr/lib/python3.1/subprocess.py", line 1157, in _execute_child
			#     raise child_exception
			# TypeError: expected an object with the buffer interface
			add_cmd = [_unicode_encode(arg) for arg in add_cmd]
		retcode = subprocess.call(add_cmd)
		if retcode != os.EX_OK:
			logging.error(
				"Exiting on %s error code: %s\n" % (vcs, retcode))
			sys.exit(retcode)

	myupdates += myautoadd
| |
| print("* %s files being committed..." % green(str(len(myupdates))), end=' ') |
| |
| if vcs not in ('cvs', 'svn'): |
| # With git, bzr and hg, there's never any keyword expansion, so |
| # there's no need to regenerate manifests and all files will be |
| # committed in one big commit at the end. |
| print() |
| elif not repo_config.thin_manifest: |
| if vcs == 'cvs': |
| headerstring = "'\$(Header|Id).*\$'" |
| elif vcs == "svn": |
| svn_keywords = dict((k.lower(), k) for k in [ |
| "Rev", |
| "Revision", |
| "LastChangedRevision", |
| "Date", |
| "LastChangedDate", |
| "Author", |
| "LastChangedBy", |
| "URL", |
| "HeadURL", |
| "Id", |
| "Header", |
| ]) |
| |
| for myfile in myupdates: |
| |
| # for CVS, no_expansion contains files that are excluded from expansion |
| if vcs == "cvs": |
| if myfile in no_expansion: |
| continue |
| |
| # for SVN, expansion contains files that are included in expansion |
| elif vcs == "svn": |
| if myfile not in expansion: |
| continue |
| |
| # Subversion keywords are case-insensitive in svn:keywords properties, but case-sensitive in contents of files. |
| enabled_keywords = [] |
| for k in expansion[myfile]: |
| keyword = svn_keywords.get(k.lower()) |
| if keyword is not None: |
| enabled_keywords.append(keyword) |
| |
| headerstring = "'\$(%s).*\$'" % "|".join(enabled_keywords) |
| |
| myout = subprocess_getstatusoutput("egrep -q "+headerstring+" "+myfile) |
| if myout[0] == 0: |
| myheaders.append(myfile) |
| |
| print("%s have headers that will change." % green(str(len(myheaders)))) |
| print("* Files with headers will cause the manifests to be changed and committed separately.") |
| |
| logging.info("myupdates: %s", myupdates) |
| logging.info("myheaders: %s", myheaders) |
| |
| if options.ask and userquery('Commit changes?', True) != 'Yes': |
| print("* aborting commit.") |
| sys.exit(1) |
| |
# Handle the case where committed files have keywords which
# will change and need a priming commit before the Manifest
# can be committed.
if (myupdates or myremoved) and myheaders:
	myfiles = myupdates + myremoved
	# Write the commit message to a temp file so it can be handed to
	# the VCS via -F; it is unlinked again in the finally block below.
	fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
	mymsg = os.fdopen(fd, "wb")
	mymsg.write(_unicode_encode(commitmessage))
	mymsg.close()

	print()
	print(green("Using commit message:"))
	print(green("------------------------------------------------------------------------------"))
	print(commitmessage)
	print(green("------------------------------------------------------------------------------"))
	print()

	# Having a leading ./ prefix on file paths can trigger a bug in
	# the cvs server when committing files to multiple directories,
	# so strip the prefix.  NOTE: str.lstrip("./") strips a character
	# SET, not a prefix, and would mangle names like "./.foo" into
	# "foo" -- remove exactly the "./" prefix instead.
	myfiles = [f[2:] if f.startswith("./") else f for f in myfiles]

	commit_cmd = [vcs]
	commit_cmd.extend(vcs_global_opts)
	commit_cmd.append("commit")
	commit_cmd.extend(vcs_local_opts)
	commit_cmd.extend(["-F", commitmessagefile])
	commit_cmd.extend(myfiles)

	try:
		if options.pretend:
			print("(%s)" % (" ".join(commit_cmd),))
		else:
			retval = spawn(commit_cmd, env=os.environ)
			if retval != os.EX_OK:
				writemsg_level(("!!! Exiting on %s (shell) " + \
					"error code: %s\n") % (vcs, retval),
					level=logging.ERROR, noiselevel=-1)
				sys.exit(retval)
	finally:
		# Always clean up the temporary message file.
		try:
			os.unlink(commitmessagefile)
		except OSError:
			pass
| |
# Setup the GPG commands
def gpgsign(filename):
	"""Sign *filename* with the configured gpg command and replace it.

	Expands ${PORTAGE_GPG_*} placeholders in
	PORTAGE_GPG_SIGNING_COMMAND, runs the command, and on success
	renames the resulting "<filename>.asc" over the original file.
	Raises MissingParameter for absent settings, InvalidLocation for an
	inaccessible PORTAGE_GPG_DIR, and PortageException if gpg fails.
	"""
	template = repoman_settings.get("PORTAGE_GPG_SIGNING_COMMAND")
	if template is None:
		raise MissingParameter("PORTAGE_GPG_SIGNING_COMMAND is unset!" + \
			" Is make.globals missing?")
	if "${PORTAGE_GPG_KEY}" in template and \
		"PORTAGE_GPG_KEY" not in repoman_settings:
		raise MissingParameter("PORTAGE_GPG_KEY is unset!")
	if "${PORTAGE_GPG_DIR}" in template:
		if "PORTAGE_GPG_DIR" not in repoman_settings:
			# Fall back to the conventional gnupg home directory.
			repoman_settings["PORTAGE_GPG_DIR"] = \
				os.path.expanduser("~/.gnupg")
			logging.info("Automatically setting PORTAGE_GPG_DIR to '%s'" \
				% repoman_settings["PORTAGE_GPG_DIR"])
		else:
			repoman_settings["PORTAGE_GPG_DIR"] = \
				os.path.expanduser(repoman_settings["PORTAGE_GPG_DIR"])
		if not os.access(repoman_settings["PORTAGE_GPG_DIR"], os.X_OK):
			raise portage.exception.InvalidLocation(
				"Unable to access directory: PORTAGE_GPG_DIR='%s'" % \
				repoman_settings["PORTAGE_GPG_DIR"])
	# Build the variable substitutions for the command template.
	substitutions = {"FILE": filename}
	for key in ("PORTAGE_GPG_DIR", "PORTAGE_GPG_KEY"):
		value = repoman_settings.get(key)
		if value is not None:
			substitutions[key] = value
	command = portage.util.varexpand(template, mydict=substitutions)
	if options.pretend:
		print("(%s)" % command)
	else:
		status = os.system(command)
		if status != os.EX_OK:
			raise portage.exception.PortageException(
				"!!! gpg exited with '%s' status" % status)
		os.rename(filename + ".asc", filename)
| |
# When files are removed and re-added, the cvs server will put /Attic/
# inside the $Header path. This code detects the problem and corrects it
# so that the Manifest will generate correctly. See bug #169500.
# Use binary mode in order to avoid potential character encoding issues.
cvs_header_re = re.compile(br'^#\s*\$Header.*\$$')
attic_str = b'/Attic/'
attic_replace = b'/'
for x in myheaders:
	fs_path = _unicode_encode(x,
		encoding=_encodings['fs'], errors='strict')
	with open(fs_path, mode='rb') as header_file:
		mylines = header_file.readlines()
	# Rewrite only $Header lines that actually contain /Attic/.
	changed = False
	for i, line in enumerate(mylines):
		if attic_str in line and \
			cvs_header_re.match(line) is not None:
			mylines[i] = line.replace(attic_str, attic_replace)
			changed = True
	if changed:
		portage.util.write_atomic(x, b''.join(mylines),
			mode='wb')
| |
# Committing from the repository root is unusual enough to remark on.
if repolevel == 1:
	print(green("RepoMan sez:"), "\"You're rather crazy... "
		"doing the entire repository.\"\n")

# For cvs/svn, regenerate the Manifest of every category/package that had
# files updated or removed, so the fresh checksums get committed.
if vcs in ('cvs', 'svn') and (myupdates or myremoved):

	for x in sorted(vcs_files_to_cps(
		chain(myupdates, myremoved, mymanifests))):
		# "O" selects the package directory the Manifest code operates on.
		repoman_settings["O"] = os.path.join(repodir, x)
		digestgen(mysettings=repoman_settings, myportdb=portdb)

# GPG-sign each affected Manifest when signing is enabled.  Any
# PortageException downgrades to unsigned Manifests instead of aborting.
signed = False
if sign_manifests:
	signed = True
	try:
		for x in sorted(vcs_files_to_cps(
			chain(myupdates, myremoved, mymanifests))):
			repoman_settings["O"] = os.path.join(repodir, x)
			gpgsign(os.path.join(repoman_settings["O"], "Manifest"))
	except portage.exception.PortageException as e:
		portage.writemsg("!!! %s\n" % str(e))
		portage.writemsg("!!! Disabled FEATURES='sign'\n")
		signed = False
| |
if vcs == 'git':
	# It's not safe to use the git commit -a option since there might
	# be some modified files elsewhere in the working tree that the
	# user doesn't want to commit. Therefore, call git update-index
	# in order to ensure that the index is updated with the latest
	# versions of all new and modified files in the relevant portion
	# of the working tree.
	myfiles = mymanifests + myupdates
	myfiles.sort()
	update_index_cmd = ["git", "update-index"]
	# Strip the exact "./" prefix; str.lstrip("./") strips any run of
	# '.' and '/' characters and would mangle hidden names like "./.foo".
	update_index_cmd.extend(
		(f[2:] if f.startswith("./") else f) for f in myfiles)
	if options.pretend:
		print("(%s)" % (" ".join(update_index_cmd),))
	else:
		retval = spawn(update_index_cmd, env=os.environ)
		if retval != os.EX_OK:
			writemsg_level(("!!! Exiting on %s (shell) " + \
				"error code: %s\n") % (vcs, retval),
				level=logging.ERROR, noiselevel=-1)
			sys.exit(retval)
| |
if True:
	# Commit the Manifests (plus, when no keyword-header priming commit
	# was needed, the remaining updated/removed files as well).
	myfiles = mymanifests[:]
	# If there are no header (SVN/CVS keywords) changes in
	# the files, this Manifest commit must include the
	# other (yet uncommitted) files.
	if not myheaders:
		myfiles += myupdates
		myfiles += myremoved
	myfiles.sort()

	fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
	mymsg = os.fdopen(fd, "wb")
	# strip the closing parenthesis; the message is assumed to end with
	# a "(... )" style suffix that gets re-closed below -- TODO confirm
	mymsg.write(_unicode_encode(commitmessage[:-1]))
	if signed:
		mymsg.write(_unicode_encode(
			", signed Manifest commit with key %s)" % \
			repoman_settings["PORTAGE_GPG_KEY"]))
	else:
		mymsg.write(b", unsigned Manifest commit)")
	mymsg.close()

	commit_cmd = []
	if options.pretend and vcs is None:
		# substitute a bogus value for pretend output
		commit_cmd.append("cvs")
	else:
		commit_cmd.append(vcs)
	commit_cmd.extend(vcs_global_opts)
	commit_cmd.append("commit")
	commit_cmd.extend(vcs_local_opts)
	if vcs == "hg":
		commit_cmd.extend(["--logfile", commitmessagefile])
		commit_cmd.extend(myfiles)
	else:
		commit_cmd.extend(["-F", commitmessagefile])
		# Strip the exact "./" prefix; str.lstrip("./") strips any run
		# of '.' and '/' characters and would mangle hidden file names.
		commit_cmd.extend(
			(f[2:] if f.startswith("./") else f) for f in myfiles)

	try:
		if options.pretend:
			print("(%s)" % (" ".join(commit_cmd),))
		else:
			retval = spawn(commit_cmd, env=os.environ)
			if retval != os.EX_OK:
				writemsg_level(("!!! Exiting on %s (shell) " + \
					"error code: %s\n") % (vcs, retval),
					level=logging.ERROR, noiselevel=-1)
				sys.exit(retval)
	finally:
		# Always clean up the temporary message file, even on exit/error.
		try:
			os.unlink(commitmessagefile)
		except OSError:
			pass
| |
# Final status banner: success if any VCS was detected, jest otherwise.
print()
final_msg = "Commit complete." if vcs else "repoman was too scared by not seeing any familiar version control file that he forgot to commit anything"
print(final_msg)
print(green("RepoMan sez:"), "\"If everyone were like you, I'd be out of business!\"\n")
sys.exit(0)
| |