| #!/usr/bin/python -O |
| # Copyright 1999-2006 Gentoo Foundation |
| # Distributed under the terms of the GNU General Public License v2 |
| # $Id$ |
| |
| # Next to do: dep syntax checking in mask files |
| # Then, check to make sure deps are satisfiable (to avoid "can't find match for" problems) |
| # that last one is tricky because multiple profiles need to be checked. |
| |
| import codecs |
| import commands |
| import errno |
| import formatter |
| import logging |
| import optparse |
| import os |
| import re |
| import signal |
| import stat |
| import sys |
| import tempfile |
| import time |
| import platform |
| |
| from itertools import izip |
| from stat import S_ISDIR, ST_CTIME |
| |
| try: |
| import cPickle as pickle |
| except ImportError: |
| import pickle |
| |
| try: |
| import cStringIO as StringIO |
| except ImportError: |
| import StringIO |
| |
| if not hasattr(__builtins__, "set"): |
| from sets import Set as set |
| |
| try: |
| import portage |
| except ImportError: |
| from os import path as osp |
| sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")) |
| import portage |
| portage._disable_legacy_globals() |
| |
| try: |
| from repoman.checks import run_checks |
| from repoman import utilities |
| except ImportError: |
| from os import path as osp |
| sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), 'pym')) |
| from repoman.checks import run_checks |
| from repoman import utilities |
| |
| from _emerge import Package, RootConfig |
| from portage.sets import load_default_config |
| |
| import portage.checksum |
| import portage.const |
| import portage.dep |
| portage.dep._dep_check_strict = True |
| import portage.exception |
| from portage import cvstree, normalize_path |
| from portage import util |
| from portage.exception import ParseError |
| from portage.manifest import Manifest |
| from portage.process import find_binary, spawn |
| from portage.output import bold, create_color_func, darkgreen, \ |
| green, nocolor, red, turquoise, yellow |
| from portage.output import ConsoleStyleFile, StyleWriter |
| |
| util.initialize_logger() |
| |
| # 14 is the length of DESCRIPTION="" |
| max_desc_len = 100 |
| allowed_filename_chars="a-zA-Z0-9._-+:" |
| allowed_filename_chars_set = {} |
| map(allowed_filename_chars_set.setdefault, map(chr, range(ord('a'), ord('z')+1))) |
| map(allowed_filename_chars_set.setdefault, map(chr, range(ord('A'), ord('Z')+1))) |
| map(allowed_filename_chars_set.setdefault, map(chr, range(ord('0'), ord('9')+1))) |
| map(allowed_filename_chars_set.setdefault, map(chr, map(ord, [".", "-", "_", "+", ":"]))) |
| bad = create_color_func("BAD") |
| |
| # A sane umask is needed for files that portage creates. |
| os.umask(022) |
| repoman_settings = portage.config(local_config=False, |
| config_incrementals=portage.const.INCREMENTALS) |
| repoman_settings.lock() |
| |
| if repoman_settings.get("NOCOLOR", "").lower() in ("yes", "true") or \ |
| not sys.stdout.isatty(): |
| nocolor() |
| |
def warn(txt):
	"""Emit a repoman-prefixed message on stdout."""
	sys.stdout.write("repoman: %s\n" % txt)
| |
def err(txt):
	"""Print a repoman warning message and terminate with exit status 1."""
	warn(txt)
	sys.exit(1)
| |
def exithandler(signum=None, frame=None):
	"""SIGINT handler: log a fatal message and exit with status 1.

	Args:
		signum - signal number (unused; supplied by the signal machinery)
		frame - current stack frame (unused; supplied by the signal machinery)
	"""
	logging.fatal("Interrupted; exiting...")
	sys.exit(1)
	# Bug fix: an unreachable os.kill(0, signal.SIGKILL) followed the
	# sys.exit(1) call above; sys.exit raises SystemExit, so the kill
	# could never execute.  The dead statement has been removed.

signal.signal(signal.SIGINT,exithandler)
| |
class RepomanHelpFormatter(optparse.IndentedHelpFormatter):
	"""Repoman needs its own HelpFormatter for now, because the default
	formatters re-wrap and thereby mangle the help text."""

	def __init__(self, indent_increment=1, max_help_position=24, width=150, short_first=1):
		optparse.HelpFormatter.__init__(
			self, indent_increment=indent_increment,
			max_help_position=max_help_position,
			width=width, short_first=short_first)

	def format_description(self, description):
		# Return the description verbatim instead of re-wrapping it.
		return description
| |
class RepomanOptionParser(optparse.OptionParser):
	"""OptionParser with an on_tail() method for appending free-form text
	after the generated help output (Ruby's OptionParser has this)."""

	def __init__(self, *args, **kwargs):
		optparse.OptionParser.__init__(self, *args, **kwargs)
		self.tail = ""

	def on_tail(self, description):
		# Accumulate text that will be shown after the standard help.
		self.tail = self.tail + description

	def format_help(self, formatter=None):
		return optparse.OptionParser.format_help(self, formatter) + self.tail
| |
| |
def ParseArgs(args, qahelp):
	"""This function uses a customized optionParser to parse command line arguments for repoman
	Args:
		args - a sequence of command line arguments, argv style (args[0] is the program name)
		qahelp - a dict of qa warning to help message
	Returns:
		(opts, args), just like a call to parser.parse_args()
	"""

	modes = {
		'commit' : 'Run a scan then commit changes',
		'ci' : 'Run a scan then commit changes',
		'fix' : 'Fix simple QA issues (stray digests, missing digests)',
		'full' : 'Scan directory tree and print all issues (not a summary)',
		'help' : 'Show this screen',
		'last' : 'Remember report from last run',
		'lfull' : 'Remember report from last run (full listing)',
		'manifest' : 'Generate a Manifest (fetches files if necessary)',
		'scan' : 'Scan directory tree for QA issues'
	}

	# sorted() replaces the py2-only keys()/sort() idiom and works on any
	# mapping.
	mode_keys = sorted(modes)

	parser = RepomanOptionParser(formatter=RepomanHelpFormatter(), usage="%prog [options] [mode]")
	parser.description = green(" ".join((os.path.basename(args[0]), "1.2")))
	parser.description += "\nCopyright 1999-2007 Gentoo Foundation"
	parser.description += "\nDistributed under the terms of the GNU General Public License v2"
	parser.description += "\nmodes: " + " | ".join(map(green,mode_keys))

	parser.add_option('-m', '--commitmsg', dest='commitmsg',
		help='specify a commit message on the command line')

	parser.add_option('-M', '--commitmsgfile', dest='commitmsgfile',
		help='specify a path to a file that contains a commit message')

	parser.add_option('-p', '--pretend', dest='pretend', default=False,
		action='store_true', help='don\'t commit or fix anything; just show what would be done')

	parser.add_option('-q', '--quiet', dest="quiet", action="count", default=0,
		help='do not print unnecessary messages')

	parser.add_option('-f', '--force', dest='force', default=False, action='store_true',
		help='Commit with QA violations')

	parser.add_option('-v', '--verbose', dest="verbosity", action='count',
		help='be very verbose in output', default=0)

	parser.add_option('-x', '--xmlparse', dest='xml_parse', action='store_true',
		default=False, help='forces the metadata.xml parse check to be carried out')

	parser.add_option('-i', '--ignore-arches', dest='ignore_arches', action='store_true',
		default=False, help='ignore arch-specific failures (where arch != host)')

	parser.add_option('-I', '--ignore-masked', dest='ignore_masked', action='store_true',
		default=False, help='ignore masked packages (not allowed with commit mode)')

	parser.add_option('--without-mask', dest='without_mask', action='store_true',
		default=False, help='behave as if no package.mask entries exist (not allowed with commit mode)')

	parser.add_option('--mode', type='choice', dest='mode', choices=mode_keys,
		help='specify which mode repoman will run in (default=full)')

	parser.on_tail("\n " + green("Modes".ljust(20) + " Description\n"))

	for k in mode_keys:
		parser.on_tail(" %s %s\n" % (k.ljust(20), modes[k]))

	parser.on_tail("\n " + green("QA keyword".ljust(20) + " Description\n"))

	for k in sorted(qahelp):
		parser.on_tail(" %s %s\n" % (k.ljust(20), qahelp[k]))

	if not args:
		args = sys.argv
	opts, args = parser.parse_args(args)

	if opts.mode == 'help':
		# Bug fix: optparse's print_help() takes no 'short' keyword, so the
		# previous call print_help(short=False) raised TypeError.  Print the
		# full help (including the on_tail text) and exit successfully.
		parser.print_help()
		sys.exit(0)

	# A bare mode argument (e.g. "repoman scan") also selects the mode.
	for arg in args:
		if arg in modes:
			if not opts.mode:
				opts.mode = arg
			break

	if not opts.mode:
		opts.mode = 'full' #default to full

	if opts.mode == 'ci':
		opts.mode = 'commit' # backwards compat shortcut

	if opts.mode == 'commit' and not (opts.force or opts.pretend):
		if opts.ignore_masked:
			parser.error('Commit mode and --ignore-masked are not compatible')
		if opts.without_mask:
			parser.error('Commit mode and --without-mask are not compatible')

	# Use the verbosity and quiet options to fiddle with the loglevel
	# appropriately (each -v lowers, each -q raises, by one level).
	logger = logging.getLogger()
	for val in range(opts.verbosity):
		logger.setLevel(logger.getEffectiveLevel() - 10)

	for val in range(opts.quiet):
		logger.setLevel(logger.getEffectiveLevel() + 10)

	return (opts, args)
| |
# Help text for every QA category keyword, shown by "repoman help".
# (Typo fixes only relative to the original: "do note need", "doesnt",
# "portages license/ dir", "encountered access".)
qahelp={
	"CVS/Entries.IO_error":"Attempting to commit, and an IO error was encountered accessing the Entries file",
	"desktop.invalid":"desktop-file-validate reports errors in a *.desktop file",
	"ebuild.invalidname":"Ebuild files with a non-parseable or syntactically incorrect name (or using 2.1 versioning extensions)",
	"ebuild.namenomatch":"Ebuild files that do not have the same name as their parent directory",
	"changelog.missing":"Missing ChangeLog files",
	"ebuild.disjointed":"Ebuilds not added to cvs when the matching digest has been added",
	"ebuild.notadded":"Ebuilds that exist but have not been added to cvs",
	"ebuild.patches":"PATCHES variable should be a bash array to ensure white space safety",
	"changelog.notadded":"ChangeLogs that exist but have not been added to cvs",
	"filedir.missing":"Package lacks a files directory",
	"file.executable":"Ebuilds, digests, metadata.xml, Manifest, and ChangeLog do not need the executable bit",
	"file.size":"Files in the files directory must be under 20k",
	"file.name":"File/dir name must be composed of only the following chars: %s " % allowed_filename_chars,
	"file.UTF8":"File is not UTF8 compliant",
	"inherit.autotools":"Ebuild inherits autotools but does not call eautomake, eautoconf or eautoreconf",
	"java.eclassesnotused":"With virtual/jdk in DEPEND you must inherit a java eclass",
	"KEYWORDS.dropped":"Ebuilds that appear to have dropped KEYWORDS for some arch",
	"KEYWORDS.missing":"Ebuilds that have a missing or empty KEYWORDS variable",
	"KEYWORDS.stable":"Ebuilds that have been added directly with stable KEYWORDS",
	"KEYWORDS.stupid":"Ebuilds that use KEYWORDS=-* instead of package.mask",
	"LICENSE.missing":"Ebuilds that have a missing or empty LICENSE variable",
	"DESCRIPTION.missing":"Ebuilds that have a missing or empty DESCRIPTION variable",
	"DESCRIPTION.toolong":"DESCRIPTION is over %d characters" % max_desc_len,
	"EAPI.incompatible":"Ebuilds that use features that are only available with a different EAPI",
	"EAPI.unsupported":"Ebuilds that have an unsupported EAPI version (you must upgrade portage)",
	"SLOT.missing":"Ebuilds that have a missing or empty SLOT variable",
	"HOMEPAGE.missing":"Ebuilds that have a missing or empty HOMEPAGE variable",
	"DEPEND.bad":"User-visible ebuilds with bad DEPEND settings (matched against *visible* ebuilds)",
	"RDEPEND.bad":"User-visible ebuilds with bad RDEPEND settings (matched against *visible* ebuilds)",
	"PDEPEND.bad":"User-visible ebuilds with bad PDEPEND settings (matched against *visible* ebuilds)",
	"DEPEND.badmasked":"Masked ebuilds with bad DEPEND settings (matched against *all* ebuilds)",
	"RDEPEND.badmasked":"Masked ebuilds with RDEPEND settings (matched against *all* ebuilds)",
	"PDEPEND.badmasked":"Masked ebuilds with PDEPEND settings (matched against *all* ebuilds)",
	"DEPEND.badindev":"User-visible ebuilds with bad DEPEND settings (matched against *visible* ebuilds) in developing arch",
	"RDEPEND.badindev":"User-visible ebuilds with bad RDEPEND settings (matched against *visible* ebuilds) in developing arch",
	"PDEPEND.badindev":"User-visible ebuilds with bad PDEPEND settings (matched against *visible* ebuilds) in developing arch",
	"DEPEND.badmaskedindev":"Masked ebuilds with bad DEPEND settings (matched against *all* ebuilds) in developing arch",
	"RDEPEND.badmaskedindev":"Masked ebuilds with RDEPEND settings (matched against *all* ebuilds) in developing arch",
	"PDEPEND.badmaskedindev":"Masked ebuilds with PDEPEND settings (matched against *all* ebuilds) in developing arch",
	"DEPEND.syntax":"Syntax error in DEPEND (usually an extra/missing space/parenthesis)",
	"RDEPEND.syntax":"Syntax error in RDEPEND (usually an extra/missing space/parenthesis)",
	"PDEPEND.syntax":"Syntax error in PDEPEND (usually an extra/missing space/parenthesis)",
	"LICENSE.syntax":"Syntax error in LICENSE (usually an extra/missing space/parenthesis)",
	"PROVIDE.syntax":"Syntax error in PROVIDE (usually an extra/missing space/parenthesis)",
	"RESTRICT.syntax":"Syntax error in RESTRICT (usually an extra/missing space/parenthesis)",
	"SRC_URI.syntax":"Syntax error in SRC_URI (usually an extra/missing space/parenthesis)",
	"SRC_URI.mirror":"A uri listed in profiles/thirdpartymirrors is found in SRC_URI",
	"ebuild.syntax":"Error generating cache entry for ebuild; typically caused by ebuild syntax error or digest verification failure",
	"ebuild.output":"A simple sourcing of the ebuild produces output; this breaks ebuild policy.",
	"ebuild.nesteddie":"Placing 'die' inside ( ) prints an error, but doesn't stop the ebuild.",
	"variable.readonly":"Assigning a readonly variable",
	"LIVEVCS.stable":"This ebuild is a live checkout from a VCS but has stable keywords.",
	"IUSE.invalid":"This ebuild has a variable in IUSE that is not in the use.desc or use.local.desc file",
	"IUSE.undefined":"This ebuild does not define IUSE (style guideline says to define IUSE even when empty)",
	"LICENSE.invalid":"This ebuild is listing a license that doesn't exist in portage's licenses/ dir.",
	"KEYWORDS.invalid":"This ebuild contains KEYWORDS that are not listed in profiles/arch.list or for which no valid profile was found",
	"RDEPEND.suspect":"RDEPEND contains a package that usually only belongs in DEPEND.",
	"RESTRICT.invalid":"This ebuild contains invalid RESTRICT values.",
	"digestentry.unused":"Some files listed in the Manifest aren't referenced in SRC_URI",
	"ebuild.nostable":"There are no ebuilds that are marked as stable for your ARCH",
	"ebuild.allmasked":"All ebuilds are masked for this package (Package level only)",
	"ebuild.majorsyn":"This ebuild has a major syntax error that may cause the ebuild to fail partially or fully",
	"ebuild.minorsyn":"This ebuild has a minor syntax error that contravenes gentoo coding style",
	"ebuild.badheader":"This ebuild has a malformed header",
	"metadata.missing":"Missing metadata.xml files",
	"metadata.bad":"Bad metadata.xml files",
	"virtual.versioned":"PROVIDE contains virtuals with versions",
	"virtual.exists":"PROVIDE contains existing package names",
	"virtual.unavailable":"PROVIDE contains a virtual which contains no profile default",
	"usage.obsolete":"The ebuild makes use of an obsolete construct"
}

# All QA category keywords, sorted (sorted() replaces keys()/sort()).
qacats = sorted(qahelp)
| |
# QA categories that are reported as warnings rather than hard failures;
# membership here is checked (e.g. in last()) to decide whether a tripped
# category sets dofail.  Every entry must also appear in qahelp/qacats.
qawarnings=[
"changelog.missing",
"changelog.notadded",
"digestentry.unused",
"ebuild.notadded",
"ebuild.nostable",
"ebuild.allmasked",
"ebuild.nesteddie",
"desktop.invalid",
"DEPEND.badmasked","RDEPEND.badmasked","PDEPEND.badmasked",
"DEPEND.badindev","RDEPEND.badindev","PDEPEND.badindev",
"DEPEND.badmaskedindev","RDEPEND.badmaskedindev","PDEPEND.badmaskedindev",
"DESCRIPTION.toolong",
"KEYWORDS.dropped",
"KEYWORDS.stupid",
"KEYWORDS.missing",
"IUSE.undefined",
"RDEPEND.suspect",
"RESTRICT.invalid",
"SRC_URI.mirror",
"ebuild.minorsyn",
"ebuild.badheader",
"ebuild.patches",
"file.size",
"inherit.autotools",
"java.eclassesnotused",
"metadata.missing",
"metadata.bad",
"virtual.versioned",
"virtual.exists",
"virtual.unavailable",
"usage.obsolete",
"LIVEVCS.stable"
]
| |
# Metadata variables that must not be missing/empty; each generates a
# "<VAR>.missing" QA warning category below.
missingvars=["KEYWORDS","LICENSE","DESCRIPTION","HOMEPAGE","SLOT"]
# All metadata keys repoman fetches per ebuild: portage's auxdb keys
# (minus UNUSED_* placeholders and CDEPEND) plus whatever Package needs.
allvars = set(x for x in portage.auxdbkeys if not x.startswith("UNUSED_"))
allvars.discard("CDEPEND")
allvars.update(Package.metadata_keys)
allvars = sorted(allvars)
commitmessage=None
# Register the "<VAR>.missing" categories as warnings; complain if qahelp
# lacks a help entry for any of them.
for x in missingvars:
	x += ".missing"
	if x not in qacats:
		logging.warn('* missingvars values need to be added to qahelp ("%s")' % x)
		qacats.append(x)
		qawarnings.append(x)
| |
# RESTRICT values that ebuilds may legitimately use; anything else
# presumably triggers the RESTRICT.invalid category (see qahelp).
valid_restrict = frozenset(["binchecks", "bindist",
	"fetch", "installsources", "mirror",
	"primaryuri", "strip", "test", "userpriv"])

# Eclasses that indicate a live VCS checkout — presumably used for the
# LIVEVCS.stable check (see qahelp); TODO confirm against the check code.
live_eclasses = frozenset([
	"cvs",
	"darcs",
	"git",
	"mercurial",
	"subversion"
])

# Packages that are build-time tools and therefore usually belong in
# DEPEND rather than RDEPEND (the RDEPEND.suspect category).
suspect_rdepend = frozenset([
	"app-arch/cabextract",
	"app-arch/rpm2targz",
	"app-doc/doxygen",
	"dev-lang/nasm",
	"dev-lang/swig",
	"dev-lang/yasm",
	"dev-perl/extutils-pkgconfig",
	"dev-python/setuptools",
	"dev-util/byacc",
	"dev-util/cmake",
	"dev-util/ftjam",
	"dev-util/gtk-doc",
	"dev-util/gtk-doc-am",
	"dev-util/intltool",
	"dev-util/jam",
	"dev-util/pkgconfig",
	"dev-util/scons",
	"dev-util/unifdef",
	"dev-util/yacc",
	"media-gfx/ebdftopcf",
	"sys-apps/help2man",
	"sys-devel/autoconf",
	"sys-devel/automake",
	"sys-devel/bin86",
	"sys-devel/bison",
	"sys-devel/dev86",
	"sys-devel/flex",
	"sys-devel/libtool",
	"sys-devel/m4",
	"sys-devel/pmake",
	"x11-misc/bdftopcf",
	"x11-misc/imake",
])

# file.executable: filenames that must not carry the executable bit.
no_exec = frozenset(["Manifest","ChangeLog","metadata.xml"])
| |
def last(full=False):
	"""Print the results of the last repoman run.

	Args:
		full - Print the complete results; if false, print a summary
	Returns:
		Doesn't return (invokes sys.exit())
	"""
	# NOTE(review): the ``full`` parameter appears unused — the full/summary
	# decision is derived from options.mode below instead; confirm before
	# removing it, since the caller passes ('lfull' in options.mode).
	#Retrieve and unpickle stats and fails from saved files
	savedf=open(os.path.join(portage.const.CACHE_PATH, 'repo.stats'),'r')
	stats = pickle.load(savedf)
	savedf.close()
	savedf=open(os.path.join(portage.const.CACHE_PATH, 'repo.fails'),'r')
	fails = pickle.load(savedf)
	savedf.close()

	#dofail will be set to 1 if we have failed in at least one non-warning category
	dofail=0
	#dowarn will be set to 1 if we tripped any warnings
	dowarn=0
	#dofull will be set if we should print a "repoman full" informational message
	dofull=0

	dofull = options.mode not in ("full", "lfull")

	# Scan the saved per-category counts: any hit is a warning; a hit in a
	# category outside qawarnings is a failure.
	for x in qacats:
		if not stats[x]:
			continue
		dowarn = 1
		if x not in qawarnings:
			dofail = 1

	print
	print green("RepoMan remembers...")
	print
	style_file = ConsoleStyleFile(sys.stdout)
	console_writer = StyleWriter(file=style_file, maxcol=9999)
	console_writer.style_listener = style_file.new_styles
	f = formatter.AbstractFormatter(console_writer)
	utilities.format_qa_output(f, stats, fails, dofull, dofail, options, qawarnings)
	print
	if dofull:
		print bold("Note: type \"repoman lfull\" for a complete listing of repomans last run.")
		print
	if dowarn and not dofail:
		print green("RepoMan sez:"),"\"You only gave me a partial QA payment last time?\n I took it, but I wasn't happy.\""
	elif not dofail:
		print green("RepoMan sez:"),"\"If everyone were like you, I'd be out of business!\""
	print
	sys.exit(0)
| |
options, arguments = ParseArgs(sys.argv, qahelp)

# 'last'/'lfull' modes only replay the previous run; last() exits.
if options.mode in ('last', 'lfull'):
	last('lfull' in options.mode)

# Set this to False when an extraordinary issue (generally
# something other than a QA issue) makes it impossible to
# commit (like if Manifest generation fails).
can_force = True


# Detect the version control system of the current directory.
vcs = None
if os.path.isdir("CVS"):
	vcs = "cvs"
if os.path.isdir(".svn"):
	vcs = "svn"

# Refuse to commit to the gentoo-x86 CVS repository without RMD160 hash
# support, which the Manifests there require.
if vcs == "cvs" and \
	"commit" == options.mode and \
	"RMD160" not in portage.checksum.hashorigin_map:
	from portage.util import grablines
	repo_lines = grablines("./CVS/Repository")
	if repo_lines and \
		"gentoo-x86" == repo_lines[0].strip().split(os.path.sep)[0]:
		msg = "Please install " \
		"pycrypto or enable python's ssl USE flag in order " \
		"to enable RMD160 hash support. See bug #198398 for " \
		"more information."
		prefix = bad(" * ")
		from textwrap import wrap
		for line in wrap(msg, 70):
			print prefix + line
		sys.exit(1)
	del repo_lines

# Without a VCS there is nothing to commit, so fall back to pretend mode.
if options.mode == 'commit' and not options.pretend and not vcs:
	logging.info("Not in a version controlled repository; enabling pretend mode.")
	options.pretend = True
| |
# Locate the main tree, the overlay being worked on, and the cwd within it.
try:
	portdir, portdir_overlay, mydir = utilities.FindPortdir(repoman_settings)
except ValueError:
	# FindPortdir is expected to have reported the problem already.
	sys.exit(1)

os.environ["PORTDIR"] = portdir
if portdir_overlay != portdir:
	os.environ["PORTDIR_OVERLAY"] = portdir_overlay
else:
	os.environ["PORTDIR_OVERLAY"] = ""

logging.info('Setting paths:')
logging.info('PORTDIR = "' + os.environ['PORTDIR'] + '"')
logging.info('PORTDIR_OVERLAY = "' + os.environ['PORTDIR_OVERLAY']+'"')

# Now that PORTDIR_OVERLAY is properly overridden, create the portdb.
repoman_settings = portage.config(local_config=False,
	config_incrementals=portage.const.INCREMENTALS)
trees = portage.create_trees()
trees["/"]["porttree"].settings = repoman_settings
portdb = trees["/"]["porttree"].dbapi
portdb.mysettings = repoman_settings
setconfig = load_default_config(repoman_settings, trees["/"])
root_config = RootConfig(repoman_settings, trees["/"], setconfig)
# We really only need to cache the metadata that's necessary for visibility
# filtering. Anything else can be discarded to reduce memory consumption.
# Bug fix: "LICENSE" was previously misspelled "LICENCE" here, so the
# LICENSE metadata was never actually discarded from the aux cache.
for k in ("DEPEND", "LICENSE", "PDEPEND",
	"PROVIDE", "RDEPEND", "RESTRICT", "repository"):
	portdb._aux_cache_keys.discard(k)
# dep_zapdeps looks at the vardbapi, but it shouldn't for repoman.
del trees["/"]["vartree"]
| |
# Path of cwd relative to (and including) the repository root directory.
myreporoot = os.path.basename(portdir_overlay)
myreporoot += mydir[len(portdir_overlay):]

# repolevel: 1 = repository root, 2 = category dir, 3 = package dir.
reposplit = myreporoot.split(os.path.sep)
repolevel = len(reposplit)

# check if it's in $PORTDIR/$CATEGORY/$PN , otherwise bail if committing.
# Reason for this is if they're trying to commit in just $FILESDIR/*, the Manifest needs updating.
# this check ensures that repoman knows where it is, and the manifest recommit is at least possible.
if options.mode == 'commit' and repolevel not in [1,2,3]:
	print red("***")+" Commit attempts *must* be from within a vcs co, category, or package directory."
	print red("***")+" Attempting to commit from a packages files directory will be blocked for instance."
	print red("***")+" This is intended behaviour, to ensure the manifest is recommited for a package."
	print red("***")
	err("Unable to identify level we're commiting from for %s" % '/'.join(reposplit))

# repodir: absolute path of the repository root (walk up from cwd).
startdir = normalize_path(mydir)
repodir = startdir
for x in range(0, repolevel - 1):
	repodir = os.path.dirname(repodir)
| |
def caterror(mycat):
	"""Abort (via err) because ``mycat`` is not a recognized category.

	Relies on the module-level ``catdir`` and ``repodir`` globals that are
	assigned during the scanlist setup below.
	"""
	err("%s is not an official category. Skipping QA checks in this directory.\nPlease ensure that you add %s to %s/profiles/categories\nif it is a new category." % (mycat, catdir, repodir))
| |
# Retrieve the local (per-package) USE flag descriptions from the main tree.
luselist={}
try:
	f = open(os.path.join(portdir, "profiles", "use.local.desc"))
	utilities.parse_use_local_desc(f, luselist)
	f.close()
except (IOError, OSError, ParseError):
	# The unused "e" binding was dropped; logging.exception() already
	# records the active exception and its traceback.
	logging.exception("Couldn't read from use.local.desc")
	sys.exit(1)

# Merge in any overlay-specific local USE flag descriptions.
if portdir_overlay != portdir:
	filename = os.path.join(portdir_overlay, "profiles", "use.local.desc")
	if os.path.exists(filename):
		try:
			f = open(filename)
			utilities.parse_use_local_desc(f, luselist)
			f.close()
		except (IOError, OSError, ParseError):
			logging.exception("Couldn't read from '%s'" % (filename,))
			sys.exit(1)
	del filename
| |
# Build the list of known global USE flags (first token of each use.desc
# line), plus USE_EXPAND values prefixed with their lowercased variable name.
uselist=[]
try:
	# Comprehension replaces the old in-place index-assignment loop.
	uselist = [x.split()[0]
		for x in portage.grabfile(portdir + "/profiles/use.desc")]
	for var in repoman_settings["USE_EXPAND"].split():
		vardescs = portage.grabfile(
			portdir + "/profiles/desc/" + var.lower() + ".desc")
		uselist.extend(var.lower() + "_" + x.split()[0] for x in vardescs)
except (IOError, OSError, ParseError):
	# Unused "e" binding dropped; logging.exception() captures the details.
	logging.exception("Couldn't read USE flags from use.desc")
	sys.exit(1)
| |
# retrieve a list of current licenses in portage
liclist = set(portage.listdir(os.path.join(portdir, "licenses")))
if not liclist:
	logging.fatal("Couldn't find licenses?")
	sys.exit(1)
if portdir_overlay != portdir:
	# Licenses shipped by the overlay are also acceptable.
	liclist.update(portage.listdir(os.path.join(portdir_overlay, "licenses")))

# retrieve list of official keywords (arch names) from profiles/arch.list
kwlist = set(portage.grabfile(os.path.join(portdir, "profiles", "arch.list")))
if not kwlist:
	logging.fatal("Couldn't read KEYWORDS from arch.list")
	sys.exit(1)

if portdir_overlay != portdir:
	# The overlay may define additional arches.
	kwlist.update(portage.grabfile(
		os.path.join(portdir_overlay, "profiles", "arch.list")))
| |
# Build the sorted list of "category/package" entries to scan, based on
# where in the repository tree repoman was started.
scanlist=[]
if repolevel==2:
	# Inside a category directory: every subdirectory is a package.
	catdir=reposplit[-1]
	if catdir not in repoman_settings.categories:
		caterror(catdir)
	for entry in os.listdir(startdir):
		if entry == "CVS" or entry.startswith("."):
			continue
		if os.path.isdir(startdir + "/" + entry):
			scanlist.append(catdir + "/" + entry)
elif repolevel==1:
	# At the repository root: walk every known category directory.
	for cat in repoman_settings.categories:
		catpath = startdir + "/" + cat
		if not os.path.isdir(catpath):
			continue
		for entry in os.listdir(catpath):
			if entry == "CVS" or entry.startswith("."):
				continue
			if os.path.isdir(catpath + "/" + entry):
				scanlist.append(cat + "/" + entry)
elif repolevel==3:
	# Inside a single package directory: scan just that package.
	catdir = reposplit[-2]
	if catdir not in repoman_settings.categories:
		caterror(catdir)
	scanlist.append(catdir + "/" + reposplit[-1])
scanlist.sort()
| |
# Parse profiles/profiles.desc into a mapping of
# arch -> [[profile_path, profile_type], ...].  Malformed lines are fatal.
profiles={}
valid_profile_types = frozenset(["dev", "exp", "stable"])
descfile=portdir+"/profiles/profiles.desc"
if os.path.exists(descfile):
	for i, x in enumerate(open(descfile, 'rb')):
		if x[0]=="#":
			continue
		# Expected format: "<arch> <profile_path> <profile_type>"
		arch=x.split()
		if len(arch) == 0:
			continue
		if len(arch)!=3:
			err("wrong format: \"" + bad(x.strip()) + "\" in " + \
				descfile + " line %d" % (i+1, ))
		elif arch[0] not in kwlist:
			err("invalid arch: \"" + bad(arch[0]) + "\" in " + \
				descfile + " line %d" % (i+1, ))
		elif arch[2] not in valid_profile_types:
			err("invalid profile type: \"" + bad(arch[2]) + "\" in " + \
				descfile + " line %d" % (i+1, ))
		# A listed profile that has no directory is only a warning.
		if not os.path.isdir(portdir+"/profiles/"+arch[1]):
			print "Invalid "+arch[2]+" profile ("+arch[1]+") for arch "+arch[0]
			continue
		if arch[0] in profiles:
			profiles[arch[0]]+= [[arch[1], arch[2]]]
		else:
			profiles[arch[0]] = [[arch[1], arch[2]]]

	# Warn about stable arches that have no profile listed at all.
	for x in repoman_settings.archlist():
		if x[0] == "~":
			continue
		if x not in profiles:
			print red("\""+x+"\" doesn't have a valid profile listed in profiles.desc.")
			print red("You need to either \"cvs update\" your profiles dir or follow this")
			print red("up with the "+x+" team.")
			print
else:
	print red("profiles.desc does not exist: "+descfile)
	print red("You need to do \"cvs update\" in profiles dir.")
	print
	sys.exit(1)
| |
| |
# Per-category counters and failure-detail lists, keyed by QA category.
stats={}
fails={}

# provided by the desktop-file-utils package
desktop_file_validate = find_binary("desktop-file-validate")
desktop_pattern = re.compile(r'.*\.desktop$')

for x in qacats:
	stats[x]=0
	fails[x]=[]
xmllint_capable = False
# Local cached copy of the metadata.xml DTD, stored in DISTDIR.
metadata_dtd = os.path.join(repoman_settings["DISTDIR"], 'metadata.dtd')
if options.mode == "manifest":
	pass
elif not find_binary('xmllint'):
	print red("!!! xmllint not found. Can't check metadata.xml.\n")
	if options.xml_parse or repolevel==3:
		print red("!!!")+" sorry, xmllint is needed. failing\n"
		sys.exit(1)
else:
	#hardcoded paths/urls suck. :-/
	must_fetch=1
	backup_exists=0
	try:
		# if it's been over a week since fetching (or the system clock is fscked), grab an updated copy of metadata.dtd
		# clock is fscked or it's been a week. time to grab a new one.
		ct = os.stat(metadata_dtd)[ST_CTIME]
		if abs(time.time() - ct) > (60*60*24*7):
			# don't trap the exception, we're watching for errno 2 (file not found), anything else is a bug.
			backup_exists=1
		else:
			must_fetch=0

	except (OSError,IOError), e:
		if e.errno != 2:
			print red("!!!")+" caught exception '%s' for %s/metadata.dtd, bailing" % (str(e), portage.CACHE_PATH)
			sys.exit(1)

	if must_fetch:
		print
		print green("***")+" the local copy of metadata.dtd needs to be refetched, doing that now"
		print
		val = 0
		try:
			try:
				# Remove the stale copy first; ENOENT just means it was
				# never fetched before.
				os.unlink(metadata_dtd)
			except OSError, e:
				if e.errno != errno.ENOENT:
					raise
				del e
			val=portage.fetch(['http://www.gentoo.org/dtd/metadata.dtd'],repoman_settings,fetchonly=0, \
				try_mirrors=0)

		except SystemExit, e:
			raise # Need to propagate this
		except Exception,e:
			print
			print red("!!!")+" attempting to fetch 'http://www.gentoo.org/dtd/metadata.dtd', caught"
			print red("!!!")+" exception '%s' though." % str(e)
			val=0
		if not val:
			print red("!!!")+" fetching new metadata.dtd failed, aborting"
			sys.exit(1)
	#this can be problematic if xmllint changes their output
	xmllint_capable=True
| |
# Bail out early if the working copy has VCS conflicts before committing.
if options.mode == 'commit' and vcs:
	utilities.detect_vcs_conflicts(options, vcs)

if options.mode == "manifest":
	pass
elif options.pretend:
	print green("\nRepoMan does a once-over of the neighborhood...")
else:
	print green("\nRepoMan scours the neighborhood...")

# Ebuilds that exist in the working copy but are not yet committed to cvs.
new_ebuilds = set()
if vcs == "cvs":
	mycvstree = cvstree.getentries("./", recursive=1)
	mynew = cvstree.findnew(mycvstree, recursive=1, basedir="./")
	new_ebuilds.update(x for x in mynew if x.endswith(".ebuild"))
	del mycvstree, mynew

have_masked = False
dofail = 0
# Per-arch caches shared across packages to avoid recomputation.
arch_caches={}
arch_xmatch_caches = {}
shared_xmatch_caches = {"cp-list":{}}

# Disable the "ebuild.notadded" check when not in commit mode, since
# running `svn list` and `svn status` calls in every package dir
# would be too expensive.
check_ebuild_notadded = not \
	(vcs == "svn" and repolevel < 3 and options.mode != "commit")

# Build a regex from thirdpartymirrors for the SRC_URI.mirror check.
thirdpartymirrors = portage.flatten(repoman_settings.thirdpartymirrors().values())
| |
# Main QA scan: one iteration per category/package directory in scanlist.
# Accumulates counters in stats[] and detail strings in fails[].
for x in scanlist:
	#ebuilds and digests added to cvs respectively.
	logging.info("checking package %s" % x)
	eadded=[]
	dadded=[]
	catdir,pkgdir=x.split("/")
	checkdir=repodir+"/"+x

	# Regenerate Manifests up front in manifest/commit/fix modes.
	if options.mode == "manifest" or \
		options.mode in ('commit', 'fix') and not options.pretend:
		repoman_settings["O"] = checkdir
		if not portage.digestgen([], repoman_settings, myportdb=portdb):
			print "Unable to generate manifest."
			dofail = 1
	if options.mode == "manifest":
		continue
	elif dofail:
		sys.exit(1)

	# Gather all ebuilds in this package dir and load their metadata.
	checkdirlist=os.listdir(checkdir)
	ebuildlist=[]
	pkgs = {}
	for y in checkdirlist:
		# no_exec files must not have any executable bit set.
		if y in no_exec and \
			stat.S_IMODE(os.stat(os.path.join(checkdir, y)).st_mode) & 0111:
			stats["file.executable"] += 1
			fails["file.executable"].append(os.path.join(checkdir, y))
		if y.endswith(".ebuild"):
			pf = y[:-7]
			ebuildlist.append(pf)
			cpv = "%s/%s" % (catdir, pf)
			try:
				myaux = dict(izip(allvars, portdb.aux_get(cpv, allvars)))
			except KeyError:
				stats["ebuild.syntax"] += 1
				fails["ebuild.syntax"].append(os.path.join(x, y))
				continue
			except IOError:
				stats["ebuild.output"] += 1
				fails["ebuild.output"].append(os.path.join(x, y))
				continue
			if not portage.eapi_is_supported(myaux["EAPI"]):
				stats["EAPI.unsupported"] += 1
				fails["EAPI.unsupported"].append(os.path.join(x, y))
				continue
			pkgs[pf] = Package(cpv=cpv, metadata=myaux,
				root_config=root_config)

	# Sort ebuilds in ascending order for the KEYWORDS.dropped check.
	pkgsplits = {}
	for i in xrange(len(ebuildlist)):
		ebuild_split = portage.pkgsplit(ebuildlist[i])
		pkgsplits[ebuild_split] = ebuildlist[i]
		ebuildlist[i] = ebuild_split
	ebuildlist.sort(portage.pkgcmp)
	for i in xrange(len(ebuildlist)):
		ebuildlist[i] = pkgsplits[ebuildlist[i]]
	del pkgsplits

	# Maps SLOT -> set of archs seen so far (for KEYWORDS.dropped).
	slot_keywords = {}

	if len(pkgs) != len(ebuildlist):
		# If we can't access all the metadata then it's totally unsafe to
		# commit since there's no way to generate a correct Manifest.
		# Do not try to do any more QA checks on this package since missing
		# metadata leads to false positives for several checks, and false
		# positives confuse users.
		can_force = False
		continue

	# Per-file checks: allowed characters and UTF-8 validity.
	for y in checkdirlist:
		for c in y.strip(os.path.sep):
			if c not in allowed_filename_chars_set:
				stats["file.name"] += 1
				fails["file.name"].append("%s/%s: char '%s'" % (checkdir, y, c))
				break

		if not (y in ("ChangeLog", "metadata.xml") or y.endswith(".ebuild")):
			continue
		try:
			line = 1
			for l in codecs.open(checkdir+"/"+y, "r", "utf8"):
				line +=1
		except UnicodeDecodeError, ue:
			stats["file.UTF8"] += 1
			# Count newlines in the successfully-decoded prefix to report
			# the approximate line of the bad byte sequence.
			s = ue.object[:ue.start]
			l2 = s.count("\n")
			line += l2
			if l2 != 0:
				s = s[s.rfind("\n") + 1:]
			fails["file.UTF8"].append("%s/%s: line %i, just after: '%s'" % (checkdir, y, line, s))

	has_filesdir = True
	if not os.path.isdir(os.path.join(checkdir, "files")):
		has_filesdir = False

	# Collect the list of ebuilds known to the VCS (eadded) so that
	# ebuild.notadded can be reported for unknown ones.
	if vcs and check_ebuild_notadded:
		try:
			if vcs == "cvs":
				myf=open(checkdir+"/CVS/Entries","r")
			if vcs == "svn":
				myf = os.popen("svn list " + checkdir)
			myl=myf.readlines()
			myf.close()
			for l in myl:
				if vcs == "cvs":
					if l[0]!="/":
						continue
					splitl=l[1:].split("/")
					if not len(splitl):
						continue
					if splitl[0][-7:]==".ebuild":
						eadded.append(splitl[0][:-7])
				if vcs == "svn":
					l = l.rstrip();
					if l[-1:] == "/":
						continue
					if l[-7:] == ".ebuild":
						eadded.append(os.path.basename(l[:-7]))
			if vcs == "svn":
				# `svn list` misses scheduled-for-add files; pick them up
				# from `svn status` ("A" lines).
				myf = os.popen("svn status " + checkdir)
				myl=myf.readlines()
				myf.close()
				for l in myl:
					if l[0] == "A":
						l = l.rstrip().split(' ')[-1]
						if l[-7:] == ".ebuild":
							eadded.append(os.path.basename(l[:-7]))
		except IOError:
			if options.mode == 'commit' and vcs == "cvs":
				stats["CVS/Entries.IO_error"] += 1
				fails["CVS/Entries.IO_error"].append(checkdir+"/CVS/Entries")
			if options.mode == 'commit' and vcs == "svn":
				stats["svn.IO_error"] += 1
				fails["svn.IO_error"].append(checkdir+"svn info")
			continue

	# Same scan for files/ to find stale digest-* files (dadded).
	if vcs and has_filesdir:
		try:
			if vcs == "cvs":
				myf=open(checkdir+"/files/CVS/Entries","r")
			if vcs == "svn":
				myf=os.popen("svn list "+os.path.normpath(checkdir+"/files"))
			myl=myf.readlines()
			myf.close()
			for l in myl:
				if vcs == "cvs":
					if l[0]!="/":
						continue
					splitl=l[1:].split("/")
					if not len(splitl):
						continue
					if splitl[0][:7]=="digest-":
						dadded.append(splitl[0][7:])
				if vcs == "svn":
					l = l.rstrip();
					if l[-1:] == "/":
						continue
					if l[:7] == "digest-":
						dadded.append(l[7:])
			if vcs == "svn":
				myf=os.popen("svn status "+os.path.normpath(checkdir+"/files"))
				myl=myf.readlines()
				myf.close()
				for l in myl:
					if l[0] == "A":
						l = l.rstrip().split(' ')[-1]
						if l[:7] == "digest-":
							dadded.append(l[7:])
		except IOError:
			if options.mode == 'commit' and vcs == "cvs":
				stats["CVS/Entries.IO_error"] += 1
				fails["CVS/Entries.IO_error"].append(checkdir+"/files/CVS/Entries")
			if options.mode == 'commit' and vcs == "svn":
				stats["svn.IO_error"] += 1
				fails["svn.IO_error"].append(checkdir+"/files svn info")
			continue

	# Cross-check Manifest DIST entries against the union of all SRC_URI
	# files to find unused digest entries.
	mf = Manifest(checkdir, repoman_settings["DISTDIR"])
	mydigests=mf.getTypeDigests("DIST")

	fetchlist_dict = portage.FetchlistDict(checkdir, repoman_settings, portdb)
	myfiles_all = []
	src_uri_error = False
	for mykey in fetchlist_dict:
		try:
			myfiles_all.extend(fetchlist_dict[mykey])
		except portage.exception.InvalidDependString, e:
			src_uri_error = True
			try:
				portdb.aux_get(mykey, ["SRC_URI"])
			except KeyError:
				# This will be reported as an "ebuild.syntax" error.
				pass
			else:
				stats["SRC_URI.syntax"] = stats["SRC_URI.syntax"] + 1
				fails["SRC_URI.syntax"].append(
					"%s.ebuild SRC_URI: %s" % (mykey, e))
	del fetchlist_dict
	if not src_uri_error:
		# This test can produce false positives if SRC_URI could not
		# be parsed for one or more ebuilds. There's no point in
		# producing a false error here since the root cause will
		# produce a valid error elsewhere, such as "SRC_URI.syntax"
		# or "ebuild.syntax".
		myfiles_all = set(myfiles_all)
		for entry in mydigests:
			if entry not in myfiles_all:
				stats["digestentry.unused"] += 1
				fails["digestentry.unused"].append(checkdir+"::"+entry)
	del myfiles_all

	if os.path.exists(checkdir+"/files"):
		filesdirlist=os.listdir(checkdir+"/files")

		# recurse through files directory
		# use filesdirlist as a stack, appending directories as needed so people can't hide > 20k files in a subdirectory.
		while filesdirlist:
			y = filesdirlist.pop(0)
			relative_path = os.path.join(x, "files", y)
			full_path = os.path.join(repodir, relative_path)
			try:
				mystat = os.stat(full_path)
			except OSError, oe:
				if oe.errno == 2:
					# don't worry about it.  it likely was removed via fix above.
					continue
				else:
					raise oe
			if S_ISDIR(mystat.st_mode):
				# !!! VCS "portability" alert!  Need some function isVcsDir() or alike !!!
				if y == "CVS" or y == ".svn":
					continue
				for z in os.listdir(checkdir+"/files/"+y):
					if z == "CVS" or z == ".svn":
						continue
					filesdirlist.append(y+"/"+z)
			# current policy is no files over 20k, this is the check.
			elif mystat.st_size > 20480:
				stats["file.size"] += 1
				fails["file.size"].append("("+ str(mystat.st_size/1024) + "K) "+x+"/files/"+y)

			for c in os.path.basename(y.rstrip(os.path.sep)):
				if c not in allowed_filename_chars_set:
					stats["file.name"] += 1
					fails["file.name"].append("%s/files/%s: char '%s'" % (checkdir, y, c))
					break

			# Validate .desktop files with desktop-file-validate if available.
			if desktop_file_validate and desktop_pattern.match(y):
				status, cmd_output = commands.getstatusoutput(
					"'%s' '%s'" % (desktop_file_validate, full_path))
				if os.WIFEXITED(status) and os.WEXITSTATUS(status) != os.EX_OK:
					# Note: in the future we may want to grab the
					# warnings in addition to the errors. We're
					# just doing errors now since we don't want
					# to generate too much noise at first.
					error_re = re.compile(r'.*\s*error:\s*(.*)')
					for line in cmd_output.splitlines():
						error_match = error_re.match(line)
						if error_match is None:
							continue
						stats["desktop.invalid"] += 1
						fails["desktop.invalid"].append(
							relative_path + ': %s' % error_match.group(1))

	del mydigests

	if "ChangeLog" not in checkdirlist:
		stats["changelog.missing"]+=1
		fails["changelog.missing"].append(x+"/ChangeLog")

	#metadata.xml file check
	muselist = []

	if "metadata.xml" not in checkdirlist:
		stats["metadata.missing"]+=1
		fails["metadata.missing"].append(x+"/metadata.xml")
	#metadata.xml parse check
	else:
		#Only carry out if in package directory or check forced
		if xmllint_capable:
			# xmllint can produce garbage output even on success, so only dump
			# the output when it fails.
			st, out = commands.getstatusoutput(
				"xmllint --nonet --noout --dtdvalid '%s' '%s'" % \
				(metadata_dtd, os.path.join(checkdir, "metadata.xml")))
			if st != os.EX_OK:
				print red("!!!") + " metadata.xml is invalid:"
				for z in out.splitlines():
					print red("!!! ")+z
				stats["metadata.bad"]+=1
				fails["metadata.bad"].append(x+"/metadata.xml")

		#load USE flags from metadata.xml
		try:
			f = open(os.path.join(checkdir, "metadata.xml"))
			utilities.parse_metadata_use(f, muselist)
			f.close()
		except (EnvironmentError, ParseError), e:
			logging.exception("Couldn't read from metadata.xml")
			sys.exit(1)

	# Cleared below as soon as one ebuild has a stable, unmasked keyword.
	allmasked = True

	# Per-ebuild checks.
	for y in ebuildlist:
		relative_path = os.path.join(x, y + ".ebuild")
		full_path = os.path.join(repodir, relative_path)
		if stat.S_IMODE(os.stat(full_path).st_mode) & 0111:
			stats["file.executable"] += 1
			fails["file.executable"].append(x+"/"+y+".ebuild")
		if vcs and check_ebuild_notadded and y not in eadded:
			#ebuild not added to vcs
			stats["ebuild.notadded"]=stats["ebuild.notadded"]+1
			fails["ebuild.notadded"].append(x+"/"+y+".ebuild")
		if y in dadded:
			stats["ebuild.disjointed"]=stats["ebuild.disjointed"]+1
			fails["ebuild.disjointed"].append(x+"/"+y+".ebuild")
		myesplit=portage.pkgsplit(y)
		if myesplit is None or myesplit[0] != x.split("/")[-1]:
			stats["ebuild.invalidname"]=stats["ebuild.invalidname"]+1
			fails["ebuild.invalidname"].append(x+"/"+y+".ebuild")
			continue
		elif myesplit[0]!=pkgdir:
			print pkgdir,myesplit[0]
			stats["ebuild.namenomatch"]=stats["ebuild.namenomatch"]+1
			fails["ebuild.namenomatch"].append(x+"/"+y+".ebuild")
			continue

		pkg = pkgs[y]
		myaux = pkg.metadata
		eapi = myaux["EAPI"]
		inherited = pkg.inherited

		if not src_uri_error:
			# Check that URIs don't reference a server from thirdpartymirrors.
			for uri in portage.flatten(portage.dep.use_reduce(
				portage.dep.paren_reduce(myaux["SRC_URI"]), matchall=True)):
				contains_mirror = False
				for mirror in thirdpartymirrors:
					if uri.startswith(mirror):
						contains_mirror = True
						break
				if not contains_mirror:
					continue

				stats["SRC_URI.mirror"] += 1
				fails["SRC_URI.mirror"].append(
					"%s: '%s' found in thirdpartymirrors" % \
					(relative_path, mirror))

		# Test for negative logic and bad words in the RESTRICT var.
		#for x in myaux[allvars.index("RESTRICT")].split():
		#	if x.startswith("no"):
		#		print "Bad RESTRICT value: %s" % x
		try:
			myaux["PROVIDE"] = portage.dep.use_reduce(
				portage.dep.paren_reduce(myaux["PROVIDE"]), matchall=1)
		except portage.exception.InvalidDependString, e:
			stats["PROVIDE.syntax"] = stats["PROVIDE.syntax"] + 1
			fails["PROVIDE.syntax"].append(mykey+".ebuild PROVIDE: "+str(e))
			del e
			continue
		myaux["PROVIDE"] = " ".join(portage.flatten(myaux["PROVIDE"]))
		for myprovide in myaux["PROVIDE"].split():
			prov_cp = portage.dep_getkey(myprovide)
			if prov_cp != myprovide:
				stats["virtual.versioned"]+=1
				fails["virtual.versioned"].append(x+"/"+y+".ebuild: "+myprovide)
			prov_pkg = portage.dep_getkey(
				portage.best(portdb.xmatch("match-all", prov_cp)))
			if prov_cp == prov_pkg:
				stats["virtual.exists"]+=1
				fails["virtual.exists"].append(x+"/"+y+".ebuild: "+prov_cp)

		for pos, missing_var in enumerate(missingvars):
			if not myaux.get(missing_var):
				# virtuals are exempt from HOMEPAGE/LICENSE requirements.
				if catdir == "virtual" and \
					missing_var in ("HOMEPAGE", "LICENSE"):
					continue
				myqakey=missingvars[pos]+".missing"
				stats[myqakey]=stats[myqakey]+1
				fails[myqakey].append(x+"/"+y+".ebuild")

		# 14 is the length of DESCRIPTION=""
		if len(myaux['DESCRIPTION']) > max_desc_len:
			stats['DESCRIPTION.toolong'] += 1
			fails['DESCRIPTION.toolong'].append(
				"%s: DESCRIPTION is %d characters (max %d)" % \
				(relative_path, len(myaux['DESCRIPTION']), max_desc_len))

		keywords = myaux["KEYWORDS"].split()
		stable_keywords = []
		for keyword in keywords:
			if not keyword.startswith("~") and \
				not keyword.startswith("-"):
				stable_keywords.append(keyword)
		if stable_keywords:
			ebuild_path = y + ".ebuild"
			if repolevel < 3:
				ebuild_path = os.path.join(pkgdir, ebuild_path)
				if repolevel < 2:
					ebuild_path = os.path.join(catdir, ebuild_path)
			ebuild_path = os.path.join(".", ebuild_path)
			if ebuild_path in new_ebuilds:
				stable_keywords.sort()
				stats["KEYWORDS.stable"] += 1
				fails["KEYWORDS.stable"].append(
					x + "/" + y + ".ebuild added with stable keywords: %s" % \
					" ".join(stable_keywords))

		ebuild_archs = set(kw.lstrip("~") for kw in keywords \
			if not kw.startswith("-"))

		# Flag archs that were keyworded in an older ebuild of the same
		# SLOT but are missing here (KEYWORDS.dropped).
		previous_keywords = slot_keywords.get(myaux["SLOT"])
		if previous_keywords is None:
			slot_keywords[myaux["SLOT"]] = set()
		else:
			dropped_keywords = previous_keywords.difference(ebuild_archs)
			if dropped_keywords:
				stats["KEYWORDS.dropped"] += 1
				fails["KEYWORDS.dropped"].append(
					relative_path + ": %s" % \
					" ".join(sorted(dropped_keywords)))

		slot_keywords[myaux["SLOT"]].update(ebuild_archs)

		# KEYWORDS="-*" is a stupid replacement for package.mask and screws general KEYWORDS semantics
		if "-*" in keywords:
			haskeyword = False
			for kw in keywords:
				if kw[0] == "~":
					kw = kw[1:]
				if kw in kwlist:
					haskeyword = True
			if not haskeyword:
				stats["KEYWORDS.stupid"] += 1
				fails["KEYWORDS.stupid"].append(x+"/"+y+".ebuild")

		"""
		Ebuilds that inherit a "Live" eclass (darcs,subversion,git,cvs,etc..) should
		not be allowed to be marked stable
		"""
		if live_eclasses.intersection(pkg.inherited):
			bad_stable_keywords = []
			for keyword in keywords:
				if not keyword.startswith("~") and \
					not keyword.startswith("-"):
					bad_stable_keywords.append(keyword)
			del keyword
			if bad_stable_keywords:
				stats["LIVEVCS.stable"] += 1
				fails["LIVEVCS.stable"].append(
					x + "/" + y + ".ebuild with stable keywords:%s " % \
					bad_stable_keywords)
			del bad_stable_keywords

		# Build the list of [keyword, arch, ACCEPT_KEYWORDS groups] that the
		# dependency checks below will iterate over.
		if options.ignore_arches:
			arches = [[repoman_settings["ARCH"], repoman_settings["ARCH"],
				repoman_settings["ACCEPT_KEYWORDS"].split()]]
		else:
			arches=[]
			for keyword in myaux["KEYWORDS"].split():
				if (keyword[0]=="-"):
					continue
				elif (keyword[0]=="~"):
					arches.append([keyword, keyword[1:], [keyword[1:], keyword]])
				else:
					arches.append([keyword, keyword, [keyword]])
					allmasked = False

		baddepsyntax = False
		badlicsyntax = False
		badprovsyntax = False
		catpkg = catdir+"/"+y
		myiuse = set(repoman_settings.archlist())
		for myflag in myaux["IUSE"].split():
			if myflag.startswith("+"):
				myflag = myflag[1:]
			myiuse.add(myflag)

		inherited_java_eclass = "java-pkg-2" in inherited or \
			"java-pkg-opt-2" in inherited
		operator_tokens = set(["||", "(", ")"])
		type_list, badsyntax = [], []
		# Syntax-check each dependency-like variable; badsyntax collects
		# messages, type_list records which variable each belongs to.
		for mytype in ("DEPEND", "RDEPEND", "PDEPEND", "LICENSE", "PROVIDE"):
			mydepstr = myaux[mytype]

			if mydepstr.find(" ?") != -1:
				badsyntax.append("'?' preceded by space")

			try:
				# Missing closing parenthesis will result in a ValueError
				mydeplist = portage.dep.paren_reduce(mydepstr)
				# Missing opening parenthesis will result in a final "" element
				if "" in mydeplist or "(" in mydeplist:
					raise ValueError
			except ValueError:
				badsyntax.append("parenthesis mismatch")
				mydeplist = []
			except portage.exception.InvalidDependString, e:
				badsyntax.append(str(e))
				del e
				mydeplist = []

			try:
				portage.dep.use_reduce(mydeplist, matchall=1)
			except portage.exception.InvalidDependString, e:
				badsyntax.append(str(e))

			for token in operator_tokens:
				if mydepstr.startswith(token+" "):
					myteststr = mydepstr[len(token):]
				else:
					myteststr = mydepstr
				if myteststr.endswith(" "+token):
					myteststr = myteststr[:-len(token)]
				while myteststr.find(" "+token+" ") != -1:
					myteststr = " ".join(myteststr.split(" "+token+" ", 1))
				if myteststr.find(token) != -1:
					badsyntax.append("'%s' not separated by space" % (token))

			if mytype in ("DEPEND", "RDEPEND", "PDEPEND"):
				for token in mydepstr.split():
					if token in operator_tokens or \
						token.endswith("?"):
						continue
					try:
						atom = portage.dep.Atom(token)
					except portage.exception.InvalidAtom:
						badsyntax.append("'%s' not a valid atom" % token)
					else:
						is_blocker = atom.blocker
						if atom.blocker:
							atom = portage.dep.Atom(atom[1:])

						if mytype == "DEPEND" and \
							not is_blocker and \
							not inherited_java_eclass and \
							portage.dep_getkey(atom) == "virtual/jdk":
							stats['java.eclassesnotused'] += 1
							fails['java.eclassesnotused'].append(relative_path)
						elif mytype == "RDEPEND":
							if not is_blocker and \
								portage.dep_getkey(atom) in suspect_rdepend:
								stats['RDEPEND.suspect'] += 1
								fails['RDEPEND.suspect'].append(
									relative_path + ": '%s'" % atom)
						if eapi == "0":
							if portage.dep.dep_getslot(atom):
								stats['EAPI.incompatible'] += 1
								fails['EAPI.incompatible'].append(
									(relative_path + ": %s slot dependency" + \
									" not supported with EAPI='%s':" + \
									" '%s'") % (mytype, eapi, atom))
						if atom.use and eapi in ("0", "1"):
							stats['EAPI.incompatible'] += 1
							fails['EAPI.incompatible'].append(
								(relative_path + ": %s use dependency" + \
								" not supported with EAPI='%s':" + \
								" '%s'") % (mytype, eapi, atom))

			type_list.extend([mytype] * (len(badsyntax) - len(type_list)))

		for m,b in zip(type_list, badsyntax):
			stats[m+".syntax"] += 1
			fails[m+".syntax"].append(catpkg+".ebuild "+m+": "+b)

		badlicsyntax = len(filter(lambda x:x=="LICENSE", type_list))
		badprovsyntax = len(filter(lambda x:x=="PROVIDE", type_list))
		baddepsyntax = len(type_list) != badlicsyntax + badprovsyntax
		badlicsyntax = badlicsyntax > 0
		badprovsyntax = badprovsyntax > 0

		# uselist checks - global
		myuse = []
		default_use = []
		for myflag in myaux["IUSE"].split():
			flag_name = myflag.lstrip("+-")
			if myflag != flag_name:
				default_use.append(myflag)
			if flag_name not in uselist:
				myuse.append(flag_name)

		# uselist checks - metadata
		for mypos in range(len(myuse)-1,-1,-1):
			if myuse[mypos] and (myuse[mypos] in muselist):
				del myuse[mypos]

		# uselist checks - local
		mykey = portage.dep_getkey(catpkg)
		if mykey in luselist:
			for mypos in range(len(myuse)-1,-1,-1):
				if myuse[mypos] and (myuse[mypos] in luselist[mykey]):
					del myuse[mypos]

		if default_use and eapi == "0":
			for myflag in default_use:
				stats['EAPI.incompatible'] += 1
				fails['EAPI.incompatible'].append(
					(relative_path + ": IUSE defaults" + \
					" not supported with EAPI='%s':" + \
					" '%s'") % (eapi, myflag))

		for mypos in range(len(myuse)):
			stats["IUSE.invalid"]=stats["IUSE.invalid"]+1
			fails["IUSE.invalid"].append(x+"/"+y+".ebuild: %s" % myuse[mypos])

		# license checks
		if not badlicsyntax:
			myuse = myaux["LICENSE"]
			# Parse the LICENSE variable, remove USE conditions and
			# flatten it.
			myuse=portage.dep.use_reduce(portage.dep.paren_reduce(myuse), matchall=1)
			myuse=portage.flatten(myuse)
			# Check each entry to ensure that it exists in PORTDIR's
			# license directory.
			for mypos in range(0,len(myuse)):
				# Need to check for "||" manually as no portage
				# function will remove it without removing values.
				if myuse[mypos] not in liclist and myuse[mypos] != "||":
					stats["LICENSE.invalid"]=stats["LICENSE.invalid"]+1
					fails["LICENSE.invalid"].append(x+"/"+y+".ebuild: %s" % myuse[mypos])

		#keyword checks
		myuse = myaux["KEYWORDS"].split()
		for mykey in myuse:
			myskey=mykey[:]
			if myskey[0]=="-":
				myskey=myskey[1:]
			if myskey[0]=="~":
				myskey=myskey[1:]
			if mykey!="-*":
				if myskey not in kwlist:
					stats["KEYWORDS.invalid"] += 1
					fails["KEYWORDS.invalid"].append(x+"/"+y+".ebuild: %s" % mykey)
				elif myskey not in profiles:
					stats["KEYWORDS.invalid"] += 1
					fails["KEYWORDS.invalid"].append(x+"/"+y+".ebuild: %s (profile invalid)" % mykey)

		#restrict checks
		myrestrict = None
		try:
			myrestrict = portage.dep.use_reduce(
				portage.dep.paren_reduce(myaux["RESTRICT"]), matchall=1)
		except portage.exception.InvalidDependString, e:
			stats["RESTRICT.syntax"] = stats["RESTRICT.syntax"] + 1
			fails["RESTRICT.syntax"].append(mykey+".ebuild RESTRICT: "+str(e))
			del e
		if myrestrict:
			myrestrict = set(portage.flatten(myrestrict))
			mybadrestrict = myrestrict.difference(valid_restrict)
			if mybadrestrict:
				stats["RESTRICT.invalid"] += len(mybadrestrict)
				for mybad in mybadrestrict:
					fails["RESTRICT.invalid"].append(x+"/"+y+".ebuild: %s" % mybad)
		# Syntax Checks
		relative_path = os.path.join(x, y + ".ebuild")
		full_path = os.path.join(repodir, relative_path)
		f = open(full_path, 'rb')
		try:
			for check_name, e in run_checks(f, pkg):
				stats[check_name] += 1
				fails[check_name].append(relative_path + ': %s' % e)
		finally:
			f.close()
		del f

		if options.force:
			# The dep_check() calls are the most expensive QA test. If --force
			# is enabled, there's no point in wasting time on these since the
			# user is intent on forcing the commit anyway.
			continue

		# Verify that dependencies are satisfiable under each keyworded
		# arch's stable/dev profiles.
		for keyword,arch,groups in arches:

			if arch not in profiles:
				# A missing profile will create an error further down
				# during the KEYWORDS verification.
				continue

			for prof in profiles[arch]:

				if prof[1] not in ("stable", "dev"):
					continue

				profdir = portdir+"/profiles/"+prof[0]

				# Profile configs are expensive to construct; cache them
				# per profile path.
				if prof[0] in arch_caches:
					dep_settings = arch_caches[prof[0]]
				else:
					dep_settings = portage.config(
						config_profile_path=profdir,
						config_incrementals=portage.const.INCREMENTALS,
						local_config=False)
					if options.without_mask:
						dep_settings.pmaskdict.clear()
					arch_caches[prof[0]] = dep_settings
					while True:
						try:
							# Protect ACCEPT_KEYWORDS from config.regenerate()
							# (just in case)
							dep_settings.incrementals.remove("ACCEPT_KEYWORDS")
						except ValueError:
							break

				xmatch_cache_key = (prof[0], tuple(groups))
				xcache = arch_xmatch_caches.get(xmatch_cache_key)
				if xcache is None:
					portdb.melt()
					portdb.freeze()
					xcache = portdb.xcache
					xcache.update(shared_xmatch_caches)
					arch_xmatch_caches[xmatch_cache_key] = xcache

				trees["/"]["porttree"].settings = dep_settings
				portdb.mysettings = dep_settings
				portdb.xcache = xcache
				# for package.use.mask support inside dep_check
				dep_settings.setcpv("/".join((catdir, y)))
				dep_settings["ACCEPT_KEYWORDS"] = " ".join(groups)
				# just in case, prevent config.reset() from nuking these.
				dep_settings.backup_changes("ACCEPT_KEYWORDS")

				for myprovide in myaux["PROVIDE"].split():
					prov_cp = portage.dep_getkey(myprovide)
					if prov_cp not in dep_settings.getvirtuals():
						stats["virtual.unavailable"]+=1
						fails["virtual.unavailable"].append(x+"/"+y+".ebuild: "+keyword+"("+prof[0]+") "+prov_cp)

				if not baddepsyntax:
					ismasked = os.path.join(catdir, y) not in \
						portdb.xmatch("list-visible", x)
					if ismasked:
						have_masked = True
						if options.ignore_masked:
							continue
						#we are testing deps for a masked package; give it some lee-way
						suffix="masked"
						matchmode = "minimum-all"
					else:
						suffix=""
						matchmode = "minimum-visible"

					if prof[1] == "dev":
						suffix=suffix+"indev"

					for mytype,mypos in [["DEPEND",len(missingvars)],["RDEPEND",len(missingvars)+1],["PDEPEND",len(missingvars)+2]]:

						mykey=mytype+".bad"+suffix
						myvalue = myaux[mytype]
						if not myvalue:
							continue
						try:
							mydep = portage.dep_check(myvalue, portdb,
								dep_settings, use="all", mode=matchmode,
								trees=trees)
						except KeyError, e:
							stats[mykey]=stats[mykey]+1
							fails[mykey].append(x+"/"+y+".ebuild: "+keyword+"("+prof[0]+") "+repr(e[0]))
							continue

						if mydep[0]==1:
							if mydep[1]!=[]:
								#we have some unsolvable deps
								#remove ! deps, which always show up as unsatisfiable
								d=0
								while d<len(mydep[1]):
									if mydep[1][d][0]=="!":
										del mydep[1][d]
									else:
										d += 1
								#if we emptied out our list, continue:
								if not mydep[1]:
									continue
								stats[mykey]=stats[mykey]+1
								fails[mykey].append(x+"/"+y+".ebuild: "+keyword+"("+prof[0]+") "+repr(mydep[1]))
						else:
							stats[mykey]=stats[mykey]+1
							fails[mykey].append(x+"/"+y+".ebuild: "+keyword+"("+prof[0]+") "+repr(mydep[1]))

	# Check for 'all unstable' or 'all masked' -- ACCEPT_KEYWORDS is stripped
	# XXX -- Needs to be implemented in dep code. Can't determine ~arch nicely.
	#if not portage.portdb.xmatch("bestmatch-visible",x):
	#	stats["ebuild.nostable"]+=1
	#	fails["ebuild.nostable"].append(x)
	if allmasked and repolevel == 3:
		stats["ebuild.allmasked"]+=1
		fails["ebuild.allmasked"].append(x)

if options.mode == "manifest":
	sys.exit(dofail)
| |
#Pickle and save results for instant reuse in last and lfull
if os.access(portage.const.CACHE_PATH, os.W_OK):
	for myobj, fname in (stats, "repo.stats"), (fails, "repo.fails"):
		fpath = os.path.join(portage.const.CACHE_PATH, fname)
		savef = open(fpath, 'w')
		pickle.dump(myobj, savef)
		savef.close()
		portage.apply_secpass_permissions(fpath, gid=portage.portage_gid,
			mode=0664)

# TODO(antarus) This function and last () look familiar ;)

#dofail will be set to 1 if we have failed in at least one non-warning category
dofail=0
#dowarn will be set to 1 if we tripped any warnings
dowarn=0
#dofull will be set if we should print a "repoman full" informational message
dofull = options.mode not in ("full", "lfull")

# Scan the accumulated stats: any non-zero category is at least a warning,
# and a hard failure unless the category is listed in qawarnings.
for x in qacats:
	if not stats[x]:
		continue
	dowarn = 1
	if x not in qawarnings:
		dofail = 1

if dofail or \
	(dowarn and not (options.quiet or options.mode == "scan")):
	dofull = 0

# Save QA output so that it can be conveniently displayed
# in $EDITOR while the user creates a commit message.
# Otherwise, the user would not be able to see this output
# once the editor has taken over the screen.
qa_output = StringIO.StringIO()
style_file = ConsoleStyleFile(sys.stdout)
if options.mode == 'commit' and \
	(not commitmessage or not commitmessage.strip()):
	style_file.write_listener = qa_output
console_writer = StyleWriter(file=style_file, maxcol=9999)
console_writer.style_listener = style_file.new_styles

f = formatter.AbstractFormatter(console_writer)

utilities.format_qa_output(f, stats, fails, dofull, dofail, options, qawarnings)

style_file.flush()
del console_writer, f, style_file
qa_output = qa_output.getvalue()
qa_output = qa_output.splitlines(True)
| |
def grouplist(mylist, seperator="/"):
	"""Group a list of strings by their first seperator-delimited component.

	Takes a list of elements; groups them into same initial element
	categories and returns a dict of {base: [sublist]}.
	From: ["blah/foo", "spork/spatula", "blah/weee/splat"]
	To:   {"blah": ["foo", "weee/splat"], "spork": ["spatula"]}
	"""
	grouped = {}
	for item in mylist:
		parts = item.split(seperator)
		# A leading "." component (as in "./cat/pkg") is dropped.
		if parts[0] == ".":
			del parts[0]
		grouped.setdefault(parts[0], []).append(seperator.join(parts[1:]))
	return grouped
| |
if have_masked and not (options.without_mask or options.ignore_masked):
	print bold("Note: use --without-mask to check " + \
		"KEYWORDS on dependencies of masked packages")

# Report the QA verdict.  Outside commit mode a hard failure just exits;
# in commit mode --force (when can_force) may override warnings.
if options.mode != 'commit':
	if dofull:
		print bold("Note: type \"repoman full\" for a complete listing.")
	if dowarn and not dofail:
		print green("RepoMan sez:"),"\"You're only giving me a partial QA payment?\n	I'll take it this time, but I'm not happy.\""
	elif not dofail:
		print green("RepoMan sez:"),"\"If everyone were like you, I'd be out of business!\""
	elif dofail:
		print turquoise("Please fix these important QA issues first.")
		print green("RepoMan sez:"),"\"Make your QA payment on time and you'll never see the likes of me.\"\n"
		sys.exit(1)
else:
	if dofail and can_force and options.force and not options.pretend:
		print green("RepoMan sez:") + \
			" \"You want to commit even with these QA issues?\n" + \
			" I'll take it this time, but I'm not happy.\"\n"
	elif dofail:
		if options.force and not can_force:
			print bad("The --force option has been disabled due to extraordinary issues.")
		print turquoise("Please fix these important QA issues first.")
		print green("RepoMan sez:"),"\"Make your QA payment on time and you'll never see the likes of me.\"\n"
		sys.exit(1)

if options.pretend:
	print green("RepoMan sez:"), "\"So, you want to play it safe. Good call.\"\n"
| |
# Find files present in the working copy but unknown to the VCS; Manifests
# and digests with a matching ebuild are auto-added, anything else aborts.
if vcs == "cvs":
	try:
		myvcstree=portage.cvstree.getentries("./",recursive=1)
		myunadded=portage.cvstree.findunadded(myvcstree,recursive=1,basedir="./")
	except SystemExit, e:
		raise  # TODO propogate this
	except:
		err("Error retrieving CVS tree; exiting.")

if vcs == "svn":
	try:
		# "?" = not under version control, "I" = ignored.
		svnstatus=os.popen("svn status --no-ignore").readlines()
		myunadded = [ "./"+elem.rstrip().split()[1] for elem in svnstatus if elem.startswith("?") or elem.startswith("I") ]
	except SystemExit, e:
		raise  # TODO propogate this
	except:
		err("Error retrieving SVN info; exiting.")
myautoadd=[]
if myunadded:
	# Iterate backwards so entries can be deleted while iterating.
	for x in range(len(myunadded)-1,-1,-1):
		xs=myunadded[x].split("/")
		if xs[-1]=="files":
			print "!!! files dir is not added! Please correct this."
			sys.exit(-1)
		elif xs[-1]=="Manifest":
			# It's a manifest... auto add
			myautoadd+=[myunadded[x]]
			del myunadded[x]
		elif len(xs[-1])>=7:
			if xs[-1][:7]=="digest-":
				del xs[-2]
				myeb="/".join(xs[:-1]+[xs[-1][7:]])+".ebuild"
				if os.path.exists(myeb):
					# Ebuild exists for digest... So autoadd it.
					myautoadd+=[myunadded[x]]
					del myunadded[x]

if myautoadd:
	print ">>> Auto-Adding missing digests..."
	if options.pretend:
		if vcs == "cvs":
			print "(cvs add "+" ".join(myautoadd)+")"
		if vcs == "svn":
			print "(svn add "+" ".join(myautoadd)+")"
		retval=0
	else:
		if vcs == "cvs":
			retval=os.system("cvs add "+" ".join(myautoadd))
		if vcs == "svn":
			retval=os.system("svn add "+" ".join(myautoadd))
	if retval:
		print "!!! Exiting on vcs (shell) error code:",retval
		sys.exit(retval)

if myunadded:
	print red("!!! The following files are in your local tree but are not added to the master")
	print red("!!! tree. Please remove them from the local tree or add them to the master tree.")
	for x in myunadded:
		print "   ",x
	print
	print
	sys.exit(1)
| |
| if vcs == "cvs": |
| mycvstree=portage.cvstree.getentries("./",recursive=1) |
| mychanged=portage.cvstree.findchanged(mycvstree,recursive=1,basedir="./") |
| mynew=portage.cvstree.findnew(mycvstree,recursive=1,basedir="./") |
| myremoved=portage.cvstree.findremoved(mycvstree,recursive=1,basedir="./") |
| bin_blob_pattern = re.compile("^-kb$") |
| bin_blobs = set(portage.cvstree.findoption(mycvstree, bin_blob_pattern, |
| recursive=1, basedir="./")) |
| |
| |
# SVN: parse "svn status" output; the status letter is the first column and
# the path starts at column eight (hence elem.rstrip()[7:]).
if vcs == "svn":
	svnstatus = os.popen("svn status").readlines()
	mychanged = [ elem.rstrip()[7:] for elem in svnstatus if elem.startswith("M") ]
	# Modified Manifests are regenerated and committed separately below,
	# so drop them from the "changed" list here.
	for manifest in [ file for file in mychanged if '/Manifest' in file ]:
		mychanged.remove(manifest)
	mynew = [ elem.rstrip()[7:] for elem in svnstatus if elem.startswith("A") ]
	myremoved = [ elem.rstrip()[7:] for elem in svnstatus if elem.startswith("D") ]
	# no idea how to detect binaries in SVN
	bin_blobs = []
| |
# Nothing modified, added or removed -- there is nothing to commit, so
# exit successfully right away.
if vcs:
	if not (mychanged or mynew or myremoved):
		print green("RepoMan sez:"), "\"Doing nothing is not always good for QA.\""
		print
		print "(Didn't find any changed files...)"
		print
		sys.exit(0)
| |
| # Manifests need to be regenerated after all other commits, so don't commit |
| # them now even if they have changed. |
| mymanifests = [f for f in mychanged if "Manifest" == os.path.basename(f)] |
| mychanged = [f for f in mychanged if "Manifest" != os.path.basename(f)] |
| myupdates = mychanged + mynew |
| myheaders = [] |
| mydirty = [] |
| headerstring = "'\$(Header|Id)" |
| headerstring += ".*\$'" |
| for myfile in myupdates: |
| if myfile in bin_blobs: |
| continue |
| myout = commands.getstatusoutput("egrep -q "+headerstring+" "+myfile) |
| if myout[0] == 0: |
| myheaders.append(myfile) |
| |
| print "*",green(str(len(myupdates))),"files being committed...",green(str(len(myheaders))),"have headers that will change." |
| print "*","Files with headers will cause the manifests to be made and recommited." |
| logging.info("myupdates:", str(myupdates)) |
| logging.info("myheaders:", str(myheaders)) |
| |
# Build the commit message: --commitmsg, else --commitmsgfile, else prompt
# via $EDITOR or stdin; finally append the Portage/vcs/platform footer.
commitmessage = options.commitmsg
if options.commitmsgfile:
	try:
		f = open(options.commitmsgfile)
		commitmessage = f.read()
		f.close()
		del f
	except (IOError, OSError), e:
		# A missing file is reported but not fatal -- the interactive
		# fallback below will still ask for a message.
		if e.errno == errno.ENOENT:
			portage.writemsg("!!! File Not Found: --commitmsgfile='%s'\n" % options.commitmsgfile)
		else:
			raise
	# We've read the content so the file is no longer needed.
	commitmessagefile = None
if not commitmessage or not commitmessage.strip():
	try:
		editor = os.environ.get("EDITOR")
		if editor and utilities.editor_is_executable(editor):
			# qa_output is prepared earlier in the script; it pre-fills the
			# editor buffer with the QA results as a comment/template.
			commitmessage = utilities.get_commit_message_with_editor(
				editor, message=qa_output)
		else:
			commitmessage = utilities.get_commit_message_with_stdin()
	except KeyboardInterrupt:
		exithandler()
	if not commitmessage or not commitmessage.strip():
		print "* no commit message? aborting commit."
		sys.exit(1)
commitmessage = commitmessage.rstrip()
# Append "(Portage version: <ver>/<vcs>/<platform>[, options])" footer.
portage_version = getattr(portage, "VERSION", None)
if portage_version is None:
	sys.stderr.write("Failed to insert portage version in message!\n")
	sys.stderr.flush()
	portage_version = "Unknown"
unameout = platform.system() + " " + platform.release() + " "
if platform.system() in ["Darwin", "SunOS"]:
	# These platforms report the CPU via processor(); others via machine().
	unameout += platform.processor()
else:
	unameout += platform.machine()
commitmessage+="\n(Portage version: "+str(portage_version)+"/"+vcs+"/"+unameout
if options.force:
	commitmessage += ", RepoMan options: --force"
commitmessage += ")"
| |
# Main commit of all changed/new/removed files (Manifests excluded -- they
# are regenerated and committed separately below).
if myupdates or myremoved:
	myfiles = myupdates + myremoved
	# Write the commit message to a temp file so it can be passed via -F.
	fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
	mymsg = os.fdopen(fd, "w")
	mymsg.write(commitmessage)
	mymsg.close()

	print
	print green("Using commit message:")
	print green("------------------------------------------------------------------------------")
	print commitmessage
	print green("------------------------------------------------------------------------------")
	print

	retval = None
	if options.pretend:
		if vcs == "cvs":
			print "(cvs -q commit -F %s %s)" % \
				(commitmessagefile, " ".join(myfiles))
		if vcs == "svn":
			print "(svn commit -F %s %s)" % \
				(commitmessagefile, " ".join(myfiles))
	else:
		if vcs == "cvs":
			retval = spawn(["cvs", "-q", "commit",
				"-F", commitmessagefile] + myfiles,
				env=os.environ)
		if vcs == "svn":
			retval = spawn(["svn", "commit",
				"-F", commitmessagefile] + myfiles,
				env=os.environ)
	# Best-effort cleanup of the temp message file.
	try:
		os.unlink(commitmessagefile)
	except OSError:
		pass
	if retval:
		print "!!! Exiting on cvs (shell) error code:",retval
		sys.exit(retval)
| |
| # Setup the GPG commands |
| def gpgsign(filename): |
| if "PORTAGE_GPG_KEY" not in repoman_settings: |
| raise portage.exception.MissingParameter("PORTAGE_GPG_KEY is unset!") |
| if "PORTAGE_GPG_DIR" not in repoman_settings: |
| if "HOME" in os.environ: |
| repoman_settings["PORTAGE_GPG_DIR"] = os.path.join(os.environ["HOME"], ".gnupg") |
| logging.info("Automatically setting PORTAGE_GPG_DIR to %s" % repoman_settings["PORTAGE_GPG_DIR"]) |
| else: |
| raise portage.exception.MissingParameter("PORTAGE_GPG_DIR is unset!") |
| gpg_dir = repoman_settings["PORTAGE_GPG_DIR"] |
| if gpg_dir.startswith("~") and "HOME" in os.environ: |
| repoman_settings["PORTAGE_GPG_DIR"] = os.path.join( |
| os.environ["HOME"], gpg_dir[1:].lstrip(os.path.sep)) |
| if not os.access(repoman_settings["PORTAGE_GPG_DIR"], os.X_OK): |
| raise portage.exception.InvalidLocation( |
| "Unable to access directory: PORTAGE_GPG_DIR='%s'" % \ |
| repoman_settings["PORTAGE_GPG_DIR"]) |
| gpgcmd = "gpg --sign --clearsign --yes " |
| gpgcmd+= "--default-key "+repoman_settings["PORTAGE_GPG_KEY"] |
| if "PORTAGE_GPG_DIR" in repoman_settings: |
| gpgcmd += " --homedir "+repoman_settings["PORTAGE_GPG_DIR"] |
| if options.pretend: |
| print "("+gpgcmd+" "+filename+")" |
| else: |
| rValue = os.system(gpgcmd+" "+filename) |
| if rValue == os.EX_OK: |
| os.rename(filename+".asc", filename) |
| else: |
| raise portage.exception.PortageException("!!! gpg exited with '" + str(rValue) + "' status") |
| |
# When files are removed and re-added, the cvs server will put /Attic/
# inside the $Header path. This code detects the problem and corrects it
# so that the Manifest will generate correctly. See bug #169500.
from portage.util import write_atomic
cvs_header = re.compile(r'^#\s*\$Header.*\$$')
for x in myheaders:
	f = open(x)
	mylines = f.readlines()
	f.close()
	modified = False
	for i, line in enumerate(mylines):
		# Only rewrite keyword lines that actually contain /Attic/.
		if cvs_header.match(line) and "/Attic/" in line:
			mylines[i] = line.replace("/Attic/", "/")
			modified = True
	if modified:
		# Atomic rewrite so an interruption cannot leave a truncated file.
		write_atomic(x, "".join(mylines))
| |
# Regenerate Manifests for every package touched by this commit.  The
# repolevel variable (set earlier) is 3 in a package dir, 2 in a category
# dir, 1 at the repository root.
manifest_commit_required = True
if myheaders or myupdates or myremoved or mynew:
	myfiles=myheaders+myupdates+myremoved+mynew
	# Drop paths that are too shallow to belong to a package directory
	# (fewer than 4-repolevel slashes); iterate backwards so deletion by
	# index is safe.
	for x in range(len(myfiles)-1, -1, -1):
		if myfiles[x].count("/") < 4-repolevel:
			del myfiles[x]
	mydone=[]
	if repolevel==3: # In a package dir
		repoman_settings["O"] = startdir
		portage.digestgen([], repoman_settings, manifestonly=1,
			myportdb=portdb)
	elif repolevel==2: # In a category dir
		for x in myfiles:
			xs=x.split("/")
			if len(xs) < 4-repolevel:
				continue
			if xs[0]==".":
				xs=xs[1:]
			# Regenerate each package's Manifest only once.
			if xs[0] in mydone:
				continue
			mydone.append(xs[0])
			repoman_settings["O"] = os.path.join(startdir, xs[0])
			if not os.path.isdir(repoman_settings["O"]):
				continue
			portage.digestgen([], repoman_settings, manifestonly=1,
				myportdb=portdb)
	elif repolevel==1: # repo-cvsroot
		print green("RepoMan sez:"), "\"You're rather crazy... doing the entire repository.\"\n"
		for x in myfiles:
			xs=x.split("/")
			if len(xs) < 4-repolevel:
				continue
			if xs[0]==".":
				xs=xs[1:]
			# Key on category/package to regenerate each Manifest once.
			if "/".join(xs[:2]) in mydone:
				continue
			mydone.append("/".join(xs[:2]))
			repoman_settings["O"] = os.path.join(startdir, xs[0], xs[1])
			if not os.path.isdir(repoman_settings["O"]):
				continue
			portage.digestgen([], repoman_settings, manifestonly=1,
				myportdb=portdb)
	else:
		print red("I'm confused... I don't know where I am!")
		sys.exit(1)

	# Force an unsigned commit when more than one Manifest needs to be signed.
	if repolevel < 3 and "sign" in repoman_settings.features:
		if options.pretend:
			if vcs == "cvs":
				print "(cvs -q commit -F commitmessagefile)"
			if vcs == "svn":
				print "(svn -q commit -F commitmessagefile)"
		else:
			fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
			mymsg = os.fdopen(fd, "w")
			mymsg.write(commitmessage)
			mymsg.write("\n (Unsigned Manifest commit)")
			mymsg.close()
			if vcs == "cvs":
				retval=os.system("cvs -q commit -F "+commitmessagefile)
			if vcs == "svn":
				retval=os.system("svn -q commit -F "+commitmessagefile)
			# Best-effort cleanup of the temp message file.
			try:
				os.unlink(commitmessagefile)
			except OSError:
				pass
			if retval:
				print "!!! Exiting on cvs (shell) error code:",retval
				sys.exit(retval)
		# Manifests were just committed unsigned, so the final Manifest
		# commit below is only needed when signing happens.
		manifest_commit_required = False
| |
# Sign the regenerated Manifests with gpg when FEATURES=sign is enabled.
# Mirrors the repolevel walk used for digestgen above.
signed = False
if "sign" in repoman_settings.features:
	signed = True
	myfiles = myupdates + myremoved + mymanifests
	try:
		if repolevel==3: # In a package dir
			repoman_settings["O"] = "."
			gpgsign(os.path.join(repoman_settings["O"], "Manifest"))
		elif repolevel==2: # In a category dir
			mydone=[]
			for x in myfiles:
				xs=x.split("/")
				if len(xs) < 4-repolevel:
					continue
				if xs[0]==".":
					xs=xs[1:]
				# Sign each package's Manifest only once.
				if xs[0] in mydone:
					continue
				mydone.append(xs[0])
				repoman_settings["O"] = os.path.join(".", xs[0])
				if not os.path.isdir(repoman_settings["O"]):
					continue
				gpgsign(os.path.join(repoman_settings["O"], "Manifest"))
		elif repolevel==1: # repo-cvsroot
			print green("RepoMan sez:"), "\"You're rather crazy... doing the entire repository.\"\n"
			mydone=[]
			for x in myfiles:
				xs=x.split("/")
				if len(xs) < 4-repolevel:
					continue
				if xs[0]==".":
					xs=xs[1:]
				# Key on category/package to sign each Manifest once.
				if "/".join(xs[:2]) in mydone:
					continue
				mydone.append("/".join(xs[:2]))
				repoman_settings["O"] = os.path.join(".", xs[0], xs[1])
				if not os.path.isdir(repoman_settings["O"]):
					continue
				gpgsign(os.path.join(repoman_settings["O"], "Manifest"))
	except portage.exception.PortageException, e:
		# Signing failure is non-fatal: report it and fall back to an
		# unsigned Manifest commit.
		portage.writemsg("!!! %s\n" % str(e))
		portage.writemsg("!!! Disabled FEATURES='sign'\n")
		signed = False
| |
# Final commit of the (possibly signed) Manifests.  Skipped only when an
# unsigned Manifest commit was already forced above and signing failed/was
# disabled.
if manifest_commit_required or signed:
	if options.pretend:
		if vcs == "cvs":
			print "(cvs -q commit -F commitmessagefile)"
		if vcs == "svn":
			print "(svn -q commit -F commitmessagefile)"
	else:
		fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
		mymsg = os.fdopen(fd, "w")
		mymsg.write(commitmessage)
		# Record in the message whether the Manifests were gpg-signed.
		if signed:
			mymsg.write("\n (Signed Manifest commit)")
		else:
			mymsg.write("\n (Unsigned Manifest commit)")
		mymsg.close()
		if vcs == "cvs":
			retval=os.system("cvs -q commit -F "+commitmessagefile)
		if vcs == "svn":
			retval=os.system("svn -q commit -F "+commitmessagefile)
		# Best-effort cleanup of the temp message file.
		try:
			os.unlink(commitmessagefile)
		except OSError:
			pass
		if retval:
			print "!!! Exiting on cvs (shell) error code:",retval
			sys.exit(retval)
| |
# All done -- report the outcome and exit successfully either way.
print
if vcs:
	print "Commit complete."
else:
	print "repoman was too scared by not seeing any familiar version control file that he forgot to commit anything"
print green("RepoMan sez:"), "\"If everyone were like you, I'd be out of business!\"\n"
sys.exit(0)
| |