Various pylint cleanups

Hopefully there are no functional changes here.
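
The bulk of the diff is mechanical: silencing pylint's W0622 (redefined-builtin)
warning where Python 2 builtins are aliased for Python 3, normalizing whitespace
around operators, dedenting over-indented blocks, and renaming unused arguments.
As an illustrative sketch (not part of the patch itself), the redefined-builtin
pattern used throughout looks like this:

	import sys

	if sys.hexversion >= 0x3000000:
		# pylint: disable=W0622
		# Alias Python 2 builtins that were removed in Python 3 so shared
		# code paths keep working under both interpreters.
		basestring = str
		long = int
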
diff --git a/pym/portage/__init__.py b/pym/portage/__init__.py
index 4c0d9ca..a643c7f 100644
--- a/pym/portage/__init__.py
+++ b/pym/portage/__init__.py
@@ -3,7 +3,7 @@
 
 from __future__ import unicode_literals
 
-VERSION="HEAD"
+VERSION = "HEAD"
 
 # ===========================================================================
 # START OF IMPORTS -- START OF IMPORTS -- START OF IMPORTS -- START OF IMPORT
@@ -34,7 +34,7 @@
 
 	sys.stderr.write("!!! You might consider starting python with verbose flags to see what has\n")
 	sys.stderr.write("!!! gone wrong. Here is the information we got for this exception:\n")
-	sys.stderr.write("    "+str(e)+"\n\n");
+	sys.stderr.write("    "+str(e)+"\n\n")
 	raise
 
 try:
@@ -140,6 +140,7 @@
 	raise
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 	long = int
 
@@ -484,8 +485,8 @@
 	else:
 		mylink = os.readlink(symlink)
 	if mylink[0] != '/':
-		mydir=os.path.dirname(symlink)
-		mylink=mydir+"/"+mylink
+		mydir = os.path.dirname(symlink)
+		mylink = mydir + "/" + mylink
 	return os.path.normpath(mylink)
 
 _doebuild_manifest_exempt_depend = 0
@@ -552,7 +553,7 @@
 	'PROPERTIES', 'DEFINED_PHASES', 'HDEPEND', 'UNUSED_04',
 	'UNUSED_03', 'UNUSED_02', 'UNUSED_01',
 )
-auxdbkeylen=len(auxdbkeys)
+auxdbkeylen = len(auxdbkeys)
 
 def portageexit():
 	pass
@@ -643,7 +644,7 @@
 							patchlevel = False
 							if len(version_split) > 1:
 								patchlevel = True
-								VERSION = "%s_p%s" %(VERSION, version_split[1])
+								VERSION = "%s_p%s" % (VERSION, version_split[1])
 							if len(output_lines) > 1 and output_lines[1] == 'modified':
 								head_timestamp = None
 								if len(output_lines) > 3:
diff --git a/pym/portage/_emirrordist/MirrorDistTask.py b/pym/portage/_emirrordist/MirrorDistTask.py
index b6f875d..791dc2e 100644
--- a/pym/portage/_emirrordist/MirrorDistTask.py
+++ b/pym/portage/_emirrordist/MirrorDistTask.py
@@ -19,6 +19,7 @@
 from .DeletionIterator import DeletionIterator
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	long = int
 
 class MirrorDistTask(CompositeTask):
diff --git a/pym/portage/_emirrordist/main.py b/pym/portage/_emirrordist/main.py
index 139f24f..ce92c2a 100644
--- a/pym/portage/_emirrordist/main.py
+++ b/pym/portage/_emirrordist/main.py
@@ -15,6 +15,7 @@
 from .MirrorDistTask import MirrorDistTask
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	long = int
 
 seconds_per_day = 24 * 60 * 60
diff --git a/pym/portage/_global_updates.py b/pym/portage/_global_updates.py
index 8ab6bf8..df413c6 100644
--- a/pym/portage/_global_updates.py
+++ b/pym/portage/_global_updates.py
@@ -123,58 +123,58 @@
 		repo_map['DEFAULT'] = repo_map[master_repo]
 
 	for repo_name, myupd in repo_map.items():
-			if repo_name == 'DEFAULT':
-				continue
-			if not myupd:
-				continue
+		if repo_name == 'DEFAULT':
+			continue
+		if not myupd:
+			continue
 
-			def repo_match(repository):
-				return repository == repo_name or \
-					(repo_name == master_repo and repository not in repo_map)
+		def repo_match(repository):
+			return repository == repo_name or \
+				(repo_name == master_repo and repository not in repo_map)
 
-			def _world_repo_match(atoma, atomb):
-				"""
-				Check whether to perform a world change from atoma to atomb.
-				If best vardb match for atoma comes from the same repository
-				as the update file, allow that. Additionally, if portdb still
-				can find a match for old atom name, warn about that.
-				"""
-				matches = vardb.match(atoma)
-				if not matches:
-					matches = vardb.match(atomb)
-				if matches and \
-					repo_match(vardb.aux_get(best(matches), ['repository'])[0]):
-					if portdb.match(atoma):
-						world_warnings.add((atoma, atomb))
-					return True
-				else:
-					return False
+		def _world_repo_match(atoma, atomb):
+			"""
+			Check whether to perform a world change from atoma to atomb.
+			If best vardb match for atoma comes from the same repository
+			as the update file, allow that. Additionally, if portdb still
+			can find a match for old atom name, warn about that.
+			"""
+			matches = vardb.match(atoma)
+			if not matches:
+				matches = vardb.match(atomb)
+			if matches and \
+				repo_match(vardb.aux_get(best(matches), ['repository'])[0]):
+				if portdb.match(atoma):
+					world_warnings.add((atoma, atomb))
+				return True
+			else:
+				return False
 
-			for update_cmd in myupd:
-				for pos, atom in enumerate(world_list):
-					new_atom = update_dbentry(update_cmd, atom)
-					if atom != new_atom:
-						if _world_repo_match(atom, new_atom):
-							world_list[pos] = new_atom
-							world_modified = True
+		for update_cmd in myupd:
+			for pos, atom in enumerate(world_list):
+				new_atom = update_dbentry(update_cmd, atom)
+				if atom != new_atom:
+					if _world_repo_match(atom, new_atom):
+						world_list[pos] = new_atom
+						world_modified = True
 
-			for update_cmd in myupd:
-				if update_cmd[0] == "move":
-					moves = vardb.move_ent(update_cmd, repo_match=repo_match)
+		for update_cmd in myupd:
+			if update_cmd[0] == "move":
+				moves = vardb.move_ent(update_cmd, repo_match=repo_match)
+				if moves:
+					writemsg_stdout(moves * "@")
+				if bindb:
+					moves = bindb.move_ent(update_cmd, repo_match=repo_match)
 					if moves:
-						writemsg_stdout(moves * "@")
-					if bindb:
-						moves = bindb.move_ent(update_cmd, repo_match=repo_match)
-						if moves:
-							writemsg_stdout(moves * "%")
-				elif update_cmd[0] == "slotmove":
-					moves = vardb.move_slot_ent(update_cmd, repo_match=repo_match)
+						writemsg_stdout(moves * "%")
+			elif update_cmd[0] == "slotmove":
+				moves = vardb.move_slot_ent(update_cmd, repo_match=repo_match)
+				if moves:
+					writemsg_stdout(moves * "s")
+				if bindb:
+					moves = bindb.move_slot_ent(update_cmd, repo_match=repo_match)
 					if moves:
-						writemsg_stdout(moves * "s")
-					if bindb:
-						moves = bindb.move_slot_ent(update_cmd, repo_match=repo_match)
-						if moves:
-							writemsg_stdout(moves * "S")
+						writemsg_stdout(moves * "S")
 
 	if world_modified:
 		world_list.sort()
@@ -187,65 +187,65 @@
 
 	if retupd:
 
-			def _config_repo_match(repo_name, atoma, atomb):
-				"""
-				Check whether to perform a world change from atoma to atomb.
-				If best vardb match for atoma comes from the same repository
-				as the update file, allow that. Additionally, if portdb still
-				can find a match for old atom name, warn about that.
-				"""
-				matches = vardb.match(atoma)
+		def _config_repo_match(repo_name, atoma, atomb):
+			"""
+			Check whether to perform a world change from atoma to atomb.
+			If best vardb match for atoma comes from the same repository
+			as the update file, allow that. Additionally, if portdb still
+			can find a match for old atom name, warn about that.
+			"""
+			matches = vardb.match(atoma)
+			if not matches:
+				matches = vardb.match(atomb)
 				if not matches:
-					matches = vardb.match(atomb)
-					if not matches:
-						return False
-				repository = vardb.aux_get(best(matches), ['repository'])[0]
-				return repository == repo_name or \
-					(repo_name == master_repo and repository not in repo_map)
+					return False
+			repository = vardb.aux_get(best(matches), ['repository'])[0]
+			return repository == repo_name or \
+				(repo_name == master_repo and repository not in repo_map)
 
-			update_config_files(root,
-				shlex_split(mysettings.get("CONFIG_PROTECT", "")),
-				shlex_split(mysettings.get("CONFIG_PROTECT_MASK", "")),
-				repo_map, match_callback=_config_repo_match)
+		update_config_files(root,
+			shlex_split(mysettings.get("CONFIG_PROTECT", "")),
+			shlex_split(mysettings.get("CONFIG_PROTECT_MASK", "")),
+			repo_map, match_callback=_config_repo_match)
 
-			# The above global updates proceed quickly, so they
-			# are considered a single mtimedb transaction.
-			if timestamps:
-				# We do not update the mtime in the mtimedb
-				# until after _all_ of the above updates have
-				# been processed because the mtimedb will
-				# automatically commit when killed by ctrl C.
-				for mykey, mtime in timestamps.items():
-					prev_mtimes[mykey] = mtime
+		# The above global updates proceed quickly, so they
+		# are considered a single mtimedb transaction.
+		if timestamps:
+			# We do not update the mtime in the mtimedb
+			# until after _all_ of the above updates have
+			# been processed because the mtimedb will
+			# automatically commit when killed by ctrl C.
+			for mykey, mtime in timestamps.items():
+				prev_mtimes[mykey] = mtime
 
-			do_upgrade_packagesmessage = False
-			# We gotta do the brute force updates for these now.
-			if True:
-				def onUpdate(maxval, curval):
+		do_upgrade_packagesmessage = False
+		# We gotta do the brute force updates for these now.
+		if True:
+			def onUpdate(_maxval, curval):
+				if curval > 0:
+					writemsg_stdout("#")
+			if quiet:
+				onUpdate = None
+			vardb.update_ents(repo_map, onUpdate=onUpdate)
+			if bindb:
+				def onUpdate(_maxval, curval):
 					if curval > 0:
-						writemsg_stdout("#")
+						writemsg_stdout("*")
 				if quiet:
 					onUpdate = None
-				vardb.update_ents(repo_map, onUpdate=onUpdate)
-				if bindb:
-					def onUpdate(maxval, curval):
-						if curval > 0:
-							writemsg_stdout("*")
-					if quiet:
-						onUpdate = None
-					bindb.update_ents(repo_map, onUpdate=onUpdate)
-			else:
-				do_upgrade_packagesmessage = 1
+				bindb.update_ents(repo_map, onUpdate=onUpdate)
+		else:
+			do_upgrade_packagesmessage = 1
 
-			# Update progress above is indicated by characters written to stdout so
-			# we print a couple new lines here to separate the progress output from
-			# what follows.
-			writemsg_stdout("\n\n")
+		# Update progress above is indicated by characters written to stdout so
+		# we print a couple new lines here to separate the progress output from
+		# what follows.
+		writemsg_stdout("\n\n")
 
-			if do_upgrade_packagesmessage and bindb and \
-				bindb.cpv_all():
-				writemsg_stdout(_(" ** Skipping packages. Run 'fixpackages' or set it in FEATURES to fix the tbz2's in the packages directory.\n"))
-				writemsg_stdout(bold(_("Note: This can take a very long time.")))
-				writemsg_stdout("\n")
+		if do_upgrade_packagesmessage and bindb and \
+			bindb.cpv_all():
+			writemsg_stdout(_(" ** Skipping packages. Run 'fixpackages' or set it in FEATURES to fix the tbz2's in the packages directory.\n"))
+			writemsg_stdout(bold(_("Note: This can take a very long time.")))
+			writemsg_stdout("\n")
 
 	return retupd
diff --git a/pym/portage/_selinux.py b/pym/portage/_selinux.py
index e4621b1..61acbf7 100644
--- a/pym/portage/_selinux.py
+++ b/pym/portage/_selinux.py
@@ -66,7 +66,7 @@
 
 	setfscreate(ctx)
 	try:
-		os.rename(src,dest)
+		os.rename(src, dest)
 	finally:
 		setfscreate()
 
diff --git a/pym/portage/_sets/base.py b/pym/portage/_sets/base.py
index d368e00..3f10462 100644
--- a/pym/portage/_sets/base.py
+++ b/pym/portage/_sets/base.py
@@ -7,6 +7,7 @@
 from portage.versions import cpv_getkey
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 OPERATIONS = ["merge", "unmerge"]
diff --git a/pym/portage/cache/flat_hash.py b/pym/portage/cache/flat_hash.py
index 08dcbe8..9863499 100644
--- a/pym/portage/cache/flat_hash.py
+++ b/pym/portage/cache/flat_hash.py
@@ -18,6 +18,7 @@
 from portage.versions import _pkg_str
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	long = int
 
 class database(fs_template.FsBased):
diff --git a/pym/portage/cache/fs_template.py b/pym/portage/cache/fs_template.py
index 0567c72..db3b80b 100644
--- a/pym/portage/cache/fs_template.py
+++ b/pym/portage/cache/fs_template.py
@@ -15,6 +15,7 @@
 del lazyimport
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	long = int
 
 class FsBased(template.database):
diff --git a/pym/portage/cache/metadata.py b/pym/portage/cache/metadata.py
index 6612e73..285ad05 100644
--- a/pym/portage/cache/metadata.py
+++ b/pym/portage/cache/metadata.py
@@ -16,6 +16,7 @@
 from portage.cache.mappings import ProtectedDict
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 	long = int
 
diff --git a/pym/portage/cache/sqlite.py b/pym/portage/cache/sqlite.py
index 40db070..06451b6 100644
--- a/pym/portage/cache/sqlite.py
+++ b/pym/portage/cache/sqlite.py
@@ -13,6 +13,7 @@
 from portage.localization import _
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 class database(fs_template.FsBased):
diff --git a/pym/portage/cache/template.py b/pym/portage/cache/template.py
index 9b8a4d3..41bad92 100644
--- a/pym/portage/cache/template.py
+++ b/pym/portage/cache/template.py
@@ -10,6 +10,7 @@
 import operator
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	_unicode = str
 	basestring = str
 	long = int
diff --git a/pym/portage/checksum.py b/pym/portage/checksum.py
index cd1572e..a848acd 100644
--- a/pym/portage/checksum.py
+++ b/pym/portage/checksum.py
@@ -3,14 +3,13 @@
 # Distributed under the terms of the GNU General Public License v2
 
 import portage
-from portage.const import PRELINK_BINARY,HASHING_BLOCKSIZE
+from portage.const import PRELINK_BINARY, HASHING_BLOCKSIZE
 from portage.localization import _
 from portage import os
 from portage import _encodings
 from portage import _unicode_encode
 import errno
 import stat
-import sys
 import subprocess
 import tempfile
 
@@ -172,7 +171,7 @@
 	proc.communicate()
 	status = proc.wait()
 	if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
-		prelink_capable=1
+		prelink_capable = 1
 	del cmd, proc, status
 
 def is_prelinkable_elf(filename):
@@ -339,9 +338,10 @@
 						{"file" : filename, "type" : x})
 				else:
 					file_is_ok = False
-					reason     = (("Failed on %s verification" % x), myhash,mydict[x])
+					reason = (("Failed on %s verification" % x), myhash, mydict[x])
 					break
-	return file_is_ok,reason
+
+	return file_is_ok, reason
 
 def perform_checksum(filename, hashname="MD5", calc_prelink=0):
 	"""
diff --git a/pym/portage/cvstree.py b/pym/portage/cvstree.py
index 3680ae4..d2176ce 100644
--- a/pym/portage/cvstree.py
+++ b/pym/portage/cvstree.py
@@ -15,20 +15,20 @@
 from portage import _unicode_encode
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	long = int
 
 # [D]/Name/Version/Date/Flags/Tags
 
 def pathdata(entries, path):
-	"""(entries,path)
-	Returns the data(dict) for a specific file/dir at the path specified."""
-	mysplit=path.split("/")
-	myentries=entries
-	mytarget=mysplit[-1]
-	mysplit=mysplit[:-1]
+	"""Returns the data(dict) for a specific file/dir at the path specified."""
+	mysplit = path.split("/")
+	myentries = entries
+	mytarget = mysplit[-1]
+	mysplit = mysplit[:-1]
 	for mys in mysplit:
 		if mys in myentries["dirs"]:
-			myentries=myentries["dirs"][mys]
+			myentries = myentries["dirs"][mys]
 		else:
 			return None
 	if mytarget in myentries["dirs"]:
@@ -39,18 +39,17 @@
 		return None
 
 def fileat(entries, path):
-	return pathdata(entries,path)
+	return pathdata(entries, path)
 
 def isadded(entries, path):
-	"""(entries,path)
-	Returns true if the path exists and is added to the cvs tree."""
-	mytarget=pathdata(entries, path)
+	"""Returns True if the path exists and is added to the cvs tree."""
+	mytarget = pathdata(entries, path)
 	if mytarget:
 		if "cvs" in mytarget["status"]:
 			return 1
 
-	basedir=os.path.dirname(path)
-	filename=os.path.basename(path)
+	basedir = os.path.dirname(path)
+	filename = os.path.basename(path)
 
 	try:
 		myfile = io.open(
@@ -59,234 +58,250 @@
 			mode='r', encoding=_encodings['content'], errors='strict')
 	except IOError:
 		return 0
-	mylines=myfile.readlines()
+	mylines = myfile.readlines()
 	myfile.close()
 
-	rep=re.compile("^\/"+re.escape(filename)+"\/");
+	rep = re.compile("^\/%s\/" % re.escape(filename))
 	for x in mylines:
 		if rep.search(x):
 			return 1
 
 	return 0
 
-def findnew(entries,recursive=0,basedir=""):
-	"""(entries,recursive=0,basedir="")
-	Recurses the entries tree to find all elements that have been added but
+def findnew(entries, recursive=0, basedir=""):
+	"""Recurses the entries tree to find all elements that have been added but
 	have not yet been committed. Returns a list of paths, optionally prepended
-	with a basedir."""
-	if basedir and basedir[-1]!="/":
-		basedir=basedir+"/"
-	mylist=[]
+	with a basedir.
+	"""
+	if basedir and basedir[-1] != "/":
+		basedir += "/"
+
+	mylist = []
 	for myfile in entries["files"]:
 		if "cvs" in entries["files"][myfile]["status"]:
 			if "0" == entries["files"][myfile]["revision"]:
-				mylist.append(basedir+myfile)
+				mylist.append(basedir + myfile)
+
 	if recursive:
 		for mydir in entries["dirs"]:
-			mylist+=findnew(entries["dirs"][mydir],recursive,basedir+mydir)
+			mylist += findnew(entries["dirs"][mydir], recursive, basedir + mydir)
+
 	return mylist
 
 def findoption(entries, pattern, recursive=0, basedir=""):
-	"""(entries, pattern, recursive=0, basedir="")
-	Iterate over paths of cvs entries for which the pattern.search() method
+	"""Iterate over paths of cvs entries for which the pattern.search() method
 	finds a match. Returns a list of paths, optionally prepended with a
-	basedir."""
+	basedir.
+	"""
 	if not basedir.endswith("/"):
 		basedir += "/"
+
 	for myfile, mydata in entries["files"].items():
 		if "cvs" in mydata["status"]:
 			if pattern.search(mydata["flags"]):
-				yield basedir+myfile
+				yield basedir + myfile
+
 	if recursive:
 		for mydir, mydata in entries["dirs"].items():
 			for x in findoption(mydata, pattern,
-				recursive, basedir+mydir):
+			                    recursive, basedir + mydir):
 				yield x
 
-def findchanged(entries,recursive=0,basedir=""):
-	"""(entries,recursive=0,basedir="")
-	Recurses the entries tree to find all elements that exist in the cvs tree
+def findchanged(entries, recursive=0, basedir=""):
+	"""Recurses the entries tree to find all elements that exist in the cvs tree
 	and differ from the committed version. Returns a list of paths, optionally
-	prepended with a basedir."""
-	if basedir and basedir[-1]!="/":
-		basedir=basedir+"/"
-	mylist=[]
+	prepended with a basedir.
+	"""
+	if basedir and basedir[-1] != "/":
+		basedir += "/"
+
+	mylist = []
 	for myfile in entries["files"]:
 		if "cvs" in entries["files"][myfile]["status"]:
 			if "current" not in entries["files"][myfile]["status"]:
 				if "exists" in entries["files"][myfile]["status"]:
-					if entries["files"][myfile]["revision"]!="0":
-						mylist.append(basedir+myfile)
+					if entries["files"][myfile]["revision"] != "0":
+						mylist.append(basedir + myfile)
+
 	if recursive:
 		for mydir in entries["dirs"]:
-			mylist+=findchanged(entries["dirs"][mydir],recursive,basedir+mydir)
+			mylist += findchanged(entries["dirs"][mydir], recursive, basedir + mydir)
+
 	return mylist
 	
-def findmissing(entries,recursive=0,basedir=""):
-	"""(entries,recursive=0,basedir="")
-	Recurses the entries tree to find all elements that are listed in the cvs
+def findmissing(entries, recursive=0, basedir=""):
+	"""Recurses the entries tree to find all elements that are listed in the cvs
 	tree but do not exist on the filesystem. Returns a list of paths,
-	optionally prepended with a basedir."""
-	if basedir and basedir[-1]!="/":
-		basedir=basedir+"/"
-	mylist=[]
+	optionally prepended with a basedir.
+	"""
+	if basedir and basedir[-1] != "/":
+		basedir += "/"
+
+	mylist = []
 	for myfile in entries["files"]:
 		if "cvs" in entries["files"][myfile]["status"]:
 			if "exists" not in entries["files"][myfile]["status"]:
 				if "removed" not in entries["files"][myfile]["status"]:
-					mylist.append(basedir+myfile)
+					mylist.append(basedir + myfile)
+
 	if recursive:
 		for mydir in entries["dirs"]:
-			mylist+=findmissing(entries["dirs"][mydir],recursive,basedir+mydir)
+			mylist += findmissing(entries["dirs"][mydir], recursive, basedir + mydir)
+
 	return mylist
 
-def findunadded(entries,recursive=0,basedir=""):
-	"""(entries,recursive=0,basedir="")
-	Recurses the entries tree to find all elements that are in valid cvs
+def findunadded(entries, recursive=0, basedir=""):
+	"""Recurses the entries tree to find all elements that are in valid cvs
 	directories but are not part of the cvs tree. Returns a list of paths,
-	optionally prepended with a basedir."""
-	if basedir and basedir[-1]!="/":
-		basedir=basedir+"/"
-	mylist=[]
+	optionally prepended with a basedir.
+	"""
+	if basedir and basedir[-1] != "/":
+		basedir += "/"
 
-	#ignore what cvs ignores.
+	# Ignore what cvs ignores.
+	mylist = []
 	for myfile in entries["files"]:
 		if "cvs" not in entries["files"][myfile]["status"]:
-			mylist.append(basedir+myfile)
+			mylist.append(basedir + myfile)
+
 	if recursive:
 		for mydir in entries["dirs"]:
-			mylist+=findunadded(entries["dirs"][mydir],recursive,basedir+mydir)
+			mylist += findunadded(entries["dirs"][mydir], recursive, basedir + mydir)
+
 	return mylist
 
-def findremoved(entries,recursive=0,basedir=""):
-	"""(entries,recursive=0,basedir="")
-	Recurses the entries tree to find all elements that are in flagged for cvs
-	deletions. Returns a list of paths,	optionally prepended with a basedir."""
-	if basedir and basedir[-1]!="/":
-		basedir=basedir+"/"
-	mylist=[]
+def findremoved(entries, recursive=0, basedir=""):
+	"""Recurses the entries tree to find all elements that are in flagged for cvs
+	deletions. Returns a list of paths,	optionally prepended with a basedir.
+	"""
+	if basedir and basedir[-1] != "/":
+		basedir += "/"
+
+	mylist = []
 	for myfile in entries["files"]:
 		if "removed" in entries["files"][myfile]["status"]:
-			mylist.append(basedir+myfile)
+			mylist.append(basedir + myfile)
+
 	if recursive:
 		for mydir in entries["dirs"]:
-			mylist+=findremoved(entries["dirs"][mydir],recursive,basedir+mydir)
+			mylist += findremoved(entries["dirs"][mydir], recursive, basedir + mydir)
+
 	return mylist
 
 def findall(entries, recursive=0, basedir=""):
-	"""(entries,recursive=0,basedir="")
-	Recurses the entries tree to find all new, changed, missing, and unadded
-	entities. Returns a 4 element list of lists as returned from each find*()."""
-
-	if basedir and basedir[-1]!="/":
-		basedir=basedir+"/"
-	mynew     = findnew(entries,recursive,basedir)
-	mychanged = findchanged(entries,recursive,basedir)
-	mymissing = findmissing(entries,recursive,basedir)
-	myunadded = findunadded(entries,recursive,basedir)
-	myremoved = findremoved(entries,recursive,basedir)
+	"""Recurses the entries tree to find all new, changed, missing, and unadded
+	entities. Returns a 4 element list of lists as returned from each find*().
+	"""
+	if basedir and basedir[-1] != "/":
+		basedir += "/"
+	mynew     = findnew(entries, recursive, basedir)
+	mychanged = findchanged(entries, recursive, basedir)
+	mymissing = findmissing(entries, recursive, basedir)
+	myunadded = findunadded(entries, recursive, basedir)
+	myremoved = findremoved(entries, recursive, basedir)
 	return [mynew, mychanged, mymissing, myunadded, myremoved]
 
 ignore_list = re.compile("(^|/)(RCS(|LOG)|SCCS|CVS(|\.adm)|cvslog\..*|tags|TAGS|\.(make\.state|nse_depinfo)|.*~|(\.|)#.*|,.*|_$.*|.*\$|\.del-.*|.*\.(old|BAK|bak|orig|rej|a|olb|o|obj|so|exe|Z|elc|ln)|core)$")
 def apply_cvsignore_filter(list):
-	x=0
+	x = 0
 	while x < len(list):
 		if ignore_list.match(list[x].split("/")[-1]):
 			list.pop(x)
 		else:
-			x+=1
+			x += 1
 	return list
 	
-def getentries(mydir,recursive=0):
-	"""(basedir,recursive=0)
-	Scans the given directory and returns a datadict of all the entries in
-	the directory separated as a dirs dict and a files dict."""
-	myfn=mydir+"/CVS/Entries"
+def getentries(mydir, recursive=0):
+	"""Scans the given directory and returns a datadict of all the entries in
+	the directory separated as a dirs dict and a files dict.
+	"""
+	myfn = mydir + "/CVS/Entries"
 	# entries=[dirs, files]
-	entries={"dirs":{},"files":{}}
+	entries = {"dirs":{}, "files":{}}
 	if not os.path.exists(mydir):
 		return entries
 	try:
 		myfile = io.open(_unicode_encode(myfn,
 			encoding=_encodings['fs'], errors='strict'),
 			mode='r', encoding=_encodings['content'], errors='strict')
-		mylines=myfile.readlines()
+		mylines = myfile.readlines()
 		myfile.close()
 	except SystemExit as e:
 		raise
 	except:
-		mylines=[]
+		mylines = []
+
 	for line in mylines:
-		if line and line[-1]=="\n":
-			line=line[:-1]
+		if line and line[-1] == "\n":
+			line = line[:-1]
 		if not line:
 			continue
-		if line=="D": # End of entries file
+		if line == "D": # End of entries file
 			break
-		mysplit=line.split("/")
-		if len(mysplit)!=6:
-			print("Confused:",mysplit)
+		mysplit = line.split("/")
+		if len(mysplit) != 6:
+			print("Confused:", mysplit)
 			continue
-		if mysplit[0]=="D":
-			entries["dirs"][mysplit[1]]={"dirs":{},"files":{},"status":[]}
-			entries["dirs"][mysplit[1]]["status"]=["cvs"]
+		if mysplit[0] == "D":
+			entries["dirs"][mysplit[1]] = {"dirs":{}, "files":{}, "status":[]}
+			entries["dirs"][mysplit[1]]["status"] = ["cvs"]
 			if os.path.isdir(mydir+"/"+mysplit[1]):
-				entries["dirs"][mysplit[1]]["status"]+=["exists"]
-				entries["dirs"][mysplit[1]]["flags"]=mysplit[2:]
+				entries["dirs"][mysplit[1]]["status"] += ["exists"]
+				entries["dirs"][mysplit[1]]["flags"] = mysplit[2:]
 				if recursive:
-					rentries=getentries(mydir+"/"+mysplit[1],recursive)
-					entries["dirs"][mysplit[1]]["dirs"]=rentries["dirs"]
-					entries["dirs"][mysplit[1]]["files"]=rentries["files"]
+					rentries = getentries(mydir + "/" + mysplit[1], recursive)
+					entries["dirs"][mysplit[1]]["dirs"] = rentries["dirs"]
+					entries["dirs"][mysplit[1]]["files"] = rentries["files"]
 		else:
 			# [D]/Name/revision/Date/Flags/Tags
-			entries["files"][mysplit[1]]={}
-			entries["files"][mysplit[1]]["revision"]=mysplit[2]
-			entries["files"][mysplit[1]]["date"]=mysplit[3]
-			entries["files"][mysplit[1]]["flags"]=mysplit[4]
-			entries["files"][mysplit[1]]["tags"]=mysplit[5]
-			entries["files"][mysplit[1]]["status"]=["cvs"]
-			if entries["files"][mysplit[1]]["revision"][0]=="-":
-				entries["files"][mysplit[1]]["status"]+=["removed"]
+			entries["files"][mysplit[1]] = {}
+			entries["files"][mysplit[1]]["revision"] = mysplit[2]
+			entries["files"][mysplit[1]]["date"] = mysplit[3]
+			entries["files"][mysplit[1]]["flags"] = mysplit[4]
+			entries["files"][mysplit[1]]["tags"] = mysplit[5]
+			entries["files"][mysplit[1]]["status"] = ["cvs"]
+			if entries["files"][mysplit[1]]["revision"][0] == "-":
+				entries["files"][mysplit[1]]["status"] += ["removed"]
 
 	for file in os.listdir(mydir):
-		if file=="CVS":
+		if file == "CVS":
 			continue
-		if os.path.isdir(mydir+"/"+file):
+		if os.path.isdir(mydir + "/" + file):
 			if file not in entries["dirs"]:
 				if ignore_list.match(file) is not None:
 					continue
-				entries["dirs"][file]={"dirs":{},"files":{}}
+				entries["dirs"][file] = {"dirs":{}, "files":{}}
 				# It's normal for a directory to be unlisted in Entries
 				# when checked out without -P (see bug #257660).
-				rentries=getentries(mydir+"/"+file,recursive)
-				entries["dirs"][file]["dirs"]=rentries["dirs"]
-				entries["dirs"][file]["files"]=rentries["files"]
+				rentries = getentries(mydir + "/" + file, recursive)
+				entries["dirs"][file]["dirs"] = rentries["dirs"]
+				entries["dirs"][file]["files"] = rentries["files"]
 			if "status" in entries["dirs"][file]:
 				if "exists" not in entries["dirs"][file]["status"]:
-					entries["dirs"][file]["status"]+=["exists"]
+					entries["dirs"][file]["status"] += ["exists"]
 			else:
-				entries["dirs"][file]["status"]=["exists"]
-		elif os.path.isfile(mydir+"/"+file):
+				entries["dirs"][file]["status"] = ["exists"]
+		elif os.path.isfile(mydir + "/" + file):
 			if file not in entries["files"]:
 				if ignore_list.match(file) is not None:
 					continue
-				entries["files"][file]={"revision":"","date":"","flags":"","tags":""}
+				entries["files"][file] = {"revision":"", "date":"", "flags":"", "tags":""}
 			if "status" in entries["files"][file]:
 				if "exists" not in entries["files"][file]["status"]:
-					entries["files"][file]["status"]+=["exists"]
+					entries["files"][file]["status"] += ["exists"]
 			else:
-				entries["files"][file]["status"]=["exists"]
+				entries["files"][file]["status"] = ["exists"]
 			try:
-				mystat=os.stat(mydir+"/"+file)
+				mystat = os.stat(mydir + "/" + file)
 				mytime = time.asctime(time.gmtime(mystat[stat.ST_MTIME]))
 				if "status" not in entries["files"][file]:
-					entries["files"][file]["status"]=[]
-				if mytime==entries["files"][file]["date"]:
-					entries["files"][file]["status"]+=["current"]
+					entries["files"][file]["status"] = []
+				if mytime == entries["files"][file]["date"]:
+					entries["files"][file]["status"] += ["current"]
 			except SystemExit as e:
 				raise
 			except Exception as e:
-				print("failed to stat",file)
+				print("failed to stat", file)
 				print(e)
 				return
 
@@ -294,6 +309,7 @@
 			pass
 		else:
 			print()
-			print("File of unknown type:",mydir+"/"+file)
+			print("File of unknown type:", mydir + "/" + file)
 			print()
+
 	return entries
diff --git a/pym/portage/data.py b/pym/portage/data.py
index 44104c2..7289457 100644
--- a/pym/portage/data.py
+++ b/pym/portage/data.py
@@ -12,7 +12,7 @@
 )
 from portage.localization import _
 
-ostype=platform.system()
+ostype = platform.system()
 userland = None
 if ostype == "DragonFly" or ostype.endswith("BSD"):
 	userland = "BSD"
@@ -23,10 +23,10 @@
 
 if not lchown:
 	if ostype == "Darwin":
-		def lchown(*pos_args, **key_args):
+		def lchown(*_args, **_kwargs):
 			pass
 	else:
-		def lchown(*pargs, **kwargs):
+		def lchown(*_args, **_kwargs):
 			writemsg(colorize("BAD", "!!!") + _(
 				" It seems that os.lchown does not"
 				" exist.  Please rebuild python.\n"), noiselevel=-1)
@@ -59,11 +59,10 @@
 # If the "wheel" group does not exist then wheelgid falls back to 0.
 # If the "portage" group does not exist then portage_uid falls back to wheelgid.
 
-uid=os.getuid()
-wheelgid=0
-
+uid = os.getuid()
+wheelgid = 0
 try:
-	wheelgid=grp.getgrnam("wheel")[2]
+	wheelgid = grp.getgrnam("wheel")[2]
 except KeyError:
 	pass
 
diff --git a/pym/portage/dbapi/bintree.py b/pym/portage/dbapi/bintree.py
index b1f67ae..c05b59b 100644
--- a/pym/portage/dbapi/bintree.py
+++ b/pym/portage/dbapi/bintree.py
@@ -53,6 +53,7 @@
 	from urlparse import urlparse
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	_unicode = str
 	basestring = str
 	long = int
diff --git a/pym/portage/dbapi/porttree.py b/pym/portage/dbapi/porttree.py
index 25ff27f..590e3c5 100644
--- a/pym/portage/dbapi/porttree.py
+++ b/pym/portage/dbapi/porttree.py
@@ -50,6 +50,7 @@
 	from urlparse import urlparse
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 	long = int
 
diff --git a/pym/portage/dbapi/vartree.py b/pym/portage/dbapi/vartree.py
index 7a4a3d2..b593365 100644
--- a/pym/portage/dbapi/vartree.py
+++ b/pym/portage/dbapi/vartree.py
@@ -93,6 +93,7 @@
 	import pickle
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 	long = int
 	_unicode = str
diff --git a/pym/portage/debug.py b/pym/portage/debug.py
index ebf1a13..5fe3b11 100644
--- a/pym/portage/debug.py
+++ b/pym/portage/debug.py
@@ -38,7 +38,7 @@
 		self.max_repr_length = 200
 
 	def event_handler(self, *args):
-		frame, event, arg = args
+		frame, event, _arg = args
 		if "line" == event:
 			if self.show_local_lines:
 				self.trace_line(*args)
@@ -56,7 +56,7 @@
 		self.arg_repr(frame, event, arg),
 		self.locals_repr(frame, event, arg)))
 
-	def arg_repr(self, frame, event, arg):
+	def arg_repr(self, _frame, event, arg):
 		my_repr = None
 		if "return" == event:
 			my_repr = repr(arg)
@@ -71,7 +71,7 @@
 
 		return ""
 
-	def trace_line(self, frame, event, arg):
+	def trace_line(self, frame, _event, _arg):
 		writemsg("%s line=%d\n" % (self.trim_filename(frame.f_code.co_filename), frame.f_lineno))
 
 	def ignore_filename(self, filename):
@@ -81,7 +81,7 @@
 					return True
 		return False
 
-	def locals_repr(self, frame, event, arg):
+	def locals_repr(self, frame, _event, _arg):
 		"""Create a representation of the locals dict that is suitable for
 		tracing output."""
 
diff --git a/pym/portage/dep/__init__.py b/pym/portage/dep/__init__.py
index 798903f..8a15442 100644
--- a/pym/portage/dep/__init__.py
+++ b/pym/portage/dep/__init__.py
@@ -33,6 +33,7 @@
 import portage.cache.mappings
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 	_unicode = str
 else:
diff --git a/pym/portage/dispatch_conf.py b/pym/portage/dispatch_conf.py
index 79daa9f..0c71e65 100644
--- a/pym/portage/dispatch_conf.py
+++ b/pym/portage/dispatch_conf.py
@@ -24,183 +24,184 @@
 DIFF3_MERGE = "diff3 -mE '%s' '%s' '%s' > '%s'"
 
 def diffstatusoutput(cmd, file1, file2):
-    """
-    Execute the string cmd in a shell with getstatusoutput() and return a
-    2-tuple (status, output).
-    """
-    # Use Popen to emulate getstatusoutput(), since getstatusoutput() may
-    # raise a UnicodeDecodeError which makes the output inaccessible.
-    args = shlex_split(cmd % (file1, file2))
+	"""
+	Execute the string cmd in a shell with getstatusoutput() and return a
+	2-tuple (status, output).
+	"""
+	# Use Popen to emulate getstatusoutput(), since getstatusoutput() may
+	# raise a UnicodeDecodeError which makes the output inaccessible.
+	args = shlex_split(cmd % (file1, file2))
 
-    if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \
-        not os.path.isabs(args[0]):
-        # Python 3.1 _execvp throws TypeError for non-absolute executable
-        # path passed as bytes (see http://bugs.python.org/issue8513).
-        fullname = portage.process.find_binary(args[0])
-        if fullname is None:
-            raise portage.exception.CommandNotFound(args[0])
-        args[0] = fullname
+	if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \
+		not os.path.isabs(args[0]):
+		# Python 3.1 _execvp throws TypeError for non-absolute executable
+		# path passed as bytes (see http://bugs.python.org/issue8513).
+		fullname = portage.process.find_binary(args[0])
+		if fullname is None:
+			raise portage.exception.CommandNotFound(args[0])
+		args[0] = fullname
 
-    args = [portage._unicode_encode(x, errors='strict') for x in args]
-    proc = subprocess.Popen(args,
-        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    output = portage._unicode_decode(proc.communicate()[0])
-    if output and output[-1] == "\n":
-        # getstatusoutput strips one newline
-        output = output[:-1]
-    return (proc.wait(), output)
+	args = [portage._unicode_encode(x, errors='strict') for x in args]
+	proc = subprocess.Popen(args,
+		stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+	output = portage._unicode_decode(proc.communicate()[0])
+	if output and output[-1] == "\n":
+		# getstatusoutput strips one newline
+		output = output[:-1]
+	return (proc.wait(), output)
 
 def read_config(mandatory_opts):
-    eprefix = portage.settings["EPREFIX"]
-    config_path = os.path.join(eprefix or os.sep, "etc/dispatch-conf.conf")
-    loader = KeyValuePairFileLoader(config_path, None)
-    opts, errors = loader.load()
-    if not opts:
-        print(_('dispatch-conf: Error reading /etc/dispatch-conf.conf; fatal'), file=sys.stderr)
-        sys.exit(1)
+	eprefix = portage.settings["EPREFIX"]
+	config_path = os.path.join(eprefix or os.sep, "etc/dispatch-conf.conf")
+	loader = KeyValuePairFileLoader(config_path, None)
+	opts, _errors = loader.load()
+	if not opts:
+		print(_('dispatch-conf: Error reading /etc/dispatch-conf.conf; fatal'), file=sys.stderr)
+		sys.exit(1)
 
 	# Handle quote removal here, since KeyValuePairFileLoader doesn't do that.
-    quotes = "\"'"
-    for k, v in opts.items():
-        if v[:1] in quotes and v[:1] == v[-1:]:
-            opts[k] = v[1:-1]
+	quotes = "\"'"
+	for k, v in opts.items():
+		if v[:1] in quotes and v[:1] == v[-1:]:
+			opts[k] = v[1:-1]
 
-    for key in mandatory_opts:
-        if key not in opts:
-            if key == "merge":
-                opts["merge"] = "sdiff --suppress-common-lines --output='%s' '%s' '%s'"
-            else:
-                print(_('dispatch-conf: Missing option "%s" in /etc/dispatch-conf.conf; fatal') % (key,), file=sys.stderr)
+	for key in mandatory_opts:
+		if key not in opts:
+			if key == "merge":
+				opts["merge"] = "sdiff --suppress-common-lines --output='%s' '%s' '%s'"
+			else:
+				print(_('dispatch-conf: Missing option "%s" in /etc/dispatch-conf.conf; fatal') % (key,), file=sys.stderr)
 
-    # archive-dir supports ${EPREFIX} expansion, in order to avoid hardcoding
-    variables = {"EPREFIX": eprefix}
-    opts['archive-dir'] = varexpand(opts['archive-dir'], mydict=variables)
+	# archive-dir supports ${EPREFIX} expansion, in order to avoid hardcoding
+	variables = {"EPREFIX": eprefix}
+	opts['archive-dir'] = varexpand(opts['archive-dir'], mydict=variables)
 
-    if not os.path.exists(opts['archive-dir']):
-        os.mkdir(opts['archive-dir'])
-        # Use restrictive permissions by default, in order to protect
-        # against vulnerabilities (like bug #315603 involving rcs).
-        os.chmod(opts['archive-dir'], 0o700)
-    elif not os.path.isdir(opts['archive-dir']):
-        print(_('dispatch-conf: Config archive dir [%s] must exist; fatal') % (opts['archive-dir'],), file=sys.stderr)
-        sys.exit(1)
+	if not os.path.exists(opts['archive-dir']):
+		os.mkdir(opts['archive-dir'])
+		# Use restrictive permissions by default, in order to protect
+		# against vulnerabilities (like bug #315603 involving rcs).
+		os.chmod(opts['archive-dir'], 0o700)
+	elif not os.path.isdir(opts['archive-dir']):
+		print(_('dispatch-conf: Config archive dir [%s] must exist; fatal') % (opts['archive-dir'],), file=sys.stderr)
+		sys.exit(1)
 
-    return opts
+	return opts
 
 
 def rcs_archive(archive, curconf, newconf, mrgconf):
-    """Archive existing config in rcs (on trunk). Then, if mrgconf is
-    specified and an old branch version exists, merge the user's changes
-    and the distributed changes and put the result into mrgconf.  Lastly,
-    if newconf was specified, leave it in the archive dir with a .dist.new
-    suffix along with the last 1.1.1 branch version with a .dist suffix."""
+	"""Archive existing config in rcs (on trunk). Then, if mrgconf is
+	specified and an old branch version exists, merge the user's changes
+	and the distributed changes and put the result into mrgconf.  Lastly,
+	if newconf was specified, leave it in the archive dir with a .dist.new
+	suffix along with the last 1.1.1 branch version with a .dist suffix."""
 
-    try:
-        os.makedirs(os.path.dirname(archive))
-    except OSError:
-        pass
+	try:
+		os.makedirs(os.path.dirname(archive))
+	except OSError:
+		pass
 
-    if os.path.isfile(curconf):
-        try:
-            shutil.copy2(curconf, archive)
-        except(IOError, os.error) as why:
-            print(_('dispatch-conf: Error copying %(curconf)s to %(archive)s: %(reason)s; fatal') % \
-                {"curconf": curconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
+	if os.path.isfile(curconf):
+		try:
+			shutil.copy2(curconf, archive)
+		except(IOError, os.error) as why:
+			print(_('dispatch-conf: Error copying %(curconf)s to %(archive)s: %(reason)s; fatal') % \
+				{"curconf": curconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
 
-    if os.path.exists(archive + ',v'):
-        os.system(RCS_LOCK + ' ' + archive)
-    os.system(RCS_PUT + ' ' + archive)
+	if os.path.exists(archive + ',v'):
+		os.system(RCS_LOCK + ' ' + archive)
+	os.system(RCS_PUT + ' ' + archive)
 
-    ret = 0
-    if newconf != '':
-        os.system(RCS_GET + ' -r' + RCS_BRANCH + ' ' + archive)
-        has_branch = os.path.exists(archive)
-        if has_branch:
-            os.rename(archive, archive + '.dist')
+	ret = 0
+	if newconf != '':
+		os.system(RCS_GET + ' -r' + RCS_BRANCH + ' ' + archive)
+		has_branch = os.path.exists(archive)
+		if has_branch:
+			os.rename(archive, archive + '.dist')
 
-        try:
-            shutil.copy2(newconf, archive)
-        except(IOError, os.error) as why:
-            print(_('dispatch-conf: Error copying %(newconf)s to %(archive)s: %(reason)s; fatal') % \
-                  {"newconf": newconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
+		try:
+			shutil.copy2(newconf, archive)
+		except(IOError, os.error) as why:
+			print(_('dispatch-conf: Error copying %(newconf)s to %(archive)s: %(reason)s; fatal') % \
+				{"newconf": newconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
 
-        if has_branch:
-            if mrgconf != '':
-                # This puts the results of the merge into mrgconf.
-                ret = os.system(RCS_MERGE % (archive, mrgconf))
-                mystat = os.lstat(newconf)
-                os.chmod(mrgconf, mystat.st_mode)
-                os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
-        os.rename(archive, archive + '.dist.new')
-    return ret
+		if has_branch:
+			if mrgconf != '':
+				# This puts the results of the merge into mrgconf.
+				ret = os.system(RCS_MERGE % (archive, mrgconf))
+				mystat = os.lstat(newconf)
+				os.chmod(mrgconf, mystat.st_mode)
+				os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
+		os.rename(archive, archive + '.dist.new')
+
+	return ret
 
 
 def file_archive(archive, curconf, newconf, mrgconf):
-    """Archive existing config to the archive-dir, bumping old versions
-    out of the way into .# versions (log-rotate style). Then, if mrgconf
-    was specified and there is a .dist version, merge the user's changes
-    and the distributed changes and put the result into mrgconf.  Lastly,
-    if newconf was specified, archive it as a .dist.new version (which
-    gets moved to the .dist version at the end of the processing)."""
+	"""Archive existing config to the archive-dir, bumping old versions
+	out of the way into .# versions (log-rotate style). Then, if mrgconf
+	was specified and there is a .dist version, merge the user's changes
+	and the distributed changes and put the result into mrgconf.  Lastly,
+	if newconf was specified, archive it as a .dist.new version (which
+	gets moved to the .dist version at the end of the processing)."""
 
-    try:
-        os.makedirs(os.path.dirname(archive))
-    except OSError:
-        pass
+	try:
+		os.makedirs(os.path.dirname(archive))
+	except OSError:
+		pass
 
-    # Archive the current config file if it isn't already saved
-    if os.path.exists(archive) \
-     and len(diffstatusoutput("diff -aq '%s' '%s'", curconf, archive)[1]) != 0:
-        suf = 1
-        while suf < 9 and os.path.exists(archive + '.' + str(suf)):
-            suf += 1
+	# Archive the current config file if it isn't already saved
+	if (os.path.exists(archive) and
+		len(diffstatusoutput("diff -aq '%s' '%s'", curconf, archive)[1]) != 0):
+		suf = 1
+		while suf < 9 and os.path.exists(archive + '.' + str(suf)):
+			suf += 1
 
-        while suf > 1:
-            os.rename(archive + '.' + str(suf-1), archive + '.' + str(suf))
-            suf -= 1
+		while suf > 1:
+			os.rename(archive + '.' + str(suf-1), archive + '.' + str(suf))
+			suf -= 1
 
-        os.rename(archive, archive + '.1')
+		os.rename(archive, archive + '.1')
 
-    if os.path.isfile(curconf):
-        try:
-            shutil.copy2(curconf, archive)
-        except(IOError, os.error) as why:
-            print(_('dispatch-conf: Error copying %(curconf)s to %(archive)s: %(reason)s; fatal') % \
-                {"curconf": curconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
+	if os.path.isfile(curconf):
+		try:
+			shutil.copy2(curconf, archive)
+		except(IOError, os.error) as why:
+			print(_('dispatch-conf: Error copying %(curconf)s to %(archive)s: %(reason)s; fatal') % \
+				{"curconf": curconf, "archive": archive, "reason": str(why)}, file=sys.stderr)
 
-    if newconf != '':
-        # Save off new config file in the archive dir with .dist.new suffix
-        try:
-            shutil.copy2(newconf, archive + '.dist.new')
-        except(IOError, os.error) as why:
-            print(_('dispatch-conf: Error copying %(newconf)s to %(archive)s: %(reason)s; fatal') % \
-                  {"newconf": newconf, "archive": archive + '.dist.new', "reason": str(why)}, file=sys.stderr)
+	if newconf != '':
+		# Save off new config file in the archive dir with .dist.new suffix
+		try:
+			shutil.copy2(newconf, archive + '.dist.new')
+		except(IOError, os.error) as why:
+			print(_('dispatch-conf: Error copying %(newconf)s to %(archive)s: %(reason)s; fatal') % \
+				{"newconf": newconf, "archive": archive + '.dist.new', "reason": str(why)}, file=sys.stderr)
 
-        ret = 0
-        if mrgconf != '' and os.path.exists(archive + '.dist'):
-            # This puts the results of the merge into mrgconf.
-            ret = os.system(DIFF3_MERGE % (curconf, archive + '.dist', newconf, mrgconf))
-            mystat = os.lstat(newconf)
-            os.chmod(mrgconf, mystat.st_mode)
-            os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
+		ret = 0
+		if mrgconf != '' and os.path.exists(archive + '.dist'):
+			# This puts the results of the merge into mrgconf.
+			ret = os.system(DIFF3_MERGE % (curconf, archive + '.dist', newconf, mrgconf))
+			mystat = os.lstat(newconf)
+			os.chmod(mrgconf, mystat.st_mode)
+			os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
 
-        return ret
+		return ret
 
 
 def rcs_archive_post_process(archive):
-    """Check in the archive file with the .dist.new suffix on the branch
-    and remove the one with the .dist suffix."""
-    os.rename(archive + '.dist.new', archive)
-    if os.path.exists(archive + '.dist'):
-        # Commit the last-distributed version onto the branch.
-        os.system(RCS_LOCK + RCS_BRANCH + ' ' + archive)
-        os.system(RCS_PUT + ' -r' + RCS_BRANCH + ' ' + archive)
-        os.unlink(archive + '.dist')
-    else:
-        # Forcefully commit the last-distributed version onto the branch.
-        os.system(RCS_PUT + ' -f -r' + RCS_BRANCH + ' ' + archive)
+	"""Check in the archive file with the .dist.new suffix on the branch
+	and remove the one with the .dist suffix."""
+	os.rename(archive + '.dist.new', archive)
+	if os.path.exists(archive + '.dist'):
+		# Commit the last-distributed version onto the branch.
+		os.system(RCS_LOCK + RCS_BRANCH + ' ' + archive)
+		os.system(RCS_PUT + ' -r' + RCS_BRANCH + ' ' + archive)
+		os.unlink(archive + '.dist')
+	else:
+		# Forcefully commit the last-distributed version onto the branch.
+		os.system(RCS_PUT + ' -f -r' + RCS_BRANCH + ' ' + archive)
 
 
 def file_archive_post_process(archive):
-    """Rename the archive file with the .dist.new suffix to a .dist suffix"""
-    os.rename(archive + '.dist.new', archive + '.dist')
+	"""Rename the archive file with the .dist.new suffix to a .dist suffix"""
+	os.rename(archive + '.dist.new', archive + '.dist')
diff --git a/pym/portage/eclass_cache.py b/pym/portage/eclass_cache.py
index e1778f7..2988d25 100644
--- a/pym/portage/eclass_cache.py
+++ b/pym/portage/eclass_cache.py
@@ -18,6 +18,7 @@
 from portage import _shell_quote
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	long = int
 
 
@@ -132,7 +133,7 @@
 					mtime = obj.mtime
 				except FileNotFound:
 					continue
-				ys=y[:-eclass_len]
+				ys = y[:-eclass_len]
 				if x == self._master_eclass_root:
 					master_eclasses[ys] = mtime
 					self.eclasses[ys] = obj
diff --git a/pym/portage/elog/__init__.py b/pym/portage/elog/__init__.py
index 33dac17..c3386cf 100644
--- a/pym/portage/elog/__init__.py
+++ b/pym/portage/elog/__init__.py
@@ -4,6 +4,7 @@
 
 import sys
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 import portage
diff --git a/pym/portage/elog/mod_echo.py b/pym/portage/elog/mod_echo.py
index 59117be..e0564bd 100644
--- a/pym/portage/elog/mod_echo.py
+++ b/pym/portage/elog/mod_echo.py
@@ -10,6 +10,7 @@
 from portage.localization import _
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 _items = []
diff --git a/pym/portage/elog/mod_syslog.py b/pym/portage/elog/mod_syslog.py
index c8bf441..5df6ce0 100644
--- a/pym/portage/elog/mod_syslog.py
+++ b/pym/portage/elog/mod_syslog.py
@@ -8,12 +8,13 @@
 from portage import _encodings
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 _pri = {
-	"INFO"   : syslog.LOG_INFO, 
-	"WARN"   : syslog.LOG_WARNING, 
-	"ERROR"  : syslog.LOG_ERR, 
+	"INFO"   : syslog.LOG_INFO,
+	"WARN"   : syslog.LOG_WARNING,
+	"ERROR"  : syslog.LOG_ERR,
 	"LOG"    : syslog.LOG_NOTICE,
 	"QA"     : syslog.LOG_WARNING
 }
@@ -23,14 +24,14 @@
 	for phase in EBUILD_PHASES:
 		if not phase in logentries:
 			continue
-		for msgtype,msgcontent in logentries[phase]:
+		for msgtype, msgcontent in logentries[phase]:
 			if isinstance(msgcontent, basestring):
 				msgcontent = [msgcontent]
 			for line in msgcontent:
 				line = "%s: %s: %s" % (key, phase, line)
 				if sys.hexversion < 0x3000000 and not isinstance(line, bytes):
 					# Avoid TypeError from syslog.syslog()
-					line = line.encode(_encodings['content'], 
+					line = line.encode(_encodings['content'],
 						'backslashreplace')
 				syslog.syslog(_pri[msgtype], line.rstrip("\n"))
 	syslog.closelog()
diff --git a/pym/portage/emaint/main.py b/pym/portage/emaint/main.py
index 9f987fa..1ae3d4b 100644
--- a/pym/portage/emaint/main.py
+++ b/pym/portage/emaint/main.py
@@ -86,15 +86,14 @@
 		opts = DEFAULT_OPTIONS
 	for opt in sorted(opts):
 		optd = opts[opt]
-		opto = "  %s, %s" %(optd['short'], optd['long'])
-		_usage += '%s %s\n' % (opto.ljust(15),optd['help'])
+		opto = "  %s, %s" % (optd['short'], optd['long'])
+		_usage += '%s %s\n' % (opto.ljust(15), optd['help'])
 	_usage += '\n'
 	return _usage
 
 
 class TaskHandler(object):
-	"""Handles the running of the tasks it is given
-	"""
+	"""Handles the running of the tasks it is given"""
 
 	def __init__(self, show_progress_bar=True, verbose=True, callback=None):
 		self.show_progress_bar = show_progress_bar
@@ -103,7 +102,6 @@
 		self.isatty = os.environ.get('TERM') != 'dumb' and sys.stdout.isatty()
 		self.progress_bar = ProgressBar(self.isatty, title="Emaint", max_desc_length=27)
 
-
 	def run_tasks(self, tasks, func, status=None, verbose=True, options=None):
 		"""Runs the module tasks"""
 		if tasks is None or func is None:
@@ -201,7 +199,7 @@
 	if args[0] == "all":
 		tasks = []
 		for m in module_names[1:]:
-			#print("DEBUG: module: %s, functions: " %(m, str(module_controller.get_functions(m))))
+			#print("DEBUG: module: %s, functions: " % (m, str(module_controller.get_functions(m))))
 			if long_action in module_controller.get_functions(m):
 				tasks.append(module_controller.get_class(m))
 	elif long_action in module_controller.get_functions(args[0]):
@@ -219,4 +217,3 @@
 	task_opts = options.__dict__
 	taskmaster = TaskHandler(callback=print_results)
 	taskmaster.run_tasks(tasks, func, status, options=task_opts)
-
diff --git a/pym/portage/emaint/module.py b/pym/portage/emaint/module.py
index 64b0c64..eb3af9e 100644
--- a/pym/portage/emaint/module.py
+++ b/pym/portage/emaint/module.py
@@ -37,10 +37,10 @@
 		self.valid = False
 		try:
 			mod_name = ".".join([self._namepath, self.name])
-			self._module = __import__(mod_name, [],[], ["not empty"])
+			self._module = __import__(mod_name, [], [], ["not empty"])
 			self.valid = True
 		except ImportError as e:
-			print("MODULE; failed import", mod_name, "  error was:",e)
+			print("MODULE; failed import", mod_name, "  error was:", e)
 			return False
 		self.module_spec = self._module.module_spec
 		for submodule in self.module_spec['provides']:
@@ -61,7 +61,7 @@
 			module = kid['instance']
 		else:
 			try:
-				module = __import__(kid['module_name'], [],[], ["not empty"])
+				module = __import__(kid['module_name'], [], [], ["not empty"])
 				kid['instance'] = module
 				kid['is_imported'] = True
 			except ImportError:
diff --git a/pym/portage/emaint/modules/binhost/binhost.py b/pym/portage/emaint/modules/binhost/binhost.py
index c297545..9bf11cb 100644
--- a/pym/portage/emaint/modules/binhost/binhost.py
+++ b/pym/portage/emaint/modules/binhost/binhost.py
@@ -9,7 +9,9 @@
 from portage.util import writemsg
 
 import sys
+
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	long = int
 
 class BinhostHandler(object):
diff --git a/pym/portage/exception.py b/pym/portage/exception.py
index 1388c49..6fa5447 100644
--- a/pym/portage/exception.py
+++ b/pym/portage/exception.py
@@ -7,12 +7,13 @@
 from portage.localization import _
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 class PortageException(Exception):
 	"""General superclass for portage exceptions"""
 	if sys.hexversion >= 0x3000000:
-		def __init__(self,value):
+		def __init__(self, value):
 			self.value = value[:]
 
 		def __str__(self):
@@ -21,7 +22,7 @@
 			else:
 				return repr(self.value)
 	else:
-		def __init__(self,value):
+		def __init__(self, value):
 			self.value = value[:]
 			if isinstance(self.value, basestring):
 				self.value = _unicode_decode(self.value,
@@ -84,20 +85,20 @@
 	"""A directory was not found when it was expected to exist"""
 
 class OperationNotPermitted(PortageException):
-	from errno import EPERM as errno
 	"""An operation was not permitted operating system"""
+	from errno import EPERM as errno
 
 class OperationNotSupported(PortageException):
-	from errno import EOPNOTSUPP as errno
 	"""Operation not supported"""
+	from errno import EOPNOTSUPP as errno
 
 class PermissionDenied(PortageException):
-	from errno import EACCES as errno
 	"""Permission denied"""
+	from errno import EACCES as errno
 
 class TryAgain(PortageException):
-	from errno import EAGAIN as errno
 	"""Try again"""
+	from errno import EAGAIN as errno
 
 class TimeoutException(PortageException):
 	"""Operation timed out"""
diff --git a/pym/portage/getbinpkg.py b/pym/portage/getbinpkg.py
index ff656ba..985d443 100644
--- a/pym/portage/getbinpkg.py
+++ b/pym/portage/getbinpkg.py
@@ -42,7 +42,7 @@
 try:
 	import ftplib
 except ImportError as e:
-	sys.stderr.write(colorize("BAD","!!! CANNOT IMPORT FTPLIB: ")+str(e)+"\n")
+	sys.stderr.write(colorize("BAD", "!!! CANNOT IMPORT FTPLIB: ") + str(e) + "\n")
 else:
 	_all_errors.extend(ftplib.all_errors)
 
@@ -58,13 +58,14 @@
 		from httplib import ResponseNotReady as http_client_ResponseNotReady
 		from httplib import error as http_client_error
 except ImportError as e:
-	sys.stderr.write(colorize("BAD","!!! CANNOT IMPORT HTTP.CLIENT: ")+str(e)+"\n")
+	sys.stderr.write(colorize("BAD", "!!! CANNOT IMPORT HTTP.CLIENT: ") + str(e) + "\n")
 else:
 	_all_errors.append(http_client_error)
 
 _all_errors = tuple(_all_errors)
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	long = int
 
 def make_metadata_dict(data):
@@ -72,14 +73,13 @@
 	warnings.warn("portage.getbinpkg.make_metadata_dict() is deprecated",
 		DeprecationWarning, stacklevel=2)
 
-	myid,myglob = data
+	myid, _myglob = data
 	
 	mydict = {}
 	for k_bytes in portage.xpak.getindex_mem(myid):
 		k = _unicode_decode(k_bytes,
 			encoding=_encodings['repo.content'], errors='replace')
-		if k not in _all_metadata_keys and \
-			k != "CATEGORY":
+		if k not in _all_metadata_keys and k != "CATEGORY":
 			continue
 		v = _unicode_decode(portage.xpak.getitem(data, k_bytes),
 			encoding=_encodings['repo.content'], errors='replace')
@@ -101,7 +101,7 @@
 	def get_anchors(self):
 		return self.PL_anchors
 		
-	def get_anchors_by_prefix(self,prefix):
+	def get_anchors_by_prefix(self, prefix):
 		newlist = []
 		for x in self.PL_anchors:
 			if x.startswith(prefix):
@@ -109,7 +109,7 @@
 					newlist.append(x[:])
 		return newlist
 		
-	def get_anchors_by_suffix(self,suffix):
+	def get_anchors_by_suffix(self, suffix):
 		newlist = []
 		for x in self.PL_anchors:
 			if x.endswith(suffix):
@@ -117,10 +117,10 @@
 					newlist.append(x[:])
 		return newlist
 		
-	def	handle_endtag(self,tag):
+	def	handle_endtag(self, tag):
 		pass
 
-	def	handle_starttag(self,tag,attrs):
+	def	handle_starttag(self, tag, attrs):
 		if tag == "a":
 			for x in attrs:
 				if x[0] == 'href':
@@ -128,19 +128,19 @@
 						self.PL_anchors.append(urllib_parse_unquote(x[1]))
 
 
-def create_conn(baseurl,conn=None):
-	"""(baseurl,conn) --- Takes a protocol://site:port/address url, and an
+def create_conn(baseurl, conn=None):
+	"""Takes a protocol://site:port/address url, and an
 	optional connection. If connection is already active, it is passed on.
-	baseurl is reduced to address and is returned in tuple (conn,address)"""
+	Returns a tuple of (conn, protocol, address, http_params, http_headers)."""
 
 	warnings.warn("portage.getbinpkg.create_conn() is deprecated",
 		DeprecationWarning, stacklevel=2)
 
-	parts = baseurl.split("://",1)
+	parts = baseurl.split("://", 1)
 	if len(parts) != 2:
 		raise ValueError(_("Provided URI does not "
 			"contain protocol identifier. '%s'") % baseurl)
-	protocol,url_parts = parts
+	protocol, url_parts = parts
 	del parts
 
 	url_parts = url_parts.split("/")
@@ -151,7 +151,7 @@
 		address = "/"+"/".join(url_parts[1:])
 	del url_parts
 
-	userpass_host = host.split("@",1)
+	userpass_host = host.split("@", 1)
 	if len(userpass_host) == 1:
 		host = userpass_host[0]
 		userpass = ["anonymous"]
@@ -210,10 +210,10 @@
 				host = host[:-1]
 			conn = ftplib.FTP(host)
 			if password:
-				conn.login(username,password)
+				conn.login(username, password)
 			else:
 				sys.stderr.write(colorize("WARN",
-					_(" * No password provided for username"))+" '%s'" % \
+					_(" * No password provided for username")) + " '%s'" % \
 					(username,) + "\n\n")
 				conn.login(username)
 			conn.set_pasv(passive)
@@ -230,10 +230,10 @@
 		else:
 			raise NotImplementedError(_("%s is not a supported protocol.") % protocol)
 
-	return (conn,protocol,address, http_params, http_headers)
+	return (conn, protocol, address, http_params, http_headers)
 
 def make_ftp_request(conn, address, rest=None, dest=None):
-	"""(conn,address,rest) --- uses the conn object to request the data
+	"""Uses the |conn| object to request the data
 	from address and issuing a rest if it is passed."""
 
 	warnings.warn("portage.getbinpkg.make_ftp_request() is deprecated",
@@ -253,9 +253,9 @@
 			rest = 0
 
 		if rest != None:
-			mysocket = conn.transfercmd("RETR "+str(address), rest)
+			mysocket = conn.transfercmd("RETR %s" % str(address), rest)
 		else:
-			mysocket = conn.transfercmd("RETR "+str(address))
+			mysocket = conn.transfercmd("RETR %s" % str(address))
 
 		mydata = ""
 		while 1:
@@ -277,14 +277,14 @@
 		conn.voidresp()
 		conn.voidcmd("TYPE A")
 
-		return mydata,not (fsize==data_size),""
+		return mydata, (fsize != data_size), ""
 
 	except ValueError as e:
-		return None,int(str(e)[:4]),str(e)
+		return None, int(str(e)[:4]), str(e)
 	
 
-def make_http_request(conn, address, params={}, headers={}, dest=None):
-	"""(conn,address,params,headers) --- uses the conn object to request
+def make_http_request(conn, address, _params={}, headers={}, dest=None):
+	"""Uses the |conn| object to request
 	the data from address, performing Location forwarding and using the
 	optional params and headers."""
 
@@ -295,13 +295,13 @@
 	response = None
 	while (rc == 0) or (rc == 301) or (rc == 302):
 		try:
-			if (rc != 0):
-				conn,ignore,ignore,ignore,ignore = create_conn(address)
+			if rc != 0:
+				conn = create_conn(address)[0]
 			conn.request("GET", address, body=None, headers=headers)
 		except SystemExit as e:
 			raise
 		except Exception as e:
-			return None,None,"Server request failed: "+str(e)
+			return None, None, "Server request failed: %s" % str(e)
 		response = conn.getresponse()
 		rc = response.status
 
@@ -310,7 +310,7 @@
 			ignored_data = response.read()
 			del ignored_data
 			for x in str(response.msg).split("\n"):
-				parts = x.split(": ",1)
+				parts = x.split(": ", 1)
 				if parts[0] == "Location":
 					if (rc == 301):
 						sys.stderr.write(colorize("BAD",
@@ -323,13 +323,13 @@
 					break
 	
 	if (rc != 200) and (rc != 206):
-		return None,rc,"Server did not respond successfully ("+str(response.status)+": "+str(response.reason)+")"
+		return None, rc, "Server did not respond successfully (%s: %s)" % (str(response.status), str(response.reason))
 
 	if dest:
 		dest.write(response.read())
-		return "",0,""
+		return "", 0, ""
 
-	return response.read(),0,""
+	return response.read(), 0, ""
 
 
 def match_in_array(array, prefix="", suffix="", match_both=1, allow_overlap=0):
@@ -369,11 +369,10 @@
 			continue            # Doesn't match.
 
 	return myarray
-			
 
 
-def dir_get_list(baseurl,conn=None):
-	"""(baseurl[,connection]) -- Takes a base url to connect to and read from.
+def dir_get_list(baseurl, conn=None):
+	"""Takes a base url to connect to and read from.
 	URI should be in the form <proto>://<site>[:port]<path>
 	Connection is used for persistent connection instances."""
 
@@ -385,7 +384,7 @@
 	else:
 		keepconnection = 1
 
-	conn,protocol,address,params,headers = create_conn(baseurl, conn)
+	conn, protocol, address, params, headers = create_conn(baseurl, conn)
 
 	listing = None
 	if protocol in ["http","https"]:
@@ -393,7 +392,7 @@
 			# http servers can return a 400 error here
 			# if the address doesn't end with a slash.
 			address += "/"
-		page,rc,msg = make_http_request(conn,address,params,headers)
+		page, rc, msg = make_http_request(conn, address, params, headers)
 		
 		if page:
 			parser = ParseLinks()
@@ -423,8 +422,8 @@
 
 	return listing
 
-def file_get_metadata(baseurl,conn=None, chunk_size=3000):
-	"""(baseurl[,connection]) -- Takes a base url to connect to and read from.
+def file_get_metadata(baseurl, conn=None, chunk_size=3000):
+	"""Takes a base url to connect to and read from.
 	URI should be in the form <proto>://<site>[:port]<path>
 	Connection is used for persistent connection instances."""
 
@@ -436,13 +435,13 @@
 	else:
 		keepconnection = 1
 
-	conn,protocol,address,params,headers = create_conn(baseurl, conn)
+	conn, protocol, address, params, headers = create_conn(baseurl, conn)
 
 	if protocol in ["http","https"]:
-		headers["Range"] = "bytes=-"+str(chunk_size)
-		data,rc,msg = make_http_request(conn, address, params, headers)
+		headers["Range"] = "bytes=-%s" % str(chunk_size)
+		data, _rc, _msg = make_http_request(conn, address, params, headers)
 	elif protocol in ["ftp"]:
-		data,rc,msg = make_ftp_request(conn, address, -chunk_size)
+		data, _rc, _msg = make_ftp_request(conn, address, -chunk_size)
 	elif protocol == "sftp":
 		f = conn.open(address)
 		try:
@@ -455,21 +454,21 @@
 	
 	if data:
 		xpaksize = portage.xpak.decodeint(data[-8:-4])
-		if (xpaksize+8) > chunk_size:
-			myid = file_get_metadata(baseurl, conn, (xpaksize+8))
+		if (xpaksize + 8) > chunk_size:
+			myid = file_get_metadata(baseurl, conn, xpaksize + 8)
 			if not keepconnection:
 				conn.close()
 			return myid
 		else:
-			xpak_data = data[len(data)-(xpaksize+8):-8]
+			xpak_data = data[len(data) - (xpaksize + 8):-8]
 		del data
 
 		myid = portage.xpak.xsplit_mem(xpak_data)
 		if not myid:
-			myid = None,None
+			myid = None, None
 		del xpak_data
 	else:
-		myid = None,None
+		myid = None, None
 
 	if not keepconnection:
 		conn.close()
@@ -479,7 +478,7 @@
 
 def file_get(baseurl=None, dest=None, conn=None, fcmd=None, filename=None,
 	fcmd_vars=None):
-	"""(baseurl,dest,fcmd=) -- Takes a base url to connect to and read from.
+	"""Takes a base url to connect to and read from.
 	URI should be in the form <proto>://[user[:pass]@]<site>[:port]<path>"""
 
 	if not fcmd:
@@ -487,7 +486,7 @@
 		warnings.warn("Use of portage.getbinpkg.file_get() without the fcmd "
 			"parameter is deprecated", DeprecationWarning, stacklevel=2)
 
-		return file_get_lib(baseurl,dest,conn)
+		return file_get_lib(baseurl, dest, conn)
 
 	variables = {}
 
@@ -517,10 +516,10 @@
 	from portage.process import spawn
 	myfetch = portage.util.shlex_split(fcmd)
 	myfetch = [varexpand(x, mydict=variables) for x in myfetch]
-	fd_pipes= {
-		0:portage._get_stdin().fileno(),
-		1:sys.__stdout__.fileno(),
-		2:sys.__stdout__.fileno()
+	fd_pipes = {
+		0: portage._get_stdin().fileno(),
+		1: sys.__stdout__.fileno(),
+		2: sys.__stdout__.fileno()
 	}
 	sys.__stdout__.flush()
 	sys.__stderr__.flush()
@@ -530,8 +529,8 @@
 		return 0
 	return 1
 
-def file_get_lib(baseurl,dest,conn=None):
-	"""(baseurl[,connection]) -- Takes a base url to connect to and read from.
+def file_get_lib(baseurl, dest, conn=None):
+	"""Takes a base url to connect to and read from.
 	URI should be in the form <proto>://<site>[:port]<path>
 	Connection is used for persistent connection instances."""
 
@@ -543,13 +542,13 @@
 	else:
 		keepconnection = 1
 
-	conn,protocol,address,params,headers = create_conn(baseurl, conn)
+	conn, protocol, address, params, headers = create_conn(baseurl, conn)
 
-	sys.stderr.write("Fetching '"+str(os.path.basename(address)+"'\n"))
-	if protocol in ["http","https"]:
-		data,rc,msg = make_http_request(conn, address, params, headers, dest=dest)
+	sys.stderr.write("Fetching '" + str(os.path.basename(address)) + "'\n")
+	if protocol in ["http", "https"]:
+		data, rc, _msg = make_http_request(conn, address, params, headers, dest=dest)
 	elif protocol in ["ftp"]:
-		data,rc,msg = make_ftp_request(conn, address, dest=dest)
+		data, rc, _msg = make_ftp_request(conn, address, dest=dest)
 	elif protocol == "sftp":
 		rc = 0
 		try:
@@ -579,8 +578,6 @@
 
 
 def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=1, makepickle=None):
-	"""(baseurl,conn,chunk_size,verbose) -- 
-	"""
 
 	warnings.warn("portage.getbinpkg.dir_get_metadata() is deprecated",
 		DeprecationWarning, stacklevel=2)
@@ -597,7 +594,7 @@
 		makepickle = "/var/cache/edb/metadata.idx.most_recent"
 
 	try:
-		conn, protocol, address, params, headers = create_conn(baseurl, conn)
+		conn = create_conn(baseurl, conn)[0]
 	except _all_errors as e:
 		# ftplib.FTP(host) can raise errors like this:
 		#   socket.error: (111, 'Connection refused')
@@ -623,15 +620,15 @@
 	except Exception:
 		metadata = {}
 	if baseurl not in metadata:
-		metadata[baseurl]={}
+		metadata[baseurl] = {}
 	if "indexname" not in metadata[baseurl]:
-		metadata[baseurl]["indexname"]=""
+		metadata[baseurl]["indexname"] = ""
 	if "timestamp" not in metadata[baseurl]:
-		metadata[baseurl]["timestamp"]=0
+		metadata[baseurl]["timestamp"] = 0
 	if "unmodified" not in metadata[baseurl]:
-		metadata[baseurl]["unmodified"]=0
+		metadata[baseurl]["unmodified"] = 0
 	if "data" not in metadata[baseurl]:
-		metadata[baseurl]["data"]={}
+		metadata[baseurl]["data"] = {}
 
 	if not os.access(cache_path, os.W_OK):
 		sys.stderr.write(_("!!! Unable to write binary metadata to disk!\n"))
@@ -657,36 +654,36 @@
 	for mfile in metalist:
 		if usingcache and \
 		   ((metadata[baseurl]["indexname"] != mfile) or \
-			  (metadata[baseurl]["timestamp"] < int(time.time()-(60*60*24)))):
+			  (metadata[baseurl]["timestamp"] < int(time.time() - (60 * 60 * 24)))):
 			# Try to download new cache until we succeed on one.
-			data=""
-			for trynum in [1,2,3]:
+			data = ""
+			for trynum in [1, 2, 3]:
 				mytempfile = tempfile.TemporaryFile()
 				try:
-					file_get(baseurl+"/"+mfile, mytempfile, conn)
+					file_get(baseurl + "/" + mfile, mytempfile, conn)
 					if mytempfile.tell() > len(data):
 						mytempfile.seek(0)
 						data = mytempfile.read()
 				except ValueError as e:
-					sys.stderr.write("--- "+str(e)+"\n")
+					sys.stderr.write("--- %s\n" % str(e))
 					if trynum < 3:
 						sys.stderr.write(_("Retrying...\n"))
 					sys.stderr.flush()
 					mytempfile.close()
 					continue
-				if match_in_array([mfile],suffix=".gz"):
+				if match_in_array([mfile], suffix=".gz"):
 					out.write("gzip'd\n")
 					out.flush()
 					try:
 						import gzip
 						mytempfile.seek(0)
-						gzindex = gzip.GzipFile(mfile[:-3],'rb',9,mytempfile)
+						gzindex = gzip.GzipFile(mfile[:-3], 'rb', 9, mytempfile)
 						data = gzindex.read()
 					except SystemExit as e:
 						raise
 					except Exception as e:
 						mytempfile.close()
-						sys.stderr.write(_("!!! Failed to use gzip: ")+str(e)+"\n")
+						sys.stderr.write(_("!!! Failed to use gzip: ") + str(e) + "\n")
 						sys.stderr.flush()
 					mytempfile.close()
 				try:
@@ -701,8 +698,8 @@
 				except SystemExit as e:
 					raise
 				except Exception as e:
-					sys.stderr.write(_("!!! Failed to read data from index: ")+str(mfile)+"\n")
-					sys.stderr.write("!!! "+str(e)+"\n")
+					sys.stderr.write(_("!!! Failed to read data from index: ") + str(mfile) + "\n")
+					sys.stderr.write("!!! %s" % str(e))
 					sys.stderr.flush()
 			try:
 				metadatafile = open(_unicode_encode(metadatafilename,
@@ -713,7 +710,7 @@
 				raise
 			except Exception as e:
 				sys.stderr.write(_("!!! Failed to write binary metadata to disk!\n"))
-				sys.stderr.write("!!! "+str(e)+"\n")
+				sys.stderr.write("!!! %s\n" % str(e))
 				sys.stderr.flush()
 			break
 	# We may have metadata... now we run through the tbz2 list and check.
@@ -733,8 +730,8 @@
 				self.display()
 		def display(self):
 			self.out.write("\r"+colorize("WARN",
-				_("cache miss: '")+str(self.misses)+"'") + \
-				" --- "+colorize("GOOD", _("cache hit: '")+str(self.hits)+"'"))
+				_("cache miss: '") + str(self.misses) + "'") + \
+				" --- " + colorize("GOOD", _("cache hit: '") + str(self.hits) + "'"))
 			self.out.flush()
 
 	cache_stats = CacheStats(out)
@@ -751,7 +748,7 @@
 				cache_stats.update()
 			metadata[baseurl]["modified"] = 1
 			myid = None
-			for retry in range(3):
+			for _x in range(3):
 				try:
 					myid = file_get_metadata(
 						"/".join((baseurl.rstrip("/"), x.lstrip("/"))),
@@ -762,22 +759,20 @@
 					# make_http_request().  The docstring for this error in
 					# httplib.py says "Presumably, the server closed the
 					# connection before sending a valid response".
-					conn, protocol, address, params, headers = create_conn(
-						baseurl)
+					conn = create_conn(baseurl)[0]
 				except http_client_ResponseNotReady:
 					# With some http servers this error is known to be thrown
 					# from conn.getresponse() in make_http_request() when the
 					# remote file does not have appropriate read permissions.
 					# Maybe it's possible to recover from this exception in
 					# cases though, so retry.
-					conn, protocol, address, params, headers = create_conn(
-						baseurl)
+					conn = create_conn(baseurl)[0]
 
 			if myid and myid[0]:
 				metadata[baseurl]["data"][x] = make_metadata_dict(myid)
 			elif verbose:
 				sys.stderr.write(colorize("BAD",
-					_("!!! Failed to retrieve metadata on: "))+str(x)+"\n")
+					_("!!! Failed to retrieve metadata on: ")) + str(x) + "\n")
 				sys.stderr.flush()
 		else:
 			cache_stats.hits += 1
@@ -924,7 +919,6 @@
 		for metadata in sorted(self.packages,
 			key=portage.util.cmp_sort_key(_cmp_cpv)):
 			metadata = metadata.copy()
-			cpv = metadata["CPV"]
 			if self._inherited_keys:
 				for k in self._inherited_keys:
 					v = self.header.get(k)
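
file_get_metadata() above never downloads a whole binary package: it fetches only the last chunk_size bytes (HTTP Range header or a negative FTP REST offset) and slices the xpak metadata segment out of that tail. A minimal standalone sketch of the slicing step, assuming the usual tbz2 trailer of <xpak data> + 4-byte big-endian length + b"STOP"; the helper name is made up, and the real code uses portage.xpak.decodeint() and simply refetches with a bigger chunk when the xpak does not fit:

import struct

def xpak_segment_from_tail(tail):
	# tail: the final bytes of a tbz2/binpkg, at least xpak size + 8 long.
	if len(tail) < 8 or tail[-4:] != b"STOP":
		raise ValueError("not a tbz2/xpak tail")
	(xpaksize,) = struct.unpack(">I", tail[-8:-4])
	if xpaksize + 8 > len(tail):
		raise ValueError("tail too short; refetch %d bytes" % (xpaksize + 8))
	# Same slice as file_get_metadata(): drop the 8-byte trailer,
	# keep exactly xpaksize bytes before it.
	return tail[len(tail) - (xpaksize + 8):-8]
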
diff --git a/pym/portage/glsa.py b/pym/portage/glsa.py
index cac0f1a..0cfeba1 100644
--- a/pym/portage/glsa.py
+++ b/pym/portage/glsa.py
@@ -21,7 +21,7 @@
 from portage import _encodings
 from portage import _unicode_decode
 from portage import _unicode_encode
-from portage.versions import pkgsplit, vercmp, best
+from portage.versions import pkgsplit, vercmp
 from portage.util import grabfile
 from portage.const import PRIVATE_PATH
 from portage.localization import _
diff --git a/pym/portage/localization.py b/pym/portage/localization.py
index 2815ef5..2aac1f3 100644
--- a/pym/portage/localization.py
+++ b/pym/portage/localization.py
@@ -21,6 +21,7 @@
 
 	a_value = "value.of.a"
 	b_value = 123
-	c_value = [1,2,3,4]
-	print(_("A: %(a)s -- B: %(b)s -- C: %(c)s") % {"a":a_value,"b":b_value,"c":c_value})
+	c_value = [1, 2, 3, 4]
+	print(_("A: %(a)s -- B: %(b)s -- C: %(c)s") %
+	      {"a": a_value, "b": b_value, "c": c_value})
 
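
The reflowed demo above uses named placeholders so that a translated format string can reorder its fields without breaking; a hypothetical translation, purely for illustration:

a_value = "value.of.a"
b_value = 123
c_value = [1, 2, 3, 4]
# The dict is looked up by key, so a translator may reorder freely.
translated = "C: %(c)s / A: %(a)s / B: %(b)s"
print(translated % {"a": a_value, "b": b_value, "c": c_value})
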
diff --git a/pym/portage/locks.py b/pym/portage/locks.py
index 8571d8c..9593ae9 100644
--- a/pym/portage/locks.py
+++ b/pym/portage/locks.py
@@ -21,6 +21,7 @@
 from portage.localization import _
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 HARDLINK_FD = -2
@@ -246,8 +247,8 @@
 
 		_open_fds.add(myfd)
 
-	writemsg(str((lockfilename,myfd,unlinkfile))+"\n",1)
-	return (lockfilename,myfd,unlinkfile,locking_method)
+	writemsg(str((lockfilename, myfd, unlinkfile)) + "\n", 1)
+	return (lockfilename, myfd, unlinkfile, locking_method)
 
 def _fstat_nlink(fd):
 	"""
@@ -269,10 +270,10 @@
 
-	#XXX: Compatability hack.
+	#XXX: Compatibility hack.
 	if len(mytuple) == 3:
-		lockfilename,myfd,unlinkfile = mytuple
+		lockfilename, myfd, unlinkfile = mytuple
 		locking_method = fcntl.flock
 	elif len(mytuple) == 4:
-		lockfilename,myfd,unlinkfile,locking_method = mytuple
+		lockfilename, myfd, unlinkfile, locking_method = mytuple
 	else:
 		raise InvalidData
 
@@ -283,7 +284,7 @@
 	# myfd may be None here due to myfd = mypath in lockfile()
 	if isinstance(lockfilename, basestring) and \
 		not os.path.exists(lockfilename):
-		writemsg(_("lockfile does not exist '%s'\n") % lockfilename,1)
+		writemsg(_("lockfile does not exist '%s'\n") % lockfilename, 1)
 		if myfd is not None:
 			os.close(myfd)
 			_open_fds.remove(myfd)
@@ -291,9 +292,9 @@
 
 	try:
 		if myfd is None:
-			myfd = os.open(lockfilename, os.O_WRONLY,0o660)
+			myfd = os.open(lockfilename, os.O_WRONLY, 0o660)
 			unlinkfile = 1
-		locking_method(myfd,fcntl.LOCK_UN)
+		locking_method(myfd, fcntl.LOCK_UN)
 	except OSError:
 		if isinstance(lockfilename, basestring):
 			os.close(myfd)
@@ -308,14 +309,14 @@
 		# commenting until it is proved necessary.
 		#time.sleep(0.0001)
 		if unlinkfile:
-			locking_method(myfd,fcntl.LOCK_EX|fcntl.LOCK_NB)
+			locking_method(myfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
 			# We won the lock, so there isn't competition for it.
 			# We can safely delete the file.
 			writemsg(_("Got the lockfile...\n"), 1)
 			if _fstat_nlink(myfd) == 1:
 				os.unlink(lockfilename)
 				writemsg(_("Unlinked lockfile...\n"), 1)
-				locking_method(myfd,fcntl.LOCK_UN)
+				locking_method(myfd, fcntl.LOCK_UN)
 			else:
 				writemsg(_("lockfile does not exist '%s'\n") % lockfilename, 1)
 				os.close(myfd)
@@ -325,7 +326,7 @@
 		raise
 	except Exception as e:
 		writemsg(_("Failed to get lock... someone took it.\n"), 1)
-		writemsg(str(e)+"\n",1)
+		writemsg(str(e) + "\n", 1)
 
 	# why test lockfilename?  because we may have been handed an
 	# fd originally, and the caller might not like having their
@@ -337,14 +338,12 @@
 	return True
 
 
-
-
 def hardlock_name(path):
 	base, tail = os.path.split(path)
 	return os.path.join(base, ".%s.hardlock-%s-%s" %
 		(tail, os.uname()[1], os.getpid()))
 
-def hardlink_is_mine(link,lock):
+def hardlink_is_mine(link, lock):
 	try:
 		lock_st = os.stat(lock)
 		if lock_st.st_nlink == 2:
@@ -496,7 +495,6 @@
 		pass
 
 def hardlock_cleanup(path, remove_all_locks=False):
-	mypid  = str(os.getpid())
 	myhost = os.uname()[1]
 	mydl = os.listdir(path)
 
@@ -505,7 +503,7 @@
 
 	mylist = {}
 	for x in mydl:
-		if os.path.isfile(path+"/"+x):
+		if os.path.isfile(path + "/" + x):
 			parts = x.split(".hardlock-")
 			if len(parts) == 2:
 				filename = parts[0][1:]
@@ -522,17 +520,17 @@
 				mycount += 1
 
 
-	results.append(_("Found %(count)s locks") % {"count":mycount})
+	results.append(_("Found %(count)s locks") % {"count": mycount})
 	
 	for x in mylist:
 		if myhost in mylist[x] or remove_all_locks:
-			mylockname = hardlock_name(path+"/"+x)
-			if hardlink_is_mine(mylockname, path+"/"+x) or \
-			   not os.path.exists(path+"/"+x) or \
+			mylockname = hardlock_name(path + "/" + x)
+			if hardlink_is_mine(mylockname, path + "/" + x) or \
+			   not os.path.exists(path + "/" + x) or \
 				 remove_all_locks:
 				for y in mylist[x]:
 					for z in mylist[x][y]:
-						filename = path+"/."+x+".hardlock-"+y+"-"+z
+						filename = path + "/." + x + ".hardlock-" + y + "-" + z
 						if filename == mylockname:
 							continue
 						try:
@@ -542,8 +540,8 @@
 						except OSError:
 							pass
 				try:
-					os.unlink(path+"/"+x)
-					results.append(_("Unlinked: ") + path+"/"+x)
+					os.unlink(path + "/" + x)
+					results.append(_("Unlinked: ") + path + "/" + x)
 					os.unlink(mylockname)
 					results.append(_("Unlinked: ") + mylockname)
 				except OSError:
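
hardlock_name() and hardlink_is_mine() above serve portage's hardlink-based (NFS-safe) locking: each host/pid links a uniquely named file to the shared lock and then trusts the resulting link count rather than the return value of link(). A minimal sketch of the claim step; try_hardlock is a made-up helper, not the portage API:

import os

def try_hardlock(lockpath):
	# Unique per host and pid, mirroring hardlock_name() above.
	base, tail = os.path.split(lockpath)
	myname = os.path.join(base, ".%s.hardlock-%s-%s"
		% (tail, os.uname()[1], os.getpid()))
	open(myname, "w").close()
	try:
		os.link(myname, lockpath)
	except OSError:
		# On NFS, link() can report failure even when it succeeded, so
		# ownership is decided by the link count (cf. hardlink_is_mine()).
		pass
	return os.stat(myname).st_nlink == 2
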
diff --git a/pym/portage/mail.py b/pym/portage/mail.py
index 3fcadd2..e5913d7 100644
--- a/pym/portage/mail.py
+++ b/pym/portage/mail.py
@@ -21,6 +21,7 @@
 import portage
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 	def _force_ascii_if_necessary(s):
@@ -117,13 +118,13 @@
 		if "@" in mymailuri:
 			myauthdata, myconndata = mymailuri.rsplit("@", 1)
 			try:
-				mymailuser,mymailpasswd = myauthdata.split(":")
+				mymailuser, mymailpasswd = myauthdata.split(":")
 			except ValueError:
 				print(_("!!! invalid SMTP AUTH configuration, trying unauthenticated ..."))
 		else:
 			myconndata = mymailuri
 		if ":" in myconndata:
-			mymailhost,mymailport = myconndata.split(":")
+			mymailhost, mymailport = myconndata.split(":")
 		else:
 			mymailhost = myconndata
 	else:
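
The hunk above parses PORTAGE_ELOG_MAILURI values of the form user:password@host:port. A condensed sketch of the same rsplit/split logic; the helper name is invented, and the real code instead warns and falls back to unauthenticated SMTP when the auth part is malformed:

def split_mailuri(mymailuri):
	mymailuser = mymailpasswd = mymailport = None
	myconndata = mymailuri
	if "@" in mymailuri:
		# rsplit so that an "@" inside the password cannot eat the host part.
		myauthdata, myconndata = mymailuri.rsplit("@", 1)
		if ":" in myauthdata:
			mymailuser, mymailpasswd = myauthdata.split(":", 1)
		else:
			mymailuser = myauthdata
	if ":" in myconndata:
		mymailhost, mymailport = myconndata.split(":", 1)
	else:
		mymailhost = myconndata
	return mymailuser, mymailpasswd, mymailhost, mymailport
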
diff --git a/pym/portage/manifest.py b/pym/portage/manifest.py
index 510e203..11dec94 100644
--- a/pym/portage/manifest.py
+++ b/pym/portage/manifest.py
@@ -33,6 +33,7 @@
 	re.UNICODE)
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	_unicode = str
 	basestring = str
 else:
diff --git a/pym/portage/output.py b/pym/portage/output.py
index fc1b042..7f362c5 100644
--- a/pym/portage/output.py
+++ b/pym/portage/output.py
@@ -24,8 +24,8 @@
 	ParseError, PermissionDenied, PortageException
 from portage.localization import _
 
-havecolor=1
-dotitles=1
+havecolor = 1
+dotitles = 1
 
 _styles = {}
 """Maps style class to tuple of attribute names."""
@@ -299,12 +299,12 @@
 
 def notitles():
 	"turn off title setting"
-	dotitles=0
+	dotitles = 0
 
 def nocolor():
 	"turn off colorization"
 	global havecolor
-	havecolor=0
+	havecolor = 0
 
 def resetColor():
 	return codes["reset"]
@@ -341,9 +341,11 @@
 	else:
 		return text
 
-compat_functions_colors = ["bold","white","teal","turquoise","darkteal",
-	"fuchsia","purple","blue","darkblue","green","darkgreen","yellow",
-	"brown","darkyellow","red","darkred"]
+compat_functions_colors = [
+	"bold", "white", "teal", "turquoise", "darkteal",
+	"fuchsia", "purple", "blue", "darkblue", "green", "darkgreen", "yellow",
+	"brown", "darkyellow", "red", "darkred",
+]
 
 class create_color_func(object):
 	__slots__ = ("_color_key",)
diff --git a/pym/portage/package/ebuild/config.py b/pym/portage/package/ebuild/config.py
index f5ef982..9e9b3fc 100644
--- a/pym/portage/package/ebuild/config.py
+++ b/pym/portage/package/ebuild/config.py
@@ -62,6 +62,7 @@
 from portage.package.ebuild._config.unpack_dependencies import load_unpack_dependencies_configuration
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 _feature_flags_cache = {}
diff --git a/pym/portage/package/ebuild/getmaskingstatus.py b/pym/portage/package/ebuild/getmaskingstatus.py
index c8954aa..7708f21 100644
--- a/pym/portage/package/ebuild/getmaskingstatus.py
+++ b/pym/portage/package/ebuild/getmaskingstatus.py
@@ -15,6 +15,7 @@
 from portage.versions import catpkgsplit, _pkg_str
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 class _UnmaskHint(object):
diff --git a/pym/portage/process.py b/pym/portage/process.py
index 9ae7a55..f2e583b 100644
--- a/pym/portage/process.py
+++ b/pym/portage/process.py
@@ -33,6 +33,7 @@
 	max_fd_limit = 256
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 # Support PEP 446 for Python >=3.4
@@ -117,14 +118,14 @@
 def spawn_sandbox(mycommand, opt_name=None, **keywords):
 	if not sandbox_capable:
 		return spawn_bash(mycommand, opt_name=opt_name, **keywords)
-	args=[SANDBOX_BINARY]
+	args = [SANDBOX_BINARY]
 	if not opt_name:
 		opt_name = os.path.basename(mycommand.split()[0])
 	args.append(mycommand)
 	return spawn(args, opt_name=opt_name, **keywords)
 
 def spawn_fakeroot(mycommand, fakeroot_state=None, opt_name=None, **keywords):
-	args=[FAKEROOT_BINARY]
+	args = [FAKEROOT_BINARY]
 	if not opt_name:
 		opt_name = os.path.basename(mycommand.split()[0])
 	if fakeroot_state:
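
spawn_sandbox() and spawn_fakeroot() above only prepend the wrapper binary to the argument list before handing off to spawn(), which also accepts the fd_pipes mapping of child descriptor to parent descriptor built in the getbinpkg.py hunk earlier. A hedged example call; the command is arbitrary and stderr is folded into stdout the same way file_get() does it:

import sys
from portage.process import spawn

fd_pipes = {
	0: sys.__stdin__.fileno(),
	1: sys.__stdout__.fileno(),
	2: sys.__stdout__.fileno(),  # send the child's stderr to our stdout
}
retval = spawn(["/bin/true"], fd_pipes=fd_pipes)
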
diff --git a/pym/portage/proxy/lazyimport.py b/pym/portage/proxy/lazyimport.py
index 3057c05..8dfc598 100644
--- a/pym/portage/proxy/lazyimport.py
+++ b/pym/portage/proxy/lazyimport.py
@@ -14,6 +14,7 @@
 from portage.proxy.objectproxy import ObjectProxy
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 _module_proxies = {}
diff --git a/pym/portage/repository/config.py b/pym/portage/repository/config.py
index f89f098..c931c86 100644
--- a/pym/portage/repository/config.py
+++ b/pym/portage/repository/config.py
@@ -33,6 +33,7 @@
 from portage import manifest
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 # Characters prohibited by repoman's file.name check.
diff --git a/pym/portage/tests/dep/test_match_from_list.py b/pym/portage/tests/dep/test_match_from_list.py
index 8a1c9e2..3547694 100644
--- a/pym/portage/tests/dep/test_match_from_list.py
+++ b/pym/portage/tests/dep/test_match_from_list.py
@@ -7,6 +7,7 @@
 from portage.versions import catpkgsplit, _pkg_str
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 class Package(object):
diff --git a/pym/portage/tests/resolver/ResolverPlayground.py b/pym/portage/tests/resolver/ResolverPlayground.py
index fabdefe..077e271 100644
--- a/pym/portage/tests/resolver/ResolverPlayground.py
+++ b/pym/portage/tests/resolver/ResolverPlayground.py
@@ -26,6 +26,7 @@
 from _emerge.RootConfig import RootConfig
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 class ResolverPlayground(object):
diff --git a/pym/portage/tests/unicode/test_string_format.py b/pym/portage/tests/unicode/test_string_format.py
index 6723883..d4443b2 100644
--- a/pym/portage/tests/unicode/test_string_format.py
+++ b/pym/portage/tests/unicode/test_string_format.py
@@ -12,6 +12,7 @@
 from _emerge.UseFlagDisplay import UseFlagDisplay
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 STR_IS_UNICODE = sys.hexversion >= 0x3000000
diff --git a/pym/portage/update.py b/pym/portage/update.py
index 92aba9a..78f764d 100644
--- a/pym/portage/update.py
+++ b/pym/portage/update.py
@@ -28,6 +28,7 @@
 from portage.localization import _
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	long = int
 	_unicode = str
 else:
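
This three-line preamble is the pattern repeated across the files in this patch: on Python 3 the removed builtins are aliased back so the rest of the module can keep its Python 2 spellings, and the comment silences pylint's redefined-builtin warning (W0622) that the aliasing would otherwise trigger:

import sys

if sys.hexversion >= 0x3000000:
	# pylint: disable=W0622
	basestring = str  # Python 3 dropped basestring
	long = int        # Python 3 merged long into int

# ...so checks like isinstance(value, basestring) keep working on both versions.
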
diff --git a/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py b/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py
index 4bc64db..163f48a 100644
--- a/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py
+++ b/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py
@@ -25,6 +25,7 @@
 from portage.locks import lockfile, unlockfile
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	basestring = str
 
 class PreservedLibsRegistry(object):
diff --git a/pym/portage/util/_urlopen.py b/pym/portage/util/_urlopen.py
index 15f041a..f209c26 100644
--- a/pym/portage/util/_urlopen.py
+++ b/pym/portage/util/_urlopen.py
@@ -19,6 +19,7 @@
 	from urllib import splituser as urllib_parse_splituser
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	long = int
 
 # to account for the difference between TIMESTAMP of the index' contents
diff --git a/pym/portage/util/env_update.py b/pym/portage/util/env_update.py
index 5fddaac..143217a 100644
--- a/pym/portage/util/env_update.py
+++ b/pym/portage/util/env_update.py
@@ -24,6 +24,7 @@
 from portage.package.ebuild.config import config
 
 if sys.hexversion >= 0x3000000:
+	# pylint: disable=W0622
 	long = int
 
 def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None,
diff --git a/pym/portage/versions.py b/pym/portage/versions.py
index 615d522..2c9fe5b 100644
--- a/pym/portage/versions.py
+++ b/pym/portage/versions.py
@@ -337,11 +337,11 @@
 	except AttributeError:
 		pass
 	mysplit = mydata.split('/', 1)
-	p_split=None
-	if len(mysplit)==1:
+	p_split = None
+	if len(mysplit) == 1:
 		cat = _missing_cat
 		p_split = _pkgsplit(mydata, eapi=eapi)
-	elif len(mysplit)==2:
+	elif len(mysplit) == 2:
 		cat = mysplit[0]
 		if _cat_re.match(cat) is not None:
 			p_split = _pkgsplit(mysplit[1], eapi=eapi)
@@ -539,7 +539,7 @@
 	return cmp_sort_key(cmp_cpv)
 
 def catsplit(mydep):
-        return mydep.split("/", 1)
+	return mydep.split("/", 1)
 
 def best(mymatches, eapi=None):
 	"""Accepts None arguments; assumes matches are valid."""
diff --git a/pym/portage/xpak.py b/pym/portage/xpak.py
index 73f84ab..34ff70b 100644
--- a/pym/portage/xpak.py
+++ b/pym/portage/xpak.py
@@ -15,10 +15,12 @@
 # (integer) == encodeint(integer)  ===> 4 characters (big-endian copy)
 # '+' means concatenate the fields ===> All chunks are strings
 
-__all__ = ['addtolist', 'decodeint', 'encodeint', 'getboth',
+__all__ = [
+	'addtolist', 'decodeint', 'encodeint', 'getboth',
 	'getindex', 'getindex_mem', 'getitem', 'listindex',
 	'searchindex', 'tbz2', 'xpak_mem', 'xpak', 'xpand',
-	'xsplit', 'xsplit_mem']
+	'xsplit', 'xsplit_mem',
+]
 
 import array
 import errno
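
Per the format comment near the top of xpak.py, encodeint() stores an integer as a 4-character big-endian copy. A struct-based equivalent of that helper pair, offered as a sketch (the real implementations operate on array/bytes objects directly):

import struct

def encodeint_equiv(myint):
	# 4 bytes, big-endian -- the "(integer) == encodeint(integer)" rule above.
	return struct.pack(">I", myint)

def decodeint_equiv(mybytes):
	return struct.unpack(">I", mybytes[:4])[0]

assert decodeint_equiv(encodeint_equiv(0x1234)) == 0x1234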