Gentoo's Bugzilla – Attachment 198279 Details for Bug 278127: Misc portage code cleanup.
Description: "/"+x+":"+foo+".ebuild" -> "/" + x + ":" + foo + ".ebuild"
Filename:    plus_spacing.patch
MIME Type:   text/plain
Creator:     Alec Warner (RETIRED)
Created:     2009-07-17 10:18:16 UTC
Size:        159.30 KB
Flags:       patch, obsolete
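
For reference, the entire patch makes one mechanical, behaviour-preserving change: string concatenations gain a single space on each side of the + operator, matching PEP 8's guidance on spacing around binary operators. A minimal sketch of the convention, reusing the placeholder names x and foo from the description above (illustrative only, not code taken from portage):

    # Before: operands packed against the concatenation operator
    path = "/"+x+":"+foo+".ebuild"

    # After: a single space on each side of '+' (PEP 8 binary-operator spacing)
    path = "/" + x + ":" + foo + ".ebuild"
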
>Index: pym/repoman/utilities.py >=================================================================== >--- pym/repoman/utilities.py (revision 13832) >+++ pym/repoman/utilities.py (working copy) >@@ -74,7 +74,7 @@ > continue > if line[0] not in "UPMARD": # Updates,Patches,Modified,Added,Removed/Replaced(svn),Deleted(svn) > logging.error(red("!!! Please fix the following issues reported " + \ >- "from cvs: ")+green("(U,P,M,A,R,D are ok)")) >+ "from cvs: ") + green("(U,P,M,A,R,D are ok)")) > logging.error(red("!!! Note: This is a pretend/no-modify pass...")) > logging.error(retval[1]) > sys.exit(1) >Index: pym/portage/dbapi/bintree.py >=================================================================== >--- pym/portage/dbapi/bintree.py (revision 13832) >+++ pym/portage/dbapi/bintree.py (working copy) >@@ -59,7 +59,7 @@ > cache_me = True > mysplit = mycpv.split("/") > mylist = [] >- tbz2name = mysplit[1]+".tbz2" >+ tbz2name = mysplit[1] + ".tbz2" > if not self.bintree._remotepkgs or \ > not self.bintree.isremote(mycpv): > tbz2_path = self.bintree.getname(mycpv) >@@ -229,7 +229,7 @@ > if (mynewpkg != myoldpkg) and os.path.exists(self.getname(mynewcpv)): > writemsg(_("!!! Cannot update binary: Destination exists.\n"), > noiselevel=-1) >- writemsg("!!! "+mycpv+" -> "+mynewcpv+"\n", noiselevel=-1) >+ writemsg("!!! " + mycpv + " -> " + mynewcpv + "\n", noiselevel=-1) > continue > > tbz2path = self.getname(mycpv) >@@ -244,12 +244,12 @@ > updated_items = update_dbentries([mylist], mydata) > mydata.update(updated_items) > mydata["PF"] = mynewpkg + "\n" >- mydata["CATEGORY"] = mynewcat+"\n" >+ mydata["CATEGORY"] = mynewcat + "\n" > if mynewpkg != myoldpkg: >- ebuild_data = mydata.get(myoldpkg+".ebuild") >+ ebuild_data = mydata.get(myoldpkg + ".ebuild") > if ebuild_data is not None: >- mydata[mynewpkg+".ebuild"] = ebuild_data >- del mydata[myoldpkg+".ebuild"] >+ mydata[mynewpkg + ".ebuild"] = ebuild_data >+ del mydata[myoldpkg + ".ebuild"] > mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata)) > > self.dbapi.cpv_remove(mycpv) >@@ -462,7 +462,7 @@ > # reading the xpak if possible. > if mydir != "All": > possibilities = None >- d = metadata.get(mydir+"/"+myfile[:-5]) >+ d = metadata.get(mydir + "/" + myfile[:-5]) > if d: > possibilities = [d] > else: >@@ -757,7 +757,7 @@ > del self.remotepkgs[mypkg] > continue > mycat = self.remotepkgs[mypkg]["CATEGORY"].strip() >- fullpkg = mycat+"/"+mypkg[:-5] >+ fullpkg = mycat + "/" + mypkg[:-5] > > if fullpkg in metadata: > # When using this old protocol, comparison with the remote >@@ -777,7 +777,7 @@ > mykey = dep_getkey(fullpkg) > try: > # invalid tbz2's can hurt things. >- #print "cpv_inject("+str(fullpkg)+")" >+ #print "cpv_inject(" + str(fullpkg) + ")" > self.dbapi.cpv_inject(fullpkg) > remote_metadata = self.remotepkgs[mypkg] > for k, v in remote_metadata.items(): >@@ -997,7 +997,7 @@ > if not self.populated: > self.populate() > return self.dbapi.match( >- dep_expand("="+cpv, mydb=self.dbapi, settings=self.settings)) >+ dep_expand("=" + cpv, mydb=self.dbapi, settings=self.settings)) > > def dep_bestmatch(self, mydep): > "compatibility method -- all matches, not just visible ones" >@@ -1054,7 +1054,7 @@ > def gettbz2(self, pkgname): > """Fetches the package from a remote site, if necessary. 
Attempts to > resume if the file appears to be partially downloaded.""" >- print "Fetching '"+str(pkgname)+"'" >+ print "Fetching '" + str(pkgname) + "'" > tbz2_path = self.getname(pkgname) > tbz2name = os.path.basename(tbz2_path) > resume = False >@@ -1073,7 +1073,7 @@ > if self._remote_has_index: > rel_url = self._remotepkgs[pkgname].get("PATH") > if not rel_url: >- rel_url = pkgname+".tbz2" >+ rel_url = pkgname + ".tbz2" > url = self._remote_base_uri.rstrip("/") + "/" + rel_url.lstrip("/") > else: > url = self.settings["PORTAGE_BINHOST"].rstrip("/") + "/" + tbz2name >Index: pym/portage/dbapi/vartree.py >=================================================================== >--- pym/portage/dbapi/vartree.py (revision 13832) >+++ pym/portage/dbapi/vartree.py (working copy) >@@ -110,7 +110,7 @@ > @type paths: List > """ > cp = "/".join(catpkgsplit(cpv)[:2]) >- cps = cp+":"+slot >+ cps = cp + ":" + slot > if len(paths) == 0 and cps in self._data \ > and self._data[cps][0] == cpv and int(self._data[cps][1]) == int(counter): > del self._data[cps] >@@ -866,8 +866,8 @@ > if e.errno != errno.ENOENT: > raise > del e >- write_atomic(os.path.join(newpath, "PF"), new_pf+"\n") >- write_atomic(os.path.join(newpath, "CATEGORY"), mynewcat+"\n") >+ write_atomic(os.path.join(newpath, "PF"), new_pf + "\n") >+ write_atomic(os.path.join(newpath, "CATEGORY"), mynewcat + "\n") > fixdbentries([mylist], newpath) > return moves > >@@ -902,7 +902,7 @@ > continue > if len(mysplit) > 1: > if ps[0] == mysplit[1]: >- returnme.append(mysplit[0]+"/"+x) >+ returnme.append(mysplit[0] + "/" + x) > self._cpv_sort_ascending(returnme) > if use_cache: > self.cpcache[mycp] = [mystat, returnme[:]] >@@ -968,7 +968,7 @@ > if not mysplit: > self.invalidentry(self.getpath(y)) > continue >- d[mysplit[0]+"/"+mysplit[1]] = None >+ d[mysplit[0] + "/" + mysplit[1]] = None > return d.keys() > > def checkblockers(self, origdep): >@@ -1009,7 +1009,7 @@ > return list(self._iter_match(mydep, > self.cp_list(mydep.cp, use_cache=use_cache))) > try: >- curmtime = os.stat(self.root+VDB_PATH+"/"+mycat).st_mtime >+ curmtime = os.stat(self.root + VDB_PATH + "/" + mycat).st_mtime > except (IOError, OSError): > curmtime=0 > >@@ -1025,7 +1025,7 @@ > return self.matchcache[mycat][mydep][:] > > def findname(self, mycpv): >- return self.getpath(str(mycpv), filename=catsplit(mycpv)[1]+".ebuild") >+ return self.getpath(str(mycpv), filename=catsplit(mycpv)[1] + ".ebuild") > > def flush_cache(self): > """If the current user has permission and the internal aux_get cache has >@@ -1681,7 +1681,7 @@ > > def getebuildpath(self, fullpackage): > cat, package = catsplit(fullpackage) >- return self.getpath(fullpackage, filename=package+".ebuild") >+ return self.getpath(fullpackage, filename=package + ".ebuild") > > def getnode(self, mykey, use_cache=1): > mykey = key_expand(mykey, mydb=self.dbapi, use_cache=use_cache, >@@ -1697,7 +1697,7 @@ > self.dbapi.invalidentry(self.getpath(mysplit[0], filename=x)) > continue > if mypsplit[0] == mysplit[1]: >- appendme = [mysplit[0]+"/"+x, [mysplit[0], mypsplit[0], mypsplit[1], mypsplit[2]]] >+ appendme = [mysplit[0] + "/" + x, [mysplit[0], mypsplit[0], mypsplit[1], mypsplit[2]]] > returnme.append(appendme) > return returnme > >@@ -1781,9 +1781,9 @@ > self._scheduler = scheduler > > self.dbroot = normalize_path(os.path.join(myroot, VDB_PATH)) >- self.dbcatdir = self.dbroot+"/"+cat >- self.dbpkgdir = self.dbcatdir+"/"+pkg >- self.dbtmpdir = self.dbcatdir+"/-MERGING-"+pkg >+ self.dbcatdir = self.dbroot + "/" + cat >+ self.dbpkgdir = 
self.dbcatdir + "/" + pkg >+ self.dbtmpdir = self.dbcatdir + "/-MERGING-" + pkg > self.dbdir = self.dbpkgdir > > self._lock_vdb = None >@@ -1844,8 +1844,8 @@ > """ > For a given db entry (self), erase the CONTENTS values. > """ >- if os.path.exists(self.dbdir+"/CONTENTS"): >- os.unlink(self.dbdir+"/CONTENTS") >+ if os.path.exists(self.dbdir + "/CONTENTS"): >+ os.unlink(self.dbdir + "/CONTENTS") > > def _clear_contents_cache(self): > self.contentscache = None >@@ -2012,7 +2012,7 @@ > # Clean up after vardbapi.move_ent() breakage in > # portage versions before 2.1.2 > os.rename(os.path.join(self.dbdir, x), myebuildpath) >- write_atomic(os.path.join(self.dbdir, "PF"), self.pkg+"\n") >+ write_atomic(os.path.join(self.dbdir, "PF"), self.pkg + "\n") > break > > self.settings.setcpv(self.mycpv, mydb=self.vartree.dbapi) >@@ -2922,7 +2922,7 @@ > else: > raise > if f[0] != "/": >- f="/"+f >+ f="/" + f > > plibs = plib_inodes.get((dest_lstat.st_dev, dest_lstat.st_ino)) > if plibs: >@@ -3392,13 +3392,13 @@ > > # XXX: Decide how to handle failures here. > if a != os.EX_OK: >- showMessage(_("!!! FAILED preinst: ")+str(a)+"\n", >+ showMessage(_("!!! FAILED preinst: ") + str(a) + "\n", > level=logging.ERROR, noiselevel=-1) > return a > > # copy "info" files (like SLOT, CFLAGS, etc.) into the database > for x in os.listdir(inforoot): >- self.copyfile(inforoot+"/"+x) >+ self.copyfile(inforoot + "/" + x) > > # write local package counter for recording > counter = self.vartree.dbapi.counter_tick(self.myroot, mycpv=self.mycpv) >@@ -3610,7 +3610,7 @@ > > # XXX: Decide how to handle failures here. > if a != os.EX_OK: >- showMessage(_("!!! FAILED postinst: ")+str(a)+"\n", >+ showMessage(_("!!! FAILED postinst: ") + str(a) + "\n", > level=logging.ERROR, noiselevel=-1) > return a > >@@ -3756,7 +3756,7 @@ > mymtime = movefile(mysrc, mydest, newmtime=thismtime, sstat=mystat, mysettings=self.settings) > if mymtime != None: > showMessage(">>> %s -> %s\n" % (mydest, myto)) >- outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime)+"\n") >+ outfile.write("sym " + myrealdest + " -> " + myto + " " + str(mymtime) + "\n") > else: > showMessage(_("!!! Failed to move file.\n"), > level=logging.ERROR, noiselevel=-1) >@@ -3779,7 +3779,7 @@ > writemsg(_("\n!!! Cannot write to '%s'.\n") % mydest, noiselevel=-1) > writemsg(_("!!! Please check permissions and directories for broken symlinks.\n")) > writemsg(_("!!! You may start the merge process again by using ebuild:\n")) >- writemsg("!!! ebuild "+self.settings["PORTDIR"]+"/"+self.cat+"/"+pkgstuff[0]+"/"+self.pkg+".ebuild merge\n") >+ writemsg("!!! ebuild " + self.settings["PORTDIR"] + "/" + self.cat + "/" + pkgstuff[0] + "/" + self.pkg + ".ebuild merge\n") > writemsg(_("!!! And finish by running this: env-update\n\n")) > return 1 > >@@ -3791,7 +3791,7 @@ > bsd_chflags.lchflags(mydest, dflags) > else: > # a non-directory and non-symlink-to-directory. Won't work for us. Move out of the way. 
>- if movefile(mydest, mydest+".backup", mysettings=self.settings) is None: >+ if movefile(mydest, mydest + ".backup", mysettings=self.settings) is None: > return 1 > showMessage(_("bak %s %s.backup\n") % (mydest, mydest), > level=logging.ERROR, noiselevel=-1) >@@ -3818,7 +3818,7 @@ > os.chmod(mydest, mystat[0]) > os.chown(mydest, mystat[4], mystat[5]) > showMessage(">>> %s/\n" % mydest) >- outfile.write("dir "+myrealdest+"\n") >+ outfile.write("dir " + myrealdest + "\n") > # recurse and merge this directory > if self.mergeme(srcroot, destroot, outfile, secondhand, > join(offset, x), cfgfiledict, thismtime): >@@ -3907,7 +3907,7 @@ > zing = ">>>" > > if mymtime != None: >- outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime)+"\n") >+ outfile.write("obj " + myrealdest + " " + mymd5 + " " + str(mymtime) + "\n") > showMessage("%s %s\n" % (zing,mydest)) > else: > # we are merging a fifo or device node >@@ -3998,20 +3998,20 @@ > > def getstring(self,name): > "returns contents of a file with whitespace converted to spaces" >- if not os.path.exists(self.dbdir+"/"+name): >+ if not os.path.exists(self.dbdir + "/" + name): > return "" >- myfile = open(self.dbdir+"/"+name,"r") >+ myfile = open(self.dbdir + "/" + name,"r") > mydata = myfile.read().split() > myfile.close() > return " ".join(mydata) > > def copyfile(self,fname): >- shutil.copyfile(fname,self.dbdir+"/"+os.path.basename(fname)) >+ shutil.copyfile(fname,self.dbdir + "/" + os.path.basename(fname)) > > def getfile(self,fname): >- if not os.path.exists(self.dbdir+"/"+fname): >+ if not os.path.exists(self.dbdir + "/" + fname): > return "" >- myfile = open(self.dbdir+"/"+fname,"r") >+ myfile = open(self.dbdir + "/" + fname,"r") > mydata = myfile.read() > myfile.close() > return mydata >@@ -4023,9 +4023,9 @@ > write_atomic(os.path.join(self.dbdir, fname), data, mode=mode) > > def getelements(self,ename): >- if not os.path.exists(self.dbdir+"/"+ename): >+ if not os.path.exists(self.dbdir + "/" + ename): > return [] >- myelement = open(self.dbdir+"/"+ename,"r") >+ myelement = open(self.dbdir + "/" + ename,"r") > mylines = myelement.readlines() > myreturn = [] > for x in mylines: >@@ -4035,9 +4035,9 @@ > return myreturn > > def setelements(self,mylist,ename): >- myelement = open(self.dbdir+"/"+ename,"w") >+ myelement = open(self.dbdir + "/" + ename,"w") > for x in mylist: >- myelement.write(x+"\n") >+ myelement.write(x + "\n") > myelement.close() > > def isregular(self): >Index: pym/portage/dbapi/__init__.py >=================================================================== >--- pym/portage/dbapi/__init__.py (revision 13832) >+++ pym/portage/dbapi/__init__.py (working copy) >@@ -198,7 +198,7 @@ > pass > elif '/-MERGING-' in mypath: > if os.path.exists(mypath): >- writemsg(colorize("BAD", _("INCOMPLETE MERGE:"))+" %s\n" % mypath, >+ writemsg(colorize("BAD", _("INCOMPLETE MERGE:")) + " %s\n" % mypath, > noiselevel=-1) > else: > writemsg("!!! 
Invalid db entry: %s\n" % mypath, noiselevel=-1) >@@ -233,9 +233,9 @@ > if metadata_updates: > aux_update(cpv, metadata_updates) > if onUpdate: >- onUpdate(maxval, i+1) >+ onUpdate(maxval, i + 1) > if onProgress: >- onProgress(maxval, i+1) >+ onProgress(maxval, i + 1) > > def move_slot_ent(self, mylist): > """This function takes a sequence: >@@ -256,6 +256,6 @@ > if slot != origslot: > continue > moves += 1 >- mydata = {"SLOT": newslot+"\n"} >+ mydata = {"SLOT": newslot + "\n"} > self.aux_update(mycpv, mydata) > return moves >Index: pym/portage/dbapi/porttree.py >=================================================================== >--- pym/portage/dbapi/porttree.py (revision 13832) >+++ pym/portage/dbapi/porttree.py (working copy) >@@ -591,7 +591,7 @@ > mydata, st, emtime = self._pull_valid_cache(mycpv, myebuild, mylocation) > doregen = mydata is None > >- writemsg(_("auxdb is valid: ")+str(not doregen)+" "+str(pkg)+"\n", 2) >+ writemsg(_("auxdb is valid: ") + str(not doregen) + " " + str(pkg) + "\n", 2) > > if doregen: > if myebuild in self._broken_ebuilds: >@@ -599,7 +599,7 @@ > if not self._have_root_eclass_dir: > raise KeyError(mycpv) > writemsg("doregen: %s %s\n" % (doregen, mycpv), 2) >- writemsg(_("Generating cache entry(0) for: ")+str(myebuild)+"\n", 1) >+ writemsg(_("Generating cache entry(0) for: ") + str(myebuild) + "\n", 1) > > self.doebuild_settings.setcpv(mycpv) > mydata = {} >@@ -690,7 +690,7 @@ > # Convert this to an InvalidDependString exception since callers > # already handle it. > raise portage.exception.InvalidDependString( >- "getFetchMap(): aux_get() error reading "+mypkg+"; aborting.") >+ "getFetchMap(): aux_get() error reading " + mypkg + "; aborting.") > > if not eapi_is_supported(eapi): > # Convert this to an InvalidDependString exception >@@ -762,7 +762,7 @@ > checksums = mf.getDigests() > if not checksums: > if debug: >- print "[empty/missing/bad digest]: "+mypkg >+ print "[empty/missing/bad digest]: " + mypkg > return None > filesdict={} > myfiles = self.getFetchMap(mypkg, useflags=useflags) >@@ -839,11 +839,11 @@ > d = {} > for x in self.mysettings.categories: > for oroot in self.porttrees: >- for y in listdir(oroot+"/"+x, EmptyOnError=1, ignorecvs=1, dirsonly=1): >+ for y in listdir(oroot + "/" + x, EmptyOnError=1, ignorecvs=1, dirsonly=1): > if not self._pkg_dir_name_re.match(y) or \ > y == "CVS": > continue >- d[x+"/"+y] = None >+ d[x + "/" + y] = None > l = d.keys() > l.sort() > return l >@@ -895,7 +895,7 @@ > writemsg(_("\nInvalid ebuild version: %s\n") % \ > os.path.join(oroot, mycp, x), noiselevel=-1) > continue >- d[mysplit[0]+"/"+pf] = None >+ d[mysplit[0] + "/" + pf] = None > if invalid_category and d: > writemsg(_("\n!!! 
'%s' has a category that is not listed in " \ > "%setc/portage/categories\n") % \ >@@ -1178,13 +1178,13 @@ > return "" > mysplit = pkgname.split("/") > psplit = pkgsplit(mysplit[1]) >- return "/".join([self.portroot, mysplit[0], psplit[0], mysplit[1]])+".ebuild" >+ return "/".join([self.portroot, mysplit[0], psplit[0], mysplit[1]]) + ".ebuild" > > def resolve_specific(self, myspec): > cps = catpkgsplit(myspec) > if not cps: > return None >- mykey = key_expand(cps[0]+"/"+cps[1], mydb=self.dbapi, >+ mykey = key_expand(cps[0] + "/" + cps[1], mydb=self.dbapi, > settings=self.settings) > mykey = mykey + "-" + cps[2] > if cps[3] != "r0": >Index: pym/portage/proxy/lazyimport.py >=================================================================== >--- pym/portage/proxy/lazyimport.py (revision 13832) >+++ pym/portage/proxy/lazyimport.py (working copy) >@@ -137,7 +137,7 @@ > for i in xrange(len(components)): > alias = components[i] > if i < len(components) - 1: >- parent_name = ".".join(components[:i+1]) >+ parent_name = ".".join(components[:i + 1]) > __import__(parent_name) > mod = modules.get(parent_name) > if not isinstance(mod, types.ModuleType): >Index: pym/portage/__init__.py >=================================================================== >--- pym/portage/__init__.py (revision 13832) >+++ pym/portage/__init__.py (working copy) >@@ -43,7 +43,7 @@ > > sys.stderr.write("!!! You might consider starting python with verbose flags to see what has\n") > sys.stderr.write("!!! gone wrong. Here is the information we got for this exception:\n") >- sys.stderr.write(" "+str(e)+"\n\n"); >+ sys.stderr.write(" " + str(e) + "\n\n"); > raise > > try: >@@ -107,7 +107,7 @@ > sys.stderr.write("!!! portage tree under '/usr/portage/sys-apps/portage/files/' (default).\n") > sys.stderr.write("!!! There is a README.RESCUE file that details the steps required to perform\n") > sys.stderr.write("!!! 
a recovery of portage.\n") >- sys.stderr.write(" "+str(e)+"\n\n") >+ sys.stderr.write(" " + str(e) + "\n\n") > raise > > >@@ -200,7 +200,7 @@ > mylink=os.readlink(symlink) > if mylink[0] != '/': > mydir=os.path.dirname(symlink) >- mylink=mydir+"/"+mylink >+ mylink=mydir + "/" + mylink > return os.path.normpath(mylink) > > dircache = {} >@@ -248,9 +248,9 @@ > for x in list: > try: > if followSymlinks: >- pathstat = os.stat(mypath+"/"+x) >+ pathstat = os.stat(mypath + "/" + x) > else: >- pathstat = os.lstat(mypath+"/"+x) >+ pathstat = os.lstat(mypath + "/" + x) > > if stat.S_ISREG(pathstat[stat.ST_MODE]): > ftype.append(0) >@@ -322,20 +322,20 @@ > while x<len(ftype): > if ftype[x] == 1 and not \ > (ignorecvs and os.path.basename(list[x]) in _ignorecvs_dirs): >- l,f = cacheddir(mypath+"/"+list[x], ignorecvs, ignorelist, EmptyOnError, >+ l,f = cacheddir(mypath + "/" + list[x], ignorecvs, ignorelist, EmptyOnError, > followSymlinks) > > l=l[:] > for y in range(0,len(l)): >- l[y]=list[x]+"/"+l[y] >- list=list+l >- ftype=ftype+f >+ l[y]=list[x] + "/" + l[y] >+ list=list + l >+ ftype=ftype + f > x+=1 > if filesonly: > rlist=[] > for x in range(0,len(ftype)): > if ftype[x]==0: >- rlist=rlist+[list[x]] >+ rlist=rlist + [list[x]] > elif dirsonly: > rlist = [] > for x in range(0, len(ftype)): >@@ -747,7 +747,7 @@ > myfd.write("# ld.so.conf autogenerated by env-update; make all changes to\n") > myfd.write("# contents of /etc/env.d directory\n") > for x in specials["LDPATH"]: >- myfd.write(x+"\n") >+ myfd.write(x + "\n") > myfd.close() > ld_cache_update=True > >@@ -759,25 +759,25 @@ > newprelink.write("# contents of /etc/env.d directory\n") > > for x in ["/bin","/sbin","/usr/bin","/usr/sbin","/lib","/usr/lib"]: >- newprelink.write("-l "+x+"\n"); >- for x in specials["LDPATH"]+specials["PATH"]+specials["PRELINK_PATH"]: >+ newprelink.write("-l " + x + "\n"); >+ for x in specials["LDPATH"] + specials["PATH"] + specials["PRELINK_PATH"]: > if not x: > continue > if x[-1]!='/': >- x=x+"/" >+ x=x + "/" > plmasked=0 > for y in specials["PRELINK_PATH_MASK"]: > if not y: > continue > if y[-1]!='/': >- y=y+"/" >+ y=y + "/" > if y==x[0:len(y)]: > plmasked=1 > break > if not plmasked: >- newprelink.write("-h "+x+"\n") >+ newprelink.write("-h " + x + "\n") > for x in specials["PRELINK_PATH_MASK"]: >- newprelink.write("-b "+x+"\n") >+ newprelink.write("-b " + x + "\n") > newprelink.close() > > # Portage stores mtimes with 1 second granularity but in >=python-2.5 finer >@@ -788,7 +788,7 @@ > current_time = long(time.time()) > mtime_changed = False > lib_dirs = set() >- for lib_dir in portage.util.unique_array(specials["LDPATH"]+['usr/lib','usr/lib64','usr/lib32','lib','lib64','lib32']): >+ for lib_dir in portage.util.unique_array(specials["LDPATH"] + ['usr/lib','usr/lib64','usr/lib32','lib','lib64','lib32']): > x = os.path.join(target_root, lib_dir.lstrip(os.sep)) > try: > newldpathtime = long(os.stat(x).st_mtime) >@@ -950,7 +950,7 @@ > version += "".join( " ".join( grabfile( base_dir+ "/" + lv ) ).split() ) > > # Check the .config for a CONFIG_LOCALVERSION and append that too, also stripping whitespace >- kernelconfig = getconfig(base_dir+"/.config") >+ kernelconfig = getconfig(base_dir + "/.config") > if kernelconfig and "CONFIG_LOCALVERSION" in kernelconfig: > version += "".join(kernelconfig["CONFIG_LOCALVERSION"].split()) > >@@ -983,7 +983,7 @@ > dep_met = False > break > if dep_met: >- myusevars += " "+myuse >+ myusevars += " " + myuse > return myusevars > > def check_config_instance(test): >@@ -1655,7 +1655,7 @@ > 
groups = [] > for keyword in groups: > if not keyword[0] in "~-": >- mykeywordlist.append("~"+keyword) >+ mykeywordlist.append("~" + keyword) > pkgdict[key] = mykeywordlist > cp = dep_getkey(key) > if cp not in self.pkeywordsdict: >@@ -1754,7 +1754,7 @@ > continue > cpvr = catpkgsplit(pkgprovidedlines[x]) > if not cpvr or cpvr[0] == "null": >- writemsg("Invalid package name in package.provided: "+pkgprovidedlines[x]+"\n", >+ writemsg("Invalid package name in package.provided: " + pkgprovidedlines[x] + "\n", > noiselevel=-1) > has_invalid_data = True > del pkgprovidedlines[x] >@@ -1923,7 +1923,7 @@ > if group_name in traversed_groups: > writemsg(("Circular license group reference" + \ > " detected in '%s'\n") % group_name, noiselevel=-1) >- rValue.append("@"+group_name) >+ rValue.append("@" + group_name) > elif license_group: > traversed_groups.add(group_name) > for l in license_group: >@@ -1936,7 +1936,7 @@ > else: > writemsg("Undefined license group '%s'\n" % group_name, > noiselevel=-1) >- rValue.append("@"+group_name) >+ rValue.append("@" + group_name) > if negate: > rValue = ["-" + token for token in rValue] > return rValue >@@ -2397,7 +2397,7 @@ > if len(x_split) == 1: > continue > for i in xrange(len(x_split) - 1): >- k = '_'.join(x_split[:i+1]) >+ k = '_'.join(x_split[:i + 1]) > if k in use_expand_split: > v = use_expand_iuses.get(k) > if v is None: >@@ -3088,7 +3088,7 @@ > 3. profile only > """ > >- # Virtuals by profile+tree preferences. >+ # Virtuals by profile + tree preferences. > ptVirtuals = {} > > for virt, installed_list in self.treeVirtuals.iteritems(): >@@ -3218,7 +3218,7 @@ > continue > mydict[x] = myvalue > if "HOME" not in mydict and "BUILD_PREFIX" in mydict: >- writemsg("*** HOME not set. Setting to "+mydict["BUILD_PREFIX"]+"\n") >+ writemsg("*** HOME not set. Setting to " + mydict["BUILD_PREFIX"] + "\n") > mydict["HOME"]=mydict["BUILD_PREFIX"][:] > > if filter_calling_env: >@@ -3870,12 +3870,12 @@ > if myfile not in filedict: > filedict[myfile]=[] > for y in range(0,len(locations)): >- filedict[myfile].append(locations[y]+"/distfiles/"+myfile) >+ filedict[myfile].append(locations[y] + "/distfiles/" + myfile) > if myuri[:9]=="mirror://": > eidx = myuri.find("/", 9) > if eidx != -1: > mirrorname = myuri[9:eidx] >- path = myuri[eidx+1:] >+ path = myuri[eidx + 1:] > > # Try user-defined mirrors first > if mirrorname in custommirrors: >@@ -4274,7 +4274,7 @@ > continue > tried_locations.add(loc) > if listonly: >- writemsg_stdout(loc+" ", noiselevel=-1) >+ writemsg_stdout(loc + " ", noiselevel=-1) > continue > # allow different fetchcommands per protocol > protocol = loc[0:loc.find("://")] >@@ -4463,9 +4463,9 @@ > # Fetch failed... Try the next one... Kill 404 files though. > if (mystat[stat.ST_SIZE]<100000) and (len(myfile)>4) and not ((myfile[-5:]==".html") or (myfile[-4:]==".htm")): > html404=re.compile("<title>.*(not found|404).*</title>",re.I|re.M) >- if html404.search(open(mysettings["DISTDIR"]+"/"+myfile).read()): >+ if html404.search(open(mysettings["DISTDIR"] + "/" + myfile).read()): > try: >- os.unlink(mysettings["DISTDIR"]+"/"+myfile) >+ os.unlink(mysettings["DISTDIR"] + "/" + myfile) > writemsg(">>> Deleting invalid distfile. (Improper 404 redirect from server.)\n") > fetched = 0 > continue >@@ -4478,12 +4478,12 @@ > # file NOW, for those users who don't have a stable/continuous > # net connection. This way we have a chance to try to download > # from another mirror... 
>- verified_ok,reason = portage.checksum.verify_all(mysettings["DISTDIR"]+"/"+myfile, mydigests[myfile]) >+ verified_ok,reason = portage.checksum.verify_all(mysettings["DISTDIR"] + "/" + myfile, mydigests[myfile]) > if not verified_ok: > print reason >- writemsg("!!! Fetched file: "+str(myfile)+" VERIFY FAILED!\n", >+ writemsg("!!! Fetched file: " + str(myfile) + " VERIFY FAILED!\n", > noiselevel=-1) >- writemsg("!!! Reason: "+reason[0]+"\n", >+ writemsg("!!! Reason: " + reason[0] + "\n", > noiselevel=-1) > writemsg("!!! Got: %s\n!!! Expected: %s\n" % \ > (reason[1], reason[2]), noiselevel=-1) >@@ -5310,7 +5310,7 @@ > else: > mypv = os.path.basename(ebuild_path)[:-7] > >- mycpv = cat+"/"+mypv >+ mycpv = cat + "/" + mypv > mysplit=pkgsplit(mypv,silent=0) > if mysplit is None: > raise portage.exception.IncorrectParameter( >@@ -5353,7 +5353,7 @@ > mysettings["EBUILD"] = ebuild_path > mysettings["O"] = pkg_dir > mysettings.configdict["pkg"]["CATEGORY"] = cat >- mysettings["FILESDIR"] = pkg_dir+"/files" >+ mysettings["FILESDIR"] = pkg_dir + "/files" > mysettings["PF"] = mypv > > if hasattr(mydbapi, '_repo_info'): >@@ -5366,11 +5366,11 @@ > mysettings["DISTDIR"] = os.path.realpath(mysettings["DISTDIR"]) > mysettings["RPMDIR"] = os.path.realpath(mysettings["RPMDIR"]) > >- mysettings["ECLASSDIR"] = mysettings["PORTDIR"]+"/eclass" >+ mysettings["ECLASSDIR"] = mysettings["PORTDIR"] + "/eclass" > mysettings["SANDBOX_LOG"] = mycpv.replace("/", "_-_") > > mysettings["PROFILE_PATHS"] = "\n".join(mysettings.profiles) >- mysettings["P"] = mysplit[0]+"-"+mysplit[1] >+ mysettings["P"] = mysplit[0] + "-" + mysplit[1] > mysettings["PN"] = mysplit[0] > mysettings["PV"] = mysplit[1] > mysettings["PR"] = mysplit[2] >@@ -5404,7 +5404,7 @@ > if mysplit[2] == "r0": > mysettings["PVR"]=mysplit[1] > else: >- mysettings["PVR"]=mysplit[1]+"-"+mysplit[2] >+ mysettings["PVR"]=mysplit[1] + "-" + mysplit[2] > > if "PATH" in mysettings: > mysplit=mysettings["PATH"].split(":") >@@ -5419,8 +5419,8 @@ > # Sandbox needs cannonical paths. > mysettings["PORTAGE_TMPDIR"] = os.path.realpath( > mysettings["PORTAGE_TMPDIR"]) >- mysettings["BUILD_PREFIX"] = mysettings["PORTAGE_TMPDIR"]+"/portage" >- mysettings["PKG_TMPDIR"] = mysettings["PORTAGE_TMPDIR"]+"/binpkgs" >+ mysettings["BUILD_PREFIX"] = mysettings["PORTAGE_TMPDIR"] + "/portage" >+ mysettings["PKG_TMPDIR"] = mysettings["PORTAGE_TMPDIR"] + "/binpkgs" > > # Package {pre,post}inst and {pre,post}rm may overlap, so they must have separate > # locations in order to prevent interference. >@@ -6614,7 +6614,7 @@ > """moves a file from src to dest, preserving all permissions and attributes; mtime will > be preserved even when moving across filesystems. Returns true on success and false on > failure. Move is atomic.""" >- #print "movefile("+str(src)+","+str(dest)+","+str(newmtime)+","+str(sstat)+")" >+ #print "movefile(" + str(src) + "," + str(dest) + "," + str(newmtime) + "," + str(sstat) + ")" > global lchown > if mysettings is None: > global settings >@@ -6740,11 +6740,11 @@ > if stat.S_ISREG(sstat[stat.ST_MODE]): > try: # For safety copy then move it over. 
> if selinux_enabled: >- selinux.secure_copy(src,dest+"#new") >- selinux.secure_rename(dest+"#new",dest) >+ selinux.secure_copy(src,dest + "#new") >+ selinux.secure_rename(dest + "#new",dest) > else: >- shutil.copyfile(src,dest+"#new") >- os.rename(dest+"#new",dest) >+ shutil.copyfile(src,dest + "#new") >+ os.rename(dest + "#new",dest) > didcopy=1 > except SystemExit, e: > raise >@@ -6755,12 +6755,12 @@ > else: > #we don't yet handle special, so we need to fall back to /bin/mv > if selinux_enabled: >- a=commands.getstatusoutput(MOVE_BINARY+" -c -f "+"'"+src+"' '"+dest+"'") >+ a=commands.getstatusoutput(MOVE_BINARY + " -c -f " + "'" + src + "' '" + dest + "'") > else: >- a=commands.getstatusoutput(MOVE_BINARY+" -f "+"'"+src+"' '"+dest+"'") >+ a=commands.getstatusoutput(MOVE_BINARY + " -f " + "'" + src + "' '" + dest + "'") > if a[0]!=0: > print "!!! Failed to move special file:" >- print "!!! '"+src+"' to '"+dest+"'" >+ print "!!! '" + src + "' to '" + dest + "'" > print "!!!",a > return None # failure > try: >@@ -7003,7 +7003,7 @@ > "%s: %s '%s'" % (y[0], mycheck[1], depstring)) > > # pull in the new-style virtual >- mycheck[1].append(portage.dep.Atom("="+y[0])) >+ mycheck[1].append(portage.dep.Atom("=" + y[0])) > a.append(mycheck[1]) > # Plain old-style virtuals. New-style virtuals are preferred. > if not pkgs: >@@ -7233,7 +7233,7 @@ > mydep = dep_getcpv(orig_dep) > myindex = orig_dep.index(mydep) > prefix = orig_dep[:myindex] >- postfix = orig_dep[myindex+len(mydep):] >+ postfix = orig_dep[myindex + len(mydep):] > expanded = cpv_expand(mydep, mydb=mydb, > use_cache=use_cache, settings=settings) > try: >@@ -7392,7 +7392,7 @@ > return None > mylen=len(myslash) > if mylen==2: >- return myslash[0]+"/"+mysplit[0] >+ return myslash[0] + "/" + mysplit[0] > elif mylen==1: > return mysplit[0] > else: >@@ -7407,11 +7407,11 @@ > if len(mysplit)==1: > if hasattr(mydb, "cp_list"): > for x in mydb.categories: >- if mydb.cp_list(x+"/"+mykey,use_cache=use_cache): >- return x+"/"+mykey >+ if mydb.cp_list(x + "/" + mykey,use_cache=use_cache): >+ return x + "/" + mykey > if mykey in virts_p: > return(virts_p[mykey][0]) >- return "null/"+mykey >+ return "null/" + mykey > elif mydb: > if hasattr(mydb, "cp_list"): > if not mydb.cp_list(mykey, use_cache=use_cache) and \ >@@ -7436,7 +7436,7 @@ > mykey=mycpv > elif len(myslash)==2: > if mysplit: >- mykey=myslash[0]+"/"+mysplit[0] >+ mykey=myslash[0] + "/" + mysplit[0] > else: > mykey=mycpv > if mydb and virts and mykey in virts: >@@ -7469,8 +7469,8 @@ > matches=[] > if mydb and hasattr(mydb, "categories"): > for x in mydb.categories: >- if mydb.cp_list(x+"/"+myp,use_cache=use_cache): >- matches.append(x+"/"+myp) >+ if mydb.cp_list(x + "/" + myp,use_cache=use_cache): >+ matches.append(x + "/" + myp) > if len(matches) > 1: > virtual_name_collision = False > if len(matches) == 2: >@@ -7496,12 +7496,12 @@ > mykey=virts_p[myp][0] > #again, we only perform virtual expansion if we have a dbapi (not a list) > if not mykey: >- mykey="null/"+myp >+ mykey="null/" + myp > if mysplit: > if mysplit[2]=="r0": >- return mykey+"-"+mysplit[1] >+ return mykey + "-" + mysplit[1] > else: >- return mykey+"-"+mysplit[1]+"-"+mysplit[2] >+ return mykey + "-" + mysplit[1] + "-" + mysplit[2] > else: > return mykey > >@@ -7526,7 +7526,7 @@ > cpv_slot_list = [mycpv] > else: > cpv_slot_list = ["%s:%s" % (mycpv, metadata["SLOT"])] >- mycp=mysplit[0]+"/"+mysplit[1] >+ mycp=mysplit[0] + "/" + mysplit[1] > > # XXX- This is a temporary duplicate of code from the config constructor. 
> locations = [os.path.join(settings["PORTDIR"], "profiles")] >@@ -7554,7 +7554,7 @@ > comment = "" > comment_valid = -1 > elif l[0] == "#": >- comment += (l+"\n") >+ comment += (l + "\n") > comment_valid = i + 1 > elif l == x: > if comment_valid != i: >@@ -7603,7 +7603,7 @@ > metadata["USE"] = settings["PORTAGE_USE"] > else: > metadata["USE"] = "" >- mycp=mysplit[0]+"/"+mysplit[1] >+ mycp=mysplit[0] + "/" + mysplit[1] > > rValue = [] > >@@ -7674,11 +7674,11 @@ > if gp=="*": > kmask=None > break >- elif gp=="-"+myarch and myarch in pgroups: >- kmask="-"+myarch >+ elif gp=="-" + myarch and myarch in pgroups: >+ kmask="-" + myarch > break >- elif gp=="~"+myarch and myarch in pgroups: >- kmask="~"+myarch >+ elif gp=="~" + myarch and myarch in pgroups: >+ kmask="~" + myarch > break > > try: >@@ -7693,12 +7693,12 @@ > msg.append("license(s)") > rValue.append(" ".join(msg)) > except portage.exception.InvalidDependString, e: >- rValue.append("LICENSE: "+str(e)) >+ rValue.append("LICENSE: " + str(e)) > > # Only show KEYWORDS masks for installed packages > # if they're not masked for any other reason. > if kmask and (not installed or not rValue): >- rValue.append(kmask+" keyword") >+ rValue.append(kmask + " keyword") > > return rValue > >@@ -7810,7 +7810,7 @@ > mysettings.setcpv(mycat + "/" + mypkg, mydb=mydbapi) > # Store the md5sum in the vdb. > fp = open(os.path.join(infloc, "BINPKGMD5"), "w") >- fp.write(str(portage.checksum.perform_md5(mytbz2))+"\n") >+ fp.write(str(portage.checksum.perform_md5(mytbz2)) + "\n") > fp.close() > > # This gives bashrc users an opportunity to do various things >@@ -8093,7 +8093,7 @@ > for mykey, mystat, mycontent in update_data: > writemsg_stdout("\n\n") > writemsg_stdout(colorize("GOOD", >- "Performing Global Updates: ")+bold(mykey)+"\n") >+ "Performing Global Updates: ") + bold(mykey) + "\n") > writemsg_stdout("(Could take a couple of minutes if you have a lot of binary packages.)\n") > writemsg_stdout(" " + bold(".") + "='update pass' " + \ > bold("*") + "='binary update' " + bold("#") + \ >@@ -8190,7 +8190,7 @@ > if do_upgrade_packagesmessage and bindb and \ > bindb.cpv_all(): > writemsg_stdout(" ** Skipping packages. Run 'fixpackages' or set it in FEATURES to fix the") >- writemsg_stdout("\n tbz2's in the packages directory. "+bold("Note: This can take a very long time.")) >+ writemsg_stdout("\n tbz2's in the packages directory. 
" + bold("Note: This can take a very long time.")) > writemsg_stdout("\n") > if myupd: > return myupd >Index: pym/portage/checksum.py >=================================================================== >--- pym/portage/checksum.py (revision 13832) >+++ pym/portage/checksum.py (working copy) >@@ -109,7 +109,7 @@ > > prelink_capable = False > if os.path.exists(PRELINK_BINARY): >- results = commands.getstatusoutput(PRELINK_BINARY+" --version > /dev/null 2>&1") >+ results = commands.getstatusoutput(PRELINK_BINARY + " --version > /dev/null 2>&1") > if (results[0] >> 8) == 0: > prelink_capable=1 > del results >@@ -262,6 +262,6 @@ > rVal = {} > for x in hashes: > if x not in hashfunc_map: >- raise portage.exception.DigestException, x+" hash function not available (needs dev-python/pycrypto or >=dev-lang/python-2.5)" >+ raise portage.exception.DigestException, x + " hash function not available (needs dev-python/pycrypto or >=dev-lang/python-2.5)" > rVal[x] = perform_checksum(filename, x, calc_prelink)[0] > return rVal >Index: pym/portage/manifest.py >=================================================================== >--- pym/portage/manifest.py (revision 13832) >+++ pym/portage/manifest.py (working copy) >@@ -257,7 +257,7 @@ > """ Add entry to Manifest optionally using hashdict to avoid recalculation of hashes """ > if ftype == "AUX" and not fname.startswith("files/"): > fname = os.path.join("files", fname) >- if not os.path.exists(self.pkgdir+fname) and not ignoreMissing: >+ if not os.path.exists(self.pkgdir + fname) and not ignoreMissing: > raise FileNotFound(fname) > if not ftype in portage.const.MANIFEST2_IDENTIFIERS: > raise InvalidDataType(ftype) >@@ -332,7 +332,7 @@ > mytype = "MISC" > else: > continue >- self.fhashdict[mytype][f] = perform_multiple_checksums(self.pkgdir+f, self.hashes) >+ self.fhashdict[mytype][f] = perform_multiple_checksums(self.pkgdir + f, self.hashes) > recursive_files = [] > cut_len = len(os.path.join(self.pkgdir, "files") + os.sep) > for parentdir, dirs, files in os.walk(os.path.join(self.pkgdir, "files")): >@@ -405,7 +405,7 @@ > try: > ok,reason = verify_all(self._getAbsname(ftype, fname), self.fhashdict[ftype][fname]) > if not ok: >- raise DigestException(tuple([self._getAbsname(ftype, fname)]+list(reason))) >+ raise DigestException(tuple([self._getAbsname(ftype, fname)] + list(reason))) > return ok, reason > except FileNotFound, e: > if not ignoreMissing: >Index: pym/portage/dep.py >=================================================================== >--- pym/portage/dep.py (revision 13832) >+++ pym/portage/dep.py (working copy) >@@ -120,8 +120,8 @@ > subsec,tail = mystr.split(")",1) > if tokenize: > subsec = strip_empty(subsec.split(" ")) >- return [mylist+subsec,tail] >- return mylist+[subsec],tail >+ return [mylist + subsec,tail] >+ return mylist + [subsec],tail > if not isinstance(tail, basestring): > raise portage.exception.InvalidDependString( > "malformed syntax: '%s'" % mystr) >@@ -188,7 +188,7 @@ > mystrparts = [] > for x in mylist: > if isinstance(x, list): >- mystrparts.append("( "+paren_enclose(x)+" )") >+ mystrparts.append("( " + paren_enclose(x) + " )") > else: > mystrparts.append(x) > return " ".join(mystrparts) >@@ -218,10 +218,10 @@ > # Quick validity checks > for x, y in enumerate(deparray): > if y == '||': >- if len(deparray) - 1 == x or not isinstance(deparray[x+1], list): >- raise portage.exception.InvalidDependString(deparray[x]+" missing atom list in \""+paren_enclose(deparray)+"\"") >+ if len(deparray) - 1 == x or not isinstance(deparray[x 
+ 1], list): >+ raise portage.exception.InvalidDependString(deparray[x] + " missing atom list in \"" + paren_enclose(deparray) + "\"") > if deparray and deparray[-1] and deparray[-1][-1] == "?": >- raise portage.exception.InvalidDependString("Conditional without target in \""+paren_enclose(deparray)+"\"") >+ raise portage.exception.InvalidDependString("Conditional without target in \"" + paren_enclose(deparray) + "\"") > > global _dep_check_strict > >@@ -236,7 +236,7 @@ > rlist.append(additions) > elif rlist and rlist[-1] == "||": > #XXX: Currently some DEPEND strings have || lists without default atoms. >- # raise portage.exception.InvalidDependString("No default atom(s) in \""+paren_enclose(deparray)+"\"") >+ # raise portage.exception.InvalidDependString("No default atom(s) in \"" + paren_enclose(deparray) + "\"") > rlist.append([]) > > else: >@@ -259,7 +259,7 @@ > sys.stderr.write("Note: Nested use flags without parenthesis (Deprecated)\n") > warned = 1 > if warned: >- sys.stderr.write(" --> "+" ".join(map(str,[head]+newdeparray))+"\n") >+ sys.stderr.write(" --> " + " ".join(map(str,[head] + newdeparray)) + "\n") > > # Check that each flag matches > ismatch = True >@@ -287,7 +287,7 @@ > if missing_flag: > raise portage.exception.InvalidDependString( > "Conditional without flag: \"" + \ >- paren_enclose([head+"?", newdeparray[-1]])+"\"") >+ paren_enclose([head + "?", newdeparray[-1]]) + "\"") > > # If they all match, process the target > if ismatch: >@@ -333,7 +333,7 @@ > if isinstance(deplist[x], list): > retlist.append(dep_opconvert(deplist[x])) > elif deplist[x] == "||" or deplist[x] == "&&": >- retlist.append([deplist[x]] + dep_opconvert(deplist[x+1])) >+ retlist.append([deplist[x]] + dep_opconvert(deplist[x + 1])) > x += 1 > else: > retlist.append(deplist[x]) >@@ -748,9 +748,9 @@ > if colon != -1: > bracket = mydep.find("[", colon) > if bracket == -1: >- return mydep[colon+1:] >+ return mydep[colon + 1:] > else: >- return mydep[colon+1:bracket] >+ return mydep[colon + 1:bracket] > return None > > def remove_slot(mydep): >@@ -821,7 +821,7 @@ > use_list.append(use) > > # Find next use flag >- open_bracket = depend.find( '[', open_bracket+1 ) >+ open_bracket = depend.find( '[', open_bracket + 1 ) > return tuple(use_list) > > _valid_category = re.compile("^\w[\w-]*") >@@ -1117,16 +1117,16 @@ > mysplit = catpkgsplit(mycpv) > myver = mysplit[2].lstrip("0") > if not myver or not myver[0].isdigit(): >- myver = "0"+myver >- mycpv = mysplit[0]+"/"+mysplit[1]+"-"+myver >+ myver = "0" + myver >+ mycpv = mysplit[0] + "/" + mysplit[1] + "-" + myver > for x in candidate_list: > xs = getattr(x, "cpv_split", None) > if xs is None: > xs = catpkgsplit(remove_slot(x)) > myver = xs[2].lstrip("0") > if not myver or not myver[0].isdigit(): >- myver = "0"+myver >- xcpv = xs[0]+"/"+xs[1]+"-"+myver >+ myver = "0" + myver >+ xcpv = xs[0] + "/" + xs[1] + "-" + myver > if xcpv.startswith(mycpv): > mylist.append(x) > >@@ -1137,7 +1137,7 @@ > xs = catpkgsplit(remove_slot(x)) > if xs is None: > raise InvalidData(x) >- if not cpvequal(xs[0]+"/"+xs[1]+"-"+xs[2], mycpv_cps[0]+"/"+mycpv_cps[1]+"-"+mycpv_cps[2]): >+ if not cpvequal(xs[0] + "/" + xs[1] + "-" + xs[2], mycpv_cps[0] + "/" + mycpv_cps[1] + "-" + mycpv_cps[2]): > continue > if xs[2] != ver: > continue >Index: pym/portage/versions.py >=================================================================== >--- pym/portage/versions.py (revision 13832) >+++ pym/portage/versions.py (working copy) >@@ -47,7 +47,7 @@ > > if ver1 == ver2: > return 0 >- 
mykey=ver1+":"+ver2 >+ mykey=ver1 + ":" + ver2 > try: > return vercmp_cache[mykey] > except KeyError: >@@ -98,8 +98,8 @@ > list2.append(int(vlist2[i])) > # now we have to use floats so 1.02 compares correctly against 1.1 > else: >- # list1.append(float("0."+vlist1[i])) >- # list2.append(float("0."+vlist2[i])) >+ # list1.append(float("0." + vlist1[i])) >+ # list2.append(float("0." + vlist2[i])) > # Since python floats have limited range, we multiply both > # floating point representations by a constant so that they are > # transformed into whole numbers. This allows the practically >@@ -218,7 +218,7 @@ > > if len(myparts)<2: > if not silent: >- print "!!! Name error in",mypkg+": missing a version or name part." >+ print "!!! Name error in",mypkg + ": missing a version or name part." > pkgcache[mypkg]=None > return None > >Index: pym/portage/tests/__init__.py >=================================================================== >--- pym/portage/tests/__init__.py (revision 13832) >+++ pym/portage/tests/__init__.py (working copy) >@@ -45,7 +45,7 @@ > """ > files = os.listdir(path) > files = [ f[:-3] for f in files if f.startswith("test") and f.endswith(".py") ] >- parent_path = path[len(base_path)+1:] >+ parent_path = path[len(base_path) + 1:] > parent_module = ".".join(("portage", "tests", parent_path)) > parent_module = parent_module.replace('/', '.') > result = [] >Index: pym/portage/tests/bin/setup_env.py >=================================================================== >--- pym/portage/tests/bin/setup_env.py (revision 13832) >+++ pym/portage/tests/bin/setup_env.py (working copy) >@@ -54,7 +54,7 @@ > # just their exit value and the state of $D > f = open('/dev/null', 'w') > fd_pipes = {0:0,1:f.fileno(),2:f.fileno()} >- spawn(func+" "+args, env=os.environ, fd_pipes=fd_pipes) >+ spawn(func + " " + args, env=os.environ, fd_pipes=fd_pipes) > f.close() > > def create_portage_wrapper(bin): >Index: pym/portage/cache/flat_hash.py >=================================================================== >--- pym/portage/cache/flat_hash.py (revision 13832) >+++ pym/portage/cache/flat_hash.py (working copy) >@@ -53,7 +53,7 @@ > def _setitem(self, cpv, values): > # import pdb;pdb.set_trace() > s = cpv.rfind("/") >- fp = os.path.join(self.location,cpv[:s],".update.%i.%s" % (os.getpid(), cpv[s+1:])) >+ fp = os.path.join(self.location,cpv[:s],".update.%i.%s" % (os.getpid(), cpv[s + 1:])) > try: > myf = codecs.open(fp, mode='w', > encoding='utf_8', errors='replace') >@@ -124,6 +124,6 @@ > if stat.S_ISDIR(st.st_mode): > dirs.append(p) > continue >- yield p[len_base+1:] >+ yield p[len_base + 1:] > dirs.pop(0) > >Index: pym/portage/cache/sql_template.py >=================================================================== >--- pym/portage/cache/sql_template.py (revision 13832) >+++ pym/portage/cache/sql_template.py (working copy) >@@ -280,7 +280,7 @@ > query_list.append("(key=%s AND value LIKE %s)" % (self._sfilter(k), self._sfilter(v))) > > if len(query_list): >- query = " AND "+" AND ".join(query_list) >+ query = " AND " + " AND ".join(query_list) > else: > query = '' > >Index: pym/portage/cache/cache_errors.py >=================================================================== >--- pym/portage/cache/cache_errors.py (revision 13832) >+++ pym/portage/cache/cache_errors.py (working copy) >@@ -38,7 +38,7 @@ > def __init__(self, info=''): > self.info = info > def __str__(self): >- return "cache is non-modifiable"+str(self.info) >+ return "cache is non-modifiable" + str(self.info) > > class 
StatCollision(CacheError): > """ >Index: pym/portage/cache/sqlite.py >=================================================================== >--- pym/portage/cache/sqlite.py (revision 13832) >+++ pym/portage/cache/sqlite.py (working copy) >@@ -133,7 +133,7 @@ > actual_cache_size = int(cursor.fetchone()[0]) > del cursor > if actual_cache_size != cache_size: >- raise cache_errors.InitializationError(self.__class__,"actual cache_size = "+actual_cache_size+" does does not match requested size of "+cache_size) >+ raise cache_errors.InitializationError(self.__class__,"actual cache_size = " + actual_cache_size + " does does not match requested size of " + cache_size) > > def _db_init_synchronous(self, synchronous): > cursor = self._db_cursor >@@ -142,7 +142,7 @@ > actual_synchronous=int(cursor.fetchone()[0]) > del cursor > if actual_synchronous!=synchronous: >- raise cache_errors.InitializationError(self.__class__,"actual synchronous = "+actual_synchronous+" does does not match requested value of "+synchronous) >+ raise cache_errors.InitializationError(self.__class__,"actual synchronous = " + actual_synchronous + " does does not match requested value of " + synchronous) > > def _getitem(self, cpv): > cursor = self._db_cursor >Index: pym/portage/cache/fs_template.py >=================================================================== >--- pym/portage/cache/fs_template.py (revision 13832) >+++ pym/portage/cache/fs_template.py (working copy) >@@ -26,10 +26,10 @@ > > for x, y in (("gid", -1), ("perms", -1)): > if x in config: >- setattr(self, "_"+x, config[x]) >+ setattr(self, "_" + x, config[x]) > del config[x] > else: >- setattr(self, "_"+x, y) >+ setattr(self, "_" + x, y) > super(FsBased, self).__init__(*args, **config) > > if self.label.startswith(os.path.sep): >Index: pym/portage/cache/anydbm.py >=================================================================== >--- pym/portage/cache/anydbm.py (revision 13832) >+++ pym/portage/cache/anydbm.py (working copy) >@@ -27,7 +27,7 @@ > if not default_db.startswith("."): > default_db = '.' 
+ default_db > >- self._db_path = os.path.join(self.location, fs_template.gen_label(self.location, self.label)+default_db) >+ self._db_path = os.path.join(self.location, fs_template.gen_label(self.location, self.label) + default_db) > self.__db = None > try: > self.__db = anydbm_module.open(self._db_path, "w", self._perms) >Index: pym/portage/cache/metadata.py >=================================================================== >--- pym/portage/cache/metadata.py (revision 13832) >+++ pym/portage/cache/metadata.py (working copy) >@@ -113,7 +113,7 @@ > > s = cpv.rfind("/") > fp = os.path.join(self.location,cpv[:s], >- ".update.%i.%s" % (os.getpid(), cpv[s+1:])) >+ ".update.%i.%s" % (os.getpid(), cpv[s + 1:])) > try: > myf = open(fp, 'wb') > except EnvironmentError, e: >Index: pym/portage/cache/flat_list.py >=================================================================== >--- pym/portage/cache/flat_list.py (revision 13832) >+++ pym/portage/cache/flat_list.py (working copy) >@@ -46,7 +46,7 @@ > > def _setitem(self, cpv, values): > s = cpv.rfind("/") >- fp=os.path.join(self._base,cpv[:s],".update.%i.%s" % (os.getpid(), cpv[s+1:])) >+ fp=os.path.join(self._base,cpv[:s],".update.%i.%s" % (os.getpid(), cpv[s + 1:])) > try: > myf = open(fp, "w") > except (OSError, IOError), e: >@@ -61,7 +61,7 @@ > > > for x in self.auxdbkey_order: >- myf.write(values.get(x,"")+"\n") >+ myf.write(values.get(x,"") + "\n") > > myf.close() > self._ensure_access(fp, mtime=values["_mtime_"]) >@@ -101,7 +101,7 @@ > if stat.S_ISDIR(st.st_mode): > dirs.append(p) > continue >- yield p[len_base+1:] >+ yield p[len_base + 1:] > dirs.pop(0) > > >Index: pym/portage/cache/ebuild_xattr.py >=================================================================== >--- pym/portage/cache/ebuild_xattr.py (revision 13832) >+++ pym/portage/cache/ebuild_xattr.py (working copy) >@@ -115,7 +115,7 @@ > parts = int(parts) > if parts > 1: > for i in range(1,parts): >- value += all.get(key+str(i)) >+ value += all.get(key + str(i)) > values[key] = value > > return values >@@ -141,8 +141,8 @@ > # Write out the rest > for i in range(1,parts): > start = i * max >- val = s[start:start+max] >- self.__set(path,key+str(i),val) >+ val = s[start:start + max] >+ self.__set(path,key + str(i),val) > else: > self.__set(path,key,"%s:%s"%(1,s)) > >Index: pym/portage/util.py >=================================================================== >--- pym/portage/util.py (revision 13832) >+++ pym/portage/util.py (working copy) >@@ -207,7 +207,7 @@ > for y in mydict.keys(): > if True: > if y in final_dict and (incremental or (y in incrementals)): >- final_dict[y] += " "+mydict[y][:] >+ final_dict[y] += " " + mydict[y][:] > else: > final_dict[y] = mydict[y][:] > mydict[y] = " ".join(mydict[y].split()) # Remove extra spaces. >@@ -334,7 +334,7 @@ > myfile = atomic_ofstream(myfilename) > if not writekey: > for x in mydict.values(): >- myfile.write(x+"\n") >+ myfile.write(x + "\n") > else: > for x in mydict: > myfile.write("%s %s\n" % (x, " ".join(mydict[x]))) >@@ -404,9 +404,9 @@ > #unexpected end of file > #lex.error_leader(self.filename,lex.lineno) > if not tolerant: >- writemsg("!!! Unexpected end of config file: variable "+str(key)+"\n", >+ writemsg("!!! 
Unexpected end of config file: variable " + str(key) + "\n", > noiselevel=-1) >- raise Exception("ParseError: Unexpected EOF: "+str(mycfg)+": on/before line "+str(lex.lineno)) >+ raise Exception("ParseError: Unexpected EOF: " + str(mycfg) + ": on/before line " + str(lex.lineno)) > else: > return mykeys > elif (equ!='='): >@@ -423,9 +423,9 @@ > #unexpected end of file > #lex.error_leader(self.filename,lex.lineno) > if not tolerant: >- writemsg("!!! Unexpected end of config file: variable "+str(key)+"\n", >+ writemsg("!!! Unexpected end of config file: variable " + str(key) + "\n", > noiselevel=-1) >- raise portage.exception.CorruptionError("ParseError: Unexpected EOF: "+str(mycfg)+": line "+str(lex.lineno)) >+ raise portage.exception.CorruptionError("ParseError: Unexpected EOF: " + str(mycfg) + ": line " + str(lex.lineno)) > else: > return mykeys > if expand: >@@ -436,13 +436,13 @@ > except SystemExit, e: > raise > except Exception, e: >- raise portage.exception.ParseError(str(e)+" in "+mycfg) >+ raise portage.exception.ParseError(str(e) + " in " + mycfg) > return mykeys > > #cache expansions of constant strings > cexpand={} > def varexpand(mystring, mydict={}): >- newstring = cexpand.get(" "+mystring, None) >+ newstring = cexpand.get(" " + mystring, None) > if newstring is not None: > return newstring > >@@ -452,7 +452,7 @@ > This would be a good bunch of code to port to C. > """ > numvars=0 >- mystring=" "+mystring >+ mystring=" " + mystring > #in single, double quotes > insing=0 > indoub=0 >@@ -461,89 +461,89 @@ > while (pos<len(mystring)): > if (mystring[pos]=="'") and (mystring[pos-1]!="\\"): > if (indoub): >- newstring=newstring+"'" >+ newstring=newstring + "'" > else: > newstring += "'" # Quote removal is handled by shlex. > insing=not insing >- pos=pos+1 >+ pos=pos + 1 > continue > elif (mystring[pos]=='"') and (mystring[pos-1]!="\\"): > if (insing): >- newstring=newstring+'"' >+ newstring=newstring + '"' > else: > newstring += '"' # Quote removal is handled by shlex. 
> indoub=not indoub >- pos=pos+1 >+ pos=pos + 1 > continue > if (not insing): > #expansion time > if (mystring[pos]=="\n"): > #convert newlines to spaces >- newstring=newstring+" " >- pos=pos+1 >+ newstring=newstring + " " >+ pos=pos + 1 > elif (mystring[pos]=="\\"): > #backslash expansion time >- if (pos+1>=len(mystring)): >- newstring=newstring+mystring[pos] >+ if (pos + 1>=len(mystring)): >+ newstring=newstring + mystring[pos] > break > else: >- a=mystring[pos+1] >- pos=pos+2 >+ a=mystring[pos + 1] >+ pos=pos + 2 > if a=='a': >- newstring=newstring+chr(007) >+ newstring=newstring + chr(007) > elif a=='b': >- newstring=newstring+chr(010) >+ newstring=newstring + chr(010) > elif a=='e': >- newstring=newstring+chr(033) >+ newstring=newstring + chr(033) > elif (a=='f') or (a=='n'): >- newstring=newstring+chr(012) >+ newstring=newstring + chr(012) > elif a=='r': >- newstring=newstring+chr(015) >+ newstring=newstring + chr(015) > elif a=='t': >- newstring=newstring+chr(011) >+ newstring=newstring + chr(011) > elif a=='v': >- newstring=newstring+chr(013) >+ newstring=newstring + chr(013) > elif a!='\n': > #remove backslash only, as bash does: this takes care of \\ and \' and \" as well >- newstring=newstring+mystring[pos-1:pos] >+ newstring=newstring + mystring[pos-1:pos] > continue > elif (mystring[pos]=="$") and (mystring[pos-1]!="\\"): >- pos=pos+1 >+ pos=pos + 1 > if mystring[pos]=="{": >- pos=pos+1 >+ pos=pos + 1 > braced=True > else: > braced=False > myvstart=pos >- validchars=string.ascii_letters+string.digits+"_" >+ validchars=string.ascii_letters + string.digits + "_" > while mystring[pos] in validchars: >- if (pos+1)>=len(mystring): >+ if (pos + 1)>=len(mystring): > if braced: > cexpand[mystring]="" > return "" > else: >- pos=pos+1 >+ pos=pos + 1 > break >- pos=pos+1 >+ pos=pos + 1 > myvarname=mystring[myvstart:pos] > if braced: > if mystring[pos]!="}": > cexpand[mystring]="" > return "" > else: >- pos=pos+1 >+ pos=pos + 1 > if len(myvarname)==0: > cexpand[mystring]="" > return "" >- numvars=numvars+1 >+ numvars=numvars + 1 > if myvarname in mydict: >- newstring=newstring+mydict[myvarname] >+ newstring=newstring + mydict[myvarname] > else: >- newstring=newstring+mystring[pos] >- pos=pos+1 >+ newstring=newstring + mystring[pos] >+ pos=pos + 1 > else: >- newstring=newstring+mystring[pos] >- pos=pos+1 >+ newstring=newstring + mystring[pos] >+ pos=pos + 1 > if numvars==0: > cexpand[mystring]=newstring[1:] > return newstring[1:] >@@ -554,7 +554,7 @@ > def pickle_read(filename,default=None,debug=0): > import os > if not os.access(filename, os.R_OK): >- writemsg("pickle_read(): File not readable. '"+filename+"'\n",1) >+ writemsg("pickle_read(): File not readable. '" + filename + "'\n",1) > return default > data = None > try: >@@ -563,11 +563,11 @@ > data = mypickle.load() > myf.close() > del mypickle,myf >- writemsg("pickle_read(): Loaded pickle. '"+filename+"'\n",1) >+ writemsg("pickle_read(): Loaded pickle. '" + filename + "'\n",1) > except SystemExit, e: > raise > except Exception, e: >- writemsg("!!! Failed to load pickle: "+str(e)+"\n",1) >+ writemsg("!!! 
Failed to load pickle: " + str(e) + "\n",1) > data = default > return data > >@@ -585,7 +585,7 @@ > for line in traceback.format_list(stack): > writemsg(line, noiselevel=noiselevel) > if error: >- writemsg(error+"\n", noiselevel=noiselevel) >+ writemsg(error + "\n", noiselevel=noiselevel) > writemsg("====================================\n\n", noiselevel=noiselevel) > > class cmp_sort_key(object): >Index: pym/portage/update.py >=================================================================== >--- pym/portage/update.py (revision 13832) >+++ pym/portage/update.py (working copy) >@@ -89,9 +89,9 @@ > return [] > > # update names are mangled to make them sort properly >- mylist = [myfile[3:]+"-"+myfile[:2] for myfile in mylist] >+ mylist = [myfile[3:] + "-" + myfile[:2] for myfile in mylist] > mylist.sort() >- mylist = [myfile[5:]+"-"+myfile[:4] for myfile in mylist] >+ mylist = [myfile[5:] + "-" + myfile[:4] for myfile in mylist] > > update_data = [] > for myfile in mylist: >Index: pym/portage/mail.py >=================================================================== >--- pym/portage/mail.py (revision 13832) >+++ pym/portage/mail.py (working copy) >@@ -69,7 +69,7 @@ > > # user wants to use a sendmail binary instead of smtp > if mymailhost[0] == os.sep and os.path.exists(mymailhost): >- fd = os.popen(mymailhost+" -f "+myfrom+" "+myrecipient, "w") >+ fd = os.popen(mymailhost + " -f " + myfrom + " " + myrecipient, "w") > fd.write(message.as_string()) > if fd.close() != None: > sys.stderr.write("!!! %s returned with a non-zero exit code. This generally indicates an error.\n" % mymailhost) >@@ -89,8 +89,8 @@ > myconn.sendmail(myfrom, myrecipient, message.as_string()) > myconn.quit() > except smtplib.SMTPException, e: >- raise portage.exception.PortageException("!!! An error occured while trying to send logmail:\n"+str(e)) >+ raise portage.exception.PortageException("!!! An error occured while trying to send logmail:\n" + str(e)) > except socket.error, e: >- raise portage.exception.PortageException("!!! A network error occured while trying to send logmail:\n"+str(e)+"\nSure you configured PORTAGE_ELOG_MAILURI correctly?") >+ raise portage.exception.PortageException("!!! 
A network error occured while trying to send logmail:\n" + str(e) + "\nSure you configured PORTAGE_ELOG_MAILURI correctly?") > return > >Index: pym/portage/glsa.py >=================================================================== >--- pym/portage/glsa.py (revision 13832) >+++ pym/portage/glsa.py (working copy) >@@ -51,26 +51,26 @@ > """ > rValue = "" > line = caption >- text = text.replace(2*NEWLINE_ESCAPE, NEWLINE_ESCAPE+" "+NEWLINE_ESCAPE) >+ text = text.replace(2*NEWLINE_ESCAPE, NEWLINE_ESCAPE + " " + NEWLINE_ESCAPE) > words = text.split() >- indentLevel = len(caption)+1 >+ indentLevel = len(caption) + 1 > > for w in words: > if line != "" and line[-1] == "\n": > rValue += line > line = " "*indentLevel >- if len(line)+len(w.replace(NEWLINE_ESCAPE, ""))+1 > width: >- rValue += line+"\n" >- line = " "*indentLevel+w.replace(NEWLINE_ESCAPE, "\n") >+ if len(line) + len(w.replace(NEWLINE_ESCAPE, "")) + 1 > width: >+ rValue += line + "\n" >+ line = " "*indentLevel + w.replace(NEWLINE_ESCAPE, "\n") > elif w.find(NEWLINE_ESCAPE) >= 0: > if len(line.strip()) > 0: >- rValue += line+" "+w.replace(NEWLINE_ESCAPE, "\n") >+ rValue += line + " " + w.replace(NEWLINE_ESCAPE, "\n") > else: >- rValue += line+w.replace(NEWLINE_ESCAPE, "\n") >+ rValue += line + w.replace(NEWLINE_ESCAPE, "\n") > line = " "*indentLevel > else: > if len(line.strip()) > 0: >- line += " "+w >+ line += " " + w > else: > line += w > if len(line) > 0: >@@ -153,7 +153,7 @@ > rValue = "" > if format in ["strip", "keep"]: > if node.nodeName in ["uri", "mail"]: >- rValue += node.childNodes[0].data+": "+node.getAttribute("link") >+ rValue += node.childNodes[0].data + ": " + node.getAttribute("link") > else: > for subnode in node.childNodes: > if subnode.nodeName == "#text": >@@ -168,16 +168,16 @@ > rValue += p_subnode.data.strip() > elif p_subnode.nodeName in ["uri", "mail"]: > rValue += p_subnode.childNodes[0].data >- rValue += " ( "+p_subnode.getAttribute("link")+" )" >+ rValue += " ( " + p_subnode.getAttribute("link") + " )" > rValue += NEWLINE_ESCAPE > elif subnode.nodeName == "ul": > for li in getListElements(subnode): >- rValue += "-"+SPACE_ESCAPE+li+NEWLINE_ESCAPE+" " >+ rValue += "-" + SPACE_ESCAPE + li + NEWLINE_ESCAPE + " " > elif subnode.nodeName == "ol": > i = 0 > for li in getListElements(subnode): >- i = i+1 >- rValue += str(i)+"."+SPACE_ESCAPE+li+NEWLINE_ESCAPE+" " >+ i = i + 1 >+ rValue += str(i) + "." 
+ SPACE_ESCAPE + li + NEWLINE_ESCAPE + " "
> elif subnode.nodeName == "code":
> rValue += getText(subnode, format="keep").replace("\n", NEWLINE_ESCAPE)
> if rValue[-1*len(NEWLINE_ESCAPE):] != NEWLINE_ESCAPE:
>@@ -299,19 +299,19 @@
> """
> if match_type == "default" or not hasattr(dbapi, "xmatch"):
> if ":" in revisionAtom:
>- mylist = dbapi.match(re.sub(r'-r[0-9]+(:[^ ]+)?$', r'\1', revisionAtom[2:]))
>+ mylist = dbapi.match(re.sub(r'-r[0-9]+(:[^ ]+)?$', r'\1', revisionAtom[2:]))
> else:
>- mylist = dbapi.match(re.sub("-r[0-9]+$", "", revisionAtom[2:]))
>+ mylist = dbapi.match(re.sub("-r[0-9]+$", "", revisionAtom[2:]))
> else:
> if ":" in revisionAtom:
>- mylist = dbapi.xmatch(match_type, re.sub(r'-r[0-9]+(:[^ ]+)?$', r'\1', revisionAtom[2:]))
>+ mylist = dbapi.xmatch(match_type, re.sub(r'-r[0-9]+(:[^ ]+)?$', r'\1', revisionAtom[2:]))
> else:
>- mylist = dbapi.xmatch(match_type, re.sub("-r[0-9]+$", "", revisionAtom[2:]))
>+ mylist = dbapi.xmatch(match_type, re.sub("-r[0-9]+$", "", revisionAtom[2:]))
> rValue = []
> for v in mylist:
> r1 = pkgsplit(v)[-1][1:]
> r2 = pkgsplit(revisionAtom[3:])[-1][1:]
>- if eval(r1+" "+revisionAtom[0:2]+" "+r2):
>+ if eval(r1 + " " + revisionAtom[0:2] + " " + r2):
> rValue.append(v)
> return rValue
>
>@@ -365,13 +365,13 @@
> i_pv = catpkgsplit(best(v_installed))
> if pkgcmp(c_pv[1:], i_pv[1:]) > 0 \
> and (rValue == None \
>- or not match("="+rValue, portdbapi) \
>+ or not match("=" + rValue, portdbapi) \
> or (minimize ^ (pkgcmp(c_pv[1:], catpkgsplit(rValue)[1:]) > 0)) \
>- and match("="+c, portdbapi)) \
>+ and match("=" + c, portdbapi)) \
> and portdbapi.aux_get(c, ["SLOT"]) == vardbapi.aux_get(best(v_installed), ["SLOT"]):
>- rValue = c_pv[0]+"/"+c_pv[1]+"-"+c_pv[2]
>+ rValue = c_pv[0] + "/" + c_pv[1] + "-" + c_pv[2]
> if c_pv[3] != "r0": # we don't like -r0 for display
>- rValue += "-"+c_pv[3]
>+ rValue += "-" + c_pv[3]
> return rValue
>
> def format_date(datestr):
>@@ -439,7 +439,7 @@
> elif os.path.exists(myid):
> self.type = "file"
> else:
>- raise GlsaArgumentException("Given ID "+myid+" isn't a valid GLSA ID or filename.")
>+ raise GlsaArgumentException("Given ID " + myid + " isn't a valid GLSA ID or filename.")
> self.nr = myid
> self.config = myconfig
> self.vardbapi = vardbapi
>@@ -455,11 +455,11 @@
> @return: None
> """
> if "GLSA_DIR" in self.config:
>- repository = "file://" + self.config["GLSA_DIR"]+"/"
>+ repository = "file://" + self.config["GLSA_DIR"] + "/"
> else:
> repository = "file://" + self.config["PORTDIR"] + "/metadata/glsa/"
> if self.type == "file":
>- myurl = "file://"+self.nr
>+ myurl = "file://" + self.nr
> else:
> myurl = repository + "glsa-%s.xml" % str(self.nr)
> self.parse(urllib.urlopen(myurl))
>@@ -564,9 +564,9 @@
> (defaults to sys.stdout)
> """
> width = 76
>- outstream.write(("GLSA %s: \n%s" % (self.nr, self.title)).center(width)+"\n")
>- outstream.write((width*"=")+"\n")
>- outstream.write(wrap(self.synopsis, width, caption="Synopsis: ")+"\n")
>+ outstream.write(("GLSA %s: \n%s" % (self.nr, self.title)).center(width) + "\n")
>+ outstream.write((width*"=") + "\n")
>+ outstream.write(wrap(self.synopsis, width, caption="Synopsis: ") + "\n")
> outstream.write("Announced on: %s\n" % self.announced)
> outstream.write("Last revised on: %s : %02d\n\n" % (self.revised, self.count))
> if self.glsatype == "ebuild":
>@@ -594,15 +594,15 @@
> else:
> outstream.write("\n")
> if self.background:
>- outstream.write("\n"+wrap(self.background, width, caption="Background: "))
>- outstream.write("\n"+wrap(self.description, width, caption="Description: "))
>- outstream.write("\n"+wrap(self.impact_text, width, caption="Impact: "))
>- outstream.write("\n"+wrap(self.workaround, width, caption="Workaround: "))
>- outstream.write("\n"+wrap(self.resolution, width, caption="Resolution: "))
>+ outstream.write("\n" + wrap(self.background, width, caption="Background: "))
>+ outstream.write("\n" + wrap(self.description, width, caption="Description: "))
>+ outstream.write("\n" + wrap(self.impact_text, width, caption="Impact: "))
>+ outstream.write("\n" + wrap(self.workaround, width, caption="Workaround: "))
>+ outstream.write("\n" + wrap(self.resolution, width, caption="Resolution: "))
> myreferences = ""
> for r in self.references:
>- myreferences += (r.replace(" ", SPACE_ESCAPE)+NEWLINE_ESCAPE+" ")
>- outstream.write("\n"+wrap(myreferences, width, caption="References: "))
>+ myreferences += (r.replace(" ", SPACE_ESCAPE) + NEWLINE_ESCAPE + " ")
>+ outstream.write("\n" + wrap(myreferences, width, caption="References: "))
> outstream.write("\n")
>
> def isVulnerable(self):
>@@ -646,8 +646,8 @@
> @returns: None
> """
> if not self.isApplied():
>- checkfile = open(os.path.join(os.sep, self.config["ROOT"], CACHE_PATH.lstrip(os.sep), "glsa"), "a+")
>- checkfile.write(self.nr+"\n")
>+ checkfile = open(os.path.join(os.sep, self.config["ROOT"], CACHE_PATH.lstrip(os.sep), "glsa"), "a+")
>+ checkfile.write(self.nr + "\n")
> checkfile.close()
> return None
>
>Index: pym/portage/const.py
>===================================================================
>--- pym/portage/const.py (revision 13832)
>+++ pym/portage/const.py (working copy)
>@@ -13,22 +13,22 @@
> VDB_PATH = "var/db/pkg"
> PRIVATE_PATH = "var/lib/portage"
> CACHE_PATH = "/var/cache/edb"
>-DEPCACHE_PATH = CACHE_PATH+"/dep"
>+DEPCACHE_PATH = CACHE_PATH + "/dep"
>
> USER_CONFIG_PATH = "etc/portage"
>-MODULES_FILE_PATH = USER_CONFIG_PATH+"/modules"
>-CUSTOM_PROFILE_PATH = USER_CONFIG_PATH+"/profile"
>+MODULES_FILE_PATH = USER_CONFIG_PATH + "/modules"
>+CUSTOM_PROFILE_PATH = USER_CONFIG_PATH + "/profile"
> GLOBAL_CONFIG_PATH = "/usr/share/portage/config"
>
> PORTAGE_BASE_PATH = os.path.join(os.sep, os.sep.join(__file__.split(os.sep)[:-3]))
>-PORTAGE_BIN_PATH = PORTAGE_BASE_PATH+"/bin"
>-PORTAGE_PYM_PATH = PORTAGE_BASE_PATH+"/pym"
>+PORTAGE_BIN_PATH = PORTAGE_BASE_PATH + "/bin"
>+PORTAGE_PYM_PATH = PORTAGE_BASE_PATH + "/pym"
> PORTAGE_PACKAGE_ATOM = "sys-apps/portage"
> NEWS_LIB_PATH = "/var/lib/gentoo"
> PROFILE_PATH = "etc/make.profile"
>-LOCALE_DATA_PATH = PORTAGE_BASE_PATH+"/locale"
>+LOCALE_DATA_PATH = PORTAGE_BASE_PATH + "/locale"
>
>-EBUILD_SH_BINARY = PORTAGE_BIN_PATH+"/ebuild.sh"
>+EBUILD_SH_BINARY = PORTAGE_BIN_PATH + "/ebuild.sh"
> MISC_SH_BINARY = PORTAGE_BIN_PATH + "/misc-functions.sh"
> SANDBOX_BINARY = "/usr/bin/sandbox"
> FAKEROOT_BINARY = "/usr/bin/fakeroot"
>@@ -39,11 +39,11 @@
> WORLD_FILE = PRIVATE_PATH + "/world"
> MAKE_CONF_FILE = "etc/make.conf"
> MAKE_DEFAULTS_FILE = PROFILE_PATH + "/make.defaults"
>-DEPRECATED_PROFILE_FILE = PROFILE_PATH+"/deprecated"
>-USER_VIRTUALS_FILE = USER_CONFIG_PATH+"/virtuals"
>-EBUILD_SH_ENV_FILE = USER_CONFIG_PATH+"/bashrc"
>+DEPRECATED_PROFILE_FILE = PROFILE_PATH + "/deprecated"
>+USER_VIRTUALS_FILE = USER_CONFIG_PATH + "/virtuals"
>+EBUILD_SH_ENV_FILE = USER_CONFIG_PATH + "/bashrc"
> INVALID_ENV_FILE = "/etc/spork/is/not/valid/profile.env"
>-CUSTOM_MIRRORS_FILE = USER_CONFIG_PATH+"/mirrors"
>+CUSTOM_MIRRORS_FILE = USER_CONFIG_PATH + "/mirrors"
> CONFIG_MEMORY_FILE = PRIVATE_PATH + "/config"
> COLOR_MAP_FILE = 
USER_CONFIG_PATH + "/color.map" > >Index: pym/portage/sets/__init__.py >=================================================================== >--- pym/portage/sets/__init__.py (revision 13832) >+++ pym/portage/sets/__init__.py (working copy) >@@ -80,7 +80,7 @@ > setclass = load_mod(classname) > except (ImportError, AttributeError): > try: >- setclass = load_mod("portage.sets."+classname) >+ setclass = load_mod("portage.sets." + classname) > except (ImportError, AttributeError): > self.errors.append(_("Could not import '%(class)s' for section " > "'%(section)s'") % {"class": classname, "section": sname}) >@@ -201,8 +201,8 @@ > sc = load_default_config(portage.settings, portage.db["/"]) > l, e = sc.getSets() > for x in l: >- print x+":" >+ print x + ":" > print "DESCRIPTION = %s" % l[x].getMetadata("Description") > for n in sorted(l[x].getAtoms()): >- print "- "+n >+ print "- " + n > print >Index: pym/portage/sets/files.py >=================================================================== >--- pym/portage/sets/files.py (revision 13832) >+++ pym/portage/sets/files.py (working copy) >@@ -73,7 +73,7 @@ > data, errors = self.loader.load() > for fname in errors: > for e in errors[fname]: >- self.errors.append(fname+": "+e) >+ self.errors.append(fname + ": " + e) > except EnvironmentError, e: > if e.errno != errno.ENOENT: > raise >@@ -173,7 +173,7 @@ > for suffix in ["keywords", "use", "mask", "unmask"]: > myname = name_pattern.replace("$suffix", suffix) > myname = myname.replace("${suffix}", suffix) >- rValue[myname] = ConfigFileSet(os.path.join(directory, "package."+suffix)) >+ rValue[myname] = ConfigFileSet(os.path.join(directory, "package." + suffix)) > return rValue > multiBuilder = classmethod(multiBuilder) > >@@ -202,7 +202,7 @@ > def write(self): > write_atomic(self._filename, > "".join(sorted("%s\n" % x for x in self._atoms))) >- write_atomic(self._filename2, "\n".join(sorted(self._nonatoms))+"\n") >+ write_atomic(self._filename2, "\n".join(sorted(self._nonatoms)) + "\n") > > def load(self): > atoms = [] >@@ -220,7 +220,7 @@ > data, errors = self.loader.load() > for fname in errors: > for e in errors[fname]: >- self.errors.append(fname+": "+e) >+ self.errors.append(fname + ": " + e) > except EnvironmentError, e: > if e.errno != errno.ENOENT: > raise >@@ -240,7 +240,7 @@ > data, errors = self.loader2.load() > for fname in errors: > for e in errors[fname]: >- self.errors.append(fname+": "+e) >+ self.errors.append(fname + ": " + e) > except EnvironmentError, e: > if e.errno != errno.ENOENT: > raise >@@ -252,7 +252,7 @@ > else: > nonatoms.extend(self._nonatoms) > if atoms_changed: >- self._setAtoms(atoms+nonatoms) >+ self._setAtoms(atoms + nonatoms) > > def _ensure_dirs(self): > ensure_dirs(os.path.dirname(self._filename), gid=portage_gid, mode=02750, mask=02) >Index: pym/portage/sets/security.py >=================================================================== >--- pym/portage/sets/security.py (revision 13832) >+++ pym/portage/sets/security.py (working copy) >@@ -40,7 +40,7 @@ > myglsa = glsa.Glsa(glsaid, self._settings, self._vardbapi, self._portdbapi) > #print glsaid, myglsa.isVulnerable(), myglsa.isApplied(), myglsa.getMergeList() > if self.useGlsa(myglsa): >- atomlist += ["="+x for x in myglsa.getMergeList(least_change=self._least_change)] >+ atomlist += ["=" + x for x in myglsa.getMergeList(least_change=self._least_change)] > self._setAtoms(self._reduce(atomlist)) > > def _reduce(self, atomlist): >Index: pym/portage/elog/mod_save_summary.py 
>===================================================================
>--- pym/portage/elog/mod_save_summary.py (revision 13832)
>+++ pym/portage/elog/mod_save_summary.py (working copy)
>@@ -16,7 +16,7 @@
> ensure_dirs(elogdir, uid=portage_uid, gid=portage_gid, mode=02770)
>
> # TODO: Locking
>- elogfilename = elogdir+"/summary.log"
>+ elogfilename = elogdir + "/summary.log"
> elogfile = open(elogfilename, "a")
> apply_permissions(elogfilename, mode=060, mask=0)
> elogfile.write(_(">>> Messages generated by process %(pid)d on %(time)s for package %(pkg)s:\n\n") %
>Index: pym/portage/elog/mod_save.py
>===================================================================
>--- pym/portage/elog/mod_save.py (revision 13832)
>+++ pym/portage/elog/mod_save.py (working copy)
>@@ -16,7 +16,7 @@
> elogdir = os.path.join(os.sep, "var", "log", "portage", "elog")
> ensure_dirs(elogdir, uid=portage_uid, gid=portage_gid, mode=02770)
>
>- elogfilename = elogdir+"/"+path+":"+time.strftime("%Y%m%d-%H%M%S", time.gmtime(time.time()))+".log"
>+ elogfilename = elogdir + "/" + path + ":" + time.strftime("%Y%m%d-%H%M%S", time.gmtime(time.time())) + ".log"
> elogfile = open(elogfilename, "w")
> elogfile.write(fulltext)
> elogfile.close()
>Index: pym/portage/output.py
>===================================================================
>--- pym/portage/output.py (revision 13832)
>+++ pym/portage/output.py (working copy)
>@@ -65,7 +65,7 @@
>
> def color(fg, bg="default", attr=["normal"]):
> mystr = codes[fg]
>- for x in [bg]+attr:
>+ for x in [bg] + attr:
> mystr += codes[x]
> return mystr
>
>@@ -230,7 +230,7 @@
> raise
>
> def nc_len(mystr):
>- tmp = re.sub(esc_seq + "^m]+m", "", mystr);
>+ tmp = re.sub(esc_seq + "^m]+m", "", mystr);
> return len(tmp)
>
> _legal_terms_re = re.compile(r'^(xterm|xterm-color|Eterm|aterm|rxvt|screen|kterm|rxvt-unicode|gnome|interix)')
>@@ -648,7 +648,7 @@
> resulting curval is coerced between 0 and maxval if incrementing causes
> it to fall outside this range.
> """
>- self.set(self._curval+n)
>+ self.set(self._curval + n)
>
> class TermProgressBar(ProgressBar):
> """A tty progress bar similar to wget's."""
>Index: pym/portage/xpak.py
>===================================================================
>--- pym/portage/xpak.py (revision 13832)
>+++ pym/portage/xpak.py (working copy)
>@@ -25,11 +25,11 @@
> for x in os.listdir("."):
> if os.path.isdir(x):
> os.chdir(x)
>- addtolist(mylist,curdir+x+"/")
>+ addtolist(mylist,curdir + x + "/")
> os.chdir("..")
> else:
>- if curdir+x not in mylist:
>- mylist.append(curdir+x)
>+ if curdir + x not in mylist:
>+ mylist.append(curdir + x)
>
> def encodeint(myint):
> """Takes a 4 byte integer and converts it into a string of 4 characters.
>@@ -38,16 +38,16 @@
> part2=chr((myint >> 16 ) & 0x000000ff)
> part3=chr((myint >> 8 ) & 0x000000ff)
> part4=chr(myint & 0x000000ff)
>- return part1+part2+part3+part4
>+ return part1 + part2 + part3 + part4
>
> def decodeint(mystring):
> """Takes a 4 byte string and converts it into a 4 byte integer. 
> Returns an integer.""" > myint=0 >- myint=myint+ord(mystring[3]) >- myint=myint+(ord(mystring[2]) << 8) >- myint=myint+(ord(mystring[1]) << 16) >- myint=myint+(ord(mystring[0]) << 24) >+ myint=myint + ord(mystring[3]) >+ myint=myint + (ord(mystring[2]) << 8) >+ myint=myint + (ord(mystring[1]) << 16) >+ myint=myint + (ord(mystring[0]) << 24) > return myint > > def xpak(rootdir,outfile=None): >@@ -89,10 +89,10 @@ > datapos=0 > for x, newglob in mydata.iteritems(): > mydatasize=len(newglob) >- indexglob=indexglob+encodeint(len(x))+x+encodeint(datapos)+encodeint(mydatasize) >- indexpos=indexpos+4+len(x)+4+4 >- dataglob=dataglob+newglob >- datapos=datapos+mydatasize >+ indexglob=indexglob + encodeint(len(x)) + x + encodeint(datapos) + encodeint(mydatasize) >+ indexpos=indexpos + 4 + len(x) + 4 + 4 >+ dataglob=dataglob + newglob >+ datapos=datapos + mydatasize > return "XPAKPACK" \ > + encodeint(len(indexglob)) \ > + encodeint(len(dataglob)) \ >@@ -112,10 +112,10 @@ > if not splits: > return False > >- myfile=open(infile+".index","w") >+ myfile=open(infile + ".index","w") > myfile.write(splits[0]) > myfile.close() >- myfile=open(infile+".dat","w") >+ myfile=open(infile + ".dat","w") > myfile.write(splits[1]) > myfile.close() > return True >@@ -126,7 +126,7 @@ > if mydat[-8:]!="XPAKSTOP": > return None > indexsize=decodeint(mydat[8:12]) >- return (mydat[16:indexsize+16], mydat[indexsize+16:-8]) >+ return (mydat[16:indexsize + 16], mydat[indexsize + 16:-8]) > > def getindex(infile): > """(infile) -- grabs the index segment from the infile and returns it.""" >@@ -165,10 +165,10 @@ > myindexlen=len(myindex) > startpos=0 > myret=[] >- while ((startpos+8)<myindexlen): >- mytestlen=decodeint(myindex[startpos:startpos+4]) >- myret=myret+[myindex[startpos+4:startpos+4+mytestlen]] >- startpos=startpos+mytestlen+12 >+ while ((startpos + 8)<myindexlen): >+ mytestlen=decodeint(myindex[startpos:startpos + 4]) >+ myret=myret + [myindex[startpos + 4:startpos + 4 + mytestlen]] >+ startpos=startpos + mytestlen + 12 > return myret > > def searchindex(myindex,myitem): >@@ -177,15 +177,15 @@ > mylen=len(myitem) > myindexlen=len(myindex) > startpos=0 >- while ((startpos+8)<myindexlen): >- mytestlen=decodeint(myindex[startpos:startpos+4]) >+ while ((startpos + 8)<myindexlen): >+ mytestlen=decodeint(myindex[startpos:startpos + 4]) > if mytestlen==mylen: >- if myitem==myindex[startpos+4:startpos+4+mytestlen]: >+ if myitem==myindex[startpos + 4:startpos + 4 + mytestlen]: > #found >- datapos=decodeint(myindex[startpos+4+mytestlen:startpos+8+mytestlen]); >- datalen=decodeint(myindex[startpos+8+mytestlen:startpos+12+mytestlen]); >+ datapos=decodeint(myindex[startpos + 4 + mytestlen:startpos + 8 + mytestlen]); >+ datalen=decodeint(myindex[startpos + 8 + mytestlen:startpos + 12 + mytestlen]); > return datapos, datalen >- startpos=startpos+mytestlen+12 >+ startpos=startpos + mytestlen + 12 > > def getitem(myid,myitem): > myindex=myid[0] >@@ -193,7 +193,7 @@ > myloc=searchindex(myindex,myitem) > if not myloc: > return None >- return mydata[myloc[0]:myloc[0]+myloc[1]] >+ return mydata[myloc[0]:myloc[0] + myloc[1]] > > def xpand(myid,mydest): > myindex=myid[0] >@@ -208,19 +208,19 @@ > os.chdir(mydest) > myindexlen=len(myindex) > startpos=0 >- while ((startpos+8)<myindexlen): >- namelen=decodeint(myindex[startpos:startpos+4]) >- datapos=decodeint(myindex[startpos+4+namelen:startpos+8+namelen]); >- datalen=decodeint(myindex[startpos+8+namelen:startpos+12+namelen]); >- myname=myindex[startpos+4:startpos+4+namelen] >+ while 
((startpos + 8)<myindexlen):
>+ namelen=decodeint(myindex[startpos:startpos + 4])
>+ datapos=decodeint(myindex[startpos + 4 + namelen:startpos + 8 + namelen]);
>+ datalen=decodeint(myindex[startpos + 8 + namelen:startpos + 12 + namelen]);
>+ myname=myindex[startpos + 4:startpos + 4 + namelen]
> dirname=os.path.dirname(myname)
> if dirname:
> if not os.path.exists(dirname):
> os.makedirs(dirname)
> mydat=open(myname,"w")
>- mydat.write(mydata[datapos:datapos+datalen])
>+ mydat.write(mydata[datapos:datapos + datalen])
> mydat.close()
>- startpos=startpos+namelen+12
>+ startpos=startpos + namelen + 12
> os.chdir(origdir)
>
> class tbz2(object):
>@@ -262,12 +262,12 @@
>
> def recompose_mem(self, xpdata):
> self.scan() # Don't care about condition... We'll rewrite the data anyway.
>- myfile=open(self.file,"a+")
>+ myfile=open(self.file,"a+")
> if not myfile:
> raise IOError
> myfile.seek(-self.xpaksize,2) # 0,2 or -0,2 just mean EOF.
> myfile.truncate()
>- myfile.write(xpdata+encodeint(len(xpdata))+"STOP")
>+ myfile.write(xpdata + encodeint(len(xpdata)) + "STOP")
> myfile.flush()
> myfile.close()
> return 1
>@@ -310,7 +310,7 @@
> a.close()
> return 0
> self.infosize=decodeint(trailer[8:12])
>- self.xpaksize=self.infosize+8
>+ self.xpaksize=self.infosize + 8
> a.seek(-(self.xpaksize),2)
> header=a.read(16)
> if header[0:8]!="XPAKPACK":
>@@ -342,7 +342,7 @@
> if not myresult:
> return mydefault
> a=open(self.file,"r")
>- a.seek(self.datapos+myresult[0],0)
>+ a.seek(self.datapos + myresult[0],0)
> myreturn=a.read(myresult[1])
> a.close()
> return myreturn
>@@ -370,20 +370,20 @@
> os.makedirs(mydest)
> os.chdir(mydest)
> startpos=0
>- while ((startpos+8)<self.indexsize):
>- namelen=decodeint(self.index[startpos:startpos+4])
>- datapos=decodeint(self.index[startpos+4+namelen:startpos+8+namelen]);
>- datalen=decodeint(self.index[startpos+8+namelen:startpos+12+namelen]);
>- myname=self.index[startpos+4:startpos+4+namelen]
>+ while ((startpos + 8)<self.indexsize):
>+ namelen=decodeint(self.index[startpos:startpos + 4])
>+ datapos=decodeint(self.index[startpos + 4 + namelen:startpos + 8 + namelen]);
>+ datalen=decodeint(self.index[startpos + 8 + namelen:startpos + 12 + namelen]);
>+ myname=self.index[startpos + 4:startpos + 4 + namelen]
> dirname=os.path.dirname(myname)
> if dirname:
> if not os.path.exists(dirname):
> os.makedirs(dirname)
> mydat=open(myname,"w")
>- a.seek(self.datapos+datapos)
>+ a.seek(self.datapos + datapos)
> mydat.write(a.read(datalen))
> mydat.close()
>- startpos=startpos+namelen+12
>+ startpos=startpos + namelen + 12
> a.close()
> os.chdir(origdir)
> return 1
>@@ -395,14 +395,14 @@
> a = open(self.file, "r")
> mydata = {}
> startpos=0
>- while ((startpos+8)<self.indexsize):
>- namelen=decodeint(self.index[startpos:startpos+4])
>- datapos=decodeint(self.index[startpos+4+namelen:startpos+8+namelen]);
>- datalen=decodeint(self.index[startpos+8+namelen:startpos+12+namelen]);
>- myname=self.index[startpos+4:startpos+4+namelen]
>- a.seek(self.datapos+datapos)
>+ while ((startpos + 8)<self.indexsize):
>+ namelen=decodeint(self.index[startpos:startpos + 4])
>+ datapos=decodeint(self.index[startpos + 4 + namelen:startpos + 8 + namelen]);
>+ datalen=decodeint(self.index[startpos + 8 + namelen:startpos + 12 + namelen]);
>+ myname=self.index[startpos + 4:startpos + 4 + namelen]
>+ a.seek(self.datapos + datapos)
> mydata[myname] = a.read(datalen)
>- startpos=startpos+namelen+12
>+ startpos=startpos + namelen + 12
> a.close()
> return mydata
>
>Index: pym/portage/getbinpkg.py 
>=================================================================== >--- pym/portage/getbinpkg.py (revision 13832) >+++ pym/portage/getbinpkg.py (working copy) >@@ -23,12 +23,12 @@ > try: > import ftplib > except ImportError, e: >- sys.stderr.write(colorize("BAD","!!! CANNOT IMPORT FTPLIB: ")+str(e)+"\n") >+ sys.stderr.write(colorize("BAD","!!! CANNOT IMPORT FTPLIB: ") + str(e) + "\n") > > try: > import httplib > except ImportError, e: >- sys.stderr.write(colorize("BAD","!!! CANNOT IMPORT HTTPLIB: ")+str(e)+"\n") >+ sys.stderr.write(colorize("BAD","!!! CANNOT IMPORT HTTPLIB: ") + str(e) + "\n") > > def make_metadata_dict(data): > myid,myglob = data >@@ -93,7 +93,7 @@ > if len(url_parts) < 2: > address = "/" > else: >- address = "/"+"/".join(url_parts[1:]) >+ address = "/" + "/".join(url_parts[1:]) > del url_parts > > userpass_host = host.split("@",1) >@@ -141,7 +141,7 @@ > conn.login(username,password) > else: > sys.stderr.write(colorize("WARN", >- " * No password provided for username")+" '%s'" % \ >+ " * No password provided for username") + " '%s'" % \ > (username,) + "\n\n") > conn.login(username) > conn.set_pasv(passive) >@@ -172,14 +172,14 @@ > fsize = conn.size(address) > > if (rest != None) and (rest < 0): >- rest = fsize+int(rest) >+ rest = fsize + int(rest) > if rest < 0: > rest = 0 > > if rest != None: >- mysocket = conn.transfercmd("RETR "+str(address), rest) >+ mysocket = conn.transfercmd("RETR " + str(address), rest) > else: >- mysocket = conn.transfercmd("RETR "+str(address)) >+ mysocket = conn.transfercmd("RETR " + str(address)) > > mydata = "" > while 1: >@@ -222,7 +222,7 @@ > except SystemExit, e: > raise > except Exception, e: >- return None,None,"Server request failed: "+str(e) >+ return None,None,"Server request failed: " + str(e) > response = conn.getresponse() > rc = response.status > >@@ -244,7 +244,7 @@ > break > > if (rc != 200) and (rc != 206): >- return None,rc,"Server did not respond successfully ("+str(response.status)+": "+str(response.reason)+")" >+ return None,rc,"Server did not respond successfully (" + str(response.status) + ": " + str(response.reason) + ")" > > if dest: > dest.write(response.read()) >@@ -273,7 +273,7 @@ > continue > > if not allow_overlap: # Not allow to overlap prefix and suffix >- if len(x) >= (len(prefix)+len(suffix)): >+ if len(x) >= (len(prefix) + len(suffix)): > pass > else: > continue # Too short to match. 
>@@ -350,7 +350,7 @@ > conn,protocol,address,params,headers = create_conn(baseurl, conn) > > if protocol in ["http","https"]: >- headers["Range"] = "bytes=-"+str(chunk_size) >+ headers["Range"] = "bytes=-" + str(chunk_size) > data,rc,msg = make_http_request(conn, address, params, headers) > elif protocol in ["ftp"]: > data,rc,msg = make_ftp_request(conn, address, -chunk_size) >@@ -366,13 +366,13 @@ > > if data: > xpaksize = portage.xpak.decodeint(data[-8:-4]) >- if (xpaksize+8) > chunk_size: >- myid = file_get_metadata(baseurl, conn, (xpaksize+8)) >+ if (xpaksize + 8) > chunk_size: >+ myid = file_get_metadata(baseurl, conn, (xpaksize + 8)) > if not keepconnection: > conn.close() > return myid > else: >- xpak_data = data[len(data)-(xpaksize+8):-8] >+ xpak_data = data[len(data)-(xpaksize + 8):-8] > del data > > myid = portage.xpak.xsplit_mem(xpak_data) >@@ -428,7 +428,7 @@ > > conn,protocol,address,params,headers = create_conn(baseurl, conn) > >- sys.stderr.write("Fetching '"+str(os.path.basename(address)+"'\n")) >+ sys.stderr.write("Fetching '" + str(os.path.basename(address) + "'\n")) > if protocol in ["http","https"]: > data,rc,msg = make_http_request(conn, address, params, headers, dest=dest) > elif protocol in ["ftp"]: >@@ -538,12 +538,12 @@ > for trynum in [1,2,3]: > mytempfile = tempfile.TemporaryFile() > try: >- file_get(baseurl+"/"+mfile, mytempfile, conn) >+ file_get(baseurl + "/" + mfile, mytempfile, conn) > if mytempfile.tell() > len(data): > mytempfile.seek(0) > data = mytempfile.read() > except ValueError, e: >- sys.stderr.write("--- "+str(e)+"\n") >+ sys.stderr.write("--- " + str(e) + "\n") > if trynum < 3: > sys.stderr.write("Retrying...\n") > sys.stderr.flush() >@@ -561,7 +561,7 @@ > raise > except Exception, e: > mytempfile.close() >- sys.stderr.write("!!! Failed to use gzip: "+str(e)+"\n") >+ sys.stderr.write("!!! Failed to use gzip: " + str(e) + "\n") > sys.stderr.flush() > mytempfile.close() > try: >@@ -576,8 +576,8 @@ > except SystemExit, e: > raise > except Exception, e: >- sys.stderr.write("!!! Failed to read data from index: "+str(mfile)+"\n") >- sys.stderr.write("!!! "+str(e)+"\n") >+ sys.stderr.write("!!! Failed to read data from index: " + str(mfile) + "\n") >+ sys.stderr.write("!!! " + str(e) + "\n") > sys.stderr.flush() > try: > metadatafile = open(metadatafilename, 'wb') >@@ -587,7 +587,7 @@ > raise > except Exception, e: > sys.stderr.write("!!! Failed to write binary metadata to disk!\n") >- sys.stderr.write("!!! "+str(e)+"\n") >+ sys.stderr.write("!!! " + str(e) + "\n") > sys.stderr.flush() > break > # We may have metadata... now we run through the tbz2 list and check. >@@ -606,9 +606,9 @@ > self.last_update = cur_time > self.display() > def display(self): >- self.out.write("\r"+colorize("WARN", >- "cache miss: '"+str(self.misses)+"'") + \ >- " --- "+colorize("GOOD","cache hit: '"+str(self.hits)+"'")) >+ self.out.write("\r" + colorize("WARN", >+ "cache miss: '" + str(self.misses) + "'") + \ >+ " --- " + colorize("GOOD","cache hit: '" + str(self.hits) + "'")) > self.out.flush() > > cache_stats = CacheStats(out) >@@ -651,7 +651,7 @@ > metadata[baseurl]["data"][x] = make_metadata_dict(myid) > elif verbose: > sys.stderr.write(colorize("BAD", >- "!!! Failed to retrieve metadata on: ")+str(x)+"\n") >+ "!!! Failed to retrieve metadata on: ") + str(x) + "\n") > sys.stderr.flush() > else: > cache_stats.hits += 1 >@@ -683,7 +683,7 @@ > raise > except Exception, e: > sys.stderr.write("!!! Failed to write binary metadata to disk!\n") >- sys.stderr.write("!!! 
"+str(e)+"\n") >+ sys.stderr.write("!!! " + str(e) + "\n") > sys.stderr.flush() > > if not keepconnection: >Index: pym/portage/locks.py >=================================================================== >--- pym/portage/locks.py (revision 13832) >+++ pym/portage/locks.py (working copy) >@@ -153,7 +153,7 @@ > mypath, wantnewlockfile=wantnewlockfile, unlinkfile=unlinkfile, > waiting_msg=waiting_msg, flags=flags) > >- writemsg(str((lockfilename,myfd,unlinkfile))+"\n",1) >+ writemsg(str((lockfilename,myfd,unlinkfile)) + "\n",1) > return (lockfilename,myfd,unlinkfile,locking_method) > > def _fstat_nlink(fd): >@@ -228,7 +228,7 @@ > return False > except Exception, e: > writemsg("Failed to get lock... someone took it.\n",1) >- writemsg(str(e)+"\n",1) >+ writemsg(str(e) + "\n",1) > > # why test lockfilename? because we may have been handed an > # fd originally, and the caller might not like having their >@@ -242,7 +242,7 @@ > > > def hardlock_name(path): >- return path+".hardlock-"+os.uname()[1]+"-"+str(os.getpid()) >+ return path + ".hardlock-" + os.uname()[1] + "-" + str(os.getpid()) > > def hardlink_is_mine(link,lock): > try: >@@ -322,7 +322,7 @@ > > mylist = {} > for x in mydl: >- if os.path.isfile(path+"/"+x): >+ if os.path.isfile(path + "/" + x): > parts = x.split(".hardlock-") > if len(parts) == 2: > filename = parts[0] >@@ -343,13 +343,13 @@ > > for x in mylist: > if myhost in mylist[x] or remove_all_locks: >- mylockname = hardlock_name(path+"/"+x) >- if hardlink_is_mine(mylockname, path+"/"+x) or \ >- not os.path.exists(path+"/"+x) or \ >+ mylockname = hardlock_name(path + "/" + x) >+ if hardlink_is_mine(mylockname, path + "/" + x) or \ >+ not os.path.exists(path + "/" + x) or \ > remove_all_locks: > for y in mylist[x]: > for z in mylist[x][y]: >- filename = path+"/"+x+".hardlock-"+y+"-"+z >+ filename = path + "/" + x + ".hardlock-" + y + "-" + z > if filename == mylockname: > continue > try: >@@ -359,8 +359,8 @@ > except OSError: > pass > try: >- os.unlink(path+"/"+x) >- results.append(_("Unlinked: ") + path+"/"+x) >+ os.unlink(path + "/" + x) >+ results.append(_("Unlinked: ") + path + "/" + x) > os.unlink(mylockname) > results.append(_("Unlinked: ") + mylockname) > except OSError: >Index: pym/portage/cvstree.py >=================================================================== >--- pym/portage/cvstree.py (revision 13832) >+++ pym/portage/cvstree.py (working copy) >@@ -43,13 +43,13 @@ > filename=os.path.basename(path) > > try: >- myfile=open(basedir+"/CVS/Entries","r") >+ myfile=open(basedir + "/CVS/Entries","r") > except IOError: > return 0 > mylines=myfile.readlines() > myfile.close() > >- rep=re.compile("^\/"+re.escape(filename)+"\/"); >+ rep=re.compile("^\/" + re.escape(filename) + "\/"); > for x in mylines: > if rep.search(x): > return 1 >@@ -62,15 +62,15 @@ > have not yet been committed. 
Returns a list of paths, optionally prepended > with a basedir.""" > if basedir and basedir[-1]!="/": >- basedir=basedir+"/" >+ basedir=basedir + "/" > mylist=[] > for myfile in entries["files"]: > if "cvs" in entries["files"][myfile]["status"]: > if "0" == entries["files"][myfile]["revision"]: >- mylist.append(basedir+myfile) >+ mylist.append(basedir + myfile) > if recursive: > for mydir in entries["dirs"]: >- mylist+=findnew(entries["dirs"][mydir],recursive,basedir+mydir) >+ mylist+=findnew(entries["dirs"][mydir],recursive,basedir + mydir) > return mylist > > def findoption(entries, pattern, recursive=0, basedir=""): >@@ -83,11 +83,11 @@ > for myfile, mydata in entries["files"].iteritems(): > if "cvs" in mydata["status"]: > if pattern.search(mydata["flags"]): >- yield basedir+myfile >+ yield basedir + myfile > if recursive: > for mydir, mydata in entries["dirs"].iteritems(): > for x in findoption(mydata, pattern, >- recursive, basedir+mydir): >+ recursive, basedir + mydir): > yield x > > def findchanged(entries,recursive=0,basedir=""): >@@ -96,17 +96,17 @@ > and differ from the committed version. Returns a list of paths, optionally > prepended with a basedir.""" > if basedir and basedir[-1]!="/": >- basedir=basedir+"/" >+ basedir=basedir + "/" > mylist=[] > for myfile in entries["files"]: > if "cvs" in entries["files"][myfile]["status"]: > if "current" not in entries["files"][myfile]["status"]: > if "exists" in entries["files"][myfile]["status"]: > if entries["files"][myfile]["revision"]!="0": >- mylist.append(basedir+myfile) >+ mylist.append(basedir + myfile) > if recursive: > for mydir in entries["dirs"]: >- mylist+=findchanged(entries["dirs"][mydir],recursive,basedir+mydir) >+ mylist+=findchanged(entries["dirs"][mydir],recursive,basedir + mydir) > return mylist > > def findmissing(entries,recursive=0,basedir=""): >@@ -115,16 +115,16 @@ > tree but do not exist on the filesystem. Returns a list of paths, > optionally prepended with a basedir.""" > if basedir and basedir[-1]!="/": >- basedir=basedir+"/" >+ basedir=basedir + "/" > mylist=[] > for myfile in entries["files"]: > if "cvs" in entries["files"][myfile]["status"]: > if "exists" not in entries["files"][myfile]["status"]: > if "removed" not in entries["files"][myfile]["status"]: >- mylist.append(basedir+myfile) >+ mylist.append(basedir + myfile) > if recursive: > for mydir in entries["dirs"]: >- mylist+=findmissing(entries["dirs"][mydir],recursive,basedir+mydir) >+ mylist+=findmissing(entries["dirs"][mydir],recursive,basedir + mydir) > return mylist > > def findunadded(entries,recursive=0,basedir=""): >@@ -133,16 +133,16 @@ > directories but are not part of the cvs tree. Returns a list of paths, > optionally prepended with a basedir.""" > if basedir and basedir[-1]!="/": >- basedir=basedir+"/" >+ basedir=basedir + "/" > mylist=[] > > #ignore what cvs ignores. > for myfile in entries["files"]: > if "cvs" not in entries["files"][myfile]["status"]: >- mylist.append(basedir+myfile) >+ mylist.append(basedir + myfile) > if recursive: > for mydir in entries["dirs"]: >- mylist+=findunadded(entries["dirs"][mydir],recursive,basedir+mydir) >+ mylist+=findunadded(entries["dirs"][mydir],recursive,basedir + mydir) > return mylist > > def findremoved(entries,recursive=0,basedir=""): >@@ -150,14 +150,14 @@ > Recurses the entries tree to find all elements that are in flagged for cvs > deletions. 
Returns a list of paths, optionally prepended with a basedir.""" > if basedir and basedir[-1]!="/": >- basedir=basedir+"/" >+ basedir=basedir + "/" > mylist=[] > for myfile in entries["files"]: > if "removed" in entries["files"][myfile]["status"]: >- mylist.append(basedir+myfile) >+ mylist.append(basedir + myfile) > if recursive: > for mydir in entries["dirs"]: >- mylist+=findremoved(entries["dirs"][mydir],recursive,basedir+mydir) >+ mylist+=findremoved(entries["dirs"][mydir],recursive,basedir + mydir) > return mylist > > def findall(entries, recursive=0, basedir=""): >@@ -166,7 +166,7 @@ > entities. Returns a 4 element list of lists as returned from each find*().""" > > if basedir and basedir[-1]!="/": >- basedir=basedir+"/" >+ basedir=basedir + "/" > mynew = findnew(entries,recursive,basedir) > mychanged = findchanged(entries,recursive,basedir) > mymissing = findmissing(entries,recursive,basedir) >@@ -188,7 +188,7 @@ > """(basedir,recursive=0) > Scans the given directory and returns an datadict of all the entries in > the directory seperated as a dirs dict and a files dict.""" >- myfn=mydir+"/CVS/Entries" >+ myfn=mydir + "/CVS/Entries" > # entries=[dirs, files] > entries={"dirs":{},"files":{}} > if not os.path.exists(mydir): >@@ -215,11 +215,11 @@ > if mysplit[0]=="D": > entries["dirs"][mysplit[1]]={"dirs":{},"files":{},"status":[]} > entries["dirs"][mysplit[1]]["status"]=["cvs"] >- if os.path.isdir(mydir+"/"+mysplit[1]): >+ if os.path.isdir(mydir + "/" + mysplit[1]): > entries["dirs"][mysplit[1]]["status"]+=["exists"] > entries["dirs"][mysplit[1]]["flags"]=mysplit[2:] > if recursive: >- rentries=getentries(mydir+"/"+mysplit[1],recursive) >+ rentries=getentries(mydir + "/" + mysplit[1],recursive) > #print rentries.keys() > #print entries["files"].keys() > #print entries["files"][mysplit[1]] >@@ -241,7 +241,7 @@ > continue > if file=="digest-framerd-2.4.3": > print mydir,file >- if os.path.isdir(mydir+"/"+file): >+ if os.path.isdir(mydir + "/" + file): > if file not in entries["dirs"]: > entries["dirs"][file]={"dirs":{},"files":{}} > if "status" in entries["dirs"][file]: >@@ -249,7 +249,7 @@ > entries["dirs"][file]["status"]+=["exists"] > else: > entries["dirs"][file]["status"]=["exists"] >- elif os.path.isfile(mydir+"/"+file): >+ elif os.path.isfile(mydir + "/" + file): > if file=="digest-framerd-2.4.3": > print "isfile" > if file not in entries["files"]: >@@ -268,7 +268,7 @@ > try: > if file=="digest-framerd-2.4.3": > print "stat'ing" >- mystat=os.stat(mydir+"/"+file) >+ mystat=os.stat(mydir + "/" + file) > mytime=time.asctime(time.gmtime(mystat[ST_MTIME])) > if "status" not in entries["files"][file]: > if file=="digest-framerd-2.4.3": >@@ -292,7 +292,7 @@ > > else: > print >- print "File of unknown type:",mydir+"/"+file >+ print "File of unknown type:",mydir + "/" + file > print > return entries > >Index: pym/_emerge/RepoDisplay.py >=================================================================== >--- pym/_emerge/RepoDisplay.py (revision 13832) >+++ pym/_emerge/RepoDisplay.py (working copy) >@@ -57,9 +57,9 @@ > show_repo_paths[repo_index] = repo_path > if show_repo_paths: > for index, repo_path in enumerate(show_repo_paths): >- output.append(" "+teal("["+str(index)+"]")+" %s\n" % repo_path) >+ output.append(" " + teal("[" + str(index) + "]") + " %s\n" % repo_path) > if unknown_repo: >- output.append(" "+teal("[?]") + \ >+ output.append(" " + teal("[?]") + \ > " indicates that the source repository could not be determined\n") > return "".join(output) > >Index: 
pym/_emerge/emergelog.py >=================================================================== >--- pym/_emerge/emergelog.py (revision 13832) >+++ pym/_emerge/emergelog.py (working copy) >@@ -20,7 +20,7 @@ > def emergelog(xterm_titles, mystr, short_msg=None): > if xterm_titles and short_msg: > if "HOSTNAME" in os.environ: >- short_msg = os.environ["HOSTNAME"]+": "+short_msg >+ short_msg = os.environ["HOSTNAME"] + ": " + short_msg > xtermTitle(short_msg) > try: > file_path = os.path.join(_emerge_log_dir, 'emerge.log') >@@ -34,7 +34,7 @@ > # seek because we may have gotten held up by the lock. > # if so, we may not be positioned at the end of the file. > mylogfile.seek(0, 2) >- mylogfile.write(str(time.time())[:10]+": "+mystr+"\n") >+ mylogfile.write(str(time.time())[:10] + ": " + mystr + "\n") > mylogfile.flush() > finally: > if mylock: >Index: pym/_emerge/Scheduler.py >=================================================================== >--- pym/_emerge/Scheduler.py (revision 13832) >+++ pym/_emerge/Scheduler.py (working copy) >@@ -241,14 +241,14 @@ > "--fetch-all-uri" in self.myopts or \ > "--fetchonly" in self.myopts): > if "distlocks" not in features: >- portage.writemsg(red("!!!")+"\n", noiselevel=-1) >- portage.writemsg(red("!!!")+" parallel-fetching " + \ >- "requires the distlocks feature enabled"+"\n", >+ portage.writemsg(red("!!!") + "\n", noiselevel=-1) >+ portage.writemsg(red("!!!") + " parallel-fetching " + \ >+ "requires the distlocks feature enabled" + "\n", > noiselevel=-1) >- portage.writemsg(red("!!!")+" you have it disabled, " + \ >- "thus parallel-fetching is being disabled"+"\n", >+ portage.writemsg(red("!!!") + " you have it disabled, " + \ >+ "thus parallel-fetching is being disabled" + "\n", > noiselevel=-1) >- portage.writemsg(red("!!!")+"\n", noiselevel=-1) >+ portage.writemsg(red("!!!") + "\n", noiselevel=-1) > elif len(mergelist) > 1: > self._parallel_fetch = True > >@@ -744,7 +744,7 @@ > if myarg is True: > mynewargv.append(myopt) > else: >- mynewargv.append(myopt +"="+ str(myarg)) >+ mynewargv.append(myopt + "="+ str(myarg)) > # priority only needs to be adjusted on the first run > os.environ["PORTAGE_NICENESS"] = "0" > os.execv(mynewargv[0], mynewargv) >Index: pym/_emerge/countdown.py >=================================================================== >--- pym/_emerge/countdown.py (revision 13832) >+++ pym/_emerge/countdown.py (working copy) >@@ -10,11 +10,11 @@ > def countdown(secs=5, doing="Starting"): > if secs: > print ">>> Waiting",secs,"seconds before starting..." 
>- print ">>> (Control-C to abort)...\n"+doing+" in: ",
>+ print ">>> (Control-C to abort)...\n" + doing + " in: ",
> ticks=range(secs)
> ticks.reverse()
> for sec in ticks:
>- sys.stdout.write(colorize("UNMERGE_WARN", str(sec+1)+" "))
>+ sys.stdout.write(colorize("UNMERGE_WARN", str(sec + 1) + " "))
> sys.stdout.flush()
> time.sleep(1)
> print
>Index: pym/_emerge/Package.py
>===================================================================
>--- pym/_emerge/Package.py (revision 13832)
>+++ pym/_emerge/Package.py (working copy)
>@@ -88,7 +88,7 @@
> other = []
> for x in tokens:
> prefix = x[:1]
>- if prefix == "+":
>+ if prefix == "+":
> enabled.append(x[1:])
> elif prefix == "-":
> disabled.append(x[1:])
>Index: pym/_emerge/depgraph.py
>===================================================================
>--- pym/_emerge/depgraph.py (revision 13832)
>+++ pym/_emerge/depgraph.py (working copy)
>@@ -1267,11 +1267,11 @@
> os.path.join(pkgsettings["PKGDIR"], x)):
> x = os.path.join(pkgsettings["PKGDIR"], x)
> else:
>- print "\n\n!!! Binary package '"+str(x)+"' does not exist."
>+ print "\n\n!!! Binary package '" + str(x) + "' does not exist."
> print "!!! Please ensure the tbz2 exists as specified.\n"
> return 0, myfavorites
> mytbz2=portage.xpak.tbz2(x)
>- mykey=mytbz2.getelements("CATEGORY")[0]+"/"+os.path.splitext(os.path.basename(x))[0]
>+ mykey=mytbz2.getelements("CATEGORY")[0] + "/" + os.path.splitext(os.path.basename(x))[0]
> if os.path.realpath(x) != \
> os.path.realpath(self._frozen_config.trees[myroot]["bintree"].getname(mykey)):
> print colorize("BAD", "\n*** You need to adjust PKGDIR to emerge this package.\n")
>@@ -1285,7 +1285,7 @@
> ebuild_path = portage.util.normalize_path(os.path.abspath(x))
> pkgdir = os.path.dirname(ebuild_path)
> tree_root = os.path.dirname(os.path.dirname(pkgdir))
>- cp = pkgdir[len(tree_root)+1:]
>+ cp = pkgdir[len(tree_root) + 1:]
> e = portage.exception.PackageNotFound(
> ("%s is not in a valid portage tree " + \
> "hierarchy or does not exist") % x)
>@@ -1293,7 +1293,7 @@
> raise e
> cat = portage.catsplit(cp)[0]
> mykey = cat + "/" + os.path.basename(ebuild_path[:-7])
>- if not portage.isvalidatom("="+mykey):
>+ if not portage.isvalidatom("=" + mykey):
> raise e
> ebuild_path = portdb.findname(mykey)
> if ebuild_path:
>@@ -1872,7 +1872,7 @@
> need_disable = sorted(atom.use.disabled.intersection(use))
> if need_enable or need_disable:
> changes = []
>- changes.extend(colorize("red", "+" + x) \
>+ changes.extend(colorize("red", "+" + x) \
> for x in need_enable)
> changes.extend(colorize("blue", "-" + x) \
> for x in need_disable)
>@@ -1898,10 +1898,10 @@
> show_missing_use = unmasked_iuse_reasons
>
> if show_missing_use:
>- print "\nemerge: there are no ebuilds built with USE flags to satisfy "+green(xinfo)+"."
>+ print "\nemerge: there are no ebuilds built with USE flags to satisfy " + green(xinfo) + "."
> print "!!! One of the following packages is required to complete your request:"
> for pkg, mreasons in show_missing_use:
>- print "- "+pkg.cpv+" ("+", ".join(mreasons)+")"
>+ print "- " + pkg.cpv + " (" + ", ".join(mreasons) + ")"
>
> elif masked_packages:
> print "\n!!! " + \
>@@ -1922,7 +1922,7 @@
> print
> show_mask_docs()
> else:
>- print "\nemerge: there are no ebuilds to satisfy "+green(xinfo)+"."
>+ print "\nemerge: there are no ebuilds to satisfy " + green(xinfo) + "."
>
> # Show parent nodes and the argument that pulled them in. 
> traversed_nodes = set() >@@ -3827,7 +3827,7 @@ > continue > if depth >= last_merge_depth or \ > i < len(mylist) - 1 and \ >- depth >= mylist[i+1][1]: >+ depth >= mylist[i + 1][1]: > del mylist[i] > > from portage import flatten >@@ -3925,7 +3925,7 @@ > myinslotlist = None > installed_versions = vardb.match(portage.cpv_getkey(pkg_key)) > if vardb.cpv_exists(pkg_key): >- addl=" "+yellow("R")+fetch+" " >+ addl=" " + yellow("R") + fetch + " " > if ordered: > if pkg_merge: > counters.reinst += 1 >@@ -3948,7 +3948,7 @@ > if not portage.dep.cpvequal(pkg_key, > portage.best([pkg_key] + myoldbest)): > # Downgrade in slot >- addl += turquoise("U")+blue("D") >+ addl += turquoise("U") + blue("D") > if ordered: > counters.downgrades += 1 > else: >@@ -4021,10 +4021,10 @@ > ret[exp] = [] > forced[exp] = set() > for val in myvals[:]: >- if val.startswith(exp.lower()+"_"): >+ if val.startswith(exp.lower() + "_"): > if val in forced_flags: >- forced[exp].add(val[len(exp)+1:]) >- ret[exp].append(val[len(exp)+1:]) >+ forced[exp].add(val[len(exp) + 1:]) >+ ret[exp].append(val[len(exp) + 1:]) > myvals.remove(val) > ret["USE"] = myvals > forced["USE"] = [val for val in myvals \ >@@ -4157,7 +4157,7 @@ > if key[-3:] == "-r0": > key = key[:-3] > myoldbest[pos] = key >- myoldbest = blue("["+", ".join(myoldbest)+"]") >+ myoldbest = blue("[" + ", ".join(myoldbest) + "]") > > pkg_cp = xs[0] > root_config = self._frozen_config.roots[myroot] >@@ -4208,10 +4208,10 @@ > myoldbest +=" " > if "--columns" in self._frozen_config.myopts: > if "--quiet" in self._frozen_config.myopts: >- myprint=addl+" "+indent+pkgprint(pkg_cp) >- myprint=myprint+darkblue(" "+xs[1]+xs[2])+" " >- myprint=myprint+myoldbest >- myprint=myprint+darkgreen("to "+x[1]) >+ myprint=addl + " " + indent + pkgprint(pkg_cp) >+ myprint=myprint + darkblue(" " + xs[1] + xs[2]) + " " >+ myprint=myprint + myoldbest >+ myprint=myprint + darkgreen("to " + x[1]) > verboseadd = None > else: > if not pkg_merge: >@@ -4223,11 +4223,11 @@ > (pkgprint(pkg.type_name), addl, > indent, pkgprint(pkg.cp)) > if (newlp-nc_len(myprint)) > 0: >- myprint=myprint+(" "*(newlp-nc_len(myprint))) >- myprint=myprint+"["+darkblue(xs[1]+xs[2])+"] " >+ myprint=myprint + (" "*(newlp-nc_len(myprint))) >+ myprint=myprint + "[" + darkblue(xs[1] + xs[2]) + "] " > if (oldlp-nc_len(myprint)) > 0: >- myprint=myprint+" "*(oldlp-nc_len(myprint)) >- myprint=myprint+myoldbest >+ myprint=myprint + " "*(oldlp-nc_len(myprint)) >+ myprint=myprint + myoldbest > myprint += darkgreen("to " + pkg.root) > else: > if not pkg_merge: >@@ -4239,9 +4239,9 @@ > else: > if "--columns" in self._frozen_config.myopts: > if "--quiet" in self._frozen_config.myopts: >- myprint=addl+" "+indent+pkgprint(pkg_cp) >- myprint=myprint+" "+green(xs[1]+xs[2])+" " >- myprint=myprint+myoldbest >+ myprint=addl + " " + indent + pkgprint(pkg_cp) >+ myprint=myprint + " " + green(xs[1] + xs[2]) + " " >+ myprint=myprint + myoldbest > verboseadd = None > else: > if not pkg_merge: >@@ -4253,10 +4253,10 @@ > (pkgprint(pkg.type_name), addl, > indent, pkgprint(pkg.cp)) > if (newlp-nc_len(myprint)) > 0: >- myprint=myprint+(" "*(newlp-nc_len(myprint))) >- myprint=myprint+green(" ["+xs[1]+xs[2]+"] ") >+ myprint=myprint + (" "*(newlp-nc_len(myprint))) >+ myprint=myprint + green(" [" + xs[1] + xs[2] + "] ") > if (oldlp-nc_len(myprint)) > 0: >- myprint=myprint+(" "*(oldlp-nc_len(myprint))) >+ myprint=myprint + (" "*(oldlp-nc_len(myprint))) > myprint += myoldbest > else: > if not pkg_merge: >@@ -4315,7 +4315,7 @@ > if "--changelog" in 
self._frozen_config.myopts:
> print
> for revision,text in changelogs:
>- print bold('*'+revision)
>+ print bold('*' + revision)
> sys.stdout.write(text)
>
> sys.stdout.flush()
>@@ -4389,7 +4389,7 @@
> sys.stderr.write("\n!!! Problems have been " + \
> "detected with your world file\n")
> sys.stderr.write("!!! Please run " + \
>- green("emaint --check world")+"\n\n")
>+ green("emaint --check world") + "\n\n")
>
> if self._dynamic_config._missing_args:
> sys.stderr.write("\n" + colorize("BAD", "!!!") + \
>@@ -4552,7 +4552,7 @@
> masked_tasks.append(Dependency(root=pkg.root, parent=pkg))
> else:
> self._dynamic_config._unsatisfied_deps_for_display.append(
>- ((pkg.root, "="+pkg.cpv), {"myparent":None}))
>+ ((pkg.root, "=" + pkg.cpv), {"myparent":None}))
>
> fakedb[myroot].cpv_inject(pkg)
> serialized_tasks.append(pkg)
>@@ -5071,9 +5071,9 @@
> # above via mreasons.
> pass
>
>- print "- "+cpv+" (masked by: "+", ".join(mreasons)+")"
>+ print "- " + cpv + " (masked by: " + ", ".join(mreasons) + ")"
> if comment and comment not in shown_comments:
>- print filename+":"
>+ print filename + ":"
> print comment
> shown_comments.add(comment)
> portdb = root_config.trees["porttree"].dbapi
>@@ -5094,7 +5094,7 @@
>
> def filter_iuse_defaults(iuse):
> for flag in iuse:
>- if flag.startswith("+") or flag.startswith("-"):
>+ if flag.startswith("+") or flag.startswith("-"):
> yield flag[1:]
> else:
> yield flag
>Index: pym/_emerge/main.py
>===================================================================
>--- pym/_emerge/main.py (revision 13832)
>+++ pym/_emerge/main.py (working copy)
>@@ -100,7 +100,7 @@
> for z in infodirs:
> if z=='':
> continue
>- inforoot=normpath(root+z)
>+ inforoot=normpath(root + z)
> if os.path.isdir(inforoot):
> infomtime = long(os.stat(inforoot).st_mtime)
> if inforoot not in prev_mtimes or \
>@@ -154,7 +154,7 @@
> raise
> del e
> processed_count += 1
>- myso=commands.getstatusoutput("LANG=C LANGUAGE=C /usr/bin/install-info --dir-file="+inforoot+"/dir "+inforoot+"/"+x)[1]
>+ myso=commands.getstatusoutput("LANG=C LANGUAGE=C /usr/bin/install-info --dir-file=" + inforoot + "/dir " + inforoot + "/" + x)[1]
> existsstr="already exists, for file `"
> if myso!="":
> if re.search(existsstr,myso):
>@@ -166,9 +166,9 @@
> # Don't increment the count for this. 
> pass > else: >- badcount=badcount+1 >+ badcount=badcount + 1 > errmsg += myso + "\n" >- icount=icount+1 >+ icount=icount + 1 > > if moved_old_dir and not os.path.exists(dir_file): > # We didn't generate a new dir file, so put the old file >@@ -246,7 +246,7 @@ > consumers.append(c) > consumers.sort() > consumer_map[f] = consumers >- search_for_owners.update(consumers[:MAX_DISPLAY+1]) >+ search_for_owners.update(consumers[:MAX_DISPLAY + 1]) > > owners = vardbapi._owners.getFileOwnerMap(search_for_owners) > >@@ -733,7 +733,7 @@ > newargs = [] > for a in myfiles: > if a in ("system", "world"): >- newargs.append(SETPREFIX+a) >+ newargs.append(SETPREFIX + a) > else: > newargs.append(a) > myfiles = newargs >@@ -755,7 +755,7 @@ > end = x.find(ARG_END) > if start > 0 and start < end: > namepart = x[:start] >- argpart = x[start+1:end] >+ argpart = x[start + 1:end] > > # TODO: implement proper quoting > args = argpart.split(",") >@@ -767,12 +767,12 @@ > else: > options[a] = "True" > setconfig.update(namepart, options) >- newset += (x[:start-len(namepart)]+namepart) >- x = x[end+len(ARG_END):] >+ newset += (x[:start-len(namepart)] + namepart) >+ x = x[end + len(ARG_END):] > else: > newset += x > x = "" >- myfiles[i] = SETPREFIX+newset >+ myfiles[i] = SETPREFIX + newset > > sets = setconfig.getSets() > >@@ -831,7 +831,7 @@ > elif myaction not in do_not_expand: > newargs.extend(set_atoms) > else: >- newargs.append(SETPREFIX+s) >+ newargs.append(SETPREFIX + s) > for e in sets[s].errors: > print e > else: >@@ -1195,13 +1195,13 @@ > _emerge_log_dir = settings['EMERGE_LOG_DIR'] > > if not "--pretend" in myopts: >- emergelog(xterm_titles, "Started emerge on: "+\ >+ emergelog(xterm_titles, "Started emerge on: " + \ > time.strftime("%b %d, %Y %H:%M:%S", time.localtime())) > myelogstr="" > if myopts: > myelogstr=" ".join(myopts) > if myaction: >- myelogstr+=" "+myaction >+ myelogstr+=" " + myaction > if myfiles: > myelogstr += " " + " ".join(oldargs) > emergelog(xterm_titles, " *** emerge " + myelogstr) >@@ -1211,7 +1211,7 @@ > signal.signal(signal.SIGINT, signal.SIG_IGN) > signal.signal(signal.SIGTERM, signal.SIG_IGN) > portage.util.writemsg("\n\nExiting on signal %(signal)s\n" % {"signal":signum}) >- sys.exit(100+signum) >+ sys.exit(100 + signum) > signal.signal(signal.SIGINT, emergeexitsig) > signal.signal(signal.SIGTERM, emergeexitsig) > >Index: pym/_emerge/help.py >=================================================================== >--- pym/_emerge/help.py (revision 13832) >+++ pym/_emerge/help.py (working copy) >@@ -5,21 +5,21 @@ > from portage.output import bold, turquoise, green > > def shorthelp(): >- print bold("emerge:")+" the other white meat (command-line interface to the Portage system)" >+ print bold("emerge:") + " the other white meat (command-line interface to the Portage system)" > print bold("Usage:") >- print " "+turquoise("emerge")+" [ "+green("options")+" ] [ "+green("action")+" ] [ "+turquoise("ebuild")+" | "+turquoise("tbz2")+" | "+turquoise("file")+" | "+turquoise("@set")+" | "+turquoise("atom")+" ] [ ... 
]" >- print " "+turquoise("emerge")+" [ "+green("options")+" ] [ "+green("action")+" ] < "+turquoise("system")+" | "+turquoise("world")+" >" >- print " "+turquoise("emerge")+" < "+turquoise("--sync")+" | "+turquoise("--metadata")+" | "+turquoise("--info")+" >" >- print " "+turquoise("emerge")+" "+turquoise("--resume")+" [ "+green("--pretend")+" | "+green("--ask")+" | "+green("--skipfirst")+" ]" >- print " "+turquoise("emerge")+" "+turquoise("--help")+" [ "+green("--verbose")+" ] " >- print bold("Options:")+" "+green("-")+"["+green("abBcCdDefgGhjkKlnNoOpqPsStuvV")+"]" >- print " [ " + green("--color")+" < " + turquoise("y") + " | "+ turquoise("n")+" > ] [ "+green("--columns")+" ]" >- print " [ "+green("--complete-graph")+" ] [ "+green("--deep")+" ]" >- print " [ "+green("--jobs") + " " + turquoise("JOBS")+" ] [ "+green("--keep-going")+" ] [ " + green("--load-average")+" " + turquoise("LOAD") + " ]" >- print " [ "+green("--newuse")+" ] [ "+green("--noconfmem")+" ] [ "+green("--nospinner")+" ]" >- print " [ "+green("--oneshot")+" ] [ "+green("--onlydeps")+" ]" >- print " [ "+green("--reinstall ")+turquoise("changed-use")+" ] [ " + green("--with-bdeps")+" < " + turquoise("y") + " | "+ turquoise("n")+" > ]" >- print bold("Actions:")+" [ "+green("--depclean")+" | "+green("--list-sets")+" | "+green("--search")+" | "+green("--sync")+" | "+green("--version")+" ]" >+ print " " + turquoise("emerge") + " [ " + green("options") + " ] [ " + green("action") + " ] [ " + turquoise("ebuild") + " | " + turquoise("tbz2") + " | " + turquoise("file") + " | " + turquoise("@set") + " | " + turquoise("atom") + " ] [ ... ]" >+ print " " + turquoise("emerge") + " [ " + green("options") + " ] [ " + green("action") + " ] < " + turquoise("system") + " | " + turquoise("world") + " >" >+ print " " + turquoise("emerge") + " < " + turquoise("--sync") + " | " + turquoise("--metadata") + " | " + turquoise("--info") + " >" >+ print " " + turquoise("emerge") + " " + turquoise("--resume") + " [ " + green("--pretend") + " | " + green("--ask") + " | " + green("--skipfirst") + " ]" >+ print " " + turquoise("emerge") + " " + turquoise("--help") + " [ " + green("--verbose") + " ] " >+ print bold("Options:") + " " + green("-") + "[" + green("abBcCdDefgGhjkKlnNoOpqPsStuvV") + "]" >+ print " [ " + green("--color") + " < " + turquoise("y") + " | "+ turquoise("n") + " > ] [ " + green("--columns") + " ]" >+ print " [ " + green("--complete-graph") + " ] [ " + green("--deep") + " ]" >+ print " [ " + green("--jobs") + " " + turquoise("JOBS") + " ] [ " + green("--keep-going") + " ] [ " + green("--load-average") + " " + turquoise("LOAD") + " ]" >+ print " [ " + green("--newuse") + " ] [ " + green("--noconfmem") + " ] [ " + green("--nospinner") + " ]" >+ print " [ " + green("--oneshot") + " ] [ " + green("--onlydeps") + " ]" >+ print " [ " + green("--reinstall ") + turquoise("changed-use") + " ] [ " + green("--with-bdeps") + " < " + turquoise("y") + " | "+ turquoise("n") + " > ]" >+ print bold("Actions:") + " [ " + green("--depclean") + " | " + green("--list-sets") + " | " + green("--search") + " | " + green("--sync") + " | " + green("--version") + " ]" > > def help(myopts, havecolor=1): > # TODO: Implement a wrap() that accounts for console color escape codes. 
>@@ -35,12 +35,12 @@ > shorthelp() > print > print turquoise("Help (this screen):") >- print " "+green("--help")+" ("+green("-h")+" short option)" >+ print " " + green("--help") + " (" + green("-h") + " short option)" > print " Displays this help; an additional argument (see above) will tell" > print " emerge to display detailed help." > print > print turquoise("Actions:") >- print " "+green("--clean")+" ("+green("-c")+" short option)" >+ print " " + green("--clean") + " (" + green("-c") + " short option)" > print " Cleans the system by removing outdated packages which will not" > print " remove functionalities or prevent your system from working." > print " The arguments can be in several different formats :" >@@ -48,23 +48,23 @@ > print " * system or" > print " * 'dependency specification' (in single quotes is best.)" > print " Here are a few examples of the dependency specification format:" >- print " "+bold("binutils")+" matches" >+ print " " + bold("binutils") + " matches" > print " binutils-2.11.90.0.7 and binutils-2.11.92.0.12.3-r1" >- print " "+bold("sys-devel/binutils")+" matches" >+ print " " + bold("sys-devel/binutils") + " matches" > print " binutils-2.11.90.0.7 and binutils-2.11.92.0.12.3-r1" >- print " "+bold(">sys-devel/binutils-2.11.90.0.7")+" matches" >+ print " " + bold(">sys-devel/binutils-2.11.90.0.7") + " matches" > print " binutils-2.11.92.0.12.3-r1" >- print " "+bold(">=sys-devel/binutils-2.11.90.0.7")+" matches" >+ print " " + bold(">=sys-devel/binutils-2.11.90.0.7") + " matches" > print " binutils-2.11.90.0.7 and binutils-2.11.92.0.12.3-r1" >- print " "+bold("<=sys-devel/binutils-2.11.92.0.12.3-r1")+" matches" >+ print " " + bold("<=sys-devel/binutils-2.11.92.0.12.3-r1") + " matches" > print " binutils-2.11.90.0.7 and binutils-2.11.92.0.12.3-r1" > print >- print " "+green("--config") >+ print " " + green("--config") > print " Runs package-specific operations that must be executed after an" > print " emerge process has completed. This usually entails configuration" > print " file setup or other similar setups that the user may wish to run." > print >- print " "+green("--depclean") >+ print " " + green("--depclean") > > paragraph = "Cleans the system by removing packages that are " + \ > "not associated with explicitly merged packages. Depclean works " + \ >@@ -116,7 +116,7 @@ > for line in wrap(paragraph, desc_width): > print desc_indent + line > print >- print " "+green("--info") >+ print " " + green("--info") > print " Displays important portage variables that will be exported to" > print " ebuild.sh when performing merges. This information is useful" > print " for bug reports and verification of settings. All settings in" >@@ -129,7 +129,7 @@ > for line in wrap(paragraph, desc_width): > print desc_indent + line > print >- print " "+green("--metadata") >+ print " " + green("--metadata") > print " Transfers metadata cache from ${PORTDIR}/metadata/cache/ to" > print " /var/cache/edb/dep/ as is normally done on the tail end of an" > print " rsync update using " + bold("emerge --sync") + ". This process populates the" >@@ -138,8 +138,8 @@ > print " listed in PORTDIR_OVERLAY. In order to generate cache for" > print " overlays, use " + bold("--regen") + "." 
> print >- print " "+green("--prune")+" ("+green("-P")+" short option)" >- print " "+turquoise("WARNING: This action can remove important packages!") >+ print " " + green("--prune") + " (" + green("-P") + " short option)" >+ print " " + turquoise("WARNING: This action can remove important packages!") > paragraph = "Removes all but the highest installed version of a " + \ > "package from your system. Use --prune together with " + \ > "--verbose to show reverse dependencies or with --nodeps " + \ >@@ -148,7 +148,7 @@ > for line in wrap(paragraph, desc_width): > print desc_indent + line > print >- print " "+green("--regen") >+ print " " + green("--regen") > print " Causes portage to check and update the dependency cache of all" > print " ebuilds in the portage tree. This is not recommended for rsync" > print " users as rsync updates the cache using server-side caches." >@@ -159,7 +159,7 @@ > for line in wrap(desc, desc_width): > print desc_indent + line > print >- print " "+green("--resume") >+ print " " + green("--resume") > print " Resumes the most recent merge list that has been aborted due to an" > print " error. Please note that this operation will only return an error" > print " on failure. If there is nothing for portage to do, then portage" >@@ -170,7 +170,7 @@ > print " completes, it is possible to invoke --resume once again in order" > print " to resume an older list." > print >- print " "+green("--search")+" ("+green("-s")+" short option)" >+ print " " + green("--search") + " (" + green("-s") + " short option)" > print " Searches for matches of the supplied string in the current local" > print " portage tree. By default emerge uses a case-insensitive simple " > print " search, but you can enable a regular expression search by " >@@ -178,16 +178,16 @@ > print " Prepending the expression with a '@' will cause the category to" > print " be included in the search." > print " A few examples:" >- print " "+bold("emerge --search libc") >+ print " " + bold("emerge --search libc") > print " list all packages that contain libc in their name" >- print " "+bold("emerge --search '%^kde'") >+ print " " + bold("emerge --search '%^kde'") > print " list all packages starting with kde" >- print " "+bold("emerge --search '%gcc$'") >+ print " " + bold("emerge --search '%gcc$'") > print " list all packages ending with gcc" >- print " "+bold("emerge --search '%@^dev-java.*jdk'") >+ print " " + bold("emerge --search '%@^dev-java.*jdk'") > print " list all available Java JDKs" > print >- print " "+green("--searchdesc")+" ("+green("-S")+" short option)" >+ print " " + green("--searchdesc") + " (" + green("-S") + " short option)" > print " Matches the search string against the description field as well" > print " the package's name. Take caution as the descriptions are also" > print " matched as regular expressions." >@@ -195,7 +195,7 @@ > print " emerge -S applet" > print " emerge -S 'perl.*module'" > print >- print " "+green("--sync") >+ print " " + green("--sync") > print " Tells emerge to update the Portage tree as specified in" > print " The SYNC variable found in /etc/make.conf. By default, SYNC instructs" > print " emerge to perform an rsync-style update with rsync.gentoo.org." >@@ -204,39 +204,39 @@ > print " method to receive the entire portage tree as a tarball that can be" > print " extracted and used. First time syncs would benefit greatly from this." 
> print >- print " "+turquoise("WARNING:") >+ print " " + turquoise("WARNING:") > print " If using our rsync server, emerge will clean out all files that do not" > print " exist on it, including ones that you may have created. The exceptions" > print " to this are the distfiles, local and packages directories." > print >- print " "+green("--unmerge")+" ("+green("-C")+" short option)" >- print " "+turquoise("WARNING: This action can remove important packages!") >+ print " " + green("--unmerge") + " (" + green("-C") + " short option)" >+ print " " + turquoise("WARNING: This action can remove important packages!") > print " Removes all matching packages. This does no checking of" > print " dependencies, so it may remove packages necessary for the proper" > print " operation of your system. Its arguments can be atoms or" > print " ebuilds. For a dependency aware version of --unmerge, use" > print " --depclean or --prune." > print >- print " "+green("--update")+" ("+green("-u")+" short option)" >+ print " " + green("--update") + " (" + green("-u") + " short option)" > print " Updates packages to the best version available, which may not" > print " always be the highest version number due to masking for testing" > print " and development. Package atoms specified on the command" > print " line are greedy, meaning that unspecific atoms may match multiple" > print " installed versions of slotted packages." > print >- print " "+green("--version")+" ("+green("-V")+" short option)" >+ print " " + green("--version") + " (" + green("-V") + " short option)" > print " Displays the currently installed version of portage along with" > print " other information useful for quick reference on a system. See" >- print " "+bold("emerge info")+" for more advanced information." >+ print " " + bold("emerge info") + " for more advanced information." > print > print turquoise("Options:") >- print " "+green("--alphabetical") >+ print " " + green("--alphabetical") > print " When displaying USE and other flag output, combines the enabled" > print " and disabled flags into a single list and sorts it alphabetically." > print " With this option, output such as USE=\"dar -bar -foo\" will instead" > print " be displayed as USE=\"-bar dar -foo\"" > print >- print " "+green("--ask")+" ("+green("-a")+" short option)" >+ print " " + green("--ask") + " (" + green("-a") + " short option)" > print " before performing the merge, display what ebuilds and tbz2s will" > print " be installed, in the same format as when using --pretend; then" > print " ask whether to continue with the merge or abort. Using --ask is" >@@ -255,7 +255,7 @@ > for line in wrap(desc, desc_width): > print desc_indent + line > print >- print " "+green("--buildpkg")+" ("+green("-b")+" short option)" >+ print " " + green("--buildpkg") + " (" + green("-b") + " short option)" > desc = "Tells emerge to build binary packages for all ebuilds processed in" + \ > " addition to actually merging the packages. Useful for maintainers" + \ > " or if you administrate multiple Gentoo Linux systems (build once," + \ >@@ -267,27 +267,27 @@ > for line in wrap(desc, desc_width): > print desc_indent + line > print >- print " "+green("--buildpkgonly")+" ("+green("-B")+" short option)" >+ print " " + green("--buildpkgonly") + " (" + green("-B") + " short option)" > print " Creates a binary package, but does not merge it to the" > print " system. 
This has the restriction that unsatisfied dependencies" > print " must not exist for the desired package as they cannot be used if" > print " they do not exist on the system." > print >- print " "+green("--changelog")+" ("+green("-l")+" short option)" >+ print " " + green("--changelog") + " (" + green("-l") + " short option)" > print " When pretending, also display the ChangeLog entries for packages" > print " that will be upgraded." > print >- print " "+green("--color") + " < " + turquoise("y") + " | "+ turquoise("n")+" >" >+ print " " + green("--color") + " < " + turquoise("y") + " | "+ turquoise("n") + " >" > print " Enable or disable color output. This option will override NOCOLOR" > print " (see make.conf(5)) and may also be used to force color output when" > print " stdout is not a tty (by default, color is disabled unless stdout" > print " is a tty)." > print >- print " "+green("--columns") >+ print " " + green("--columns") > print " Display the pretend output in a tabular form. Versions are" > print " aligned vertically." > print >- print " "+green("--complete-graph") >+ print " " + green("--complete-graph") > desc = "This causes emerge to consider the deep dependencies of all" + \ > " packages from the system and world sets. With this option enabled," + \ > " emerge will bail out if it determines that the given operation will" + \ >@@ -300,20 +300,20 @@ > for line in wrap(desc, desc_width): > print desc_indent + line > print >- print " "+green("--config-root=DIR") >+ print " " + green("--config-root=DIR") > desc = "Set the PORTAGE_CONFIGROOT environment variable " + \ > "which is documented in the emerge(1) man page." > for line in wrap(desc, desc_width): > print desc_indent + line > print >- print " "+green("--debug")+" ("+green("-d")+" short option)" >+ print " " + green("--debug") + " (" + green("-d") + " short option)" > print " Tell emerge to run the ebuild command in --debug mode. In this" > print " mode, the bash build environment will run with the -x option," > print " causing it to output verbose debug information print to stdout." > print " --debug is great for finding bash syntax errors as providing" > print " very verbose information about the dependency and build process." > print >- print " "+green("--deep") + " " + turquoise("[DEPTH]") + \ >+ print " " + green("--deep") + " " + turquoise("[DEPTH]") + \ > " (" + green("-D") + " short option)" > print " This flag forces emerge to consider the entire dependency tree of" > print " packages, instead of checking only the immediate dependencies of" >@@ -322,35 +322,35 @@ > print " Also see --with-bdeps for behavior with respect to build time" > print " dependencies that are not strictly required." > print >- print " "+green("--emptytree")+" ("+green("-e")+" short option)" >+ print " " + green("--emptytree") + " (" + green("-e") + " short option)" > print " Virtually tweaks the tree of installed packages to contain" > print " nothing. This is great to use together with --pretend. This makes" > print " it possible for developers to get a complete overview of the" > print " complete dependency tree of a certain package." > print >- print " "+green("--fetchonly")+" ("+green("-f")+" short option)" >+ print " " + green("--fetchonly") + " (" + green("-f") + " short option)" > print " Instead of doing any package building, just perform fetches for" > print " all packages (main package as well as all dependencies.) 
When" > print " used in combination with --pretend all the SRC_URIs will be" > print " displayed multiple mirrors per line, one line per file." > print >- print " "+green("--fetch-all-uri")+" ("+green("-F")+" short option)" >+ print " " + green("--fetch-all-uri") + " (" + green("-F") + " short option)" > print " Same as --fetchonly except that all package files, including those" > print " not required to build the package, will be processed." > print >- print " "+green("--getbinpkg")+" ("+green("-g")+" short option)" >+ print " " + green("--getbinpkg") + " (" + green("-g") + " short option)" > print " Using the server and location defined in PORTAGE_BINHOST, portage" > print " will download the information from each binary file there and it" > print " will use that information to help build the dependency list. This" > print " option implies '-k'. (Use -gK for binary-only merging.)" > print >- print " "+green("--getbinpkgonly")+" ("+green("-G")+" short option)" >+ print " " + green("--getbinpkgonly") + " (" + green("-G") + " short option)" > print " This option is identical to -g, as above, except it will not use" > print " ANY information from the local machine. All binaries will be" > print " downloaded from the remote server without consulting packages" > print " existing in the packages directory." > print >- print " " + green("--jobs") + " " + turquoise("[JOBS]") + " ("+green("-j")+" short option)" >+ print " " + green("--jobs") + " " + turquoise("[JOBS]") + " (" + green("-j") + " short option)" > desc = "Specifies the number of packages " + \ > "to build simultaneously. If this option is " + \ > "given without an argument, emerge will not " + \ >@@ -360,7 +360,7 @@ > for line in wrap(desc, desc_width): > print desc_indent + line > print >- print " "+green("--keep-going") >+ print " " + green("--keep-going") > desc = "Continue as much as possible after " + \ > "an error. When an error occurs, " + \ > "dependencies are recalculated for " + \ >@@ -386,11 +386,11 @@ > for line in wrap(desc, desc_width): > print desc_indent + line > print >- print " "+green("--newuse")+" ("+green("-N")+" short option)" >+ print " " + green("--newuse") + " (" + green("-N") + " short option)" > print " Tells emerge to include installed packages where USE flags have " > print " changed since installation." > print >- print " "+green("--noconfmem") >+ print " " + green("--noconfmem") > print " Portage keeps track of files that have been placed into" > print " CONFIG_PROTECT directories, and normally it will not merge the" > print " same file more than once, as that would become annoying. This" >@@ -398,11 +398,11 @@ > print " of accidental deletion. With this option, files will always be" > print " merged to the live fs instead of silently dropped." > print >- print " "+green("--nodeps")+" ("+green("-O")+" short option)" >+ print " " + green("--nodeps") + " (" + green("-O") + " short option)" > print " Merge specified packages, but don't merge any dependencies." > print " Note that the build may fail if deps aren't satisfied." > print >- print " "+green("--noreplace")+" ("+green("-n")+" short option)" >+ print " " + green("--noreplace") + " (" + green("-n") + " short option)" > print " Skip the packages specified on the command-line that have" > print " already been installed. 
Without this option, any packages," > print " ebuilds, or deps you specify on the command-line *will* cause" >@@ -415,19 +415,19 @@ > for line in wrap(desc, desc_width): > print desc_indent + line > print >- print " "+green("--nospinner") >+ print " " + green("--nospinner") > print " Disables the spinner regardless of terminal type." > print >- print " "+green("--oneshot")+" ("+green("-1")+" short option)" >+ print " " + green("--oneshot") + " (" + green("-1") + " short option)" > print " Emerge as normal, but don't add packages to the world profile." > print " This package will only be updated if it is depended upon by" > print " another package." > print >- print " "+green("--onlydeps")+" ("+green("-o")+" short option)" >+ print " " + green("--onlydeps") + " (" + green("-o") + " short option)" > print " Only merge (or pretend to merge) the dependencies of the" > print " specified packages, not the packages themselves." > print >- print " "+green("--pretend")+" ("+green("-p")+" short option)" >+ print " " + green("--pretend") + " (" + green("-p") + " short option)" > print " Instead of actually performing the merge, simply display what" > print " ebuilds and tbz2s *would* have been installed if --pretend" > print " weren't used. Using --pretend is strongly recommended before" >@@ -435,26 +435,26 @@ > print " U = updating, R = replacing, F = fetch restricted, B = blocked" > print " by an already installed package, D = possible downgrading," > print " S = slotted install. --verbose causes affecting use flags to be" >- print " printed out accompanied by a '+' for enabled and a '-' for" >+ print " printed out accompanied by a ' + ' for enabled and a '-' for" > print " disabled USE flags." > print >- print " "+green("--quiet")+" ("+green("-q")+" short option)" >+ print " " + green("--quiet") + " (" + green("-q") + " short option)" > print " Effects vary, but the general outcome is a reduced or condensed" > print " output from portage's displays." > print >- print " "+green("--reinstall ") + turquoise("changed-use") >+ print " " + green("--reinstall ") + turquoise("changed-use") > print " Tells emerge to include installed packages where USE flags have" > print " changed since installation. Unlike --newuse, this option does" > print " not trigger reinstallation when flags that the user has not" > print " enabled are added or removed." > print >- print " "+green("--root=DIR") >+ print " " + green("--root=DIR") > desc = "Set the ROOT environment variable " + \ > "which is documented in the emerge(1) man page." > for line in wrap(desc, desc_width): > print desc_indent + line > print >- print " "+green("--root-deps[=rdeps]") >+ print " " + green("--root-deps[=rdeps]") > desc = "If no argument is given then build-time dependencies of packages for " + \ > "ROOT are installed to " + \ > "ROOT instead of /. If the rdeps argument is given then discard " + \ >@@ -473,7 +473,7 @@ > for line in wrap(desc, desc_width): > print desc_indent + line > print >- print " "+green("--skipfirst") >+ print " " + green("--skipfirst") > desc = "This option is only valid when " + \ > "used with --resume. It removes the " + \ > "first package in the resume list. " + \ >@@ -486,13 +486,13 @@ > for line in wrap(desc, desc_width): > print desc_indent + line > print >- print " "+green("--tree")+" ("+green("-t")+" short option)" >+ print " " + green("--tree") + " (" + green("-t") + " short option)" > print " Shows the dependency tree using indentation for dependencies." 
> print " The packages are also listed in reverse merge order so that" > print " a package's dependencies follow the package. Only really useful" > print " in combination with --emptytree, --update or --deep." > print >- print " "+green("--usepkg")+" ("+green("-k")+" short option)" >+ print " " + green("--usepkg") + " (" + green("-k") + " short option)" > print " Tell emerge to use binary packages (from $PKGDIR) if they are" > print " available, thus possibly avoiding some time-consuming compiles." > print " This option is useful for CD installs; you can export" >@@ -500,16 +500,16 @@ > print " emerge \"pull\" binary packages from the CD in order to satisfy" > print " dependencies." > print >- print " "+green("--usepkgonly")+" ("+green("-K")+" short option)" >+ print " " + green("--usepkgonly") + " (" + green("-K") + " short option)" > print " Like --usepkg above, except this only allows the use of binary" > print " packages, and it will abort the emerge if the package is not" > print " available at the time of dependency calculation." > print >- print " "+green("--verbose")+" ("+green("-v")+" short option)" >+ print " " + green("--verbose") + " (" + green("-v") + " short option)" > print " Effects vary, but the general outcome is an increased or expanded" > print " display of content in portage's displays." > print >- print " "+green("--with-bdeps")+" < " + turquoise("y") + " | "+ turquoise("n")+" >" >+ print " " + green("--with-bdeps") + " < " + turquoise("y") + " | "+ turquoise("n") + " >" > print " In dependency calculations, pull in build time dependencies that" > print " are not strictly required. This defaults to 'n' for installation" > print " actions and 'y' for the --depclean action. This setting can be" >Index: pym/_emerge/actions.py >=================================================================== >--- pym/_emerge/actions.py (revision 13832) >+++ pym/_emerge/actions.py (working copy) >@@ -513,7 +513,7 @@ > for pkg in pkgs: > idx += 1 > options.append(str(idx)) >- print options[-1]+") "+pkg >+ print options[-1] + ") " + pkg > print "X) Cancel" > options.append("X") > idx = userquery("Selection?", options) >@@ -523,7 +523,7 @@ > else: > print "The following packages available:" > for pkg in pkgs: >- print "* "+pkg >+ print "* " + pkg > print "\nPlease use a specific atom or the --ask option." > sys.exit(1) > else: >@@ -531,7 +531,7 @@ > > print > if "--ask" in myopts: >- if userquery("Ready to configure "+pkg+"?") == "No": >+ if userquery("Ready to configure " + pkg + "?") == "No": > sys.exit(0) > else: > print "Configuring pkg..." 
>@@ -624,11 +624,11 @@ > str(len(root_config.sets["world"].getAtoms())) > print "Packages in system: " + \ > str(len(root_config.sets["system"].getAtoms())) >- print "Required packages: "+str(req_pkg_count) >+ print "Required packages: " + str(req_pkg_count) > if "--pretend" in myopts: >- print "Number to remove: "+str(len(cleanlist)) >+ print "Number to remove: " + str(len(cleanlist)) > else: >- print "Number removed: "+str(len(cleanlist)) >+ print "Number removed: " + str(len(cleanlist)) > > def calc_depclean(settings, trees, ldpath_mtimes, > myopts, action, args_set, spinner): >@@ -1247,7 +1247,7 @@ > print header_width * "=" > print header_title.rjust(int(header_width/2 + len(header_title)/2)) > print header_width * "=" >- print "System uname: "+platform.platform(aliased=1) >+ print "System uname: " + platform.platform(aliased=1) > > lastSync = portage.grabfile(os.path.join( > settings["PORTDIR"], "metadata", "timestamp.chk")) >@@ -1275,7 +1275,7 @@ > > myvars = ["sys-devel/autoconf", "sys-devel/automake", "virtual/os-headers", > "sys-devel/binutils", "sys-devel/libtool", "dev-lang/python"] >- myvars += portage.util.grabfile(settings["PORTDIR"]+"/profiles/info_pkgs") >+ myvars += portage.util.grabfile(settings["PORTDIR"] + "/profiles/info_pkgs") > myvars = portage.util.unique_array(myvars) > myvars.sort() > >@@ -1292,9 +1292,9 @@ > pkgs.append(ver) > if pkgs: > pkgs = ", ".join(pkgs) >- print "%-20s %s" % (x+":", pkgs) >+ print "%-20s %s" % (x + ":", pkgs) > else: >- print "%-20s %s" % (x+":", "[NOT VALID]") >+ print "%-20s %s" % (x + ":", "[NOT VALID]") > > libtool_vers = ",".join(trees["/"]["vartree"].dbapi.match("sys-devel/libtool")) > >@@ -1306,7 +1306,7 @@ > 'PORTDIR_OVERLAY', 'USE', 'CHOST', 'CFLAGS', 'CXXFLAGS', > 'ACCEPT_KEYWORDS', 'SYNC', 'FEATURES', 'EMERGE_DEFAULT_OPTS'] > >- myvars.extend(portage.util.grabfile(settings["PORTDIR"]+"/profiles/info_vars")) >+ myvars.extend(portage.util.grabfile(settings["PORTDIR"] + "/profiles/info_vars")) > > myvars = portage.util.unique_array(myvars) > use_expand = settings.get('USE_EXPAND', '').split() >@@ -1339,7 +1339,7 @@ > else: > unset_vars.append(x) > if unset_vars: >- print "Unset: "+", ".join(unset_vars) >+ print "Unset: " + ", ".join(unset_vars) > print > > if "--debug" in myopts: >@@ -1829,7 +1829,7 @@ > "--whole-file", # Don't do block transfers, only entire files > "--delete", # Delete files that aren't in the master tree > "--stats", # Show final statistics about what was transfered >- "--timeout="+str(mytimeout), # IO timeout if not done in X seconds >+ "--timeout=" + str(mytimeout), # IO timeout if not done in X seconds > "--exclude=/distfiles", # Exclude distfiles from consideration > "--exclude=/local", # Exclude local from consideration > "--exclude=/packages", # Exclude packages from consideration >@@ -1892,7 +1892,7 @@ > if content: > try: > mytimestamp = time.mktime(time.strptime(content[0], >- "%a, %d %b %Y %H:%M:%S +0000")) >+ "%a, %d %b %Y %H:%M:%S + 0000")) > except (OverflowError, ValueError): > pass > del content >@@ -1971,7 +1971,7 @@ > sys.exit(0) > emergelog(xterm_titles, ">>> Starting rsync with " + dosyncuri) > if "--quiet" not in myopts: >- print ">>> Starting rsync with "+dosyncuri+"..." >+ print ">>> Starting rsync with " + dosyncuri + "..." 
> else: > emergelog(xterm_titles, > ">>> Starting retry %d of %d with %s" % \ >@@ -2075,7 +2075,7 @@ > exitcode = SERVER_OUT_OF_DATE > elif (servertimestamp == 0) or (servertimestamp > mytimestamp): > # actual sync >- mycommand = rsynccommand + [dosyncuri+"/", myportdir] >+ mycommand = rsynccommand + [dosyncuri + "/", myportdir] > exitcode = portage.process.spawn(mycommand, **spawn_kwargs) > if exitcode in [0,1,3,4,11,14,20,21]: > break >@@ -2088,7 +2088,7 @@ > # at least rsync protocol version 29 (>=rsync-2.6.4). > pass > >- retries=retries+1 >+ retries=retries + 1 > > if retries<=maxretries: > print ">>> Retrying..." >@@ -2138,11 +2138,11 @@ > sys.exit(1) > cvsroot=syncuri[6:] > cvsdir=os.path.dirname(myportdir) >- if not os.path.exists(myportdir+"/CVS"): >+ if not os.path.exists(myportdir + "/CVS"): > #initial checkout >- print ">>> Starting initial cvs checkout with "+syncuri+"..." >- if os.path.exists(cvsdir+"/gentoo-x86"): >- print "!!! existing",cvsdir+"/gentoo-x86 directory; exiting." >+ print ">>> Starting initial cvs checkout with " + syncuri + "..." >+ if os.path.exists(cvsdir + "/gentoo-x86"): >+ print "!!! existing",cvsdir + "/gentoo-x86 directory; exiting." > sys.exit(1) > try: > os.rmdir(myportdir) >@@ -2152,13 +2152,13 @@ > "!!! existing '%s' directory; exiting.\n" % myportdir) > sys.exit(1) > del e >- if portage.spawn("cd "+cvsdir+"; cvs -z0 -d "+cvsroot+" co -P gentoo-x86",settings,free=1): >+ if portage.spawn("cd " + cvsdir + "; cvs -z0 -d " + cvsroot + " co -P gentoo-x86",settings,free=1): > print "!!! cvs checkout error; exiting." > sys.exit(1) > os.rename(os.path.join(cvsdir, "gentoo-x86"), myportdir) > else: > #cvs update >- print ">>> Starting cvs update with "+syncuri+"..." >+ print ">>> Starting cvs update with " + syncuri + "..." > retval = portage.process.spawn_bash( > "cd %s; cvs -z0 -q update -dP" % \ > (portage._shell_quote(myportdir),), **spawn_kwargs) >@@ -2213,10 +2213,10 @@ > > if(mybestpv != mypvs) and not "--quiet" in myopts: > print >- print red(" * ")+bold("An update to portage is available.")+" It is _highly_ recommended" >- print red(" * ")+"that you update portage now, before any other packages are updated." >+ print red(" * ") + bold("An update to portage is available.") + " It is _highly_ recommended" >+ print red(" * ") + "that you update portage now, before any other packages are updated." > print >- print red(" * ")+"To update portage, run 'emerge portage' now." >+ print red(" * ") + "To update portage, run 'emerge portage' now." > print > > display_news_notification(root_config, myopts) >@@ -2448,16 +2448,16 @@ > for x in libclist: > xs=portage.catpkgsplit(x) > if libcver: >- libcver+=","+"-".join(xs[1:]) >+ libcver+="," + "-".join(xs[1:]) > else: > libcver="-".join(xs[1:]) > if libcver==[]: > libcver="unavailable" > > gccver = getgccversion(chost) >- unameout=platform.release()+" "+platform.machine() >+ unameout=platform.release() + " " + platform.machine() > >- return "Portage " + portage.VERSION +" ("+profilever+", "+gccver+", "+libcver+", "+unameout+")" >+ return "Portage " + portage.VERSION + " (" + profilever + ", " + gccver + ", " + libcver + ", " + unameout + ")" > > def git_sync_timestamps(settings, portdir): > """ >@@ -2658,7 +2658,7 @@ > del files[-1] > if files: > procount += 1 >- print "\n"+colorize("WARN", " * IMPORTANT:"), >+ print "\n" + colorize("WARN", " * IMPORTANT:"), > if stat.S_ISDIR(mymode): > print "%d config files in '%s' need updating." 
% \ > (len(files), x) >@@ -2666,9 +2666,9 @@ > print "config file '%s' needs updating." % x > > if procount: >- print " "+yellow("*")+" See the "+colorize("INFORM","CONFIGURATION FILES")+ \ >+ print " " + yellow("*") + " See the " + colorize("INFORM","CONFIGURATION FILES")+ \ > " section of the " + bold("emerge") >- print " "+yellow("*")+" man page to learn how to update config files." >+ print " " + yellow("*") + " man page to learn how to update config files." > > def display_news_notification(root_config, myopts): > target_root = root_config.root >Index: pym/_emerge/unmerge.py >=================================================================== >--- pym/_emerge/unmerge.py (revision 13832) >+++ pym/_emerge/unmerge.py (working copy) >@@ -113,7 +113,7 @@ > # it appears that the user is specifying an installed > # ebuild and we're in "unmerge" mode, so it's ok. > if not os.path.exists(x): >- print "\n!!! The path '"+x+"' doesn't exist.\n" >+ print "\n!!! The path '" + x + "' doesn't exist.\n" > return 0 > > absx = os.path.abspath(x) >@@ -130,8 +130,8 @@ > sp_vdb = vdb_path.split("/") > sp_vdb_len = len(sp_vdb) > >- if not os.path.exists(absx+"/CONTENTS"): >- print "!!! Not a valid db dir: "+str(absx) >+ if not os.path.exists(absx + "/CONTENTS"): >+ print "!!! Not a valid db dir: " + str(absx) > return 0 > > if sp_absx_len <= sp_vdb_len: >@@ -139,20 +139,20 @@ > print sp_absx > print absx > print "\n!!!",x,"cannot be inside "+ \ >- vdb_path+"; aborting.\n" >+ vdb_path + "; aborting.\n" > return 0 > > for idx in range(0,sp_vdb_len): > if idx >= sp_absx_len or sp_vdb[idx] != sp_absx[idx]: > print sp_absx > print absx >- print "\n!!!", x, "is not inside "+\ >- vdb_path+"; aborting.\n" >+ print "\n!!!", x, "is not inside " + \ >+ vdb_path + "; aborting.\n" > return 0 > >- print "="+"/".join(sp_absx[sp_vdb_len:]) >+ print "=" + "/".join(sp_absx[sp_vdb_len:]) > candidate_catpkgs.append( >- "="+"/".join(sp_absx[sp_vdb_len:])) >+ "=" + "/".join(sp_absx[sp_vdb_len:])) > > newline="" > if (not "--quiet" in myopts): >@@ -164,7 +164,7 @@ > > if (("--pretend" in myopts) or ("--ask" in myopts)) and \ > not ("--quiet" in myopts): >- writemsg_level(darkgreen(newline+\ >+ writemsg_level(darkgreen(newline + \ > ">>> These are the packages that would be unmerged:\n")) > > # Preservation of order is required for --depclean and --prune so >@@ -502,8 +502,8 @@ > > for x in xrange(len(pkgmap)): > for y in pkgmap[x]["selected"]: >- writemsg_level(">>> Unmerging "+y+"...\n", noiselevel=-1) >- emergelog(xterm_titles, "=== Unmerging... ("+y+")") >+ writemsg_level(">>> Unmerging " + y + "...\n", noiselevel=-1) >+ emergelog(xterm_titles, "=== Unmerging... (" + y + ")") > mysplit = y.split("/") > #unmerge... > retval = portage.unmerge(mysplit[0], mysplit[1], settings["ROOT"], >@@ -512,16 +512,16 @@ > scheduler=scheduler) > > if retval != os.EX_OK: >- emergelog(xterm_titles, " !!! unmerge FAILURE: "+y) >+ emergelog(xterm_titles, " !!! 
unmerge FAILURE: " + y) > if raise_on_error: > raise UninstallFailure(retval) > sys.exit(retval) > else: > if clean_world and hasattr(sets["world"], "cleanPackage"): > sets["world"].cleanPackage(vartree.dbapi, y) >- emergelog(xterm_titles, " >>> unmerge success: "+y) >+ emergelog(xterm_titles, " >>> unmerge success: " + y) > if clean_world and hasattr(sets["world"], "remove"): > for s in root_config.setconfig.active: >- sets["world"].remove(SETPREFIX+s) >+ sets["world"].remove(SETPREFIX + s) > return 1 > >Index: pym/_emerge/userquery.py >=================================================================== >--- pym/_emerge/userquery.py (revision 13832) >+++ pym/_emerge/userquery.py (working copy) >@@ -36,7 +36,7 @@ > print bold(prompt), > try: > while True: >- response=raw_input("["+"/".join([colours[i](responses[i]) for i in range(len(responses))])+"] ") >+ response=raw_input("[" + "/".join([colours[i](responses[i]) for i in range(len(responses))]) + "] ") > for key in responses: > # An empty response will match the first value in responses. > if response.upper()==key[:len(response)].upper(): >Index: pym/_emerge/search.py >=================================================================== >--- pym/_emerge/search.py (revision 13832) >+++ pym/_emerge/search.py (working copy) >@@ -50,7 +50,7 @@ > self.portdb = fake_portdb > for attrib in ("aux_get", "cp_all", > "xmatch", "findname", "getFetchMap"): >- setattr(fake_portdb, attrib, getattr(self, "_"+attrib)) >+ setattr(fake_portdb, attrib, getattr(self, "_" + attrib)) > > self._dbs = [] > >@@ -268,8 +268,8 @@ > > def output(self): > """Outputs the results of the search.""" >- print "\b\b \n[ Results for search key : "+white(self.searchkey)+" ]" >- print "[ Applications found : "+white(str(self.mlen))+" ]" >+ print "\b\b \n[ Results for search key : " + white(self.searchkey) + " ]" >+ print "[ Applications found : " + white(str(self.mlen)) + " ]" > print " " > vardb = self.vartree.dbapi > for mtype in self.matches: >@@ -288,8 +288,8 @@ > full_package = match > match = portage.cpv_getkey(match) > elif mtype == "set": >- print green("*")+" "+white(match) >- print " ", darkgreen("Description:")+" ", self.sdict[match].getMetadata("DESCRIPTION") >+ print green("*") + " " + white(match) >+ print " ", darkgreen("Description:") + " ", self.sdict[match].getMetadata("DESCRIPTION") > print > if full_package: > try: >@@ -299,9 +299,9 @@ > print "emerge: search: aux_get() failed, skipping" > continue > if masked: >- print green("*")+" "+white(match)+" "+red("[ Masked ]") >+ print green("*") + " " + white(match) + " " + red("[ Masked ]") > else: >- print green("*")+" "+white(match) >+ print green("*") + " " + white(match) > myversion = self.getVersion(full_package, search.VERSION_RELEASE) > > mysum = [0,0] >@@ -352,13 +352,13 @@ > if self.verbose: > if available: > print " ", darkgreen("Latest version available:"),myversion >- print " ", self.getInstallationStatus(mycat+'/'+mypkg) >+ print " ", self.getInstallationStatus(mycat + '/' + mypkg) > if myebuild: > print " %s %s" % \ > (darkgreen("Size of files:"), file_size_str) >- print " ", darkgreen("Homepage:")+" ",homepage >- print " ", darkgreen("Description:")+" ",desc >- print " ", darkgreen("License:")+" ",license >+ print " ", darkgreen("Homepage:") + " ",homepage >+ print " ", darkgreen("Description:") + " ",desc >+ print " ", darkgreen("License:") + " ",license > print > # > # private interface >@@ -368,9 +368,9 @@ > result = "" > version = self.getVersion(installed_package,search.VERSION_RELEASE) > if 
len(version) > 0: >- result = darkgreen("Latest version installed:")+" "+version >+ result = darkgreen("Latest version installed:") + " " + version > else: >- result = darkgreen("Latest version installed:")+" [ Not Installed ]" >+ result = darkgreen("Latest version installed:") + " [ Not Installed ]" > return result > > def getVersion(self,full_package,detail): >Index: pym/_emerge/stdout_spinner.py >=================================================================== >--- pym/_emerge/stdout_spinner.py (revision 13832) >+++ pym/_emerge/stdout_spinner.py (working copy) >@@ -10,7 +10,7 @@ > > class stdout_spinner(object): > scroll_msgs = [ >- "Gentoo Rocks ("+platform.system()+")", >+ "Gentoo Rocks (" + platform.system() + ")", > "Thank you for using Gentoo. :)", > "Are you actually trying to read this?", > "How many times have you stared at this?", >Index: pym/_emerge/format_size.py >=================================================================== >--- pym/_emerge/format_size.py (revision 13832) >+++ pym/_emerge/format_size.py (working copy) >@@ -14,6 +14,6 @@ > mycount=len(mystr) > while (mycount > 3): > mycount-=3 >- mystr=mystr[:mycount]+","+mystr[mycount:] >- return mystr+" kB" >+ mystr=mystr[:mycount] + "," + mystr[mycount:] >+ return mystr + " kB" > >Index: pym/portage_compat_namespace.py >=================================================================== >--- pym/portage_compat_namespace.py (revision 13832) >+++ pym/portage_compat_namespace.py (working copy) >@@ -23,7 +23,7 @@ > if __name__.startswith("portage_"): > __newname = __name__.replace("_", ".") > else: >- __newname = "portage."+__name__ >+ __newname = "portage." + __name__ > > try: > __package = __import__(__newname, globals(), locals()) >Index: bin/md5check.py >=================================================================== >--- bin/md5check.py (revision 13832) >+++ bin/md5check.py (working copy) >@@ -36,7 +36,7 @@ > newuri = newuri.split() > > digestpath = portage.db["/"]["porttree"].dbapi.findname(mycpv) >- digestpath = os.path.dirname(digestpath)+"/files/digest-"+pv >+ digestpath = os.path.dirname(digestpath) + "/files/digest-" + pv > md5sums = portage.digestParseFile(digestpath) > > if md5sums == None: >@@ -77,18 +77,18 @@ > > # This associates the md5 with each file. [md5/size] > md5joins = md5_list[mybn][2].split(",") >- md5joins = (" ["+md5_list[mybn][0]+"/"+md5_list[mybn][1]+"],").join(md5joins) >- md5joins += " ["+md5_list[mybn][0]+"/"+md5_list[mybn][1]+"]" >+ md5joins = (" [" + md5_list[mybn][0] + "/" + md5_list[mybn][1] + "],").join(md5joins) >+ md5joins += " [" + md5_list[mybn][0] + "/" + md5_list[mybn][1] + "]" > > portage.writemsg("Colliding md5: %s of %s [%s/%s] and %s\n" % (mybn,mycpv,md5sums[mybn][0],md5sums[mybn][1],md5joins)) > col_list += [mybn] > else: >- md5_list[mybn][2] += ","+mycpv >+ md5_list[mybn][2] += "," + mycpv > else: >- md5_list[mybn] = md5sums[mybn]+[mycpv] >+ md5_list[mybn] = md5sums[mybn] + [mycpv] > del md5sums[mybn] > >- #portage.writemsg(str(bn_list)+"\n") >+ #portage.writemsg(str(bn_list) + "\n") > for x in md5sums: > if x not in bn_list: > portage.writemsg("Extra md5sum: %s in %s\n" % (x, mycpv)) >@@ -96,5 +96,5 @@ > > print col_list > print >-print str(len(md5_list))+" unique distfile md5s." >-print str(len(bn_list))+" unique distfile names." >+print str(len(md5_list)) + " unique distfile md5s." >+print str(len(bn_list)) + " unique distfile names." 
>Index: bin/pemerge.py >=================================================================== >--- bin/pemerge.py (revision 13832) >+++ bin/pemerge.py (working copy) >@@ -1,13 +1,13 @@ > #!/usr/bin/python -O > > import profile,time,sys,os >-sys.path = ["/usr/lib/portage/bin","/usr/lib/portage/pym"]+sys.path >+sys.path = ["/usr/lib/portage/bin","/usr/lib/portage/pym"] + sys.path > > def clock(): > return time.time() > profile.time.clock = clock > >-profile.run("import emerge", os.getcwd()+"/prof") >+profile.run("import emerge", os.getcwd() + "/prof") > > class StatsProcesser: > def __init__(self, stats): >@@ -21,7 +21,7 @@ > for line in self.output: > spline = line.split() > if len(spline) == 6 and spline[0][0].isdigit(): >- func = spline[5][spline[5].index("(")+1:-1] >+ func = spline[5][spline[5].index("(") + 1:-1] > print line > if func not in funcs: > funcs.append(func)
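For reference, the whitespace normalization this patch applies by hand can also be sketched programmatically. The following is a minimal, hypothetical Python 3 sketch (not part of the patch, and not how this patch was produced) that uses the standard tokenize module to put spaces around binary "+" operators. Because it works on tokens rather than raw text, quoted material such as strftime formats ("+0000") and printed help strings is never rewritten; only genuine operator tokens are touched.

import io
import keyword
import tokenize

def space_binary_plus(source):
    """Return `source` with ' + ' around every binary '+' operator token."""
    lines = source.splitlines(keepends=True)
    edits = []          # (row, col) of each binary '+' token; rows are 1-based
    prev = None
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == tokenize.OP and tok.string == "+":
            # Simple heuristic: '+' is binary when it follows a value-like
            # token or a closing bracket; unary '+' (e.g. "+1") is left alone.
            if prev is not None and (
                    (prev.type == tokenize.NAME
                        and not keyword.iskeyword(prev.string))
                    or prev.type in (tokenize.NUMBER, tokenize.STRING)
                    or prev.string in (")", "]", "}")):
                edits.append(tok.start)
        if tok.type not in (tokenize.NL, tokenize.NEWLINE, tokenize.INDENT,
                            tokenize.DEDENT, tokenize.COMMENT):
            prev = tok
    # Apply edits right to left so earlier column offsets stay valid.
    for row, col in sorted(edits, reverse=True):
        line = lines[row - 1]
        lines[row - 1] = line[:col].rstrip() + " + " + line[col + 1:].lstrip()
    return "".join(lines)

if __name__ == "__main__":
    sample = 'tbz2name = mysplit[1]+".tbz2"  # fmt stays "+0000"\n'
    print(space_binary_plus(sample), end="")
    # -> tbz2name = mysplit[1] + ".tbz2"  # fmt stays "+0000"

Fed a whole file (for example space_binary_plus(open(path).read())), a token-aware pass like this should produce output in the spirit of this patch while leaving string literals and comments untouched; it is only an illustrative sketch, not a drop-in replacement for reviewing the diff by hand.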