Gentoo Websites Logo
Go to: Gentoo Home Documentation Forums Lists Bugs Planet Store Wiki Get Gentoo!
View | Details | Raw Unified | Return to bug 61881 | Differences between this patch
and another selected patch

Collapse All | Expand All

(-)portage.20040901.orig/pym/cvstree.py (-11 / +11 lines)
Lines 69-75 Link Here
69
				mylist.append(basedir+myfile)
69
				mylist.append(basedir+myfile)
70
	if recursive:
70
	if recursive:
71
		for mydir in entries["dirs"].keys():
71
		for mydir in entries["dirs"].keys():
72
			mylist+=findnew(entries["dirs"][mydir],recursive,basedir+mydir)
72
			mylist+=findnew(entries["dirs"][mydir],recursive=recursive,basedir=(basedir+mydir))
73
	return mylist
73
	return mylist
74
					
74
					
75
def findchanged(entries,recursive=0,basedir=""):
75
def findchanged(entries,recursive=0,basedir=""):
Lines 88-94 Link Here
88
						mylist.append(basedir+myfile)
88
						mylist.append(basedir+myfile)
89
	if recursive:
89
	if recursive:
90
		for mydir in entries["dirs"].keys():
90
		for mydir in entries["dirs"].keys():
91
			mylist+=findchanged(entries["dirs"][mydir],recursive,basedir+mydir)
91
			mylist+=findchanged(entries["dirs"][mydir],recursive=recursive,basedir=(basedir+mydir))
92
	return mylist
92
	return mylist
93
	
93
	
94
def findmissing(entries,recursive=0,basedir=""):
94
def findmissing(entries,recursive=0,basedir=""):
Lines 106-112 Link Here
106
					mylist.append(basedir+myfile)
106
					mylist.append(basedir+myfile)
107
	if recursive:
107
	if recursive:
108
		for mydir in entries["dirs"].keys():
108
		for mydir in entries["dirs"].keys():
109
			mylist+=findmissing(entries["dirs"][mydir],recursive,basedir+mydir)
109
			mylist+=findmissing(entries["dirs"][mydir],recursive=recursive,basedir=(basedir+mydir))
110
	return mylist
110
	return mylist
111
111
112
def findunadded(entries,recursive=0,basedir=""):
112
def findunadded(entries,recursive=0,basedir=""):
Lines 124-130 Link Here
124
			mylist.append(basedir+myfile)
124
			mylist.append(basedir+myfile)
125
	if recursive:
125
	if recursive:
126
		for mydir in entries["dirs"].keys():
126
		for mydir in entries["dirs"].keys():
127
			mylist+=findunadded(entries["dirs"][mydir],recursive,basedir+mydir)
127
			mylist+=findunadded(entries["dirs"][mydir],recursive=recursive,basedir=(basedir+mydir))
128
	return mylist
128
	return mylist
129
129
130
def findremoved(entries,recursive=0,basedir=""):
130
def findremoved(entries,recursive=0,basedir=""):
Lines 139-145 Link Here
139
			mylist.append(basedir+myfile)
139
			mylist.append(basedir+myfile)
140
	if recursive:
140
	if recursive:
141
		for mydir in entries["dirs"].keys():
141
		for mydir in entries["dirs"].keys():
142
			mylist+=findremoved(entries["dirs"][mydir],recursive,basedir+mydir)
142
			mylist+=findremoved(entries["dirs"][mydir],recursive=recursive,basedir=(basedir+mydir))
143
	return mylist
143
	return mylist
144
144
145
def findall(entries, recursive=0, basedir=""):
145
def findall(entries, recursive=0, basedir=""):
Lines 149-159 Link Here
149
149
150
	if basedir and basedir[-1]!="/":
150
	if basedir and basedir[-1]!="/":
151
		basedir=basedir+"/"
151
		basedir=basedir+"/"
152
	mynew     = findnew(entries,recursive,basedir)
152
	mynew     = findnew(entries,recursive=recursive,basedir=basedir)
153
	mychanged = findchanged(entries,recursive,basedir)
153
	mychanged = findchanged(entries,recursive=recursive,basedir=basedir)
154
	mymissing = findmissing(entries,recursive,basedir)
154
	mymissing = findmissing(entries,recursive=recursive,basedir=basedir)
155
	myunadded = findunadded(entries,recursive,basedir)
155
	myunadded = findunadded(entries,recursive=recursive,basedir=basedir)
156
	myremoved = findremoved(entries,recursive,basedir)
156
	myremoved = findremoved(entries,recursive=recursive,basedir=basedir)
157
	return [mynew, mychanged, mymissing, myunadded, myremoved]
157
	return [mynew, mychanged, mymissing, myunadded, myremoved]
158
158
159
ignore_list = re.compile("(^|/)(RCS(|LOG)|SCCS|CVS(|\.adm)|cvslog\..*|tags|TAGS|\.(make\.state|nse_depinfo)|.*~|(\.|)#.*|,.*|_$.*|.*\$|\.del-.*|.*\.(old|BAK|bak|orig|rej|a|olb|o|obj|so|exe|Z|elc|ln)|core)$")
159
ignore_list = re.compile("(^|/)(RCS(|LOG)|SCCS|CVS(|\.adm)|cvslog\..*|tags|TAGS|\.(make\.state|nse_depinfo)|.*~|(\.|)#.*|,.*|_$.*|.*\$|\.del-.*|.*\.(old|BAK|bak|orig|rej|a|olb|o|obj|so|exe|Z|elc|ln)|core)$")
Lines 199-205 Link Here
199
				entries["dirs"][mysplit[1]]["status"]+=["exists"]
199
				entries["dirs"][mysplit[1]]["status"]+=["exists"]
200
				entries["dirs"][mysplit[1]]["flags"]=mysplit[2:]
200
				entries["dirs"][mysplit[1]]["flags"]=mysplit[2:]
201
				if recursive:
201
				if recursive:
202
					rentries=getentries(mydir+"/"+mysplit[1],recursive)
202
					rentries=getentries(mydir+"/"+mysplit[1],recursive=recursive)
203
					#print rentries.keys()
203
					#print rentries.keys()
204
					#print entries["files"].keys()
204
					#print entries["files"].keys()
205
					#print entries["files"][mysplit[1]]
205
					#print entries["files"][mysplit[1]]
(-)portage.20040901.orig/pym/dcdialog.py (-2 / +2 lines)
Lines 395-401 Link Here
395
    d.scrollbox(bigMessage)
395
    d.scrollbox(bigMessage)
396
396
397
    #<>#  Gauge Demo
397
    #<>#  Gauge Demo
398
    d.gauge_start(0, 'percentage: 0', title='Gauge Demo')
398
    d.gauge_start(perc=0, text='percentage: 0', title='Gauge Demo')
399
    for i in range(1, 101):
399
    for i in range(1, 101):
400
	if i < 50:
400
	if i < 50:
401
	    msg = 'percentage: %d' % i
401
	    msg = 'percentage: %d' % i
Lines 403-409 Link Here
403
	    msg = 'Over 50%'
403
	    msg = 'Over 50%'
404
	else:
404
	else:
405
	    msg = ''
405
	    msg = ''
406
	d.gauge_iterate(i, msg)
406
	d.gauge_iterate(i, text=msg)
407
	sleep(0.1)
407
	sleep(0.1)
408
    d.gauge_stop()
408
    d.gauge_stop()
409
    #<>#
409
    #<>#
(-)portage.20040901.orig/pym/getbinpkg.py (-12 / +12 lines)
Lines 266-276 Link Here
266
	else:
266
	else:
267
		keepconnection = 1
267
		keepconnection = 1
268
268
269
	conn,protocol,address,params,headers = create_conn(baseurl, conn)
269
	conn,protocol,address,params,headers = create_conn(baseurl, conn=conn)
270
270
271
	listing = None
271
	listing = None
272
	if protocol in ["http","https"]:
272
	if protocol in ["http","https"]:
273
		page,rc,msg = make_http_request(conn,address,params,headers)
273
		page,rc,msg = make_http_request(conn,address,params=params,headers=headers)
274
		
274
		
275
		if page:
275
		if page:
276
			parser = ParseLinks()
276
			parser = ParseLinks()
Lines 306-316 Link Here
306
	else:
306
	else:
307
		keepconnection = 1
307
		keepconnection = 1
308
308
309
	conn,protocol,address,params,headers = create_conn(baseurl, conn)
309
	conn,protocol,address,params,headers = create_conn(baseurl, conn=conn)
310
310
311
	if protocol in ["http","https"]:
311
	if protocol in ["http","https"]:
312
		headers["Range"] = "bytes=-"+str(chunk_size)
312
		headers["Range"] = "bytes=-"+str(chunk_size)
313
		data,rc,msg = make_http_request(conn, address, params, headers)
313
		data,rc,msg = make_http_request(conn, address, params=params, headers=headers)
314
	elif protocol in ["ftp"]:
314
	elif protocol in ["ftp"]:
315
		data,rc,msg = make_ftp_request(conn, address, -chunk_size)
315
		data,rc,msg = make_ftp_request(conn, address, -chunk_size)
316
	else:
316
	else:
Lines 319-325 Link Here
319
	if data:
319
	if data:
320
		xpaksize = xpak.decodeint(data[-8:-4])
320
		xpaksize = xpak.decodeint(data[-8:-4])
321
		if (xpaksize+8) > chunk_size:
321
		if (xpaksize+8) > chunk_size:
322
			myid = file_get_metadata(baseurl, conn, (xpaksize+8))
322
			myid = file_get_metadata(baseurl, conn=conn, chunk_size=(xpaksize+8))
323
			if not keepconnection:
323
			if not keepconnection:
324
				conn.close()
324
				conn.close()
325
			return myid
325
			return myid
Lines 345-351 Link Here
345
	URL should be in the for <proto>://[user[:pass]@]<site>[:port]<path>"""
345
	URL should be in the for <proto>://[user[:pass]@]<site>[:port]<path>"""
346
346
347
	if not fcmd:
347
	if not fcmd:
348
		return file_get_lib(baseurl,dest,conn)
348
		return file_get_lib(baseurl,dest,conn=conn)
349
349
350
	fcmd = string.replace(fcmd, "${DISTDIR}", dest)
350
	fcmd = string.replace(fcmd, "${DISTDIR}", dest)
351
	fcmd = string.replace(fcmd, "${URI}", baseurl)
351
	fcmd = string.replace(fcmd, "${URI}", baseurl)
Lines 378-388 Link Here
378
	else:
378
	else:
379
		keepconnection = 1
379
		keepconnection = 1
380
380
381
	conn,protocol,address,params,headers = create_conn(baseurl, conn)
381
	conn,protocol,address,params,headers = create_conn(baseurl, conn=conn)
382
382
383
	sys.stderr.write("Fetching '"+str(os.path.basename(address)+"'\n"))
383
	sys.stderr.write("Fetching '"+str(os.path.basename(address)+"'\n"))
384
	if protocol in ["http","https"]:
384
	if protocol in ["http","https"]:
385
		data,rc,msg = make_http_request(conn, address, params, headers, dest=dest)
385
		data,rc,msg = make_http_request(conn, address, params=params, headers=headers, dest=dest)
386
	elif protocol in ["ftp"]:
386
	elif protocol in ["ftp"]:
387
		data,rc,msg = make_ftp_request(conn, address, dest=dest)
387
		data,rc,msg = make_ftp_request(conn, address, dest=dest)
388
	else:
388
	else:
Lines 405-411 Link Here
405
	if makepickle == None:
405
	if makepickle == None:
406
		makepickle = "/var/cache/edb/metadata.idx.most_recent"
406
		makepickle = "/var/cache/edb/metadata.idx.most_recent"
407
407
408
	conn,protocol,address,params,headers = create_conn(baseurl, conn)
408
	conn,protocol,address,params,headers = create_conn(baseurl, conn=conn)
409
409
410
	filedict = {}
410
	filedict = {}
411
411
Lines 427-433 Link Here
427
	if not metadata[baseurl].has_key("data"):
427
	if not metadata[baseurl].has_key("data"):
428
		metadata[baseurl]["data"]={}
428
		metadata[baseurl]["data"]={}
429
429
430
	filelist = dir_get_list(baseurl, conn)
430
	filelist = dir_get_list(baseurl, conn=conn)
431
	tbz2list = match_in_array(filelist, suffix=".tbz2")
431
	tbz2list = match_in_array(filelist, suffix=".tbz2")
432
	metalist = match_in_array(filelist, prefix="metadata.idx")
432
	metalist = match_in_array(filelist, prefix="metadata.idx")
433
	del filelist
433
	del filelist
Lines 445-451 Link Here
445
			for trynum in [1,2,3]:
445
			for trynum in [1,2,3]:
446
				mytempfile = tempfile.TemporaryFile()
446
				mytempfile = tempfile.TemporaryFile()
447
				try:
447
				try:
448
					file_get(baseurl+"/"+mfile, mytempfile, conn)
448
					file_get(baseurl+"/"+mfile, mytempfile, conn=conn)
449
					if mytempfile.tell() > len(data):
449
					if mytempfile.tell() > len(data):
450
						mytempfile.seek(0)
450
						mytempfile.seek(0)
451
						data = mytempfile.read()
451
						data = mytempfile.read()
Lines 493-499 Link Here
493
		    (x not in metadata[baseurl]["data"].keys())):
493
		    (x not in metadata[baseurl]["data"].keys())):
494
			sys.stderr.write(yellow("x"))
494
			sys.stderr.write(yellow("x"))
495
			metadata[baseurl]["modified"] = 1
495
			metadata[baseurl]["modified"] = 1
496
			myid = file_get_metadata(baseurl+"/"+x, conn, chunk_size)
496
			myid = file_get_metadata(baseurl+"/"+x, conn=conn, chunk_size=chunk_size)
497
		
497
		
498
			if myid[0]:
498
			if myid[0]:
499
				metadata[baseurl]["data"][x] = make_metadata_dict(myid)
499
				metadata[baseurl]["data"][x] = make_metadata_dict(myid)
(-)portage.20040901.orig/pym/portage.py (-1 / +1 lines)
Lines 2369-2375 Link Here
2369
	if mydo=="manifest":
2369
	if mydo=="manifest":
2370
		return (not digestgen(checkme,mysettings,overwrite=1,manifestonly=1))
2370
		return (not digestgen(checkme,mysettings,overwrite=1,manifestonly=1))
2371
	
2371
	
2372
	if not digestcheck(checkme, mysettings, ("strict" in features)):
2372
	if not digestcheck(checkme, mysettings, strict=("strict" in features)):
2373
		return 1
2373
		return 1
2374
	
2374
	
2375
	if mydo=="fetch":
2375
	if mydo=="fetch":
(-)portage.20040901.orig/pym/portage_checksum.py (-2 / +2 lines)
Lines 20-29 Link Here
20
	del results
20
	del results
21
21
22
def perform_md5(x, calc_prelink=0):
22
def perform_md5(x, calc_prelink=0):
23
	return perform_checksum(x, md5hash, calc_prelink)[0]
23
	return perform_checksum(x, hash_function=md5hash, calc_prelink=calc_prelink)[0]
24
24
25
def perform_sha1(x, calc_prelink=0):
25
def perform_sha1(x, calc_prelink=0):
26
	return perform_checksum(x, sha1hash, calc_prelink)[0]
26
	return perform_checksum(x, hash_function=sha1hash, calc_prelink=calc_prelink)[0]
27
27
28
# We _try_ to load this module. If it fails we do the slow fallback.
28
# We _try_ to load this module. If it fails we do the slow fallback.
29
try:
29
try:
(-)portage.20040901.orig/pym/portage_dep.py (-3 / +3 lines)
Lines 38-44 Link Here
38
			return mylist,mystr[1:]
38
			return mylist,mystr[1:]
39
		elif ("(" in mystr) and (mystr.index("(") < mystr.index(")")):
39
		elif ("(" in mystr) and (mystr.index("(") < mystr.index(")")):
40
			freesec,subsec = mystr.split("(",1)
40
			freesec,subsec = mystr.split("(",1)
41
			subsec,tail = paren_reduce(subsec,tokenize)
41
			subsec,tail = paren_reduce(subsec,tokenize=tokenize)
42
		else:
42
		else:
43
			subsec,tail = mystr.split(")",1)
43
			subsec,tail = mystr.split(")",1)
44
			if tokenize:
44
			if tokenize:
Lines 82-88 Link Here
82
		head = mydeparray.pop(0)
82
		head = mydeparray.pop(0)
83
83
84
		if type(head) == types.ListType:
84
		if type(head) == types.ListType:
85
			rlist = rlist + [use_reduce(head, uselist, masklist, matchall, excludeall)]
85
			rlist = rlist + [use_reduce(head, uselist=uselist, masklist=masklist, matchall=matchall, excludeall=excludeall)]
86
86
87
		else:
87
		else:
88
			if head[-1] == "?": # Use reduce next group on fail.
88
			if head[-1] == "?": # Use reduce next group on fail.
Lines 125-131 Link Here
125
				if ismatch:
125
				if ismatch:
126
					target = newdeparray[-1]
126
					target = newdeparray[-1]
127
					if isinstance(target, list):
127
					if isinstance(target, list):
128
						rlist += [use_reduce(target, uselist, masklist, matchall, excludeall)]
128
						rlist += [use_reduce(target, uselist=uselist, masklist=masklist, matchall=matchall, excludeall=excludeall)]
129
					else:
129
					else:
130
						rlist += [target]
130
						rlist += [target]
131
131
(-)portage.20040901.orig/pym/portage_gpg.py (-1 / +1 lines)
Lines 65-71 Link Here
65
65
66
		self.keyringStats = fileStats(keyringPath)
66
		self.keyringStats = fileStats(keyringPath)
67
		self.minimumTrust = TRUSTED
67
		self.minimumTrust = TRUSTED
68
		if not self.verify(keyringPath, keyringPath+".asc"):
68
		if not self.verify(keyringPath, sigfile=(keyringPath+".asc")):
69
			self.keyringIsTrusted = False
69
			self.keyringIsTrusted = False
70
			if requireSignedRing:
70
			if requireSignedRing:
71
				raise portage_exception.InvalidSignature, "Required keyring verification: "+keyringPath
71
				raise portage_exception.InvalidSignature, "Required keyring verification: "+keyringPath
(-)portage.20040901.orig/pym/portage_locks.py (-1 / +1 lines)
Lines 77-83 Link Here
77
		# The file was deleted on us... Keep trying to make one...
77
		# The file was deleted on us... Keep trying to make one...
78
		os.close(myfd)
78
		os.close(myfd)
79
		portage_util.writemsg("lockfile recurse\n",1)
79
		portage_util.writemsg("lockfile recurse\n",1)
80
		lockfilename,myfd,unlinkfile = lockfile(mypath,wantnewlockfile,unlinkfile)
80
		lockfilename,myfd,unlinkfile = lockfile(mypath,wantnewlockfile=wantnewlockfile,unlinkfile=unlinkfile)
81
81
82
	portage_util.writemsg(str((lockfilename,myfd,unlinkfile))+"\n",1)
82
	portage_util.writemsg(str((lockfilename,myfd,unlinkfile))+"\n",1)
83
	return (lockfilename,myfd,unlinkfile)
83
	return (lockfilename,myfd,unlinkfile)
(-)portage.20040901.orig/pym/portage_util.py (-2 / +2 lines)
Lines 139-145 Link Here
139
	return newdict
139
	return newdict
140
140
141
def grabdict_package(myfilename,juststrings=0):
141
def grabdict_package(myfilename,juststrings=0):
142
	pkgs=grabdict(myfilename, juststrings, empty=1)
142
	pkgs=grabdict(myfilename, juststrings=juststrings, empty=1)
143
	for x in pkgs.keys():
143
	for x in pkgs.keys():
144
		if not isvalidatom(x):
144
		if not isvalidatom(x):
145
			del(pkgs[x])
145
			del(pkgs[x])
Lines 233-239 Link Here
233
				raise Exception("ParseError: Unexpected EOF: "+str(mycfg)+": line "+str(lex.lineno))
233
				raise Exception("ParseError: Unexpected EOF: "+str(mycfg)+": line "+str(lex.lineno))
234
			else:
234
			else:
235
				return mykeys
235
				return mykeys
236
		mykeys[key]=varexpand(val,mykeys)
236
		mykeys[key]=varexpand(val,mydict=mykeys)
237
	return mykeys
237
	return mykeys
238
238
239
#cache expansions of constant strings
239
#cache expansions of constant strings
(-)portage.20040901.orig/pym/xpak.py (-1 / +1 lines)
Lines 242-248 Link Here
242
		return self.unpackinfo(datadir)
242
		return self.unpackinfo(datadir)
243
	def compose(self,datadir,cleanup=0):
243
	def compose(self,datadir,cleanup=0):
244
		"""Alias for recompose()."""
244
		"""Alias for recompose()."""
245
		return recompose(datadir,cleanup)
245
		return recompose(datadir,cleanup=cleanup)
246
	def recompose(self,datadir,cleanup=0):
246
	def recompose(self,datadir,cleanup=0):
247
		"""Creates an xpak segment from the datadir provided, truncates the tbz2
247
		"""Creates an xpak segment from the datadir provided, truncates the tbz2
248
		to the end of regular data if an xpak segment already exists, and adds
248
		to the end of regular data if an xpak segment already exists, and adds

Return to bug 61881