vartree.py (-309 / +422 lines)

--- vartree.py
+++ vartree.py
Lines 96-101
 import textwrap
 import time
 import warnings
+import operator
 
 try:
 	import cPickle as pickle
Lines 202-207
 
 		self._cached_counter = None
 
+		self._content_files = [
+				("CONTENTS_DIGESTS_SHA512", 128+1),
+				("CONTENTS_DIGESTS_SHA1", 40+1),
+				("CONTENTS_DIGESTS_SHA256", 64+1),
+				("CONTENTS_MODES", 4+1),
+				("CONTENTS_ATTRS_PAX", 5+1),
+				("CONTENTS_ATTRS_CAPS", 16+1)
+			]
+
 	@property
 	def writable(self):
 		"""
Lines 1069-1087
 
 	def removeFromContents(self, pkg, paths, relative_paths=True):
 		"""
+		Remove installed files from contents and its metadata files.
+		Typically during an unmerge, so that libraries needed by other
+		packages are spared from the unmerge.
+
 		@param pkg: cpv for an installed package
 		@type pkg: string
 		@param paths: paths of files to remove from contents
 		@type paths: iterable
 		"""
+
+		# Grab a copy of contents and its metadata files.
 		if not hasattr(pkg, "getcontents"):
 			pkg = self._dblink(pkg)
 		root = self.settings['ROOT']
 		root_len = len(root) - 1
 		new_contents = pkg.getcontents().copy()
+		new_contents_index = pkg.getContentsIndices().copy()
+		new_contents_metadata = pkg.getContentsMetadata().copy()
 		removed = 0
 
-		self.startContentsRemoval(pkg.dbdir)
+		# Remove installed files from contents and its metadata files.
 		for filename in paths:
 			filename = _unicode_decode(filename,
 				encoding=_encodings['content'], errors='strict')
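
A hedged usage sketch for the API above, assuming a vardbapi instance named
vardb and an installed cpv (both hypothetical, not taken from the patch):

	# Spare a preserved library during an unmerge; the paths passed here
	# are absolute, hence relative_paths=False.
	vardb.removeFromContents("sys-libs/zlib-1.2.11",
		["/usr/lib64/libz.so.1"], relative_paths=False)
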
Lines 1091-1119
 			else:
 				relative_filename = filename[root_len:]
 			contents_key = pkg._match_contents(relative_filename)
-			index = -1
-			try:
-				index = list(new_contents).index(filename)+1
-			except ValueError:
-				print("List does not contain value")
-			if contents_key and index >= 0:
+			if contents_key:
 				# It's possible for two different paths to refer to the same
 				# contents_key, due to directory symlinks. Therefore, pass a
 				# default value to pop, in order to avoid a KeyError which
 				# could otherwise be triggered (see bug #454400).
 				new_contents.pop(contents_key, None)
-				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA512")
-				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA1")
-				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA256")
-				self.removeFromContentsMeta(pkg.dbdir, index, "MODES")
-				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_PAX")
-				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_CAPS")
+				new_contents_metadata.pop(contents_key, None)
 				removed += 1
 
-		self.stopContentsRemoval(pkg.dbdir)
 		if removed:
 			# Also remove corresponding NEEDED lines, so that they do
-			# no corrupt LinkageMap data for preserve-libs.
+			# not corrupt LinkageMap data for preserve-libs.
 			needed_filename = os.path.join(pkg.dbdir, LinkageMap._needed_aux_key)
 			new_needed = None
 			try:
Lines 1142-1361
 					if filename in new_contents:
 						new_needed.append(entry)
 
-			self.writeContentsToContentsFile(pkg, new_contents, new_needed=new_needed)
+			# Write new contents files and clear CONTENTS cache
+			pkg.writeContentsToContentsFile(new_contents, new_contents_metadata, new_needed=new_needed)
 
-	def startContentsRemoval(self, vdbdir):
-		contents_dir = os.path.join(vdbdir, "contents.d")
-		transaction_dir = os.path.join(vdbdir, "contents.d~")
-		manifest_file = os.path.join(contents_dir, "Manifest")
-		manifest_lines = ""
-
-		# Clean previously unfinished transaction @TODO also: either roll-back or roll-forward
-		if os.path.isdir(transaction_dir):
-			shutil.rmtree(transaction_dir)
-		if os.path.isdir(contents_dir):
-			shutil.rmtree(contents_dir)
-
-		# Set up transaction
-		os.mkdir(transaction_dir, 0o644)
-		files = [
-				"CONTENTS_DIGESTS_SHA1",
-				"CONTENTS_DIGESTS_SHA256",
-				"CONTENTS_DIGESTS_SHA512",
-				"CONTENTS_MODES",
-				"CONTENTS_ATTRS_PAX",
-				"CONTENTS_ATTRS_CAPS"
-			]
-		for f in files:
-			fname_src = os.path.join(vdbdir, f)
-			fname_dest = os.path.join(transaction_dir, f)
-
-			# Gracefully handle non-existent files
-			if os.path.isfile(fname_src):
-				shutil.copy2(fname_src, fname_dest)
-				manifest_lines += f + "\n"
-				manifest_lines += portage.checksum.perform_checksum(fname_src, "SHA1", 0)[0] + "\n"
-
-		# Write Manifest-file of transaction
-		os.mkdir(contents_dir, 0o644)
-		with open(manifest_file,"w") as f:
-			f.write(manifest_lines)
-
-	def stopContentsRemoval(self, vdbdir):
-		contents_dir = os.path.join(vdbdir, "contents.d")
-		transaction_dir = os.path.join(vdbdir, "contents.d~")
-		digests = []
-		transaction_files = []
-		all_files = [
-				"CONTENTS_DIGESTS_SHA1",
-				"CONTENTS_DIGESTS_SHA256",
-				"CONTENTS_DIGESTS_SHA512",
-				"CONTENTS_MODES",
-				"CONTENTS_ATTRS_PAX",
-				"CONTENTS_ATTRS_CAPS"
-			]
-
-		if not os.path.isdir(transaction_dir):
-			print("Failed creating transaction dir")
-			sys.exit(1)
-
-		# Read Manifest-file of contents
-		manifest_file = os.path.join(contents_dir, "Manifest")
-		if os.path.isfile(manifest_file):
-			with open(manifest_file,"r") as f:
-				lines = f.read().splitlines()
-
-			for i, line in enumerate(lines):
-				if (i%2) == 0:
-					transaction_files.append(line)
-				else:
-					digests.append(line)
-
-		# Check transactiondir against Manifest
-		for f in transaction_files:
-			file = os.path.join(transaction_dir, f)
-			if not os.path.isfile(file):
-				print("Manifest contains non-existing file '"+file+"'")
-				sys.exit(1)
-
-		# Setup contents_dir with links of vdbdir files
-		for i, f in enumerate(transaction_files):
-			fname_src = os.path.join(vdbdir, f)
-			fname_dest = os.path.join(contents_dir, f)
-
-			# Gracefully handle non-existent files
-			if os.path.isfile(fname_src):
-				if portage.checksum.perform_checksum(fname_src, "SHA1", 0)[0] != digests[i]:
-					print("According to Manifest, file in vdbdir was modified '" + fname_src + "'")
-					sys.exit(1)
-				else:
-					os.link(fname_src, fname_dest)
-			else:
-				print("File in Manifest no longer found in vdbdir '"+f+"'")
-				sys.exit(1)
-
-		# Sync contents_dir and transaction_dir to disk
-		if platform.system() == "Linux":
-			paths = []
-			for f in os.listdir(transaction_dir):
-				paths.append(os.path.join(transaction_dir, f))
-			for f in os.listdir(contents_dir):
-				paths.append(os.path.join(contents_dir, f))
-			paths = tuple(paths)
-
-			proc = SyncfsProcess(paths=paths,
-				scheduler=(
-						SchedulerInterface(portage._internal_caller and
-						global_event_loop() or EventLoop(main=False))
-					))
-
-			proc.start()
-			returncode = proc.wait()
-
-		# Link from transaction_dir
-		for f in transaction_files:
-			fname_src = os.path.join(transaction_dir, f)
-			fname_dest = os.path.join(vdbdir, f+"~")
-
-			# Gracefully handle non-existent files
-			if os.path.isfile(fname_src):
-				os.link(fname_src, fname_dest)
-			else:
-				print("Manifest contains file that no longer exists '"+f+"'")
-				sys.exit(1)
-
-		# Sync contents_dir and transaction_dir to disk
-		if platform.system() == "Linux":
-			paths = []
-			for f in transaction_files:
-				# Gracefully handle non-existent files
-				if os.path.isfile(os.path.join(vdbdir, f+"~")):
-					paths.append(os.path.join(vdbdir, f+"~"))
-				else:
-					print("Manifest contains file that no longer exists '"+f+"'")
-					sys.exit(1)
-			paths = tuple(paths)
-
-			proc = SyncfsProcess(paths=paths,
-				scheduler=(
-						SchedulerInterface(portage._internal_caller and
-						global_event_loop() or EventLoop(main=False))
-					))
-
-			proc.start()
-			returncode = proc.wait()
-
-		# Rename
-		for f in transaction_files:
-			fname_src = os.path.join(vdbdir, f+"~")
-			fname_dest = os.path.join(vdbdir, f)
-
-			# Gracefully handle non-existent files
-			if os.path.isfile(fname_src):
-				os.rename(fname_src, fname_dest) #atomic rename, doesn't require sync
-			else:
-				print("Manifest contains file that no longer exists '"+f+"'")
-				sys.exit(1)
-
-		shutil.rmtree(transaction_dir)
-		shutil.rmtree(contents_dir)
-
-	def removeFromContentsMeta(self, vdbdir, index, type):
-		contents_file = ""
-		if (type in
-			{"DIGESTS_SHA512",
-			"DIGESTS_SHA256",
-			"DIGESTS_SHA1",
-			"MODES",
-			"ATTRS_PAX",
-			"ATTRS_CAPS"}):
-			contents_file = os.path.join(os.path.join(vdbdir, "contents.d~"),"CONTENTS_"+type)
-		else:
-			print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
-
-		if type == "DIGESTS_SHA512": linelen = 128+1 #including newline
-		elif type == "DIGESTS_SHA256": linelen = 64 + 1
-		elif type == "DIGESTS_SHA1": linelen = 40+1
-		elif type == "MODES": linelen = 4 + 1
-		elif type == "ATTRS_PAX": linelen = 5 + 1
-		elif type == "ATTRS_CAPS": linelen = 16 + 1
-		else:
-			print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
-
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				pre = f.read((index-1)*linelen)
-				f.read(129)
-				post = f.read()
-				f.seek(0, 0)
-				f.write(pre)
-				f.write(post)
-				f.truncate()
-				f.flush()
-				os.fsync(f.fileno())
-
-	def writeContentsToContentsFile(self, pkg, new_contents, new_needed=None):
-		"""
-		@param pkg: package to write contents file for
-		@type pkg: dblink
-		@param new_contents: contents to write to CONTENTS file
-		@type new_contents: contents dictionary of the form
-					{u'/path/to/file' : (contents_attribute 1, ...), ...}
-		@param new_needed: new NEEDED entries
-		@type new_needed: list of NeededEntry
-		"""
-		root = self.settings['ROOT']
-		self._bump_mtime(pkg.mycpv)
-		if new_needed is not None:
-			f = atomic_ofstream(os.path.join(pkg.dbdir, LinkageMap._needed_aux_key))
-			for entry in new_needed:
-				f.write(_unicode(entry))
-			f.close()
-		f = atomic_ofstream(os.path.join(pkg.dbdir, "CONTENTS"))
-		write_contents(new_contents, root, f)
-		f.close()
-		self._bump_mtime(pkg.mycpv)
-		pkg._clear_contents_cache()
-
 	class _owners_cache(object):
 		"""
Lines 1817-1822
 		self.myroot = self.settings['ROOT']
 		self._installed_instance = None
 		self.contentscache = None
+		self.contents_index_cache = None
+		self.contents_metadata_cache = None
 		self._contents_inodes = None
 		self._contents_basenames = None
 		self._linkmap_broken = False
Lines 1833-1838
 		self._contents = ContentsCaseSensitivityManager(self)
 		self._slot_locks = []
 
+		self._content_files = [
+				("CONTENTS_DIGESTS_SHA512", 128+1),
+				("CONTENTS_DIGESTS_SHA1", 40+1),
+				("CONTENTS_DIGESTS_SHA256", 64+1),
+				("CONTENTS_MODES", 4+1),
+				("CONTENTS_ATTRS_PAX", 5+1),
+				("CONTENTS_ATTRS_CAPS", 16+1)
+			]
+
 	def __hash__(self):
 		return hash(self._hash_key)
 
Lines 1978-1983
 
 	def _clear_contents_cache(self):
 		self.contentscache = None
+		self.contents_index_cache = None
+		self.contents_metadata_cache = None
 		self._contents_inodes = None
 		self._contents_basenames = None
 		self._contents.clear_cache()
Lines 1988-1995
 		"""
 		if self.contentscache is not None:
 			return self.contentscache
+
 		contents_file = os.path.join(self.dbdir, "CONTENTS")
 		pkgfiles = OrderedDict()
+		pkgfiles_indices = OrderedDict()
+
 		try:
 			with io.open(_unicode_encode(contents_file,
 				encoding=_encodings['fs'], errors='strict'),
Lines 2009-2035
 		obj_index = contents_re.groupindex['obj']
 		dir_index = contents_re.groupindex['dir']
 		sym_index = contents_re.groupindex['sym']
+
 		# The old symlink format may exist on systems that have packages
 		# which were installed many years ago (see bug #351814).
 		oldsym_index = contents_re.groupindex['oldsym']
+
 		# CONTENTS files already contain EPREFIX
 		myroot = self.settings['ROOT']
 		if myroot == os.path.sep:
 			myroot = None
+
 		# used to generate parent dir entries
 		dir_entry = ("dir",)
 		eroot_split_len = len(self.settings["EROOT"].split(os.sep)) - 1
+
 		pos = 0
 		errors = []
 		for pos, line in enumerate(mylines):
 			if null_byte in line:
 				# Null bytes are a common indication of corruption.
-				errors.append((pos + 1, _("Null byte found in CONTENTS entry")))
+				errors.append((pos + 1, _("Null byte found in CONTENTS entry: ") + line))
 				continue
 			line = line.rstrip("\n")
 			m = contents_re.match(line)
 			if m is None:
-				errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
+				errors.append((pos + 1, _("Unrecognized CONTENTS entry: ") + line))
 				continue
 
 			if m.group(obj_index) is not None:
Lines 2074-2090
 				if parent in pkgfiles:
 					break
 				pkgfiles[parent] = dir_entry
+				pkgfiles_indices[parent] = (pos, dir_entry)
 				path_split.pop()
 
 			pkgfiles[path] = data
+			pkgfiles_indices[path] = (pos, data)
 
 		if errors:
 			writemsg(_("!!! Parse error in '%s'\n") % contents_file, noiselevel=-1)
 			for pos, e in errors:
 				writemsg(_("!!!   line %d: %s\n") % (pos, e), noiselevel=-1)
 		self.contentscache = pkgfiles
+		self.contents_index_cache = pkgfiles_indices
 		return pkgfiles
 
+	def getContentsIndices(self):
+		"""
+		Get installed files of a given package (aka what that package
+		installed), with indices.
+		"""
+		if self.contents_index_cache is not None:
+			return self.contents_index_cache
+		else:
+			self.getcontents()
+			if self.contents_index_cache is not None:
+				return self.contents_index_cache
+			else:
+				self._display_merge(_("!!! FAILED couldn't get cached contents index"),
+					level=logging.ERROR, noiselevel=-1)
+				return None
+
+	def getContentsMetadata(self):
+		"""
+		Get metadata of installed files of a given package (aka what that package installed):
+		- iterate over the results returned by getContentsIndices()
+		- iterate over the filetypes concerned
+		- grab the metadata using the index to calculate the position in the different files
+		- write the metadata to an OrderedDict and return it
+		"""
+		if self.contents_metadata_cache is not None:
+			return self.contents_metadata_cache
+		else:
+			if self.contents_index_cache is None:
+				self.getcontents()
+
+		vdbdir = self.dbdir
+		contents_metadata = OrderedDict()
+
+		for (filename, (index, tmp)) in self.contents_index_cache.copy().items():
+			contents_metadata[filename] = OrderedDict()
+			for type, linelen in self._content_files:
+				contents_fname = os.path.join(vdbdir, type)
+				if not os.path.isfile(contents_fname):
+					continue
+
+				with open(contents_fname, "rb") as f:
+					f.seek(index*linelen) # skip to the right line
+					value = f.read(linelen).decode() # read the line
+					contents_metadata[filename][type] = value
+
+		self.contents_metadata_cache = contents_metadata
+		return contents_metadata
+
 	def _prune_plib_registry(self, unmerge=False,
 		needed=None, preserve_paths=None):
 		# remove preserved libraries that don't have any consumers left
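
For orientation, the shapes of the caches built by the methods above, with
hypothetical values; only the offset arithmetic at the end is load-bearing:

	# contents_index_cache maps each path to its zero-based CONTENTS line
	# index plus the parsed entry:
	index_entry = {u'/usr/bin/foo': (12, ('obj', 'd41d8cd9...', '1546300800'))}
	# getContentsMetadata() turns that index into a byte offset per metadata
	# file: for ("CONTENTS_MODES", 4+1), line 12 starts at byte 12 * 5.
	assert 12 * (4 + 1) == 60
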
Lines 2475-2480
 		self._display_merge("%s %s %s %s\n" % \
 			(zing, desc.ljust(8), file_type, file_name))
 
+	def aux_update_pkg(self, values):
+		self.vartree.dbapi._bump_mtime(self.mycpv)
+		self.vartree.dbapi._clear_pkg_cache(self)
+		for k, v in values.items():
+			if v:
+				self.setfile(k, v)
+			else:
+				try:
+					os.unlink(os.path.join(self.vartree.dbapi.getpath(self.mycpv), k))
+				except EnvironmentError:
+					pass
+		self.vartree.dbapi._bump_mtime(self.mycpv)
+
 	def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
 		"""
 
Lines 3076-3081
 						for parent in sorted(set(recursive_parents)):
 							dirs.append((parent, revisit.pop(parent)))
 
+	def startContentsUpdate(self):
+		vdbdir = self.dbdir
+		contents_dir = os.path.join(vdbdir, "contents.d")
+		transaction_dir = os.path.join(vdbdir, "contents.d~")
+		manifest_lines = ""
+
+		# Clean previously unfinished transaction (this shouldn't occur, but might)
+		if os.path.isdir(transaction_dir):
+			shutil.rmtree(transaction_dir)
+		if os.path.isdir(contents_dir):
+			shutil.rmtree(contents_dir)
+
+		# Set up transaction
+		os.mkdir(transaction_dir, 0o644)
+		files = [
+				"NEEDED.ELF.2",
+				"CONTENTS",
+				"CONTENTS_DIGESTS_SHA1",
+				"CONTENTS_DIGESTS_SHA256",
+				"CONTENTS_DIGESTS_SHA512",
+				"CONTENTS_MODES",
+				"CONTENTS_ATTRS_PAX",
+				"CONTENTS_ATTRS_CAPS"
+			]
+		for f in files:
+			fname_src = os.path.join(vdbdir, f)
+			fname_dest = os.path.join(transaction_dir, f)
+
+			# Gracefully handle non-existent files
+			if os.path.isfile(fname_src):
+				shutil.copy2(fname_src, fname_dest)
+				manifest_lines += f + "\n"
+				manifest_lines += portage.checksum.perform_checksum(fname_src, "SHA1", 0)[0] + "\n"
+
+		# Write Manifest-file of transaction
+		os.mkdir(contents_dir, 0o644)
+		self.aux_update_pkg({os.path.join("contents.d", "Manifest"): manifest_lines})
+
+	def stopContentsUpdate(self, vdbdir):
+		contents_dir = os.path.join(vdbdir, "contents.d")
+		transaction_dir = os.path.join(vdbdir, "contents.d~")
+		digests = []
+		transaction_files = []
+		all_files = [
+				"NEEDED.ELF.2",
+				"CONTENTS",
+				"CONTENTS_DIGESTS_SHA1",
+				"CONTENTS_DIGESTS_SHA256",
+				"CONTENTS_DIGESTS_SHA512",
+				"CONTENTS_MODES",
+				"CONTENTS_ATTRS_PAX",
+				"CONTENTS_ATTRS_CAPS"
+			]
+
+		if not os.path.isdir(transaction_dir):
+			self._display_merge(_("!!! FAILED creating transaction dir "
+				"during contents update in:\n\t")+str(vdbdir)+"\n",
+				level=logging.ERROR, noiselevel=-1)
+			sys.exit(1)
+
+		# Read Manifest-file of contents
+		manifest_file = os.path.join(contents_dir, "Manifest")
+		if os.path.isfile(manifest_file):
+			with open(manifest_file, "r") as f:
+				lines = f.read().splitlines()
+
+			for i, line in enumerate(lines):
+				if (i%2) == 0:
+					transaction_files.append(line)
+				else:
+					digests.append(line)
+		else:
+			self._display_merge(_("!!! FAILED reading Manifest of transaction "
+				"during contents update in:\n\t")+str(vdbdir)+"\n",
+				level=logging.ERROR, noiselevel=-1)
+			sys.exit(1)
+
+		# Check Manifest against transaction_dir
+		for f in transaction_files:
+			file = os.path.join(transaction_dir, f)
+			if not os.path.isfile(file):
+				self._display_merge(_("!!! FAILED Manifest of transaction "
+					"contained non-existing file ")+str(file)+_(" "
+					"during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Check transaction_dir against Manifest
+		for f in os.listdir(transaction_dir):
+			if not f in transaction_files:
+				self._display_merge(_("!!! FAILED found file ")+str(f)+_(" "
+					"in transaction_dir that wasn't recorded in Manifest of "
+					"transaction during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Setup contents_dir with links of vdbdir files
+		for i, f in enumerate(transaction_files):
+			fname_src = os.path.join(vdbdir, f)
+			fname_dest = os.path.join(contents_dir, f)
+
+			# Gracefully handle non-existent files
+			if os.path.isfile(fname_src):
+				if portage.checksum.perform_checksum(fname_src, "SHA1", 0)[0] != digests[i]:
+					self._display_merge(_("!!! FAILED according to Manifest of transaction, "
+						"file ")+str(fname_src)+_(" in vdbdir was modified "
+						"during contents update in:\n\t")+str(vdbdir)+"\n",
+						level=logging.ERROR, noiselevel=-1)
+					sys.exit(1)
+				else:
+					os.link(fname_src, fname_dest)
+			else:
+				self._display_merge(_("!!! FAILED file in Manifest of transaction "
+					"no longer found in vdbdir ")+str(f)+_(" "
+					"during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Sync contents_dir and transaction_dir to disk
+		if platform.system() == "Linux":
+			paths = []
+			for f in os.listdir(transaction_dir):
+				paths.append(os.path.join(transaction_dir, f))
+			for f in os.listdir(contents_dir):
+				paths.append(os.path.join(contents_dir, f))
+			paths = tuple(paths)
+
+			proc = SyncfsProcess(paths=paths,
+				scheduler=(
+						SchedulerInterface(portage._internal_caller and
+						global_event_loop() or EventLoop(main=False))
+					))
+
+			proc.start()
+			returncode = proc.wait()
+
+		# Link from transaction_dir
+		for f in transaction_files:
+			fname_src = os.path.join(transaction_dir, f)
+			fname_dest = os.path.join(vdbdir, f+"~")
+
+			# Gracefully handle non-existent files
+			if os.path.isfile(fname_src):
+				os.link(fname_src, fname_dest)
+			else:
+				self._display_merge(_("!!! FAILED Manifest of transaction contains "
+					"file that no longer exists ")+str(f)+_(" "
+					"during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Sync the renamed-to-be vdbdir files to disk
+		if platform.system() == "Linux":
+			paths = []
+			for f in transaction_files:
+				# Gracefully handle non-existent files
+				if os.path.isfile(os.path.join(vdbdir, f+"~")):
+					paths.append(os.path.join(vdbdir, f+"~"))
+				else:
+					self._display_merge(_("!!! FAILED Manifest of transaction contains "
+						"file that no longer exists ")+str(f)+_(" "
+						"during contents update in:\n\t")+str(vdbdir)+"\n",
+						level=logging.ERROR, noiselevel=-1)
+					sys.exit(1)
+			paths = tuple(paths)
+
+			proc = SyncfsProcess(paths=paths,
+				scheduler=(
+						SchedulerInterface(portage._internal_caller and
+						global_event_loop() or EventLoop(main=False))
+					))
+
+			proc.start()
+			returncode = proc.wait()
+
+		# Rename
+		for f in transaction_files:
+			fname_src = os.path.join(vdbdir, f+"~")
+			fname_dest = os.path.join(vdbdir, f)
+
+			# Gracefully handle non-existent files
+			if os.path.isfile(fname_src):
+				os.rename(fname_src, fname_dest) # atomic rename, doesn't require sync
+			else:
+				self._display_merge(_("!!! FAILED Manifest of transaction contains "
+					"file that no longer exists ")+str(f)+_(" "
+					"during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Cleanup transaction (order matters for roll-back)
+		shutil.rmtree(contents_dir)
+		shutil.rmtree(transaction_dir)
+
+	def abortContentsUpdate(self):
+		# As this is an abort, we roll-back. So figure out, given the
+		# current state, how to roll-back.
+		vdbdir = self.dbdir
+		contents_dir = os.path.join(vdbdir, "contents.d")
+		transaction_dir = os.path.join(vdbdir, "contents.d~")
+
+		if os.path.isdir(transaction_dir) and not os.path.isdir(contents_dir):
+			# Transaction_dir exists, so it might be in progress
+			# Therefore we can't trust its contents
+			shutil.rmtree(transaction_dir)
+
+			# Contents_dir might exist, so clean it
+			if os.path.isdir(contents_dir):
+				shutil.rmtree(contents_dir)
+		elif not os.path.isdir(transaction_dir) and os.path.isdir(contents_dir):
+			# This shouldn't occur
+			shutil.rmtree(contents_dir)
+			self._display_merge(_("!!! FAILED please file a bug describing this situation"),
+				level=logging.ERROR, noiselevel=-1)
+
+		sys.exit(1)
+
+	def writeContentsToContentsFile(self, new_contents, new_contents_metadata, new_needed=None):
+		"""
+		@param new_contents: contents to write to CONTENTS file
+		@type new_contents: contents dictionary of the form
+				{u'/path/to/file' : (contents_attribute 1, ...), ...}
+		@param new_contents_metadata: contents to write to CONTENTS_* files
+		@type new_contents_metadata: contents dictionary of the form
+				{u'/path/to/file' : [index, {CONTENTS_* type : value, ... }]}
+		@param new_needed: new NEEDED entries
+		@type new_needed: list of NeededEntry
+		"""
+		# Here we do a number of things:
+		# - surround by an error catcher
+		# - setup write-ahead transaction
+		# - write multiple OrderedDicts to multiple files using aux_update()
+		# - complete write-ahead transaction
+		# - call a rollback function on error
+
+		try:
+			self.startContentsUpdate()
+
+			new_needed_str = ""
+			if new_needed is not None:
+				new_needed_str = ''.join(_unicode(e) for e in new_needed)
+
+			transaction_dir = "contents.d~"
+			contents_metadata = {}
+			contents_metadata[os.path.join(transaction_dir, LinkageMap._needed_aux_key)] = new_needed_str
+			contents_metadata[os.path.join(transaction_dir, "CONTENTS")] = prepare_contents(new_contents, self.settings['ROOT'])
+			for (filename, tmp) in new_contents_metadata.items():
+				for (type, value) in tmp.items():
+					type = os.path.join(transaction_dir, type)
+					contents_metadata[type] = contents_metadata.get(type, "") + value
+
+			self.aux_update_pkg(contents_metadata)
+
+			self.stopContentsUpdate(self.dbdir)
+
+			self._clear_contents_cache()
+		except (IOError, OSError) as e:
+			self._display_merge(_("!!! FAILED abort of transaction due to ")+str(e)+
+				_(" during contents update in:\n\t")+str(self.dbdir)+"\n",
+				level=logging.ERROR, noiselevel=-1)
+			self.abortContentsUpdate()
+
 	def isowner(self, filename, destroot=None):
 		"""
 		Check if a file belongs to this package. This may
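
A sketch of the transaction Manifest layout that startContentsUpdate() writes
and stopContentsUpdate() reads back: alternating file-name and SHA1 lines. The
helper below is hypothetical, mirroring the even/odd split in the code above:

	def parse_manifest(text):
		# "CONTENTS\n<sha1>\nCONTENTS_MODES\n<sha1>\n..." -> [(name, sha1), ...]
		names, digests = [], []
		for i, line in enumerate(text.splitlines()):
			(names if i % 2 == 0 else digests).append(line)
		return list(zip(names, digests))
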
Lines 3418-3428
 		# Copy contents entries from the old package to the new one.
 		new_contents = self.getcontents().copy()
 		old_contents = self._installed_instance.getcontents()
+		new_contents_metadata = self.getContentsMetadata().copy()
+		old_contents_metadata = self._installed_instance.getContentsMetadata()
 		for f in sorted(preserve_paths):
 			f = _unicode_decode(f,
 				encoding=_encodings['content'], errors='strict')
 			f_abs = os.path.join(root, f.lstrip(os.sep))
 			contents_entry = old_contents.get(f_abs)
+			contents_metadata_entry = old_contents_metadata.get(f_abs)
 			if contents_entry is None:
 				# This will probably never happen, but it might if one of the
 				# paths returned from findConsumers() refers to one of the libs
Lines 3435-3518
 				preserve_paths.remove(f)
 				continue
 			new_contents[f_abs] = contents_entry
-			self.writeMetaData(f_abs)
+			new_contents_metadata[f_abs] = contents_metadata_entry
 			obj_type = contents_entry[0]
 			showMessage(_(">>> needed    %s %s\n") % (obj_type, f_abs),
 				noiselevel=-1)
+
 			# Add parent directories to contents if necessary.
 			parent_dir = os.path.dirname(f_abs)
 			while len(parent_dir) > len(root):
 				new_contents[parent_dir] = ["dir"]
-				self.writeMetaData(parent_dir)
+				new_contents_metadata[f_abs] = contents_metadata_entry
 				prev = parent_dir
 				parent_dir = os.path.dirname(parent_dir)
 				if prev == parent_dir:
 					break
-		outfile = atomic_ofstream(os.path.join(self.dbtmpdir, "CONTENTS"))
-		write_contents(new_contents, root, outfile)
-		outfile.close()
-		self._clear_contents_cache()
-
-	def writeMetaData(self, fname):
-		hashtype = "SHA512"
-		if hashtype is None:
-			hashtype = "SHA512"
-		elif hashtype != "SHA1" and hashtype != "SHA256":
-			hashtype = "SHA512"
-
-		mystat = os.lstat(fname)
-		mymode = mystat[stat.ST_MODE]
-
-		if stat.S_ISREG(mymode):
-			line_digest = portage.checksum.perform_checksum(fname, hashtype, 0)[0]+"\n"
-			line_mode = oct(mymode)[-4:]+"\n"
-
-			attrlist = xattr.list(fname)
-			if len(attrlist)>0:
-				for i in attrlist:
-					if i == "user.pax.flags":
-						line_attr_pax = _unicode_decode(xattr.get(fname, "user.pax.flags")).zfill(5)+"\n"
-					if i == "security.capability":
-						caps = xattr.get(fname, "security.capability") # Take the actual value from xattr
-						caps_int = int(struct.unpack("<IIIII",caps)[1]) # Grab the bitfield, as integer
-						line_attr_caps = '{0:x}'.format(caps_int).zfill(16)+"\n" # Convert to a 16-pos hex string
-			else:
-				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
-				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
-		else: #DIR, LINK, FIFO, DEV
-			digest_length = 0
-			if hashtype == "SHA1":
-				digest_length = 40
-			elif hashtype == "SHA256":
-				digest_length = 64
-			elif hashtype == "SHA512":
-				digest_length = 128
-
-			line_digest = '{num:0{width}}\n'.format(num=0, width=digest_length)
-			line_mode = oct(mymode)[-4:]+"\n"
-			line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
-			line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
-
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_"+hashtype)
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_digest)
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_MODES")
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_mode)
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_PAX")
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_attr_pax)
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_CAPS")
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_attr_caps)
+
+		# Write new contents files
+		self.writeContentsToContentsFile(new_contents, new_contents_metadata)
 
 	def _find_unused_preserved_libs(self, unmerge_no_replacement):
 		"""
Lines 5127-5135
 				# confmem rejected this update
 				zing = "---"
 
-			srcobj = srcroot+relative_path
-			destobj = destroot+relative_path
-
+			# Set some values for use by metadata entries
 			digest_length = 0
 			if hashtype == "SHA1":
 				digest_length = 40
Lines 5138-5144
 			elif hashtype == "SHA512":
 				digest_length = 128
 
+			# Determine metadata entries
 			if stat.S_ISREG(mymode):
+				srcobj = srcroot+relative_path
 				line_digest = portage.checksum.perform_checksum(srcobj, hashtype, 0)[0]+"\n"
 				line_mode = oct(mymode)[-4:]+"\n"
 
Lines 5160-5170
 				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
 				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
 
-			digfile.write(line_digest)
-			modfile.write(line_mode)
-			paxfile.write(line_attr_pax)
-			capfile.write(line_attr_caps)
-
 			if stat.S_ISLNK(mymode):
 				# we are merging a symbolic link
 				# Pass in the symlink target in order to bypass the
Lines 5224-5230
 							[_("QA Notice: Symbolic link /%s points to /%s which does not exist.")
 							% (relative_path, myabsto)])
 
+					# Order writing of metadata entries
+					write_metadata = True
+
 					showMessage("%s %s -> %s\n" % (zing, mydest, myto))
+
+					# Writing contents entry
 					if sys.hexversion >= 0x3030000:
 						outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime // 1000000000)+"\n")
 					else:
Lines 5329-5335
 				except OSError:
 					pass
 
+				# Order writing of metadata entries
+				write_metadata = True
+
+				# Writing contents entry
 				outfile.write("dir "+myrealdest+"\n")
+
 				# recurse and merge this directory
 				mergelist.extend(join(relative_path, child) for child in
 					os.listdir(join(srcroot, relative_path)))
Lines 5377-5382
 						pass
 
 				if mymtime != None:
+					# Order writing of metadata entries
+					write_metadata = True
+
+					# Writing contents entry
 					if sys.hexversion >= 0x3030000:
 						outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime // 1000000000)+"\n")
 					else:
Lines 5399-5410
 
 					else:
 						return 1
+
+				# Writing contents entry
 				if stat.S_ISFIFO(mymode):
 					outfile.write("fif %s\n" % myrealdest)
 				else:
 					outfile.write("dev %s\n" % myrealdest)
 				showMessage(zing + " " + mydest + "\n")
 
+				# Order writing of metadata entries
+				write_metadata = True
+
+			# Write metadata entries
+			if write_metadata:
+				digfile.write(line_digest)
+				modfile.write(line_mode)
+				paxfile.write(line_attr_pax)
+				capfile.write(line_attr_caps)
+
 	def _protect(self, cfgfiledict, protect_if_modified, src_md5,
 		src_link, dest, dest_real, dest_mode, dest_md5, dest_link):
 
Lines 5776-5786
 		if not parallel_install:
 			mylink.unlockdb()
 
-def write_contents(contents, root, f):
+def prepare_contents(contents, root):
 	"""
-	Write contents to any file like object. The file will be left open.
+	Prepare string with contents of CONTENTS
 	"""
 	root_len = len(root) - 1
+	lines = ""
 	for filename in sorted(contents):
 		entry_data = contents[filename]
 		entry_type = entry_data[0]
Lines 5795-5801
 				(entry_type, relative_filename, link, mtime)
 		else: # dir, dev, fif
 			line = "%s %s\n" % (entry_type, relative_filename)
-		f.write(line)
+
+		lines += line
+
+	return lines
+
+def write_contents(contents, root, f):
+	"""
+	Write contents to any file like object. The file will be left open.
+	"""
+	f.write(prepare_contents(contents, root))
 
 def tar_contents(contents, root, tar, protect=None, onProgress=None,
 	xattrs=False):
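
The refactor above lets the CONTENTS text be built once and reused, e.g. handed
to aux_update_pkg() or written to a plain file object. An illustrative round
trip (the contents dict shape follows the docstrings earlier in the file):

	import io

	contents = {u'/usr/bin/foo':
		('obj', 'd41d8cd98f00b204e9800998ecf8427e', '1546300800')}
	text = prepare_contents(contents, '/')
	buf = io.StringIO()
	write_contents(contents, '/', buf)
	assert buf.getvalue() == text
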
