(-)vartree.py (-149 / +422 lines)
--- vartree.py
+++ vartree.py
@@ -96,6 +96,7 @@
 import textwrap
 import time
 import warnings
+import operator
 
 try:
 	import cPickle as pickle
@@ -202,6 +203,15 @@
 
 		self._cached_counter = None
 
+		self._content_files = [
+				("CONTENTS_DIGESTS_SHA512", 128+1),
+				("CONTENTS_DIGESTS_SHA1", 40+1),
+				("CONTENTS_DIGESTS_SHA256", 64+1),
+				("CONTENTS_MODES", 4+1),
+				("CONTENTS_ATTRS_PAX", 5+1),
+				("CONTENTS_ATTRS_CAPS", 16+1)
+			]
+
 	@property
 	def writable(self):
 		"""
@@ -1069,18 +1079,27 @@
 
 	def removeFromContents(self, pkg, paths, relative_paths=True):
 		"""
+		Remove installed files from contents and its metadata files.
+		Typically during an unmerge, so that libraries needed by other
+		packages are spared from the unmerge.
+
 		@param pkg: cpv for an installed package
 		@type pkg: string
 		@param paths: paths of files to remove from contents
 		@type paths: iterable
 		"""
+
+		# Grab a copy of contents and its metadata files.
 		if not hasattr(pkg, "getcontents"):
 			pkg = self._dblink(pkg)
 		root = self.settings['ROOT']
 		root_len = len(root) - 1
 		new_contents = pkg.getcontents().copy()
+		new_contents_index = pkg.getContentsIndices().copy()
+		new_contents_metadata = pkg.getContentsMetadata().copy()
 		removed = 0
 
+		# Remove installed files from contents and its metadata files.
 		for filename in paths:
 			filename = _unicode_decode(filename,
 				encoding=_encodings['content'], errors='strict')
@@ -1090,28 +1109,18 @@
 			else:
 				relative_filename = filename[root_len:]
 			contents_key = pkg._match_contents(relative_filename)
-			index = -1
-			try:
-				index = list(new_contents).index(filename)+1
-			except ValueError:
-				print("List does not contain value")
-			if contents_key and index >= 0:
+			if contents_key:
 				# It's possible for two different paths to refer to the same
 				# contents_key, due to directory symlinks. Therefore, pass a
 				# default value to pop, in order to avoid a KeyError which
 				# could otherwise be triggered (see bug #454400).
 				new_contents.pop(contents_key, None)
-				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA512")
-				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA1")
-				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA256")
-				self.removeFromContentsMeta(pkg.dbdir, index, "MODES")
-				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_PAX")
-				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_CAPS")
+				new_contents_metadata.pop(contents_key)
 				removed += 1
 
 		if removed:
 			# Also remove corresponding NEEDED lines, so that they do
-			# no corrupt LinkageMap data for preserve-libs.
+			# not corrupt LinkageMap data for preserve-libs.
 			needed_filename = os.path.join(pkg.dbdir, LinkageMap._needed_aux_key)
 			new_needed = None
 			try:
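
The dropped lookup recovered a record number by linearly scanning the ordered contents dict (`list(new_contents).index(filename)`), an O(n) walk per path, and it searched for the raw `filename` even though the dict is keyed by the canonical `contents_key` returned from `_match_contents()`. Keeping a parallel metadata dict under the same keys reduces the whole removal to two hash lookups. A standalone sketch of the pattern (paths illustrative):

    from collections import OrderedDict

    contents = OrderedDict([("/usr/bin/foo", ("obj",)), ("/usr/lib/libfoo.so.1", ("sym",))])
    metadata = OrderedDict([("/usr/bin/foo", {}), ("/usr/lib/libfoo.so.1", {})])

    contents_key = "/usr/bin/foo"      # canonical key from _match_contents()
    contents.pop(contents_key, None)   # O(1); default tolerates symlink-duplicated paths
    metadata.pop(contents_key, None)   # the parallel dict stays in step
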
@@ -1140,62 +1149,8 @@
 					if filename in new_contents:
 						new_needed.append(entry)
 
-			self.writeContentsToContentsFile(pkg, new_contents, new_needed=new_needed)
-
-	def removeFromContentsMeta(self, vdbdir, index, type):
-		contents_file = ""
-		if (type in
-			{"DIGESTS_SHA512",
-			"DIGESTS_SHA256",
-			"DIGESTS_SHA1",
-			"MODES",
-			"ATTRS_PAX",
-			"ATTRS_CAPS"}):
-			contents_file = os.path.join(vdbdir, "CONTENTS_"+type)
-		else:
-			print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
-
-		if type == "DIGESTS_SHA512": linelen = 128+1 #including newline
-		elif type == "DIGESTS_SHA256": linelen = 64 + 1
-		elif type == "DIGESTS_SHA1": linelen = 40+1
-		elif type == "MODES": linelen = 4 + 1
-		elif type == "ATTRS_PAX": linelen = 5 + 1
-		elif type == "ATTRS_CAPS": linelen = 16 + 1
-		else:
-			print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
-
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				pre = f.read((index-1)*linelen)
-				f.read(129)
-				post = f.read()
-				f.seek(0, 0)
-				f.write(pre)
-				f.write(post)
-				f.truncate()
-
-	def writeContentsToContentsFile(self, pkg, new_contents, new_needed=None):
-		"""
-		@param pkg: package to write contents file for
-		@type pkg: dblink
-		@param new_contents: contents to write to CONTENTS file
-		@type new_contents: contents dictionary of the form
-					{u'/path/to/file' : (contents_attribute 1, ...), ...}
-		@param new_needed: new NEEDED entries
-		@type new_needed: list of NeededEntry
-		"""
-		root = self.settings['ROOT']
-		self._bump_mtime(pkg.mycpv)
-		if new_needed is not None:
-			f = atomic_ofstream(os.path.join(pkg.dbdir, LinkageMap._needed_aux_key))
-			for entry in new_needed:
-				f.write(_unicode(entry))
-			f.close()
-		f = atomic_ofstream(os.path.join(pkg.dbdir, "CONTENTS"))
-		write_contents(new_contents, root, f)
-		f.close()
-		self._bump_mtime(pkg.mycpv)
-		pkg._clear_contents_cache()
+			# Write new contents files and clear CONTENTS cache
+			pkg.writeContentsToContentsFile(new_contents, new_contents_metadata, new_needed=new_needed)
 
 	class _owners_cache(object):
 		"""
@@ -1657,6 +1612,8 @@
 		self.myroot = self.settings['ROOT']
 		self._installed_instance = None
 		self.contentscache = None
+		self.contents_index_cache = None
+		self.contents_metadata_cache = None
 		self._contents_inodes = None
 		self._contents_basenames = None
 		self._linkmap_broken = False
@@ -1673,6 +1630,15 @@
 		self._contents = ContentsCaseSensitivityManager(self)
 		self._slot_locks = []
 
+		self._content_files = [
+				("CONTENTS_DIGESTS_SHA512", 128+1),
+				("CONTENTS_DIGESTS_SHA1", 40+1),
+				("CONTENTS_DIGESTS_SHA256", 64+1),
+				("CONTENTS_MODES", 4+1),
+				("CONTENTS_ATTRS_PAX", 5+1),
+				("CONTENTS_ATTRS_CAPS", 16+1)
+			]
+
 	def __hash__(self):
 		return hash(self._hash_key)
 
@@ -1818,6 +1784,8 @@
 
 	def _clear_contents_cache(self):
 		self.contentscache = None
+		self.contents_index_cache = None
+		self.contents_metadata_cache = None
 		self._contents_inodes = None
 		self._contents_basenames = None
 		self._contents.clear_cache()
@@ -1828,8 +1796,11 @@
 		"""
 		if self.contentscache is not None:
 			return self.contentscache
+
 		contents_file = os.path.join(self.dbdir, "CONTENTS")
 		pkgfiles = OrderedDict()
+		pkgfiles_indices = OrderedDict()
+
 		try:
 			with io.open(_unicode_encode(contents_file,
 				encoding=_encodings['fs'], errors='strict'),
@@ -1849,27 +1820,31 @@
 		obj_index = contents_re.groupindex['obj']
 		dir_index = contents_re.groupindex['dir']
 		sym_index = contents_re.groupindex['sym']
+
 		# The old symlink format may exist on systems that have packages
 		# which were installed many years ago (see bug #351814).
 		oldsym_index = contents_re.groupindex['oldsym']
+
 		# CONTENTS files already contain EPREFIX
 		myroot = self.settings['ROOT']
 		if myroot == os.path.sep:
 			myroot = None
+
 		# used to generate parent dir entries
 		dir_entry = ("dir",)
 		eroot_split_len = len(self.settings["EROOT"].split(os.sep)) - 1
+
 		pos = 0
 		errors = []
 		for pos, line in enumerate(mylines):
 			if null_byte in line:
 				# Null bytes are a common indication of corruption.
-				errors.append((pos + 1, _("Null byte found in CONTENTS entry")))
+				errors.append((pos + 1, _("Null byte found in CONTENTS entry: "+line)))
 				continue
 			line = line.rstrip("\n")
 			m = contents_re.match(line)
 			if m is None:
-				errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
+				errors.append((pos + 1, _("Unrecognized CONTENTS entry: "+line)))
 				continue
 
 			if m.group(obj_index) is not None:
@@ -1914,17 +1889,67 @@
 				if parent in pkgfiles:
 					break
 				pkgfiles[parent] = dir_entry
+				pkgfiles_indices[parent] = (pos,dir_entry)
 				path_split.pop()
 
 			pkgfiles[path] = data
+			pkgfiles_indices[path] = (pos,data)
 
 		if errors:
 			writemsg(_("!!! Parse error in '%s'\n") % contents_file, noiselevel=-1)
 			for pos, e in errors:
 				writemsg(_("!!!   line %d: %s\n") % (pos, e), noiselevel=-1)
 		self.contentscache = pkgfiles
+		self.contents_index_cache = pkgfiles_indices
 		return pkgfiles
 
+	def getContentsIndices(self):
+		"""
+		Get installed files of a given package (aka what that package installed), with indices
+		"""
+		if self.contents_index_cache is not None:
+			return self.contents_index_cache
+		else:
+			getcontents()
+			if self.contents_index_cache is not None:
+				return self.contents_index_cache
+			else:
+				showMessage(_("!!! FAILED couldn't get cached contents index"),
+					level=logging.ERROR, noiselevel=-1)
+				return None
+
+	def getContentsMetadata(self):
+		"""
+		Get metadata of installed files of a given package (aka what that package installed):
+		- iterate over the results returned by getContentsIndices()
+		- iterate over the filetypes concerned
+		- grab the metadata using the identifier to calculate position in the different files
+		- write metadata to an orderedDict and return
+		"""
+		if self.contents_metadata_cache is not None:
+			return self.contents_metadata_cache
+		else:
+			if self.contents_index_cache is None:
+				getcontents()
+
+		vdbdir = self.dbdir
+		contents_metadata = OrderedDict()
+
+		for (filename, (index, tmp)) in self.contents_index_cache.copy().items():
+			contents_metadata[filename] = OrderedDict()
+			for type, linelen in self._content_files:
+				contents_fname = os.path.join(vdbdir, type)
+				if not os.path.isfile(contents_fname):
+					continue
+
+				with open(contents_fname,"rb") as f:
+					f.seek(index*linelen) #skip to the right line
+					value = f.read(linelen).decode() #read the line
+					contents_metadata[filename][type] = value
+
+		self.contents_metadata_cache = contents_metadata
+		return contents_metadata
+
 	def _prune_plib_registry(self, unmerge=False,
 		needed=None, preserve_paths=None):
 		# remove preserved libraries that don't have any consumers left
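
`getContentsIndices()` exposes the line position that `getcontents()` now records for every path, and `getContentsMetadata()` turns that position into a byte offset per metadata file (`index*linelen`). The shape of the index cache, with illustrative entries:

    pkgfiles_indices = {
        "/usr": (0, ("dir",)),
        "/usr/bin": (1, ("dir",)),
        "/usr/bin/foo": (2, ("obj",)),   # parsed CONTENTS data, abbreviated here
    }

    index = pkgfiles_indices["/usr/bin/foo"][0]
    offset = index * (128 + 1)   # start of that entry's CONTENTS_DIGESTS_SHA512 line
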
@@ -2315,6 +2340,19 @@
 		self._display_merge("%s %s %s %s\n" % \
 			(zing, desc.ljust(8), file_type, file_name))
 
+	def aux_update_pkg(self, values):
+		self.vartree.dbapi._bump_mtime(self.mycpv)
+		self.vartree.dbapi._clear_pkg_cache(self)
+		for k, v in values.items():
+			if v:
+				self.setfile(k, v)
+			else:
+				try:
+					os.unlink(os.path.join(self.vartree.dbapi.getpath(self.mycpv), k))
+				except EnvironmentError:
+					pass
+		self.vartree.dbapi._bump_mtime(self.mycpv)
+
 	def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
 		"""
 
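
`aux_update_pkg()` follows the vdb convention that each metadata item is a file in the package's database directory: every dict key names such a file (written via `setfile()`), and a false/empty value unlinks it instead. A hypothetical call in the style of the transaction code below (digest and file names illustrative):

    import os

    manifest_lines = "CONTENTS\nda39a3ee5e6b4b0d3255bfef95601890afd80709\n"

    # `pkg` is a dblink; keys are paths relative to the package's vdb directory.
    pkg.aux_update_pkg({
        os.path.join("contents.d", "Manifest"): manifest_lines,  # write or replace
        "CONTENTS~": "",                                         # empty value: unlink
    })
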
@@ -2916,6 +2954,268 @@
 						for parent in sorted(set(recursive_parents)):
 							dirs.append((parent, revisit.pop(parent)))
 
+	def startContentsUpdate(self):
+		vdbdir = self.dbdir
+		contents_dir = os.path.join(vdbdir, "contents.d")
+		transaction_dir = os.path.join(vdbdir, "contents.d~")
+		manifest_lines = ""
+
+		# Clean previously unfinished transaction (this shouldn't occur, but might)
+		if os.path.isdir(transaction_dir):
+			shutil.rmtree(transaction_dir)
+		if os.path.isdir(contents_dir):
+			shutil.rmtree(contents_dir)
+
+		# Set up transaction
+		os.mkdir(transaction_dir, 0o644)
+		files = [
+				"NEEDED.ELF.2",
+				"CONTENTS",
+				"CONTENTS_DIGESTS_SHA1",
+				"CONTENTS_DIGESTS_SHA256",
+				"CONTENTS_DIGESTS_SHA512",
+				"CONTENTS_MODES",
+				"CONTENTS_ATTRS_PAX",
+				"CONTENTS_ATTRS_CAPS"
+			]
+		for f in files:
+			fname_src = os.path.join(vdbdir, f)
+			fname_dest = os.path.join(transaction_dir, f)
+
+			# Gracefully handle non-existent files
+			if os.path.isfile(fname_src):
+				shutil.copy2(fname_src, fname_dest)
+				manifest_lines += f + "\n"
+				manifest_lines += portage.checksum.perform_checksum(fname_src, "SHA1", 0)[0] + "\n"
+
+		# Write Manifest-file of transaction
+		os.mkdir(contents_dir, 0o644)
+		self.aux_update_pkg({os.path.join("contents.d","Manifest"): manifest_lines})
+
+	def stopContentsUpdate(self, vdbdir):
+		contents_dir = os.path.join(vdbdir, "contents.d")
+		transaction_dir = os.path.join(vdbdir, "contents.d~")
+		digests = []
+		transaction_files = []
+		all_files = [
+				"NEEDED.ELF.2",
+				"CONTENTS",
+				"CONTENTS_DIGESTS_SHA1",
+				"CONTENTS_DIGESTS_SHA256",
+				"CONTENTS_DIGESTS_SHA512",
+				"CONTENTS_MODES",
+				"CONTENTS_ATTRS_PAX",
+				"CONTENTS_ATTRS_CAPS"
+			]
+
+		if not os.path.isdir(transaction_dir):
+			showMessage(_("!!! FAILED creating transaction dir "
+				"during contents update in:\n\t")+str(vdbdir)+"\n",
+				level=logging.ERROR, noiselevel=-1)
+			sys.exit(1)
+
+		# Read Manifest-file of contents
+		manifest_file = os.path.join(contents_dir, "Manifest")
+		if os.path.isfile(manifest_file):
+			with open(manifest_file,"r") as f:
+				lines = f.read().splitlines()
+
+			for i, line in enumerate(lines):
+				if (i%2) == 0:
+					transaction_files.append(line)
+				else:
+					digests.append(line)
+		else:
+			showMessage(_("!!! FAILED reading Manifest of transaction"
+				"during contents update in:\n\t")+str(vdbdir)+"\n",
+				level=logging.ERROR, noiselevel=-1)
+			sys.exit(1)
+
+		# Check Manifest against transaction_dir
+		for f in transaction_files:
+			file = os.path.join(transaction_dir, f)
+			if not os.path.isfile(file):
+				showMessage(_("!!! FAILED Manifest of transaction "
+					"contained non-existing file")+str(file)+_(" "
+					"during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Check transaction_dir against Manifest
+		for f in os.listdir(transaction_dir):
+			if not f in transaction_files:
+				showMessage(_("!!! FAILED found file ")+str(file)+_(" "
+					"in transaction_dir that wasn't recorded in Manifest of "
+					"transaction during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Setup contents_dir with links of vdbdir files
+		for i, f in enumerate(transaction_files):
+			fname_src = os.path.join(vdbdir, f)
+			fname_dest = os.path.join(contents_dir, f)
+
+			# Gracefully handle non-existent files
+			if os.path.isfile(fname_src):
+				if portage.checksum.perform_checksum(fname_src, "SHA1", 0)[0] != digests[i]:
+					showMessage(_("!!! FAILED according to Manifest of transaction, "
+						"file ")+str(file)+_(" in vdbdir was modified"
+						"during contents update in:\n\t")+str(vdbdir)+"\n",
+						level=logging.ERROR, noiselevel=-1)
+					sys.exit(1)
+				else:
+					os.link(fname_src, fname_dest)
+			else:
+				showMessage(_("!!! FAILED file in Manifest of transaction"
+					"no longer found in vdbdir ")+str(f)+_(
+					"during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Sync contents_dir and transaction_dir to disk
+		if platform.system() == "Linux":
+			paths = []
+			for f in os.listdir(transaction_dir):
+				paths.append(os.path.join(transaction_dir, f))
+			for f in os.listdir(contents_dir):
+				paths.append(os.path.join(contents_dir, f))
+			paths = tuple(paths)
+
+			proc = SyncfsProcess(paths=paths,
+				scheduler=(
+						SchedulerInterface(portage._internal_caller and
+						global_event_loop() or EventLoop(main=False))
+					))
+
+			proc.start()
+			returncode = proc.wait()
+
+		# Link from transaction_dir
+		for f in transaction_files:
+			fname_src = os.path.join(transaction_dir, f)
+			fname_dest = os.path.join(vdbdir, f+"~")
+
+			# Gracefully handle non-existent files
+			if os.path.isfile(fname_src):
+				os.link(fname_src, fname_dest)
+			else:
+				showMessage(_("!!! FAILED Manifest of transaction contains"
+					"file that no longer exists ")+str(f)+_(
+					"during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Sync contents_dir and transaction_dir to disk
+		if platform.system() == "Linux":
+			paths = []
+			for f in transaction_files:
+				# Gracefully handle non-existent files
+				if os.path.isfile(os.path.join(vdbdir, f+"~")):
+					paths.append(os.path.join(vdbdir, f+"~"))
+				else:
+					showMessage(_("!!! FAILED Manifest of transaction contains"
+						"file that no longer exists ")+str(f)+_(
+						"during contents update in:\n\t")+str(vdbdir)+"\n",
+						level=logging.ERROR, noiselevel=-1)
+					sys.exit(1)
+			paths = tuple(paths)
+
+			proc = SyncfsProcess(paths=paths,
+				scheduler=(
+						SchedulerInterface(portage._internal_caller and
+						global_event_loop() or EventLoop(main=False))
+					))
+
+			proc.start()
+			returncode = proc.wait()
+
+		# Rename
+		for f in transaction_files:
+			fname_src = os.path.join(vdbdir, f+"~")
+			fname_dest = os.path.join(vdbdir, f)
+
+			# Gracefully handle non-existent files
+			if os.path.isfile(fname_src):
+				os.rename(fname_src, fname_dest) #atomic rename, doesn't require sync
+			else:
+				showMessage(_("!!! FAILED Manifest of transaction contains"
+					"file that no longer exists ")+str(f)+_(
+					"during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Cleanup transaction (order matters for roll-back)
+		shutil.rmtree(contents_dir)
+		shutil.rmtree(transaction_dir)
+
+	def abortContentsUpdate(self):
+		# As this is an abort, we roll-back. So figure out, given the
+		# current state, how to roll-back.
+		vdbdir = self.dbdir
+		contents_dir = os.path.join(vdbdir, "contents.d")
+		transaction_dir = os.path.join(vdbdir, "contents.d~")
+
+		if os.path.isdir(transaction_dir) and not os.path.isdir(contents_dir):
+			# Transaction_dir exists, so it might be in progress
+			# Therefore we can't trust its contents
+			shutil.rmtree(transaction_dir)
+
+			# Contents_dir might exist, so clean it
+			if os.path.isdir(contents_dir):
+				shutil.rmtree(contents_dir)
+		elif not os.path.isdir(transaction_dir) and os.path.isdir(contents_dir):
+			# This shouldn't occur
+			shutil.rmtree(contents_dir)
+			showMessage(_("!!! FAILED please file a bug describing this situation"),
+				level=logging.ERROR, noiselevel=-1)
+
+		sys.exit(1)
+
+	def writeContentsToContentsFile(self, new_contents, new_contents_metadata, new_needed=None):
+		"""
+		@param new_contents: contents to write to CONTENTS file
+		@type new_contents: contents dictionary of the form
+				{u'/path/to/file' : (contents_attribute 1, ...), ...}
+		@param new_contents_metadata: contents to write to CONTENTS_* files
+		@type new_contents_metadata: contents dictionary of the form
+				{u'/path/to/file' : [index, {CONTENTS_* type : value, ... }]}
+		@param new_needed: new NEEDED entries
+		@type new_needed: list of NeededEntry
+		"""
+		# Here we do a number of things:
+		# - surround by an error catcher
+		# - setup write-ahead transaction
+		# - write multiple orderedDicts to multiple files using aux_update()
+		# - complete write-ahead transaction
+		# - call a rollback function on error
+
+		try:
+			self.startContentsUpdate()
+
+			new_needed_str = ""
+			if new_needed is not None:
+				new_needed_str = ''.join(_unicode(e) for e in new_needed)
+
+			transaction_dir = "contents.d~"
+			contents_metadata = {}
+			contents_metadata[os.path.join(transaction_dir, LinkageMap._needed_aux_key)] = new_needed_str
+			contents_metadata[os.path.join(transaction_dir, "CONTENTS")] = prepare_contents(new_contents, self.settings['ROOT'])
+			for (filename, (tmp)) in new_contents_metadata.items():
+				for (type, value) in tmp.items():
+					type = os.path.join(transaction_dir,type)
+					contents_metadata[type] = contents_metadata.get(type,"") + value
+
+			self.aux_update_pkg(contents_metadata)
+
+			self.stopContentsUpdate(self.dbdir)
+
+			self._clear_contents_cache()
+		except (IOError, OSError) as e:
+			showMessage(_("!!! FAILED abort of transaction due to "+str(e)+
+				"during contents update in:\n\t")+str(vdbdir)+"\n",
+				level=logging.ERROR, noiselevel=-1)
+			self.abortContentsUpdate()
+
 	def isowner(self, filename, destroot=None):
 		"""
 		Check if a file belongs to this package. This may
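
The transaction Manifest written by `startContentsUpdate()` is a flat list of alternating lines, a file name followed by its SHA1, and `stopContentsUpdate()` splits the pairs back apart by line parity. A standalone sketch of the format and the recovery loop (digests illustrative):

    manifest_text = (
        "CONTENTS\n"
        "da39a3ee5e6b4b0d3255bfef95601890afd80709\n"
        "CONTENTS_MODES\n"
        "da39a3ee5e6b4b0d3255bfef95601890afd80709\n"
    )

    transaction_files, digests = [], []
    for i, line in enumerate(manifest_text.splitlines()):
        if i % 2 == 0:
            transaction_files.append(line)   # even lines: file names
        else:
            digests.append(line)             # odd lines: SHA1 digests

    # transaction_files == ["CONTENTS", "CONTENTS_MODES"]
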
@@ -3258,11 +3558,14 @@
 		# Copy contents entries from the old package to the new one.
 		new_contents = self.getcontents().copy()
 		old_contents = self._installed_instance.getcontents()
+		new_contents_metadata = self.getContentsMetadata().copy()
+		old_contents_metadata = self._installed_instance.getContentsMetadata()
 		for f in sorted(preserve_paths):
 			f = _unicode_decode(f,
 				encoding=_encodings['content'], errors='strict')
 			f_abs = os.path.join(root, f.lstrip(os.sep))
 			contents_entry = old_contents.get(f_abs)
+			contents_metadata_entry = old_contents_metadata.get(f_abs)
 			if contents_entry is None:
 				# This will probably never happen, but it might if one of the
 				# paths returned from findConsumers() refers to one of the libs
@@ -3275,84 +3578,23 @@
 				preserve_paths.remove(f)
 				continue
 			new_contents[f_abs] = contents_entry
-			self.writeMetaData(f_abs)
+			new_contents_metadata[f_abs] = contents_metadata_entry
 			obj_type = contents_entry[0]
 			showMessage(_(">>> needed    %s %s\n") % (obj_type, f_abs),
 				noiselevel=-1)
+
 			# Add parent directories to contents if necessary.
 			parent_dir = os.path.dirname(f_abs)
 			while len(parent_dir) > len(root):
 				new_contents[parent_dir] = ["dir"]
-				self.writeMetaData(parent_dir)
+				new_contents_metadata[f_abs] = contents_metadata_entry
 				prev = parent_dir
 				parent_dir = os.path.dirname(parent_dir)
 				if prev == parent_dir:
 					break
-		outfile = atomic_ofstream(os.path.join(self.dbtmpdir, "CONTENTS"))
-		write_contents(new_contents, root, outfile)
-		outfile.close()
-		self._clear_contents_cache()
-
-	def writeMetaData(self, fname):
-		hashtype = "SHA512"
-		if hashtype is None:
-			hashtype = "SHA512"
-		elif hashtype != "SHA1" and hashtype != "SHA256":
-			hashtype = "SHA512"
-
-		mystat = os.lstat(fname)
-		mymode = mystat[stat.ST_MODE]
-
-		if stat.S_ISREG(mymode):
-			line_digest = portage.checksum.perform_checksum(fname, hashtype, 0)[0]+"\n"
-			line_mode = oct(mymode)[-4:]+"\n"
-
-			attrlist = xattr.list(fname)
-			if len(attrlist)>0:
-				for i in attrlist:
-					if i == "user.pax.flags":
-						line_attr_pax = _unicode_decode(xattr.get(fname, "user.pax.flags")).zfill(5)+"\n"
-					if i == "security.capability":
-						caps = xattr.get(fname, "security.capability") # Take the actual value from xattr
-						caps_int = int(struct.unpack("<IIIII",caps)[1]) # Grab the bitfield, as integer
-						line_attr_caps = '{0:x}'.format(caps_int).zfill(16)+"\n" # Convert to a 16-pos hex string
-			else:
-				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
-				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
-		else: #DIR, LINK, FIFO, DEV
-			digest_length = 0
-			if hashtype == "SHA1":
-				digest_length = 40
-			elif hashtype == "SHA256":
-				digest_length = 64
-			elif hashtype == "SHA512":
-				digest_length = 128
-
-			line_digest = '{num:0{width}}\n'.format(num=0, width=digest_length)
-			line_mode = oct(mymode)[-4:]+"\n"
-			line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
-			line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
-
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_"+hashtype)
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_digest)
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_MODES")
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_mode)
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_PAX")
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_attr_pax)
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_CAPS")
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_attr_caps)
+
+		# Write new contents files
+		self.writeContentsToContentsFile(new_contents, new_contents_metadata)
 
 	def _find_unused_preserved_libs(self, unmerge_no_replacement):
 		"""
@@ -4967,9 +5209,7 @@
 				# confmem rejected this update
 				zing = "---"
 
-			srcobj = srcroot+relative_path
-			destobj = destroot+relative_path
-
+			# Set some values for use by metadata entries
 			digest_length = 0
 			if hashtype == "SHA1":
 				digest_length = 40
@@ -4978,7 +5218,9 @@
 			elif hashtype == "SHA512":
 				digest_length = 128
 
+			# Determine metadata entries
 			if stat.S_ISREG(mymode):
+				srcobj = srcroot+relative_path
 				line_digest = portage.checksum.perform_checksum(srcobj, hashtype, 0)[0]+"\n"
 				line_mode = oct(mymode)[-4:]+"\n"
 
5000
				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
5242
				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
5001
				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
5243
				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
5002
5244
5003
			digfile.write(line_digest)
5004
			modfile.write(line_mode)
5005
			paxfile.write(line_attr_pax)
5006
			capfile.write(line_attr_caps)
5007
5008
			if stat.S_ISLNK(mymode):
5245
			if stat.S_ISLNK(mymode):
5009
				# we are merging a symbolic link
5246
				# we are merging a symbolic link
5010
				# Pass in the symlink target in order to bypass the
5247
				# Pass in the symlink target in order to bypass the
@@ -5064,7 +5301,12 @@
 							[_("QA Notice: Symbolic link /%s points to /%s which does not exist.")
 							% (relative_path, myabsto)])
 
+					# Order writing of metadata entries
+					write_metadata = True
+
 					showMessage("%s %s -> %s\n" % (zing, mydest, myto))
+
+					# Writing contents entry
 					if sys.hexversion >= 0x3030000:
 						outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime // 1000000000)+"\n")
 					else:
@@ -5169,7 +5411,12 @@
 				except OSError:
 					pass
 
+				# Order writing of metadata entries
+				write_metadata = True
+
+				# Writing contents entry
 				outfile.write("dir "+myrealdest+"\n")
+
 				# recurse and merge this directory
 				mergelist.extend(join(relative_path, child) for child in
 					os.listdir(join(srcroot, relative_path)))
@@ -5217,6 +5464,10 @@
 						pass
 
 				if mymtime != None:
+					# Order writing of metadata entries
+					write_metadata = True
+
+					# Writing contents entry
 					if sys.hexversion >= 0x3030000:
 						outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime // 1000000000)+"\n")
 					else:
@@ -5239,12 +5490,24 @@
 
 					else:
 						return 1
+
+				# Writing contents entry
 				if stat.S_ISFIFO(mymode):
 					outfile.write("fif %s\n" % myrealdest)
 				else:
 					outfile.write("dev %s\n" % myrealdest)
 				showMessage(zing + " " + mydest + "\n")
 
+				# Order writing of metadata entries
+				write_metadata = True
+
+			# Write metadata entries
+			if write_metadata:
+				digfile.write(line_digest)
+				modfile.write(line_mode)
+				paxfile.write(line_attr_pax)
+				capfile.write(line_attr_caps)
+
 	def _protect(self, cfgfiledict, protect_if_modified, src_md5,
 		src_link, dest, dest_real, dest_mode, dest_md5, dest_link):
 
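
The effect of the `write_metadata` flag is all-or-nothing appends: an entry either gets its CONTENTS line plus all four metadata records, or none of them, so the fixed-width files stay line-aligned with CONTENTS. Schematically (names as in the hunk above):

    write_metadata = False
    # ...each file-type branch writes its CONTENTS entry, then sets write_metadata = True...
    if write_metadata:
        digfile.write(line_digest)
        modfile.write(line_mode)
        paxfile.write(line_attr_pax)
        capfile.write(line_attr_caps)
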
@@ -5616,11 +5879,12 @@
 		if not parallel_install:
 			mylink.unlockdb()
 
-def write_contents(contents, root, f):
+def prepare_contents(contents, root):
 	"""
-	Write contents to any file like object. The file will be left open.
+	Prepare string with contents of CONTENTS
 	"""
 	root_len = len(root) - 1
+	lines = ""
 	for filename in sorted(contents):
 		entry_data = contents[filename]
 		entry_type = entry_data[0]
@@ -5635,7 +5899,16 @@
 				(entry_type, relative_filename, link, mtime)
 		else: # dir, dev, fif
 			line = "%s %s\n" % (entry_type, relative_filename)
-		f.write(line)
+
+		lines += line
+
+	return lines
+
+def write_contents(contents, root, f):
+	"""
+	Write contents to any file like object. The file will be left open.
+	"""
+	f.write(prepare_contents(contents, root))
 
 def tar_contents(contents, root, tar, protect=None, onProgress=None,
 	xattrs=False):
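
With `write_contents()` reduced to a wrapper, callers that need the serialized CONTENTS as a string, as `writeContentsToContentsFile()` does when staging its transaction, can call `prepare_contents()` directly. A minimal usage sketch (contents dict illustrative):

    contents = {"/usr": ("dir",)}
    text = prepare_contents(contents, "/")   # -> "dir /usr\n"

    with open("CONTENTS", "w") as f:
        write_contents(contents, "/", f)     # same text, via a file object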
