--- vartree.py
+++ vartree.py
@@ -77,6 +77,7 @@
 from _emerge.SpawnProcess import SpawnProcess
 from ._ContentsCaseSensitivityManager import ContentsCaseSensitivityManager
 
+from collections import OrderedDict
 import errno
 import fnmatch
 import gc
@@ -89,6 +89,7 @@
 import pwd
 import re
 import stat
+import struct
 import sys
 import tempfile
 import textwrap
@@ -1088,12 +1090,23 @@
 			else:
 				relative_filename = filename[root_len:]
 			contents_key = pkg._match_contents(relative_filename)
-			if contents_key:
+			index = -1
+			try:
+				index = list(new_contents).index(filename)+1
+			except ValueError:
+				print("List does not contain value")
+			if contents_key and index >= 0:
 				# It's possible for two different paths to refer to the same
 				# contents_key, due to directory symlinks. Therefore, pass a
 				# default value to pop, in order to avoid a KeyError which
 				# could otherwise be triggered (see bug #454400).
 				new_contents.pop(contents_key, None)
+				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA512")
+				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA1")
+				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA256")
+				self.removeFromContentsMeta(pkg.dbdir, index, "MODES")
+				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_PAX")
+				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_CAPS")
 				removed += 1
 
 		if removed:
@@ -1129,6 +1142,37 @@
 
 			self.writeContentsToContentsFile(pkg, new_contents, new_needed=new_needed)
 
+	def removeFromContentsMeta(self, vdbdir, index, type):
+		if (type in
+			{"DIGESTS_SHA512",
+			"DIGESTS_SHA256",
+			"DIGESTS_SHA1",
+			"MODES",
+			"ATTRS_PAX",
+			"ATTRS_CAPS"}):
+			contents_file = os.path.join(vdbdir, "CONTENTS_"+type)
+		else:
+			print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
+			return
+
+		if type == "DIGESTS_SHA512": linelen = 128 + 1 # including newline
+		elif type == "DIGESTS_SHA256": linelen = 64 + 1
+		elif type == "DIGESTS_SHA1": linelen = 40 + 1
+		elif type == "MODES": linelen = 4 + 1
+		elif type == "ATTRS_PAX": linelen = 5 + 1
+		elif type == "ATTRS_CAPS": linelen = 16 + 1
+
+		if os.path.isfile(contents_file):
+			with open(contents_file, "r+") as f:
+				# splice out the fixed-width record at the 1-based index
+				pre = f.read((index - 1) * linelen)
+				f.read(linelen) # skip the record being removed
+				post = f.read()
+				f.seek(0, 0)
+				f.write(pre)
+				f.write(post)
+				f.truncate()
+
 	def writeContentsToContentsFile(self, pkg, new_contents, new_needed=None):
 		"""
 		@param pkg: package to write contents file for
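Removing one entry this way only works because every record in a given CONTENTS_* file has the same width, so the record at a 1-based index can be located by byte offset alone. A minimal standalone sketch of the same splice, against a hypothetical CONTENTS_MODES-style file of 5-byte records:

	linelen = 4 + 1	# four mode digits plus a newline, as in the patch
	index = 2	# 1-based position of the record to drop
	with open("CONTENTS_MODES.sample", "r+") as f:
		pre = f.read((index - 1) * linelen)	# records before the victim
		f.read(linelen)	# skip the record being removed
		post = f.read()	# everything after it
		f.seek(0, 0)
		f.write(pre)
		f.write(post)
		f.truncate()	# drop the now-duplicated tail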
@@ -1784,7 +1829,7 @@
 		if self.contentscache is not None:
 			return self.contentscache
 		contents_file = os.path.join(self.dbdir, "CONTENTS")
-		pkgfiles = {}
+		pkgfiles = OrderedDict()
 		try:
 			with io.open(_unicode_encode(contents_file,
 				encoding=_encodings['fs'], errors='strict'),
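The switch to OrderedDict is what makes the positional index used above meaningful: getcontents() must yield paths in the same order as the lines of CONTENTS, so that position N in CONTENTS corresponds to record N in each fixed-width metadata file. A toy illustration with hypothetical paths:

	from collections import OrderedDict

	# A plain dict (before Python 3.7) has arbitrary iteration order,
	# which would desynchronize the index from the metadata records.
	contents = OrderedDict()
	contents["/usr/bin/foo"] = ["obj"]
	contents["/usr/bin/bar"] = ["sym"]

	# 1-based record number, as computed in _unmerge_pkgfiles()
	index = list(contents).index("/usr/bin/bar") + 1
	assert index == 2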
@@ -3209,6 +3254,7 @@
 				preserve_paths.remove(f)
 				continue
 			new_contents[f_abs] = contents_entry
+			self.writeMetaData(f_abs)
 			obj_type = contents_entry[0]
 			showMessage(_(">>> needed    %s %s\n") % (obj_type, f_abs),
 				noiselevel=-1)
@@ -3216,6 +3262,7 @@
 			parent_dir = os.path.dirname(f_abs)
 			while len(parent_dir) > len(root):
 				new_contents[parent_dir] = ["dir"]
+				self.writeMetaData(parent_dir)
 				prev = parent_dir
 				parent_dir = os.path.dirname(parent_dir)
 				if prev == parent_dir:
@@ -3225,6 +3272,66 @@
 		outfile.close()
 		self._clear_contents_cache()
 
+	def writeMetaData(self, fname):
+		# use the same hash as treewalk() so the record is appended to
+		# the CONTENTS_DIGESTS_* file that was created at merge time
+		hashtype = self.settings.get("INTEGRITY_HASH", "SHA512").upper()
+		if hashtype != "SHA1" and hashtype != "SHA256":
+			hashtype = "SHA512"
+
+		mystat = os.lstat(fname)
+		mymode = mystat[stat.ST_MODE]
+
+		if stat.S_ISREG(mymode):
+			line_digest = portage.checksum.perform_checksum(fname, hashtype, 0)[0]+"\n"
+			line_mode = oct(mymode)[-4:]+"\n"
+
+			# default to zero-filled fields so both lines are defined even
+			# when the file carries neither xattr
+			line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
+			line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
+			for i in xattr.list(fname):
+				if i == "user.pax.flags":
+					line_attr_pax = _unicode_decode(xattr.get(fname, "user.pax.flags")).zfill(5)+"\n"
+				if i == "security.capability":
+					caps = xattr.get(fname, "security.capability") # Take the actual value from xattr
+					caps_int = int(struct.unpack("<IIIII",caps)[1]) # Grab the bitfield, as integer
+					line_attr_caps = '{0:x}'.format(caps_int).zfill(16)+"\n" # Convert to a 16-pos hex string
+		else: #DIR, LINK, FIFO, DEV
+			digest_length = 0
+			if hashtype == "SHA1":
+				digest_length = 40
+			elif hashtype == "SHA256":
+				digest_length = 64
+			elif hashtype == "SHA512":
+				digest_length = 128
+
+			line_digest = '{num:0{width}}\n'.format(num=0, width=digest_length)
+			line_mode = oct(mymode)[-4:]+"\n"
+			line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
+			line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
+
+		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_DIGESTS_"+hashtype)
+		if os.path.isfile(contents_file):
+			with open(contents_file, "r+") as f:
+				f.seek(0, 2)
+				f.write(line_digest)
+		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_MODES")
+		if os.path.isfile(contents_file):
+			with open(contents_file, "r+") as f:
+				f.seek(0, 2)
+				f.write(line_mode)
+		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_PAX")
+		if os.path.isfile(contents_file):
+			with open(contents_file, "r+") as f:
+				f.seek(0, 2)
+				f.write(line_attr_pax)
+		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_CAPS")
+		if os.path.isfile(contents_file):
+			with open(contents_file, "r+") as f:
+				f.seek(0, 2)
+				f.write(line_attr_caps)
+
 	def _find_unused_preserved_libs(self, unmerge_no_replacement):
 		"""
 		Find preserved libraries that don't have any consumers left.
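The security.capability value handled above is a packed vfs_cap_data structure; the patch unpacks five little-endian 32-bit words and records the low permitted-capabilities word as a 16-digit hex field. A standalone sketch of that decoding, using a made-up capability blob (the bytes below are illustrative, not read from a real file):

	import struct

	# VFS_CAP_REVISION_2 layout: magic/flags word, then two
	# (permitted, inheritable) pairs of 32-bit words.
	caps = struct.pack("<IIIII", 0x02000001, 0x00000400, 0, 0, 0)

	caps_int = int(struct.unpack("<IIIII", caps)[1])	# low permitted bits
	line_attr_caps = '{0:x}'.format(caps_int).zfill(16) + "\n"
	assert line_attr_caps == "0000000000000400\n"	# CAP_NET_BIND_SERVICE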
@@ -4546,6 +4654,10 @@
 
 		cfgfiledict_orig = cfgfiledict.copy()
 
+		hashtype = self.settings.get("INTEGRITY_HASH", "SHA512").upper()
+		if hashtype != "SHA1" and hashtype != "SHA256":
+			hashtype = "SHA512"
+
 		# open CONTENTS file (possibly overwriting old one) for recording
 		# Use atomic_ofstream for automatic coercion of raw bytes to
 		# unicode, in order to prevent TypeError when writing raw bytes
@@ -4556,6 +4670,46 @@
 			mode='w', encoding=_encodings['repo.content'],
 			errors='backslashreplace')
 
+		# open CONTENTS_DIGESTS file (possibly overwriting old one) for recording
+		# Use atomic_ofstream for automatic coercion of raw bytes to
+		# unicode, in order to prevent TypeError when writing raw bytes
+		# to TextIOWrapper with python2.
+		digfile = atomic_ofstream(_unicode_encode(
+			os.path.join(self.dbtmpdir, 'CONTENTS_DIGESTS_'+hashtype),
+			encoding=_encodings['fs'], errors='strict'),
+			mode='w', encoding=_encodings['repo.content'],
+			errors='backslashreplace')
+
+		# open CONTENTS_MODES file (possibly overwriting old one) for recording
+		# Use atomic_ofstream for automatic coercion of raw bytes to
+		# unicode, in order to prevent TypeError when writing raw bytes
+		# to TextIOWrapper with python2.
+		modfile = atomic_ofstream(_unicode_encode(
+			os.path.join(self.dbtmpdir, 'CONTENTS_MODES'),
+			encoding=_encodings['fs'], errors='strict'),
+			mode='w', encoding=_encodings['repo.content'],
+			errors='backslashreplace')
+
+		# open CONTENTS_ATTRS_PAX file (possibly overwriting old one) for recording
+		# Use atomic_ofstream for automatic coercion of raw bytes to
+		# unicode, in order to prevent TypeError when writing raw bytes
+		# to TextIOWrapper with python2.
+		paxfile = atomic_ofstream(_unicode_encode(
+			os.path.join(self.dbtmpdir, 'CONTENTS_ATTRS_PAX'),
+			encoding=_encodings['fs'], errors='strict'),
+			mode='w', encoding=_encodings['repo.content'],
+			errors='backslashreplace')
+
+		# open CONTENTS_ATTRS_CAPS file (possibly overwriting old one) for recording
+		# Use atomic_ofstream for automatic coercion of raw bytes to
+		# unicode, in order to prevent TypeError when writing raw bytes
+		# to TextIOWrapper with python2.
+		capfile = atomic_ofstream(_unicode_encode(
+			os.path.join(self.dbtmpdir, 'CONTENTS_ATTRS_CAPS'),
+			encoding=_encodings['fs'], errors='strict'),
+			mode='w', encoding=_encodings['repo.content'],
+			errors='backslashreplace')
+
 		# Don't bump mtimes on merge since some application require
 		# preservation of timestamps.  This means that the unmerge phase must
 		# check to see if file belongs to an installed instance in the same
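Every record written to these files is fixed-width: the digest column is 40, 64, or 128 hex characters for SHA1, SHA256, and SHA512 respectively, and non-regular files get a zero-filled placeholder of the same width, so the Nth record always starts at byte N*(width+1). A short sketch of the two line shapes:

	import hashlib

	digest_widths = {"SHA1": 40, "SHA256": 64, "SHA512": 128}
	hashtype = "SHA512"

	line_digest = hashlib.sha512(b"example contents").hexdigest() + "\n"
	line_placeholder = '{num:0{width}}\n'.format(num=0, width=digest_widths[hashtype])
	assert len(line_digest) == len(line_placeholder) == digest_widths[hashtype] + 1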
@@ -4568,7 +4722,7 @@
 
 		# we do a first merge; this will recurse through all files in our srcroot but also build up a
 		# "second hand" of symlinks to merge later
-		if self.mergeme(srcroot, destroot, outfile, secondhand,
+		if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, secondhand,
 			self.settings["EPREFIX"].lstrip(os.sep), cfgfiledict, mymtime):
 			return 1
 
@@ -4580,7 +4734,7 @@
 			# couldn't get merged will be added to thirdhand.
 
 			thirdhand = []
-			if self.mergeme(srcroot, destroot, outfile, thirdhand,
+			if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, thirdhand,
 				secondhand, cfgfiledict, mymtime):
 				return 1
 
@@ -4594,7 +4748,7 @@
 
 		if len(secondhand):
 			# force merge of remaining symlinks (broken or circular; oh well)
-			if self.mergeme(srcroot, destroot, outfile, None,
+			if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, None,
 				secondhand, cfgfiledict, mymtime):
 				return 1
 
@@ -4605,6 +4759,19 @@
 		outfile.flush()
 		outfile.close()
 
+		# flush and close the metadata files
+		digfile.flush()
+		digfile.close()
+
+		modfile.flush()
+		modfile.close()
+
+		paxfile.flush()
+		paxfile.close()
+
+		capfile.flush()
+		capfile.close()
+
 		# write out our collection of md5sums
 		if cfgfiledict != cfgfiledict_orig:
 			cfgfiledict.pop("IGNORE", None)
@@ -4616,7 +4786,7 @@
 
 		return os.EX_OK
 
-	def mergeme(self, srcroot, destroot, outfile, secondhand, stufftomerge, cfgfiledict, thismtime):
+	def mergeme(self, srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, secondhand, stufftomerge, cfgfiledict, thismtime):
 		"""
 
 		This function handles actual merging of the package contents to the livefs.
4628
		@type destroot: String (Path)
4798
		@type destroot: String (Path)
4629
		@param outfile: File to log operations to
4799
		@param outfile: File to log operations to
4630
		@type outfile: File Object
4800
		@type outfile: File Object
4801
		@param digfile: File to log digests to
4802
		@type digfile: File Object
4803
		@param modfile: File to log mode to
4804
		@type modfile: File Object
4805
		@param paxfile: File to log pax markings to
4806
		@type paxfile: File Object
4807
		@param capfile: File to log capabilities to
4808
		@type capfile: File Object
4809
		@param hashtype: Type of hash function to use, can be SHA1, SHA256 or SHA512
4810
		@type hashtype: String
4631
		@param secondhand: A set of items to merge in pass two (usually
4811
		@param secondhand: A set of items to merge in pass two (usually
4632
		or symlinks that point to non-existing files that may get merged later)
4812
		or symlinks that point to non-existing files that may get merged later)
4633
		@type secondhand: List
4813
		@type secondhand: List
@@ -4766,6 +4946,43 @@
 				# confmem rejected this update
 				zing = "---"
 
+			srcobj = srcroot+relative_path
+			destobj = destroot+relative_path
+
+			digest_length = 0
+			if hashtype == "SHA1":
+				digest_length = 40
+			elif hashtype == "SHA256":
+				digest_length = 64
+			elif hashtype == "SHA512":
+				digest_length = 128
+
+			if stat.S_ISREG(mymode):
+				line_digest = portage.checksum.perform_checksum(srcobj, hashtype, 0)[0]+"\n"
+				line_mode = oct(mymode)[-4:]+"\n"
+
+				# default to zero-filled fields so both lines are defined even
+				# when the file carries neither xattr
+				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
+				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
+				for i in xattr.list(srcobj):
+					if i == "user.pax.flags":
+						line_attr_pax = _unicode_decode(xattr.get(srcobj, "user.pax.flags")).zfill(5)+"\n"
+					if i == "security.capability":
+						caps = xattr.get(srcobj, "security.capability") # Take the actual value from xattr
+						caps_int = int(struct.unpack("<IIIII",caps)[1]) # Grab the bitfield, as integer
+						line_attr_caps = '{0:x}'.format(caps_int).zfill(16)+"\n" # Convert to a 16-pos hex string
+			else: #DIR, LINK, FIFO, DEV
+				line_digest = '{num:0{width}}\n'.format(num=0, width=digest_length)
+				line_mode = oct(mymode)[-4:]+"\n"
+				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
+				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
+
+			digfile.write(line_digest)
+			modfile.write(line_mode)
+			paxfile.write(line_attr_pax)
+			capfile.write(line_attr_caps)
+
 			if stat.S_ISLNK(mymode):
 				# we are merging a symbolic link
 				# Pass in the symlink target in order to bypass the
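Taken together, CONTENTS and the four fixed-width files give enough information to verify an installed file by its line position alone. A hedged end-to-end sketch of such a check (the vdb path and target below are hypothetical, and the naive CONTENTS parsing assumes paths without spaces; perform_checksum is the same helper the patch calls):

	import os
	from portage.checksum import perform_checksum

	vdbdir = "/var/db/pkg/sys-apps/example-1.0"	# hypothetical vdb entry
	target = "/usr/bin/example"

	with open(os.path.join(vdbdir, "CONTENTS")) as f:
		paths = [line.split()[1] for line in f if line.strip()]
	index = paths.index(target)	# 0-based line position

	linelen = 128 + 1	# SHA512 digest plus newline
	with open(os.path.join(vdbdir, "CONTENTS_DIGESTS_SHA512")) as f:
		f.seek(index * linelen)
		recorded = f.read(128)

	actual = perform_checksum(target, "SHA512", 0)[0]
	print("OK" if recorded == actual else "MISMATCH")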
