Gentoo Websites Logo
Go to: Gentoo Home Documentation Forums Lists Bugs Planet Store Wiki Get Gentoo!
View | Details | Raw Unified | Return to bug 605082 | Differences between
and this patch

Collapse All | Expand All

(-)file_not_specified_in_diff (-7 / +225 lines)
Line  Link Here
0
-- vartree.py
0
++ vartree.py
Lines 77-82 Link Here
77
from _emerge.SpawnProcess import SpawnProcess
77
from _emerge.SpawnProcess import SpawnProcess
78
from ._ContentsCaseSensitivityManager import ContentsCaseSensitivityManager
78
from ._ContentsCaseSensitivityManager import ContentsCaseSensitivityManager
79
79
80
from collections import OrderedDict
80
import errno
81
import errno
81
import fnmatch
82
import fnmatch
82
import gc
83
import gc
Lines 89-94 Link Here
89
import pwd
90
import pwd
90
import re
91
import re
91
import stat
92
import stat
93
import struct
92
import sys
94
import sys
93
import tempfile
95
import tempfile
94
import textwrap
96
import textwrap
Lines 1088-1099 Link Here
1088
			else:
1090
			else:
1089
				relative_filename = filename[root_len:]
1091
				relative_filename = filename[root_len:]
1090
			contents_key = pkg._match_contents(relative_filename)
1092
			contents_key = pkg._match_contents(relative_filename)
1091
			if contents_key:
1093
			index = -1
1094
			try:
1095
				index = list(new_contents).index(filename)+1
1096
			except ValueError:
1097
				print("List does not contain value")
1098
			if contents_key and index >= 0:
1092
				# It's possible for two different paths to refer to the same
1099
				# It's possible for two different paths to refer to the same
1093
				# contents_key, due to directory symlinks. Therefore, pass a
1100
				# contents_key, due to directory symlinks. Therefore, pass a
1094
				# default value to pop, in order to avoid a KeyError which
1101
				# default value to pop, in order to avoid a KeyError which
1095
				# could otherwise be triggered (see bug #454400).
1102
				# could otherwise be triggered (see bug #454400).
1096
				new_contents.pop(contents_key, None)
1103
				new_contents.pop(contents_key, None)
1104
				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA512")
1105
				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA1")
1106
				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA256")
1107
				self.removeFromContentsMeta(pkg.dbdir, index, "MODES")
1108
				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_PAX")
1109
				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_CAPS")
1097
				removed += 1
1110
				removed += 1
1098
1111
1099
		if removed:
1112
		if removed:
Lines 1129-1134 Link Here
1129
1142
1130
			self.writeContentsToContentsFile(pkg, new_contents, new_needed=new_needed)
1143
			self.writeContentsToContentsFile(pkg, new_contents, new_needed=new_needed)
1131
1144
1145
	def removeFromContentsMeta(self, vdbdir, index, type):
		"""
		Remove the entry at the given 1-based index from the fixed-width
		CONTENTS_<type> metadata file in vdbdir, keeping it in sync with
		the CONTENTS file after an entry is dropped.

		Every line in these side files has a fixed length (payload plus
		newline), so the doomed entry is located by byte offset instead of
		by parsing.

		@param vdbdir: the package's vdb directory (holds the CONTENTS_* files)
		@type vdbdir: String (Path)
		@param index: 1-based position of the entry to remove
		@type index: Integer
		@param type: one of DIGESTS_SHA512, DIGESTS_SHA256, DIGESTS_SHA1,
			MODES, ATTRS_PAX or ATTRS_CAPS
		@type type: String
		"""
		# Fixed line length (payload + newline) for each metadata type.
		line_lengths = {
			"DIGESTS_SHA512": 128 + 1,
			"DIGESTS_SHA256": 64 + 1,
			"DIGESTS_SHA1": 40 + 1,
			"MODES": 4 + 1,
			"ATTRS_PAX": 5 + 1,
			"ATTRS_CAPS": 16 + 1,
		}
		if type not in line_lengths:
			# Best-effort, matching the original behavior of reporting and
			# carrying on; but bail out instead of crashing later on an
			# empty path / unbound line length.
			print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
			return
		linelen = line_lengths[type]
		contents_file = os.path.join(vdbdir, "CONTENTS_"+type)

		if os.path.isfile(contents_file):
			with open(contents_file, "r+") as f:
				# Keep everything before the entry, skip exactly one
				# fixed-width line, keep the rest. The original hardcoded
				# f.read(129) (the SHA512 line length) here, which removed
				# the wrong byte count from every non-SHA512 file.
				pre = f.read((index-1)*linelen)
				f.read(linelen)
				post = f.read()
				f.seek(0, 0)
				f.write(pre)
				f.write(post)
				f.truncate()
1176
1132
	def writeContentsToContentsFile(self, pkg, new_contents, new_needed=None):
1177
	def writeContentsToContentsFile(self, pkg, new_contents, new_needed=None):
1133
		"""
1178
		"""
1134
		@param pkg: package to write contents file for
1179
		@param pkg: package to write contents file for
Lines 1784-1790 Link Here
1784
		if self.contentscache is not None:
1829
		if self.contentscache is not None:
1785
			return self.contentscache
1830
			return self.contentscache
1786
		contents_file = os.path.join(self.dbdir, "CONTENTS")
1831
		contents_file = os.path.join(self.dbdir, "CONTENTS")
1787
		pkgfiles = {}
1832
		pkgfiles = OrderedDict()
1788
		try:
1833
		try:
1789
			with io.open(_unicode_encode(contents_file,
1834
			with io.open(_unicode_encode(contents_file,
1790
				encoding=_encodings['fs'], errors='strict'),
1835
				encoding=_encodings['fs'], errors='strict'),
Lines 3230-3235 Link Here
3230
				preserve_paths.remove(f)
3275
				preserve_paths.remove(f)
3231
				continue
3276
				continue
3232
			new_contents[f_abs] = contents_entry
3277
			new_contents[f_abs] = contents_entry
3278
			self.writeMetaData(f_abs)
3233
			obj_type = contents_entry[0]
3279
			obj_type = contents_entry[0]
3234
			showMessage(_(">>> needed    %s %s\n") % (obj_type, f_abs),
3280
			showMessage(_(">>> needed    %s %s\n") % (obj_type, f_abs),
3235
				noiselevel=-1)
3281
				noiselevel=-1)
Lines 3237-3242 Link Here
3237
			parent_dir = os.path.dirname(f_abs)
3283
			parent_dir = os.path.dirname(f_abs)
3238
			while len(parent_dir) > len(root):
3284
			while len(parent_dir) > len(root):
3239
				new_contents[parent_dir] = ["dir"]
3285
				new_contents[parent_dir] = ["dir"]
3286
				self.writeMetaData(parent_dir)
3240
				prev = parent_dir
3287
				prev = parent_dir
3241
				parent_dir = os.path.dirname(parent_dir)
3288
				parent_dir = os.path.dirname(parent_dir)
3242
				if prev == parent_dir:
3289
				if prev == parent_dir:
Lines 3246-3251 Link Here
3246
		outfile.close()
3293
		outfile.close()
3247
		self._clear_contents_cache()
3294
		self._clear_contents_cache()
3248
3295
3296
	def writeMetaData(self, fname):
3297
		hashtype = "SHA512"
3298
		if hashtype is None:
3299
			hashtype = "SHA512"
3300
		elif hashtype != "SHA1" and hashtype != "SHA256":
3301
			hashtype = "SHA512"
3302
3303
		mystat = os.lstat(fname)
3304
		mymode = mystat[stat.ST_MODE]
3305
3306
		if stat.S_ISREG(mymode):
3307
			line_digest = portage.checksum.perform_checksum(fname, hashtype, 0)[0]+"\n"
3308
			line_mode = oct(mymode)[-4:]+"\n"
3309
3310
			attrlist = xattr.list(fname)
3311
			if len(attrlist)>0:
3312
				for i in attrlist:
3313
					if i == "user.pax.flags":
3314
						line_attr_pax = _unicode_decode(xattr.get(fname, "user.pax.flags")).zfill(5)+"\n"
3315
					if i == "security.capability":
3316
						caps = xattr.get(fname, "security.capability") # Take the actual value from xattr
3317
						caps_int = int(struct.unpack("<IIIII",caps)[1]) # Grab the bitfield, as integer
3318
						line_attr_caps = '{0:x}'.format(caps_int).zfill(16)+"\n" # Convert to a 16-pos hex string
3319
			else:
3320
				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
3321
				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
3322
		else: #DIR, LINK, FIFO, DEV
3323
			digest_length = 0
3324
			if hashtype == "SHA1":
3325
				digest_length = 40
3326
			elif hashtype == "SHA256":
3327
				digest_length = 64
3328
			elif hashtype == "SHA512":
3329
				digest_length = 128
3330
3331
			line_digest = '{num:0{width}}\n'.format(num=0, width=digest_length)
3332
			line_mode = oct(mymode)[-4:]+"\n"
3333
			line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
3334
			line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
3335
3336
		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_"+hashtype)
3337
		if os.path.isfile(contents_file):
3338
			with open(contents_file,"r+") as f:
3339
				f.seek(0,2)
3340
				f.write(line_digest)
3341
		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_MODES")
3342
		if os.path.isfile(contents_file):
3343
			with open(contents_file,"r+") as f:
3344
				f.seek(0,2)
3345
				f.write(line_mode)
3346
		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_PAX")
3347
		if os.path.isfile(contents_file):
3348
			with open(contents_file,"r+") as f:
3349
				f.seek(0,2)
3350
				f.write(line_attr_pax)
3351
		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_CAPS")
3352
		if os.path.isfile(contents_file):
3353
			with open(contents_file,"r+") as f:
3354
				f.seek(0,2)
3355
				f.write(line_attr_caps)
3356
3249
	def _find_unused_preserved_libs(self, unmerge_no_replacement):
3357
	def _find_unused_preserved_libs(self, unmerge_no_replacement):
3250
		"""
3358
		"""
3251
		Find preserved libraries that don't have any consumers left.
3359
		Find preserved libraries that don't have any consumers left.
Lines 4567-4572 Link Here
4567
4675
4568
		cfgfiledict_orig = cfgfiledict.copy()
4676
		cfgfiledict_orig = cfgfiledict.copy()
4569
4677
4678
		hashtype = self.settings.get("INTEGRITY_HASH").upper()
4679
		if hashtype is None:
4680
			hashtype = "SHA512"
4681
		elif hashtype != "SHA1" and hashtype != "SHA256":
4682
			hashtype = "SHA512"
4683
4570
		# open CONTENTS file (possibly overwriting old one) for recording
4684
		# open CONTENTS file (possibly overwriting old one) for recording
4571
		# Use atomic_ofstream for automatic coercion of raw bytes to
4685
		# Use atomic_ofstream for automatic coercion of raw bytes to
4572
		# unicode, in order to prevent TypeError when writing raw bytes
4686
		# unicode, in order to prevent TypeError when writing raw bytes
Lines 4577-4582 Link Here
4577
			mode='w', encoding=_encodings['repo.content'],
4691
			mode='w', encoding=_encodings['repo.content'],
4578
			errors='backslashreplace')
4692
			errors='backslashreplace')
4579
4693
4694
		# open CONTENTS_DIGESTS file (possibly overwriting old one) for recording
4695
		# Use atomic_ofstream for automatic coercion of raw bytes to
4696
		# unicode, in order to prevent TypeError when writing raw bytes
4697
		# to TextIOWrapper with python2.
4698
		digfile = atomic_ofstream(_unicode_encode(
4699
			os.path.join(self.dbtmpdir, 'CONTENTS_DIGESTS_'+hashtype),
4700
			encoding=_encodings['fs'], errors='strict'),
4701
			mode='w', encoding=_encodings['repo.content'],
4702
			errors='backslashreplace')
4703
4704
		# open CONTENTS_MODES file (possibly overwriting old one) for recording
4705
		# Use atomic_ofstream for automatic coercion of raw bytes to
4706
		# unicode, in order to prevent TypeError when writing raw bytes
4707
		# to TextIOWrapper with python2.
4708
		modfile = atomic_ofstream(_unicode_encode(
4709
			os.path.join(self.dbtmpdir, 'CONTENTS_MODES'),
4710
			encoding=_encodings['fs'], errors='strict'),
4711
			mode='w', encoding=_encodings['repo.content'],
4712
			errors='backslashreplace')
4713
4714
		# open CONTENTS_ATTRS_PAX file (possibly overwriting old one) for recording
4715
		# Use atomic_ofstream for automatic coercion of raw bytes to
4716
		# unicode, in order to prevent TypeError when writing raw bytes
4717
		# to TextIOWrapper with python2.
4718
		paxfile = atomic_ofstream(_unicode_encode(
4719
			os.path.join(self.dbtmpdir, 'CONTENTS_ATTRS_PAX'),
4720
			encoding=_encodings['fs'], errors='strict'),
4721
			mode='w', encoding=_encodings['repo.content'],
4722
			errors='backslashreplace')
4723
4724
		# open CONTENTS_ATTRS_CAPS file (possibly overwriting old one) for recording
4725
		# Use atomic_ofstream for automatic coercion of raw bytes to
4726
		# unicode, in order to prevent TypeError when writing raw bytes
4727
		# to TextIOWrapper with python2.
4728
		capfile = atomic_ofstream(_unicode_encode(
4729
			os.path.join(self.dbtmpdir, 'CONTENTS_ATTRS_CAPS'),
4730
			encoding=_encodings['fs'], errors='strict'),
4731
			mode='w', encoding=_encodings['repo.content'],
4732
			errors='backslashreplace')
4733
4580
		# Don't bump mtimes on merge since some application require
4734
		# Don't bump mtimes on merge since some application require
4581
		# preservation of timestamps.  This means that the unmerge phase must
4735
		# preservation of timestamps.  This means that the unmerge phase must
4582
		# check to see if file belongs to an installed instance in the same
4736
		# check to see if file belongs to an installed instance in the same
Lines 4589-4595 Link Here
4589
4743
4590
		# we do a first merge; this will recurse through all files in our srcroot but also build up a
4744
		# we do a first merge; this will recurse through all files in our srcroot but also build up a
4591
		# "second hand" of symlinks to merge later
4745
		# "second hand" of symlinks to merge later
4592
		if self.mergeme(srcroot, destroot, outfile, secondhand,
4746
		if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, secondhand,
4593
			self.settings["EPREFIX"].lstrip(os.sep), cfgfiledict, mymtime):
4747
			self.settings["EPREFIX"].lstrip(os.sep), cfgfiledict, mymtime):
4594
			return 1
4748
			return 1
4595
4749
Lines 4601-4607 Link Here
4601
			# couldn't get merged will be added to thirdhand.
4755
			# couldn't get merged will be added to thirdhand.
4602
4756
4603
			thirdhand = []
4757
			thirdhand = []
4604
			if self.mergeme(srcroot, destroot, outfile, thirdhand,
4758
			if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, thirdhand,
4605
				secondhand, cfgfiledict, mymtime):
4759
				secondhand, cfgfiledict, mymtime):
4606
				return 1
4760
				return 1
4607
4761
Lines 4615-4621 Link Here
4615
4769
4616
		if len(secondhand):
4770
		if len(secondhand):
4617
			# force merge of remaining symlinks (broken or circular; oh well)
4771
			# force merge of remaining symlinks (broken or circular; oh well)
4618
			if self.mergeme(srcroot, destroot, outfile, None,
4772
			if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, None,
4619
				secondhand, cfgfiledict, mymtime):
4773
				secondhand, cfgfiledict, mymtime):
4620
				return 1
4774
				return 1
4621
4775
Lines 4626-4631 Link Here
4626
		outfile.flush()
4780
		outfile.flush()
4627
		outfile.close()
4781
		outfile.close()
4628
4782
4783
		#if we opened it, close it
4784
		digfile.flush()
4785
		digfile.close()
4786
4787
		#if we opened it, close it
4788
		modfile.flush()
4789
		modfile.close()
4790
4791
		#if we opened it, close it
4792
		paxfile.flush()
4793
		paxfile.close()
4794
4795
		#if we opened it, close it
4796
		capfile.flush()
4797
		capfile.close()
4798
4629
		# write out our collection of md5sums
4799
		# write out our collection of md5sums
4630
		if cfgfiledict != cfgfiledict_orig:
4800
		if cfgfiledict != cfgfiledict_orig:
4631
			cfgfiledict.pop("IGNORE", None)
4801
			cfgfiledict.pop("IGNORE", None)
Lines 4637-4643 Link Here
4637
4807
4638
		return os.EX_OK
4808
		return os.EX_OK
4639
4809
4640
	def mergeme(self, srcroot, destroot, outfile, secondhand, stufftomerge, cfgfiledict, thismtime):
4810
	def mergeme(self, srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, secondhand, stufftomerge, cfgfiledict, thismtime):
4641
		"""
4811
		"""
4642
4812
4643
		This function handles actual merging of the package contents to the livefs.
4813
		This function handles actual merging of the package contents to the livefs.
Lines 4649-4654 Link Here
4649
		@type destroot: String (Path)
4819
		@type destroot: String (Path)
4650
		@param outfile: File to log operations to
4820
		@param outfile: File to log operations to
4651
		@type outfile: File Object
4821
		@type outfile: File Object
4822
		@param digfile: File to log digests to
4823
		@type digfile: File Object
4824
		@param modfile: File to log mode to
4825
		@type modfile: File Object
4826
		@param paxfile: File to log pax markings to
4827
		@type paxfile: File Object
4828
		@param capfile: File to log capabilities to
4829
		@type capfile: File Object
4830
		@param hashtype: Type of hash function to use, can be SHA1, SHA256 or SHA512
4831
		@type hashtype: String
4652
		@param secondhand: A set of items to merge in pass two (usually
4832
		@param secondhand: A set of items to merge in pass two (usually
4653
		or symlinks that point to non-existing files that may get merged later)
4833
		or symlinks that point to non-existing files that may get merged later)
4654
		@type secondhand: List
4834
		@type secondhand: List
Lines 4787-4792 Link Here
4787
				# confmem rejected this update
4967
				# confmem rejected this update
4788
				zing = "---"
4968
				zing = "---"
4789
4969
4970
			srcobj = srcroot+relative_path
4971
			destobj = destroot+relative_path
4972
4973
			digest_length = 0
4974
			if hashtype == "SHA1":
4975
				digest_length = 40
4976
			elif hashtype == "SHA256":
4977
				digest_length = 64
4978
			elif hashtype == "SHA512":
4979
				digest_length = 128
4980
4981
			if stat.S_ISREG(mymode):
4982
				line_digest = portage.checksum.perform_checksum(srcobj, hashtype, 0)[0]+"\n"
4983
				line_mode = oct(mymode)[-4:]+"\n"
4984
4985
				attrlist = xattr.list(srcobj)
4986
				if len(attrlist)>0:
4987
					for i in attrlist:
4988
						if i == "user.pax.flags":
4989
							line_attr_pax = _unicode_decode(xattr.get(srcobj, "user.pax.flags")).zfill(5)+"\n"
4990
						if i == "security.capability":
4991
							caps = xattr.get(srcobj, "security.capability") # Take the actual value from xattr
4992
							caps_int = int(struct.unpack("<IIIII",caps)[1]) # Grab the bitfield, as integer
4993
							line_attr_caps = '{0:x}'.format(caps_int).zfill(16)+"\n" # Convert to a 16-pos hex string
4994
				else:
4995
					line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
4996
					line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
4997
			else: #DIR, LINK, FIFO, DEV
4998
				line_digest = '{num:0{width}}\n'.format(num=0, width=digest_length)
4999
				line_mode = oct(mymode)[-4:]+"\n"
5000
				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
5001
				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
5002
5003
			digfile.write(line_digest)
5004
			modfile.write(line_mode)
5005
			paxfile.write(line_attr_pax)
5006
			capfile.write(line_attr_caps)
5007
4790
			if stat.S_ISLNK(mymode):
5008
			if stat.S_ISLNK(mymode):
4791
				# we are merging a symbolic link
5009
				# we are merging a symbolic link
4792
				# Pass in the symlink target in order to bypass the
5010
				# Pass in the symlink target in order to bypass the

Return to bug 605082