Gentoo's Bugzilla – Attachment 551410 Details for
Bug 605082 – Add metadata to CONTENTS_ files in vdb
[patch] Fix mistake in writeMetaData() definition
integrity.patch (text/plain), 12.59 KB, created by Sam on 2018-10-15 20:34:48 UTC
Description: Fix mistake in writeMetaData() definition
Filename: integrity.patch
MIME Type: text/plain
Creator: Sam
Created: 2018-10-15 20:34:48 UTC
Size: 12.59 KB
Flags: patch, obsolete
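
The patch keeps one fixed-width record per CONTENTS entry in side files next to CONTENTS in the vdb (CONTENTS_DIGESTS_<hash>, CONTENTS_MODES, CONTENTS_ATTRS_PAX, CONTENTS_ATTRS_CAPS), so the Nth record of each side file describes the Nth CONTENTS entry and can be located by byte offset alone. A minimal sketch of that record layout, assuming the widths used in the diff below; the read_record() helper is illustrative and not part of the patch:

# Illustrative sketch, not part of the attached patch: record widths
# follow the diff below (SHA512 digest = 128 hex chars, mode = 4 octal
# digits, PAX flags = 5 chars, caps = 16 hex chars, newline included).
import os

RECORD_WIDTHS = {
	"DIGESTS_SHA512": 128 + 1,
	"MODES": 4 + 1,
	"ATTRS_PAX": 5 + 1,
	"ATTRS_CAPS": 16 + 1,
}

def read_record(vdbdir, suffix, index):
	"""Return the record for the index-th CONTENTS entry (0-based)
	from CONTENTS_<suffix>, or None if the side file is missing."""
	path = os.path.join(vdbdir, "CONTENTS_" + suffix)
	if not os.path.isfile(path):
		return None
	linelen = RECORD_WIDTHS[suffix]
	with open(path, "r") as f:
		f.seek(index * linelen)  # fixed-width records, so seek directly
		return f.read(linelen).rstrip("\n")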
--- vartree.py
+++ vartree.py
@@ -77,6 +77,7 @@
 from _emerge.SpawnProcess import SpawnProcess
 from ._ContentsCaseSensitivityManager import ContentsCaseSensitivityManager
 
+from collections import OrderedDict
 import errno
 import fnmatch
 import gc
@@ -89,6 +90,7 @@
 import pwd
 import re
 import stat
+import struct
 import sys
 import tempfile
 import textwrap
@@ -1088,12 +1090,23 @@
 			else:
 				relative_filename = filename[root_len:]
 			contents_key = pkg._match_contents(relative_filename)
-			if contents_key:
+			index = -1
+			try:
+				index = list(new_contents).index(filename)+1
+			except ValueError:
+				print("List does not contain value")
+			if contents_key and index >= 0:
 				# It's possible for two different paths to refer to the same
 				# contents_key, due to directory symlinks. Therefore, pass a
 				# default value to pop, in order to avoid a KeyError which
 				# could otherwise be triggered (see bug #454400).
 				new_contents.pop(contents_key, None)
+				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA512")
+				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA1")
+				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA256")
+				self.removeFromContentsMeta(pkg.dbdir, index, "MODES")
+				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_PAX")
+				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_CAPS")
 				removed += 1
 
 		if removed:
@@ -1129,6 +1142,38 @@
 
 		self.writeContentsToContentsFile(pkg, new_contents, new_needed=new_needed)
 
+	def removeFromContentsMeta(self, vdbdir, index, type):
+		contents_file = ""
+		if (type in
+			{"DIGESTS_SHA512",
+			"DIGESTS_SHA256",
+			"DIGESTS_SHA1",
+			"MODES",
+			"ATTRS_PAX",
+			"ATTRS_CAPS"}):
+			contents_file = os.path.join(vdbdir, "CONTENTS_"+type)
+		else:
+			print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
+
+		if type == "DIGESTS_SHA512": linelen = 128+1 #including newline
+		elif type == "DIGESTS_SHA256": linelen = 64 + 1
+		elif type == "DIGESTS_SHA1": linelen = 40+1
+		elif type == "MODES": linelen = 4 + 1
+		elif type == "ATTRS_PAX": linelen = 5 + 1
+		elif type == "ATTRS_CAPS": linelen = 16 + 1
+		else:
+			print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
+
+		if os.path.isfile(contents_file):
+			with open(contents_file,"r+") as f:
+				pre = f.read((index-1)*linelen)
+				f.read(129)
+				post = f.read()
+				f.seek(0, 0)
+				f.write(pre)
+				f.write(post)
+				f.truncate()
+
 	def writeContentsToContentsFile(self, pkg, new_contents, new_needed=None):
 		"""
 		@param pkg: package to write contents file for
@@ -1784,7 +1829,7 @@
 		if self.contentscache is not None:
 			return self.contentscache
 		contents_file = os.path.join(self.dbdir, "CONTENTS")
-		pkgfiles = {}
+		pkgfiles = OrderedDict()
 		try:
 			with io.open(_unicode_encode(contents_file,
 				encoding=_encodings['fs'], errors='strict'),
@@ -3230,6 +3275,7 @@
 				preserve_paths.remove(f)
 				continue
 			new_contents[f_abs] = contents_entry
+			self.writeMetaData(f_abs)
 			obj_type = contents_entry[0]
 			showMessage(_(">>> needed %s %s\n") % (obj_type, f_abs),
 				noiselevel=-1)
@@ -3237,6 +3283,7 @@
 			parent_dir = os.path.dirname(f_abs)
 			while len(parent_dir) > len(root):
 				new_contents[parent_dir] = ["dir"]
+				self.writeMetaData(parent_dir)
 				prev = parent_dir
 				parent_dir = os.path.dirname(parent_dir)
 				if prev == parent_dir:
@@ -3246,6 +3293,67 @@
 		outfile.close()
 		self._clear_contents_cache()
 
+	def writeMetaData(self, fname):
+		hashtype = "SHA512"
+		if hashtype is None:
+			hashtype = "SHA512"
+		elif hashtype != "SHA1" and hashtype != "SHA256":
+			hashtype = "SHA512"
+
+		mystat = os.lstat(fname)
+		mymode = mystat[stat.ST_MODE]
+
+		if stat.S_ISREG(mymode):
+			line_digest = portage.checksum.perform_checksum(fname, hashtype, 0)[0]+"\n"
+			line_mode = oct(mymode)[-4:]+"\n"
+
+			attrlist = xattr.list(fname)
+			if len(attrlist)>0:
+				for i in attrlist:
+					if i == "user.pax.flags":
+						line_attr_pax = _unicode_decode(xattr.get(fname, "user.pax.flags")).zfill(5)+"\n"
+					if i == "security.capability":
+						caps = xattr.get(fname, "security.capability") # Take the actual value from xattr
+						caps_int = int(struct.unpack("<IIIII",caps)[1]) # Grab the bitfield, as integer
+						line_attr_caps = '{0:x}'.format(caps_int).zfill(16)+"\n" # Convert to a 16-pos hex string
+			else:
+				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
+				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
+		else: #DIR, LINK, FIFO, DEV
+			digest_length = 0
+			if hashtype == "SHA1":
+				digest_length = 40
+			elif hashtype == "SHA256":
+				digest_length = 64
+			elif hashtype == "SHA512":
+				digest_length = 128
+
+			line_digest = '{num:0{width}}\n'.format(num=0, width=digest_length)
+			line_mode = oct(mymode)[-4:]+"\n"
+			line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
+			line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
+
+		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_"+hashtype)
+		if os.path.isfile(contents_file):
+			with open(contents_file,"r+") as f:
+				f.seek(0,2)
+				f.write(line_digest)
+		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_MODES")
+		if os.path.isfile(contents_file):
+			with open(contents_file,"r+") as f:
+				f.seek(0,2)
+				f.write(line_mode)
+		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_PAX")
+		if os.path.isfile(contents_file):
+			with open(contents_file,"r+") as f:
+				f.seek(0,2)
+				f.write(line_attr_pax)
+		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_CAPS")
+		if os.path.isfile(contents_file):
+			with open(contents_file,"r+") as f:
+				f.seek(0,2)
+				f.write(line_attr_caps)
+
 	def _find_unused_preserved_libs(self, unmerge_no_replacement):
 		"""
 		Find preserved libraries that don't have any consumers left.
@@ -4567,6 +4675,12 @@
 
 		cfgfiledict_orig = cfgfiledict.copy()
 
+		hashtype = self.settings.get("INTEGRITY_HASH").upper()
+		if hashtype is None:
+			hashtype = "SHA512"
+		elif hashtype != "SHA1" and hashtype != "SHA256":
+			hashtype = "SHA512"
+
 		# open CONTENTS file (possibly overwriting old one) for recording
 		# Use atomic_ofstream for automatic coercion of raw bytes to
 		# unicode, in order to prevent TypeError when writing raw bytes
@@ -4577,6 +4691,46 @@
 			mode='w', encoding=_encodings['repo.content'],
 			errors='backslashreplace')
 
+		# open CONTENTS_DIGESTS file (possibly overwriting old one) for recording
+		# Use atomic_ofstream for automatic coercion of raw bytes to
+		# unicode, in order to prevent TypeError when writing raw bytes
+		# to TextIOWrapper with python2.
+		digfile = atomic_ofstream(_unicode_encode(
+			os.path.join(self.dbtmpdir, 'CONTENTS_DIGESTS_'+hashtype),
+			encoding=_encodings['fs'], errors='strict'),
+			mode='w', encoding=_encodings['repo.content'],
+			errors='backslashreplace')
+
+		# open CONTENTS_MODES file (possibly overwriting old one) for recording
+		# Use atomic_ofstream for automatic coercion of raw bytes to
+		# unicode, in order to prevent TypeError when writing raw bytes
+		# to TextIOWrapper with python2.
+		modfile = atomic_ofstream(_unicode_encode(
+			os.path.join(self.dbtmpdir, 'CONTENTS_MODES'),
+			encoding=_encodings['fs'], errors='strict'),
+			mode='w', encoding=_encodings['repo.content'],
+			errors='backslashreplace')
+
+		# open CONTENTS_ATTRS_PAX file (possibly overwriting old one) for recording
+		# Use atomic_ofstream for automatic coercion of raw bytes to
+		# unicode, in order to prevent TypeError when writing raw bytes
+		# to TextIOWrapper with python2.
+		paxfile = atomic_ofstream(_unicode_encode(
+			os.path.join(self.dbtmpdir, 'CONTENTS_ATTRS_PAX'),
+			encoding=_encodings['fs'], errors='strict'),
+			mode='w', encoding=_encodings['repo.content'],
+			errors='backslashreplace')
+
+		# open CONTENTS_ATTRS_CAPS file (possibly overwriting old one) for recording
+		# Use atomic_ofstream for automatic coercion of raw bytes to
+		# unicode, in order to prevent TypeError when writing raw bytes
+		# to TextIOWrapper with python2.
+		capfile = atomic_ofstream(_unicode_encode(
+			os.path.join(self.dbtmpdir, 'CONTENTS_ATTRS_CAPS'),
+			encoding=_encodings['fs'], errors='strict'),
+			mode='w', encoding=_encodings['repo.content'],
+			errors='backslashreplace')
+
 		# Don't bump mtimes on merge since some application require
 		# preservation of timestamps. This means that the unmerge phase must
 		# check to see if file belongs to an installed instance in the same
@@ -4589,7 +4743,7 @@
 
 		# we do a first merge; this will recurse through all files in our srcroot but also build up a
 		# "second hand" of symlinks to merge later
-		if self.mergeme(srcroot, destroot, outfile, secondhand,
+		if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, secondhand,
 			self.settings["EPREFIX"].lstrip(os.sep), cfgfiledict, mymtime):
 			return 1
 
@@ -4601,7 +4755,7 @@
 		# couldn't get merged will be added to thirdhand.
 
 		thirdhand = []
-		if self.mergeme(srcroot, destroot, outfile, thirdhand,
+		if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, thirdhand,
 			secondhand, cfgfiledict, mymtime):
 			return 1
 
@@ -4615,7 +4769,7 @@
 
 		if len(secondhand):
 			# force merge of remaining symlinks (broken or circular; oh well)
-			if self.mergeme(srcroot, destroot, outfile, None,
+			if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, None,
 				secondhand, cfgfiledict, mymtime):
 				return 1
 
@@ -4626,6 +4780,22 @@
 		outfile.flush()
 		outfile.close()
 
+		#if we opened it, close it
+		digfile.flush()
+		digfile.close()
+
+		#if we opened it, close it
+		modfile.flush()
+		modfile.close()
+
+		#if we opened it, close it
+		paxfile.flush()
+		paxfile.close()
+
+		#if we opened it, close it
+		capfile.flush()
+		capfile.close()
+
 		# write out our collection of md5sums
 		if cfgfiledict != cfgfiledict_orig:
 			cfgfiledict.pop("IGNORE", None)
@@ -4637,7 +4807,7 @@
 
 		return os.EX_OK
 
-	def mergeme(self, srcroot, destroot, outfile, secondhand, stufftomerge, cfgfiledict, thismtime):
+	def mergeme(self, srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, secondhand, stufftomerge, cfgfiledict, thismtime):
 		"""
 
 		This function handles actual merging of the package contents to the livefs.
@@ -4649,6 +4819,16 @@
 		@type destroot: String (Path)
 		@param outfile: File to log operations to
 		@type outfile: File Object
+		@param digfile: File to log digests to
+		@type digfile: File Object
+		@param modfile: File to log mode to
+		@type modfile: File Object
+		@param paxfile: File to log pax markings to
+		@type paxfile: File Object
+		@param capfile: File to log capabilities to
+		@type capfile: File Object
+		@param hashtype: Type of hash function to use, can be SHA1, SHA256 or SHA512
+		@type hashtype: String
 		@param secondhand: A set of items to merge in pass two (usually
 		or symlinks that point to non-existing files that may get merged later)
 		@type secondhand: List
@@ -4787,6 +4967,44 @@
 				# confmem rejected this update
 				zing = "---"
 
+			srcobj = srcroot+relative_path
+			destobj = destroot+relative_path
+
+			digest_length = 0
+			if hashtype == "SHA1":
+				digest_length = 40
+			elif hashtype == "SHA256":
+				digest_length = 64
+			elif hashtype == "SHA512":
+				digest_length = 128
+
+			if stat.S_ISREG(mymode):
+				line_digest = portage.checksum.perform_checksum(srcobj, hashtype, 0)[0]+"\n"
+				line_mode = oct(mymode)[-4:]+"\n"
+
+				attrlist = xattr.list(srcobj)
+				if len(attrlist)>0:
+					for i in attrlist:
+						if i == "user.pax.flags":
+							line_attr_pax = _unicode_decode(xattr.get(srcobj, "user.pax.flags")).zfill(5)+"\n"
+						if i == "security.capability":
+							caps = xattr.get(srcobj, "security.capability") # Take the actual value from xattr
+							caps_int = int(struct.unpack("<IIIII",caps)[1]) # Grab the bitfield, as integer
+							line_attr_caps = '{0:x}'.format(caps_int).zfill(16)+"\n" # Convert to a 16-pos hex string
+				else:
+					line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
+					line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
+			else: #DIR, LINK, FIFO, DEV
+				line_digest = '{num:0{width}}\n'.format(num=0, width=digest_length)
+				line_mode = oct(mymode)[-4:]+"\n"
+				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
+				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
+
+			digfile.write(line_digest)
+			modfile.write(line_mode)
+			paxfile.write(line_attr_pax)
+			capfile.write(line_attr_caps)
+
 			if stat.S_ISLNK(mymode):
 				# we are merging a symbolic link
 				# Pass in the symlink target in order to bypass the
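
Once a package is merged with this patch applied, the recorded digests can be cross-checked against the live filesystem. A rough usage sketch, assuming the CONTENTS/CONTENTS_DIGESTS_SHA512 layout created above; verify_digests() is illustrative, not part of portage, and its naive CONTENTS parsing assumes paths without spaces:

# Illustrative sketch, not part of the attached patch. Assumes one
# digest line per CONTENTS line, in the same order, as written above.
import os
from portage.checksum import perform_checksum

def verify_digests(vdbdir, hashtype="SHA512"):
	digests_path = os.path.join(vdbdir, "CONTENTS_DIGESTS_" + hashtype)
	with open(os.path.join(vdbdir, "CONTENTS")) as contents, \
			open(digests_path) as digests:
		for entry, recorded in zip(contents, digests):
			fields = entry.split()
			if fields[0] != "obj":  # only regular files carry a real digest
				continue
			path = fields[1]  # naive: breaks on paths containing spaces
			actual = perform_checksum(path, hashtype, 0)[0]
			if actual != recorded.strip():
				print("digest mismatch: %s" % path)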
Attachments on bug 605082: 460212 | 472158 | 472168 | 534598 | 534870 | 535372 | 542774 | 543736 | 544732 | 545162 | 545784 | 550998 | 551410