--- vartree.py
+++ vartree.py
@@ -89,6 +89,7 @@
 import pwd
 import re
 import stat
+import struct
 import sys
 import tempfile
 import textwrap
@@ -4546,6 +4547,12 @@
 		cfgfiledict_orig = cfgfiledict.copy()
 
+		# settings.get() returns None when INTEGRITY_HASH is unset; guard
+		hashtype = self.settings.get("INTEGRITY_HASH")
+		hashtype = "SHA512" if hashtype is None else hashtype.upper()
+		if hashtype not in ("SHA1", "SHA256", "SHA512"):
+			hashtype = "SHA512"
+
 		# open CONTENTS file (possibly overwriting old one) for recording
 		# Use atomic_ofstream for automatic coercion of raw bytes to
 		# unicode, in order to prevent TypeError when writing raw bytes
 		# to TextIOWrapper with python2.
@@ -4556,6 +4563,46 @@
 			mode='w', encoding=_encodings['repo.content'],
 			errors='backslashreplace')
 
+		# open CONTENTS_DIGESTS file (possibly overwriting old one) for recording
+		# Use atomic_ofstream for automatic coercion of raw bytes to
+		# unicode, in order to prevent TypeError when writing raw bytes
+		# to TextIOWrapper with python2.
+		digfile = atomic_ofstream(_unicode_encode(
+			os.path.join(self.dbtmpdir, 'CONTENTS_DIGESTS_'+hashtype),
+			encoding=_encodings['fs'], errors='strict'),
+			mode='w', encoding=_encodings['repo.content'],
+			errors='backslashreplace')
+
+		# open CONTENTS_MODES file (possibly overwriting old one) for recording
+		# Use atomic_ofstream for automatic coercion of raw bytes to
+		# unicode, in order to prevent TypeError when writing raw bytes
+		# to TextIOWrapper with python2.
+		modfile = atomic_ofstream(_unicode_encode(
+			os.path.join(self.dbtmpdir, 'CONTENTS_MODES'),
+			encoding=_encodings['fs'], errors='strict'),
+			mode='w', encoding=_encodings['repo.content'],
+			errors='backslashreplace')
+
+		# open CONTENTS_ATTRS_PAX file (possibly overwriting old one) for recording
+		# Use atomic_ofstream for automatic coercion of raw bytes to
+		# unicode, in order to prevent TypeError when writing raw bytes
+		# to TextIOWrapper with python2.
+ paxfile = atomic_ofstream(_unicode_encode( + os.path.join(self.dbtmpdir, 'CONTENTS_ATTRS_PAX'), + encoding=_encodings['fs'], errors='strict'), + mode='w', encoding=_encodings['repo.content'], + errors='backslashreplace') + + # open CONTENTS_ATTRS_CAPS file (possibly overwriting old one) for recording + # Use atomic_ofstream for automatic coercion of raw bytes to + # unicode, in order to prevent TypeError when writing raw bytes + # to TextIOWrapper with python2. + capfile = atomic_ofstream(_unicode_encode( + os.path.join(self.dbtmpdir, 'CONTENTS_ATTRS_CAPS'), + encoding=_encodings['fs'], errors='strict'), + mode='w', encoding=_encodings['repo.content'], + errors='backslashreplace') + # Don't bump mtimes on merge since some application require # preservation of timestamps. This means that the unmerge phase must # check to see if file belongs to an installed instance in the same @@ -4568,7 +4615,7 @@ # we do a first merge; this will recurse through all files in our srcroot but also build up a # "second hand" of symlinks to merge later - if self.mergeme(srcroot, destroot, outfile, secondhand, + if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, secondhand, self.settings["EPREFIX"].lstrip(os.sep), cfgfiledict, mymtime): return 1 @@ -4580,7 +4627,7 @@ # couldn't get merged will be added to thirdhand. 
thirdhand = [] - if self.mergeme(srcroot, destroot, outfile, thirdhand, + if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, thirdhand, secondhand, cfgfiledict, mymtime): return 1 @@ -4594,7 +4641,7 @@ if len(secondhand): # force merge of remaining symlinks (broken or circular; oh well) - if self.mergeme(srcroot, destroot, outfile, None, + if self.mergeme(srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, None, secondhand, cfgfiledict, mymtime): return 1 @@ -4605,6 +4652,22 @@ outfile.flush() outfile.close() + #if we opened it, close it + digfile.flush() + digfile.close() + + #if we opened it, close it + modfile.flush() + modfile.close() + + #if we opened it, close it + paxfile.flush() + paxfile.close() + + #if we opened it, close it + capfile.flush() + capfile.close() + # write out our collection of md5sums if cfgfiledict != cfgfiledict_orig: cfgfiledict.pop("IGNORE", None) @@ -4616,7 +4679,7 @@ return os.EX_OK - def mergeme(self, srcroot, destroot, outfile, secondhand, stufftomerge, cfgfiledict, thismtime): + def mergeme(self, srcroot, destroot, outfile, digfile, modfile, paxfile, capfile, hashtype, secondhand, stufftomerge, cfgfiledict, thismtime): """ This function handles actual merging of the package contents to the livefs. 
@@ -4628,6 +4691,16 @@ @type destroot: String (Path) @param outfile: File to log operations to @type outfile: File Object + @param digfile: File to log digests to + @type digfile: File Object + @param modfile: File to log mode to + @type modfile: File Object + @param paxfile: File to log pax markings to + @type paxfile: File Object + @param capfile: File to log capabilities to + @type capfile: File Object + @param hashtype: Type of hash function to use, can be SHA1, SHA256 or SHA512 + @type hashtype: String @param secondhand: A set of items to merge in pass two (usually or symlinks that point to non-existing files that may get merged later) @type secondhand: List @@ -4766,6 +4839,44 @@ # confmem rejected this update zing = "---" + srcobj = srcroot+relative_path + destobj = destroot+relative_path + + digest_length = 0 + if hashtype == "SHA1": + digest_length = 40 + elif hashtype == "SHA256": + digest_length = 64 + elif hashtype == "SHA512": + digest_length = 128 + + if stat.S_ISREG(mymode): + line_digest = portage.checksum.perform_checksum(srcobj, hashtype, 0)[0]+"\n" + line_mode = "mode:"+oct(mymode)[-4:]+"\n" + + attrlist = xattr.list(mydest) + if len(attrlist)>0: + for i in attrlist: + if i == "user.pax.flags": + line_attr_pax = _unicode_decode(xattr.get(mydest, "user.pax.flags")).zfill(5)+"\n" + if i == "security.capability": + caps = xattr.get(mydest, "security.capability") # Take the actual value from xattr + caps_int = int(struct.unpack("