--- vartree.py
+++ vartree.py
@@ -96,6 +96,7 @@
 import textwrap
 import time
 import warnings
+import operator
 
 try:
     import cPickle as pickle
@@ -202,6 +203,15 @@
         self._cached_counter = None
 
+        # CONTENTS_* metadata files and their fixed record lengths
+        # (payload characters plus the trailing newline). Fixed-width
+        # records let record i be addressed at byte offset i * linelen.
+        self._content_files = [
+            ("CONTENTS_DIGESTS_SHA512", 128 + 1),
+            ("CONTENTS_DIGESTS_SHA1", 40 + 1),
+            ("CONTENTS_DIGESTS_SHA256", 64 + 1),
+            ("CONTENTS_MODES", 4 + 1),
+            ("CONTENTS_ATTRS_PAX", 5 + 1),
+            ("CONTENTS_ATTRS_CAPS", 16 + 1)
+        ]
+
     @property
     def writable(self):
         """
@@ -1069,18 +1079,27 @@
     def removeFromContents(self, pkg, paths, relative_paths=True):
         """
+        Remove installed files from the CONTENTS and CONTENTS_* metadata
+        files of an installed package. This is typically done during an
+        unmerge, so that files still needed by other packages (e.g.
+        preserved libraries) are spared from removal.
+
         @param pkg: cpv for an installed package
         @type pkg: string
         @param paths: paths of files to remove from contents
         @type paths: iterable
         """
+
+        # Grab a copy of contents and its metadata.
         if not hasattr(pkg, "getcontents"):
             pkg = self._dblink(pkg)
         root = self.settings['ROOT']
         root_len = len(root) - 1
         new_contents = pkg.getcontents().copy()
+        new_contents_metadata = pkg.getContentsMetadata().copy()
         removed = 0
 
+        # Drop the removed files from contents and its metadata.
         for filename in paths:
             filename = _unicode_decode(filename,
                 encoding=_encodings['content'], errors='strict')
@@ -1090,28 +1109,18 @@
             else:
                 relative_filename = filename[root_len:]
             contents_key = pkg._match_contents(relative_filename)
-            index = -1
-            try:
-                index = list(new_contents).index(filename)+1
-            except ValueError:
-                print("List does not contain value")
-            if contents_key and index >= 0:
+            if contents_key:
                 # It's possible for two different paths to refer to the same
                 # contents_key, due to directory symlinks. Therefore, pass a
                 # default value to pop, in order to avoid a KeyError which
                 # could otherwise be triggered (see bug #454400).
                 new_contents.pop(contents_key, None)
-                self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA512")
-                self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA1")
-                self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA256")
-                self.removeFromContentsMeta(pkg.dbdir, index, "MODES")
-                self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_PAX")
-                self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_CAPS")
+                new_contents_metadata.pop(contents_key, None)
                 removed += 1
 
         if removed:
             # Also remove corresponding NEEDED lines, so that they do
             # not corrupt LinkageMap data for preserve-libs.
             needed_filename = os.path.join(pkg.dbdir, LinkageMap._needed_aux_key)
             new_needed = None
             try:
@@ -1140,62 +1149,8 @@
                     if filename in new_contents:
                         new_needed.append(entry)
 
-            self.writeContentsToContentsFile(pkg, new_contents,
-                new_needed=new_needed)
+            # Write the new contents files and clear the CONTENTS cache.
+            pkg.writeContentsToContentsFile(new_contents,
+                new_contents_metadata, new_needed=new_needed)
-
-    def removeFromContentsMeta(self, vdbdir, index, type):
-        contents_file = ""
-        if (type in
-            {"DIGESTS_SHA512",
-            "DIGESTS_SHA256",
-            "DIGESTS_SHA1",
-            "MODES",
-            "ATTRS_PAX",
-            "ATTRS_CAPS"}):
-            contents_file = os.path.join(vdbdir, "CONTENTS_"+type)
-        else:
-            print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
-
-        if type == "DIGESTS_SHA512": linelen = 128+1 #including newline
-        elif type == "DIGESTS_SHA256": linelen = 64+1
-        elif type == "DIGESTS_SHA1": linelen = 40+1
-        elif type == "MODES": linelen = 4+1
-        elif type == "ATTRS_PAX": linelen = 5+1
-        elif type == "ATTRS_CAPS": linelen = 16+1
-        else:
-            print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
-
-        if os.path.isfile(contents_file):
-            with open(contents_file,"r+") as f:
-                pre = f.read((index-1)*linelen)
-                f.read(129)
-                post = f.read()
-                f.seek(0, 0)
-                f.write(pre)
-                f.write(post)
-                f.truncate()
-
-    def writeContentsToContentsFile(self, pkg, new_contents, new_needed=None):
-        """
-        @param pkg: package to write contents file for
-        @type pkg: dblink
-        @param new_contents: contents to write to CONTENTS file
-        @type new_contents: contents dictionary of the form
-            {u'/path/to/file' : (contents_attribute 1, ...), ...}
-        @param new_needed: new NEEDED entries
-        @type new_needed: list of NeededEntry
-        """
-        root = self.settings['ROOT']
-        self._bump_mtime(pkg.mycpv)
-        if new_needed is not None:
-            f = atomic_ofstream(os.path.join(pkg.dbdir, LinkageMap._needed_aux_key))
-            for entry in new_needed:
-                f.write(_unicode(entry))
-            f.close()
-        f = atomic_ofstream(os.path.join(pkg.dbdir, "CONTENTS"))
-        write_contents(new_contents, root, f)
-        f.close()
-        self._bump_mtime(pkg.mycpv)
-        pkg._clear_contents_cache()
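
The removal path above no longer splices individual records out of the CONTENTS_* files one seek at a time; it prunes a full in-memory copy of the metadata and rewrites the files in one transaction. The fixed-width record layout is what keeps the per-entry reads cheap in the first place. A minimal stand-alone sketch of that layout follows; read_record and the example path are illustrative, not portage API:

    import io

    def read_record(path, index, linelen):
        """Return fixed-width record `index` from `path`, or None past EOF."""
        with io.open(path, "rb") as f:
            f.seek(index * linelen)   # record i starts at byte i * linelen
            data = f.read(linelen)
        return data.decode().rstrip("\n") if data else None

    # A SHA512 digest line is 128 hex chars plus "\n", hence the 128 + 1
    # record length in _content_files:
    # read_record("/var/db/pkg/cat/pkg-1.0/CONTENTS_DIGESTS_SHA512", 3, 128 + 1)
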
 
 class _owners_cache(object):
     """
@@ -1657,6 +1612,8 @@
         self.myroot = self.settings['ROOT']
         self._installed_instance = None
         self.contentscache = None
+        self.contents_index_cache = None
+        self.contents_metadata_cache = None
         self._contents_inodes = None
         self._contents_basenames = None
         self._linkmap_broken = False
@@ -1673,6 +1630,15 @@
         self._contents = ContentsCaseSensitivityManager(self)
         self._slot_locks = []
 
+        # Same CONTENTS_* record table as vardbapi._content_files; see
+        # that definition for the meaning of the record lengths.
+        self._content_files = [
+            ("CONTENTS_DIGESTS_SHA512", 128 + 1),
+            ("CONTENTS_DIGESTS_SHA1", 40 + 1),
+            ("CONTENTS_DIGESTS_SHA256", 64 + 1),
+            ("CONTENTS_MODES", 4 + 1),
+            ("CONTENTS_ATTRS_PAX", 5 + 1),
+            ("CONTENTS_ATTRS_CAPS", 16 + 1)
+        ]
+
     def __hash__(self):
         return hash(self._hash_key)
 
@@ -1818,6 +1784,8 @@
     def _clear_contents_cache(self):
         self.contentscache = None
+        self.contents_index_cache = None
+        self.contents_metadata_cache = None
         self._contents_inodes = None
         self._contents_basenames = None
         self._contents.clear_cache()
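
The new caches come in pairs: contentscache keeps the parsed entries, while contents_index_cache additionally remembers each entry's line number, which getContentsMetadata() later multiplies by the record length to seek into the CONTENTS_* files. A simplified, hypothetical parser showing the shape of the two dictionaries (the real parser below is regex-based and handles symlinks, spaces in paths, and more):

    from collections import OrderedDict

    def parse_contents(lines):
        entries = OrderedDict()   # path -> entry data
        indices = OrderedDict()   # path -> (zero-based line number, entry data)
        for pos, line in enumerate(lines):
            fields = line.rstrip("\n").split(" ")
            entry_type, path = fields[0], fields[1]
            data = tuple([entry_type] + fields[2:])
            entries[path] = data
            indices[path] = (pos, data)
        return entries, indices

    entries, indices = parse_contents([
        "dir /usr\n",
        "obj /usr/bin/foo d41d8cd98f00b204e9800998ecf8427e 1700000000\n",
    ])
    # indices["/usr/bin/foo"][0] == 1, the record to seek to in CONTENTS_*
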
@@ -1828,8 +1796,11 @@
         """
         if self.contentscache is not None:
             return self.contentscache
+
         contents_file = os.path.join(self.dbdir, "CONTENTS")
         pkgfiles = OrderedDict()
+        pkgfiles_indices = OrderedDict()
+
         try:
             with io.open(_unicode_encode(contents_file,
                 encoding=_encodings['fs'], errors='strict'),
@@ -1849,27 +1820,31 @@
         obj_index = contents_re.groupindex['obj']
         dir_index = contents_re.groupindex['dir']
         sym_index = contents_re.groupindex['sym']
+        # The old symlink format may exist on systems that have packages
+        # which were installed many years ago (see bug #351814).
         oldsym_index = contents_re.groupindex['oldsym']
+        # CONTENTS files already contain EPREFIX.
         myroot = self.settings['ROOT']
         if myroot == os.path.sep:
             myroot = None
+        # Used to generate parent dir entries.
         dir_entry = ("dir",)
         eroot_split_len = len(self.settings["EROOT"].split(os.sep)) - 1
+        pos = 0
         errors = []
         for pos, line in enumerate(mylines):
             if null_byte in line:
                 # Null bytes are a common indication of corruption.
-                errors.append((pos + 1, _("Null byte found in CONTENTS entry")))
+                errors.append((pos + 1,
+                    _("Null byte found in CONTENTS entry: %s") % line))
                 continue
             line = line.rstrip("\n")
             m = contents_re.match(line)
             if m is None:
-                errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
+                errors.append((pos + 1,
                    _("Unrecognized CONTENTS entry: %s") % line))
                 continue
 
             if m.group(obj_index) is not None:
@@ -1914,17 +1889,67 @@
                 if parent in pkgfiles:
                     break
                 pkgfiles[parent] = dir_entry
+                pkgfiles_indices[parent] = (pos, dir_entry)
                 path_split.pop()
             pkgfiles[path] = data
+            pkgfiles_indices[path] = (pos, data)
 
         if errors:
             writemsg(_("!!! Parse error in '%s'\n") % contents_file, noiselevel=-1)
             for pos, e in errors:
                 writemsg(_("!!! line %d: %s\n") % (pos, e), noiselevel=-1)
         self.contentscache = pkgfiles
+        self.contents_index_cache = pkgfiles_indices
         return pkgfiles
 
+    def getContentsIndices(self):
+        """
+        Like getcontents(), but return an OrderedDict that maps each
+        installed path to a (line_index, contents_entry) tuple.
+        """
+        if self.contents_index_cache is None:
+            # getcontents() fills contents_index_cache as a side effect.
+            self.getcontents()
+        if self.contents_index_cache is None:
+            self._display_merge(_("!!! FAILED couldn't get cached contents index\n"),
+                level=logging.ERROR, noiselevel=-1)
+            return None
+        return self.contents_index_cache
+
+    def getContentsMetadata(self):
+        """
+        Get the metadata of the installed files of this package:
+        - iterate over the entries returned by getContentsIndices()
+        - for each CONTENTS_* file type, seek to index * linelen and
+          read the fixed-width record belonging to that entry
+        - collect the records into an OrderedDict of the form
+          {path : {CONTENTS_* type : value, ...}, ...} and return it
+        """
+        if self.contents_metadata_cache is not None:
+            return self.contents_metadata_cache
+
+        if self.contents_index_cache is None:
+            self.getcontents()
+
+        vdbdir = self.dbdir
+        contents_metadata = OrderedDict()
+
+        for filename, (index, _entry) in self.contents_index_cache.copy().items():
+            contents_metadata[filename] = OrderedDict()
+            for ftype, linelen in self._content_files:
+                contents_fname = os.path.join(vdbdir, ftype)
+                if not os.path.isfile(contents_fname):
+                    continue
+
+                with open(contents_fname, "rb") as f:
+                    f.seek(index * linelen)  # skip to the right record
+                    value = f.read(linelen).decode()  # read one fixed-width line
+                contents_metadata[filename][ftype] = value
+
+        self.contents_metadata_cache = contents_metadata
+        return contents_metadata
+
     def _prune_plib_registry(self, unmerge=False,
         needed=None, preserve_paths=None):
         # remove preserved libraries that don't have any consumers left
@@ -2315,6 +2340,19 @@
         self._display_merge("%s %s %s %s\n" % \
             (zing, desc.ljust(8), file_type, file_name))
 
+    def aux_update_pkg(self, values):
+        """
+        Write the given {filename: value} pairs into this package's vdb
+        directory, replacing each file wholesale. An empty value removes
+        the corresponding file.
+        """
+        self.vartree.dbapi._bump_mtime(self.mycpv)
+        self.vartree.dbapi._clear_pkg_cache(self)
+        for k, v in values.items():
+            if v:
+                self.setfile(k, v)
+            else:
+                try:
+                    os.unlink(os.path.join(self.vartree.dbapi.getpath(self.mycpv), k))
+                except EnvironmentError:
+                    pass
+        self.vartree.dbapi._bump_mtime(self.mycpv)
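
A short usage sketch of aux_update_pkg() as defined above: keys are file names relative to the package's vdb directory, values replace the file wholesale, and an empty value deletes the file. The package and values are illustrative; `pkg` stands for a dblink instance:

    pkg.aux_update_pkg({
        "CONTENTS_MODES": "0755\n0644\n",  # rewrite this metadata file
        "CONTENTS_ATTRS_PAX": "",          # remove this one, if present
    })
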
 
     def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
         """
@@ -2916,6 +2954,268 @@
         for parent in sorted(set(recursive_parents)):
             dirs.append((parent, revisit.pop(parent)))
 
+    def startContentsUpdate(self):
+        """Begin a transactional update of this package's contents files."""
+        vdbdir = self.dbdir
+        contents_dir = os.path.join(vdbdir, "contents.d")
+        transaction_dir = os.path.join(vdbdir, "contents.d~")
+        manifest_lines = ""
+
+        # Clean up a previously unfinished transaction (this shouldn't
+        # occur, but might).
+        if os.path.isdir(transaction_dir):
+            shutil.rmtree(transaction_dir)
+        if os.path.isdir(contents_dir):
+            shutil.rmtree(contents_dir)
+
+        # Set up the transaction.
+        os.mkdir(transaction_dir, 0o755)
+        files = [
+            "NEEDED.ELF.2",
+            "CONTENTS",
+            "CONTENTS_DIGESTS_SHA1",
+            "CONTENTS_DIGESTS_SHA256",
+            "CONTENTS_DIGESTS_SHA512",
+            "CONTENTS_MODES",
+            "CONTENTS_ATTRS_PAX",
+            "CONTENTS_ATTRS_CAPS"
+        ]
+        for f in files:
+            fname_src = os.path.join(vdbdir, f)
+            fname_dest = os.path.join(transaction_dir, f)
+
+            # Gracefully handle non-existent files.
+            if os.path.isfile(fname_src):
+                shutil.copy2(fname_src, fname_dest)
+                manifest_lines += f + "\n"
+                manifest_lines += portage.checksum.perform_checksum(
+                    fname_src, "SHA1", 0)[0] + "\n"
+
+        # Create the (still empty) contents_dir and record the Manifest
+        # of the transaction.
+        os.mkdir(contents_dir, 0o755)
+        self.aux_update_pkg({os.path.join("contents.d", "Manifest"): manifest_lines})
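
startContentsUpdate() records the pre-transaction state in a Manifest whose lines alternate between file name and SHA1 digest. A stand-alone parser for that layout (the function name is illustrative):

    def parse_transaction_manifest(text):
        lines = text.splitlines()
        # Even lines are file names, odd lines the SHA1 hex digests.
        return dict(zip(lines[0::2], lines[1::2]))

    manifest = parse_transaction_manifest(
        "CONTENTS\nda39a3ee5e6b4b0d3255bfef95601890afd80709\n")
    # manifest == {"CONTENTS": "da39a3ee5e6b4b0d3255bfef95601890afd80709"}
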
+    def stopContentsUpdate(self, vdbdir):
+        """Verify and atomically commit a contents transaction."""
+        contents_dir = os.path.join(vdbdir, "contents.d")
+        transaction_dir = os.path.join(vdbdir, "contents.d~")
+        digests = []
+        transaction_files = []
+
+        if not os.path.isdir(transaction_dir):
+            self._display_merge(_("!!! FAILED missing transaction dir "
+                "during contents update in:\n\t") + str(vdbdir) + "\n",
+                level=logging.ERROR, noiselevel=-1)
+            sys.exit(1)
+
+        # Read the Manifest file of the transaction.
+        manifest_file = os.path.join(contents_dir, "Manifest")
+        if os.path.isfile(manifest_file):
+            with open(manifest_file, "r") as f:
+                lines = f.read().splitlines()
+
+            # Even lines hold file names, odd lines the SHA1 digests.
+            for i, line in enumerate(lines):
+                if (i % 2) == 0:
+                    transaction_files.append(line)
+                else:
+                    digests.append(line)
+        else:
+            self._display_merge(_("!!! FAILED reading Manifest of transaction "
+                "during contents update in:\n\t") + str(vdbdir) + "\n",
+                level=logging.ERROR, noiselevel=-1)
+            sys.exit(1)
+
+        # Check the Manifest against the transaction_dir.
+        for f in transaction_files:
+            fname = os.path.join(transaction_dir, f)
+            if not os.path.isfile(fname):
+                self._display_merge(_("!!! FAILED Manifest of transaction "
+                    "contained non-existing file ") + str(fname) +
+                    _(" during contents update in:\n\t") + str(vdbdir) + "\n",
+                    level=logging.ERROR, noiselevel=-1)
+                sys.exit(1)
+
+        # Check the transaction_dir against the Manifest.
+        for f in os.listdir(transaction_dir):
+            if f not in transaction_files:
+                self._display_merge(_("!!! FAILED found file ") + str(f) +
+                    _(" in transaction_dir that wasn't recorded in the "
+                    "Manifest of the transaction during contents update "
+                    "in:\n\t") + str(vdbdir) + "\n",
+                    level=logging.ERROR, noiselevel=-1)
+                sys.exit(1)
+
+        # Populate contents_dir with hard links to the current (pre-
+        # transaction) vdbdir files, verifying each against the digest
+        # recorded at transaction start.
+        for i, f in enumerate(transaction_files):
+            fname_src = os.path.join(vdbdir, f)
+            fname_dest = os.path.join(contents_dir, f)
+
+            # Gracefully handle non-existent files.
+            if os.path.isfile(fname_src):
+                if portage.checksum.perform_checksum(fname_src, "SHA1", 0)[0] != digests[i]:
+                    self._display_merge(_("!!! FAILED according to the Manifest "
+                        "of the transaction, file ") + str(fname_src) +
+                        _(" in vdbdir was modified during contents update "
+                        "in:\n\t") + str(vdbdir) + "\n",
+                        level=logging.ERROR, noiselevel=-1)
+                    sys.exit(1)
+                else:
+                    os.link(fname_src, fname_dest)
+            else:
+                self._display_merge(_("!!! FAILED file in Manifest of transaction "
+                    "no longer found in vdbdir ") + str(f) +
+                    _(" during contents update in:\n\t") + str(vdbdir) + "\n",
+                    level=logging.ERROR, noiselevel=-1)
+                sys.exit(1)
+
+        # Sync the contents_dir and transaction_dir files to disk.
+        if platform.system() == "Linux":
+            paths = []
+            for f in os.listdir(transaction_dir):
+                paths.append(os.path.join(transaction_dir, f))
+            for f in os.listdir(contents_dir):
+                paths.append(os.path.join(contents_dir, f))
+            paths = tuple(paths)
+
+            proc = SyncfsProcess(paths=paths,
+                scheduler=(
+                    SchedulerInterface(portage._internal_caller and
                    global_event_loop() or EventLoop(main=False))
+                ))
+
+            proc.start()
+            proc.wait()
+
+        # Publish the new files: link each transaction file into vdbdir
+        # under a temporary name.
+        for f in transaction_files:
+            fname_src = os.path.join(transaction_dir, f)
+            fname_dest = os.path.join(vdbdir, f + "~")
+
+            # Gracefully handle non-existent files.
+            if os.path.isfile(fname_src):
+                os.link(fname_src, fname_dest)
+            else:
+                self._display_merge(_("!!! FAILED Manifest of transaction "
+                    "contains a file that no longer exists ") + str(f) +
+                    _(" during contents update in:\n\t") + str(vdbdir) + "\n",
+                    level=logging.ERROR, noiselevel=-1)
+                sys.exit(1)
+
+        # Sync the temporary names to disk.
+        if platform.system() == "Linux":
+            paths = []
+            for f in transaction_files:
+                # Gracefully handle non-existent files.
+                if os.path.isfile(os.path.join(vdbdir, f + "~")):
+                    paths.append(os.path.join(vdbdir, f + "~"))
+                else:
+                    self._display_merge(_("!!! FAILED Manifest of transaction "
+                        "contains a file that no longer exists ") + str(f) +
+                        _(" during contents update in:\n\t") + str(vdbdir) + "\n",
+                        level=logging.ERROR, noiselevel=-1)
+                    sys.exit(1)
+            paths = tuple(paths)
+
+            proc = SyncfsProcess(paths=paths,
+                scheduler=(
+                    SchedulerInterface(portage._internal_caller and
+                    global_event_loop() or EventLoop(main=False))
+                ))
+
+            proc.start()
+            proc.wait()
+
+        # Rename each temporary name over the live file. rename() is
+        # atomic and doesn't require an extra sync.
+        for f in transaction_files:
+            fname_src = os.path.join(vdbdir, f + "~")
+            fname_dest = os.path.join(vdbdir, f)
+
+            # Gracefully handle non-existent files.
+            if os.path.isfile(fname_src):
+                os.rename(fname_src, fname_dest)
+            else:
+                self._display_merge(_("!!! FAILED Manifest of transaction "
+                    "contains a file that no longer exists ") + str(f) +
+                    _(" during contents update in:\n\t") + str(vdbdir) + "\n",
+                    level=logging.ERROR, noiselevel=-1)
+                sys.exit(1)
+
+        # Clean up the transaction (order matters for roll-back).
+        shutil.rmtree(contents_dir)
+        shutil.rmtree(transaction_dir)
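
The commit path above relies on the usual link-then-rename idiom: each verified file is first hard-linked next to its live name, synced, and then rename()d over it, which POSIX guarantees to be atomic on the same filesystem. A generic sketch of the idiom (not portage code; assumes the temporary name does not already exist):

    import os

    def atomic_publish(src, dest):
        tmp = dest + "~"
        os.link(src, tmp)     # new name, same inode as the verified source
        os.rename(tmp, dest)  # atomically replace the live file
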
+    def abortContentsUpdate(self):
+        # As this is an abort, we roll back. Figure out, given the
+        # current state, how to do that.
+        vdbdir = self.dbdir
+        contents_dir = os.path.join(vdbdir, "contents.d")
+        transaction_dir = os.path.join(vdbdir, "contents.d~")
+
+        if os.path.isdir(transaction_dir):
+            # The transaction_dir exists, so the transaction might still
+            # have been in progress; its contents can't be trusted, and
+            # the live vdbdir files haven't been replaced yet.
+            shutil.rmtree(transaction_dir)
+
+            # The contents_dir might exist as well, so clean it too.
+            if os.path.isdir(contents_dir):
+                shutil.rmtree(contents_dir)
+        elif os.path.isdir(contents_dir):
+            # This shouldn't occur: a contents_dir without its
+            # transaction_dir.
+            shutil.rmtree(contents_dir)
+            self._display_merge(_("!!! FAILED please file a bug "
+                "describing this situation\n"),
+                level=logging.ERROR, noiselevel=-1)
+            sys.exit(1)
+
+    def writeContentsToContentsFile(self, new_contents, new_contents_metadata, new_needed=None):
+        """
+        @param new_contents: contents to write to CONTENTS file
+        @type new_contents: contents dictionary of the form
+            {u'/path/to/file' : (contents_attribute 1, ...), ...}
+        @param new_contents_metadata: metadata to write to the CONTENTS_* files
+        @type new_contents_metadata: dictionary of the form
+            {u'/path/to/file' : {CONTENTS_* type : value, ...}, ...}
+        @param new_needed: new NEEDED entries
+        @type new_needed: list of NeededEntry
+        """
+        # This method:
+        # - sets up a write-ahead transaction
+        # - writes the new file bodies into the transaction dir via
+        #   aux_update_pkg()
+        # - commits the transaction
+        # - rolls back if an error occurs along the way
+        try:
+            self.startContentsUpdate()
+
+            new_needed_str = ""
+            if new_needed is not None:
+                new_needed_str = ''.join(_unicode(e) for e in new_needed)
+
+            transaction_dir = "contents.d~"
+            contents_metadata = {}
+            contents_metadata[os.path.join(transaction_dir,
+                LinkageMap._needed_aux_key)] = new_needed_str
+            contents_metadata[os.path.join(transaction_dir, "CONTENTS")] = \
+                prepare_contents(new_contents, self.settings['ROOT'])
+            for filename, per_type in new_contents_metadata.items():
+                for ftype, value in per_type.items():
+                    key = os.path.join(transaction_dir, ftype)
+                    contents_metadata[key] = contents_metadata.get(key, "") + value
+
+            self.aux_update_pkg(contents_metadata)
+
+            self.stopContentsUpdate(self.dbdir)
+
+            self._clear_contents_cache()
+        except (IOError, OSError) as e:
+            self._display_merge(_("!!! FAILED contents update in:\n\t") +
+                str(self.dbdir) + "\n" +
+                _("!!! rolling back transaction due to: ") + str(e) + "\n",
+                level=logging.ERROR, noiselevel=-1)
+            self.abortContentsUpdate()
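
writeContentsToContentsFile() turns the per-path metadata mapping back into whole-file bodies by concatenating the per-type records in CONTENTS order. The same transformation in isolation (the function name is illustrative):

    from collections import OrderedDict

    def flatten_metadata(new_contents_metadata):
        bodies = {}
        for per_type in new_contents_metadata.values():
            for ftype, value in per_type.items():
                bodies[ftype] = bodies.get(ftype, "") + value
        return bodies

    meta = OrderedDict([
        ("/usr/bin/foo", OrderedDict([("CONTENTS_MODES", "0755\n")])),
        ("/usr/share/doc", OrderedDict([("CONTENTS_MODES", "0644\n")])),
    ])
    # flatten_metadata(meta) == {"CONTENTS_MODES": "0755\n0644\n"}
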
 
     def isowner(self, filename, destroot=None):
         """
         Check if a file belongs to this package. This may
@@ -3258,11 +3558,14 @@
         # Copy contents entries from the old package to the new one.
         new_contents = self.getcontents().copy()
         old_contents = self._installed_instance.getcontents()
+        new_contents_metadata = self.getContentsMetadata().copy()
+        old_contents_metadata = self._installed_instance.getContentsMetadata()
         for f in sorted(preserve_paths):
             f = _unicode_decode(f,
                 encoding=_encodings['content'], errors='strict')
             f_abs = os.path.join(root, f.lstrip(os.sep))
             contents_entry = old_contents.get(f_abs)
+            contents_metadata_entry = old_contents_metadata.get(f_abs)
             if contents_entry is None:
                 # This will probably never happen, but it might if one of the
                 # paths returned from findConsumers() refers to one of the libs
@@ -3275,84 +3578,23 @@
                 preserve_paths.remove(f)
                 continue
             new_contents[f_abs] = contents_entry
-            self.writeMetaData(f_abs)
+            new_contents_metadata[f_abs] = contents_metadata_entry
             obj_type = contents_entry[0]
             showMessage(_(">>> needed %s %s\n") % (obj_type, f_abs),
                 noiselevel=-1)
 
+            # Add parent directories to contents if necessary.
             parent_dir = os.path.dirname(f_abs)
             while len(parent_dir) > len(root):
                 new_contents[parent_dir] = ["dir"]
-                self.writeMetaData(parent_dir)
+                new_contents_metadata[parent_dir] = contents_metadata_entry
                 prev = parent_dir
                 parent_dir = os.path.dirname(parent_dir)
                 if prev == parent_dir:
                     break
-        outfile = atomic_ofstream(os.path.join(self.dbtmpdir, "CONTENTS"))
-        write_contents(new_contents, root, outfile)
-        outfile.close()
-        self._clear_contents_cache()
-
-    def writeMetaData(self, fname):
-        hashtype = "SHA512"
-        if hashtype is None:
-            hashtype = "SHA512"
-        elif hashtype != "SHA1" and hashtype != "SHA256":
-            hashtype = "SHA512"
-        mystat = os.lstat(fname)
-        mymode = mystat[stat.ST_MODE]
-
-        if stat.S_ISREG(mymode):
-            line_digest = portage.checksum.perform_checksum(fname, hashtype, 0)[0]+"\n"
-            line_mode = oct(mymode)[-4:]+"\n"
-
-            attrlist = xattr.list(fname)
-            if len(attrlist)>0:
-                for i in attrlist:
-                    if i == "user.pax.flags":
-                        line_attr_pax = _unicode_decode(xattr.get(fname, "user.pax.flags")).zfill(5)+"\n"
-                    if i == "security.capability":
-                        # Take the actual value from the xattr.
-                        caps = xattr.get(fname, "security.capability")
-                        caps_int = int(struct.unpack("<IIIII", caps)[1])
[...]
             showMessage("%s %s -> %s\n" % (zing, mydest, myto))
+
+            # Write the contents entry.
             if sys.hexversion >= 0x3030000:
                 outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime // 1000000000)+"\n")
             else:
                 outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime)+"\n")
@@ -5169,7 +5411,12 @@
         except OSError:
             pass
 
+        # Queue writing of the metadata entries.
+        write_metadata = True
+
+        # Write the contents entry.
         outfile.write("dir "+myrealdest+"\n")
+        # Recurse and merge this directory.
         mergelist.extend(join(relative_path, child) for child in
             os.listdir(join(srcroot, relative_path)))
 
@@ -5217,6 +5464,10 @@
             pass
 
         if mymtime != None:
+            # Queue writing of the metadata entries.
+            write_metadata = True
+
+            # Write the contents entry.
             if sys.hexversion >= 0x3030000:
                 outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime // 1000000000)+"\n")
             else:
@@ -5239,12 +5490,24 @@
         else:
             return 1
 
+        # Write the contents entry.
         if stat.S_ISFIFO(mymode):
            outfile.write("fif %s\n" % myrealdest)
         else:
             outfile.write("dev %s\n" % myrealdest)
         showMessage(zing + " " + mydest + "\n")
 
+        # Queue writing of the metadata entries.
+        write_metadata = True
+
+        # Write the metadata entries.
+        if write_metadata:
+            digfile.write(line_digest)
+            modfile.write(line_mode)
+            paxfile.write(line_attr_pax)
+            capfile.write(line_attr_caps)
+
     def _protect(self, cfgfiledict, protect_if_modified, src_md5,
         src_link, dest, dest_real, dest_mode, dest_md5, dest_link):
 
@@ -5616,11 +5879,12 @@
         if not parallel_install:
             mylink.unlockdb()
 
-def write_contents(contents, root, f):
+def prepare_contents(contents, root):
     """
-    Write contents to any file like object. The file will be left open.
+    Build the contents of a CONTENTS file and return it as a string.
     """
     root_len = len(root) - 1
+    lines = []
     for filename in sorted(contents):
         entry_data = contents[filename]
         entry_type = entry_data[0]
@@ -5635,7 +5899,16 @@
                 (entry_type, relative_filename, link, mtime)
         else: # dir, dev, fif
             line = "%s %s\n" % (entry_type, relative_filename)
-        f.write(line)
+        lines.append(line)
+
+    return "".join(lines)
+
+def write_contents(contents, root, f):
+    """
+    Write contents to any file like object. The file will be left open.
+    """
+    f.write(prepare_contents(contents, root))
 
 def tar_contents(contents, root, tar, protect=None, onProgress=None,
     xattrs=False):
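
With the refactor, write_contents() is a thin wrapper over prepare_contents(), which makes the serialized CONTENTS body reusable by the transactional path in writeContentsToContentsFile(). A quick in-memory check; the entry tuple follows the (type, mtime, md5) layout vartree uses for regular files:

    import io

    contents = {"/usr/bin/foo":
        ("obj", "1700000000", "d41d8cd98f00b204e9800998ecf8427e")}
    buf = io.StringIO()
    write_contents(contents, "/", buf)
    assert buf.getvalue() == prepare_contents(contents, "/")
    # Both produce: "obj /usr/bin/foo d41d8cd98f00b204e9800998ecf8427e 1700000000\n"
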