Gentoo's Bugzilla – Attachment 563340 Details for Bug 671864
sys-apps/portage: vardbapi aux_update transactions with write-ahead logging
[patch] Add write ahead log using aux_update - fixed
integrity-w-write-ahead-0.3.patch (text/plain), 31.68 KB, created by Sam on 2019-01-30 23:11:04 UTC
Description: Add write ahead log using aux_update - fixed
Filename: integrity-w-write-ahead-0.3.patch
MIME Type: text/plain
Creator: Sam
Created: 2019-01-30 23:11:04 UTC
Size: 31.68 KB
Flags: patch, obsolete
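Reviewer's note on the approach (not part of the patch itself): the update is made transactional by journaling. startContentsUpdate() copies the affected vdb files into contents.d~ together with a SHA1 Manifest, aux_update_pkg() then writes the new file bodies into that journal, and stopContentsUpdate() verifies the Manifest, hard-links rollback copies into contents.d, syncs, and finally rename()s the new files into place. Since rename() is atomic, a crash at any point leaves either the old or the new file set, plus a journal that abortContentsUpdate() can discard. A minimal, self-contained sketch of this write-ahead pattern, with hypothetical names (update_vdb_files and _sha1 are illustrative, not functions from the patch):

import hashlib
import os
import shutil

def _sha1(path):
	h = hashlib.sha1()
	with open(path, "rb") as f:
		for chunk in iter(lambda: f.read(65536), b""):
			h.update(chunk)
	return h.hexdigest()

def update_vdb_files(dbdir, new_data):
	"""Replace several vdb files near-atomically.

	new_data maps file name -> new text. While the journal directory
	exists the transaction has not committed and the original files are
	untouched, so crash recovery is simply discarding the journal.
	"""
	journal = os.path.join(dbdir, "contents.d~")
	if os.path.isdir(journal):
		shutil.rmtree(journal)  # discard a previously uncommitted transaction
	os.mkdir(journal)

	manifest = []
	for name, text in new_data.items():
		path = os.path.join(journal, name)
		with open(path, "w") as f:  # write the new content into the journal
			f.write(text)
			f.flush()
			os.fsync(f.fileno())  # content must be durable before commit
		manifest.append("%s\n%s\n" % (name, _sha1(path)))
	with open(os.path.join(journal, "Manifest"), "w") as f:
		f.writelines(manifest)
		f.flush()
		os.fsync(f.fileno())

	# Commit: per-file rename() is atomic, and the originals are only
	# replaced after all of the new content has reached the disk.
	for name in new_data:
		os.rename(os.path.join(journal, name), os.path.join(dbdir, name))
	shutil.rmtree(journal)  # journal gone == transaction committed

The patch follows the same sequence, but additionally keeps the pre-update files hard-linked in contents.d so an interrupted commit can be rolled back, and routes the writes through aux_update_pkg() so mtimes and package caches stay consistent.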
--- vartree.py
+++ vartree.py
@@ -96,6 +96,7 @@
 import textwrap
 import time
 import warnings
+import operator
 
 try:
 	import cPickle as pickle
@@ -202,6 +203,15 @@
 
 		self._cached_counter = None
 
+		self._content_files = [
+			("CONTENTS_DIGESTS_SHA512", 128+1),
+			("CONTENTS_DIGESTS_SHA1", 40+1),
+			("CONTENTS_DIGESTS_SHA256", 64+1),
+			("CONTENTS_MODES", 4+1),
+			("CONTENTS_ATTRS_PAX", 5+1),
+			("CONTENTS_ATTRS_CAPS", 16+1)
+		]
+
 	@property
 	def writable(self):
 		"""
@@ -1069,19 +1079,27 @@
 
 	def removeFromContents(self, pkg, paths, relative_paths=True):
 		"""
+		Remove installed files from CONTENTS and its metadata files.
+		Typically done during an unmerge, so that libraries needed by
+		other packages are spared from the unmerge.
+
		@param pkg: cpv for an installed package
		@type pkg: string
		@param paths: paths of files to remove from contents
		@type paths: iterable
		"""
+
+		# Grab a copy of contents and its metadata files.
 		if not hasattr(pkg, "getcontents"):
 			pkg = self._dblink(pkg)
 		root = self.settings['ROOT']
 		root_len = len(root) - 1
 		new_contents = pkg.getcontents().copy()
+		new_contents_index = pkg.getContentsIndices().copy()
+		new_contents_metadata = pkg.getContentsMetadata().copy()
 		removed = 0
 
-		self.startContentsRemoval(pkg.dbdir)
+		# Remove installed files from contents and its metadata files.
 		for filename in paths:
 			filename = _unicode_decode(filename,
 				encoding=_encodings['content'], errors='strict')
@@ -1091,29 +1109,18 @@
 			else:
 				relative_filename = filename[root_len:]
 			contents_key = pkg._match_contents(relative_filename)
-			index = -1
-			try:
-				index = list(new_contents).index(filename)+1
-			except ValueError:
-				print("List does not contain value")
-			if contents_key and index >= 0:
+			if contents_key:
 				# It's possible for two different paths to refer to the same
 				# contents_key, due to directory symlinks. Therefore, pass a
 				# default value to pop, in order to avoid a KeyError which
 				# could otherwise be triggered (see bug #454400).
 				new_contents.pop(contents_key, None)
-				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA512")
-				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA1")
-				self.removeFromContentsMeta(pkg.dbdir, index, "DIGESTS_SHA256")
-				self.removeFromContentsMeta(pkg.dbdir, index, "MODES")
-				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_PAX")
-				self.removeFromContentsMeta(pkg.dbdir, index, "ATTRS_CAPS")
+				new_contents_metadata.pop(contents_key, None)
 				removed += 1
 
-		self.stopContentsRemoval(pkg.dbdir)
 		if removed:
 			# Also remove corresponding NEEDED lines, so that they do
-			# no corrupt LinkageMap data for preserve-libs.
+			# not corrupt LinkageMap data for preserve-libs.
 			needed_filename = os.path.join(pkg.dbdir, LinkageMap._needed_aux_key)
 			new_needed = None
 			try:
@@ -1142,220 +1149,8 @@
 				if filename in new_contents:
 					new_needed.append(entry)
 
-			self.writeContentsToContentsFile(pkg, new_contents, new_needed=new_needed)
-
-	def startContentsRemoval(self, vdbdir):
-		contents_dir = os.path.join(vdbdir, "contents.d")
-		transaction_dir = os.path.join(vdbdir, "contents.d~")
-		manifest_file = os.path.join(contents_dir, "Manifest")
-		manifest_lines = ""
-
-		# Clean previously unfinished transaction @TODO also: either roll-back or roll-forward
-		if os.path.isdir(transaction_dir):
-			shutil.rmtree(transaction_dir)
-		if os.path.isdir(contents_dir):
-			shutil.rmtree(contents_dir)
-
-		# Set up transaction
-		os.mkdir(transaction_dir, 0o644)
-		files = [
-			"CONTENTS_DIGESTS_SHA1",
-			"CONTENTS_DIGESTS_SHA256",
-			"CONTENTS_DIGESTS_SHA512",
-			"CONTENTS_MODES",
-			"CONTENTS_ATTRS_PAX",
-			"CONTENTS_ATTRS_CAPS"
-		]
-		for f in files:
-			fname_src = os.path.join(vdbdir, f)
-			fname_dest = os.path.join(transaction_dir, f)
-
-			# Gracefully handle non-existent files
-			if os.path.isfile(fname_src):
-				shutil.copy2(fname_src, fname_dest)
-				manifest_lines += f + "\n"
-				manifest_lines += portage.checksum.perform_checksum(fname_src, "SHA1", 0)[0] + "\n"
-
-		# Write Manifest-file of transaction
-		os.mkdir(contents_dir, 0o644)
-		with open(manifest_file,"w") as f:
-			f.write(manifest_lines)
-
-	def stopContentsRemoval(self, vdbdir):
-		contents_dir = os.path.join(vdbdir, "contents.d")
-		transaction_dir = os.path.join(vdbdir, "contents.d~")
-		digests = []
-		transaction_files = []
-		all_files = [
-			"CONTENTS_DIGESTS_SHA1",
-			"CONTENTS_DIGESTS_SHA256",
-			"CONTENTS_DIGESTS_SHA512",
-			"CONTENTS_MODES",
-			"CONTENTS_ATTRS_PAX",
-			"CONTENTS_ATTRS_CAPS"
-		]
-
-		if not os.path.isdir(transaction_dir):
-			print("Failed creating transaction dir")
-			sys.exit(1)
-
-		# Read Manifest-file of contents
-		manifest_file = os.path.join(contents_dir, "Manifest")
-		if os.path.isfile(manifest_file):
-			with open(manifest_file,"r") as f:
-				lines = f.read().splitlines()
-
-			for i, line in enumerate(lines):
-				if (i%2) == 0:
-					transaction_files.append(line)
-				else:
-					digests.append(line)
-
-		# Check transactiondir against Manifest
-		for f in transaction_files:
-			file = os.path.join(transaction_dir, f)
-			if not os.path.isfile(file):
-				print("Manifest contains non-existing file '"+file+"'")
-				sys.exit(1)
-
-		# Setup contents_dir with links of vdbdir files
-		for i, f in enumerate(transaction_files):
-			fname_src = os.path.join(vdbdir, f)
-			fname_dest = os.path.join(contents_dir, f)
-
-			# Gracefully handle non-existent files
-			if os.path.isfile(fname_src):
-				if portage.checksum.perform_checksum(fname_src, "SHA1", 0)[0] != digests[i]:
-					print("According to Manifest, file in vdbdir was modified '" + fname_src + "'")
-					sys.exit(1)
-				else:
-					os.link(fname_src, fname_dest)
-			else:
-				print("File in Manifest no longer found in vdbdir '"+f+"'")
-				sys.exit(1)
-
-		# Sync contents_dir and transaction_dir to disk
-		if platform.system() == "Linux":
-			paths = []
-			for f in os.listdir(transaction_dir):
-				paths.append(os.path.join(transaction_dir, f))
-			for f in os.listdir(contents_dir):
-				paths.append(os.path.join(contents_dir, f))
-			paths = tuple(paths)
-
-			proc = SyncfsProcess(paths=paths,
-				scheduler=(
-					SchedulerInterface(portage._internal_caller and
-					global_event_loop() or EventLoop(main=False))
-				))
-
-			proc.start()
-			returncode = proc.wait()
-
-		# Link from transaction_dir
-		for f in transaction_files:
-			fname_src = os.path.join(transaction_dir, f)
-			fname_dest = os.path.join(vdbdir, f+"~")
-
-			# Gracefully handle non-existent files
-			if os.path.isfile(fname_src):
-				os.link(fname_src, fname_dest)
-			else:
-				print("Manifest contains file that no longer exists '"+f+"'")
-				sys.exit(1)
-
-		# Sync contents_dir and transaction_dir to disk
-		if platform.system() == "Linux":
-			paths = []
-			for f in transaction_files:
-				# Gracefully handle non-existent files
-				if os.path.isfile(os.path.join(vdbdir, f+"~")):
-					paths.append(os.path.join(vdbdir, f+"~"))
-				else:
-					print("Manifest contains file that no longer exists '"+f+"'")
-					sys.exit(1)
-			paths = tuple(paths)
-
-			proc = SyncfsProcess(paths=paths,
-				scheduler=(
-					SchedulerInterface(portage._internal_caller and
-					global_event_loop() or EventLoop(main=False))
-				))
-
-			proc.start()
-			returncode = proc.wait()
-
-		# Rename
-		for f in transaction_files:
-			fname_src = os.path.join(vdbdir, f+"~")
-			fname_dest = os.path.join(vdbdir, f)
-
-			# Gracefully handle non-existent files
-			if os.path.isfile(fname_src):
-				os.rename(fname_src, fname_dest) #atomic rename, doesn't require sync
-			else:
-				print("Manifest contains file that no longer exists '"+f+"'")
-				sys.exit(1)
-
-		shutil.rmtree(transaction_dir)
-		shutil.rmtree(contents_dir)
-
-	def removeFromContentsMeta(self, vdbdir, index, type):
-		contents_file = ""
-		if (type in
-			{"DIGESTS_SHA512",
-			"DIGESTS_SHA256",
-			"DIGESTS_SHA1",
-			"MODES",
-			"ATTRS_PAX",
-			"ATTRS_CAPS"}):
-			contents_file = os.path.join(os.path.join(vdbdir, "contents.d~"),"CONTENTS_"+type)
-		else:
-			print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
-
-		if type == "DIGESTS_SHA512": linelen = 128+1 #including newline
-		elif type == "DIGESTS_SHA256": linelen = 64 + 1
-		elif type == "DIGESTS_SHA1": linelen = 40+1
-		elif type == "MODES": linelen = 4 + 1
-		elif type == "ATTRS_PAX": linelen = 5 + 1
-		elif type == "ATTRS_CAPS": linelen = 16 + 1
-		else:
-			print("ERROR removeFromContentsMeta() got passed unexpected type "+type)
-
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				pre = f.read((index-1)*linelen)
-				f.read(129)
-				post = f.read()
-				f.seek(0, 0)
-				f.write(pre)
-				f.write(post)
-				f.truncate()
-				f.flush()
-				os.fsync(f.fileno())
-
-	def writeContentsToContentsFile(self, pkg, new_contents, new_needed=None):
-		"""
-		@param pkg: package to write contents file for
-		@type pkg: dblink
-		@param new_contents: contents to write to CONTENTS file
-		@type new_contents: contents dictionary of the form
-		{u'/path/to/file' : (contents_attribute 1, ...), ...}
-		@param new_needed: new NEEDED entries
-		@type new_needed: list of NeededEntry
-		"""
-		root = self.settings['ROOT']
-		self._bump_mtime(pkg.mycpv)
-		if new_needed is not None:
-			f = atomic_ofstream(os.path.join(pkg.dbdir, LinkageMap._needed_aux_key))
-			for entry in new_needed:
-				f.write(_unicode(entry))
-			f.close()
-		f = atomic_ofstream(os.path.join(pkg.dbdir, "CONTENTS"))
-		write_contents(new_contents, root, f)
-		f.close()
-		self._bump_mtime(pkg.mycpv)
-		pkg._clear_contents_cache()
+			# Write new contents files and clear CONTENTS cache
+			pkg.writeContentsToContentsFile(new_contents, new_contents_metadata, new_needed=new_needed)
 
 class _owners_cache(object):
 	"""
@@ -1817,6 +1612,8 @@
 		self.myroot = self.settings['ROOT']
 		self._installed_instance = None
 		self.contentscache = None
+		self.contents_index_cache = None
+		self.contents_metadata_cache = None
 		self._contents_inodes = None
 		self._contents_basenames = None
 		self._linkmap_broken = False
@@ -1833,6 +1630,15 @@
 		self._contents = ContentsCaseSensitivityManager(self)
 		self._slot_locks = []
 
+		self._content_files = [
+			("CONTENTS_DIGESTS_SHA512", 128+1),
+			("CONTENTS_DIGESTS_SHA1", 40+1),
+			("CONTENTS_DIGESTS_SHA256", 64+1),
+			("CONTENTS_MODES", 4+1),
+			("CONTENTS_ATTRS_PAX", 5+1),
+			("CONTENTS_ATTRS_CAPS", 16+1)
+		]
+
 	def __hash__(self):
 		return hash(self._hash_key)
 
@@ -1978,6 +1784,8 @@
 
 	def _clear_contents_cache(self):
 		self.contentscache = None
+		self.contents_index_cache = None
+		self.contents_metadata_cache = None
 		self._contents_inodes = None
 		self._contents_basenames = None
 		self._contents.clear_cache()
@@ -1988,8 +1796,11 @@
 		"""
 		if self.contentscache is not None:
 			return self.contentscache
+
 		contents_file = os.path.join(self.dbdir, "CONTENTS")
 		pkgfiles = OrderedDict()
+		pkgfiles_indices = OrderedDict()
+
 		try:
 			with io.open(_unicode_encode(contents_file,
 				encoding=_encodings['fs'], errors='strict'),
@@ -2009,27 +1820,31 @@
 			obj_index = contents_re.groupindex['obj']
 			dir_index = contents_re.groupindex['dir']
 			sym_index = contents_re.groupindex['sym']
+
 			# The old symlink format may exist on systems that have packages
 			# which were installed many years ago (see bug #351814).
 			oldsym_index = contents_re.groupindex['oldsym']
+
 			# CONTENTS files already contain EPREFIX
 			myroot = self.settings['ROOT']
 			if myroot == os.path.sep:
 				myroot = None
+
 			# used to generate parent dir entries
 			dir_entry = ("dir",)
 			eroot_split_len = len(self.settings["EROOT"].split(os.sep)) - 1
+
 			pos = 0
 			errors = []
 			for pos, line in enumerate(mylines):
 				if null_byte in line:
 					# Null bytes are a common indication of corruption.
-					errors.append((pos + 1, _("Null byte found in CONTENTS entry")))
+					errors.append((pos + 1, _("Null byte found in CONTENTS entry: ") + line))
 					continue
 				line = line.rstrip("\n")
 				m = contents_re.match(line)
 				if m is None:
-					errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
+					errors.append((pos + 1, _("Unrecognized CONTENTS entry: ") + line))
 					continue
 
 				if m.group(obj_index) is not None:
@@ -2074,17 +1889,67 @@
 						if parent in pkgfiles:
 							break
 						pkgfiles[parent] = dir_entry
+						pkgfiles_indices[parent] = (pos,dir_entry)
 						path_split.pop()
 
 				pkgfiles[path] = data
+				pkgfiles_indices[path] = (pos,data)
 
 		if errors:
 			writemsg(_("!!! Parse error in '%s'\n") % contents_file, noiselevel=-1)
 			for pos, e in errors:
 				writemsg(_("!!! line %d: %s\n") % (pos, e), noiselevel=-1)
 		self.contentscache = pkgfiles
+		self.contents_index_cache = pkgfiles_indices
 		return pkgfiles
 
+	def getContentsIndices(self):
+		"""
+		Get installed files of a given package (aka what that package installed), with indices
+		"""
+		if self.contents_index_cache is not None:
+			return self.contents_index_cache
+		else:
+			self.getcontents()
+			if self.contents_index_cache is not None:
+				return self.contents_index_cache
+			else:
+				self._display_merge(_("!!! FAILED couldn't get cached contents index\n"),
+					level=logging.ERROR, noiselevel=-1)
+				return None
+
+	def getContentsMetadata(self):
+		"""
+		Get metadata of installed files of a given package (aka what that package installed):
+		- iterate over the results returned by getContentsIndices()
+		- iterate over the filetypes concerned
+		- grab the metadata, using the index to calculate the position in the different files
+		- write the metadata to an OrderedDict and return it
+		"""
+		if self.contents_metadata_cache is not None:
+			return self.contents_metadata_cache
+		else:
+			if self.contents_index_cache is None:
+				self.getcontents()
+
+			vdbdir = self.dbdir
+			contents_metadata = OrderedDict()
+
+			for (filename, (index, tmp)) in self.contents_index_cache.copy().items():
+				contents_metadata[filename] = OrderedDict()
+				for type, linelen in self._content_files:
+					contents_fname = os.path.join(vdbdir, type)
+					if not os.path.isfile(contents_fname):
+						continue
+
+					with open(contents_fname,"rb") as f:
+						f.seek(index*linelen) # skip to the right line
+						value = f.read(linelen).decode() # read the line
+						contents_metadata[filename][type] = value
+
+			self.contents_metadata_cache = contents_metadata
+			return contents_metadata
+
 	def _prune_plib_registry(self, unmerge=False,
 		needed=None, preserve_paths=None):
 		# remove preserved libraries that don't have any consumers left
@@ -2475,6 +2340,19 @@
 			self._display_merge("%s %s %s %s\n" % \
 				(zing, desc.ljust(8), file_type, file_name))
 
+	def aux_update_pkg(self, values):
+		self.vartree.dbapi._bump_mtime(self.mycpv)
+		self.vartree.dbapi._clear_pkg_cache(self)
+		for k, v in values.items():
+			if v:
+				self.setfile(k, v)
+			else:
+				try:
+					os.unlink(os.path.join(self.vartree.dbapi.getpath(self.mycpv), k))
+				except EnvironmentError:
+					pass
+		self.vartree.dbapi._bump_mtime(self.mycpv)
+
 	def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
 		"""
 
@@ -3076,6 +2954,268 @@
 			for parent in sorted(set(recursive_parents)):
 				dirs.append((parent, revisit.pop(parent)))
 
+	def startContentsUpdate(self):
+		vdbdir = self.dbdir
+		contents_dir = os.path.join(vdbdir, "contents.d")
+		transaction_dir = os.path.join(vdbdir, "contents.d~")
+		manifest_lines = ""
+
+		# Clean previously unfinished transaction (this shouldn't occur, but might)
+		if os.path.isdir(transaction_dir):
+			shutil.rmtree(transaction_dir)
+		if os.path.isdir(contents_dir):
+			shutil.rmtree(contents_dir)
+
+		# Set up transaction
+		os.mkdir(transaction_dir, 0o644)
+		files = [
+			"NEEDED.ELF.2",
+			"CONTENTS",
+			"CONTENTS_DIGESTS_SHA1",
+			"CONTENTS_DIGESTS_SHA256",
+			"CONTENTS_DIGESTS_SHA512",
+			"CONTENTS_MODES",
+			"CONTENTS_ATTRS_PAX",
+			"CONTENTS_ATTRS_CAPS"
+		]
+		for f in files:
+			fname_src = os.path.join(vdbdir, f)
+			fname_dest = os.path.join(transaction_dir, f)
+
+			# Gracefully handle non-existent files
+			if os.path.isfile(fname_src):
+				shutil.copy2(fname_src, fname_dest)
+				manifest_lines += f + "\n"
+				manifest_lines += portage.checksum.perform_checksum(fname_src, "SHA1", 0)[0] + "\n"
+
+		# Write Manifest-file of transaction
+		os.mkdir(contents_dir, 0o644)
+		self.aux_update_pkg({os.path.join("contents.d","Manifest"): manifest_lines})
+
+	def stopContentsUpdate(self, vdbdir):
+		contents_dir = os.path.join(vdbdir, "contents.d")
+		transaction_dir = os.path.join(vdbdir, "contents.d~")
+		digests = []
+		transaction_files = []
+		all_files = [
+			"NEEDED.ELF.2",
+			"CONTENTS",
+			"CONTENTS_DIGESTS_SHA1",
+			"CONTENTS_DIGESTS_SHA256",
"CONTENTS_DIGESTS_SHA512", >+ "CONTENTS_MODES", >+ "CONTENTS_ATTRS_PAX", >+ "CONTENTS_ATTRS_CAPS" >+ ] >+ >+ if not os.path.isdir(transaction_dir): >+ showMessage(_("!!! FAILED creating transaction dir " >+ "during contents update in:\n\t")+str(vdbdir)+"\n", >+ level=logging.ERROR, noiselevel=-1) >+ sys.exit(1) >+ >+ # Read Manifest-file of contents >+ manifest_file = os.path.join(contents_dir, "Manifest") >+ if os.path.isfile(manifest_file): >+ with open(manifest_file,"r") as f: >+ lines = f.read().splitlines() >+ >+ for i, line in enumerate(lines): >+ if (i%2) == 0: >+ transaction_files.append(line) >+ else: >+ digests.append(line) >+ else: >+ showMessage(_("!!! FAILED reading Manifest of transaction" >+ "during contents update in:\n\t")+str(vdbdir)+"\n", >+ level=logging.ERROR, noiselevel=-1) >+ sys.exit(1) >+ >+ # Check Manifest against transaction_dir >+ for f in transaction_files: >+ file = os.path.join(transaction_dir, f) >+ if not os.path.isfile(file): >+ showMessage(_("!!! FAILED Manifest of transaction " >+ "contained non-existing file")+str(file)+_(" " >+ "during contents update in:\n\t")+str(vdbdir)+"\n", >+ level=logging.ERROR, noiselevel=-1) >+ sys.exit(1) >+ >+ # Check transaction_dir against Manifest >+ for f in os.listdir(transaction_dir): >+ if not f in transaction_files: >+ showMessage(_("!!! FAILED found file ")+str(file)+_(" " >+ "in transaction_dir that wasn't recorded in Manifest of " >+ "transaction during contents update in:\n\t")+str(vdbdir)+"\n", >+ level=logging.ERROR, noiselevel=-1) >+ sys.exit(1) >+ >+ # Setup contents_dir with links of vdbdir files >+ for i, f in enumerate(transaction_files): >+ fname_src = os.path.join(vdbdir, f) >+ fname_dest = os.path.join(contents_dir, f) >+ >+ # Gracefully handle non-existent files >+ if os.path.isfile(fname_src): >+ if portage.checksum.perform_checksum(fname_src, "SHA1", 0)[0] != digests[i]: >+ showMessage(_("!!! FAILED according to Manifest of transaction, " >+ "file ")+str(file)+_(" in vdbdir was modified" >+ "during contents update in:\n\t")+str(vdbdir)+"\n", >+ level=logging.ERROR, noiselevel=-1) >+ sys.exit(1) >+ else: >+ os.link(fname_src, fname_dest) >+ else: >+ showMessage(_("!!! FAILED file in Manifest of transaction" >+ "no longer found in vdbdir ")+str(f)+_( >+ "during contents update in:\n\t")+str(vdbdir)+"\n", >+ level=logging.ERROR, noiselevel=-1) >+ sys.exit(1) >+ >+ # Sync contents_dir and transaction_dir to disk >+ if platform.system() == "Linux": >+ paths = [] >+ for f in os.listdir(transaction_dir): >+ paths.append(os.path.join(transaction_dir, f)) >+ for f in os.listdir(contents_dir): >+ paths.append(os.path.join(contents_dir, f)) >+ paths = tuple(paths) >+ >+ proc = SyncfsProcess(paths=paths, >+ scheduler=( >+ SchedulerInterface(portage._internal_caller and >+ global_event_loop() or EventLoop(main=False)) >+ )) >+ >+ proc.start() >+ returncode = proc.wait() >+ >+ # Link from transaction_dir >+ for f in transaction_files: >+ fname_src = os.path.join(transaction_dir, f) >+ fname_dest = os.path.join(vdbdir, f+"~") >+ >+ # Gracefully handle non-existent files >+ if os.path.isfile(fname_src): >+ os.link(fname_src, fname_dest) >+ else: >+ showMessage(_("!!! 
+				self._display_merge(_("!!! FAILED Manifest of transaction contains "
+					"file that no longer exists ")+str(f)+_(" "
+					"during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Sync contents_dir and transaction_dir to disk
+		if platform.system() == "Linux":
+			paths = []
+			for f in transaction_files:
+				# Gracefully handle non-existent files
+				if os.path.isfile(os.path.join(vdbdir, f+"~")):
+					paths.append(os.path.join(vdbdir, f+"~"))
+				else:
+					self._display_merge(_("!!! FAILED Manifest of transaction contains "
+						"file that no longer exists ")+str(f)+_(" "
+						"during contents update in:\n\t")+str(vdbdir)+"\n",
+						level=logging.ERROR, noiselevel=-1)
+					sys.exit(1)
+			paths = tuple(paths)
+
+			proc = SyncfsProcess(paths=paths,
+				scheduler=(
+					SchedulerInterface(portage._internal_caller and
+					global_event_loop() or EventLoop(main=False))
+				))
+
+			proc.start()
+			returncode = proc.wait()
+
+		# Rename
+		for f in transaction_files:
+			fname_src = os.path.join(vdbdir, f+"~")
+			fname_dest = os.path.join(vdbdir, f)
+
+			# Gracefully handle non-existent files
+			if os.path.isfile(fname_src):
+				os.rename(fname_src, fname_dest) # atomic rename, doesn't require sync
+			else:
+				self._display_merge(_("!!! FAILED Manifest of transaction contains "
+					"file that no longer exists ")+str(f)+_(" "
+					"during contents update in:\n\t")+str(vdbdir)+"\n",
+					level=logging.ERROR, noiselevel=-1)
+				sys.exit(1)
+
+		# Cleanup transaction (order matters for roll-back)
+		shutil.rmtree(contents_dir)
+		shutil.rmtree(transaction_dir)
+
+	def abortContentsUpdate(self):
+		# As this is an abort, we roll-back. So figure out, given the
+		# current state, how to roll-back.
+		vdbdir = self.dbdir
+		contents_dir = os.path.join(vdbdir, "contents.d")
+		transaction_dir = os.path.join(vdbdir, "contents.d~")
+
+		if os.path.isdir(transaction_dir) and not os.path.isdir(contents_dir):
+			# Transaction_dir exists, so it might be in progress
+			# Therefore we can't trust its contents
+			shutil.rmtree(transaction_dir)
+
+			# Contents_dir might exist, so clean it
+			if os.path.isdir(contents_dir):
+				shutil.rmtree(contents_dir)
+		elif not os.path.isdir(transaction_dir) and os.path.isdir(contents_dir):
+			# This shouldn't occur
+			shutil.rmtree(contents_dir)
+			self._display_merge(_("!!! FAILED please file a bug describing this situation\n"),
+				level=logging.ERROR, noiselevel=-1)
+
+		sys.exit(1)
+
+	def writeContentsToContentsFile(self, new_contents, new_contents_metadata, new_needed=None):
+		"""
+		@param new_contents: contents to write to CONTENTS file
+		@type new_contents: contents dictionary of the form
+		{u'/path/to/file' : (contents_attribute 1, ...), ...}
+		@param new_contents_metadata: contents to write to CONTENTS_* files
+		@type new_contents_metadata: contents dictionary of the form
+		{u'/path/to/file' : {CONTENTS_* type : value, ...}, ...}
+		@param new_needed: new NEEDED entries
+		@type new_needed: list of NeededEntry
+		"""
+		# Here we do a number of things:
+		# - surround by an error catcher
+		# - setup write-ahead transaction
+		# - write multiple OrderedDicts to multiple files using aux_update()
+		# - complete write-ahead transaction
+		# - call a rollback function on error
+
+		try:
+			self.startContentsUpdate()
+
+			new_needed_str = ""
+			if new_needed is not None:
+				new_needed_str = ''.join(_unicode(e) for e in new_needed)
+
+			transaction_dir = "contents.d~"
+			contents_metadata = {}
+			contents_metadata[os.path.join(transaction_dir, LinkageMap._needed_aux_key)] = new_needed_str
+			contents_metadata[os.path.join(transaction_dir, "CONTENTS")] = prepare_contents(new_contents, self.settings['ROOT'])
+			for (filename, tmp) in new_contents_metadata.items():
+				for (type, value) in tmp.items():
+					type = os.path.join(transaction_dir, type)
+					contents_metadata[type] = contents_metadata.get(type,"") + value
+
+			self.aux_update_pkg(contents_metadata)
+
+			self.stopContentsUpdate(self.dbdir)
+
+			self._clear_contents_cache()
+		except (IOError, OSError) as e:
+			self._display_merge(_("!!! FAILED, aborting transaction due to ")+str(e)+_(" "
+				"during contents update in:\n\t")+str(self.dbdir)+"\n",
+				level=logging.ERROR, noiselevel=-1)
+			self.abortContentsUpdate()
+
 	def isowner(self, filename, destroot=None):
 		"""
 		Check if a file belongs to this package. This may
@@ -3418,11 +3558,14 @@
 		# Copy contents entries from the old package to the new one.
 		new_contents = self.getcontents().copy()
 		old_contents = self._installed_instance.getcontents()
+		new_contents_metadata = self.getContentsMetadata().copy()
+		old_contents_metadata = self._installed_instance.getContentsMetadata()
 		for f in sorted(preserve_paths):
 			f = _unicode_decode(f,
 				encoding=_encodings['content'], errors='strict')
 			f_abs = os.path.join(root, f.lstrip(os.sep))
 			contents_entry = old_contents.get(f_abs)
+			contents_metadata_entry = old_contents_metadata.get(f_abs)
 			if contents_entry is None:
 				# This will probably never happen, but it might if one of the
 				# paths returned from findConsumers() refers to one of the libs
@@ -3435,84 +3578,23 @@
 				preserve_paths.remove(f)
 				continue
 			new_contents[f_abs] = contents_entry
-			self.writeMetaData(f_abs)
+			new_contents_metadata[f_abs] = contents_metadata_entry
 			obj_type = contents_entry[0]
 			showMessage(_(">>> needed %s %s\n") % (obj_type, f_abs),
 				noiselevel=-1)
+
 			# Add parent directories to contents if necessary.
 			parent_dir = os.path.dirname(f_abs)
 			while len(parent_dir) > len(root):
 				new_contents[parent_dir] = ["dir"]
-				self.writeMetaData(parent_dir)
+				new_contents_metadata[parent_dir] = contents_metadata_entry
 				prev = parent_dir
 				parent_dir = os.path.dirname(parent_dir)
 				if prev == parent_dir:
 					break
-		outfile = atomic_ofstream(os.path.join(self.dbtmpdir, "CONTENTS"))
-		write_contents(new_contents, root, outfile)
-		outfile.close()
-		self._clear_contents_cache()
-
-	def writeMetaData(self, fname):
-		hashtype = "SHA512"
-		if hashtype is None:
-			hashtype = "SHA512"
-		elif hashtype != "SHA1" and hashtype != "SHA256":
-			hashtype = "SHA512"
 
-		mystat = os.lstat(fname)
-		mymode = mystat[stat.ST_MODE]
-
-		if stat.S_ISREG(mymode):
-			line_digest = portage.checksum.perform_checksum(fname, hashtype, 0)[0]+"\n"
-			line_mode = oct(mymode)[-4:]+"\n"
-
-			attrlist = xattr.list(fname)
-			if len(attrlist)>0:
-				for i in attrlist:
-					if i == "user.pax.flags":
-						line_attr_pax = _unicode_decode(xattr.get(fname, "user.pax.flags")).zfill(5)+"\n"
-					if i == "security.capability":
-						caps = xattr.get(fname, "security.capability") # Take the actual value from xattr
-						caps_int = int(struct.unpack("<IIIII",caps)[1]) # Grab the bitfield, as integer
-						line_attr_caps = '{0:x}'.format(caps_int).zfill(16)+"\n" # Convert to a 16-pos hex string
-			else:
-				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
-				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
-		else: #DIR, LINK, FIFO, DEV
-			digest_length = 0
-			if hashtype == "SHA1":
-				digest_length = 40
-			elif hashtype == "SHA256":
-				digest_length = 64
-			elif hashtype == "SHA512":
-				digest_length = 128
-
-			line_digest = '{num:0{width}}\n'.format(num=0, width=digest_length)
-			line_mode = oct(mymode)[-4:]+"\n"
-			line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
-			line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
-
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_"+hashtype)
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_digest)
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_MODES")
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_mode)
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_PAX")
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_attr_pax)
-		contents_file = os.path.join(self.dbtmpdir, "CONTENTS_ATTRS_CAPS")
-		if os.path.isfile(contents_file):
-			with open(contents_file,"r+") as f:
-				f.seek(0,2)
-				f.write(line_attr_caps)
+		# Write new contents files
+		self.writeContentsToContentsFile(new_contents, new_contents_metadata)
 
 	def _find_unused_preserved_libs(self, unmerge_no_replacement):
 		"""
@@ -5127,9 +5209,8 @@
 				# confmem rejected this update
 				zing = "---"
 
-			srcobj = srcroot+relative_path
-			destobj = destroot+relative_path
-
+			# Set some values for use by metadata entries
+			write_metadata = False
 			digest_length = 0
 			if hashtype == "SHA1":
 				digest_length = 40
@@ -5138,7 +5218,9 @@
 			elif hashtype == "SHA512":
 				digest_length = 128
 
+			# Determine metadata entries
 			if stat.S_ISREG(mymode):
+				srcobj = srcroot+relative_path
 				line_digest = portage.checksum.perform_checksum(srcobj, hashtype, 0)[0]+"\n"
 				line_mode = oct(mymode)[-4:]+"\n"
 
@@ -5160,11 +5242,6 @@
 				line_attr_pax = '{num:0{width}}\n'.format(num=0, width=5)
 				line_attr_caps = '{num:0{width}}\n'.format(num=0, width=16)
 
-			digfile.write(line_digest)
-			modfile.write(line_mode)
-			paxfile.write(line_attr_pax)
-			capfile.write(line_attr_caps)
-
 			if stat.S_ISLNK(mymode):
 				# we are merging a symbolic link
 				# Pass in the symlink target in order to bypass the
@@ -5224,7 +5301,12 @@
 					[_("QA Notice: Symbolic link /%s points to /%s which does not exist.")
 					% (relative_path, myabsto)])
 
+				# Flag metadata entries for writing
+				write_metadata = True
+
 				showMessage("%s %s -> %s\n" % (zing, mydest, myto))
+
+				# Write the contents entry
 				if sys.hexversion >= 0x3030000:
 					outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime // 1000000000)+"\n")
 				else:
@@ -5329,7 +5411,12 @@
 				except OSError:
 					pass
 
+				# Flag metadata entries for writing
+				write_metadata = True
+
+				# Write the contents entry
 				outfile.write("dir "+myrealdest+"\n")
+
 				# recurse and merge this directory
 				mergelist.extend(join(relative_path, child) for child in
 					os.listdir(join(srcroot, relative_path)))
@@ -5377,6 +5464,10 @@
 					pass
 
 				if mymtime != None:
+					# Flag metadata entries for writing
+					write_metadata = True
+
+					# Write the contents entry
 					if sys.hexversion >= 0x3030000:
 						outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime // 1000000000)+"\n")
 					else:
@@ -5399,12 +5490,24 @@
 
 			else:
 				return 1
+
+			# Write the contents entry
 			if stat.S_ISFIFO(mymode):
 				outfile.write("fif %s\n" % myrealdest)
 			else:
 				outfile.write("dev %s\n" % myrealdest)
 			showMessage(zing + " " + mydest + "\n")
 
+			# Flag metadata entries for writing
+			write_metadata = True
+
+			# Write the metadata entries
+			if write_metadata:
+				digfile.write(line_digest)
+				modfile.write(line_mode)
+				paxfile.write(line_attr_pax)
+				capfile.write(line_attr_caps)
+
 	def _protect(self, cfgfiledict, protect_if_modified, src_md5,
 		src_link, dest, dest_real, dest_mode, dest_md5, dest_link):
 
@@ -5776,11 +5879,12 @@
 	if not parallel_install:
 		mylink.unlockdb()
 
-def write_contents(contents, root, f):
+def prepare_contents(contents, root):
 	"""
-	Write contents to any file like object. The file will be left open.
+	Prepare a string with the contents of a CONTENTS file.
 	"""
 	root_len = len(root) - 1
+	lines = ""
 	for filename in sorted(contents):
 		entry_data = contents[filename]
 		entry_type = entry_data[0]
@@ -5795,7 +5899,16 @@
 				(entry_type, relative_filename, link, mtime)
 		else: # dir, dev, fif
 			line = "%s %s\n" % (entry_type, relative_filename)
-		f.write(line)
+
+		lines += line
+
+	return lines
+
+def write_contents(contents, root, f):
+	"""
+	Write contents to any file like object. The file will be left open.
+	"""
+	f.write(prepare_contents(contents, root))
 
 def tar_contents(contents, root, tar, protect=None, onProgress=None,
 	xattrs=False):
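Note on the fixed-width metadata files: every CONTENTS_* record is padded to the width listed in _content_files (payload plus one byte for the newline), so the record for CONTENTS entry i starts at byte i*width. That is the arithmetic behind getContentsMetadata()'s seek. A standalone sketch of the same lookup (read_metadata is a hypothetical helper, not part of the patch):

import os

# Record widths from the patch: payload plus trailing newline.
CONTENT_FILES = [
	("CONTENTS_DIGESTS_SHA512", 128 + 1),
	("CONTENTS_DIGESTS_SHA1", 40 + 1),
	("CONTENTS_DIGESTS_SHA256", 64 + 1),
	("CONTENTS_MODES", 4 + 1),
	("CONTENTS_ATTRS_PAX", 5 + 1),
	("CONTENTS_ATTRS_CAPS", 16 + 1),
]

def read_metadata(vdbdir, index):
	"""Return the metadata recorded for the CONTENTS entry at position 'index'."""
	result = {}
	for name, linelen in CONTENT_FILES:
		path = os.path.join(vdbdir, name)
		if not os.path.isfile(path):  # a vdb entry may lack some metadata files
			continue
		with open(path, "rb") as f:
			f.seek(index * linelen)  # fixed-width records allow random access
			result[name] = f.read(linelen).decode().rstrip("\n")
	return result

This is also why the old removeFromContentsMeta() splice-by-index approach was fragile: deleting one record shifts every record after it, which the new scheme avoids by popping entries from an OrderedDict and rewriting the files in one transaction.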
Attachments on bug 671864: 557478 | 563338 | 563340