summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorZac Medico <zmedico@gentoo.org>2009-10-10 01:33:10 +0000
committerZac Medico <zmedico@gentoo.org>2009-10-10 01:33:10 +0000
commitfc447b9a1b5902d914fc8bd1f341cb897260dce1 (patch)
tree0eef7fe5f5f7015dc9eced15d0b8bb625f358775
parentDisable EAPI 3_pre1. (diff)
downloadportage-multirepo-fc447b9a1b5902d914fc8bd1f341cb897260dce1.tar.gz
portage-multirepo-fc447b9a1b5902d914fc8bd1f341cb897260dce1.tar.bz2
portage-multirepo-fc447b9a1b5902d914fc8bd1f341cb897260dce1.zip
Revert preserve-libs support since it won't be supported in this branch.
svn path=/main/branches/2.1.7/; revision=14538
-rwxr-xr-xbin/portageq23
-rw-r--r--cnf/make.globals3
-rw-r--r--cnf/sets.conf6
-rw-r--r--doc/config/sets.docbook20
-rw-r--r--man/emerge.15
-rw-r--r--pym/_emerge/actions.py184
-rw-r--r--pym/_emerge/clear_caches.py1
-rw-r--r--pym/_emerge/help.py7
-rw-r--r--pym/_emerge/main.py78
-rw-r--r--pym/portage/__init__.py3
-rw-r--r--pym/portage/dbapi/vartree.py1152
-rw-r--r--pym/portage/sets/libs.py89
12 files changed, 16 insertions, 1555 deletions
diff --git a/bin/portageq b/bin/portageq
index cea14231..dea77836 100755
--- a/bin/portageq
+++ b/bin/portageq
@@ -519,29 +519,6 @@ def get_repo_path(argv):
for arg in argv[1:]:
print(portage.db[argv[0]]["porttree"].dbapi.getRepositoryPath(arg))
-def list_preserved_libs(argv):
- """<root>
- Print a list of libraries preserved during a package update in the form
- package: path. Returns 0 if no preserved libraries could be found,
- 1 otherwise.
- """
-
- if len(argv) != 1:
- print("ERROR: wrong number of arguments")
- sys.exit(2)
- mylibs = portage.db[argv[0]]["vartree"].dbapi.plib_registry.getPreservedLibs()
- rValue = 0
- msg = []
- for cpv in sorted(mylibs):
- msg.append(cpv)
- for path in mylibs[cpv]:
- msg.append(' ' + path)
- rValue = 1
- msg.append('\n')
- writemsg_stdout(''.join(msg), noiselevel=-1)
- return rValue
-list_preserved_libs.uses_root = True
-
#-----------------------------------------------------------------------------
#
# DO NOT CHANGE CODE BEYOND THIS POINT - IT'S NOT NEEDED!
diff --git a/cnf/make.globals b/cnf/make.globals
index 14ebf4fd..0eb664e1 100644
--- a/cnf/make.globals
+++ b/cnf/make.globals
@@ -49,9 +49,6 @@ FEATURES="assume-digests distlocks fixpackages news parallel-fetch protect-owned
# FEATURES=protect-owned to operate smoothly in all cases.
COLLISION_IGNORE="/lib/modules"
-# Enable for global testing
-FEATURES="${FEATURES} preserve-libs"
-
# Default chunksize for binhost comms
PORTAGE_BINHOST_CHUNKSIZE="3000"
diff --git a/cnf/sets.conf b/cnf/sets.conf
index a921a2bb..79760a1b 100644
--- a/cnf/sets.conf
+++ b/cnf/sets.conf
@@ -40,12 +40,6 @@ class = portage.sets.files.StaticFileSet
multiset = true
directory = /etc/portage/sets
-# Set to rebuild all packages that need a preserved lib that only remains due
-# to FEATURES=preserve-libs
-[preserved-rebuild]
-class = portage.sets.libs.PreservedLibraryConsumerSet
-world-candidate = False
-
# Installed ebuilds that inherit from known live eclasses.
[live-rebuild]
class = portage.sets.dbapi.VariableSet
diff --git a/doc/config/sets.docbook b/doc/config/sets.docbook
index e50c85cc..95483d83 100644
--- a/doc/config/sets.docbook
+++ b/doc/config/sets.docbook
@@ -547,25 +547,6 @@
This class doesn't support any extra options.
</para>
</sect2>
- <sect2 id='config-set-classes-PreservedLibraryConsumerSet'>
- <title>portage.sets.libs.PreservedLibraryConsumerSet</title>
- <para>
- A special set used to rebuild all packages that need a preserved library that only
- remains due to <varname>FEATURES="preserve-libs"</varname>.
- </para>
-
- <sect3>
- <title>Single Set Configuration</title>
- <para>
- This class supports the following option:
- </para>
- <itemizedlist>
- <listitem><para><varname>debug</varname>: Generate extra output useful to figure out why
- certain packages are included in the set, as it's not always obvious.</para>
- </listitem>
- </itemizedlist>
- </sect3>
- </sect2>
</sect1>
<sect1 id='config-set-defaults'>
@@ -584,7 +565,6 @@
<listitem><para><varname>world</varname>: uses <classname>WorldSet</classname></para></listitem>
<listitem><para><varname>security</varname>: uses <classname>NewAffectedSet</classname> with default options</para></listitem>
<listitem><para><varname>installed</varname>: uses <classname>EverythingSet</classname></para></listitem>
- <listitem><para><varname>preserved-rebuild</varname>: uses <classname>PreservedLibraryConsumerSet</classname></para></listitem>
<listitem><para><varname>live-rebuild</varname>: uses <classname>VariableSet</classname></para></listitem>
<listitem><para><varname>module-rebuild</varname>: uses <classname>OwnerSet</classname></para></listitem>
<listitem><para><varname>downgrade</varname>: uses <classname>DowngradeSet</classname></para></listitem>
diff --git a/man/emerge.1 b/man/emerge.1
index 6465b58d..fb0e220b 100644
--- a/man/emerge.1
+++ b/man/emerge.1
@@ -122,7 +122,10 @@ option in order to see a preview of which packages
will be uninstalled. Always study the list of packages
to be cleaned for any obvious mistakes. Note that packages listed in
package.provided (see \fBportage\fR(5)) may be removed by
-depclean, even if they are part of the world set.
+depclean, even if they are part of the world set. Also note that
+depclean may break link level dependencies. Thus, it is
+recommended to use a tool such as \fBrevdep-rebuild\fR(1)
+in order to detect such breakage.
Depclean serves as a dependency aware
version of \fB\-\-unmerge\fR. When given one or more atoms, it will
diff --git a/pym/_emerge/actions.py b/pym/_emerge/actions.py
index 9788c085..eaf31b97 100644
--- a/pym/_emerge/actions.py
+++ b/pym/_emerge/actions.py
@@ -475,7 +475,6 @@ def action_build(settings, trees, mtimedb,
portage.writemsg_stdout(colorize("WARN", "WARNING:")
+ " AUTOCLEAN is disabled. This can cause serious"
+ " problems due to overlapping packages.\n")
- trees[settings["ROOT"]]["vartree"].dbapi.plib_registry.pruneNonExisting()
return retval
@@ -555,6 +554,10 @@ def action_depclean(settings, trees, ldpath_mtimes,
# specific packages.
msg = []
+ msg.append("Depclean may break link level dependencies. Thus, it is\n")
+ msg.append("recommended to use a tool such as " + good("`revdep-rebuild`") + " (from\n")
+ msg.append("app-portage/gentoolkit) in order to detect such breakage.\n")
+ msg.append("\n")
msg.append("Always study the list of packages to be cleaned for any obvious\n")
msg.append("mistakes. Packages that are part of the world set will always\n")
msg.append("be kept. They can be manually added to this set with\n")
@@ -898,185 +901,6 @@ def calc_depclean(settings, trees, ldpath_mtimes,
if len(cleanlist):
clean_set = set(cleanlist)
- # Check if any of these package are the sole providers of libraries
- # with consumers that have not been selected for removal. If so, these
- # packages and any dependencies need to be added to the graph.
- real_vardb = trees[myroot]["vartree"].dbapi
- linkmap = real_vardb.linkmap
- consumer_cache = {}
- provider_cache = {}
- soname_cache = {}
- consumer_map = {}
-
- writemsg_level(">>> Checking for lib consumers...\n")
-
- for pkg in cleanlist:
- pkg_dblink = real_vardb._dblink(pkg.cpv)
- consumers = {}
-
- for lib in pkg_dblink.getcontents():
- lib = lib[len(myroot):]
- lib_key = linkmap._obj_key(lib)
- lib_consumers = consumer_cache.get(lib_key)
- if lib_consumers is None:
- try:
- lib_consumers = linkmap.findConsumers(lib_key)
- except KeyError:
- continue
- consumer_cache[lib_key] = lib_consumers
- if lib_consumers:
- consumers[lib_key] = lib_consumers
-
- if not consumers:
- continue
-
- for lib, lib_consumers in list(consumers.items()):
- for consumer_file in list(lib_consumers):
- if pkg_dblink.isowner(consumer_file, myroot):
- lib_consumers.remove(consumer_file)
- if not lib_consumers:
- del consumers[lib]
-
- if not consumers:
- continue
-
- for lib, lib_consumers in consumers.items():
-
- soname = soname_cache.get(lib)
- if soname is None:
- soname = linkmap.getSoname(lib)
- soname_cache[lib] = soname
-
- consumer_providers = []
- for lib_consumer in lib_consumers:
- providers = provider_cache.get(lib)
- if providers is None:
- providers = linkmap.findProviders(lib_consumer)
- provider_cache[lib_consumer] = providers
- if soname not in providers:
- # Why does this happen?
- continue
- consumer_providers.append(
- (lib_consumer, providers[soname]))
-
- consumers[lib] = consumer_providers
-
- consumer_map[pkg] = consumers
-
- if consumer_map:
-
- search_files = set()
- for consumers in consumer_map.values():
- for lib, consumer_providers in consumers.items():
- for lib_consumer, providers in consumer_providers:
- search_files.add(lib_consumer)
- search_files.update(providers)
-
- writemsg_level(">>> Assigning files to packages...\n")
- file_owners = real_vardb._owners.getFileOwnerMap(search_files)
-
- for pkg, consumers in list(consumer_map.items()):
- for lib, consumer_providers in list(consumers.items()):
- lib_consumers = set()
-
- for lib_consumer, providers in consumer_providers:
- owner_set = file_owners.get(lib_consumer)
- provider_dblinks = set()
- provider_pkgs = set()
-
- if len(providers) > 1:
- for provider in providers:
- provider_set = file_owners.get(provider)
- if provider_set is not None:
- provider_dblinks.update(provider_set)
-
- if len(provider_dblinks) > 1:
- for provider_dblink in provider_dblinks:
- pkg_key = ("installed", myroot,
- provider_dblink.mycpv, "nomerge")
- if pkg_key not in clean_set:
- provider_pkgs.add(vardb.get(pkg_key))
-
- if provider_pkgs:
- continue
-
- if owner_set is not None:
- lib_consumers.update(owner_set)
-
- for consumer_dblink in list(lib_consumers):
- if ("installed", myroot, consumer_dblink.mycpv,
- "nomerge") in clean_set:
- lib_consumers.remove(consumer_dblink)
- continue
-
- if lib_consumers:
- consumers[lib] = lib_consumers
- else:
- del consumers[lib]
- if not consumers:
- del consumer_map[pkg]
-
- if consumer_map:
- # TODO: Implement a package set for rebuilding consumer packages.
-
- msg = "In order to avoid breakage of link level " + \
- "dependencies, one or more packages will not be removed. " + \
- "This can be solved by rebuilding " + \
- "the packages that pulled them in."
-
- prefix = bad(" * ")
- from textwrap import wrap
- writemsg_level("".join(prefix + "%s\n" % line for \
- line in wrap(msg, 70)), level=logging.WARNING, noiselevel=-1)
-
- msg = []
- for pkg in sorted(consumer_map, key=cmp_sort_key(cmp_pkg_cpv)):
- consumers = consumer_map[pkg]
- unique_consumers = set(chain(*consumers.values()))
- unique_consumers = sorted(consumer.mycpv \
- for consumer in unique_consumers)
- msg.append("")
- msg.append(" %s pulled in by:" % (pkg.cpv,))
- for consumer in unique_consumers:
- msg.append(" %s" % (consumer,))
- msg.append("")
- writemsg_level("".join(prefix + "%s\n" % line for line in msg),
- level=logging.WARNING, noiselevel=-1)
-
- # Add lib providers to the graph as children of lib consumers,
- # and also add any dependencies pulled in by the provider.
- writemsg_level(">>> Adding lib providers to graph...\n")
-
- for pkg, consumers in consumer_map.items():
- for consumer_dblink in set(chain(*consumers.values())):
- consumer_pkg = vardb.get(("installed", myroot,
- consumer_dblink.mycpv, "nomerge"))
- if not resolver._add_pkg(pkg,
- Dependency(parent=consumer_pkg,
- priority=UnmergeDepPriority(runtime=True),
- root=pkg.root)):
- resolver.display_problems()
- return 1, [], False, 0
-
- writemsg_level("\nCalculating dependencies ")
- success = resolver._complete_graph()
- writemsg_level("\b\b... done!\n")
- resolver.display_problems()
- if not success:
- return 1, [], False, 0
- if unresolved_deps():
- return 1, [], False, 0
-
- graph = resolver._dynamic_config.digraph.copy()
- required_pkgs_total = 0
- for node in graph:
- if isinstance(node, Package):
- required_pkgs_total += 1
- cleanlist = create_cleanlist()
- if not cleanlist:
- return 0, [], False, required_pkgs_total
- clean_set = set(cleanlist)
-
# Use a topological sort to create an unmerge order such that
# each package is unmerged before it's dependencies. This is
# necessary to avoid breaking things that may need to run
diff --git a/pym/_emerge/clear_caches.py b/pym/_emerge/clear_caches.py
index 83ab77eb..cd43d915 100644
--- a/pym/_emerge/clear_caches.py
+++ b/pym/_emerge/clear_caches.py
@@ -11,6 +11,5 @@ def clear_caches(trees):
d["porttree"].dbapi._aux_cache.clear()
d["bintree"].dbapi._aux_cache.clear()
d["bintree"].dbapi._clear_cache()
- d["vartree"].dbapi.linkmap._clear_cache()
portage.dircache.clear()
gc.collect()
diff --git a/pym/_emerge/help.py b/pym/_emerge/help.py
index e2c7a80e..7b4c66dd 100644
--- a/pym/_emerge/help.py
+++ b/pym/_emerge/help.py
@@ -11,7 +11,7 @@ def shorthelp():
print(bold("Usage:"))
print(" "+turquoise("emerge")+" [ "+green("options")+" ] [ "+green("action")+" ] [ "+turquoise("ebuild")+" | "+turquoise("tbz2")+" | "+turquoise("file")+" | "+turquoise("@set")+" | "+turquoise("atom")+" ] [ ... ]")
print(" "+turquoise("emerge")+" [ "+green("options")+" ] [ "+green("action")+" ] < "+turquoise("system")+" | "+turquoise("world")+" >")
- print(" "+turquoise("emerge")+" < "+turquoise("--sync")+" | "+turquoise("--metadata")+" | "+turquoise("--info")+" >")
+ #print(" "+turquoise("emerge")+" < "+turquoise("--sync")+" | "+turquoise("--metadata")+" | "+turquoise("--info")+" >")
print(" "+turquoise("emerge")+" "+turquoise("--resume")+" [ "+green("--pretend")+" | "+green("--ask")+" | "+green("--skipfirst")+" ]")
print(" "+turquoise("emerge")+" "+turquoise("--help")+" [ "+green("--verbose")+" ] ")
print(bold("Options:")+" "+green("-")+"["+green("abBcCdDefgGhjkKlnNoOpPqrsStuvV")+"]")
@@ -92,7 +92,10 @@ def help(myopts, havecolor=1):
"packages will be uninstalled. Always study the list of packages " + \
"to be cleaned for any obvious mistakes. Note that packages " + \
"listed in package.provided (see portage(5)) may be removed by " + \
- "depclean, even if they are part of the world set."
+ "depclean, even if they are part of the world set. Also note that " + \
+ "depclean may break link level dependencies. Thus, it is " + \
+ "recommended to use a tool such as revdep-rebuild(1) " + \
+ "in order to detect such breakage."
for line in wrap(paragraph, desc_width):
print(desc_indent + line)
diff --git a/pym/_emerge/main.py b/pym/_emerge/main.py
index 667a811d..22e00819 100644
--- a/pym/_emerge/main.py
+++ b/pym/_emerge/main.py
@@ -201,82 +201,6 @@ def chk_updated_info_files(root, infodirs, prev_mtimes, retval):
if icount > 0:
out.einfo("Processed %d info files." % (icount,))
-def display_preserved_libs(vardbapi, myopts):
- MAX_DISPLAY = 3
-
- # Ensure the registry is consistent with existing files.
- vardbapi.plib_registry.pruneNonExisting()
-
- if vardbapi.plib_registry.hasEntries():
- if "--quiet" in myopts:
- print()
- print(colorize("WARN", "!!!") + " existing preserved libs found")
- return
- else:
- print()
- print(colorize("WARN", "!!!") + " existing preserved libs:")
-
- plibdata = vardbapi.plib_registry.getPreservedLibs()
- linkmap = vardbapi.linkmap
- consumer_map = {}
- owners = {}
- linkmap_broken = False
-
- try:
- linkmap.rebuild()
- except portage.exception.CommandNotFound as e:
- writemsg_level("!!! Command Not Found: %s\n" % (e,),
- level=logging.ERROR, noiselevel=-1)
- del e
- linkmap_broken = True
- else:
- search_for_owners = set()
- for cpv in plibdata:
- internal_plib_keys = set(linkmap._obj_key(f) \
- for f in plibdata[cpv])
- for f in plibdata[cpv]:
- if f in consumer_map:
- continue
- consumers = []
- for c in linkmap.findConsumers(f):
- # Filter out any consumers that are also preserved libs
- # belonging to the same package as the provider.
- if linkmap._obj_key(c) not in internal_plib_keys:
- consumers.append(c)
- consumers.sort()
- consumer_map[f] = consumers
- search_for_owners.update(consumers[:MAX_DISPLAY+1])
-
- owners = vardbapi._owners.getFileOwnerMap(search_for_owners)
-
- for cpv in plibdata:
- print(colorize("WARN", ">>>") + " package: %s" % cpv)
- samefile_map = {}
- for f in plibdata[cpv]:
- obj_key = linkmap._obj_key(f)
- alt_paths = samefile_map.get(obj_key)
- if alt_paths is None:
- alt_paths = set()
- samefile_map[obj_key] = alt_paths
- alt_paths.add(f)
-
- for alt_paths in samefile_map.values():
- alt_paths = sorted(alt_paths)
- for p in alt_paths:
- print(colorize("WARN", " * ") + " - %s" % (p,))
- f = alt_paths[0]
- consumers = consumer_map.get(f, [])
- for c in consumers[:MAX_DISPLAY]:
- print(colorize("WARN", " * ") + " used by %s (%s)" % \
- (c, ", ".join(x.mycpv for x in owners.get(c, []))))
- if len(consumers) == MAX_DISPLAY + 1:
- print(colorize("WARN", " * ") + " used by %s (%s)" % \
- (consumers[MAX_DISPLAY], ", ".join(x.mycpv \
- for x in owners.get(consumers[MAX_DISPLAY], []))))
- elif len(consumers) > MAX_DISPLAY:
- print(colorize("WARN", " * ") + " used by %d other files" % (len(consumers) - MAX_DISPLAY))
- print("Use " + colorize("GOOD", "emerge @preserved-rebuild") + " to rebuild packages using these libraries")
-
def post_emerge(root_config, myopts, mtimedb, retval):
"""
Misc. things to run at the end of a merge session.
@@ -349,8 +273,6 @@ def post_emerge(root_config, myopts, mtimedb, retval):
chk_updated_cfg_files(target_root, config_protect)
display_news_notification(root_config, myopts)
- if retval in (None, os.EX_OK) or (not "--pretend" in myopts):
- display_preserved_libs(vardbapi, myopts)
sys.exit(retval)
diff --git a/pym/portage/__init__.py b/pym/portage/__init__.py
index 7f6aead2..8bc8bcc0 100644
--- a/pym/portage/__init__.py
+++ b/pym/portage/__init__.py
@@ -7584,8 +7584,6 @@ def unmerge(cat, pkg, myroot, mysettings, mytrimworld=1, vartree=None,
try:
mylink.lockdb()
if mylink.exists():
- vartree.dbapi.plib_registry.load()
- vartree.dbapi.plib_registry.pruneNonExisting()
retval = mylink.unmerge(trimworld=mytrimworld, cleanup=1,
ldpath_mtimes=ldpath_mtimes)
if retval == os.EX_OK:
@@ -7593,7 +7591,6 @@ def unmerge(cat, pkg, myroot, mysettings, mytrimworld=1, vartree=None,
return retval
return os.EX_OK
finally:
- vartree.dbapi.linkmap._clear_cache()
mylink.unlockdb()
def dep_virtual(mysplit, mysettings):
diff --git a/pym/portage/dbapi/vartree.py b/pym/portage/dbapi/vartree.py
index c103cbd8..23e170ab 100644
--- a/pym/portage/dbapi/vartree.py
+++ b/pym/portage/dbapi/vartree.py
@@ -4,7 +4,7 @@
from __future__ import print_function
-__all__ = ["PreservedLibsRegistry", "LinkageMap",
+__all__ = [
"vardbapi", "vartree", "dblink"] + \
["write_contents", "tar_contents"]
@@ -63,714 +63,6 @@ if sys.hexversion >= 0x3000000:
basestring = str
long = int
-class PreservedLibsRegistry(object):
- """ This class handles the tracking of preserved library objects """
- def __init__(self, root, filename, autocommit=True):
- """
- @param root: root used to check existence of paths in pruneNonExisting
- @type root: String
- @param filename: absolute path for saving the preserved libs records
- @type filename: String
- @param autocommit: determines if the file is written after every update
- @type autocommit: Boolean
- """
- self._root = root
- self._filename = filename
- self._autocommit = autocommit
- self.load()
- self.pruneNonExisting()
-
- def load(self):
- """ Reload the registry data from file """
- self._data = None
- try:
- self._data = pickle.load(
- open(_unicode_encode(self._filename,
- encoding=_encodings['fs'], errors='strict'), 'rb'))
- except (ValueError, pickle.UnpicklingError) as e:
- writemsg_level(_("!!! Error loading '%s': %s\n") % \
- (self._filename, e), level=logging.ERROR, noiselevel=-1)
- except (EOFError, IOError) as e:
- if isinstance(e, EOFError) or e.errno == errno.ENOENT:
- pass
- elif e.errno == PermissionDenied.errno:
- raise PermissionDenied(self._filename)
- else:
- raise
- if self._data is None:
- self._data = {}
- self._data_orig = self._data.copy()
- def store(self):
- """ Store the registry data to file. No need to call this if autocommit
- was enabled.
- """
- if os.environ.get("SANDBOX_ON") == "1" or \
- self._data == self._data_orig:
- return
- try:
- f = atomic_ofstream(self._filename, 'wb')
- pickle.dump(self._data, f, protocol=2)
- f.close()
- except EnvironmentError as e:
- if e.errno != PermissionDenied.errno:
- writemsg("!!! %s %s\n" % (e, self._filename), noiselevel=-1)
- else:
- self._data_orig = self._data.copy()
-
- def register(self, cpv, slot, counter, paths):
- """ Register new objects in the registry. If there is a record with the
- same packagename (internally derived from cpv) and slot it is
- overwritten with the new data.
- @param cpv: package instance that owns the objects
- @type cpv: CPV (as String)
- @param slot: the value of SLOT of the given package instance
- @type slot: String
- @param counter: vdb counter value for the package instace
- @type counter: Integer
- @param paths: absolute paths of objects that got preserved during an update
- @type paths: List
- """
- cp = "/".join(catpkgsplit(cpv)[:2])
- cps = cp+":"+slot
- if len(paths) == 0 and cps in self._data \
- and self._data[cps][0] == cpv and int(self._data[cps][1]) == int(counter):
- del self._data[cps]
- elif len(paths) > 0:
- self._data[cps] = (cpv, counter, paths)
- if self._autocommit:
- self.store()
-
- def unregister(self, cpv, slot, counter):
- """ Remove a previous registration of preserved objects for the given package.
- @param cpv: package instance whose records should be removed
- @type cpv: CPV (as String)
- @param slot: the value of SLOT of the given package instance
- @type slot: String
- """
- self.register(cpv, slot, counter, [])
-
- def pruneNonExisting(self):
- """ Remove all records for objects that no longer exist on the filesystem. """
-
- os = _os_merge
-
- for cps in list(self._data):
- cpv, counter, paths = self._data[cps]
- paths = [f for f in paths \
- if os.path.exists(os.path.join(self._root, f.lstrip(os.sep)))]
- if len(paths) > 0:
- self._data[cps] = (cpv, counter, paths)
- else:
- del self._data[cps]
- if self._autocommit:
- self.store()
-
- def hasEntries(self):
- """ Check if this registry contains any records. """
- return len(self._data) > 0
-
- def getPreservedLibs(self):
- """ Return a mapping of packages->preserved objects.
- @returns mapping of package instances to preserved objects
- @rtype Dict cpv->list-of-paths
- """
- rValue = {}
- for cps in self._data:
- rValue[self._data[cps][0]] = self._data[cps][2]
- return rValue
-
-class LinkageMap(object):
-
- """Models dynamic linker dependencies."""
-
- _needed_aux_key = "NEEDED.ELF.2"
- _soname_map_class = slot_dict_class(
- ("consumers", "providers"), prefix="")
-
- def __init__(self, vardbapi):
- self._dbapi = vardbapi
- self._root = self._dbapi.root
- self._libs = {}
- self._obj_properties = {}
- self._obj_key_cache = {}
- self._defpath = set()
- self._path_key_cache = {}
-
- def _clear_cache(self):
- self._libs.clear()
- self._obj_properties.clear()
- self._obj_key_cache.clear()
- self._defpath.clear()
- self._path_key_cache.clear()
-
- def _path_key(self, path):
- key = self._path_key_cache.get(path)
- if key is None:
- key = self._ObjectKey(path, self._root)
- self._path_key_cache[path] = key
- return key
-
- def _obj_key(self, path):
- key = self._obj_key_cache.get(path)
- if key is None:
- key = self._ObjectKey(path, self._root)
- self._obj_key_cache[path] = key
- return key
-
- class _ObjectKey(object):
-
- """Helper class used as _obj_properties keys for objects."""
-
- __slots__ = ("__weakref__", "_key")
-
- def __init__(self, obj, root):
- """
- This takes a path to an object.
-
- @param object: path to a file
- @type object: string (example: '/usr/bin/bar')
-
- """
- self._key = self._generate_object_key(obj, root)
-
- def __hash__(self):
- return hash(self._key)
-
- def __eq__(self, other):
- return self._key == other._key
-
- def _generate_object_key(self, obj, root):
- """
- Generate object key for a given object.
-
- @param object: path to a file
- @type object: string (example: '/usr/bin/bar')
- @rtype: 2-tuple of types (long, int) if object exists. string if
- object does not exist.
- @return:
- 1. 2-tuple of object's inode and device from a stat call, if object
- exists.
- 2. realpath of object if object does not exist.
-
- """
-
- os = _os_merge
-
- try:
- _unicode_encode(obj,
- encoding=_encodings['merge'], errors='strict')
- except UnicodeEncodeError:
- # The package appears to have been merged with a
- # different value of sys.getfilesystemencoding(),
- # so fall back to utf_8 if appropriate.
- try:
- _unicode_encode(obj,
- encoding=_encodings['fs'], errors='strict')
- except UnicodeEncodeError:
- pass
- else:
- os = portage.os
-
- abs_path = os.path.join(root, obj.lstrip(os.sep))
- try:
- object_stat = os.stat(abs_path)
- except OSError:
- # Use the realpath as the key if the file does not exists on the
- # filesystem.
- return os.path.realpath(abs_path)
- # Return a tuple of the device and inode.
- return (object_stat.st_dev, object_stat.st_ino)
-
- def file_exists(self):
- """
- Determine if the file for this key exists on the filesystem.
-
- @rtype: Boolean
- @return:
- 1. True if the file exists.
- 2. False if the file does not exist or is a broken symlink.
-
- """
- return isinstance(self._key, tuple)
-
- class _LibGraphNode(_ObjectKey):
- __slots__ = ("alt_paths",)
-
- def __init__(self, obj, root):
- LinkageMap._ObjectKey.__init__(self, obj, root)
- self.alt_paths = set()
-
- def __str__(self):
- return str(sorted(self.alt_paths))
-
- def rebuild(self, exclude_pkgs=None, include_file=None):
- """
- Raises CommandNotFound if there are preserved libs
- and the scanelf binary is not available.
- """
-
- os = _os_merge
- root = self._root
- root_len = len(root) - 1
- self._clear_cache()
- self._defpath.update(getlibpaths(self._root))
- libs = self._libs
- obj_key_cache = self._obj_key_cache
- obj_properties = self._obj_properties
-
- lines = []
-
- # Data from include_file is processed first so that it
- # overrides any data from previously installed files.
- if include_file is not None:
- lines += grabfile(include_file)
-
- aux_keys = [self._needed_aux_key]
- for cpv in self._dbapi.cpv_all():
- if exclude_pkgs is not None and cpv in exclude_pkgs:
- continue
- lines += self._dbapi.aux_get(cpv, aux_keys)[0].split('\n')
- # Cache NEEDED.* files avoid doing excessive IO for every rebuild.
- self._dbapi.flush_cache()
-
- # have to call scanelf for preserved libs here as they aren't
- # registered in NEEDED.ELF.2 files
- if self._dbapi.plib_registry and self._dbapi.plib_registry.getPreservedLibs():
- args = ["/usr/bin/scanelf", "-qF", "%a;%F;%S;%r;%n"]
- for items in self._dbapi.plib_registry.getPreservedLibs().values():
- args.extend(os.path.join(root, x.lstrip("." + os.sep)) \
- for x in items)
- try:
- proc = subprocess.Popen(args, stdout=subprocess.PIPE)
- except EnvironmentError as e:
- if e.errno != errno.ENOENT:
- raise
- raise CommandNotFound(args[0])
- else:
- for l in proc.stdout:
- try:
- l = _unicode_decode(l,
- encoding=_encodings['content'], errors='strict')
- except UnicodeDecodeError:
- l = _unicode_decode(l,
- encoding=_encodings['content'], errors='replace')
- writemsg_level(_("\nError decoding characters " \
- "returned from scanelf: %s\n\n") % (l,),
- level=logging.ERROR, noiselevel=-1)
- l = l[3:].rstrip("\n")
- if not l:
- continue
- fields = l.split(";")
- if len(fields) < 5:
- writemsg_level(_("\nWrong number of fields " \
- "returned from scanelf: %s\n\n") % (l,),
- level=logging.ERROR, noiselevel=-1)
- continue
- fields[1] = fields[1][root_len:]
- lines.append(";".join(fields))
- proc.wait()
-
- for l in lines:
- l = l.rstrip("\n")
- if not l:
- continue
- fields = l.split(";")
- if len(fields) < 5:
- writemsg_level(_("\nWrong number of fields " \
- "in %s: %s\n\n") % (self._needed_aux_key, l),
- level=logging.ERROR, noiselevel=-1)
- continue
- arch = fields[0]
- obj = fields[1]
- soname = fields[2]
- path = set([normalize_path(x) \
- for x in filter(None, fields[3].replace(
- "${ORIGIN}", os.path.dirname(obj)).replace(
- "$ORIGIN", os.path.dirname(obj)).split(":"))])
- needed = [x for x in fields[4].split(",") if x]
-
- obj_key = self._obj_key(obj)
- indexed = True
- myprops = obj_properties.get(obj_key)
- if myprops is None:
- indexed = False
- myprops = (arch, needed, path, soname, set())
- obj_properties[obj_key] = myprops
- # All object paths are added into the obj_properties tuple.
- myprops[4].add(obj)
-
- # Don't index the same file more that once since only one
- # set of data can be correct and therefore mixing data
- # may corrupt the index (include_file overrides previously
- # installed).
- if indexed:
- continue
-
- arch_map = libs.get(arch)
- if arch_map is None:
- arch_map = {}
- libs[arch] = arch_map
- if soname:
- soname_map = arch_map.get(soname)
- if soname_map is None:
- soname_map = self._soname_map_class(
- providers=set(), consumers=set())
- arch_map[soname] = soname_map
- soname_map.providers.add(obj_key)
- for needed_soname in needed:
- soname_map = arch_map.get(needed_soname)
- if soname_map is None:
- soname_map = self._soname_map_class(
- providers=set(), consumers=set())
- arch_map[needed_soname] = soname_map
- soname_map.consumers.add(obj_key)
-
- def listBrokenBinaries(self, debug=False):
- """
- Find binaries and their needed sonames, which have no providers.
-
- @param debug: Boolean to enable debug output
- @type debug: Boolean
- @rtype: dict (example: {'/usr/bin/foo': set(['libbar.so'])})
- @return: The return value is an object -> set-of-sonames mapping, where
- object is a broken binary and the set consists of sonames needed by
- object that have no corresponding libraries to fulfill the dependency.
-
- """
-
- os = _os_merge
-
- class _LibraryCache(object):
-
- """
- Caches properties associated with paths.
-
- The purpose of this class is to prevent multiple instances of
- _ObjectKey for the same paths.
-
- """
-
- def __init__(cache_self):
- cache_self.cache = {}
-
- def get(cache_self, obj):
- """
- Caches and returns properties associated with an object.
-
- @param obj: absolute path (can be symlink)
- @type obj: string (example: '/usr/lib/libfoo.so')
- @rtype: 4-tuple with types
- (string or None, string or None, 2-tuple, Boolean)
- @return: 4-tuple with the following components:
- 1. arch as a string or None if it does not exist,
- 2. soname as a string or None if it does not exist,
- 3. obj_key as 2-tuple,
- 4. Boolean representing whether the object exists.
- (example: ('libfoo.so.1', (123L, 456L), True))
-
- """
- if obj in cache_self.cache:
- return cache_self.cache[obj]
- else:
- obj_key = self._obj_key(obj)
- # Check that the library exists on the filesystem.
- if obj_key.file_exists():
- # Get the arch and soname from LinkageMap._obj_properties if
- # it exists. Otherwise, None.
- arch, _, _, soname, _ = \
- self._obj_properties.get(obj_key, (None,)*5)
- return cache_self.cache.setdefault(obj, \
- (arch, soname, obj_key, True))
- else:
- return cache_self.cache.setdefault(obj, \
- (None, None, obj_key, False))
-
- rValue = {}
- cache = _LibraryCache()
- providers = self.listProviders()
-
- # Iterate over all obj_keys and their providers.
- for obj_key, sonames in providers.items():
- arch, _, path, _, objs = self._obj_properties[obj_key]
- path = path.union(self._defpath)
- # Iterate over each needed soname and the set of library paths that
- # fulfill the soname to determine if the dependency is broken.
- for soname, libraries in sonames.items():
- # validLibraries is used to store libraries, which satisfy soname,
- # so if no valid libraries are found, the soname is not satisfied
- # for obj_key. If unsatisfied, objects associated with obj_key
- # must be emerged.
- validLibraries = set()
- # It could be the case that the library to satisfy the soname is
- # not in the obj's runpath, but a symlink to the library is (eg
- # libnvidia-tls.so.1 in nvidia-drivers). Also, since LinkageMap
- # does not catalog symlinks, broken or missing symlinks may go
- # unnoticed. As a result of these cases, check that a file with
- # the same name as the soname exists in obj's runpath.
- # XXX If we catalog symlinks in LinkageMap, this could be improved.
- for directory in path:
- cachedArch, cachedSoname, cachedKey, cachedExists = \
- cache.get(os.path.join(directory, soname))
- # Check that this library provides the needed soname. Doing
- # this, however, will cause consumers of libraries missing
- # sonames to be unnecessarily emerged. (eg libmix.so)
- if cachedSoname == soname and cachedArch == arch:
- validLibraries.add(cachedKey)
- if debug and cachedKey not in \
- set(map(self._obj_key_cache.get, libraries)):
- # XXX This is most often due to soname symlinks not in
- # a library's directory. We could catalog symlinks in
- # LinkageMap to avoid checking for this edge case here.
- print(_("Found provider outside of findProviders:"), \
- os.path.join(directory, soname), "->", \
- self._obj_properties[cachedKey][4], libraries)
- # A valid library has been found, so there is no need to
- # continue.
- break
- if debug and cachedArch == arch and \
- cachedKey in self._obj_properties:
- print(_("Broken symlink or missing/bad soname: %(dir_soname)s -> %(cachedKey)s "
- "with soname %(cachedSoname)s but expecting %(soname)s") % \
- {"dir_soname":os.path.join(directory, soname),
- "cachedKey": self._obj_properties[cachedKey],
- "cachedSoname": cachedSoname, "soname":soname})
- # This conditional checks if there are no libraries to satisfy the
- # soname (empty set).
- if not validLibraries:
- for obj in objs:
- rValue.setdefault(obj, set()).add(soname)
- # If no valid libraries have been found by this point, then
- # there are no files named with the soname within obj's runpath,
- # but if there are libraries (from the providers mapping), it is
- # likely that soname symlinks or the actual libraries are
- # missing or broken. Thus those libraries are added to rValue
- # in order to emerge corrupt library packages.
- for lib in libraries:
- rValue.setdefault(lib, set()).add(soname)
- if debug:
- if not os.path.isfile(lib):
- print(_("Missing library:"), lib)
- else:
- print(_("Possibly missing symlink:"), \
- os.path.join(os.path.dirname(lib), soname))
- return rValue
-
- def listProviders(self):
- """
- Find the providers for all object keys in LinkageMap.
-
- @rtype: dict (example:
- {(123L, 456L): {'libbar.so': set(['/lib/libbar.so.1.5'])}})
- @return: The return value is an object key -> providers mapping, where
- providers is a mapping of soname -> set-of-library-paths returned
- from the findProviders method.
-
- """
- rValue = {}
- if not self._libs:
- self.rebuild()
- # Iterate over all object keys within LinkageMap.
- for obj_key in self._obj_properties:
- rValue.setdefault(obj_key, self.findProviders(obj_key))
- return rValue
-
- def isMasterLink(self, obj):
- """
- Determine whether an object is a master link.
-
- @param obj: absolute path to an object
- @type obj: string (example: '/usr/bin/foo')
- @rtype: Boolean
- @return:
- 1. True if obj is a master link
- 2. False if obj is not a master link
-
- """
- os = _os_merge
- basename = os.path.basename(obj)
- obj_key = self._obj_key(obj)
- if obj_key not in self._obj_properties:
- raise KeyError("%s (%s) not in object list" % (obj_key, obj))
- soname = self._obj_properties[obj_key][3]
- return (len(basename) < len(soname))
-
- def listLibraryObjects(self):
- """
- Return a list of library objects.
-
- Known limitation: library objects lacking an soname are not included.
-
- @rtype: list of strings
- @return: list of paths to all providers
-
- """
- rValue = []
- if not self._libs:
- self.rebuild()
- for arch_map in self._libs.values():
- for soname_map in arch_map.values():
- for obj_key in soname_map.providers:
- rValue.extend(self._obj_properties[obj_key][4])
- return rValue
-
- def getSoname(self, obj):
- """
- Return the soname associated with an object.
-
- @param obj: absolute path to an object
- @type obj: string (example: '/usr/bin/bar')
- @rtype: string
- @return: soname as a string
-
- """
- if not self._libs:
- self.rebuild()
- if isinstance(obj, self._ObjectKey):
- obj_key = obj
- if obj_key not in self._obj_properties:
- raise KeyError("%s not in object list" % obj_key)
- return self._obj_properties[obj_key][3]
- if obj not in self._obj_key_cache:
- raise KeyError("%s not in object list" % obj)
- return self._obj_properties[self._obj_key_cache[obj]][3]
-
- def findProviders(self, obj):
- """
- Find providers for an object or object key.
-
- This method may be called with a key from _obj_properties.
-
- In some cases, not all valid libraries are returned. This may occur when
- an soname symlink referencing a library is in an object's runpath while
- the actual library is not. We should consider cataloging symlinks within
- LinkageMap as this would avoid those cases and would be a better model of
- library dependencies (since the dynamic linker actually searches for
- files named with the soname in the runpaths).
-
- @param obj: absolute path to an object or a key from _obj_properties
- @type obj: string (example: '/usr/bin/bar') or _ObjectKey
- @rtype: dict (example: {'libbar.so': set(['/lib/libbar.so.1.5'])})
- @return: The return value is a soname -> set-of-library-paths, where
- set-of-library-paths satisfy soname.
-
- """
-
- os = _os_merge
-
- rValue = {}
-
- if not self._libs:
- self.rebuild()
-
- # Determine the obj_key from the arguments.
- if isinstance(obj, self._ObjectKey):
- obj_key = obj
- if obj_key not in self._obj_properties:
- raise KeyError("%s not in object list" % obj_key)
- else:
- obj_key = self._obj_key(obj)
- if obj_key not in self._obj_properties:
- raise KeyError("%s (%s) not in object list" % (obj_key, obj))
-
- arch, needed, path, _, _ = self._obj_properties[obj_key]
- path_keys = set(self._path_key(x) for x in path.union(self._defpath))
- for soname in needed:
- rValue[soname] = set()
- if arch not in self._libs or soname not in self._libs[arch]:
- continue
- # For each potential provider of the soname, add it to rValue if it
- # resides in the obj's runpath.
- for provider_key in self._libs[arch][soname].providers:
- providers = self._obj_properties[provider_key][4]
- for provider in providers:
- if self._path_key(os.path.dirname(provider)) in path_keys:
- rValue[soname].add(provider)
- return rValue
-
- def findConsumers(self, obj):
- """
- Find consumers of an object or object key.
-
- This method may be called with a key from _obj_properties. If this
- method is going to be called with an object key, to avoid not catching
- shadowed libraries, do not pass new _ObjectKey instances to this method.
- Instead pass the obj as a string.
-
- In some cases, not all consumers are returned. This may occur when
- an soname symlink referencing a library is in an object's runpath while
- the actual library is not. For example, this problem is noticeable for
- binutils since its libraries are added to the path via symlinks that
- are generated in the /usr/$CHOST/lib/ directory by binutils-config.
- Failure to recognize consumers of these symlinks makes preserve-libs
- fail to preserve binutils libs that are needed by these unrecognized
- consumers.
-
- Note that library consumption via dlopen (common for kde plugins) is
- currently undetected. However, it is possible to use the
- corresponding libtool archive (*.la) files to detect such consumers
- (revdep-rebuild is able to detect them).
-
- @param obj: absolute path to an object or a key from _obj_properties
- @type obj: string (example: '/usr/bin/bar') or _ObjectKey
- @rtype: set of strings (example: set(['/bin/foo', '/usr/bin/bar']))
- @return: The return value is a set of paths to all objects that
- consume the given object (i.e. reference it via their NEEDED sonames).
-
- """
-
- os = _os_merge
-
- rValue = set()
-
- if not self._libs:
- self.rebuild()
-
- # Determine the obj_key and the set of objects matching the arguments.
- if isinstance(obj, self._ObjectKey):
- obj_key = obj
- if obj_key not in self._obj_properties:
- raise KeyError("%s not in object list" % obj_key)
- objs = self._obj_properties[obj_key][4]
- else:
- objs = set([obj])
- obj_key = self._obj_key(obj)
- if obj_key not in self._obj_properties:
- raise KeyError("%s (%s) not in object list" % (obj_key, obj))
-
- # If there is another version of this lib with the
- # same soname and the master link points to that
- # other version, this lib will be shadowed and won't
- # have any consumers.
- if not isinstance(obj, self._ObjectKey):
- soname = self._obj_properties[obj_key][3]
- master_link = os.path.join(self._root,
- os.path.dirname(obj).lstrip(os.path.sep), soname)
- try:
- master_st = os.stat(master_link)
- obj_st = os.stat(obj)
- except OSError:
- pass
- else:
- if (obj_st.st_dev, obj_st.st_ino) != \
- (master_st.st_dev, master_st.st_ino):
- return set()
-
- # Determine the directory(ies) from the set of objects.
- objs_dir_keys = set(self._path_key(os.path.dirname(x)) for x in objs)
- defpath_keys = set(self._path_key(x) for x in self._defpath)
-
- arch, _, _, soname, _ = self._obj_properties[obj_key]
- if arch in self._libs and soname in self._libs[arch]:
- # For each potential consumer, add it to rValue if an object from the
- # arguments resides in the consumer's runpath.
- for consumer_key in self._libs[arch][soname].consumers:
- _, _, path, _, consumer_objs = \
- self._obj_properties[consumer_key]
- path_keys = defpath_keys.union(self._path_key(x) for x in path)
- if objs_dir_keys.intersection(path_keys):
- rValue.update(consumer_objs)
- return rValue
-
class vardbapi(dbapi):
_excluded_dirs = ["CVS", "lost+found"]
@@ -829,14 +121,6 @@ class vardbapi(dbapi):
self._counter_path = os.path.join(root,
CACHE_PATH, "counter")
- try:
- self.plib_registry = PreservedLibsRegistry(self.root,
- os.path.join(self.root, PRIVATE_PATH, "preserved_libs_registry"))
- except PermissionDenied:
- # apparently this user isn't allowed to access PRIVATE_PATH
- self.plib_registry = None
-
- self.linkmap = LinkageMap(self)
self._owners = self._owners_db(self)
def getpath(self, mykey, filename=None):
@@ -1889,7 +1173,6 @@ class dblink(object):
self.contentscache = None
self._contents_inodes = None
self._contents_basenames = None
- self._linkmap_broken = False
self._md5_merge_map = {}
def lockdb(self):
@@ -2160,11 +1443,6 @@ class dblink(object):
self._unmerge_pkgfiles(pkgfiles, others_in_slot)
self._clear_contents_cache()
- # Remove the registration of preserved libs for this pkg instance
- plib_registry = self.vartree.dbapi.plib_registry
- plib_registry.unregister(self.mycpv, self.settings["SLOT"],
- self.vartree.dbapi.cpv_counter(self.mycpv))
-
if myebuildpath:
ebuild_phase = "postrm"
if scheduler is None:
@@ -2180,33 +1458,6 @@ class dblink(object):
failures += 1
writemsg(_("!!! FAILED postrm: %s\n") % retval, noiselevel=-1)
- # Skip this if another package in the same slot has just been
- # merged on top of this package, since the other package has
- # already called LinkageMap.rebuild() and passed its NEEDED file
- # in as an argument.
- if not others_in_slot:
- self._linkmap_rebuild(exclude_pkgs=(self.mycpv,))
-
- # remove preserved libraries that don't have any consumers left
- cpv_lib_map = self._find_unused_preserved_libs()
- if cpv_lib_map:
- self._remove_preserved_libs(cpv_lib_map)
- for cpv, removed in cpv_lib_map.items():
- if not self.vartree.dbapi.cpv_exists(cpv):
- for dblnk in others_in_slot:
- if dblnk.mycpv == cpv:
- # This one just got merged so it doesn't
- # register with cpv_exists() yet.
- self.vartree.dbapi.removeFromContents(
- dblnk, removed)
- break
- continue
- self.vartree.dbapi.removeFromContents(cpv, removed)
- else:
- # Prune any preserved libs that may have
- # been unmerged with this package.
- self.vartree.dbapi.plib_registry.pruneNonExisting()
-
finally:
if builddir_lock:
try:
@@ -2759,330 +2010,6 @@ class dblink(object):
return False
- def _linkmap_rebuild(self, **kwargs):
- if self._linkmap_broken:
- return
- try:
- self.vartree.dbapi.linkmap.rebuild(**kwargs)
- except CommandNotFound as e:
- self._linkmap_broken = True
- self._display_merge(_("!!! Disabling preserve-libs " \
- "due to error: Command Not Found: %s\n") % (e,),
- level=logging.ERROR, noiselevel=-1)
-
- def _find_libs_to_preserve(self):
- """
- Get set of relative paths for libraries to be preserved. The file
- paths are selected from self._installed_instance.getcontents().
- """
- if self._linkmap_broken or not \
- (self._installed_instance is not None and \
- "preserve-libs" in self.settings.features):
- return None
-
- os = _os_merge
- linkmap = self.vartree.dbapi.linkmap
- installed_instance = self._installed_instance
- old_contents = installed_instance.getcontents()
- root = self.myroot
- root_len = len(root) - 1
- lib_graph = digraph()
- path_node_map = {}
-
- def path_to_node(path):
- node = path_node_map.get(path)
- if node is None:
- node = LinkageMap._LibGraphNode(path, root)
- alt_path_node = lib_graph.get(node)
- if alt_path_node is not None:
- node = alt_path_node
- node.alt_paths.add(path)
- path_node_map[path] = node
- return node
-
- consumer_map = {}
- provider_nodes = set()
- # Create provider nodes and add them to the graph.
- for f_abs in old_contents:
-
- if os is _os_merge:
- try:
- _unicode_encode(f_abs,
- encoding=_encodings['merge'], errors='strict')
- except UnicodeEncodeError:
- # The package appears to have been merged with a
- # different value of sys.getfilesystemencoding(),
- # so fall back to utf_8 if appropriate.
- try:
- _unicode_encode(f_abs,
- encoding=_encodings['fs'], errors='strict')
- except UnicodeEncodeError:
- pass
- else:
- os = portage.os
-
- f = f_abs[root_len:]
- if self.isowner(f, root):
- continue
- try:
- consumers = linkmap.findConsumers(f)
- except KeyError:
- continue
- if not consumers:
- continue
- provider_node = path_to_node(f)
- lib_graph.add(provider_node, None)
- provider_nodes.add(provider_node)
- consumer_map[provider_node] = consumers
-
- # Create consumer nodes and add them to the graph.
- # Note that consumers can also be providers.
- for provider_node, consumers in consumer_map.items():
- for c in consumers:
- if self.isowner(c, root):
- continue
- consumer_node = path_to_node(c)
- if installed_instance.isowner(c, root) and \
- consumer_node not in provider_nodes:
- # This is not a provider, so it will be uninstalled.
- continue
- lib_graph.add(provider_node, consumer_node)
-
- # Locate nodes which should be preserved. They consist of all
- # providers that are reachable from consumers that are not
- # providers themselves.
- preserve_nodes = set()
- for consumer_node in lib_graph.root_nodes():
- if consumer_node in provider_nodes:
- continue
- # Preserve all providers that are reachable from this consumer.
- node_stack = lib_graph.child_nodes(consumer_node)
- while node_stack:
- provider_node = node_stack.pop()
- if provider_node in preserve_nodes:
- continue
- preserve_nodes.add(provider_node)
- node_stack.extend(lib_graph.child_nodes(provider_node))
-
- preserve_paths = set()
- for preserve_node in preserve_nodes:
- # Make sure that at least one of the paths is not a symlink.
- # This prevents symlinks from being erroneously preserved by
- # themselves when the old instance installed symlinks that
- # the new instance does not install.
- have_lib = False
- for f in preserve_node.alt_paths:
- f_abs = os.path.join(root, f.lstrip(os.sep))
- try:
- if stat.S_ISREG(os.lstat(f_abs).st_mode):
- have_lib = True
- break
- except OSError:
- continue
-
- if have_lib:
- preserve_paths.update(preserve_node.alt_paths)
-
- return preserve_paths
-
- def _add_preserve_libs_to_contents(self, preserve_paths):
- """
- Preserve libs returned from _find_libs_to_preserve().
- """
-
- if not preserve_paths:
- return
-
- os = _os_merge
- showMessage = self._display_merge
- root = self.myroot
-
- # Copy contents entries from the old package to the new one.
- new_contents = self.getcontents().copy()
- old_contents = self._installed_instance.getcontents()
- for f in sorted(preserve_paths):
- f = _unicode_decode(f,
- encoding=_encodings['content'], errors='strict')
- f_abs = os.path.join(root, f.lstrip(os.sep))
- contents_entry = old_contents.get(f_abs)
- if contents_entry is None:
- # This will probably never happen, but it might if one of the
- # paths returned from findConsumers() refers to one of the libs
- # that should be preserved yet the path is not listed in the
- # contents. Such a path might belong to some other package, so
- # it shouldn't be preserved here.
- showMessage(_("!!! File '%s' will not be preserved "
- "due to missing contents entry\n") % (f_abs,),
- level=logging.ERROR, noiselevel=-1)
- preserve_paths.remove(f)
- continue
- new_contents[f_abs] = contents_entry
- obj_type = contents_entry[0]
- showMessage(_(">>> needed %s %s\n") % (obj_type, f_abs),
- noiselevel=-1)
- # Add parent directories to contents if necessary.
- parent_dir = os.path.dirname(f_abs)
- while len(parent_dir) > len(root):
- new_contents[parent_dir] = ["dir"]
- prev = parent_dir
- parent_dir = os.path.dirname(parent_dir)
- if prev == parent_dir:
- break
- outfile = atomic_ofstream(os.path.join(self.dbtmpdir, "CONTENTS"))
- write_contents(new_contents, root, outfile)
- outfile.close()
- self._clear_contents_cache()
-
- def _find_unused_preserved_libs(self):
- """
- Find preserved libraries that don't have any consumers left.
- """
-
- if self._linkmap_broken:
- return {}
-
- # Since preserved libraries can be consumers of other preserved
- # libraries, use a graph to track consumer relationships.
- plib_dict = self.vartree.dbapi.plib_registry.getPreservedLibs()
- lib_graph = digraph()
- preserved_nodes = set()
- preserved_paths = set()
- path_cpv_map = {}
- path_node_map = {}
- root = self.myroot
-
- def path_to_node(path):
- node = path_node_map.get(path)
- if node is None:
- node = LinkageMap._LibGraphNode(path, root)
- alt_path_node = lib_graph.get(node)
- if alt_path_node is not None:
- node = alt_path_node
- node.alt_paths.add(path)
- path_node_map[path] = node
- return node
-
- linkmap = self.vartree.dbapi.linkmap
- for cpv, plibs in plib_dict.items():
- for f in plibs:
- path_cpv_map[f] = cpv
- preserved_node = path_to_node(f)
- if not preserved_node.file_exists():
- continue
- lib_graph.add(preserved_node, None)
- preserved_paths.add(f)
- preserved_nodes.add(preserved_node)
- for c in self.vartree.dbapi.linkmap.findConsumers(f):
- consumer_node = path_to_node(c)
- if not consumer_node.file_exists():
- continue
- # Note that consumers may also be providers.
- lib_graph.add(preserved_node, consumer_node)
-
- # Eliminate consumers having providers with the same soname as an
- # installed library that is not preserved. This eliminates
- # libraries that are erroneously preserved due to a move from one
- # directory to another.
- provider_cache = {}
- for preserved_node in preserved_nodes:
- soname = linkmap.getSoname(preserved_node)
- for consumer_node in lib_graph.parent_nodes(preserved_node):
- if consumer_node in preserved_nodes:
- continue
- providers = provider_cache.get(consumer_node)
- if providers is None:
- providers = linkmap.findProviders(consumer_node)
- provider_cache[consumer_node] = providers
- providers = providers.get(soname)
- if providers is None:
- continue
- for provider in providers:
- if provider in preserved_paths:
- continue
- provider_node = path_to_node(provider)
- if not provider_node.file_exists():
- continue
- if provider_node in preserved_nodes:
- continue
- # An alternative provider seems to be
- # installed, so drop this edge.
- lib_graph.remove_edge(preserved_node, consumer_node)
- break
-
- cpv_lib_map = {}
- while not lib_graph.empty():
- root_nodes = preserved_nodes.intersection(lib_graph.root_nodes())
- if not root_nodes:
- break
- lib_graph.difference_update(root_nodes)
- unlink_list = set()
- for node in root_nodes:
- unlink_list.update(node.alt_paths)
- unlink_list = sorted(unlink_list)
- for obj in unlink_list:
- cpv = path_cpv_map.get(obj)
- if cpv is None:
- # This means that a symlink is in the preserved libs
- # registry, but the actual lib it points to is not.
- self._display_merge(_("!!! symlink to lib is preserved, "
- "but not the lib itself:\n!!! '%s'\n") % (obj,),
- level=logging.ERROR, noiselevel=-1)
- continue
- removed = cpv_lib_map.get(cpv)
- if removed is None:
- removed = set()
- cpv_lib_map[cpv] = removed
- removed.add(obj)
-
- return cpv_lib_map
-
- def _remove_preserved_libs(self, cpv_lib_map):
- """
- Remove files returned from _find_unused_preserved_libs().
- """
-
- os = _os_merge
-
- files_to_remove = set()
- for files in cpv_lib_map.values():
- files_to_remove.update(files)
- files_to_remove = sorted(files_to_remove)
- showMessage = self._display_merge
- root = self.myroot
-
- parent_dirs = set()
- for obj in files_to_remove:
- obj = os.path.join(root, obj.lstrip(os.sep))
- parent_dirs.add(os.path.dirname(obj))
- if os.path.islink(obj):
- obj_type = _("sym")
- else:
- obj_type = _("obj")
- try:
- os.unlink(obj)
- except OSError as e:
- if e.errno != errno.ENOENT:
- raise
- del e
- else:
- showMessage(_("<<< !needed %s %s\n") % (obj_type, obj),
- noiselevel=-1)
-
- # Remove empty parent directories if possible.
- while parent_dirs:
- x = parent_dirs.pop()
- while True:
- try:
- os.rmdir(x)
- except OSError:
- break
- prev = x
- x = os.path.dirname(x)
- if x == prev:
- break
-
- self.vartree.dbapi.plib_registry.pruneNonExisting()
-
def _collision_protect(self, srcroot, destroot, mypkglist, mycontents):
os = _os_merge
@@ -3091,18 +2018,6 @@ class dblink(object):
portage.util.shlex_split(
self.settings.get("COLLISION_IGNORE", ""))])
- # For collisions with preserved libraries, the current package
- # will assume ownership and the libraries will be unregistered.
- plib_dict = self.vartree.dbapi.plib_registry.getPreservedLibs()
- plib_cpv_map = {}
- plib_paths = set()
- for cpv, paths in plib_dict.items():
- plib_paths.update(paths)
- for f in paths:
- plib_cpv_map[f] = cpv
- plib_inodes = self._lstat_inode_map(plib_paths)
- plib_collisions = {}
-
showMessage = self._display_merge
scheduler = self._scheduler
stopmerge = False
@@ -3155,20 +2070,6 @@ class dblink(object):
if f[0] != "/":
f="/"+f
- plibs = plib_inodes.get((dest_lstat.st_dev, dest_lstat.st_ino))
- if plibs:
- for path in plibs:
- cpv = plib_cpv_map[path]
- paths = plib_collisions.get(cpv)
- if paths is None:
- paths = set()
- plib_collisions[cpv] = paths
- paths.add(path)
- # The current package will assume ownership and the
- # libraries will be unregistered, so exclude this
- # path from the normal collisions.
- continue
-
isowned = False
full_path = os.path.join(destroot, f.lstrip(os.path.sep))
for ver in mypkglist:
@@ -3189,7 +2090,7 @@ class dblink(object):
break
if stopmerge:
collisions.append(f)
- return collisions, plib_collisions
+ return collisions
def _lstat_inode_map(self, path_iter):
"""
@@ -3583,7 +2484,7 @@ class dblink(object):
blockers = self._blockers()
if blockers is None:
blockers = []
- collisions, plib_collisions = \
+ collisions = \
self._collision_protect(srcroot, destroot,
others_in_slot + blockers, myfilelist + mylinklist)
@@ -3819,15 +2720,6 @@ class dblink(object):
dblnk._clear_contents_cache()
self._clear_contents_cache()
- linkmap = self.vartree.dbapi.linkmap
- self._linkmap_rebuild(include_file=os.path.join(inforoot,
- linkmap._needed_aux_key))
-
- # Preserve old libs if they are still in use
- preserve_paths = self._find_libs_to_preserve()
- if preserve_paths:
- self._add_preserve_libs_to_contents(preserve_paths)
-
# If portage is reinstalling itself, remove the old
# version now since we want to use the temporary
# PORTAGE_BIN_PATH that will be removed when we return.
@@ -3856,7 +2748,6 @@ class dblink(object):
showMessage(_(">>> Safely unmerging already-installed instance...\n"))
emerge_log(_(" === Unmerging... (%s)") % (dblnk.mycpv,))
others_in_slot.remove(dblnk) # dblnk will unmerge itself now
- dblnk._linkmap_broken = self._linkmap_broken
unmerge_rval = dblnk.unmerge(trimworld=0,
ldpath_mtimes=prev_mtimes, others_in_slot=others_in_slot)
@@ -3880,11 +2771,6 @@ class dblink(object):
self.delete()
_movefile(self.dbtmpdir, self.dbpkgdir, mysettings=self.settings)
- # keep track of the libs we preserved
- if preserve_paths:
- self.vartree.dbapi.plib_registry.register(self.mycpv,
- slot, counter, sorted(preserve_paths))
-
# Check for file collisions with blocking packages
# and remove any colliding files from their CONTENTS
# since they now belong to this package.
@@ -3895,24 +2781,6 @@ class dblink(object):
self.vartree.dbapi.removeFromContents(blocker, iter(contents),
relative_paths=False)
- # Unregister any preserved libs that this package has overwritten
- # and update the contents of the packages that owned them.
- plib_registry = self.vartree.dbapi.plib_registry
- plib_dict = plib_registry.getPreservedLibs()
- for cpv, paths in plib_collisions.items():
- if cpv not in plib_dict:
- continue
- if cpv == self.mycpv:
- continue
- try:
- slot, counter = self.vartree.dbapi.aux_get(
- cpv, ["SLOT", "COUNTER"])
- except KeyError:
- continue
- remaining = [f for f in plib_dict[cpv] if f not in paths]
- plib_registry.register(cpv, slot, counter, remaining)
- self.vartree.dbapi.removeFromContents(cpv, paths)
-
self.vartree.dbapi._add(self)
contents = self.getcontents()
@@ -3950,17 +2818,6 @@ class dblink(object):
contents=contents, env=self.settings.environ(),
writemsg_level=self._display_merge)
- # For gcc upgrades, preserved libs have to be removed after the
- # the library path has been updated.
- self._linkmap_rebuild()
- cpv_lib_map = self._find_unused_preserved_libs()
- if cpv_lib_map:
- self._remove_preserved_libs(cpv_lib_map)
- for cpv, removed in cpv_lib_map.items():
- if not self.vartree.dbapi.cpv_exists(cpv):
- continue
- self.vartree.dbapi.removeFromContents(cpv, removed)
-
return os.EX_OK
def _new_backup_path(self, p):
@@ -4342,8 +3199,6 @@ class dblink(object):
retval = -1
self.lockdb()
try:
- self.vartree.dbapi.plib_registry.load()
- self.vartree.dbapi.plib_registry.pruneNonExisting()
retval = self.treewalk(mergeroot, myroot, inforoot, myebuild,
cleanup=cleanup, mydbapi=mydbapi, prev_mtimes=prev_mtimes)
@@ -4364,7 +3219,6 @@ class dblink(object):
self._scheduler.dblinkEbuildPhase(
self, mydbapi, myebuild, "clean")
finally:
- self.vartree.dbapi.linkmap._clear_cache()
self.unlockdb()
return retval
diff --git a/pym/portage/sets/libs.py b/pym/portage/sets/libs.py
deleted file mode 100644
index b8aa3c41..00000000
--- a/pym/portage/sets/libs.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright 2007 Gentoo Foundation
-# Distributed under the terms of the GNU General Public License v2
-# $Id$
-
-from __future__ import print_function
-
-from portage.localization import _
-from portage.sets.base import PackageSet
-from portage.sets import get_boolean
-from portage.versions import catpkgsplit
-import portage
-
-class LibraryConsumerSet(PackageSet):
- _operations = ["merge", "unmerge"]
-
- def __init__(self, vardbapi, debug=False):
- super(LibraryConsumerSet, self).__init__()
- self.dbapi = vardbapi
- self.debug = debug
-
- def mapPathsToAtoms(self, paths):
- rValue = set()
- for link, p in self.dbapi._owners.iter_owners(paths):
- cat, pn = catpkgsplit(link.mycpv)[:2]
- slot = self.dbapi.aux_get(link.mycpv, ["SLOT"])[0]
- rValue.add("%s/%s:%s" % (cat, pn, slot))
- return rValue
-
-class LibraryFileConsumerSet(LibraryConsumerSet):
-
- """
- Note: This does not detect libtool archive (*.la) files that consume the
- specified files (revdep-rebuild is able to detect them).
- """
-
- description = "Package set which contains all packages " + \
- "that consume the specified library file(s)."
-
- def __init__(self, vardbapi, files, **kargs):
- super(LibraryFileConsumerSet, self).__init__(vardbapi, **kargs)
- self.files = files
-
- def load(self):
- consumers = set()
- for lib in self.files:
- consumers.update(self.dbapi.linkmap.findConsumers(lib))
-
- if not consumers:
- return
- self._setAtoms(self.mapPathsToAtoms(consumers))
-
- def singleBuilder(cls, options, settings, trees):
- files = tuple(portage.util.shlex_split(options.get("files", "")))
- if not files:
- raise SetConfigError(_("no files given"))
- debug = get_boolean(options, "debug", False)
- return LibraryFileConsumerSet(trees["vartree"].dbapi,
- files, debug=debug)
- singleBuilder = classmethod(singleBuilder)
-
-class PreservedLibraryConsumerSet(LibraryConsumerSet):
- def load(self):
- reg = self.dbapi.plib_registry
- consumers = set()
- if reg:
- plib_dict = reg.getPreservedLibs()
- for libs in plib_dict.values():
- for lib in libs:
- if self.debug:
- print(lib)
- for x in sorted(self.dbapi.linkmap.findConsumers(lib)):
- print(" ", x)
- print("-"*40)
- consumers.update(self.dbapi.linkmap.findConsumers(lib))
- # Don't rebuild packages just because they contain preserved
- # libs that happen to be consumers of other preserved libs.
- for libs in plib_dict.values():
- consumers.difference_update(libs)
- else:
- return
- if not consumers:
- return
- self._setAtoms(self.mapPathsToAtoms(consumers))
-
- def singleBuilder(cls, options, settings, trees):
- debug = get_boolean(options, "debug", False)
- return PreservedLibraryConsumerSet(trees["vartree"].dbapi,
- debug=debug)
- singleBuilder = classmethod(singleBuilder)