diff options
Diffstat (limited to 'portage_with_autodep/pym/portage')
388 files changed, 11768 insertions, 10348 deletions
diff --git a/portage_with_autodep/pym/portage/__init__.py b/portage_with_autodep/pym/portage/__init__.py index 2a2eb99..431dc26 100644 --- a/portage_with_autodep/pym/portage/__init__.py +++ b/portage_with_autodep/pym/portage/__init__.py @@ -2,7 +2,7 @@ # Copyright 1998-2011 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 -VERSION="HEAD" +VERSION="2.2.0_alpha108" # =========================================================================== # START OF IMPORTS -- START OF IMPORTS -- START OF IMPORTS -- START OF IMPORT @@ -148,20 +148,35 @@ if sys.hexversion >= 0x3000000: basestring = str long = int -# Assume utf_8 fs encoding everywhere except in merge code, where the -# user's locale is respected. +# We use utf_8 encoding everywhere. Previously, we used +# sys.getfilesystemencoding() for the 'merge' encoding, but that had +# various problems: +# +# 1) If the locale is ever changed then it can cause orphan files due +# to changed character set translation. +# +# 2) Ebuilds typically install files with utf_8 encoded file names, +# and then portage would be forced to rename those files to match +# sys.getfilesystemencoding(), possibly breaking things. +# +# 3) Automatic translation between encodings can lead to nonsensical +# file names when the source encoding is unknown by portage. +# +# 4) It's inconvenient for ebuilds to convert the encodings of file +# names to match the current locale, and upstreams typically encode +# file names with utf_8 encoding. +# +# So, instead of relying on sys.getfilesystemencoding(), we avoid the above +# problems by using a constant utf_8 'merge' encoding for all locales, as +# discussed in bug #382199 and bug #381509. _encodings = { 'content' : 'utf_8', 'fs' : 'utf_8', - 'merge' : sys.getfilesystemencoding(), + 'merge' : 'utf_8', 'repo.content' : 'utf_8', 'stdio' : 'utf_8', } -# This can happen if python is built with USE=build (stage 1). 
-if _encodings['merge'] is None: - _encodings['merge'] = 'ascii' - if sys.hexversion >= 0x3000000: def _unicode_encode(s, encoding=_encodings['content'], errors='backslashreplace'): if isinstance(s, str): @@ -215,7 +230,7 @@ class _unicode_func_wrapper(object): rval = self._func(*wrapped_args, **wrapped_kwargs) # Don't use isinstance() since we don't want to convert subclasses - # of tuple such as posix.stat_result in python-3.2. + # of tuple such as posix.stat_result in Python >=3.2. if rval.__class__ in (list, tuple): decoded_rval = [] for x in rval: @@ -320,6 +335,16 @@ _python_interpreter = os.path.realpath(sys.executable) _bin_path = PORTAGE_BIN_PATH _pym_path = PORTAGE_PYM_PATH +if sys.hexversion >= 0x3030000: + # Workaround for http://bugs.python.org/issue14007 + def _test_xml_etree_ElementTree_TreeBuilder_type(): + import subprocess + p = subprocess.Popen([_python_interpreter, "-c", + "import sys, xml.etree.ElementTree; sys.exit(not isinstance(xml.etree.ElementTree.TreeBuilder, type))"]) + if p.wait() != 0: + sys.modules["_elementtree"] = None + _test_xml_etree_ElementTree_TreeBuilder_type() + def _shell_quote(s): """ Quote a string in double-quotes and use backslashes to @@ -380,9 +405,12 @@ def getcwd(): return "/" getcwd() -def abssymlink(symlink): +def abssymlink(symlink, target=None): "This reads symlinks, resolving the relative symlinks, and returning the absolute." - mylink=os.readlink(symlink) + if target is not None: + mylink = target + else: + mylink = os.readlink(symlink) if mylink[0] != '/': mydir=os.path.dirname(symlink) mylink=mydir+"/"+mylink @@ -417,29 +445,25 @@ def eapi_is_supported(eapi): return False return eapi <= portage.const.EAPI -# Generally, it's best not to assume that cache entries for unsupported EAPIs -# can be validated. However, the current package manager specification does not -# guarantee that the EAPI can be parsed without sourcing the ebuild, so -# it's too costly to discard existing cache entries for unsupported EAPIs. 
-# Therefore, by default, assume that cache entries for unsupported EAPIs can be -# validated. If FEATURES=parse-eapi-* is enabled, this assumption is discarded -# since the EAPI can be determined without the incurring the cost of sourcing -# the ebuild. -_validate_cache_for_unsupported_eapis = True - -_parse_eapi_ebuild_head_re = re.compile(r'^EAPI=[\'"]?([^\'"#]*)') -_parse_eapi_ebuild_head_max_lines = 30 +# This pattern is specified by PMS section 7.3.1. +_pms_eapi_re = re.compile(r"^[ \t]*EAPI=(['\"]?)([A-Za-z0-9+_.-]*)\1[ \t]*([ \t]#.*)?$") +_comment_or_blank_line = re.compile(r"^\s*(#.*)?$") def _parse_eapi_ebuild_head(f): - count = 0 + eapi = None + eapi_lineno = None + lineno = 0 for line in f: - m = _parse_eapi_ebuild_head_re.match(line) - if m is not None: - return m.group(1).strip() - count += 1 - if count >= _parse_eapi_ebuild_head_max_lines: + lineno += 1 + m = _comment_or_blank_line.match(line) + if m is None: + eapi_lineno = lineno + m = _pms_eapi_re.match(line) + if m is not None: + eapi = m.group(2) break - return '0' + + return (eapi, eapi_lineno) def _movefile(src, dest, **kwargs): """Calls movefile and raises a PortageException if an error occurs.""" @@ -461,10 +485,16 @@ def portageexit(): if data.secpass > 1 and os.environ.get("SANDBOX_ON") != "1": close_portdbapi_caches() -def create_trees(config_root=None, target_root=None, trees=None): - if trees is None: - trees = {} - else: +class _trees_dict(dict): + __slots__ = ('_running_eroot', '_target_eroot',) + def __init__(self, *pargs, **kargs): + dict.__init__(self, *pargs, **kargs) + self._running_eroot = None + self._target_eroot = None + +def create_trees(config_root=None, target_root=None, trees=None, env=None, + eprefix=None): + if trees is not None: # clean up any existing portdbapi instances for myroot in trees: portdb = trees[myroot]["porttree"].dbapi @@ -472,12 +502,25 @@ def create_trees(config_root=None, target_root=None, trees=None): portdbapi.portdbapi_instances.remove(portdb) del 
trees[myroot]["porttree"], myroot, portdb + if trees is None: + trees = _trees_dict() + elif not isinstance(trees, _trees_dict): + # caller passed a normal dict or something, + # but we need a _trees_dict instance + trees = _trees_dict(trees) + + if env is None: + env = os.environ + settings = config(config_root=config_root, target_root=target_root, - config_incrementals=portage.const.INCREMENTALS) + env=env, eprefix=eprefix) settings.lock() - myroots = [(settings["ROOT"], settings)] - if settings["ROOT"] != "/": + trees._target_eroot = settings['EROOT'] + myroots = [(settings['EROOT'], settings)] + if settings["ROOT"] == "/": + trees._running_eroot = trees._target_eroot + else: # When ROOT != "/" we only want overrides from the calling # environment to apply to the config that's associated @@ -485,24 +528,27 @@ def create_trees(config_root=None, target_root=None, trees=None): clean_env = {} for k in ('PATH', 'PORTAGE_GRPNAME', 'PORTAGE_USERNAME', 'SSH_AGENT_PID', 'SSH_AUTH_SOCK', 'TERM', - 'ftp_proxy', 'http_proxy', 'no_proxy'): + 'ftp_proxy', 'http_proxy', 'no_proxy', + '__PORTAGE_TEST_HARDLINK_LOCKS'): v = settings.get(k) if v is not None: clean_env[k] = v - settings = config(config_root=None, target_root="/", env=clean_env) + settings = config(config_root=None, target_root="/", + env=clean_env, eprefix=eprefix) settings.lock() - myroots.append((settings["ROOT"], settings)) + trees._running_eroot = settings['EROOT'] + myroots.append((settings['EROOT'], settings)) for myroot, mysettings in myroots: trees[myroot] = portage.util.LazyItemsDict(trees.get(myroot, {})) trees[myroot].addLazySingleton("virtuals", mysettings.getvirtuals) trees[myroot].addLazySingleton( - "vartree", vartree, myroot, categories=mysettings.categories, + "vartree", vartree, categories=mysettings.categories, settings=mysettings) trees[myroot].addLazySingleton("porttree", - portagetree, myroot, settings=mysettings) + portagetree, settings=mysettings) trees[myroot].addLazySingleton("bintree", - 
binarytree, myroot, mysettings["PKGDIR"], settings=mysettings) + binarytree, pkgdir=mysettings["PKGDIR"], settings=mysettings) return trees if VERSION == 'HEAD': diff --git a/portage_with_autodep/pym/portage/__init__.pyo b/portage_with_autodep/pym/portage/__init__.pyo Binary files differnew file mode 100644 index 0000000..9fc449e --- /dev/null +++ b/portage_with_autodep/pym/portage/__init__.pyo diff --git a/portage_with_autodep/pym/portage/_emirrordist/Config.py b/portage_with_autodep/pym/portage/_emirrordist/Config.py new file mode 100644 index 0000000..db4bfeb --- /dev/null +++ b/portage_with_autodep/pym/portage/_emirrordist/Config.py @@ -0,0 +1,132 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import copy +import io +import logging +import shelve +import sys +import time + +import portage +from portage import os +from portage.util import grabdict, grablines +from portage.util._ShelveUnicodeWrapper import ShelveUnicodeWrapper + +class Config(object): + def __init__(self, options, portdb, event_loop): + self.options = options + self.portdb = portdb + self.event_loop = event_loop + self.added_byte_count = 0 + self.added_file_count = 0 + self.scheduled_deletion_count = 0 + self.delete_count = 0 + self.file_owners = {} + self.file_failures = {} + self.start_time = time.time() + self._open_files = [] + + self.log_success = self._open_log('success', options.success_log, 'a') + self.log_failure = self._open_log('failure', options.failure_log, 'a') + + self.distfiles = None + if options.distfiles is not None: + self.distfiles = options.distfiles + + self.mirrors = copy.copy(portdb.settings.thirdpartymirrors()) + + if options.mirror_overrides is not None: + self.mirrors.update(grabdict(options.mirror_overrides)) + + if options.mirror_skip is not None: + for x in options.mirror_skip.split(","): + self.mirrors[x] = [] + + self.whitelist = None + if options.whitelist_from is not None: + self.whitelist = set() + 
for filename in options.whitelist_from: + for line in grablines(filename): + line = line.strip() + if line and not line.startswith("#"): + self.whitelist.add(line) + + self.restrict_mirror_exemptions = None + if options.restrict_mirror_exemptions is not None: + self.restrict_mirror_exemptions = frozenset( + options.restrict_mirror_exemptions.split(",")) + + self.recycle_db = None + if options.recycle_db is not None: + self.recycle_db = self._open_shelve( + options.recycle_db, 'recycle') + + self.distfiles_db = None + if options.distfiles_db is not None: + self.distfiles_db = self._open_shelve( + options.distfiles_db, 'distfiles') + + self.deletion_db = None + if options.deletion_db is not None: + self.deletion_db = self._open_shelve( + options.deletion_db, 'deletion') + + def _open_log(self, log_desc, log_path, mode): + + if log_path is None or self.options.dry_run: + log_func = logging.info + line_format = "%s: %%s" % log_desc + add_newline = False + if log_path is not None: + logging.warn(("dry-run: %s log " + "redirected to logging.info") % log_desc) + else: + self._open_files.append(io.open(log_path, mode=mode, + encoding='utf_8')) + line_format = "%s\n" + log_func = self._open_files[-1].write + + return self._LogFormatter(line_format, log_func) + + class _LogFormatter(object): + + __slots__ = ('_line_format', '_log_func') + + def __init__(self, line_format, log_func): + self._line_format = line_format + self._log_func = log_func + + def __call__(self, msg): + self._log_func(self._line_format % (msg,)) + + def _open_shelve(self, db_file, db_desc): + if self.options.dry_run: + open_flag = "r" + else: + open_flag = "c" + + if self.options.dry_run and not os.path.exists(db_file): + db = {} + else: + db = shelve.open(db_file, flag=open_flag) + if sys.hexversion < 0x3000000: + db = ShelveUnicodeWrapper(db) + + if self.options.dry_run: + logging.warn("dry-run: %s db opened in readonly mode" % db_desc) + if not isinstance(db, dict): + volatile_db = dict((k, db[k]) for 
k in db) + db.close() + db = volatile_db + else: + self._open_files.append(db) + + return db + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + while self._open_files: + self._open_files.pop().close() diff --git a/portage_with_autodep/pym/portage/_emirrordist/DeletionIterator.py b/portage_with_autodep/pym/portage/_emirrordist/DeletionIterator.py new file mode 100644 index 0000000..dff52c0 --- /dev/null +++ b/portage_with_autodep/pym/portage/_emirrordist/DeletionIterator.py @@ -0,0 +1,83 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import logging +import stat + +from portage import os +from .DeletionTask import DeletionTask + +class DeletionIterator(object): + + def __init__(self, config): + self._config = config + + def __iter__(self): + distdir = self._config.options.distfiles + file_owners = self._config.file_owners + whitelist = self._config.whitelist + distfiles_local = self._config.options.distfiles_local + deletion_db = self._config.deletion_db + deletion_delay = self._config.options.deletion_delay + start_time = self._config.start_time + distfiles_set = set(os.listdir(self._config.options.distfiles)) + for filename in distfiles_set: + try: + st = os.stat(os.path.join(distdir, filename)) + except OSError as e: + logging.error("stat failed on '%s' in distfiles: %s\n" % + (filename, e)) + continue + if not stat.S_ISREG(st.st_mode): + continue + elif filename in file_owners: + if deletion_db is not None: + try: + del deletion_db[filename] + except KeyError: + pass + elif whitelist is not None and filename in whitelist: + if deletion_db is not None: + try: + del deletion_db[filename] + except KeyError: + pass + elif distfiles_local is not None and \ + os.path.exists(os.path.join(distfiles_local, filename)): + if deletion_db is not None: + try: + del deletion_db[filename] + except KeyError: + pass + else: + self._config.scheduled_deletion_count += 1 + + 
if deletion_db is None or deletion_delay is None: + + yield DeletionTask(background=True, + distfile=filename, + config=self._config) + + else: + deletion_entry = deletion_db.get(filename) + + if deletion_entry is None: + logging.debug("add '%s' to deletion db" % filename) + deletion_db[filename] = start_time + + elif deletion_entry + deletion_delay <= start_time: + + yield DeletionTask(background=True, + distfile=filename, + config=self._config) + + if deletion_db is not None: + for filename in list(deletion_db): + if filename not in distfiles_set: + try: + del deletion_db[filename] + except KeyError: + pass + else: + logging.debug("drop '%s' from deletion db" % + filename) diff --git a/portage_with_autodep/pym/portage/_emirrordist/DeletionTask.py b/portage_with_autodep/pym/portage/_emirrordist/DeletionTask.py new file mode 100644 index 0000000..7d10957 --- /dev/null +++ b/portage_with_autodep/pym/portage/_emirrordist/DeletionTask.py @@ -0,0 +1,129 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import errno +import logging + +from portage import os +from portage.util._async.FileCopier import FileCopier +from _emerge.CompositeTask import CompositeTask + +class DeletionTask(CompositeTask): + + __slots__ = ('distfile', 'config') + + def _start(self): + + distfile_path = os.path.join( + self.config.options.distfiles, self.distfile) + + if self.config.options.recycle_dir is not None: + distfile_path = os.path.join(self.config.options.distfiles, self.distfile) + recycle_path = os.path.join( + self.config.options.recycle_dir, self.distfile) + if self.config.options.dry_run: + logging.info(("dry-run: move '%s' from " + "distfiles to recycle") % self.distfile) + else: + logging.debug(("move '%s' from " + "distfiles to recycle") % self.distfile) + try: + os.rename(distfile_path, recycle_path) + except OSError as e: + if e.errno != errno.EXDEV: + logging.error(("rename %s from distfiles to " + "recycle failed: 
%s") % (self.distfile, e)) + else: + self.returncode = os.EX_OK + self._async_wait() + return + + self._start_task( + FileCopier(src_path=distfile_path, + dest_path=recycle_path, + background=False), + self._recycle_copier_exit) + return + + success = True + + if self.config.options.dry_run: + logging.info(("dry-run: delete '%s' from " + "distfiles") % self.distfile) + else: + logging.debug(("delete '%s' from " + "distfiles") % self.distfile) + try: + os.unlink(distfile_path) + except OSError as e: + if e.errno not in (errno.ENOENT, errno.ESTALE): + logging.error("%s unlink failed in distfiles: %s" % + (self.distfile, e)) + success = False + + if success: + self._success() + self.returncode = os.EX_OK + else: + self.returncode = 1 + + self._async_wait() + + def _recycle_copier_exit(self, copier): + + self._assert_current(copier) + if self._was_cancelled(): + self.wait() + return + + success = True + if copier.returncode == os.EX_OK: + + try: + os.unlink(copier.src_path) + except OSError as e: + if e.errno not in (errno.ENOENT, errno.ESTALE): + logging.error("%s unlink failed in distfiles: %s" % + (self.distfile, e)) + success = False + + else: + logging.error(("%s copy from distfiles " + "to recycle failed: %s") % (self.distfile, e)) + success = False + + if success: + self._success() + self.returncode = os.EX_OK + else: + self.returncode = 1 + + self._current_task = None + self.wait() + + def _success(self): + + cpv = "unknown" + if self.config.distfiles_db is not None: + cpv = self.config.distfiles_db.get(self.distfile, cpv) + + self.config.delete_count += 1 + self.config.log_success("%s\t%s\tremoved" % (cpv, self.distfile)) + + if self.config.distfiles_db is not None: + try: + del self.config.distfiles_db[self.distfile] + except KeyError: + pass + else: + logging.debug(("drop '%s' from " + "distfiles db") % self.distfile) + + if self.config.deletion_db is not None: + try: + del self.config.deletion_db[self.distfile] + except KeyError: + pass + else: + 
logging.debug(("drop '%s' from " + "deletion db") % self.distfile) diff --git a/portage_with_autodep/pym/portage/_emirrordist/FetchIterator.py b/portage_with_autodep/pym/portage/_emirrordist/FetchIterator.py new file mode 100644 index 0000000..16a0b04 --- /dev/null +++ b/portage_with_autodep/pym/portage/_emirrordist/FetchIterator.py @@ -0,0 +1,147 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +from portage import os +from portage.checksum import (_apply_hash_filter, + _filter_unaccelarated_hashes, _hash_filter) +from portage.dep import use_reduce +from portage.exception import PortageException +from .FetchTask import FetchTask + +class FetchIterator(object): + + def __init__(self, config): + self._config = config + self._log_failure = config.log_failure + + def _iter_every_cp(self): + # List categories individually, in order to start yielding quicker, + # and in order to reduce latency in case of a signal interrupt. + cp_all = self._config.portdb.cp_all + for category in sorted(self._config.portdb.categories): + for cp in cp_all(categories=(category,)): + yield cp + + def __iter__(self): + + portdb = self._config.portdb + get_repo_for_location = portdb.repositories.get_repo_for_location + file_owners = self._config.file_owners + file_failures = self._config.file_failures + restrict_mirror_exemptions = self._config.restrict_mirror_exemptions + + hash_filter = _hash_filter( + portdb.settings.get("PORTAGE_CHECKSUM_FILTER", "")) + if hash_filter.transparent: + hash_filter = None + + for cp in self._iter_every_cp(): + + for tree in portdb.porttrees: + + # Reset state so the Manifest is pulled once + # for this cp / tree combination. 
+ digests = None + repo_config = get_repo_for_location(tree) + + for cpv in portdb.cp_list(cp, mytree=tree): + + try: + restrict, = portdb.aux_get(cpv, ("RESTRICT",), + mytree=tree) + except (KeyError, PortageException) as e: + self._log_failure("%s\t\taux_get exception %s" % + (cpv, e)) + continue + + # Here we use matchnone=True to ignore conditional parts + # of RESTRICT since they don't apply unconditionally. + # Assume such conditionals only apply on the client side. + try: + restrict = frozenset(use_reduce(restrict, + flat=True, matchnone=True)) + except PortageException as e: + self._log_failure("%s\t\tuse_reduce exception %s" % + (cpv, e)) + continue + + if "fetch" in restrict: + continue + + try: + uri_map = portdb.getFetchMap(cpv) + except PortageException as e: + self._log_failure("%s\t\tgetFetchMap exception %s" % + (cpv, e)) + continue + + if not uri_map: + continue + + if "mirror" in restrict: + skip = False + if restrict_mirror_exemptions is not None: + new_uri_map = {} + for filename, uri_tuple in uri_map.items(): + for uri in uri_tuple: + if uri[:9] == "mirror://": + i = uri.find("/", 9) + if i != -1 and uri[9:i].strip("/") in \ + restrict_mirror_exemptions: + new_uri_map[filename] = uri_tuple + break + if new_uri_map: + uri_map = new_uri_map + else: + skip = True + else: + skip = True + + if skip: + continue + + # Parse Manifest for this cp if we haven't yet. 
+ if digests is None: + try: + digests = repo_config.load_manifest( + os.path.join(repo_config.location, cp) + ).getTypeDigests("DIST") + except (EnvironmentError, PortageException) as e: + for filename in uri_map: + self._log_failure( + "%s\t%s\tManifest exception %s" % + (cpv, filename, e)) + file_failures[filename] = cpv + continue + + if not digests: + for filename in uri_map: + self._log_failure("%s\t%s\tdigest entry missing" % + (cpv, filename)) + file_failures[filename] = cpv + continue + + for filename, uri_tuple in uri_map.items(): + file_digests = digests.get(filename) + if file_digests is None: + self._log_failure("%s\t%s\tdigest entry missing" % + (cpv, filename)) + file_failures[filename] = cpv + continue + if filename in file_owners: + continue + file_owners[filename] = cpv + + file_digests = \ + _filter_unaccelarated_hashes(file_digests) + if hash_filter is not None: + file_digests = _apply_hash_filter( + file_digests, hash_filter) + + yield FetchTask(cpv=cpv, + background=True, + digests=file_digests, + distfile=filename, + restrict=restrict, + uri_tuple=uri_tuple, + config=self._config) diff --git a/portage_with_autodep/pym/portage/_emirrordist/FetchTask.py b/portage_with_autodep/pym/portage/_emirrordist/FetchTask.py new file mode 100644 index 0000000..66c41c1 --- /dev/null +++ b/portage_with_autodep/pym/portage/_emirrordist/FetchTask.py @@ -0,0 +1,629 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import collections +import errno +import logging +import random +import stat +import subprocess +import sys + +import portage +from portage import _encodings, _unicode_encode +from portage import os +from portage.util._async.FileCopier import FileCopier +from portage.util._async.FileDigester import FileDigester +from portage.util._async.PipeLogger import PipeLogger +from portage.util._async.PopenProcess import PopenProcess +from _emerge.CompositeTask import CompositeTask + +default_hash_name 
= portage.const.MANIFEST2_REQUIRED_HASH + +# Use --no-check-certificate since Manifest digests should provide +# enough security, and certificates can be self-signed or whatnot. +default_fetchcommand = "wget -c -v -t 1 --passive-ftp --no-check-certificate --timeout=60 -O \"${DISTDIR}/${FILE}\" \"${URI}\"" + +class FetchTask(CompositeTask): + + __slots__ = ('distfile', 'digests', 'config', 'cpv', + 'restrict', 'uri_tuple', '_current_mirror', + '_current_stat', '_fetch_tmp_dir_info', '_fetch_tmp_file', + '_fs_mirror_stack', '_mirror_stack', + '_previously_added', + '_primaryuri_stack', '_log_path', '_tried_uris') + + def _start(self): + + if self.config.options.fetch_log_dir is not None and \ + not self.config.options.dry_run: + self._log_path = os.path.join( + self.config.options.fetch_log_dir, + self.distfile + '.log') + + self._previously_added = True + if self.config.distfiles_db is not None and \ + self.distfile not in self.config.distfiles_db: + self._previously_added = False + self.config.distfiles_db[self.distfile] = self.cpv + + if not self._have_needed_digests(): + msg = "incomplete digests: %s" % " ".join(self.digests) + self.scheduler.output(msg, background=self.background, + log_path=self._log_path) + self.config.log_failure("%s\t%s\t%s" % + (self.cpv, self.distfile, msg)) + self.config.file_failures[self.distfile] = self.cpv + self.returncode = os.EX_OK + self._async_wait() + return + + distfile_path = os.path.join( + self.config.options.distfiles, self.distfile) + + st = None + size_ok = False + try: + st = os.stat(distfile_path) + except OSError as e: + if e.errno not in (errno.ENOENT, errno.ESTALE): + msg = "%s stat failed in %s: %s" % \ + (self.distfile, "distfiles", e) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + else: + size_ok = st.st_size == self.digests["size"] + + if not size_ok: + if self.config.options.dry_run: + if st is not None: + logging.info(("dry-run: delete '%s' with " + "wrong 
size from distfiles") % (self.distfile,)) + else: + # Do the unlink in order to ensure that the path is clear, + # even if stat raised ENOENT, since a broken symlink can + # trigger ENOENT. + if self._unlink_file(distfile_path, "distfiles"): + if st is not None: + logging.debug(("delete '%s' with " + "wrong size from distfiles") % (self.distfile,)) + else: + self.config.log_failure("%s\t%s\t%s" % + (self.cpv, self.distfile, "unlink failed in distfiles")) + self.returncode = os.EX_OK + self._async_wait() + return + + if size_ok: + if self.config.options.verify_existing_digest: + self._start_task( + FileDigester(file_path=distfile_path, + hash_names=(self._select_hash(),), + background=self.background, + logfile=self._log_path), self._distfiles_digester_exit) + return + + self._success() + self.returncode = os.EX_OK + self._async_wait() + return + + self._start_fetch() + + def _success(self): + if not self._previously_added: + size = self.digests["size"] + self.config.added_byte_count += size + self.config.added_file_count += 1 + self.config.log_success("%s\t%s\tadded %i bytes" % + (self.cpv, self.distfile, size)) + + if self._log_path is not None: + if not self.config.options.dry_run: + try: + os.unlink(self._log_path) + except OSError: + pass + + if self.config.options.recycle_dir is not None: + + recycle_file = os.path.join( + self.config.options.recycle_dir, self.distfile) + + if self.config.options.dry_run: + if os.path.exists(recycle_file): + logging.info("dry-run: delete '%s' from recycle" % + (self.distfile,)) + else: + try: + os.unlink(recycle_file) + except OSError: + pass + else: + logging.debug("delete '%s' from recycle" % + (self.distfile,)) + + def _distfiles_digester_exit(self, digester): + + self._assert_current(digester) + if self._was_cancelled(): + self.wait() + return + + if self._default_exit(digester) != os.EX_OK: + # IOError reading file in our main distfiles directory? 
This + # is a bad situation which normally does not occur, so + # skip this file and report it, in order to draw attention + # from the administrator. + msg = "%s distfiles digester failed unexpectedly" % \ + (self.distfile,) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + self.config.log_failure("%s\t%s\t%s" % + (self.cpv, self.distfile, msg)) + self.config.file_failures[self.distfile] = self.cpv + self.wait() + return + + wrong_digest = self._find_bad_digest(digester.digests) + if wrong_digest is None: + self._success() + self.returncode = os.EX_OK + self.wait() + return + + self._start_fetch() + + _mirror_info = collections.namedtuple('_mirror_info', + 'name location') + + def _start_fetch(self): + + self._previously_added = False + self._fs_mirror_stack = [] + if self.config.options.distfiles_local is not None: + self._fs_mirror_stack.append(self._mirror_info( + 'distfiles-local', self.config.options.distfiles_local)) + if self.config.options.recycle_dir is not None: + self._fs_mirror_stack.append(self._mirror_info( + 'recycle', self.config.options.recycle_dir)) + + self._primaryuri_stack = [] + self._mirror_stack = [] + for uri in reversed(self.uri_tuple): + if uri.startswith('mirror://'): + self._mirror_stack.append( + self._mirror_iterator(uri, self.config.mirrors)) + else: + self._primaryuri_stack.append(uri) + + self._tried_uris = set() + self._try_next_mirror() + + @staticmethod + def _mirror_iterator(uri, mirrors_dict): + + slash_index = uri.find("/", 9) + if slash_index != -1: + mirror_name = uri[9:slash_index].strip("/") + mirrors = mirrors_dict.get(mirror_name) + if not mirrors: + return + mirrors = list(mirrors) + while mirrors: + mirror = mirrors.pop(random.randint(0, len(mirrors) - 1)) + yield mirror.rstrip("/") + "/" + uri[slash_index+1:] + + def _try_next_mirror(self): + if self._fs_mirror_stack: + self._fetch_fs(self._fs_mirror_stack.pop()) + return + else: + uri = self._next_uri() + if uri 
is not None: + self._tried_uris.add(uri) + self._fetch_uri(uri) + return + + if self._tried_uris: + msg = "all uris failed" + else: + msg = "no fetchable uris" + + self.config.log_failure("%s\t%s\t%s" % + (self.cpv, self.distfile, msg)) + self.config.file_failures[self.distfile] = self.cpv + self.returncode = os.EX_OK + self.wait() + + def _next_uri(self): + remaining_tries = self.config.options.tries - len(self._tried_uris) + if remaining_tries > 0: + + if remaining_tries <= self.config.options.tries / 2: + while self._primaryuri_stack: + uri = self._primaryuri_stack.pop() + if uri not in self._tried_uris: + return uri + + while self._mirror_stack: + uri = next(self._mirror_stack[-1], None) + if uri is None: + self._mirror_stack.pop() + else: + if uri not in self._tried_uris: + return uri + + while self._primaryuri_stack: + uri = self._primaryuri_stack.pop() + if uri not in self._tried_uris: + return uri + + return None + + def _fetch_fs(self, mirror_info): + file_path = os.path.join(mirror_info.location, self.distfile) + + st = None + size_ok = False + try: + st = os.stat(file_path) + except OSError as e: + if e.errno not in (errno.ENOENT, errno.ESTALE): + msg = "%s stat failed in %s: %s" % \ + (self.distfile, mirror_info.name, e) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + else: + size_ok = st.st_size == self.digests["size"] + self._current_stat = st + + if size_ok: + self._current_mirror = mirror_info + self._start_task( + FileDigester(file_path=file_path, + hash_names=(self._select_hash(),), + background=self.background, + logfile=self._log_path), + self._fs_mirror_digester_exit) + else: + self._try_next_mirror() + + def _fs_mirror_digester_exit(self, digester): + + self._assert_current(digester) + if self._was_cancelled(): + self.wait() + return + + current_mirror = self._current_mirror + if digester.returncode != os.EX_OK: + msg = "%s %s digester failed unexpectedly" % \ + (self.distfile, 
current_mirror.name) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + else: + bad_digest = self._find_bad_digest(digester.digests) + if bad_digest is not None: + msg = "%s %s has bad %s digest: expected %s, got %s" % \ + (self.distfile, current_mirror.name, bad_digest, + self.digests[bad_digest], digester.digests[bad_digest]) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + elif self.config.options.dry_run: + # Report success without actually touching any files + if self._same_device(current_mirror.location, + self.config.options.distfiles): + logging.info(("dry-run: hardlink '%s' from %s " + "to distfiles") % (self.distfile, current_mirror.name)) + else: + logging.info("dry-run: copy '%s' from %s to distfiles" % + (self.distfile, current_mirror.name)) + self._success() + self.returncode = os.EX_OK + self.wait() + return + else: + src = os.path.join(current_mirror.location, self.distfile) + dest = os.path.join(self.config.options.distfiles, self.distfile) + if self._hardlink_atomic(src, dest, + "%s to %s" % (current_mirror.name, "distfiles")): + logging.debug("hardlink '%s' from %s to distfiles" % + (self.distfile, current_mirror.name)) + self._success() + self.returncode = os.EX_OK + self.wait() + return + else: + self._start_task( + FileCopier(src_path=src, dest_path=dest, + background=(self.background and + self._log_path is not None), + logfile=self._log_path), + self._fs_mirror_copier_exit) + return + + self._try_next_mirror() + + def _fs_mirror_copier_exit(self, copier): + + self._assert_current(copier) + if self._was_cancelled(): + self.wait() + return + + current_mirror = self._current_mirror + if copier.returncode != os.EX_OK: + msg = "%s %s copy failed unexpectedly" % \ + (self.distfile, current_mirror.name) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + else: + + logging.debug("copy '%s' from 
%s to distfiles" % + (self.distfile, current_mirror.name)) + + # Apply the timestamp from the source file, but + # just rely on umask for permissions. + try: + if sys.hexversion >= 0x3030000: + os.utime(copier.dest_path, + ns=(self._current_stat.st_mtime_ns, + self._current_stat.st_mtime_ns)) + else: + os.utime(copier.dest_path, + (self._current_stat[stat.ST_MTIME], + self._current_stat[stat.ST_MTIME])) + except OSError as e: + msg = "%s %s utime failed unexpectedly: %s" % \ + (self.distfile, current_mirror.name, e) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + + self._success() + self.returncode = os.EX_OK + self.wait() + return + + self._try_next_mirror() + + def _fetch_uri(self, uri): + + if self.config.options.dry_run: + # Simply report success. + logging.info("dry-run: fetch '%s' from '%s'" % + (self.distfile, uri)) + self._success() + self.returncode = os.EX_OK + self.wait() + return + + if self.config.options.temp_dir: + self._fetch_tmp_dir_info = 'temp-dir' + distdir = self.config.options.temp_dir + else: + self._fetch_tmp_dir_info = 'distfiles' + distdir = self.config.options.distfiles + + tmp_basename = self.distfile + '._emirrordist_fetch_.%s' % os.getpid() + + variables = { + "DISTDIR": distdir, + "URI": uri, + "FILE": tmp_basename + } + + self._fetch_tmp_file = os.path.join(distdir, tmp_basename) + + try: + os.unlink(self._fetch_tmp_file) + except OSError: + pass + + args = portage.util.shlex_split(default_fetchcommand) + args = [portage.util.varexpand(x, mydict=variables) + for x in args] + + if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \ + not os.path.isabs(args[0]): + # Python 3.1 _execvp throws TypeError for non-absolute executable + # path passed as bytes (see http://bugs.python.org/issue8513). 
+ fullname = portage.process.find_binary(args[0]) + if fullname is None: + raise portage.exception.CommandNotFound(args[0]) + args[0] = fullname + + args = [_unicode_encode(x, + encoding=_encodings['fs'], errors='strict') for x in args] + + null_fd = os.open(os.devnull, os.O_RDONLY) + fetcher = PopenProcess(background=self.background, + proc=subprocess.Popen(args, stdin=null_fd, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT), + scheduler=self.scheduler) + os.close(null_fd) + + fetcher.pipe_reader = PipeLogger(background=self.background, + input_fd=fetcher.proc.stdout, log_file_path=self._log_path, + scheduler=self.scheduler) + + self._start_task(fetcher, self._fetcher_exit) + + def _fetcher_exit(self, fetcher): + + self._assert_current(fetcher) + if self._was_cancelled(): + self.wait() + return + + if os.path.exists(self._fetch_tmp_file): + self._start_task( + FileDigester(file_path=self._fetch_tmp_file, + hash_names=(self._select_hash(),), + background=self.background, + logfile=self._log_path), + self._fetch_digester_exit) + else: + self._try_next_mirror() + + def _fetch_digester_exit(self, digester): + + self._assert_current(digester) + if self._was_cancelled(): + self.wait() + return + + if digester.returncode != os.EX_OK: + msg = "%s %s digester failed unexpectedly" % \ + (self.distfile, self._fetch_tmp_dir_info) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + else: + bad_digest = self._find_bad_digest(digester.digests) + if bad_digest is not None: + msg = "%s has bad %s digest: expected %s, got %s" % \ + (self.distfile, bad_digest, + self.digests[bad_digest], digester.digests[bad_digest]) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + try: + os.unlink(self._fetch_tmp_file) + except OSError: + pass + else: + dest = os.path.join(self.config.options.distfiles, self.distfile) + try: + os.rename(self._fetch_tmp_file, dest) + except OSError: + self._start_task( + 
FileCopier(src_path=self._fetch_tmp_file, + dest_path=dest, + background=(self.background and + self._log_path is not None), + logfile=self._log_path), + self._fetch_copier_exit) + return + else: + self._success() + self.returncode = os.EX_OK + self.wait() + return + + self._try_next_mirror() + + def _fetch_copier_exit(self, copier): + + self._assert_current(copier) + + try: + os.unlink(self._fetch_tmp_file) + except OSError: + pass + + if self._was_cancelled(): + self.wait() + return + + if copier.returncode == os.EX_OK: + self._success() + self.returncode = os.EX_OK + self.wait() + else: + # out of space? + msg = "%s %s copy failed unexpectedly" % \ + (self.distfile, self._fetch_tmp_dir_info) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + self.config.log_failure("%s\t%s\t%s" % + (self.cpv, self.distfile, msg)) + self.config.file_failures[self.distfile] = self.cpv + self.returncode = 1 + self.wait() + + def _unlink_file(self, file_path, dir_info): + try: + os.unlink(file_path) + except OSError as e: + if e.errno not in (errno.ENOENT, errno.ESTALE): + msg = "unlink '%s' failed in %s: %s" % \ + (self.distfile, dir_info, e) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + return False + return True + + def _have_needed_digests(self): + return "size" in self.digests and \ + self._select_hash() is not None + + def _select_hash(self): + if default_hash_name in self.digests: + return default_hash_name + else: + for hash_name in self.digests: + if hash_name != "size" and \ + hash_name in portage.checksum.hashfunc_map: + return hash_name + + return None + + def _find_bad_digest(self, digests): + for hash_name, hash_value in digests.items(): + if self.digests[hash_name] != hash_value: + return hash_name + return None + + @staticmethod + def _same_device(path1, path2): + try: + st1 = os.stat(path1) + st2 = os.stat(path2) + except OSError: + return False + else: + 
return st1.st_dev == st2.st_dev + + def _hardlink_atomic(self, src, dest, dir_info): + + head, tail = os.path.split(dest) + hardlink_tmp = os.path.join(head, ".%s._mirrordist_hardlink_.%s" % \ + (tail, os.getpid())) + + try: + try: + os.link(src, hardlink_tmp) + except OSError as e: + if e.errno != errno.EXDEV: + msg = "hardlink %s from %s failed: %s" % \ + (self.distfile, dir_info, e) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + return False + + try: + os.rename(hardlink_tmp, dest) + except OSError as e: + msg = "hardlink rename '%s' from %s failed: %s" % \ + (self.distfile, dir_info, e) + self.scheduler.output(msg + '\n', background=True, + log_path=self._log_path) + logging.error(msg) + return False + finally: + try: + os.unlink(hardlink_tmp) + except OSError: + pass + + return True diff --git a/portage_with_autodep/pym/portage/_emirrordist/MirrorDistTask.py b/portage_with_autodep/pym/portage/_emirrordist/MirrorDistTask.py new file mode 100644 index 0000000..b6f875d --- /dev/null +++ b/portage_with_autodep/pym/portage/_emirrordist/MirrorDistTask.py @@ -0,0 +1,218 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import errno +import logging +import sys +import time + +try: + import threading +except ImportError: + import dummy_threading as threading + +import portage +from portage import os +from portage.util._async.TaskScheduler import TaskScheduler +from _emerge.CompositeTask import CompositeTask +from .FetchIterator import FetchIterator +from .DeletionIterator import DeletionIterator + +if sys.hexversion >= 0x3000000: + long = int + +class MirrorDistTask(CompositeTask): + + __slots__ = ('_config', '_terminated', '_term_check_id') + + def __init__(self, config): + CompositeTask.__init__(self, scheduler=config.event_loop) + self._config = config + self._terminated = threading.Event() + + def _start(self): + self._term_check_id = 
self.scheduler.idle_add(self._termination_check) + fetch = TaskScheduler(iter(FetchIterator(self._config)), + max_jobs=self._config.options.jobs, + max_load=self._config.options.load_average, + event_loop=self._config.event_loop) + self._start_task(fetch, self._fetch_exit) + + def _fetch_exit(self, fetch): + + self._assert_current(fetch) + if self._was_cancelled(): + self.wait() + return + + if self._config.options.delete: + deletion = TaskScheduler(iter(DeletionIterator(self._config)), + max_jobs=self._config.options.jobs, + max_load=self._config.options.load_average, + event_loop=self._config.event_loop) + self._start_task(deletion, self._deletion_exit) + return + + self._post_deletion() + + def _deletion_exit(self, deletion): + + self._assert_current(deletion) + if self._was_cancelled(): + self.wait() + return + + self._post_deletion() + + def _post_deletion(self): + + if self._config.options.recycle_db is not None: + self._update_recycle_db() + + if self._config.options.scheduled_deletion_log is not None: + self._scheduled_deletion_log() + + self._summary() + + self.returncode = os.EX_OK + self._current_task = None + self.wait() + + def _update_recycle_db(self): + + start_time = self._config.start_time + recycle_dir = self._config.options.recycle_dir + recycle_db = self._config.recycle_db + r_deletion_delay = self._config.options.recycle_deletion_delay + + # Use a dict optimize access. 
+ recycle_db_cache = dict(recycle_db.items()) + + for filename in os.listdir(recycle_dir): + + recycle_file = os.path.join(recycle_dir, filename) + + try: + st = os.stat(recycle_file) + except OSError as e: + if e.errno not in (errno.ENOENT, errno.ESTALE): + logging.error(("stat failed for '%s' in " + "recycle: %s") % (filename, e)) + continue + + value = recycle_db_cache.pop(filename, None) + if value is None: + logging.debug(("add '%s' to " + "recycle db") % filename) + recycle_db[filename] = (st.st_size, start_time) + else: + r_size, r_time = value + if long(r_size) != st.st_size: + recycle_db[filename] = (st.st_size, start_time) + elif r_time + r_deletion_delay < start_time: + if self._config.options.dry_run: + logging.info(("dry-run: delete '%s' from " + "recycle") % filename) + logging.info(("drop '%s' from " + "recycle db") % filename) + else: + try: + os.unlink(recycle_file) + except OSError as e: + if e.errno not in (errno.ENOENT, errno.ESTALE): + logging.error(("delete '%s' from " + "recycle failed: %s") % (filename, e)) + else: + logging.debug(("delete '%s' from " + "recycle") % filename) + try: + del recycle_db[filename] + except KeyError: + pass + else: + logging.debug(("drop '%s' from " + "recycle db") % filename) + + # Existing files were popped from recycle_db_cache, + # so any remaining entries are for files that no + # longer exist. 
+ for filename in recycle_db_cache: + try: + del recycle_db[filename] + except KeyError: + pass + else: + logging.debug(("drop non-existent '%s' from " + "recycle db") % filename) + + def _scheduled_deletion_log(self): + + start_time = self._config.start_time + dry_run = self._config.options.dry_run + deletion_delay = self._config.options.deletion_delay + distfiles_db = self._config.distfiles_db + + date_map = {} + for filename, timestamp in self._config.deletion_db.items(): + date = timestamp + deletion_delay + if date < start_time: + date = start_time + date = time.strftime("%Y-%m-%d", time.gmtime(date)) + date_files = date_map.get(date) + if date_files is None: + date_files = [] + date_map[date] = date_files + date_files.append(filename) + + if dry_run: + logging.warn(("dry-run: scheduled-deletions log " + "will be summarized via logging.info")) + + lines = [] + for date in sorted(date_map): + date_files = date_map[date] + if dry_run: + logging.info(("dry-run: scheduled deletions for %s: %s files") % + (date, len(date_files))) + lines.append("%s\n" % date) + for filename in date_files: + cpv = "unknown" + if distfiles_db is not None: + cpv = distfiles_db.get(filename, cpv) + lines.append("\t%s\t%s\n" % (filename, cpv)) + + if not dry_run: + portage.util.write_atomic( + self._config.options.scheduled_deletion_log, + "".join(lines)) + + def _summary(self): + elapsed_time = time.time() - self._config.start_time + fail_count = len(self._config.file_failures) + delete_count = self._config.delete_count + scheduled_deletion_count = self._config.scheduled_deletion_count - delete_count + added_file_count = self._config.added_file_count + added_byte_count = self._config.added_byte_count + + logging.info("finished in %i seconds" % elapsed_time) + logging.info("failed to fetch %i files" % fail_count) + logging.info("deleted %i files" % delete_count) + logging.info("deletion of %i files scheduled" % + scheduled_deletion_count) + logging.info("added %i files" % 
added_file_count) + logging.info("added %i bytes total" % added_byte_count) + + def terminate(self): + self._terminated.set() + + def _termination_check(self): + if self._terminated.is_set(): + self.cancel() + self.wait() + return True + + def _wait(self): + CompositeTask._wait(self) + if self._term_check_id is not None: + self.scheduler.source_remove(self._term_check_id) + self._term_check_id = None diff --git a/portage_with_autodep/pym/portage/tests/dbapi/__init__.py b/portage_with_autodep/pym/portage/_emirrordist/__init__.py index 532918b..6cde932 100644 --- a/portage_with_autodep/pym/portage/tests/dbapi/__init__.py +++ b/portage_with_autodep/pym/portage/_emirrordist/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2011 Gentoo Foundation +# Copyright 2013 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 diff --git a/portage_with_autodep/pym/portage/_emirrordist/main.py b/portage_with_autodep/pym/portage/_emirrordist/main.py new file mode 100644 index 0000000..f28aad7 --- /dev/null +++ b/portage_with_autodep/pym/portage/_emirrordist/main.py @@ -0,0 +1,455 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import logging +import sys + +import portage +from portage import os +from portage.util import normalize_path, writemsg_level +from portage.util._argparse import ArgumentParser +from portage.util._async.run_main_scheduler import run_main_scheduler +from portage.util._async.SchedulerInterface import SchedulerInterface +from portage.util._eventloop.global_event_loop import global_event_loop +from .Config import Config +from .MirrorDistTask import MirrorDistTask + +if sys.hexversion >= 0x3000000: + long = int + +seconds_per_day = 24 * 60 * 60 + +common_options = ( + { + "longopt" : "--dry-run", + "help" : "perform a trial run with no changes made (usually combined " + "with --verbose)", + "action" : "store_true" + }, + { + "longopt" : "--verbose", + "shortopt" : "-v", + "help" : 
"display extra information on stderr " + "(multiple occurences increase verbosity)", + "action" : "count", + "default" : 0, + }, + { + "longopt" : "--ignore-default-opts", + "help" : "do not use the EMIRRORDIST_DEFAULT_OPTS environment variable", + "action" : "store_true" + }, + { + "longopt" : "--distfiles", + "help" : "distfiles directory to use (required)", + "metavar" : "DIR" + }, + { + "longopt" : "--jobs", + "shortopt" : "-j", + "help" : "number of concurrent jobs to run", + "type" : int + }, + { + "longopt" : "--load-average", + "shortopt" : "-l", + "help" : "load average limit for spawning of new concurrent jobs", + "metavar" : "LOAD", + "type" : float + }, + { + "longopt" : "--tries", + "help" : "maximum number of tries per file, 0 means unlimited (default is 10)", + "default" : 10, + "type" : int + }, + { + "longopt" : "--repo", + "help" : "name of repo to operate on" + }, + { + "longopt" : "--config-root", + "help" : "location of portage config files", + "metavar" : "DIR" + }, + { + "longopt" : "--portdir", + "help" : "override the PORTDIR variable (deprecated in favor of --repositories-configuration)", + "metavar" : "DIR" + }, + { + "longopt" : "--portdir-overlay", + "help" : "override the PORTDIR_OVERLAY variable (deprecated in favor of --repositories-configuration)" + }, + { + "longopt" : "--repositories-configuration", + "help" : "override configuration of repositories (in format of repos.conf)" + }, + { + "longopt" : "--strict-manifests", + "help" : "manually override \"strict\" FEATURES setting", + "choices" : ("y", "n"), + "metavar" : "<y|n>", + }, + { + "longopt" : "--failure-log", + "help" : "log file for fetch failures, with tab-delimited " + "output, for reporting purposes", + "metavar" : "FILE" + }, + { + "longopt" : "--success-log", + "help" : "log file for fetch successes, with tab-delimited " + "output, for reporting purposes", + "metavar" : "FILE" + }, + { + "longopt" : "--scheduled-deletion-log", + "help" : "log file for scheduled 
deletions, with tab-delimited " + "output, for reporting purposes", + "metavar" : "FILE" + }, + { + "longopt" : "--delete", + "help" : "enable deletion of unused distfiles", + "action" : "store_true" + }, + { + "longopt" : "--deletion-db", + "help" : "database file used to track lifetime of files " + "scheduled for delayed deletion", + "metavar" : "FILE" + }, + { + "longopt" : "--deletion-delay", + "help" : "delay time for deletion, measured in seconds", + "metavar" : "SECONDS" + }, + { + "longopt" : "--temp-dir", + "help" : "temporary directory for downloads", + "metavar" : "DIR" + }, + { + "longopt" : "--mirror-overrides", + "help" : "file holding a list of mirror overrides", + "metavar" : "FILE" + }, + { + "longopt" : "--mirror-skip", + "help" : "comma delimited list of mirror targets to skip " + "when fetching" + }, + { + "longopt" : "--restrict-mirror-exemptions", + "help" : "comma delimited list of mirror targets for which to " + "ignore RESTRICT=\"mirror\"" + }, + { + "longopt" : "--verify-existing-digest", + "help" : "use digest as a verification of whether existing " + "distfiles are valid", + "action" : "store_true" + }, + { + "longopt" : "--distfiles-local", + "help" : "distfiles-local directory to use", + "metavar" : "DIR" + }, + { + "longopt" : "--distfiles-db", + "help" : "database file used to track which ebuilds a " + "distfile belongs to", + "metavar" : "FILE" + }, + { + "longopt" : "--recycle-dir", + "help" : "directory for extended retention of files that " + "are removed from distdir with the --delete option", + "metavar" : "DIR" + }, + { + "longopt" : "--recycle-db", + "help" : "database file used to track lifetime of files " + "in recycle dir", + "metavar" : "FILE" + }, + { + "longopt" : "--recycle-deletion-delay", + "help" : "delay time for deletion of unused files from " + "recycle dir, measured in seconds (defaults to " + "the equivalent of 60 days)", + "default" : 60 * seconds_per_day, + "metavar" : "SECONDS", + "type" : int + }, + { + 
"longopt" : "--fetch-log-dir", + "help" : "directory for individual fetch logs", + "metavar" : "DIR" + }, + { + "longopt" : "--whitelist-from", + "help" : "specifies a file containing a list of files to " + "whitelist, one per line, # prefixed lines ignored", + "action" : "append", + "metavar" : "FILE" + }, +) + +def parse_args(args): + description = "emirrordist - a fetch tool for mirroring " \ + "of package distfiles" + usage = "emirrordist [options] <action>" + parser = ArgumentParser(description=description, usage=usage) + + actions = parser.add_argument_group('Actions') + actions.add_argument("--version", + action="store_true", + help="display portage version and exit") + actions.add_argument("--mirror", + action="store_true", + help="mirror distfiles for the selected repository") + + common = parser.add_argument_group('Common options') + for opt_info in common_options: + opt_pargs = [opt_info["longopt"]] + if opt_info.get("shortopt"): + opt_pargs.append(opt_info["shortopt"]) + opt_kwargs = {"help" : opt_info["help"]} + for k in ("action", "choices", "default", "metavar", "type"): + if k in opt_info: + opt_kwargs[k] = opt_info[k] + common.add_argument(*opt_pargs, **opt_kwargs) + + options, args = parser.parse_known_args(args) + + return (parser, options, args) + +def emirrordist_main(args): + + # The calling environment is ignored, so the program is + # completely controlled by commandline arguments. 
+ env = {} + + if not sys.stdout.isatty(): + portage.output.nocolor() + env['NOCOLOR'] = 'true' + + parser, options, args = parse_args(args) + + if options.version: + sys.stdout.write("Portage %s\n" % portage.VERSION) + return os.EX_OK + + config_root = options.config_root + + if options.portdir is not None: + writemsg_level("emirrordist: warning: --portdir option is deprecated in favor of --repositories-configuration option\n", + level=logging.WARNING, noiselevel=-1) + if options.portdir_overlay is not None: + writemsg_level("emirrordist: warning: --portdir-overlay option is deprecated in favor of --repositories-configuration option\n", + level=logging.WARNING, noiselevel=-1) + + if options.repositories_configuration is not None: + env['PORTAGE_REPOSITORIES'] = options.repositories_configuration + elif options.portdir_overlay is not None: + env['PORTDIR_OVERLAY'] = options.portdir_overlay + + if options.portdir is not None: + env['PORTDIR'] = options.portdir + + settings = portage.config(config_root=config_root, + local_config=False, env=env) + + default_opts = None + if not options.ignore_default_opts: + default_opts = settings.get('EMIRRORDIST_DEFAULT_OPTS', '').split() + + if default_opts: + parser, options, args = parse_args(default_opts + args) + + settings = portage.config(config_root=config_root, + local_config=False, env=env) + + if options.repo is None: + if len(settings.repositories.prepos) == 2: + for repo in settings.repositories: + if repo.name != "DEFAULT": + options.repo = repo.name + break + + if options.repo is None: + parser.error("--repo option is required") + + repo_path = settings.repositories.treemap.get(options.repo) + if repo_path is None: + parser.error("Unable to locate repository named '%s'" % (options.repo,)) + + if options.jobs is not None: + options.jobs = int(options.jobs) + + if options.load_average is not None: + options.load_average = float(options.load_average) + + if options.failure_log is not None: + options.failure_log = 
normalize_path( + os.path.abspath(options.failure_log)) + + parent_dir = os.path.dirname(options.failure_log) + if not (os.path.isdir(parent_dir) and + os.access(parent_dir, os.W_OK|os.X_OK)): + parser.error(("--failure-log '%s' parent is not a " + "writable directory") % options.failure_log) + + if options.success_log is not None: + options.success_log = normalize_path( + os.path.abspath(options.success_log)) + + parent_dir = os.path.dirname(options.success_log) + if not (os.path.isdir(parent_dir) and + os.access(parent_dir, os.W_OK|os.X_OK)): + parser.error(("--success-log '%s' parent is not a " + "writable directory") % options.success_log) + + if options.scheduled_deletion_log is not None: + options.scheduled_deletion_log = normalize_path( + os.path.abspath(options.scheduled_deletion_log)) + + parent_dir = os.path.dirname(options.scheduled_deletion_log) + if not (os.path.isdir(parent_dir) and + os.access(parent_dir, os.W_OK|os.X_OK)): + parser.error(("--scheduled-deletion-log '%s' parent is not a " + "writable directory") % options.scheduled_deletion_log) + + if options.deletion_db is None: + parser.error("--scheduled-deletion-log requires --deletion-db") + + if options.deletion_delay is not None: + options.deletion_delay = long(options.deletion_delay) + if options.deletion_db is None: + parser.error("--deletion-delay requires --deletion-db") + + if options.deletion_db is not None: + if options.deletion_delay is None: + parser.error("--deletion-db requires --deletion-delay") + options.deletion_db = normalize_path( + os.path.abspath(options.deletion_db)) + + if options.temp_dir is not None: + options.temp_dir = normalize_path( + os.path.abspath(options.temp_dir)) + + if not (os.path.isdir(options.temp_dir) and + os.access(options.temp_dir, os.W_OK|os.X_OK)): + parser.error(("--temp-dir '%s' is not a " + "writable directory") % options.temp_dir) + + if options.distfiles is not None: + options.distfiles = normalize_path( + os.path.abspath(options.distfiles)) + + 
if not (os.path.isdir(options.distfiles) and + os.access(options.distfiles, os.W_OK|os.X_OK)): + parser.error(("--distfiles '%s' is not a " + "writable directory") % options.distfiles) + else: + parser.error("missing required --distfiles parameter") + + if options.mirror_overrides is not None: + options.mirror_overrides = normalize_path( + os.path.abspath(options.mirror_overrides)) + + if not (os.access(options.mirror_overrides, os.R_OK) and + os.path.isfile(options.mirror_overrides)): + parser.error( + "--mirror-overrides-file '%s' is not a readable file" % + options.mirror_overrides) + + if options.distfiles_local is not None: + options.distfiles_local = normalize_path( + os.path.abspath(options.distfiles_local)) + + if not (os.path.isdir(options.distfiles_local) and + os.access(options.distfiles_local, os.W_OK|os.X_OK)): + parser.error(("--distfiles-local '%s' is not a " + "writable directory") % options.distfiles_local) + + if options.distfiles_db is not None: + options.distfiles_db = normalize_path( + os.path.abspath(options.distfiles_db)) + + if options.tries is not None: + options.tries = int(options.tries) + + if options.recycle_dir is not None: + options.recycle_dir = normalize_path( + os.path.abspath(options.recycle_dir)) + if not (os.path.isdir(options.recycle_dir) and + os.access(options.recycle_dir, os.W_OK|os.X_OK)): + parser.error(("--recycle-dir '%s' is not a " + "writable directory") % options.recycle_dir) + + if options.recycle_db is not None: + if options.recycle_dir is None: + parser.error("--recycle-db requires " + "--recycle-dir to be specified") + options.recycle_db = normalize_path( + os.path.abspath(options.recycle_db)) + + if options.recycle_deletion_delay is not None: + options.recycle_deletion_delay = \ + long(options.recycle_deletion_delay) + + if options.fetch_log_dir is not None: + options.fetch_log_dir = normalize_path( + os.path.abspath(options.fetch_log_dir)) + + if not (os.path.isdir(options.fetch_log_dir) and + 
os.access(options.fetch_log_dir, os.W_OK|os.X_OK)): + parser.error(("--fetch-log-dir '%s' is not a " + "writable directory") % options.fetch_log_dir) + + if options.whitelist_from: + normalized_paths = [] + for x in options.whitelist_from: + path = normalize_path(os.path.abspath(x)) + normalized_paths.append(path) + if not (os.access(path, os.R_OK) and os.path.isfile(path)): + parser.error( + "--whitelist-from '%s' is not a readable file" % x) + options.whitelist_from = normalized_paths + + if options.strict_manifests is not None: + if options.strict_manifests == "y": + settings.features.add("strict") + else: + settings.features.discard("strict") + + settings.lock() + + portdb = portage.portdbapi(mysettings=settings) + + # Limit ebuilds to the specified repo. + portdb.porttrees = [repo_path] + + portage.util.initialize_logger() + + if options.verbose > 0: + l = logging.getLogger() + l.setLevel(l.getEffectiveLevel() - 10 * options.verbose) + + with Config(options, portdb, + SchedulerInterface(global_event_loop())) as config: + + if not options.mirror: + parser.error('No action specified') + + returncode = os.EX_OK + + if options.mirror: + signum = run_main_scheduler(MirrorDistTask(config)) + if signum is not None: + sys.exit(128 + signum) + + return returncode diff --git a/portage_with_autodep/pym/portage/_global_updates.py b/portage_with_autodep/pym/portage/_global_updates.py index 868d1ee..5175043 100644 --- a/portage_with_autodep/pym/portage/_global_updates.py +++ b/portage_with_autodep/pym/portage/_global_updates.py @@ -15,7 +15,7 @@ from portage.update import grab_updates, parse_updates, update_config_files, upd from portage.util import grabfile, shlex_split, \ writemsg, writemsg_stdout, write_atomic -def _global_updates(trees, prev_mtimes, quiet=False): +def _global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True): """ Perform new global updates if they exist in 'profiles/updates/' subdirectories of all active repositories (PORTDIR + 
PORTDIR_OVERLAY). @@ -37,7 +37,7 @@ def _global_updates(trees, prev_mtimes, quiet=False): "SANDBOX_ACTIVE" in os.environ or \ len(trees) != 1: return retupd - root = "/" + root = trees._running_eroot mysettings = trees[root]["vartree"].settings portdb = trees[root]["porttree"].dbapi vardb = trees[root]["vartree"].dbapi @@ -73,10 +73,10 @@ def _global_updates(trees, prev_mtimes, quiet=False): continue try: - if mysettings.get("PORTAGE_CALLER") == "fixpackages": - update_data = grab_updates(updpath) + if if_mtime_changed: + update_data = grab_updates(updpath, prev_mtimes=prev_mtimes) else: - update_data = grab_updates(updpath, prev_mtimes) + update_data = grab_updates(updpath) except DirectoryNotFound: continue myupd = [] @@ -217,8 +217,7 @@ def _global_updates(trees, prev_mtimes, quiet=False): do_upgrade_packagesmessage = False # We gotta do the brute force updates for these now. - if mysettings.get("PORTAGE_CALLER") == "fixpackages" or \ - "fixpackages" in mysettings.features: + if True: def onUpdate(maxval, curval): if curval > 0: writemsg_stdout("#") diff --git a/portage_with_autodep/pym/portage/_global_updates.pyo b/portage_with_autodep/pym/portage/_global_updates.pyo Binary files differnew file mode 100644 index 0000000..3e2e8de --- /dev/null +++ b/portage_with_autodep/pym/portage/_global_updates.pyo diff --git a/portage_with_autodep/pym/portage/_legacy_globals.py b/portage_with_autodep/pym/portage/_legacy_globals.py index 615591a..abffa0e 100644 --- a/portage_with_autodep/pym/portage/_legacy_globals.py +++ b/portage_with_autodep/pym/portage/_legacy_globals.py @@ -35,19 +35,14 @@ def _get_legacy_global(name): constructed.add('db') del portage._initializing_globals - settings = portage.db["/"]["vartree"].settings - - for root in portage.db: - if root != "/": - settings = portage.db[root]["vartree"].settings - break - - portage.output._init(config_root=settings['PORTAGE_CONFIGROOT']) + settings = portage.db[portage.db._target_eroot]["vartree"].settings 
portage.settings = settings constructed.add('settings') - portage.root = root + # Since portage.db now uses EROOT for keys instead of ROOT, we make + # portage.root refer to EROOT such that it continues to work as a key. + portage.root = portage.db._target_eroot constructed.add('root') # COMPATIBILITY diff --git a/portage_with_autodep/pym/portage/_legacy_globals.pyo b/portage_with_autodep/pym/portage/_legacy_globals.pyo Binary files differnew file mode 100644 index 0000000..2e50cbe --- /dev/null +++ b/portage_with_autodep/pym/portage/_legacy_globals.pyo diff --git a/portage_with_autodep/pym/portage/_selinux.pyo b/portage_with_autodep/pym/portage/_selinux.pyo Binary files differnew file mode 100644 index 0000000..7a413e0 --- /dev/null +++ b/portage_with_autodep/pym/portage/_selinux.pyo diff --git a/portage_with_autodep/pym/portage/_sets/__init__.py b/portage_with_autodep/pym/portage/_sets/__init__.py index 1b3484e..88a4b3b 100644 --- a/portage_with_autodep/pym/portage/_sets/__init__.py +++ b/portage_with_autodep/pym/portage/_sets/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2007 Gentoo Foundation +# Copyright 2007-2011 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 from __future__ import print_function @@ -6,16 +6,27 @@ from __future__ import print_function __all__ = ["SETPREFIX", "get_boolean", "SetConfigError", "SetConfig", "load_default_config"] +import io +import logging +import sys try: - from configparser import SafeConfigParser, NoOptionError + from configparser import NoOptionError, ParsingError + if sys.hexversion >= 0x3020000: + from configparser import ConfigParser as SafeConfigParser + else: + from configparser import SafeConfigParser except ImportError: - from ConfigParser import SafeConfigParser, NoOptionError + from ConfigParser import SafeConfigParser, NoOptionError, ParsingError from portage import os from portage import load_mod +from portage import _unicode_decode +from portage import _unicode_encode +from portage 
import _encodings from portage.const import USER_CONFIG_PATH, GLOBAL_CONFIG_PATH from portage.const import _ENABLE_SET_CONFIG from portage.exception import PackageSetNotFound from portage.localization import _ +from portage.util import writemsg_level SETPREFIX = "@" @@ -43,7 +54,32 @@ class SetConfig(object): }) if _ENABLE_SET_CONFIG: - self._parser.read(paths) + # use read_file/readfp in order to control decoding of unicode + try: + # Python >=3.2 + read_file = self._parser.read_file + except AttributeError: + read_file = self._parser.readfp + + for p in paths: + f = None + try: + f = io.open(_unicode_encode(p, + encoding=_encodings['fs'], errors='strict'), + mode='r', encoding=_encodings['repo.content'], + errors='replace') + except EnvironmentError: + pass + else: + try: + read_file(f) + except ParsingError as e: + writemsg_level(_unicode_decode( + _("!!! Error while reading sets config file: %s\n") + ) % e, level=logging.ERROR, noiselevel=-1) + finally: + if f is not None: + f.close() else: self._create_default_config() @@ -201,7 +237,6 @@ class SetConfig(object): except KeyError: raise PackageSetNotFound(setname) myatoms = myset.getAtoms() - parser = self._parser if ignorelist is None: ignorelist = set() diff --git a/portage_with_autodep/pym/portage/_sets/__init__.pyo b/portage_with_autodep/pym/portage/_sets/__init__.pyo Binary files differnew file mode 100644 index 0000000..5318dbe --- /dev/null +++ b/portage_with_autodep/pym/portage/_sets/__init__.pyo diff --git a/portage_with_autodep/pym/portage/_sets/base.pyo b/portage_with_autodep/pym/portage/_sets/base.pyo Binary files differnew file mode 100644 index 0000000..89e53be --- /dev/null +++ b/portage_with_autodep/pym/portage/_sets/base.pyo diff --git a/portage_with_autodep/pym/portage/_sets/dbapi.py b/portage_with_autodep/pym/portage/_sets/dbapi.py index 0f238f0..4982a92 100644 --- a/portage_with_autodep/pym/portage/_sets/dbapi.py +++ b/portage_with_autodep/pym/portage/_sets/dbapi.py @@ -1,10 +1,10 @@ -# 
Copyright 2007-2010 Gentoo Foundation +# Copyright 2007-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import time from portage import os -from portage.versions import catpkgsplit, catsplit, pkgcmp, best +from portage.versions import best, catsplit, vercmp from portage.dep import Atom from portage.localization import _ from portage._sets.base import PackageSet @@ -72,18 +72,16 @@ class OwnerSet(PackageSet): aux_keys = ["SLOT"] if exclude_paths is None: for link, p in vardb._owners.iter_owners(paths): - cat, pn = catpkgsplit(link.mycpv)[:2] slot, = aux_get(link.mycpv, aux_keys) - rValue.add("%s/%s:%s" % (cat, pn, slot)) + rValue.add("%s:%s" % (link.mycpv.cp, slot)) else: all_paths = set() all_paths.update(paths) all_paths.update(exclude_paths) exclude_atoms = set() for link, p in vardb._owners.iter_owners(all_paths): - cat, pn = catpkgsplit(link.mycpv)[:2] slot, = aux_get(link.mycpv, aux_keys) - atom = "%s/%s:%s" % (cat, pn, slot) + atom = "%s:%s" % (link.mycpv.cp, slot) rValue.add(atom) if p in exclude_paths: exclude_atoms.add(atom) @@ -184,9 +182,7 @@ class DowngradeSet(PackageSet): ebuild = xmatch(xmatch_level, slot_atom) if not ebuild: continue - ebuild_split = catpkgsplit(ebuild)[1:] - installed_split = catpkgsplit(cpv)[1:] - if pkgcmp(installed_split, ebuild_split) > 0: + if vercmp(cpv.version, ebuild.version) > 0: atoms.append(slot_atom) self._setAtoms(atoms) diff --git a/portage_with_autodep/pym/portage/_sets/dbapi.pyo b/portage_with_autodep/pym/portage/_sets/dbapi.pyo Binary files differnew file mode 100644 index 0000000..20bf848 --- /dev/null +++ b/portage_with_autodep/pym/portage/_sets/dbapi.pyo diff --git a/portage_with_autodep/pym/portage/_sets/files.pyo b/portage_with_autodep/pym/portage/_sets/files.pyo Binary files differnew file mode 100644 index 0000000..eb03c00 --- /dev/null +++ b/portage_with_autodep/pym/portage/_sets/files.pyo diff --git a/portage_with_autodep/pym/portage/_sets/libs.pyo 
b/portage_with_autodep/pym/portage/_sets/libs.pyo Binary files differnew file mode 100644 index 0000000..72fc1bb --- /dev/null +++ b/portage_with_autodep/pym/portage/_sets/libs.pyo diff --git a/portage_with_autodep/pym/portage/_sets/profiles.pyo b/portage_with_autodep/pym/portage/_sets/profiles.pyo Binary files differnew file mode 100644 index 0000000..9502044 --- /dev/null +++ b/portage_with_autodep/pym/portage/_sets/profiles.pyo diff --git a/portage_with_autodep/pym/portage/_sets/security.py b/portage_with_autodep/pym/portage/_sets/security.py index 2d8fcf6..7e856bc 100644 --- a/portage_with_autodep/pym/portage/_sets/security.py +++ b/portage_with_autodep/pym/portage/_sets/security.py @@ -1,9 +1,9 @@ -# Copyright 2007 Gentoo Foundation +# Copyright 2007-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import portage.glsa as glsa from portage._sets.base import PackageSet -from portage.versions import catpkgsplit, pkgcmp +from portage.versions import vercmp from portage._sets import get_boolean __all__ = ["SecuritySet", "NewGlsaSet", "NewAffectedSet", "AffectedSet"] @@ -45,12 +45,12 @@ class SecuritySet(PackageSet): for atom in atomlist[:]: cpv = self._portdbapi.xmatch("match-all", atom)[0] slot = self._portdbapi.aux_get(cpv, ["SLOT"])[0] - cps = "/".join(catpkgsplit(cpv)[0:2]) + ":" + slot + cps = "%s:%s" % (cpv.cp, slot) if not cps in mydict: mydict[cps] = (atom, cpv) else: other_cpv = mydict[cps][1] - if pkgcmp(catpkgsplit(cpv)[1:], catpkgsplit(other_cpv)[1:]) > 0: + if vercmp(cpv.version, other_cpv.version) > 0: atomlist.remove(mydict[cps][0]) mydict[cps] = (atom, cpv) return atomlist diff --git a/portage_with_autodep/pym/portage/_sets/security.pyo b/portage_with_autodep/pym/portage/_sets/security.pyo Binary files differnew file mode 100644 index 0000000..ea67514 --- /dev/null +++ b/portage_with_autodep/pym/portage/_sets/security.pyo diff --git a/portage_with_autodep/pym/portage/_sets/shell.pyo 
b/portage_with_autodep/pym/portage/_sets/shell.pyo Binary files differnew file mode 100644 index 0000000..e5e4561 --- /dev/null +++ b/portage_with_autodep/pym/portage/_sets/shell.pyo diff --git a/portage_with_autodep/pym/portage/cache/__init__.pyo b/portage_with_autodep/pym/portage/cache/__init__.pyo Binary files differnew file mode 100644 index 0000000..eb5a90e --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/__init__.pyo diff --git a/portage_with_autodep/pym/portage/cache/anydbm.pyo b/portage_with_autodep/pym/portage/cache/anydbm.pyo Binary files differnew file mode 100644 index 0000000..5946da9 --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/anydbm.pyo diff --git a/portage_with_autodep/pym/portage/cache/cache_errors.pyo b/portage_with_autodep/pym/portage/cache/cache_errors.pyo Binary files differnew file mode 100644 index 0000000..866088e --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/cache_errors.pyo diff --git a/portage_with_autodep/pym/portage/cache/ebuild_xattr.py b/portage_with_autodep/pym/portage/cache/ebuild_xattr.py index 6b388fa..0086e40 100644 --- a/portage_with_autodep/pym/portage/cache/ebuild_xattr.py +++ b/portage_with_autodep/pym/portage/cache/ebuild_xattr.py @@ -1,5 +1,6 @@ +# -*- coding: UTF8 -*- # Copyright: 2009-2011 Gentoo Foundation -# Author(s): Petteri Räty (betelgeuse@gentoo.org) +# Author(s): Petteri Räty (betelgeuse@gentoo.org) # License: GPL2 __all__ = ['database'] diff --git a/portage_with_autodep/pym/portage/cache/ebuild_xattr.pyo b/portage_with_autodep/pym/portage/cache/ebuild_xattr.pyo Binary files differnew file mode 100644 index 0000000..fe32dcc --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/ebuild_xattr.pyo diff --git a/portage_with_autodep/pym/portage/cache/flat_hash.py b/portage_with_autodep/pym/portage/cache/flat_hash.py index b6bc074..2eae9f6 100644 --- a/portage_with_autodep/pym/portage/cache/flat_hash.py +++ b/portage_with_autodep/pym/portage/cache/flat_hash.py @@ -31,7 +31,7 @@ 
class database(fs_template.FsBased): self.label.lstrip(os.path.sep).rstrip(os.path.sep)) write_keys = set(self._known_keys) write_keys.add("_eclasses_") - write_keys.add("_mtime_") + write_keys.add("_%s_" % (self.validation_chf,)) self._write_keys = sorted(write_keys) if not self.readonly and not os.path.exists(self.location): self._ensure_dirs() @@ -69,7 +69,6 @@ class database(fs_template.FsBased): raise cache_errors.CacheCorruption(cpv, e) def _setitem(self, cpv, values): -# import pdb;pdb.set_trace() s = cpv.rfind("/") fp = os.path.join(self.location,cpv[:s],".update.%i.%s" % (os.getpid(), cpv[s+1:])) try: @@ -153,3 +152,9 @@ class database(fs_template.FsBased): dirs.append((depth+1, p)) continue yield p[len_base+1:] + + +class md5_database(database): + + validation_chf = 'md5' + store_eclass_paths = False diff --git a/portage_with_autodep/pym/portage/cache/flat_hash.pyo b/portage_with_autodep/pym/portage/cache/flat_hash.pyo Binary files differnew file mode 100644 index 0000000..4f568a8 --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/flat_hash.pyo diff --git a/portage_with_autodep/pym/portage/cache/flat_list.pyo b/portage_with_autodep/pym/portage/cache/flat_list.pyo Binary files differnew file mode 100644 index 0000000..ab7dc82 --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/flat_list.pyo diff --git a/portage_with_autodep/pym/portage/cache/fs_template.pyo b/portage_with_autodep/pym/portage/cache/fs_template.pyo Binary files differnew file mode 100644 index 0000000..6cbbc2f --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/fs_template.pyo diff --git a/portage_with_autodep/pym/portage/cache/mappings.py b/portage_with_autodep/pym/portage/cache/mappings.py index 60a918e..bc8ce9a 100644 --- a/portage_with_autodep/pym/portage/cache/mappings.py +++ b/portage_with_autodep/pym/portage/cache/mappings.py @@ -316,7 +316,7 @@ def slot_dict_class(keys, prefix="_val_"): attribute names from keys @type prefix: String @rtype: SlotDict - @returns: 
A class that constructs SlotDict instances + @return: A class that constructs SlotDict instances having the specified keys. """ if isinstance(keys, frozenset): diff --git a/portage_with_autodep/pym/portage/cache/mappings.pyo b/portage_with_autodep/pym/portage/cache/mappings.pyo Binary files differnew file mode 100644 index 0000000..1eb3f4f --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/mappings.pyo diff --git a/portage_with_autodep/pym/portage/cache/metadata.py b/portage_with_autodep/pym/portage/cache/metadata.py index 4c735d7..9d2c3a5 100644 --- a/portage_with_autodep/pym/portage/cache/metadata.py +++ b/portage_with_autodep/pym/portage/cache/metadata.py @@ -6,6 +6,7 @@ import errno import re import stat import sys +from operator import attrgetter from portage import os from portage import _encodings from portage import _unicode_encode @@ -63,13 +64,14 @@ class database(flat_hash.database): if "INHERITED" in d: if self.ec is None: self.ec = portage.eclass_cache.cache(self.location[:-15]) + getter = attrgetter(self.validation_chf) try: - d["_eclasses_"] = self.ec.get_eclass_data( - d["INHERITED"].split()) + ec_data = self.ec.get_eclass_data(d["INHERITED"].split()) + d["_eclasses_"] = dict((k, (v.eclass_dir, getter(v))) + for k,v in ec_data.items()) except KeyError as e: # INHERITED contains a non-existent eclass. 
raise cache_errors.CacheCorruption(cpv, e) - del d["INHERITED"] else: d["_eclasses_"] = {} elif isinstance(d["_eclasses_"], basestring): diff --git a/portage_with_autodep/pym/portage/cache/metadata.pyo b/portage_with_autodep/pym/portage/cache/metadata.pyo Binary files differnew file mode 100644 index 0000000..c98445b --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/metadata.pyo diff --git a/portage_with_autodep/pym/portage/cache/metadata_overlay.py b/portage_with_autodep/pym/portage/cache/metadata_overlay.py deleted file mode 100644 index cfa0051..0000000 --- a/portage_with_autodep/pym/portage/cache/metadata_overlay.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright 1999-2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.cache import template -from portage.cache.cache_errors import CacheCorruption -from portage.cache.flat_hash import database as db_rw -from portage.cache.metadata import database as db_ro - -class database(template.database): - - serialize_eclasses = False - - def __init__(self, location, label, auxdbkeys, db_rw=db_rw, db_ro=db_ro, - *args, **config): - super_config = config.copy() - super_config.pop("gid", None) - super_config.pop("perms", None) - super(database, self).__init__(location, label, auxdbkeys, - *args, **super_config) - self.db_rw = db_rw(location, label, auxdbkeys, **config) - self.commit = self.db_rw.commit - self.autocommits = self.db_rw.autocommits - if isinstance(db_ro, type): - ro_config = config.copy() - ro_config["readonly"] = True - self.db_ro = db_ro(label, "metadata/cache", auxdbkeys, **ro_config) - else: - self.db_ro = db_ro - - def __getitem__(self, cpv): - """funnel whiteout validation through here, since value needs to be fetched""" - try: - value = self.db_rw[cpv] - except KeyError: - return self.db_ro[cpv] # raises a KeyError when necessary - except CacheCorruption: - del self.db_rw[cpv] - return self.db_ro[cpv] # raises a KeyError when necessary - if 
self._is_whiteout(value): - if self._is_whiteout_valid(cpv, value): - raise KeyError(cpv) - else: - del self.db_rw[cpv] - return self.db_ro[cpv] # raises a KeyError when necessary - else: - return value - - def _setitem(self, name, values): - try: - value_ro = self.db_ro.get(name) - except CacheCorruption: - value_ro = None - if value_ro is not None and \ - self._are_values_identical(value_ro, values): - # we have matching values in the underlying db_ro - # so it is unnecessary to store data in db_rw - try: - del self.db_rw[name] # delete unwanted whiteout when necessary - except KeyError: - pass - return - self.db_rw[name] = values - - def _delitem(self, cpv): - value = self[cpv] # validates whiteout and/or raises a KeyError when necessary - if cpv in self.db_ro: - self.db_rw[cpv] = self._create_whiteout(value) - else: - del self.db_rw[cpv] - - def __contains__(self, cpv): - try: - self[cpv] # validates whiteout when necessary - except KeyError: - return False - return True - - def __iter__(self): - s = set() - for cpv in self.db_rw: - if cpv in self: # validates whiteout when necessary - yield cpv - # set includes whiteouts so they won't be yielded later - s.add(cpv) - for cpv in self.db_ro: - if cpv not in s: - yield cpv - - def _is_whiteout(self, value): - return value["EAPI"] == "whiteout" - - def _create_whiteout(self, value): - return {"EAPI":"whiteout","_eclasses_":value["_eclasses_"],"_mtime_":value["_mtime_"]} - - def _is_whiteout_valid(self, name, value_rw): - try: - value_ro = self.db_ro[name] - return self._are_values_identical(value_rw,value_ro) - except KeyError: - return False - - def _are_values_identical(self, value1, value2): - if value1['_mtime_'] != value2['_mtime_']: - return False - return value1["_eclasses_"] == value2["_eclasses_"] diff --git a/portage_with_autodep/pym/portage/cache/sql_template.pyo b/portage_with_autodep/pym/portage/cache/sql_template.pyo Binary files differnew file mode 100644 index 0000000..e2c5974 --- /dev/null +++ 
b/portage_with_autodep/pym/portage/cache/sql_template.pyo diff --git a/portage_with_autodep/pym/portage/cache/sqlite.pyo b/portage_with_autodep/pym/portage/cache/sqlite.pyo Binary files differnew file mode 100644 index 0000000..a82d25f --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/sqlite.pyo diff --git a/portage_with_autodep/pym/portage/cache/template.py b/portage_with_autodep/pym/portage/cache/template.py index f84d8f4..cf1e8ae 100644 --- a/portage_with_autodep/pym/portage/cache/template.py +++ b/portage_with_autodep/pym/portage/cache/template.py @@ -1,4 +1,4 @@ -# Copyright: 2005 Gentoo Foundation +# Copyright: 2005-2012 Gentoo Foundation # Author(s): Brian Harring (ferringb@gentoo.org) # License: GPL2 @@ -7,10 +7,14 @@ from portage.cache.cache_errors import InvalidRestriction from portage.cache.mappings import ProtectedDict import sys import warnings +import operator if sys.hexversion >= 0x3000000: + _unicode = str basestring = str long = int +else: + _unicode = unicode class database(object): # this is for metadata/cache transfer. @@ -21,6 +25,8 @@ class database(object): autocommits = False cleanse_keys = False serialize_eclasses = True + validation_chf = 'mtime' + store_eclass_paths = True def __init__(self, location, label, auxdbkeys, readonly=False): """ initialize the derived class; specifically, store label/keys""" @@ -40,9 +46,15 @@ class database(object): self.updates = 0 d=self._getitem(cpv) if self.serialize_eclasses and "_eclasses_" in d: - d["_eclasses_"] = reconstruct_eclasses(cpv, d["_eclasses_"]) + d["_eclasses_"] = reconstruct_eclasses(cpv, d["_eclasses_"], + self.validation_chf, paths=self.store_eclass_paths) elif "_eclasses_" not in d: d["_eclasses_"] = {} + # Never return INHERITED, since portdbapi.aux_get() will + # generate it automatically from _eclasses_, and we want + # to omit it in comparisons between cache entries like + # those that egencache uses to avoid redundant writes. 
+ d.pop("INHERITED", None) mtime = d.get('_mtime_') if mtime is None: raise cache_errors.CacheCorruption(cpv, @@ -60,22 +72,46 @@ class database(object): override this in derived classess""" raise NotImplementedError + @staticmethod + def _internal_eclasses(extern_ec_dict, chf_type, paths): + """ + When serialize_eclasses is False, we have to convert an external + eclass dict containing hashed_path objects into an appropriate + internal dict containing values of chf_type (and eclass dirs + if store_eclass_paths is True). + """ + if not extern_ec_dict: + return extern_ec_dict + chf_getter = operator.attrgetter(chf_type) + if paths: + intern_ec_dict = dict((k, (v.eclass_dir, chf_getter(v))) + for k, v in extern_ec_dict.items()) + else: + intern_ec_dict = dict((k, chf_getter(v)) + for k, v in extern_ec_dict.items()) + return intern_ec_dict + def __setitem__(self, cpv, values): """set a cpv to values This shouldn't be overriden in derived classes since it handles the readonly checks""" if self.readonly: raise cache_errors.ReadOnlyRestriction() + d = None if self.cleanse_keys: d=ProtectedDict(values) for k, v in list(d.items()): if not v: del d[k] - if self.serialize_eclasses and "_eclasses_" in values: - d["_eclasses_"] = serialize_eclasses(d["_eclasses_"]) - elif self.serialize_eclasses and "_eclasses_" in values: - d = ProtectedDict(values) - d["_eclasses_"] = serialize_eclasses(d["_eclasses_"]) - else: + if "_eclasses_" in values: + if d is None: + d = ProtectedDict(values) + if self.serialize_eclasses: + d["_eclasses_"] = serialize_eclasses(d["_eclasses_"], + self.validation_chf, paths=self.store_eclass_paths) + else: + d["_eclasses_"] = self._internal_eclasses(d["_eclasses_"], + self.validation_chf, self.store_eclass_paths) + elif d is None: d = values self._setitem(cpv, d) if not self.autocommits: @@ -159,6 +195,23 @@ class database(object): except KeyError: return x + def validate_entry(self, entry, ebuild_hash, eclass_db): + hash_key = '_%s_' % 
self.validation_chf + try: + entry_hash = entry[hash_key] + except KeyError: + return False + else: + if entry_hash != getattr(ebuild_hash, self.validation_chf): + return False + update = eclass_db.validate_and_rewrite_cache(entry['_eclasses_'], self.validation_chf, + self.store_eclass_paths) + if update is None: + return False + if update: + entry['_eclasses_'] = update + return True + def get_matches(self, match_dict): """generic function for walking the entire cache db, matching restrictions to filter what cpv's are returned. Derived classes should override this if they @@ -195,7 +248,9 @@ class database(object): keys = __iter__ items = iteritems -def serialize_eclasses(eclass_dict): +_keysorter = operator.itemgetter(0) + +def serialize_eclasses(eclass_dict, chf_type='mtime', paths=True): """takes a dict, returns a string representing said dict""" """The "new format", which causes older versions of <portage-2.1.2 to traceback with a ValueError due to failed long() conversion. This format @@ -206,27 +261,40 @@ def serialize_eclasses(eclass_dict): """ if not eclass_dict: return "" - return "\t".join(k + "\t%s\t%s" % eclass_dict[k] \ - for k in sorted(eclass_dict)) + getter = operator.attrgetter(chf_type) + if paths: + return "\t".join("%s\t%s\t%s" % (k, v.eclass_dir, getter(v)) + for k, v in sorted(eclass_dict.items(), key=_keysorter)) + return "\t".join("%s\t%s" % (k, getter(v)) + for k, v in sorted(eclass_dict.items(), key=_keysorter)) + -def reconstruct_eclasses(cpv, eclass_string): +def reconstruct_eclasses(cpv, eclass_string, chf_type='mtime', paths=True): """returns a dict when handed a string generated by serialize_eclasses""" eclasses = eclass_string.rstrip().lstrip().split("\t") if eclasses == [""]: # occasionally this occurs in the fs backends. they suck. 
return {} - - if len(eclasses) % 2 != 0 and len(eclasses) % 3 != 0: + + converter = _unicode + if chf_type == 'mtime': + converter = long + + if paths: + if len(eclasses) % 3 != 0: + raise cache_errors.CacheCorruption(cpv, "_eclasses_ was of invalid len %i" % len(eclasses)) + elif len(eclasses) % 2 != 0: raise cache_errors.CacheCorruption(cpv, "_eclasses_ was of invalid len %i" % len(eclasses)) d={} try: - if eclasses[1].isdigit(): - for x in range(0, len(eclasses), 2): - d[eclasses[x]] = ("", long(eclasses[x + 1])) - else: + i = iter(eclasses) + if paths: # The old format contains paths that will be discarded. - for x in range(0, len(eclasses), 3): - d[eclasses[x]] = (eclasses[x + 1], long(eclasses[x + 2])) + for name, path, val in zip(i, i, i): + d[name] = (path, converter(val)) + else: + for name, val in zip(i, i): + d[name] = converter(val) except IndexError: raise cache_errors.CacheCorruption(cpv, "_eclasses_ was of invalid len %i" % len(eclasses)) diff --git a/portage_with_autodep/pym/portage/cache/template.pyo b/portage_with_autodep/pym/portage/cache/template.pyo Binary files differnew file mode 100644 index 0000000..45da015 --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/template.pyo diff --git a/portage_with_autodep/pym/portage/cache/util.py b/portage_with_autodep/pym/portage/cache/util.py deleted file mode 100644 index b824689..0000000 --- a/portage_with_autodep/pym/portage/cache/util.py +++ /dev/null @@ -1,170 +0,0 @@ -# Copyright: 2005 Gentoo Foundation -# Author(s): Brian Harring (ferringb@gentoo.org) -# License: GPL2 - -from __future__ import print_function - -__all__ = ["mirror_cache", "non_quiet_mirroring", "quiet_mirroring"] - -from itertools import chain -from portage.cache import cache_errors -from portage.localization import _ - -def mirror_cache(valid_nodes_iterable, src_cache, trg_cache, eclass_cache=None, verbose_instance=None): - - from portage import eapi_is_supported, \ - _validate_cache_for_unsupported_eapis - if not 
src_cache.complete_eclass_entries and not eclass_cache: - raise Exception("eclass_cache required for cache's of class %s!" % src_cache.__class__) - - if verbose_instance == None: - noise=quiet_mirroring() - else: - noise=verbose_instance - - dead_nodes = set(trg_cache) - count=0 - - if not trg_cache.autocommits: - trg_cache.sync(100) - - for x in valid_nodes_iterable: -# print "processing x=",x - count+=1 - dead_nodes.discard(x) - try: - entry = src_cache[x] - except KeyError as e: - noise.missing_entry(x) - del e - continue - except cache_errors.CacheError as ce: - noise.exception(x, ce) - del ce - continue - - eapi = entry.get('EAPI') - if not eapi: - eapi = '0' - eapi = eapi.lstrip('-') - eapi_supported = eapi_is_supported(eapi) - if not eapi_supported: - if not _validate_cache_for_unsupported_eapis: - noise.misc(x, _("unable to validate cache for EAPI='%s'") % eapi) - continue - - write_it = True - trg = None - try: - trg = trg_cache[x] - except (KeyError, cache_errors.CacheError): - pass - else: - if trg['_mtime_'] == entry['_mtime_'] and \ - eclass_cache.is_eclass_data_valid(trg['_eclasses_']) and \ - set(trg['_eclasses_']) == set(entry['_eclasses_']): - write_it = False - - for d in (entry, trg): - if d is not None and d.get('EAPI') in ('', '0'): - del d['EAPI'] - - if trg and not write_it: - """ We don't want to skip the write unless we're really sure that - the existing cache is identical, so don't trust _mtime_ and - _eclasses_ alone.""" - for k in set(chain(entry, trg)).difference( - ("_mtime_", "_eclasses_")): - if trg.get(k, "") != entry.get(k, ""): - write_it = True - break - - if write_it: - try: - inherited = entry.get("INHERITED", "") - eclasses = entry.get("_eclasses_") - except cache_errors.CacheError as ce: - noise.exception(x, ce) - del ce - continue - - if eclasses is not None: - if not eclass_cache.is_eclass_data_valid(entry["_eclasses_"]): - noise.eclass_stale(x) - continue - inherited = eclasses - else: - inherited = inherited.split() - - 
if inherited: - if src_cache.complete_eclass_entries and eclasses is None: - noise.corruption(x, "missing _eclasses_ field") - continue - - # Even if _eclasses_ already exists, replace it with data from - # eclass_cache, in order to insert local eclass paths. - try: - eclasses = eclass_cache.get_eclass_data(inherited) - except KeyError: - # INHERITED contains a non-existent eclass. - noise.eclass_stale(x) - continue - - if eclasses is None: - noise.eclass_stale(x) - continue - entry["_eclasses_"] = eclasses - - if not eapi_supported: - for k in set(entry).difference(("_mtime_", "_eclasses_")): - entry[k] = "" - entry["EAPI"] = "-" + eapi - - # by this time, if it reaches here, the eclass has been validated, and the entry has - # been updated/translated (if needs be, for metadata/cache mainly) - try: - trg_cache[x] = entry - except cache_errors.CacheError as ce: - noise.exception(x, ce) - del ce - continue - if count >= noise.call_update_min: - noise.update(x) - count = 0 - - if not trg_cache.autocommits: - trg_cache.commit() - - # ok. by this time, the trg_cache is up to date, and we have a dict - # with a crapload of cpv's. we now walk the target db, removing stuff if it's in the list. - for key in dead_nodes: - try: - del trg_cache[key] - except KeyError: - pass - except cache_errors.CacheError as ce: - noise.exception(ce) - del ce - noise.finish() - - -class quiet_mirroring(object): - # call_update_every is used by mirror_cache to determine how often to call in. - # quiet defaults to 2^24 -1. 
Don't call update, 'cept once every 16 million or so :) - call_update_min = 0xffffff - def update(self,key,*arg): pass - def exception(self,key,*arg): pass - def eclass_stale(self,*arg): pass - def missing_entry(self, key): pass - def misc(self,key,*arg): pass - def corruption(self, key, s): pass - def finish(self, *arg): pass - -class non_quiet_mirroring(quiet_mirroring): - call_update_min=1 - def update(self,key,*arg): print("processed",key) - def exception(self, key, *arg): print("exec",key,arg) - def missing(self,key): print("key %s is missing", key) - def corruption(self,key,*arg): print("corrupt %s:" % key,arg) - def eclass_stale(self,key,*arg):print("stale %s:"%key,arg) - diff --git a/portage_with_autodep/pym/portage/cache/volatile.py b/portage_with_autodep/pym/portage/cache/volatile.py index 0bf6bab..5516745 100644 --- a/portage_with_autodep/pym/portage/cache/volatile.py +++ b/portage_with_autodep/pym/portage/cache/volatile.py @@ -8,18 +8,23 @@ class database(template.database): autocommits = True serialize_eclasses = False + store_eclass_paths = False def __init__(self, *args, **config): config.pop("gid", None) config.pop("perms", None) super(database, self).__init__(*args, **config) self._data = {} - self.__iter__ = self._data.__iter__ self._delitem = self._data.__delitem__ - self.__contains__ = self._data.__contains__ def _setitem(self, name, values): self._data[name] = copy.deepcopy(values) - def _getitem(self, cpv): + def __getitem__(self, cpv): return copy.deepcopy(self._data[cpv]) + + def __iter__(self): + return iter(self._data) + + def __contains__(self, key): + return key in self._data diff --git a/portage_with_autodep/pym/portage/cache/volatile.pyo b/portage_with_autodep/pym/portage/cache/volatile.pyo Binary files differnew file mode 100644 index 0000000..fac5d55 --- /dev/null +++ b/portage_with_autodep/pym/portage/cache/volatile.pyo diff --git a/portage_with_autodep/pym/portage/checksum.py b/portage_with_autodep/pym/portage/checksum.py index 
9e7e455..bd416ac 100644 --- a/portage_with_autodep/pym/portage/checksum.py +++ b/portage_with_autodep/pym/portage/checksum.py @@ -1,5 +1,5 @@ # checksum.py -- core Portage functionality -# Copyright 1998-2011 Gentoo Foundation +# Copyright 1998-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import portage @@ -16,8 +16,31 @@ import tempfile hashfunc_map = {} hashorigin_map = {} -def _generate_hash_function(hashtype, hashobject, origin="unknown"): - def pyhash(filename): +def _open_file(filename): + try: + return open(_unicode_encode(filename, + encoding=_encodings['fs'], errors='strict'), 'rb') + except IOError as e: + func_call = "open('%s')" % filename + if e.errno == errno.EPERM: + raise portage.exception.OperationNotPermitted(func_call) + elif e.errno == errno.EACCES: + raise portage.exception.PermissionDenied(func_call) + elif e.errno == errno.ENOENT: + raise portage.exception.FileNotFound(filename) + else: + raise + +class _generate_hash_function(object): + + __slots__ = ("_hashobject",) + + def __init__(self, hashtype, hashobject, origin="unknown"): + self._hashobject = hashobject + hashfunc_map[hashtype] = self + hashorigin_map[hashtype] = origin + + def __call__(self, filename): """ Run a checksum against a file. 
@@ -25,23 +48,11 @@ def _generate_hash_function(hashtype, hashobject, origin="unknown"): @type filename: String @return: The hash and size of the data """ - try: - f = open(_unicode_encode(filename, - encoding=_encodings['fs'], errors='strict'), 'rb') - except IOError as e: - func_call = "open('%s')" % filename - if e.errno == errno.EPERM: - raise portage.exception.OperationNotPermitted(func_call) - elif e.errno == errno.EACCES: - raise portage.exception.PermissionDenied(func_call) - elif e.errno == errno.ENOENT: - raise portage.exception.FileNotFound(filename) - else: - raise + f = _open_file(filename) blocksize = HASHING_BLOCKSIZE data = f.read(blocksize) size = 0 - checksum = hashobject() + checksum = self._hashobject() while data: checksum.update(data) size = size + len(data) @@ -49,9 +60,6 @@ def _generate_hash_function(hashtype, hashobject, origin="unknown"): f.close() return (checksum.hexdigest(), size) - hashfunc_map[hashtype] = pyhash - hashorigin_map[hashtype] = origin - return pyhash # Define hash functions, try to use the best module available. Later definitions # override earlier ones @@ -71,40 +79,72 @@ except ImportError: sha1hash = _generate_hash_function("SHA1", _new_sha1, origin="internal") +# Try to use mhash if available +# mhash causes GIL presently, so it gets less priority than hashlib and +# pycrypto. However, it might be the only accelerated implementation of +# WHIRLPOOL available. 
+try: + import mhash, functools + md5hash = _generate_hash_function("MD5", functools.partial(mhash.MHASH, mhash.MHASH_MD5), origin="mhash") + sha1hash = _generate_hash_function("SHA1", functools.partial(mhash.MHASH, mhash.MHASH_SHA1), origin="mhash") + sha256hash = _generate_hash_function("SHA256", functools.partial(mhash.MHASH, mhash.MHASH_SHA256), origin="mhash") + sha512hash = _generate_hash_function("SHA512", functools.partial(mhash.MHASH, mhash.MHASH_SHA512), origin="mhash") + for local_name, hash_name in (("rmd160", "ripemd160"), ("whirlpool", "whirlpool")): + if hasattr(mhash, 'MHASH_%s' % local_name.upper()): + globals()['%shash' % local_name] = \ + _generate_hash_function(local_name.upper(), \ + functools.partial(mhash.MHASH, getattr(mhash, 'MHASH_%s' % hash_name.upper())), \ + origin='mhash') +except ImportError: + pass + # Use pycrypto when available, prefer it over the internal fallbacks +# Check for 'new' attributes, since they can be missing if the module +# is broken somehow. try: from Crypto.Hash import SHA256, RIPEMD - sha256hash = _generate_hash_function("SHA256", SHA256.new, origin="pycrypto") - rmd160hash = _generate_hash_function("RMD160", RIPEMD.new, origin="pycrypto") -except ImportError as e: + sha256hash = getattr(SHA256, 'new', None) + if sha256hash is not None: + sha256hash = _generate_hash_function("SHA256", + sha256hash, origin="pycrypto") + rmd160hash = getattr(RIPEMD, 'new', None) + if rmd160hash is not None: + rmd160hash = _generate_hash_function("RMD160", + rmd160hash, origin="pycrypto") +except ImportError: pass # Use hashlib from python-2.5 if available and prefer it over pycrypto and internal fallbacks. -# Need special handling for RMD160 as it may not always be provided by hashlib. +# Need special handling for RMD160/WHIRLPOOL as they may not always be provided by hashlib. 
try: - import hashlib + import hashlib, functools md5hash = _generate_hash_function("MD5", hashlib.md5, origin="hashlib") sha1hash = _generate_hash_function("SHA1", hashlib.sha1, origin="hashlib") sha256hash = _generate_hash_function("SHA256", hashlib.sha256, origin="hashlib") - try: - hashlib.new('ripemd160') - except ValueError: - pass - else: - def rmd160(): - return hashlib.new('ripemd160') - rmd160hash = _generate_hash_function("RMD160", rmd160, origin="hashlib") -except ImportError as e: + sha512hash = _generate_hash_function("SHA512", hashlib.sha512, origin="hashlib") + for local_name, hash_name in (("rmd160", "ripemd160"), ("whirlpool", "whirlpool")): + try: + hashlib.new(hash_name) + except ValueError: + pass + else: + globals()['%shash' % local_name] = \ + _generate_hash_function(local_name.upper(), \ + functools.partial(hashlib.new, hash_name), \ + origin='hashlib') + +except ImportError: pass - + +if "WHIRLPOOL" not in hashfunc_map: + # Bundled WHIRLPOOL implementation + from portage.util.whirlpool import new as _new_whirlpool + whirlpoolhash = _generate_hash_function("WHIRLPOOL", _new_whirlpool, origin="bundled") # Use python-fchksum if available, prefer it over all other MD5 implementations try: - import fchksum - - def md5hash(filename): - return fchksum.fmd5t(filename) + from fchksum import fmd5t as md5hash hashfunc_map["MD5"] = md5hash hashorigin_map["MD5"] = "python-fchksum" @@ -127,6 +167,15 @@ if os.path.exists(PRELINK_BINARY): prelink_capable=1 del results +def is_prelinkable_elf(filename): + f = _open_file(filename) + try: + magic = f.read(17) + finally: + f.close() + return (len(magic) == 17 and magic.startswith(b'\x7fELF') and + magic[16] in (b'\x02', b'\x03')) # 2=ET_EXEC, 3=ET_DYN + def perform_md5(x, calc_prelink=0): return perform_checksum(x, "MD5", calc_prelink)[0] @@ -137,7 +186,7 @@ def _perform_md5_merge(x, **kwargs): def perform_all(x, calc_prelink=0): mydict = {} for k in hashfunc_map: - mydict[k] = perform_checksum(x, 
hashfunc_map[k], calc_prelink)[0] + mydict[k] = perform_checksum(x, k, calc_prelink)[0] return mydict def get_valid_checksum_keys(): @@ -234,7 +283,8 @@ def perform_checksum(filename, hashname="MD5", calc_prelink=0): myfilename = filename prelink_tmpfile = None try: - if calc_prelink and prelink_capable: + if (calc_prelink and prelink_capable and + is_prelinkable_elf(filename)): # Create non-prelinked temporary file to checksum. # Files rejected by prelink are summed in place. try: @@ -255,8 +305,10 @@ def perform_checksum(filename, hashname="MD5", calc_prelink=0): " hash function not available (needs dev-python/pycrypto)") myhash, mysize = hashfunc_map[hashname](myfilename) except (OSError, IOError) as e: - if e.errno == errno.ENOENT: + if e.errno in (errno.ENOENT, errno.ESTALE): raise portage.exception.FileNotFound(myfilename) + elif e.errno == portage.exception.PermissionDenied.errno: + raise portage.exception.PermissionDenied(myfilename) raise return myhash, mysize finally: diff --git a/portage_with_autodep/pym/portage/checksum.pyo b/portage_with_autodep/pym/portage/checksum.pyo Binary files differnew file mode 100644 index 0000000..00231af --- /dev/null +++ b/portage_with_autodep/pym/portage/checksum.pyo diff --git a/portage_with_autodep/pym/portage/const.py b/portage_with_autodep/pym/portage/const.py index 2a391db..614dcdb 100644 --- a/portage_with_autodep/pym/portage/const.py +++ b/portage_with_autodep/pym/portage/const.py @@ -67,8 +67,7 @@ FAKEROOT_BINARY = "/usr/bin/fakeroot" BASH_BINARY = "/bin/bash" MOVE_BINARY = "/bin/mv" PRELINK_BINARY = "/usr/sbin/prelink" -AUTODEP_LIBRARY = "/usr/lib/file_hook.so" -#AUTODEP_LIBRARY = "/home/bay/autodep/src/hook_lib/file_hook.so" +AUTODEP_LIBRARY = "/usr/lib/file_hook.so" INVALID_ENV_FILE = "/etc/spork/is/not/valid/profile.env" @@ -89,12 +88,12 @@ EBUILD_PHASES = ("pretend", "setup", "unpack", "prepare", "configure" "package", "preinst", "postinst","prerm", "postrm", "nofetch", "config", "info", "other") 
SUPPORTED_FEATURES = frozenset([ - "allow-missing-manifests", "assume-digests", "binpkg-logs", "buildpkg", "buildsyspkg", "candy", - "ccache", "chflags", "collision-protect", "compress-build-logs", - "depcheck", "depcheckstrict", + "ccache", "chflags", "clean-logs", + "collision-protect", "compress-build-logs", "compressdebug", + "config-protect-if-modified", "depcheck", "depcheckstrict", "digest", "distcc", "distcc-pump", "distlocks", "ebuild-locks", "fakeroot", - "fail-clean", "fixpackages", "force-mirror", "getbinpkg", + "fail-clean", "force-mirror", "force-prefix", "getbinpkg", "installsources", "keeptemp", "keepwork", "fixlafiles", "lmirror", "metadata-transfer", "mirror", "multilib-strict", "news", "noauto", "noclean", "nodoc", "noinfo", "noman", @@ -107,18 +106,57 @@ SUPPORTED_FEATURES = frozenset([ "strict", "stricter", "suidctl", "test", "test-fail-continue", "unknown-features-filter", "unknown-features-warn", "unmerge-logs", "unmerge-orphans", "userfetch", "userpriv", - "usersandbox", "usersync", "webrsync-gpg"]) + "usersandbox", "usersync", "webrsync-gpg", "xattr"]) EAPI = 4 HASHING_BLOCKSIZE = 32768 MANIFEST1_HASH_FUNCTIONS = ("MD5", "SHA256", "RMD160") -MANIFEST2_HASH_FUNCTIONS = ("SHA1", "SHA256", "RMD160") - MANIFEST1_REQUIRED_HASH = "MD5" -MANIFEST2_REQUIRED_HASH = "SHA1" + +# Future events: +# +# After WHIRLPOOL is supported in stable portage: +# - Add SHA256 and WHIRLPOOL to MANIFEST2_HASH_DEFAULTS. +# - Remove SHA1 and RMD160 from MANIFEST2_HASH_*. +# - Set manifest-hashes in gentoo-x86/metadata/layout.conf as follows: +# manifest-hashes = SHA256 SHA512 WHIRLPOOL +# +# After WHIRLPOOL is supported in stable portage for at least 1 year: +# - Change MANIFEST2_REQUIRED_HASH to WHIRLPOOL. +# - Remove SHA256 from MANIFEST2_HASH_*. +# - Set manifest-hashes in gentoo-x86/metadata/layout.conf as follows: +# manifest-hashes = SHA512 WHIRLPOOL +# +# After SHA-3 is approved: +# - Add new hashes to MANIFEST2_HASH_*. 
+# +# After SHA-3 is supported in stable portage: +# - Set manifest-hashes in gentoo-x86/metadata/layout.conf as follows: +# manifest-hashes = SHA3 SHA512 WHIRLPOOL +# +# After layout.conf settings correspond to defaults in stable portage: +# - Remove redundant settings from gentoo-x86/metadata/layout.conf. + +MANIFEST2_HASH_FUNCTIONS = ("RMD160", "SHA1", "SHA256", "SHA512", "WHIRLPOOL") +MANIFEST2_HASH_DEFAULTS = frozenset(["SHA1", "SHA256", "RMD160"]) +MANIFEST2_REQUIRED_HASH = "SHA256" MANIFEST2_IDENTIFIERS = ("AUX", "MISC", "DIST", "EBUILD") + +# The EPREFIX for the current install is hardcoded here, but access to this +# constant should be minimal, in favor of access via the EPREFIX setting of +# a config instance (since it's possible to contruct a config instance with +# a different EPREFIX). Therefore, the EPREFIX constant should *NOT* be used +# in the definition of any other constants within this file. +EPREFIX="" + +# pick up EPREFIX from the environment if set +if "PORTAGE_OVERRIDE_EPREFIX" in os.environ: + EPREFIX = os.environ["PORTAGE_OVERRIDE_EPREFIX"] + if EPREFIX: + EPREFIX = os.path.normpath(EPREFIX) + # =========================================================================== # END OF CONSTANTS -- END OF CONSTANTS -- END OF CONSTANTS -- END OF CONSTANT # =========================================================================== @@ -129,7 +167,6 @@ _ENABLE_DYN_LINK_MAP = True _ENABLE_PRESERVE_LIBS = True _ENABLE_REPO_NAME_WARN = True _ENABLE_SET_CONFIG = True -_SANDBOX_COMPAT_LEVEL = "22" # The definitions above will differ between branches, so it's useful to have diff --git a/portage_with_autodep/pym/portage/const.py.rej b/portage_with_autodep/pym/portage/const.py.rej new file mode 100644 index 0000000..9fe70f8 --- /dev/null +++ b/portage_with_autodep/pym/portage/const.py.rej @@ -0,0 +1,12 @@ +--- pym/portage/const.py ++++ pym/portage/const.py +@@ -90,7 +92,8 @@ + SUPPORTED_FEATURES = frozenset([ + "allow-missing-manifests", + "assume-digests", 
"binpkg-logs", "buildpkg", "buildsyspkg", "candy", +- "ccache", "chflags", "collision-protect", "compress-build-logs", ++ "ccache", "chflags", "collision-protect", "compress-build-logs", ++ "depcheck", "depcheckstrict", + "digest", "distcc", "distcc-pump", "distlocks", "ebuild-locks", "fakeroot", + "fail-clean", "fixpackages", "force-mirror", "getbinpkg", + "installsources", "keeptemp", "keepwork", "fixlafiles", "lmirror", diff --git a/portage_with_autodep/pym/portage/const.pyo b/portage_with_autodep/pym/portage/const.pyo Binary files differnew file mode 100644 index 0000000..804420f --- /dev/null +++ b/portage_with_autodep/pym/portage/const.pyo diff --git a/portage_with_autodep/pym/portage/cvstree.py b/portage_with_autodep/pym/portage/cvstree.py index 9ba22f3..3680ae4 100644 --- a/portage_with_autodep/pym/portage/cvstree.py +++ b/portage_with_autodep/pym/portage/cvstree.py @@ -248,11 +248,13 @@ def getentries(mydir,recursive=0): if entries["files"][mysplit[1]]["revision"][0]=="-": entries["files"][mysplit[1]]["status"]+=["removed"] - for file in apply_cvsignore_filter(os.listdir(mydir)): + for file in os.listdir(mydir): if file=="CVS": continue if os.path.isdir(mydir+"/"+file): if file not in entries["dirs"]: + if ignore_list.match(file) is not None: + continue entries["dirs"][file]={"dirs":{},"files":{}} # It's normal for a directory to be unlisted in Entries # when checked out without -P (see bug #257660). 
@@ -266,6 +268,8 @@ def getentries(mydir,recursive=0): entries["dirs"][file]["status"]=["exists"] elif os.path.isfile(mydir+"/"+file): if file not in entries["files"]: + if ignore_list.match(file) is not None: + continue entries["files"][file]={"revision":"","date":"","flags":"","tags":""} if "status" in entries["files"][file]: if "exists" not in entries["files"][file]["status"]: @@ -285,7 +289,9 @@ def getentries(mydir,recursive=0): print("failed to stat",file) print(e) return - + + elif ignore_list.match(file) is not None: + pass else: print() print("File of unknown type:",mydir+"/"+file) diff --git a/portage_with_autodep/pym/portage/cvstree.pyo b/portage_with_autodep/pym/portage/cvstree.pyo Binary files differnew file mode 100644 index 0000000..4719daf --- /dev/null +++ b/portage_with_autodep/pym/portage/cvstree.pyo diff --git a/portage_with_autodep/pym/portage/data.py b/portage_with_autodep/pym/portage/data.py index c38fa17..c4d967a 100644 --- a/portage_with_autodep/pym/portage/data.py +++ b/portage_with_autodep/pym/portage/data.py @@ -1,5 +1,5 @@ # data.py -- Calculated/Discovered Data Values -# Copyright 1998-2010 Gentoo Foundation +# Copyright 1998-2011 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import os, pwd, grp, platform @@ -58,65 +58,165 @@ def portage_group_warning(): # If the "wheel" group does not exist then wheelgid falls back to 0. # If the "portage" group does not exist then portage_uid falls back to wheelgid. -secpass=0 - uid=os.getuid() wheelgid=0 -if uid==0: - secpass=2 try: wheelgid=grp.getgrnam("wheel")[2] except KeyError: pass -# Allow the overriding of the user used for 'userpriv' and 'userfetch' -_portage_uname = os.environ.get('PORTAGE_USERNAME', 'portage') -_portage_grpname = os.environ.get('PORTAGE_GRPNAME', 'portage') +# The portage_uid and portage_gid global constants, and others that +# depend on them are initialized lazily, in order to allow configuration +# via make.conf. 
Eventually, these constants may be deprecated in favor +# of config attributes, since it's conceivable that multiple +# configurations with different constants could be used simultaneously. +_initialized_globals = set() -#Discover the uid and gid of the portage user/group -try: - portage_uid = pwd.getpwnam(_portage_uname)[2] - portage_gid = grp.getgrnam(_portage_grpname)[2] - if secpass < 1 and portage_gid in os.getgroups(): - secpass=1 -except KeyError: - portage_uid=0 - portage_gid=0 - userpriv_groups = [portage_gid] - writemsg(colorize("BAD", - _("portage: 'portage' user or group missing.")) + "\n", noiselevel=-1) - writemsg(_( - " For the defaults, line 1 goes into passwd, " - "and 2 into group.\n"), noiselevel=-1) - writemsg(colorize("GOOD", - " portage:x:250:250:portage:/var/tmp/portage:/bin/false") \ - + "\n", noiselevel=-1) - writemsg(colorize("GOOD", " portage::250:portage") + "\n", - noiselevel=-1) - portage_group_warning() -else: - userpriv_groups = [portage_gid] - if secpass >= 2: - class _LazyUserprivGroups(portage.proxy.objectproxy.ObjectProxy): - def _get_target(self): - global userpriv_groups - if userpriv_groups is not self: - return userpriv_groups - userpriv_groups = _userpriv_groups - # Get a list of group IDs for the portage user. Do not use - # grp.getgrall() since it is known to trigger spurious - # SIGPIPE problems with nss_ldap. 
- mystatus, myoutput = \ - portage.subprocess_getstatusoutput("id -G %s" % _portage_uname) - if mystatus == os.EX_OK: - for x in myoutput.split(): - try: - userpriv_groups.append(int(x)) - except ValueError: - pass - userpriv_groups[:] = sorted(set(userpriv_groups)) - return userpriv_groups - - _userpriv_groups = userpriv_groups - userpriv_groups = _LazyUserprivGroups() +def _get_global(k): + if k in _initialized_globals: + return globals()[k] + + if k in ('portage_gid', 'portage_uid', 'secpass'): + global portage_gid, portage_uid, secpass + secpass = 0 + if uid == 0: + secpass = 2 + elif portage.const.EPREFIX: + secpass = 2 + #Discover the uid and gid of the portage user/group + try: + portage_uid = pwd.getpwnam(_get_global('_portage_username')).pw_uid + _portage_grpname = _get_global('_portage_grpname') + if platform.python_implementation() == 'PyPy': + # Somehow this prevents "TypeError: expected string" errors + # from grp.getgrnam() with PyPy 1.7 + _portage_grpname = str(_portage_grpname) + portage_gid = grp.getgrnam(_portage_grpname).gr_gid + if secpass < 1 and portage_gid in os.getgroups(): + secpass = 1 + except KeyError: + portage_uid = 0 + portage_gid = 0 + writemsg(colorize("BAD", + _("portage: 'portage' user or group missing.")) + "\n", noiselevel=-1) + writemsg(_( + " For the defaults, line 1 goes into passwd, " + "and 2 into group.\n"), noiselevel=-1) + writemsg(colorize("GOOD", + " portage:x:250:250:portage:/var/tmp/portage:/bin/false") \ + + "\n", noiselevel=-1) + writemsg(colorize("GOOD", " portage::250:portage") + "\n", + noiselevel=-1) + portage_group_warning() + + _initialized_globals.add('portage_gid') + _initialized_globals.add('portage_uid') + _initialized_globals.add('secpass') + + if k == 'portage_gid': + return portage_gid + elif k == 'portage_uid': + return portage_uid + elif k == 'secpass': + return secpass + else: + raise AssertionError('unknown name: %s' % k) + + elif k == 'userpriv_groups': + v = [portage_gid] + if secpass >= 2: + # 
Get a list of group IDs for the portage user. Do not use + # grp.getgrall() since it is known to trigger spurious + # SIGPIPE problems with nss_ldap. + mystatus, myoutput = \ + portage.subprocess_getstatusoutput("id -G %s" % _portage_username) + if mystatus == os.EX_OK: + for x in myoutput.split(): + try: + v.append(int(x)) + except ValueError: + pass + v = sorted(set(v)) + + # Avoid instantiating portage.settings when the desired + # variable is set in os.environ. + elif k in ('_portage_grpname', '_portage_username'): + v = None + if k == '_portage_grpname': + env_key = 'PORTAGE_GRPNAME' + else: + env_key = 'PORTAGE_USERNAME' + + if env_key in os.environ: + v = os.environ[env_key] + elif hasattr(portage, 'settings'): + v = portage.settings.get(env_key) + elif portage.const.EPREFIX: + # For prefix environments, default to the UID and GID of + # the top-level EROOT directory. The config class has + # equivalent code, but we also need to do it here if + # _disable_legacy_globals() has been called. 
+ eroot = os.path.join(os.environ.get('ROOT', os.sep), + portage.const.EPREFIX.lstrip(os.sep)) + try: + eroot_st = os.stat(eroot) + except OSError: + pass + else: + if k == '_portage_grpname': + try: + grp_struct = grp.getgrgid(eroot_st.st_gid) + except KeyError: + pass + else: + v = grp_struct.gr_name + else: + try: + pwd_struct = pwd.getpwuid(eroot_st.st_uid) + except KeyError: + pass + else: + v = pwd_struct.pw_name + + if v is None: + v = 'portage' + else: + raise AssertionError('unknown name: %s' % k) + + globals()[k] = v + _initialized_globals.add(k) + return v + +class _GlobalProxy(portage.proxy.objectproxy.ObjectProxy): + + __slots__ = ('_name',) + + def __init__(self, name): + portage.proxy.objectproxy.ObjectProxy.__init__(self) + object.__setattr__(self, '_name', name) + + def _get_target(self): + return _get_global(object.__getattribute__(self, '_name')) + +for k in ('portage_gid', 'portage_uid', 'secpass', 'userpriv_groups', + '_portage_grpname', '_portage_username'): + globals()[k] = _GlobalProxy(k) +del k + +def _init(settings): + """ + Use config variables like PORTAGE_GRPNAME and PORTAGE_USERNAME to + initialize global variables. This allows settings to come from make.conf + instead of requiring them to be set in the calling environment. 
+ """ + if '_portage_grpname' not in _initialized_globals and \ + '_portage_username' not in _initialized_globals: + + v = settings.get('PORTAGE_GRPNAME', 'portage') + globals()['_portage_grpname'] = v + _initialized_globals.add('_portage_grpname') + + v = settings.get('PORTAGE_USERNAME', 'portage') + globals()['_portage_username'] = v + _initialized_globals.add('_portage_username') diff --git a/portage_with_autodep/pym/portage/data.pyo b/portage_with_autodep/pym/portage/data.pyo Binary files differnew file mode 100644 index 0000000..7f749e0 --- /dev/null +++ b/portage_with_autodep/pym/portage/data.pyo diff --git a/portage_with_autodep/pym/portage/dbapi/_MergeProcess.py b/portage_with_autodep/pym/portage/dbapi/_MergeProcess.py index 34ed031..b5f6a0b 100644 --- a/portage_with_autodep/pym/portage/dbapi/_MergeProcess.py +++ b/portage_with_autodep/pym/portage/dbapi/_MergeProcess.py @@ -1,22 +1,16 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import io -import shutil import signal -import tempfile +import sys import traceback import errno import fcntl import portage from portage import os, _unicode_decode -from portage.const import PORTAGE_PACKAGE_ATOM -from portage.dep import match_from_list import portage.elog.messages -from portage.elog import _preload_elog_modules -from portage.util import ensure_dirs -from _emerge.PollConstants import PollConstants from _emerge.SpawnProcess import SpawnProcess class MergeProcess(SpawnProcess): @@ -26,7 +20,7 @@ class MergeProcess(SpawnProcess): """ __slots__ = ('mycat', 'mypkg', 'settings', 'treetype', - 'vartree', 'scheduler', 'blockers', 'pkgloc', 'infloc', 'myebuild', + 'vartree', 'blockers', 'pkgloc', 'infloc', 'myebuild', 'mydbapi', 'prev_mtimes', 'unmerge', '_elog_reader_fd', '_elog_reg_id', '_buf', '_elog_keys', '_locked_vdb') @@ -46,8 +40,12 @@ class MergeProcess(SpawnProcess): settings.reset() settings.setcpv(cpv, 
mydb=self.mydbapi) - if not self.unmerge: - self._handle_self_reinstall() + # Inherit stdin by default, so that the pdb SIGUSR1 + # handler is usable for the subprocess. + if self.fd_pipes is None: + self.fd_pipes = {} + self.fd_pipes.setdefault(0, sys.stdin.fileno()) + super(MergeProcess, self)._start() def _lock_vdb(self): @@ -69,59 +67,9 @@ class MergeProcess(SpawnProcess): self.vartree.dbapi.unlock() self._locked_vdb = False - def _handle_self_reinstall(self): - """ - If portage is reinstalling itself, create temporary - copies of PORTAGE_BIN_PATH and PORTAGE_PYM_PATH in order - to avoid relying on the new versions which may be - incompatible. Register an atexit hook to clean up the - temporary directories. Pre-load elog modules here since - we won't be able to later if they get unmerged (happens - when namespace changes). - """ - - settings = self.settings - cpv = settings.mycpv - reinstall_self = False - if self.settings["ROOT"] == "/" and \ - match_from_list(PORTAGE_PACKAGE_ATOM, [cpv]): - inherited = frozenset(self.settings.get('INHERITED', '').split()) - if not self.vartree.dbapi.cpv_exists(cpv) or \ - '9999' in cpv or \ - 'git' in inherited or \ - 'git-2' in inherited: - reinstall_self = True - - if reinstall_self: - # Load lazily referenced portage submodules into memory, - # so imports won't fail during portage upgrade/downgrade. - _preload_elog_modules(self.settings) - portage.proxy.lazyimport._preload_portage_submodules() - - # Make the temp directory inside $PORTAGE_TMPDIR/portage, since - # it's common for /tmp and /var/tmp to be mounted with the - # "noexec" option (see bug #346899). 
- build_prefix = os.path.join(settings["PORTAGE_TMPDIR"], "portage") - ensure_dirs(build_prefix) - base_path_tmp = tempfile.mkdtemp( - "", "._portage_reinstall_.", build_prefix) - portage.process.atexit_register(shutil.rmtree, base_path_tmp) - dir_perms = 0o755 - for subdir in "bin", "pym": - var_name = "PORTAGE_%s_PATH" % subdir.upper() - var_orig = settings[var_name] - var_new = os.path.join(base_path_tmp, subdir) - settings[var_name] = var_new - settings.backup_changes(var_name) - shutil.copytree(var_orig, var_new, symlinks=True) - os.chmod(var_new, dir_perms) - portage._bin_path = settings['PORTAGE_BIN_PATH'] - portage._pym_path = settings['PORTAGE_PYM_PATH'] - os.chmod(base_path_tmp, dir_perms) - def _elog_output_handler(self, fd, event): output = None - if event & PollConstants.POLLIN: + if event & self.scheduler.IO_IN: try: output = os.read(fd, self._bufsize) except OSError as e: @@ -141,6 +89,15 @@ class MergeProcess(SpawnProcess): reporter = getattr(portage.elog.messages, funcname) reporter(msg, phase=phase, key=key, out=out) + if event & self.scheduler.IO_HUP: + self.scheduler.unregister(self._elog_reg_id) + self._elog_reg_id = None + os.close(self._elog_reader_fd) + self._elog_reader_fd = None + return False + + return True + def _spawn(self, args, fd_pipes, **kwargs): """ Fork a subprocess, apply local settings, and call @@ -178,6 +135,10 @@ class MergeProcess(SpawnProcess): pid = os.fork() if pid != 0: + if not isinstance(pid, int): + raise AssertionError( + "fork returned non-integer: %s" % (repr(pid),)) + os.close(elog_writer_fd) self._elog_reader_fd = elog_reader_fd self._buf = "" @@ -193,7 +154,9 @@ class MergeProcess(SpawnProcess): return [pid] os.close(elog_reader_fd) - portage.process._setup_pipes(fd_pipes) + portage.locks._close_fds() + # Disable close_fds since we don't exec (see _setup_pipes docstring). 
+ portage.process._setup_pipes(fd_pipes, close_fds=False) # Use default signal handlers since the ones inherited # from the parent process are irrelevant here. @@ -270,7 +233,7 @@ class MergeProcess(SpawnProcess): if self._elog_reg_id is not None: self.scheduler.unregister(self._elog_reg_id) self._elog_reg_id = None - if self._elog_reader_fd: + if self._elog_reader_fd is not None: os.close(self._elog_reader_fd) self._elog_reader_fd = None if self._elog_keys is not None: diff --git a/portage_with_autodep/pym/portage/dbapi/_MergeProcess.pyo b/portage_with_autodep/pym/portage/dbapi/_MergeProcess.pyo Binary files differnew file mode 100644 index 0000000..5839ad8 --- /dev/null +++ b/portage_with_autodep/pym/portage/dbapi/_MergeProcess.pyo diff --git a/portage_with_autodep/pym/portage/dbapi/_SyncfsProcess.py b/portage_with_autodep/pym/portage/dbapi/_SyncfsProcess.py new file mode 100644 index 0000000..7518214 --- /dev/null +++ b/portage_with_autodep/pym/portage/dbapi/_SyncfsProcess.py @@ -0,0 +1,53 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +from portage import os +from portage.util._ctypes import find_library, LoadLibrary +from portage.util._async.ForkProcess import ForkProcess + +class SyncfsProcess(ForkProcess): + """ + Isolate ctypes usage in a subprocess, in order to avoid + potential problems with stale cached libraries as + described in bug #448858, comment #14 (also see + http://bugs.python.org/issue14597). 
+ """ + + __slots__ = ('paths',) + + @staticmethod + def _get_syncfs(): + + filename = find_library("c") + if filename is not None: + library = LoadLibrary(filename) + if library is not None: + try: + return library.syncfs + except AttributeError: + pass + + return None + + def _run(self): + + syncfs_failed = False + syncfs = self._get_syncfs() + + if syncfs is not None: + for path in self.paths: + try: + fd = os.open(path, os.O_RDONLY) + except OSError: + pass + else: + try: + if syncfs(fd) != 0: + # Happens with PyPy (bug #446610) + syncfs_failed = True + finally: + os.close(fd) + + if syncfs is None or syncfs_failed: + return 1 + return os.EX_OK diff --git a/portage_with_autodep/pym/portage/dbapi/__init__.py b/portage_with_autodep/pym/portage/dbapi/__init__.py index e386faa..a1c5c56 100644 --- a/portage_with_autodep/pym/portage/dbapi/__init__.py +++ b/portage_with_autodep/pym/portage/dbapi/__init__.py @@ -1,4 +1,4 @@ -# Copyright 1998-2011 Gentoo Foundation +# Copyright 1998-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ["dbapi"] @@ -11,7 +11,7 @@ portage.proxy.lazyimport.lazyimport(globals(), 'portage.dep:match_from_list', 'portage.output:colorize', 'portage.util:cmp_sort_key,writemsg', - 'portage.versions:catsplit,catpkgsplit,vercmp', + 'portage.versions:catsplit,catpkgsplit,vercmp,_pkg_str', ) from portage import os @@ -46,7 +46,12 @@ class dbapi(object): def cp_list(self, cp, use_cache=1): raise NotImplementedError(self) - def _cpv_sort_ascending(self, cpv_list): + @staticmethod + def _cmp_cpv(cpv1, cpv2): + return vercmp(cpv1.version, cpv2.version) + + @staticmethod + def _cpv_sort_ascending(cpv_list): """ Use this to sort self.cp_list() results in ascending order. It sorts in place and returns None. 
@@ -55,12 +60,7 @@ class dbapi(object): # If the cpv includes explicit -r0, it has to be preserved # for consistency in findname and aux_get calls, so use a # dict to map strings back to their original values. - ver_map = {} - for cpv in cpv_list: - ver_map[cpv] = '-'.join(catpkgsplit(cpv)[2:]) - def cmp_cpv(cpv1, cpv2): - return vercmp(ver_map[cpv1], ver_map[cpv2]) - cpv_list.sort(key=cmp_sort_key(cmp_cpv)) + cpv_list.sort(key=cmp_sort_key(dbapi._cmp_cpv)) def cpv_all(self): """Return all CPVs in the db @@ -155,64 +155,74 @@ class dbapi(object): 2) Check enabled/disabled flag states. """ - iuse_implicit_match = self.settings._iuse_implicit_match + aux_keys = ["IUSE", "SLOT", "USE", "repository"] for cpv in cpv_iter: try: - iuse, slot, use = self.aux_get(cpv, ["IUSE", "SLOT", "USE"], myrepo=atom.repo) + metadata = dict(zip(aux_keys, + self.aux_get(cpv, aux_keys, myrepo=atom.repo))) except KeyError: continue - iuse = frozenset(x.lstrip('+-') for x in iuse.split()) - missing_iuse = False - for x in atom.unevaluated_atom.use.required: - if x not in iuse and not iuse_implicit_match(x): - missing_iuse = True - break - if missing_iuse: + + if not self._match_use(atom, cpv, metadata): continue - if not atom.use: - pass - elif not self._use_mutable: - # Use IUSE to validate USE settings for built packages, - # in case the package manager that built this package - # failed to do that for some reason (or in case of - # data corruption). 
- use = frozenset(x for x in use.split() if x in iuse or \ - iuse_implicit_match(x)) - missing_enabled = atom.use.missing_enabled.difference(iuse) - missing_disabled = atom.use.missing_disabled.difference(iuse) - - if atom.use.enabled: - if atom.use.enabled.intersection(missing_disabled): - continue - need_enabled = atom.use.enabled.difference(use) + + yield cpv + + def _match_use(self, atom, cpv, metadata): + iuse_implicit_match = self.settings._iuse_implicit_match + iuse = frozenset(x.lstrip('+-') for x in metadata["IUSE"].split()) + + for x in atom.unevaluated_atom.use.required: + if x not in iuse and not iuse_implicit_match(x): + return False + + if atom.use is None: + pass + + elif not self._use_mutable: + # Use IUSE to validate USE settings for built packages, + # in case the package manager that built this package + # failed to do that for some reason (or in case of + # data corruption). + use = frozenset(x for x in metadata["USE"].split() + if x in iuse or iuse_implicit_match(x)) + missing_enabled = atom.use.missing_enabled.difference(iuse) + missing_disabled = atom.use.missing_disabled.difference(iuse) + + if atom.use.enabled: + if atom.use.enabled.intersection(missing_disabled): + return False + need_enabled = atom.use.enabled.difference(use) + if need_enabled: + need_enabled = need_enabled.difference(missing_enabled) if need_enabled: - need_enabled = need_enabled.difference(missing_enabled) - if need_enabled: - continue + return False - if atom.use.disabled: - if atom.use.disabled.intersection(missing_enabled): - continue - need_disabled = atom.use.disabled.intersection(use) + if atom.use.disabled: + if atom.use.disabled.intersection(missing_enabled): + return False + need_disabled = atom.use.disabled.intersection(use) + if need_disabled: + need_disabled = need_disabled.difference(missing_disabled) if need_disabled: - need_disabled = need_disabled.difference(missing_disabled) - if need_disabled: - continue - else: - # Check masked and forced flags for 
repoman. - mysettings = getattr(self, 'settings', None) - if mysettings is not None and not mysettings.local_config: + return False - pkg = "%s:%s" % (cpv, slot) - usemask = mysettings._getUseMask(pkg) - if usemask.intersection(atom.use.enabled): - continue + elif not self.settings.local_config: + # Check masked and forced flags for repoman. + if hasattr(cpv, 'slot'): + pkg = cpv + else: + pkg = _pkg_str(cpv, slot=metadata["SLOT"], + repo=metadata.get("repository")) + usemask = self.settings._getUseMask(pkg) + if usemask.intersection(atom.use.enabled): + return False - useforce = mysettings._getUseForce(pkg).difference(usemask) - if useforce.intersection(atom.use.disabled): - continue + useforce = self.settings._getUseForce(pkg).difference(usemask) + if useforce.intersection(atom.use.disabled): + return False - yield cpv + return True def invalidentry(self, mypath): if '/-MERGING-' in mypath: diff --git a/portage_with_autodep/pym/portage/dbapi/__init__.pyo b/portage_with_autodep/pym/portage/dbapi/__init__.pyo Binary files differnew file mode 100644 index 0000000..e7b494d --- /dev/null +++ b/portage_with_autodep/pym/portage/dbapi/__init__.pyo diff --git a/portage_with_autodep/pym/portage/dbapi/_expand_new_virt.py b/portage_with_autodep/pym/portage/dbapi/_expand_new_virt.py index 6d6a27d..d379b4c 100644 --- a/portage_with_autodep/pym/portage/dbapi/_expand_new_virt.py +++ b/portage_with_autodep/pym/portage/dbapi/_expand_new_virt.py @@ -63,7 +63,8 @@ def expand_new_virt(vardb, atom): success, atoms = portage.dep_check(rdepend, None, vardb.settings, myuse=valid_use, - myroot=vardb.root, trees={vardb.root:{"porttree":vardb.vartree, + myroot=vardb.settings['EROOT'], + trees={vardb.settings['EROOT']:{"porttree":vardb.vartree, "vartree":vardb.vartree}}) if success: diff --git a/portage_with_autodep/pym/portage/dbapi/_expand_new_virt.pyo b/portage_with_autodep/pym/portage/dbapi/_expand_new_virt.pyo Binary files differnew file mode 100644 index 0000000..6c23a7e --- /dev/null 
+++ b/portage_with_autodep/pym/portage/dbapi/_expand_new_virt.pyo diff --git a/portage_with_autodep/pym/portage/dbapi/_similar_name_search.py b/portage_with_autodep/pym/portage/dbapi/_similar_name_search.py new file mode 100644 index 0000000..b6e4a1f --- /dev/null +++ b/portage_with_autodep/pym/portage/dbapi/_similar_name_search.py @@ -0,0 +1,57 @@ +# Copyright 2011-2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import difflib + +from portage.versions import catsplit + +def similar_name_search(dbs, atom): + + cp_lower = atom.cp.lower() + cat, pkg = catsplit(cp_lower) + if cat == "null": + cat = None + + all_cp = set() + for db in dbs: + all_cp.update(db.cp_all()) + + # discard dir containing no ebuilds + all_cp.discard(atom.cp) + + orig_cp_map = {} + for cp_orig in all_cp: + orig_cp_map.setdefault(cp_orig.lower(), []).append(cp_orig) + all_cp = set(orig_cp_map) + + if cat: + matches = difflib.get_close_matches(cp_lower, all_cp) + else: + pkg_to_cp = {} + for other_cp in list(all_cp): + other_pkg = catsplit(other_cp)[1] + if other_pkg == pkg: + # Check for non-identical package that + # differs only by upper/lower case. 
+ identical = True + for cp_orig in orig_cp_map[other_cp]: + if catsplit(cp_orig)[1] != \ + catsplit(atom.cp)[1]: + identical = False + break + if identical: + # discard dir containing no ebuilds + all_cp.discard(other_cp) + continue + pkg_to_cp.setdefault(other_pkg, set()).add(other_cp) + + pkg_matches = difflib.get_close_matches(pkg, pkg_to_cp) + matches = [] + for pkg_match in pkg_matches: + matches.extend(pkg_to_cp[pkg_match]) + + matches_orig_case = [] + for cp in matches: + matches_orig_case.extend(orig_cp_map[cp]) + + return matches_orig_case diff --git a/portage_with_autodep/pym/portage/dbapi/bintree.py b/portage_with_autodep/pym/portage/dbapi/bintree.py index 62fc623..a8027ee 100644 --- a/portage_with_autodep/pym/portage/dbapi/bintree.py +++ b/portage_with_autodep/pym/portage/dbapi/bintree.py @@ -1,4 +1,4 @@ -# Copyright 1998-2011 Gentoo Foundation +# Copyright 1998-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ["bindbapi", "binarytree"] @@ -11,19 +11,20 @@ portage.proxy.lazyimport.lazyimport(globals(), 'portage.output:EOutput,colorize', 'portage.locks:lockfile,unlockfile', 'portage.package.ebuild.doebuild:_vdb_use_conditional_atoms', - 'portage.package.ebuild.fetch:_check_distfile', + 'portage.package.ebuild.fetch:_check_distfile,_hide_url_passwd', 'portage.update:update_dbentries', 'portage.util:atomic_ofstream,ensure_dirs,normalize_path,' + \ 'writemsg,writemsg_stdout', 'portage.util.listdir:listdir', - 'portage.versions:best,catpkgsplit,catsplit', + 'portage.util._urlopen:urlopen@_urlopen', + 'portage.versions:best,catpkgsplit,catsplit,_pkg_str', ) from portage.cache.mappings import slot_dict_class from portage.const import CACHE_PATH from portage.dbapi.virtual import fakedbapi from portage.dep import Atom, use_reduce, paren_enclose -from portage.exception import AlarmSignal, InvalidPackageName, \ +from portage.exception import AlarmSignal, InvalidData, InvalidPackageName, \ PermissionDenied, 
PortageException from portage.localization import _ from portage import _movefile @@ -35,19 +36,17 @@ from portage import _unicode_encode import codecs import errno import io -import re import stat import subprocess import sys import tempfile import textwrap +import warnings from itertools import chain try: from urllib.parse import urlparse - from urllib.request import urlopen as urllib_request_urlopen except ImportError: from urlparse import urlparse - from urllib import urlopen as urllib_request_urlopen if sys.hexversion >= 0x3000000: basestring = str @@ -67,7 +66,7 @@ class bindbapi(fakedbapi): ["BUILD_TIME", "CHOST", "DEPEND", "EAPI", "IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND", "repository", "RESTRICT", "SLOT", "USE", "DEFINED_PHASES", - "REQUIRED_USE"]) + ]) self._aux_cache_slot_dict = slot_dict_class(self._aux_cache_keys) self._aux_cache = {} @@ -177,6 +176,34 @@ class bindbapi(fakedbapi): self.bintree.populate() return fakedbapi.cpv_all(self) + def getfetchsizes(self, pkg): + """ + This will raise MissingSignature if SIZE signature is not available, + or InvalidSignature if SIZE signature is invalid. + """ + + if not self.bintree.populated: + self.bintree.populate() + + pkg = getattr(pkg, 'cpv', pkg) + + filesdict = {} + if not self.bintree.isremote(pkg): + pass + else: + metadata = self.bintree._remotepkgs[pkg] + try: + size = int(metadata["SIZE"]) + except KeyError: + raise portage.exception.MissingSignature("SIZE") + except ValueError: + raise portage.exception.InvalidSignature( + "SIZE: %s" % metadata["SIZE"]) + else: + filesdict[os.path.basename(self.bintree.getname(pkg))] = size + + return filesdict + def _pkgindex_cpv_map_latest_build(pkgindex): """ Given a PackageIndex instance, create a dict of cpv -> metadata map. @@ -185,13 +212,20 @@ def _pkgindex_cpv_map_latest_build(pkgindex): @param pkgindex: A PackageIndex instance. 
@type pkgindex: PackageIndex @rtype: dict - @returns: a dict containing entry for the give cpv. + @return: a dict containing entry for the give cpv. """ cpv_map = {} for d in pkgindex.packages: cpv = d["CPV"] + try: + cpv = _pkg_str(cpv) + except InvalidData: + writemsg(_("!!! Invalid remote binary package: %s\n") % cpv, + noiselevel=-1) + continue + btime = d.get('BUILD_TIME', '') try: btime = int(btime) @@ -208,16 +242,35 @@ def _pkgindex_cpv_map_latest_build(pkgindex): if other_btime and (not btime or other_btime > btime): continue - cpv_map[cpv] = d + cpv_map[_pkg_str(cpv)] = d return cpv_map class binarytree(object): "this tree scans for a list of all packages available in PKGDIR" - def __init__(self, root, pkgdir, virtual=None, settings=None): + def __init__(self, _unused=None, pkgdir=None, + virtual=DeprecationWarning, settings=None): + + if pkgdir is None: + raise TypeError("pkgdir parameter is required") + + if settings is None: + raise TypeError("settings parameter is required") + + if _unused is not None and _unused != settings['ROOT']: + warnings.warn("The root parameter of the " + "portage.dbapi.bintree.binarytree" + " constructor is now unused. 
Use " + "settings['ROOT'] instead.", + DeprecationWarning, stacklevel=2) + + if virtual is not DeprecationWarning: + warnings.warn("The 'virtual' parameter of the " + "portage.dbapi.bintree.binarytree" + " constructor is unused", + DeprecationWarning, stacklevel=2) + if True: - self.root = root - #self.pkgdir=settings["PKGDIR"] self.pkgdir = normalize_path(pkgdir) self.dbapi = bindbapi(self, settings=settings) self.update_ents = self.dbapi.update_ents @@ -242,7 +295,7 @@ class binarytree(object): ["BUILD_TIME", "CHOST", "DEPEND", "DESCRIPTION", "EAPI", "IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND", "repository", "SLOT", "USE", "DEFINED_PHASES", - "REQUIRED_USE", "BASE_URI"] + "BASE_URI"] self._pkgindex_aux_keys = list(self._pkgindex_aux_keys) self._pkgindex_use_evaluated_keys = \ ("LICENSE", "RDEPEND", "DEPEND", @@ -268,7 +321,6 @@ class binarytree(object): "SLOT" : "0", "USE" : "", "DEFINED_PHASES" : "", - "REQUIRED_USE" : "" } self._pkgindex_inherited_keys = ["CHOST", "repository"] @@ -302,6 +354,15 @@ class binarytree(object): chain(*self._pkgindex_translated_keys) )) + @property + def root(self): + warnings.warn("The root attribute of " + "portage.dbapi.bintree.binarytree" + " is deprecated. Use " + "settings['ROOT'] instead.", + DeprecationWarning, stacklevel=3) + return self.settings['ROOT'] + def move_ent(self, mylist, repo_match=None): if not self.populated: self.populate() @@ -603,6 +664,7 @@ class binarytree(object): if mycpv in pkg_paths: # discard duplicates (All/ is preferred) continue + mycpv = _pkg_str(mycpv) pkg_paths[mycpv] = mypath # update the path if the package has been moved oldpath = d.get("PATH") @@ -678,6 +740,7 @@ class binarytree(object): (mycpv, self.settings["PORTAGE_CONFIGROOT"]), noiselevel=-1) continue + mycpv = _pkg_str(mycpv) pkg_paths[mycpv] = mypath self.dbapi.cpv_inject(mycpv) update_pkgindex = True @@ -787,7 +850,7 @@ class binarytree(object): # slash, so join manually... 
url = base_url.rstrip("/") + "/Packages" try: - f = urllib_request_urlopen(url) + f = _urlopen(url) except IOError: path = parsed_url.path.rstrip("/") + "/Packages" @@ -859,7 +922,7 @@ class binarytree(object): noiselevel=-1) except EnvironmentError as e: writemsg(_("\n\n!!! Error fetching binhost package" \ - " info from '%s'\n") % base_url) + " info from '%s'\n") % _hide_url_passwd(base_url)) writemsg("!!! %s\n\n" % str(e)) del e pkgindex = None @@ -935,7 +998,7 @@ class binarytree(object): writemsg_stdout("\n") writemsg_stdout( colorize("GOOD", _("Fetching bininfo from ")) + \ - re.sub(r'//(.+):.+@(.+)/', r'//\1:*password*@\2/', base_url) + "\n") + _hide_url_passwd(base_url) + "\n") remotepkgs = portage.getbinpkg.dir_get_metadata( base_url, chunk_size=chunk_size) @@ -947,7 +1010,12 @@ class binarytree(object): noiselevel=-1) continue mycat = mycat.strip() - fullpkg = mycat+"/"+mypkg[:-5] + try: + fullpkg = _pkg_str(mycat+"/"+mypkg[:-5]) + except InvalidData: + writemsg(_("!!! Invalid remote binary package: %s\n") % mypkg, + noiselevel=-1) + continue if fullpkg in metadata: # When using this old protocol, comparison with the remote @@ -1101,7 +1169,7 @@ class binarytree(object): Performs checksums and evaluates USE flag conditionals. Raises InvalidDependString if necessary. @rtype: dict - @returns: a dict containing entry for the give cpv. + @return: a dict containing entry for the give cpv. """ pkg_path = self.getname(cpv) @@ -1307,7 +1375,7 @@ class binarytree(object): Verify digests for the given package and raise DigestException if verification fails. @rtype: bool - @returns: True if digests could be located, False otherwise. + @return: True if digests could be located, False otherwise. 
""" cpv = pkg if not isinstance(cpv, basestring): diff --git a/portage_with_autodep/pym/portage/dbapi/bintree.pyo b/portage_with_autodep/pym/portage/dbapi/bintree.pyo Binary files differnew file mode 100644 index 0000000..f99f377 --- /dev/null +++ b/portage_with_autodep/pym/portage/dbapi/bintree.pyo diff --git a/portage_with_autodep/pym/portage/dbapi/cpv_expand.pyo b/portage_with_autodep/pym/portage/dbapi/cpv_expand.pyo Binary files differnew file mode 100644 index 0000000..cf1a428 --- /dev/null +++ b/portage_with_autodep/pym/portage/dbapi/cpv_expand.pyo diff --git a/portage_with_autodep/pym/portage/dbapi/dep_expand.pyo b/portage_with_autodep/pym/portage/dbapi/dep_expand.pyo Binary files differnew file mode 100644 index 0000000..b323f5b --- /dev/null +++ b/portage_with_autodep/pym/portage/dbapi/dep_expand.pyo diff --git a/portage_with_autodep/pym/portage/dbapi/porttree.py b/portage_with_autodep/pym/portage/dbapi/porttree.py index ecf275c..c5ee770 100644 --- a/portage_with_autodep/pym/portage/dbapi/porttree.py +++ b/portage_with_autodep/pym/portage/dbapi/porttree.py @@ -1,4 +1,4 @@ -# Copyright 1998-2011 Gentoo Foundation +# Copyright 1998-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = [ @@ -14,20 +14,19 @@ portage.proxy.lazyimport.lazyimport(globals(), 'portage.package.ebuild.doebuild:doebuild', 'portage.util:ensure_dirs,shlex_split,writemsg,writemsg_level', 'portage.util.listdir:listdir', - 'portage.versions:best,catpkgsplit,_pkgsplit@pkgsplit,ver_regexp', + 'portage.versions:best,catpkgsplit,_pkgsplit@pkgsplit,ver_regexp,_pkg_str', ) -from portage.cache import metadata_overlay, volatile +from portage.cache import volatile from portage.cache.cache_errors import CacheError from portage.cache.mappings import Mapping from portage.dbapi import dbapi from portage.exception import PortageException, \ FileNotFound, InvalidAtom, InvalidDependString, InvalidPackageName from portage.localization import _ -from 
portage.manifest import Manifest -from portage import eclass_cache, auxdbkeys, \ - eapi_is_supported, dep_check, \ +from portage import eclass_cache, \ + eapi_is_supported, \ _eapi_is_deprecated from portage import os from portage import _encodings @@ -37,8 +36,6 @@ from _emerge.EbuildMetadataPhase import EbuildMetadataPhase from _emerge.PollScheduler import PollScheduler import os as _os -import io -import stat import sys import traceback import warnings @@ -47,15 +44,6 @@ if sys.hexversion >= 0x3000000: basestring = str long = int -class _repo_info(object): - __slots__ = ('name', 'path', 'eclass_db', 'portdir', 'portdir_overlay') - def __init__(self, name, path, eclass_db): - self.name = name - self.path = path - self.eclass_db = eclass_db - self.portdir = eclass_db.porttrees[0] - self.portdir_overlay = ' '.join(eclass_db.porttrees[1:]) - class portdbapi(dbapi): """this tree will scan a portage directory located at root (passed to init)""" portdbapi_instances = [] @@ -69,6 +57,13 @@ class portdbapi(dbapi): def porttree_root(self): return self.settings.repositories.mainRepoLocation() + @property + def eclassdb(self): + main_repo = self.repositories.mainRepo() + if main_repo is None: + return None + return main_repo.eclass_db + def __init__(self, _unused_param=None, mysettings=None): """ @param _unused_param: deprecated, use mysettings['PORTDIR'] instead @@ -100,6 +95,7 @@ class portdbapi(dbapi): # this purpose because doebuild makes many changes to the config # instance that is passed in. self.doebuild_settings = config(clone=self.settings) + self._scheduler = PollScheduler().sched_iface self.depcachedir = os.path.realpath(self.settings.depcachedir) if os.environ.get("SANDBOX_ON") == "1": @@ -112,7 +108,6 @@ class portdbapi(dbapi): ":".join(filter(None, sandbox_write)) self.porttrees = list(self.settings.repositories.repoLocationList()) - self.eclassdb = eclass_cache.cache(self.settings.repositories.mainRepoLocation()) # This is used as sanity check for aux_get(). 
If there is no # root eclass dir, we assume that PORTDIR is invalid or @@ -121,86 +116,74 @@ class portdbapi(dbapi): self._have_root_eclass_dir = os.path.isdir( os.path.join(self.settings.repositories.mainRepoLocation(), "eclass")) - self.metadbmodule = self.settings.load_best_module("portdbapi.metadbmodule") - #if the portdbapi is "frozen", then we assume that we can cache everything (that no updates to it are happening) self.xcache = {} self.frozen = 0 - #Create eclass dbs - self._repo_info = {} - eclass_dbs = {self.settings.repositories.mainRepoLocation() : self.eclassdb} - for repo in self.repositories: - if repo.location in self._repo_info: - continue - - eclass_db = None - for eclass_location in repo.eclass_locations: - tree_db = eclass_dbs.get(eclass_location) - if tree_db is None: - tree_db = eclass_cache.cache(eclass_location) - eclass_dbs[eclass_location] = tree_db - if eclass_db is None: - eclass_db = tree_db.copy() - else: - eclass_db.append(tree_db) - - self._repo_info[repo.location] = _repo_info(repo.name, repo.location, eclass_db) - #Keep a list of repo names, sorted by priority (highest priority first). self._ordered_repo_name_list = tuple(reversed(self.repositories.prepos_order)) self.auxdbmodule = self.settings.load_best_module("portdbapi.auxdbmodule") self.auxdb = {} self._pregen_auxdb = {} + # If the current user doesn't have depcachedir write permission, + # then the depcachedir cache is kept here read-only access. 
+ self._ro_auxdb = {} self._init_cache_dirs() - depcachedir_w_ok = os.access(self.depcachedir, os.W_OK) - cache_kwargs = { - 'gid' : portage_gid, - 'perms' : 0o664 - } - - # XXX: REMOVE THIS ONCE UNUSED_0 IS YANKED FROM auxdbkeys - # ~harring - filtered_auxdbkeys = [x for x in auxdbkeys if not x.startswith("UNUSED_0")] - filtered_auxdbkeys.sort() + try: + depcachedir_st = os.stat(self.depcachedir) + depcachedir_w_ok = os.access(self.depcachedir, os.W_OK) + except OSError: + depcachedir_st = None + depcachedir_w_ok = False + + cache_kwargs = {} + + depcachedir_unshared = False + if portage.data.secpass < 1 and \ + depcachedir_w_ok and \ + depcachedir_st is not None and \ + os.getuid() == depcachedir_st.st_uid and \ + os.getgid() == depcachedir_st.st_gid: + # If this user owns depcachedir and is not in the + # portage group, then don't bother to set permissions + # on cache entries. This makes it possible to run + # egencache without any need to be a member of the + # portage group. + depcachedir_unshared = True + else: + cache_kwargs.update({ + 'gid' : portage_gid, + 'perms' : 0o664 + }) + # If secpass < 1, we don't want to write to the cache # since then we won't be able to apply group permissions # to the cache entries/directories. 
- if secpass < 1 or not depcachedir_w_ok: + if (secpass < 1 and not depcachedir_unshared) or not depcachedir_w_ok: for x in self.porttrees: + self.auxdb[x] = volatile.database( + self.depcachedir, x, self._known_keys, + **cache_kwargs) try: - db_ro = self.auxdbmodule(self.depcachedir, x, - filtered_auxdbkeys, readonly=True, **cache_kwargs) + self._ro_auxdb[x] = self.auxdbmodule(self.depcachedir, x, + self._known_keys, readonly=True, **cache_kwargs) except CacheError: - self.auxdb[x] = volatile.database( - self.depcachedir, x, filtered_auxdbkeys, - **cache_kwargs) - else: - self.auxdb[x] = metadata_overlay.database( - self.depcachedir, x, filtered_auxdbkeys, - db_rw=volatile.database, db_ro=db_ro, - **cache_kwargs) + pass else: for x in self.porttrees: if x in self.auxdb: continue # location, label, auxdbkeys self.auxdb[x] = self.auxdbmodule( - self.depcachedir, x, filtered_auxdbkeys, **cache_kwargs) - if self.auxdbmodule is metadata_overlay.database: - self.auxdb[x].db_ro.ec = self._repo_info[x].eclass_db + self.depcachedir, x, self._known_keys, **cache_kwargs) if "metadata-transfer" not in self.settings.features: for x in self.porttrees: if x in self._pregen_auxdb: continue - if os.path.isdir(os.path.join(x, "metadata", "cache")): - self._pregen_auxdb[x] = self.metadbmodule( - x, "metadata/cache", filtered_auxdbkeys, readonly=True) - try: - self._pregen_auxdb[x].ec = self._repo_info[x].eclass_db - except AttributeError: - pass + cache = self._create_pregen_cache(x) + if cache is not None: + self._pregen_auxdb[x] = cache # Selectively cache metadata in order to optimize dep matching. 
self._aux_cache_keys = set( ["DEPEND", "EAPI", "INHERITED", "IUSE", "KEYWORDS", "LICENSE", @@ -210,18 +193,28 @@ class portdbapi(dbapi): self._aux_cache = {} self._broken_ebuilds = set() + def _create_pregen_cache(self, tree): + conf = self.repositories.get_repo_for_location(tree) + cache = conf.get_pregenerated_cache( + self._known_keys, readonly=True) + if cache is not None: + try: + cache.ec = self.repositories.get_repo_for_location(tree).eclass_db + except AttributeError: + pass + return cache + def _init_cache_dirs(self): """Create /var/cache/edb/dep and adjust permissions for the portage group.""" dirmode = 0o2070 - filemode = 0o60 modemask = 0o2 try: ensure_dirs(self.depcachedir, gid=portage_gid, mode=dirmode, mask=modemask) - except PortageException as e: + except PortageException: pass def close_caches(self): @@ -260,7 +253,7 @@ class portdbapi(dbapi): @param canonical_repo_path: the canonical path of a repository, as resolved by os.path.realpath() @type canonical_repo_path: String - @returns: The repo_name for the corresponding repository, or None + @return: The repo_name for the corresponding repository, or None if the path does not correspond a known repository @rtype: String or None """ @@ -332,63 +325,33 @@ class portdbapi(dbapi): return (filename, x) return (None, 0) - def _metadata_process(self, cpv, ebuild_path, repo_path): - """ - Create an EbuildMetadataPhase instance to generate metadata for the - give ebuild. - @rtype: EbuildMetadataPhase - @returns: A new EbuildMetadataPhase instance, or None if the - metadata cache is already valid. 
- """ - metadata, st, emtime = self._pull_valid_cache(cpv, ebuild_path, repo_path) - if metadata is not None: - return None - - process = EbuildMetadataPhase(cpv=cpv, ebuild_path=ebuild_path, - ebuild_mtime=emtime, metadata_callback=self._metadata_callback, - portdb=self, repo_path=repo_path, settings=self.doebuild_settings) - return process - - def _metadata_callback(self, cpv, ebuild_path, repo_path, metadata, mtime): - - i = metadata - if hasattr(metadata, "items"): - i = iter(metadata.items()) - metadata = dict(i) - - if metadata.get("INHERITED", False): - metadata["_eclasses_"] = self._repo_info[repo_path - ].eclass_db.get_eclass_data(metadata["INHERITED"].split()) - else: - metadata["_eclasses_"] = {} - - metadata.pop("INHERITED", None) - metadata["_mtime_"] = mtime - - eapi = metadata.get("EAPI") - if not eapi or not eapi.strip(): - eapi = "0" - metadata["EAPI"] = eapi - if not eapi_is_supported(eapi): - for k in set(metadata).difference(("_mtime_", "_eclasses_")): - metadata[k] = "" - metadata["EAPI"] = "-" + eapi.lstrip("-") + def _write_cache(self, cpv, repo_path, metadata, ebuild_hash): try: - self.auxdb[repo_path][cpv] = metadata + cache = self.auxdb[repo_path] + chf = cache.validation_chf + metadata['_%s_' % chf] = getattr(ebuild_hash, chf) except CacheError: # Normally this shouldn't happen, so we'll show # a traceback for debugging purposes. traceback.print_exc() - return metadata + cache = None + + if cache is not None: + try: + cache[cpv] = metadata + except CacheError: + # Normally this shouldn't happen, so we'll show + # a traceback for debugging purposes. + traceback.print_exc() def _pull_valid_cache(self, cpv, ebuild_path, repo_path): try: - # Don't use unicode-wrapped os module, for better performance. 
- st = _os.stat(_unicode_encode(ebuild_path, - encoding=_encodings['fs'], errors='strict')) - emtime = st[stat.ST_MTIME] - except OSError: + ebuild_hash = eclass_cache.hashed_path(ebuild_path) + # snag mtime since we use it later, and to trigger stat failure + # if it doesn't exist + ebuild_hash.mtime + except FileNotFound: writemsg(_("!!! aux_get(): ebuild for " \ "'%s' does not exist at:\n") % (cpv,), noiselevel=-1) writemsg("!!! %s\n" % ebuild_path, noiselevel=-1) @@ -401,39 +364,39 @@ class portdbapi(dbapi): pregen_auxdb = self._pregen_auxdb.get(repo_path) if pregen_auxdb is not None: auxdbs.append(pregen_auxdb) + ro_auxdb = self._ro_auxdb.get(repo_path) + if ro_auxdb is not None: + auxdbs.append(ro_auxdb) auxdbs.append(self.auxdb[repo_path]) - eclass_db = self._repo_info[repo_path].eclass_db + eclass_db = self.repositories.get_repo_for_location(repo_path).eclass_db - doregen = True for auxdb in auxdbs: try: metadata = auxdb[cpv] except KeyError: - pass + continue except CacheError: - if auxdb is not pregen_auxdb: + if not auxdb.readonly: try: del auxdb[cpv] - except KeyError: - pass - except CacheError: + except (KeyError, CacheError): pass - else: - eapi = metadata.get('EAPI', '').strip() - if not eapi: - eapi = '0' - if not (eapi[:1] == '-' and eapi_is_supported(eapi[1:])) and \ - emtime == metadata['_mtime_'] and \ - eclass_db.is_eclass_data_valid(metadata['_eclasses_']): - doregen = False - - if not doregen: + continue + eapi = metadata.get('EAPI', '').strip() + if not eapi: + eapi = '0' + metadata['EAPI'] = eapi + if not eapi_is_supported(eapi): + # Since we're supposed to be able to efficiently obtain the + # EAPI from _parse_eapi_ebuild_head, we disregard cache entries + # for unsupported EAPIs. 
+ continue + if auxdb.validate_entry(metadata, ebuild_hash, eclass_db): break - - if doregen: + else: metadata = None - return (metadata, st, emtime) + return (metadata, ebuild_hash) def aux_get(self, mycpv, mylist, mytree=None, myrepo=None): "stub code for returning auxilliary db information, such as SLOT, DEPEND, etc." @@ -445,15 +408,22 @@ class portdbapi(dbapi): if mytree is None: raise KeyError(myrepo) - if not mytree: + if mytree is not None and len(self.porttrees) == 1 \ + and mytree == self.porttrees[0]: + # mytree matches our only tree, so it's safe to + # ignore mytree and cache the result + mytree = None + myrepo = None + + if mytree is None: cache_me = True - if not mytree and not self._known_keys.intersection( + if mytree is None and not self._known_keys.intersection( mylist).difference(self._aux_cache_keys): aux_cache = self._aux_cache.get(mycpv) if aux_cache is not None: return [aux_cache.get(x, "") for x in mylist] cache_me = True - global auxdbkeys, auxdbkeylen + try: cat, pkg = mycpv.split("/", 1) except ValueError: @@ -467,60 +437,35 @@ class portdbapi(dbapi): _("ebuild not found for '%s'") % mycpv, noiselevel=1) raise KeyError(mycpv) - mydata, st, emtime = self._pull_valid_cache(mycpv, myebuild, mylocation) + mydata, ebuild_hash = self._pull_valid_cache(mycpv, myebuild, mylocation) doregen = mydata is None if doregen: if myebuild in self._broken_ebuilds: raise KeyError(mycpv) - self.doebuild_settings.setcpv(mycpv) - eapi = None - - if eapi is None and \ - 'parse-eapi-ebuild-head' in self.doebuild_settings.features: - eapi = portage._parse_eapi_ebuild_head(io.open( - _unicode_encode(myebuild, - encoding=_encodings['fs'], errors='strict'), - mode='r', encoding=_encodings['repo.content'], - errors='replace')) - - if eapi is not None: - self.doebuild_settings.configdict['pkg']['EAPI'] = eapi - - if eapi is not None and not portage.eapi_is_supported(eapi): - mydata = self._metadata_callback( - mycpv, myebuild, mylocation, {'EAPI':eapi}, emtime) - 
else: - proc = EbuildMetadataPhase(cpv=mycpv, ebuild_path=myebuild, - ebuild_mtime=emtime, - metadata_callback=self._metadata_callback, portdb=self, - repo_path=mylocation, - scheduler=PollScheduler().sched_iface, - settings=self.doebuild_settings) + proc = EbuildMetadataPhase(cpv=mycpv, + ebuild_hash=ebuild_hash, portdb=self, + repo_path=mylocation, scheduler=self._scheduler, + settings=self.doebuild_settings) - proc.start() - proc.wait() + proc.start() + proc.wait() - if proc.returncode != os.EX_OK: - self._broken_ebuilds.add(myebuild) - raise KeyError(mycpv) + if proc.returncode != os.EX_OK: + self._broken_ebuilds.add(myebuild) + raise KeyError(mycpv) - mydata = proc.metadata + mydata = proc.metadata - # do we have a origin repository name for the current package mydata["repository"] = self.repositories.get_name_for_location(mylocation) - mydata["INHERITED"] = ' '.join(mydata.get("_eclasses_", [])) - mydata["_mtime_"] = st[stat.ST_MTIME] - + mydata["_mtime_"] = ebuild_hash.mtime eapi = mydata.get("EAPI") if not eapi: eapi = "0" mydata["EAPI"] = eapi - if not eapi_is_supported(eapi): - for k in set(mydata).difference(("_mtime_", "_eclasses_")): - mydata[k] = "" - mydata["EAPI"] = "-" + eapi.lstrip("-") + if eapi_is_supported(eapi): + mydata["INHERITED"] = " ".join(mydata.get("_eclasses_", [])) #finally, we look at our internal cache entry and return the requested data. returnme = [mydata.get(x, "") for x in mylist] @@ -546,7 +491,7 @@ class portdbapi(dbapi): @param mytree: The canonical path of the tree in which the ebuild is located, or None for automatic lookup @type mypkg: String - @returns: A dict which maps each file name to a set of alternative + @return: A dict which maps each file name to a set of alternative URIs. @rtype: dict """ @@ -565,7 +510,7 @@ class portdbapi(dbapi): # since callers already handle it. 
raise portage.exception.InvalidDependString( "getFetchMap(): '%s' has unsupported EAPI: '%s'" % \ - (mypkg, eapi.lstrip("-"))) + (mypkg, eapi)) return _parse_uri_map(mypkg, {'EAPI':eapi,'SRC_URI':myuris}, use=useflags) @@ -576,7 +521,9 @@ class portdbapi(dbapi): if myebuild is None: raise AssertionError(_("ebuild not found for '%s'") % mypkg) pkgdir = os.path.dirname(myebuild) - mf = Manifest(pkgdir, self.settings["DISTDIR"]) + mf = self.repositories.get_repo_for_location( + os.path.dirname(os.path.dirname(pkgdir))).load_manifest( + pkgdir, self.settings["DISTDIR"]) checksums = mf.getDigests() if not checksums: if debug: @@ -597,7 +544,7 @@ class portdbapi(dbapi): mystat = None try: mystat = os.stat(file_path) - except OSError as e: + except OSError: pass if mystat is None: existing_size = 0 @@ -644,7 +591,9 @@ class portdbapi(dbapi): if myebuild is None: raise AssertionError(_("ebuild not found for '%s'") % mypkg) pkgdir = os.path.dirname(myebuild) - mf = Manifest(pkgdir, self.settings["DISTDIR"]) + mf = self.repositories.get_repo_for_location( + os.path.dirname(os.path.dirname(pkgdir))) + mf = mf.load_manifest(pkgdir, self.settings["DISTDIR"]) mysums = mf.getDigests() failures = {} @@ -706,15 +655,22 @@ class portdbapi(dbapi): return l def cp_list(self, mycp, use_cache=1, mytree=None): + # NOTE: Cache can be safely shared with the match cache, since the + # match cache uses the result from dep_expand for the cache_key. + if self.frozen and mytree is not None \ + and len(self.porttrees) == 1 \ + and mytree == self.porttrees[0]: + # mytree matches our only tree, so it's safe to + # ignore mytree and cache the result + mytree = None + if self.frozen and mytree is None: cachelist = self.xcache["cp-list"].get(mycp) if cachelist is not None: # Try to propagate this to the match-all cache here for # repoman since he uses separate match-all caches for each - # profile (due to old-style virtuals). 
Do not propagate - # old-style virtuals since cp_list() doesn't expand them. - if not (not cachelist and mycp.startswith("virtual/")): - self.xcache["match-all"][mycp] = cachelist + # profile (due to differences in _get_implicit_iuse). + self.xcache["match-all"][(mycp, mycp)] = cachelist return cachelist[:] mysplit = mycp.split("/") invalid_category = mysplit[0] not in self._categories @@ -752,7 +708,7 @@ class portdbapi(dbapi): writemsg(_("\nInvalid ebuild version: %s\n") % \ os.path.join(oroot, mycp, x), noiselevel=-1) continue - d[mysplit[0]+"/"+pf] = None + d[_pkg_str(mysplit[0]+"/"+pf)] = None if invalid_category and d: writemsg(_("\n!!! '%s' has a category that is not listed in " \ "%setc/portage/categories\n") % \ @@ -766,14 +722,11 @@ class portdbapi(dbapi): if self.frozen and mytree is None: cachelist = mylist[:] self.xcache["cp-list"][mycp] = cachelist - # Do not propagate old-style virtuals since - # cp_list() doesn't expand them. - if not (not cachelist and mycp.startswith("virtual/")): - self.xcache["match-all"][mycp] = cachelist + self.xcache["match-all"][(mycp, mycp)] = cachelist return mylist def freeze(self): - for x in "bestmatch-visible", "cp-list", "list-visible", "match-all", \ + for x in "bestmatch-visible", "cp-list", "match-all", \ "match-all-cpv-only", "match-visible", "minimum-all", \ "minimum-visible": self.xcache[x]={} @@ -785,12 +738,12 @@ class portdbapi(dbapi): def xmatch(self,level,origdep,mydep=None,mykey=None,mylist=None): "caching match function; very trick stuff" - #if no updates are being made to the tree, we can consult our xcache... 
- if self.frozen: - try: - return self.xcache[level][origdep][:] - except KeyError: - pass + if level == "list-visible": + level = "match-visible" + warnings.warn("The 'list-visible' mode of " + "portage.dbapi.porttree.portdbapi.xmatch " + "has been renamed to match-visible", + DeprecationWarning, stacklevel=2) if mydep is None: #this stuff only runs on first call of xmatch() @@ -798,12 +751,24 @@ class portdbapi(dbapi): mydep = dep_expand(origdep, mydb=self, settings=self.settings) mykey = mydep.cp + #if no updates are being made to the tree, we can consult our xcache... + cache_key = None + if self.frozen: + cache_key = (mydep, mydep.unevaluated_atom) + try: + return self.xcache[level][cache_key][:] + except KeyError: + pass + myval = None mytree = None if mydep.repo is not None: mytree = self.treemap.get(mydep.repo) if mytree is None: - myval = [] + if level.startswith("match-"): + myval = [] + else: + myval = "" if myval is not None: # Unknown repo, empty result. @@ -822,27 +787,8 @@ class portdbapi(dbapi): myval = match_from_list(mydep, self.cp_list(mykey, mytree=mytree)) - elif level == "list-visible": - #a list of all visible packages, not called directly (just by xmatch()) - #myval = self.visible(self.cp_list(mykey)) - - myval = self.gvisible(self.visible( - self.cp_list(mykey, mytree=mytree))) - elif level == "minimum-all": - # Find the minimum matching version. This is optimized to - # minimize the number of metadata accesses (improves performance - # especially in cases where metadata needs to be generated). - if mydep == mykey: - cpv_iter = iter(self.cp_list(mykey, mytree=mytree)) - else: - cpv_iter = self._iter_match(mydep, - self.cp_list(mykey, mytree=mytree)) - try: - myval = next(cpv_iter) - except StopIteration: - myval = "" - - elif level in ("minimum-visible", "bestmatch-visible"): + elif level in ("bestmatch-visible", "match-all", "match-visible", + "minimum-all", "minimum-visible"): # Find the minimum matching visible version. 
This is optimized to # minimize the number of metadata accesses (improves performance # especially in cases where metadata needs to be generated). @@ -851,158 +797,172 @@ class portdbapi(dbapi): else: mylist = match_from_list(mydep, self.cp_list(mykey, mytree=mytree)) - myval = "" - settings = self.settings - local_config = settings.local_config + + visibility_filter = level not in ("match-all", "minimum-all") + single_match = level not in ("match-all", "match-visible") + myval = [] aux_keys = list(self._aux_cache_keys) - if level == "minimum-visible": + if level == "bestmatch-visible": + iterfunc = reversed + else: iterfunc = iter + + if mydep.repo is not None: + repos = [mydep.repo] else: - iterfunc = reversed + # We iterate over self.porttrees, since it's common to + # tweak this attribute in order to adjust match behavior. + repos = [] + for tree in reversed(self.porttrees): + repos.append(self.repositories.get_name_for_location(tree)) + for cpv in iterfunc(mylist): - try: - metadata = dict(zip(aux_keys, - self.aux_get(cpv, aux_keys))) - except KeyError: - # ebuild masked by corruption - continue - if not eapi_is_supported(metadata["EAPI"]): - continue - if mydep.slot and mydep.slot != metadata["SLOT"]: - continue - if settings._getMissingKeywords(cpv, metadata): - continue - if settings._getMaskAtom(cpv, metadata): - continue - if settings._getProfileMaskAtom(cpv, metadata): - continue - if local_config: - metadata["USE"] = "" - if "?" in metadata["LICENSE"] or "?" 
in metadata["PROPERTIES"]: - self.doebuild_settings.setcpv(cpv, mydb=metadata) - metadata["USE"] = self.doebuild_settings.get("USE", "") + for repo in repos: try: - if settings._getMissingLicenses(cpv, metadata): - continue - if settings._getMissingProperties(cpv, metadata): - continue - except InvalidDependString: + metadata = dict(zip(aux_keys, + self.aux_get(cpv, aux_keys, myrepo=repo))) + except KeyError: + # ebuild not in this repo, or masked by corruption continue - if mydep.use: - has_iuse = False - for has_iuse in self._iter_match_use(mydep, [cpv]): - break - if not has_iuse: + + if visibility_filter and not self._visible(cpv, metadata): continue - myval = cpv - break + + if mydep.slot is not None and \ + mydep.slot != metadata["SLOT"]: + continue + + if mydep.unevaluated_atom.use is not None and \ + not self._match_use(mydep, cpv, metadata): + continue + + myval.append(cpv) + # only yield a given cpv once + break + + if myval and single_match: + break + + if single_match: + if myval: + myval = myval[0] + else: + myval = "" + elif level == "bestmatch-list": #dep match -- find best match but restrict search to sublist - #no point in calling xmatch again since we're not caching list deps - + warnings.warn("The 'bestmatch-list' mode of " + "portage.dbapi.porttree.portdbapi.xmatch is deprecated", + DeprecationWarning, stacklevel=2) myval = best(list(self._iter_match(mydep, mylist))) elif level == "match-list": #dep match -- find all matches but restrict search to sublist (used in 2nd half of visible()) - + warnings.warn("The 'match-list' mode of " + "portage.dbapi.porttree.portdbapi.xmatch is deprecated", + DeprecationWarning, stacklevel=2) myval = list(self._iter_match(mydep, mylist)) - elif level == "match-visible": - #dep match -- find all visible matches - #get all visible packages, then get the matching ones - myval = list(self._iter_match(mydep, - self.xmatch("list-visible", mykey, mydep=Atom(mykey), mykey=mykey))) - elif level == "match-all": - #match 
*all* visible *and* masked packages - if mydep == mykey: - myval = self.cp_list(mykey, mytree=mytree) - else: - myval = list(self._iter_match(mydep, - self.cp_list(mykey, mytree=mytree))) else: raise AssertionError( "Invalid level argument: '%s'" % level) - if self.frozen and (level not in ["match-list", "bestmatch-list"]): - self.xcache[level][mydep] = myval - if origdep and origdep != mydep: - self.xcache[level][origdep] = myval - return myval[:] + if self.frozen: + xcache_this_level = self.xcache.get(level) + if xcache_this_level is not None: + xcache_this_level[cache_key] = myval + if not isinstance(myval, _pkg_str): + myval = myval[:] + + return myval def match(self, mydep, use_cache=1): return self.xmatch("match-visible", mydep) - def visible(self, mylist): - """two functions in one. Accepts a list of cpv values and uses the package.mask *and* - packages file to remove invisible entries, returning remaining items. This function assumes - that all entries in mylist have the same category and package name.""" - if not mylist: - return [] - - db_keys = ["SLOT"] - visible = [] - getMaskAtom = self.settings._getMaskAtom - getProfileMaskAtom = self.settings._getProfileMaskAtom - for cpv in mylist: - try: - metadata = dict(zip(db_keys, self.aux_get(cpv, db_keys))) - except KeyError: - # masked by corruption - continue - if not metadata["SLOT"]: - continue - if getMaskAtom(cpv, metadata): - continue - if getProfileMaskAtom(cpv, metadata): - continue - visible.append(cpv) - return visible - - def gvisible(self,mylist): - "strip out group-masked (not in current group) entries" + def gvisible(self, mylist): + warnings.warn("The 'gvisible' method of " + "portage.dbapi.porttree.portdbapi " + "is deprecated", + DeprecationWarning, stacklevel=2) + return list(self._iter_visible(iter(mylist))) - if mylist is None: + def visible(self, cpv_iter): + warnings.warn("The 'visible' method of " + "portage.dbapi.porttree.portdbapi " + "is deprecated", + DeprecationWarning, 
stacklevel=2) + if cpv_iter is None: return [] - newlist=[] + return list(self._iter_visible(iter(cpv_iter))) + + def _iter_visible(self, cpv_iter, myrepo=None): + """ + Return a new list containing only visible packages. + """ aux_keys = list(self._aux_cache_keys) metadata = {} - local_config = self.settings.local_config - chost = self.settings.get('CHOST', '') - accept_chost = self.settings._accept_chost - for mycpv in mylist: - metadata.clear() - try: - metadata.update(zip(aux_keys, self.aux_get(mycpv, aux_keys))) - except KeyError: - continue - except PortageException as e: - writemsg("!!! Error: aux_get('%s', %s)\n" % (mycpv, aux_keys), - noiselevel=-1) - writemsg("!!! %s\n" % (e,), noiselevel=-1) - del e - continue - eapi = metadata["EAPI"] - if not eapi_is_supported(eapi): - continue - if _eapi_is_deprecated(eapi): - continue - if self.settings._getMissingKeywords(mycpv, metadata): - continue - if local_config: - metadata['CHOST'] = chost - if not accept_chost(mycpv, metadata): - continue - metadata["USE"] = "" - if "?" in metadata["LICENSE"] or "?" in metadata["PROPERTIES"]: - self.doebuild_settings.setcpv(mycpv, mydb=metadata) - metadata['USE'] = self.doebuild_settings['PORTAGE_USE'] + + if myrepo is not None: + repos = [myrepo] + else: + # We iterate over self.porttrees, since it's common to + # tweak this attribute in order to adjust match behavior. + repos = [] + for tree in reversed(self.porttrees): + repos.append(self.repositories.get_name_for_location(tree)) + + for mycpv in cpv_iter: + for repo in repos: + metadata.clear() try: - if self.settings._getMissingLicenses(mycpv, metadata): - continue - if self.settings._getMissingProperties(mycpv, metadata): - continue - except InvalidDependString: + metadata.update(zip(aux_keys, + self.aux_get(mycpv, aux_keys, myrepo=repo))) + except KeyError: + continue + except PortageException as e: + writemsg("!!! Error: aux_get('%s', %s)\n" % + (mycpv, aux_keys), noiselevel=-1) + writemsg("!!! 
%s\n" % (e,), noiselevel=-1) + del e continue - newlist.append(mycpv) - return newlist + + if not self._visible(mycpv, metadata): + continue + + yield mycpv + # only yield a given cpv once + break + + def _visible(self, cpv, metadata): + eapi = metadata["EAPI"] + if not eapi_is_supported(eapi): + return False + if _eapi_is_deprecated(eapi): + return False + if not metadata["SLOT"]: + return False + + settings = self.settings + if settings._getMaskAtom(cpv, metadata): + return False + if settings._getMissingKeywords(cpv, metadata): + return False + if settings.local_config: + metadata['CHOST'] = settings.get('CHOST', '') + if not settings._accept_chost(cpv, metadata): + return False + metadata["USE"] = "" + if "?" in metadata["LICENSE"] or \ + "?" in metadata["PROPERTIES"]: + self.doebuild_settings.setcpv(cpv, mydb=metadata) + metadata['USE'] = self.doebuild_settings['PORTAGE_USE'] + try: + if settings._getMissingLicenses(cpv, metadata): + return False + if settings._getMissingProperties(cpv, metadata): + return False + except InvalidDependString: + return False + + return True def close_portdbapi_caches(): for i in portdbapi.portdbapi_instances: @@ -1011,7 +971,7 @@ def close_portdbapi_caches(): portage.process.atexit_register(portage.portageexit) class portagetree(object): - def __init__(self, root=None, virtual=None, settings=None): + def __init__(self, root=None, virtual=DeprecationWarning, settings=None): """ Constructor for a PortageTree @@ -1034,8 +994,14 @@ class portagetree(object): "settings['ROOT'] instead.", DeprecationWarning, stacklevel=2) + if virtual is not DeprecationWarning: + warnings.warn("The 'virtual' parameter of the " + "portage.dbapi.porttree.portagetree" + " constructor is unused", + DeprecationWarning, stacklevel=2) + self.portroot = settings["PORTDIR"] - self.virtual = virtual + self.__virtual = virtual self.dbapi = portdbapi(mysettings=settings) @property @@ -1044,9 +1010,17 @@ class portagetree(object): 
"portage.dbapi.porttree.portagetree" + \ " is deprecated. Use " + \ "settings['ROOT'] instead.", - DeprecationWarning, stacklevel=2) + DeprecationWarning, stacklevel=3) return self.settings['ROOT'] + @property + def virtual(self): + warnings.warn("The 'virtual' attribute of " + \ + "portage.dbapi.porttree.portagetree" + \ + " is deprecated.", + DeprecationWarning, stacklevel=3) + return self.__virtual + def dep_bestmatch(self,mydep): "compatibility method" mymatch = self.dbapi.xmatch("bestmatch-visible",mydep) @@ -1077,17 +1051,14 @@ class portagetree(object): psplit = pkgsplit(mysplit[1]) return "/".join([self.portroot, mysplit[0], psplit[0], mysplit[1]])+".ebuild" - def depcheck(self, mycheck, use="yes", myusesplit=None): - return dep_check(mycheck, self.dbapi, use=use, myuse=myusesplit) - def getslot(self,mycatpkg): "Get a slot for a catpkg; assume it exists." myslot = "" try: myslot = self.dbapi.aux_get(mycatpkg, ["SLOT"])[0] - except SystemExit as e: + except SystemExit: raise - except Exception as e: + except Exception: pass return myslot @@ -1148,7 +1119,7 @@ def _parse_uri_map(cpv, metadata, use=None): while myuris: uri = myuris.pop() if myuris and myuris[-1] == "->": - operator = myuris.pop() + myuris.pop() distfile = myuris.pop() else: distfile = os.path.basename(uri) @@ -1163,6 +1134,5 @@ def _parse_uri_map(cpv, metadata, use=None): uri_map[distfile] = uri_set uri_set.add(uri) uri = None - operator = None return uri_map diff --git a/portage_with_autodep/pym/portage/dbapi/porttree.pyo b/portage_with_autodep/pym/portage/dbapi/porttree.pyo Binary files differnew file mode 100644 index 0000000..fb57919 --- /dev/null +++ b/portage_with_autodep/pym/portage/dbapi/porttree.pyo diff --git a/portage_with_autodep/pym/portage/dbapi/vartree.py b/portage_with_autodep/pym/portage/dbapi/vartree.py index 7f7873b..517c873 100644 --- a/portage_with_autodep/pym/portage/dbapi/vartree.py +++ b/portage_with_autodep/pym/portage/dbapi/vartree.py @@ -1,4 +1,4 @@ -# Copyright 
1998-2011 Gentoo Foundation +# Copyright 1998-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = [ @@ -18,7 +18,7 @@ portage.proxy.lazyimport.lazyimport(globals(), 'portage.locks:lockdir,unlockdir,lockfile,unlockfile', 'portage.output:bold,colorize', 'portage.package.ebuild.doebuild:doebuild_environment,' + \ - '_spawn_phase', + '_merge_unicode_error', '_spawn_phase', 'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs', 'portage.update:fixdbentries', 'portage.util:apply_secpass_permissions,ConfigProtect,ensure_dirs,' + \ @@ -27,10 +27,13 @@ portage.proxy.lazyimport.lazyimport(globals(), 'portage.util.digraph:digraph', 'portage.util.env_update:env_update', 'portage.util.listdir:dircache,listdir', + 'portage.util.movefile:movefile', 'portage.util._dyn_libs.PreservedLibsRegistry:PreservedLibsRegistry', 'portage.util._dyn_libs.LinkageMapELF:LinkageMapELF@LinkageMap', - 'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,pkgcmp,' + \ - '_pkgsplit@pkgsplit', + 'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,vercmp,' + \ + '_pkgsplit@pkgsplit,_pkg_str', + 'subprocess', + 'tarfile', ) from portage.const import CACHE_PATH, CONFIG_MEMORY_FILE, \ @@ -41,12 +44,12 @@ from portage.exception import CommandNotFound, \ InvalidData, InvalidLocation, InvalidPackageName, \ FileNotFound, PermissionDenied, UnsupportedAPIException from portage.localization import _ -from portage.util.movefile import movefile from portage import abssymlink, _movefile, bsd_chflags # This is a special version of the os module, wrapped for unicode support. 
from portage import os +from portage import shutil from portage import _encodings from portage import _os_merge from portage import _selinux_merge @@ -60,13 +63,15 @@ from _emerge.PollScheduler import PollScheduler from _emerge.MiscFunctionsProcess import MiscFunctionsProcess import errno +import fnmatch import gc +import grp import io from itertools import chain import logging import os as _os +import pwd import re -import shutil import stat import sys import tempfile @@ -82,6 +87,9 @@ except ImportError: if sys.hexversion >= 0x3000000: basestring = str long = int + _unicode = str +else: + _unicode = unicode class vardbapi(dbapi): @@ -129,12 +137,11 @@ class vardbapi(dbapi): if settings is None: settings = portage.settings self.settings = settings - self.root = settings['ROOT'] - if _unused_param is not None and _unused_param != self.root: - warnings.warn("The first parameter of the " + \ - "portage.dbapi.vartree.vardbapi" + \ - " constructor is now unused. Use " + \ + if _unused_param is not None and _unused_param != settings['ROOT']: + warnings.warn("The first parameter of the " + "portage.dbapi.vartree.vardbapi" + " constructor is now unused. 
Use " "settings['ROOT'] instead.", DeprecationWarning, stacklevel=2) @@ -148,14 +155,14 @@ class vardbapi(dbapi): self._fs_lock_count = 0 if vartree is None: - vartree = portage.db[self.root]["vartree"] + vartree = portage.db[settings['EROOT']]['vartree'] self.vartree = vartree self._aux_cache_keys = set( ["BUILD_TIME", "CHOST", "COUNTER", "DEPEND", "DESCRIPTION", "EAPI", "HOMEPAGE", "IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND", "repository", "RESTRICT" , "SLOT", "USE", "DEFINED_PHASES", - "REQUIRED_USE"]) + ]) self._aux_cache_obj = None self._aux_cache_filename = os.path.join(self._eroot, CACHE_PATH, "vdb_metadata.pickle") @@ -164,7 +171,7 @@ class vardbapi(dbapi): self._plib_registry = None if _ENABLE_PRESERVE_LIBS: - self._plib_registry = PreservedLibsRegistry(self.root, + self._plib_registry = PreservedLibsRegistry(settings["ROOT"], os.path.join(self._eroot, PRIVATE_PATH, "preserved_libs_registry")) @@ -175,6 +182,15 @@ class vardbapi(dbapi): self._cached_counter = None + @property + def root(self): + warnings.warn("The root attribute of " + "portage.dbapi.vartree.vardbapi" + " is deprecated. Use " + "settings['ROOT'] instead.", + DeprecationWarning, stacklevel=3) + return self.settings['ROOT'] + def getpath(self, mykey, filename=None): # This is an optimized hotspot, so don't use unicode-wrapped # os module and don't use os.path.join(). 
@@ -373,7 +389,7 @@ class vardbapi(dbapi): continue if len(mysplit) > 1: if ps[0] == mysplit[1]: - returnme.append(mysplit[0]+"/"+x) + returnme.append(_pkg_str(mysplit[0]+"/"+x)) self._cpv_sort_ascending(returnme) if use_cache: self.cpcache[mycp] = [mystat, returnme[:]] @@ -472,6 +488,7 @@ class vardbapi(dbapi): "caching match function" mydep = dep_expand( origdep, mydb=self, use_cache=use_cache, settings=self.settings) + cache_key = (mydep, mydep.unevaluated_atom) mykey = dep_getkey(mydep) mycat = catsplit(mykey)[0] if not use_cache: @@ -493,8 +510,8 @@ class vardbapi(dbapi): if mydep not in self.matchcache[mycat]: mymatch = list(self._iter_match(mydep, self.cp_list(mydep.cp, use_cache=use_cache))) - self.matchcache[mycat][mydep] = mymatch - return self.matchcache[mycat][mydep][:] + self.matchcache[mycat][cache_key] = mymatch + return self.matchcache[mycat][cache_key][:] def findname(self, mycpv, myrepo=None): return self.getpath(str(mycpv), filename=catsplit(mycpv)[1]+".ebuild") @@ -555,8 +572,11 @@ class vardbapi(dbapi): aux_cache = mypickle.load() f.close() del f - except (IOError, OSError, EOFError, ValueError, pickle.UnpicklingError) as e: - if isinstance(e, pickle.UnpicklingError): + except (AttributeError, EOFError, EnvironmentError, ValueError, pickle.UnpicklingError) as e: + if isinstance(e, EnvironmentError) and \ + getattr(e, 'errno', None) in (errno.ENOENT, errno.EACCES): + pass + else: writemsg(_unicode_decode(_("!!! 
Error loading '%s': %s\n")) % \ (self._aux_cache_filename, e), noiselevel=-1) del e @@ -610,7 +630,8 @@ class vardbapi(dbapi): cache_these_wants.add(x) if not cache_these_wants: - return self._aux_get(mycpv, wants) + mydata = self._aux_get(mycpv, wants) + return [mydata[x] for x in wants] cache_these = set(self._aux_cache_keys) cache_these.update(cache_these_wants) @@ -655,16 +676,15 @@ class vardbapi(dbapi): if pull_me: # pull any needed data and cache it aux_keys = list(pull_me) - for k, v in zip(aux_keys, - self._aux_get(mycpv, aux_keys, st=mydir_stat)): - mydata[k] = v + mydata.update(self._aux_get(mycpv, aux_keys, st=mydir_stat)) if not cache_valid or cache_these.difference(metadata): cache_data = {} if cache_valid and metadata: cache_data.update(metadata) for aux_key in cache_these: cache_data[aux_key] = mydata[aux_key] - self._aux_cache["packages"][mycpv] = (mydir_mtime, cache_data) + self._aux_cache["packages"][_unicode(mycpv)] = \ + (mydir_mtime, cache_data) self._aux_cache["modified"].add(mycpv) if _slot_re.match(mydata['SLOT']) is None: @@ -688,10 +708,11 @@ class vardbapi(dbapi): raise if not stat.S_ISDIR(st.st_mode): raise KeyError(mycpv) - results = [] + results = {} + env_keys = [] for x in wants: if x == "_mtime_": - results.append(st[stat.ST_MTIME]) + results[x] = st[stat.ST_MTIME] continue try: myf = io.open( @@ -703,16 +724,103 @@ class vardbapi(dbapi): myd = myf.read() finally: myf.close() - # Preserve \n for metadata that is known to - # contain multiple lines. - if self._aux_multi_line_re.match(x) is None: - myd = " ".join(myd.split()) except IOError: + if x not in self._aux_cache_keys and \ + self._aux_cache_keys_re.match(x) is None: + env_keys.append(x) + continue myd = _unicode_decode('') - if x == "EAPI" and not myd: - results.append(_unicode_decode('0')) - else: - results.append(myd) + + # Preserve \n for metadata that is known to + # contain multiple lines. 
+ if self._aux_multi_line_re.match(x) is None: + myd = " ".join(myd.split()) + + results[x] = myd + + if env_keys: + env_results = self._aux_env_search(mycpv, env_keys) + for k in env_keys: + v = env_results.get(k) + if v is None: + v = _unicode_decode('') + if self._aux_multi_line_re.match(k) is None: + v = " ".join(v.split()) + results[k] = v + + if results.get("EAPI") == "": + results[_unicode_decode("EAPI")] = _unicode_decode('0') + + return results + + def _aux_env_search(self, cpv, variables): + """ + Search environment.bz2 for the specified variables. Returns + a dict mapping variables to values, and any variables not + found in the environment will not be included in the dict. + This is useful for querying variables like ${SRC_URI} and + ${A}, which are not saved in separate files but are available + in environment.bz2 (see bug #395463). + """ + env_file = self.getpath(cpv, filename="environment.bz2") + if not os.path.isfile(env_file): + return {} + bunzip2_cmd = portage.util.shlex_split( + self.settings.get("PORTAGE_BUNZIP2_COMMAND", "")) + if not bunzip2_cmd: + bunzip2_cmd = portage.util.shlex_split( + self.settings["PORTAGE_BZIP2_COMMAND"]) + bunzip2_cmd.append("-d") + args = bunzip2_cmd + ["-c", env_file] + try: + proc = subprocess.Popen(args, stdout=subprocess.PIPE) + except EnvironmentError as e: + if e.errno != errno.ENOENT: + raise + raise portage.exception.CommandNotFound(args[0]) + + # Parts of the following code are borrowed from + # filter-bash-environment.py (keep them in sync). 
+ var_assign_re = re.compile(r'(^|^declare\s+-\S+\s+|^declare\s+|^export\s+)([^=\s]+)=("|\')?(.*)$') + close_quote_re = re.compile(r'(\\"|"|\')\s*$') + def have_end_quote(quote, line): + close_quote_match = close_quote_re.search(line) + return close_quote_match is not None and \ + close_quote_match.group(1) == quote + + variables = frozenset(variables) + results = {} + for line in proc.stdout: + line = _unicode_decode(line, + encoding=_encodings['content'], errors='replace') + var_assign_match = var_assign_re.match(line) + if var_assign_match is not None: + key = var_assign_match.group(2) + quote = var_assign_match.group(3) + if quote is not None: + if have_end_quote(quote, + line[var_assign_match.end(2)+2:]): + value = var_assign_match.group(4) + else: + value = [var_assign_match.group(4)] + for line in proc.stdout: + line = _unicode_decode(line, + encoding=_encodings['content'], + errors='replace') + value.append(line) + if have_end_quote(quote, line): + break + value = ''.join(value) + # remove trailing quote and whitespace + value = value.rstrip()[:-1] + else: + value = var_assign_match.group(4).rstrip() + + if key in variables: + results[key] = value + + proc.wait() + proc.stdout.close() return results def aux_update(self, cpv, values): @@ -758,8 +866,7 @@ class vardbapi(dbapi): @param myroot: ignored, self._eroot is used instead """ - myroot = None - new_vdb = False + del myroot counter = -1 try: cfile = io.open( @@ -768,8 +875,9 @@ class vardbapi(dbapi): mode='r', encoding=_encodings['repo.content'], errors='replace') except EnvironmentError as e: - new_vdb = not bool(self.cpv_all()) - if not new_vdb: + # Silently allow ENOENT since files under + # /var/cache/ are allowed to disappear. + if e.errno != errno.ENOENT: writemsg(_("!!! Unable to read COUNTER file: '%s'\n") % \ self._counter_path, noiselevel=-1) writemsg("!!! 
%s\n" % str(e), noiselevel=-1) @@ -806,10 +914,6 @@ class vardbapi(dbapi): if pkg_counter > max_counter: max_counter = pkg_counter - if counter < 0 and not new_vdb: - writemsg(_("!!! Initializing COUNTER to " \ - "value of %d\n") % max_counter, noiselevel=-1) - return max_counter + 1 def counter_tick_core(self, myroot=None, incrementing=1, mycpv=None): @@ -823,7 +927,7 @@ class vardbapi(dbapi): @param myroot: ignored, self._eroot is used instead @param mycpv: ignored @rtype: int - @returns: new counter value + @return: new counter value """ myroot = None mycpv = None @@ -959,7 +1063,7 @@ class vardbapi(dbapi): counter = int(counter) except ValueError: counter = 0 - return (cpv, counter, mtime) + return (_unicode(cpv), counter, mtime) class _owners_db(object): @@ -1149,24 +1253,38 @@ class vardbapi(dbapi): class vartree(object): "this tree will scan a var/db/pkg database located at root (passed to init)" - def __init__(self, root=None, virtual=None, categories=None, + def __init__(self, root=None, virtual=DeprecationWarning, categories=None, settings=None): if settings is None: settings = portage.settings - self.root = settings['ROOT'] - if root is not None and root != self.root: - warnings.warn("The 'root' parameter of the " + \ - "portage.dbapi.vartree.vartree" + \ - " constructor is now unused. Use " + \ + if root is not None and root != settings['ROOT']: + warnings.warn("The 'root' parameter of the " + "portage.dbapi.vartree.vartree" + " constructor is now unused. Use " "settings['ROOT'] instead.", DeprecationWarning, stacklevel=2) + if virtual is not DeprecationWarning: + warnings.warn("The 'virtual' parameter of the " + "portage.dbapi.vartree.vartree" + " constructor is unused", + DeprecationWarning, stacklevel=2) + self.settings = settings self.dbapi = vardbapi(settings=settings, vartree=self) self.populated = 1 + @property + def root(self): + warnings.warn("The root attribute of " + "portage.dbapi.vartree.vartree" + " is deprecated. 
Use " + "settings['ROOT'] instead.", + DeprecationWarning, stacklevel=3) + return self.settings['ROOT'] + def getpath(self, mykey, filename=None): return self.dbapi.getpath(mykey, filename=filename) @@ -1276,6 +1394,20 @@ class dblink(object): r')$' ) + # These files are generated by emerge, so we need to remove + # them when they are the only thing left in a directory. + _infodir_cleanup = frozenset(["dir", "dir.old"]) + + _ignored_unlink_errnos = ( + errno.EBUSY, errno.ENOENT, + errno.ENOTDIR, errno.EISDIR) + + _ignored_rmdir_errnos = ( + errno.EEXIST, errno.ENOTEMPTY, + errno.EBUSY, errno.ENOENT, + errno.ENOTDIR, errno.EISDIR, + errno.EPERM) + def __init__(self, cat, pkg, myroot=None, settings=None, treetype=None, vartree=None, blockers=None, scheduler=None, pipe=None): """ @@ -1300,22 +1432,23 @@ class dblink(object): raise TypeError("settings argument is required") mysettings = settings - myroot = settings['ROOT'] + self._eroot = mysettings['EROOT'] self.cat = cat self.pkg = pkg self.mycpv = self.cat + "/" + self.pkg - self.mysplit = list(catpkgsplit(self.mycpv)[1:]) - self.mysplit[0] = "%s/%s" % (self.cat, self.mysplit[0]) + if self.mycpv == settings.mycpv and \ + isinstance(settings.mycpv, _pkg_str): + self.mycpv = settings.mycpv + else: + self.mycpv = _pkg_str(self.mycpv) + self.mysplit = list(self.mycpv.cpv_split[1:]) + self.mysplit[0] = self.mycpv.cp self.treetype = treetype if vartree is None: - vartree = portage.db[myroot]["vartree"] + vartree = portage.db[self._eroot]["vartree"] self.vartree = vartree self._blockers = blockers self._scheduler = scheduler - - # WARNING: EROOT support is experimental and may be incomplete - # for cases in which EPREFIX is non-empty. 
- self._eroot = mysettings['EROOT'] self.dbroot = normalize_path(os.path.join(self._eroot, VDB_PATH)) self.dbcatdir = self.dbroot+"/"+cat self.dbpkgdir = self.dbcatdir+"/"+pkg @@ -1324,14 +1457,14 @@ class dblink(object): self.settings = mysettings self._verbose = self.settings.get("PORTAGE_VERBOSE") == "1" - self.myroot=myroot + self.myroot = self.settings['ROOT'] self._installed_instance = None self.contentscache = None self._contents_inodes = None self._contents_basenames = None self._linkmap_broken = False - self._md5_merge_map = {} - self._hash_key = (self.myroot, self.mycpv) + self._hardlink_merge_map = {} + self._hash_key = (self._eroot, self.mycpv) self._protect_obj = None self._pipe = pipe @@ -1610,7 +1743,7 @@ class dblink(object): PreservedLibsRegistry yet. @type preserve_paths: set @rtype: Integer - @returns: + @return: 1. os.EX_OK if everything went well. 2. return code of the failed phase (for prerm, postrm, cleanrm) """ @@ -1839,16 +1972,19 @@ class dblink(object): else: self.settings.pop("PORTAGE_LOG_FILE", None) - # Lock the config memory file to prevent symlink creation - # in merge_contents from overlapping with env-update. - self.vartree.dbapi._fs_lock() - try: - env_update(target_root=self.settings['ROOT'], - prev_mtimes=ldpath_mtimes, - contents=contents, env=self.settings.environ(), - writemsg_level=self._display_merge) - finally: - self.vartree.dbapi._fs_unlock() + env_update(target_root=self.settings['ROOT'], + prev_mtimes=ldpath_mtimes, + contents=contents, env=self.settings, + writemsg_level=self._display_merge, vardbapi=self.vartree.dbapi) + + unmerge_with_replacement = preserve_paths is not None + if not unmerge_with_replacement: + # When there's a replacement package which calls us via treewalk, + # treewalk will automatically call _prune_plib_registry for us. + # Otherwise, we need to call _prune_plib_registry ourselves. 
+ # Don't pass in the "unmerge=True" flag here, since that flag + # is intended to be used _prior_ to unmerge, not after. + self._prune_plib_registry() return os.EX_OK @@ -1871,6 +2007,10 @@ class dblink(object): log_path=log_path, background=background, level=level, noiselevel=noiselevel) + def _show_unmerge(self, zing, desc, file_type, file_name): + self._display_merge("%s %s %s %s\n" % \ + (zing, desc.ljust(8), file_type, file_name)) + def _unmerge_pkgfiles(self, pkgfiles, others_in_slot): """ @@ -1887,6 +2027,9 @@ class dblink(object): os = _os_merge perf_md5 = perform_md5 showMessage = self._display_merge + show_unmerge = self._show_unmerge + ignored_unlink_errnos = self._ignored_unlink_errnos + ignored_rmdir_errnos = self._ignored_rmdir_errnos if not pkgfiles: showMessage(_("No package files given... Grabbing a set.\n")) @@ -1904,9 +2047,6 @@ class dblink(object): settings=self.settings, vartree=self.vartree, treetype="vartree", pipe=self._pipe)) - dest_root = self._eroot - dest_root_len = len(dest_root) - 1 - cfgfiledict = grabdict(self.vartree.dbapi._conf_mem_file) stale_confmem = [] protected_symlinks = {} @@ -1922,14 +2062,6 @@ class dblink(object): #process symlinks second-to-last, directories last. 
mydirs = set() - ignored_unlink_errnos = ( - errno.EBUSY, errno.ENOENT, - errno.ENOTDIR, errno.EISDIR) - ignored_rmdir_errnos = ( - errno.EEXIST, errno.ENOTEMPTY, - errno.EBUSY, errno.ENOENT, - errno.ENOTDIR, errno.EISDIR, - errno.EPERM) modprotect = os.path.join(self._eroot, "lib/modules/") def unlink(file_name, lstatobj): @@ -1965,10 +2097,6 @@ class dblink(object): # Restore the parent flags we saved before unlinking bsd_chflags.chflags(parent_name, pflags) - def show_unmerge(zing, desc, file_type, file_name): - showMessage("%s %s %s %s\n" % \ - (zing, desc.ljust(8), file_type, file_name)) - unmerge_desc = {} unmerge_desc["cfgpro"] = _("cfgpro") unmerge_desc["replaced"] = _("replaced") @@ -1980,14 +2108,12 @@ class dblink(object): unmerge_desc["!mtime"] = _("!mtime") unmerge_desc["!obj"] = _("!obj") unmerge_desc["!sym"] = _("!sym") + unmerge_desc["!prefix"] = _("!prefix") real_root = self.settings['ROOT'] real_root_len = len(real_root) - 1 - eroot_split_len = len(self.settings["EROOT"].split(os.sep)) - 1 + eroot = self.settings["EROOT"] - # These files are generated by emerge, so we need to remove - # them when they are the only thing left in a directory. 
- infodir_cleanup = frozenset(["dir", "dir.old"]) infodirs = frozenset(infodir for infodir in chain( self.settings.get("INFOPATH", "").split(":"), self.settings.get("INFODIR", "").split(":")) if infodir) @@ -2023,6 +2149,12 @@ class dblink(object): file_data = pkgfiles[objkey] file_type = file_data[0] + + # don't try to unmerge the prefix offset itself + if len(obj) <= len(eroot) or not obj.startswith(eroot): + show_unmerge("---", unmerge_desc["!prefix"], file_type, obj) + continue + statobj = None try: statobj = os.stat(obj) @@ -2216,78 +2348,13 @@ class dblink(object): elif pkgfiles[objkey][0] == "dev": show_unmerge("---", "", file_type, obj) - mydirs = sorted(mydirs) - mydirs.reverse() + self._unmerge_dirs(mydirs, infodirs_inodes, + protected_symlinks, unmerge_desc, unlink, os) + mydirs.clear() - for obj, inode_key in mydirs: - # Treat any directory named "info" as a candidate here, - # since it might have been in INFOPATH previously even - # though it may not be there now. - if inode_key in infodirs_inodes or \ - os.path.basename(obj) == "info": - try: - remaining = os.listdir(obj) - except OSError: - pass - else: - cleanup_info_dir = () - if remaining and \ - len(remaining) <= len(infodir_cleanup): - if not set(remaining).difference(infodir_cleanup): - cleanup_info_dir = remaining - - for child in cleanup_info_dir: - child = os.path.join(obj, child) - try: - lstatobj = os.lstat(child) - if stat.S_ISREG(lstatobj.st_mode): - unlink(child, lstatobj) - show_unmerge("<<<", "", "obj", child) - except EnvironmentError as e: - if e.errno not in ignored_unlink_errnos: - raise - del e - show_unmerge("!!!", "", "obj", child) - try: - if bsd_chflags: - lstatobj = os.lstat(obj) - if lstatobj.st_flags != 0: - bsd_chflags.lchflags(obj, 0) - parent_name = os.path.dirname(obj) - # Use normal stat/chflags for the parent since we want to - # follow any symlinks to the real parent directory. 
- pflags = os.stat(parent_name).st_flags - if pflags != 0: - bsd_chflags.chflags(parent_name, 0) - try: - os.rmdir(obj) - finally: - if bsd_chflags and pflags != 0: - # Restore the parent flags we saved before unlinking - bsd_chflags.chflags(parent_name, pflags) - show_unmerge("<<<", "", "dir", obj) - except EnvironmentError as e: - if e.errno not in ignored_rmdir_errnos: - raise - if e.errno != errno.ENOENT: - show_unmerge("---", unmerge_desc["!empty"], "dir", obj) - del e - else: - # When a directory is successfully removed, there's - # no need to protect symlinks that point to it. - unmerge_syms = protected_symlinks.pop(inode_key, None) - if unmerge_syms is not None: - for relative_path in unmerge_syms: - obj = os.path.join(real_root, - relative_path.lstrip(os.sep)) - try: - unlink(obj, os.lstat(obj)) - show_unmerge("<<<", "", "sym", obj) - except (OSError, IOError) as e: - if e.errno not in ignored_unlink_errnos: - raise - del e - show_unmerge("!!!", "", "sym", obj) + if protected_symlinks: + self._unmerge_protected_symlinks(others_in_slot, infodirs_inodes, + protected_symlinks, unmerge_desc, unlink, os) if protected_symlinks: msg = "One or more symlinks to directories have been " + \ @@ -2313,6 +2380,168 @@ class dblink(object): #remove self from vartree database so that our own virtual gets zapped if we're the last node self.vartree.zap(self.mycpv) + def _unmerge_protected_symlinks(self, others_in_slot, infodirs_inodes, + protected_symlinks, unmerge_desc, unlink, os): + + real_root = self.settings['ROOT'] + show_unmerge = self._show_unmerge + ignored_unlink_errnos = self._ignored_unlink_errnos + + flat_list = set() + flat_list.update(*protected_symlinks.values()) + flat_list = sorted(flat_list) + + for f in flat_list: + for dblnk in others_in_slot: + if dblnk.isowner(f): + # If another package in the same slot installed + # a file via a protected symlink, return early + # and don't bother searching for any other owners. 
+ return + + msg = [] + msg.append("") + msg.append(_("Directory symlink(s) may need protection:")) + msg.append("") + + for f in flat_list: + msg.append("\t%s" % \ + os.path.join(real_root, f.lstrip(os.path.sep))) + + msg.append("") + msg.append(_("Searching all installed" + " packages for files installed via above symlink(s)...")) + msg.append("") + self._elog("elog", "postrm", msg) + + self.lockdb() + try: + owners = self.vartree.dbapi._owners.get_owners(flat_list) + self.vartree.dbapi.flush_cache() + finally: + self.unlockdb() + + for owner in list(owners): + if owner.mycpv == self.mycpv: + owners.pop(owner, None) + + if not owners: + msg = [] + msg.append(_("The above directory symlink(s) are all " + "safe to remove. Removing them now...")) + msg.append("") + self._elog("elog", "postrm", msg) + dirs = set() + for unmerge_syms in protected_symlinks.values(): + for relative_path in unmerge_syms: + obj = os.path.join(real_root, + relative_path.lstrip(os.sep)) + parent = os.path.dirname(obj) + while len(parent) > len(self._eroot): + try: + lstatobj = os.lstat(parent) + except OSError: + break + else: + dirs.add((parent, + (lstatobj.st_dev, lstatobj.st_ino))) + parent = os.path.dirname(parent) + try: + unlink(obj, os.lstat(obj)) + show_unmerge("<<<", "", "sym", obj) + except (OSError, IOError) as e: + if e.errno not in ignored_unlink_errnos: + raise + del e + show_unmerge("!!!", "", "sym", obj) + + protected_symlinks.clear() + self._unmerge_dirs(dirs, infodirs_inodes, + protected_symlinks, unmerge_desc, unlink, os) + dirs.clear() + + def _unmerge_dirs(self, dirs, infodirs_inodes, + protected_symlinks, unmerge_desc, unlink, os): + + show_unmerge = self._show_unmerge + infodir_cleanup = self._infodir_cleanup + ignored_unlink_errnos = self._ignored_unlink_errnos + ignored_rmdir_errnos = self._ignored_rmdir_errnos + real_root = self.settings['ROOT'] + + dirs = sorted(dirs) + dirs.reverse() + + for obj, inode_key in dirs: + # Treat any directory named "info" as a 
candidate here, + # since it might have been in INFOPATH previously even + # though it may not be there now. + if inode_key in infodirs_inodes or \ + os.path.basename(obj) == "info": + try: + remaining = os.listdir(obj) + except OSError: + pass + else: + cleanup_info_dir = () + if remaining and \ + len(remaining) <= len(infodir_cleanup): + if not set(remaining).difference(infodir_cleanup): + cleanup_info_dir = remaining + + for child in cleanup_info_dir: + child = os.path.join(obj, child) + try: + lstatobj = os.lstat(child) + if stat.S_ISREG(lstatobj.st_mode): + unlink(child, lstatobj) + show_unmerge("<<<", "", "obj", child) + except EnvironmentError as e: + if e.errno not in ignored_unlink_errnos: + raise + del e + show_unmerge("!!!", "", "obj", child) + try: + if bsd_chflags: + lstatobj = os.lstat(obj) + if lstatobj.st_flags != 0: + bsd_chflags.lchflags(obj, 0) + parent_name = os.path.dirname(obj) + # Use normal stat/chflags for the parent since we want to + # follow any symlinks to the real parent directory. + pflags = os.stat(parent_name).st_flags + if pflags != 0: + bsd_chflags.chflags(parent_name, 0) + try: + os.rmdir(obj) + finally: + if bsd_chflags and pflags != 0: + # Restore the parent flags we saved before unlinking + bsd_chflags.chflags(parent_name, pflags) + show_unmerge("<<<", "", "dir", obj) + except EnvironmentError as e: + if e.errno not in ignored_rmdir_errnos: + raise + if e.errno != errno.ENOENT: + show_unmerge("---", unmerge_desc["!empty"], "dir", obj) + del e + else: + # When a directory is successfully removed, there's + # no need to protect symlinks that point to it. 
+ unmerge_syms = protected_symlinks.pop(inode_key, None) + if unmerge_syms is not None: + for relative_path in unmerge_syms: + obj = os.path.join(real_root, + relative_path.lstrip(os.sep)) + try: + unlink(obj, os.lstat(obj)) + show_unmerge("<<<", "", "sym", obj) + except (OSError, IOError) as e: + if e.errno not in ignored_unlink_errnos: + raise + del e + show_unmerge("!!!", "", "sym", obj) + def isowner(self, filename, destroot=None): """ Check if a file belongs to this package. This may @@ -2328,7 +2557,7 @@ class dblink(object): @param destroot: @type destroot: @rtype: Boolean - @returns: + @return: 1. True if this package owns the file. 2. False if this package does not own the file. """ @@ -2857,9 +3086,13 @@ class dblink(object): os = _os_merge - collision_ignore = set([normalize_path(myignore) for myignore in \ - portage.util.shlex_split( - self.settings.get("COLLISION_IGNORE", ""))]) + collision_ignore = [] + for x in portage.util.shlex_split( + self.settings.get("COLLISION_IGNORE", "")): + if os.path.isdir(os.path.join(self._eroot, x.lstrip(os.sep))): + x = normalize_path(x) + x += "/*" + collision_ignore.append(x) # For collisions with preserved libraries, the current package # will assume ownership and the libraries will be unregistered. 
@@ -2960,15 +3193,12 @@ class dblink(object): if not isowned and self.isprotected(full_path): isowned = True if not isowned: + f_match = full_path[len(self._eroot)-1:] stopmerge = True - if collision_ignore: - if f in collision_ignore: + for pattern in collision_ignore: + if fnmatch.fnmatch(f_match, pattern): stopmerge = False - else: - for myignore in collision_ignore: - if f.startswith(myignore + os.path.sep): - stopmerge = False - break + break if stopmerge: collisions.append(f) return collisions, symlink_collisions, plib_collisions @@ -3121,9 +3351,10 @@ class dblink(object): if isinstance(lines, basestring): lines = [lines] for line in lines: - fields = (funcname, phase, cpv, line.rstrip('\n')) - str_buffer.append(' '.join(fields)) - str_buffer.append('\n') + for line in line.split('\n'): + fields = (funcname, phase, cpv, line) + str_buffer.append(' '.join(fields)) + str_buffer.append('\n') if str_buffer: os.write(self._pipe, _unicode_encode(''.join(str_buffer))) @@ -3157,7 +3388,7 @@ class dblink(object): @param prev_mtimes: { Filename:mtime } mapping for env_update @type prev_mtimes: Dictionary @rtype: Boolean - @returns: + @return: 1. 0 on success 2. 
1 on failure @@ -3192,17 +3423,22 @@ class dblink(object): pass continue + f = None try: - val = io.open(_unicode_encode( + f = io.open(_unicode_encode( os.path.join(inforoot, var_name), encoding=_encodings['fs'], errors='strict'), mode='r', encoding=_encodings['repo.content'], - errors='replace').readline().strip() + errors='replace') + val = f.readline().strip() except EnvironmentError as e: if e.errno != errno.ENOENT: raise del e val = '' + finally: + if f is not None: + f.close() if var_name == 'SLOT': slot = val @@ -3226,10 +3462,6 @@ class dblink(object): if not os.path.exists(self.dbcatdir): ensure_dirs(self.dbcatdir) - otherversions = [] - for v in self.vartree.dbapi.cp_list(self.mysplit[0]): - otherversions.append(v.split("/")[1]) - cp = self.mysplit[0] slot_atom = "%s:%s" % (cp, slot) @@ -3270,22 +3502,49 @@ class dblink(object): max_dblnk = dblnk self._installed_instance = max_dblnk + if self.settings.get("INSTALL_MASK") or \ + "nodoc" in self.settings.features or \ + "noinfo" in self.settings.features or \ + "noman" in self.settings.features: + # Apply INSTALL_MASK before collision-protect, since it may + # be useful to avoid collisions in some scenarios. + phase = MiscFunctionsProcess(background=False, + commands=["preinst_mask"], phase="preinst", + scheduler=self._scheduler, settings=self.settings) + phase.start() + phase.wait() + # We check for unicode encoding issues after src_install. However, # the check must be repeated here for binary packages (it's # inexpensive since we call os.walk() here anyway). 
unicode_errors = [] + line_ending_re = re.compile('[\n\r]') + srcroot_len = len(srcroot) + ed_len = len(self.settings["ED"]) while True: unicode_error = False + eagain_error = False myfilelist = [] mylinklist = [] paths_with_newlines = [] - srcroot_len = len(srcroot) def onerror(e): raise - for parent, dirs, files in os.walk(srcroot, onerror=onerror): + walk_iter = os.walk(srcroot, onerror=onerror) + while True: + try: + parent, dirs, files = next(walk_iter) + except StopIteration: + break + except OSError as e: + if e.errno != errno.EAGAIN: + raise + # Observed with PyPy 1.8. + eagain_error = True + break + try: parent = _unicode_decode(parent, encoding=_encodings['merge'], errors='strict') @@ -3293,12 +3552,12 @@ class dblink(object): new_parent = _unicode_decode(parent, encoding=_encodings['merge'], errors='replace') new_parent = _unicode_encode(new_parent, - encoding=_encodings['merge'], errors='backslashreplace') + encoding='ascii', errors='backslashreplace') new_parent = _unicode_decode(new_parent, encoding=_encodings['merge'], errors='replace') os.rename(parent, new_parent) unicode_error = True - unicode_errors.append(new_parent[srcroot_len:]) + unicode_errors.append(new_parent[ed_len:]) break for fname in files: @@ -3311,13 +3570,13 @@ class dblink(object): new_fname = _unicode_decode(fname, encoding=_encodings['merge'], errors='replace') new_fname = _unicode_encode(new_fname, - encoding=_encodings['merge'], errors='backslashreplace') + encoding='ascii', errors='backslashreplace') new_fname = _unicode_decode(new_fname, encoding=_encodings['merge'], errors='replace') new_fpath = os.path.join(parent, new_fname) os.rename(fpath, new_fpath) unicode_error = True - unicode_errors.append(new_fpath[srcroot_len:]) + unicode_errors.append(new_fpath[ed_len:]) fname = new_fname fpath = new_fpath else: @@ -3325,7 +3584,7 @@ class dblink(object): relative_path = fpath[srcroot_len:] - if "\n" in relative_path: + if line_ending_re.search(relative_path) is not None: 
paths_with_newlines.append(relative_path) file_mode = os.lstat(fpath).st_mode @@ -3340,19 +3599,20 @@ class dblink(object): if unicode_error: break - if not unicode_error: + if not (unicode_error or eagain_error): break if unicode_errors: - eerror(portage._merge_unicode_error(unicode_errors)) + self._elog("eqawarn", "preinst", + _merge_unicode_error(unicode_errors)) if paths_with_newlines: msg = [] - msg.append(_("This package installs one or more files containing a newline (\\n) character:")) + msg.append(_("This package installs one or more files containing line ending characters:")) msg.append("") paths_with_newlines.sort() for f in paths_with_newlines: - msg.append("\t/%s" % (f.replace("\n", "\\n"))) + msg.append("\t/%s" % (f.replace("\n", "\\n").replace("\r", "\\r"))) msg.append("") msg.append(_("package %s NOT merged") % self.mycpv) msg.append("") @@ -3394,14 +3654,6 @@ class dblink(object): if installed_files: return 1 - # check for package collisions - blockers = self._blockers - if blockers is None: - blockers = [] - collisions, symlink_collisions, plib_collisions = \ - self._collision_protect(srcroot, destroot, - others_in_slot + blockers, myfilelist, mylinklist) - # Make sure the ebuild environment is initialized and that ${T}/elog # exists for logging of collision-protect eerror messages. if myebuild is None: @@ -3413,6 +3665,29 @@ class dblink(object): for other in others_in_slot]) prepare_build_dirs(settings=self.settings, cleanup=cleanup) + # check for package collisions + blockers = self._blockers + if blockers is None: + blockers = [] + collisions, symlink_collisions, plib_collisions = \ + self._collision_protect(srcroot, destroot, + others_in_slot + blockers, myfilelist, mylinklist) + + if symlink_collisions: + # Symlink collisions need to be distinguished from other types + # of collisions, in order to avoid confusion (see bug #409359). 
+ msg = _("Package '%s' has one or more collisions " + "between symlinks and directories, which is explicitly " + "forbidden by PMS section 13.4 (see bug #326685):") % \ + (self.settings.mycpv,) + msg = textwrap.wrap(msg, 70) + msg.append("") + for f in symlink_collisions: + msg.append("\t%s" % os.path.join(destroot, + f.lstrip(os.path.sep))) + msg.append("") + self._elog("eerror", "preinst", msg) + if collisions: collision_protect = "collision-protect" in self.settings.features protect_owned = "protect-owned" in self.settings.features @@ -3494,12 +3769,20 @@ class dblink(object): eerror([_("None of the installed" " packages claim the file(s)."), ""]) + symlink_abort_msg =_("Package '%s' NOT merged since it has " + "one or more collisions between symlinks and directories, " + "which is explicitly forbidden by PMS section 13.4 " + "(see bug #326685).") + # The explanation about the collision and how to solve # it may not be visible via a scrollback buffer, especially # if the number of file collisions is large. Therefore, # show a summary at the end. 
abort = False - if collision_protect: + if symlink_collisions: + abort = True + msg = symlink_abort_msg % (self.settings.mycpv,) + elif collision_protect: abort = True msg = _("Package '%s' NOT merged due to file collisions.") % \ self.settings.mycpv @@ -3507,12 +3790,6 @@ class dblink(object): abort = True msg = _("Package '%s' NOT merged due to file collisions.") % \ self.settings.mycpv - elif symlink_collisions: - abort = True - msg = _("Package '%s' NOT merged due to collision " + \ - "between a symlink and a directory which is explicitly " + \ - "forbidden by PMS (see bug #326685).") % \ - (self.settings.mycpv,) else: msg = _("Package '%s' merged despite file collisions.") % \ self.settings.mycpv @@ -3558,10 +3835,12 @@ class dblink(object): # write local package counter for recording if counter is None: counter = self.vartree.dbapi.counter_tick(mycpv=self.mycpv) - io.open(_unicode_encode(os.path.join(self.dbtmpdir, 'COUNTER'), + f = io.open(_unicode_encode(os.path.join(self.dbtmpdir, 'COUNTER'), encoding=_encodings['fs'], errors='strict'), mode='w', encoding=_encodings['repo.content'], - errors='backslashreplace').write(_unicode_decode(str(counter))) + errors='backslashreplace') + f.write(_unicode_decode(str(counter))) + f.close() self.updateprotect() @@ -3577,9 +3856,8 @@ class dblink(object): # Always behave like --noconfmem is enabled for downgrades # so that people who don't know about this option are less # likely to get confused when doing upgrade/downgrade cycles. - pv_split = catpkgsplit(self.mycpv)[1:] for other in others_in_slot: - if pkgcmp(pv_split, catpkgsplit(other.mycpv)[1:]) < 0: + if vercmp(self.mycpv.version, other.mycpv.version) < 0: cfgfiledict["IGNORE"] = 1 break @@ -3798,22 +4076,11 @@ class dblink(object): showMessage(_("!!! 
FAILED postinst: ")+str(a)+"\n", level=logging.ERROR, noiselevel=-1) - downgrade = False - for v in otherversions: - if pkgcmp(catpkgsplit(self.pkg)[1:], catpkgsplit(v)[1:]) < 0: - downgrade = True - - # Lock the config memory file to prevent symlink creation - # in merge_contents from overlapping with env-update. - self.vartree.dbapi._fs_lock() - try: - #update environment settings, library paths. DO NOT change symlinks. - env_update(makelinks=(not downgrade), - target_root=self.settings['ROOT'], prev_mtimes=prev_mtimes, - contents=contents, env=self.settings.environ(), - writemsg_level=self._display_merge) - finally: - self.vartree.dbapi._fs_unlock() + #update environment settings, library paths. DO NOT change symlinks. + env_update( + target_root=self.settings['ROOT'], prev_mtimes=prev_mtimes, + contents=contents, env=self.settings, + writemsg_level=self._display_merge, vardbapi=self.vartree.dbapi) # For gcc upgrades, preserved libs have to be removed after the # the library path has been updated. @@ -3867,7 +4134,8 @@ class dblink(object): # we do a first merge; this will recurse through all files in our srcroot but also build up a # "second hand" of symlinks to merge later - if self.mergeme(srcroot, destroot, outfile, secondhand, "", cfgfiledict, mymtime): + if self.mergeme(srcroot, destroot, outfile, secondhand, + self.settings["EPREFIX"].lstrip(os.sep), cfgfiledict, mymtime): return 1 # now, it's time for dealing our second hand; we'll loop until we can't merge anymore. The rest are @@ -3936,7 +4204,7 @@ class dblink(object): @param thismtime: The current time (typically long(time.time()) @type thismtime: Long @rtype: None or Boolean - @returns: + @return: 1. True on failure 2. 
None otherwise @@ -3952,6 +4220,10 @@ class dblink(object): destroot = normalize_path(destroot).rstrip(sep) + sep calc_prelink = "prelink-checksums" in self.settings.features + protect_if_modified = \ + "config-protect-if-modified" in self.settings.features and \ + self._installed_instance is not None + # this is supposed to merge a list of files. There will be 2 forms of argument passing. if isinstance(stufftomerge, basestring): #A directory is specified. Figure out protection paths, listdir() it and process it. @@ -3985,14 +4257,37 @@ class dblink(object): if stat.S_ISLNK(mymode): # we are merging a symbolic link - myabsto = abssymlink(mysrc) + # The file name of mysrc and the actual file that it points to + # will have earlier been forcefully converted to the 'merge' + # encoding if necessary, but the content of the symbolic link + # may need to be forcefully converted here. + myto = _os.readlink(_unicode_encode(mysrc, + encoding=_encodings['merge'], errors='strict')) + try: + myto = _unicode_decode(myto, + encoding=_encodings['merge'], errors='strict') + except UnicodeDecodeError: + myto = _unicode_decode(myto, encoding=_encodings['merge'], + errors='replace') + myto = _unicode_encode(myto, encoding='ascii', + errors='backslashreplace') + myto = _unicode_decode(myto, encoding=_encodings['merge'], + errors='replace') + os.unlink(mysrc) + os.symlink(myto, mysrc) + + # Pass in the symlink target in order to bypass the + # os.readlink() call inside abssymlink(), since that + # call is unsafe if the merge encoding is not ascii + # or utf_8 (see bug #382021). + myabsto = abssymlink(mysrc, target=myto) + if myabsto.startswith(srcroot): myabsto = myabsto[len(srcroot):] myabsto = myabsto.lstrip(sep) - myto = os.readlink(mysrc) if self.settings and self.settings["D"]: if myto.startswith(self.settings["D"]): - myto = myto[len(self.settings["D"]):] + myto = myto[len(self.settings["D"])-1:] # myrealto contains the path of the real file to which this symlink points. 
# we can simply test for existence of this file to see if the target has been merged yet myrealto = normalize_path(os.path.join(destroot, myabsto)) @@ -4170,9 +4465,18 @@ class dblink(object): # now, config file management may come into play. # we only need to tweak mydest if cfg file management is in play. if protected: + destmd5 = perform_md5(mydest, calc_prelink=calc_prelink) + if protect_if_modified: + contents_key = \ + self._installed_instance._match_contents(myrealdest) + if contents_key: + inst_info = self._installed_instance.getcontents()[contents_key] + if inst_info[0] == "obj" and inst_info[2] == destmd5: + protected = False + + if protected: # we have a protection path; enable config file management. cfgprot = 0 - destmd5 = perform_md5(mydest, calc_prelink=calc_prelink) if mymd5 == destmd5: #file already in place; simply update mtimes of destination moveme = 1 @@ -4207,10 +4511,10 @@ class dblink(object): # as hardlinks (having identical st_dev and st_ino). hardlink_key = (mystat.st_dev, mystat.st_ino) - hardlink_candidates = self._md5_merge_map.get(hardlink_key) + hardlink_candidates = self._hardlink_merge_map.get(hardlink_key) if hardlink_candidates is None: hardlink_candidates = [] - self._md5_merge_map[hardlink_key] = hardlink_candidates + self._hardlink_merge_map[hardlink_key] = hardlink_candidates mymtime = movefile(mysrc, mydest, newmtime=thismtime, sstat=mystat, mysettings=self.settings, @@ -4218,8 +4522,7 @@ class dblink(object): encoding=_encodings['merge']) if mymtime is None: return 1 - if hardlink_candidates is not None: - hardlink_candidates.append(mydest) + hardlink_candidates.append(mydest) zing = ">>>" if mymtime != None: @@ -4445,6 +4748,7 @@ def write_contents(contents, root, f): def tar_contents(contents, root, tar, protect=None, onProgress=None): os = _os_merge + encoding = _encodings['merge'] try: for x in contents: @@ -4464,7 +4768,9 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None): pass else: os = 
portage.os + encoding = _encodings['fs'] + tar.encoding = encoding root = normalize_path(root).rstrip(os.path.sep) + os.path.sep id_strings = {} maxval = len(contents) @@ -4486,7 +4792,7 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None): continue contents_type = contents[path][0] if path.startswith(root): - arcname = path[len(root):] + arcname = "./" + path[len(root):] else: raise ValueError("invalid root argument: '%s'" % root) live_path = path @@ -4498,7 +4804,51 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None): # recorded as a real directory in the tar file to ensure that tar # can properly extract it's children. live_path = os.path.realpath(live_path) - tarinfo = tar.gettarinfo(live_path, arcname) + lst = os.lstat(live_path) + + # Since os.lstat() inside TarFile.gettarinfo() can trigger a + # UnicodeEncodeError when python has something other than utf_8 + # return from sys.getfilesystemencoding() (as in bug #388773), + # we implement the needed functionality here, using the result + # of our successful lstat call. An alternative to this would be + # to pass in the fileobj argument to TarFile.gettarinfo(), so + # that it could use fstat instead of lstat. However, that would + # have the unwanted effect of dereferencing symlinks. 
+ + tarinfo = tar.tarinfo() + tarinfo.name = arcname + tarinfo.mode = lst.st_mode + tarinfo.uid = lst.st_uid + tarinfo.gid = lst.st_gid + tarinfo.size = 0 + tarinfo.mtime = lst.st_mtime + tarinfo.linkname = "" + if stat.S_ISREG(lst.st_mode): + inode = (lst.st_ino, lst.st_dev) + if (lst.st_nlink > 1 and + inode in tar.inodes and + arcname != tar.inodes[inode]): + tarinfo.type = tarfile.LNKTYPE + tarinfo.linkname = tar.inodes[inode] + else: + tar.inodes[inode] = arcname + tarinfo.type = tarfile.REGTYPE + tarinfo.size = lst.st_size + elif stat.S_ISDIR(lst.st_mode): + tarinfo.type = tarfile.DIRTYPE + elif stat.S_ISLNK(lst.st_mode): + tarinfo.type = tarfile.SYMTYPE + tarinfo.linkname = os.readlink(live_path) + else: + continue + try: + tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] + except KeyError: + pass + try: + tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] + except KeyError: + pass if stat.S_ISREG(lst.st_mode): if protect and protect(path): @@ -4515,7 +4865,7 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None): f.close() else: f = open(_unicode_encode(path, - encoding=object.__getattribute__(os, '_encoding'), + encoding=encoding, errors='strict'), 'rb') try: tar.addfile(tarinfo, f) diff --git a/portage_with_autodep/pym/portage/dbapi/vartree.pyo b/portage_with_autodep/pym/portage/dbapi/vartree.pyo Binary files differnew file mode 100644 index 0000000..7c186cf --- /dev/null +++ b/portage_with_autodep/pym/portage/dbapi/vartree.pyo diff --git a/portage_with_autodep/pym/portage/dbapi/virtual.py b/portage_with_autodep/pym/portage/dbapi/virtual.py index ec97ffe..da15983 100644 --- a/portage_with_autodep/pym/portage/dbapi/virtual.py +++ b/portage_with_autodep/pym/portage/dbapi/virtual.py @@ -1,9 +1,10 @@ -# Copyright 1998-2007 Gentoo Foundation +# Copyright 1998-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 from portage.dbapi import dbapi -from portage import cpv_getkey +from portage.dbapi.dep_expand import 
dep_expand +from portage.versions import cpv_getkey, _pkg_str class fakedbapi(dbapi): """A fake dbapi that allows consumers to inject/remove packages to/from it @@ -31,27 +32,30 @@ class fakedbapi(dbapi): self._match_cache = {} def match(self, origdep, use_cache=1): - result = self._match_cache.get(origdep, None) + atom = dep_expand(origdep, mydb=self, settings=self.settings) + cache_key = (atom, atom.unevaluated_atom) + result = self._match_cache.get(cache_key) if result is not None: return result[:] - result = dbapi.match(self, origdep, use_cache=use_cache) - self._match_cache[origdep] = result + result = list(self._iter_match(atom, self.cp_list(atom.cp))) + self._match_cache[cache_key] = result return result[:] def cpv_exists(self, mycpv, myrepo=None): return mycpv in self.cpvdict def cp_list(self, mycp, use_cache=1, myrepo=None): - cachelist = self._match_cache.get(mycp) - # cp_list() doesn't expand old-style virtuals - if cachelist and cachelist[0].startswith(mycp): + # NOTE: Cache can be safely shared with the match cache, since the + # match cache uses the result from dep_expand for the cache_key. 
+ cache_key = (mycp, mycp) + cachelist = self._match_cache.get(cache_key) + if cachelist is not None: return cachelist[:] cpv_list = self.cpdict.get(mycp) if cpv_list is None: cpv_list = [] self._cpv_sort_ascending(cpv_list) - if not (not cpv_list and mycp.startswith("virtual/")): - self._match_cache[mycp] = cpv_list + self._match_cache[cache_key] = cpv_list return cpv_list[:] def cp_all(self): @@ -70,7 +74,13 @@ class fakedbapi(dbapi): @param metadata: dict """ self._clear_cache() - mycp = cpv_getkey(mycpv) + if not hasattr(mycpv, 'cp'): + if metadata is None: + mycpv = _pkg_str(mycpv) + else: + mycpv = _pkg_str(mycpv, slot=metadata.get('SLOT'), + repo=metadata.get('repository')) + mycp = mycpv.cp self.cpvdict[mycpv] = metadata myslot = None if self._exclusive_slots and metadata: diff --git a/portage_with_autodep/pym/portage/dbapi/virtual.pyo b/portage_with_autodep/pym/portage/dbapi/virtual.pyo Binary files differnew file mode 100644 index 0000000..9f7c667 --- /dev/null +++ b/portage_with_autodep/pym/portage/dbapi/virtual.pyo diff --git a/portage_with_autodep/pym/portage/debug.py b/portage_with_autodep/pym/portage/debug.py index ce642fe..ebf1a13 100644 --- a/portage_with_autodep/pym/portage/debug.py +++ b/portage_with_autodep/pym/portage/debug.py @@ -1,4 +1,4 @@ -# Copyright 1999-2011 Gentoo Foundation +# Copyright 1999-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import os @@ -26,7 +26,7 @@ class trace_handler(object): def __init__(self): python_system_paths = [] for x in sys.path: - if os.path.basename(x).startswith("python2."): + if os.path.basename(x) == "python%s.%s" % sys.version_info[:2]: python_system_paths.append(x) self.ignore_prefixes = [] diff --git a/portage_with_autodep/pym/portage/debug.pyo b/portage_with_autodep/pym/portage/debug.pyo Binary files differnew file mode 100644 index 0000000..82a5e8f --- /dev/null +++ b/portage_with_autodep/pym/portage/debug.pyo diff --git 
a/portage_with_autodep/pym/portage/dep/__init__.py b/portage_with_autodep/pym/portage/dep/__init__.py index fd5ad30..152af0a 100644 --- a/portage_with_autodep/pym/portage/dep/__init__.py +++ b/portage_with_autodep/pym/portage/dep/__init__.py @@ -1,5 +1,5 @@ # deps.py -- Portage dependency resolution functions -# Copyright 2003-2011 Gentoo Foundation +# Copyright 2003-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = [ @@ -30,13 +30,20 @@ __all__ = [ import re, sys import warnings from itertools import chain + +import portage +portage.proxy.lazyimport.lazyimport(globals(), + 'portage.util:cmp_sort_key,writemsg', +) + from portage import _unicode_decode from portage.eapi import eapi_has_slot_deps, eapi_has_src_uri_arrows, \ - eapi_has_use_deps, eapi_has_strong_blocks, eapi_has_use_dep_defaults + eapi_has_use_deps, eapi_has_strong_blocks, eapi_has_use_dep_defaults, \ + eapi_has_repo_deps, eapi_allows_dots_in_PN, eapi_allows_dots_in_use_flags from portage.exception import InvalidAtom, InvalidData, InvalidDependString from portage.localization import _ from portage.versions import catpkgsplit, catsplit, \ - pkgcmp, ververify, _cp, _cpv + vercmp, ververify, _cp, _cpv, _pkg_str, _unknown_repo import portage.cache.mappings if sys.hexversion >= 0x3000000: @@ -55,7 +62,7 @@ def cpvequal(cpv1, cpv2): @param cpv2: CategoryPackageVersion (no operators) Example: "sys-apps/portage-2.1" @type cpv2: String @rtype: Boolean - @returns: + @return: 1. True if cpv1 = cpv2 2. False Otherwise 3. 
Throws PortageException if cpv1 or cpv2 is not a CPV @@ -67,16 +74,27 @@ def cpvequal(cpv1, cpv2): """ - split1 = catpkgsplit(cpv1) - split2 = catpkgsplit(cpv2) - - if not split1 or not split2: + try: + try: + split1 = cpv1.cpv_split + except AttributeError: + cpv1 = _pkg_str(cpv1) + split1 = cpv1.cpv_split + + try: + split2 = cpv2.cpv_split + except AttributeError: + cpv2 = _pkg_str(cpv2) + split2 = cpv2.cpv_split + + except InvalidData: raise portage.exception.PortageException(_("Invalid data '%s, %s', parameter was not a CPV") % (cpv1, cpv2)) - - if split1[0] != split2[0]: + + if split1[0] != split2[0] or \ + split1[1] != split2[1]: return False - - return (pkgcmp(split1[1:], split2[1:]) == 0) + + return vercmp(cpv1.version, cpv2.version) == 0 def strip_empty(myarr): """ @@ -635,8 +653,8 @@ def flatten(mylist): _usedep_re = { - "0": re.compile("^(?P<prefix>[!-]?)(?P<flag>[A-Za-z0-9][A-Za-z0-9+_@-]*)(?P<default>(\(\+\)|\(\-\))?)(?P<suffix>[?=]?)$"), - "4-python": re.compile("^(?P<prefix>[!-]?)(?P<flag>[A-Za-z0-9][A-Za-z0-9+_@.-]*)(?P<default>(\(\+\)|\(\-\))?)(?P<suffix>[?=]?)$"), + "dots_disallowed_in_use_flags": re.compile("^(?P<prefix>[!-]?)(?P<flag>[A-Za-z0-9][A-Za-z0-9+_@-]*)(?P<default>(\(\+\)|\(\-\))?)(?P<suffix>[?=]?)$"), + "dots_allowed_in_use_flags": re.compile("^(?P<prefix>[!-]?)(?P<flag>[A-Za-z0-9][A-Za-z0-9+_@.-]*)(?P<default>(\(\+\)|\(\-\))?)(?P<suffix>[?=]?)$"), } def _get_usedep_re(eapi): @@ -649,10 +667,10 @@ def _get_usedep_re(eapi): @return: A regular expression object that matches valid USE deps for the given eapi. 
""" - if eapi in (None, "4-python",): - return _usedep_re["4-python"] + if eapi is None or eapi_allows_dots_in_use_flags(eapi): + return _usedep_re["dots_allowed_in_use_flags"] else: - return _usedep_re["0"] + return _usedep_re["dots_disallowed_in_use_flags"] class _use_dep(object): @@ -1068,6 +1086,10 @@ class Atom(_atom_base): _atom_base.__init__(s) + atom_re = _get_atom_re(eapi) + if eapi_has_repo_deps(eapi): + allow_repo = True + if "!" == s[:1]: blocker = self._blocker(forbid_overlap=("!" == s[1:2])) if blocker.overlap.forbid: @@ -1077,11 +1099,11 @@ class Atom(_atom_base): else: blocker = False self.__dict__['blocker'] = blocker - m = _atom_re.match(s) + m = atom_re.match(s) extended_syntax = False if m is None: if allow_wildcard: - m = _atom_wildcard_re.match(s) + m = _get_atom_wildcard_re(eapi).match(s) if m is None: raise InvalidAtom(self) op = None @@ -1096,38 +1118,44 @@ class Atom(_atom_base): else: raise InvalidAtom(self) elif m.group('op') is not None: - base = _atom_re.groupindex['op'] + base = atom_re.groupindex['op'] op = m.group(base + 1) cpv = m.group(base + 2) cp = m.group(base + 3) - slot = m.group(_atom_re.groups - 2) - repo = m.group(_atom_re.groups - 1) - use_str = m.group(_atom_re.groups) + slot = m.group(atom_re.groups - 2) + repo = m.group(atom_re.groups - 1) + use_str = m.group(atom_re.groups) if m.group(base + 4) is not None: raise InvalidAtom(self) elif m.group('star') is not None: - base = _atom_re.groupindex['star'] + base = atom_re.groupindex['star'] op = '=*' cpv = m.group(base + 1) cp = m.group(base + 2) - slot = m.group(_atom_re.groups - 2) - repo = m.group(_atom_re.groups - 1) - use_str = m.group(_atom_re.groups) + slot = m.group(atom_re.groups - 2) + repo = m.group(atom_re.groups - 1) + use_str = m.group(atom_re.groups) if m.group(base + 3) is not None: raise InvalidAtom(self) elif m.group('simple') is not None: op = None - cpv = cp = m.group(_atom_re.groupindex['simple'] + 1) - slot = m.group(_atom_re.groups - 2) - repo = 
m.group(_atom_re.groups - 1) - use_str = m.group(_atom_re.groups) - if m.group(_atom_re.groupindex['simple'] + 2) is not None: + cpv = cp = m.group(atom_re.groupindex['simple'] + 1) + slot = m.group(atom_re.groups - 2) + repo = m.group(atom_re.groups - 1) + use_str = m.group(atom_re.groups) + if m.group(atom_re.groupindex['simple'] + 2) is not None: raise InvalidAtom(self) else: raise AssertionError(_("required group not found in atom: '%s'") % self) self.__dict__['cp'] = cp - self.__dict__['cpv'] = cpv + try: + self.__dict__['cpv'] = _pkg_str(cpv) + self.__dict__['version'] = self.cpv.version + except InvalidData: + # plain cp, wildcard, or something + self.__dict__['cpv'] = cpv + self.__dict__['version'] = None self.__dict__['repo'] = repo self.__dict__['slot'] = slot self.__dict__['operator'] = op @@ -1216,6 +1244,23 @@ class Atom(_atom_base): return Atom(self.replace(_slot_separator + self.slot, '', 1), allow_repo=True, allow_wildcard=True) + def with_repo(self, repo): + atom = remove_slot(self) + if self.slot is not None: + atom += _slot_separator + self.slot + atom += _repo_separator + repo + if self.use is not None: + atom += str(self.use) + return Atom(atom, allow_repo=True, allow_wildcard=True) + + def with_slot(self, slot): + atom = remove_slot(self) + _slot_separator + slot + if self.repo is not None: + atom += _repo_separator + self.repo + if self.use is not None: + atom += str(self.use) + return Atom(atom, allow_repo=True, allow_wildcard=True) + def __setattr__(self, name, value): raise AttributeError("Atom instances are immutable", self.__class__, name, value) @@ -1353,10 +1398,13 @@ class ExtendedAtomDict(portage.cache.mappings.MutableMapping): yield k def iteritems(self): - for item in self._normal.items(): - yield item - for item in self._extended.items(): - yield item + try: + for item in self._normal.items(): + yield item + for item in self._extended.items(): + yield item + except AttributeError: + pass # FEATURES=python-trace def 
__delitem__(self, cp): if "*" in cp: @@ -1610,20 +1658,45 @@ _repo_separator = "::" _repo_name = r'[\w][\w-]*' _repo = r'(?:' + _repo_separator + '(' + _repo_name + ')' + ')?' -_atom_re = re.compile('^(?P<without_use>(?:' + - '(?P<op>' + _op + _cpv + ')|' + - '(?P<star>=' + _cpv + r'\*)|' + - '(?P<simple>' + _cp + '))' + - '(' + _slot_separator + _slot + ')?' + _repo + ')(' + _use + ')?$', re.VERBOSE) +_atom_re = { + "dots_disallowed_in_PN": re.compile('^(?P<without_use>(?:' + + '(?P<op>' + _op + _cpv['dots_disallowed_in_PN'] + ')|' + + '(?P<star>=' + _cpv['dots_disallowed_in_PN'] + r'\*)|' + + '(?P<simple>' + _cp['dots_disallowed_in_PN'] + '))' + + '(' + _slot_separator + _slot + ')?' + _repo + ')(' + _use + ')?$', re.VERBOSE), + "dots_allowed_in_PN": re.compile('^(?P<without_use>(?:' + + '(?P<op>' + _op + _cpv['dots_allowed_in_PN'] + ')|' + + '(?P<star>=' + _cpv['dots_allowed_in_PN'] + r'\*)|' + + '(?P<simple>' + _cp['dots_allowed_in_PN'] + '))' + + '(' + _slot_separator + _slot + ')?' + _repo + ')(' + _use + ')?$', re.VERBOSE), +} + +def _get_atom_re(eapi): + if eapi is None or eapi_allows_dots_in_PN(eapi): + return _atom_re["dots_allowed_in_PN"] + else: + return _atom_re["dots_disallowed_in_PN"] _extended_cat = r'[\w+*][\w+.*-]*' -_extended_pkg = r'[\w+*][\w+*-]*?' 
+_extended_pkg = { + "dots_disallowed_in_PN": r'[\w+*][\w+*-]*?', + "dots_allowed_in_PN": r'[\w+*][\w+.*-]*?', +} -_atom_wildcard_re = re.compile('(?P<simple>(' + _extended_cat + ')/(' + _extended_pkg + '))(:(?P<slot>' + _slot + '))?(' + _repo_separator + '(?P<repo>' + _repo_name + '))?$') +_atom_wildcard_re = { + "dots_disallowed_in_PN": re.compile('(?P<simple>(' + _extended_cat + ')/(' + _extended_pkg['dots_disallowed_in_PN'] + '))(:(?P<slot>' + _slot + '))?(' + _repo_separator + '(?P<repo>' + _repo_name + '))?$'), + "dots_allowed_in_PN": re.compile('(?P<simple>(' + _extended_cat + ')/(' + _extended_pkg['dots_allowed_in_PN'] + '))(:(?P<slot>' + _slot + '))?(' + _repo_separator + '(?P<repo>' + _repo_name + '))?$'), +} + +def _get_atom_wildcard_re(eapi): + if eapi is None or eapi_allows_dots_in_PN(eapi): + return _atom_wildcard_re["dots_allowed_in_PN"] + else: + return _atom_wildcard_re["dots_disallowed_in_PN"] _useflag_re = { - "0": re.compile(r'^[A-Za-z0-9][A-Za-z0-9+_@-]*$'), - "4-python": re.compile(r'^[A-Za-z0-9][A-Za-z0-9+_@.-]*$'), + "dots_disallowed_in_use_flags": re.compile(r'^[A-Za-z0-9][A-Za-z0-9+_@-]*$'), + "dots_allowed_in_use_flags": re.compile(r'^[A-Za-z0-9][A-Za-z0-9+_@.-]*$'), } def _get_useflag_re(eapi): @@ -1636,10 +1709,10 @@ def _get_useflag_re(eapi): @return: A regular expression object that matches valid USE flags for the given eapi. 
""" - if eapi in (None, "4-python",): - return _useflag_re["4-python"] + if eapi is None or eapi_allows_dots_in_use_flags(eapi): + return _useflag_re["dots_allowed_in_use_flags"] else: - return _useflag_re["0"] + return _useflag_re["dots_disallowed_in_use_flags"] def isvalidatom(atom, allow_blockers=False, allow_wildcard=False, allow_repo=False): """ @@ -1753,7 +1826,14 @@ def match_to_list(mypkg, mylist): @rtype: List @return: A unique list of package atoms that match the given package atom """ - return [ x for x in set(mylist) if match_from_list(x, [mypkg]) ] + matches = set() + result = [] + pkgs = [mypkg] + for x in mylist: + if x not in matches and match_from_list(x, pkgs): + matches.add(x) + result.append(x) + return result def best_match_to_list(mypkg, mylist): """ @@ -1781,6 +1861,7 @@ def best_match_to_list(mypkg, mylist): '>':2, '<':2, '>=':2, '<=':2, None:1} maxvalue = -2 bestm = None + mypkg_cpv = None for x in match_to_list(mypkg, mylist): if x.extended_syntax: if dep_getslot(x) is not None: @@ -1800,6 +1881,31 @@ def best_match_to_list(mypkg, mylist): if op_val > maxvalue: maxvalue = op_val bestm = x + elif op_val == maxvalue and op_val == 2: + # For >, <, >=, and <=, the one with the version + # closest to mypkg is the best match. 
+ if mypkg_cpv is None: + try: + mypkg_cpv = mypkg.cpv + except AttributeError: + mypkg_cpv = _pkg_str(remove_slot(mypkg)) + if bestm.cpv == mypkg_cpv or bestm.cpv == x.cpv: + pass + elif x.cpv == mypkg_cpv: + bestm = x + else: + # Sort the cpvs to find the one closest to mypkg_cpv + cpv_list = [bestm.cpv, mypkg_cpv, x.cpv] + def cmp_cpv(cpv1, cpv2): + return vercmp(cpv1.version, cpv2.version) + cpv_list.sort(key=cmp_sort_key(cmp_cpv)) + if cpv_list[0] is mypkg_cpv or cpv_list[-1] is mypkg_cpv: + if cpv_list[1] is x.cpv: + bestm = x + else: + # TODO: handle the case where mypkg_cpv is in the middle + pass + return bestm def match_from_list(mydep, candidate_list): @@ -1817,7 +1923,6 @@ def match_from_list(mydep, candidate_list): if not candidate_list: return [] - from portage.util import writemsg if "!" == mydep[:1]: if "!" == mydep[1:2]: mydep = mydep[2:] @@ -1882,7 +1987,7 @@ def match_from_list(mydep, candidate_list): myver = mysplit[2].lstrip("0") if not myver or not myver[0].isdigit(): myver = "0"+myver - mycpv = mysplit[0]+"/"+mysplit[1]+"-"+myver + mycpv_cmp = mysplit[0]+"/"+mysplit[1]+"-"+myver for x in candidate_list: xs = getattr(x, "cpv_split", None) if xs is None: @@ -1891,7 +1996,7 @@ def match_from_list(mydep, candidate_list): if not myver or not myver[0].isdigit(): myver = "0"+myver xcpv = xs[0]+"/"+xs[1]+"-"+myver - if xcpv.startswith(mycpv): + if xcpv.startswith(mycpv_cmp): mylist.append(x) elif operator == "~": # version, any revision, match @@ -1908,15 +2013,19 @@ def match_from_list(mydep, candidate_list): mylist.append(x) elif operator in [">", ">=", "<", "<="]: - mysplit = ["%s/%s" % (cat, pkg), ver, rev] for x in candidate_list: - xs = getattr(x, "cpv_split", None) - if xs is None: - xs = catpkgsplit(remove_slot(x)) - xcat, xpkg, xver, xrev = xs - xs = ["%s/%s" % (xcat, xpkg), xver, xrev] + if hasattr(x, 'cp'): + pkg = x + else: + try: + pkg = _pkg_str(remove_slot(x)) + except InvalidData: + continue + + if pkg.cp != mydep.cp: + continue try: 
- result = pkgcmp(xs, mysplit) + result = vercmp(pkg.version, mydep.version) except ValueError: # pkgcmp may return ValueError during int() conversion writemsg(_("\nInvalid package name: %s\n") % x, noiselevel=-1) raise @@ -1993,7 +2102,8 @@ def match_from_list(mydep, candidate_list): repo = getattr(x, "repo", False) if repo is False: repo = dep_getrepo(x) - if repo is not None and repo != mydep.repo: + if repo is not None and repo != _unknown_repo and \ + repo != mydep.repo: continue mylist.append(x) diff --git a/portage_with_autodep/pym/portage/dep/__init__.pyo b/portage_with_autodep/pym/portage/dep/__init__.pyo Binary files differnew file mode 100644 index 0000000..c78bb23 --- /dev/null +++ b/portage_with_autodep/pym/portage/dep/__init__.pyo diff --git a/portage_with_autodep/pym/portage/dep/_slot_operator.py b/portage_with_autodep/pym/portage/dep/_slot_operator.py new file mode 100644 index 0000000..7b64444 --- /dev/null +++ b/portage_with_autodep/pym/portage/dep/_slot_operator.py @@ -0,0 +1,97 @@ +# Copyright 2012-2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +from __future__ import unicode_literals + +from portage.dep import Atom, paren_enclose, use_reduce +from portage.eapi import _get_eapi_attrs +from portage.exception import InvalidData +from _emerge.Package import Package + +def find_built_slot_operator_atoms(pkg): + atoms = {} + for k in Package._dep_keys: + atom_list = list(_find_built_slot_operator(use_reduce(pkg._metadata[k], + uselist=pkg.use.enabled, eapi=pkg.eapi, + token_class=Atom))) + if atom_list: + atoms[k] = atom_list + return atoms + +def _find_built_slot_operator(dep_struct): + for x in dep_struct: + if isinstance(x, list): + for atom in _find_built_slot_operator(x): + yield atom + elif isinstance(x, Atom) and x.slot_operator_built: + yield x + +def ignore_built_slot_operator_deps(dep_struct): + for i, x in enumerate(dep_struct): + if isinstance(x, list): + ignore_built_slot_operator_deps(x) + 
elif isinstance(x, Atom) and x.slot_operator_built: + # There's no way of knowing here whether the SLOT + # part of the slot/sub-slot pair should be kept, so we + # ignore both parts. + dep_struct[i] = x.without_slot + +def evaluate_slot_operator_equal_deps(settings, use, trees): + + metadata = settings.configdict['pkg'] + eapi = metadata['EAPI'] + eapi_attrs = _get_eapi_attrs(eapi) + running_vardb = trees[trees._running_eroot]["vartree"].dbapi + target_vardb = trees[trees._target_eroot]["vartree"].dbapi + vardbs = [target_vardb] + deps = {} + for k in Package._dep_keys: + deps[k] = use_reduce(metadata[k], + uselist=use, eapi=eapi, token_class=Atom) + + for k in Package._runtime_keys: + _eval_deps(deps[k], vardbs) + + if eapi_attrs.hdepend: + _eval_deps(deps["HDEPEND"], [running_vardb]) + _eval_deps(deps["DEPEND"], [target_vardb]) + else: + if running_vardb is not target_vardb: + vardbs.append(running_vardb) + _eval_deps(deps["DEPEND"], vardbs) + + result = {} + for k, v in deps.items(): + result[k] = paren_enclose(v) + + return result + +def _eval_deps(dep_struct, vardbs): + for i, x in enumerate(dep_struct): + if isinstance(x, list): + _eval_deps(x, vardbs) + elif isinstance(x, Atom) and x.slot_operator == "=": + for vardb in vardbs: + best_version = vardb.match(x) + if best_version: + best_version = best_version[-1] + try: + best_version = \ + vardb._pkg_str(best_version, None) + except (KeyError, InvalidData): + pass + else: + slot_part = "%s/%s=" % \ + (best_version.slot, best_version.sub_slot) + x = x.with_slot(slot_part) + dep_struct[i] = x + break + else: + # this dep could not be resolved, so remove the operator + # (user may be using package.provided and managing rebuilds + # manually) + if x.slot: + x = x.with_slot(x.slot) + else: + x = x.without_slot + dep_struct[i] = x diff --git a/portage_with_autodep/pym/portage/dep/dep_check.py b/portage_with_autodep/pym/portage/dep/dep_check.py index 01d5021..99a5eb0 100644 --- 
a/portage_with_autodep/pym/portage/dep/dep_check.py +++ b/portage_with_autodep/pym/portage/dep/dep_check.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ['dep_check', 'dep_eval', 'dep_wordreduce', 'dep_zapdeps'] @@ -11,7 +11,7 @@ from portage.dep import Atom, match_from_list, use_reduce from portage.exception import InvalidDependString, ParseError from portage.localization import _ from portage.util import writemsg, writemsg_level -from portage.versions import catpkgsplit, cpv_getkey, pkgcmp +from portage.versions import vercmp, _pkg_str def _expand_new_virtuals(mysplit, edebug, mydbapi, mysettings, myroot="/", trees=None, use_mask=None, use_force=None, **kwargs): @@ -39,14 +39,12 @@ def _expand_new_virtuals(mysplit, edebug, mydbapi, mysettings, myroot="/", parent = mytrees.get("parent") virt_parent = mytrees.get("virt_parent") graph_parent = None - eapi = None if parent is not None: if virt_parent is not None: graph_parent = virt_parent parent = virt_parent else: graph_parent = parent - eapi = parent.metadata["EAPI"] repoman = not mysettings.local_config if kwargs["use_binaries"]: portdb = trees[myroot]["bintree"].dbapi @@ -352,8 +350,14 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None): avail_pkg = mydbapi.match(atom.without_use) if avail_pkg: avail_pkg = avail_pkg[-1] # highest (ascending order) - avail_slot = Atom("%s:%s" % (atom.cp, - mydbapi.aux_get(avail_pkg, ["SLOT"])[0])) + try: + slot = avail_pkg.slot + except AttributeError: + eapi, slot, repo = mydbapi.aux_get(avail_pkg, + ["EAPI", "SLOT", "repository"]) + avail_pkg = _pkg_str(avail_pkg, eapi=eapi, + slot=slot, repo=repo) + avail_slot = Atom("%s:%s" % (atom.cp, slot)) if not avail_pkg: all_available = False all_use_satisfied = False @@ -368,16 +372,19 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None): avail_pkg_use = 
avail_pkg_use[-1] if avail_pkg_use != avail_pkg: avail_pkg = avail_pkg_use - avail_slot = Atom("%s:%s" % (atom.cp, - mydbapi.aux_get(avail_pkg, ["SLOT"])[0])) + try: + slot = avail_pkg.slot + except AttributeError: + eapi, slot, repo = mydbapi.aux_get(avail_pkg, + ["EAPI", "SLOT", "repository"]) + avail_pkg = _pkg_str(avail_pkg, + eapi=eapi, slot=slot, repo=repo) slot_map[avail_slot] = avail_pkg - pkg_cp = cpv_getkey(avail_pkg) - highest_cpv = cp_map.get(pkg_cp) + highest_cpv = cp_map.get(avail_pkg.cp) if highest_cpv is None or \ - pkgcmp(catpkgsplit(avail_pkg)[1:], - catpkgsplit(highest_cpv)[1:]) > 0: - cp_map[pkg_cp] = avail_pkg + vercmp(avail_pkg.version, highest_cpv.version) > 0: + cp_map[avail_pkg.cp] = avail_pkg this_choice = (atoms, slot_map, cp_map, all_available) if all_available: @@ -515,8 +522,7 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None): for cp in intersecting_cps: version_1 = cp_map_1[cp] version_2 = cp_map_2[cp] - difference = pkgcmp(catpkgsplit(version_1)[1:], - catpkgsplit(version_2)[1:]) + difference = vercmp(version_1.version, version_2.version) if difference != 0: if difference > 0: has_upgrade = True @@ -539,8 +545,12 @@ def dep_zapdeps(unreduced, reduced, myroot, use_binaries=0, trees=None): assert(False) # This point should not be reachable def dep_check(depstring, mydbapi, mysettings, use="yes", mode=None, myuse=None, - use_cache=1, use_binaries=0, myroot="/", trees=None): - """Takes a depend string and parses the condition.""" + use_cache=1, use_binaries=0, myroot=None, trees=None): + """ + Takes a depend string, parses it, and selects atoms. + The myroot parameter is unused (use mysettings['EROOT'] instead). 
+ """ + myroot = mysettings['EROOT'] edebug = mysettings.get("PORTAGE_DEBUG", None) == "1" #check_config_instance(mysettings) if trees is None: diff --git a/portage_with_autodep/pym/portage/dep/dep_check.pyo b/portage_with_autodep/pym/portage/dep/dep_check.pyo Binary files differnew file mode 100644 index 0000000..1b9e03f --- /dev/null +++ b/portage_with_autodep/pym/portage/dep/dep_check.pyo diff --git a/portage_with_autodep/pym/portage/dispatch_conf.py b/portage_with_autodep/pym/portage/dispatch_conf.py index 4991020..4c68dfc 100644 --- a/portage_with_autodep/pym/portage/dispatch_conf.py +++ b/portage_with_autodep/pym/portage/dispatch_conf.py @@ -1,5 +1,5 @@ # archive_conf.py -- functionality common to archive-conf and dispatch-conf -# Copyright 2003-2011 Gentoo Foundation +# Copyright 2003-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 @@ -8,11 +8,12 @@ from __future__ import print_function -import os, sys, shutil +import os, shutil, subprocess, sys import portage from portage.env.loaders import KeyValuePairFileLoader from portage.localization import _ +from portage.util import shlex_split, varexpand RCS_BRANCH = '1.1.1' RCS_LOCK = 'rcs -ko -M -l' @@ -22,24 +23,29 @@ RCS_MERGE = "rcsmerge -p -r" + RCS_BRANCH + " '%s' > '%s'" DIFF3_MERGE = "diff3 -mE '%s' '%s' '%s' > '%s'" -def diffstatusoutput_len(cmd): +def diffstatusoutput(cmd, file1, file2): """ Execute the string cmd in a shell with getstatusoutput() and return a - 2-tuple (status, output_length). If getstatusoutput() raises - UnicodeDecodeError (known to happen with python3.1), return a - 2-tuple (1, 1). This provides a simple way to check for non-zero - output length of diff commands, while providing simple handling of - UnicodeDecodeError when necessary. + 2-tuple (status, output). 
""" - try: - status, output = portage.subprocess_getstatusoutput(cmd) - return (status, len(output)) - except UnicodeDecodeError: - return (1, 1) + # Use Popen to emulate getstatusoutput(), since getstatusoutput() may + # raise a UnicodeDecodeError which makes the output inaccessible. + args = shlex_split(cmd % (file1, file2)) + if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000: + # Python 3.1 does not support bytes in Popen args. + args = [portage._unicode_encode(x, errors='strict') for x in args] + proc = subprocess.Popen(args, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + output = portage._unicode_decode(proc.communicate()[0]) + if output and output[-1] == "\n": + # getstatusoutput strips one newline + output = output[:-1] + return (proc.wait(), output) def read_config(mandatory_opts): - loader = KeyValuePairFileLoader( - '/etc/dispatch-conf.conf', None) + eprefix = portage.const.EPREFIX + config_path = os.path.join(eprefix or os.sep, "etc/dispatch-conf.conf") + loader = KeyValuePairFileLoader(config_path, None) opts, errors = loader.load() if not opts: print(_('dispatch-conf: Error reading /etc/dispatch-conf.conf; fatal'), file=sys.stderr) @@ -58,6 +64,10 @@ def read_config(mandatory_opts): else: print(_('dispatch-conf: Missing option "%s" in /etc/dispatch-conf.conf; fatal') % (key,), file=sys.stderr) + # archive-dir supports ${EPREFIX} expansion, in order to avoid hardcoding + variables = {"EPREFIX": eprefix} + opts['archive-dir'] = varexpand(opts['archive-dir'], mydict=variables) + if not os.path.exists(opts['archive-dir']): os.mkdir(opts['archive-dir']) # Use restrictive permissions by default, in order to protect @@ -132,7 +142,7 @@ def file_archive(archive, curconf, newconf, mrgconf): # Archive the current config file if it isn't already saved if os.path.exists(archive) \ - and diffstatusoutput_len("diff -aq '%s' '%s'" % (curconf,archive))[1] != 0: + and len(diffstatusoutput("diff -aq '%s' '%s'", curconf, archive)[1]) != 0: suf = 1 while 
suf < 9 and os.path.exists(archive + '.' + str(suf)): suf += 1 diff --git a/portage_with_autodep/pym/portage/dispatch_conf.pyo b/portage_with_autodep/pym/portage/dispatch_conf.pyo Binary files differnew file mode 100644 index 0000000..6239859 --- /dev/null +++ b/portage_with_autodep/pym/portage/dispatch_conf.pyo diff --git a/portage_with_autodep/pym/portage/eapi.py b/portage_with_autodep/pym/portage/eapi.py index da5fd8c..79cf891 100644 --- a/portage_with_autodep/pym/portage/eapi.py +++ b/portage_with_autodep/pym/portage/eapi.py @@ -1,4 +1,4 @@ -# Copyright 2010 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 def eapi_has_iuse_defaults(eapi): @@ -34,6 +34,9 @@ def eapi_exports_merge_type(eapi): def eapi_exports_replace_vars(eapi): return eapi not in ("0", "1", "2", "3") +def eapi_exports_REPOSITORY(eapi): + return eapi in ("4-python",) + def eapi_has_pkg_pretend(eapi): return eapi not in ("0", "1", "2", "3") @@ -48,3 +51,12 @@ def eapi_has_required_use(eapi): def eapi_has_use_dep_defaults(eapi): return eapi not in ("0", "1", "2", "3") + +def eapi_has_repo_deps(eapi): + return eapi in ("4-python",) + +def eapi_allows_dots_in_PN(eapi): + return eapi in ("4-python",) + +def eapi_allows_dots_in_use_flags(eapi): + return eapi in ("4-python",) diff --git a/portage_with_autodep/pym/portage/eapi.pyo b/portage_with_autodep/pym/portage/eapi.pyo Binary files differnew file mode 100644 index 0000000..ce67ab1 --- /dev/null +++ b/portage_with_autodep/pym/portage/eapi.pyo diff --git a/portage_with_autodep/pym/portage/eclass_cache.py b/portage_with_autodep/pym/portage/eclass_cache.py index 1374f1d..cb2cf8a 100644 --- a/portage_with_autodep/pym/portage/eclass_cache.py +++ b/portage_with_autodep/pym/portage/eclass_cache.py @@ -6,21 +6,59 @@ __all__ = ["cache"] import stat import sys +import operator from portage.util import normalize_path import errno -from portage.exception import PermissionDenied +from 
portage.exception import FileNotFound, PermissionDenied from portage import os +from portage import checksum if sys.hexversion >= 0x3000000: long = int + +class hashed_path(object): + + def __init__(self, location): + self.location = location + + def __getattr__(self, attr): + if attr == 'mtime': + # use stat.ST_MTIME; accessing .st_mtime gets you a float + # depending on the python version, and long(float) introduces + # some rounding issues that aren't present for people using + # the straight c api. + # thus use the de facto python compatibility workaround; + # access via index, which guarantees you get the raw long. + try: + self.mtime = obj = os.stat(self.location)[stat.ST_MTIME] + except OSError as e: + if e.errno in (errno.ENOENT, errno.ESTALE): + raise FileNotFound(self.location) + elif e.errno == PermissionDenied.errno: + raise PermissionDenied(self.location) + raise + return obj + if not attr.islower(): + # we don't care to allow .mD5 as an alias for .md5 + raise AttributeError(attr) + hashname = attr.upper() + if hashname not in checksum.hashfunc_map: + raise AttributeError(attr) + val = checksum.perform_checksum(self.location, hashname)[0] + setattr(self, attr, val) + return val + + def __repr__(self): + return "<portage.eclass_cache.hashed_path('%s')>" % (self.location,) + + class cache(object): """ Maintains the cache information about eclasses used in ebuild. """ def __init__(self, porttree_root, overlays=[]): - self.eclasses = {} # {"Name": ("location","_mtime_")} + self.eclasses = {} # {"Name": hashed_path} self._eclass_locations = {} # screw with the porttree ordering, w/out having bash inherit match it, and I'll hurt you. 
@@ -80,14 +118,16 @@ class cache(object): for y in eclass_filenames: if not y.endswith(".eclass"): continue + obj = hashed_path(os.path.join(x, y)) + obj.eclass_dir = x try: - mtime = os.stat(os.path.join(x, y))[stat.ST_MTIME] - except OSError: + mtime = obj.mtime + except FileNotFound: continue ys=y[:-eclass_len] if x == self._master_eclass_root: master_eclasses[ys] = mtime - self.eclasses[ys] = (x, mtime) + self.eclasses[ys] = obj self._eclass_locations[ys] = x continue @@ -98,22 +138,30 @@ class cache(object): # so prefer the master entry. continue - self.eclasses[ys] = (x, mtime) + self.eclasses[ys] = obj self._eclass_locations[ys] = x - def is_eclass_data_valid(self, ec_dict): + def validate_and_rewrite_cache(self, ec_dict, chf_type, stores_paths): + """ + This will return an empty dict if the ec_dict parameter happens + to be empty, therefore callers must take care to distinguish + between empty dict and None return values. + """ if not isinstance(ec_dict, dict): - return False - for eclass, tup in ec_dict.items(): - cached_data = self.eclasses.get(eclass, None) - """ Only use the mtime for validation since the probability of a - collision is small and, depending on the cache implementation, the - path may not be specified (cache from rsync mirrors, for example). 
- """ - if cached_data is None or tup[1] != cached_data[1]: - return False - - return True + return None + our_getter = operator.attrgetter(chf_type) + cache_getter = lambda x:x + if stores_paths: + cache_getter = operator.itemgetter(1) + d = {} + for eclass, ec_data in ec_dict.items(): + cached_data = self.eclasses.get(eclass) + if cached_data is None: + return None + if cache_getter(ec_data) != our_getter(cached_data): + return None + d[eclass] = cached_data + return d def get_eclass_data(self, inherits): ec_dict = {} diff --git a/portage_with_autodep/pym/portage/eclass_cache.pyo b/portage_with_autodep/pym/portage/eclass_cache.pyo Binary files differnew file mode 100644 index 0000000..ebe3463 --- /dev/null +++ b/portage_with_autodep/pym/portage/eclass_cache.pyo diff --git a/portage_with_autodep/pym/portage/elog/__init__.py b/portage_with_autodep/pym/portage/elog/__init__.py index 1a8309d..33dac17 100644 --- a/portage_with_autodep/pym/portage/elog/__init__.py +++ b/portage_with_autodep/pym/portage/elog/__init__.py @@ -1,7 +1,11 @@ # elog/__init__.py - elog core functions -# Copyright 2006-2009 Gentoo Foundation +# Copyright 2006-2011 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 +import sys +if sys.hexversion >= 0x3000000: + basestring = str + import portage portage.proxy.lazyimport.lazyimport(globals(), 'portage.util:writemsg', @@ -52,11 +56,15 @@ def _combine_logentries(logentries): for msgtype, msgcontent in logentries[phase]: if previous_type != msgtype: previous_type = msgtype - rValue.append("%s: %s\n" % (msgtype, phase)) - for line in msgcontent: - rValue.append(line) - rValue.append("\n") - return "".join(rValue) + rValue.append("%s: %s" % (msgtype, phase)) + if isinstance(msgcontent, basestring): + rValue.append(msgcontent.rstrip("\n")) + else: + for line in msgcontent: + rValue.append(line.rstrip("\n")) + if rValue: + rValue.append("") + return "\n".join(rValue) _elog_mod_imports = {} def _load_mod(name): diff --git 
a/portage_with_autodep/pym/portage/elog/__init__.pyo b/portage_with_autodep/pym/portage/elog/__init__.pyo Binary files differnew file mode 100644 index 0000000..39dc595 --- /dev/null +++ b/portage_with_autodep/pym/portage/elog/__init__.pyo diff --git a/portage_with_autodep/pym/portage/elog/filtering.pyo b/portage_with_autodep/pym/portage/elog/filtering.pyo Binary files differnew file mode 100644 index 0000000..3a040cc --- /dev/null +++ b/portage_with_autodep/pym/portage/elog/filtering.pyo diff --git a/portage_with_autodep/pym/portage/elog/messages.py b/portage_with_autodep/pym/portage/elog/messages.py index 6c1580a..a4897d8 100644 --- a/portage_with_autodep/pym/portage/elog/messages.py +++ b/portage_with_autodep/pym/portage/elog/messages.py @@ -18,6 +18,14 @@ from portage import _unicode_decode import io import sys +_log_levels = frozenset([ + "ERROR", + "INFO", + "LOG", + "QA", + "WARN", +]) + def collect_ebuild_messages(path): """ Collect elog messages generated by the bash logging function stored at 'path'. @@ -43,16 +51,21 @@ def collect_ebuild_messages(path): logentries[msgfunction] = [] lastmsgtype = None msgcontent = [] - for l in io.open(_unicode_encode(filename, + f = io.open(_unicode_encode(filename, encoding=_encodings['fs'], errors='strict'), - mode='r', encoding=_encodings['repo.content'], errors='replace'): + mode='r', encoding=_encodings['repo.content'], errors='replace') + # Use split('\n') since normal line iteration or readlines() will + # split on \r characters as shown in bug #390833. + for l in f.read().split('\n'): if not l: continue try: msgtype, msg = l.split(" ", 1) + if msgtype not in _log_levels: + raise ValueError(msgtype) except ValueError: writemsg(_("!!! 
malformed entry in " - "log file: '%s'\n") % filename, noiselevel=-1) + "log file: '%s': %s\n") % (filename, l), noiselevel=-1) continue if lastmsgtype is None: @@ -65,6 +78,7 @@ def collect_ebuild_messages(path): logentries[msgfunction].append((lastmsgtype, msgcontent)) msgcontent = [msg] lastmsgtype = msgtype + f.close() if msgcontent: logentries[msgfunction].append((lastmsgtype, msgcontent)) @@ -159,13 +173,17 @@ _functions = { "einfo": ("INFO", "GOOD"), "eerror": ("ERROR", "BAD"), } -def _make_msgfunction(level, color): - def _elog(msg, phase="other", key=None, out=None): - """ Display and log a message assigned to the given key/cpv - (or unassigned if no key is given). +class _make_msgfunction(object): + __slots__ = ('_color', '_level') + def __init__(self, level, color): + self._level = level + self._color = color + def __call__(self, msg, phase="other", key=None, out=None): + """ + Display and log a message assigned to the given key/cpv. """ - _elog_base(level, msg, phase=phase, key=key, color=color, out=out) - return _elog + _elog_base(self._level, msg, phase=phase, + key=key, color=self._color, out=out) for f in _functions: setattr(sys.modules[__name__], f, _make_msgfunction(_functions[f][0], _functions[f][1])) diff --git a/portage_with_autodep/pym/portage/elog/messages.pyo b/portage_with_autodep/pym/portage/elog/messages.pyo Binary files differnew file mode 100644 index 0000000..c033f55 --- /dev/null +++ b/portage_with_autodep/pym/portage/elog/messages.pyo diff --git a/portage_with_autodep/pym/portage/elog/mod_custom.pyo b/portage_with_autodep/pym/portage/elog/mod_custom.pyo Binary files differnew file mode 100644 index 0000000..317fab4 --- /dev/null +++ b/portage_with_autodep/pym/portage/elog/mod_custom.pyo diff --git a/portage_with_autodep/pym/portage/elog/mod_echo.py b/portage_with_autodep/pym/portage/elog/mod_echo.py index 5de25bf..59117be 100644 --- a/portage_with_autodep/pym/portage/elog/mod_echo.py +++ 
b/portage_with_autodep/pym/portage/elog/mod_echo.py @@ -18,6 +18,19 @@ def process(mysettings, key, logentries, fulltext): _items.append((mysettings["ROOT"], key, logentries)) def finalize(): + # For consistency, send all message types to stdout. + sys.stdout.flush() + sys.stderr.flush() + stderr = sys.stderr + try: + sys.stderr = sys.stdout + _finalize() + finally: + sys.stderr = stderr + sys.stdout.flush() + sys.stderr.flush() + +def _finalize(): global _items printer = EOutput() for root, key, logentries in _items: diff --git a/portage_with_autodep/pym/portage/elog/mod_echo.pyo b/portage_with_autodep/pym/portage/elog/mod_echo.pyo Binary files differnew file mode 100644 index 0000000..6a00d4c --- /dev/null +++ b/portage_with_autodep/pym/portage/elog/mod_echo.pyo diff --git a/portage_with_autodep/pym/portage/elog/mod_mail.pyo b/portage_with_autodep/pym/portage/elog/mod_mail.pyo Binary files differnew file mode 100644 index 0000000..5d87aa6 --- /dev/null +++ b/portage_with_autodep/pym/portage/elog/mod_mail.pyo diff --git a/portage_with_autodep/pym/portage/elog/mod_mail_summary.pyo b/portage_with_autodep/pym/portage/elog/mod_mail_summary.pyo Binary files differnew file mode 100644 index 0000000..d7306b5 --- /dev/null +++ b/portage_with_autodep/pym/portage/elog/mod_mail_summary.pyo diff --git a/portage_with_autodep/pym/portage/elog/mod_save.py b/portage_with_autodep/pym/portage/elog/mod_save.py index 9350a6e..c69f4a3 100644 --- a/portage_with_autodep/pym/portage/elog/mod_save.py +++ b/portage_with_autodep/pym/portage/elog/mod_save.py @@ -4,20 +4,22 @@ import io import time +import portage from portage import os from portage import _encodings from portage import _unicode_decode from portage import _unicode_encode from portage.data import portage_gid, portage_uid from portage.package.ebuild.prepare_build_dirs import _ensure_log_subdirs -from portage.util import ensure_dirs, normalize_path +from portage.util import apply_permissions, ensure_dirs, normalize_path def 
process(mysettings, key, logentries, fulltext): if mysettings.get("PORT_LOGDIR"): logdir = normalize_path(mysettings["PORT_LOGDIR"]) else: - logdir = os.path.join(os.sep, "var", "log", "portage") + logdir = os.path.join(os.sep, mysettings["EPREFIX"].lstrip(os.sep), + "var", "log", "portage") if not os.path.isdir(logdir): # Only initialize group/mode if the directory doesn't @@ -25,7 +27,10 @@ def process(mysettings, key, logentries, fulltext): # were previously set by the administrator. # NOTE: These permissions should be compatible with our # default logrotate config as discussed in bug 374287. - ensure_dirs(logdir, uid=portage_uid, gid=portage_gid, mode=0o2770) + uid = -1 + if portage.data.secpass >= 2: + uid = portage_uid + ensure_dirs(logdir, uid=uid, gid=portage_gid, mode=0o2770) cat = mysettings['CATEGORY'] pf = mysettings['PF'] @@ -48,4 +53,21 @@ def process(mysettings, key, logentries, fulltext): elogfile.write(_unicode_decode(fulltext)) elogfile.close() + # Copy group permission bits from parent directory. + elogdir_st = os.stat(log_subdir) + elogdir_gid = elogdir_st.st_gid + elogdir_grp_mode = 0o060 & elogdir_st.st_mode + + # Copy the uid from the parent directory if we have privileges + # to do so, for compatibility with our default logrotate + # config (see bug 378451). With the "su portage portage" + # directive and logrotate-3.8.0, logrotate's chown call during + # the compression phase will only succeed if the log file's uid + # is portage_uid. 
+ logfile_uid = -1 + if portage.data.secpass >= 2: + logfile_uid = elogdir_st.st_uid + apply_permissions(elogfilename, uid=logfile_uid, gid=elogdir_gid, + mode=elogdir_grp_mode, mask=0) + return elogfilename diff --git a/portage_with_autodep/pym/portage/elog/mod_save.pyo b/portage_with_autodep/pym/portage/elog/mod_save.pyo Binary files differnew file mode 100644 index 0000000..fb28b76 --- /dev/null +++ b/portage_with_autodep/pym/portage/elog/mod_save.pyo diff --git a/portage_with_autodep/pym/portage/elog/mod_save_summary.py b/portage_with_autodep/pym/portage/elog/mod_save_summary.py index 4adc6f3..347f66e 100644 --- a/portage_with_autodep/pym/portage/elog/mod_save_summary.py +++ b/portage_with_autodep/pym/portage/elog/mod_save_summary.py @@ -4,6 +4,7 @@ import io import time +import portage from portage import os from portage import _encodings from portage import _unicode_decode @@ -17,7 +18,8 @@ def process(mysettings, key, logentries, fulltext): if mysettings.get("PORT_LOGDIR"): logdir = normalize_path(mysettings["PORT_LOGDIR"]) else: - logdir = os.path.join(os.sep, "var", "log", "portage") + logdir = os.path.join(os.sep, mysettings["EPREFIX"].lstrip(os.sep), + "var", "log", "portage") if not os.path.isdir(logdir): # Only initialize group/mode if the directory doesn't @@ -25,7 +27,10 @@ def process(mysettings, key, logentries, fulltext): # were previously set by the administrator. # NOTE: These permissions should be compatible with our # default logrotate config as discussed in bug 374287. 
- ensure_dirs(logdir, uid=portage_uid, gid=portage_gid, mode=0o2770) + logdir_uid = -1 + if portage.data.secpass >= 2: + logdir_uid = portage_uid + ensure_dirs(logdir, uid=logdir_uid, gid=portage_gid, mode=0o2770) elogdir = os.path.join(logdir, "elog") _ensure_log_subdirs(logdir, elogdir) @@ -40,7 +45,17 @@ def process(mysettings, key, logentries, fulltext): elogdir_st = os.stat(elogdir) elogdir_gid = elogdir_st.st_gid elogdir_grp_mode = 0o060 & elogdir_st.st_mode - apply_permissions(elogfilename, gid=elogdir_gid, + + # Copy the uid from the parent directory if we have privileges + # to do so, for compatibility with our default logrotate + # config (see bug 378451). With the "su portage portage" + # directive and logrotate-3.8.0, logrotate's chown call during + # the compression phase will only succeed if the log file's uid + # is portage_uid. + logfile_uid = -1 + if portage.data.secpass >= 2: + logfile_uid = elogdir_st.st_uid + apply_permissions(elogfilename, uid=logfile_uid, gid=elogdir_gid, mode=elogdir_grp_mode, mask=0) time_str = time.strftime("%Y-%m-%d %H:%M:%S %Z", diff --git a/portage_with_autodep/pym/portage/elog/mod_save_summary.pyo b/portage_with_autodep/pym/portage/elog/mod_save_summary.pyo Binary files differnew file mode 100644 index 0000000..8f99c51 --- /dev/null +++ b/portage_with_autodep/pym/portage/elog/mod_save_summary.pyo diff --git a/portage_with_autodep/pym/portage/elog/mod_syslog.py b/portage_with_autodep/pym/portage/elog/mod_syslog.py index d71dab4..c8bf441 100644 --- a/portage_with_autodep/pym/portage/elog/mod_syslog.py +++ b/portage_with_autodep/pym/portage/elog/mod_syslog.py @@ -1,5 +1,5 @@ # elog/mod_syslog.py - elog dispatch module -# Copyright 2006-2007 Gentoo Foundation +# Copyright 2006-2011 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import sys @@ -7,6 +7,9 @@ import syslog from portage.const import EBUILD_PHASES from portage import _encodings +if sys.hexversion >= 0x3000000: + basestring = 
str + _pri = { "INFO" : syslog.LOG_INFO, "WARN" : syslog.LOG_WARNING, @@ -21,12 +24,13 @@ def process(mysettings, key, logentries, fulltext): if not phase in logentries: continue for msgtype,msgcontent in logentries[phase]: - msgtext = "".join(msgcontent) - for line in msgtext.splitlines(): + if isinstance(msgcontent, basestring): + msgcontent = [msgcontent] + for line in msgcontent: line = "%s: %s: %s" % (key, phase, line) - if sys.hexversion < 0x3000000 and isinstance(msgtext, unicode): + if sys.hexversion < 0x3000000 and not isinstance(line, bytes): # Avoid TypeError from syslog.syslog() line = line.encode(_encodings['content'], 'backslashreplace') - syslog.syslog(_pri[msgtype], line) + syslog.syslog(_pri[msgtype], line.rstrip("\n")) syslog.closelog() diff --git a/portage_with_autodep/pym/portage/elog/mod_syslog.pyo b/portage_with_autodep/pym/portage/elog/mod_syslog.pyo Binary files differnew file mode 100644 index 0000000..c7b4248 --- /dev/null +++ b/portage_with_autodep/pym/portage/elog/mod_syslog.pyo diff --git a/portage_with_autodep/pym/portage/emaint/__init__.py b/portage_with_autodep/pym/portage/emaint/__init__.py new file mode 100644 index 0000000..48bc6e2 --- /dev/null +++ b/portage_with_autodep/pym/portage/emaint/__init__.py @@ -0,0 +1,5 @@ +# Copyright 2005-2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +"""System health checks and maintenance utilities. 
+""" diff --git a/portage_with_autodep/pym/portage/emaint/defaults.py b/portage_with_autodep/pym/portage/emaint/defaults.py new file mode 100644 index 0000000..30f36af --- /dev/null +++ b/portage_with_autodep/pym/portage/emaint/defaults.py @@ -0,0 +1,25 @@ +# Copyright 2005-2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +# parser option data +CHECK = {"short": "-c", "long": "--check", + "help": "Check for problems (a default option for most modules)", + 'status': "Checking %s for problems", + 'action': 'store_true', + 'func': 'check' + } + +FIX = {"short": "-f", "long": "--fix", + "help": "Attempt to fix problems (a default option for most modules)", + 'status': "Attempting to fix %s", + 'action': 'store_true', + 'func': 'fix' + } + +VERSION = {"long": "--version", + "help": "show program's version number and exit", + 'action': 'store_true', + } + +# parser options +DEFAULT_OPTIONS = {'check': CHECK, 'fix': FIX, 'version': VERSION} diff --git a/portage_with_autodep/pym/portage/emaint/main.py b/portage_with_autodep/pym/portage/emaint/main.py new file mode 100644 index 0000000..9f987fa --- /dev/null +++ b/portage_with_autodep/pym/portage/emaint/main.py @@ -0,0 +1,222 @@ +# Copyright 2005-2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +from __future__ import print_function + + +import sys +import textwrap + +import portage +from portage import os +from portage.emaint.module import Modules +from portage.emaint.progress import ProgressBar +from portage.emaint.defaults import DEFAULT_OPTIONS +from portage.util._argparse import ArgumentParser + +class OptionItem(object): + """class to hold module ArgumentParser options data + """ + + def __init__(self, opt): + """ + @type opt: dictionary + @param opt: options parser options + """ + self.short = opt.get('short') + self.long = opt.get('long') + self.help = opt.get('help') + self.status = opt.get('status') + self.func = 
opt.get('func') + self.action = opt.get('action') + self.type = opt.get('type') + self.dest = opt.get('dest') + + @property + def pargs(self): + pargs = [] + if self.short is not None: + pargs.append(self.short) + if self.long is not None: + pargs.append(self.long) + return pargs + + @property + def kwargs(self): + # Support for keyword arguments varies depending on the action, + # so only pass in the keywords that are needed, in order + # to avoid a TypeError. + kwargs = {} + if self.help is not None: + kwargs['help'] = self.help + if self.action is not None: + kwargs['action'] = self.action + if self.type is not None: + kwargs['type'] = self.type + if self.dest is not None: + kwargs['dest'] = self.dest + return kwargs + +def usage(module_controller): + _usage = "usage: emaint [options] COMMAND" + + desc = "The emaint program provides an interface to system health " + \ + "checks and maintenance. See the emaint(1) man page " + \ + "for additional information about the following commands:" + + _usage += "\n\n" + for line in textwrap.wrap(desc, 65): + _usage += "%s\n" % line + _usage += "\nCommands:\n" + _usage += " %s" % "all".ljust(15) + \ + "Perform all supported commands\n" + textwrap.subsequent_indent = ' '.ljust(17) + for mod in module_controller.module_names: + desc = textwrap.wrap(module_controller.get_description(mod), 65) + _usage += " %s%s\n" % (mod.ljust(15), desc[0]) + for d in desc[1:]: + _usage += " %s%s\n" % (' '.ljust(15), d) + return _usage + + +def module_opts(module_controller, module): + _usage = " %s module options:\n" % module + opts = module_controller.get_func_descriptions(module) + if opts == {}: + opts = DEFAULT_OPTIONS + for opt in sorted(opts): + optd = opts[opt] + opto = " %s, %s" %(optd['short'], optd['long']) + _usage += '%s %s\n' % (opto.ljust(15),optd['help']) + _usage += '\n' + return _usage + + +class TaskHandler(object): + """Handles the running of the tasks it is given + """ + + def __init__(self, show_progress_bar=True, 
verbose=True, callback=None): + self.show_progress_bar = show_progress_bar + self.verbose = verbose + self.callback = callback + self.isatty = os.environ.get('TERM') != 'dumb' and sys.stdout.isatty() + self.progress_bar = ProgressBar(self.isatty, title="Emaint", max_desc_length=27) + + + def run_tasks(self, tasks, func, status=None, verbose=True, options=None): + """Runs the module tasks""" + if tasks is None or func is None: + return + for task in tasks: + inst = task() + show_progress = self.show_progress_bar and self.isatty + # check if the function is capable of progressbar + # and possibly override it off + if show_progress and hasattr(inst, 'can_progressbar'): + show_progress = inst.can_progressbar(func) + if show_progress: + self.progress_bar.reset() + self.progress_bar.set_label(func + " " + inst.name()) + onProgress = self.progress_bar.start() + else: + onProgress = None + kwargs = { + 'onProgress': onProgress, + # pass in a copy of the options so a module can not pollute or change + # them for other tasks if there is more to do. + 'options': options.copy() + } + result = getattr(inst, func)(**kwargs) + if show_progress: + # make sure the final progress is displayed + self.progress_bar.display() + print() + self.progress_bar.stop() + if self.callback: + self.callback(result) + + +def print_results(results): + if results: + print() + print("\n".join(results)) + print("\n") + + +def emaint_main(myargv): + + # Similar to emerge, emaint needs a default umask so that created + # files (such as the world file) have sane permissions. 
+ os.umask(0o22) + + module_controller = Modules(namepath="portage.emaint.modules") + module_names = module_controller.module_names[:] + module_names.insert(0, "all") + + + parser = ArgumentParser(usage=usage(module_controller)) + # add default options + parser_options = [] + for opt in DEFAULT_OPTIONS: + parser_options.append(OptionItem(DEFAULT_OPTIONS[opt])) + for mod in module_names[1:]: + desc = module_controller.get_func_descriptions(mod) + if desc: + for opt in desc: + parser_options.append(OptionItem(desc[opt])) + for opt in parser_options: + parser.add_argument(*opt.pargs, **opt.kwargs) + + options, args = parser.parse_known_args(args=myargv) + + if options.version: + print(portage.VERSION) + return os.EX_OK + + if len(args) != 1: + parser.error("Incorrect number of arguments") + if args[0] not in module_names: + parser.error("%s target is not a known target" % args[0]) + + check_opt = None + func = status = long_action = None + for opt in parser_options: + if opt.long == '--check': + # Default action + check_opt = opt + if opt.status and getattr(options, opt.long.lstrip("-"), False): + if long_action is not None: + parser.error("--%s and %s are exclusive options" % + (long_action, opt.long)) + status = opt.status + func = opt.func + long_action = opt.long.lstrip('-') + + if long_action is None: + long_action = 'check' + func = check_opt.func + status = check_opt.status + + if args[0] == "all": + tasks = [] + for m in module_names[1:]: + #print("DEBUG: module: %s, functions: " %(m, str(module_controller.get_functions(m)))) + if long_action in module_controller.get_functions(m): + tasks.append(module_controller.get_class(m)) + elif long_action in module_controller.get_functions(args[0]): + tasks = [module_controller.get_class(args[0] )] + else: + portage.util.writemsg( + "\nERROR: module '%s' does not have option '--%s'\n\n" % + (args[0], long_action), noiselevel=-1) + portage.util.writemsg(module_opts(module_controller, args[0]), + noiselevel=-1) + 
sys.exit(1) + + # need to pass the parser options dict to the modules + # so they are available if needed. + task_opts = options.__dict__ + taskmaster = TaskHandler(callback=print_results) + taskmaster.run_tasks(tasks, func, status, options=task_opts) + diff --git a/portage_with_autodep/pym/portage/emaint/module.py b/portage_with_autodep/pym/portage/emaint/module.py new file mode 100644 index 0000000..64b0c64 --- /dev/null +++ b/portage_with_autodep/pym/portage/emaint/module.py @@ -0,0 +1,194 @@ +# Copyright 2005-2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + + +from __future__ import print_function + +from portage import os +from portage.exception import PortageException +from portage.cache.mappings import ProtectedDict + + +class InvalidModuleName(PortageException): + """An invalid or unknown module name.""" + + +class Module(object): + """Class to define and hold our plug-in module + + @type name: string + @param name: the module name + @type path: the path to the new module + """ + + def __init__(self, name, namepath): + """Some variables initialization""" + self.name = name + self._namepath = namepath + self.kids_names = [] + self.kids = {} + self.initialized = self._initialize() + + def _initialize(self): + """Initialize the plug-in module + + @rtype: boolean + """ + self.valid = False + try: + mod_name = ".".join([self._namepath, self.name]) + self._module = __import__(mod_name, [],[], ["not empty"]) + self.valid = True + except ImportError as e: + print("MODULE; failed import", mod_name, " error was:",e) + return False + self.module_spec = self._module.module_spec + for submodule in self.module_spec['provides']: + kid = self.module_spec['provides'][submodule] + kidname = kid['name'] + kid['module_name'] = '.'.join([mod_name, self.name]) + kid['is_imported'] = False + self.kids[kidname] = kid + self.kids_names.append(kidname) + return True + + def get_class(self, name): + if not name or name not in 
self.kids_names: + raise InvalidModuleName("Module name '%s' was invalid or not" + %name + "part of the module '%s'" %self.name) + kid = self.kids[name] + if kid['is_imported']: + module = kid['instance'] + else: + try: + module = __import__(kid['module_name'], [],[], ["not empty"]) + kid['instance'] = module + kid['is_imported'] = True + except ImportError: + raise + mod_class = getattr(module, kid['class']) + return mod_class + + +class Modules(object): + """Dynamic modules system for loading and retrieving any of the + installed emaint modules and/or provided class's + + @param path: Optional path to the "modules" directory or + defaults to the directory of this file + '/modules' + @param namepath: Optional python import path to the "modules" directory or + defaults to the directory name of this file + '.modules' + """ + + def __init__(self, path=None, namepath=None): + if path: + self._module_path = path + else: + self._module_path = os.path.join(( + os.path.dirname(os.path.realpath(__file__))), "modules") + if namepath: + self._namepath = namepath + else: + self._namepath = '.'.join(os.path.dirname( + os.path.realpath(__file__)), "modules") + self._modules = self._get_all_modules() + self.modules = ProtectedDict(self._modules) + self.module_names = sorted(self._modules) + #self.modules = {} + #for mod in self.module_names: + #self.module[mod] = LazyLoad( + + def _get_all_modules(self): + """scans the emaint modules dir for loadable modules + + @rtype: dictionary of module_plugins + """ + module_dir = self._module_path + importables = [] + names = os.listdir(module_dir) + for entry in names: + # skip any __init__ or __pycache__ files or directories + if entry.startswith('__'): + continue + try: + # test for statinfo to ensure it should a real module + # it will bail if it errors + os.lstat(os.path.join(module_dir, entry, '__init__.py')) + importables.append(entry) + except EnvironmentError: + pass + kids = {} + for entry in importables: + new_module = 
Module(entry, self._namepath) + for module_name in new_module.kids: + kid = new_module.kids[module_name] + kid['parent'] = new_module + kids[kid['name']] = kid + return kids + + def get_module_names(self): + """Convienence function to return the list of installed modules + available + + @rtype: list + @return: the installed module names available + """ + return self.module_names + + def get_class(self, modname): + """Retrieves a module class desired + + @type modname: string + @param modname: the module class name + """ + if modname and modname in self.module_names: + mod = self._modules[modname]['parent'].get_class(modname) + else: + raise InvalidModuleName("Module name '%s' was invalid or not" + %modname + "found") + return mod + + def get_description(self, modname): + """Retrieves the module class decription + + @type modname: string + @param modname: the module class name + @type string + @return: the modules class decription + """ + if modname and modname in self.module_names: + mod = self._modules[modname]['description'] + else: + raise InvalidModuleName("Module name '%s' was invalid or not" + %modname + "found") + return mod + + def get_functions(self, modname): + """Retrieves the module class exported function names + + @type modname: string + @param modname: the module class name + @type list + @return: the modules class exported function names + """ + if modname and modname in self.module_names: + mod = self._modules[modname]['functions'] + else: + raise InvalidModuleName("Module name '%s' was invalid or not" + %modname + "found") + return mod + + def get_func_descriptions(self, modname): + """Retrieves the module class exported functions descriptions + + @type modname: string + @param modname: the module class name + @type dictionary + @return: the modules class exported functions descriptions + """ + if modname and modname in self.module_names: + desc = self._modules[modname]['func_desc'] + else: + raise InvalidModuleName("Module name '%s' was invalid 
class BinhostHandler(object):
	"""emaint module: validate and regenerate the binary package index
	(the ``Packages`` file) for the configured binhost tree.

	``check`` reports index entries that are missing or stale relative to
	the package files on disk; ``fix`` rewrites the index under a lock.
	"""

	short_desc = "Generate a metadata index for binary packages"

	def name():
		return "binhost"
	name = staticmethod(name)

	def __init__(self):
		eroot = portage.settings['EROOT']
		self._bintree = portage.db[eroot]["bintree"]
		self._bintree.populate()
		self._pkgindex_file = self._bintree._pkgindex_file
		self._pkgindex = self._bintree._load_pkgindex()

	def _need_update(self, cpv, data):
		"""Return True if the index entry ``data`` for ``cpv`` is
		incomplete or out of sync with the package file on disk.

		@param cpv: package identifier used to locate the binary package
		@param data: dict of index metadata (expects MD5, SIZE, MTIME)
		@rtype: bool
		"""
		if "MD5" not in data:
			return True

		size = data.get("SIZE")
		if size is None:
			return True

		mtime = data.get("MTIME")
		if mtime is None:
			return True

		pkg_path = self._bintree.getname(cpv)
		try:
			s = os.lstat(pkg_path)
		except OSError as e:
			if e.errno not in (errno.ENOENT, errno.ESTALE):
				raise
			# We can't update the index for this one because
			# it disappeared.
			return False

		try:
			# int() instead of long(): long() relied on a module-level
			# Python 2 compatibility shim and does not exist in Python 3.
			if int(mtime) != s[stat.ST_MTIME]:
				return True
			if int(size) != s.st_size:
				return True
		except ValueError:
			# Corrupt SIZE/MTIME fields force a refresh.
			return True

		return False

	def check(self, **kwargs):
		"""Return a list of error strings for index entries that are
		missing, stale, or refer to packages no longer in the repository.

		@keyword onProgress: optional callable(maxval, curval)
		@rtype: list of strings
		"""
		onProgress = kwargs.get('onProgress', None)
		cpv_all = self._bintree.dbapi.cpv_all()
		cpv_all.sort()
		maxval = len(cpv_all)
		if onProgress:
			onProgress(maxval, 0)
		metadata = {}
		for d in self._pkgindex.packages:
			metadata[d["CPV"]] = d
		missing = []
		for i, cpv in enumerate(cpv_all):
			d = metadata.get(cpv)
			if not d or self._need_update(cpv, d):
				missing.append(cpv)
			if onProgress:
				onProgress(maxval, i+1)
		errors = ["'%s' is not in Packages" % cpv for cpv in missing]
		# Index entries whose package files have vanished.
		stale = set(metadata).difference(cpv_all)
		for cpv in stale:
			errors.append("'%s' is not in the repository" % cpv)
		return errors

	def fix(self, **kwargs):
		"""Regenerate missing/stale index entries and drop stale ones,
		holding the pkgindex lock while rewriting the index file.

		@keyword onProgress: optional callable(maxval, curval)
		@rtype: None
		"""
		onProgress = kwargs.get('onProgress', None)
		bintree = self._bintree
		cpv_all = self._bintree.dbapi.cpv_all()
		cpv_all.sort()
		maxval = 0
		if onProgress:
			onProgress(maxval, 0)
		pkgindex = self._pkgindex
		metadata = {}
		for d in pkgindex.packages:
			metadata[d["CPV"]] = d

		# First pass without the lock, so we avoid taking it when
		# there is nothing to do.
		missing = []
		for i, cpv in enumerate(cpv_all):
			d = metadata.get(cpv)
			if not d or self._need_update(cpv, d):
				missing.append(cpv)

		stale = set(metadata).difference(cpv_all)
		if missing or stale:
			from portage import locks
			pkgindex_lock = locks.lockfile(
				self._pkgindex_file, wantnewlockfile=1)
			try:
				# Repopulate with lock held.
				bintree._populate()
				cpv_all = self._bintree.dbapi.cpv_all()
				cpv_all.sort()

				pkgindex = bintree._load_pkgindex()
				self._pkgindex = pkgindex

				metadata = {}
				for d in pkgindex.packages:
					metadata[d["CPV"]] = d

				# Recount missing packages, with lock held.
				del missing[:]
				for i, cpv in enumerate(cpv_all):
					d = metadata.get(cpv)
					if not d or self._need_update(cpv, d):
						missing.append(cpv)

				maxval = len(missing)
				for i, cpv in enumerate(missing):
					try:
						metadata[cpv] = bintree._pkgindex_entry(cpv)
					except portage.exception.InvalidDependString:
						writemsg("!!! Invalid binary package: '%s'\n" % \
							bintree.getname(cpv), noiselevel=-1)

					if onProgress:
						onProgress(maxval, i+1)

				# Drop entries for packages that are no longer present.
				for cpv in set(metadata).difference(
					self._bintree.dbapi.cpv_all()):
					del metadata[cpv]

				# We've updated the pkgindex, so set it to
				# repopulate when necessary.
				bintree.populated = False

				del pkgindex.packages[:]
				pkgindex.packages.extend(metadata.values())
				bintree._pkgindex_write(self._pkgindex)

			finally:
				locks.unlockfile(pkgindex_lock)

		if onProgress:
			if maxval == 0:
				maxval = 1
			onProgress(maxval, maxval)
		return None
class CleanConfig(object):
	"""emaint module: discard entries for no-longer-installed config
	files from emerge's config tracker list (stored under PRIVATE_PATH).
	"""

	short_desc = "Discard any no longer installed configs from emerge's tracker list"

	def __init__(self):
		self._root = portage.settings["ROOT"]
		self.target = os.path.join(portage.settings["EROOT"], PRIVATE_PATH, 'config')

	def name():
		return "cleanconfmem"
	name = staticmethod(name)

	def load_configlist(self):
		"""Read the tracker file into a dict mapping path -> data."""
		return grabdict(self.target)

	def check(self, **kwargs):
		"""List tracked config files that no longer exist on disk.

		@keyword onProgress: optional callable(maxval, curval)
		@rtype: list of strings
		"""
		onProgress = kwargs.get('onProgress', None)
		configs = self.load_configlist()
		messages = []
		maxval = len(configs)
		if onProgress:
			onProgress(maxval, 0)
		i = 0
		keys = sorted(configs)
		for config in keys:
			if not os.path.exists(config):
				messages.append(" %s" % config)
			if onProgress:
				onProgress(maxval, i+1)
			i += 1
		return self._format_output(messages)

	def fix(self, **kwargs):
		"""Remove stale entries from the tracker file and report them.

		@keyword onProgress: optional callable(maxval, curval)
		@rtype: list of strings
		"""
		onProgress = kwargs.get('onProgress', None)
		configs = self.load_configlist()
		messages = []
		maxval = len(configs)
		if onProgress:
			onProgress(maxval, 0)
		i = 0

		root = self._root
		if root == "/":
			root = None
		modified = False
		for config in sorted(configs):
			if root is None:
				full_path = config
			else:
				# Tracked paths are relative to ROOT when ROOT != "/".
				full_path = os.path.join(root, config.lstrip(os.sep))
			if not os.path.exists(full_path):
				modified = True
				configs.pop(config)
				messages.append(" %s" % config)
			if onProgress:
				onProgress(maxval, i+1)
			i += 1
		if modified:
			writedict(configs, self.target)
		return self._format_output(messages, True)

	def _format_output(self, messages=None, cleaned=False):
		"""Format the result list for display.

		@param messages: list of per-file message lines (None -> empty).
			BUG FIX: the default was a shared mutable list ([]), which is
			the classic mutable-default-argument pitfall; use None.
		@param cleaned: append a "...Cleaned" marker to the total line
		@rtype: list of strings
		"""
		if messages is None:
			messages = []
		output = []
		if messages:
			output.append('Not Installed:')
			output += messages
			tot = '------------------------------------\n Total %i Not installed'
			if cleaned:
				tot += ' ...Cleaned'
			output.append(tot % len(messages))
		return output
+""" + + +module_spec = { + 'name': 'logs', + 'description': __doc__, + 'provides':{ + 'module1': { + 'name': "logs", + 'class': "CleanLogs", + 'description': __doc__, + 'functions': ['check','clean'], + 'func_desc': { + 'clean': { + "short": "-C", "long": "--clean", + "help": "Cleans out logs more than 7 days old (cleanlogs only)" + \ + " module-options: -t, -p", + 'status': "Cleaning %s", + 'action': 'store_true', + 'func': 'clean', + }, + 'time': { + "short": "-t", "long": "--time", + "help": "(cleanlogs only): -t, --time Delete logs older than NUM of days", + 'status': "", + 'type': int, + 'dest': 'NUM', + 'func': 'clean' + }, + 'pretend': { + "short": "-p", "long": "--pretend", + "help": "(cleanlogs only): -p, --pretend Output logs that would be deleted", + 'status': "", + 'action': 'store_true', + 'dest': 'pretend', + 'func': 'clean' + } + } + } + } + } diff --git a/portage_with_autodep/pym/portage/emaint/modules/logs/logs.py b/portage_with_autodep/pym/portage/emaint/modules/logs/logs.py new file mode 100644 index 0000000..fe65cf5 --- /dev/null +++ b/portage_with_autodep/pym/portage/emaint/modules/logs/logs.py @@ -0,0 +1,103 @@ +# Copyright 2005-2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import portage +from portage import os +from portage.util import shlex_split, varexpand + +## default clean command from make.globals +## PORT_LOGDIR_CLEAN = 'find "${PORT_LOGDIR}" -type f ! 
-name "summary.log*" -mtime +7 -delete' + +class CleanLogs(object): + + short_desc = "Clean PORT_LOGDIR logs" + + def name(): + return "logs" + name = staticmethod(name) + + + def can_progressbar(self, func): + return False + + + def check(self, **kwargs): + if kwargs: + options = kwargs.get('options', None) + if options: + options['pretend'] = True + return self.clean(**kwargs) + + + def clean(self, **kwargs): + """Log directory cleaning function + + @param **kwargs: optional dictionary of values used in this function are: + settings: portage settings instance: defaults to portage.settings + "PORT_LOGDIR": directory to clean + "PORT_LOGDIR_CLEAN": command for cleaning the logs. + options: dict: + 'NUM': int: number of days + 'pretend': boolean + """ + messages = [] + num_of_days = None + pretend = False + if kwargs: + # convuluted, I know, but portage.settings does not exist in + # kwargs.get() when called from _emerge.main.clean_logs() + settings = kwargs.get('settings', None) + if not settings: + settings = portage.settings + options = kwargs.get('options', None) + if options: + num_of_days = options.get('NUM', None) + pretend = options.get('pretend', False) + + clean_cmd = settings.get("PORT_LOGDIR_CLEAN") + if clean_cmd: + clean_cmd = shlex_split(clean_cmd) + if '-mtime' in clean_cmd and num_of_days is not None: + if num_of_days == 0: + i = clean_cmd.index('-mtime') + clean_cmd.remove('-mtime') + clean_cmd.pop(i) + else: + clean_cmd[clean_cmd.index('-mtime') +1] = \ + '+%s' % str(num_of_days) + if pretend: + if "-delete" in clean_cmd: + clean_cmd.remove("-delete") + + if not clean_cmd: + return [] + rval = self._clean_logs(clean_cmd, settings) + messages += self._convert_errors(rval) + return messages + + + @staticmethod + def _clean_logs(clean_cmd, settings): + logdir = settings.get("PORT_LOGDIR") + if logdir is None or not os.path.isdir(logdir): + return + + variables = {"PORT_LOGDIR" : logdir} + cmd = [varexpand(x, mydict=variables) for x in clean_cmd] + + 
try: + rval = portage.process.spawn(cmd, env=os.environ) + except portage.exception.CommandNotFound: + rval = 127 + return rval + + + @staticmethod + def _convert_errors(rval): + msg = [] + if rval != os.EX_OK: + msg.append("PORT_LOGDIR_CLEAN command returned %s" + % ("%d" % rval if rval else "None")) + msg.append("See the make.conf(5) man page for " + "PORT_LOGDIR_CLEAN usage instructions.") + return msg diff --git a/portage_with_autodep/pym/portage/emaint/modules/move/__init__.py b/portage_with_autodep/pym/portage/emaint/modules/move/__init__.py new file mode 100644 index 0000000..d31d7b3 --- /dev/null +++ b/portage_with_autodep/pym/portage/emaint/modules/move/__init__.py @@ -0,0 +1,30 @@ +# Copyright 2005-2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +"""Perform package move updates for installed and binary packages. +""" + + +module_spec = { + 'name': 'move', + 'description': __doc__, + 'provides':{ + 'module1': { + 'name': "moveinst", + 'class': "MoveInstalled", + 'description': __doc__, + 'options': ['check', 'fix'], + 'functions': ['check', 'fix'], + 'func_desc': { + } + }, + 'module2':{ + 'name': "movebin", + 'class': "MoveBinary", + 'description': "Perform package move updates for binary packages", + 'functions': ['check', 'fix'], + 'func_desc': { + } + } + } + } diff --git a/portage_with_autodep/pym/portage/emaint/modules/move/move.py b/portage_with_autodep/pym/portage/emaint/modules/move/move.py new file mode 100644 index 0000000..ef674d4 --- /dev/null +++ b/portage_with_autodep/pym/portage/emaint/modules/move/move.py @@ -0,0 +1,180 @@ +# Copyright 2005-2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import portage +from portage import os +from portage.exception import InvalidData +from _emerge.Package import Package +from portage.versions import _pkg_str + +class MoveHandler(object): + + def __init__(self, tree, porttree): + self._tree = tree + self._portdb = 
class MoveHandler(object):
	"""Shared implementation for the emaint 'move' modules: applies
	profiles/updates package-move and slot-move entries to a package
	tree (vartree for installed, bintree for binary packages)."""

	def __init__(self, tree, porttree):
		self._tree = tree
		self._portdb = porttree.dbapi
		self._update_keys = Package._dep_keys + ("PROVIDE",)
		self._master_repo = \
			self._portdb.getRepositoryName(self._portdb.porttree_root)

	def _grab_global_updates(self):
		"""Collect update commands from every repository's
		profiles/updates directory.

		@rtype: tuple
		@return: ({repo_name: [update_command, ...]}, [error, ...])
		"""
		from portage.update import grab_updates, parse_updates
		retupdates = {}
		errors = []

		for repo_name in self._portdb.getRepositories():
			repo = self._portdb.getRepositoryPath(repo_name)
			updpath = os.path.join(repo, "profiles", "updates")
			if not os.path.isdir(updpath):
				continue

			try:
				rawupdates = grab_updates(updpath)
			except portage.exception.DirectoryNotFound:
				rawupdates = []
			upd_commands = []
			for mykey, mystat, mycontent in rawupdates:
				# BUG FIX: this used to rebind the outer ``errors`` list
				# (commands, errors = parse_updates(...)) and then extend
				# it with itself, losing previously accumulated errors
				# and duplicating the current file's errors.
				commands, parse_errors = parse_updates(mycontent)
				upd_commands.extend(commands)
				errors.extend(parse_errors)
			retupdates[repo_name] = upd_commands

		if self._master_repo in retupdates:
			retupdates['DEFAULT'] = retupdates[self._master_repo]

		return retupdates, errors

	def check(self, **kwargs):
		"""Report packages and metadata that still need move/slotmove
		updates applied, without changing anything.

		@keyword onProgress: optional callable(maxval, curval)
		@rtype: list of strings
		"""
		onProgress = kwargs.get('onProgress', None)
		allupdates, errors = self._grab_global_updates()
		# Matching packages and moving them is relatively fast, so the
		# progress bar is updated in indeterminate mode.
		match = self._tree.dbapi.match
		aux_get = self._tree.dbapi.aux_get
		pkg_str = self._tree.dbapi._pkg_str
		settings = self._tree.dbapi.settings
		if onProgress:
			onProgress(0, 0)
		for repo, updates in allupdates.items():
			if repo == 'DEFAULT':
				continue
			if not updates:
				continue

			def repo_match(repository):
				# A package matches either its own repo's updates, or the
				# master repo's updates when its repo has none of its own.
				return repository == repo or \
					(repo == self._master_repo and \
					repository not in allupdates)

			for i, update_cmd in enumerate(updates):
				if update_cmd[0] == "move":
					origcp, newcp = update_cmd[1:]
					for cpv in match(origcp):
						try:
							cpv = pkg_str(cpv, origcp.repo)
						except (KeyError, InvalidData):
							continue
						if repo_match(cpv.repo):
							errors.append("'%s' moved to '%s'" % (cpv, newcp))
				elif update_cmd[0] == "slotmove":
					pkg, origslot, newslot = update_cmd[1:]
					atom = pkg.with_slot(origslot)
					for cpv in match(atom):
						try:
							cpv = pkg_str(cpv, atom.repo)
						except (KeyError, InvalidData):
							continue
						if repo_match(cpv.repo):
							errors.append("'%s' slot moved from '%s' to '%s'" % \
								(cpv, origslot, newslot))
				if onProgress:
					onProgress(0, 0)

		# Searching for updates in all the metadata is relatively slow, so this
		# is where the progress bar comes out of indeterminate mode.
		cpv_all = self._tree.dbapi.cpv_all()
		cpv_all.sort()
		maxval = len(cpv_all)
		meta_keys = self._update_keys + self._portdb._pkg_str_aux_keys
		if onProgress:
			onProgress(maxval, 0)
		for i, cpv in enumerate(cpv_all):
			try:
				metadata = dict(zip(meta_keys, aux_get(cpv, meta_keys)))
			except KeyError:
				continue
			try:
				pkg = _pkg_str(cpv, metadata=metadata, settings=settings)
			except InvalidData:
				continue
			metadata = dict((k, metadata[k]) for k in self._update_keys)
			try:
				updates = allupdates[pkg.repo]
			except KeyError:
				try:
					updates = allupdates['DEFAULT']
				except KeyError:
					continue
			if not updates:
				continue
			metadata_updates = \
				portage.update_dbentries(updates, metadata, parent=pkg)
			if metadata_updates:
				errors.append("'%s' has outdated metadata" % cpv)
			if onProgress:
				onProgress(maxval, i+1)
		return errors

	def fix(self, **kwargs):
		"""Apply all pending move/slotmove updates to the tree.

		@keyword onProgress: optional callable(maxval, curval)
		@rtype: list of strings (parse errors from the update files)
		"""
		onProgress = kwargs.get('onProgress', None)
		allupdates, errors = self._grab_global_updates()
		# Matching packages and moving them is relatively fast, so the
		# progress bar is updated in indeterminate mode.
		move = self._tree.dbapi.move_ent
		slotmove = self._tree.dbapi.move_slot_ent
		if onProgress:
			onProgress(0, 0)
		for repo, updates in allupdates.items():
			if repo == 'DEFAULT':
				continue
			if not updates:
				continue

			def repo_match(repository):
				return repository == repo or \
					(repo == self._master_repo and \
					repository not in allupdates)

			for i, update_cmd in enumerate(updates):
				if update_cmd[0] == "move":
					move(update_cmd, repo_match=repo_match)
				elif update_cmd[0] == "slotmove":
					slotmove(update_cmd, repo_match=repo_match)
				if onProgress:
					onProgress(0, 0)

		# Searching for updates in all the metadata is relatively slow, so this
		# is where the progress bar comes out of indeterminate mode.
		self._tree.dbapi.update_ents(allupdates, onProgress=onProgress)
		return errors

class MoveInstalled(MoveHandler):
	"""emaint module: apply package moves to installed packages."""

	short_desc = "Perform package move updates for installed packages"

	def name():
		return "moveinst"
	name = staticmethod(name)

	def __init__(self):
		eroot = portage.settings['EROOT']
		MoveHandler.__init__(self, portage.db[eroot]["vartree"], portage.db[eroot]["porttree"])

class MoveBinary(MoveHandler):
	"""emaint module: apply package moves to binary packages."""

	short_desc = "Perform package move updates for binary packages"

	def name():
		return "movebin"
	name = staticmethod(name)

	def __init__(self):
		eroot = portage.settings['EROOT']
		MoveHandler.__init__(self, portage.db[eroot]["bintree"], portage.db[eroot]['porttree'])
class CleanResume(object):
	"""emaint module that discards left-over ``emerge --resume`` merge
	lists from the mtimedb."""

	short_desc = "Discard emerge --resume merge lists"

	def name():
		return "cleanresume"
	name = staticmethod(name)

	def check(self, **kwargs):
		"""Describe the state of each resume list without changing it.

		@keyword onProgress: optional callable(maxval, curval)
		@rtype: list of strings
		"""
		onProgress = kwargs.get('onProgress', None)
		messages = []
		mtimedb = portage.mtimedb
		resume_keys = ("resume", "resume_backup")
		maxval = len(resume_keys)
		if onProgress:
			onProgress(maxval, 0)
		for i, k in enumerate(resume_keys):
			try:
				entry = mtimedb.get(k)
				if entry is None:
					continue
				if not isinstance(entry, dict):
					messages.append("unrecognized resume list: '%s'" % k)
					continue
				mergelist = entry.get("mergelist")
				if mergelist is None or not hasattr(mergelist, "__len__"):
					messages.append("unrecognized resume list: '%s'" % k)
					continue
				messages.append("resume list '%s' contains %d packages" % \
					(k, len(mergelist)))
			finally:
				# Advance the progress bar even when we bail out early.
				if onProgress:
					onProgress(maxval, i+1)
		return messages

	def fix(self, **kwargs):
		"""Drop all resume lists, committing the mtimedb if anything
		was actually removed.

		@keyword onProgress: optional callable(maxval, curval)
		"""
		onProgress = kwargs.get('onProgress', None)
		dropped = 0
		mtimedb = portage.mtimedb
		resume_keys = ("resume", "resume_backup")
		maxval = len(resume_keys)
		if onProgress:
			onProgress(maxval, 0)
		for i, k in enumerate(resume_keys):
			try:
				if mtimedb.pop(k, None) is not None:
					dropped += 1
			finally:
				if onProgress:
					onProgress(maxval, i+1)
		if dropped:
			mtimedb.commit()
class WorldHandler(object):
	"""emaint module that validates the world file: flags invalid atoms
	and entries that are not installed, and can rewrite the file to
	contain only the good entries."""

	short_desc = "Fix problems in the world file"

	def name():
		return "world"
	name = staticmethod(name)

	def __init__(self):
		self.invalid = []
		self.not_installed = []
		self.okay = []
		from portage._sets import load_default_config
		setconfig = load_default_config(portage.settings,
			portage.db[portage.settings['EROOT']])
		self._sets = setconfig.getSets()

	def _check_world(self, onProgress):
		"""Classify every entry of the "selected" set into self.okay,
		self.not_installed or self.invalid."""
		eroot = portage.settings['EROOT']
		self.world_file = os.path.join(eroot, portage.const.WORLD_FILE)
		self.found = os.access(self.world_file, os.R_OK)
		vardb = portage.db[eroot]["vartree"].dbapi

		from portage._sets import SETPREFIX
		sets = self._sets
		world_atoms = list(sets["selected"])
		maxval = len(world_atoms)
		if onProgress:
			onProgress(maxval, 0)
		for i, atom in enumerate(world_atoms):
			if not isinstance(atom, portage.dep.Atom):
				# Non-atom entries are either set references (@name)
				# or invalid lines.
				if atom.startswith(SETPREFIX):
					if atom[len(SETPREFIX):] in sets:
						self.okay.append(atom)
					else:
						self.not_installed.append(atom)
				else:
					self.invalid.append(atom)
			elif not vardb.match(atom):
				self.not_installed.append(atom)
			else:
				self.okay.append(atom)
			if onProgress:
				onProgress(maxval, i+1)

	def check(self, **kwargs):
		"""Return a list of problem descriptions (empty if all is well).

		@keyword onProgress: optional callable(maxval, curval)
		"""
		onProgress = kwargs.get('onProgress', None)
		self._check_world(onProgress)
		errors = []
		if self.found:
			errors += ["'%s' is not a valid atom" % x for x in self.invalid]
			errors += ["'%s' is not installed" % x for x in self.not_installed]
		else:
			errors.append(self.world_file + " could not be opened for reading")
		return errors

	def fix(self, **kwargs):
		"""Rewrite the world file so it contains only the valid,
		installed entries found by _check_world().

		@keyword onProgress: optional callable(maxval, curval)
		"""
		onProgress = kwargs.get('onProgress', None)
		world_set = self._sets["selected"]
		world_set.lock()
		try:
			world_set.load()  # maybe it's changed on disk
			before = set(world_set)
			self._check_world(onProgress)
			after = set(self.okay)
			errors = []
			if before != after:
				try:
					world_set.replace(self.okay)
				except portage.exception.PortageException:
					errors.append("%s could not be opened for writing" % \
						self.world_file)
			return errors
		finally:
			world_set.unlock()
class ProgressHandler(object):
	"""Throttled progress callback: records (maxval, curval) and calls
	display() at most once every ``min_display_latency`` seconds."""

	def __init__(self):
		self.reset()

	def reset(self):
		# current / total progress counters
		self.curval = 0
		self.maxval = 0
		# timestamp of the last display() refresh
		self.last_update = 0
		# minimum number of seconds between refreshes
		self.min_display_latency = 0.2

	def onProgress(self, maxval, curval):
		self.maxval = maxval
		self.curval = curval
		now = time.time()
		if now - self.last_update >= self.min_display_latency:
			self.last_update = now
			self.display()

	def display(self):
		# Subclasses decide how to render the progress state.
		raise NotImplementedError(self)


class ProgressBar(ProgressHandler):
	"""Terminal progress bar built on portage.output.TermProgressBar."""

	def __init__(self, isatty, **kwargs):
		self.isatty = isatty
		self.kwargs = kwargs
		ProgressHandler.__init__(self)
		self.progressBar = None

	def start(self):
		"""Create the bar when attached to a tty; otherwise disable
		progress reporting entirely. Returns the callback (or None)."""
		if self.isatty:
			self.progressBar = portage.output.TermProgressBar(**self.kwargs)
			signal.signal(signal.SIGWINCH, self.sigwinch_handler)
		else:
			self.onProgress = None
		return self.onProgress

	def set_label(self, _label):
		self.kwargs['label'] = _label

	def display(self):
		self.progressBar.set(self.curval, self.maxval)

	def sigwinch_handler(self, signum, frame):
		# Track terminal resizes so the bar keeps the right width.
		lines, self.progressBar.term_columns = \
			portage.output.get_term_size()

	def stop(self):
		signal.signal(signal.SIGWINCH, signal.SIG_DFL)
class OperationNotSupported(PortageException):
	"""Operation not supported"""
	# BUG FIX: the docstring must be the first statement in the class
	# body to become __doc__; previously it followed the errno import
	# and was a dead string expression (matching a pre-existing quirk
	# of the sibling exception classes in this file).
	from errno import EOPNOTSUPP as errno
colorize @@ -8,7 +8,10 @@ from portage.localization import _ import portage from portage import os from portage import _encodings +from portage import _unicode_decode from portage import _unicode_encode +from portage.package.ebuild.fetch import _hide_url_passwd +from _emerge.Package import _all_metadata_keys import sys import socket @@ -65,8 +68,15 @@ def make_metadata_dict(data): myid,myglob = data mydict = {} - for x in portage.xpak.getindex_mem(myid): - mydict[x] = portage.xpak.getitem(data,x) + for k_bytes in portage.xpak.getindex_mem(myid): + k = _unicode_decode(k_bytes, + encoding=_encodings['repo.content'], errors='replace') + if k not in _all_metadata_keys and \ + k != "CATEGORY": + continue + v = _unicode_decode(portage.xpak.getitem(data, k_bytes), + encoding=_encodings['repo.content'], errors='replace') + mydict[k] = v return mydict @@ -149,11 +159,16 @@ def create_conn(baseurl,conn=None): http_headers = {} http_params = {} if username and password: + try: + encodebytes = base64.encodebytes + except AttributeError: + # Python 2 + encodebytes = base64.encodestring http_headers = { - "Authorization": "Basic %s" % - base64.encodestring("%s:%s" % (username, password)).replace( - "\012", - "" + b"Authorization": "Basic %s" % \ + encodebytes(_unicode_encode("%s:%s" % (username, password))).replace( + b"\012", + b"" ), } @@ -354,7 +369,7 @@ def dir_get_list(baseurl,conn=None): if page: parser = ParseLinks() - parser.feed(page) + parser.feed(_unicode_decode(page)) del page listing = parser.get_anchors() else: @@ -542,7 +557,7 @@ def dir_get_metadata(baseurl, conn=None, chunk_size=3000, verbose=1, usingcache= out.write(_("Loaded metadata pickle.\n")) out.flush() metadatafile.close() - except (IOError, OSError, EOFError, ValueError, pickle.UnpicklingError): + except (AttributeError, EOFError, EnvironmentError, ValueError, pickle.UnpicklingError): metadata = {} if baseurl not in metadata: metadata[baseurl]={} @@ -564,7 +579,8 @@ def dir_get_metadata(baseurl, 
conn=None, chunk_size=3000, verbose=1, usingcache= try: filelist = dir_get_list(baseurl, conn) except portage.exception.PortageException as e: - sys.stderr.write(_("!!! Error connecting to '%s'.\n") % baseurl) + sys.stderr.write(_("!!! Error connecting to '%s'.\n") % + _hide_url_passwd(baseurl)) sys.stderr.write("!!! %s\n" % str(e)) del e return metadata[baseurl]["data"] diff --git a/portage_with_autodep/pym/portage/getbinpkg.pyo b/portage_with_autodep/pym/portage/getbinpkg.pyo Binary files differnew file mode 100644 index 0000000..53ec2e9 --- /dev/null +++ b/portage_with_autodep/pym/portage/getbinpkg.pyo diff --git a/portage_with_autodep/pym/portage/glsa.py b/portage_with_autodep/pym/portage/glsa.py index a784d14..1857695 100644 --- a/portage_with_autodep/pym/portage/glsa.py +++ b/portage_with_autodep/pym/portage/glsa.py @@ -1,4 +1,4 @@ -# Copyright 2003-2011 Gentoo Foundation +# Copyright 2003-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 from __future__ import absolute_import @@ -17,7 +17,7 @@ from portage import os from portage import _encodings from portage import _unicode_decode from portage import _unicode_encode -from portage.versions import pkgsplit, catpkgsplit, pkgcmp, best +from portage.versions import pkgsplit, vercmp, best from portage.util import grabfile from portage.const import CACHE_PATH from portage.localization import _ @@ -372,17 +372,14 @@ def getMinUpgrade(vulnerableList, unaffectedList, portdbapi, vardbapi, minimize= for u in unaffectedList: mylist = match(u, portdbapi, match_type="match-all") for c in mylist: - c_pv = catpkgsplit(c) - i_pv = catpkgsplit(best(v_installed)) - if pkgcmp(c_pv[1:], i_pv[1:]) > 0 \ + i = best(v_installed) + if vercmp(c.version, i.version) > 0 \ and (rValue == None \ or not match("="+rValue, portdbapi) \ - or (minimize ^ (pkgcmp(c_pv[1:], catpkgsplit(rValue)[1:]) > 0)) \ + or (minimize ^ (vercmp(c.version, rValue.version) > 0)) \ and match("="+c, portdbapi)) \ and 
portdbapi.aux_get(c, ["SLOT"]) == vardbapi.aux_get(best(v_installed), ["SLOT"]): - rValue = c_pv[0]+"/"+c_pv[1]+"-"+c_pv[2] - if c_pv[3] != "r0": # we don't like -r0 for display - rValue += "-"+c_pv[3] + rValue = c return rValue def format_date(datestr): @@ -488,7 +485,7 @@ class Glsa: @type myfile: String @param myfile: Filename to grab the XML data from @rtype: None - @returns: None + @return: None """ self.DOM = xml.dom.minidom.parse(myfile) if not self.DOM.doctype: @@ -634,7 +631,7 @@ class Glsa: architectures. @rtype: Boolean - @returns: True if the system is affected, False if not + @return: True if the system is affected, False if not """ rValue = False for k in self.packages: @@ -654,7 +651,7 @@ class Glsa: GLSA was already applied. @rtype: Boolean - @returns: True if the GLSA was applied, False if not + @return: True if the GLSA was applied, False if not """ return (self.nr in get_applied_glsas(self.config)) @@ -665,7 +662,7 @@ class Glsa: applied or on explicit user request. @rtype: None - @returns: None + @return: None """ if not self.isApplied(): checkfile = io.open( diff --git a/portage_with_autodep/pym/portage/glsa.pyo b/portage_with_autodep/pym/portage/glsa.pyo Binary files differnew file mode 100644 index 0000000..65162f1 --- /dev/null +++ b/portage_with_autodep/pym/portage/glsa.pyo diff --git a/portage_with_autodep/pym/portage/localization.pyo b/portage_with_autodep/pym/portage/localization.pyo Binary files differnew file mode 100644 index 0000000..e992e3a --- /dev/null +++ b/portage_with_autodep/pym/portage/localization.pyo diff --git a/portage_with_autodep/pym/portage/locks.py b/portage_with_autodep/pym/portage/locks.py index 9ed1d6a..59fbc6e 100644 --- a/portage_with_autodep/pym/portage/locks.py +++ b/portage_with_autodep/pym/portage/locks.py @@ -1,5 +1,5 @@ # portage: Lock management code -# Copyright 2004-2010 Gentoo Foundation +# Copyright 2004-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = 
["lockdir", "unlockdir", "lockfile", "unlockfile", \ @@ -8,13 +8,13 @@ __all__ = ["lockdir", "unlockdir", "lockfile", "unlockfile", \ import errno import fcntl -import stat +import platform import sys import time +import warnings import portage -from portage import os -from portage.const import PORTAGE_BIN_PATH +from portage import os, _encodings, _unicode_decode from portage.exception import DirectoryNotFound, FileNotFound, \ InvalidData, TryAgain, OperationNotPermitted, PermissionDenied from portage.data import portage_gid @@ -25,12 +25,30 @@ if sys.hexversion >= 0x3000000: basestring = str HARDLINK_FD = -2 +_HARDLINK_POLL_LATENCY = 3 # seconds _default_lock_fn = fcntl.lockf +if platform.python_implementation() == 'PyPy': + # workaround for https://bugs.pypy.org/issue747 + _default_lock_fn = fcntl.flock + # Used by emerge in order to disable the "waiting for lock" message # so that it doesn't interfere with the status display. _quiet = False + +_open_fds = set() + +def _close_fds(): + """ + This is intended to be called after a fork, in order to close file + descriptors for locks held by the parent process. This can be called + safely after a fork without exec, unlike the _setup_pipes close_fds + behavior. + """ + while _open_fds: + os.close(_open_fds.pop()) + def lockdir(mydir, flags=0): return lockfile(mydir, wantnewlockfile=1, flags=flags) def unlockdir(mylock): @@ -46,19 +64,31 @@ def lockfile(mypath, wantnewlockfile=0, unlinkfile=0, if not mypath: raise InvalidData(_("Empty path given")) + # Support for file object or integer file descriptor parameters is + # deprecated due to ambiguity in whether or not it's safe to close + # the file descriptor, making it prone to "Bad file descriptor" errors + # or file descriptor leaks. if isinstance(mypath, basestring) and mypath[-1] == '/': mypath = mypath[:-1] + lockfilename_path = mypath if hasattr(mypath, 'fileno'): + warnings.warn("portage.locks.lockfile() support for " + "file object parameters is deprecated. 
Use a file path instead.", + DeprecationWarning, stacklevel=2) + lockfilename_path = getattr(mypath, 'name', None) mypath = mypath.fileno() if isinstance(mypath, int): + warnings.warn("portage.locks.lockfile() support for integer file " + "descriptor parameters is deprecated. Use a file path instead.", + DeprecationWarning, stacklevel=2) lockfilename = mypath wantnewlockfile = 0 unlinkfile = 0 elif wantnewlockfile: base, tail = os.path.split(mypath) lockfilename = os.path.join(base, "." + tail + ".portage_lockfile") - del base, tail + lockfilename_path = lockfilename unlinkfile = 1 else: lockfilename = mypath @@ -112,6 +142,8 @@ def lockfile(mypath, wantnewlockfile=0, unlinkfile=0, # we're waiting on lockfile and use a blocking attempt. locking_method = _default_lock_fn try: + if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ: + raise IOError(errno.ENOSYS, "Function not implemented") locking_method(myfd, fcntl.LOCK_EX|fcntl.LOCK_NB) except IOError as e: if not hasattr(e, "errno"): @@ -143,20 +175,22 @@ def lockfile(mypath, wantnewlockfile=0, unlinkfile=0, raise if out is not None: out.eend(os.EX_OK) - elif e.errno == errno.ENOLCK: + elif e.errno in (errno.ENOSYS, errno.ENOLCK): # We're not allowed to lock on this FS. - os.close(myfd) - link_success = False - if lockfilename == str(lockfilename): - if wantnewlockfile: - try: - if os.stat(lockfilename)[stat.ST_NLINK] == 1: - os.unlink(lockfilename) - except OSError: - pass - link_success = hardlink_lockfile(lockfilename) + if not isinstance(lockfilename, int): + # If a file object was passed in, it's not safe + # to close the file descriptor because it may + # still be in use. 
+ os.close(myfd) + lockfilename_path = _unicode_decode(lockfilename_path, + encoding=_encodings['fs'], errors='strict') + if not isinstance(lockfilename_path, basestring): + raise + link_success = hardlink_lockfile(lockfilename_path, + waiting_msg=waiting_msg, flags=flags) if not link_success: raise + lockfilename = lockfilename_path locking_method = None myfd = HARDLINK_FD else: @@ -172,6 +206,9 @@ def lockfile(mypath, wantnewlockfile=0, unlinkfile=0, mypath, wantnewlockfile=wantnewlockfile, unlinkfile=unlinkfile, waiting_msg=waiting_msg, flags=flags) + if myfd != HARDLINK_FD: + _open_fds.add(myfd) + writemsg(str((lockfilename,myfd,unlinkfile))+"\n",1) return (lockfilename,myfd,unlinkfile,locking_method) @@ -203,7 +240,7 @@ def unlockfile(mytuple): raise InvalidData if(myfd == HARDLINK_FD): - unhardlink_lockfile(lockfilename) + unhardlink_lockfile(lockfilename, unlinkfile=unlinkfile) return True # myfd may be None here due to myfd = mypath in lockfile() @@ -212,6 +249,7 @@ def unlockfile(mytuple): writemsg(_("lockfile does not exist '%s'\n") % lockfilename,1) if myfd is not None: os.close(myfd) + _open_fds.remove(myfd) return False try: @@ -222,6 +260,7 @@ def unlockfile(mytuple): except OSError: if isinstance(lockfilename, basestring): os.close(myfd) + _open_fds.remove(myfd) raise IOError(_("Failed to unlock file '%s'\n") % lockfilename) try: @@ -243,6 +282,7 @@ def unlockfile(mytuple): else: writemsg(_("lockfile does not exist '%s'\n") % lockfilename, 1) os.close(myfd) + _open_fds.remove(myfd) return False except SystemExit: raise @@ -255,6 +295,7 @@ def unlockfile(mytuple): # open fd closed automatically on them. 
if isinstance(lockfilename, basestring): os.close(myfd) + _open_fds.remove(myfd) return True @@ -262,65 +303,148 @@ def unlockfile(mytuple): def hardlock_name(path): - return path+".hardlock-"+os.uname()[1]+"-"+str(os.getpid()) + base, tail = os.path.split(path) + return os.path.join(base, ".%s.hardlock-%s-%s" % + (tail, os.uname()[1], os.getpid())) def hardlink_is_mine(link,lock): try: - return os.stat(link).st_nlink == 2 + lock_st = os.stat(lock) + if lock_st.st_nlink == 2: + link_st = os.stat(link) + return lock_st.st_ino == link_st.st_ino and \ + lock_st.st_dev == link_st.st_dev except OSError: - return False + pass + return False -def hardlink_lockfile(lockfilename, max_wait=14400): +def hardlink_lockfile(lockfilename, max_wait=DeprecationWarning, + waiting_msg=None, flags=0): """Does the NFS, hardlink shuffle to ensure locking on the disk. - We create a PRIVATE lockfile, that is just a placeholder on the disk. - Then we HARDLINK the real lockfile to that private file. + We create a PRIVATE hardlink to the real lockfile, that is just a + placeholder on the disk. If our file can 2 references, then we have the lock. :) Otherwise we lather, rise, and repeat. - We default to a 4 hour timeout. """ - start_time = time.time() + if max_wait is not DeprecationWarning: + warnings.warn("The 'max_wait' parameter of " + "portage.locks.hardlink_lockfile() is now unused. Use " + "flags=os.O_NONBLOCK instead.", + DeprecationWarning, stacklevel=2) + + global _quiet + out = None + displayed_waiting_msg = False + preexisting = os.path.exists(lockfilename) myhardlock = hardlock_name(lockfilename) - reported_waiting = False - - while(time.time() < (start_time + max_wait)): - # We only need it to exist. 
- myfd = os.open(myhardlock, os.O_CREAT|os.O_RDWR,0o660) - os.close(myfd) - - if not os.path.exists(myhardlock): - raise FileNotFound( - _("Created lockfile is missing: %(filename)s") % \ - {"filename" : myhardlock}) - try: - res = os.link(myhardlock, lockfilename) - except OSError: + # myhardlock must not exist prior to our link() call, and we can + # safely unlink it since its file name is unique to our PID + try: + os.unlink(myhardlock) + except OSError as e: + if e.errno in (errno.ENOENT, errno.ESTALE): pass + else: + func_call = "unlink('%s')" % myhardlock + if e.errno == OperationNotPermitted.errno: + raise OperationNotPermitted(func_call) + elif e.errno == PermissionDenied.errno: + raise PermissionDenied(func_call) + else: + raise - if hardlink_is_mine(myhardlock, lockfilename): - # We have the lock. - if reported_waiting: - writemsg("\n", noiselevel=-1) - return True - - if reported_waiting: - writemsg(".", noiselevel=-1) + while True: + # create lockfilename if it doesn't exist yet + try: + myfd = os.open(lockfilename, os.O_CREAT|os.O_RDWR, 0o660) + except OSError as e: + func_call = "open('%s')" % lockfilename + if e.errno == OperationNotPermitted.errno: + raise OperationNotPermitted(func_call) + elif e.errno == PermissionDenied.errno: + raise PermissionDenied(func_call) + else: + raise else: - reported_waiting = True - msg = _("\nWaiting on (hardlink) lockfile: (one '.' per 3 seconds)\n" - "%(bin_path)s/clean_locks can fix stuck locks.\n" - "Lockfile: %(lockfilename)s\n") % \ - {"bin_path": PORTAGE_BIN_PATH, "lockfilename": lockfilename} - writemsg(msg, noiselevel=-1) - time.sleep(3) - - os.unlink(myhardlock) - return False + myfd_st = None + try: + myfd_st = os.fstat(myfd) + if not preexisting: + # Don't chown the file if it is preexisting, since we + # want to preserve existing permissions in that case. 
+ if myfd_st.st_gid != portage_gid: + os.fchown(myfd, -1, portage_gid) + except OSError as e: + if e.errno not in (errno.ENOENT, errno.ESTALE): + writemsg("%s: fchown('%s', -1, %d)\n" % \ + (e, lockfilename, portage_gid), noiselevel=-1) + writemsg(_("Cannot chown a lockfile: '%s'\n") % \ + lockfilename, noiselevel=-1) + writemsg(_("Group IDs of current user: %s\n") % \ + " ".join(str(n) for n in os.getgroups()), + noiselevel=-1) + else: + # another process has removed the file, so we'll have + # to create it again + continue + finally: + os.close(myfd) + + # If fstat shows more than one hardlink, then it's extremely + # unlikely that the following link call will result in a lock, + # so optimize away the wasteful link call and sleep or raise + # TryAgain. + if myfd_st is not None and myfd_st.st_nlink < 2: + try: + os.link(lockfilename, myhardlock) + except OSError as e: + func_call = "link('%s', '%s')" % (lockfilename, myhardlock) + if e.errno == OperationNotPermitted.errno: + raise OperationNotPermitted(func_call) + elif e.errno == PermissionDenied.errno: + raise PermissionDenied(func_call) + elif e.errno in (errno.ESTALE, errno.ENOENT): + # another process has removed the file, so we'll have + # to create it again + continue + else: + raise + else: + if hardlink_is_mine(myhardlock, lockfilename): + if out is not None: + out.eend(os.EX_OK) + break + + try: + os.unlink(myhardlock) + except OSError as e: + # This should not happen, since the file name of + # myhardlock is unique to our host and PID, + # and the above link() call succeeded. 
+ if e.errno not in (errno.ENOENT, errno.ESTALE): + raise + raise FileNotFound(myhardlock) + + if flags & os.O_NONBLOCK: + raise TryAgain(lockfilename) + + if out is None and not _quiet: + out = portage.output.EOutput() + if out is not None and not displayed_waiting_msg: + displayed_waiting_msg = True + if waiting_msg is None: + waiting_msg = _("waiting for lock on %s\n") % lockfilename + out.ebegin(waiting_msg) + + time.sleep(_HARDLINK_POLL_LATENCY) + + return True -def unhardlink_lockfile(lockfilename): +def unhardlink_lockfile(lockfilename, unlinkfile=True): myhardlock = hardlock_name(lockfilename) - if hardlink_is_mine(myhardlock, lockfilename): + if unlinkfile and hardlink_is_mine(myhardlock, lockfilename): # Make sure not to touch lockfilename unless we really have a lock. try: os.unlink(lockfilename) @@ -344,7 +468,7 @@ def hardlock_cleanup(path, remove_all_locks=False): if os.path.isfile(path+"/"+x): parts = x.split(".hardlock-") if len(parts) == 2: - filename = parts[0] + filename = parts[0][1:] hostpid = parts[1].split("-") host = "-".join(hostpid[:-1]) pid = hostpid[-1] @@ -368,7 +492,7 @@ def hardlock_cleanup(path, remove_all_locks=False): remove_all_locks: for y in mylist[x]: for z in mylist[x][y]: - filename = path+"/"+x+".hardlock-"+y+"-"+z + filename = path+"/."+x+".hardlock-"+y+"-"+z if filename == mylockname: continue try: diff --git a/portage_with_autodep/pym/portage/locks.pyo b/portage_with_autodep/pym/portage/locks.pyo Binary files differnew file mode 100644 index 0000000..9c90a2f --- /dev/null +++ b/portage_with_autodep/pym/portage/locks.pyo diff --git a/portage_with_autodep/pym/portage/mail.py b/portage_with_autodep/pym/portage/mail.py index 17dfcaf..3fcadd2 100644 --- a/portage_with_autodep/pym/portage/mail.py +++ b/portage_with_autodep/pym/portage/mail.py @@ -40,8 +40,7 @@ else: def TextMessage(_text): from email.mime.text import MIMEText mimetext = MIMEText(_text) - if sys.hexversion >= 0x3000000: - mimetext.set_charset("UTF-8") + 
mimetext.set_charset("UTF-8") return mimetext def create_message(sender, recipient, subject, body, attachments=None): diff --git a/portage_with_autodep/pym/portage/mail.pyo b/portage_with_autodep/pym/portage/mail.pyo Binary files differnew file mode 100644 index 0000000..bc3a76d --- /dev/null +++ b/portage_with_autodep/pym/portage/mail.pyo diff --git a/portage_with_autodep/pym/portage/manifest.py b/portage_with_autodep/pym/portage/manifest.py index 13efab7..90324ee 100644 --- a/portage_with_autodep/pym/portage/manifest.py +++ b/portage_with_autodep/pym/portage/manifest.py @@ -1,8 +1,10 @@ -# Copyright 1999-2011 Gentoo Foundation +# Copyright 1999-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import errno import io +import re +import warnings import portage portage.proxy.lazyimport.lazyimport(globals(), @@ -17,8 +19,13 @@ from portage import _unicode_encode from portage.exception import DigestException, FileNotFound, \ InvalidDataType, MissingParameter, PermissionDenied, \ PortageException, PortagePackageException +from portage.const import (MANIFEST1_HASH_FUNCTIONS, MANIFEST2_HASH_DEFAULTS, + MANIFEST2_HASH_FUNCTIONS, MANIFEST2_IDENTIFIERS, MANIFEST2_REQUIRED_HASH) from portage.localization import _ +# Characters prohibited by repoman's file.name check. 
+_prohibited_filename_chars_re = re.compile(r'[^a-zA-Z0-9._\-+:]') + class FileNotInManifestException(PortageException): pass @@ -30,10 +37,14 @@ def manifest2AuxfileFilter(filename): for x in mysplit: if x[:1] == '.': return False + if _prohibited_filename_chars_re.search(x) is not None: + return False return not filename[:7] == 'digest-' def manifest2MiscfileFilter(filename): filename = filename.strip(os.sep) + if _prohibited_filename_chars_re.search(filename) is not None: + return False return not (filename in ["CVS", ".svn", "files", "Manifest"] or filename.endswith(".ebuild")) def guessManifestFileType(filename): @@ -49,9 +60,15 @@ def guessManifestFileType(filename): else: return "DIST" +def guessThinManifestFileType(filename): + type = guessManifestFileType(filename) + if type != "DIST": + return None + return "DIST" + def parseManifest2(mysplit): myentry = None - if len(mysplit) > 4 and mysplit[0] in portage.const.MANIFEST2_IDENTIFIERS: + if len(mysplit) > 4 and mysplit[0] in MANIFEST2_IDENTIFIERS: mytype = mysplit[0] myname = mysplit[1] try: @@ -93,25 +110,33 @@ class Manifest2Entry(ManifestEntry): class Manifest(object): parsers = (parseManifest2,) def __init__(self, pkgdir, distdir, fetchlist_dict=None, - manifest1_compat=False, from_scratch=False): - """ create new Manifest instance for package in pkgdir - and add compability entries for old portage versions if manifest1_compat == True. + manifest1_compat=DeprecationWarning, from_scratch=False, thin=False, + allow_missing=False, allow_create=True, hashes=None): + """ Create new Manifest instance for package in pkgdir. Do not parse Manifest file if from_scratch == True (only for internal use) The fetchlist_dict parameter is required only for generation of - a Manifest (not needed for parsing and checking sums).""" + a Manifest (not needed for parsing and checking sums). 
+ If thin is specified, then the manifest carries only info for + distfiles.""" + + if manifest1_compat is not DeprecationWarning: + warnings.warn("The manifest1_compat parameter of the " + "portage.manifest.Manifest constructor is deprecated.", + DeprecationWarning, stacklevel=2) + self.pkgdir = _unicode_decode(pkgdir).rstrip(os.sep) + os.sep self.fhashdict = {} self.hashes = set() - self.hashes.update(portage.const.MANIFEST2_HASH_FUNCTIONS) - if manifest1_compat: - raise NotImplementedError("manifest1 support has been removed") + + if hashes is None: + hashes = MANIFEST2_HASH_DEFAULTS + + self.hashes.update(hashes.intersection(MANIFEST2_HASH_FUNCTIONS)) self.hashes.difference_update(hashname for hashname in \ list(self.hashes) if hashname not in hashfunc_map) self.hashes.add("size") - if manifest1_compat: - raise NotImplementedError("manifest1 support has been removed") - self.hashes.add(portage.const.MANIFEST2_REQUIRED_HASH) - for t in portage.const.MANIFEST2_IDENTIFIERS: + self.hashes.add(MANIFEST2_REQUIRED_HASH) + for t in MANIFEST2_IDENTIFIERS: self.fhashdict[t] = {} if not from_scratch: self._read() @@ -120,7 +145,13 @@ class Manifest(object): else: self.fetchlist_dict = {} self.distdir = distdir - self.guessType = guessManifestFileType + self.thin = thin + if thin: + self.guessType = guessThinManifestFileType + else: + self.guessType = guessManifestFileType + self.allow_missing = allow_missing + self.allow_create = allow_create def getFullname(self): """ Returns the absolute path to the Manifest file for this instance """ @@ -129,7 +160,7 @@ class Manifest(object): def getDigests(self): """ Compability function for old digest/manifest code, returns dict of filename:{hashfunction:hashvalue} """ rval = {} - for t in portage.const.MANIFEST2_IDENTIFIERS: + for t in MANIFEST2_IDENTIFIERS: rval.update(self.fhashdict[t]) return rval @@ -200,7 +231,7 @@ class Manifest(object): return myhashdict def _createManifestEntries(self): - valid_hashes = 
set(portage.const.MANIFEST2_HASH_FUNCTIONS) + valid_hashes = set(MANIFEST2_HASH_FUNCTIONS) valid_hashes.add('size') mytypes = list(self.fhashdict) mytypes.sort() @@ -218,16 +249,19 @@ class Manifest(object): def checkIntegrity(self): for t in self.fhashdict: for f in self.fhashdict[t]: - if portage.const.MANIFEST2_REQUIRED_HASH not in self.fhashdict[t][f]: - raise MissingParameter(_("Missing %s checksum: %s %s") % (portage.const.MANIFEST2_REQUIRED_HASH, t, f)) + if MANIFEST2_REQUIRED_HASH not in self.fhashdict[t][f]: + raise MissingParameter(_("Missing %s checksum: %s %s") % + (MANIFEST2_REQUIRED_HASH, t, f)) def write(self, sign=False, force=False): """ Write Manifest instance to disk, optionally signing it """ + if not self.allow_create: + return self.checkIntegrity() try: myentries = list(self._createManifestEntries()) update_manifest = True - if not force: + if myentries and not force: try: f = io.open(_unicode_encode(self.getFullname(), encoding=_encodings['fs'], errors='strict'), @@ -246,9 +280,24 @@ class Manifest(object): pass else: raise + if update_manifest: - write_atomic(self.getFullname(), - "".join("%s\n" % str(myentry) for myentry in myentries)) + if myentries or not (self.thin or self.allow_missing): + # If myentries is empty, don't write an empty manifest + # when thin or allow_missing is enabled. Except for + # thin manifests with no DIST entries, myentries is + # non-empty for all currently known use cases. + write_atomic(self.getFullname(), "".join("%s\n" % + str(myentry) for myentry in myentries)) + else: + # With thin manifest, there's no need to have + # a Manifest file if there are no DIST entries. 
+ try: + os.unlink(self.getFullname()) + except OSError as e: + if e.errno != errno.ENOENT: + raise + if sign: self.sign() except (IOError, OSError) as e: @@ -270,14 +319,14 @@ class Manifest(object): fname = os.path.join("files", fname) if not os.path.exists(self.pkgdir+fname) and not ignoreMissing: raise FileNotFound(fname) - if not ftype in portage.const.MANIFEST2_IDENTIFIERS: + if not ftype in MANIFEST2_IDENTIFIERS: raise InvalidDataType(ftype) if ftype == "AUX" and fname.startswith("files"): fname = fname[6:] self.fhashdict[ftype][fname] = {} if hashdict != None: self.fhashdict[ftype][fname].update(hashdict) - if not portage.const.MANIFEST2_REQUIRED_HASH in self.fhashdict[ftype][fname]: + if not MANIFEST2_REQUIRED_HASH in self.fhashdict[ftype][fname]: self.updateFileHashes(ftype, fname, checkExisting=False, ignoreMissing=ignoreMissing) def removeFile(self, ftype, fname): @@ -290,7 +339,7 @@ class Manifest(object): def findFile(self, fname): """ Return entrytype of the given file if present in Manifest or None if not present """ - for t in portage.const.MANIFEST2_IDENTIFIERS: + for t in MANIFEST2_IDENTIFIERS: if fname in self.fhashdict[t]: return t return None @@ -305,6 +354,8 @@ class Manifest(object): distfiles to raise a FileNotFound exception for (if no file or existing checksums are available), and defaults to all distfiles when not specified.""" + if not self.allow_create: + return if checkExisting: self.checkAllHashes() if assumeDistHashesSometimes or assumeDistHashesAlways: @@ -313,13 +364,88 @@ class Manifest(object): distfilehashes = {} self.__init__(self.pkgdir, self.distdir, fetchlist_dict=self.fetchlist_dict, from_scratch=True, - manifest1_compat=False) - cpvlist = [] + thin=self.thin, allow_missing=self.allow_missing, + allow_create=self.allow_create, hashes=self.hashes) pn = os.path.basename(self.pkgdir.rstrip(os.path.sep)) cat = self._pkgdir_category() pkgdir = self.pkgdir + if self.thin: + cpvlist = self._update_thin_pkgdir(cat, pn, pkgdir) + 
else: + cpvlist = self._update_thick_pkgdir(cat, pn, pkgdir) + + distlist = set() + for cpv in cpvlist: + distlist.update(self._getCpvDistfiles(cpv)) + + if requiredDistfiles is None: + # This allows us to force removal of stale digests for the + # ebuild --force digest option (no distfiles are required). + requiredDistfiles = set() + elif len(requiredDistfiles) == 0: + # repoman passes in an empty list, which implies that all distfiles + # are required. + requiredDistfiles = distlist.copy() + required_hash_types = set() + required_hash_types.add("size") + required_hash_types.add(MANIFEST2_REQUIRED_HASH) + for f in distlist: + fname = os.path.join(self.distdir, f) + mystat = None + try: + mystat = os.stat(fname) + except OSError: + pass + if f in distfilehashes and \ + not required_hash_types.difference(distfilehashes[f]) and \ + ((assumeDistHashesSometimes and mystat is None) or \ + (assumeDistHashesAlways and mystat is None) or \ + (assumeDistHashesAlways and mystat is not None and \ + set(distfilehashes[f]) == set(self.hashes) and \ + distfilehashes[f]["size"] == mystat.st_size)): + self.fhashdict["DIST"][f] = distfilehashes[f] + else: + try: + self.fhashdict["DIST"][f] = perform_multiple_checksums(fname, self.hashes) + except FileNotFound: + if f in requiredDistfiles: + raise + def _is_cpv(self, cat, pn, filename): + if not filename.endswith(".ebuild"): + return None + pf = filename[:-7] + ps = portage.versions._pkgsplit(pf) + cpv = "%s/%s" % (cat, pf) + if not ps: + raise PortagePackageException( + _("Invalid package name: '%s'") % cpv) + if ps[0] != pn: + raise PortagePackageException( + _("Package name does not " + "match directory name: '%s'") % cpv) + return cpv + + def _update_thin_pkgdir(self, cat, pn, pkgdir): + for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir): + break + cpvlist = [] + for f in pkgdir_files: + try: + f = _unicode_decode(f, + encoding=_encodings['fs'], errors='strict') + except UnicodeDecodeError: + continue + if f[:1] == '.': + 
continue + pf = self._is_cpv(cat, pn, f) + if pf is not None: + cpvlist.append(pf) + return cpvlist + + def _update_thick_pkgdir(self, cat, pn, pkgdir): + cpvlist = [] for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir): break for f in pkgdir_files: @@ -330,21 +456,10 @@ class Manifest(object): continue if f[:1] == ".": continue - pf = None - if f[-7:] == '.ebuild': - pf = f[:-7] + pf = self._is_cpv(cat, pn, f) if pf is not None: mytype = "EBUILD" - ps = portage.versions._pkgsplit(pf) - cpv = "%s/%s" % (cat, pf) - if not ps: - raise PortagePackageException( - _("Invalid package name: '%s'") % cpv) - if ps[0] != pn: - raise PortagePackageException( - _("Package name does not " - "match directory name: '%s'") % cpv) - cpvlist.append(cpv) + cpvlist.append(pf) elif manifest2MiscfileFilter(f): mytype = "MISC" else: @@ -368,41 +483,7 @@ class Manifest(object): continue self.fhashdict["AUX"][f] = perform_multiple_checksums( os.path.join(self.pkgdir, "files", f.lstrip(os.sep)), self.hashes) - distlist = set() - for cpv in cpvlist: - distlist.update(self._getCpvDistfiles(cpv)) - if requiredDistfiles is None: - # This allows us to force removal of stale digests for the - # ebuild --force digest option (no distfiles are required). - requiredDistfiles = set() - elif len(requiredDistfiles) == 0: - # repoman passes in an empty list, which implies that all distfiles - # are required. 
- requiredDistfiles = distlist.copy() - required_hash_types = set() - required_hash_types.add("size") - required_hash_types.add(portage.const.MANIFEST2_REQUIRED_HASH) - for f in distlist: - fname = os.path.join(self.distdir, f) - mystat = None - try: - mystat = os.stat(fname) - except OSError: - pass - if f in distfilehashes and \ - not required_hash_types.difference(distfilehashes[f]) and \ - ((assumeDistHashesSometimes and mystat is None) or \ - (assumeDistHashesAlways and mystat is None) or \ - (assumeDistHashesAlways and mystat is not None and \ - len(distfilehashes[f]) == len(self.hashes) and \ - distfilehashes[f]["size"] == mystat.st_size)): - self.fhashdict["DIST"][f] = distfilehashes[f] - else: - try: - self.fhashdict["DIST"][f] = perform_multiple_checksums(fname, self.hashes) - except FileNotFound: - if f in requiredDistfiles: - raise + return cpvlist def _pkgdir_category(self): return self.pkgdir.rstrip(os.sep).split(os.sep)[-2] @@ -417,7 +498,7 @@ class Manifest(object): return absname def checkAllHashes(self, ignoreMissingFiles=False): - for t in portage.const.MANIFEST2_IDENTIFIERS: + for t in MANIFEST2_IDENTIFIERS: self.checkTypeHashes(t, ignoreMissingFiles=ignoreMissingFiles) def checkTypeHashes(self, idtype, ignoreMissingFiles=False): @@ -481,7 +562,7 @@ class Manifest(object): def updateAllHashes(self, checkExisting=False, ignoreMissingFiles=True): """ Regenerate all hashes for all files in this Manifest. 
""" - for idtype in portage.const.MANIFEST2_IDENTIFIERS: + for idtype in MANIFEST2_IDENTIFIERS: self.updateTypeHashes(idtype, checkExisting=checkExisting, ignoreMissingFiles=ignoreMissingFiles) @@ -526,9 +607,11 @@ class Manifest(object): myfile.close() for l in lines: mysplit = l.split() - if len(mysplit) == 4 and mysplit[0] in portage.const.MANIFEST1_HASH_FUNCTIONS and not 1 in rVal: + if len(mysplit) == 4 and mysplit[0] in MANIFEST1_HASH_FUNCTIONS \ + and 1 not in rVal: rVal.append(1) - elif len(mysplit) > 4 and mysplit[0] in portage.const.MANIFEST2_IDENTIFIERS and ((len(mysplit) - 3) % 2) == 0 and not 2 in rVal: + elif len(mysplit) > 4 and mysplit[0] in MANIFEST2_IDENTIFIERS \ + and ((len(mysplit) - 3) % 2) == 0 and not 2 in rVal: rVal.append(2) return rVal diff --git a/portage_with_autodep/pym/portage/manifest.pyo b/portage_with_autodep/pym/portage/manifest.pyo Binary files differnew file mode 100644 index 0000000..d482bbd --- /dev/null +++ b/portage_with_autodep/pym/portage/manifest.pyo diff --git a/portage_with_autodep/pym/portage/news.py b/portage_with_autodep/pym/portage/news.py index 866e5b0..bbd9325 100644 --- a/portage_with_autodep/pym/portage/news.py +++ b/portage_with_autodep/pym/portage/news.py @@ -2,24 +2,30 @@ # Copyright 2006-2011 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 +from __future__ import print_function + __all__ = ["NewsManager", "NewsItem", "DisplayRestriction", "DisplayProfileRestriction", "DisplayKeywordRestriction", - "DisplayInstalledRestriction"] + "DisplayInstalledRestriction", + "count_unread_news", "display_news_notifications"] import io import logging import os as _os import re +from portage import OrderedDict from portage import os from portage import _encodings from portage import _unicode_decode from portage import _unicode_encode +from portage.const import NEWS_LIB_PATH from portage.util import apply_secpass_permissions, ensure_dirs, \ grabfile, normalize_path, write_atomic, 
writemsg_level from portage.data import portage_gid from portage.dep import isvalidatom from portage.localization import _ from portage.locks import lockfile, unlockfile +from portage.output import colorize from portage.exception import InvalidLocation, OperationNotPermitted, \ PermissionDenied @@ -39,7 +45,6 @@ class NewsManager(object): def __init__(self, portdb, vardb, news_path, unread_path, language_id='en'): self.news_path = news_path self.unread_path = unread_path - self.target_root = vardb.root self.language_id = language_id self.config = vardb.settings self.vdb = vardb @@ -114,7 +119,6 @@ class NewsManager(object): except PermissionDenied: return - updates = [] for itemid in news: try: itemid = _unicode_decode(itemid, @@ -250,10 +254,11 @@ class NewsItem(object): return self._valid def parse(self): - lines = io.open(_unicode_encode(self.path, + f = io.open(_unicode_encode(self.path, encoding=_encodings['fs'], errors='strict'), - mode='r', encoding=_encodings['content'], errors='replace' - ).readlines() + mode='r', encoding=_encodings['content'], errors='replace') + lines = f.readlines() + f.close() self.restrictions = {} invalids = [] for i, line in enumerate(lines): @@ -349,3 +354,67 @@ class DisplayInstalledRestriction(DisplayRestriction): if vdb.match(self.atom): return True return False + +def count_unread_news(portdb, vardb, repos=None, update=True): + """ + Returns a dictionary mapping repos to integer counts of unread news items. + By default, this will scan all repos and check for new items that have + appeared since the last scan. 
+ + @param portdb: a portage tree database + @type portdb: portdbapi + @param vardb: an installed package database + @type vardb: vardbapi + @param repos: names of repos to scan (None means to scan all available repos) + @type repos: list or None + @param update: check for new items (default is True) + @type update: boolean + @rtype: dict + @return: dictionary mapping repos to integer counts of unread news items + """ + + NEWS_PATH = os.path.join("metadata", "news") + UNREAD_PATH = os.path.join(vardb.settings['EROOT'], NEWS_LIB_PATH, "news") + news_counts = OrderedDict() + if repos is None: + repos = portdb.getRepositories() + + permission_msgs = set() + for repo in repos: + try: + manager = NewsManager(portdb, vardb, NEWS_PATH, UNREAD_PATH) + count = manager.getUnreadItems(repo, update=True) + except PermissionDenied as e: + # NOTE: The NewsManager typically handles permission errors by + # returning silently, so PermissionDenied won't necessarily be + # raised even if we do trigger a permission error above. + msg = _unicode_decode("Permission denied: '%s'\n") % (e,) + if msg in permission_msgs: + pass + else: + permission_msgs.add(msg) + writemsg_level(msg, level=logging.ERROR, noiselevel=-1) + news_counts[repo] = 0 + else: + news_counts[repo] = count + + return news_counts + +def display_news_notifications(news_counts): + """ + Display a notification for unread news items, using a dictionary mapping + repos to integer counts, like that returned from count_unread_news(). + """ + newsReaderDisplay = False + for repo, count in news_counts.items(): + if count > 0: + if not newsReaderDisplay: + newsReaderDisplay = True + print() + print(colorize("WARN", " * IMPORTANT:"), end=' ') + print("%s news items need reading for repository '%s'."
% (count, repo)) + + if newsReaderDisplay: + print(colorize("WARN", " *"), end=' ') + print("Use " + colorize("GOOD", "eselect news") + " to read news items.") + print() diff --git a/portage_with_autodep/pym/portage/news.pyo b/portage_with_autodep/pym/portage/news.pyo Binary files differnew file mode 100644 index 0000000..bbd247c --- /dev/null +++ b/portage_with_autodep/pym/portage/news.pyo diff --git a/portage_with_autodep/pym/portage/output.py b/portage_with_autodep/pym/portage/output.py index 0e8245f..98bec81 100644 --- a/portage_with_autodep/pym/portage/output.py +++ b/portage_with_autodep/pym/portage/output.py @@ -162,11 +162,14 @@ def _parse_color_map(config_root='/', onerror=None): if token[0] in quotes and token[0] == token[-1]: token = token[1:-1] return token + + f = None try: - lineno=0 - for line in io.open(_unicode_encode(myfile, + f = io.open(_unicode_encode(myfile, encoding=_encodings['fs'], errors='strict'), - mode='r', encoding=_encodings['content'], errors='replace'): + mode='r', encoding=_encodings['content'], errors='replace') + lineno = 0 + for line in f: lineno += 1 commenter_pos = line.find("#") @@ -226,6 +229,9 @@ def _parse_color_map(config_root='/', onerror=None): elif e.errno == errno.EACCES: raise PermissionDenied(myfile) raise + finally: + if f is not None: + f.close() def nc_len(mystr): tmp = re.sub(esc_seq + "^m]+m", "", mystr); @@ -319,6 +325,12 @@ def style_to_ansi_code(style): ret += codes.get(attr_name, attr_name) return ret +def colormap(): + mycolors = [] + for c in ("GOOD", "WARN", "BAD", "HILITE", "BRACKET", "NORMAL"): + mycolors.append("%s=$'%s'" % (c, style_to_ansi_code(c))) + return "\n".join(mycolors) + def colorize(color_key, text): global havecolor if havecolor: @@ -335,12 +347,12 @@ compat_functions_colors = ["bold","white","teal","turquoise","darkteal", "fuchsia","purple","blue","darkblue","green","darkgreen","yellow", "brown","darkyellow","red","darkred"] -def create_color_func(color_key): - def derived_func(*args): - 
newargs = list(args) - newargs.insert(0, color_key) - return colorize(*newargs) - return derived_func +class create_color_func(object): + __slots__ = ("_color_key",) + def __init__(self, color_key): + self._color_key = color_key + def __call__(self, text): + return colorize(self._color_key, text) for c in compat_functions_colors: globals()[c] = create_color_func(c) @@ -416,12 +428,14 @@ class StyleWriter(formatter.DumbWriter): def get_term_size(): """ Get the number of lines and columns of the tty that is connected to - stdout. Returns a tuple of (lines, columns) or (-1, -1) if an error + stdout. Returns a tuple of (lines, columns) or (0, 0) if an error occurs. The curses module is used if available, otherwise the output of - `stty size` is parsed. + `stty size` is parsed. The lines and columns values are guaranteed to be + greater than or equal to zero, since a negative COLUMNS variable is + known to prevent some commands from working (see bug #394091). """ if not sys.stdout.isatty(): - return -1, -1 + return (0, 0) try: import curses try: @@ -436,10 +450,13 @@ def get_term_size(): out = out.split() if len(out) == 2: try: - return int(out[0]), int(out[1]) + val = (int(out[0]), int(out[1])) except ValueError: pass - return -1, -1 + else: + if val[0] >= 0 and val[1] >= 0: + return val + return (0, 0) def set_term_size(lines, columns, fd): """ diff --git a/portage_with_autodep/pym/portage/output.pyo b/portage_with_autodep/pym/portage/output.pyo Binary files differnew file mode 100644 index 0000000..993a2de --- /dev/null +++ b/portage_with_autodep/pym/portage/output.pyo diff --git a/portage_with_autodep/pym/portage/package/__init__.pyo b/portage_with_autodep/pym/portage/package/__init__.pyo Binary files differnew file mode 100644 index 0000000..9d8f30c --- /dev/null +++ b/portage_with_autodep/pym/portage/package/__init__.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/__init__.pyo b/portage_with_autodep/pym/portage/package/ebuild/__init__.pyo Binary 
files differnew file mode 100644 index 0000000..927b4bc --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/__init__.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/KeywordsManager.py b/portage_with_autodep/pym/portage/package/ebuild/_config/KeywordsManager.py index cd22554..0c613ce 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/_config/KeywordsManager.py +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/KeywordsManager.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ( @@ -11,7 +11,7 @@ from portage.dep import ExtendedAtomDict, _repo_separator, _slot_separator from portage.localization import _ from portage.package.ebuild._config.helper import ordered_by_atom_specificity from portage.util import grabdict_package, stack_lists, writemsg -from portage.versions import cpv_getkey +from portage.versions import cpv_getkey, _pkg_str class KeywordsManager(object): """Manager class to handle keywords processing and validation""" @@ -20,7 +20,8 @@ class KeywordsManager(object): global_accept_keywords=""): self._pkeywords_list = [] rawpkeywords = [grabdict_package( - os.path.join(x, "package.keywords"), recursive=1, + os.path.join(x.location, "package.keywords"), + recursive=x.portage1_directories, verify_eapi=True) \ for x in profiles] for pkeyworddict in rawpkeywords: @@ -35,7 +36,8 @@ class KeywordsManager(object): self._p_accept_keywords = [] raw_p_accept_keywords = [grabdict_package( - os.path.join(x, "package.accept_keywords"), recursive=1, + os.path.join(x.location, "package.accept_keywords"), + recursive=x.portage1_directories, verify_eapi=True) \ for x in profiles] for d in raw_p_accept_keywords: @@ -75,10 +77,11 @@ class KeywordsManager(object): def getKeywords(self, cpv, slot, keywords, repo): - cp = cpv_getkey(cpv) - pkg = "".join((cpv, _slot_separator, slot)) - if repo 
and repo != Package.UNKNOWN_REPO: - pkg = "".join((pkg, _repo_separator, repo)) + if not hasattr(cpv, 'slot'): + pkg = _pkg_str(cpv, slot=slot, repo=repo) + else: + pkg = cpv + cp = pkg.cp keywords = [[x for x in keywords.split() if x != "-*"]] for pkeywords_dict in self._pkeywords_list: cpdict = pkeywords_dict.get(cp) @@ -206,12 +209,16 @@ class KeywordsManager(object): hasstable = False hastesting = False for gp in mygroups: - if gp == "*" or (gp == "-*" and len(mygroups) == 1): - writemsg(_("--- WARNING: Package '%(cpv)s' uses" - " '%(keyword)s' keyword.\n") % {"cpv": cpv, "keyword": gp}, - noiselevel=-1) - if gp == "*": - match = True + if gp == "*": + match = True + break + elif gp == "~*": + hastesting = True + for x in pgroups: + if x[:1] == "~": + match = True + break + if match: break elif gp in pgroups: match = True @@ -254,18 +261,19 @@ class KeywordsManager(object): """ pgroups = global_accept_keywords.split() + if not hasattr(cpv, 'slot'): + cpv = _pkg_str(cpv, slot=slot, repo=repo) cp = cpv_getkey(cpv) unmaskgroups = [] if self._p_accept_keywords: - cpv_slot = "%s:%s" % (cpv, slot) accept_keywords_defaults = tuple('~' + keyword for keyword in \ pgroups if keyword[:1] not in "~-") for d in self._p_accept_keywords: cpdict = d.get(cp) if cpdict: pkg_accept_keywords = \ - ordered_by_atom_specificity(cpdict, cpv_slot) + ordered_by_atom_specificity(cpdict, cpv) if pkg_accept_keywords: for x in pkg_accept_keywords: if not x: @@ -274,9 +282,8 @@ class KeywordsManager(object): pkgdict = self.pkeywordsdict.get(cp) if pkgdict: - cpv_slot = "%s:%s" % (cpv, slot) pkg_accept_keywords = \ - ordered_by_atom_specificity(pkgdict, cpv_slot, repo=repo) + ordered_by_atom_specificity(pkgdict, cpv) if pkg_accept_keywords: for x in pkg_accept_keywords: unmaskgroups.extend(x) diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/KeywordsManager.pyo b/portage_with_autodep/pym/portage/package/ebuild/_config/KeywordsManager.pyo Binary files differnew file mode 
100644 index 0000000..15043f0 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/KeywordsManager.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/LicenseManager.py b/portage_with_autodep/pym/portage/package/ebuild/_config/LicenseManager.py index effd55b..f76e7e2 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/_config/LicenseManager.py +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/LicenseManager.py @@ -1,4 +1,4 @@ -# Copyright 2010 Gentoo Foundation +# Copyright 201-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ( @@ -10,7 +10,7 @@ from portage.dep import ExtendedAtomDict, use_reduce from portage.exception import InvalidDependString from portage.localization import _ from portage.util import grabdict, grabdict_package, writemsg -from portage.versions import cpv_getkey +from portage.versions import cpv_getkey, _pkg_str from portage.package.ebuild._config.helper import ordered_by_atom_specificity @@ -119,8 +119,9 @@ class LicenseManager(object): cp = cpv_getkey(cpv) cpdict = self._plicensedict.get(cp) if cpdict: - cpv_slot = "%s:%s" % (cpv, slot) - plicence_list = ordered_by_atom_specificity(cpdict, cpv_slot, repo) + if not hasattr(cpv, slot): + cpv = _pkg_str(cpv, slot=slot, repo=repo) + plicence_list = ordered_by_atom_specificity(cpdict, cpv) if plicence_list: accept_license = list(self._accept_license) for x in plicence_list: diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/LicenseManager.pyo b/portage_with_autodep/pym/portage/package/ebuild/_config/LicenseManager.pyo Binary files differnew file mode 100644 index 0000000..4a38298 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/LicenseManager.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/LocationsManager.py b/portage_with_autodep/pym/portage/package/ebuild/_config/LocationsManager.py index c2b115b..f7a1177 100644 
--- a/portage_with_autodep/pym/portage/package/ebuild/_config/LocationsManager.py +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/LocationsManager.py @@ -5,7 +5,11 @@ __all__ = ( 'LocationsManager', ) +import collections import io +import warnings + +import portage from portage import os, eapi_is_supported, _encodings, _unicode_encode from portage.const import CUSTOM_PROFILE_PATH, GLOBAL_CONFIG_PATH, \ PROFILE_PATH, USER_CONFIG_PATH @@ -13,7 +17,20 @@ from portage.exception import DirectoryNotFound, ParseError from portage.localization import _ from portage.util import ensure_dirs, grabfile, \ normalize_path, shlex_split, writemsg +from portage.repository.config import parse_layout_conf, \ + _portage1_profiles_allow_directories + + +_PORTAGE1_DIRECTORIES = frozenset([ + 'package.mask', 'package.provided', + 'package.use', 'package.use.mask', 'package.use.force', + 'use.mask', 'use.force']) + +_profile_node = collections.namedtuple('_profile_node', + 'location portage1_directories') +_allow_parent_colon = frozenset( + ["portage-2"]) class LocationsManager(object): @@ -25,9 +42,9 @@ class LocationsManager(object): self.config_root = config_root self.target_root = target_root self._user_config = local_config - + if self.eprefix is None: - self.eprefix = "" + self.eprefix = portage.const.EPREFIX if self.config_root is None: self.config_root = self.eprefix + os.sep @@ -37,17 +54,33 @@ class LocationsManager(object): self._check_var_directory("PORTAGE_CONFIGROOT", self.config_root) self.abs_user_config = os.path.join(self.config_root, USER_CONFIG_PATH) + self.config_profile_path = config_profile_path + + def load_profiles(self, repositories, known_repository_paths): + known_repository_paths = set(os.path.realpath(x) + for x in known_repository_paths) - if config_profile_path is None: - config_profile_path = \ + known_repos = [] + for x in known_repository_paths: + try: + layout_data = {"profile-formats": + 
repositories.get_repo_for_location(x).profile_formats} + except KeyError: + layout_data = parse_layout_conf(x)[0] + # force a trailing '/' for ease of doing startswith checks + known_repos.append((x + '/', layout_data)) + known_repos = tuple(known_repos) + + if self.config_profile_path is None: + self.config_profile_path = \ os.path.join(self.config_root, PROFILE_PATH) - if os.path.isdir(config_profile_path): - self.profile_path = config_profile_path + if os.path.isdir(self.config_profile_path): + self.profile_path = self.config_profile_path else: - config_profile_path = \ + self.config_profile_path = \ os.path.join(self.abs_user_config, 'make.profile') - if os.path.isdir(config_profile_path): - self.profile_path = config_profile_path + if os.path.isdir(self.config_profile_path): + self.profile_path = self.config_profile_path else: self.profile_path = None else: @@ -55,19 +88,22 @@ class LocationsManager(object): # here, in order to create an empty profile # for checking dependencies of packages with # empty KEYWORDS. - self.profile_path = config_profile_path + self.profile_path = self.config_profile_path # The symlink might not exist or might not be a symlink. self.profiles = [] + self.profiles_complex = [] if self.profile_path: try: - self._addProfile(os.path.realpath(self.profile_path)) + self._addProfile(os.path.realpath(self.profile_path), + repositories, known_repos) except ParseError as e: writemsg(_("!!! Unable to parse profile: '%s'\n") % \ self.profile_path, noiselevel=-1) writemsg("!!! 
ParseError: %s\n" % str(e), noiselevel=-1) self.profiles = [] + self.profiles_complex = [] if self._user_config and self.profiles: custom_prof = os.path.join( @@ -75,9 +111,11 @@ class LocationsManager(object): if os.path.exists(custom_prof): self.user_profile_dir = custom_prof self.profiles.append(custom_prof) + self.profiles_complex.append(_profile_node(custom_prof, True)) del custom_prof self.profiles = tuple(self.profiles) + self.profiles_complex = tuple(self.profiles_complex) def _check_var_directory(self, varname, var): if not os.path.isdir(var): @@ -86,14 +124,45 @@ class LocationsManager(object): noiselevel=-1) raise DirectoryNotFound(var) - def _addProfile(self, currentPath): + def _addProfile(self, currentPath, repositories, known_repos): + current_abs_path = os.path.abspath(currentPath) + allow_directories = True + allow_parent_colon = True + repo_loc = None + compat_mode = False + intersecting_repos = [x for x in known_repos if current_abs_path.startswith(x[0])] + if intersecting_repos: + # protect against nested repositories. Insane configuration, but the longest + # path will be the correct one. + repo_loc, layout_data = max(intersecting_repos, key=lambda x:len(x[0])) + allow_directories = any(x in _portage1_profiles_allow_directories + for x in layout_data['profile-formats']) + compat_mode = layout_data['profile-formats'] == ('portage-1-compat',) + allow_parent_colon = any(x in _allow_parent_colon + for x in layout_data['profile-formats']) + + if compat_mode: + offenders = _PORTAGE1_DIRECTORIES.intersection(os.listdir(currentPath)) + offenders = sorted(x for x in offenders + if os.path.isdir(os.path.join(currentPath, x))) + if offenders: + warnings.warn(_("Profile '%(profile_path)s' in repository " + "'%(repo_name)s' is implicitly using 'portage-1' profile format, but " + "the repository profiles are not marked as that format. This will break " + "in the future. 
Please either convert the following paths " + "to files, or add\nprofile-formats = portage-1\nto the " + "repositories layout.conf. Files: '%(files)s'\n") + % dict(profile_path=currentPath, repo_name=repo_loc, + files=', '.join(offenders))) + parentsFile = os.path.join(currentPath, "parent") eapi_file = os.path.join(currentPath, "eapi") + f = None try: - eapi = io.open(_unicode_encode(eapi_file, + f = io.open(_unicode_encode(eapi_file, encoding=_encodings['fs'], errors='strict'), - mode='r', encoding=_encodings['content'], errors='replace' - ).readline().strip() + mode='r', encoding=_encodings['content'], errors='replace') + eapi = f.readline().strip() except IOError: pass else: @@ -102,21 +171,69 @@ class LocationsManager(object): "Profile contains unsupported " "EAPI '%s': '%s'") % \ (eapi, os.path.realpath(eapi_file),)) + finally: + if f is not None: + f.close() if os.path.exists(parentsFile): parents = grabfile(parentsFile) if not parents: raise ParseError( _("Empty parent file: '%s'") % parentsFile) for parentPath in parents: + abs_parent = parentPath[:1] == os.sep + if not abs_parent and allow_parent_colon: + parentPath = self._expand_parent_colon(parentsFile, + parentPath, repo_loc, repositories) + + # NOTE: This os.path.join() call is intended to ignore + # currentPath if parentPath is already absolute. parentPath = normalize_path(os.path.join( currentPath, parentPath)) + + if abs_parent or repo_loc is None or \ + not parentPath.startswith(repo_loc): + # It seems that this parent may point outside + # of the current repo, so realpath it. 
+ parentPath = os.path.realpath(parentPath) + if os.path.exists(parentPath): - self._addProfile(parentPath) + self._addProfile(parentPath, repositories, known_repos) else: raise ParseError( _("Parent '%s' not found: '%s'") % \ (parentPath, parentsFile)) + self.profiles.append(currentPath) + self.profiles_complex.append( + _profile_node(currentPath, allow_directories)) + + def _expand_parent_colon(self, parentsFile, parentPath, + repo_loc, repositories): + colon = parentPath.find(":") + if colon == -1: + return parentPath + + if colon == 0: + if repo_loc is None: + raise ParseError( + _("Parent '%s' not found: '%s'") % \ + (parentPath, parentsFile)) + else: + parentPath = normalize_path(os.path.join( + repo_loc, 'profiles', parentPath[colon+1:])) + else: + p_repo_name = parentPath[:colon] + try: + p_repo_loc = repositories.get_location_for_name(p_repo_name) + except KeyError: + raise ParseError( + _("Parent '%s' not found: '%s'") % \ + (parentPath, parentsFile)) + else: + parentPath = normalize_path(os.path.join( + p_repo_loc, 'profiles', parentPath[colon+1:])) + + return parentPath def set_root_override(self, root_overwrite=None): # Allow ROOT setting to come from make.conf if it's not overridden diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/LocationsManager.pyo b/portage_with_autodep/pym/portage/package/ebuild/_config/LocationsManager.pyo Binary files differnew file mode 100644 index 0000000..c64d313 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/LocationsManager.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/MaskManager.py b/portage_with_autodep/pym/portage/package/ebuild/_config/MaskManager.py index df93e10..bce1152 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/_config/MaskManager.py +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/MaskManager.py @@ -5,9 +5,12 @@ __all__ = ( 'MaskManager', ) +import warnings + from portage import os from portage.dep import 
ExtendedAtomDict, match_from_list, _repo_separator, _slot_separator -from portage.util import append_repo, grabfile_package, stack_lists +from portage.localization import _ +from portage.util import append_repo, grabfile_package, stack_lists, writemsg from portage.versions import cpv_getkey from _emerge.Package import Package @@ -32,30 +35,76 @@ class MaskManager(object): # repo may be often referenced by others as the master. pmask_cache = {} - def grab_pmask(loc): + def grab_pmask(loc, repo_config): if loc not in pmask_cache: - pmask_cache[loc] = grabfile_package( - os.path.join(loc, "profiles", "package.mask"), - recursive=1, remember_source_file=True, verify_eapi=True) + path = os.path.join(loc, 'profiles', 'package.mask') + pmask_cache[loc] = grabfile_package(path, + recursive=repo_config.portage1_profiles, + remember_source_file=True, verify_eapi=True) + if repo_config.portage1_profiles_compat and os.path.isdir(path): + warnings.warn(_("Repository '%(repo_name)s' is implicitly using " + "'portage-1' profile format in its profiles/package.mask, but " + "the repository profiles are not marked as that format. This will break " + "in the future. 
Please either convert the following paths " + "to files, or add\nprofile-formats = portage-1\nto the " + "repositories layout.conf.\n") + % dict(repo_name=repo_config.name)) + return pmask_cache[loc] repo_pkgmasklines = [] for repo in repositories.repos_with_profiles(): lines = [] - repo_lines = grab_pmask(repo.location) + repo_lines = grab_pmask(repo.location, repo) + removals = frozenset(line[0][1:] for line in repo_lines + if line[0][:1] == "-") + matched_removals = set() for master in repo.masters: - master_lines = grab_pmask(master.location) + master_lines = grab_pmask(master.location, master) + for line in master_lines: + if line[0] in removals: + matched_removals.add(line[0]) + # Since we don't stack masters recursively, there aren't any + # atoms earlier in the stack to be matched by negative atoms in + # master_lines. Also, repo_lines may contain negative atoms + # that are intended to negate atoms from a different master + # than the one with which we are currently stacking. Therefore, + # we disable warn_for_unmatched_removal here (see bug #386569). lines.append(stack_lists([master_lines, repo_lines], incremental=1, - remember_source_file=True, warn_for_unmatched_removal=True, - strict_warn_for_unmatched_removal=strict_umatched_removal)) - if not repo.masters: + remember_source_file=True, warn_for_unmatched_removal=False)) + + # It's safe to warn for unmatched removal if masters have not + # been overridden by the user, which is guaranteed when + # user_config is false (when called by repoman). 
+ if repo.masters: + unmatched_removals = removals.difference(matched_removals) + if unmatched_removals and not user_config: + source_file = os.path.join(repo.location, + "profiles", "package.mask") + unmatched_removals = list(unmatched_removals) + if len(unmatched_removals) > 3: + writemsg( + _("--- Unmatched removal atoms in %s: %s and %s more\n") % + (source_file, + ", ".join("-" + x for x in unmatched_removals[:3]), + len(unmatched_removals) - 3), noiselevel=-1) + else: + writemsg( + _("--- Unmatched removal atom(s) in %s: %s\n") % + (source_file, + ", ".join("-" + x for x in unmatched_removals)), + noiselevel=-1) + + else: lines.append(stack_lists([repo_lines], incremental=1, - remember_source_file=True, warn_for_unmatched_removal=True, + remember_source_file=True, warn_for_unmatched_removal=not user_config, strict_warn_for_unmatched_removal=strict_umatched_removal)) repo_pkgmasklines.extend(append_repo(stack_lists(lines), repo.name, remember_source_file=True)) repo_pkgunmasklines = [] for repo in repositories.repos_with_profiles(): + if not repo.portage1_profiles: + continue repo_lines = grabfile_package(os.path.join(repo.location, "profiles", "package.unmask"), \ recursive=1, remember_source_file=True, verify_eapi=True) lines = stack_lists([repo_lines], incremental=1, \ @@ -69,9 +118,14 @@ class MaskManager(object): profile_pkgunmasklines = [] for x in profiles: profile_pkgmasklines.append(grabfile_package( - os.path.join(x, "package.mask"), recursive=1, remember_source_file=True, verify_eapi=True)) - profile_pkgunmasklines.append(grabfile_package( - os.path.join(x, "package.unmask"), recursive=1, remember_source_file=True, verify_eapi=True)) + os.path.join(x.location, "package.mask"), + recursive=x.portage1_directories, + remember_source_file=True, verify_eapi=True)) + if x.portage1_directories: + profile_pkgunmasklines.append(grabfile_package( + os.path.join(x.location, "package.unmask"), + recursive=x.portage1_directories, + remember_source_file=True, 
verify_eapi=True)) profile_pkgmasklines = stack_lists(profile_pkgmasklines, incremental=1, \ remember_source_file=True, warn_for_unmatched_removal=True, strict_warn_for_unmatched_removal=strict_umatched_removal) diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/MaskManager.pyo b/portage_with_autodep/pym/portage/package/ebuild/_config/MaskManager.pyo Binary files differnew file mode 100644 index 0000000..f48eb47 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/MaskManager.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/UseManager.py b/portage_with_autodep/pym/portage/package/ebuild/_config/UseManager.py index d7ef0f6..e1ec7f4 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/_config/UseManager.py +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/UseManager.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ( @@ -7,10 +7,10 @@ __all__ = ( from _emerge.Package import Package from portage import os -from portage.dep import ExtendedAtomDict, remove_slot, _get_useflag_re +from portage.dep import dep_getrepo, dep_getslot, ExtendedAtomDict, remove_slot, _get_useflag_re from portage.localization import _ from portage.util import grabfile, grabdict_package, read_corresponding_eapi_file, stack_lists, writemsg -from portage.versions import cpv_getkey +from portage.versions import cpv_getkey, _pkg_str from portage.package.ebuild._config.helper import ordered_by_atom_specificity @@ -65,9 +65,9 @@ class UseManager(object): self.repositories = repositories - def _parse_file_to_tuple(self, file_name): + def _parse_file_to_tuple(self, file_name, recursive=True): ret = [] - lines = grabfile(file_name, recursive=1) + lines = grabfile(file_name, recursive=recursive) eapi = read_corresponding_eapi_file(file_name) useflag_re = _get_useflag_re(eapi) for prefixed_useflag 
in lines: @@ -82,10 +82,10 @@ class UseManager(object): ret.append(prefixed_useflag) return tuple(ret) - def _parse_file_to_dict(self, file_name, juststrings=False): + def _parse_file_to_dict(self, file_name, juststrings=False, recursive=True): ret = {} location_dict = {} - file_dict = grabdict_package(file_name, recursive=1, verify_eapi=True) + file_dict = grabdict_package(file_name, recursive=recursive, verify_eapi=True) eapi = read_corresponding_eapi_file(file_name) useflag_re = _get_useflag_re(eapi) for k, v in file_dict.items(): @@ -132,19 +132,29 @@ class UseManager(object): return ret def _parse_profile_files_to_tuple_of_tuples(self, file_name, locations): - return tuple(self._parse_file_to_tuple(os.path.join(profile, file_name)) for profile in locations) + return tuple(self._parse_file_to_tuple( + os.path.join(profile.location, file_name), + recursive=profile.portage1_directories) + for profile in locations) def _parse_profile_files_to_tuple_of_dicts(self, file_name, locations, juststrings=False): - return tuple(self._parse_file_to_dict(os.path.join(profile, file_name), juststrings) for profile in locations) + return tuple(self._parse_file_to_dict( + os.path.join(profile.location, file_name), juststrings, + recursive=profile.portage1_directories) + for profile in locations) def getUseMask(self, pkg=None): if pkg is None: return frozenset(stack_lists( self._usemask_list, incremental=True)) + slot = None cp = getattr(pkg, "cp", None) if cp is None: - cp = cpv_getkey(remove_slot(pkg)) + slot = dep_getslot(pkg) + repo = dep_getrepo(pkg) + pkg = _pkg_str(remove_slot(pkg), slot=slot, repo=repo) + cp = pkg.cp usemask = [] if hasattr(pkg, "repo") and pkg.repo != Package.UNKNOWN_REPO: repos = [] diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/UseManager.pyo b/portage_with_autodep/pym/portage/package/ebuild/_config/UseManager.pyo Binary files differnew file mode 100644 index 0000000..2c9a609 --- /dev/null +++ 
b/portage_with_autodep/pym/portage/package/ebuild/_config/UseManager.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/VirtualsManager.pyo b/portage_with_autodep/pym/portage/package/ebuild/_config/VirtualsManager.pyo Binary files differnew file mode 100644 index 0000000..b2ebd21 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/VirtualsManager.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/__init__.pyo b/portage_with_autodep/pym/portage/package/ebuild/_config/__init__.pyo Binary files differnew file mode 100644 index 0000000..b03cc29 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/__init__.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/env_var_validation.pyo b/portage_with_autodep/pym/portage/package/ebuild/_config/env_var_validation.pyo Binary files differnew file mode 100644 index 0000000..aeee789 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/env_var_validation.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/features_set.pyo b/portage_with_autodep/pym/portage/package/ebuild/_config/features_set.pyo Binary files differnew file mode 100644 index 0000000..9854444 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/features_set.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/helper.py b/portage_with_autodep/pym/portage/package/ebuild/_config/helper.py index 4f46781..ee0c090 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/_config/helper.py +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/helper.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ( @@ -24,7 +24,7 @@ def ordered_by_atom_specificity(cpdict, pkg, repo=None): order to achieve desired results (and thus corrupting the ChangeLog 
like ordering of the file). """ - if repo and repo != Package.UNKNOWN_REPO: + if not hasattr(pkg, 'repo') and repo and repo != Package.UNKNOWN_REPO: pkg = pkg + _repo_separator + repo results = [] diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/helper.pyo b/portage_with_autodep/pym/portage/package/ebuild/_config/helper.pyo Binary files differnew file mode 100644 index 0000000..f2b9261 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/helper.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/special_env_vars.py b/portage_with_autodep/pym/portage/package/ebuild/_config/special_env_vars.py index 6d42809..1a75de9 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/_config/special_env_vars.py +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/special_env_vars.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ( @@ -15,14 +15,14 @@ env_blacklist = frozenset(( "A", "AA", "CATEGORY", "DEPEND", "DESCRIPTION", "EAPI", "EBUILD_FORCE_TEST", "EBUILD_PHASE", "EBUILD_SKIP_MANIFEST", "ED", "EMERGE_FROM", "EPREFIX", "EROOT", - "HOMEPAGE", "INHERITED", "IUSE", + "GREP_OPTIONS", "HOMEPAGE", "INHERITED", "IUSE", "KEYWORDS", "LICENSE", "MERGE_TYPE", "PDEPEND", "PF", "PKGUSE", "PORTAGE_BACKGROUND", "PORTAGE_BACKGROUND_UNMERGE", "PORTAGE_BUILDIR_LOCKED", "PORTAGE_BUILT_USE", "PORTAGE_CONFIGROOT", "PORTAGE_IUSE", - "PORTAGE_NONFATAL", "PORTAGE_REPO_NAME", "PORTAGE_SANDBOX_COMPAT_LEVEL", - "PORTAGE_USE", "PROPERTIES", "PROVIDE", "RDEPEND", "RESTRICT", - "ROOT", "SLOT", "SRC_URI" + "PORTAGE_NONFATAL", "PORTAGE_REPO_NAME", + "PORTAGE_USE", "PROPERTIES", "PROVIDE", "RDEPEND", "REPOSITORY", + "RESTRICT", "ROOT", "SLOT", "SRC_URI" )) environ_whitelist = [] @@ -36,7 +36,7 @@ environ_whitelist = [] # environment in order to prevent sandbox from sourcing /etc/profile # in it's bashrc 
(causing major leakage). environ_whitelist += [ - "ACCEPT_LICENSE", "BASH_ENV", "BUILD_PREFIX", "D", + "ACCEPT_LICENSE", "BASH_ENV", "BUILD_PREFIX", "COLUMNS", "D", "DISTDIR", "DOC_SYMLINKS_DIR", "EAPI", "EBUILD", "EBUILD_FORCE_TEST", "EBUILD_PHASE", "ECLASSDIR", "ECLASS_DEPTH", "ED", @@ -50,23 +50,23 @@ environ_whitelist += [ "PORTAGE_BINPKG_TMPFILE", "PORTAGE_BIN_PATH", "PORTAGE_BUILDDIR", "PORTAGE_BUNZIP2_COMMAND", "PORTAGE_BZIP2_COMMAND", - "PORTAGE_COLORMAP", + "PORTAGE_COLORMAP", "PORTAGE_COMPRESS_EXCLUDE_SUFFIXES", "PORTAGE_CONFIGROOT", "PORTAGE_DEBUG", "PORTAGE_DEPCACHEDIR", "PORTAGE_EBUILD_EXIT_FILE", "PORTAGE_FEATURES", "PORTAGE_GID", "PORTAGE_GRPNAME", "PORTAGE_INST_GID", "PORTAGE_INST_UID", "PORTAGE_IPC_DAEMON", "PORTAGE_IUSE", - "PORTAGE_LOG_FILE", + "PORTAGE_LOG_FILE", "PORTAGE_OVERRIDE_EPREFIX", "PORTAGE_PYM_PATH", "PORTAGE_PYTHON", "PORTAGE_QUIET", "PORTAGE_REPO_NAME", "PORTAGE_RESTRICT", - "PORTAGE_SANDBOX_COMPAT_LEVEL", "PORTAGE_SIGPIPE_STATUS", + "PORTAGE_SIGPIPE_STATUS", "PORTAGE_TMPDIR", "PORTAGE_UPDATE_ENV", "PORTAGE_USERNAME", "PORTAGE_VERBOSE", "PORTAGE_WORKDIR_MODE", "PORTDIR", "PORTDIR_OVERLAY", "PREROOTPATH", "PROFILE_PATHS", "REPLACING_VERSIONS", "REPLACED_BY_VERSION", "ROOT", "ROOTPATH", "T", "TMP", "TMPDIR", "USE_EXPAND", "USE_ORDER", "WORKDIR", - "XARGS", + "XARGS", "__PORTAGE_TEST_HARDLINK_LOCKS", ] # user config variables @@ -134,8 +134,9 @@ environ_filter += [ # portage config variables and variables set directly by portage environ_filter += [ "ACCEPT_CHOSTS", "ACCEPT_KEYWORDS", "ACCEPT_PROPERTIES", "AUTOCLEAN", - "CLEAN_DELAY", "COLLISION_IGNORE", "CONFIG_PROTECT", - "CONFIG_PROTECT_MASK", "EGENCACHE_DEFAULT_OPTS", "EMERGE_DEFAULT_OPTS", + "CLEAN_DELAY", "COLLISION_IGNORE", + "CONFIG_PROTECT", "CONFIG_PROTECT_MASK", + "EGENCACHE_DEFAULT_OPTS", "EMERGE_DEFAULT_OPTS", "EMERGE_LOG_DIR", "EMERGE_WARNING_DELAY", "FETCHCOMMAND", "FETCHCOMMAND_FTP", @@ -143,7 +144,8 @@ environ_filter += [ "FETCHCOMMAND_RSYNC", "FETCHCOMMAND_SFTP", 
"GENTOO_MIRRORS", "NOCONFMEM", "O", "PORTAGE_BACKGROUND", "PORTAGE_BACKGROUND_UNMERGE", - "PORTAGE_BINHOST_CHUNKSIZE", "PORTAGE_BUILDIR_LOCKED", "PORTAGE_CALLER", + "PORTAGE_BINHOST", + "PORTAGE_BINHOST_CHUNKSIZE", "PORTAGE_BUILDIR_LOCKED", "PORTAGE_ELOG_CLASSES", "PORTAGE_ELOG_MAILFROM", "PORTAGE_ELOG_MAILSUBJECT", "PORTAGE_ELOG_MAILURI", "PORTAGE_ELOG_SYSTEM", @@ -156,8 +158,8 @@ environ_filter += [ "PORTAGE_RO_DISTDIRS", "PORTAGE_RSYNC_EXTRA_OPTS", "PORTAGE_RSYNC_OPTS", "PORTAGE_RSYNC_RETRIES", "PORTAGE_SYNC_STALE", - "PORTAGE_USE", "PORT_LOGDIR", - "QUICKPKG_DEFAULT_OPTS", + "PORTAGE_USE", "PORT_LOGDIR", "PORT_LOGDIR_CLEAN", + "QUICKPKG_DEFAULT_OPTS", "REPOMAN_DEFAULT_OPTS", "RESUMECOMMAND", "RESUMECOMMAND_FTP", "RESUMECOMMAND_HTTP", "RESUMECOMMAND_HTTPS", "RESUMECOMMAND_RSYNC", "RESUMECOMMAND_SFTP", diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/special_env_vars.pyo b/portage_with_autodep/pym/portage/package/ebuild/_config/special_env_vars.pyo Binary files differnew file mode 100644 index 0000000..06ea37e --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/special_env_vars.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_config/unpack_dependencies.py b/portage_with_autodep/pym/portage/package/ebuild/_config/unpack_dependencies.py new file mode 100644 index 0000000..1375189 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_config/unpack_dependencies.py @@ -0,0 +1,38 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +from portage import os, _supported_eapis +from portage.dep import use_reduce +from portage.eapi import eapi_has_automatic_unpack_dependencies +from portage.exception import InvalidDependString +from portage.localization import _ +from portage.util import grabfile, writemsg + +def load_unpack_dependencies_configuration(repositories): + repo_dict = {} + for repo in repositories.repos_with_profiles(): + for eapi 
in _supported_eapis: + if eapi_has_automatic_unpack_dependencies(eapi): + file_name = os.path.join(repo.location, "profiles", "unpack_dependencies", eapi) + lines = grabfile(file_name, recursive=True) + for line in lines: + elements = line.split() + suffix = elements[0].lower() + if len(elements) == 1: + writemsg(_("--- Missing unpack dependencies for '%s' suffix in '%s'\n") % (suffix, file_name)) + depend = " ".join(elements[1:]) + try: + use_reduce(depend, eapi=eapi) + except InvalidDependString as e: + writemsg(_("--- Invalid unpack dependencies for '%s' suffix in '%s': '%s'\n" % (suffix, file_name, e))) + else: + repo_dict.setdefault(repo.name, {}).setdefault(eapi, {})[suffix] = depend + + ret = {} + for repo in repositories.repos_with_profiles(): + for repo_name in [x.name for x in repo.masters] + [repo.name]: + for eapi in repo_dict.get(repo_name, {}): + for suffix, depend in repo_dict.get(repo_name, {}).get(eapi, {}).items(): + ret.setdefault(repo.name, {}).setdefault(eapi, {})[suffix] = depend + + return ret diff --git a/portage_with_autodep/pym/portage/package/ebuild/_eapi_invalid.py b/portage_with_autodep/pym/portage/package/ebuild/_eapi_invalid.py new file mode 100644 index 0000000..d23677d --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_eapi_invalid.py @@ -0,0 +1,54 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import textwrap + +import portage +from portage.dep import _repo_separator +from portage.elog import elog_process +from portage.elog.messages import eerror + +def eapi_invalid(self, cpv, repo_name, settings, + eapi_var, eapi_parsed, eapi_lineno): + + msg = [] + msg.extend(textwrap.wrap(("EAPI assignment in ebuild '%s%s%s' does not" + " conform with PMS section 7.3.1 (see bug #402167):") % + (cpv, _repo_separator, repo_name), 70)) + + if not eapi_parsed: + # None means the assignment was not found, while an + # empty string indicates an (invalid) empty assingment. 
+ msg.append( + "\tvalid EAPI assignment must" + " occur on or before line: %s" % + eapi_lineno) + else: + msg.append(("\tbash returned EAPI '%s' which does not match " + "assignment on line: %s") % + (eapi_var, eapi_lineno)) + + if 'parse-eapi-ebuild-head' in settings.features: + msg.extend(textwrap.wrap(("NOTE: This error will soon" + " become unconditionally fatal in a future version of Portage," + " but at this time, it can be made non-fatal by setting" + " FEATURES=-parse-eapi-ebuild-head in" + " make.conf."), 70)) + else: + msg.extend(textwrap.wrap(("NOTE: This error will soon" + " become unconditionally fatal in a future version of Portage." + " At the earliest opportunity, please enable" + " FEATURES=parse-eapi-ebuild-head in make.conf in order to" + " make this error fatal."), 70)) + + if portage.data.secpass >= 2: + # TODO: improve elog permission error handling (bug #416231) + for line in msg: + eerror(line, phase="other", key=cpv) + elog_process(cpv, settings, + phasefilter=("other",)) + + else: + out = portage.output.EOutput() + for line in msg: + out.eerror(line) diff --git a/portage_with_autodep/pym/portage/package/ebuild/_eapi_invalid.pyo b/portage_with_autodep/pym/portage/package/ebuild/_eapi_invalid.pyo Binary files differnew file mode 100644 index 0000000..0181c03 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_eapi_invalid.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_ipc/ExitCommand.pyo b/portage_with_autodep/pym/portage/package/ebuild/_ipc/ExitCommand.pyo Binary files differnew file mode 100644 index 0000000..315cb0f --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_ipc/ExitCommand.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_ipc/IpcCommand.pyo b/portage_with_autodep/pym/portage/package/ebuild/_ipc/IpcCommand.pyo Binary files differnew file mode 100644 index 0000000..9f75518 
diff --git a/portage_with_autodep/pym/portage/package/ebuild/_ipc/QueryCommand.py b/portage_with_autodep/pym/portage/package/ebuild/_ipc/QueryCommand.py index fb6e61e..7bbb0e8 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/_ipc/QueryCommand.py +++ b/portage_with_autodep/pym/portage/package/ebuild/_ipc/QueryCommand.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import io @@ -7,6 +7,7 @@ import portage from portage import os from portage import _unicode_decode from portage.dep import Atom +from portage.eapi import eapi_has_repo_deps from portage.elog import messages as elog_messages from portage.exception import InvalidAtom from portage.package.ebuild._ipc.IpcCommand import IpcCommand @@ -26,19 +27,21 @@ class QueryCommand(IpcCommand): def __call__(self, argv): """ - @returns: tuple of (stdout, stderr, returncode) + @return: tuple of (stdout, stderr, returncode) """ cmd, root, atom_str = argv + eapi = self.settings.get('EAPI') + allow_repo = eapi_has_repo_deps(eapi) try: - atom = Atom(atom_str) + atom = Atom(atom_str, allow_repo=allow_repo) except InvalidAtom: return ('', 'invalid atom: %s\n' % atom_str, 2) warnings = [] try: - atom = Atom(atom_str, eapi=self.settings.get('EAPI')) + atom = Atom(atom_str, allow_repo=allow_repo, eapi=eapi) except InvalidAtom as e: warnings.append(_unicode_decode("QA Notice: %s: %s") % (cmd, e)) diff --git a/portage_with_autodep/pym/portage/package/ebuild/_ipc/QueryCommand.pyo b/portage_with_autodep/pym/portage/package/ebuild/_ipc/QueryCommand.pyo Binary files differnew file mode 100644 index 0000000..0e9ee96 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_ipc/QueryCommand.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_ipc/__init__.pyo b/portage_with_autodep/pym/portage/package/ebuild/_ipc/__init__.pyo Binary files differnew file mode 100644 index 0000000..d9f8d25 
--- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_ipc/__init__.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/_metadata_invalid.py b/portage_with_autodep/pym/portage/package/ebuild/_metadata_invalid.py new file mode 100644 index 0000000..bcf1f7f --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_metadata_invalid.py @@ -0,0 +1,41 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import textwrap + +import portage +from portage.dep import _repo_separator +from portage.elog import elog_process +from portage.elog.messages import eerror + +def eapi_invalid(self, cpv, repo_name, settings, + eapi_var, eapi_parsed, eapi_lineno): + + msg = [] + msg.extend(textwrap.wrap(("EAPI assignment in ebuild '%s%s%s' does not" + " conform with PMS section 7.3.1 (see bug #402167):") % + (cpv, _repo_separator, repo_name), 70)) + + if not eapi_parsed: + # None means the assignment was not found, while an + # empty string indicates an (invalid) empty assignment. 
+ msg.append( + "\tvalid EAPI assignment must" + " occur on or before line: %s" % + eapi_lineno) + else: + msg.append(("\tbash returned EAPI '%s' which does not match " + "assignment on line: %s") % + (eapi_var, eapi_lineno)) + + if portage.data.secpass >= 2: + # TODO: improve elog permission error handling (bug #416231) + for line in msg: + eerror(line, phase="other", key=cpv) + elog_process(cpv, settings, + phasefilter=("other",)) + + else: + out = portage.output.EOutput() + for line in msg: + out.eerror(line) diff --git a/portage_with_autodep/pym/portage/package/ebuild/_parallel_manifest/ManifestProcess.py b/portage_with_autodep/pym/portage/package/ebuild/_parallel_manifest/ManifestProcess.py new file mode 100644 index 0000000..44e2576 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_parallel_manifest/ManifestProcess.py @@ -0,0 +1,43 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import portage +from portage import os +from portage.exception import (FileNotFound, + PermissionDenied, PortagePackageException) +from portage.localization import _ +from portage.util._async.ForkProcess import ForkProcess + +class ManifestProcess(ForkProcess): + + __slots__ = ("cp", "distdir", "fetchlist_dict", "repo_config") + + MODIFIED = 16 + + def _run(self): + mf = self.repo_config.load_manifest( + os.path.join(self.repo_config.location, self.cp), + self.distdir, fetchlist_dict=self.fetchlist_dict) + + try: + mf.create(assumeDistHashesAlways=True) + except FileNotFound as e: + portage.writemsg(_("!!! File %s doesn't exist, can't update " + "Manifest\n") % e, noiselevel=-1) + return 1 + + except PortagePackageException as e: + portage.writemsg(("!!! %s\n") % (e,), noiselevel=-1) + return 1 + + try: + modified = mf.write(sign=False) + except PermissionDenied as e: + portage.writemsg("!!! 
%s: %s\n" % (_("Permission Denied"), e,), + noiselevel=-1) + return 1 + else: + if modified: + return self.MODIFIED + else: + return os.EX_OK diff --git a/portage_with_autodep/pym/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py b/portage_with_autodep/pym/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py new file mode 100644 index 0000000..38ac482 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py @@ -0,0 +1,93 @@ +# Copyright 2012-2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import portage +from portage import os +from portage.dep import _repo_separator +from portage.exception import InvalidDependString +from portage.localization import _ +from portage.util._async.AsyncScheduler import AsyncScheduler +from .ManifestTask import ManifestTask + +class ManifestScheduler(AsyncScheduler): + + def __init__(self, portdb, cp_iter=None, + gpg_cmd=None, gpg_vars=None, force_sign_key=None, **kwargs): + + AsyncScheduler.__init__(self, **kwargs) + + self._portdb = portdb + + if cp_iter is None: + cp_iter = self._iter_every_cp() + self._cp_iter = cp_iter + self._gpg_cmd = gpg_cmd + self._gpg_vars = gpg_vars + self._force_sign_key = force_sign_key + self._task_iter = self._iter_tasks() + + def _next_task(self): + return next(self._task_iter) + + def _iter_every_cp(self): + # List categories individually, in order to start yielding quicker, + # and in order to reduce latency in case of a signal interrupt. + cp_all = self._portdb.cp_all + for category in sorted(self._portdb.categories): + for cp in cp_all(categories=(category,)): + yield cp + + def _iter_tasks(self): + portdb = self._portdb + distdir = portdb.settings["DISTDIR"] + disabled_repos = set() + + for cp in self._cp_iter: + if self._terminated.is_set(): + break + # We iterate over portdb.porttrees, since it's common to + # tweak this attribute in order to adjust repo selection. 
+ for mytree in portdb.porttrees: + if self._terminated.is_set(): + break + repo_config = portdb.repositories.get_repo_for_location(mytree) + if not repo_config.create_manifest: + if repo_config.name not in disabled_repos: + disabled_repos.add(repo_config.name) + portage.writemsg( + _(">>> Skipping creating Manifest for %s%s%s; " + "repository is configured to not use them\n") % + (cp, _repo_separator, repo_config.name), + noiselevel=-1) + continue + cpv_list = portdb.cp_list(cp, mytree=[repo_config.location]) + if not cpv_list: + continue + fetchlist_dict = {} + try: + for cpv in cpv_list: + fetchlist_dict[cpv] = \ + list(portdb.getFetchMap(cpv, mytree=mytree)) + except InvalidDependString as e: + portage.writemsg( + _("!!! %s%s%s: SRC_URI: %s\n") % + (cp, _repo_separator, repo_config.name, e), + noiselevel=-1) + self._error_count += 1 + continue + + yield ManifestTask(cp=cp, distdir=distdir, + fetchlist_dict=fetchlist_dict, repo_config=repo_config, + gpg_cmd=self._gpg_cmd, gpg_vars=self._gpg_vars, + force_sign_key=self._force_sign_key) + + def _task_exit(self, task): + + if task.returncode != os.EX_OK: + if not self._terminated_tasks: + portage.writemsg( + "Error processing %s%s%s, continuing...\n" % + (task.cp, _repo_separator, task.repo_config.name), + noiselevel=-1) + + AsyncScheduler._task_exit(self, task) diff --git a/portage_with_autodep/pym/portage/package/ebuild/_parallel_manifest/ManifestTask.py b/portage_with_autodep/pym/portage/package/ebuild/_parallel_manifest/ManifestTask.py new file mode 100644 index 0000000..0ee2b91 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_parallel_manifest/ManifestTask.py @@ -0,0 +1,186 @@ +# Copyright 2012-2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import errno +import re +import subprocess + +from portage import os +from portage import _unicode_encode, _encodings +from portage.const import MANIFEST2_IDENTIFIERS +from portage.util import 
(atomic_ofstream, grablines, + shlex_split, varexpand, writemsg) +from portage.util._async.PipeLogger import PipeLogger +from portage.util._async.PopenProcess import PopenProcess +from _emerge.CompositeTask import CompositeTask +from _emerge.PipeReader import PipeReader +from .ManifestProcess import ManifestProcess + +class ManifestTask(CompositeTask): + + __slots__ = ("cp", "distdir", "fetchlist_dict", "gpg_cmd", + "gpg_vars", "repo_config", "force_sign_key", "_manifest_path") + + _PGP_HEADER = b"BEGIN PGP SIGNED MESSAGE" + _manifest_line_re = re.compile(r'^(%s) ' % "|".join(MANIFEST2_IDENTIFIERS)) + _gpg_key_id_re = re.compile(r'^[0-9A-F]*$') + _gpg_key_id_lengths = (8, 16, 24, 32, 40) + + def _start(self): + self._manifest_path = os.path.join(self.repo_config.location, + self.cp, "Manifest") + manifest_proc = ManifestProcess(cp=self.cp, distdir=self.distdir, + fetchlist_dict=self.fetchlist_dict, repo_config=self.repo_config, + scheduler=self.scheduler) + self._start_task(manifest_proc, self._manifest_proc_exit) + + def _manifest_proc_exit(self, manifest_proc): + self._assert_current(manifest_proc) + if manifest_proc.returncode not in (os.EX_OK, manifest_proc.MODIFIED): + self.returncode = manifest_proc.returncode + self._current_task = None + self.wait() + return + + modified = manifest_proc.returncode == manifest_proc.MODIFIED + sign = self.gpg_cmd is not None + + if not modified and sign: + sign = self._need_signature() + if not sign and self.force_sign_key is not None \ + and os.path.exists(self._manifest_path): + self._check_sig_key() + return + + if not sign or not os.path.exists(self._manifest_path): + self.returncode = os.EX_OK + self._current_task = None + self.wait() + return + + self._start_gpg_proc() + + def _check_sig_key(self): + null_fd = os.open('/dev/null', os.O_RDONLY) + popen_proc = PopenProcess(proc=subprocess.Popen( + ["gpg", "--verify", self._manifest_path], + stdin=null_fd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT), + 
pipe_reader=PipeReader()) + os.close(null_fd) + popen_proc.pipe_reader.input_files = { + "producer" : popen_proc.proc.stdout} + self._start_task(popen_proc, self._check_sig_key_exit) + + @staticmethod + def _parse_gpg_key(output): + """ + Returns the first token which appears to represent a gpg key + id, or None if there is no such token. + """ + regex = ManifestTask._gpg_key_id_re + lengths = ManifestTask._gpg_key_id_lengths + for token in output.split(): + m = regex.match(token) + if m is not None and len(m.group(0)) in lengths: + return m.group(0) + return None + + @staticmethod + def _normalize_gpg_key(key_str): + """ + Strips leading "0x" and trailing "!", and converts to uppercase + (intended to be the same format as that in gpg --verify output). + """ + key_str = key_str.upper() + if key_str.startswith("0X"): + key_str = key_str[2:] + key_str = key_str.rstrip("!") + return key_str + + def _check_sig_key_exit(self, proc): + self._assert_current(proc) + + parsed_key = self._parse_gpg_key( + proc.pipe_reader.getvalue().decode('utf_8', 'replace')) + if parsed_key is not None and \ + self._normalize_gpg_key(parsed_key) == \ + self._normalize_gpg_key(self.force_sign_key): + self.returncode = os.EX_OK + self._current_task = None + self.wait() + return + + if self._was_cancelled(): + self.wait() + return + + self._strip_sig(self._manifest_path) + self._start_gpg_proc() + + @staticmethod + def _strip_sig(manifest_path): + """ + Strip an existing signature from a Manifest file. 
+ """ + line_re = ManifestTask._manifest_line_re + lines = grablines(manifest_path) + f = None + try: + f = atomic_ofstream(manifest_path) + for line in lines: + if line_re.match(line) is not None: + f.write(line) + f.close() + f = None + finally: + if f is not None: + f.abort() + + def _start_gpg_proc(self): + gpg_vars = self.gpg_vars + if gpg_vars is None: + gpg_vars = {} + else: + gpg_vars = gpg_vars.copy() + gpg_vars["FILE"] = self._manifest_path + gpg_cmd = varexpand(self.gpg_cmd, mydict=gpg_vars) + gpg_cmd = shlex_split(gpg_cmd) + gpg_proc = PopenProcess(proc=subprocess.Popen(gpg_cmd, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT)) + # PipeLogger echos output and efficiently monitors for process + # exit by listening for the stdout EOF event. + gpg_proc.pipe_reader = PipeLogger(background=self.background, + input_fd=gpg_proc.proc.stdout, scheduler=self.scheduler) + self._start_task(gpg_proc, self._gpg_proc_exit) + + def _gpg_proc_exit(self, gpg_proc): + if self._default_exit(gpg_proc) != os.EX_OK: + self.wait() + return + + rename_args = (self._manifest_path + ".asc", self._manifest_path) + try: + os.rename(*rename_args) + except OSError as e: + writemsg("!!! 
rename('%s', '%s'): %s\n" % rename_args + (e,), + noiselevel=-1) + try: + os.unlink(self._manifest_path + ".asc") + except OSError: + pass + self.returncode = 1 + else: + self.returncode = os.EX_OK + + self._current_task = None + self.wait() + + def _need_signature(self): + try: + with open(_unicode_encode(self._manifest_path, + encoding=_encodings['fs'], errors='strict'), 'rb') as f: + return self._PGP_HEADER not in f.readline() + except IOError as e: + if e.errno in (errno.ENOENT, errno.ESTALE): + return False + raise diff --git a/portage_with_autodep/pym/portage/tests/resolver/__init__.py b/portage_with_autodep/pym/portage/package/ebuild/_parallel_manifest/__init__.py index 21a391a..418ad86 100644 --- a/portage_with_autodep/pym/portage/tests/resolver/__init__.py +++ b/portage_with_autodep/pym/portage/package/ebuild/_parallel_manifest/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2010 Gentoo Foundation +# Copyright 2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 diff --git a/portage_with_autodep/pym/portage/package/ebuild/_spawn_nofetch.py b/portage_with_autodep/pym/portage/package/ebuild/_spawn_nofetch.py index befdc89..94f8c79 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/_spawn_nofetch.py +++ b/portage_with_autodep/pym/portage/package/ebuild/_spawn_nofetch.py @@ -1,10 +1,10 @@ # Copyright 2010-2011 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 -import shutil import tempfile from portage import os +from portage import shutil from portage.const import EBUILD_PHASES from portage.elog import elog_process from portage.package.ebuild.config import config @@ -20,7 +20,11 @@ def spawn_nofetch(portdb, ebuild_path, settings=None): to cache metadata. It will be cloned internally, in order to prevent any changes from interfering with the calling code. If settings is None then a suitable config instance will be - acquired from the given portdbapi instance. 
+ acquired from the given portdbapi instance. Do not use the + settings parameter unless setcpv has been called on the given + instance, since otherwise it's possible to trigger issues like + bug #408817 due to fragile assumptions involving the config + state inside doebuild_environment(). A private PORTAGE_BUILDDIR will be created and cleaned up, in order to avoid any interference with any other processes. diff --git a/portage_with_autodep/pym/portage/package/ebuild/_spawn_nofetch.pyo b/portage_with_autodep/pym/portage/package/ebuild/_spawn_nofetch.pyo Binary files differnew file mode 100644 index 0000000..ac449ea --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/_spawn_nofetch.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/config.py b/portage_with_autodep/pym/portage/package/ebuild/config.py index a8c6ad6..97cbd99 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/config.py +++ b/portage_with_autodep/pym/portage/package/ebuild/config.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = [ @@ -7,7 +7,10 @@ __all__ = [ import copy from itertools import chain +import grp import logging +import platform +import pwd import re import sys import warnings @@ -21,10 +24,9 @@ from portage import bsd_chflags, \ load_mod, os, selinux, _unicode_decode from portage.const import CACHE_PATH, \ DEPCACHE_PATH, INCREMENTALS, MAKE_CONF_FILE, \ - MODULES_FILE_PATH, PORTAGE_BIN_PATH, PORTAGE_PYM_PATH, \ + MODULES_FILE_PATH, \ PRIVATE_PATH, PROFILE_PATH, USER_CONFIG_PATH, \ USER_VIRTUALS_FILE -from portage.const import _SANDBOX_COMPAT_LEVEL from portage.dbapi import dbapi from portage.dbapi.porttree import portdbapi from portage.dbapi.vartree import vartree @@ -41,7 +43,7 @@ from portage.util import ensure_dirs, getconfig, grabdict, \ grabdict_package, grabfile, grabfile_package, LazyItemsDict, \ normalize_path, 
shlex_split, stack_dictlist, stack_dicts, stack_lists, \ writemsg, writemsg_level -from portage.versions import catpkgsplit, catsplit, cpv_getkey +from portage.versions import catpkgsplit, catsplit, cpv_getkey, _pkg_str from portage.package.ebuild._config import special_env_vars from portage.package.ebuild._config.env_var_validation import validate_cmd_var @@ -120,11 +122,19 @@ class config(object): virtuals ...etc you look in here. """ + _constant_keys = frozenset(['PORTAGE_BIN_PATH', 'PORTAGE_GID', + 'PORTAGE_PYM_PATH']) + _setcpv_aux_keys = ('DEFINED_PHASES', 'DEPEND', 'EAPI', 'INHERITED', 'IUSE', 'REQUIRED_USE', 'KEYWORDS', 'LICENSE', 'PDEPEND', 'PROPERTIES', 'PROVIDE', 'RDEPEND', 'SLOT', 'repository', 'RESTRICT', 'LICENSE',) + _module_aliases = { + "cache.metadata_overlay.database" : "portage.cache.flat_hash.database", + "portage.cache.metadata_overlay.database" : "portage.cache.flat_hash.database", + } + _case_insensitive_vars = special_env_vars.case_insensitive_vars _default_globals = special_env_vars.default_globals _env_blacklist = special_env_vars.env_blacklist @@ -135,7 +145,8 @@ class config(object): def __init__(self, clone=None, mycpv=None, config_profile_path=None, config_incrementals=None, config_root=None, target_root=None, - _eprefix=None, local_config=True, env=None, _unmatched_removal=False): + eprefix=None, local_config=True, env=None, + _unmatched_removal=False): """ @param clone: If provided, init will use deepcopy to copy by value the instance. @type clone: Instance of config class. @@ -151,8 +162,8 @@ class config(object): @type config_root: String @param target_root: __init__ override of $ROOT env variable. 
@type target_root: String - @param _eprefix: set the EPREFIX variable (private, used by internal tests) - @type _eprefix: String + @param eprefix: set the EPREFIX variable (default is portage.const.EPREFIX) + @type eprefix: String @param local_config: Enables loading of local config (/etc/portage); used most by repoman to ignore local config (keywording and unmasking) @type local_config: Boolean @@ -164,10 +175,6 @@ class config(object): @type _unmatched_removal: Boolean """ - # rename local _eprefix variable for convenience - eprefix = _eprefix - del _eprefix - # When initializing the global portage.settings instance, avoid # raising exceptions whenever possible since exceptions thrown # from 'import portage' or 'import portage.exceptions' statements @@ -208,6 +215,7 @@ class config(object): self.repositories = clone.repositories self._iuse_implicit_match = clone._iuse_implicit_match self._non_user_variables = clone._non_user_variables + self._env_d_blacklist = clone._env_d_blacklist self._repo_make_defaults = clone._repo_make_defaults self.usemask = clone.usemask self.useforce = clone.useforce @@ -272,9 +280,6 @@ class config(object): eprefix = locations_manager.eprefix config_root = locations_manager.config_root - self.profiles = locations_manager.profiles - self.profile_path = locations_manager.profile_path - self.user_profile_dir = locations_manager.user_profile_dir abs_user_config = locations_manager.abs_user_config make_conf = getconfig( @@ -293,6 +298,38 @@ class config(object): eroot = locations_manager.eroot self.global_config_path = locations_manager.global_config_path + # The expand_map is used for variable substitution + # in getconfig() calls, and the getconfig() calls + # update expand_map with the value of each variable + # assignment that occurs. 
Variable substitution occurs + # in the following order, which corresponds to the + # order of appearance in self.lookuplist: + # + # * env.d + # * make.globals + # * make.defaults + # * make.conf + # + # Notably absent is "env", since we want to avoid any + # interaction with the calling environment that might + # lead to unexpected results. + + env_d = getconfig(os.path.join(eroot, "etc", "profile.env"), + expand=False) or {} + expand_map = env_d.copy() + self._expand_map = expand_map + + # Allow make.globals to set default paths relative to ${EPREFIX}. + expand_map["EPREFIX"] = eprefix + + make_globals = getconfig(os.path.join( + self.global_config_path, 'make.globals'), expand=expand_map) + if make_globals is None: + make_globals = {} + + for k, v in self._default_globals.items(): + make_globals.setdefault(k, v) + if config_incrementals is None: self.incrementals = INCREMENTALS else: @@ -302,14 +339,20 @@ class config(object): self.module_priority = ("user", "default") self.modules = {} - modules_loader = KeyValuePairFileLoader( - os.path.join(config_root, MODULES_FILE_PATH), None, None) + modules_file = os.path.join(config_root, MODULES_FILE_PATH) + modules_loader = KeyValuePairFileLoader(modules_file, None, None) modules_dict, modules_errors = modules_loader.load() self.modules["user"] = modules_dict if self.modules["user"] is None: self.modules["user"] = {} + user_auxdbmodule = \ + self.modules["user"].get("portdbapi.auxdbmodule") + if user_auxdbmodule is not None and \ + user_auxdbmodule in self._module_aliases: + warnings.warn("'%s' is deprecated: %s" % + (user_auxdbmodule, modules_file)) + self.modules["default"] = { - "portdbapi.metadbmodule": "portage.cache.metadata.database", "portdbapi.auxdbmodule": "portage.cache.flat_hash.database", } @@ -328,43 +371,9 @@ class config(object): self.configlist.append({}) self.configdict["pkginternal"] = self.configlist[-1] - self.packages_list = [grabfile_package(os.path.join(x, "packages"), verify_eapi=True) for x 
in self.profiles] - self.packages = tuple(stack_lists(self.packages_list, incremental=1)) - del self.packages_list - #self.packages = grab_stacked("packages", self.profiles, grabfile, incremental_lines=1) - - # revmaskdict - self.prevmaskdict={} - for x in self.packages: - # Negative atoms are filtered by the above stack_lists() call. - if not isinstance(x, Atom): - x = Atom(x.lstrip('*')) - self.prevmaskdict.setdefault(x.cp, []).append(x) - - # The expand_map is used for variable substitution - # in getconfig() calls, and the getconfig() calls - # update expand_map with the value of each variable - # assignment that occurs. Variable substitution occurs - # in the following order, which corresponds to the - # order of appearance in self.lookuplist: - # - # * env.d - # * make.globals - # * make.defaults - # * make.conf - # - # Notably absent is "env", since we want to avoid any - # interaction with the calling environment that might - # lead to unexpected results. - expand_map = {} - self._expand_map = expand_map - - env_d = getconfig(os.path.join(eroot, "etc", "profile.env"), - expand=expand_map) # env_d will be None if profile.env doesn't exist. if env_d: self.configdict["env.d"].update(env_d) - expand_map.update(env_d) # backupenv is used for calculating incremental variables. 
if env is None: @@ -390,40 +399,72 @@ class config(object): self.configdict["env"] = LazyItemsDict(self.backupenv) - for x in (self.global_config_path,): - self.mygcfg = getconfig(os.path.join(x, "make.globals"), - expand=expand_map) - if self.mygcfg: - break + self.configlist.append(make_globals) + self.configdict["globals"]=self.configlist[-1] - if self.mygcfg is None: - self.mygcfg = {} + self.make_defaults_use = [] - for k, v in self._default_globals.items(): - self.mygcfg.setdefault(k, v) + #Loading Repositories + self["PORTAGE_CONFIGROOT"] = config_root + self["ROOT"] = target_root + self["EPREFIX"] = eprefix + self["EROOT"] = eroot + known_repos = [] + portdir = "" + portdir_overlay = "" + for confs in [make_globals, make_conf, self.configdict["env"]]: + v = confs.get("PORTDIR") + if v is not None: + portdir = v + known_repos.append(v) + v = confs.get("PORTDIR_OVERLAY") + if v is not None: + portdir_overlay = v + known_repos.extend(shlex_split(v)) + known_repos = frozenset(known_repos) + self["PORTDIR"] = portdir + self["PORTDIR_OVERLAY"] = portdir_overlay + self.lookuplist = [self.configdict["env"]] + self.repositories = load_repository_config(self) - self.configlist.append(self.mygcfg) - self.configdict["globals"]=self.configlist[-1] + locations_manager.load_profiles(self.repositories, known_repos) - self.make_defaults_use = [] - self.mygcfg = {} + profiles_complex = locations_manager.profiles_complex + self.profiles = locations_manager.profiles + self.profile_path = locations_manager.profile_path + self.user_profile_dir = locations_manager.user_profile_dir + + packages_list = [grabfile_package(os.path.join(x, "packages"), + verify_eapi=True) for x in self.profiles] + self.packages = tuple(stack_lists(packages_list, incremental=1)) + + # revmaskdict + self.prevmaskdict={} + for x in self.packages: + # Negative atoms are filtered by the above stack_lists() call. 
+ if not isinstance(x, Atom): + x = Atom(x.lstrip('*')) + self.prevmaskdict.setdefault(x.cp, []).append(x) + + + mygcfg = {} if self.profiles: mygcfg_dlists = [getconfig(os.path.join(x, "make.defaults"), expand=expand_map) for x in self.profiles] self._make_defaults = mygcfg_dlists - self.mygcfg = stack_dicts(mygcfg_dlists, + mygcfg = stack_dicts(mygcfg_dlists, incrementals=self.incrementals) - if self.mygcfg is None: - self.mygcfg = {} - self.configlist.append(self.mygcfg) + if mygcfg is None: + mygcfg = {} + self.configlist.append(mygcfg) self.configdict["defaults"]=self.configlist[-1] - self.mygcfg = getconfig( + mygcfg = getconfig( os.path.join(config_root, MAKE_CONF_FILE), tolerant=tolerant, allow_sourcing=True, expand=expand_map) or {} - self.mygcfg.update(getconfig( + mygcfg.update(getconfig( os.path.join(abs_user_config, 'make.conf'), tolerant=tolerant, allow_sourcing=True, expand=expand_map) or {}) @@ -439,10 +480,18 @@ class config(object): non_user_variables = frozenset(non_user_variables) self._non_user_variables = non_user_variables + self._env_d_blacklist = frozenset(chain( + profile_only_variables, + self._env_blacklist, + )) + env_d = self.configdict["env.d"] + for k in self._env_d_blacklist: + env_d.pop(k, None) + for k in profile_only_variables: - self.mygcfg.pop(k, None) + mygcfg.pop(k, None) - self.configlist.append(self.mygcfg) + self.configlist.append(mygcfg) self.configdict["conf"]=self.configlist[-1] self.configlist.append(LazyItemsDict()) @@ -472,25 +521,23 @@ class config(object): self["ROOT"] = target_root self.backup_changes("ROOT") + # The PORTAGE_OVERRIDE_EPREFIX variable propagates the EPREFIX + # of this config instance to any portage commands or API + # consumers running in subprocesses. 
self["EPREFIX"] = eprefix self.backup_changes("EPREFIX") + self["PORTAGE_OVERRIDE_EPREFIX"] = eprefix + self.backup_changes("PORTAGE_OVERRIDE_EPREFIX") self["EROOT"] = eroot self.backup_changes("EROOT") - self["PORTAGE_SANDBOX_COMPAT_LEVEL"] = _SANDBOX_COMPAT_LEVEL - self.backup_changes("PORTAGE_SANDBOX_COMPAT_LEVEL") - self._ppropertiesdict = portage.dep.ExtendedAtomDict(dict) self._penvdict = portage.dep.ExtendedAtomDict(dict) - #Loading Repositories - self.repositories = load_repository_config(self) - #filling PORTDIR and PORTDIR_OVERLAY variable for compatibility main_repo = self.repositories.mainRepo() if main_repo is not None: - main_repo = main_repo.user_location - self["PORTDIR"] = main_repo + self["PORTDIR"] = main_repo.user_location self.backup_changes("PORTDIR") # repoman controls PORTDIR_OVERLAY via the environment, so no @@ -501,11 +548,11 @@ class config(object): new_ov = [] if portdir_overlay: - whitespace_re = re.compile(r"\s") + shell_quote_re = re.compile(r"[\s\\\"'$`]") for ov in portdir_overlay: ov = normalize_path(ov) if os.path.isdir(ov): - if whitespace_re.search(ov) is not None: + if shell_quote_re.search(ov) is not None: ov = portage._shell_quote(ov) new_ov.append(ov) else: @@ -528,11 +575,11 @@ class config(object): self._repo_make_defaults[repo.name] = d #Read package.keywords and package.accept_keywords. - self._keywords_manager = KeywordsManager(self.profiles, abs_user_config, \ + self._keywords_manager = KeywordsManager(profiles_complex, abs_user_config, \ local_config, global_accept_keywords=self.configdict["defaults"].get("ACCEPT_KEYWORDS", "")) #Read all USE related files from profiles and optionally from user config. - self._use_manager = UseManager(self.repositories, self.profiles, abs_user_config, user_config=local_config) + self._use_manager = UseManager(self.repositories, profiles_complex, abs_user_config, user_config=local_config) #Initialize all USE related variables we track ourselves. 
self.usemask = self._use_manager.getUseMask() self.useforce = self._use_manager.getUseForce() @@ -549,7 +596,7 @@ class config(object): self.configdict["conf"].get("ACCEPT_LICENSE", "")) #Read package.mask and package.unmask from profiles and optionally from user config - self._mask_manager = MaskManager(self.repositories, self.profiles, + self._mask_manager = MaskManager(self.repositories, profiles_complex, abs_user_config, user_config=local_config, strict_umatched_removal=_unmatched_removal) @@ -597,16 +644,21 @@ class config(object): self.categories = [grabfile(os.path.join(x, "categories")) \ for x in locations_manager.profile_and_user_locations] category_re = dbapi._category_re - self.categories = tuple(sorted( + # categories used to be a tuple, but now we use a frozenset + # for hashed category validation in pordbapi.cp_list() + self.categories = frozenset( x for x in stack_lists(self.categories, incremental=1) - if category_re.match(x) is not None)) + if category_re.match(x) is not None) archlist = [grabfile(os.path.join(x, "arch.list")) \ for x in locations_manager.profile_and_user_locations] archlist = stack_lists(archlist, incremental=1) self.configdict["conf"]["PORTAGE_ARCHLIST"] = " ".join(archlist) - pkgprovidedlines = [grabfile(os.path.join(x, "package.provided"), recursive=1) for x in self.profiles] + pkgprovidedlines = [grabfile( + os.path.join(x.location, "package.provided"), + recursive=x.portage1_directories) + for x in profiles_complex] pkgprovidedlines = stack_lists(pkgprovidedlines, incremental=1) has_invalid_data = False for x in range(len(pkgprovidedlines)-1, -1, -1): @@ -649,9 +701,6 @@ class config(object): if "USE_ORDER" not in self: self.backupenv["USE_ORDER"] = "env:pkg:conf:defaults:pkginternal:repo:env.d" - self["PORTAGE_GID"] = str(portage_gid) - self.backup_changes("PORTAGE_GID") - self.depcachedir = DEPCACHE_PATH if eprefix: # See comments about make.globals and EPREFIX @@ -678,19 +727,60 @@ class config(object): self["CBUILD"] = 
self["CHOST"] self.backup_changes("CBUILD") - self["PORTAGE_BIN_PATH"] = PORTAGE_BIN_PATH - self.backup_changes("PORTAGE_BIN_PATH") - self["PORTAGE_PYM_PATH"] = PORTAGE_PYM_PATH - self.backup_changes("PORTAGE_PYM_PATH") + if "USERLAND" not in self: + # Set default USERLAND so that our test cases can assume that + # it's always set. This allows isolated-functions.sh to avoid + # calling uname -s when sourced. + system = platform.system() + if system is not None and \ + (system.endswith("BSD") or system == "DragonFly"): + self["USERLAND"] = "BSD" + else: + self["USERLAND"] = "GNU" + self.backup_changes("USERLAND") - for var in ("PORTAGE_INST_UID", "PORTAGE_INST_GID"): + default_inst_ids = { + "PORTAGE_INST_GID": "0", + "PORTAGE_INST_UID": "0", + } + + if eprefix: + # For prefix environments, default to the UID and GID of + # the top-level EROOT directory. try: - self[var] = str(int(self.get(var, "0"))) + eroot_st = os.stat(eroot) + except OSError: + pass + else: + default_inst_ids["PORTAGE_INST_GID"] = str(eroot_st.st_gid) + default_inst_ids["PORTAGE_INST_UID"] = str(eroot_st.st_uid) + + if "PORTAGE_USERNAME" not in self: + try: + pwd_struct = pwd.getpwuid(eroot_st.st_uid) + except KeyError: + pass + else: + self["PORTAGE_USERNAME"] = pwd_struct.pw_name + self.backup_changes("PORTAGE_USERNAME") + + if "PORTAGE_GRPNAME" not in self: + try: + grp_struct = grp.getgrgid(eroot_st.st_gid) + except KeyError: + pass + else: + self["PORTAGE_GRPNAME"] = grp_struct.gr_name + self.backup_changes("PORTAGE_GRPNAME") + + for var, default_val in default_inst_ids.items(): + try: + self[var] = str(int(self.get(var, default_val))) except ValueError: writemsg(_("!!! %s='%s' is not a valid integer. 
" - "Falling back to '0'.\n") % (var, self[var]), + "Falling back to %s.\n") % (var, self[var], default_val), noiselevel=-1) - self[var] = "0" + self[var] = default_val self.backup_changes(var) # initialize self.features @@ -699,21 +789,33 @@ class config(object): if bsd_chflags: self.features.add('chflags') - if 'parse-eapi-ebuild-head' in self.features: - portage._validate_cache_for_unsupported_eapis = False - self._iuse_implicit_match = _iuse_implicit_match_cache(self) self._validate_commands() - for k in self._case_insensitive_vars: - if k in self: - self[k] = self[k].lower() - self.backup_changes(k) + for k in self._case_insensitive_vars: + if k in self: + self[k] = self[k].lower() + self.backup_changes(k) + + if main_repo is not None and not main_repo.sync: + main_repo_sync = self.get("SYNC") + if main_repo_sync: + main_repo.sync = main_repo_sync + + # The first constructed config object initializes these modules, + # and subsequent calls to the _init() functions have no effect. + portage.output._init(config_root=self['PORTAGE_CONFIGROOT']) + portage.data._init(self) if mycpv: self.setcpv(mycpv) + @property + def mygcfg(self): + warnings.warn("portage.config.mygcfg is deprecated", stacklevel=3) + return {} + def _validate_commands(self): for k in special_env_vars.validate_commands: v = self.get(k) @@ -817,11 +919,26 @@ class config(object): writemsg(_("!!! 
INVALID ACCEPT_KEYWORDS: %s\n") % str(group), noiselevel=-1) - abs_profile_path = os.path.join(self["PORTAGE_CONFIGROOT"], - PROFILE_PATH) - if (not self.profile_path or \ - not os.path.exists(os.path.join(self.profile_path, "parent"))) and \ - os.path.exists(os.path.join(self["PORTDIR"], "profiles")): + profile_broken = not self.profile_path or \ + not os.path.exists(os.path.join(self.profile_path, "parent")) and \ + os.path.exists(os.path.join(self["PORTDIR"], "profiles")) + + if profile_broken: + abs_profile_path = None + for x in (PROFILE_PATH, 'etc/portage/make.profile'): + x = os.path.join(self["PORTAGE_CONFIGROOT"], x) + try: + os.lstat(x) + except OSError: + pass + else: + abs_profile_path = x + break + + if abs_profile_path is None: + abs_profile_path = os.path.join(self["PORTAGE_CONFIGROOT"], + PROFILE_PATH) + writemsg(_("\n\n!!! %s is not a symlink and will probably prevent most merges.\n") % abs_profile_path, noiselevel=-1) writemsg(_("!!! It should point into a profile within %s/profiles/\n") % self["PORTDIR"]) @@ -851,13 +968,32 @@ class config(object): writemsg(_("!!! FEATURES=fakeroot is enabled, but the " "fakeroot binary is not installed.\n"), noiselevel=-1) + if os.getuid() == 0 and not hasattr(os, "setgroups"): + warning_shown = False + + if "userpriv" in self.features: + writemsg(_("!!! FEATURES=userpriv is enabled, but " + "os.setgroups is not available.\n"), noiselevel=-1) + warning_shown = True + + if "userfetch" in self.features: + writemsg(_("!!! FEATURES=userfetch is enabled, but " + "os.setgroups is not available.\n"), noiselevel=-1) + warning_shown = True + + if warning_shown and platform.python_implementation() == 'PyPy': + writemsg(_("!!! 
See https://bugs.pypy.org/issue833 for details.\n"), + noiselevel=-1) + def load_best_module(self,property_string): best_mod = best_from_dict(property_string,self.modules,self.module_priority) mod = None try: mod = load_mod(best_mod) except ImportError: - if not best_mod.startswith("cache."): + if best_mod in self._module_aliases: + mod = load_mod(self._module_aliases[best_mod]) + elif not best_mod.startswith("cache."): raise else: best_mod = "portage." + best_mod @@ -1099,8 +1235,11 @@ class config(object): # packages since we want to save it PORTAGE_BUILT_USE for # evaluating conditional USE deps in atoms passed via IPC to # helpers like has_version and best_version. + aux_keys = set(aux_keys) + if hasattr(mydb, '_aux_cache_keys'): + aux_keys = aux_keys.intersection(mydb._aux_cache_keys) + aux_keys.add('USE') aux_keys = list(aux_keys) - aux_keys.append('USE') for k, v in zip(aux_keys, mydb.aux_get(self.mycpv, aux_keys)): pkg_configdict[k] = v built_use = frozenset(pkg_configdict.pop('USE').split()) @@ -1115,7 +1254,7 @@ class config(object): slot = pkg_configdict["SLOT"] iuse = pkg_configdict["IUSE"] if pkg is None: - cpv_slot = "%s:%s" % (self.mycpv, slot) + cpv_slot = _pkg_str(self.mycpv, slot=slot, repo=repository) else: cpv_slot = pkg pkginternaluse = [] @@ -1462,6 +1601,9 @@ class config(object): @return: A matching profile atom string or None if one is not found. """ + warnings.warn("The config._getProfileMaskAtom() method is deprecated.", + DeprecationWarning, stacklevel=2) + cp = cpv_getkey(cpv) profile_atoms = self.prevmaskdict.get(cp) if profile_atoms: @@ -1564,11 +1706,13 @@ class config(object): @return: A list of properties that have not been accepted. 
""" accept_properties = self._accept_properties + if not hasattr(cpv, 'slot'): + cpv = _pkg_str(cpv, slot=metadata["SLOT"], + repo=metadata.get("repository")) cp = cpv_getkey(cpv) cpdict = self._ppropertiesdict.get(cp) if cpdict: - cpv_slot = "%s:%s" % (cpv, metadata["SLOT"]) - pproperties_list = ordered_by_atom_specificity(cpdict, cpv_slot, repo=metadata.get('repository')) + pproperties_list = ordered_by_atom_specificity(cpdict, cpv) if pproperties_list: accept_properties = list(self._accept_properties) for x in pproperties_list: @@ -1699,6 +1843,8 @@ class config(object): env_d = getconfig(env_d_filename, expand=False) if env_d: # env_d will be None if profile.env doesn't exist. + for k in self._env_d_blacklist: + env_d.pop(k, None) self.configdict["env.d"].update(env_d) def regenerate(self, useonly=0, use_cache=None): @@ -2025,25 +2171,43 @@ class config(object): self._virtuals_manager._treeVirtuals = {} def __delitem__(self,mykey): - self.modifying() - for x in self.lookuplist: - if x != None: - if mykey in x: - del x[mykey] + self.pop(mykey) + + def __getitem__(self, key): + try: + return self._getitem(key) + except KeyError: + return '' # for backward compat, don't raise KeyError + + def _getitem(self, mykey): + + if mykey in self._constant_keys: + # These two point to temporary values when + # portage plans to update itself. 
+ if mykey == "PORTAGE_BIN_PATH": + return portage._bin_path + elif mykey == "PORTAGE_PYM_PATH": + return portage._pym_path + + elif mykey == "PORTAGE_GID": + return _unicode_decode(str(portage_gid)) - def __getitem__(self,mykey): for d in self.lookuplist: - if mykey in d: + try: return d[mykey] - return '' # for backward compat, don't raise KeyError + except KeyError: + pass + + raise KeyError(mykey) def get(self, k, x=None): - for d in self.lookuplist: - if k in d: - return d[k] - return x + try: + return self._getitem(k) + except KeyError: + return x def pop(self, key, *args): + self.modifying() if len(args) > 1: raise TypeError( "pop expected at most 2 arguments, got " + \ @@ -2059,10 +2223,12 @@ class config(object): def __contains__(self, mykey): """Called to implement membership test operators (in and not in).""" - for d in self.lookuplist: - if mykey in d: - return True - return False + try: + self._getitem(mykey) + except KeyError: + return False + else: + return True def setdefault(self, k, x=None): v = self.get(k) @@ -2077,6 +2243,7 @@ class config(object): def __iter__(self): keys = set() + keys.update(self._constant_keys) for d in self.lookuplist: keys.update(d) return iter(keys) @@ -2086,7 +2253,7 @@ class config(object): def iteritems(self): for k in self: - yield (k, self[k]) + yield (k, self._getitem(k)) def items(self): return list(self.iteritems()) @@ -2168,8 +2335,14 @@ class config(object): if not eapi_exports_merge_type(eapi): mydict.pop("MERGE_TYPE", None) - # Prefix variables are supported starting with EAPI 3. - if phase == 'depend' or eapi is None or not eapi_supports_prefix(eapi): + # Prefix variables are supported beginning with EAPI 3, or when + # force-prefix is in FEATURES, since older EAPIs would otherwise be + # useless with prefix configurations. This brings compatibility with + # the prefix branch of portage, which also supports EPREFIX for all + # EAPIs (for obvious reasons). 
+ if phase == 'depend' or \ + ('force-prefix' not in self.features and + eapi is not None and not eapi_supports_prefix(eapi)): mydict.pop("ED", None) mydict.pop("EPREFIX", None) mydict.pop("EROOT", None) diff --git a/portage_with_autodep/pym/portage/package/ebuild/config.pyo b/portage_with_autodep/pym/portage/package/ebuild/config.pyo Binary files differnew file mode 100644 index 0000000..742ee2b --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/config.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/deprecated_profile_check.pyo b/portage_with_autodep/pym/portage/package/ebuild/deprecated_profile_check.pyo Binary files differnew file mode 100644 index 0000000..2b9362b --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/deprecated_profile_check.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/digestcheck.py b/portage_with_autodep/pym/portage/package/ebuild/digestcheck.py index 1e34b14..8705639 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/digestcheck.py +++ b/portage_with_autodep/pym/portage/package/ebuild/digestcheck.py @@ -8,7 +8,6 @@ import warnings from portage import os, _encodings, _unicode_decode from portage.exception import DigestException, FileNotFound from portage.localization import _ -from portage.manifest import Manifest from portage.output import EOutput from portage.util import writemsg @@ -16,7 +15,7 @@ def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None): """ Verifies checksums. Assumes all files have been downloaded. 
@rtype: int - @returns: 1 on success and 0 on failure + @return: 1 on success and 0 on failure """ if justmanifest is not None: @@ -28,49 +27,33 @@ def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None): if mysettings.get("EBUILD_SKIP_MANIFEST") == "1": return 1 - allow_missing = "allow-missing-manifests" in mysettings.features pkgdir = mysettings["O"] - manifest_path = os.path.join(pkgdir, "Manifest") - if not os.path.exists(manifest_path): - if allow_missing: - return 1 - writemsg(_("!!! Manifest file not found: '%s'\n") % manifest_path, - noiselevel=-1) - if strict: - return 0 - else: - return 1 if mf is None: - mf = Manifest(pkgdir, mysettings["DISTDIR"]) - manifest_empty = True - for d in mf.fhashdict.values(): - if d: - manifest_empty = False - break - if manifest_empty: - writemsg(_("!!! Manifest is empty: '%s'\n") % manifest_path, - noiselevel=-1) - if strict: - return 0 - else: - return 1 + mf = mysettings.repositories.get_repo_for_location( + os.path.dirname(os.path.dirname(pkgdir))) + mf = mf.load_manifest(pkgdir, mysettings["DISTDIR"]) eout = EOutput() eout.quiet = mysettings.get("PORTAGE_QUIET", None) == "1" try: - if strict and "PORTAGE_PARALLEL_FETCHONLY" not in mysettings: - eout.ebegin(_("checking ebuild checksums ;-)")) - mf.checkTypeHashes("EBUILD") - eout.eend(0) - eout.ebegin(_("checking auxfile checksums ;-)")) - mf.checkTypeHashes("AUX") - eout.eend(0) - eout.ebegin(_("checking miscfile checksums ;-)")) - mf.checkTypeHashes("MISC", ignoreMissingFiles=True) - eout.eend(0) + if not mf.thin and strict and "PORTAGE_PARALLEL_FETCHONLY" not in mysettings: + if mf.fhashdict.get("EBUILD"): + eout.ebegin(_("checking ebuild checksums ;-)")) + mf.checkTypeHashes("EBUILD") + eout.eend(0) + if mf.fhashdict.get("AUX"): + eout.ebegin(_("checking auxfile checksums ;-)")) + mf.checkTypeHashes("AUX") + eout.eend(0) + if mf.fhashdict.get("MISC"): + eout.ebegin(_("checking miscfile checksums ;-)")) + mf.checkTypeHashes("MISC", 
ignoreMissingFiles=True) + eout.eend(0) for f in myfiles: eout.ebegin(_("checking %s ;-)") % f) ftype = mf.findFile(f) if ftype is None: + if mf.allow_missing: + continue eout.eend(1) writemsg(_("\n!!! Missing digest for '%s'\n") % (f,), noiselevel=-1) @@ -90,7 +73,7 @@ def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None): writemsg(_("!!! Got: %s\n") % e.value[2], noiselevel=-1) writemsg(_("!!! Expected: %s\n") % e.value[3], noiselevel=-1) return 0 - if allow_missing: + if mf.thin or mf.allow_missing: # In this case we ignore any missing digests that # would otherwise be detected below. return 1 diff --git a/portage_with_autodep/pym/portage/package/ebuild/digestcheck.pyo b/portage_with_autodep/pym/portage/package/ebuild/digestcheck.pyo Binary files differnew file mode 100644 index 0000000..66987a2 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/digestcheck.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/digestgen.py b/portage_with_autodep/pym/portage/package/ebuild/digestgen.py index eb7210e..6ad3397 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/digestgen.py +++ b/portage_with_autodep/pym/portage/package/ebuild/digestgen.py @@ -17,7 +17,6 @@ from portage.dep import use_reduce from portage.exception import InvalidDependString, FileNotFound, \ PermissionDenied, PortagePackageException from portage.localization import _ -from portage.manifest import Manifest from portage.output import colorize from portage.package.ebuild.fetch import fetch from portage.util import writemsg, writemsg_stdout @@ -34,7 +33,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None): @param myportdb: a portdbapi instance @type myportdb: portdbapi @rtype: int - @returns: 1 on success and 0 on failure + @return: 1 on success and 0 on failure """ if mysettings is None or myportdb is None: raise TypeError("portage.digestgen(): 'mysettings' and 'myportdb' parameter are required.") @@ -52,9 +51,21 @@ def 
digestgen(myarchives=None, mysettings=None, myportdb=None): del e return 0 mytree = os.path.dirname(os.path.dirname(mysettings["O"])) - manifest1_compat = False - mf = Manifest(mysettings["O"], mysettings["DISTDIR"], - fetchlist_dict=fetchlist_dict, manifest1_compat=manifest1_compat) + try: + mf = mysettings.repositories.get_repo_for_location(mytree) + except KeyError: + # backward compatibility + mytree = os.path.realpath(mytree) + mf = mysettings.repositories.get_repo_for_location(mytree) + + mf = mf.load_manifest(mysettings["O"], mysettings["DISTDIR"], + fetchlist_dict=fetchlist_dict) + + if not mf.allow_create: + writemsg_stdout(_(">>> Skipping creating Manifest for %s; " + "repository is configured to not use them\n") % mysettings["O"]) + return 1 + # Don't require all hashes since that can trigger excessive # fetches when sufficient digests already exist. To ease transition # while Manifest 1 is being removed, only require hashes that will @@ -102,8 +113,6 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None): continue if missing_files: - mytree = os.path.realpath(os.path.dirname( - os.path.dirname(mysettings["O"]))) for myfile in missing_files: uris = set() all_restrict = set() @@ -139,8 +148,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None): if not fetch({myfile : uris}, mysettings): myebuild = os.path.join(mysettings["O"], catsplit(cpv)[1] + ".ebuild") - spawn_nofetch(myportdb, myebuild, - settings=mysettings) + spawn_nofetch(myportdb, myebuild) writemsg(_("!!! 
Fetch failed for %s, can't update " "Manifest\n") % myfile, noiselevel=-1) if myfile in dist_hashes and \ @@ -183,8 +191,6 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None): os.path.join(mysettings["DISTDIR"], filename)): auto_assumed.append(filename) if auto_assumed: - mytree = os.path.realpath( - os.path.dirname(os.path.dirname(mysettings["O"]))) cp = os.path.sep.join(mysettings["O"].split(os.path.sep)[-2:]) pkgs = myportdb.cp_list(cp, mytree=mytree) pkgs.sort() diff --git a/portage_with_autodep/pym/portage/package/ebuild/digestgen.pyo b/portage_with_autodep/pym/portage/package/ebuild/digestgen.pyo Binary files differnew file mode 100644 index 0000000..66876ec --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/digestgen.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/doebuild.py b/portage_with_autodep/pym/portage/package/ebuild/doebuild.py index c76c1ed..610172f 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/doebuild.py +++ b/portage_with_autodep/pym/portage/package/ebuild/doebuild.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ['doebuild', 'doebuild_environment', 'spawn', 'spawnebuild'] @@ -10,7 +10,6 @@ from itertools import chain import logging import os as _os import re -import shutil import signal import stat import sys @@ -31,8 +30,8 @@ portage.proxy.lazyimport.lazyimport(globals(), ) from portage import auxdbkeys, bsd_chflags, \ - eapi_is_supported, merge, os, selinux, \ - unmerge, _encodings, _parse_eapi_ebuild_head, _os_merge, \ + eapi_is_supported, merge, os, selinux, shutil, \ + unmerge, _encodings, _os_merge, \ _shell_quote, _unicode_decode, _unicode_encode from portage.const import EBUILD_SH_ENV_FILE, EBUILD_SH_ENV_DIR, \ EBUILD_SH_BINARY, INVALID_ENV_FILE, MISC_SH_BINARY @@ -42,16 +41,16 @@ from portage.dbapi.porttree import _parse_uri_map from portage.dep 
import Atom, check_required_use, \ human_readable_required_use, paren_enclose, use_reduce from portage.eapi import eapi_exports_KV, eapi_exports_merge_type, \ - eapi_exports_replace_vars, eapi_has_required_use, \ - eapi_has_src_prepare_and_src_configure, eapi_has_pkg_pretend -from portage.elog import elog_process + eapi_exports_replace_vars, eapi_exports_REPOSITORY, \ + eapi_has_required_use, eapi_has_src_prepare_and_src_configure, \ + eapi_has_pkg_pretend +from portage.elog import elog_process, _preload_elog_modules from portage.elog.messages import eerror, eqawarn from portage.exception import DigestException, FileNotFound, \ IncorrectParameter, InvalidDependString, PermissionDenied, \ UnsupportedAPIException from portage.localization import _ -from portage.manifest import Manifest -from portage.output import style_to_ansi_code +from portage.output import colormap from portage.package.ebuild.prepare_build_dirs import prepare_build_dirs from portage.util import apply_recursive_permissions, \ apply_secpass_permissions, noiselimit, normalize_path, \ @@ -116,6 +115,38 @@ def _spawn_phase(phase, settings, actionmap=None, **kwargs): ebuild_phase.wait() return ebuild_phase.returncode +def _doebuild_path(settings, eapi=None): + """ + Generate the PATH variable. + """ + + # Note: PORTAGE_BIN_PATH may differ from the global constant + # when portage is reinstalling itself. 
+ portage_bin_path = settings["PORTAGE_BIN_PATH"] + eprefix = settings["EPREFIX"] + prerootpath = [x for x in settings.get("PREROOTPATH", "").split(":") if x] + rootpath = [x for x in settings.get("ROOTPATH", "").split(":") if x] + + prefixes = [] + if eprefix: + prefixes.append(eprefix) + prefixes.append("/") + + path = [] + + if eapi not in (None, "0", "1", "2", "3"): + path.append(os.path.join(portage_bin_path, "ebuild-helpers", "4")) + + path.append(os.path.join(portage_bin_path, "ebuild-helpers")) + path.extend(prerootpath) + + for prefix in prefixes: + for x in ("usr/local/sbin", "usr/local/bin", "usr/sbin", "usr/bin", "sbin", "bin"): + path.append(os.path.join(prefix, x)) + + path.extend(rootpath) + settings["PATH"] = ":".join(path) + def doebuild_environment(myebuild, mydo, myroot=None, settings=None, debug=False, use_cache=None, db=None): """ @@ -143,20 +174,32 @@ def doebuild_environment(myebuild, mydo, myroot=None, settings=None, ebuild_path = os.path.abspath(myebuild) pkg_dir = os.path.dirname(ebuild_path) mytree = os.path.dirname(os.path.dirname(pkg_dir)) - - if "CATEGORY" in mysettings.configdict["pkg"]: - cat = mysettings.configdict["pkg"]["CATEGORY"] - else: - cat = os.path.basename(normalize_path(os.path.join(pkg_dir, ".."))) - mypv = os.path.basename(ebuild_path)[:-7] - - mycpv = cat+"/"+mypv - mysplit = _pkgsplit(mypv) + mysplit = _pkgsplit(mypv, eapi=mysettings.configdict["pkg"].get("EAPI")) if mysplit is None: raise IncorrectParameter( _("Invalid ebuild path: '%s'") % myebuild) + if mysettings.mycpv is not None and \ + mysettings.configdict["pkg"].get("PF") == mypv and \ + "CATEGORY" in mysettings.configdict["pkg"]: + # Assume that PF is enough to assume that we've got + # the correct CATEGORY, though this is not really + # a solid assumption since it's possible (though + # unlikely) that two packages in different + # categories have the same PF. 
Callers should call + # setcpv or create a clean clone of a locked config + # instance in order to ensure that this assumption + # does not fail like in bug #408817. + cat = mysettings.configdict["pkg"]["CATEGORY"] + mycpv = mysettings.mycpv + elif os.path.basename(pkg_dir) in (mysplit[0], mypv): + # portdbapi or vardbapi + cat = os.path.basename(os.path.dirname(pkg_dir)) + mycpv = cat + "/" + mypv + else: + raise AssertionError("unable to determine CATEGORY") + # Make a backup of PORTAGE_TMPDIR prior to calling config.reset() # so that the caller can override it. tmpdir = mysettings["PORTAGE_TMPDIR"] @@ -208,11 +251,11 @@ def doebuild_environment(myebuild, mydo, myroot=None, settings=None, mysettings["FILESDIR"] = pkg_dir+"/files" mysettings["PF"] = mypv - if hasattr(mydbapi, '_repo_info'): - repo_info = mydbapi._repo_info[mytree] - mysettings['PORTDIR'] = repo_info.portdir - mysettings['PORTDIR_OVERLAY'] = repo_info.portdir_overlay - mysettings.configdict["pkg"]["PORTAGE_REPO_NAME"] = repo_info.name + if hasattr(mydbapi, 'repositories'): + repo = mydbapi.repositories.get_repo_for_location(mytree) + mysettings['PORTDIR'] = repo.eclass_db.porttrees[0] + mysettings['PORTDIR_OVERLAY'] = ' '.join(repo.eclass_db.porttrees[1:]) + mysettings.configdict["pkg"]["PORTAGE_REPO_NAME"] = repo.name mysettings["PORTDIR"] = os.path.realpath(mysettings["PORTDIR"]) mysettings["DISTDIR"] = os.path.realpath(mysettings["DISTDIR"]) @@ -235,16 +278,6 @@ def doebuild_environment(myebuild, mydo, myroot=None, settings=None, else: mysettings["PVR"]=mysplit[1]+"-"+mysplit[2] - if "PATH" in mysettings: - mysplit=mysettings["PATH"].split(":") - else: - mysplit=[] - # Note: PORTAGE_BIN_PATH may differ from the global constant - # when portage is reinstalling itself. 
- portage_bin_path = mysettings["PORTAGE_BIN_PATH"] - if portage_bin_path not in mysplit: - mysettings["PATH"] = portage_bin_path + ":" + mysettings["PATH"] - # All temporary directories should be subdirectories of # $PORTAGE_TMPDIR/portage, since it's common for /tmp and /var/tmp # to be mounted with the "noexec" option (see bug #346899). @@ -268,7 +301,9 @@ def doebuild_environment(myebuild, mydo, myroot=None, settings=None, mysettings["T"] = os.path.join(mysettings["PORTAGE_BUILDDIR"], "temp") # Prefix forward compatability - mysettings["ED"] = mysettings["D"] + eprefix_lstrip = mysettings["EPREFIX"].lstrip(os.sep) + mysettings["ED"] = os.path.join( + mysettings["D"], eprefix_lstrip).rstrip(os.sep) + os.sep mysettings["PORTAGE_BASHRC"] = os.path.join( mysettings["PORTAGE_CONFIGROOT"], EBUILD_SH_ENV_FILE) @@ -276,37 +311,41 @@ def doebuild_environment(myebuild, mydo, myroot=None, settings=None, mysettings["PORTAGE_CONFIGROOT"], EBUILD_SH_ENV_DIR) # Allow color.map to control colors associated with einfo, ewarn, etc... - mycolors = [] - for c in ("GOOD", "WARN", "BAD", "HILITE", "BRACKET"): - mycolors.append("%s=$'%s'" % \ - (c, style_to_ansi_code(c))) - mysettings["PORTAGE_COLORMAP"] = "\n".join(mycolors) - - # All EAPI dependent code comes last, so that essential variables - # like PORTAGE_BUILDDIR are still initialized even in cases when + mysettings["PORTAGE_COLORMAP"] = colormap() + + if "COLUMNS" not in mysettings: + # Set COLUMNS, in order to prevent unnecessary stty calls + # inside the set_colors function of isolated-functions.sh. + # We cache the result in os.environ, in order to avoid + # multiple stty calls in cases when get_term_size() falls + # back to stty due to a missing or broken curses module. + columns = os.environ.get("COLUMNS") + if columns is None: + rows, columns = portage.output.get_term_size() + if columns < 1: + # Force a sane value for COLUMNS, so that tools + # like ls don't complain (see bug #394091). 
+ columns = 80 + columns = str(columns) + os.environ["COLUMNS"] = columns + mysettings["COLUMNS"] = columns + + # EAPI is always known here, even for the "depend" phase, because + # EbuildMetadataPhase gets it from _parse_eapi_ebuild_head(). + eapi = mysettings.configdict['pkg']['EAPI'] + _doebuild_path(mysettings, eapi=eapi) + + # All EAPI dependent code comes last, so that essential variables like + # PATH and PORTAGE_BUILDDIR are still initialized even in cases when # UnsupportedAPIException needs to be raised, which can be useful # when uninstalling a package that has corrupt EAPI metadata. - eapi = None - if mydo == 'depend' and 'EAPI' not in mysettings.configdict['pkg']: - if eapi is None and 'parse-eapi-ebuild-head' in mysettings.features: - eapi = _parse_eapi_ebuild_head( - io.open(_unicode_encode(ebuild_path, - encoding=_encodings['fs'], errors='strict'), - mode='r', encoding=_encodings['content'], errors='replace')) - - if eapi is not None: - if not eapi_is_supported(eapi): - raise UnsupportedAPIException(mycpv, eapi) - mysettings.configdict['pkg']['EAPI'] = eapi + if not eapi_is_supported(eapi): + raise UnsupportedAPIException(mycpv, eapi) - if mydo != "depend": - # Metadata vars such as EAPI and RESTRICT are - # set by the above config.setcpv() call. - eapi = mysettings["EAPI"] - if not eapi_is_supported(eapi): - # can't do anything with this. - raise UnsupportedAPIException(mycpv, eapi) + if eapi_exports_REPOSITORY(eapi) and "PORTAGE_REPO_NAME" in mysettings.configdict["pkg"]: + mysettings.configdict["pkg"]["REPOSITORY"] = mysettings.configdict["pkg"]["PORTAGE_REPO_NAME"] + if mydo != "depend": if hasattr(mydbapi, "getFetchMap") and \ ("A" not in mysettings.configdict["pkg"] or \ "AA" not in mysettings.configdict["pkg"]): @@ -331,22 +370,41 @@ def doebuild_environment(myebuild, mydo, myroot=None, settings=None, else: mysettings.configdict["pkg"]["AA"] = " ".join(uri_map) - if not eapi_exports_KV(eapi): - # Discard KV for EAPIs that don't support it. 
Cache KV is restored - # from the backupenv whenever config.reset() is called. - mysettings.pop('KV', None) - elif mydo != 'depend' and 'KV' not in mysettings and \ - mydo in ('compile', 'config', 'configure', 'info', - 'install', 'nofetch', 'postinst', 'postrm', 'preinst', - 'prepare', 'prerm', 'setup', 'test', 'unpack'): - mykv, err1 = ExtractKernelVersion( - os.path.join(mysettings['EROOT'], "usr/src/linux")) - if mykv: - # Regular source tree - mysettings["KV"] = mykv - else: - mysettings["KV"] = "" - mysettings.backup_changes("KV") + ccache = "ccache" in mysettings.features + distcc = "distcc" in mysettings.features + if ccache or distcc: + # Use default ABI libdir in accordance with bug #355283. + libdir = None + default_abi = mysettings.get("DEFAULT_ABI") + if default_abi: + libdir = mysettings.get("LIBDIR_" + default_abi) + if not libdir: + libdir = "lib" + + if distcc: + mysettings["PATH"] = os.path.join(os.sep, eprefix_lstrip, + "usr", libdir, "distcc", "bin") + ":" + mysettings["PATH"] + + if ccache: + mysettings["PATH"] = os.path.join(os.sep, eprefix_lstrip, + "usr", libdir, "ccache", "bin") + ":" + mysettings["PATH"] + + if not eapi_exports_KV(eapi): + # Discard KV for EAPIs that don't support it. Cached KV is restored + # from the backupenv whenever config.reset() is called. 
+ mysettings.pop('KV', None) + elif 'KV' not in mysettings and \ + mydo in ('compile', 'config', 'configure', 'info', + 'install', 'nofetch', 'postinst', 'postrm', 'preinst', + 'prepare', 'prerm', 'setup', 'test', 'unpack'): + mykv, err1 = ExtractKernelVersion( + os.path.join(mysettings['EROOT'], "usr/src/linux")) + if mykv: + # Regular source tree + mysettings["KV"] = mykv + else: + mysettings["KV"] = "" + mysettings.backup_changes("KV") _doebuild_manifest_cache = None _doebuild_broken_ebuilds = set() @@ -356,7 +414,7 @@ _doebuild_commands_without_builddir = ( 'fetch', 'fetchall', 'help', 'manifest' ) -def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, +def doebuild(myebuild, mydo, _unused=None, settings=None, debug=0, listonly=0, fetchonly=0, cleanup=0, dbkey=None, use_cache=1, fetchall=0, tree=None, mydbapi=None, vartree=None, prev_mtimes=None, fd_pipes=None, returnpid=False): @@ -368,10 +426,10 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, @type myebuild: String @param mydo: Phase to run @type mydo: String - @param myroot: $ROOT (usually '/', see man make.conf) - @type myroot: String - @param mysettings: Portage Configuration - @type mysettings: instance of portage.config + @param _unused: Deprecated (use settings["ROOT"] instead) + @type _unused: String + @param settings: Portage Configuration + @type settings: instance of portage.config @param debug: Turns on various debug information (eg, debug for spawn) @type debug: Boolean @param listonly: Used to wrap fetch(); passed such that fetch only lists files required. @@ -403,7 +461,7 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, caller clean up all returned PIDs. @type returnpid: Boolean @rtype: Boolean - @returns: + @return: 1. 0 for success 2. 
1 for error @@ -414,7 +472,18 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, Other variables may not be strictly required, many have defaults that are set inside of doebuild. """ - + + if settings is None: + raise TypeError("settings parameter is required") + mysettings = settings + myroot = settings['EROOT'] + + if _unused is not None and _unused != mysettings['EROOT']: + warnings.warn("The third parameter of the " + "portage.doebuild() is now unused. Use " + "settings['ROOT'] instead.", + DeprecationWarning, stacklevel=2) + if not tree: writemsg("Warning: tree not specified to doebuild\n") tree = "porttree" @@ -432,6 +501,7 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, "install":["test"], "rpm": ["install"], "package":["install"], + "merge" :["install"], } if mydbapi is None: @@ -480,21 +550,28 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, return 1 global _doebuild_manifest_cache + pkgdir = os.path.dirname(myebuild) + manifest_path = os.path.join(pkgdir, "Manifest") + if tree == "porttree": + repo_config = mysettings.repositories.get_repo_for_location( + os.path.dirname(os.path.dirname(pkgdir))) + else: + repo_config = None + mf = None if "strict" in features and \ "digest" not in features and \ tree == "porttree" and \ + not repo_config.thin_manifest and \ mydo not in ("digest", "manifest", "help") and \ - not portage._doebuild_manifest_exempt_depend: + not portage._doebuild_manifest_exempt_depend and \ + not (repo_config.allow_missing_manifest and not os.path.exists(manifest_path)): # Always verify the ebuild checksums before executing it. global _doebuild_broken_ebuilds if myebuild in _doebuild_broken_ebuilds: return 1 - pkgdir = os.path.dirname(myebuild) - manifest_path = os.path.join(pkgdir, "Manifest") - # Avoid checking the same Manifest several times in a row during a # regen with an empty cache. 
if _doebuild_manifest_cache is None or \ @@ -505,7 +582,7 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, out.eerror(_("Manifest not found for '%s'") % (myebuild,)) _doebuild_broken_ebuilds.add(myebuild) return 1 - mf = Manifest(pkgdir, mysettings["DISTDIR"]) + mf = repo_config.load_manifest(pkgdir, mysettings["DISTDIR"]) else: mf = _doebuild_manifest_cache @@ -513,10 +590,12 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, try: mf.checkFileHashes("EBUILD", os.path.basename(myebuild)) except KeyError: - out = portage.output.EOutput() - out.eerror(_("Missing digest for '%s'") % (myebuild,)) - _doebuild_broken_ebuilds.add(myebuild) - return 1 + if not (mf.allow_missing and + os.path.basename(myebuild) not in mf.fhashdict["EBUILD"]): + out = portage.output.EOutput() + out.eerror(_("Missing digest for '%s'") % (myebuild,)) + _doebuild_broken_ebuilds.add(myebuild) + return 1 except FileNotFound: out = portage.output.EOutput() out.eerror(_("A file listed in the Manifest " @@ -536,7 +615,7 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, if mf.getFullname() in _doebuild_broken_manifests: return 1 - if mf is not _doebuild_manifest_cache: + if mf is not _doebuild_manifest_cache and not mf.allow_missing: # Make sure that all of the ebuilds are # actually listed in the Manifest. @@ -553,8 +632,8 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, _doebuild_broken_manifests.add(manifest_path) return 1 - # Only cache it if the above stray files test succeeds. - _doebuild_manifest_cache = mf + # We cache it only after all above checks succeed. 
+ _doebuild_manifest_cache = mf logfile=None builddir_lock = None @@ -594,7 +673,6 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, if builddir_lock is not None: builddir_lock.unlock() - restrict = set(mysettings.get('PORTAGE_RESTRICT', '').split()) # get possible slot information from the deps file if mydo == "depend": writemsg("!!! DEBUG: dbkey: %s\n" % str(dbkey), 2) @@ -654,6 +732,13 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, if rval != os.EX_OK: return rval + else: + # FEATURES=noauto only makes sense for porttree, and we don't want + # it to trigger redundant sourcing of the ebuild for API consumers + # that are using binary packages + if "noauto" in mysettings.features: + mysettings.features.discard("noauto") + # The info phase is special because it uses mkdtemp so and # user (not necessarily in the portage group) can run it. if mydo not in ('info',) and \ @@ -666,6 +751,73 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, return unmerge(mysettings["CATEGORY"], mysettings["PF"], myroot, mysettings, vartree=vartree) + phases_to_run = set() + if "noauto" in mysettings.features or \ + mydo not in actionmap_deps: + phases_to_run.add(mydo) + else: + phase_stack = [mydo] + while phase_stack: + x = phase_stack.pop() + if x in phases_to_run: + continue + phases_to_run.add(x) + phase_stack.extend(actionmap_deps.get(x, [])) + del phase_stack + + alist = set(mysettings.configdict["pkg"].get("A", "").split()) + + unpacked = False + if tree != "porttree": + pass + elif "unpack" not in phases_to_run: + unpacked = os.path.exists(os.path.join( + mysettings["PORTAGE_BUILDDIR"], ".unpacked")) + else: + try: + workdir_st = os.stat(mysettings["WORKDIR"]) + except OSError: + pass + else: + newstuff = False + if not os.path.exists(os.path.join( + mysettings["PORTAGE_BUILDDIR"], ".unpacked")): + writemsg_stdout(_( + ">>> Not marked as unpacked; recreating WORKDIR...\n")) + newstuff = True + else: + for x 
in alist: + writemsg_stdout(">>> Checking %s's mtime...\n" % x) + try: + x_st = os.stat(os.path.join( + mysettings["DISTDIR"], x)) + except OSError: + # file not fetched yet + x_st = None + + if x_st is None or x_st.st_mtime > workdir_st.st_mtime: + writemsg_stdout(_(">>> Timestamp of " + "%s has changed; recreating WORKDIR...\n") % x) + newstuff = True + break + + if newstuff: + if builddir_lock is None and \ + 'PORTAGE_BUILDIR_LOCKED' not in mysettings: + builddir_lock = EbuildBuildDir( + scheduler=PollScheduler().sched_iface, + settings=mysettings) + builddir_lock.lock() + try: + _spawn_phase("clean", mysettings) + finally: + if builddir_lock is not None: + builddir_lock.unlock() + builddir_lock = None + else: + writemsg_stdout(_(">>> WORKDIR is up-to-date, keeping...\n")) + unpacked = True + # Build directory creation isn't required for any of these. # In the fetch phase, the directory is needed only for RESTRICT=fetch # in order to satisfy the sane $PWD requirement (from bug #239560) @@ -739,10 +891,9 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, # Only try and fetch the files if we are going to need them ... # otherwise, if user has FEATURES=noauto and they run `ebuild clean # unpack compile install`, we will try and fetch 4 times :/ - need_distfiles = tree == "porttree" and \ + need_distfiles = tree == "porttree" and not unpacked and \ (mydo in ("fetch", "unpack") or \ mydo not in ("digest", "manifest") and "noauto" not in features) - alist = set(mysettings.configdict["pkg"].get("A", "").split()) if need_distfiles: src_uri, = mydbapi.aux_get(mysettings.mycpv, @@ -783,10 +934,14 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, return 0 return 1 - if mydo == "fetch": + if need_distfiles: # Files are already checked inside fetch(), # so do not check them again. checkme = [] + elif unpacked: + # The unpack phase is marked as complete, so it + # would be wasteful to check distfiles again. 
+ checkme = [] else: checkme = alist @@ -845,7 +1000,7 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, # this phase. This can raise PermissionDenied if # the current user doesn't have write access to $PKGDIR. if hasattr(portage, 'db'): - bintree = portage.db[mysettings["ROOT"]]["bintree"] + bintree = portage.db[mysettings['EROOT']]['bintree'] mysettings["PORTAGE_BINPKG_TMPFILE"] = \ bintree.getname(mysettings.mycpv) + \ ".%s" % (os.getpid(),) @@ -866,6 +1021,13 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, if mydo == "package" and bintree is not None: bintree.inject(mysettings.mycpv, filename=mysettings["PORTAGE_BINPKG_TMPFILE"]) + else: + if "PORTAGE_BINPKG_TMPFILE" in mysettings: + try: + os.unlink(mysettings["PORTAGE_BINPKG_TMPFILE"]) + except OSError: + pass + elif mydo=="qmerge": # check to ensure install was run. this *only* pops up when users # forget it and are using ebuild @@ -877,6 +1039,7 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, # qmerge is a special phase that implies noclean. if "noclean" not in mysettings.features: mysettings.features.add("noclean") + _handle_self_update(mysettings, vartree.dbapi) #qmerge is specifically not supposed to do a runtime dep check retval = merge( mysettings["CATEGORY"], mysettings["PF"], mysettings["D"], @@ -893,6 +1056,7 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0, # so that it's only called once. elog_process(mysettings.mycpv, mysettings) if retval == os.EX_OK: + _handle_self_update(mysettings, vartree.dbapi) retval = merge(mysettings["CATEGORY"], mysettings["PF"], mysettings["D"], os.path.join(mysettings["PORTAGE_BUILDDIR"], "build-info"), myroot, mysettings, @@ -944,10 +1108,31 @@ def _check_temp_dir(settings): # as some people use a separate PORTAGE_TMPDIR mount # we prefer that as the checks below would otherwise be pointless # for those people. 
- if os.path.exists(os.path.join(settings["PORTAGE_TMPDIR"], "portage")): - checkdir = os.path.join(settings["PORTAGE_TMPDIR"], "portage") + tmpdir = os.path.realpath(settings["PORTAGE_TMPDIR"]) + if os.path.exists(os.path.join(tmpdir, "portage")): + checkdir = os.path.realpath(os.path.join(tmpdir, "portage")) + if ("sandbox" in settings.features or + "usersandox" in settings.features) and \ + not checkdir.startswith(tmpdir + os.sep): + msg = _("The 'portage' subdirectory of the directory " + "referenced by the PORTAGE_TMPDIR variable appears to be " + "a symlink. In order to avoid sandbox violations (see bug " + "#378379), you must adjust PORTAGE_TMPDIR instead of using " + "the symlink located at '%s'. A suitable PORTAGE_TMPDIR " + "setting would be '%s'.") % \ + (os.path.join(tmpdir, "portage"), checkdir) + lines = [] + lines.append("") + lines.append("") + lines.extend(wrap(msg, 72)) + lines.append("") + for line in lines: + if line: + line = "!!! %s" % (line,) + writemsg("%s\n" % (line,), noiselevel=-1) + return 1 else: - checkdir = settings["PORTAGE_TMPDIR"] + checkdir = tmpdir if not os.access(checkdir, os.W_OK): writemsg(_("%s is not writable.\n" @@ -955,8 +1140,7 @@ def _check_temp_dir(settings): noiselevel=-1) return 1 - else: - fd = tempfile.NamedTemporaryFile(prefix="exectest-", dir=checkdir) + with tempfile.NamedTemporaryFile(prefix="exectest-", dir=checkdir) as fd: os.chmod(fd.name, 0o755) if not os.access(fd.name, os.X_OK): writemsg(_("Can not execute files in %s\n" @@ -1085,7 +1269,8 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi): all_keys.add("SRC_URI") all_keys = tuple(all_keys) metadata = dict(zip(all_keys, - mydbapi.aux_get(mysettings.mycpv, all_keys))) + mydbapi.aux_get(mysettings.mycpv, all_keys, + myrepo=mysettings.get("PORTAGE_REPO_NAME")))) class FakeTree(object): def __init__(self, mydb): @@ -1173,7 +1358,7 @@ def spawn(mystring, mysettings, debug=0, free=0, droppriv=0, sesandbox=0, fakero @param keywords: Extra options encoded 
as a dict, to be passed to spawn @type keywords: Dictionary @rtype: Integer - @returns: + @return: 1. The return code of the spawned process. """ @@ -1201,7 +1386,8 @@ def spawn(mystring, mysettings, debug=0, free=0, droppriv=0, sesandbox=0, fakero # fake ownership/permissions will have to be converted to real # permissions in the merge phase. fakeroot = fakeroot and uid != 0 and portage.process.fakeroot_capable - if droppriv and not uid and portage_gid and portage_uid: + if droppriv and uid == 0 and portage_gid and portage_uid and \ + hasattr(os, "setgroups"): keywords.update({"uid":portage_uid,"gid":portage_gid, "groups":userpriv_groups,"umask":0o02}) if not free: @@ -1277,6 +1463,17 @@ def spawnebuild(mydo, actionmap, mysettings, debug, alwaysdep=0, if mydo == "pretend" and not eapi_has_pkg_pretend(eapi): return os.EX_OK + if not (mydo == "install" and "noauto" in mysettings.features): + check_file = os.path.join( + mysettings["PORTAGE_BUILDDIR"], ".%sed" % mydo.rstrip('e')) + if os.path.exists(check_file): + writemsg_stdout(_(">>> It appears that " + "'%(action)s' has already executed for '%(pkg)s'; skipping.\n") % + {"action":mydo, "pkg":mysettings["PF"]}) + writemsg_stdout(_(">>> Remove '%(file)s' to force %(action)s.\n") % + {"file":check_file, "action":mydo}) + return os.EX_OK + return _spawn_phase(mydo, mysettings, actionmap=actionmap, logfile=logfile, fd_pipes=fd_pipes, returnpid=returnpid) @@ -1285,13 +1482,14 @@ _post_phase_cmds = { "install" : [ "install_qa_check", - "install_symlink_html_docs"], + "install_symlink_html_docs", + "install_hooks"], "preinst" : [ "preinst_sfperms", "preinst_selinux_labels", "preinst_suid_scan", - "preinst_mask"] + ] } def _post_phase_userpriv_perms(mysettings): @@ -1320,7 +1518,9 @@ def _check_build_log(mysettings, out=None): except EnvironmentError: return + f_real = None if logfile.endswith('.gz'): + f_real = f f = gzip.GzipFile(filename='', mode='rb', fileobj=f) am_maintainer_mode = [] @@ -1425,19 +1625,32 @@ def 
_check_build_log(mysettings, out=None): msg.extend("\t" + line for line in make_jobserver) _eqawarn(msg) + f.close() + if f_real is not None: + f_real.close() + def _post_src_install_chost_fix(settings): """ It's possible that the ebuild has changed the CHOST variable, so revert it to the initial - setting. + setting. Also, revert IUSE in case it's corrupted + due to local environment settings like in bug #386829. """ - if settings.get('CATEGORY') == 'virtual': - return - chost = settings.get('CHOST') - if chost: - write_atomic(os.path.join(settings['PORTAGE_BUILDDIR'], - 'build-info', 'CHOST'), chost + '\n') + build_info_dir = os.path.join(settings['PORTAGE_BUILDDIR'], 'build-info') + + for k in ('IUSE',): + v = settings.get(k) + if v is not None: + write_atomic(os.path.join(build_info_dir, k), v + '\n') + + # The following variables are irrelevant for virtual packages. + if settings.get('CATEGORY') != 'virtual': + + for k in ('CHOST',): + v = settings.get(k) + if v is not None: + write_atomic(os.path.join(build_info_dir, k), v + '\n') _vdb_use_conditional_keys = ('DEPEND', 'LICENSE', 'PDEPEND', 'PROPERTIES', 'PROVIDE', 'RDEPEND', 'RESTRICT',) @@ -1481,6 +1694,7 @@ def _post_src_install_uid_fix(mysettings, out): _preinst_bsdflags(mysettings) destdir = mysettings["D"] + ed_len = len(mysettings["ED"]) unicode_errors = [] while True: @@ -1499,12 +1713,12 @@ def _post_src_install_uid_fix(mysettings, out): new_parent = _unicode_decode(parent, encoding=_encodings['merge'], errors='replace') new_parent = _unicode_encode(new_parent, - encoding=_encodings['merge'], errors='backslashreplace') + encoding='ascii', errors='backslashreplace') new_parent = _unicode_decode(new_parent, encoding=_encodings['merge'], errors='replace') os.rename(parent, new_parent) unicode_error = True - unicode_errors.append(new_parent[len(destdir):]) + unicode_errors.append(new_parent[ed_len:]) break for fname in chain(dirs, files): @@ -1517,13 +1731,13 @@ def _post_src_install_uid_fix(mysettings, 
out): new_fname = _unicode_decode(fname, encoding=_encodings['merge'], errors='replace') new_fname = _unicode_encode(new_fname, - encoding=_encodings['merge'], errors='backslashreplace') + encoding='ascii', errors='backslashreplace') new_fname = _unicode_decode(new_fname, encoding=_encodings['merge'], errors='replace') new_fpath = os.path.join(parent, new_fname) os.rename(fpath, new_fpath) unicode_error = True - unicode_errors.append(new_fpath[len(destdir):]) + unicode_errors.append(new_fpath[ed_len:]) fname = new_fname fpath = new_fpath else: @@ -1597,20 +1811,24 @@ def _post_src_install_uid_fix(mysettings, out): if unicode_errors: for l in _merge_unicode_error(unicode_errors): - eerror(l, phase='install', key=mysettings.mycpv, out=out) + eqawarn(l, phase='install', key=mysettings.mycpv, out=out) build_info_dir = os.path.join(mysettings['PORTAGE_BUILDDIR'], 'build-info') - io.open(_unicode_encode(os.path.join(build_info_dir, + f = io.open(_unicode_encode(os.path.join(build_info_dir, 'SIZE'), encoding=_encodings['fs'], errors='strict'), mode='w', encoding=_encodings['repo.content'], - errors='strict').write(_unicode_decode(str(size) + '\n')) + errors='strict') + f.write(_unicode_decode(str(size) + '\n')) + f.close() - io.open(_unicode_encode(os.path.join(build_info_dir, + f = io.open(_unicode_encode(os.path.join(build_info_dir, 'BUILD_TIME'), encoding=_encodings['fs'], errors='strict'), mode='w', encoding=_encodings['repo.content'], - errors='strict').write(_unicode_decode("%.0f\n" % (time.time(),))) + errors='strict') + f.write(_unicode_decode("%.0f\n" % (time.time(),))) + f.close() use = frozenset(mysettings['PORTAGE_USE'].split()) for k in _vdb_use_conditional_keys: @@ -1636,10 +1854,12 @@ def _post_src_install_uid_fix(mysettings, out): except OSError: pass continue - io.open(_unicode_encode(os.path.join(build_info_dir, + f = io.open(_unicode_encode(os.path.join(build_info_dir, k), encoding=_encodings['fs'], errors='strict'), mode='w', 
encoding=_encodings['repo.content'], - errors='strict').write(_unicode_decode(v + '\n')) + errors='strict') + f.write(_unicode_decode(v + '\n')) + f.close() _reapply_bsdflags_to_image(mysettings) @@ -1664,15 +1884,46 @@ def _post_src_install_soname_symlinks(mysettings, out): needed_filename = os.path.join(mysettings["PORTAGE_BUILDDIR"], "build-info", "NEEDED.ELF.2") + f = None try: - lines = io.open(_unicode_encode(needed_filename, + f = io.open(_unicode_encode(needed_filename, encoding=_encodings['fs'], errors='strict'), mode='r', encoding=_encodings['repo.content'], - errors='replace').readlines() + errors='replace') + lines = f.readlines() except IOError as e: if e.errno not in (errno.ENOENT, errno.ESTALE): raise return + finally: + if f is not None: + f.close() + + qa_no_symlink = "" + f = None + try: + f = io.open(_unicode_encode(os.path.join( + mysettings["PORTAGE_BUILDDIR"], + "build-info", "QA_SONAME_NO_SYMLINK"), + encoding=_encodings['fs'], errors='strict'), + mode='r', encoding=_encodings['repo.content'], + errors='replace') + qa_no_symlink = f.read() + except IOError as e: + if e.errno not in (errno.ENOENT, errno.ESTALE): + raise + finally: + if f is not None: + f.close() + + qa_no_symlink = qa_no_symlink.split() + if qa_no_symlink: + if len(qa_no_symlink) > 1: + qa_no_symlink = "|".join("(%s)" % x for x in qa_no_symlink) + qa_no_symlink = "^(%s)$" % qa_no_symlink + else: + qa_no_symlink = "^%s$" % qa_no_symlink[0] + qa_no_symlink = re.compile(qa_no_symlink) libpaths = set(portage.util.getlibpaths( mysettings["ROOT"], env=mysettings)) @@ -1730,6 +1981,8 @@ def _post_src_install_soname_symlinks(mysettings, out): continue if not is_libdir(os.path.dirname(obj)): continue + if qa_no_symlink and qa_no_symlink.match(obj.strip(os.sep)) is not None: + continue obj_file_path = os.path.join(image_dir, obj.lstrip(os.sep)) sym_file_path = os.path.join(os.path.dirname(obj_file_path), soname) @@ -1746,8 +1999,7 @@ def _post_src_install_soname_symlinks(mysettings, 
out): if not missing_symlinks: return - qa_msg = ["QA Notice: Missing soname symlink(s) " + \ - "will be automatically created:"] + qa_msg = ["QA Notice: Missing soname symlink(s):"] qa_msg.append("") qa_msg.extend("\t%s -> %s" % (os.path.join( os.path.dirname(obj).lstrip(os.sep), soname), @@ -1757,20 +2009,11 @@ def _post_src_install_soname_symlinks(mysettings, out): for line in qa_msg: eqawarn(line, key=mysettings.mycpv, out=out) - _preinst_bsdflags(mysettings) - for obj, soname in missing_symlinks: - obj_file_path = os.path.join(image_dir, obj.lstrip(os.sep)) - sym_file_path = os.path.join(os.path.dirname(obj_file_path), soname) - os.symlink(os.path.basename(obj_file_path), sym_file_path) - _reapply_bsdflags_to_image(mysettings) - def _merge_unicode_error(errors): lines = [] - msg = _("This package installs one or more file names containing " - "characters that do not match your current locale " - "settings. The current setting for filesystem encoding is '%s'.") \ - % _encodings['merge'] + msg = _("QA Notice: This package installs one or more file names " + "containing characters that are not encoded with the UTF-8 encoding.") lines.extend(wrap(msg, 72)) lines.append("") @@ -1778,14 +2021,55 @@ def _merge_unicode_error(errors): lines.extend("\t" + x for x in errors) lines.append("") - if _encodings['merge'].lower().replace('_', '').replace('-', '') != 'utf8': - msg = _("For best results, UTF-8 encoding is recommended. See " - "the Gentoo Linux Localization Guide for instructions " - "about how to configure your locale for UTF-8 encoding:") - lines.extend(wrap(msg, 72)) - lines.append("") - lines.append("\t" + \ - "http://www.gentoo.org/doc/en/guide-localization.xml") - lines.append("") - return lines + +def _prepare_self_update(settings): + """ + Call this when portage is updating itself, in order to create + temporary copies of PORTAGE_BIN_PATH and PORTAGE_PYM_PATH, since + the new versions may be incompatible. 
An atexit hook will + automatically clean up the temporary copies. + """ + + # sanity check: ensure that that this routine only runs once + if portage._bin_path != portage.const.PORTAGE_BIN_PATH: + return + + # Load lazily referenced portage submodules into memory, + # so imports won't fail during portage upgrade/downgrade. + _preload_elog_modules(settings) + portage.proxy.lazyimport._preload_portage_submodules() + + # Make the temp directory inside $PORTAGE_TMPDIR/portage, since + # it's common for /tmp and /var/tmp to be mounted with the + # "noexec" option (see bug #346899). + build_prefix = os.path.join(settings["PORTAGE_TMPDIR"], "portage") + portage.util.ensure_dirs(build_prefix) + base_path_tmp = tempfile.mkdtemp( + "", "._portage_reinstall_.", build_prefix) + portage.process.atexit_register(shutil.rmtree, base_path_tmp) + + orig_bin_path = portage._bin_path + portage._bin_path = os.path.join(base_path_tmp, "bin") + shutil.copytree(orig_bin_path, portage._bin_path, symlinks=True) + + orig_pym_path = portage._pym_path + portage._pym_path = os.path.join(base_path_tmp, "pym") + shutil.copytree(orig_pym_path, portage._pym_path, symlinks=True) + + for dir_path in (base_path_tmp, portage._bin_path, portage._pym_path): + os.chmod(dir_path, 0o755) + +def _handle_self_update(settings, vardb): + cpv = settings.mycpv + if settings["ROOT"] == "/" and \ + portage.dep.match_from_list( + portage.const.PORTAGE_PACKAGE_ATOM, [cpv]): + inherited = frozenset(settings.get('INHERITED', '').split()) + if not vardb.cpv_exists(cpv) or \ + '9999' in cpv or \ + 'git' in inherited or \ + 'git-2' in inherited: + _prepare_self_update(settings) + return True + return False diff --git a/portage_with_autodep/pym/portage/package/ebuild/doebuild.pyo b/portage_with_autodep/pym/portage/package/ebuild/doebuild.pyo Binary files differnew file mode 100644 index 0000000..a6ebb1d --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/doebuild.pyo diff --git 
a/portage_with_autodep/pym/portage/package/ebuild/fetch.py b/portage_with_autodep/pym/portage/package/ebuild/fetch.py index 5cbbf87..b795b28 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/fetch.py +++ b/portage_with_autodep/pym/portage/package/ebuild/fetch.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 from __future__ import print_function @@ -10,7 +10,6 @@ import io import logging import random import re -import shutil import stat import sys import tempfile @@ -24,7 +23,7 @@ portage.proxy.lazyimport.lazyimport(globals(), 'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs', ) -from portage import OrderedDict, os, selinux, _encodings, \ +from portage import OrderedDict, os, selinux, shutil, _encodings, \ _shell_quote, _unicode_encode from portage.checksum import hashfunc_map, perform_md5, verify_all from portage.const import BASH_BINARY, CUSTOM_MIRRORS_FILE, \ @@ -34,7 +33,6 @@ from portage.exception import FileNotFound, OperationNotPermitted, \ PortageException, TryAgain from portage.localization import _ from portage.locks import lockfile, unlockfile -from portage.manifest import Manifest from portage.output import colorize, EOutput from portage.util import apply_recursive_permissions, \ apply_secpass_permissions, ensure_dirs, grabdict, shlex_split, \ @@ -48,6 +46,9 @@ _userpriv_spawn_kwargs = ( ("umask", 0o02), ) +def _hide_url_passwd(url): + return re.sub(r'//(.+):.+@(.+)', r'//\1:*password*@\2', url) + def _spawn_fetch(settings, args, **kwargs): """ Spawn a process with appropriate settings for fetching, including @@ -68,7 +69,8 @@ def _spawn_fetch(settings, args, **kwargs): } if "userfetch" in settings.features and \ - os.getuid() == 0 and portage_gid and portage_uid: + os.getuid() == 0 and portage_gid and portage_uid and \ + hasattr(os, "setgroups"): kwargs.update(_userpriv_spawn_kwargs) spawn_func = spawn @@ -356,7 
+358,8 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, allow_missing_digests = True pkgdir = mysettings.get("O") if digests is None and not (pkgdir is None or skip_manifest): - mydigests = Manifest( + mydigests = mysettings.repositories.get_repo_for_location( + os.path.dirname(os.path.dirname(pkgdir))).load_manifest( pkgdir, mysettings["DISTDIR"]).getTypeDigests("DIST") elif digests is None or skip_manifest: # no digests because fetch was not called for a specific package @@ -612,18 +615,6 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, elif userfetch: has_space = False - if not has_space: - writemsg(_("!!! Insufficient space to store %s in %s\n") % \ - (myfile, mysettings["DISTDIR"]), noiselevel=-1) - - if has_space_superuser: - writemsg(_("!!! Insufficient privileges to use " - "remaining space.\n"), noiselevel=-1) - if userfetch: - writemsg(_("!!! You may set FEATURES=\"-userfetch\"" - " in /etc/make.conf in order to fetch with\n" - "!!! superuser privileges.\n"), noiselevel=-1) - if distdir_writable and use_locks: lock_kwargs = {} @@ -646,7 +637,10 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, match, mystat = _check_distfile( myfile_path, pruned_digests, eout) if match: - if distdir_writable: + # Skip permission adjustment for symlinks, since we don't + # want to modify anything outside of the primary DISTDIR, + # and symlinks typically point to PORTAGE_RO_DISTDIRS. + if distdir_writable and not os.path.islink(myfile_path): try: apply_secpass_permissions(myfile_path, gid=portage_gid, mode=0o664, mask=0o2, @@ -727,6 +721,20 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, os.symlink(readonly_file, myfile_path) continue + # this message is shown only after we know that + # the file is not already fetched + if not has_space: + writemsg(_("!!! Insufficient space to store %s in %s\n") % \ + (myfile, mysettings["DISTDIR"]), noiselevel=-1) + + if has_space_superuser: + writemsg(_("!!! 
Insufficient privileges to use " + "remaining space.\n"), noiselevel=-1) + if userfetch: + writemsg(_("!!! You may set FEATURES=\"-userfetch\"" + " in /etc/make.conf in order to fetch with\n" + "!!! superuser privileges.\n"), noiselevel=-1) + if fsmirrors and not os.path.exists(myfile_path) and has_space: for mydir in fsmirrors: mirror_file = os.path.join(mydir, myfile) @@ -746,14 +754,18 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, raise del e else: - try: - apply_secpass_permissions( - myfile_path, gid=portage_gid, mode=0o664, mask=0o2, - stat_cached=mystat) - except PortageException as e: - if not os.access(myfile_path, os.R_OK): - writemsg(_("!!! Failed to adjust permissions:" - " %s\n") % str(e), noiselevel=-1) + # Skip permission adjustment for symlinks, since we don't + # want to modify anything outside of the primary DISTDIR, + # and symlinks typically point to PORTAGE_RO_DISTDIRS. + if not os.path.islink(myfile_path): + try: + apply_secpass_permissions(myfile_path, + gid=portage_gid, mode=0o664, mask=0o2, + stat_cached=mystat) + except PortageException as e: + if not os.access(myfile_path, os.R_OK): + writemsg(_("!!! Failed to adjust permissions:" + " %s\n") % (e,), noiselevel=-1) # If the file is empty then it's obviously invalid. Remove # the empty file and try to download if possible. @@ -940,7 +952,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locfetch=fetchcommand command_var = fetchcommand_var writemsg_stdout(_(">>> Downloading '%s'\n") % \ - re.sub(r'//(.+):.+@(.+)/',r'//\1:*password*@\2/', loc)) + _hide_url_passwd(loc)) variables = { "DISTDIR": mysettings["DISTDIR"], "URI": loc, @@ -1019,18 +1031,19 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, # Fetch failed... Try the next one... Kill 404 files though. 
if (mystat[stat.ST_SIZE]<100000) and (len(myfile)>4) and not ((myfile[-5:]==".html") or (myfile[-4:]==".htm")): html404=re.compile("<title>.*(not found|404).*</title>",re.I|re.M) - if html404.search(io.open( + with io.open( _unicode_encode(myfile_path, encoding=_encodings['fs'], errors='strict'), mode='r', encoding=_encodings['content'], errors='replace' - ).read()): - try: - os.unlink(mysettings["DISTDIR"]+"/"+myfile) - writemsg(_(">>> Deleting invalid distfile. (Improper 404 redirect from server.)\n")) - fetched = 0 - continue - except (IOError, OSError): - pass + ) as f: + if html404.search(f.read()): + try: + os.unlink(mysettings["DISTDIR"]+"/"+myfile) + writemsg(_(">>> Deleting invalid distfile. (Improper 404 redirect from server.)\n")) + fetched = 0 + continue + except (IOError, OSError): + pass fetched = 1 continue if True: @@ -1040,7 +1053,6 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, # from another mirror... verified_ok,reason = verify_all(mysettings["DISTDIR"]+"/"+myfile, mydigests[myfile]) if not verified_ok: - print(reason) writemsg(_("!!! Fetched file: %s VERIFY FAILED!\n") % myfile, noiselevel=-1) writemsg(_("!!! 
Reason: %s\n") % reason[0], diff --git a/portage_with_autodep/pym/portage/package/ebuild/fetch.pyo b/portage_with_autodep/pym/portage/package/ebuild/fetch.pyo Binary files differnew file mode 100644 index 0000000..3bd81df --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/fetch.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/getmaskingreason.py b/portage_with_autodep/pym/portage/package/ebuild/getmaskingreason.py index f2af638..8a88c2f 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/getmaskingreason.py +++ b/portage_with_autodep/pym/portage/package/ebuild/getmaskingreason.py @@ -83,7 +83,13 @@ def getmaskingreason(mycpv, metadata=None, settings=None, pmasklists = [] for profile in locations: pmask_filename = os.path.join(profile, "package.mask") - pmasklists.append((pmask_filename, grablines(pmask_filename, recursive=1))) + node = None + for l, recursive_filename in grablines(pmask_filename, + recursive=1, remember_source_file=True): + if node is None or node[0] != recursive_filename: + node = (recursive_filename, []) + pmasklists.append(node) + node[1].append(l) pmaskdict = settings._mask_manager._pmaskdict if mycp in pmaskdict: diff --git a/portage_with_autodep/pym/portage/package/ebuild/getmaskingreason.pyo b/portage_with_autodep/pym/portage/package/ebuild/getmaskingreason.pyo Binary files differnew file mode 100644 index 0000000..1614244 --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/getmaskingreason.pyo diff --git a/portage_with_autodep/pym/portage/package/ebuild/getmaskingstatus.py b/portage_with_autodep/pym/portage/package/ebuild/getmaskingstatus.py index 4c65fcc..9bf605d 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/getmaskingstatus.py +++ b/portage_with_autodep/pym/portage/package/ebuild/getmaskingstatus.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 
__all__ = ['getmaskingstatus'] @@ -7,11 +7,9 @@ import sys import portage from portage import eapi_is_supported, _eapi_is_deprecated -from portage.dep import match_from_list, _slot_separator, _repo_separator from portage.localization import _ from portage.package.ebuild.config import config -from portage.versions import catpkgsplit, cpv_getkey -from _emerge.Package import Package +from portage.versions import catpkgsplit, _pkg_str if sys.hexversion >= 0x3000000: basestring = str @@ -53,9 +51,6 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None): metadata = pkg.metadata installed = pkg.installed - mysplit = catpkgsplit(mycpv) - if not mysplit: - raise ValueError(_("invalid CPV: %s") % mycpv) if metadata is None: db_keys = list(portdb._aux_cache_keys) try: @@ -70,11 +65,14 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None): else: metadata["USE"] = "" - rValue = [] + if not hasattr(mycpv, 'slot'): + try: + mycpv = _pkg_str(mycpv, slot=metadata['SLOT'], + repo=metadata.get('repository')) + except portage.exception.InvalidData: + raise ValueError(_("invalid CPV: %s") % mycpv) - # profile checking - if settings._getProfileMaskAtom(mycpv, metadata): - rValue.append(_MaskReason("profile", "profile")) + rValue = [] # package.mask checking if settings._getMaskAtom(mycpv, metadata): @@ -85,8 +83,6 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None): mygroups = settings._getKeywords(mycpv, metadata) licenses = metadata["LICENSE"] properties = metadata["PROPERTIES"] - if eapi.startswith("-"): - eapi = eapi[1:] if not eapi_is_supported(eapi): return [_MaskReason("EAPI", "EAPI %s" % eapi)] elif _eapi_is_deprecated(eapi) and not installed: diff --git a/portage_with_autodep/pym/portage/package/ebuild/getmaskingstatus.pyo b/portage_with_autodep/pym/portage/package/ebuild/getmaskingstatus.pyo Binary files differnew file mode 100644 index 0000000..9cf1d9d --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/getmaskingstatus.pyo diff 
--git a/portage_with_autodep/pym/portage/package/ebuild/prepare_build_dirs.py b/portage_with_autodep/pym/portage/package/ebuild/prepare_build_dirs.py index 616dc2e..b8fbdc5 100644 --- a/portage_with_autodep/pym/portage/package/ebuild/prepare_build_dirs.py +++ b/portage_with_autodep/pym/portage/package/ebuild/prepare_build_dirs.py @@ -5,11 +5,11 @@ __all__ = ['prepare_build_dirs'] import errno import gzip -import shutil import stat import time -from portage import os, _encodings, _unicode_encode, _unicode_decode +import portage +from portage import os, shutil, _encodings, _unicode_encode, _unicode_decode from portage.data import portage_gid, portage_uid, secpass from portage.exception import DirectoryNotFound, FileNotFound, \ OperationNotPermitted, PermissionDenied, PortageException @@ -118,11 +118,13 @@ def _adjust_perms_msg(settings, msg): background = settings.get("PORTAGE_BACKGROUND") == "1" log_path = settings.get("PORTAGE_LOG_FILE") log_file = None + log_file_real = None if background and log_path is not None: try: log_file = open(_unicode_encode(log_path, encoding=_encodings['fs'], errors='strict'), mode='ab') + log_file_real = log_file except IOError: def write(msg): pass @@ -139,6 +141,8 @@ def _adjust_perms_msg(settings, msg): finally: if log_file is not None: log_file.close() + if log_file_real is not log_file: + log_file_real.close() def _prepare_features_dirs(mysettings): @@ -311,7 +315,7 @@ def _prepare_workdir(mysettings): logdir = normalize_path(mysettings["PORT_LOGDIR"]) logid_path = os.path.join(mysettings["PORTAGE_BUILDDIR"], ".logid") if not os.path.exists(logid_path): - open(_unicode_encode(logid_path), 'w') + open(_unicode_encode(logid_path), 'w').close() logid_time = _unicode_decode(time.strftime("%Y%m%d-%H%M%S", time.gmtime(os.stat(logid_path).st_mtime)), encoding=_encodings['content'], errors='replace') @@ -342,13 +346,31 @@ def _prepare_workdir(mysettings): writemsg(_unicode_decode("!!! 
%s: %s\n") % (_("Permission Denied"), log_subdir), noiselevel=-1) + tmpdir_log_path = os.path.join( + mysettings["T"], "build.log%s" % compress_log_ext) if not logdir_subdir_ok: # NOTE: When sesandbox is enabled, the local SELinux security policies # may not allow output to be piped out of the sesandbox domain. The # current policy will allow it to work when a pty is available, but # not through a normal pipe. See bug #162404. - mysettings["PORTAGE_LOG_FILE"] = os.path.join( - mysettings["T"], "build.log%s" % compress_log_ext) + mysettings["PORTAGE_LOG_FILE"] = tmpdir_log_path + else: + # Create a symlink from tmpdir_log_path to PORTAGE_LOG_FILE, as + # requested in bug #412865. + make_new_symlink = False + try: + target = os.readlink(tmpdir_log_path) + except OSError: + make_new_symlink = True + else: + if target != mysettings["PORTAGE_LOG_FILE"]: + make_new_symlink = True + if make_new_symlink: + try: + os.unlink(tmpdir_log_path) + except OSError: + pass + os.symlink(mysettings["PORTAGE_LOG_FILE"], tmpdir_log_path) def _ensure_log_subdirs(logdir, subdir): """ @@ -358,13 +380,27 @@ def _ensure_log_subdirs(logdir, subdir): and subdir are assumed to be normalized absolute paths. """ st = os.stat(logdir) + uid = -1 gid = st.st_gid grp_mode = 0o2070 & st.st_mode + # If logdir is writable by the portage group but its uid + # is not portage_uid, then set the uid to portage_uid if + # we have privileges to do so, for compatibility with our + # default logrotate config (see bug 378451). With the + # "su portage portage" directive and logrotate-3.8.0, + # logrotate's chown call during the compression phase will + # only succeed if the log file's uid is portage_uid. 
+ if grp_mode and gid == portage_gid and \ + portage.data.secpass >= 2: + uid = portage_uid + if st.st_uid != portage_uid: + ensure_dirs(logdir, uid=uid) + logdir_split_len = len(logdir.split(os.sep)) subdir_split = subdir.split(os.sep)[logdir_split_len:] subdir_split.reverse() current = logdir while subdir_split: current = os.path.join(current, subdir_split.pop()) - ensure_dirs(current, gid=gid, mode=grp_mode, mask=0) + ensure_dirs(current, uid=uid, gid=gid, mode=grp_mode, mask=0) diff --git a/portage_with_autodep/pym/portage/package/ebuild/prepare_build_dirs.pyo b/portage_with_autodep/pym/portage/package/ebuild/prepare_build_dirs.pyo Binary files differnew file mode 100644 index 0000000..2dcfaea --- /dev/null +++ b/portage_with_autodep/pym/portage/package/ebuild/prepare_build_dirs.pyo diff --git a/portage_with_autodep/pym/portage/process.py b/portage_with_autodep/pym/portage/process.py index 6866a2f..d7d1037 100644 --- a/portage_with_autodep/pym/portage/process.py +++ b/portage_with_autodep/pym/portage/process.py @@ -1,9 +1,11 @@ # portage.py -- core Portage functionality -# Copyright 1998-2010 Gentoo Foundation +# Copyright 1998-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import atexit +import errno +import platform import signal import sys import traceback @@ -32,6 +34,18 @@ if os.path.isdir("/proc/%i/fd" % os.getpid()): def get_open_fds(): return (int(fd) for fd in os.listdir("/proc/%i/fd" % os.getpid()) \ if fd.isdigit()) + + if platform.python_implementation() == 'PyPy': + # EAGAIN observed with PyPy 1.8. 
+ _get_open_fds = get_open_fds + def get_open_fds(): + try: + return _get_open_fds() + except OSError as e: + if e.errno != errno.EAGAIN: + raise + return range(max_fd_limit) + else: def get_open_fds(): return range(max_fd_limit) @@ -257,7 +271,7 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False, pid = os.fork() - if not pid: + if pid == 0: try: _exec(binary, mycommand, opt_name, fd_pipes, env, gid, groups, uid, umask, pre_exec) @@ -272,6 +286,9 @@ def spawn(mycommand, env={}, opt_name=None, fd_pipes=None, returnpid=False, sys.stderr.flush() os._exit(1) + if not isinstance(pid, int): + raise AssertionError("fork returned non-integer: %s" % (repr(pid),)) + # Add the pid to our local and the global pid lists. mypids.append(pid) spawned_pids.append(pid) @@ -350,13 +367,18 @@ def _exec(binary, mycommand, opt_name, fd_pipes, env, gid, groups, uid, umask, @param pre_exec: A function to be called with no arguments just prior to the exec call. @type pre_exec: callable @rtype: None - @returns: Never returns (calls os.execve) + @return: Never returns (calls os.execve) """ # If the process we're creating hasn't been given a name # assign it the name of the executable. if not opt_name: - opt_name = os.path.basename(binary) + if binary is portage._python_interpreter: + # NOTE: PyPy 1.7 will die due to "libary path not found" if argv[0] + # does not contain the full path of the binary. + opt_name = binary + else: + opt_name = os.path.basename(binary) # Set up the command's argument list. myargs = [opt_name] @@ -391,8 +413,20 @@ def _exec(binary, mycommand, opt_name, fd_pipes, env, gid, groups, uid, umask, # And switch to the new process. os.execve(binary, myargs, env) -def _setup_pipes(fd_pipes): - """Setup pipes for a forked process.""" +def _setup_pipes(fd_pipes, close_fds=True): + """Setup pipes for a forked process. 
+ + WARNING: When not followed by exec, the close_fds behavior + can trigger interference from destructors that close file + descriptors. This interference happens when the garbage + collector intermittently executes such destructors after their + corresponding file descriptors have been re-used, leading + to intermittent "[Errno 9] Bad file descriptor" exceptions in + forked processes. This problem has been observed with PyPy 1.8, + and also with CPython under some circumstances (as triggered + by xmpppy in bug #374335). In order to close a safe subset of + file descriptors, see portage.locks._close_fds(). + """ my_fds = {} # To protect from cases where direct assignment could # clobber needed fds ({1:2, 2:1}) we first dupe the fds @@ -402,14 +436,16 @@ def _setup_pipes(fd_pipes): # Then assign them to what they should be. for fd in my_fds: os.dup2(my_fds[fd], fd) - # Then close _all_ fds that haven't been explicitly - # requested to be kept open. - for fd in get_open_fds(): - if fd not in my_fds: - try: - os.close(fd) - except OSError: - pass + + if close_fds: + # Then close _all_ fds that haven't been explicitly + # requested to be kept open. + for fd in get_open_fds(): + if fd not in my_fds: + try: + os.close(fd) + except OSError: + pass def find_binary(binary): """ @@ -418,7 +454,7 @@ def find_binary(binary): @param binary: Name of the binary to find @type string @rtype: None or string - @returns: full path to binary or None if the binary could not be located. + @return: full path to binary or None if the binary could not be located. 
""" for path in os.environ.get("PATH", "").split(":"): filename = "%s/%s" % (path, binary) diff --git a/portage_with_autodep/pym/portage/process.pyo b/portage_with_autodep/pym/portage/process.pyo Binary files differnew file mode 100644 index 0000000..c53af30 --- /dev/null +++ b/portage_with_autodep/pym/portage/process.pyo diff --git a/portage_with_autodep/pym/portage/proxy/__init__.pyo b/portage_with_autodep/pym/portage/proxy/__init__.pyo Binary files differnew file mode 100644 index 0000000..b3d096b --- /dev/null +++ b/portage_with_autodep/pym/portage/proxy/__init__.pyo diff --git a/portage_with_autodep/pym/portage/proxy/lazyimport.pyo b/portage_with_autodep/pym/portage/proxy/lazyimport.pyo Binary files differnew file mode 100644 index 0000000..9da8089 --- /dev/null +++ b/portage_with_autodep/pym/portage/proxy/lazyimport.pyo diff --git a/portage_with_autodep/pym/portage/proxy/objectproxy.pyo b/portage_with_autodep/pym/portage/proxy/objectproxy.pyo Binary files differnew file mode 100644 index 0000000..f0919ff --- /dev/null +++ b/portage_with_autodep/pym/portage/proxy/objectproxy.pyo diff --git a/portage_with_autodep/pym/portage/repository/__init__.pyo b/portage_with_autodep/pym/portage/repository/__init__.pyo Binary files differnew file mode 100644 index 0000000..ab526c0 --- /dev/null +++ b/portage_with_autodep/pym/portage/repository/__init__.pyo diff --git a/portage_with_autodep/pym/portage/repository/config.py b/portage_with_autodep/pym/portage/repository/config.py index 9f0bb99..20f1919 100644 --- a/portage_with_autodep/pym/portage/repository/config.py +++ b/portage_with_autodep/pym/portage/repository/config.py @@ -1,21 +1,37 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import io import logging +import warnings +import sys import re try: - from configparser import SafeConfigParser, ParsingError + from configparser import ParsingError + if sys.hexversion >= 
0x3020000: + from configparser import ConfigParser as SafeConfigParser + else: + from configparser import SafeConfigParser except ImportError: from ConfigParser import SafeConfigParser, ParsingError -from portage import os -from portage.const import USER_CONFIG_PATH, REPO_NAME_LOC +from portage import eclass_cache, os +from portage.const import (MANIFEST2_HASH_FUNCTIONS, MANIFEST2_REQUIRED_HASH, + REPO_NAME_LOC, USER_CONFIG_PATH) from portage.env.loaders import KeyValuePairFileLoader -from portage.util import normalize_path, writemsg, writemsg_level, shlex_split +from portage.util import (normalize_path, read_corresponding_eapi_file, shlex_split, + stack_lists, writemsg, writemsg_level) from portage.localization import _ +from portage import _unicode_decode from portage import _unicode_encode from portage import _encodings +from portage import manifest + +_valid_profile_formats = frozenset( + ['pms', 'portage-1', 'portage-2']) + +_portage1_profiles_allow_directories = frozenset( + ["portage-1-compat", "portage-1", 'portage-2']) _repo_name_sub_re = re.compile(r'[^\w-]') @@ -35,8 +51,13 @@ def _gen_valid_repo(name): class RepoConfig(object): """Stores config of one repository""" - __slots__ = ['aliases', 'eclass_overrides', 'eclass_locations', 'location', 'user_location', 'masters', 'main_repo', - 'missing_repo_name', 'name', 'priority', 'sync', 'format'] + __slots__ = ('aliases', 'allow_missing_manifest', 'allow_provide_virtual', + 'cache_formats', 'create_manifest', 'disable_manifest', 'eapi', + 'eclass_db', 'eclass_locations', 'eclass_overrides', 'format', 'location', + 'main_repo', 'manifest_hashes', 'masters', 'missing_repo_name', + 'name', 'portage1_profiles', 'portage1_profiles_compat', 'priority', + 'profile_formats', 'sign_commit', 'sign_manifest', 'sync', + 'thin_manifest', 'update_changelog', 'user_location') def __init__(self, name, repo_opts): """Build a RepoConfig with options in repo_opts @@ -51,11 +72,15 @@ class RepoConfig(object): if 
eclass_overrides is not None: eclass_overrides = tuple(eclass_overrides.split()) self.eclass_overrides = eclass_overrides - #Locations are computed later. + # Eclass databases and locations are computed later. + self.eclass_db = None self.eclass_locations = None - #Masters are only read from layout.conf. - self.masters = None + # Masters from repos.conf override layout.conf. + masters = repo_opts.get('masters') + if masters is not None: + masters = tuple(masters.split()) + self.masters = masters #The main-repo key makes only sense for the 'DEFAULT' section. self.main_repo = repo_opts.get('main-repo') @@ -87,59 +112,153 @@ class RepoConfig(object): location = None self.location = location + eapi = None missing = True if self.location is not None: - name, missing = self._read_repo_name(self.location) - # We must ensure that the name conforms to PMS 3.1.5 - # in order to avoid InvalidAtom exceptions when we - # use it to generate atoms. - name = _gen_valid_repo(name) - if not name: - # name only contains invalid characters - name = "x-" + os.path.basename(self.location) - name = _gen_valid_repo(name) - # If basename only contains whitespace then the - # end result is name = 'x-'. - + eapi = read_corresponding_eapi_file(os.path.join(self.location, REPO_NAME_LOC)) + name, missing = self._read_valid_repo_name(self.location) elif name == "DEFAULT": missing = False + + self.eapi = eapi self.name = name self.missing_repo_name = missing + # sign_commit is disabled by default, since it requires Git >=1.7.9, + # and key_id configured by `git config user.signingkey key_id` + self.sign_commit = False + self.sign_manifest = True + self.thin_manifest = False + self.allow_missing_manifest = False + self.allow_provide_virtual = False + self.create_manifest = True + self.disable_manifest = False + self.manifest_hashes = None + self.update_changelog = False + self.cache_formats = None + self.portage1_profiles = True + self.portage1_profiles_compat = False + + # Parse layout.conf. 
+ if self.location: + layout_filename = os.path.join(self.location, "metadata", "layout.conf") + layout_data = parse_layout_conf(self.location, self.name)[0] + + # layout.conf masters may be overridden here if we have a masters + # setting from the user's repos.conf + if self.masters is None: + self.masters = layout_data['masters'] + + if layout_data['aliases']: + aliases = self.aliases + if aliases is None: + aliases = () + # repos.conf aliases come after layout.conf aliases, giving + # them the ability to do incremental overrides + self.aliases = layout_data['aliases'] + tuple(aliases) + + for value in ('allow-missing-manifest', + 'allow-provide-virtual', 'cache-formats', + 'create-manifest', 'disable-manifest', 'manifest-hashes', + 'profile-formats', + 'sign-commit', 'sign-manifest', 'thin-manifest', 'update-changelog'): + setattr(self, value.lower().replace("-", "_"), layout_data[value]) + + self.portage1_profiles = any(x in _portage1_profiles_allow_directories + for x in layout_data['profile-formats']) + self.portage1_profiles_compat = layout_data['profile-formats'] == ('portage-1-compat',) + + def iter_pregenerated_caches(self, auxdbkeys, readonly=True, force=False): + """ + Reads layout.conf cache-formats from left to right and yields cache + instances for each supported type that's found. If no cache-formats + are specified in layout.conf, 'pms' type is assumed if the + metadata/cache directory exists or force is True. 
+ """ + formats = self.cache_formats + if not formats: + if not force: + return + formats = ('pms',) + + for fmt in formats: + name = None + if fmt == 'pms': + from portage.cache.metadata import database + name = 'metadata/cache' + elif fmt == 'md5-dict': + from portage.cache.flat_hash import md5_database as database + name = 'metadata/md5-cache' + + if name is not None: + yield database(self.location, name, + auxdbkeys, readonly=readonly) + + def get_pregenerated_cache(self, auxdbkeys, readonly=True, force=False): + """ + Returns the first cache instance yielded from + iter_pregenerated_caches(), or None if no cache is available or none + of the available formats are supported. + """ + return next(self.iter_pregenerated_caches( + auxdbkeys, readonly=readonly, force=force), None) + + def load_manifest(self, *args, **kwds): + kwds['thin'] = self.thin_manifest + kwds['allow_missing'] = self.allow_missing_manifest + kwds['allow_create'] = self.create_manifest + kwds['hashes'] = self.manifest_hashes + if self.disable_manifest: + kwds['from_scratch'] = True + return manifest.Manifest(*args, **kwds) def update(self, new_repo): """Update repository with options in another RepoConfig""" - if new_repo.aliases is not None: - self.aliases = new_repo.aliases - if new_repo.eclass_overrides is not None: - self.eclass_overrides = new_repo.eclass_overrides - if new_repo.masters is not None: - self.masters = new_repo.masters + + keys = set(self.__slots__) + keys.discard("missing_repo_name") + for k in keys: + v = getattr(new_repo, k, None) + if v is not None: + setattr(self, k, v) + if new_repo.name is not None: - self.name = new_repo.name self.missing_repo_name = new_repo.missing_repo_name - if new_repo.user_location is not None: - self.user_location = new_repo.user_location - if new_repo.location is not None: - self.location = new_repo.location - if new_repo.priority is not None: - self.priority = new_repo.priority - if new_repo.sync is not None: - self.sync = new_repo.sync - - 
def _read_repo_name(self, repo_path): + + @staticmethod + def _read_valid_repo_name(repo_path): + name, missing = RepoConfig._read_repo_name(repo_path) + # We must ensure that the name conforms to PMS 3.1.5 + # in order to avoid InvalidAtom exceptions when we + # use it to generate atoms. + name = _gen_valid_repo(name) + if not name: + # name only contains invalid characters + name = "x-" + os.path.basename(repo_path) + name = _gen_valid_repo(name) + # If basename only contains whitespace then the + # end result is name = 'x-'. + return name, missing + + @staticmethod + def _read_repo_name(repo_path): """ Read repo_name from repo_path. Returns repo_name, missing. """ repo_name_path = os.path.join(repo_path, REPO_NAME_LOC) + f = None try: - return io.open( + f = io.open( _unicode_encode(repo_name_path, encoding=_encodings['fs'], errors='strict'), mode='r', encoding=_encodings['repo.content'], - errors='replace').readline().strip(), False + errors='replace') + return f.readline().strip(), False except EnvironmentError: return "x-" + os.path.basename(repo_path), True + finally: + if f is not None: + f.close() def info_string(self): """ @@ -167,109 +286,154 @@ class RepoConfig(object): repo_msg.append("") return "\n".join(repo_msg) + def __repr__(self): + return "<portage.repository.config.RepoConfig(name='%s', location='%s')>" % (self.name, _unicode_decode(self.location)) + + def __str__(self): + d = {} + for k in self.__slots__: + d[k] = getattr(self, k, None) + return _unicode_decode("%s") % (d,) + + if sys.hexversion < 0x3000000: + + __unicode__ = __str__ + + def __str__(self): + return _unicode_encode(self.__unicode__()) + class RepoConfigLoader(object): """Loads and store config of several repositories, loaded from PORTDIR_OVERLAY or repos.conf""" - def __init__(self, paths, settings): - """Load config from files in paths""" - def parse(paths, prepos, ignored_map, ignored_location_map): - """Parse files in paths to load config""" - parser = SafeConfigParser() - 
try: - parser.read(paths) - except ParsingError as e: - writemsg(_("!!! Error while reading repo config file: %s\n") % e, noiselevel=-1) - prepos['DEFAULT'] = RepoConfig("DEFAULT", parser.defaults()) - for sname in parser.sections(): - optdict = {} - for oname in parser.options(sname): - optdict[oname] = parser.get(sname, oname) - - repo = RepoConfig(sname, optdict) - if repo.location and not os.path.exists(repo.location): - writemsg(_("!!! Invalid repos.conf entry '%s'" - " (not a dir): '%s'\n") % (sname, repo.location), noiselevel=-1) - continue - if repo.name in prepos: - old_location = prepos[repo.name].location - if old_location is not None and repo.location is not None and old_location != repo.location: - ignored_map.setdefault(repo.name, []).append(old_location) - ignored_location_map[old_location] = repo.name - prepos[repo.name].update(repo) - else: + @staticmethod + def _add_repositories(portdir, portdir_overlay, prepos, ignored_map, ignored_location_map): + """Add overlays in PORTDIR_OVERLAY as repositories""" + overlays = [] + if portdir: + portdir = normalize_path(portdir) + overlays.append(portdir) + try: + port_ov = [normalize_path(i) for i in shlex_split(portdir_overlay)] + except ValueError as e: + #File "/usr/lib/python3.2/shlex.py", line 168, in read_token + # raise ValueError("No closing quotation") + writemsg(_("!!! 
Invalid PORTDIR_OVERLAY:" + " %s: %s\n") % (e, portdir_overlay), noiselevel=-1) + port_ov = [] + overlays.extend(port_ov) + default_repo_opts = {} + if prepos['DEFAULT'].aliases is not None: + default_repo_opts['aliases'] = \ + ' '.join(prepos['DEFAULT'].aliases) + if prepos['DEFAULT'].eclass_overrides is not None: + default_repo_opts['eclass-overrides'] = \ + ' '.join(prepos['DEFAULT'].eclass_overrides) + if prepos['DEFAULT'].masters is not None: + default_repo_opts['masters'] = \ + ' '.join(prepos['DEFAULT'].masters) + + if overlays: + # We need a copy of the original repos.conf data, since we're + # going to modify the prepos dict and some of the RepoConfig + # objects that we put in prepos may have to be discarded if + # they get overridden by a repository with the same name but + # a different location. This is common with repoman, for example, + # when temporarily overriding an rsync repo with another copy + # of the same repo from CVS. + repos_conf = prepos.copy() + #overlay priority is negative because we want them to be looked before any other repo + base_priority = 0 + for ov in overlays: + if os.path.isdir(ov): + repo_opts = default_repo_opts.copy() + repo_opts['location'] = ov + repo = RepoConfig(None, repo_opts) + # repos_conf_opts contains options from repos.conf + repos_conf_opts = repos_conf.get(repo.name) + if repos_conf_opts is not None: + # Selectively copy only the attributes which + # repos.conf is allowed to override. 
+ for k in ('aliases', 'eclass_overrides', 'masters', 'priority'): + v = getattr(repos_conf_opts, k, None) + if v is not None: + setattr(repo, k, v) + + if repo.name in prepos: + old_location = prepos[repo.name].location + if old_location is not None and old_location != repo.location: + ignored_map.setdefault(repo.name, []).append(old_location) + ignored_location_map[old_location] = repo.name + if old_location == portdir: + portdir = repo.user_location + + if ov == portdir and portdir not in port_ov: + repo.priority = -1000 + elif repo.priority is None: + repo.priority = base_priority + base_priority += 1 + prepos[repo.name] = repo + else: + writemsg(_("!!! Invalid PORTDIR_OVERLAY" + " (not a dir): '%s'\n") % ov, noiselevel=-1) - def add_overlays(portdir, portdir_overlay, prepos, ignored_map, ignored_location_map): - """Add overlays in PORTDIR_OVERLAY as repositories""" - overlays = [] - if portdir: - portdir = normalize_path(portdir) - overlays.append(portdir) - port_ov = [normalize_path(i) for i in shlex_split(portdir_overlay)] - overlays.extend(port_ov) - default_repo_opts = {} - if prepos['DEFAULT'].aliases is not None: - default_repo_opts['aliases'] = \ - ' '.join(prepos['DEFAULT'].aliases) - if prepos['DEFAULT'].eclass_overrides is not None: - default_repo_opts['eclass-overrides'] = \ - ' '.join(prepos['DEFAULT'].eclass_overrides) - if prepos['DEFAULT'].masters is not None: - default_repo_opts['masters'] = \ - ' '.join(prepos['DEFAULT'].masters) - if overlays: - #overlay priority is negative because we want them to be looked before any other repo - base_priority = 0 - for ov in overlays: - if os.path.isdir(ov): - repo_opts = default_repo_opts.copy() - repo_opts['location'] = ov - repo = RepoConfig(None, repo_opts) - repo_conf_opts = prepos.get(repo.name) - if repo_conf_opts is not None: - if repo_conf_opts.aliases is not None: - repo_opts['aliases'] = \ - ' '.join(repo_conf_opts.aliases) - if repo_conf_opts.eclass_overrides is not None: - 
repo_opts['eclass-overrides'] = \ - ' '.join(repo_conf_opts.eclass_overrides) - if repo_conf_opts.masters is not None: - repo_opts['masters'] = \ - ' '.join(repo_conf_opts.masters) - repo = RepoConfig(repo.name, repo_opts) - if repo.name in prepos: - old_location = prepos[repo.name].location - if old_location is not None and old_location != repo.location: - ignored_map.setdefault(repo.name, []).append(old_location) - ignored_location_map[old_location] = repo.name - if old_location == portdir: - portdir = repo.user_location - prepos[repo.name].update(repo) - repo = prepos[repo.name] - else: - prepos[repo.name] = repo - - if ov == portdir and portdir not in port_ov: - repo.priority = -1000 - else: - repo.priority = base_priority - base_priority += 1 + return portdir - else: - writemsg(_("!!! Invalid PORTDIR_OVERLAY" - " (not a dir): '%s'\n") % ov, noiselevel=-1) + @staticmethod + def _parse(paths, prepos, ignored_map, ignored_location_map): + """Parse files in paths to load config""" + parser = SafeConfigParser() - return portdir + # use read_file/readfp in order to control decoding of unicode + try: + # Python >=3.2 + read_file = parser.read_file + except AttributeError: + read_file = parser.readfp - def repo_priority(r): - """ - Key funtion for comparing repositories by priority. - None is equal priority zero. - """ - x = prepos[r].priority - if x is None: - return 0 - return x + for p in paths: + f = None + try: + f = io.open(_unicode_encode(p, + encoding=_encodings['fs'], errors='strict'), + mode='r', encoding=_encodings['repo.content'], + errors='replace') + except EnvironmentError: + pass + else: + try: + read_file(f) + except ParsingError as e: + writemsg(_unicode_decode( + _("!!! 
Error while reading repo config file: %s\n") + ) % e, noiselevel=-1) + finally: + if f is not None: + f.close() + + prepos['DEFAULT'] = RepoConfig("DEFAULT", parser.defaults()) + for sname in parser.sections(): + optdict = {} + for oname in parser.options(sname): + optdict[oname] = parser.get(sname, oname) + + repo = RepoConfig(sname, optdict) + if repo.location and not os.path.exists(repo.location): + writemsg(_("!!! Invalid repos.conf entry '%s'" + " (not a dir): '%s'\n") % (sname, repo.location), noiselevel=-1) + continue + + if repo.name in prepos: + old_location = prepos[repo.name].location + if old_location is not None and repo.location is not None and old_location != repo.location: + ignored_map.setdefault(repo.name, []).append(old_location) + ignored_location_map[old_location] = repo.name + prepos[repo.name].update(repo) + else: + prepos[repo.name] = repo + + def __init__(self, paths, settings): + """Load config from files in paths""" prepos = {} location_map = {} @@ -279,10 +443,12 @@ class RepoConfigLoader(object): portdir = settings.get('PORTDIR', '') portdir_overlay = settings.get('PORTDIR_OVERLAY', '') - parse(paths, prepos, ignored_map, ignored_location_map) + + self._parse(paths, prepos, ignored_map, ignored_location_map) + # If PORTDIR_OVERLAY contains a repo with the same repo_name as # PORTDIR, then PORTDIR is overridden. - portdir = add_overlays(portdir, portdir_overlay, prepos, + portdir = self._add_repositories(portdir, portdir_overlay, prepos, ignored_map, ignored_location_map) if portdir and portdir.strip(): portdir = os.path.realpath(portdir) @@ -294,38 +460,14 @@ class RepoConfigLoader(object): for repo in prepos.values() if repo.location is not None and repo.missing_repo_name) - #Parse layout.conf and read masters key. 
- for repo in prepos.values(): - if not repo.location: - continue - layout_filename = os.path.join(repo.location, "metadata", "layout.conf") - layout_file = KeyValuePairFileLoader(layout_filename, None, None) - layout_data, layout_errors = layout_file.load() - - masters = layout_data.get('masters') - if masters and masters.strip(): - masters = masters.split() - else: - masters = None - repo.masters = masters - - aliases = layout_data.get('aliases') - if aliases and aliases.strip(): - aliases = aliases.split() - else: - aliases = None - if aliases: - if repo.aliases: - aliases.extend(repo.aliases) - repo.aliases = tuple(sorted(set(aliases))) - #Take aliases into account. new_prepos = {} for repo_name, repo in prepos.items(): names = set() names.add(repo_name) if repo.aliases: - names.update(repo.aliases) + aliases = stack_lists([repo.aliases], incremental=True) + names.update(aliases) for name in names: if name in new_prepos: @@ -342,16 +484,11 @@ class RepoConfigLoader(object): # filter duplicates from aliases, by only including # items where repo.name == key - prepos_order = [repo.name for key, repo in prepos.items() \ - if repo.name == key and repo.location is not None] - prepos_order.sort(key=repo_priority) - if portdir in location_map: - portdir_repo = prepos[location_map[portdir]] - portdir_sync = settings.get('SYNC', '') - #if SYNC variable is set and not overwritten by repos.conf - if portdir_sync and not portdir_repo.sync: - portdir_repo.sync = portdir_sync + prepos_order = sorted(prepos.items(), key=lambda r:r[1].priority or 0) + + prepos_order = [repo.name for (key, repo) in prepos_order + if repo.name == key and repo.location is not None] if prepos['DEFAULT'].main_repo is None or \ prepos['DEFAULT'].main_repo not in prepos: @@ -406,7 +543,12 @@ class RepoConfigLoader(object): eclass_locations = [] eclass_locations.extend(master_repo.location for master_repo in repo.masters) - eclass_locations.append(repo.location) + # Only append the current repo to 
eclass_locations if it's not + # there already. This allows masters to have more control over + # eclass override order, which may be useful for scenarios in + # which there is a plan to migrate eclasses to a master repo. + if repo.location not in eclass_locations: + eclass_locations.append(repo.location) if repo.eclass_overrides: for other_repo_name in repo.eclass_overrides: @@ -419,6 +561,23 @@ class RepoConfigLoader(object): level=logging.ERROR, noiselevel=-1) repo.eclass_locations = tuple(eclass_locations) + eclass_dbs = {} + for repo_name, repo in prepos.items(): + if repo_name == "DEFAULT": + continue + + eclass_db = None + for eclass_location in repo.eclass_locations: + tree_db = eclass_dbs.get(eclass_location) + if tree_db is None: + tree_db = eclass_cache.cache(eclass_location) + eclass_dbs[eclass_location] = tree_db + if eclass_db is None: + eclass_db = tree_db.copy() + else: + eclass_db.append(tree_db) + repo.eclass_db = eclass_db + self._prepos_changed = True self._repo_location_list = [] @@ -488,6 +647,9 @@ class RepoConfigLoader(object): return None return self.treemap[repo_name] + def get_repo_for_location(self, location): + return self.prepos[self.get_name_for_location(location)] + def __getitem__(self, repo_name): return self.prepos[repo_name] @@ -502,3 +664,113 @@ def load_repository_config(settings): repoconfigpaths.append(os.path.join(settings["PORTAGE_CONFIGROOT"], USER_CONFIG_PATH, "repos.conf")) return RepoConfigLoader(repoconfigpaths, settings) + +def _get_repo_name(repo_location, cached=None): + if cached is not None: + return cached + name, missing = RepoConfig._read_repo_name(repo_location) + if missing: + return None + return name + +def parse_layout_conf(repo_location, repo_name=None): + eapi = read_corresponding_eapi_file(os.path.join(repo_location, REPO_NAME_LOC)) + + layout_filename = os.path.join(repo_location, "metadata", "layout.conf") + layout_file = KeyValuePairFileLoader(layout_filename, None, None) + layout_data, layout_errors 
= layout_file.load() + + data = {} + + # None indicates abscence of a masters setting, which later code uses + # to trigger a backward compatibility fallback that sets an implicit + # master. In order to avoid this fallback behavior, layout.conf can + # explicitly set masters to an empty value, which will result in an + # empty tuple here instead of None. + masters = layout_data.get('masters') + if masters is not None: + masters = tuple(masters.split()) + data['masters'] = masters + data['aliases'] = tuple(layout_data.get('aliases', '').split()) + + data['allow-provide-virtual'] = \ + layout_data.get('allow-provide-virtuals', 'false').lower() == 'true' + + data['sign-commit'] = layout_data.get('sign-commits', 'false').lower() \ + == 'true' + + data['sign-manifest'] = layout_data.get('sign-manifests', 'true').lower() \ + == 'true' + + data['thin-manifest'] = layout_data.get('thin-manifests', 'false').lower() \ + == 'true' + + manifest_policy = layout_data.get('use-manifests', 'strict').lower() + data['allow-missing-manifest'] = manifest_policy != 'strict' + data['create-manifest'] = manifest_policy != 'false' + data['disable-manifest'] = manifest_policy == 'false' + + # for compatibility w/ PMS, fallback to pms; but also check if the + # cache exists or not. + cache_formats = layout_data.get('cache-formats', 'pms').lower().split() + if 'pms' in cache_formats and not os.path.isdir( + os.path.join(repo_location, 'metadata', 'cache')): + cache_formats.remove('pms') + data['cache-formats'] = tuple(cache_formats) + + manifest_hashes = layout_data.get('manifest-hashes') + if manifest_hashes is not None: + manifest_hashes = frozenset(manifest_hashes.upper().split()) + if MANIFEST2_REQUIRED_HASH not in manifest_hashes: + repo_name = _get_repo_name(repo_location, cached=repo_name) + warnings.warn((_("Repository named '%(repo_name)s' has a " + "'manifest-hashes' setting that does not contain " + "the '%(hash)s' hash which is required by this " + "portage version. 
You will have to upgrade portage " + "if you want to generate valid manifests for this " + "repository: %(layout_filename)s") % + {"repo_name": repo_name or 'unspecified', + "hash":MANIFEST2_REQUIRED_HASH, + "layout_filename":layout_filename}), + DeprecationWarning) + unsupported_hashes = manifest_hashes.difference( + MANIFEST2_HASH_FUNCTIONS) + if unsupported_hashes: + repo_name = _get_repo_name(repo_location, cached=repo_name) + warnings.warn((_("Repository named '%(repo_name)s' has a " + "'manifest-hashes' setting that contains one " + "or more hash types '%(hashes)s' which are not supported by " + "this portage version. You will have to upgrade " + "portage if you want to generate valid manifests for " + "this repository: %(layout_filename)s") % + {"repo_name": repo_name or 'unspecified', + "hashes":" ".join(sorted(unsupported_hashes)), + "layout_filename":layout_filename}), + DeprecationWarning) + data['manifest-hashes'] = manifest_hashes + + data['update-changelog'] = layout_data.get('update-changelog', 'false').lower() \ + == 'true' + + raw_formats = layout_data.get('profile-formats') + if raw_formats is None: + if eapi in ('4-python',): + raw_formats = ('portage-1',) + else: + raw_formats = ('portage-1-compat',) + else: + raw_formats = set(raw_formats.split()) + unknown = raw_formats.difference(_valid_profile_formats) + if unknown: + repo_name = _get_repo_name(repo_location, cached=repo_name) + warnings.warn((_("Repository named '%(repo_name)s' has unsupported " + "profiles in use ('profile-formats = %(unknown_fmts)s' setting in " + "'%(layout_filename)s; please upgrade portage.") % + dict(repo_name=repo_name or 'unspecified', + layout_filename=layout_filename, + unknown_fmts=" ".join(unknown))), + DeprecationWarning) + raw_formats = tuple(raw_formats.intersection(_valid_profile_formats)) + data['profile-formats'] = raw_formats + + return data, layout_errors diff --git a/portage_with_autodep/pym/portage/repository/config.pyo 
b/portage_with_autodep/pym/portage/repository/config.pyo Binary files differnew file mode 100644 index 0000000..f9ee26d --- /dev/null +++ b/portage_with_autodep/pym/portage/repository/config.pyo diff --git a/portage_with_autodep/pym/portage/tests/__init__.py b/portage_with_autodep/pym/portage/tests/__init__.py index a647aa2..492ece4 100644 --- a/portage_with_autodep/pym/portage/tests/__init__.py +++ b/portage_with_autodep/pym/portage/tests/__init__.py @@ -1,10 +1,13 @@ # tests/__init__.py -- Portage Unit Test functionality -# Copyright 2006-2010 Gentoo Foundation +# Copyright 2006-2011 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 +from __future__ import print_function + import sys import time import unittest +from optparse import OptionParser, OptionValueError try: from unittest.runner import _TextTestResult # new in python-2.7 @@ -16,35 +19,33 @@ from portage import _encodings from portage import _unicode_decode def main(): - - TEST_FILE = b'__test__' - svn_dirname = b'.svn' suite = unittest.TestSuite() basedir = os.path.dirname(os.path.realpath(__file__)) - testDirs = [] - if len(sys.argv) > 1: - suite.addTests(getTestFromCommandLine(sys.argv[1:], basedir)) - return TextTestRunner(verbosity=2).run(suite) + usage = "usage: %s [options] [tests to run]" % os.path.basename(sys.argv[0]) + parser = OptionParser(usage=usage) + parser.add_option("-l", "--list", help="list all tests", + action="store_true", dest="list_tests") + (options, args) = parser.parse_args(args=sys.argv) - # the os.walk help mentions relative paths as being quirky - # I was tired of adding dirs to the list, so now we add __test__ - # to each dir we want tested. 
- for root, dirs, files in os.walk(basedir): - if svn_dirname in dirs: - dirs.remove(svn_dirname) - try: - root = _unicode_decode(root, - encoding=_encodings['fs'], errors='strict') - except UnicodeDecodeError: - continue + if options.list_tests: + testdir = os.path.dirname(sys.argv[0]) + for mydir in getTestDirs(basedir): + testsubdir = os.path.basename(mydir) + for name in getTestNames(mydir): + print("%s/%s/%s.py" % (testdir, testsubdir, name)) + return os.EX_OK - if TEST_FILE in files: - testDirs.append(root) + if len(args) > 1: + suite.addTests(getTestFromCommandLine(args[1:], basedir)) + else: + for mydir in getTestDirs(basedir): + suite.addTests(getTests(os.path.join(basedir, mydir), basedir)) - for mydir in testDirs: - suite.addTests(getTests(os.path.join(basedir, mydir), basedir) ) - return TextTestRunner(verbosity=2).run(suite) + result = TextTestRunner(verbosity=2).run(suite) + if not result.wasSuccessful(): + return 1 + return os.EX_OK def my_import(name): mod = __import__(name) @@ -54,7 +55,7 @@ def my_import(name): return mod def getTestFromCommandLine(args, base_path): - ret = [] + result = [] for arg in args: realpath = os.path.realpath(arg) path = os.path.dirname(realpath) @@ -64,28 +65,39 @@ def getTestFromCommandLine(args, base_path): raise Exception("Invalid argument: '%s'" % arg) mymodule = f[:-3] + result.extend(getTestsFromFiles(path, base_path, [mymodule])) + return result - parent_path = path[len(base_path)+1:] - parent_module = ".".join(("portage", "tests", parent_path)) - parent_module = parent_module.replace('/', '.') - result = [] +def getTestDirs(base_path): + TEST_FILE = b'__test__' + svn_dirname = b'.svn' + testDirs = [] - # Make the trailing / a . 
for module importing - modname = ".".join((parent_module, mymodule)) - mod = my_import(modname) - ret.append(unittest.TestLoader().loadTestsFromModule(mod)) - return ret + # the os.walk help mentions relative paths as being quirky + # I was tired of adding dirs to the list, so now we add __test__ + # to each dir we want tested. + for root, dirs, files in os.walk(base_path): + if svn_dirname in dirs: + dirs.remove(svn_dirname) + try: + root = _unicode_decode(root, + encoding=_encodings['fs'], errors='strict') + except UnicodeDecodeError: + continue -def getTests(path, base_path): - """ + if TEST_FILE in files: + testDirs.append(root) - path is the path to a given subdir ( 'portage/' for example) - This does a simple filter on files in that dir to give us modules - to import + testDirs.sort() + return testDirs - """ +def getTestNames(path): files = os.listdir(path) files = [ f[:-3] for f in files if f.startswith("test") and f.endswith(".py") ] + files.sort() + return files + +def getTestsFromFiles(path, base_path, files): parent_path = path[len(base_path)+1:] parent_module = ".".join(("portage", "tests", parent_path)) parent_module = parent_module.replace('/', '.') @@ -97,6 +109,16 @@ def getTests(path, base_path): result.append(unittest.TestLoader().loadTestsFromModule(mod)) return result +def getTests(path, base_path): + """ + + path is the path to a given subdir ( 'portage/' for example) + This does a simple filter on files in that dir to give us modules + to import + + """ + return getTestsFromFiles(path, base_path, getTestNames(path)) + class TextTestResult(_TextTestResult): """ We need a subclass of unittest._TextTestResult to handle tests with TODO @@ -109,6 +131,7 @@ class TextTestResult(_TextTestResult): def __init__(self, stream, descriptions, verbosity): super(TextTestResult, self).__init__(stream, descriptions, verbosity) self.todoed = [] + self.portage_skipped = [] def addTodo(self, test, info): self.todoed.append((test,info)) @@ -117,12 +140,20 @@ class 
TextTestResult(_TextTestResult): elif self.dots: self.stream.write(".") + def addPortageSkip(self, test, info): + self.portage_skipped.append((test,info)) + if self.showAll: + self.stream.writeln("SKIP") + elif self.dots: + self.stream.write(".") + def printErrors(self): if self.dots or self.showAll: self.stream.writeln() self.printErrorList('ERROR', self.errors) self.printErrorList('FAIL', self.failures) self.printErrorList('TODO', self.todoed) + self.printErrorList('SKIP', self.portage_skipped) class TestCase(unittest.TestCase): """ @@ -131,15 +162,12 @@ class TestCase(unittest.TestCase): and then fix the code later. This may not be a great approach (broken code!!??!11oneone) but it does happen at times. """ - - def __init__(self, methodName='runTest'): - # This method exists because unittest.py in python 2.4 stores - # the methodName as __testMethodName while 2.5 uses - # _testMethodName. - self._testMethodName = methodName - unittest.TestCase.__init__(self, methodName) + + def __init__(self, *pargs, **kwargs): + unittest.TestCase.__init__(self, *pargs, **kwargs) self.todo = False - + self.portage_skip = None + def defaultTestResult(self): return TextTestResult() @@ -162,7 +190,13 @@ class TestCase(unittest.TestCase): testMethod() ok = True except self.failureException: - if self.todo: + if self.portage_skip is not None: + if self.portage_skip is True: + result.addPortageSkip(self, "%s: SKIP" % testMethod) + else: + result.addPortageSkip(self, "%s: SKIP: %s" % + (testMethod, self.portage_skip)) + elif self.todo: result.addTodo(self,"%s: TODO" % testMethod) else: result.addFailure(self, sys.exc_info()) @@ -192,21 +226,21 @@ class TestCase(unittest.TestCase): unexpected exception. 
""" try: - callableObj(*args, **kwargs) + callableObj(*args, **kwargs) except excClass: - return + return else: - if hasattr(excClass,'__name__'): excName = excClass.__name__ - else: excName = str(excClass) - raise self.failureException("%s not raised: %s" % (excName, msg)) - + if hasattr(excClass,'__name__'): excName = excClass.__name__ + else: excName = str(excClass) + raise self.failureException("%s not raised: %s" % (excName, msg)) + class TextTestRunner(unittest.TextTestRunner): """ We subclass unittest.TextTestRunner to output SKIP for tests that fail but are skippable """ - + def _makeResult(self): - return TextTestResult(self.stream, self.descriptions, self.verbosity) + return TextTestResult(self.stream, self.descriptions, self.verbosity) def run(self, test): """ @@ -236,7 +270,7 @@ class TextTestRunner(unittest.TextTestRunner): else: self.stream.writeln("OK") return result - + test_cps = ['sys-apps/portage','virtual/portage'] test_versions = ['1.0', '1.0-r1','2.3_p4','1.0_alpha57'] test_slots = [ None, '1','gentoo-sources-2.6.17','spankywashere'] diff --git a/portage_with_autodep/pym/portage/tests/__init__.pyo b/portage_with_autodep/pym/portage/tests/__init__.pyo Binary files differnew file mode 100644 index 0000000..0e961b8 --- /dev/null +++ b/portage_with_autodep/pym/portage/tests/__init__.pyo diff --git a/portage_with_autodep/pym/portage/tests/bin/__init__.py b/portage_with_autodep/pym/portage/tests/bin/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/bin/__init__.py +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/bin/__test__ b/portage_with_autodep/pym/portage/tests/bin/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/bin/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/bin/setup_env.py b/portage_with_autodep/pym/portage/tests/bin/setup_env.py deleted file mode 100644 index e07643d..0000000 --- 
a/portage_with_autodep/pym/portage/tests/bin/setup_env.py +++ /dev/null @@ -1,85 +0,0 @@ -# setup_env.py -- Make sure bin subdir has sane env for testing -# Copyright 2007-2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import tempfile - -from portage import os -from portage import shutil -from portage.tests import TestCase -from portage.process import spawn - -basepath = os.path.join(os.path.dirname(os.path.dirname( - os.path.abspath(__file__))), - "..", "..", "..") -bindir = os.path.join(basepath, "bin") -pymdir = os.path.join(basepath, "pym") -basedir = None -env = None - -def binTestsCleanup(): - global basedir - if basedir is None: - return - if os.access(basedir, os.W_OK): - shutil.rmtree(basedir) - basedir = None - -def binTestsInit(): - binTestsCleanup() - global basedir, env - basedir = tempfile.mkdtemp() - env = os.environ.copy() - env["D"] = os.path.join(basedir, "image") - env["T"] = os.path.join(basedir, "temp") - env["S"] = os.path.join(basedir, "workdir") - env["PF"] = "portage-tests-0.09-r1" - env["PATH"] = bindir + ":" + env["PATH"] - env["PORTAGE_BIN_PATH"] = bindir - env["PORTAGE_PYM_PATH"] = pymdir - os.mkdir(env["D"]) - os.mkdir(env["T"]) - os.mkdir(env["S"]) - -class BinTestCase(TestCase): - def init(self): - binTestsInit() - def cleanup(self): - binTestsCleanup() - -def _exists_in_D(path): - # Note: do not use os.path.join() here, we assume D to end in / - return os.access(env["D"] + path, os.W_OK) -def exists_in_D(path): - if not _exists_in_D(path): - raise TestCase.failureException -def xexists_in_D(path): - if _exists_in_D(path): - raise TestCase.failureException - -def portage_func(func, args, exit_status=0): - # we don't care about the output of the programs, - # just their exit value and the state of $D - global env - f = open('/dev/null', 'wb') - fd_pipes = {0:0,1:f.fileno(),2:f.fileno()} - def pre_exec(): - os.chdir(env["S"]) - spawn([func] + args.split(), env=env, - fd_pipes=fd_pipes, 
pre_exec=pre_exec) - f.close() - -def create_portage_wrapper(bin): - def derived_func(*args): - newargs = list(args) - newargs.insert(0, bin) - return portage_func(*newargs) - return derived_func - -for bin in os.listdir(os.path.join(bindir, "ebuild-helpers")): - if bin.startswith("do") or \ - bin.startswith("new") or \ - bin.startswith("prep") or \ - bin in ["ecompress","ecompressdir","fowners","fperms"]: - globals()[bin] = create_portage_wrapper( - os.path.join(bindir, "ebuild-helpers", bin)) diff --git a/portage_with_autodep/pym/portage/tests/bin/test_dobin.py b/portage_with_autodep/pym/portage/tests/bin/test_dobin.py deleted file mode 100644 index 6f50d7a..0000000 --- a/portage_with_autodep/pym/portage/tests/bin/test_dobin.py +++ /dev/null @@ -1,16 +0,0 @@ -# test_dobin.py -- Portage Unit Testing Functionality -# Copyright 2007-2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests.bin.setup_env import BinTestCase, dobin, xexists_in_D - -class DoBin(BinTestCase): - def testDoBin(self): - self.init() - try: - dobin("does-not-exist", 1) - xexists_in_D("does-not-exist") - xexists_in_D("/bin/does-not-exist") - xexists_in_D("/usr/bin/does-not-exist") - finally: - self.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/bin/test_dodir.py b/portage_with_autodep/pym/portage/tests/bin/test_dodir.py deleted file mode 100644 index f4eb9b2..0000000 --- a/portage_with_autodep/pym/portage/tests/bin/test_dodir.py +++ /dev/null @@ -1,16 +0,0 @@ -# test_dodir.py -- Portage Unit Testing Functionality -# Copyright 2007-2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests.bin.setup_env import BinTestCase, dodir, exists_in_D - -class DoDir(BinTestCase): - def testDoDir(self): - self.init() - try: - dodir("usr /usr") - exists_in_D("/usr") - dodir("/var/lib/moocow") - exists_in_D("/var/lib/moocow") - finally: - self.cleanup() diff --git 
a/portage_with_autodep/pym/portage/tests/dbapi/__test__ b/portage_with_autodep/pym/portage/tests/dbapi/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/dbapi/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/dbapi/test_fakedbapi.py b/portage_with_autodep/pym/portage/tests/dbapi/test_fakedbapi.py deleted file mode 100644 index a2c5f77..0000000 --- a/portage_with_autodep/pym/portage/tests/dbapi/test_fakedbapi.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import shutil -import tempfile - -from portage import os -from portage.dbapi.virtual import fakedbapi -from portage.package.ebuild.config import config -from portage.tests import TestCase - -class TestFakedbapi(TestCase): - - def testFakedbapi(self): - packages = ( - ("sys-apps/portage-2.1.10", { - "EAPI" : "2", - "IUSE" : "ipc doc", - "repository" : "gentoo", - "SLOT" : "0", - "USE" : "ipc missing-iuse", - }), - ("virtual/package-manager-0", { - "EAPI" : "0", - "repository" : "gentoo", - "SLOT" : "0", - }), - ) - - match_tests = ( - ("sys-apps/portage:0[ipc]", ["sys-apps/portage-2.1.10"]), - ("sys-apps/portage:0[-ipc]", []), - ("sys-apps/portage:0[doc]", []), - ("sys-apps/portage:0[-doc]", ["sys-apps/portage-2.1.10"]), - ("sys-apps/portage:0", ["sys-apps/portage-2.1.10"]), - ("sys-apps/portage:0[missing-iuse]", []), - ("sys-apps/portage:0[-missing-iuse]", []), - ("sys-apps/portage:0::gentoo[ipc]", ["sys-apps/portage-2.1.10"]), - ("sys-apps/portage:0::multilib[ipc]", []), - ("virtual/package-manager", ["virtual/package-manager-0"]), - ) - - tempdir = tempfile.mkdtemp() - try: - portdir = os.path.join(tempdir, "usr/portage") - os.makedirs(portdir) - env = { - "PORTDIR": portdir, - } - fakedb = fakedbapi(settings=config(config_profile_path="", - env=env, _eprefix=tempdir)) - for cpv, metadata in packages: - fakedb.cpv_inject(cpv, metadata=metadata) 
- - for atom, expected_result in match_tests: - self.assertEqual( fakedb.match(atom), expected_result ) - finally: - shutil.rmtree(tempdir) diff --git a/portage_with_autodep/pym/portage/tests/dep/__init__.py b/portage_with_autodep/pym/portage/tests/dep/__init__.py deleted file mode 100644 index 9c3f524..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# tests/portage.dep/__init__.py -- Portage Unit Test functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 diff --git a/portage_with_autodep/pym/portage/tests/dep/__test__ b/portage_with_autodep/pym/portage/tests/dep/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/dep/testAtom.py b/portage_with_autodep/pym/portage/tests/dep/testAtom.py deleted file mode 100644 index 092cacf..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/testAtom.py +++ /dev/null @@ -1,315 +0,0 @@ -# Copyright 2006, 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import Atom -from portage.exception import InvalidAtom - -class TestAtom(TestCase): - - def testAtom(self): - - tests = ( - ( "=sys-apps/portage-2.1-r1:0[doc,a=,!b=,c?,!d?,-e]", - ('=', 'sys-apps/portage', '2.1-r1', '0', '[doc,a=,!b=,c?,!d?,-e]', None), False, False ), - ( "=sys-apps/portage-2.1-r1*:0[doc]", - ('=*', 'sys-apps/portage', '2.1-r1', '0', '[doc]', None), False, False ), - ( "sys-apps/portage:0[doc]", - (None, 'sys-apps/portage', None, '0', '[doc]', None), False, False ), - ( "sys-apps/portage:0[doc]", - (None, 'sys-apps/portage', None, '0', '[doc]', None), False, False ), - ( "*/*", - (None, '*/*', None, None, None, None), True, False ), - ( "sys-apps/*", - (None, 'sys-apps/*', None, None, None, None), True, False ), - ( 
"*/portage", - (None, '*/portage', None, None, None, None), True, False ), - ( "s*s-*/portage:1", - (None, 's*s-*/portage', None, '1', None, None), True, False ), - ( "*/po*ge:2", - (None, '*/po*ge', None, '2', None, None), True, False ), - ( "!dev-libs/A", - (None, 'dev-libs/A', None, None, None, None), True, True ), - ( "!!dev-libs/A", - (None, 'dev-libs/A', None, None, None, None), True, True ), - ( "!!dev-libs/A", - (None, 'dev-libs/A', None, None, None, None), True, True ), - ( "dev-libs/A[foo(+)]", - (None, 'dev-libs/A', None, None, "[foo(+)]", None), True, True ), - ( "dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", - (None, 'dev-libs/A', None, None, "[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", None), True, True ), - ( "dev-libs/A:2[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", - (None, 'dev-libs/A', None, "2", "[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", None), True, True ), - - ( "=sys-apps/portage-2.1-r1:0::repo_name[doc,a=,!b=,c?,!d?,-e]", - ('=', 'sys-apps/portage', '2.1-r1', '0', '[doc,a=,!b=,c?,!d?,-e]', 'repo_name'), False, True ), - ( "=sys-apps/portage-2.1-r1*:0::repo_name[doc]", - ('=*', 'sys-apps/portage', '2.1-r1', '0', '[doc]', 'repo_name'), False, True ), - ( "sys-apps/portage:0::repo_name[doc]", - (None, 'sys-apps/portage', None, '0', '[doc]', 'repo_name'), False, True ), - - ( "*/*::repo_name", - (None, '*/*', None, None, None, 'repo_name'), True, True ), - ( "sys-apps/*::repo_name", - (None, 'sys-apps/*', None, None, None, 'repo_name'), True, True ), - ( "*/portage::repo_name", - (None, '*/portage', None, None, None, 'repo_name'), True, True ), - ( "s*s-*/portage:1::repo_name", - (None, 's*s-*/portage', None, '1', None, 'repo_name'), True, True ), - ) - - tests_xfail = ( - ( Atom("sys-apps/portage"), False, False ), - ( "cat/pkg[a!]", False, False ), - ( "cat/pkg[!a]", False, False ), - ( "cat/pkg[!a!]", False, False ), - ( "cat/pkg[!a-]", False, False ), - ( "cat/pkg[-a=]", False, False ), - ( "cat/pkg[-a?]", False, False ), - ( "cat/pkg[-a!]", 
False, False ), - ( "cat/pkg[=a]", False, False ), - ( "cat/pkg[=a=]", False, False ), - ( "cat/pkg[=a?]", False, False ), - ( "cat/pkg[=a!]", False, False ), - ( "cat/pkg[=a-]", False, False ), - ( "cat/pkg[?a]", False, False ), - ( "cat/pkg[?a=]", False, False ), - ( "cat/pkg[?a?]", False, False ), - ( "cat/pkg[?a!]", False, False ), - ( "cat/pkg[?a-]", False, False ), - ( "sys-apps/portage[doc]:0", False, False ), - ( "*/*", False, False ), - ( "sys-apps/*", False, False ), - ( "*/portage", False, False ), - ( "*/**", True, False ), - ( "*/portage[use]", True, False ), - ( "cat/pkg[a()]", False, False ), - ( "cat/pkg[a(]", False, False ), - ( "cat/pkg[a)]", False, False ), - ( "cat/pkg[a(,b]", False, False ), - ( "cat/pkg[a),b]", False, False ), - ( "cat/pkg[a(*)]", False, False ), - ( "cat/pkg[a(*)]", True, False ), - ( "cat/pkg[a(+-)]", False, False ), - ( "cat/pkg[a()]", False, False ), - ( "cat/pkg[(+)a]", False, False ), - ( "cat/pkg[a=(+)]", False, False ), - ( "cat/pkg[!(+)a=]", False, False ), - ( "cat/pkg[!a=(+)]", False, False ), - ( "cat/pkg[a?(+)]", False, False ), - ( "cat/pkg[!a?(+)]", False, False ), - ( "cat/pkg[!(+)a?]", False, False ), - ( "cat/pkg[-(+)a]", False, False ), - ( "cat/pkg[a(+),-a]", False, False ), - ( "cat/pkg[a(-),-a]", False, False ), - ( "cat/pkg[-a,a(+)]", False, False ), - ( "cat/pkg[-a,a(-)]", False, False ), - ( "cat/pkg[-a(+),a(-)]", False, False ), - ( "cat/pkg[-a(-),a(+)]", False, False ), - ( "sys-apps/portage[doc]::repo_name", False, False ), - ( "sys-apps/portage:0[doc]::repo_name", False, False ), - ( "sys-apps/portage[doc]:0::repo_name", False, False ), - ( "=sys-apps/portage-2.1-r1:0::repo_name[doc,a=,!b=,c?,!d?,-e]", False, False ), - ( "=sys-apps/portage-2.1-r1*:0::repo_name[doc]", False, False ), - ( "sys-apps/portage:0::repo_name[doc]", False, False ), - ( "*/*::repo_name", True, False ), - ) - - for atom, parts, allow_wildcard, allow_repo in tests: - a = Atom(atom, allow_wildcard=allow_wildcard, 
allow_repo=allow_repo) - op, cp, ver, slot, use, repo = parts - self.assertEqual( op, a.operator, - msg="Atom('%s').operator = %s == '%s'" % ( atom, a.operator, op ) ) - self.assertEqual( cp, a.cp, - msg="Atom('%s').cp = %s == '%s'" % ( atom, a.cp, cp ) ) - if ver is not None: - cpv = "%s-%s" % (cp, ver) - else: - cpv = cp - self.assertEqual( cpv, a.cpv, - msg="Atom('%s').cpv = %s == '%s'" % ( atom, a.cpv, cpv ) ) - self.assertEqual( slot, a.slot, - msg="Atom('%s').slot = %s == '%s'" % ( atom, a.slot, slot ) ) - self.assertEqual( repo, a.repo, - msg="Atom('%s').repo == %s == '%s'" % ( atom, a.repo, repo ) ) - - if a.use: - returned_use = str(a.use) - else: - returned_use = None - self.assertEqual( use, returned_use, - msg="Atom('%s').use = %s == '%s'" % ( atom, returned_use, use ) ) - - for atom, allow_wildcard, allow_repo in tests_xfail: - self.assertRaisesMsg(atom, (InvalidAtom, TypeError), Atom, atom, \ - allow_wildcard=allow_wildcard, allow_repo=allow_repo) - - def test_intersects(self): - test_cases = ( - ("dev-libs/A", "dev-libs/A", True), - ("dev-libs/A", "dev-libs/B", False), - ("dev-libs/A", "sci-libs/A", False), - ("dev-libs/A[foo]", "sci-libs/A[bar]", False), - ("dev-libs/A[foo(+)]", "sci-libs/A[foo(-)]", False), - ("=dev-libs/A-1", "=dev-libs/A-1-r1", False), - ("~dev-libs/A-1", "=dev-libs/A-1", False), - ("=dev-libs/A-1:1", "=dev-libs/A-1", True), - ("=dev-libs/A-1:1", "=dev-libs/A-1:1", True), - ("=dev-libs/A-1:1", "=dev-libs/A-1:2", False), - ) - - for atom, other, expected_result in test_cases: - self.assertEqual(Atom(atom).intersects(Atom(other)), expected_result, \ - "%s and %s should intersect: %s" % (atom, other, expected_result)) - - def test_violated_conditionals(self): - test_cases = ( - ("dev-libs/A", ["foo"], ["foo"], None, "dev-libs/A"), - ("dev-libs/A[foo]", [], ["foo"], None, "dev-libs/A[foo]"), - ("dev-libs/A[foo]", ["foo"], ["foo"], None, "dev-libs/A"), - ("dev-libs/A[foo]", [], ["foo"], [], "dev-libs/A[foo]"), - ("dev-libs/A[foo]", 
["foo"], ["foo"], [], "dev-libs/A"), - - ("dev-libs/A:0[foo]", ["foo"], ["foo"], [], "dev-libs/A:0"), - - ("dev-libs/A[foo,-bar]", [], ["foo", "bar"], None, "dev-libs/A[foo]"), - ("dev-libs/A[-foo,bar]", [], ["foo", "bar"], None, "dev-libs/A[bar]"), - - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", [], ["a", "b", "c", "d", "e", "f"], [], "dev-libs/A[a,!c=]"), - - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["a"], ["a", "b", "c", "d", "e", "f"], [], "dev-libs/A[!c=]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["b"], ["a", "b", "c", "d", "e", "f"], [], "dev-libs/A[a,b=,!c=]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["c"], ["a", "b", "c", "d", "e", "f"], [], "dev-libs/A[a]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["d"], ["a", "b", "c", "d", "e", "f"], [], "dev-libs/A[a,!c=]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["e"], ["a", "b", "c", "d", "e", "f"], [], "dev-libs/A[a,!c=,!e?]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["f"], ["a", "b", "c", "d", "e", "f"], [], "dev-libs/A[a,!c=,-f]"), - - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["a"], ["a", "b", "c", "d", "e", "f"], ["a"], "dev-libs/A[!c=]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["b"], ["a", "b", "c", "d", "e", "f"], ["b"], "dev-libs/A[a,!c=]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["c"], ["a", "b", "c", "d", "e", "f"], ["c"], "dev-libs/A[a,!c=]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["d"], ["a", "b", "c", "d", "e", "f"], ["d"], "dev-libs/A[a,!c=]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["e"], ["a", "b", "c", "d", "e", "f"], ["e"], "dev-libs/A[a,!c=]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["f"], ["a", "b", "c", "d", "e", "f"], ["f"], "dev-libs/A[a,!c=,-f]"), - - ("dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", ["a"], ["a", "b", "c", "d", "e", "f"], ["a"], "dev-libs/A[!c(+)=]"), - ("dev-libs/A[a(-),b(+)=,!c(-)=,d(+)?,!e(-)?,-f(+)]", ["b"], ["a", "b", "c", "d", "e", "f"], ["b"], "dev-libs/A[a(-),!c(-)=]"), - ("dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", ["c"], ["a", "b", "c", "d", "e", "f"], ["c"], "dev-libs/A[a(+),!c(+)=]"), - 
("dev-libs/A[a(-),b(+)=,!c(-)=,d(+)?,!e(-)?,-f(+)]", ["d"], ["a", "b", "c", "d", "e", "f"], ["d"], "dev-libs/A[a(-),!c(-)=]"), - ("dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", ["e"], ["a", "b", "c", "d", "e", "f"], ["e"], "dev-libs/A[a(+),!c(+)=]"), - ("dev-libs/A[a(-),b(+)=,!c(-)=,d(+)?,!e(-)?,-f(+)]", ["f"], ["a", "b", "c", "d", "e", "f"], ["f"], "dev-libs/A[a(-),!c(-)=,-f(+)]"), - - ("dev-libs/A[a(+),b(+)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", ["a"], ["a"], ["a"], "dev-libs/A[b(+)=,!e(+)?]"), - ("dev-libs/A[a(-),b(+)=,!c(-)=,d(+)?,!e(-)?,-f(+)]", ["b"], ["b"], ["b"], "dev-libs/A[a(-),!c(-)=,-f(+)]"), - ("dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", ["c"], ["c"], ["c"], "dev-libs/A[!c(+)=,!e(+)?]"), - ("dev-libs/A[a(-),b(+)=,!c(-)=,d(+)?,!e(-)?,-f(+)]", ["d"], ["d"], ["d"], "dev-libs/A[a(-),b(+)=,!c(-)=,-f(+)]"), - ("dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", ["e"], ["e"], ["e"], "dev-libs/A"), - ("dev-libs/A[a(-),b(+)=,!c(-)=,d(+)?,!e(-)?,-f(+)]", ["f"], ["f"], ["f"], "dev-libs/A[a(-),b(+)=,!c(-)=,-f(+)]"), - - #Some more test cases to trigger all remaining code paths - ("dev-libs/B[x?]", [], ["x"], ["x"], "dev-libs/B[x?]"), - ("dev-libs/B[x(+)?]", [], [], ["x"], "dev-libs/B"), - ("dev-libs/B[x(-)?]", [], [], ["x"], "dev-libs/B[x(-)?]"), - - ("dev-libs/C[x=]", [], ["x"], ["x"], "dev-libs/C[x=]"), - ("dev-libs/C[x(+)=]", [], [], ["x"], "dev-libs/C"), - ("dev-libs/C[x(-)=]", [], [], ["x"], "dev-libs/C[x(-)=]"), - - ("dev-libs/D[!x=]", [], ["x"], ["x"], "dev-libs/D"), - ("dev-libs/D[!x(+)=]", [], [], ["x"], "dev-libs/D[!x(+)=]"), - ("dev-libs/D[!x(-)=]", [], [], ["x"], "dev-libs/D"), - - #Missing IUSE test cases - ("dev-libs/B[x]", [], [], [], "dev-libs/B[x]"), - ("dev-libs/B[-x]", [], [], [], "dev-libs/B[-x]"), - ("dev-libs/B[x?]", [], [], [], "dev-libs/B[x?]"), - ("dev-libs/B[x=]", [], [], [], "dev-libs/B[x=]"), - ("dev-libs/B[!x=]", [], [], ["x"], "dev-libs/B[!x=]"), - ("dev-libs/B[!x?]", [], [], ["x"], "dev-libs/B[!x?]"), - ) - - test_cases_xfail = ( 
- ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", [], ["a", "b", "c", "d", "e", "f"], None), - ) - - class use_flag_validator(object): - def __init__(self, iuse): - self.iuse = iuse - - def is_valid_flag(self, flag): - return flag in iuse - - for atom, other_use, iuse, parent_use, expected_violated_atom in test_cases: - a = Atom(atom) - validator = use_flag_validator(iuse) - violated_atom = a.violated_conditionals(other_use, validator.is_valid_flag, parent_use) - if parent_use is None: - fail_msg = "Atom: %s, other_use: %s, iuse: %s, parent_use: %s, got: %s, expected: %s" % \ - (atom, " ".join(other_use), " ".join(iuse), "None", str(violated_atom), expected_violated_atom) - else: - fail_msg = "Atom: %s, other_use: %s, iuse: %s, parent_use: %s, got: %s, expected: %s" % \ - (atom, " ".join(other_use), " ".join(iuse), " ".join(parent_use), str(violated_atom), expected_violated_atom) - self.assertEqual(str(violated_atom), expected_violated_atom, fail_msg) - - for atom, other_use, iuse, parent_use in test_cases_xfail: - a = Atom(atom) - validator = use_flag_validator(iuse) - self.assertRaisesMsg(atom, InvalidAtom, \ - a.violated_conditionals, other_use, validator.is_valid_flag, parent_use) - - def test_evaluate_conditionals(self): - test_cases = ( - ("dev-libs/A[foo]", [], "dev-libs/A[foo]"), - ("dev-libs/A[foo]", ["foo"], "dev-libs/A[foo]"), - - ("dev-libs/A:0[foo=]", ["foo"], "dev-libs/A:0[foo]"), - - ("dev-libs/A[foo,-bar]", [], "dev-libs/A[foo,-bar]"), - ("dev-libs/A[-foo,bar]", [], "dev-libs/A[-foo,bar]"), - - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", [], "dev-libs/A[a,-b,c,-e,-f]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["a"], "dev-libs/A[a,-b,c,-e,-f]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["b"], "dev-libs/A[a,b,c,-e,-f]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["c"], "dev-libs/A[a,-b,-c,-e,-f]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["d"], "dev-libs/A[a,-b,c,d,-e,-f]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["e"], "dev-libs/A[a,-b,c,-f]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["f"], 
"dev-libs/A[a,-b,c,-e,-f]"), - ("dev-libs/A[a(-),b(+)=,!c(-)=,d(+)?,!e(-)?,-f(+)]", ["d"], "dev-libs/A[a(-),-b(+),c(-),d(+),-e(-),-f(+)]"), - ("dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", ["f"], "dev-libs/A[a(+),-b(-),c(+),-e(+),-f(-)]"), - ) - - for atom, use, expected_atom in test_cases: - a = Atom(atom) - b = a.evaluate_conditionals(use) - self.assertEqual(str(b), expected_atom) - self.assertEqual(str(b.unevaluated_atom), atom) - - def test__eval_qa_conditionals(self): - test_cases = ( - ("dev-libs/A[foo]", [], [], "dev-libs/A[foo]"), - ("dev-libs/A[foo]", ["foo"], [], "dev-libs/A[foo]"), - ("dev-libs/A[foo]", [], ["foo"], "dev-libs/A[foo]"), - - ("dev-libs/A:0[foo]", [], [], "dev-libs/A:0[foo]"), - ("dev-libs/A:0[foo]", ["foo"], [], "dev-libs/A:0[foo]"), - ("dev-libs/A:0[foo]", [], ["foo"], "dev-libs/A:0[foo]"), - ("dev-libs/A:0[foo=]", [], ["foo"], "dev-libs/A:0[foo]"), - - ("dev-libs/A[foo,-bar]", ["foo"], ["bar"], "dev-libs/A[foo,-bar]"), - ("dev-libs/A[-foo,bar]", ["foo", "bar"], [], "dev-libs/A[-foo,bar]"), - - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["a", "b", "c"], [], "dev-libs/A[a,-b,c,d,-e,-f]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", [], ["a", "b", "c"], "dev-libs/A[a,b,-c,d,-e,-f]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", ["d", "e", "f"], [], "dev-libs/A[a,b,-b,c,-c,-e,-f]"), - ("dev-libs/A[a,b=,!c=,d?,!e?,-f]", [], ["d", "e", "f"], "dev-libs/A[a,b,-b,c,-c,d,-f]"), - - ("dev-libs/A[a(-),b(+)=,!c(-)=,d(+)?,!e(-)?,-f(+)]", \ - ["a", "b", "c", "d", "e", "f"], [], "dev-libs/A[a(-),-b(+),c(-),-e(-),-f(+)]"), - ("dev-libs/A[a(+),b(-)=,!c(+)=,d(-)?,!e(+)?,-f(-)]", \ - [], ["a", "b", "c", "d", "e", "f"], "dev-libs/A[a(+),b(-),-c(+),d(-),-f(-)]"), - ) - - for atom, use_mask, use_force, expected_atom in test_cases: - a = Atom(atom) - b = a._eval_qa_conditionals(use_mask, use_force) - self.assertEqual(str(b), expected_atom) - self.assertEqual(str(b.unevaluated_atom), atom) diff --git a/portage_with_autodep/pym/portage/tests/dep/testCheckRequiredUse.py 
b/portage_with_autodep/pym/portage/tests/dep/testCheckRequiredUse.py deleted file mode 100644 index 54791e0..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/testCheckRequiredUse.py +++ /dev/null @@ -1,219 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import check_required_use -from portage.exception import InvalidDependString - -class TestCheckRequiredUse(TestCase): - - def testCheckRequiredUse(self): - test_cases = ( - ( "|| ( a b )", [], ["a", "b"], False), - ( "|| ( a b )", ["a"], ["a", "b"], True), - ( "|| ( a b )", ["b"], ["a", "b"], True), - ( "|| ( a b )", ["a", "b"], ["a", "b"], True), - - ( "^^ ( a b )", [], ["a", "b"], False), - ( "^^ ( a b )", ["a"], ["a", "b"], True), - ( "^^ ( a b )", ["b"], ["a", "b"], True), - ( "^^ ( a b )", ["a", "b"], ["a", "b"], False), - - ( "^^ ( || ( a b ) c )", [], ["a", "b", "c"], False), - ( "^^ ( || ( a b ) c )", ["a"], ["a", "b", "c"], True), - - ( "^^ ( || ( ( a b ) ) ( c ) )", [], ["a", "b", "c"], False), - ( "( ^^ ( ( || ( ( a ) ( b ) ) ) ( ( c ) ) ) )", ["a"], ["a", "b", "c"], True), - - ( "a || ( b c )", ["a"], ["a", "b", "c"], False), - ( "|| ( b c ) a", ["a"], ["a", "b", "c"], False), - - ( "|| ( a b c )", ["a"], ["a", "b", "c"], True), - ( "|| ( a b c )", ["b"], ["a", "b", "c"], True), - ( "|| ( a b c )", ["c"], ["a", "b", "c"], True), - - ( "^^ ( a b c )", ["a"], ["a", "b", "c"], True), - ( "^^ ( a b c )", ["b"], ["a", "b", "c"], True), - ( "^^ ( a b c )", ["c"], ["a", "b", "c"], True), - ( "^^ ( a b c )", ["a", "b"], ["a", "b", "c"], False), - ( "^^ ( a b c )", ["b", "c"], ["a", "b", "c"], False), - ( "^^ ( a b c )", ["a", "c"], ["a", "b", "c"], False), - ( "^^ ( a b c )", ["a", "b", "c"], ["a", "b", "c"], False), - - ( "a? ( ^^ ( b c ) )", [], ["a", "b", "c"], True), - ( "a? ( ^^ ( b c ) )", ["a"], ["a", "b", "c"], False), - ( "a? 
( ^^ ( b c ) )", ["b"], ["a", "b", "c"], True), - ( "a? ( ^^ ( b c ) )", ["c"], ["a", "b", "c"], True), - ( "a? ( ^^ ( b c ) )", ["a", "b"], ["a", "b", "c"], True), - ( "a? ( ^^ ( b c ) )", ["a", "b", "c"], ["a", "b", "c"], False), - - ( "^^ ( a? ( !b ) !c? ( d ) )", [], ["a", "b", "c", "d"], False), - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a"], ["a", "b", "c", "d"], True), - ( "^^ ( a? ( !b ) !c? ( d ) )", ["c"], ["a", "b", "c", "d"], True), - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a", "c"], ["a", "b", "c", "d"], True), - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a", "b", "c"], ["a", "b", "c", "d"], False), - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a", "b", "d"], ["a", "b", "c", "d"], True), - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a", "b", "d"], ["a", "b", "c", "d"], True), - ( "^^ ( a? ( !b ) !c? ( d ) )", ["a", "d"], ["a", "b", "c", "d"], False), - - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"], False), - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["a"], ["a", "b", "c"], True), - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["b"], ["a", "b", "c"], True), - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["c"], ["a", "b", "c"], True), - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["a", "b"], ["a", "b", "c"], True), - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["a", "c"], ["a", "b", "c"], True), - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["b", "c"], ["a", "b", "c"], True), - ( "|| ( ^^ ( a b ) ^^ ( b c ) )", ["a", "b", "c"], ["a", "b", "c"], False), - - ( "^^ ( || ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"], False), - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["a"], ["a", "b", "c"], True), - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["b"], ["a", "b", "c"], False), - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["c"], ["a", "b", "c"], True), - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["a", "b"], ["a", "b", "c"], False), - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["a", "c"], ["a", "b", "c"], False), - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["b", "c"], ["a", "b", "c"], True), - ( "^^ ( || ( a b ) ^^ ( b c ) )", ["a", "b", "c"], ["a", "b", "c"], True), - - ( "|| ( ( a b ) c )", ["a", "b", 
"c"], ["a", "b", "c"], True), - ( "|| ( ( a b ) c )", ["b", "c"], ["a", "b", "c"], True), - ( "|| ( ( a b ) c )", ["a", "c"], ["a", "b", "c"], True), - ( "|| ( ( a b ) c )", ["a", "b"], ["a", "b", "c"], True), - ( "|| ( ( a b ) c )", ["a"], ["a", "b", "c"], False), - ( "|| ( ( a b ) c )", ["b"], ["a", "b", "c"], False), - ( "|| ( ( a b ) c )", ["c"], ["a", "b", "c"], True), - ( "|| ( ( a b ) c )", [], ["a", "b", "c"], False), - - ( "^^ ( ( a b ) c )", ["a", "b", "c"], ["a", "b", "c"], False), - ( "^^ ( ( a b ) c )", ["b", "c"], ["a", "b", "c"], True), - ( "^^ ( ( a b ) c )", ["a", "c"], ["a", "b", "c"], True), - ( "^^ ( ( a b ) c )", ["a", "b"], ["a", "b", "c"], True), - ( "^^ ( ( a b ) c )", ["a"], ["a", "b", "c"], False), - ( "^^ ( ( a b ) c )", ["b"], ["a", "b", "c"], False), - ( "^^ ( ( a b ) c )", ["c"], ["a", "b", "c"], True), - ( "^^ ( ( a b ) c )", [], ["a", "b", "c"], False), - ) - - test_cases_xfail = ( - ( "^^ ( || ( a b ) ^^ ( b c ) )", [], ["a", "b"]), - ( "^^ ( || ( a b ) ^^ ( b c )", [], ["a", "b", "c"]), - ( "^^( || ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"]), - ( "^^ || ( a b ) ^^ ( b c )", [], ["a", "b", "c"]), - ( "^^ ( ( || ) ( a b ) ^^ ( b c ) )", [], ["a", "b", "c"]), - ( "^^ ( || ( a b ) ) ^^ ( b c ) )", [], ["a", "b", "c"]), - ) - - for required_use, use, iuse, expected in test_cases: - self.assertEqual(bool(check_required_use(required_use, use, iuse.__contains__)), \ - expected, required_use + ", USE = " + " ".join(use)) - - for required_use, use, iuse in test_cases_xfail: - self.assertRaisesMsg(required_use + ", USE = " + " ".join(use), \ - InvalidDependString, check_required_use, required_use, use, iuse.__contains__) - - def testCheckRequiredUseFilterSatisfied(self): - """ - Test filtering of satisfied parts of REQUIRED_USE, - in order to reduce noise for bug #353234. - """ - test_cases = ( - ( - "bindist? ( !amr !faac !win32codecs ) cdio? ( !cdparanoia !cddb ) dvdnav? ( dvd )", - ("cdio", "cdparanoia"), - "cdio? 
( !cdparanoia )" - ), - ( - "|| ( !amr !faac !win32codecs ) cdio? ( !cdparanoia !cddb ) ^^ ( foo bar )", - ["cdio", "cdparanoia", "foo"], - "cdio? ( !cdparanoia )" - ), - ( - "^^ ( || ( a b ) c )", - ("a", "b", "c"), - "^^ ( || ( a b ) c )" - ), - ( - "^^ ( || ( ( a b ) ) ( c ) )", - ("a", "b", "c"), - "^^ ( ( a b ) c )" - ), - ( - "a? ( ( c e ) ( b d ) )", - ("a", "c", "e"), - "a? ( b d )" - ), - ( - "a? ( ( c e ) ( b d ) )", - ("a", "b", "c", "e"), - "a? ( d )" - ), - ( - "a? ( ( c e ) ( c e b c d e c ) )", - ("a", "c", "e"), - "a? ( b d )" - ), - ( - "^^ ( || ( a b ) ^^ ( b c ) )", - ("a", "b"), - "^^ ( || ( a b ) ^^ ( b c ) )" - ), - ( - "^^ ( || ( a b ) ^^ ( b c ) )", - ["a", "c"], - "^^ ( || ( a b ) ^^ ( b c ) )" - ), - ( - "^^ ( || ( a b ) ^^ ( b c ) )", - ["b", "c"], - "" - ), - ( - "^^ ( || ( a b ) ^^ ( b c ) )", - ["a", "b", "c"], - "" - ), - ( - "^^ ( ( a b c ) ( b c d ) )", - ["a", "b", "c"], - "" - ), - ( - "^^ ( ( a b c ) ( b c d ) )", - ["a", "b", "c", "d"], - "^^ ( ( a b c ) ( b c d ) )" - ), - ( - "^^ ( ( a b c ) ( b c !d ) )", - ["a", "b", "c"], - "^^ ( ( a b c ) ( b c !d ) )" - ), - ( - "^^ ( ( a b c ) ( b c !d ) )", - ["a", "b", "c", "d"], - "" - ), - ( - "( ( ( a ) ) ( ( ( b c ) ) ) )", - [""], - "a b c" - ), - ( - "|| ( ( ( ( a ) ) ( ( ( b c ) ) ) ) )", - [""], - "a b c" - ), - ( - "|| ( ( a ( ( ) ( ) ) ( ( ) ) ( b ( ) c ) ) )", - [""], - "a b c" - ), - ( - "|| ( ( a b c ) ) || ( ( d e f ) )", - [""], - "a b c d e f" - ), - ) - for required_use, use, expected in test_cases: - result = check_required_use(required_use, use, lambda k: True).tounicode() - self.assertEqual(result, expected, - "REQUIRED_USE = '%s', USE = '%s', '%s' != '%s'" % \ - (required_use, " ".join(use), result, expected)) diff --git a/portage_with_autodep/pym/portage/tests/dep/testExtendedAtomDict.py b/portage_with_autodep/pym/portage/tests/dep/testExtendedAtomDict.py deleted file mode 100644 index 69d092e..0000000 --- 
a/portage_with_autodep/pym/portage/tests/dep/testExtendedAtomDict.py +++ /dev/null @@ -1,18 +0,0 @@ -# test_isvalidatom.py -- Portage Unit Testing Functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import ExtendedAtomDict - -class TestExtendedAtomDict(TestCase): - - def testExtendedAtomDict(self): - d = ExtendedAtomDict(dict) - d["*/*"] = { "test1": "x" } - d["dev-libs/*"] = { "test2": "y" } - d.setdefault("sys-apps/portage", {})["test3"] = "z" - self.assertEqual(d.get("dev-libs/A"), { "test1": "x", "test2": "y" }) - self.assertEqual(d.get("sys-apps/portage"), { "test1": "x", "test3": "z" }) - self.assertEqual(d["dev-libs/*"], { "test2": "y" }) - self.assertEqual(d["sys-apps/portage"], {'test1': 'x', 'test3': 'z'}) diff --git a/portage_with_autodep/pym/portage/tests/dep/testExtractAffectingUSE.py b/portage_with_autodep/pym/portage/tests/dep/testExtractAffectingUSE.py deleted file mode 100644 index 026a552..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/testExtractAffectingUSE.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import extract_affecting_use -from portage.exception import InvalidDependString - -class TestExtractAffectingUSE(TestCase): - - def testExtractAffectingUSE(self): - test_cases = ( - ("a? ( A ) !b? ( B ) !c? ( C ) d? ( D )", "A", ("a",)), - ("a? ( A ) !b? ( B ) !c? ( C ) d? ( D )", "B", ("b",)), - ("a? ( A ) !b? ( B ) !c? ( C ) d? ( D )", "C", ("c",)), - ("a? ( A ) !b? ( B ) !c? ( C ) d? ( D )", "D", ("d",)), - - ("a? ( b? ( AB ) )", "AB", ("a", "b")), - ("a? ( b? ( c? ( ABC ) ) )", "ABC", ("a", "b", "c")), - - ("a? ( A b? ( c? ( ABC ) AB ) )", "A", ("a",)), - ("a? ( A b? ( c? ( ABC ) AB ) )", "AB", ("a", "b")), - ("a? ( A b? ( c? 
( ABC ) AB ) )", "ABC", ("a", "b", "c")), - ("a? ( A b? ( c? ( ABC ) AB ) ) X", "X", []), - ("X a? ( A b? ( c? ( ABC ) AB ) )", "X", []), - - ("ab? ( || ( A B ) )", "A", ("ab",)), - ("!ab? ( || ( A B ) )", "B", ("ab",)), - ("ab? ( || ( A || ( b? ( || ( B C ) ) ) ) )", "A", ("ab",)), - ("ab? ( || ( A || ( b? ( || ( B C ) ) ) ) )", "B", ("ab", "b")), - ("ab? ( || ( A || ( b? ( || ( B C ) ) ) ) )", "C", ("ab", "b")), - - ("( ab? ( || ( ( A ) || ( b? ( ( ( || ( B ( C ) ) ) ) ) ) ) ) )", "A", ("ab",)), - ("( ab? ( || ( ( A ) || ( b? ( ( ( || ( B ( C ) ) ) ) ) ) ) ) )", "B", ("ab", "b")), - ("( ab? ( || ( ( A ) || ( b? ( ( ( || ( B ( C ) ) ) ) ) ) ) ) )", "C", ("ab", "b")), - - ("a? ( A )", "B", []), - - ("a? ( || ( A B ) )", "B", ["a"]), - - # test USE dep defaults for bug #363073 - ("a? ( >=dev-lang/php-5.2[pcre(+)] )", ">=dev-lang/php-5.2[pcre(+)]", ["a"]), - ) - - test_cases_xfail = ( - ("? ( A )", "A"), - ("!? ( A )", "A"), - ("( A", "A"), - ("A )", "A"), - - ("||( A B )", "A"), - ("|| (A B )", "A"), - ("|| ( A B)", "A"), - ("|| ( A B", "A"), - ("|| A B )", "A"), - ("|| A B", "A"), - ("|| ( A B ) )", "A"), - ("|| || B C", "A"), - ("|| ( A B || )", "A"), - ("a? A", "A"), - ("( || ( || || ( A ) foo? ( B ) ) )", "A"), - ("( || ( || bar? ( A ) foo? 
( B ) ) )", "A"), - ) - - for dep, atom, expected in test_cases: - expected = set(expected) - result = extract_affecting_use(dep, atom, eapi="0") - fail_msg = "dep: " + dep + ", atom: " + atom + ", got: " + \ - " ".join(sorted(result)) + ", expected: " + " ".join(sorted(expected)) - self.assertEqual(result, expected, fail_msg) - - for dep, atom in test_cases_xfail: - fail_msg = "dep: " + dep + ", atom: " + atom + ", got: " + \ - " ".join(sorted(result)) + ", expected: " + " ".join(sorted(expected)) - self.assertRaisesMsg(fail_msg, \ - InvalidDependString, extract_affecting_use, dep, atom, eapi="0") diff --git a/portage_with_autodep/pym/portage/tests/dep/testStandalone.py b/portage_with_autodep/pym/portage/tests/dep/testStandalone.py deleted file mode 100644 index e9f01df..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/testStandalone.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import cpvequal -from portage.exception import PortageException - -class TestStandalone(TestCase): - """ Test some small functions portage.dep - """ - - def testCPVequal(self): - - test_cases = ( - ( "sys-apps/portage-2.1","sys-apps/portage-2.1", True ), - ( "sys-apps/portage-2.1","sys-apps/portage-2.0", False ), - ( "sys-apps/portage-2.1","sys-apps/portage-2.1-r1", False ), - ( "sys-apps/portage-2.1-r1","sys-apps/portage-2.1", False ), - ( "sys-apps/portage-2.1_alpha3","sys-apps/portage-2.1", False ), - ( "sys-apps/portage-2.1_alpha3_p6","sys-apps/portage-2.1_alpha3", False ), - ( "sys-apps/portage-2.1_alpha3","sys-apps/portage-2.1", False ), - ( "sys-apps/portage-2.1","sys-apps/X-2.1", False ), - ( "sys-apps/portage-2.1","portage-2.1", False ), - ) - - test_cases_xfail = ( - ( "sys-apps/portage","sys-apps/portage" ), - ( "sys-apps/portage-2.1-6","sys-apps/portage-2.1-6" ), - ) - - for cpv1, cpv2, expected_result in test_cases: - 
self.assertEqual(cpvequal(cpv1, cpv2), expected_result) - - for cpv1, cpv2 in test_cases_xfail: - self.assertRaisesMsg("cpvequal("+cpv1+", "+cpv2+")", \ - PortageException, cpvequal, cpv1, cpv2) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_best_match_to_list.py b/portage_with_autodep/pym/portage/tests/dep/test_best_match_to_list.py deleted file mode 100644 index d050adc..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_best_match_to_list.py +++ /dev/null @@ -1,43 +0,0 @@ -# test_best_match_to_list.py -- Portage Unit Testing Functionality -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import Atom, best_match_to_list - -class Test_best_match_to_list(TestCase): - - def best_match_to_list_wrapper(self, mypkg, mylist): - """ - This function uses best_match_to_list to create sorted - list of matching atoms. - """ - ret = [] - while mylist: - m = best_match_to_list(mypkg, mylist) - if m is not None: - ret.append(m) - mylist.remove(m) - else: - break - - return ret - - def testBest_match_to_list(self): - tests = [ - ("dev-libs/A-1", [Atom("dev-libs/A"), Atom("=dev-libs/A-1")], \ - [Atom("=dev-libs/A-1"), Atom("dev-libs/A")]), - ("dev-libs/A-1", [Atom("dev-libs/B"), Atom("=dev-libs/A-1:0")], \ - [Atom("=dev-libs/A-1:0")]), - ("dev-libs/A-1", [Atom("dev-libs/*", allow_wildcard=True), Atom("=dev-libs/A-1:0")], \ - [Atom("=dev-libs/A-1:0"), Atom("dev-libs/*", allow_wildcard=True)]), - ("dev-libs/A-1:0", [Atom("dev-*/*", allow_wildcard=True), Atom("dev-*/*:0", allow_wildcard=True),\ - Atom("dev-libs/A"), Atom("<=dev-libs/A-2"), Atom("dev-libs/A:0"), \ - Atom("=dev-libs/A-1*"), Atom("~dev-libs/A-1"), Atom("=dev-libs/A-1")], \ - [Atom("=dev-libs/A-1"), Atom("~dev-libs/A-1"), Atom("=dev-libs/A-1*"), \ - Atom("dev-libs/A:0"), Atom("<=dev-libs/A-2"), Atom("dev-libs/A"), \ - Atom("dev-*/*:0", allow_wildcard=True), Atom("dev-*/*", 
allow_wildcard=True)]) - ] - - for pkg, atom_list, result in tests: - self.assertEqual( self.best_match_to_list_wrapper( pkg, atom_list ), result ) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_dep_getcpv.py b/portage_with_autodep/pym/portage/tests/dep/test_dep_getcpv.py deleted file mode 100644 index 8a0a8aa..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_dep_getcpv.py +++ /dev/null @@ -1,35 +0,0 @@ -# test_dep_getcpv.py -- Portage Unit Testing Functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import dep_getcpv - -class DepGetCPV(TestCase): - """ A simple testcase for isvalidatom - """ - - def testDepGetCPV(self): - - prefix_ops = ["<", ">", "=", "~", "<=", - ">=", "!=", "!<", "!>", "!~"] - - bad_prefix_ops = [ ">~", "<~", "~>", "~<" ] - postfix_ops = [ ("=", "*"), ] - - cpvs = ["sys-apps/portage-2.1", "sys-apps/portage-2.1", - "sys-apps/portage-2.1"] - slots = [None, ":foo", ":2"] - for cpv in cpvs: - for slot in slots: - for prefix in prefix_ops: - mycpv = prefix + cpv - if slot: - mycpv += slot - self.assertEqual( dep_getcpv( mycpv ), cpv ) - - for prefix, postfix in postfix_ops: - mycpv = prefix + cpv + postfix - if slot: - mycpv += slot - self.assertEqual( dep_getcpv( mycpv ), cpv ) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_dep_getrepo.py b/portage_with_autodep/pym/portage/tests/dep/test_dep_getrepo.py deleted file mode 100644 index 78ead8c..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_dep_getrepo.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import dep_getrepo - -class DepGetRepo(TestCase): - """ A simple testcase for isvalidatom - """ - - def testDepGetRepo(self): - - repo_char = "::" - repos = ( "a", "repo-name", 
"repo_name", "repo123", None ) - cpvs = ["sys-apps/portage"] - versions = ["2.1.1","2.1-r1", None] - uses = ["[use]", None] - for cpv in cpvs: - for version in versions: - for use in uses: - for repo in repos: - pkg = cpv - if version: - pkg = '=' + pkg + '-' + version - if repo is not None: - pkg = pkg + repo_char + repo - if use: - pkg = pkg + use - self.assertEqual( dep_getrepo( pkg ), repo ) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_dep_getslot.py b/portage_with_autodep/pym/portage/tests/dep/test_dep_getslot.py deleted file mode 100644 index 206cecc..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_dep_getslot.py +++ /dev/null @@ -1,28 +0,0 @@ -# test_dep_getslot.py -- Portage Unit Testing Functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import dep_getslot - -class DepGetSlot(TestCase): - """ A simple testcase for isvalidatom - """ - - def testDepGetSlot(self): - - slot_char = ":" - slots = ( "a", "1.2", "1", "IloveVapier", None ) - cpvs = ["sys-apps/portage"] - versions = ["2.1.1","2.1-r1"] - for cpv in cpvs: - for version in versions: - for slot in slots: - mycpv = cpv - if version: - mycpv = '=' + mycpv + '-' + version - if slot is not None: - self.assertEqual( dep_getslot( - mycpv + slot_char + slot ), slot ) - else: - self.assertEqual( dep_getslot( mycpv ), slot ) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_dep_getusedeps.py b/portage_with_autodep/pym/portage/tests/dep/test_dep_getusedeps.py deleted file mode 100644 index d2494f7..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_dep_getusedeps.py +++ /dev/null @@ -1,35 +0,0 @@ -# test_dep_getusedeps.py -- Portage Unit Testing Functionality -# Copyright 2007-2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import dep_getusedeps 
- -from portage.tests import test_cps, test_slots, test_versions, test_usedeps - -class DepGetUseDeps(TestCase): - """ A simple testcase for dep_getusedeps - """ - - def testDepGetUseDeps(self): - - for mycpv in test_cps: - for version in test_versions: - for slot in test_slots: - for use in test_usedeps: - cpv = mycpv[:] - if version: - cpv += version - if slot: - cpv += ":" + slot - if isinstance(use, tuple): - cpv += "[%s]" % (",".join(use),) - self.assertEqual( dep_getusedeps( - cpv ), use ) - else: - if len(use): - self.assertEqual( dep_getusedeps( - cpv + "[" + use + "]" ), (use,) ) - else: - self.assertEqual( dep_getusedeps( - cpv + "[" + use + "]" ), () ) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_get_operator.py b/portage_with_autodep/pym/portage/tests/dep/test_get_operator.py deleted file mode 100644 index 4f9848f..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_get_operator.py +++ /dev/null @@ -1,33 +0,0 @@ -# test_get_operator.py -- Portage Unit Testing Functionality -# Copyright 2007 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import get_operator - -class GetOperator(TestCase): - - def testGetOperator(self): - - # get_operator does not validate operators - tests = [ ( "~", "~" ), ( "=", "=" ), ( ">", ">" ), - ( ">=", ">=" ), ( "<=", "<=" ), - ] - - test_cpvs = ["sys-apps/portage-2.1"] - slots = [ None,"1","linux-2.5.6" ] - for cpv in test_cpvs: - for test in tests: - for slot in slots: - atom = cpv[:] - if slot: - atom += ":" + slot - result = get_operator( test[0] + atom ) - self.assertEqual( result, test[1], - msg="get_operator(%s) != %s" % (test[0] + atom, test[1]) ) - - result = get_operator( "sys-apps/portage" ) - self.assertEqual( result, None ) - - result = get_operator( "=sys-apps/portage-2.1*" ) - self.assertEqual( result , "=*" ) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_get_required_use_flags.py 
b/portage_with_autodep/pym/portage/tests/dep/test_get_required_use_flags.py deleted file mode 100644 index 06f8110..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_get_required_use_flags.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import get_required_use_flags -from portage.exception import InvalidDependString - -class TestCheckRequiredUse(TestCase): - - def testCheckRequiredUse(self): - test_cases = ( - ("a b c", ["a", "b", "c"]), - - ("|| ( a b c )", ["a", "b", "c"]), - ("^^ ( a b c )", ["a", "b", "c"]), - - ("|| ( a b ^^ ( d e f ) )", ["a", "b", "d", "e", "f"]), - ("^^ ( a b || ( d e f ) )", ["a", "b", "d", "e", "f"]), - - ("( ^^ ( a ( b ) ( || ( ( d e ) ( f ) ) ) ) )", ["a", "b", "d", "e", "f"]), - - ("a? ( ^^ ( b c ) )", ["a", "b", "c"]), - ("a? ( ^^ ( !b !d? ( c ) ) )", ["a", "b", "c", "d"]), - ) - - test_cases_xfail = ( - ("^^ ( || ( a b ) ^^ ( b c )"), - ("^^( || ( a b ) ^^ ( b c ) )"), - ("^^ || ( a b ) ^^ ( b c )"), - ("^^ ( ( || ) ( a b ) ^^ ( b c ) )"), - ("^^ ( || ( a b ) ) ^^ ( b c ) )"), - ) - - for required_use, expected in test_cases: - result = get_required_use_flags(required_use) - expected = set(expected) - self.assertEqual(result, expected, \ - "REQUIRED_USE: '%s', expected: '%s', got: '%s'" % (required_use, expected, result)) - - for required_use in test_cases_xfail: - self.assertRaisesMsg("REQUIRED_USE: '%s'" % (required_use,), \ - InvalidDependString, get_required_use_flags, required_use) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_isjustname.py b/portage_with_autodep/pym/portage/tests/dep/test_isjustname.py deleted file mode 100644 index c16fb54..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_isjustname.py +++ /dev/null @@ -1,24 +0,0 @@ -# test_isjustname.py -- Portage Unit Testing Functionality -# Copyright 2006 Gentoo Foundation -# Distributed 
under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import isjustname - -class IsJustName(TestCase): - - def testIsJustName(self): - - cats = ( "", "sys-apps/", "foo/", "virtual/" ) - pkgs = ( "portage", "paludis", "pkgcore", "notARealPkg" ) - vers = ( "", "-2.0-r3", "-1.0_pre2", "-3.1b" ) - - for pkg in pkgs: - for cat in cats: - for ver in vers: - if len(ver): - self.assertFalse( isjustname( cat + pkg + ver ), - msg="isjustname(%s) is True!" % (cat + pkg + ver) ) - else: - self.assertTrue( isjustname( cat + pkg + ver ), - msg="isjustname(%s) is False!" % (cat + pkg + ver) ) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_isvalidatom.py b/portage_with_autodep/pym/portage/tests/dep/test_isvalidatom.py deleted file mode 100644 index 173ab0d..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_isvalidatom.py +++ /dev/null @@ -1,146 +0,0 @@ -# Copyright 2006-2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import isvalidatom - -class IsValidAtomTestCase(object): - def __init__(self, atom, expected, allow_wildcard=False, allow_repo=False): - self.atom = atom - self.expected = expected - self.allow_wildcard = allow_wildcard - self.allow_repo = allow_repo - -class IsValidAtom(TestCase): - - def testIsValidAtom(self): - - test_cases = ( - IsValidAtomTestCase("sys-apps/portage", True), - IsValidAtomTestCase("=sys-apps/portage-2.1", True), - IsValidAtomTestCase("=sys-apps/portage-2.1*", True), - IsValidAtomTestCase(">=sys-apps/portage-2.1", True), - IsValidAtomTestCase("<=sys-apps/portage-2.1", True), - IsValidAtomTestCase(">sys-apps/portage-2.1", True), - IsValidAtomTestCase("<sys-apps/portage-2.1", True), - IsValidAtomTestCase("~sys-apps/portage-2.1", True), - IsValidAtomTestCase("sys-apps/portage:foo", True), - IsValidAtomTestCase("sys-apps/portage-2.1:foo", False), - IsValidAtomTestCase( 
"sys-apps/portage-2.1:", False), - IsValidAtomTestCase("sys-apps/portage-2.1:", False), - IsValidAtomTestCase("sys-apps/portage-2.1:[foo]", False), - IsValidAtomTestCase("sys-apps/portage", True), - IsValidAtomTestCase("sys-apps/portage", True), - IsValidAtomTestCase("sys-apps/portage", True), - IsValidAtomTestCase("sys-apps/portage", True), - IsValidAtomTestCase("sys-apps/portage", True), - IsValidAtomTestCase("sys-apps/portage", True), - IsValidAtomTestCase("sys-apps/portage", True), - - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[bar?,!baz?,!doc=,build=]", True), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[doc?]", True), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[!doc?]", True), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[doc=]", True), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[!doc=]", True), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[!doc]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[!-doc]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[!-doc=]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[!-doc?]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[-doc?]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[-doc=]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[-doc!=]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[-doc=]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[bar][-baz][doc?][!build?]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[bar,-baz,doc?,!build?]", True), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[bar,-baz,doc?,!build?,]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[,bar,-baz,doc?,!build?]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[bar,-baz][doc?,!build?]", False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo[bar][doc,build]", False), - IsValidAtomTestCase(">~cate-gory/foo-1.0", False), - IsValidAtomTestCase(">~category/foo-1.0", False), - 
IsValidAtomTestCase("<~category/foo-1.0", False), - IsValidAtomTestCase("###cat/foo-1.0", False), - IsValidAtomTestCase("~sys-apps/portage", False), - IsValidAtomTestCase("portage", False), - IsValidAtomTestCase("=portage", False), - IsValidAtomTestCase(">=portage-2.1", False), - IsValidAtomTestCase("~portage-2.1", False), - IsValidAtomTestCase("=portage-2.1*", False), - IsValidAtomTestCase("null/portage", True), - IsValidAtomTestCase("null/portage*:0", False), - IsValidAtomTestCase(">=null/portage-2.1", True), - IsValidAtomTestCase(">=null/portage", False), - IsValidAtomTestCase(">null/portage", False), - IsValidAtomTestCase("=null/portage*", False), - IsValidAtomTestCase("=null/portage", False), - IsValidAtomTestCase("~null/portage", False), - IsValidAtomTestCase("<=null/portage", False), - IsValidAtomTestCase("<null/portage", False), - IsValidAtomTestCase("~null/portage-2.1", True), - IsValidAtomTestCase("=null/portage-2.1*", True), - IsValidAtomTestCase("null/portage-2.1*", False), - IsValidAtomTestCase("app-doc/php-docs-20071125", False), - IsValidAtomTestCase("app-doc/php-docs-20071125-r2", False), - IsValidAtomTestCase("=foo/bar-1-r1-1-r1", False), - IsValidAtomTestCase("foo/-z-1", False), - - # These are invalid because pkg name must not end in hyphen - # followed by numbers - IsValidAtomTestCase("=foo/bar-1-r1-1-r1", False), - IsValidAtomTestCase("=foo/bar-123-1", False), - IsValidAtomTestCase("=foo/bar-123-1*", False), - IsValidAtomTestCase("foo/bar-123", False), - IsValidAtomTestCase("=foo/bar-123-1-r1", False), - IsValidAtomTestCase("=foo/bar-123-1-r1*", False), - IsValidAtomTestCase("foo/bar-123-r1", False), - IsValidAtomTestCase("foo/bar-1", False), - - IsValidAtomTestCase("=foo/bar--baz-1-r1", True), - IsValidAtomTestCase("=foo/bar-baz--1-r1", True), - IsValidAtomTestCase("=foo/bar-baz---1-r1", True), - IsValidAtomTestCase("=foo/bar-baz---1", True), - IsValidAtomTestCase("=foo/bar-baz-1--r1", False), - IsValidAtomTestCase("games-strategy/ufo2000", 
True), - IsValidAtomTestCase("~games-strategy/ufo2000-0.1", True), - IsValidAtomTestCase("=media-libs/x264-20060810", True), - IsValidAtomTestCase("foo/b", True), - IsValidAtomTestCase("app-text/7plus", True), - IsValidAtomTestCase("foo/666", True), - IsValidAtomTestCase("=dev-libs/poppler-qt3-0.11*", True), - - #Testing atoms with repositories - IsValidAtomTestCase("sys-apps/portage::repo_123-name", True, allow_repo=True), - IsValidAtomTestCase("=sys-apps/portage-2.1::repo", True, allow_repo=True), - IsValidAtomTestCase("=sys-apps/portage-2.1*::repo", True, allow_repo=True), - IsValidAtomTestCase("sys-apps/portage:foo::repo", True, allow_repo=True), - IsValidAtomTestCase("sys-apps/portage-2.1:foo::repo", False, allow_repo=True), - IsValidAtomTestCase("sys-apps/portage-2.1:::repo", False, allow_repo=True), - IsValidAtomTestCase("sys-apps/portage-2.1:::repo[foo]", False, allow_repo=True), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo::repo[bar?,!baz?,!doc=,build=]", True, allow_repo=True), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo::repo[doc?]", True, allow_repo=True), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo::repo[!doc]", False, allow_repo=True), - IsValidAtomTestCase("###cat/foo-1.0::repo", False, allow_repo=True), - IsValidAtomTestCase("~sys-apps/portage::repo", False, allow_repo=True), - IsValidAtomTestCase("portage::repo", False, allow_repo=True), - IsValidAtomTestCase("=portage::repo", False, allow_repo=True), - IsValidAtomTestCase("null/portage::repo", True, allow_repo=True), - IsValidAtomTestCase("app-doc/php-docs-20071125::repo", False, allow_repo=True), - IsValidAtomTestCase("=foo/bar-1-r1-1-r1::repo", False, allow_repo=True), - - IsValidAtomTestCase("sys-apps/portage::repo_123-name", False, allow_repo=False), - IsValidAtomTestCase("=sys-apps/portage-2.1::repo", False, allow_repo=False), - IsValidAtomTestCase("=sys-apps/portage-2.1*::repo", False, allow_repo=False), - IsValidAtomTestCase("sys-apps/portage:foo::repo", False, 
allow_repo=False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo::repo[bar?,!baz?,!doc=,build=]", False, allow_repo=False), - IsValidAtomTestCase("=sys-apps/portage-2.2*:foo::repo[doc?]", False, allow_repo=False), - IsValidAtomTestCase("null/portage::repo", False, allow_repo=False), - ) - - for test_case in test_cases: - if test_case.expected: - atom_type = "valid" - else: - atom_type = "invalid" - self.assertEqual( bool(isvalidatom(test_case.atom, allow_wildcard=test_case.allow_wildcard, \ - allow_repo=test_case.allow_repo)), test_case.expected, - msg="isvalidatom(%s) != %s" % ( test_case.atom, test_case.expected ) ) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_match_from_list.py b/portage_with_autodep/pym/portage/tests/dep/test_match_from_list.py deleted file mode 100644 index afba414..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_match_from_list.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright 2006, 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import sys -from portage.tests import TestCase -from portage.dep import Atom, match_from_list, _repo_separator -from portage.versions import catpkgsplit - -if sys.hexversion >= 0x3000000: - basestring = str - -class Package(object): - """ - Provides a minimal subset of attributes of _emerge.Package.Package - """ - def __init__(self, atom): - atom = Atom(atom, allow_repo=True) - self.cp = atom.cp - self.cpv = atom.cpv - self.cpv_split = catpkgsplit(self.cpv) - self.slot = atom.slot - self.repo = atom.repo - if atom.use: - self.use = self._use_class(atom.use.enabled) - self.iuse = self._iuse_class(atom.use.required) - else: - self.use = self._use_class([]) - self.iuse = self._iuse_class([]) - - class _use_class(object): - def __init__(self, use): - self.enabled = frozenset(use) - - class _iuse_class(object): - def __init__(self, iuse): - self.all = frozenset(iuse) - - def is_valid_flag(self, flags): - if isinstance(flags, basestring): - flags 
= [flags] - for flag in flags: - if not flag in self.all: - return False - return True - -class Test_match_from_list(TestCase): - - def testMatch_from_list(self): - tests = ( - ("=sys-apps/portage-45*", [], [] ), - ("=sys-apps/portage-45*", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ), - ("!=sys-apps/portage-45*", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ), - ("!!=sys-apps/portage-45*", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ), - ("=sys-apps/portage-045", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ), - ("=sys-apps/portage-045", ["sys-apps/portage-046"], [] ), - ("~sys-apps/portage-045", ["sys-apps/portage-045-r1"], ["sys-apps/portage-045-r1"] ), - ("~sys-apps/portage-045", ["sys-apps/portage-046-r1"], [] ), - ("<=sys-apps/portage-045", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ), - ("<=sys-apps/portage-045", ["sys-apps/portage-046"], [] ), - ("<sys-apps/portage-046", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ), - ("<sys-apps/portage-046", ["sys-apps/portage-046"], [] ), - (">=sys-apps/portage-045", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ), - (">=sys-apps/portage-047", ["sys-apps/portage-046-r1"], [] ), - (">sys-apps/portage-044", ["sys-apps/portage-045"], ["sys-apps/portage-045"] ), - (">sys-apps/portage-047", ["sys-apps/portage-046-r1"], [] ), - ("sys-apps/portage:0", [Package("=sys-apps/portage-045:0")], ["sys-apps/portage-045"] ), - ("sys-apps/portage:0", [Package("=sys-apps/portage-045:1")], [] ), - ("=sys-fs/udev-1*", ["sys-fs/udev-123"], ["sys-fs/udev-123"]), - ("=sys-fs/udev-4*", ["sys-fs/udev-456"], ["sys-fs/udev-456"] ), - ("*/*", ["sys-fs/udev-456"], ["sys-fs/udev-456"] ), - ("sys-fs/*", ["sys-fs/udev-456"], ["sys-fs/udev-456"] ), - ("*/udev", ["sys-fs/udev-456"], ["sys-fs/udev-456"] ), - ("=sys-apps/portage-2*", ["sys-apps/portage-2.1"], ["sys-apps/portage-2.1"] ), - ("=sys-apps/portage-2.1*", ["sys-apps/portage-2.1.2"], ["sys-apps/portage-2.1.2"] ), - ("dev-libs/*", 
["sys-apps/portage-2.1.2"], [] ), - ("*/tar", ["sys-apps/portage-2.1.2"], [] ), - ("*/*", ["dev-libs/A-1", "dev-libs/B-1"], ["dev-libs/A-1", "dev-libs/B-1"] ), - ("dev-libs/*", ["dev-libs/A-1", "sci-libs/B-1"], ["dev-libs/A-1"] ), - - ("dev-libs/A[foo]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo]")], ["dev-libs/A-1"] ), - ("dev-libs/A[-foo]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo]")], ["dev-libs/A-2"] ), - ("dev-libs/A[-foo]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2")], [] ), - ("dev-libs/A[foo,bar]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo]")], [] ), - ("dev-libs/A[foo,bar]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[-foo,bar]")], [] ), - ("dev-libs/A[foo,bar]", [Package("=dev-libs/A-1[foo]"), Package("=dev-libs/A-2[foo,bar]")], ["dev-libs/A-2"] ), - ("dev-libs/A[foo,bar(+)]", [Package("=dev-libs/A-1[-foo]"), Package("=dev-libs/A-2[foo]")], ["dev-libs/A-2"] ), - ("dev-libs/A[foo,bar(-)]", [Package("=dev-libs/A-1[-foo]"), Package("=dev-libs/A-2[foo]")], [] ), - ("dev-libs/A[foo,-bar(-)]", [Package("=dev-libs/A-1[-foo,bar]"), Package("=dev-libs/A-2[foo]")], ["dev-libs/A-2"] ), - - ("dev-libs/A::repo1", [Package("=dev-libs/A-1::repo1"), Package("=dev-libs/A-1::repo2")], ["dev-libs/A-1::repo1"] ), - ("dev-libs/A::repo2", [Package("=dev-libs/A-1::repo1"), Package("=dev-libs/A-1::repo2")], ["dev-libs/A-1::repo2"] ), - ("dev-libs/A::repo2[foo]", [Package("=dev-libs/A-1::repo1[foo]"), Package("=dev-libs/A-1::repo2[-foo]")], [] ), - ("dev-libs/A::repo2[foo]", [Package("=dev-libs/A-1::repo1[-foo]"), Package("=dev-libs/A-1::repo2[foo]")], ["dev-libs/A-1::repo2"] ), - ("dev-libs/A:1::repo2[foo]", [Package("=dev-libs/A-1:1::repo1"), Package("=dev-libs/A-1:2::repo2")], [] ), - ("dev-libs/A:1::repo2[foo]", [Package("=dev-libs/A-1:2::repo1"), Package("=dev-libs/A-1:1::repo2[foo]")], ["dev-libs/A-1::repo2"] ), - ) - - for atom, cpv_list, expected_result in tests: - result = [] - for pkg in 
match_from_list( atom, cpv_list ): - if isinstance(pkg, Package): - if pkg.repo: - result.append(pkg.cpv + _repo_separator + pkg.repo) - else: - result.append(pkg.cpv) - else: - result.append(pkg) - self.assertEqual( result, expected_result ) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_paren_reduce.py b/portage_with_autodep/pym/portage/tests/dep/test_paren_reduce.py deleted file mode 100644 index 9a147a0..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_paren_reduce.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.dep import paren_reduce -from portage.exception import InvalidDependString - -class TestParenReduce(TestCase): - - def testParenReduce(self): - - test_cases = ( - ( "A", ["A"]), - ( "( A )", ["A"]), - ( "|| ( A B )", [ "||", ["A", "B"] ]), - ( "|| ( A || ( B C ) )", [ "||", ["A", "||", ["B", "C"]]]), - ( "|| ( A || ( B C D ) )", [ "||", ["A", "||", ["B", "C", "D"]] ]), - ( "|| ( A || ( B || ( C D ) E ) )", [ "||", ["A", "||", ["B", "||", ["C", "D"], "E"]] ]), - ( "a? ( A )", ["a?", ["A"]]), - - ( "( || ( ( ( A ) B ) ) )", ["A", "B"]), - ( "( || ( || ( ( A ) B ) ) )", [ "||", ["A", "B"] ]), - ( "|| ( A )", ["A"]), - ( "( || ( || ( || ( A ) foo? ( B ) ) ) )", [ "||", ["A", "foo?", ["B"] ]]), - ( "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )", [ "||", ["bar?", ["A"], "foo?", ["B"] ]]), - ( "A || ( ) foo? ( ) B", ["A", "B"]), - - ( "|| ( A ) || ( B )", ["A", "B"]), - ( "foo? ( A ) foo? 
( B )", ["foo?", ["A"], "foo?", ["B"]]), - - ( "|| ( ( A B ) C )", [ "||", [ ["A", "B"], "C"] ]), - ( "|| ( ( A B ) ( C ) )", [ "||", [ ["A", "B"], "C"] ]), - # test USE dep defaults for bug #354003 - ( ">=dev-lang/php-5.2[pcre(+)]", [ ">=dev-lang/php-5.2[pcre(+)]" ]), - ) - - test_cases_xfail = ( - "( A", - "A )", - - "||( A B )", - "|| (A B )", - "|| ( A B)", - "|| ( A B", - "|| A B )", - - "|| A B", - "|| ( A B ) )", - "|| || B C", - - "|| ( A B || )", - - "a? A", - - ( "( || ( || || ( A ) foo? ( B ) ) )"), - ( "( || ( || bar? ( A ) foo? ( B ) ) )"), - ) - - for dep_str, expected_result in test_cases: - self.assertEqual(paren_reduce(dep_str), expected_result, - "input: '%s' result: %s != %s" % (dep_str, - paren_reduce(dep_str), expected_result)) - - for dep_str in test_cases_xfail: - self.assertRaisesMsg(dep_str, - InvalidDependString, paren_reduce, dep_str) diff --git a/portage_with_autodep/pym/portage/tests/dep/test_use_reduce.py b/portage_with_autodep/pym/portage/tests/dep/test_use_reduce.py deleted file mode 100644 index 1618430..0000000 --- a/portage_with_autodep/pym/portage/tests/dep/test_use_reduce.py +++ /dev/null @@ -1,627 +0,0 @@ -# Copyright 2009-2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.exception import InvalidDependString -from portage.dep import Atom, use_reduce - -class UseReduceTestCase(object): - def __init__(self, deparray, uselist=[], masklist=[], \ - matchall=0, excludeall=[], is_src_uri=False, \ - eapi="0", opconvert=False, flat=False, expected_result=None, \ - is_valid_flag=None, token_class=None): - self.deparray = deparray - self.uselist = uselist - self.masklist = masklist - self.matchall = matchall - self.excludeall = excludeall - self.is_src_uri = is_src_uri - self.eapi = eapi - self.opconvert = opconvert - self.flat = flat - self.is_valid_flag = is_valid_flag - self.token_class = token_class - self.expected_result = expected_result - - 
def run(self): - try: - return use_reduce(self.deparray, self.uselist, self.masklist, \ - self.matchall, self.excludeall, self.is_src_uri, self.eapi, \ - self.opconvert, self.flat, self.is_valid_flag, self.token_class) - except InvalidDependString as e: - raise InvalidDependString("%s: %s" % (e, self.deparray)) - -class UseReduce(TestCase): - - def always_true(self, ununsed_parameter): - return True - - def always_false(self, ununsed_parameter): - return False - - def testUseReduce(self): - - EAPI_WITH_SRC_URI_ARROWS = "2" - EAPI_WITHOUT_SRC_URI_ARROWS = "0" - - test_cases = ( - UseReduceTestCase( - "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )", - uselist = ["a", "b", "c", "d"], - expected_result = ["A", "B"] - ), - UseReduceTestCase( - "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )", - uselist = ["a", "b", "c"], - expected_result = ["A", "B", "D"] - ), - UseReduceTestCase( - "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )", - uselist = ["b", "c"], - expected_result = ["B", "D"] - ), - - UseReduceTestCase( - "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )", - matchall = True, - expected_result = ["A", "B", "C", "D"] - ), - UseReduceTestCase( - "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )", - masklist = ["a", "c"], - expected_result = ["C", "D"] - ), - UseReduceTestCase( - "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )", - matchall = True, - masklist = ["a", "c"], - expected_result = ["B", "C", "D"] - ), - UseReduceTestCase( - "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )", - uselist = ["a", "b"], - masklist = ["a", "c"], - expected_result = ["B", "C", "D"] - ), - UseReduceTestCase( - "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )", - excludeall = ["a", "c"], - expected_result = ["D"] - ), - UseReduceTestCase( - "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )", - uselist = ["b"], - excludeall = ["a", "c"], - expected_result = ["B", "D"] - ), - UseReduceTestCase( - "a? ( A ) b? ( B ) !c? ( C ) !d? ( D )", - matchall = True, - excludeall = ["a", "c"], - expected_result = ["A", "B", "D"] - ), - UseReduceTestCase( - "a? ( A ) b? ( B ) !c? 
( C ) !d? ( D )", - matchall = True, - excludeall = ["a", "c"], - masklist = ["b"], - expected_result = ["A", "D"] - ), - - - UseReduceTestCase( - "a? ( b? ( AB ) )", - uselist = ["a", "b"], - expected_result = ["AB"] - ), - UseReduceTestCase( - "a? ( b? ( AB ) C )", - uselist = ["a"], - expected_result = ["C"] - ), - UseReduceTestCase( - "a? ( b? ( || ( AB CD ) ) )", - uselist = ["a", "b"], - expected_result = ["||", ["AB", "CD"]] - ), - UseReduceTestCase( - "|| ( || ( a? ( A ) b? ( B ) ) )", - uselist = ["a", "b"], - expected_result = ["||", ["A", "B"]] - ), - UseReduceTestCase( - "|| ( || ( a? ( A ) b? ( B ) ) )", - uselist = ["a"], - expected_result = ["A"] - ), - UseReduceTestCase( - "|| ( || ( a? ( A ) b? ( B ) ) )", - uselist = [], - expected_result = [] - ), - UseReduceTestCase( - "|| ( || ( a? ( || ( A c? ( C ) ) ) b? ( B ) ) )", - uselist = [], - expected_result = [] - ), - UseReduceTestCase( - "|| ( || ( a? ( || ( A c? ( C ) ) ) b? ( B ) ) )", - uselist = ["a"], - expected_result = ["A"] - ), - UseReduceTestCase( - "|| ( || ( a? ( || ( A c? ( C ) ) ) b? ( B ) ) )", - uselist = ["b"], - expected_result = ["B"] - ), - UseReduceTestCase( - "|| ( || ( a? ( || ( A c? ( C ) ) ) b? ( B ) ) )", - uselist = ["c"], - expected_result = [] - ), - UseReduceTestCase( - "|| ( || ( a? ( || ( A c? ( C ) ) ) b? 
( B ) ) )", - uselist = ["a", "c"], - expected_result = ["||", [ "A", "C"]] - ), - - #paren_reduce tests - UseReduceTestCase( - "A", - expected_result = ["A"]), - UseReduceTestCase( - "( A )", - expected_result = ["A"]), - UseReduceTestCase( - "|| ( A B )", - expected_result = [ "||", ["A", "B"] ]), - UseReduceTestCase( - "|| ( ( A B ) C )", - expected_result = [ "||", [ ["A", "B"], "C"] ]), - UseReduceTestCase( - "|| ( ( A B ) ( C ) )", - expected_result = [ "||", [ ["A", "B"], "C"] ]), - UseReduceTestCase( - "|| ( A || ( B C ) )", - expected_result = [ "||", ["A", "B", "C"]]), - UseReduceTestCase( - "|| ( A || ( B C D ) )", - expected_result = [ "||", ["A", "B", "C", "D"] ]), - UseReduceTestCase( - "|| ( A || ( B || ( C D ) E ) )", - expected_result = [ "||", ["A", "B", "C", "D", "E"] ]), - UseReduceTestCase( - "( || ( ( ( A ) B ) ) )", - expected_result = ["A", "B"] ), - UseReduceTestCase( - "( || ( || ( ( A ) B ) ) )", - expected_result = [ "||", ["A", "B"] ]), - UseReduceTestCase( - "( || ( || ( ( A ) B ) ) )", - expected_result = [ "||", ["A", "B"] ]), - UseReduceTestCase( - "|| ( A )", - expected_result = ["A"]), - UseReduceTestCase( - "( || ( || ( || ( A ) foo? ( B ) ) ) )", - expected_result = ["A"]), - UseReduceTestCase( - "( || ( || ( || ( A ) foo? ( B ) ) ) )", - uselist = ["foo"], - expected_result = [ "||", ["A", "B"] ]), - UseReduceTestCase( - "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )", - expected_result = []), - UseReduceTestCase( - "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )", - uselist = ["foo", "bar"], - expected_result = [ "||", [ "A", "B" ] ]), - UseReduceTestCase( - "A || ( bar? ( C ) ) foo? ( bar? ( C ) ) B", - expected_result = ["A", "B"]), - UseReduceTestCase( - "|| ( A ) || ( B )", - expected_result = ["A", "B"]), - UseReduceTestCase( - "foo? ( A ) foo? ( B )", - expected_result = []), - UseReduceTestCase( - "foo? ( A ) foo? 
( B )", - uselist = ["foo"], - expected_result = ["A", "B"]), - UseReduceTestCase( - "|| ( A B ) C", - expected_result = ['||', ['A', 'B'], 'C']), - UseReduceTestCase( - "A || ( B C )", - expected_result = ['A', '||', ['B', 'C']]), - - #SRC_URI stuff - UseReduceTestCase( - "http://foo/bar -> blah.tbz2", - is_src_uri = True, - eapi = EAPI_WITH_SRC_URI_ARROWS, - expected_result = ["http://foo/bar", "->", "blah.tbz2"]), - UseReduceTestCase( - "foo? ( http://foo/bar -> blah.tbz2 )", - uselist = [], - is_src_uri = True, - eapi = EAPI_WITH_SRC_URI_ARROWS, - expected_result = []), - UseReduceTestCase( - "foo? ( http://foo/bar -> blah.tbz2 )", - uselist = ["foo"], - is_src_uri = True, - eapi = EAPI_WITH_SRC_URI_ARROWS, - expected_result = ["http://foo/bar", "->", "blah.tbz2"]), - UseReduceTestCase( - "http://foo/bar -> bar.tbz2 foo? ( ftp://foo/a )", - uselist = [], - is_src_uri = True, - eapi = EAPI_WITH_SRC_URI_ARROWS, - expected_result = ["http://foo/bar", "->", "bar.tbz2"]), - UseReduceTestCase( - "http://foo/bar -> bar.tbz2 foo? 
( ftp://foo/a )", - uselist = ["foo"], - is_src_uri = True, - eapi = EAPI_WITH_SRC_URI_ARROWS, - expected_result = ["http://foo/bar", "->", "bar.tbz2", "ftp://foo/a"]), - UseReduceTestCase( - "http://foo.com/foo http://foo/bar -> blah.tbz2", - uselist = ["foo"], - is_src_uri = True, - eapi = EAPI_WITH_SRC_URI_ARROWS, - expected_result = ["http://foo.com/foo", "http://foo/bar", "->", "blah.tbz2"]), - - #opconvert tests - UseReduceTestCase( - "A", - opconvert = True, - expected_result = ["A"]), - UseReduceTestCase( - "( A )", - opconvert = True, - expected_result = ["A"]), - UseReduceTestCase( - "|| ( A B )", - opconvert = True, - expected_result = [['||', 'A', 'B']]), - UseReduceTestCase( - "|| ( ( A B ) C )", - opconvert = True, - expected_result = [['||', ['A', 'B'], 'C']]), - UseReduceTestCase( - "|| ( A || ( B C ) )", - opconvert = True, - expected_result = [['||', 'A', 'B', 'C']]), - UseReduceTestCase( - "|| ( A || ( B C D ) )", - opconvert = True, - expected_result = [['||', 'A', 'B', 'C', 'D']]), - UseReduceTestCase( - "|| ( A || ( B || ( C D ) E ) )", - expected_result = [ "||", ["A", "B", "C", "D", "E"] ]), - UseReduceTestCase( - "( || ( ( ( A ) B ) ) )", - opconvert = True, - expected_result = [ "A", "B" ] ), - UseReduceTestCase( - "( || ( || ( ( A ) B ) ) )", - opconvert = True, - expected_result = [['||', 'A', 'B']]), - UseReduceTestCase( - "|| ( A B ) C", - opconvert = True, - expected_result = [['||', 'A', 'B'], 'C']), - UseReduceTestCase( - "A || ( B C )", - opconvert = True, - expected_result = ['A', ['||', 'B', 'C']]), - UseReduceTestCase( - "A foo? ( || ( B || ( bar? ( || ( C D E ) ) !bar? ( F ) ) ) ) G", - uselist = ["foo", "bar"], - opconvert = True, - expected_result = ['A', ['||', 'B', 'C', 'D', 'E'], 'G']), - UseReduceTestCase( - "A foo? ( || ( B || ( bar? ( || ( C D E ) ) !bar? 
( F ) ) ) ) G", - uselist = ["foo", "bar"], - opconvert = False, - expected_result = ['A', '||', ['B', 'C', 'D', 'E'], 'G']), - - UseReduceTestCase( - "|| ( A )", - opconvert = True, - expected_result = ["A"]), - UseReduceTestCase( - "( || ( || ( || ( A ) foo? ( B ) ) ) )", - expected_result = ["A"]), - UseReduceTestCase( - "( || ( || ( || ( A ) foo? ( B ) ) ) )", - uselist = ["foo"], - opconvert = True, - expected_result = [['||', 'A', 'B']]), - UseReduceTestCase( - "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )", - opconvert = True, - expected_result = []), - UseReduceTestCase( - "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )", - uselist = ["foo", "bar"], - opconvert = True, - expected_result = [['||', 'A', 'B']]), - UseReduceTestCase( - "A || ( bar? ( C ) ) foo? ( bar? ( C ) ) B", - opconvert = True, - expected_result = ["A", "B"]), - UseReduceTestCase( - "|| ( A ) || ( B )", - opconvert = True, - expected_result = ["A", "B"]), - UseReduceTestCase( - "foo? ( A ) foo? ( B )", - opconvert = True, - expected_result = []), - UseReduceTestCase( - "foo? ( A ) foo? ( B )", - uselist = ["foo"], - opconvert = True, - expected_result = ["A", "B"]), - UseReduceTestCase( - "|| ( foo? ( || ( A B ) ) )", - uselist = ["foo"], - opconvert = True, - expected_result = [['||', 'A', 'B']]), - - UseReduceTestCase( - "|| ( ( A B ) foo? ( || ( C D ) ) )", - uselist = ["foo"], - opconvert = True, - expected_result = [['||', ['A', 'B'], 'C', 'D']]), - - UseReduceTestCase( - "|| ( ( A B ) foo? 
( || ( C D ) ) )", - uselist = ["foo"], - opconvert = False, - expected_result = ['||', [['A', 'B'], 'C', 'D']]), - - UseReduceTestCase( - "|| ( ( A B ) || ( C D ) )", - expected_result = ['||', [['A', 'B'], 'C', 'D']]), - - UseReduceTestCase( - "|| ( ( A B ) || ( C D || ( E ( F G ) || ( H ) ) ) )", - expected_result = ['||', [['A', 'B'], 'C', 'D', 'E', ['F', 'G'], 'H']]), - - UseReduceTestCase( - "|| ( ( A B ) || ( C D || ( E ( F G ) || ( H ) ) ) )", - opconvert = True, - expected_result = [['||', ['A', 'B'], 'C', 'D', 'E', ['F', 'G'], 'H']]), - - UseReduceTestCase( - "|| ( foo? ( A B ) )", - uselist = ["foo"], - expected_result = ['A', 'B']), - - UseReduceTestCase( - "|| ( || ( foo? ( A B ) ) )", - uselist = ["foo"], - expected_result = ['A', 'B']), - - UseReduceTestCase( - "|| ( || ( || ( a? ( b? ( c? ( || ( || ( || ( d? ( e? ( f? ( A B ) ) ) ) ) ) ) ) ) ) ) )", - uselist = ["a", "b", "c", "d", "e", "f"], - expected_result = ['A', 'B']), - - UseReduceTestCase( - "|| ( || ( ( || ( a? ( ( b? ( c? ( || ( || ( || ( ( d? ( e? ( f? ( A B ) ) ) ) ) ) ) ) ) ) ) ) ) ) )", - uselist = ["a", "b", "c", "d", "e", "f"], - expected_result = ['A', 'B']), - - UseReduceTestCase( - "|| ( ( A ( || ( B ) ) ) )", - expected_result = ['A', 'B']), - - UseReduceTestCase( - "|| ( ( A B ) || ( foo? ( bar? ( ( C D || ( baz? ( E ) ( F G ) || ( H ) ) ) ) ) ) )", - uselist = ["foo", "bar", "baz"], - expected_result = ['||', [['A', 'B'], ['C', 'D', '||', ['E', ['F', 'G'], 'H']]]]), - - UseReduceTestCase( - "|| ( ( A B ) || ( foo? ( bar? ( ( C D || ( baz? ( E ) ( F G ) || ( H ) ) ) ) ) ) )", - uselist = ["foo", "bar", "baz"], - opconvert = True, - expected_result = [['||', ['A', 'B'], ['C', 'D', ['||', 'E', ['F', 'G'], 'H']]]]), - - UseReduceTestCase( - "|| ( foo? ( A B ) )", - uselist = ["foo"], - opconvert=True, - expected_result = ['A', 'B']), - - UseReduceTestCase( - "|| ( || ( foo? 
( A B ) ) )", - uselist = ["foo"], - opconvert=True, - expected_result = ['A', 'B']), - - UseReduceTestCase( - "|| ( || ( || ( a? ( b? ( c? ( || ( || ( || ( d? ( e? ( f? ( A B ) ) ) ) ) ) ) ) ) ) ) )", - uselist = ["a", "b", "c", "d", "e", "f"], - opconvert=True, - expected_result = ['A', 'B']), - - #flat test - UseReduceTestCase( - "A", - flat = True, - expected_result = ["A"]), - UseReduceTestCase( - "( A )", - flat = True, - expected_result = ["A"]), - UseReduceTestCase( - "|| ( A B )", - flat = True, - expected_result = [ "||", "A", "B" ] ), - UseReduceTestCase( - "|| ( A || ( B C ) )", - flat = True, - expected_result = [ "||", "A", "||", "B", "C" ]), - UseReduceTestCase( - "|| ( A || ( B C D ) )", - flat = True, - expected_result = [ "||", "A", "||", "B", "C", "D" ]), - UseReduceTestCase( - "|| ( A || ( B || ( C D ) E ) )", - flat = True, - expected_result = [ "||", "A", "||", "B", "||", "C", "D", "E" ]), - UseReduceTestCase( - "( || ( ( ( A ) B ) ) )", - flat = True, - expected_result = [ "||", "A", "B"] ), - UseReduceTestCase( - "( || ( || ( ( A ) B ) ) )", - flat = True, - expected_result = [ "||", "||", "A", "B" ]), - UseReduceTestCase( - "( || ( || ( ( A ) B ) ) )", - flat = True, - expected_result = [ "||", "||", "A", "B" ]), - UseReduceTestCase( - "|| ( A )", - flat = True, - expected_result = ["||", "A"]), - UseReduceTestCase( - "( || ( || ( || ( A ) foo? ( B ) ) ) )", - expected_result = ["A"]), - UseReduceTestCase( - "( || ( || ( || ( A ) foo? ( B ) ) ) )", - uselist = ["foo"], - flat = True, - expected_result = [ "||", "||","||", "A", "B" ]), - UseReduceTestCase( - "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )", - flat = True, - expected_result = ["||", "||","||"]), - UseReduceTestCase( - "( || ( || ( bar? ( A ) || ( foo? ( B ) ) ) ) )", - uselist = ["foo", "bar"], - flat = True, - expected_result = [ "||", "||", "A", "||", "B" ]), - UseReduceTestCase( - "A || ( bar? ( C ) ) foo? ( bar? 
( C ) ) B", - flat = True, - expected_result = ["A", "||", "B"]), - UseReduceTestCase( - "|| ( A ) || ( B )", - flat = True, - expected_result = ["||", "A", "||", "B"]), - UseReduceTestCase( - "foo? ( A ) foo? ( B )", - flat = True, - expected_result = []), - UseReduceTestCase( - "foo? ( A ) foo? ( B )", - uselist = ["foo"], - flat = True, - expected_result = ["A", "B"]), - - #use flag validation - UseReduceTestCase( - "foo? ( A )", - uselist = ["foo"], - is_valid_flag = self.always_true, - expected_result = ["A"]), - UseReduceTestCase( - "foo? ( A )", - is_valid_flag = self.always_true, - expected_result = []), - - #token_class - UseReduceTestCase( - "foo? ( dev-libs/A )", - uselist = ["foo"], - token_class=Atom, - expected_result = ["dev-libs/A"]), - UseReduceTestCase( - "foo? ( dev-libs/A )", - token_class=Atom, - expected_result = []), - ) - - test_cases_xfail = ( - UseReduceTestCase("? ( A )"), - UseReduceTestCase("!? ( A )"), - UseReduceTestCase("( A"), - UseReduceTestCase("A )"), - UseReduceTestCase("||( A B )"), - UseReduceTestCase("|| (A B )"), - UseReduceTestCase("|| ( A B)"), - UseReduceTestCase("|| ( A B"), - UseReduceTestCase("|| A B )"), - UseReduceTestCase("|| A B"), - UseReduceTestCase("|| ( A B ) )"), - UseReduceTestCase("|| || B C"), - UseReduceTestCase("|| ( A B || )"), - UseReduceTestCase("a? A"), - UseReduceTestCase("( || ( || || ( A ) foo? ( B ) ) )"), - UseReduceTestCase("( || ( || bar? ( A ) foo? ( B ) ) )"), - UseReduceTestCase("foo?"), - UseReduceTestCase("foo? || ( A )"), - UseReduceTestCase("|| ( )"), - UseReduceTestCase("foo? ( )"), - - #SRC_URI stuff - UseReduceTestCase("http://foo/bar -> blah.tbz2", is_src_uri = True, eapi = EAPI_WITHOUT_SRC_URI_ARROWS), - UseReduceTestCase("|| ( http://foo/bar -> blah.tbz2 )", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS), - UseReduceTestCase("http://foo/bar -> foo? 
( ftp://foo/a )", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS), - UseReduceTestCase("http://foo/bar blah.tbz2 ->", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS), - UseReduceTestCase("-> http://foo/bar blah.tbz2 )", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS), - UseReduceTestCase("http://foo/bar ->", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS), - UseReduceTestCase("http://foo/bar -> foo? ( http://foo.com/foo )", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS), - UseReduceTestCase("foo? ( http://foo/bar -> ) blah.tbz2", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS), - UseReduceTestCase("http://foo/bar -> foo/blah.tbz2", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS), - UseReduceTestCase("http://foo/bar -> -> bar.tbz2 foo? ( ftp://foo/a )", is_src_uri = True, eapi = EAPI_WITH_SRC_URI_ARROWS), - - UseReduceTestCase("http://foo/bar -> bar.tbz2 foo? ( ftp://foo/a )", is_src_uri = False, eapi = EAPI_WITH_SRC_URI_ARROWS), - - UseReduceTestCase( - "A", - opconvert = True, - flat = True), - - #use flag validation - UseReduceTestCase("1.0? ( A )"), - UseReduceTestCase("!1.0? ( A )"), - UseReduceTestCase("!? ( A )"), - UseReduceTestCase("!?? ( A )"), - UseReduceTestCase( - "foo? ( A )", - is_valid_flag = self.always_false, - ), - UseReduceTestCase( - "foo? ( A )", - uselist = ["foo"], - is_valid_flag = self.always_false, - ), - - #token_class - UseReduceTestCase( - "foo? ( A )", - uselist = ["foo"], - token_class=Atom), - UseReduceTestCase( - "A(B", - token_class=Atom), - ) - - for test_case in test_cases: - # If it fails then show the input, since lots of our - # test cases have the same output but different input, - # making it difficult deduce which test has failed. 
- self.assertEqual(test_case.run(), test_case.expected_result, - "input: '%s' result: %s != %s" % (test_case.deparray, - test_case.run(), test_case.expected_result)) - - for test_case in test_cases_xfail: - self.assertRaisesMsg(test_case.deparray, (InvalidDependString, ValueError), test_case.run) diff --git a/portage_with_autodep/pym/portage/tests/ebuild/__init__.py b/portage_with_autodep/pym/portage/tests/ebuild/__init__.py deleted file mode 100644 index e2d487e..0000000 --- a/portage_with_autodep/pym/portage/tests/ebuild/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright 1998-2007 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 diff --git a/portage_with_autodep/pym/portage/tests/ebuild/__test__ b/portage_with_autodep/pym/portage/tests/ebuild/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/ebuild/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/ebuild/test_array_fromfile_eof.py b/portage_with_autodep/pym/portage/tests/ebuild/test_array_fromfile_eof.py deleted file mode 100644 index d8277f2..0000000 --- a/portage_with_autodep/pym/portage/tests/ebuild/test_array_fromfile_eof.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2009 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import array -import tempfile - -from portage import _unicode_decode -from portage import _unicode_encode -from portage.tests import TestCase - -class ArrayFromfileEofTestCase(TestCase): - - def testArrayFromfileEof(self): - # This tests if the following python issue is fixed - # in the currently running version of python: - # http://bugs.python.org/issue5334 - - input_data = "an arbitrary string" - input_bytes = _unicode_encode(input_data, - encoding='utf_8', errors='strict') - f = tempfile.TemporaryFile() - f.write(input_bytes) - - f.seek(0) - data = [] - eof = False - while not eof: - a = array.array('B') - try: - a.fromfile(f, 
len(input_bytes) + 1) - except (EOFError, IOError): - # python-3.0 lost data here - eof = True - - if not a: - eof = True - else: - data.append(_unicode_decode(a.tostring(), - encoding='utf_8', errors='strict')) - - f.close() - - self.assertEqual(input_data, ''.join(data)) diff --git a/portage_with_autodep/pym/portage/tests/ebuild/test_config.py b/portage_with_autodep/pym/portage/tests/ebuild/test_config.py deleted file mode 100644 index 7bec8c6..0000000 --- a/portage_with_autodep/pym/portage/tests/ebuild/test_config.py +++ /dev/null @@ -1,198 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import portage -from portage import os -from portage.package.ebuild.config import config -from portage.package.ebuild._config.LicenseManager import LicenseManager -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class ConfigTestCase(TestCase): - - def testClone(self): - """ - Test the clone via constructor. - """ - - ebuilds = { - "dev-libs/A-1": { }, - } - - playground = ResolverPlayground(ebuilds=ebuilds) - try: - settings = config(clone=playground.settings) - result = playground.run(["=dev-libs/A-1"]) - pkg, existing_node = result.depgraph._select_package( - playground.root, "=dev-libs/A-1") - settings.setcpv(pkg) - - # clone after setcpv tests deepcopy of LazyItemsDict - settings2 = config(clone=settings) - finally: - playground.cleanup() - - def testFeaturesMutation(self): - """ - Test whether mutation of config.features updates the FEATURES - variable and persists through config.regenerate() calls. Also - verify that features_set._prune_overrides() works correctly. 
- """ - playground = ResolverPlayground() - try: - settings = config(clone=playground.settings) - - settings.features.add('noclean') - self.assertEqual('noclean' in settings['FEATURES'].split(), True) - settings.regenerate() - self.assertEqual('noclean' in settings['FEATURES'].split(),True) - - settings.features.discard('noclean') - self.assertEqual('noclean' in settings['FEATURES'].split(), False) - settings.regenerate() - self.assertEqual('noclean' in settings['FEATURES'].split(), False) - - settings.features.add('noclean') - self.assertEqual('noclean' in settings['FEATURES'].split(), True) - settings.regenerate() - self.assertEqual('noclean' in settings['FEATURES'].split(),True) - - # before: ['noclean', '-noclean', 'noclean'] - settings.features._prune_overrides() - # after: ['noclean'] - self.assertEqual(settings._features_overrides.count('noclean'), 1) - self.assertEqual(settings._features_overrides.count('-noclean'), 0) - - settings.features.remove('noclean') - - # before: ['noclean', '-noclean'] - settings.features._prune_overrides() - # after: ['-noclean'] - self.assertEqual(settings._features_overrides.count('noclean'), 0) - self.assertEqual(settings._features_overrides.count('-noclean'), 1) - finally: - playground.cleanup() - - def testLicenseManager(self): - - user_config = { - "package.license": - ( - "dev-libs/* TEST", - "dev-libs/A -TEST2", - "=dev-libs/A-2 TEST3 @TEST", - "*/* @EULA TEST2", - "=dev-libs/C-1 *", - "=dev-libs/C-2 -*", - ), - } - - playground = ResolverPlayground(user_config=user_config) - try: - portage.util.noiselimit = -2 - - license_group_locations = (os.path.join(playground.portdir, "profiles"),) - pkg_license = os.path.join(playground.eroot, "etc", "portage") - - lic_man = LicenseManager(license_group_locations, pkg_license) - - self.assertEqual(lic_man._accept_license_str, None) - self.assertEqual(lic_man._accept_license, None) - self.assertEqual(lic_man._license_groups, {"EULA": frozenset(["TEST"])}) - 
self.assertEqual(lic_man._undef_lic_groups, set(["TEST"])) - - self.assertEqual(lic_man.extract_global_changes(), "TEST TEST2") - self.assertEqual(lic_man.extract_global_changes(), "") - - lic_man.set_accept_license_str("TEST TEST2") - self.assertEqual(lic_man._getPkgAcceptLicense("dev-libs/B-1", "0", None), ["TEST", "TEST2", "TEST"]) - self.assertEqual(lic_man._getPkgAcceptLicense("dev-libs/A-1", "0", None), ["TEST", "TEST2", "TEST", "-TEST2"]) - self.assertEqual(lic_man._getPkgAcceptLicense("dev-libs/A-2", "0", None), ["TEST", "TEST2", "TEST", "-TEST2", "TEST3", "@TEST"]) - - self.assertEqual(lic_man.get_prunned_accept_license("dev-libs/B-1", [], "TEST", "0", None), "TEST") - self.assertEqual(lic_man.get_prunned_accept_license("dev-libs/A-1", [], "-TEST2", "0", None), "") - self.assertEqual(lic_man.get_prunned_accept_license("dev-libs/A-2", [], "|| ( TEST TEST2 )", "0", None), "TEST") - self.assertEqual(lic_man.get_prunned_accept_license("dev-libs/C-1", [], "TEST5", "0", None), "TEST5") - self.assertEqual(lic_man.get_prunned_accept_license("dev-libs/C-2", [], "TEST2", "0", None), "") - - self.assertEqual(lic_man.getMissingLicenses("dev-libs/B-1", [], "TEST", "0", None), []) - self.assertEqual(lic_man.getMissingLicenses("dev-libs/A-1", [], "-TEST2", "0", None), ["-TEST2"]) - self.assertEqual(lic_man.getMissingLicenses("dev-libs/A-2", [], "|| ( TEST TEST2 )", "0", None), []) - self.assertEqual(lic_man.getMissingLicenses("dev-libs/A-3", [], "|| ( TEST2 || ( TEST3 TEST4 ) )", "0", None), ["TEST2", "TEST3", "TEST4"]) - self.assertEqual(lic_man.getMissingLicenses("dev-libs/C-1", [], "TEST5", "0", None), []) - self.assertEqual(lic_man.getMissingLicenses("dev-libs/C-2", [], "TEST2", "0", None), ["TEST2"]) - self.assertEqual(lic_man.getMissingLicenses("dev-libs/D-1", [], "", "0", None), []) - finally: - portage.util.noiselimit = 0 - playground.cleanup() - - def testPackageMaskOrder(self): - - ebuilds = { - "dev-libs/A-1": { }, - "dev-libs/B-1": { }, - "dev-libs/C-1": { }, 
- "dev-libs/D-1": { }, - "dev-libs/E-1": { }, - } - - repo_configs = { - "test_repo": { - "package.mask": - ( - "dev-libs/A", - "dev-libs/C", - ), - } - } - - profile = { - "package.mask": - ( - "-dev-libs/A", - "dev-libs/B", - "-dev-libs/B", - "dev-libs/D", - ), - } - - user_config = { - "package.mask": - ( - "-dev-libs/C", - "-dev-libs/D", - "dev-libs/E", - ), - } - - test_cases = ( - ResolverPlaygroundTestCase( - ["dev-libs/A"], - options = { "--autounmask": 'n' }, - success = False), - ResolverPlaygroundTestCase( - ["dev-libs/B"], - success = True, - mergelist = ["dev-libs/B-1"]), - ResolverPlaygroundTestCase( - ["dev-libs/C"], - success = True, - mergelist = ["dev-libs/C-1"]), - ResolverPlaygroundTestCase( - ["dev-libs/D"], - success = True, - mergelist = ["dev-libs/D-1"]), - ResolverPlaygroundTestCase( - ["dev-libs/E"], - options = { "--autounmask": 'n' }, - success = False), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, repo_configs=repo_configs, \ - profile=profile, user_config=user_config) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/ebuild/test_doebuild_spawn.py b/portage_with_autodep/pym/portage/tests/ebuild/test_doebuild_spawn.py deleted file mode 100644 index ed08b2a..0000000 --- a/portage_with_autodep/pym/portage/tests/ebuild/test_doebuild_spawn.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage import os -from portage import _python_interpreter -from portage import _shell_quote -from portage.const import EBUILD_SH_BINARY -from portage.package.ebuild.config import config -from portage.package.ebuild.doebuild import spawn as doebuild_spawn -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground -from 
_emerge.EbuildPhase import EbuildPhase -from _emerge.MiscFunctionsProcess import MiscFunctionsProcess -from _emerge.Package import Package -from _emerge.PollScheduler import PollScheduler - -class DoebuildSpawnTestCase(TestCase): - """ - Invoke portage.package.ebuild.doebuild.spawn() with a - minimal environment. This gives coverage to some of - the ebuild execution internals, like ebuild.sh, - AbstractEbuildProcess, and EbuildIpcDaemon. - """ - - def testDoebuildSpawn(self): - playground = ResolverPlayground() - try: - settings = config(clone=playground.settings) - cpv = 'sys-apps/portage-2.1' - metadata = { - 'EAPI' : '2', - 'INHERITED' : 'python eutils', - 'IUSE' : 'build doc epydoc python3 selinux', - 'LICENSE' : 'GPL-2', - 'PROVIDE' : 'virtual/portage', - 'RDEPEND' : '>=app-shells/bash-3.2_p17 >=dev-lang/python-2.6', - 'SLOT' : '0', - } - root_config = playground.trees[playground.root]['root_config'] - pkg = Package(built=False, cpv=cpv, installed=False, - metadata=metadata, root_config=root_config, - type_name='ebuild') - settings.setcpv(pkg) - settings['PORTAGE_PYTHON'] = _python_interpreter - settings['PORTAGE_BUILDDIR'] = os.path.join( - settings['PORTAGE_TMPDIR'], cpv) - settings['T'] = os.path.join( - settings['PORTAGE_BUILDDIR'], 'temp') - for x in ('PORTAGE_BUILDDIR', 'T'): - os.makedirs(settings[x]) - # Create a fake environment, to pretend as if the ebuild - # has been sourced already. - open(os.path.join(settings['T'], 'environment'), 'wb') - - scheduler = PollScheduler().sched_iface - for phase in ('_internal_test',): - - # Test EbuildSpawnProcess by calling doebuild.spawn() with - # returnpid=False. This case is no longer used by portage - # internals since EbuildPhase is used instead and that passes - # returnpid=True to doebuild.spawn(). 
- rval = doebuild_spawn("%s %s" % (_shell_quote( - os.path.join(settings["PORTAGE_BIN_PATH"], - os.path.basename(EBUILD_SH_BINARY))), phase), - settings, free=1) - self.assertEqual(rval, os.EX_OK) - - ebuild_phase = EbuildPhase(background=False, - phase=phase, scheduler=scheduler, - settings=settings) - ebuild_phase.start() - ebuild_phase.wait() - self.assertEqual(ebuild_phase.returncode, os.EX_OK) - - ebuild_phase = MiscFunctionsProcess(background=False, - commands=['success_hooks'], - scheduler=scheduler, settings=settings) - ebuild_phase.start() - ebuild_phase.wait() - self.assertEqual(ebuild_phase.returncode, os.EX_OK) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/ebuild/test_ipc_daemon.py b/portage_with_autodep/pym/portage/tests/ebuild/test_ipc_daemon.py deleted file mode 100644 index b5b4796..0000000 --- a/portage_with_autodep/pym/portage/tests/ebuild/test_ipc_daemon.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import shutil -import tempfile -import time -from portage import os -from portage import _python_interpreter -from portage.tests import TestCase -from portage.const import PORTAGE_BIN_PATH -from portage.const import PORTAGE_PYM_PATH -from portage.const import BASH_BINARY -from portage.package.ebuild._ipc.ExitCommand import ExitCommand -from portage.util import ensure_dirs -from _emerge.SpawnProcess import SpawnProcess -from _emerge.EbuildBuildDir import EbuildBuildDir -from _emerge.EbuildIpcDaemon import EbuildIpcDaemon -from _emerge.TaskScheduler import TaskScheduler - -class IpcDaemonTestCase(TestCase): - - _SCHEDULE_TIMEOUT = 40000 # 40 seconds - - def testIpcDaemon(self): - tmpdir = tempfile.mkdtemp() - build_dir = None - try: - env = {} - - # Pass along PORTAGE_USERNAME and PORTAGE_GRPNAME since they - # need to be inherited by ebuild subprocesses. 
- if 'PORTAGE_USERNAME' in os.environ: - env['PORTAGE_USERNAME'] = os.environ['PORTAGE_USERNAME'] - if 'PORTAGE_GRPNAME' in os.environ: - env['PORTAGE_GRPNAME'] = os.environ['PORTAGE_GRPNAME'] - - env['PORTAGE_PYTHON'] = _python_interpreter - env['PORTAGE_BIN_PATH'] = PORTAGE_BIN_PATH - env['PORTAGE_PYM_PATH'] = PORTAGE_PYM_PATH - env['PORTAGE_BUILDDIR'] = os.path.join(tmpdir, 'cat', 'pkg-1') - - task_scheduler = TaskScheduler(max_jobs=2) - build_dir = EbuildBuildDir( - scheduler=task_scheduler.sched_iface, - settings=env) - build_dir.lock() - ensure_dirs(env['PORTAGE_BUILDDIR']) - - input_fifo = os.path.join(env['PORTAGE_BUILDDIR'], '.ipc_in') - output_fifo = os.path.join(env['PORTAGE_BUILDDIR'], '.ipc_out') - os.mkfifo(input_fifo) - os.mkfifo(output_fifo) - - for exitcode in (0, 1, 2): - exit_command = ExitCommand() - commands = {'exit' : exit_command} - daemon = EbuildIpcDaemon(commands=commands, - input_fifo=input_fifo, - output_fifo=output_fifo, - scheduler=task_scheduler.sched_iface) - proc = SpawnProcess( - args=[BASH_BINARY, "-c", - '"$PORTAGE_BIN_PATH"/ebuild-ipc exit %d' % exitcode], - env=env, scheduler=task_scheduler.sched_iface) - - self.received_command = False - def exit_command_callback(): - self.received_command = True - proc.cancel() - daemon.cancel() - - exit_command.reply_hook = exit_command_callback - task_scheduler.add(daemon) - task_scheduler.add(proc) - start_time = time.time() - task_scheduler.run(timeout=self._SCHEDULE_TIMEOUT) - task_scheduler.clear() - - self.assertEqual(self.received_command, True, - "command not received after %d seconds" % \ - (time.time() - start_time,)) - self.assertEqual(proc.isAlive(), False) - self.assertEqual(daemon.isAlive(), False) - self.assertEqual(exit_command.exitcode, exitcode) - - # Intentionally short timeout test for QueueScheduler.run() - sleep_time_s = 10 # 10.000 seconds - short_timeout_ms = 10 # 0.010 seconds - - for i in range(3): - exit_command = ExitCommand() - commands = {'exit' : exit_command} 
- daemon = EbuildIpcDaemon(commands=commands, - input_fifo=input_fifo, - output_fifo=output_fifo, - scheduler=task_scheduler.sched_iface) - proc = SpawnProcess( - args=[BASH_BINARY, "-c", 'exec sleep %d' % sleep_time_s], - env=env, scheduler=task_scheduler.sched_iface) - - self.received_command = False - def exit_command_callback(): - self.received_command = True - proc.cancel() - daemon.cancel() - - exit_command.reply_hook = exit_command_callback - task_scheduler.add(daemon) - task_scheduler.add(proc) - start_time = time.time() - task_scheduler.run(timeout=short_timeout_ms) - task_scheduler.clear() - - self.assertEqual(self.received_command, False, - "command received after %d seconds" % \ - (time.time() - start_time,)) - self.assertEqual(proc.isAlive(), False) - self.assertEqual(daemon.isAlive(), False) - self.assertEqual(proc.returncode == os.EX_OK, False) - - finally: - if build_dir is not None: - build_dir.unlock() - shutil.rmtree(tmpdir) diff --git a/portage_with_autodep/pym/portage/tests/ebuild/test_pty_eof.py b/portage_with_autodep/pym/portage/tests/ebuild/test_pty_eof.py deleted file mode 100644 index 4b6ff21..0000000 --- a/portage_with_autodep/pym/portage/tests/ebuild/test_pty_eof.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2009-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.util._pty import _can_test_pty_eof, _test_pty_eof - -class PtyEofFdopenBufferedTestCase(TestCase): - - def testPtyEofFdopenBuffered(self): - # This tests if the following python issue is fixed yet: - # http://bugs.python.org/issue5380 - # Since it might not be fixed, mark as todo. - self.todo = True - # The result is only valid if openpty does not raise EnvironmentError. 
- if _can_test_pty_eof(): - try: - self.assertEqual(_test_pty_eof(fdopen_buffered=True), True) - except EnvironmentError: - pass - -class PtyEofFdopenUnBufferedTestCase(TestCase): - def testPtyEofFdopenUnBuffered(self): - # New development: It appears that array.fromfile() is usable - # with python3 as long as fdopen is called with a bufsize - # argument of 0. - - # The result is only valid if openpty does not raise EnvironmentError. - if _can_test_pty_eof(): - try: - self.assertEqual(_test_pty_eof(), True) - except EnvironmentError: - pass diff --git a/portage_with_autodep/pym/portage/tests/ebuild/test_spawn.py b/portage_with_autodep/pym/portage/tests/ebuild/test_spawn.py deleted file mode 100644 index fea4738..0000000 --- a/portage_with_autodep/pym/portage/tests/ebuild/test_spawn.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 1998-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import errno -import io -import sys -import tempfile -from portage import os -from portage import _encodings -from portage import _unicode_encode -from portage.const import BASH_BINARY -from portage.tests import TestCase -from _emerge.SpawnProcess import SpawnProcess -from _emerge.PollScheduler import PollScheduler - -class SpawnTestCase(TestCase): - - def testLogfile(self): - logfile = None - try: - fd, logfile = tempfile.mkstemp() - os.close(fd) - null_fd = os.open('/dev/null', os.O_RDWR) - test_string = 2 * "blah blah blah\n" - scheduler = PollScheduler().sched_iface - proc = SpawnProcess( - args=[BASH_BINARY, "-c", - "echo -n '%s'" % test_string], - env={}, fd_pipes={0:sys.stdin.fileno(), 1:null_fd, 2:null_fd}, - scheduler=scheduler, - logfile=logfile) - proc.start() - os.close(null_fd) - self.assertEqual(proc.wait(), os.EX_OK) - f = io.open(_unicode_encode(logfile, - encoding=_encodings['fs'], errors='strict'), - mode='r', encoding=_encodings['content'], errors='strict') - log_content = f.read() - f.close() - # When logging passes through 
a pty, this comparison will fail - # unless the oflag terminal attributes have the termios.OPOST - # bit disabled. Otherwise, tranformations such as \n -> \r\n - # may occur. - self.assertEqual(test_string, log_content) - finally: - if logfile: - try: - os.unlink(logfile) - except EnvironmentError as e: - if e.errno != errno.ENOENT: - raise - del e diff --git a/portage_with_autodep/pym/portage/tests/env/__init__.py b/portage_with_autodep/pym/portage/tests/env/__init__.py deleted file mode 100644 index cbeabe5..0000000 --- a/portage_with_autodep/pym/portage/tests/env/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# tests/portage/env/__init__.py -- Portage Unit Test functionality -# Copyright 2007 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - diff --git a/portage_with_autodep/pym/portage/tests/env/__test__ b/portage_with_autodep/pym/portage/tests/env/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/env/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/env/config/__init__.py b/portage_with_autodep/pym/portage/tests/env/config/__init__.py deleted file mode 100644 index ef5cc43..0000000 --- a/portage_with_autodep/pym/portage/tests/env/config/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# tests/portage/env/config/__init__.py -- Portage Unit Test functionality -# Copyright 2007 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - diff --git a/portage_with_autodep/pym/portage/tests/env/config/__test__ b/portage_with_autodep/pym/portage/tests/env/config/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/env/config/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/env/config/test_PackageKeywordsFile.py b/portage_with_autodep/pym/portage/tests/env/config/test_PackageKeywordsFile.py deleted file mode 100644 index f1e9e98..0000000 --- 
a/portage_with_autodep/pym/portage/tests/env/config/test_PackageKeywordsFile.py +++ /dev/null @@ -1,40 +0,0 @@ -# test_PackageKeywordsFile.py -- Portage Unit Testing Functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage import os -from portage.tests import TestCase -from portage.env.config import PackageKeywordsFile -from tempfile import mkstemp - -class PackageKeywordsFileTestCase(TestCase): - - cpv = ['sys-apps/portage'] - keywords = ['~x86', 'amd64', '-mips'] - - def testPackageKeywordsFile(self): - """ - A simple test to ensure the load works properly - """ - - self.BuildFile() - try: - f = PackageKeywordsFile(self.fname) - f.load() - i = 0 - for cpv, keyword in f.items(): - self.assertEqual( cpv, self.cpv[i] ) - [k for k in keyword if self.assertTrue(k in self.keywords)] - i = i + 1 - finally: - self.NukeFile() - - def BuildFile(self): - fd, self.fname = mkstemp() - f = os.fdopen(fd, 'w') - for c in self.cpv: - f.write("%s %s\n" % (c,' '.join(self.keywords))) - f.close() - - def NukeFile(self): - os.unlink(self.fname) diff --git a/portage_with_autodep/pym/portage/tests/env/config/test_PackageMaskFile.py b/portage_with_autodep/pym/portage/tests/env/config/test_PackageMaskFile.py deleted file mode 100644 index 0c5b30f..0000000 --- a/portage_with_autodep/pym/portage/tests/env/config/test_PackageMaskFile.py +++ /dev/null @@ -1,29 +0,0 @@ -# test_PackageMaskFile.py -- Portage Unit Testing Functionality -# Copyright 2007 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage import os -from portage.env.config import PackageMaskFile -from portage.tests import TestCase, test_cps -from tempfile import mkstemp - -class PackageMaskFileTestCase(TestCase): - - def testPackageMaskFile(self): - self.BuildFile() - try: - f = PackageMaskFile(self.fname) - f.load() - for atom in f: - self.assertTrue(atom in test_cps) - finally: - self.NukeFile() - - def 
BuildFile(self): - fd, self.fname = mkstemp() - f = os.fdopen(fd, 'w') - f.write("\n".join(test_cps)) - f.close() - - def NukeFile(self): - os.unlink(self.fname) diff --git a/portage_with_autodep/pym/portage/tests/env/config/test_PackageUseFile.py b/portage_with_autodep/pym/portage/tests/env/config/test_PackageUseFile.py deleted file mode 100644 index 7a38067..0000000 --- a/portage_with_autodep/pym/portage/tests/env/config/test_PackageUseFile.py +++ /dev/null @@ -1,37 +0,0 @@ -# test_PackageUseFile.py -- Portage Unit Testing Functionality -# Copyright 2007 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage import os -from portage.tests import TestCase -from portage.env.config import PackageUseFile -from tempfile import mkstemp - - -class PackageUseFileTestCase(TestCase): - - cpv = 'sys-apps/portage' - useflags = ['cdrom', 'far', 'boo', 'flag', 'blat'] - - def testPackageUseFile(self): - """ - A simple test to ensure the load works properly - """ - self.BuildFile() - try: - f = PackageUseFile(self.fname) - f.load() - for cpv, use in f.items(): - self.assertEqual( cpv, self.cpv ) - [flag for flag in use if self.assertTrue(flag in self.useflags)] - finally: - self.NukeFile() - - def BuildFile(self): - fd, self.fname = mkstemp() - f = os.fdopen(fd, 'w') - f.write("%s %s" % (self.cpv, ' '.join(self.useflags))) - f.close() - - def NukeFile(self): - os.unlink(self.fname) diff --git a/portage_with_autodep/pym/portage/tests/env/config/test_PortageModulesFile.py b/portage_with_autodep/pym/portage/tests/env/config/test_PortageModulesFile.py deleted file mode 100644 index 2cd1a8a..0000000 --- a/portage_with_autodep/pym/portage/tests/env/config/test_PortageModulesFile.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2006-2009 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage import os -from portage.tests import TestCase -from portage.env.config import PortageModulesFile -from 
tempfile import mkstemp - -class PortageModulesFileTestCase(TestCase): - - keys = ['foo.bar','baz','bob','extra_key'] - invalid_keys = ['',""] - modules = ['spanky','zmedico','antarus','ricer','5','6'] - - def setUp(self): - self.items = {} - for k, v in zip(self.keys + self.invalid_keys, - self.modules): - self.items[k] = v - - def testPortageModulesFile(self): - self.BuildFile() - f = PortageModulesFile(self.fname) - f.load() - for k in self.keys: - self.assertEqual(f[k], self.items[k]) - for ik in self.invalid_keys: - self.assertEqual(False, ik in f) - self.NukeFile() - - def BuildFile(self): - fd, self.fname = mkstemp() - f = os.fdopen(fd, 'w') - for k, v in self.items.items(): - f.write('%s=%s\n' % (k,v)) - f.close() - - def NukeFile(self): - os.unlink(self.fname) diff --git a/portage_with_autodep/pym/portage/tests/lafilefixer/__init__.py b/portage_with_autodep/pym/portage/tests/lafilefixer/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/lafilefixer/__init__.py +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/lafilefixer/__test__ b/portage_with_autodep/pym/portage/tests/lafilefixer/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/lafilefixer/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/lafilefixer/test_lafilefixer.py b/portage_with_autodep/pym/portage/tests/lafilefixer/test_lafilefixer.py deleted file mode 100644 index 0bcffaa..0000000 --- a/portage_with_autodep/pym/portage/tests/lafilefixer/test_lafilefixer.py +++ /dev/null @@ -1,145 +0,0 @@ -# test_lafilefixer.py -- Portage Unit Testing Functionality -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.exception import InvalidData - -class test_lafilefixer(TestCase): - - def get_test_cases_clean(self): - yield b"dlname='libfoo.so.1'\n" + \ - 
b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' -lm'\n" + \ - b"current=6\n" + \ - b"age=0\n" + \ - b"revision=2\n" + \ - b"installed=yes\n" + \ - b"dlopen=''\n" + \ - b"dlpreopen=''\n" + \ - b"libdir='/usr/lib64'\n" - yield b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' -lm'\n" + \ - b"current=6\n" + \ - b"age=0\n" + \ - b"revision=2\n" + \ - b"installed=yes\n" + \ - b"dlopen=''\n" + \ - b"dlpreopen=''\n" + \ - b"libdir='/usr/lib64'\n" - yield b"dependency_libs=' liba.la /usr/lib64/bar.la -lc'\n" - - def get_test_cases_update(self): - #.la -> -l* - yield b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' /usr/lib64/liba.la /usr/lib64/libb.la -lc'\n", \ - b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' -L/usr/lib64 -la -lb -lc'\n" - #move stuff into inherited_linker_flags - yield b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' /usr/lib64/liba.la -pthread /usr/lib64/libb.la -lc'\n" + \ - b"inherited_linker_flags=''\n", \ - b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' -L/usr/lib64 -la -lb -lc'\n" + \ - b"inherited_linker_flags=' -pthread'\n" - #reorder - yield b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' /usr/lib64/liba.la -R/usr/lib64 /usr/lib64/libb.la -lc'\n", \ - b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - 
b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' -R/usr/lib64 -L/usr/lib64 -la -lb -lc'\n" - #remove duplicates from dependency_libs (the original version didn't do it for inherited_linker_flags) - yield b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' /usr/lib64/liba.la /usr/lib64/libc.la -pthread -mt" + \ - b" -L/usr/lib -R/usr/lib64 -lc /usr/lib64/libb.la -lc'\n" +\ - b"inherited_linker_flags=' -pthread -pthread'\n", \ - b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' -R/usr/lib64 -L/usr/lib64 -L/usr/lib -la -lc -lb'\n" +\ - b"inherited_linker_flags=' -pthread -pthread -mt'\n" - #-L rewriting - yield b"dependency_libs=' -L/usr/X11R6/lib'\n", \ - b"dependency_libs=' -L/usr/lib'\n" - yield b"dependency_libs=' -L/usr/local/lib'\n", \ - b"dependency_libs=' -L/usr/lib'\n" - yield b"dependency_libs=' -L/usr/lib64/pkgconfig/../..'\n", \ - b"dependency_libs=' -L/usr'\n" - yield b"dependency_libs=' -L/usr/lib/pkgconfig/..'\n", \ - b"dependency_libs=' -L/usr/lib'\n" - yield b"dependency_libs=' -L/usr/lib/pkgconfig/../.. 
-L/usr/lib/pkgconfig/..'\n", \ - b"dependency_libs=' -L/usr -L/usr/lib'\n" - #we once got a backtrace on this one - yield b"dependency_libs=' /usr/lib64/libMagickCore.la -L/usr/lib64 -llcms2 /usr/lib64/libtiff.la " + \ - b"-ljbig -lc /usr/lib64/libfreetype.la /usr/lib64/libjpeg.la /usr/lib64/libXext.la " + \ - b"/usr/lib64/libXt.la /usr/lib64/libSM.la -lICE -luuid /usr/lib64/libICE.la /usr/lib64/libX11.la " + \ - b"/usr/lib64/libxcb.la /usr/lib64/libXau.la /usr/lib64/libXdmcp.la -lbz2 -lz -lm " + \ - b"/usr/lib/gcc/x86_64-pc-linux-gnu/4.4.4/libgomp.la -lrt -lpthread /usr/lib64/libltdl.la -ldl " + \ - b"/usr/lib64/libfpx.la -lstdc++'", \ - b"dependency_libs=' -L/usr/lib64 -L/usr/lib/gcc/x86_64-pc-linux-gnu/4.4.4 -lMagickCore -llcms2 " + \ - b"-ltiff -ljbig -lc -lfreetype -ljpeg -lXext -lXt -lSM -lICE -luuid -lX11 -lxcb -lXau -lXdmcp " + \ - b"-lbz2 -lz -lm -lgomp -lrt -lpthread -lltdl -ldl -lfpx -lstdc++'" - - - def get_test_cases_broken(self): - yield b"" - #no dependency_libs - yield b"dlname='libfoo.so.1'\n" + \ - b"current=6\n" + \ - b"age=0\n" + \ - b"revision=2\n" - #borken dependency_libs - yield b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' /usr/lib64/liba.la /usr/lib64/libb.la -lc' \n" - #borken dependency_libs - yield b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' /usr/lib64/liba.la /usr/lib64/libb.la -lc\n" - #crap in dependency_libs - yield b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' /usr/lib64/liba.la /usr/lib64/libb.la -lc /-lstdc++'\n" - #dependency_libs twice - yield b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"dependency_libs=' /usr/lib64/liba.la 
/usr/lib64/libb.la -lc /-lstdc++'\n" +\ - b"dependency_libs=' /usr/lib64/liba.la /usr/lib64/libb.la -lc /-lstdc++'\n" - #inherited_linker_flags twice - yield b"dlname='libfoo.so.1'\n" + \ - b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + \ - b"old_library='libpdf.a'\n" + \ - b"inherited_linker_flags=''\n" +\ - b"inherited_linker_flags=''\n" - - def testlafilefixer(self): - from portage.util.lafilefixer import _parse_lafile_contents, rewrite_lafile - - for clean_contents in self.get_test_cases_clean(): - self.assertEqual(rewrite_lafile(clean_contents), (False, None)) - - for original_contents, fixed_contents in self.get_test_cases_update(): - self.assertEqual(rewrite_lafile(original_contents), (True, fixed_contents)) - - for broken_contents in self.get_test_cases_broken(): - self.assertRaises(InvalidData, rewrite_lafile, broken_contents) diff --git a/portage_with_autodep/pym/portage/tests/lazyimport/__init__.py b/portage_with_autodep/pym/portage/tests/lazyimport/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/lazyimport/__init__.py +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/lazyimport/__test__ b/portage_with_autodep/pym/portage/tests/lazyimport/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/lazyimport/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/lazyimport/test_lazy_import_portage_baseline.py b/portage_with_autodep/pym/portage/tests/lazyimport/test_lazy_import_portage_baseline.py deleted file mode 100644 index 08ccfa7..0000000 --- a/portage_with_autodep/pym/portage/tests/lazyimport/test_lazy_import_portage_baseline.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import re -import portage -from portage import os -from portage.const import PORTAGE_PYM_PATH -from portage.tests import TestCase - 
-from _emerge.PollScheduler import PollScheduler -from _emerge.PipeReader import PipeReader -from _emerge.SpawnProcess import SpawnProcess - -class LazyImportPortageBaselineTestCase(TestCase): - - _module_re = re.compile(r'^(portage|repoman|_emerge)\.') - - _baseline_imports = frozenset([ - 'portage.const', 'portage.localization', - 'portage.proxy', 'portage.proxy.lazyimport', - 'portage.proxy.objectproxy', - 'portage._selinux', - ]) - - _baseline_import_cmd = [portage._python_interpreter, '-c', ''' -import os -import sys -sys.path.insert(0, os.environ["PORTAGE_PYM_PATH"]) -import portage -sys.stdout.write(" ".join(k for k in sys.modules - if sys.modules[k] is not None)) -'''] - - def testLazyImportPortageBaseline(self): - """ - Check what modules are imported by a baseline module import. - """ - - env = os.environ.copy() - pythonpath = env.get('PYTHONPATH') - if pythonpath is not None and not pythonpath.strip(): - pythonpath = None - if pythonpath is None: - pythonpath = '' - else: - pythonpath = ':' + pythonpath - pythonpath = PORTAGE_PYM_PATH + pythonpath - env[pythonpath] = pythonpath - - # If python is patched to insert the path of the - # currently installed portage module into sys.path, - # then the above PYTHONPATH override doesn't help. 
- env['PORTAGE_PYM_PATH'] = PORTAGE_PYM_PATH - - scheduler = PollScheduler().sched_iface - master_fd, slave_fd = os.pipe() - master_file = os.fdopen(master_fd, 'rb', 0) - slave_file = os.fdopen(slave_fd, 'wb') - producer = SpawnProcess( - args=self._baseline_import_cmd, - env=env, fd_pipes={1:slave_fd}, - scheduler=scheduler) - producer.start() - slave_file.close() - - consumer = PipeReader( - input_files={"producer" : master_file}, - scheduler=scheduler) - - consumer.start() - consumer.wait() - self.assertEqual(producer.wait(), os.EX_OK) - self.assertEqual(consumer.wait(), os.EX_OK) - - output = consumer.getvalue().decode('ascii', 'replace').split() - - unexpected_modules = " ".join(sorted(x for x in output \ - if self._module_re.match(x) is not None and \ - x not in self._baseline_imports)) - - self.assertEqual("", unexpected_modules) diff --git a/portage_with_autodep/pym/portage/tests/lazyimport/test_preload_portage_submodules.py b/portage_with_autodep/pym/portage/tests/lazyimport/test_preload_portage_submodules.py deleted file mode 100644 index 9d20eba..0000000 --- a/portage_with_autodep/pym/portage/tests/lazyimport/test_preload_portage_submodules.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import portage -from portage.tests import TestCase - -class PreloadPortageSubmodulesTestCase(TestCase): - - def testPreloadPortageSubmodules(self): - """ - Verify that _preload_portage_submodules() doesn't leave any - remaining proxies that refer to the portage.* namespace. 
- """ - portage.proxy.lazyimport._preload_portage_submodules() - for name in portage.proxy.lazyimport._module_proxies: - self.assertEqual(name.startswith('portage.'), False) diff --git a/portage_with_autodep/pym/portage/tests/lint/__init__.pyo b/portage_with_autodep/pym/portage/tests/lint/__init__.pyo Binary files differnew file mode 100644 index 0000000..a1241e5 --- /dev/null +++ b/portage_with_autodep/pym/portage/tests/lint/__init__.pyo diff --git a/portage_with_autodep/pym/portage/tests/lint/test_bash_syntax.pyo b/portage_with_autodep/pym/portage/tests/lint/test_bash_syntax.pyo Binary files differnew file mode 100644 index 0000000..a7ddc80 --- /dev/null +++ b/portage_with_autodep/pym/portage/tests/lint/test_bash_syntax.pyo diff --git a/portage_with_autodep/pym/portage/tests/lint/test_compile_modules.pyo b/portage_with_autodep/pym/portage/tests/lint/test_compile_modules.pyo Binary files differnew file mode 100644 index 0000000..7b1460d --- /dev/null +++ b/portage_with_autodep/pym/portage/tests/lint/test_compile_modules.pyo diff --git a/portage_with_autodep/pym/portage/tests/lint/test_import_modules.pyo b/portage_with_autodep/pym/portage/tests/lint/test_import_modules.pyo Binary files differnew file mode 100644 index 0000000..b3a1d61 --- /dev/null +++ b/portage_with_autodep/pym/portage/tests/lint/test_import_modules.pyo diff --git a/portage_with_autodep/pym/portage/tests/locks/__test__ b/portage_with_autodep/pym/portage/tests/locks/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/locks/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/locks/test_asynchronous_lock.py b/portage_with_autodep/pym/portage/tests/locks/test_asynchronous_lock.py deleted file mode 100644 index 8946caf..0000000 --- a/portage_with_autodep/pym/portage/tests/locks/test_asynchronous_lock.py +++ /dev/null @@ -1,124 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public 
License v2 - -import shutil -import signal -import tempfile - -from portage import os -from portage.tests import TestCase -from _emerge.AsynchronousLock import AsynchronousLock -from _emerge.PollScheduler import PollScheduler - -class AsynchronousLockTestCase(TestCase): - - def testAsynchronousLock(self): - scheduler = PollScheduler().sched_iface - tempdir = tempfile.mkdtemp() - try: - path = os.path.join(tempdir, 'lock_me') - for force_async in (True, False): - for force_dummy in (True, False): - async_lock = AsynchronousLock(path=path, - scheduler=scheduler, _force_async=force_async, - _force_thread=True, - _force_dummy=force_dummy) - async_lock.start() - self.assertEqual(async_lock.wait(), os.EX_OK) - self.assertEqual(async_lock.returncode, os.EX_OK) - async_lock.unlock() - - async_lock = AsynchronousLock(path=path, - scheduler=scheduler, _force_async=force_async, - _force_process=True) - async_lock.start() - self.assertEqual(async_lock.wait(), os.EX_OK) - self.assertEqual(async_lock.returncode, os.EX_OK) - async_lock.unlock() - - finally: - shutil.rmtree(tempdir) - - def testAsynchronousLockWait(self): - scheduler = PollScheduler().sched_iface - tempdir = tempfile.mkdtemp() - try: - path = os.path.join(tempdir, 'lock_me') - lock1 = AsynchronousLock(path=path, scheduler=scheduler) - lock1.start() - self.assertEqual(lock1.wait(), os.EX_OK) - self.assertEqual(lock1.returncode, os.EX_OK) - - # lock2 requires _force_async=True since the portage.locks - # module is not designed to work as intended here if the - # same process tries to lock the same file more than - # one time concurrently. 
- lock2 = AsynchronousLock(path=path, scheduler=scheduler, - _force_async=True, _force_process=True) - lock2.start() - # lock2 should be waiting for lock1 to release - self.assertEqual(lock2.poll(), None) - self.assertEqual(lock2.returncode, None) - - lock1.unlock() - self.assertEqual(lock2.wait(), os.EX_OK) - self.assertEqual(lock2.returncode, os.EX_OK) - lock2.unlock() - finally: - shutil.rmtree(tempdir) - - def testAsynchronousLockWaitCancel(self): - scheduler = PollScheduler().sched_iface - tempdir = tempfile.mkdtemp() - try: - path = os.path.join(tempdir, 'lock_me') - lock1 = AsynchronousLock(path=path, scheduler=scheduler) - lock1.start() - self.assertEqual(lock1.wait(), os.EX_OK) - self.assertEqual(lock1.returncode, os.EX_OK) - lock2 = AsynchronousLock(path=path, scheduler=scheduler, - _force_async=True, _force_process=True) - lock2.start() - # lock2 should be waiting for lock1 to release - self.assertEqual(lock2.poll(), None) - self.assertEqual(lock2.returncode, None) - - # Cancel lock2 and then check wait() and returncode results. - lock2.cancel() - self.assertEqual(lock2.wait() == os.EX_OK, False) - self.assertEqual(lock2.returncode == os.EX_OK, False) - self.assertEqual(lock2.returncode is None, False) - lock1.unlock() - finally: - shutil.rmtree(tempdir) - - def testAsynchronousLockWaitKill(self): - scheduler = PollScheduler().sched_iface - tempdir = tempfile.mkdtemp() - try: - path = os.path.join(tempdir, 'lock_me') - lock1 = AsynchronousLock(path=path, scheduler=scheduler) - lock1.start() - self.assertEqual(lock1.wait(), os.EX_OK) - self.assertEqual(lock1.returncode, os.EX_OK) - lock2 = AsynchronousLock(path=path, scheduler=scheduler, - _force_async=True, _force_process=True) - lock2.start() - # lock2 should be waiting for lock1 to release - self.assertEqual(lock2.poll(), None) - self.assertEqual(lock2.returncode, None) - - # Kill lock2's process and then check wait() and - # returncode results. 
This is intended to simulate - # a SIGINT sent via the controlling tty. - self.assertEqual(lock2._imp is not None, True) - self.assertEqual(lock2._imp._proc is not None, True) - self.assertEqual(lock2._imp._proc.pid is not None, True) - lock2._imp._kill_test = True - os.kill(lock2._imp._proc.pid, signal.SIGTERM) - self.assertEqual(lock2.wait() == os.EX_OK, False) - self.assertEqual(lock2.returncode == os.EX_OK, False) - self.assertEqual(lock2.returncode is None, False) - lock1.unlock() - finally: - shutil.rmtree(tempdir) diff --git a/portage_with_autodep/pym/portage/tests/locks/test_lock_nonblock.py b/portage_with_autodep/pym/portage/tests/locks/test_lock_nonblock.py deleted file mode 100644 index d5748ad..0000000 --- a/portage_with_autodep/pym/portage/tests/locks/test_lock_nonblock.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import shutil -import tempfile -import traceback - -import portage -from portage import os -from portage.tests import TestCase - -class LockNonblockTestCase(TestCase): - - def testLockNonblock(self): - tempdir = tempfile.mkdtemp() - try: - path = os.path.join(tempdir, 'lock_me') - lock1 = portage.locks.lockfile(path) - pid = os.fork() - if pid == 0: - portage.process._setup_pipes({0:0, 1:1, 2:2}) - rval = 2 - try: - try: - lock2 = portage.locks.lockfile(path, flags=os.O_NONBLOCK) - except portage.exception.TryAgain: - rval = os.EX_OK - else: - rval = 1 - portage.locks.unlockfile(lock2) - except SystemExit: - raise - except: - traceback.print_exc() - finally: - os._exit(rval) - - self.assertEqual(pid > 0, True) - pid, status = os.waitpid(pid, 0) - self.assertEqual(os.WIFEXITED(status), True) - self.assertEqual(os.WEXITSTATUS(status), os.EX_OK) - - portage.locks.unlockfile(lock1) - finally: - shutil.rmtree(tempdir) - diff --git a/portage_with_autodep/pym/portage/tests/news/__init__.py b/portage_with_autodep/pym/portage/tests/news/__init__.py deleted 
file mode 100644 index 28a753f..0000000 --- a/portage_with_autodep/pym/portage/tests/news/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# tests/portage.news/__init__.py -- Portage Unit Test functionality -# Copyright 2007 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 diff --git a/portage_with_autodep/pym/portage/tests/news/__test__ b/portage_with_autodep/pym/portage/tests/news/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/news/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/news/test_NewsItem.py b/portage_with_autodep/pym/portage/tests/news/test_NewsItem.py deleted file mode 100644 index a4e76f3..0000000 --- a/portage_with_autodep/pym/portage/tests/news/test_NewsItem.py +++ /dev/null @@ -1,95 +0,0 @@ -# test_NewsItem.py -- Portage Unit Testing Functionality -# Copyright 2007 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage import os -from portage.tests import TestCase -from portage.news import NewsItem -from portage.dbapi.virtual import testdbapi -from tempfile import mkstemp -# TODO(antarus) Make newsitem use a loader so we can load using a string instead of a tempfile - -class NewsItemTestCase(TestCase): - """These tests suck: they use your running config instead of making their own""" - fakeItem = """ -Title: YourSQL Upgrades from 4.0 to 4.1 -Author: Ciaran McCreesh <ciaranm@gentoo.org> -Content-Type: text/plain -Posted: 01-Nov-2005 -Revision: 1 -#Display-If-Installed: -#Display-If-Profile: -#Display-If-Arch: - -YourSQL databases created using YourSQL version 4.0 are incompatible -with YourSQL version 4.1 or later. There is no reliable way to -automate the database format conversion, so action from the system -administrator is required before an upgrade can take place. 
- -Please see the Gentoo YourSQL Upgrade Guide for instructions: - - http://www.gentoo.org/doc/en/yoursql-upgrading.xml - -Also see the official YourSQL documentation: - - http://dev.yoursql.com/doc/refman/4.1/en/upgrading-from-4-0.html - -After upgrading, you should also recompile any packages which link -against YourSQL: - - revdep-rebuild --library=libyoursqlclient.so.12 - -The revdep-rebuild tool is provided by app-portage/gentoolkit. -""" - def setUp(self): - self.profile = "/usr/portage/profiles/default-linux/x86/2007.0/" - self.keywords = "x86" - # Use fake/test dbapi to avoid slow tests - self.vardb = testdbapi() - # self.vardb.inject_cpv('sys-apps/portage-2.0', { 'SLOT' : 0 }) - # Consumers only use ARCH, so avoid portage.settings by using a dict - self.settings = { 'ARCH' : 'x86' } - - def testDisplayIfProfile(self): - tmpItem = self.fakeItem[:].replace("#Display-If-Profile:", "Display-If-Profile: %s" % - self.profile) - - item = self._processItem(tmpItem) - try: - self.assertTrue(item.isRelevant(self.vardb, self.settings, self.profile), - msg="Expected %s to be relevant, but it was not!" % tmpItem) - finally: - os.unlink(item.path) - - def testDisplayIfInstalled(self): - tmpItem = self.fakeItem[:].replace("#Display-If-Installed:", "Display-If-Installed: %s" % - "sys-apps/portage") - - try: - item = self._processItem(tmpItem) - self.assertTrue(item.isRelevant(self.vardb, self.settings, self.profile), - msg="Expected %s to be relevant, but it was not!" % tmpItem) - finally: - os.unlink(item.path) - - def testDisplayIfKeyword(self): - tmpItem = self.fakeItem[:].replace("#Display-If-Keyword:", "Display-If-Keyword: %s" % - self.keywords) - - try: - item = self._processItem(tmpItem) - self.assertTrue(item.isRelevant(self.vardb, self.settings, self.profile), - msg="Expected %s to be relevant, but it was not!" 
% tmpItem) - finally: - os.unlink(item.path) - - def _processItem(self, item): - filename = None - fd, filename = mkstemp() - f = os.fdopen(fd, 'w') - f.write(item) - f.close() - try: - return NewsItem(filename, 0) - except TypeError: - self.fail("Error while processing news item %s" % filename) diff --git a/portage_with_autodep/pym/portage/tests/process/__init__.py b/portage_with_autodep/pym/portage/tests/process/__init__.py deleted file mode 100644 index d19e353..0000000 --- a/portage_with_autodep/pym/portage/tests/process/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# Copyright 1998-2008 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 diff --git a/portage_with_autodep/pym/portage/tests/process/__test__ b/portage_with_autodep/pym/portage/tests/process/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/process/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/process/test_poll.py b/portage_with_autodep/pym/portage/tests/process/test_poll.py deleted file mode 100644 index ee6ee0c..0000000 --- a/portage_with_autodep/pym/portage/tests/process/test_poll.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 1998-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage import os -from portage.tests import TestCase -from _emerge.PollScheduler import PollScheduler -from _emerge.PipeReader import PipeReader -from _emerge.SpawnProcess import SpawnProcess - -class PipeReaderTestCase(TestCase): - - def testPipeReader(self): - """ - Use a poll loop to read data from a pipe and assert that - the data written to the pipe is identical to the data - read from the pipe. 
- """ - - test_string = 2 * "blah blah blah\n" - - scheduler = PollScheduler().sched_iface - master_fd, slave_fd = os.pipe() - master_file = os.fdopen(master_fd, 'rb', 0) - slave_file = os.fdopen(slave_fd, 'wb') - producer = SpawnProcess( - args=["bash", "-c", "echo -n '%s'" % test_string], - env=os.environ, fd_pipes={1:slave_fd}, - scheduler=scheduler) - producer.start() - slave_file.close() - - consumer = PipeReader( - input_files={"producer" : master_file}, - scheduler=scheduler) - - consumer.start() - consumer.wait() - output = consumer.getvalue().decode('ascii', 'replace') - self.assertEqual(test_string, output) diff --git a/portage_with_autodep/pym/portage/tests/resolver/ResolverPlayground.py b/portage_with_autodep/pym/portage/tests/resolver/ResolverPlayground.py deleted file mode 100644 index 6a8e3c1..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/ResolverPlayground.py +++ /dev/null @@ -1,690 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from itertools import permutations -import shutil -import sys -import tempfile -import portage -from portage import os -from portage.const import PORTAGE_BASE_PATH -from portage.dbapi.vartree import vartree -from portage.dbapi.porttree import portagetree -from portage.dbapi.bintree import binarytree -from portage.dep import Atom, _repo_separator -from portage.package.ebuild.config import config -from portage.package.ebuild.digestgen import digestgen -from portage._sets import load_default_config -from portage._sets.base import InternalPackageSet -from portage.versions import catsplit - -import _emerge -from _emerge.actions import calc_depclean -from _emerge.Blocker import Blocker -from _emerge.create_depgraph_params import create_depgraph_params -from _emerge.depgraph import backtrack_depgraph -from _emerge.RootConfig import RootConfig - -if sys.hexversion >= 0x3000000: - basestring = str - -class ResolverPlayground(object): - """ - This 
class helps to create the necessary files on disk and - the needed settings instances, etc. for the resolver to do - its work. - """ - - config_files = frozenset(("package.use", "package.mask", "package.keywords", \ - "package.unmask", "package.properties", "package.license", "use.mask", "use.force")) - - def __init__(self, ebuilds={}, installed={}, profile={}, repo_configs={}, \ - user_config={}, sets={}, world=[], debug=False): - """ - ebuilds: cpv -> metadata mapping simulating available ebuilds. - installed: cpv -> metadata mapping simulating installed packages. - If a metadata key is missing, it gets a default value. - profile: settings defined by the profile. - """ - self.debug = debug - self.root = "/" - self.eprefix = tempfile.mkdtemp() - self.eroot = self.root + self.eprefix.lstrip(os.sep) + os.sep - self.portdir = os.path.join(self.eroot, "usr/portage") - self.vdbdir = os.path.join(self.eroot, "var/db/pkg") - os.makedirs(self.portdir) - os.makedirs(self.vdbdir) - - if not debug: - portage.util.noiselimit = -2 - - self.repo_dirs = {} - #Make sure the main repo is always created - self._get_repo_dir("test_repo") - - self._create_ebuilds(ebuilds) - self._create_installed(installed) - self._create_profile(ebuilds, installed, profile, repo_configs, user_config, sets) - self._create_world(world) - - self.settings, self.trees = self._load_config() - - self._create_ebuild_manifests(ebuilds) - - portage.util.noiselimit = 0 - - def _get_repo_dir(self, repo): - """ - Create the repo directory if needed. 
- """ - if repo not in self.repo_dirs: - if repo == "test_repo": - repo_path = self.portdir - else: - repo_path = os.path.join(self.eroot, "usr", "local", repo) - - self.repo_dirs[repo] = repo_path - profile_path = os.path.join(repo_path, "profiles") - - try: - os.makedirs(profile_path) - except os.error: - pass - - repo_name_file = os.path.join(profile_path, "repo_name") - f = open(repo_name_file, "w") - f.write("%s\n" % repo) - f.close() - - return self.repo_dirs[repo] - - def _create_ebuilds(self, ebuilds): - for cpv in ebuilds: - a = Atom("=" + cpv, allow_repo=True) - repo = a.repo - if repo is None: - repo = "test_repo" - - metadata = ebuilds[cpv].copy() - eapi = metadata.pop("EAPI", 0) - lic = metadata.pop("LICENSE", "") - properties = metadata.pop("PROPERTIES", "") - slot = metadata.pop("SLOT", 0) - keywords = metadata.pop("KEYWORDS", "x86") - iuse = metadata.pop("IUSE", "") - depend = metadata.pop("DEPEND", "") - rdepend = metadata.pop("RDEPEND", None) - pdepend = metadata.pop("PDEPEND", None) - required_use = metadata.pop("REQUIRED_USE", None) - - if metadata: - raise ValueError("metadata of ebuild '%s' contains unknown keys: %s" % (cpv, metadata.keys())) - - repo_dir = self._get_repo_dir(repo) - ebuild_dir = os.path.join(repo_dir, a.cp) - ebuild_path = os.path.join(ebuild_dir, a.cpv.split("/")[1] + ".ebuild") - try: - os.makedirs(ebuild_dir) - except os.error: - pass - - f = open(ebuild_path, "w") - f.write('EAPI="' + str(eapi) + '"\n') - f.write('LICENSE="' + str(lic) + '"\n') - f.write('PROPERTIES="' + str(properties) + '"\n') - f.write('SLOT="' + str(slot) + '"\n') - f.write('KEYWORDS="' + str(keywords) + '"\n') - f.write('IUSE="' + str(iuse) + '"\n') - f.write('DEPEND="' + str(depend) + '"\n') - if rdepend is not None: - f.write('RDEPEND="' + str(rdepend) + '"\n') - if pdepend is not None: - f.write('PDEPEND="' + str(pdepend) + '"\n') - if required_use is not None: - f.write('REQUIRED_USE="' + str(required_use) + '"\n') - f.close() - - def 
_create_ebuild_manifests(self, ebuilds): - tmpsettings = config(clone=self.settings) - tmpsettings['PORTAGE_QUIET'] = '1' - for cpv in ebuilds: - a = Atom("=" + cpv, allow_repo=True) - repo = a.repo - if repo is None: - repo = "test_repo" - - repo_dir = self._get_repo_dir(repo) - ebuild_dir = os.path.join(repo_dir, a.cp) - ebuild_path = os.path.join(ebuild_dir, a.cpv.split("/")[1] + ".ebuild") - - portdb = self.trees[self.root]["porttree"].dbapi - tmpsettings['O'] = ebuild_dir - if not digestgen(mysettings=tmpsettings, myportdb=portdb): - raise AssertionError('digest creation failed for %s' % ebuild_path) - - def _create_installed(self, installed): - for cpv in installed: - a = Atom("=" + cpv, allow_repo=True) - repo = a.repo - if repo is None: - repo = "test_repo" - - vdb_pkg_dir = os.path.join(self.vdbdir, a.cpv) - try: - os.makedirs(vdb_pkg_dir) - except os.error: - pass - - metadata = installed[cpv].copy() - eapi = metadata.pop("EAPI", 0) - lic = metadata.pop("LICENSE", "") - properties = metadata.pop("PROPERTIES", "") - slot = metadata.pop("SLOT", 0) - keywords = metadata.pop("KEYWORDS", "~x86") - iuse = metadata.pop("IUSE", "") - use = metadata.pop("USE", "") - depend = metadata.pop("DEPEND", "") - rdepend = metadata.pop("RDEPEND", None) - pdepend = metadata.pop("PDEPEND", None) - required_use = metadata.pop("REQUIRED_USE", None) - - if metadata: - raise ValueError("metadata of installed '%s' contains unknown keys: %s" % (cpv, metadata.keys())) - - def write_key(key, value): - f = open(os.path.join(vdb_pkg_dir, key), "w") - f.write(str(value) + "\n") - f.close() - - write_key("EAPI", eapi) - write_key("LICENSE", lic) - write_key("PROPERTIES", properties) - write_key("SLOT", slot) - write_key("LICENSE", lic) - write_key("PROPERTIES", properties) - write_key("repository", repo) - write_key("KEYWORDS", keywords) - write_key("IUSE", iuse) - write_key("USE", use) - write_key("DEPEND", depend) - if rdepend is not None: - write_key("RDEPEND", rdepend) - if pdepend 
is not None: - write_key("PDEPEND", pdepend) - if required_use is not None: - write_key("REQUIRED_USE", required_use) - - def _create_profile(self, ebuilds, installed, profile, repo_configs, user_config, sets): - - for repo in self.repo_dirs: - repo_dir = self._get_repo_dir(repo) - profile_dir = os.path.join(self._get_repo_dir(repo), "profiles") - - #Create $REPO/profiles/categories - categories = set() - for cpv in ebuilds: - ebuilds_repo = Atom("="+cpv, allow_repo=True).repo - if ebuilds_repo is None: - ebuilds_repo = "test_repo" - if ebuilds_repo == repo: - categories.add(catsplit(cpv)[0]) - - categories_file = os.path.join(profile_dir, "categories") - f = open(categories_file, "w") - for cat in categories: - f.write(cat + "\n") - f.close() - - #Create $REPO/profiles/license_groups - license_file = os.path.join(profile_dir, "license_groups") - f = open(license_file, "w") - f.write("EULA TEST\n") - f.close() - - repo_config = repo_configs.get(repo) - if repo_config: - for config_file, lines in repo_config.items(): - if config_file not in self.config_files: - raise ValueError("Unknown config file: '%s'" % config_file) - - file_name = os.path.join(profile_dir, config_file) - f = open(file_name, "w") - for line in lines: - f.write("%s\n" % line) - f.close() - - #Create $profile_dir/eclass (we fail to digest the ebuilds if it's not there) - os.makedirs(os.path.join(repo_dir, "eclass")) - - if repo == "test_repo": - #Create a minimal profile in /usr/portage - sub_profile_dir = os.path.join(profile_dir, "default", "linux", "x86", "test_profile") - os.makedirs(sub_profile_dir) - - eapi_file = os.path.join(sub_profile_dir, "eapi") - f = open(eapi_file, "w") - f.write("0\n") - f.close() - - make_defaults_file = os.path.join(sub_profile_dir, "make.defaults") - f = open(make_defaults_file, "w") - f.write("ARCH=\"x86\"\n") - f.write("ACCEPT_KEYWORDS=\"x86\"\n") - f.close() - - use_force_file = os.path.join(sub_profile_dir, "use.force") - f = open(use_force_file, "w") - 
f.write("x86\n") - f.close() - - if profile: - for config_file, lines in profile.items(): - if config_file not in self.config_files: - raise ValueError("Unknown config file: '%s'" % config_file) - - file_name = os.path.join(sub_profile_dir, config_file) - f = open(file_name, "w") - for line in lines: - f.write("%s\n" % line) - f.close() - - #Create profile symlink - os.makedirs(os.path.join(self.eroot, "etc")) - os.symlink(sub_profile_dir, os.path.join(self.eroot, "etc", "make.profile")) - - user_config_dir = os.path.join(self.eroot, "etc", "portage") - - try: - os.makedirs(user_config_dir) - except os.error: - pass - - repos_conf_file = os.path.join(user_config_dir, "repos.conf") - f = open(repos_conf_file, "w") - priority = 0 - for repo in sorted(self.repo_dirs.keys()): - f.write("[%s]\n" % repo) - f.write("LOCATION=%s\n" % self.repo_dirs[repo]) - if repo == "test_repo": - f.write("PRIORITY=%s\n" % -1000) - else: - f.write("PRIORITY=%s\n" % priority) - priority += 1 - f.close() - - for config_file, lines in user_config.items(): - if config_file not in self.config_files: - raise ValueError("Unknown config file: '%s'" % config_file) - - file_name = os.path.join(user_config_dir, config_file) - f = open(file_name, "w") - for line in lines: - f.write("%s\n" % line) - f.close() - - #Create /usr/share/portage/config/sets/portage.conf - default_sets_conf_dir = os.path.join(self.eroot, "usr/share/portage/config/sets") - - try: - os.makedirs(default_sets_conf_dir) - except os.error: - pass - - provided_sets_portage_conf = \ - os.path.join(PORTAGE_BASE_PATH, "cnf/sets/portage.conf") - os.symlink(provided_sets_portage_conf, os.path.join(default_sets_conf_dir, "portage.conf")) - - set_config_dir = os.path.join(user_config_dir, "sets") - - try: - os.makedirs(set_config_dir) - except os.error: - pass - - for sets_file, lines in sets.items(): - file_name = os.path.join(set_config_dir, sets_file) - f = open(file_name, "w") - for line in lines: - f.write("%s\n" % line) - f.close() 
- - user_config_dir = os.path.join(self.eroot, "etc", "portage") - - try: - os.makedirs(user_config_dir) - except os.error: - pass - - for config_file, lines in user_config.items(): - if config_file not in self.config_files: - raise ValueError("Unknown config file: '%s'" % config_file) - - file_name = os.path.join(user_config_dir, config_file) - f = open(file_name, "w") - for line in lines: - f.write("%s\n" % line) - f.close() - - def _create_world(self, world): - #Create /var/lib/portage/world - var_lib_portage = os.path.join(self.eroot, "var", "lib", "portage") - os.makedirs(var_lib_portage) - - world_file = os.path.join(var_lib_portage, "world") - - f = open(world_file, "w") - for atom in world: - f.write("%s\n" % atom) - f.close() - - def _load_config(self): - portdir_overlay = [] - for repo_name in sorted(self.repo_dirs): - path = self.repo_dirs[repo_name] - if path != self.portdir: - portdir_overlay.append(path) - - env = { - "ACCEPT_KEYWORDS": "x86", - "PORTDIR": self.portdir, - "PORTDIR_OVERLAY": " ".join(portdir_overlay), - 'PORTAGE_TMPDIR' : os.path.join(self.eroot, 'var/tmp'), - } - - # Pass along PORTAGE_USERNAME and PORTAGE_GRPNAME since they - # need to be inherited by ebuild subprocesses. 
- if 'PORTAGE_USERNAME' in os.environ: - env['PORTAGE_USERNAME'] = os.environ['PORTAGE_USERNAME'] - if 'PORTAGE_GRPNAME' in os.environ: - env['PORTAGE_GRPNAME'] = os.environ['PORTAGE_GRPNAME'] - - settings = config(_eprefix=self.eprefix, env=env) - settings.lock() - - trees = { - self.root: { - "vartree": vartree(settings=settings), - "porttree": portagetree(self.root, settings=settings), - "bintree": binarytree(self.root, - os.path.join(self.eroot, "usr/portage/packages"), - settings=settings) - } - } - - for root, root_trees in trees.items(): - settings = root_trees["vartree"].settings - settings._init_dirs() - setconfig = load_default_config(settings, root_trees) - root_trees["root_config"] = RootConfig(settings, root_trees, setconfig) - - return settings, trees - - def run(self, atoms, options={}, action=None): - options = options.copy() - options["--pretend"] = True - if self.debug: - options["--debug"] = True - - global_noiselimit = portage.util.noiselimit - global_emergelog_disable = _emerge.emergelog._disable - try: - - if not self.debug: - portage.util.noiselimit = -2 - _emerge.emergelog._disable = True - - if options.get("--depclean"): - rval, cleanlist, ordered, req_pkg_count = \ - calc_depclean(self.settings, self.trees, None, - options, "depclean", InternalPackageSet(initial_atoms=atoms, allow_wildcard=True), None) - result = ResolverPlaygroundDepcleanResult( \ - atoms, rval, cleanlist, ordered, req_pkg_count) - else: - params = create_depgraph_params(options, action) - success, depgraph, favorites = backtrack_depgraph( - self.settings, self.trees, options, params, action, atoms, None) - depgraph._show_merge_list() - depgraph.display_problems() - result = ResolverPlaygroundResult(atoms, success, depgraph, favorites) - finally: - portage.util.noiselimit = global_noiselimit - _emerge.emergelog._disable = global_emergelog_disable - - return result - - def run_TestCase(self, test_case): - if not isinstance(test_case, ResolverPlaygroundTestCase): - raise 
TypeError("ResolverPlayground needs a ResolverPlaygroundTestCase") - for atoms in test_case.requests: - result = self.run(atoms, test_case.options, test_case.action) - if not test_case.compare_with_result(result): - return - - def cleanup(self): - portdb = self.trees[self.root]["porttree"].dbapi - portdb.close_caches() - portage.dbapi.porttree.portdbapi.portdbapi_instances.remove(portdb) - if self.debug: - print("\nEROOT=%s" % self.eroot) - else: - shutil.rmtree(self.eroot) - -class ResolverPlaygroundTestCase(object): - - def __init__(self, request, **kwargs): - self.all_permutations = kwargs.pop("all_permutations", False) - self.ignore_mergelist_order = kwargs.pop("ignore_mergelist_order", False) - self.ambiguous_merge_order = kwargs.pop("ambiguous_merge_order", False) - self.check_repo_names = kwargs.pop("check_repo_names", False) - self.merge_order_assertions = kwargs.pop("merge_order_assertions", False) - - if self.all_permutations: - self.requests = list(permutations(request)) - else: - self.requests = [request] - - self.options = kwargs.pop("options", {}) - self.action = kwargs.pop("action", None) - self.test_success = True - self.fail_msg = None - self._checks = kwargs.copy() - - def compare_with_result(self, result): - checks = dict.fromkeys(result.checks) - for key, value in self._checks.items(): - if not key in checks: - raise KeyError("Not an available check: '%s'" % key) - checks[key] = value - - fail_msgs = [] - for key, value in checks.items(): - got = getattr(result, key) - expected = value - - if key in result.optional_checks and expected is None: - continue - - if key == "mergelist": - if not self.check_repo_names: - #Strip repo names if we don't check them - if got: - new_got = [] - for cpv in got: - if cpv[:1] == "!": - new_got.append(cpv) - continue - a = Atom("="+cpv, allow_repo=True) - new_got.append(a.cpv) - got = new_got - if expected: - new_expected = [] - for obj in expected: - if isinstance(obj, basestring): - if obj[:1] == "!": - 
new_expected.append(obj) - continue - a = Atom("="+obj, allow_repo=True) - new_expected.append(a.cpv) - continue - new_expected.append(set()) - for cpv in obj: - if cpv[:1] != "!": - cpv = Atom("="+cpv, allow_repo=True).cpv - new_expected[-1].add(cpv) - expected = new_expected - if self.ignore_mergelist_order and got is not None: - got = set(got) - expected = set(expected) - - if self.ambiguous_merge_order and got: - expected_stack = list(reversed(expected)) - got_stack = list(reversed(got)) - new_expected = [] - match = True - while got_stack and expected_stack: - got_token = got_stack.pop() - expected_obj = expected_stack.pop() - if isinstance(expected_obj, basestring): - new_expected.append(expected_obj) - if got_token == expected_obj: - continue - # result doesn't match, so stop early - match = False - break - expected_obj = set(expected_obj) - try: - expected_obj.remove(got_token) - except KeyError: - # result doesn't match, so stop early - match = False - break - new_expected.append(got_token) - while got_stack and expected_obj: - got_token = got_stack.pop() - try: - expected_obj.remove(got_token) - except KeyError: - match = False - break - new_expected.append(got_token) - if not match: - # result doesn't match, so stop early - break - if expected_obj: - # result does not match, so stop early - match = False - new_expected.append(tuple(expected_obj)) - break - if expected_stack: - # result does not match, add leftovers to new_expected - match = False - expected_stack.reverse() - new_expected.extend(expected_stack) - expected = new_expected - - if match and self.merge_order_assertions: - for node1, node2 in self.merge_order_assertions: - if not (got.index(node1) < got.index(node2)): - fail_msgs.append("atoms: (" + \ - ", ".join(result.atoms) + "), key: " + \ - ("merge_order_assertions, expected: %s" % \ - str((node1, node2))) + \ - ", got: " + str(got)) - - elif key in ("unstable_keywords", "needed_p_mask_changes") and expected is not None: - expected = 
set(expected) - - if got != expected: - fail_msgs.append("atoms: (" + ", ".join(result.atoms) + "), key: " + \ - key + ", expected: " + str(expected) + ", got: " + str(got)) - if fail_msgs: - self.test_success = False - self.fail_msg = "\n".join(fail_msgs) - return False - return True - -class ResolverPlaygroundResult(object): - - checks = ( - "success", "mergelist", "use_changes", "license_changes", "unstable_keywords", "slot_collision_solutions", - "circular_dependency_solutions", "needed_p_mask_changes", - ) - optional_checks = ( - ) - - def __init__(self, atoms, success, mydepgraph, favorites): - self.atoms = atoms - self.success = success - self.depgraph = mydepgraph - self.favorites = favorites - self.mergelist = None - self.use_changes = None - self.license_changes = None - self.unstable_keywords = None - self.needed_p_mask_changes = None - self.slot_collision_solutions = None - self.circular_dependency_solutions = None - - if self.depgraph._dynamic_config._serialized_tasks_cache is not None: - self.mergelist = [] - for x in self.depgraph._dynamic_config._serialized_tasks_cache: - if isinstance(x, Blocker): - self.mergelist.append(x.atom) - else: - repo_str = "" - if x.metadata["repository"] != "test_repo": - repo_str = _repo_separator + x.metadata["repository"] - self.mergelist.append(x.cpv + repo_str) - - if self.depgraph._dynamic_config._needed_use_config_changes: - self.use_changes = {} - for pkg, needed_use_config_changes in \ - self.depgraph._dynamic_config._needed_use_config_changes.items(): - new_use, changes = needed_use_config_changes - self.use_changes[pkg.cpv] = changes - - if self.depgraph._dynamic_config._needed_unstable_keywords: - self.unstable_keywords = set() - for pkg in self.depgraph._dynamic_config._needed_unstable_keywords: - self.unstable_keywords.add(pkg.cpv) - - if self.depgraph._dynamic_config._needed_p_mask_changes: - self.needed_p_mask_changes = set() - for pkg in self.depgraph._dynamic_config._needed_p_mask_changes: - 
self.needed_p_mask_changes.add(pkg.cpv) - - if self.depgraph._dynamic_config._needed_license_changes: - self.license_changes = {} - for pkg, missing_licenses in self.depgraph._dynamic_config._needed_license_changes.items(): - self.license_changes[pkg.cpv] = missing_licenses - - if self.depgraph._dynamic_config._slot_conflict_handler is not None: - self.slot_collision_solutions = [] - handler = self.depgraph._dynamic_config._slot_conflict_handler - - for change in handler.changes: - new_change = {} - for pkg in change: - new_change[pkg.cpv] = change[pkg] - self.slot_collision_solutions.append(new_change) - - if self.depgraph._dynamic_config._circular_dependency_handler is not None: - handler = self.depgraph._dynamic_config._circular_dependency_handler - sol = handler.solutions - self.circular_dependency_solutions = dict( zip([x.cpv for x in sol.keys()], sol.values()) ) - -class ResolverPlaygroundDepcleanResult(object): - - checks = ( - "success", "cleanlist", "ordered", "req_pkg_count", - ) - optional_checks = ( - "ordered", "req_pkg_count", - ) - - def __init__(self, atoms, rval, cleanlist, ordered, req_pkg_count): - self.atoms = atoms - self.success = rval == 0 - self.cleanlist = cleanlist - self.ordered = ordered - self.req_pkg_count = req_pkg_count diff --git a/portage_with_autodep/pym/portage/tests/resolver/__test__ b/portage_with_autodep/pym/portage/tests/resolver/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_autounmask.py b/portage_with_autodep/pym/portage/tests/resolver/test_autounmask.py deleted file mode 100644 index 54c435f..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_autounmask.py +++ /dev/null @@ -1,326 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from 
portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class AutounmaskTestCase(TestCase): - - def testAutounmask(self): - - ebuilds = { - #ebuilds to test use changes - "dev-libs/A-1": { "SLOT": 1, "DEPEND": "dev-libs/B[foo]", "EAPI": 2}, - "dev-libs/A-2": { "SLOT": 2, "DEPEND": "dev-libs/B[bar]", "EAPI": 2}, - "dev-libs/B-1": { "DEPEND": "foo? ( dev-libs/C ) bar? ( dev-libs/D )", "IUSE": "foo bar"}, - "dev-libs/C-1": {}, - "dev-libs/D-1": {}, - - #ebuilds to test if we allow changing of masked or forced flags - "dev-libs/E-1": { "SLOT": 1, "DEPEND": "dev-libs/F[masked-flag]", "EAPI": 2}, - "dev-libs/E-2": { "SLOT": 2, "DEPEND": "dev-libs/G[-forced-flag]", "EAPI": 2}, - "dev-libs/F-1": { "IUSE": "masked-flag"}, - "dev-libs/G-1": { "IUSE": "forced-flag"}, - - #ebuilds to test keyword changes - "app-misc/Z-1": { "KEYWORDS": "~x86", "DEPEND": "app-misc/Y" }, - "app-misc/Y-1": { "KEYWORDS": "~x86" }, - "app-misc/W-1": {}, - "app-misc/W-2": { "KEYWORDS": "~x86" }, - "app-misc/V-1": { "KEYWORDS": "~x86", "DEPEND": ">=app-misc/W-2"}, - - #ebuilds to test mask and keyword changes - "app-text/A-1": {}, - "app-text/B-1": { "KEYWORDS": "~x86" }, - "app-text/C-1": { "KEYWORDS": "" }, - "app-text/D-1": { "KEYWORDS": "~x86" }, - "app-text/D-2": { "KEYWORDS": "" }, - - #ebuilds for mixed test for || dep handling - "sci-libs/K-1": { "DEPEND": " || ( sci-libs/L[bar] || ( sci-libs/M sci-libs/P ) )", "EAPI": 2}, - "sci-libs/K-2": { "DEPEND": " || ( sci-libs/L[bar] || ( sci-libs/P sci-libs/M ) )", "EAPI": 2}, - "sci-libs/K-3": { "DEPEND": " || ( sci-libs/M || ( sci-libs/L[bar] sci-libs/P ) )", "EAPI": 2}, - "sci-libs/K-4": { "DEPEND": " || ( sci-libs/M || ( sci-libs/P sci-libs/L[bar] ) )", "EAPI": 2}, - "sci-libs/K-5": { "DEPEND": " || ( sci-libs/P || ( sci-libs/L[bar] sci-libs/M ) )", "EAPI": 2}, - "sci-libs/K-6": { "DEPEND": " || ( sci-libs/P || ( sci-libs/M sci-libs/L[bar] ) )", "EAPI": 2}, - "sci-libs/K-7": { "DEPEND": " || ( sci-libs/M 
sci-libs/L[bar] )", "EAPI": 2}, - "sci-libs/K-8": { "DEPEND": " || ( sci-libs/L[bar] sci-libs/M )", "EAPI": 2}, - - "sci-libs/L-1": { "IUSE": "bar" }, - "sci-libs/M-1": { "KEYWORDS": "~x86" }, - "sci-libs/P-1": { }, - - #ebuilds to test these nice "required by cat/pkg[foo]" messages - "dev-util/Q-1": { "DEPEND": "foo? ( dev-util/R[bar] )", "IUSE": "+foo", "EAPI": 2 }, - "dev-util/Q-2": { "RDEPEND": "!foo? ( dev-util/R[bar] )", "IUSE": "foo", "EAPI": 2 }, - "dev-util/R-1": { "IUSE": "bar" }, - - #ebuilds to test interaction with REQUIRED_USE - "app-portage/A-1": { "DEPEND": "app-portage/B[foo]", "EAPI": 2 }, - "app-portage/A-2": { "DEPEND": "app-portage/B[foo=]", "IUSE": "+foo", "REQUIRED_USE": "foo", "EAPI": "4" }, - - "app-portage/B-1": { "IUSE": "foo +bar", "REQUIRED_USE": "^^ ( foo bar )", "EAPI": "4" }, - "app-portage/C-1": { "IUSE": "+foo +bar", "REQUIRED_USE": "^^ ( foo bar )", "EAPI": "4" }, - } - - test_cases = ( - #Test USE changes. - #The simple case. - - ResolverPlaygroundTestCase( - ["dev-libs/A:1"], - options = {"--autounmask": "n"}, - success = False), - ResolverPlaygroundTestCase( - ["dev-libs/A:1"], - options = {"--autounmask": True}, - success = False, - mergelist = ["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"], - use_changes = { "dev-libs/B-1": {"foo": True} } ), - - #Make sure we restart if needed. - ResolverPlaygroundTestCase( - ["dev-libs/A:1", "dev-libs/B"], - options = {"--autounmask": True}, - all_permutations = True, - success = False, - mergelist = ["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"], - use_changes = { "dev-libs/B-1": {"foo": True} } ), - ResolverPlaygroundTestCase( - ["dev-libs/A:1", "dev-libs/A:2", "dev-libs/B"], - options = {"--autounmask": True}, - all_permutations = True, - success = False, - mergelist = ["dev-libs/D-1", "dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1", "dev-libs/A-2"], - ignore_mergelist_order = True, - use_changes = { "dev-libs/B-1": {"foo": True, "bar": True} } ), - - #Test keywording. 
- #The simple case. - - ResolverPlaygroundTestCase( - ["app-misc/Z"], - options = {"--autounmask": "n"}, - success = False), - ResolverPlaygroundTestCase( - ["app-misc/Z"], - options = {"--autounmask": True}, - success = False, - mergelist = ["app-misc/Y-1", "app-misc/Z-1"], - unstable_keywords = ["app-misc/Y-1", "app-misc/Z-1"]), - - #Make sure that the backtracking for slot conflicts handles our mess. - - ResolverPlaygroundTestCase( - ["=app-misc/V-1", "app-misc/W"], - options = {"--autounmask": True}, - all_permutations = True, - success = False, - mergelist = ["app-misc/W-2", "app-misc/V-1"], - unstable_keywords = ["app-misc/W-2", "app-misc/V-1"]), - - #Mixed testing - #Make sure we don't change use for something in a || dep if there is another choice - #that needs no change. - - ResolverPlaygroundTestCase( - ["=sci-libs/K-1"], - options = {"--autounmask": True}, - success = True, - mergelist = ["sci-libs/P-1", "sci-libs/K-1"]), - ResolverPlaygroundTestCase( - ["=sci-libs/K-2"], - options = {"--autounmask": True}, - success = True, - mergelist = ["sci-libs/P-1", "sci-libs/K-2"]), - ResolverPlaygroundTestCase( - ["=sci-libs/K-3"], - options = {"--autounmask": True}, - success = True, - mergelist = ["sci-libs/P-1", "sci-libs/K-3"]), - ResolverPlaygroundTestCase( - ["=sci-libs/K-4"], - options = {"--autounmask": True}, - success = True, - mergelist = ["sci-libs/P-1", "sci-libs/K-4"]), - ResolverPlaygroundTestCase( - ["=sci-libs/K-5"], - options = {"--autounmask": True}, - success = True, - mergelist = ["sci-libs/P-1", "sci-libs/K-5"]), - ResolverPlaygroundTestCase( - ["=sci-libs/K-6"], - options = {"--autounmask": True}, - success = True, - mergelist = ["sci-libs/P-1", "sci-libs/K-6"]), - - #Make sure we prefer use changes over keyword changes. 
- ResolverPlaygroundTestCase( - ["=sci-libs/K-7"], - options = {"--autounmask": True}, - success = False, - mergelist = ["sci-libs/L-1", "sci-libs/K-7"], - use_changes = { "sci-libs/L-1": { "bar": True } }), - ResolverPlaygroundTestCase( - ["=sci-libs/K-8"], - options = {"--autounmask": True}, - success = False, - mergelist = ["sci-libs/L-1", "sci-libs/K-8"], - use_changes = { "sci-libs/L-1": { "bar": True } }), - - #Test these nice "required by cat/pkg[foo]" messages. - ResolverPlaygroundTestCase( - ["=dev-util/Q-1"], - options = {"--autounmask": True}, - success = False, - mergelist = ["dev-util/R-1", "dev-util/Q-1"], - use_changes = { "dev-util/R-1": { "bar": True } }), - ResolverPlaygroundTestCase( - ["=dev-util/Q-2"], - options = {"--autounmask": True}, - success = False, - mergelist = ["dev-util/R-1", "dev-util/Q-2"], - use_changes = { "dev-util/R-1": { "bar": True } }), - - #Test interaction with REQUIRED_USE. - ResolverPlaygroundTestCase( - ["=app-portage/A-1"], - options = { "--autounmask": True }, - use_changes = None, - success = False), - ResolverPlaygroundTestCase( - ["=app-portage/A-2"], - options = { "--autounmask": True }, - use_changes = None, - success = False), - ResolverPlaygroundTestCase( - ["=app-portage/C-1"], - options = { "--autounmask": True }, - use_changes = None, - success = False), - - #Make sure we don't change masked/forced flags. - ResolverPlaygroundTestCase( - ["dev-libs/E:1"], - options = {"--autounmask": True}, - use_changes = None, - success = False), - ResolverPlaygroundTestCase( - ["dev-libs/E:2"], - options = {"--autounmask": True}, - use_changes = None, - success = False), - - #Test mask and keyword changes. 
- ResolverPlaygroundTestCase( - ["app-text/A"], - options = {"--autounmask": True}, - success = False, - mergelist = ["app-text/A-1"], - needed_p_mask_changes = ["app-text/A-1"]), - ResolverPlaygroundTestCase( - ["app-text/B"], - options = {"--autounmask": True}, - success = False, - mergelist = ["app-text/B-1"], - unstable_keywords = ["app-text/B-1"], - needed_p_mask_changes = ["app-text/B-1"]), - ResolverPlaygroundTestCase( - ["app-text/C"], - options = {"--autounmask": True}, - success = False, - mergelist = ["app-text/C-1"], - unstable_keywords = ["app-text/C-1"], - needed_p_mask_changes = ["app-text/C-1"]), - #Make sure unstable keyword is preferred over missing keyword - ResolverPlaygroundTestCase( - ["app-text/D"], - options = {"--autounmask": True}, - success = False, - mergelist = ["app-text/D-1"], - unstable_keywords = ["app-text/D-1"]), - #Test missing keyword - ResolverPlaygroundTestCase( - ["=app-text/D-2"], - options = {"--autounmask": True}, - success = False, - mergelist = ["app-text/D-2"], - unstable_keywords = ["app-text/D-2"]) - ) - - profile = { - "use.mask": - ( - "masked-flag", - ), - "use.force": - ( - "forced-flag", - ), - "package.mask": - ( - "app-text/A", - "app-text/B", - "app-text/C", - ), - } - - playground = ResolverPlayground(ebuilds=ebuilds, profile=profile) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - - def testAutounmaskForLicenses(self): - - ebuilds = { - "dev-libs/A-1": { "LICENSE": "TEST" }, - "dev-libs/B-1": { "LICENSE": "TEST", "IUSE": "foo", "KEYWORDS": "~x86"}, - "dev-libs/C-1": { "DEPEND": "dev-libs/B[foo]", "EAPI": 2 }, - - "dev-libs/D-1": { "DEPEND": "dev-libs/E dev-libs/F", "LICENSE": "TEST" }, - "dev-libs/E-1": { "LICENSE": "TEST" }, - "dev-libs/E-2": { "LICENSE": "TEST" }, - "dev-libs/F-1": { "DEPEND": "=dev-libs/E-1", "LICENSE": "TEST" }, - } - - test_cases = ( - 
ResolverPlaygroundTestCase( - ["=dev-libs/A-1"], - options = {"--autounmask": 'n'}, - success = False), - ResolverPlaygroundTestCase( - ["=dev-libs/A-1"], - options = {"--autounmask": True}, - success = False, - mergelist = ["dev-libs/A-1"], - license_changes = { "dev-libs/A-1": set(["TEST"]) }), - - #Test license+keyword+use change at once. - ResolverPlaygroundTestCase( - ["=dev-libs/C-1"], - options = {"--autounmask": True}, - success = False, - mergelist = ["dev-libs/B-1", "dev-libs/C-1"], - license_changes = { "dev-libs/B-1": set(["TEST"]) }, - unstable_keywords = ["dev-libs/B-1"], - use_changes = { "dev-libs/B-1": { "foo": True } }), - - #Test license with backtracking. - ResolverPlaygroundTestCase( - ["=dev-libs/D-1"], - options = {"--autounmask": True}, - success = False, - mergelist = ["dev-libs/E-1", "dev-libs/F-1", "dev-libs/D-1"], - license_changes = { "dev-libs/D-1": set(["TEST"]), "dev-libs/E-1": set(["TEST"]), "dev-libs/E-2": set(["TEST"]), "dev-libs/F-1": set(["TEST"]) }), - ) - - playground = ResolverPlayground(ebuilds=ebuilds) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_backtracking.py b/portage_with_autodep/pym/portage/tests/resolver/test_backtracking.py deleted file mode 100644 index fc49306..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_backtracking.py +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class BacktrackingTestCase(TestCase): - - def testBacktracking(self): - ebuilds = { - "dev-libs/A-1": {}, - "dev-libs/A-2": {}, - "dev-libs/B-1": { "DEPEND": "dev-libs/A" }, - } - - test_cases = ( - 
ResolverPlaygroundTestCase( - ["=dev-libs/A-1", "dev-libs/B"], - all_permutations = True, - mergelist = ["dev-libs/A-1", "dev-libs/B-1"], - success = True), - ) - - playground = ResolverPlayground(ebuilds=ebuilds) - - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - - - def testHittingTheBacktrackLimit(self): - ebuilds = { - "dev-libs/A-1": {}, - "dev-libs/A-2": {}, - "dev-libs/B-1": {}, - "dev-libs/B-2": {}, - "dev-libs/C-1": { "DEPEND": "dev-libs/A dev-libs/B" }, - "dev-libs/D-1": { "DEPEND": "=dev-libs/A-1 =dev-libs/B-1" }, - } - - test_cases = ( - ResolverPlaygroundTestCase( - ["dev-libs/C", "dev-libs/D"], - all_permutations = True, - mergelist = ["dev-libs/A-1", "dev-libs/B-1", "dev-libs/C-1", "dev-libs/D-1"], - ignore_mergelist_order = True, - success = True), - #This one hits the backtrack limit. Be aware that this depends on the argument order. - ResolverPlaygroundTestCase( - ["dev-libs/D", "dev-libs/C"], - options = { "--backtrack": 1 }, - mergelist = ["dev-libs/A-1", "dev-libs/B-1", "dev-libs/A-2", "dev-libs/B-2", "dev-libs/C-1", "dev-libs/D-1"], - ignore_mergelist_order = True, - slot_collision_solutions = [], - success = False), - ) - - playground = ResolverPlayground(ebuilds=ebuilds) - - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - - - def testBacktrackingGoodVersionFirst(self): - """ - When backtracking due to slot conflicts, we masked the version that has been pulled - in first. This is not always a good idea. Mask the highest version instead. 
- """ - - ebuilds = { - "dev-libs/A-1": { "DEPEND": "=dev-libs/C-1 dev-libs/B" }, - "dev-libs/B-1": { "DEPEND": "=dev-libs/C-1" }, - "dev-libs/B-2": { "DEPEND": "=dev-libs/C-2" }, - "dev-libs/C-1": { }, - "dev-libs/C-2": { }, - } - - test_cases = ( - ResolverPlaygroundTestCase( - ["dev-libs/A"], - mergelist = ["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1", ], - success = True), - ) - - playground = ResolverPlayground(ebuilds=ebuilds) - - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - - def testBacktrackWithoutUpdates(self): - """ - If --update is not given we might have to mask the old installed version later. - """ - - ebuilds = { - "dev-libs/A-1": { "DEPEND": "dev-libs/Z" }, - "dev-libs/B-1": { "DEPEND": ">=dev-libs/Z-2" }, - "dev-libs/Z-1": { }, - "dev-libs/Z-2": { }, - } - - installed = { - "dev-libs/Z-1": { "USE": "" }, - } - - test_cases = ( - ResolverPlaygroundTestCase( - ["dev-libs/B", "dev-libs/A"], - all_permutations = True, - mergelist = ["dev-libs/Z-2", "dev-libs/B-1", "dev-libs/A-1", ], - ignore_mergelist_order = True, - success = True), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed) - - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - - def testBacktrackMissedUpdates(self): - """ - An update is missed due to a dependency on an older version. 
- """ - - ebuilds = { - "dev-libs/A-1": { }, - "dev-libs/A-2": { }, - "dev-libs/B-1": { "RDEPEND": "<=dev-libs/A-1" }, - } - - installed = { - "dev-libs/A-1": { "USE": "" }, - "dev-libs/B-1": { "USE": "", "RDEPEND": "<=dev-libs/A-1" }, - } - - options = {'--update' : True, '--deep' : True, '--selective' : True} - - test_cases = ( - ResolverPlaygroundTestCase( - ["dev-libs/A", "dev-libs/B"], - options = options, - all_permutations = True, - mergelist = [], - success = True), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed) - - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_circular_dependencies.py b/portage_with_autodep/pym/portage/tests/resolver/test_circular_dependencies.py deleted file mode 100644 index f8331ac..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_circular_dependencies.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class CircularDependencyTestCase(TestCase): - - #TODO: - # use config change by autounmask - # conflict on parent's parent - # difference in RDEPEND and DEPEND - # is there anything else than priority buildtime and runtime? - # play with use.{mask,force} - # play with REQUIRED_USE - - - def testCircularDependency(self): - - ebuilds = { - "dev-libs/Z-1": { "DEPEND": "foo? ( !bar? ( dev-libs/Y ) )", "IUSE": "+foo bar", "EAPI": 1 }, - "dev-libs/Z-2": { "DEPEND": "foo? ( dev-libs/Y ) !bar? ( dev-libs/Y )", "IUSE": "+foo bar", "EAPI": 1 }, - "dev-libs/Z-3": { "DEPEND": "foo? ( !bar? ( dev-libs/Y ) ) foo? ( dev-libs/Y ) !bar? 
( dev-libs/Y )", "IUSE": "+foo bar", "EAPI": 1 }, - "dev-libs/Y-1": { "DEPEND": "dev-libs/Z" }, - "dev-libs/W-1": { "DEPEND": "dev-libs/Z[foo] dev-libs/Y", "EAPI": 2 }, - "dev-libs/W-2": { "DEPEND": "dev-libs/Z[foo=] dev-libs/Y", "IUSE": "+foo", "EAPI": 2 }, - "dev-libs/W-3": { "DEPEND": "dev-libs/Z[bar] dev-libs/Y", "EAPI": 2 }, - - "app-misc/A-1": { "DEPEND": "foo? ( =app-misc/B-1 )", "IUSE": "+foo bar", "REQUIRED_USE": "^^ ( foo bar )", "EAPI": "4" }, - "app-misc/A-2": { "DEPEND": "foo? ( =app-misc/B-2 ) bar? ( =app-misc/B-2 )", "IUSE": "+foo bar", "REQUIRED_USE": "^^ ( foo bar )", "EAPI": "4" }, - "app-misc/B-1": { "DEPEND": "=app-misc/A-1" }, - "app-misc/B-2": { "DEPEND": "=app-misc/A-2" }, - } - - test_cases = ( - #Simple tests - ResolverPlaygroundTestCase( - ["=dev-libs/Z-1"], - circular_dependency_solutions = { "dev-libs/Y-1": frozenset([frozenset([("foo", False)]), frozenset([("bar", True)])])}, - success = False), - ResolverPlaygroundTestCase( - ["=dev-libs/Z-2"], - circular_dependency_solutions = { "dev-libs/Y-1": frozenset([frozenset([("foo", False), ("bar", True)])])}, - success = False), - ResolverPlaygroundTestCase( - ["=dev-libs/Z-3"], - circular_dependency_solutions = { "dev-libs/Y-1": frozenset([frozenset([("foo", False), ("bar", True)])])}, - success = False), - - #Conflict on parent - ResolverPlaygroundTestCase( - ["=dev-libs/W-1"], - circular_dependency_solutions = {}, - success = False), - ResolverPlaygroundTestCase( - ["=dev-libs/W-2"], - circular_dependency_solutions = { "dev-libs/Y-1": frozenset([frozenset([("foo", False), ("bar", True)])])}, - success = False), - - #Conflict with autounmask - ResolverPlaygroundTestCase( - ["=dev-libs/W-3"], - circular_dependency_solutions = { "dev-libs/Y-1": frozenset([frozenset([("foo", False)])])}, - use_changes = { "dev-libs/Z-3": {"bar": True}}, - success = False), - - #Conflict with REQUIRED_USE - ResolverPlaygroundTestCase( - ["=app-misc/B-1"], - circular_dependency_solutions = { "app-misc/B-1": 
frozenset([frozenset([("foo", False), ("bar", True)])])}, - success = False), - ResolverPlaygroundTestCase( - ["=app-misc/B-2"], - circular_dependency_solutions = {}, - success = False), - ) - - playground = ResolverPlayground(ebuilds=ebuilds) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_depclean.py b/portage_with_autodep/pym/portage/tests/resolver/test_depclean.py deleted file mode 100644 index ba70144..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_depclean.py +++ /dev/null @@ -1,285 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class SimpleDepcleanTestCase(TestCase): - - def testSimpleDepclean(self): - ebuilds = { - "dev-libs/A-1": {}, - "dev-libs/B-1": {}, - } - installed = { - "dev-libs/A-1": {}, - "dev-libs/B-1": {}, - } - - world = ( - "dev-libs/A", - ) - - test_cases = ( - ResolverPlaygroundTestCase( - [], - options = {"--depclean": True}, - success = True, - cleanlist = ["dev-libs/B-1"]), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed, world=world) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - -class DepcleanWithDepsTestCase(TestCase): - - def testDepcleanWithDeps(self): - ebuilds = { - "dev-libs/A-1": { "RDEPEND": "dev-libs/C" }, - "dev-libs/B-1": { "RDEPEND": "dev-libs/D" }, - "dev-libs/C-1": {}, - "dev-libs/D-1": { "RDEPEND": "dev-libs/E" }, - "dev-libs/E-1": { "RDEPEND": "dev-libs/F" }, - "dev-libs/F-1": {}, - } - installed = { - "dev-libs/A-1": { "RDEPEND": "dev-libs/C" 
}, - "dev-libs/B-1": { "RDEPEND": "dev-libs/D" }, - "dev-libs/C-1": {}, - "dev-libs/D-1": { "RDEPEND": "dev-libs/E" }, - "dev-libs/E-1": { "RDEPEND": "dev-libs/F" }, - "dev-libs/F-1": {}, - } - - world = ( - "dev-libs/A", - ) - - test_cases = ( - ResolverPlaygroundTestCase( - [], - options = {"--depclean": True}, - success = True, - cleanlist = ["dev-libs/B-1", "dev-libs/D-1", - "dev-libs/E-1", "dev-libs/F-1"]), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed, world=world) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - - -class DepcleanWithInstalledMaskedTestCase(TestCase): - - def testDepcleanWithInstalledMasked(self): - """ - Test case for bug 332719. - emerge --declean ignores that B is masked by license and removes C. - The next emerge -uDN world doesn't take B and installs C again. - """ - ebuilds = { - "dev-libs/A-1": { "RDEPEND": "|| ( dev-libs/B dev-libs/C )" }, - "dev-libs/B-1": { "LICENSE": "TEST", "KEYWORDS": "x86" }, - "dev-libs/C-1": { "KEYWORDS": "x86" }, - } - installed = { - "dev-libs/A-1": { "RDEPEND": "|| ( dev-libs/B dev-libs/C )" }, - "dev-libs/B-1": { "LICENSE": "TEST", "KEYWORDS": "x86" }, - "dev-libs/C-1": { "KEYWORDS": "x86" }, - } - - world = ( - "dev-libs/A", - ) - - test_cases = ( - ResolverPlaygroundTestCase( - [], - options = {"--depclean": True}, - success = True, - #cleanlist = ["dev-libs/C-1"]), - cleanlist = ["dev-libs/B-1"]), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed, world=world) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - -class DepcleanInstalledKeywordMaskedSlotTestCase(TestCase): - - def testDepcleanInstalledKeywordMaskedSlot(self): - """ - Verify that depclean removes newer slot - masked by KEYWORDS 
(see bug #350285). - """ - ebuilds = { - "dev-libs/A-1": { "RDEPEND": "|| ( =dev-libs/B-2.7* =dev-libs/B-2.6* )" }, - "dev-libs/B-2.6": { "SLOT":"2.6", "KEYWORDS": "x86" }, - "dev-libs/B-2.7": { "SLOT":"2.7", "KEYWORDS": "~x86" }, - } - installed = { - "dev-libs/A-1": { "EAPI" : "3", "RDEPEND": "|| ( dev-libs/B:2.7 dev-libs/B:2.6 )" }, - "dev-libs/B-2.6": { "SLOT":"2.6", "KEYWORDS": "x86" }, - "dev-libs/B-2.7": { "SLOT":"2.7", "KEYWORDS": "~x86" }, - } - - world = ( - "dev-libs/A", - ) - - test_cases = ( - ResolverPlaygroundTestCase( - [], - options = {"--depclean": True}, - success = True, - cleanlist = ["dev-libs/B-2.7"]), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed, world=world) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - -class DepcleanWithExcludeTestCase(TestCase): - - def testDepcleanWithExclude(self): - - installed = { - "dev-libs/A-1": {}, - "dev-libs/B-1": { "RDEPEND": "dev-libs/A" }, - } - - test_cases = ( - #Without --exclude. 
- ResolverPlaygroundTestCase( - [], - options = {"--depclean": True}, - success = True, - cleanlist = ["dev-libs/B-1", "dev-libs/A-1"]), - ResolverPlaygroundTestCase( - ["dev-libs/A"], - options = {"--depclean": True}, - success = True, - cleanlist = []), - ResolverPlaygroundTestCase( - ["dev-libs/B"], - options = {"--depclean": True}, - success = True, - cleanlist = ["dev-libs/B-1"]), - - #With --exclude - ResolverPlaygroundTestCase( - [], - options = {"--depclean": True, "--exclude": ["dev-libs/A"]}, - success = True, - cleanlist = ["dev-libs/B-1"]), - ResolverPlaygroundTestCase( - ["dev-libs/B"], - options = {"--depclean": True, "--exclude": ["dev-libs/B"]}, - success = True, - cleanlist = []), - ) - - playground = ResolverPlayground(installed=installed) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - -class DepcleanWithExcludeAndSlotsTestCase(TestCase): - - def testDepcleanWithExcludeAndSlots(self): - - installed = { - "dev-libs/Z-1": { "SLOT": 1}, - "dev-libs/Z-2": { "SLOT": 2}, - "dev-libs/Y-1": { "RDEPEND": "=dev-libs/Z-1", "SLOT": 1 }, - "dev-libs/Y-2": { "RDEPEND": "=dev-libs/Z-2", "SLOT": 2 }, - } - - world = [ "dev-libs/Y" ] - - test_cases = ( - #Without --exclude. 
- ResolverPlaygroundTestCase( - [], - options = {"--depclean": True}, - success = True, - cleanlist = ["dev-libs/Y-1", "dev-libs/Z-1"]), - ResolverPlaygroundTestCase( - [], - options = {"--depclean": True, "--exclude": ["dev-libs/Z"]}, - success = True, - cleanlist = ["dev-libs/Y-1"]), - ResolverPlaygroundTestCase( - [], - options = {"--depclean": True, "--exclude": ["dev-libs/Y"]}, - success = True, - cleanlist = []), - ) - - playground = ResolverPlayground(installed=installed, world=world) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - -class DepcleanAndWildcardsTestCase(TestCase): - - def testDepcleanAndWildcards(self): - - installed = { - "dev-libs/A-1": { "RDEPEND": "dev-libs/B" }, - "dev-libs/B-1": {}, - } - - test_cases = ( - ResolverPlaygroundTestCase( - ["*/*"], - options = {"--depclean": True}, - success = True, - cleanlist = ["dev-libs/A-1", "dev-libs/B-1"]), - ResolverPlaygroundTestCase( - ["dev-libs/*"], - options = {"--depclean": True}, - success = True, - cleanlist = ["dev-libs/A-1", "dev-libs/B-1"]), - ResolverPlaygroundTestCase( - ["*/A"], - options = {"--depclean": True}, - success = True, - cleanlist = ["dev-libs/A-1"]), - ResolverPlaygroundTestCase( - ["*/B"], - options = {"--depclean": True}, - success = True, - cleanlist = []), - ) - - playground = ResolverPlayground(installed=installed) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_depth.py b/portage_with_autodep/pym/portage/tests/resolver/test_depth.py deleted file mode 100644 index cb1e2dd..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_depth.py +++ /dev/null @@ -1,252 +0,0 @@ -# Copyright 2011 Gentoo Foundation -# Distributed under the terms of 
the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import (ResolverPlayground, - ResolverPlaygroundTestCase) - -class ResolverDepthTestCase(TestCase): - - def testResolverDepth(self): - - ebuilds = { - "dev-libs/A-1": {"RDEPEND" : "dev-libs/B"}, - "dev-libs/A-2": {"RDEPEND" : "dev-libs/B"}, - "dev-libs/B-1": {"RDEPEND" : "dev-libs/C"}, - "dev-libs/B-2": {"RDEPEND" : "dev-libs/C"}, - "dev-libs/C-1": {}, - "dev-libs/C-2": {}, - - "virtual/libusb-0" : {"EAPI" :"2", "SLOT" : "0", "RDEPEND" : "|| ( >=dev-libs/libusb-0.1.12-r1:0 dev-libs/libusb-compat >=sys-freebsd/freebsd-lib-8.0[usb] )"}, - "virtual/libusb-1" : {"EAPI" :"2", "SLOT" : "1", "RDEPEND" : ">=dev-libs/libusb-1.0.4:1"}, - "dev-libs/libusb-0.1.13" : {}, - "dev-libs/libusb-1.0.5" : {"SLOT":"1"}, - "dev-libs/libusb-compat-1" : {}, - "sys-freebsd/freebsd-lib-8": {"IUSE" : "+usb"}, - - "sys-fs/udev-164" : {"EAPI" : "1", "RDEPEND" : "virtual/libusb:0"}, - - "virtual/jre-1.5.0" : {"SLOT" : "1.5", "RDEPEND" : "|| ( =dev-java/sun-jre-bin-1.5.0* =virtual/jdk-1.5.0* )"}, - "virtual/jre-1.5.0-r1" : {"SLOT" : "1.5", "RDEPEND" : "|| ( =dev-java/sun-jre-bin-1.5.0* =virtual/jdk-1.5.0* )"}, - "virtual/jre-1.6.0" : {"SLOT" : "1.6", "RDEPEND" : "|| ( =dev-java/sun-jre-bin-1.6.0* =virtual/jdk-1.6.0* )"}, - "virtual/jre-1.6.0-r1" : {"SLOT" : "1.6", "RDEPEND" : "|| ( =dev-java/sun-jre-bin-1.6.0* =virtual/jdk-1.6.0* )"}, - "virtual/jdk-1.5.0" : {"SLOT" : "1.5", "RDEPEND" : "|| ( =dev-java/sun-jdk-1.5.0* dev-java/gcj-jdk )"}, - "virtual/jdk-1.5.0-r1" : {"SLOT" : "1.5", "RDEPEND" : "|| ( =dev-java/sun-jdk-1.5.0* dev-java/gcj-jdk )"}, - "virtual/jdk-1.6.0" : {"SLOT" : "1.6", "RDEPEND" : "|| ( =dev-java/icedtea-6* =dev-java/sun-jdk-1.6.0* )"}, - "virtual/jdk-1.6.0-r1" : {"SLOT" : "1.6", "RDEPEND" : "|| ( =dev-java/icedtea-6* =dev-java/sun-jdk-1.6.0* )"}, - "dev-java/gcj-jdk-4.5" : {}, - "dev-java/gcj-jdk-4.5-r1" : {}, - "dev-java/icedtea-6.1" : {}, - 
"dev-java/icedtea-6.1-r1" : {}, - "dev-java/sun-jdk-1.5" : {"SLOT" : "1.5"}, - "dev-java/sun-jdk-1.6" : {"SLOT" : "1.6"}, - "dev-java/sun-jre-bin-1.5" : {"SLOT" : "1.5"}, - "dev-java/sun-jre-bin-1.6" : {"SLOT" : "1.6"}, - - "dev-java/ant-core-1.8" : {"DEPEND" : ">=virtual/jdk-1.4"}, - "dev-db/hsqldb-1.8" : {"RDEPEND" : ">=virtual/jre-1.6"}, - } - - installed = { - "dev-libs/A-1": {"RDEPEND" : "dev-libs/B"}, - "dev-libs/B-1": {"RDEPEND" : "dev-libs/C"}, - "dev-libs/C-1": {}, - - "virtual/jre-1.5.0" : {"SLOT" : "1.5", "RDEPEND" : "|| ( =virtual/jdk-1.5.0* =dev-java/sun-jre-bin-1.5.0* )"}, - "virtual/jre-1.6.0" : {"SLOT" : "1.6", "RDEPEND" : "|| ( =virtual/jdk-1.6.0* =dev-java/sun-jre-bin-1.6.0* )"}, - "virtual/jdk-1.5.0" : {"SLOT" : "1.5", "RDEPEND" : "|| ( =dev-java/sun-jdk-1.5.0* dev-java/gcj-jdk )"}, - "virtual/jdk-1.6.0" : {"SLOT" : "1.6", "RDEPEND" : "|| ( =dev-java/icedtea-6* =dev-java/sun-jdk-1.6.0* )"}, - "dev-java/gcj-jdk-4.5" : {}, - "dev-java/icedtea-6.1" : {}, - - "virtual/libusb-0" : {"EAPI" :"2", "SLOT" : "0", "RDEPEND" : "|| ( >=dev-libs/libusb-0.1.12-r1:0 dev-libs/libusb-compat >=sys-freebsd/freebsd-lib-8.0[usb] )"}, - } - - world = ["dev-libs/A"] - - test_cases = ( - ResolverPlaygroundTestCase( - ["dev-libs/A"], - options = {"--update": True, "--deep": 0}, - success = True, - mergelist = ["dev-libs/A-2"]), - - ResolverPlaygroundTestCase( - ["dev-libs/A"], - options = {"--update": True, "--deep": 1}, - success = True, - mergelist = ["dev-libs/B-2", "dev-libs/A-2"]), - - ResolverPlaygroundTestCase( - ["dev-libs/A"], - options = {"--update": True, "--deep": 2}, - success = True, - mergelist = ["dev-libs/C-2", "dev-libs/B-2", "dev-libs/A-2"]), - - ResolverPlaygroundTestCase( - ["@world"], - options = {"--update": True, "--deep": True}, - success = True, - mergelist = ["dev-libs/C-2", "dev-libs/B-2", "dev-libs/A-2"]), - - ResolverPlaygroundTestCase( - ["@world"], - options = {"--emptytree": True}, - success = True, - mergelist = ["dev-libs/C-2", 
"dev-libs/B-2", "dev-libs/A-2"]), - - ResolverPlaygroundTestCase( - ["@world"], - options = {"--selective": True, "--deep": True}, - success = True, - mergelist = []), - - ResolverPlaygroundTestCase( - ["dev-libs/A"], - options = {"--deep": 2}, - success = True, - mergelist = ["dev-libs/A-2"]), - - ResolverPlaygroundTestCase( - ["virtual/jre"], - options = {}, - success = True, - mergelist = ['virtual/jre-1.6.0-r1']), - - ResolverPlaygroundTestCase( - ["virtual/jre"], - options = {"--deep" : True}, - success = True, - mergelist = ['virtual/jre-1.6.0-r1']), - - # Test bug #141118, where we avoid pulling in - # redundant deps, satisfying nested virtuals - # as efficiently as possible. - ResolverPlaygroundTestCase( - ["virtual/jre"], - options = {"--selective" : True, "--deep" : True}, - success = True, - mergelist = []), - - # Test bug #150361, where depgraph._greedy_slots() - # is triggered by --update with AtomArg. - ResolverPlaygroundTestCase( - ["virtual/jre"], - options = {"--update" : True}, - success = True, - ambiguous_merge_order = True, - mergelist = [('virtual/jre-1.6.0-r1', 'virtual/jre-1.5.0-r1')]), - - # Recursively traversed virtual dependencies, and their - # direct dependencies, are considered to have the same - # depth as direct dependencies. 
- ResolverPlaygroundTestCase( - ["virtual/jre"], - options = {"--update" : True, "--deep" : 1}, - success = True, - ambiguous_merge_order = True, - merge_order_assertions=(('dev-java/icedtea-6.1-r1', 'virtual/jdk-1.6.0-r1'), ('virtual/jdk-1.6.0-r1', 'virtual/jre-1.6.0-r1'), - ('dev-java/gcj-jdk-4.5-r1', 'virtual/jdk-1.5.0-r1'), ('virtual/jdk-1.5.0-r1', 'virtual/jre-1.5.0-r1')), - mergelist = [('dev-java/icedtea-6.1-r1', 'dev-java/gcj-jdk-4.5-r1', 'virtual/jdk-1.6.0-r1', 'virtual/jdk-1.5.0-r1', 'virtual/jre-1.6.0-r1', 'virtual/jre-1.5.0-r1')]), - - ResolverPlaygroundTestCase( - ["virtual/jre:1.5"], - options = {"--update" : True}, - success = True, - mergelist = ['virtual/jre-1.5.0-r1']), - - ResolverPlaygroundTestCase( - ["virtual/jre:1.6"], - options = {"--update" : True}, - success = True, - mergelist = ['virtual/jre-1.6.0-r1']), - - # Test that we don't pull in any unnecessary updates - # when --update is not specified, even though we - # specified --deep. - ResolverPlaygroundTestCase( - ["dev-java/ant-core"], - options = {"--deep" : True}, - success = True, - mergelist = ["dev-java/ant-core-1.8"]), - - ResolverPlaygroundTestCase( - ["dev-java/ant-core"], - options = {"--update" : True}, - success = True, - mergelist = ["dev-java/ant-core-1.8"]), - - # Recursively traversed virtual dependencies, and their - # direct dependencies, are considered to have the same - # depth as direct dependencies. - ResolverPlaygroundTestCase( - ["dev-java/ant-core"], - options = {"--update" : True, "--deep" : 1}, - success = True, - mergelist = ['dev-java/icedtea-6.1-r1', 'virtual/jdk-1.6.0-r1', 'dev-java/ant-core-1.8']), - - ResolverPlaygroundTestCase( - ["dev-db/hsqldb"], - options = {"--deep" : True}, - success = True, - mergelist = ["dev-db/hsqldb-1.8"]), - - # Don't traverse deps of an installed package with --deep=0, - # even if it's a virtual. 
- ResolverPlaygroundTestCase( - ["virtual/libusb:0"], - options = {"--selective" : True, "--deep" : 0}, - success = True, - mergelist = []), - - # Satisfy unsatisfied dep of installed package with --deep=1. - ResolverPlaygroundTestCase( - ["virtual/libusb:0"], - options = {"--selective" : True, "--deep" : 1}, - success = True, - mergelist = ['dev-libs/libusb-0.1.13']), - - # Pull in direct dep of virtual, even with --deep=0. - ResolverPlaygroundTestCase( - ["sys-fs/udev"], - options = {"--deep" : 0}, - success = True, - mergelist = ['dev-libs/libusb-0.1.13', 'sys-fs/udev-164']), - - # Test --nodeps with direct virtual deps. - ResolverPlaygroundTestCase( - ["sys-fs/udev"], - options = {"--nodeps" : True}, - success = True, - mergelist = ["sys-fs/udev-164"]), - - # Test that --nodeps overrides --deep. - ResolverPlaygroundTestCase( - ["sys-fs/udev"], - options = {"--nodeps" : True, "--deep" : True}, - success = True, - mergelist = ["sys-fs/udev-164"]), - - # Test that --nodeps overrides --emptytree. - ResolverPlaygroundTestCase( - ["sys-fs/udev"], - options = {"--nodeps" : True, "--emptytree" : True}, - success = True, - mergelist = ["sys-fs/udev-164"]), - - # Test --emptytree with virtuals. 
- ResolverPlaygroundTestCase( - ["sys-fs/udev"], - options = {"--emptytree" : True}, - success = True, - mergelist = ['dev-libs/libusb-0.1.13', 'virtual/libusb-0', 'sys-fs/udev-164']), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed, - world=world) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_eapi.py b/portage_with_autodep/pym/portage/tests/resolver/test_eapi.py deleted file mode 100644 index 525b585..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_eapi.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class EAPITestCase(TestCase): - - def testEAPI(self): - - ebuilds = { - #EAPI-1: IUSE-defaults - "dev-libs/A-1.0": { "EAPI": 0, "IUSE": "+foo" }, - "dev-libs/A-1.1": { "EAPI": 1, "IUSE": "+foo" }, - "dev-libs/A-1.2": { "EAPI": 2, "IUSE": "+foo" }, - "dev-libs/A-1.3": { "EAPI": 3, "IUSE": "+foo" }, - "dev-libs/A-1.4": { "EAPI": "4", "IUSE": "+foo" }, - - #EAPI-1: slot deps - "dev-libs/A-2.0": { "EAPI": 0, "DEPEND": "dev-libs/B:0" }, - "dev-libs/A-2.1": { "EAPI": 1, "DEPEND": "dev-libs/B:0" }, - "dev-libs/A-2.2": { "EAPI": 2, "DEPEND": "dev-libs/B:0" }, - "dev-libs/A-2.3": { "EAPI": 3, "DEPEND": "dev-libs/B:0" }, - "dev-libs/A-2.4": { "EAPI": "4", "DEPEND": "dev-libs/B:0" }, - - #EAPI-2: use deps - "dev-libs/A-3.0": { "EAPI": 0, "DEPEND": "dev-libs/B[foo]" }, - "dev-libs/A-3.1": { "EAPI": 1, "DEPEND": "dev-libs/B[foo]" }, - "dev-libs/A-3.2": { "EAPI": 2, "DEPEND": "dev-libs/B[foo]" }, - "dev-libs/A-3.3": { "EAPI": 3, "DEPEND": "dev-libs/B[foo]" }, - "dev-libs/A-3.4": { "EAPI": "4", "DEPEND": 
"dev-libs/B[foo]" }, - - #EAPI-2: strong blocks - "dev-libs/A-4.0": { "EAPI": 0, "DEPEND": "!!dev-libs/B" }, - "dev-libs/A-4.1": { "EAPI": 1, "DEPEND": "!!dev-libs/B" }, - "dev-libs/A-4.2": { "EAPI": 2, "DEPEND": "!!dev-libs/B" }, - "dev-libs/A-4.3": { "EAPI": 3, "DEPEND": "!!dev-libs/B" }, - "dev-libs/A-4.4": { "EAPI": "4", "DEPEND": "!!dev-libs/B" }, - - #EAPI-4: slot operator deps - #~ "dev-libs/A-5.0": { "EAPI": 0, "DEPEND": "dev-libs/B:*" }, - #~ "dev-libs/A-5.1": { "EAPI": 1, "DEPEND": "dev-libs/B:*" }, - #~ "dev-libs/A-5.2": { "EAPI": 2, "DEPEND": "dev-libs/B:*" }, - #~ "dev-libs/A-5.3": { "EAPI": 3, "DEPEND": "dev-libs/B:*" }, - #~ "dev-libs/A-5.4": { "EAPI": "4", "DEPEND": "dev-libs/B:*" }, - - #EAPI-4: use dep defaults - "dev-libs/A-6.0": { "EAPI": 0, "DEPEND": "dev-libs/B[bar(+)]" }, - "dev-libs/A-6.1": { "EAPI": 1, "DEPEND": "dev-libs/B[bar(+)]" }, - "dev-libs/A-6.2": { "EAPI": 2, "DEPEND": "dev-libs/B[bar(+)]" }, - "dev-libs/A-6.3": { "EAPI": 3, "DEPEND": "dev-libs/B[bar(+)]" }, - "dev-libs/A-6.4": { "EAPI": "4", "DEPEND": "dev-libs/B[bar(+)]" }, - - #EAPI-4: REQUIRED_USE - "dev-libs/A-7.0": { "EAPI": 0, "IUSE": "foo bar", "REQUIRED_USE": "|| ( foo bar )" }, - "dev-libs/A-7.1": { "EAPI": 1, "IUSE": "foo +bar", "REQUIRED_USE": "|| ( foo bar )" }, - "dev-libs/A-7.2": { "EAPI": 2, "IUSE": "foo +bar", "REQUIRED_USE": "|| ( foo bar )" }, - "dev-libs/A-7.3": { "EAPI": 3, "IUSE": "foo +bar", "REQUIRED_USE": "|| ( foo bar )" }, - "dev-libs/A-7.4": { "EAPI": "4", "IUSE": "foo +bar", "REQUIRED_USE": "|| ( foo bar )" }, - - "dev-libs/B-1": {"EAPI": 1, "IUSE": "+foo"}, - } - - test_cases = ( - ResolverPlaygroundTestCase(["=dev-libs/A-1.0"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-1.1"], success = True, mergelist = ["dev-libs/A-1.1"]), - ResolverPlaygroundTestCase(["=dev-libs/A-1.2"], success = True, mergelist = ["dev-libs/A-1.2"]), - ResolverPlaygroundTestCase(["=dev-libs/A-1.3"], success = True, mergelist = ["dev-libs/A-1.3"]), - 
ResolverPlaygroundTestCase(["=dev-libs/A-1.4"], success = True, mergelist = ["dev-libs/A-1.4"]), - - ResolverPlaygroundTestCase(["=dev-libs/A-2.0"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-2.1"], success = True, mergelist = ["dev-libs/B-1", "dev-libs/A-2.1"]), - ResolverPlaygroundTestCase(["=dev-libs/A-2.2"], success = True, mergelist = ["dev-libs/B-1", "dev-libs/A-2.2"]), - ResolverPlaygroundTestCase(["=dev-libs/A-2.3"], success = True, mergelist = ["dev-libs/B-1", "dev-libs/A-2.3"]), - ResolverPlaygroundTestCase(["=dev-libs/A-2.4"], success = True, mergelist = ["dev-libs/B-1", "dev-libs/A-2.4"]), - - ResolverPlaygroundTestCase(["=dev-libs/A-3.0"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-3.1"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-3.2"], success = True, mergelist = ["dev-libs/B-1", "dev-libs/A-3.2"]), - ResolverPlaygroundTestCase(["=dev-libs/A-3.3"], success = True, mergelist = ["dev-libs/B-1", "dev-libs/A-3.3"]), - ResolverPlaygroundTestCase(["=dev-libs/A-3.4"], success = True, mergelist = ["dev-libs/B-1", "dev-libs/A-3.4"]), - - ResolverPlaygroundTestCase(["=dev-libs/A-4.0"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-4.1"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-4.2"], success = True, mergelist = ["dev-libs/A-4.2"]), - ResolverPlaygroundTestCase(["=dev-libs/A-4.3"], success = True, mergelist = ["dev-libs/A-4.3"]), - ResolverPlaygroundTestCase(["=dev-libs/A-4.4"], success = True, mergelist = ["dev-libs/A-4.4"]), - - ResolverPlaygroundTestCase(["=dev-libs/A-5.0"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-5.1"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-5.2"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-5.3"], success = False), - # not implemented: EAPI-4: slot operator deps - #~ ResolverPlaygroundTestCase(["=dev-libs/A-5.4"], success = True, mergelist = ["dev-libs/B-1", "dev-libs/A-5.4"]), - - 
ResolverPlaygroundTestCase(["=dev-libs/A-6.0"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-6.1"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-6.2"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-6.3"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-6.4"], success = True, mergelist = ["dev-libs/B-1", "dev-libs/A-6.4"]), - - ResolverPlaygroundTestCase(["=dev-libs/A-7.0"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-7.1"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-7.2"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-7.3"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-7.4"], success = True, mergelist = ["dev-libs/A-7.4"]), - ) - - playground = ResolverPlayground(ebuilds=ebuilds) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_merge_order.py b/portage_with_autodep/pym/portage/tests/resolver/test_merge_order.py deleted file mode 100644 index 0a52c81..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_merge_order.py +++ /dev/null @@ -1,453 +0,0 @@ -# Copyright 2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import portage -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import (ResolverPlayground, - ResolverPlaygroundTestCase) - -class MergeOrderTestCase(TestCase): - - def testMergeOrder(self): - ebuilds = { - "app-misc/blocker-buildtime-a-1" : {}, - "app-misc/blocker-buildtime-unbuilt-a-1" : { - "DEPEND" : "!app-misc/installed-blocker-a", - }, - "app-misc/blocker-buildtime-unbuilt-hard-a-1" : { - "EAPI" : "2", - "DEPEND" : "!!app-misc/installed-blocker-a", - }, - "app-misc/blocker-update-order-a-1" : {}, - "app-misc/blocker-update-order-hard-a-1" : {}, - 
"app-misc/blocker-update-order-hard-unsolvable-a-1" : {}, - "app-misc/blocker-runtime-a-1" : {}, - "app-misc/blocker-runtime-b-1" : {}, - "app-misc/blocker-runtime-hard-a-1" : {}, - "app-misc/circ-buildtime-a-0": {}, - "app-misc/circ-buildtime-a-1": { - "RDEPEND": "app-misc/circ-buildtime-b", - }, - "app-misc/circ-buildtime-b-1": { - "RDEPEND": "app-misc/circ-buildtime-c", - }, - "app-misc/circ-buildtime-c-1": { - "DEPEND": "app-misc/circ-buildtime-a", - }, - "app-misc/circ-buildtime-unsolvable-a-1": { - "RDEPEND": "app-misc/circ-buildtime-unsolvable-b", - }, - "app-misc/circ-buildtime-unsolvable-b-1": { - "RDEPEND": "app-misc/circ-buildtime-unsolvable-c", - }, - "app-misc/circ-buildtime-unsolvable-c-1": { - "DEPEND": "app-misc/circ-buildtime-unsolvable-a", - }, - "app-misc/circ-post-runtime-a-1": { - "PDEPEND": "app-misc/circ-post-runtime-b", - }, - "app-misc/circ-post-runtime-b-1": { - "RDEPEND": "app-misc/circ-post-runtime-c", - }, - "app-misc/circ-post-runtime-c-1": { - "RDEPEND": "app-misc/circ-post-runtime-a", - }, - "app-misc/circ-runtime-a-1": { - "RDEPEND": "app-misc/circ-runtime-b", - }, - "app-misc/circ-runtime-b-1": { - "RDEPEND": "app-misc/circ-runtime-c", - }, - "app-misc/circ-runtime-c-1": { - "RDEPEND": "app-misc/circ-runtime-a", - }, - "app-misc/circ-satisfied-a-0": { - "RDEPEND": "app-misc/circ-satisfied-b", - }, - "app-misc/circ-satisfied-a-1": { - "RDEPEND": "app-misc/circ-satisfied-b", - }, - "app-misc/circ-satisfied-b-0": { - "RDEPEND": "app-misc/circ-satisfied-c", - }, - "app-misc/circ-satisfied-b-1": { - "RDEPEND": "app-misc/circ-satisfied-c", - }, - "app-misc/circ-satisfied-c-0": { - "DEPEND": "app-misc/circ-satisfied-a", - "RDEPEND": "app-misc/circ-satisfied-a", - }, - "app-misc/circ-satisfied-c-1": { - "DEPEND": "app-misc/circ-satisfied-a", - "RDEPEND": "app-misc/circ-satisfied-a", - }, - "app-misc/circ-smallest-a-1": { - "RDEPEND": "app-misc/circ-smallest-b", - }, - "app-misc/circ-smallest-b-1": { - "RDEPEND": "app-misc/circ-smallest-a", 
- }, - "app-misc/circ-smallest-c-1": { - "RDEPEND": "app-misc/circ-smallest-d", - }, - "app-misc/circ-smallest-d-1": { - "RDEPEND": "app-misc/circ-smallest-e", - }, - "app-misc/circ-smallest-e-1": { - "RDEPEND": "app-misc/circ-smallest-c", - }, - "app-misc/circ-smallest-f-1": { - "RDEPEND": "app-misc/circ-smallest-g app-misc/circ-smallest-a app-misc/circ-smallest-c", - }, - "app-misc/circ-smallest-g-1": { - "RDEPEND": "app-misc/circ-smallest-f", - }, - "app-misc/installed-blocker-a-1" : { - "EAPI" : "2", - "DEPEND" : "!app-misc/blocker-buildtime-a", - "RDEPEND" : "!app-misc/blocker-runtime-a !app-misc/blocker-runtime-b !!app-misc/blocker-runtime-hard-a", - }, - "app-misc/installed-old-version-blocks-a-1" : { - "RDEPEND" : "!app-misc/blocker-update-order-a", - }, - "app-misc/installed-old-version-blocks-a-2" : {}, - "app-misc/installed-old-version-blocks-hard-a-1" : { - "EAPI" : "2", - "RDEPEND" : "!!app-misc/blocker-update-order-hard-a", - }, - "app-misc/installed-old-version-blocks-hard-a-2" : {}, - "app-misc/installed-old-version-blocks-hard-unsolvable-a-1" : { - "EAPI" : "2", - "RDEPEND" : "!!app-misc/blocker-update-order-hard-unsolvable-a", - }, - "app-misc/installed-old-version-blocks-hard-unsolvable-a-2" : { - "DEPEND" : "app-misc/blocker-update-order-hard-unsolvable-a", - "RDEPEND" : "", - }, - "app-misc/some-app-a-1": { - "RDEPEND": "app-misc/circ-runtime-a app-misc/circ-runtime-b", - }, - "app-misc/some-app-b-1": { - "RDEPEND": "app-misc/circ-post-runtime-a app-misc/circ-post-runtime-b", - }, - "app-misc/some-app-c-1": { - "RDEPEND": "app-misc/circ-buildtime-a app-misc/circ-buildtime-b", - }, - "app-admin/eselect-python-20100321" : {}, - "sys-apps/portage-2.1.9.42" : { - "DEPEND" : "dev-lang/python", - "RDEPEND" : "dev-lang/python", - }, - "sys-apps/portage-2.1.9.49" : { - "DEPEND" : "dev-lang/python >=app-admin/eselect-python-20091230", - "RDEPEND" : "dev-lang/python", - }, - "dev-lang/python-3.1" : {}, - "dev-lang/python-3.2" : {}, - "virtual/libc-0" : { 
- "RDEPEND" : "sys-libs/glibc", - }, - "sys-devel/gcc-4.5.2" : {}, - "sys-devel/binutils-2.18" : {}, - "sys-devel/binutils-2.20.1" : {}, - "sys-libs/glibc-2.11" : { - "DEPEND" : "virtual/os-headers sys-devel/gcc sys-devel/binutils", - "RDEPEND": "", - }, - "sys-libs/glibc-2.13" : { - "DEPEND" : "virtual/os-headers sys-devel/gcc sys-devel/binutils", - "RDEPEND": "", - }, - "virtual/os-headers-0" : { - "RDEPEND" : "sys-kernel/linux-headers", - }, - "sys-kernel/linux-headers-2.6.38": { - "DEPEND" : "app-arch/xz-utils", - "RDEPEND": "", - }, - "sys-kernel/linux-headers-2.6.39": { - "DEPEND" : "app-arch/xz-utils", - "RDEPEND": "", - }, - "app-arch/xz-utils-5.0.1" : {}, - "app-arch/xz-utils-5.0.2" : {}, - "dev-util/pkgconfig-0.25-r2" : {}, - "kde-base/kdelibs-3.5.7" : { - "PDEPEND" : "kde-misc/kdnssd-avahi", - }, - "kde-misc/kdnssd-avahi-0.1.2" : { - "DEPEND" : "kde-base/kdelibs app-arch/xz-utils dev-util/pkgconfig", - "RDEPEND" : "kde-base/kdelibs", - }, - "kde-base/kdnssd-3.5.7" : { - "DEPEND" : "kde-base/kdelibs", - "RDEPEND" : "kde-base/kdelibs", - }, - "kde-base/libkdegames-3.5.7" : { - "DEPEND" : "kde-base/kdelibs", - "RDEPEND" : "kde-base/kdelibs", - }, - "kde-base/kmines-3.5.7" : { - "DEPEND" : "kde-base/libkdegames", - "RDEPEND" : "kde-base/libkdegames", - }, - "media-video/libav-0.7_pre20110327" : { - "EAPI" : "2", - "IUSE" : "X +encode", - "RDEPEND" : "!media-video/ffmpeg", - }, - "media-video/ffmpeg-0.7_rc1" : { - "EAPI" : "2", - "IUSE" : "X +encode", - }, - "virtual/ffmpeg-0.6.90" : { - "EAPI" : "2", - "IUSE" : "X +encode", - "RDEPEND" : "|| ( >=media-video/ffmpeg-0.6.90_rc0-r2[X=,encode=] >=media-video/libav-0.6.90_rc[X=,encode=] )", - }, - } - - installed = { - "app-misc/circ-buildtime-a-0": {}, - "app-misc/circ-satisfied-a-0": { - "RDEPEND": "app-misc/circ-satisfied-b", - }, - "app-misc/circ-satisfied-b-0": { - "RDEPEND": "app-misc/circ-satisfied-c", - }, - "app-misc/circ-satisfied-c-0": { - "DEPEND": "app-misc/circ-satisfied-a", - "RDEPEND": 
"app-misc/circ-satisfied-a", - }, - "app-misc/installed-blocker-a-1" : { - "EAPI" : "2", - "DEPEND" : "!app-misc/blocker-buildtime-a", - "RDEPEND" : "!app-misc/blocker-runtime-a !app-misc/blocker-runtime-b !!app-misc/blocker-runtime-hard-a", - }, - "app-misc/installed-old-version-blocks-a-1" : { - "RDEPEND" : "!app-misc/blocker-update-order-a", - }, - "app-misc/installed-old-version-blocks-hard-a-1" : { - "EAPI" : "2", - "RDEPEND" : "!!app-misc/blocker-update-order-hard-a", - }, - "app-misc/installed-old-version-blocks-hard-unsolvable-a-1" : { - "EAPI" : "2", - "RDEPEND" : "!!app-misc/blocker-update-order-hard-unsolvable-a", - }, - "sys-apps/portage-2.1.9.42" : { - "DEPEND" : "dev-lang/python", - "RDEPEND" : "dev-lang/python", - }, - "dev-lang/python-3.1" : {}, - "virtual/libc-0" : { - "RDEPEND" : "sys-libs/glibc", - }, - "sys-devel/binutils-2.18" : {}, - "sys-libs/glibc-2.11" : { - "DEPEND" : "virtual/os-headers sys-devel/gcc sys-devel/binutils", - "RDEPEND": "", - }, - "virtual/os-headers-0" : { - "RDEPEND" : "sys-kernel/linux-headers", - }, - "sys-kernel/linux-headers-2.6.38": { - "DEPEND" : "app-arch/xz-utils", - "RDEPEND": "", - }, - "app-arch/xz-utils-5.0.1" : {}, - "media-video/ffmpeg-0.7_rc1" : { - "EAPI" : "2", - "IUSE" : "X +encode", - "USE" : "encode", - }, - "virtual/ffmpeg-0.6.90" : { - "EAPI" : "2", - "IUSE" : "X +encode", - "USE" : "encode", - "RDEPEND" : "|| ( >=media-video/ffmpeg-0.6.90_rc0-r2[X=,encode=] >=media-video/libav-0.6.90_rc[X=,encode=] )", - }, - } - - test_cases = ( - ResolverPlaygroundTestCase( - ["app-misc/some-app-a"], - success = True, - ambiguous_merge_order = True, - mergelist = [("app-misc/circ-runtime-a-1", "app-misc/circ-runtime-b-1", "app-misc/circ-runtime-c-1"), "app-misc/some-app-a-1"]), - ResolverPlaygroundTestCase( - ["app-misc/some-app-a"], - success = True, - ambiguous_merge_order = True, - mergelist = [("app-misc/circ-runtime-c-1", "app-misc/circ-runtime-b-1", "app-misc/circ-runtime-a-1"), "app-misc/some-app-a-1"]), - # 
Test unsolvable circular dep that is RDEPEND in one - # direction and DEPEND in the other. - ResolverPlaygroundTestCase( - ["app-misc/circ-buildtime-unsolvable-a"], - success = False, - circular_dependency_solutions = {}), - # Test optimal merge order for a circular dep that is - # RDEPEND in one direction and DEPEND in the other. - # This requires an installed instance of the DEPEND - # package in order to be solvable. - ResolverPlaygroundTestCase( - ["app-misc/some-app-c", "app-misc/circ-buildtime-a"], - success = True, - ambiguous_merge_order = True, - mergelist = [("app-misc/circ-buildtime-b-1", "app-misc/circ-buildtime-c-1"), "app-misc/circ-buildtime-a-1", "app-misc/some-app-c-1"]), - # Test optimal merge order for a circular dep that is - # RDEPEND in one direction and PDEPEND in the other. - ResolverPlaygroundTestCase( - ["app-misc/some-app-b"], - success = True, - ambiguous_merge_order = True, - mergelist = ["app-misc/circ-post-runtime-a-1", ("app-misc/circ-post-runtime-b-1", "app-misc/circ-post-runtime-c-1"), "app-misc/some-app-b-1"]), - # Test optimal merge order for a circular dep that is - # RDEPEND in one direction and DEPEND in the other, - # with all dependencies initially satisfied. Optimally, - # the DEPEND/buildtime dep should be updated before the - # package that depends on it, even though it's feasible - # to update it later since it is already satisfied. 
- ResolverPlaygroundTestCase( - ["app-misc/circ-satisfied-a", "app-misc/circ-satisfied-b", "app-misc/circ-satisfied-c"], - success = True, - all_permutations = True, - ambiguous_merge_order = True, - merge_order_assertions = (("app-misc/circ-satisfied-a-1", "app-misc/circ-satisfied-c-1"),), - mergelist = [("app-misc/circ-satisfied-a-1", "app-misc/circ-satisfied-b-1", "app-misc/circ-satisfied-c-1")]), - # In the case of multiple runtime cycles, where some cycles - # may depend on smaller independent cycles, it's optimal - # to merge smaller independent cycles before other cycles - # that depend on them. - ResolverPlaygroundTestCase( - ["app-misc/circ-smallest-a", "app-misc/circ-smallest-c", "app-misc/circ-smallest-f"], - success = True, - ambiguous_merge_order = True, - all_permutations = True, - mergelist = [('app-misc/circ-smallest-a-1', 'app-misc/circ-smallest-b-1'), - ('app-misc/circ-smallest-c-1', 'app-misc/circ-smallest-d-1', 'app-misc/circ-smallest-e-1'), - ('app-misc/circ-smallest-f-1', 'app-misc/circ-smallest-g-1')]), - # installed package has buildtime-only blocker - # that should be ignored - ResolverPlaygroundTestCase( - ["app-misc/blocker-buildtime-a"], - success = True, - mergelist = ["app-misc/blocker-buildtime-a-1"]), - # We're installing a package that an old version of - # an installed package blocks. However, an update is - # available to the old package. The old package should - # be updated first, in order to solve the blocker without - # any need for blocking packages to temporarily overlap. - ResolverPlaygroundTestCase( - ["app-misc/blocker-update-order-a", "app-misc/installed-old-version-blocks-a"], - success = True, - all_permutations = True, - mergelist = ["app-misc/installed-old-version-blocks-a-2", "app-misc/blocker-update-order-a-1"]), - # This is the same as above but with a hard blocker. The hard - # blocker is solved automatically since the update makes it - # irrelevant. 
- ResolverPlaygroundTestCase( - ["app-misc/blocker-update-order-hard-a", "app-misc/installed-old-version-blocks-hard-a"], - success = True, - all_permutations = True, - mergelist = ["app-misc/installed-old-version-blocks-hard-a-2", "app-misc/blocker-update-order-hard-a-1"]), - # This is similar to the above case except that it's unsolvable - # due to merge order, unless bug 250286 is implemented so that - # the installed blocker will be unmerged before installation - # of the package it blocks (rather than after like a soft blocker - # would be handled). The "unmerge before" behavior requested - # in bug 250286 must be optional since essential programs or - # libraries may be temporarily unavailable during a - # non-overlapping update like this. - ResolverPlaygroundTestCase( - ["app-misc/blocker-update-order-hard-unsolvable-a", "app-misc/installed-old-version-blocks-hard-unsolvable-a"], - success = False, - all_permutations = True, - ambiguous_merge_order = True, - merge_order_assertions = (('app-misc/blocker-update-order-hard-unsolvable-a-1', 'app-misc/installed-old-version-blocks-hard-unsolvable-a-2'),), - mergelist = [('app-misc/blocker-update-order-hard-unsolvable-a-1', 'app-misc/installed-old-version-blocks-hard-unsolvable-a-2', '!!app-misc/blocker-update-order-hard-unsolvable-a')]), - # The installed package has runtime blockers that - # should cause it to be uninstalled. The uninstall - # task is executed only after blocking packages have - # been merged. 
- # TODO: distinguish between install/uninstall tasks in mergelist - ResolverPlaygroundTestCase( - ["app-misc/blocker-runtime-a", "app-misc/blocker-runtime-b"], - success = True, - all_permutations = True, - ambiguous_merge_order = True, - mergelist = [("app-misc/blocker-runtime-a-1", "app-misc/blocker-runtime-b-1"), "app-misc/installed-blocker-a-1", ("!app-misc/blocker-runtime-a", "!app-misc/blocker-runtime-b")]), - # We have a soft buildtime blocker against an installed - # package that should cause it to be uninstalled. Note that with - # soft blockers, the blocking packages are allowed to temporarily - # overlap. This allows any essential programs/libraries provided - # by both packages to be available at all times. - # TODO: distinguish between install/uninstall tasks in mergelist - ResolverPlaygroundTestCase( - ["app-misc/blocker-buildtime-unbuilt-a"], - success = True, - mergelist = ["app-misc/blocker-buildtime-unbuilt-a-1", "app-misc/installed-blocker-a-1", "!app-misc/installed-blocker-a"]), - # We have a hard buildtime blocker against an installed - # package that will not resolve automatically (unless - # the option requested in bug 250286 is implemented). - ResolverPlaygroundTestCase( - ["app-misc/blocker-buildtime-unbuilt-hard-a"], - success = False, - mergelist = ['app-misc/blocker-buildtime-unbuilt-hard-a-1', '!!app-misc/installed-blocker-a']), - # An installed package has a hard runtime blocker that - # will not resolve automatically (unless the option - # requested in bug 250286 is implemented). - ResolverPlaygroundTestCase( - ["app-misc/blocker-runtime-hard-a"], - success = False, - mergelist = ['app-misc/blocker-runtime-hard-a-1', '!!app-misc/blocker-runtime-hard-a']), - # Test swapping of providers for a new-style virtual package, - # which relies on delayed evaluation of disjunctive (virtual - # and ||) deps as required to solve bug #264434. 
Note that - # this behavior is not supported for old-style PROVIDE virtuals, - # as reported in bug #339164. - ResolverPlaygroundTestCase( - ["media-video/libav"], - success=True, - mergelist = ['media-video/libav-0.7_pre20110327', 'media-video/ffmpeg-0.7_rc1', '!media-video/ffmpeg']), - # Test that PORTAGE_PACKAGE_ATOM is merged asap. Optimally, - # satisfied deps are always merged after the asap nodes that - # depend on them. - ResolverPlaygroundTestCase( - ["dev-lang/python", portage.const.PORTAGE_PACKAGE_ATOM], - success = True, - all_permutations = True, - mergelist = ['app-admin/eselect-python-20100321', 'sys-apps/portage-2.1.9.49', 'dev-lang/python-3.2']), - # Test that OS_HEADERS_PACKAGE_ATOM and LIBC_PACKAGE_ATOM - # are merged asap, in order to account for implicit - # dependencies. See bug #303567. Optimally, satisfied deps - # are always merged after the asap nodes that depend on them. - ResolverPlaygroundTestCase( - ["app-arch/xz-utils", "sys-kernel/linux-headers", "sys-devel/binutils", "sys-libs/glibc"], - options = {"--complete-graph" : True}, - success = True, - all_permutations = True, - ambiguous_merge_order = True, - mergelist = ['sys-kernel/linux-headers-2.6.39', 'sys-devel/gcc-4.5.2', 'sys-libs/glibc-2.13', ('app-arch/xz-utils-5.0.2', 'sys-devel/binutils-2.20.1')]), - # Test asap install of PDEPEND for bug #180045. 
- ResolverPlaygroundTestCase( - ["kde-base/kmines", "kde-base/kdnssd", "kde-base/kdelibs", "app-arch/xz-utils"], - success = True, - all_permutations = True, - ambiguous_merge_order = True, - merge_order_assertions = ( - ('dev-util/pkgconfig-0.25-r2', 'kde-misc/kdnssd-avahi-0.1.2'), - ('kde-misc/kdnssd-avahi-0.1.2', 'kde-base/libkdegames-3.5.7'), - ('kde-misc/kdnssd-avahi-0.1.2', 'kde-base/kdnssd-3.5.7'), - ('kde-base/libkdegames-3.5.7', 'kde-base/kmines-3.5.7'), - ), - mergelist = [('kde-base/kdelibs-3.5.7', 'dev-util/pkgconfig-0.25-r2', 'kde-misc/kdnssd-avahi-0.1.2', 'app-arch/xz-utils-5.0.2', 'kde-base/libkdegames-3.5.7', 'kde-base/kdnssd-3.5.7', 'kde-base/kmines-3.5.7')]), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_missing_iuse_and_evaluated_atoms.py b/portage_with_autodep/pym/portage/tests/resolver/test_missing_iuse_and_evaluated_atoms.py deleted file mode 100644 index a860e7b..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_missing_iuse_and_evaluated_atoms.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class MissingIUSEandEvaluatedAtomsTestCase(TestCase): - - def testMissingIUSEandEvaluatedAtoms(self): - ebuilds = { - "dev-libs/A-1": { "DEPEND": "dev-libs/B[foo?]", "IUSE": "foo bar", "EAPI": 2 }, - "dev-libs/A-2": { "DEPEND": "dev-libs/B[foo?,bar]", "IUSE": "foo bar", "EAPI": 2 }, - "dev-libs/B-1": { "IUSE": "bar" }, - } - - test_cases = ( - ResolverPlaygroundTestCase( - ["=dev-libs/A-1"], - success = False), - 
ResolverPlaygroundTestCase( - ["=dev-libs/A-2"], - success = False), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, debug=False) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_multirepo.py b/portage_with_autodep/pym/portage/tests/resolver/test_multirepo.py deleted file mode 100644 index 34c6d45..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_multirepo.py +++ /dev/null @@ -1,318 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class MultirepoTestCase(TestCase): - - def testMultirepo(self): - ebuilds = { - #Simple repo selection - "dev-libs/A-1": { }, - "dev-libs/A-1::repo1": { }, - "dev-libs/A-2::repo1": { }, - "dev-libs/A-1::repo2": { }, - - #Packages in exactly one repo - "dev-libs/B-1": { }, - "dev-libs/C-1::repo1": { }, - - #Package in repository 1 and 2, but 1 must be used - "dev-libs/D-1::repo1": { }, - "dev-libs/D-1::repo2": { }, - - "dev-libs/E-1": { }, - "dev-libs/E-1::repo1": { }, - "dev-libs/E-1::repo2": { "SLOT": "1" }, - - "dev-libs/F-1::repo1": { "SLOT": "1" }, - "dev-libs/F-1::repo2": { "SLOT": "1" }, - - "dev-libs/G-1::repo1": { "EAPI" : "4", "IUSE":"+x +y", "REQUIRED_USE" : "" }, - "dev-libs/G-1::repo2": { "EAPI" : "4", "IUSE":"+x +y", "REQUIRED_USE" : "^^ ( x y )" }, - - "dev-libs/H-1": { "KEYWORDS": "x86", "EAPI" : "3", - "RDEPEND" : "|| ( dev-libs/I:2 dev-libs/I:1 )" }, - - "dev-libs/I-1::repo2": { "SLOT" : "1"}, - "dev-libs/I-2::repo2": { "SLOT" : "2"}, - } - - installed = { - "dev-libs/H-1": { "RDEPEND" : "|| ( dev-libs/I:2 dev-libs/I:1 )"}, - "dev-libs/I-2::repo1": {"SLOT" : "2"}, - } - - sets = { - 
"multirepotest": - ( "dev-libs/A::test_repo", ) - } - - test_cases = ( - #Simple repo selection - ResolverPlaygroundTestCase( - ["dev-libs/A"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/A-2::repo1"]), - ResolverPlaygroundTestCase( - ["dev-libs/A::test_repo"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/A-1"]), - ResolverPlaygroundTestCase( - ["dev-libs/A::repo2"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/A-1::repo2"]), - ResolverPlaygroundTestCase( - ["=dev-libs/A-1::repo1"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/A-1::repo1"]), - ResolverPlaygroundTestCase( - ["@multirepotest"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/A-1"]), - - #Packages in exactly one repo - ResolverPlaygroundTestCase( - ["dev-libs/B"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/B-1"]), - ResolverPlaygroundTestCase( - ["dev-libs/C"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/C-1::repo1"]), - - #Package in repository 1 and 2, but 2 must be used - ResolverPlaygroundTestCase( - ["dev-libs/D"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/D-1::repo2"]), - - #Atoms with slots - ResolverPlaygroundTestCase( - ["dev-libs/E"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/E-1::repo2"]), - ResolverPlaygroundTestCase( - ["dev-libs/E:1::repo2"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/E-1::repo2"]), - ResolverPlaygroundTestCase( - ["dev-libs/E:1"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/E-1::repo2"]), - ResolverPlaygroundTestCase( - ["dev-libs/F:1"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/F-1::repo2"]), - ResolverPlaygroundTestCase( - ["=dev-libs/F-1:1"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/F-1::repo2"]), - ResolverPlaygroundTestCase( - 
["=dev-libs/F-1:1::repo1"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/F-1::repo1"]), - - # Dependency on installed dev-libs/C-2 ebuild for which ebuild is - # not available from the same repo should not unnecessarily - # reinstall the same version from a different repo. - ResolverPlaygroundTestCase( - ["dev-libs/H"], - options = {"--update": True, "--deep": True}, - success = True, - mergelist = []), - - # Check interaction between repo priority and unsatisfied - # REQUIRED_USE, for bug #350254. - ResolverPlaygroundTestCase( - ["=dev-libs/G-1"], - check_repo_names = True, - success = False), - - ) - - playground = ResolverPlayground(ebuilds=ebuilds, - installed=installed, sets=sets) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - - - def testMultirepoUserConfig(self): - ebuilds = { - #package.use test - "dev-libs/A-1": { "IUSE": "foo" }, - "dev-libs/A-2::repo1": { "IUSE": "foo" }, - "dev-libs/A-3::repo2": { }, - "dev-libs/B-1": { "DEPEND": "dev-libs/A", "EAPI": 2 }, - "dev-libs/B-2": { "DEPEND": "dev-libs/A[foo]", "EAPI": 2 }, - "dev-libs/B-3": { "DEPEND": "dev-libs/A[-foo]", "EAPI": 2 }, - - #package.keywords test - "dev-libs/C-1": { "KEYWORDS": "~x86" }, - "dev-libs/C-1::repo1": { "KEYWORDS": "~x86" }, - - #package.license - "dev-libs/D-1": { "LICENSE": "TEST" }, - "dev-libs/D-1::repo1": { "LICENSE": "TEST" }, - - #package.mask - "dev-libs/E-1": { }, - "dev-libs/E-1::repo1": { }, - "dev-libs/H-1": { }, - "dev-libs/H-1::repo1": { }, - "dev-libs/I-1::repo2": { "SLOT" : "1"}, - "dev-libs/I-2::repo2": { "SLOT" : "2"}, - "dev-libs/J-1": { "KEYWORDS": "x86", "EAPI" : "3", - "RDEPEND" : "|| ( dev-libs/I:2 dev-libs/I:1 )" }, - - #package.properties - "dev-libs/F-1": { "PROPERTIES": "bar"}, - "dev-libs/F-1::repo1": { "PROPERTIES": "bar"}, - - #package.unmask - "dev-libs/G-1": { }, - "dev-libs/G-1::repo1": { }, - - 
#package.mask with wildcards - "dev-libs/Z-1::repo3": { }, - } - - installed = { - "dev-libs/J-1": { "RDEPEND" : "|| ( dev-libs/I:2 dev-libs/I:1 )"}, - "dev-libs/I-2::repo1": {"SLOT" : "2"}, - } - - user_config = { - "package.use": - ( - "dev-libs/A::repo1 foo", - ), - "package.keywords": - ( - "=dev-libs/C-1::test_repo", - ), - "package.license": - ( - "=dev-libs/D-1::test_repo TEST", - ), - "package.mask": - ( - "dev-libs/E::repo1", - "dev-libs/H", - "dev-libs/I::repo1", - #needed for package.unmask test - "dev-libs/G", - #wildcard test - "*/*::repo3", - ), - "package.properties": - ( - "dev-libs/F::repo1 -bar", - ), - "package.unmask": - ( - "dev-libs/G::test_repo", - ), - } - - test_cases = ( - #package.use test - ResolverPlaygroundTestCase( - ["=dev-libs/B-1"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/A-3::repo2", "dev-libs/B-1"]), - ResolverPlaygroundTestCase( - ["=dev-libs/B-2"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/A-2::repo1", "dev-libs/B-2"]), - ResolverPlaygroundTestCase( - ["=dev-libs/B-3"], - options = { "--autounmask": 'n' }, - success = False, - check_repo_names = True), - - #package.keywords test - ResolverPlaygroundTestCase( - ["dev-libs/C"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/C-1"]), - - #package.license test - ResolverPlaygroundTestCase( - ["dev-libs/D"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/D-1"]), - - #package.mask test - ResolverPlaygroundTestCase( - ["dev-libs/E"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/E-1"]), - - # Dependency on installed dev-libs/C-2 ebuild for which ebuild is - # masked from the same repo should not unnecessarily pull - # in a different slot. It should just pull in the same slot from - # a different repo (bug #351828). 
- ResolverPlaygroundTestCase( - ["dev-libs/J"], - options = {"--update": True, "--deep": True}, - success = True, - mergelist = ["dev-libs/I-2"]), - - #package.properties test - ResolverPlaygroundTestCase( - ["dev-libs/F"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/F-1"]), - - #package.mask test - ResolverPlaygroundTestCase( - ["dev-libs/G"], - success = True, - check_repo_names = True, - mergelist = ["dev-libs/G-1"]), - ResolverPlaygroundTestCase( - ["dev-libs/H"], - options = { "--autounmask": 'n' }, - success = False), - - #package.mask with wildcards - ResolverPlaygroundTestCase( - ["dev-libs/Z"], - options = { "--autounmask": 'n' }, - success = False), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, - installed=installed, user_config=user_config) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_multislot.py b/portage_with_autodep/pym/portage/tests/resolver/test_multislot.py deleted file mode 100644 index 8615419..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_multislot.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class MultSlotTestCase(TestCase): - - def testMultiSlotSelective(self): - """ - Test that a package isn't reinstalled due to SLOT dependency - interaction with USE=multislot (bug #220341). 
- """ - - ebuilds = { - "sys-devel/gcc-4.4.4": { "SLOT": "4.4" }, - } - - installed = { - "sys-devel/gcc-4.4.4": { "SLOT": "i686-pc-linux-gnu-4.4.4" }, - } - - options = {'--update' : True, '--deep' : True, '--selective' : True} - - test_cases = ( - ResolverPlaygroundTestCase( - ["sys-devel/gcc:4.4"], - options = options, - mergelist = [], - success = True), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed) - - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_old_dep_chain_display.py b/portage_with_autodep/pym/portage/tests/resolver/test_old_dep_chain_display.py deleted file mode 100644 index 8aedf59..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_old_dep_chain_display.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class OldDepChainDisplayTestCase(TestCase): - - def testOldDepChainDisplay(self): - ebuilds = { - "dev-libs/A-1": { "DEPEND": "foo? ( dev-libs/B[-bar] )", "IUSE": "+foo", "EAPI": "2" }, - "dev-libs/A-2": { "DEPEND": "foo? ( dev-libs/C )", "IUSE": "+foo", "EAPI": "1" }, - "dev-libs/B-1": { "IUSE": "bar", "DEPEND": "!bar? 
( dev-libs/D[-baz] )", "EAPI": "2" }, - "dev-libs/C-1": { "KEYWORDS": "~x86" }, - "dev-libs/D-1": { "IUSE": "+baz", "EAPI": "1" }, - } - - test_cases = ( - ResolverPlaygroundTestCase( - ["=dev-libs/A-1"], - options = { "--autounmask": 'n' }, - success = False), - ResolverPlaygroundTestCase( - ["=dev-libs/A-2"], - options = { "--autounmask": 'n' }, - success = False), - ) - - playground = ResolverPlayground(ebuilds=ebuilds) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_output.py b/portage_with_autodep/pym/portage/tests/resolver/test_output.py deleted file mode 100644 index 34efe9c..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_output.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class MergelistOutputTestCase(TestCase): - - def testMergelistOutput(self): - """ - This test doesn't check if the output is correct, but makes sure - that we don't backtrace somewhere in the output code. 
- """ - ebuilds = { - "dev-libs/A-1": { "DEPEND": "dev-libs/B dev-libs/C", "IUSE": "+foo", "EAPI": 1 }, - "dev-libs/B-1": { "DEPEND": "dev-libs/D", "IUSE": "foo +bar", "EAPI": 1 }, - "dev-libs/C-1": { "DEPEND": "dev-libs/E", "IUSE": "foo bar" }, - "dev-libs/D-1": { "IUSE": "" }, - "dev-libs/E-1": {}, - - #reinstall for flags - "dev-libs/Z-1": { "IUSE": "+foo", "EAPI": 1 }, - "dev-libs/Y-1": { "IUSE": "foo", "EAPI": 1 }, - "dev-libs/X-1": {}, - "dev-libs/W-1": { "IUSE": "+foo", "EAPI": 1 }, - } - - installed = { - "dev-libs/Z-1": { "USE": "", "IUSE": "foo" }, - "dev-libs/Y-1": { "USE": "foo", "IUSE": "+foo", "EAPI": 1 }, - "dev-libs/X-1": { "USE": "foo", "IUSE": "+foo", "EAPI": 1 }, - "dev-libs/W-1": { }, - } - - option_cobos = ( - (), - ("verbose",), - ("tree",), - ("tree", "unordered-display",), - ("verbose",), - ("verbose", "tree",), - ("verbose", "tree", "unordered-display",), - ) - - test_cases = [] - for options in option_cobos: - testcase_opts = {} - for opt in options: - testcase_opts["--" + opt] = True - - test_cases.append(ResolverPlaygroundTestCase( - ["dev-libs/A"], - options = testcase_opts, - success = True, - ignore_mergelist_order=True, - mergelist = ["dev-libs/D-1", "dev-libs/E-1", "dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"])) - - test_cases.append(ResolverPlaygroundTestCase( - ["dev-libs/Z"], - options = testcase_opts, - success = True, - mergelist = ["dev-libs/Z-1"])) - - test_cases.append(ResolverPlaygroundTestCase( - ["dev-libs/Y"], - options = testcase_opts, - success = True, - mergelist = ["dev-libs/Y-1"])) - - test_cases.append(ResolverPlaygroundTestCase( - ["dev-libs/X"], - options = testcase_opts, - success = True, - mergelist = ["dev-libs/X-1"])) - - test_cases.append(ResolverPlaygroundTestCase( - ["dev-libs/W"], - options = testcase_opts, - success = True, - mergelist = ["dev-libs/W-1"])) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) 
- self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_rebuild.py b/portage_with_autodep/pym/portage/tests/resolver/test_rebuild.py deleted file mode 100644 index b9c4d6d..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_rebuild.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import (ResolverPlayground, - ResolverPlaygroundTestCase) - -class RebuildTestCase(TestCase): - - def testRebuild(self): - """ - Rebuild packages when dependencies that are used at both build-time and - run-time are upgraded. - """ - - ebuilds = { - "sys-libs/x-1": { }, - "sys-libs/x-1-r1": { }, - "sys-libs/x-2": { }, - "sys-apps/a-1": { "DEPEND" : "sys-libs/x", "RDEPEND" : "sys-libs/x"}, - "sys-apps/a-2": { "DEPEND" : "sys-libs/x", "RDEPEND" : "sys-libs/x"}, - "sys-apps/b-1": { "DEPEND" : "sys-libs/x", "RDEPEND" : "sys-libs/x"}, - "sys-apps/b-2": { "DEPEND" : "sys-libs/x", "RDEPEND" : "sys-libs/x"}, - "sys-apps/c-1": { "DEPEND" : "sys-libs/x", "RDEPEND" : ""}, - "sys-apps/c-2": { "DEPEND" : "sys-libs/x", "RDEPEND" : ""}, - "sys-apps/d-1": { "RDEPEND" : "sys-libs/x"}, - "sys-apps/d-2": { "RDEPEND" : "sys-libs/x"}, - "sys-apps/e-2": { "DEPEND" : "sys-libs/x", "RDEPEND" : "sys-libs/x"}, - "sys-apps/f-2": { "DEPEND" : "sys-apps/a", "RDEPEND" : "sys-apps/a"}, - "sys-apps/g-2": { "DEPEND" : "sys-apps/b sys-libs/x", - "RDEPEND" : "sys-apps/b"}, - } - - installed = { - "sys-libs/x-1": { }, - "sys-apps/a-1": { "DEPEND" : "sys-libs/x", "RDEPEND" : "sys-libs/x"}, - "sys-apps/b-1": { "DEPEND" : "sys-libs/x", "RDEPEND" : "sys-libs/x"}, - "sys-apps/c-1": { "DEPEND" : "sys-libs/x", "RDEPEND" : ""}, - "sys-apps/d-1": { "RDEPEND" : "sys-libs/x"}, - "sys-apps/e-1": { "DEPEND" : "sys-libs/x", "RDEPEND" : 
"sys-libs/x"}, - "sys-apps/f-1": { "DEPEND" : "sys-apps/a", "RDEPEND" : "sys-apps/a"}, - "sys-apps/g-1": { "DEPEND" : "sys-apps/b sys-libs/x", - "RDEPEND" : "sys-apps/b"}, - } - - world = ["sys-apps/a", "sys-apps/b", "sys-apps/c", "sys-apps/d", - "sys-apps/e", "sys-apps/f", "sys-apps/g"] - - test_cases = ( - ResolverPlaygroundTestCase( - ["sys-libs/x"], - options = {"--rebuild-if-unbuilt" : True, - "--rebuild-exclude" : ["sys-apps/b"]}, - mergelist = ['sys-libs/x-2', 'sys-apps/a-2', 'sys-apps/e-2'], - ignore_mergelist_order = True, - success = True), - - ResolverPlaygroundTestCase( - ["sys-libs/x"], - options = {"--rebuild-if-unbuilt" : True}, - mergelist = ['sys-libs/x-2', 'sys-apps/a-2', 'sys-apps/b-2', - 'sys-apps/e-2', 'sys-apps/g-2'], - ignore_mergelist_order = True, - success = True), - - ResolverPlaygroundTestCase( - ["sys-libs/x"], - options = {"--rebuild-if-unbuilt" : True, - "--rebuild-ignore" : ["sys-libs/x"]}, - mergelist = ['sys-libs/x-2'], - ignore_mergelist_order = True, - success = True), - - ResolverPlaygroundTestCase( - ["sys-libs/x"], - options = {"--rebuild-if-unbuilt" : True, - "--rebuild-ignore" : ["sys-apps/b"]}, - mergelist = ['sys-libs/x-2', 'sys-apps/a-2', 'sys-apps/b-2', - 'sys-apps/e-2'], - ignore_mergelist_order = True, - success = True), - - ResolverPlaygroundTestCase( - ["=sys-libs/x-1-r1"], - options = {"--rebuild-if-unbuilt" : True}, - mergelist = ['sys-libs/x-1-r1', 'sys-apps/a-2', - 'sys-apps/b-2', 'sys-apps/e-2', 'sys-apps/g-2'], - ignore_mergelist_order = True, - success = True), - - ResolverPlaygroundTestCase( - ["=sys-libs/x-1-r1"], - options = {"--rebuild-if-new-rev" : True}, - mergelist = ['sys-libs/x-1-r1', 'sys-apps/a-2', - 'sys-apps/b-2', 'sys-apps/e-2', 'sys-apps/g-2'], - ignore_mergelist_order = True, - success = True), - - ResolverPlaygroundTestCase( - ["=sys-libs/x-1-r1"], - options = {"--rebuild-if-new-ver" : True}, - mergelist = ['sys-libs/x-1-r1'], - ignore_mergelist_order = True, - success = True), - - 
ResolverPlaygroundTestCase( - ["sys-libs/x"], - options = {"--rebuild-if-new-ver" : True}, - mergelist = ['sys-libs/x-2', 'sys-apps/a-2', - 'sys-apps/b-2', 'sys-apps/e-2', 'sys-apps/g-2'], - ignore_mergelist_order = True, - success = True), - - ResolverPlaygroundTestCase( - ["=sys-libs/x-1"], - options = {"--rebuild-if-new-rev" : True}, - mergelist = ['sys-libs/x-1'], - ignore_mergelist_order = True, - success = True), - - ResolverPlaygroundTestCase( - ["=sys-libs/x-1"], - options = {"--rebuild-if-unbuilt" : True}, - mergelist = ['sys-libs/x-1', 'sys-apps/a-2', - 'sys-apps/b-2', 'sys-apps/e-2', 'sys-apps/g-2'], - ignore_mergelist_order = True, - success = True), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, - installed=installed, world=world) - - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_required_use.py b/portage_with_autodep/pym/portage/tests/resolver/test_required_use.py deleted file mode 100644 index c8810fa..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_required_use.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class RequiredUSETestCase(TestCase): - - def testRequiredUSE(self): - """ - Only simple REQUIRED_USE values here. 
The parser is tested under in dep/testCheckRequiredUse - """ - - ebuilds = { - "dev-libs/A-1" : {"EAPI": "4", "IUSE": "foo bar", "REQUIRED_USE": "|| ( foo bar )"}, - "dev-libs/A-2" : {"EAPI": "4", "IUSE": "foo +bar", "REQUIRED_USE": "|| ( foo bar )"}, - "dev-libs/A-3" : {"EAPI": "4", "IUSE": "+foo bar", "REQUIRED_USE": "|| ( foo bar )"}, - "dev-libs/A-4" : {"EAPI": "4", "IUSE": "+foo +bar", "REQUIRED_USE": "|| ( foo bar )"}, - "dev-libs/A-5" : {"EAPI": "4", "IUSE": "+foo +bar", "REQUIRED_USE": "|| ( )"}, - - "dev-libs/B-1" : {"EAPI": "4", "IUSE": "foo bar", "REQUIRED_USE": "^^ ( foo bar )"}, - "dev-libs/B-2" : {"EAPI": "4", "IUSE": "foo +bar", "REQUIRED_USE": "^^ ( foo bar )"}, - "dev-libs/B-3" : {"EAPI": "4", "IUSE": "+foo bar", "REQUIRED_USE": "^^ ( foo bar )"}, - "dev-libs/B-4" : {"EAPI": "4", "IUSE": "+foo +bar", "REQUIRED_USE": "^^ ( foo bar )"}, - "dev-libs/B-5" : {"EAPI": "4", "IUSE": "+foo +bar", "REQUIRED_USE": "^^ ( )"}, - - "dev-libs/C-1" : {"EAPI": "4", "IUSE": "+foo bar", "REQUIRED_USE": "foo? ( !bar )"}, - "dev-libs/C-2" : {"EAPI": "4", "IUSE": "+foo +bar", "REQUIRED_USE": "foo? ( !bar )"}, - "dev-libs/C-3" : {"EAPI": "4", "IUSE": "+foo +bar", "REQUIRED_USE": "foo? ( bar )"}, - "dev-libs/C-4" : {"EAPI": "4", "IUSE": "+foo bar", "REQUIRED_USE": "foo? ( bar )"}, - "dev-libs/C-5" : {"EAPI": "4", "IUSE": "foo bar", "REQUIRED_USE": "foo? ( bar )"}, - "dev-libs/C-6" : {"EAPI": "4", "IUSE": "foo +bar", "REQUIRED_USE": "foo? ( bar )"}, - "dev-libs/C-7" : {"EAPI": "4", "IUSE": "foo +bar", "REQUIRED_USE": "!foo? ( bar )"}, - "dev-libs/C-8" : {"EAPI": "4", "IUSE": "+foo +bar", "REQUIRED_USE": "!foo? ( bar )"}, - "dev-libs/C-9" : {"EAPI": "4", "IUSE": "+foo bar", "REQUIRED_USE": "!foo? ( bar )"}, - "dev-libs/C-10": {"EAPI": "4", "IUSE": "foo bar", "REQUIRED_USE": "!foo? ( bar )"}, - "dev-libs/C-11": {"EAPI": "4", "IUSE": "foo bar", "REQUIRED_USE": "!foo? ( !bar )"}, - "dev-libs/C-12": {"EAPI": "4", "IUSE": "foo +bar", "REQUIRED_USE": "!foo? 
( !bar )"}, - "dev-libs/C-13": {"EAPI": "4", "IUSE": "+foo +bar", "REQUIRED_USE": "!foo? ( !bar )"}, - "dev-libs/C-14": {"EAPI": "4", "IUSE": "+foo bar", "REQUIRED_USE": "!foo? ( !bar )"}, - - "dev-libs/D-1" : {"EAPI": "4", "IUSE": "+w +x +y z", "REQUIRED_USE": "w? ( x || ( y z ) )"}, - "dev-libs/D-2" : {"EAPI": "4", "IUSE": "+w +x +y +z", "REQUIRED_USE": "w? ( x || ( y z ) )"}, - "dev-libs/D-3" : {"EAPI": "4", "IUSE": "+w +x y z", "REQUIRED_USE": "w? ( x || ( y z ) )"}, - "dev-libs/D-4" : {"EAPI": "4", "IUSE": "+w x +y +z", "REQUIRED_USE": "w? ( x || ( y z ) )"}, - "dev-libs/D-5" : {"EAPI": "4", "IUSE": "w x y z", "REQUIRED_USE": "w? ( x || ( y z ) )"}, - } - - test_cases = ( - ResolverPlaygroundTestCase(["=dev-libs/A-1"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/A-2"], success = True, mergelist=["dev-libs/A-2"]), - ResolverPlaygroundTestCase(["=dev-libs/A-3"], success = True, mergelist=["dev-libs/A-3"]), - ResolverPlaygroundTestCase(["=dev-libs/A-4"], success = True, mergelist=["dev-libs/A-4"]), - ResolverPlaygroundTestCase(["=dev-libs/A-5"], success = True, mergelist=["dev-libs/A-5"]), - - ResolverPlaygroundTestCase(["=dev-libs/B-1"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/B-2"], success = True, mergelist=["dev-libs/B-2"]), - ResolverPlaygroundTestCase(["=dev-libs/B-3"], success = True, mergelist=["dev-libs/B-3"]), - ResolverPlaygroundTestCase(["=dev-libs/B-4"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/B-5"], success = True, mergelist=["dev-libs/B-5"]), - - ResolverPlaygroundTestCase(["=dev-libs/C-1"], success = True, mergelist=["dev-libs/C-1"]), - ResolverPlaygroundTestCase(["=dev-libs/C-2"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/C-3"], success = True, mergelist=["dev-libs/C-3"]), - ResolverPlaygroundTestCase(["=dev-libs/C-4"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/C-5"], success = True, mergelist=["dev-libs/C-5"]), - ResolverPlaygroundTestCase(["=dev-libs/C-6"], 
success = True, mergelist=["dev-libs/C-6"]), - ResolverPlaygroundTestCase(["=dev-libs/C-7"], success = True, mergelist=["dev-libs/C-7"]), - ResolverPlaygroundTestCase(["=dev-libs/C-8"], success = True, mergelist=["dev-libs/C-8"]), - ResolverPlaygroundTestCase(["=dev-libs/C-9"], success = True, mergelist=["dev-libs/C-9"]), - ResolverPlaygroundTestCase(["=dev-libs/C-10"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/C-11"], success = True, mergelist=["dev-libs/C-11"]), - ResolverPlaygroundTestCase(["=dev-libs/C-12"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/C-13"], success = True, mergelist=["dev-libs/C-13"]), - ResolverPlaygroundTestCase(["=dev-libs/C-14"], success = True, mergelist=["dev-libs/C-14"]), - - ResolverPlaygroundTestCase(["=dev-libs/D-1"], success = True, mergelist=["dev-libs/D-1"]), - ResolverPlaygroundTestCase(["=dev-libs/D-2"], success = True, mergelist=["dev-libs/D-2"]), - ResolverPlaygroundTestCase(["=dev-libs/D-3"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/D-4"], success = False), - ResolverPlaygroundTestCase(["=dev-libs/D-5"], success = True, mergelist=["dev-libs/D-5"]), - ) - - playground = ResolverPlayground(ebuilds=ebuilds) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() - - def testRequiredUseOrDeps(self): - - ebuilds = { - "dev-libs/A-1": { "IUSE": "+x +y", "REQUIRED_USE": "^^ ( x y )", "EAPI": "4" }, - "dev-libs/B-1": { "IUSE": "+x +y", "REQUIRED_USE": "", "EAPI": "4" }, - "app-misc/p-1": { "RDEPEND": "|| ( =dev-libs/A-1 =dev-libs/B-1 )" }, - } - - test_cases = ( - # This should fail and show a REQUIRED_USE error for - # dev-libs/A-1, since this choice it preferred. 
- ResolverPlaygroundTestCase( - ["=app-misc/p-1"], - success = False), - ) - - playground = ResolverPlayground(ebuilds=ebuilds) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_simple.py b/portage_with_autodep/pym/portage/tests/resolver/test_simple.py deleted file mode 100644 index 0bcfc4b..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_simple.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class SimpleResolverTestCase(TestCase): - - def testSimple(self): - ebuilds = { - "dev-libs/A-1": { "KEYWORDS": "x86" }, - "dev-libs/A-2": { "KEYWORDS": "~x86" }, - "dev-libs/B-1.2": {}, - - "app-misc/Z-1": { "DEPEND": "|| ( app-misc/Y ( app-misc/X app-misc/W ) )", "RDEPEND": "" }, - "app-misc/Y-1": { "KEYWORDS": "~x86" }, - "app-misc/X-1": {}, - "app-misc/W-1": {}, - } - installed = { - "dev-libs/A-1": {}, - "dev-libs/B-1.1": {}, - } - - test_cases = ( - ResolverPlaygroundTestCase(["dev-libs/A"], success = True, mergelist = ["dev-libs/A-1"]), - ResolverPlaygroundTestCase(["=dev-libs/A-2"], options = { "--autounmask": 'n' }, success = False), - - ResolverPlaygroundTestCase( - ["dev-libs/A"], - options = {"--noreplace": True}, - success = True, - mergelist = []), - ResolverPlaygroundTestCase( - ["dev-libs/B"], - options = {"--noreplace": True}, - success = True, - mergelist = []), - ResolverPlaygroundTestCase( - ["dev-libs/B"], - options = {"--update": True}, - success = True, - mergelist = ["dev-libs/B-1.2"]), - - ResolverPlaygroundTestCase( - ["app-misc/Z"], - success = True, - mergelist = ["app-misc/W-1", "app-misc/X-1", 
"app-misc/Z-1"]), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_slot_collisions.py b/portage_with_autodep/pym/portage/tests/resolver/test_slot_collisions.py deleted file mode 100644 index 4867cea..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_slot_collisions.py +++ /dev/null @@ -1,143 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class SlotCollisionTestCase(TestCase): - - def testSlotCollision(self): - - ebuilds = { - "dev-libs/A-1": { "PDEPEND": "foo? ( dev-libs/B )", "IUSE": "foo" }, - "dev-libs/B-1": { "IUSE": "foo" }, - "dev-libs/C-1": { "DEPEND": "dev-libs/A[foo]", "EAPI": 2 }, - "dev-libs/D-1": { "DEPEND": "dev-libs/A[foo=] dev-libs/B[foo=]", "IUSE": "foo", "EAPI": 2 }, - "dev-libs/E-1": { }, - "dev-libs/E-2": { "IUSE": "foo" }, - - "app-misc/Z-1": { }, - "app-misc/Z-2": { }, - "app-misc/Y-1": { "DEPEND": "=app-misc/Z-1" }, - "app-misc/Y-2": { "DEPEND": ">app-misc/Z-1" }, - "app-misc/X-1": { "DEPEND": "=app-misc/Z-2" }, - "app-misc/X-2": { "DEPEND": "<app-misc/Z-2" }, - - "sci-libs/K-1": { "IUSE": "+foo", "EAPI": 1 }, - "sci-libs/L-1": { "DEPEND": "sci-libs/K[-foo]", "EAPI": 2 }, - "sci-libs/M-1": { "DEPEND": "sci-libs/K[foo=]", "IUSE": "+foo", "EAPI": 2 }, - - "sci-libs/Q-1": { "SLOT": "1", "IUSE": "+bar foo", "EAPI": 1 }, - "sci-libs/Q-2": { "SLOT": "2", "IUSE": "+bar +foo", "EAPI": 2, "PDEPEND": "sci-libs/Q:1[bar?,foo?]" }, - "sci-libs/P-1": { "DEPEND": "sci-libs/Q:1[foo=]", "IUSE": "foo", "EAPI": 2 }, - - "sys-libs/A-1": { "RDEPEND": "foo? 
( sys-libs/J[foo=] )", "IUSE": "+foo", "EAPI": "4" }, - "sys-libs/B-1": { "RDEPEND": "bar? ( sys-libs/J[bar=] )", "IUSE": "+bar", "EAPI": "4" }, - "sys-libs/C-1": { "RDEPEND": "sys-libs/J[bar]", "EAPI": "4" }, - "sys-libs/D-1": { "RDEPEND": "sys-libs/J[bar?]", "IUSE": "bar", "EAPI": "4" }, - "sys-libs/E-1": { "RDEPEND": "sys-libs/J[foo(+)?]", "IUSE": "+foo", "EAPI": "4" }, - "sys-libs/F-1": { "RDEPEND": "sys-libs/J[foo(+)]", "EAPI": "4" }, - "sys-libs/J-1": { "IUSE": "+foo", "EAPI": "4" }, - "sys-libs/J-2": { "IUSE": "+bar", "EAPI": "4" }, - - "app-misc/A-1": { "IUSE": "foo +bar", "REQUIRED_USE": "^^ ( foo bar )", "EAPI": "4" }, - "app-misc/B-1": { "DEPEND": "=app-misc/A-1[foo=]", "IUSE": "foo", "EAPI": 2 }, - "app-misc/C-1": { "DEPEND": "=app-misc/A-1[foo]", "EAPI": 2 }, - "app-misc/E-1": { "RDEPEND": "dev-libs/E[foo?]", "IUSE": "foo", "EAPI": "2" }, - "app-misc/F-1": { "RDEPEND": "=dev-libs/E-1", "IUSE": "foo", "EAPI": "2" }, - } - installed = { - "dev-libs/A-1": { "PDEPEND": "foo? ( dev-libs/B )", "IUSE": "foo", "USE": "foo" }, - "dev-libs/B-1": { "IUSE": "foo", "USE": "foo" }, - "dev-libs/C-1": { "DEPEND": "dev-libs/A[foo]", "EAPI": 2 }, - "dev-libs/D-1": { "DEPEND": "dev-libs/A[foo=] dev-libs/B[foo=]", "IUSE": "foo", "USE": "foo", "EAPI": 2 }, - - "sci-libs/K-1": { "IUSE": "foo", "USE": "" }, - "sci-libs/L-1": { "DEPEND": "sci-libs/K[-foo]" }, - - "sci-libs/Q-1": { "SLOT": "1", "IUSE": "+bar +foo", "USE": "bar foo", "EAPI": 1 }, - "sci-libs/Q-2": { "SLOT": "2", "IUSE": "+bar +foo", "USE": "bar foo", "EAPI": 2, "PDEPEND": "sci-libs/Q:1[bar?,foo?]" }, - - "app-misc/A-1": { "IUSE": "+foo bar", "USE": "foo", "REQUIRED_USE": "^^ ( foo bar )", "EAPI": "4" }, - } - - test_cases = ( - #A qt-*[qt3support] like mess. 
- ResolverPlaygroundTestCase( - ["dev-libs/A", "dev-libs/B", "dev-libs/C", "dev-libs/D"], - options = { "--autounmask": 'n' }, - success = False, - mergelist = ["dev-libs/A-1", "dev-libs/B-1", "dev-libs/C-1", "dev-libs/D-1"], - ignore_mergelist_order = True, - slot_collision_solutions = [ {"dev-libs/A-1": {"foo": True}, "dev-libs/D-1": {"foo": True}} ]), - - ResolverPlaygroundTestCase( - ["sys-libs/A", "sys-libs/B", "sys-libs/C", "sys-libs/D", "sys-libs/E", "sys-libs/F"], - options = { "--autounmask": 'n' }, - success = False, - ignore_mergelist_order = True, - slot_collision_solutions = [], - mergelist = ['sys-libs/J-2', 'sys-libs/J-1', 'sys-libs/A-1', 'sys-libs/B-1', 'sys-libs/C-1', 'sys-libs/D-1', 'sys-libs/E-1', 'sys-libs/F-1'], - ), - - #A version based conflicts, nothing we can do. - ResolverPlaygroundTestCase( - ["=app-misc/X-1", "=app-misc/Y-1"], - success = False, - mergelist = ["app-misc/Z-1", "app-misc/Z-2", "app-misc/X-1", "app-misc/Y-1"], - ignore_mergelist_order = True, - slot_collision_solutions = [] - ), - ResolverPlaygroundTestCase( - ["=app-misc/X-2", "=app-misc/Y-2"], - success = False, - mergelist = ["app-misc/Z-1", "app-misc/Z-2", "app-misc/X-2", "app-misc/Y-2"], - ignore_mergelist_order = True, - slot_collision_solutions = [] - ), - - ResolverPlaygroundTestCase( - ["=app-misc/E-1", "=app-misc/F-1"], - success = False, - mergelist = ["dev-libs/E-1", "dev-libs/E-2", "app-misc/E-1", "app-misc/F-1"], - ignore_mergelist_order = True, - slot_collision_solutions = [] - ), - - #Simple cases. - ResolverPlaygroundTestCase( - ["sci-libs/L", "sci-libs/M"], - success = False, - mergelist = ["sci-libs/L-1", "sci-libs/M-1", "sci-libs/K-1"], - ignore_mergelist_order = True, - slot_collision_solutions = [{"sci-libs/K-1": {"foo": False}, "sci-libs/M-1": {"foo": False}}] - ), - - #Avoid duplicates. 
- ResolverPlaygroundTestCase( - ["sci-libs/P", "sci-libs/Q:2"], - success = False, - options = { "--update": True, "--complete-graph": True, "--autounmask": 'n' }, - mergelist = ["sci-libs/P-1", "sci-libs/Q-1"], - ignore_mergelist_order = True, - all_permutations=True, - slot_collision_solutions = [{"sci-libs/Q-1": {"foo": True}, "sci-libs/P-1": {"foo": True}}] - ), - - #Conflict with REQUIRED_USE - ResolverPlaygroundTestCase( - ["=app-misc/C-1", "=app-misc/B-1"], - all_permutations = True, - slot_collision_solutions = [], - mergelist = ["app-misc/A-1", "app-misc/C-1", "app-misc/B-1"], - ignore_mergelist_order = True, - success = False), - ) - - playground = ResolverPlayground(ebuilds=ebuilds, installed=installed) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/resolver/test_use_dep_defaults.py b/portage_with_autodep/pym/portage/tests/resolver/test_use_dep_defaults.py deleted file mode 100644 index 7d17106..0000000 --- a/portage_with_autodep/pym/portage/tests/resolver/test_use_dep_defaults.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.tests.resolver.ResolverPlayground import ResolverPlayground, ResolverPlaygroundTestCase - -class UseDepDefaultsTestCase(TestCase): - - def testUseDepDefaultse(self): - - ebuilds = { - "dev-libs/A-1": { "DEPEND": "dev-libs/B[foo]", "RDEPEND": "dev-libs/B[foo]", "EAPI": "2" }, - "dev-libs/A-2": { "DEPEND": "dev-libs/B[foo(+)]", "RDEPEND": "dev-libs/B[foo(+)]", "EAPI": "4" }, - "dev-libs/A-3": { "DEPEND": "dev-libs/B[foo(-)]", "RDEPEND": "dev-libs/B[foo(-)]", "EAPI": "4" }, - "dev-libs/B-1": { "IUSE": "+foo", "EAPI": "1" }, - "dev-libs/B-2": {}, - } - - test_cases = ( - ResolverPlaygroundTestCase( - ["=dev-libs/A-1"], 
- success = True, - mergelist = ["dev-libs/B-1", "dev-libs/A-1"]), - ResolverPlaygroundTestCase( - ["=dev-libs/A-2"], - success = True, - mergelist = ["dev-libs/B-2", "dev-libs/A-2"]), - ResolverPlaygroundTestCase( - ["=dev-libs/A-3"], - success = True, - mergelist = ["dev-libs/B-1", "dev-libs/A-3"]), - ) - - playground = ResolverPlayground(ebuilds=ebuilds) - try: - for test_case in test_cases: - playground.run_TestCase(test_case) - self.assertEqual(test_case.test_success, True, test_case.fail_msg) - finally: - playground.cleanup() diff --git a/portage_with_autodep/pym/portage/tests/runTests b/portage_with_autodep/pym/portage/tests/runTests index 6b3311d..4c10087 100755 --- a/portage_with_autodep/pym/portage/tests/runTests +++ b/portage_with_autodep/pym/portage/tests/runTests @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python -Wd # runTests.py -- Portage Unit Test Functionality # Copyright 2006 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 @@ -41,6 +41,4 @@ del path if __name__ == "__main__": - result = tests.main() - if not result.wasSuccessful(): - sys.exit(1) + sys.exit(tests.main()) diff --git a/portage_with_autodep/pym/portage/tests/sets/__init__.py b/portage_with_autodep/pym/portage/tests/sets/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/sets/__init__.py +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/sets/base/__init__.py b/portage_with_autodep/pym/portage/tests/sets/base/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/sets/base/__init__.py +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/sets/base/__test__ b/portage_with_autodep/pym/portage/tests/sets/base/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/sets/base/__test__ +++ /dev/null diff --git 
a/portage_with_autodep/pym/portage/tests/sets/base/testInternalPackageSet.py b/portage_with_autodep/pym/portage/tests/sets/base/testInternalPackageSet.py deleted file mode 100644 index e0a3478..0000000 --- a/portage_with_autodep/pym/portage/tests/sets/base/testInternalPackageSet.py +++ /dev/null @@ -1,61 +0,0 @@ -# testConfigFileSet.py -- Portage Unit Testing Functionality -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.dep import Atom -from portage.exception import InvalidAtom -from portage.tests import TestCase -from portage._sets.base import InternalPackageSet - -class InternalPackageSetTestCase(TestCase): - """Simple Test Case for InternalPackageSet""" - - def testInternalPackageSet(self): - i1_atoms = set(("dev-libs/A", ">=dev-libs/A-1", "dev-libs/B")) - i2_atoms = set(("dev-libs/A", "dev-libs/*", "dev-libs/C")) - - i1 = InternalPackageSet(initial_atoms=i1_atoms) - i2 = InternalPackageSet(initial_atoms=i2_atoms, allow_wildcard=True) - self.assertRaises(InvalidAtom, InternalPackageSet, initial_atoms=i2_atoms) - - self.assertEqual(i1.getAtoms(), i1_atoms) - self.assertEqual(i2.getAtoms(), i2_atoms) - - new_atom = Atom("*/*", allow_wildcard=True) - self.assertRaises(InvalidAtom, i1.add, new_atom) - i2.add(new_atom) - - i2_atoms.add(new_atom) - - self.assertEqual(i1.getAtoms(), i1_atoms) - self.assertEqual(i2.getAtoms(), i2_atoms) - - removed_atom = Atom("dev-libs/A") - - i1.remove(removed_atom) - i2.remove(removed_atom) - - i1_atoms.remove(removed_atom) - i2_atoms.remove(removed_atom) - - self.assertEqual(i1.getAtoms(), i1_atoms) - self.assertEqual(i2.getAtoms(), i2_atoms) - - update_atoms = [Atom("dev-libs/C"), Atom("dev-*/C", allow_wildcard=True)] - - self.assertRaises(InvalidAtom, i1.update, update_atoms) - i2.update(update_atoms) - - i2_atoms.update(update_atoms) - - self.assertEqual(i1.getAtoms(), i1_atoms) - self.assertEqual(i2.getAtoms(), i2_atoms) - - replace_atoms = 
[Atom("dev-libs/D"), Atom("*-libs/C", allow_wildcard=True)] - - self.assertRaises(InvalidAtom, i1.replace, replace_atoms) - i2.replace(replace_atoms) - - i2_atoms = set(replace_atoms) - - self.assertEqual(i2.getAtoms(), i2_atoms) diff --git a/portage_with_autodep/pym/portage/tests/sets/files/__init__.py b/portage_with_autodep/pym/portage/tests/sets/files/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/sets/files/__init__.py +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/sets/files/__test__ b/portage_with_autodep/pym/portage/tests/sets/files/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/sets/files/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/sets/files/testConfigFileSet.py b/portage_with_autodep/pym/portage/tests/sets/files/testConfigFileSet.py deleted file mode 100644 index 3ec26a0..0000000 --- a/portage_with_autodep/pym/portage/tests/sets/files/testConfigFileSet.py +++ /dev/null @@ -1,32 +0,0 @@ -# testConfigFileSet.py -- Portage Unit Testing Functionality -# Copyright 2007 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import tempfile - -from portage import os -from portage.tests import TestCase, test_cps -from portage._sets.files import ConfigFileSet - -class ConfigFileSetTestCase(TestCase): - """Simple Test Case for ConfigFileSet""" - - def setUp(self): - fd, self.testfile = tempfile.mkstemp(suffix=".testdata", prefix=self.__class__.__name__, text=True) - f = os.fdopen(fd, 'w') - for i in range(0, len(test_cps)): - atom = test_cps[i] - if i % 2 == 0: - f.write(atom + ' abc def\n') - else: - f.write(atom + '\n') - f.close() - - def tearDown(self): - os.unlink(self.testfile) - - def testConfigStaticFileSet(self): - s = ConfigFileSet(self.testfile) - s.load() - self.assertEqual(set(test_cps), s.getAtoms()) - diff --git 
a/portage_with_autodep/pym/portage/tests/sets/files/testStaticFileSet.py b/portage_with_autodep/pym/portage/tests/sets/files/testStaticFileSet.py deleted file mode 100644 index d515a67..0000000 --- a/portage_with_autodep/pym/portage/tests/sets/files/testStaticFileSet.py +++ /dev/null @@ -1,27 +0,0 @@ -# testStaticFileSet.py -- Portage Unit Testing Functionality -# Copyright 2007 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import tempfile - -from portage import os -from portage.tests import TestCase, test_cps -from portage._sets.files import StaticFileSet - -class StaticFileSetTestCase(TestCase): - """Simple Test Case for StaticFileSet""" - - def setUp(self): - fd, self.testfile = tempfile.mkstemp(suffix=".testdata", prefix=self.__class__.__name__, text=True) - f = os.fdopen(fd, 'w') - f.write("\n".join(test_cps)) - f.close() - - def tearDown(self): - os.unlink(self.testfile) - - def testSampleStaticFileSet(self): - s = StaticFileSet(self.testfile) - s.load() - self.assertEqual(set(test_cps), s.getAtoms()) - diff --git a/portage_with_autodep/pym/portage/tests/sets/shell/__init__.py b/portage_with_autodep/pym/portage/tests/sets/shell/__init__.py deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/sets/shell/__init__.py +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/sets/shell/__test__ b/portage_with_autodep/pym/portage/tests/sets/shell/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/sets/shell/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/sets/shell/testShell.py b/portage_with_autodep/pym/portage/tests/sets/shell/testShell.py deleted file mode 100644 index 2cdd833..0000000 --- a/portage_with_autodep/pym/portage/tests/sets/shell/testShell.py +++ /dev/null @@ -1,28 +0,0 @@ -# testCommandOututSet.py -- Portage Unit Testing Functionality -# Copyright 2007 Gentoo Foundation -# 
Distributed under the terms of the GNU General Public License v2 - -from portage.process import find_binary -from portage.tests import TestCase, test_cps -from portage._sets.shell import CommandOutputSet - -class CommandOutputSetTestCase(TestCase): - """Simple Test Case for CommandOutputSet""" - - def setUp(self): - pass - - def tearDown(self): - pass - - def testCommand(self): - - input = set(test_cps) - command = find_binary("bash") - command += " -c '" - for a in input: - command += " echo -e \"%s\" ; " % a - command += "'" - s = CommandOutputSet(command) - atoms = s.getAtoms() - self.assertEqual(atoms, input) diff --git a/portage_with_autodep/pym/portage/tests/unicode/__test__ b/portage_with_autodep/pym/portage/tests/unicode/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/unicode/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/unicode/test_string_format.py b/portage_with_autodep/pym/portage/tests/unicode/test_string_format.py deleted file mode 100644 index fb6e8e0..0000000 --- a/portage_with_autodep/pym/portage/tests/unicode/test_string_format.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -import sys - -from portage import _encodings, _unicode_decode -from portage.exception import PortageException -from portage.tests import TestCase -from _emerge.DependencyArg import DependencyArg -from _emerge.UseFlagDisplay import UseFlagDisplay - -if sys.hexversion >= 0x3000000: - basestring = str - -STR_IS_UNICODE = sys.hexversion >= 0x3000000 - -class StringFormatTestCase(TestCase): - """ - Test that string formatting works correctly in the current interpretter, - which may be either python2 or python3. - """ - - # In order to get some unicode test strings in a way that works in - # both python2 and python3, write them here as byte strings and - # decode them before use. 
This assumes _encodings['content'] is - # utf_8. - - unicode_strings = ( - b'\xE2\x80\x98', - b'\xE2\x80\x99', - ) - - def testDependencyArg(self): - - self.assertEqual(_encodings['content'], 'utf_8') - - for arg_bytes in self.unicode_strings: - arg_unicode = _unicode_decode(arg_bytes, encoding=_encodings['content']) - dependency_arg = DependencyArg(arg=arg_unicode) - - # Force unicode format string so that __unicode__() is - # called in python2. - formatted_str = _unicode_decode("%s") % (dependency_arg,) - self.assertEqual(formatted_str, arg_unicode) - - if STR_IS_UNICODE: - - # Test the __str__ method which returns unicode in python3 - formatted_str = "%s" % (dependency_arg,) - self.assertEqual(formatted_str, arg_unicode) - - else: - - # Test the __str__ method which returns encoded bytes in python2 - formatted_bytes = "%s" % (dependency_arg,) - self.assertEqual(formatted_bytes, arg_bytes) - - def testPortageException(self): - - self.assertEqual(_encodings['content'], 'utf_8') - - for arg_bytes in self.unicode_strings: - arg_unicode = _unicode_decode(arg_bytes, encoding=_encodings['content']) - e = PortageException(arg_unicode) - - # Force unicode format string so that __unicode__() is - # called in python2. 
- formatted_str = _unicode_decode("%s") % (e,) - self.assertEqual(formatted_str, arg_unicode) - - if STR_IS_UNICODE: - - # Test the __str__ method which returns unicode in python3 - formatted_str = "%s" % (e,) - self.assertEqual(formatted_str, arg_unicode) - - else: - - # Test the __str__ method which returns encoded bytes in python2 - formatted_bytes = "%s" % (e,) - self.assertEqual(formatted_bytes, arg_bytes) - - def testUseFlagDisplay(self): - - self.assertEqual(_encodings['content'], 'utf_8') - - for enabled in (True, False): - for forced in (True, False): - for arg_bytes in self.unicode_strings: - arg_unicode = _unicode_decode(arg_bytes, encoding=_encodings['content']) - e = UseFlagDisplay(arg_unicode, enabled, forced) - - # Force unicode format string so that __unicode__() is - # called in python2. - formatted_str = _unicode_decode("%s") % (e,) - self.assertEqual(isinstance(formatted_str, basestring), True) - - if STR_IS_UNICODE: - - # Test the __str__ method which returns unicode in python3 - formatted_str = "%s" % (e,) - self.assertEqual(isinstance(formatted_str, str), True) - - else: - - # Test the __str__ method which returns encoded bytes in python2 - formatted_bytes = "%s" % (e,) - self.assertEqual(isinstance(formatted_bytes, bytes), True) diff --git a/portage_with_autodep/pym/portage/tests/util/__init__.py b/portage_with_autodep/pym/portage/tests/util/__init__.py deleted file mode 100644 index 69ce189..0000000 --- a/portage_with_autodep/pym/portage/tests/util/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# tests/portage.util/__init__.py -- Portage Unit Test functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - diff --git a/portage_with_autodep/pym/portage/tests/util/__test__ b/portage_with_autodep/pym/portage/tests/util/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/util/__test__ +++ /dev/null diff --git 
a/portage_with_autodep/pym/portage/tests/util/test_digraph.py b/portage_with_autodep/pym/portage/tests/util/test_digraph.py deleted file mode 100644 index b65c0b1..0000000 --- a/portage_with_autodep/pym/portage/tests/util/test_digraph.py +++ /dev/null @@ -1,201 +0,0 @@ -# Copyright 2010-2011 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.util.digraph import digraph -#~ from portage.util import noiselimit -import portage.util - -class DigraphTest(TestCase): - - def testBackwardCompatibility(self): - g = digraph() - f = g.copy() - g.addnode("A", None) - self.assertEqual("A" in g, True) - self.assertEqual(bool(g), True) - self.assertEqual(g.allnodes(), ["A"]) - self.assertEqual(g.allzeros(), ["A"]) - self.assertEqual(g.hasnode("A"), True) - - def testDigraphEmptyGraph(self): - g = digraph() - f = g.clone() - for x in g, f: - self.assertEqual(bool(x), False) - self.assertEqual(x.contains("A"), False) - self.assertEqual(x.firstzero(), None) - self.assertRaises(KeyError, x.remove, "A") - x.delnode("A") - self.assertEqual(list(x), []) - self.assertEqual(x.get("A"), None) - self.assertEqual(x.get("A", "default"), "default") - self.assertEqual(x.all_nodes(), []) - self.assertEqual(x.leaf_nodes(), []) - self.assertEqual(x.root_nodes(), []) - self.assertRaises(KeyError, x.child_nodes, "A") - self.assertRaises(KeyError, x.parent_nodes, "A") - self.assertEqual(x.hasallzeros(), True) - self.assertRaises(KeyError, list, x.bfs("A")) - self.assertRaises(KeyError, x.shortest_path, "A", "B") - self.assertRaises(KeyError, x.remove_edge, "A", "B") - self.assertEqual(x.get_cycles(), []) - x.difference_update("A") - portage.util.noiselimit = -2 - x.debug_print() - portage.util.noiselimit = 0 - - def testDigraphCircle(self): - g = digraph() - g.add("A", "B", -1) - g.add("B", "C", 0) - g.add("C", "D", 1) - g.add("D", "A", 2) - - f = g.clone() - for x in g, f: - self.assertEqual(bool(x), True) - 
self.assertEqual(x.contains("A"), True) - self.assertEqual(x.firstzero(), None) - self.assertRaises(KeyError, x.remove, "Z") - x.delnode("Z") - self.assertEqual(list(x), ["A", "B", "C", "D"]) - self.assertEqual(x.get("A"), "A") - self.assertEqual(x.get("A", "default"), "A") - self.assertEqual(x.all_nodes(), ["A", "B", "C", "D"]) - self.assertEqual(x.leaf_nodes(), []) - self.assertEqual(x.root_nodes(), []) - self.assertEqual(x.child_nodes("A"), ["D"]) - self.assertEqual(x.child_nodes("A", ignore_priority=2), []) - self.assertEqual(x.parent_nodes("A"), ["B"]) - self.assertEqual(x.parent_nodes("A", ignore_priority=-2), ["B"]) - self.assertEqual(x.parent_nodes("A", ignore_priority=-1), []) - self.assertEqual(x.hasallzeros(), False) - self.assertEqual(list(x.bfs("A")), [(None, "A"), ("A", "D"), ("D", "C"), ("C", "B")]) - self.assertEqual(x.shortest_path("A", "D"), ["A", "D"]) - self.assertEqual(x.shortest_path("D", "A"), ["D", "C", "B", "A"]) - self.assertEqual(x.shortest_path("A", "D", ignore_priority=2), None) - self.assertEqual(x.shortest_path("D", "A", ignore_priority=-2), ["D", "C", "B", "A"]) - cycles = set(tuple(y) for y in x.get_cycles()) - self.assertEqual(cycles, set([("D", "C", "B", "A"), ("C", "B", "A", "D"), ("B", "A", "D", "C"), \ - ("A", "D", "C", "B")])) - x.remove_edge("A", "B") - self.assertEqual(x.get_cycles(), []) - x.difference_update(["D"]) - self.assertEqual(x.all_nodes(), ["A", "B", "C"]) - portage.util.noiselimit = -2 - x.debug_print() - portage.util.noiselimit = 0 - - def testDigraphTree(self): - g = digraph() - g.add("B", "A", -1) - g.add("C", "A", 0) - g.add("D", "C", 1) - g.add("E", "C", 2) - - f = g.clone() - for x in g, f: - self.assertEqual(bool(x), True) - self.assertEqual(x.contains("A"), True) - self.assertEqual(x.firstzero(), "B") - self.assertRaises(KeyError, x.remove, "Z") - x.delnode("Z") - self.assertEqual(set(x), set(["A", "B", "C", "D", "E"])) - self.assertEqual(x.get("A"), "A") - self.assertEqual(x.get("A", "default"), "A") - 
self.assertEqual(set(x.all_nodes()), set(["A", "B", "C", "D", "E"])) - self.assertEqual(set(x.leaf_nodes()), set(["B", "D", "E"])) - self.assertEqual(set(x.leaf_nodes(ignore_priority=0)), set(["A", "B", "D", "E"])) - self.assertEqual(x.root_nodes(), ["A"]) - self.assertEqual(set(x.root_nodes(ignore_priority=0)), set(["A", "B", "C"])) - self.assertEqual(set(x.child_nodes("A")), set(["B", "C"])) - self.assertEqual(x.child_nodes("A", ignore_priority=2), []) - self.assertEqual(x.parent_nodes("B"), ["A"]) - self.assertEqual(x.parent_nodes("B", ignore_priority=-2), ["A"]) - self.assertEqual(x.parent_nodes("B", ignore_priority=-1), []) - self.assertEqual(x.hasallzeros(), False) - self.assertEqual(list(x.bfs("A")), [(None, "A"), ("A", "C"), ("A", "B"), ("C", "E"), ("C", "D")]) - self.assertEqual(x.shortest_path("A", "D"), ["A", "C", "D"]) - self.assertEqual(x.shortest_path("D", "A"), None) - self.assertEqual(x.shortest_path("A", "D", ignore_priority=2), None) - cycles = set(tuple(y) for y in x.get_cycles()) - self.assertEqual(cycles, set()) - x.remove("D") - self.assertEqual(set(x.all_nodes()), set(["A", "B", "C", "E"])) - x.remove("C") - self.assertEqual(set(x.all_nodes()), set(["A", "B", "E"])) - portage.util.noiselimit = -2 - x.debug_print() - portage.util.noiselimit = 0 - self.assertRaises(KeyError, x.remove_edge, "A", "E") - - def testDigraphCompleteGraph(self): - g = digraph() - g.add("A", "B", -1) - g.add("B", "A", 1) - g.add("A", "C", 1) - g.add("C", "A", -1) - g.add("C", "B", 1) - g.add("B", "C", 1) - - f = g.clone() - for x in g, f: - self.assertEqual(bool(x), True) - self.assertEqual(x.contains("A"), True) - self.assertEqual(x.firstzero(), None) - self.assertRaises(KeyError, x.remove, "Z") - x.delnode("Z") - self.assertEqual(list(x), ["A", "B", "C"]) - self.assertEqual(x.get("A"), "A") - self.assertEqual(x.get("A", "default"), "A") - self.assertEqual(x.all_nodes(), ["A", "B", "C"]) - self.assertEqual(x.leaf_nodes(), []) - self.assertEqual(x.root_nodes(), []) - 
self.assertEqual(set(x.child_nodes("A")), set(["B", "C"])) - self.assertEqual(x.child_nodes("A", ignore_priority=0), ["B"]) - self.assertEqual(set(x.parent_nodes("A")), set(["B", "C"])) - self.assertEqual(x.parent_nodes("A", ignore_priority=0), ["C"]) - self.assertEqual(x.parent_nodes("A", ignore_priority=1), []) - self.assertEqual(x.hasallzeros(), False) - self.assertEqual(list(x.bfs("A")), [(None, "A"), ("A", "C"), ("A", "B")]) - self.assertEqual(x.shortest_path("A", "C"), ["A", "C"]) - self.assertEqual(x.shortest_path("C", "A"), ["C", "A"]) - self.assertEqual(x.shortest_path("A", "C", ignore_priority=0), ["A", "B", "C"]) - self.assertEqual(x.shortest_path("C", "A", ignore_priority=0), ["C", "A"]) - cycles = set(tuple(y) for y in x.get_cycles()) - self.assertEqual(cycles, set([("C", "A"), ("A", "B"), ("A", "C")])) - x.remove_edge("A", "B") - self.assertEqual(x.get_cycles(), [["C", "A"], ["A", "C"], ["C", "B"]]) - x.difference_update(["C"]) - self.assertEqual(x.all_nodes(), ["A", "B"]) - portage.util.noiselimit = -2 - x.debug_print() - portage.util.noiselimit = 0 - - def testDigraphIgnorePriority(self): - - def always_true(dummy): - return True - - def always_false(dummy): - return False - - g = digraph() - g.add("A", "B") - - self.assertEqual(g.parent_nodes("A"), ["B"]) - self.assertEqual(g.parent_nodes("A", ignore_priority=always_false), ["B"]) - self.assertEqual(g.parent_nodes("A", ignore_priority=always_true), []) - - self.assertEqual(g.child_nodes("B"), ["A"]) - self.assertEqual(g.child_nodes("B", ignore_priority=always_false), ["A"]) - self.assertEqual(g.child_nodes("B", ignore_priority=always_true), []) - - self.assertEqual(g.leaf_nodes(), ["A"]) - self.assertEqual(g.leaf_nodes(ignore_priority=always_false), ["A"]) - self.assertEqual(g.leaf_nodes(ignore_priority=always_true), ["A", "B"]) - - self.assertEqual(g.root_nodes(), ["B"]) - self.assertEqual(g.root_nodes(ignore_priority=always_false), ["B"]) - 
self.assertEqual(g.root_nodes(ignore_priority=always_true), ["A", "B"]) diff --git a/portage_with_autodep/pym/portage/tests/util/test_getconfig.py b/portage_with_autodep/pym/portage/tests/util/test_getconfig.py deleted file mode 100644 index 22e0bfc..0000000 --- a/portage_with_autodep/pym/portage/tests/util/test_getconfig.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage import os -from portage.const import PORTAGE_BASE_PATH -from portage.tests import TestCase -from portage.util import getconfig - -class GetConfigTestCase(TestCase): - """ - Test that getconfig() produces that same result as bash would when - sourcing the same input. - """ - - _cases = { - 'FETCHCOMMAND' : '/usr/bin/wget -t 3 -T 60 --passive-ftp -O "${DISTDIR}/${FILE}" "${URI}"', - 'FETCHCOMMAND_RSYNC' : 'rsync -avP "${URI}" "${DISTDIR}/${FILE}"', - 'FETCHCOMMAND_SFTP' : 'bash -c "x=\\${2#sftp://} ; host=\\${x%%/*} ; port=\\${host##*:} ; host=\\${host%:*} ; [[ \\${host} = \\${port} ]] && port=22 ; exec sftp -P \\${port} \\"\\${host}:/\\${x#*/}\\" \\"\\$1\\"" sftp "${DISTDIR}/${FILE}" "${URI}"', - 'FETCHCOMMAND_SSH' : 'bash -c "x=\\${2#ssh://} ; host=\\${x%%/*} ; port=\\${host##*:} ; host=\\${host%:*} ; [[ \\${host} = \\${port} ]] && port=22 ; exec rsync --rsh=\\"ssh -p\\${port}\\" -avP \\"\\${host}:/\\${x#*/}\\" \\"\\$1\\"" rsync "${DISTDIR}/${FILE}" "${URI}"', - 'PORTAGE_ELOG_MAILSUBJECT' : '[portage] ebuild log for ${PACKAGE} on ${HOST}' - } - - def testGetConfig(self): - - make_globals_file = os.path.join(PORTAGE_BASE_PATH, - 'cnf', 'make.globals') - d = getconfig(make_globals_file) - for k, v in self._cases.items(): - self.assertEqual(d[k], v) diff --git a/portage_with_autodep/pym/portage/tests/util/test_grabdict.py b/portage_with_autodep/pym/portage/tests/util/test_grabdict.py deleted file mode 100644 index e62a75d..0000000 --- a/portage_with_autodep/pym/portage/tests/util/test_grabdict.py 
+++ /dev/null @@ -1,11 +0,0 @@ -# test_grabDict.py -- Portage Unit Testing Functionality -# Copyright 2006-2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -#from portage.util import grabdict - -class GrabDictTestCase(TestCase): - - def testGrabDictPass(self): - pass diff --git a/portage_with_autodep/pym/portage/tests/util/test_normalizedPath.py b/portage_with_autodep/pym/portage/tests/util/test_normalizedPath.py deleted file mode 100644 index f993886..0000000 --- a/portage_with_autodep/pym/portage/tests/util/test_normalizedPath.py +++ /dev/null @@ -1,14 +0,0 @@ -# test_normalizePath.py -- Portage Unit Testing Functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase - -class NormalizePathTestCase(TestCase): - - def testNormalizePath(self): - - from portage.util import normalize_path - path = "///foo/bar/baz" - good = "/foo/bar/baz" - self.assertEqual(normalize_path(path), good) diff --git a/portage_with_autodep/pym/portage/tests/util/test_stackDictList.py b/portage_with_autodep/pym/portage/tests/util/test_stackDictList.py deleted file mode 100644 index 678001c..0000000 --- a/portage_with_autodep/pym/portage/tests/util/test_stackDictList.py +++ /dev/null @@ -1,17 +0,0 @@ -# test_stackDictList.py -- Portage Unit Testing Functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase - -class StackDictListTestCase(TestCase): - - def testStackDictList(self): - from portage.util import stack_dictlist - - tests = [ ({'a':'b'},{'x':'y'},False,{'a':['b'],'x':['y']}) ] - tests.append(( {'KEYWORDS':['alpha','x86']},{'KEYWORDS':['-*']},True,{} )) - tests.append(( {'KEYWORDS':['alpha','x86']},{'KEYWORDS':['-x86']},True,{'KEYWORDS':['alpha']} )) - for test in tests: - self.assertEqual( - 
stack_dictlist([test[0],test[1]],incremental=test[2]), test[3] ) diff --git a/portage_with_autodep/pym/portage/tests/util/test_stackDicts.py b/portage_with_autodep/pym/portage/tests/util/test_stackDicts.py deleted file mode 100644 index 0d2cadd..0000000 --- a/portage_with_autodep/pym/portage/tests/util/test_stackDicts.py +++ /dev/null @@ -1,36 +0,0 @@ -# test_stackDicts.py -- Portage Unit Testing Functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.util import stack_dicts - - -class StackDictsTestCase(TestCase): - - def testStackDictsPass(self): - - tests = [ ( [ { "a":"b" }, { "b":"c" } ], { "a":"b", "b":"c" }, - False, [], False ), - ( [ { "a":"b" }, { "a":"c" } ], { "a":"b c" }, - True, [], False ), - ( [ { "a":"b" }, { "a":"c" } ], { "a":"b c" }, - False, ["a"], False ), - ( [ { "a":"b" }, None ], { "a":"b" }, - False, [], True ), - ( [ None ], {}, False, [], False ), - ( [ None, {}], {}, False, [], True ) ] - - - for test in tests: - result = stack_dicts( test[0], test[2], test[3], test[4] ) - self.assertEqual( result, test[1] ) - - def testStackDictsFail(self): - - tests = [ ( [ None, {} ], None, False, [], True ), - ( [ { "a":"b"}, {"a":"c" } ], { "a":"b c" }, - False, [], False ) ] - for test in tests: - result = stack_dicts( test[0], test[2], test[3], test[4] ) - self.assertNotEqual( result , test[1] ) diff --git a/portage_with_autodep/pym/portage/tests/util/test_stackLists.py b/portage_with_autodep/pym/portage/tests/util/test_stackLists.py deleted file mode 100644 index 8d01ea5..0000000 --- a/portage_with_autodep/pym/portage/tests/util/test_stackLists.py +++ /dev/null @@ -1,19 +0,0 @@ -# test_stackLists.py -- Portage Unit Testing Functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.util import stack_lists - -class 
StackListsTestCase(TestCase): - - def testStackLists(self): - - tests = [ ( [ ['a','b','c'], ['d','e','f'] ], ['a','c','b','e','d','f'], False ), - ( [ ['a','x'], ['b','x'] ], ['a','x','b'], False ), - ( [ ['a','b','c'], ['-*'] ], [], True ), - ( [ ['a'], ['-a'] ], [], True ) ] - - for test in tests: - result = stack_lists( test[0], test[2] ) - self.assertEqual( result , test[1] ) diff --git a/portage_with_autodep/pym/portage/tests/util/test_uniqueArray.py b/portage_with_autodep/pym/portage/tests/util/test_uniqueArray.py deleted file mode 100644 index 2a1a209..0000000 --- a/portage_with_autodep/pym/portage/tests/util/test_uniqueArray.py +++ /dev/null @@ -1,24 +0,0 @@ -# test_uniqueArray.py -- Portage Unit Testing Functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage import os -from portage.tests import TestCase -from portage.util import unique_array - -class UniqueArrayTestCase(TestCase): - - def testUniqueArrayPass(self): - """ - test portage.util.uniqueArray() - """ - - tests = [ ( ["a","a","a",os,os,[],[],[]], ['a',os,[]] ), - ( [1,1,1,2,3,4,4] , [1,2,3,4]) ] - - for test in tests: - result = unique_array( test[0] ) - for item in test[1]: - number = result.count(item) - self.assertFalse( number is not 1, msg="%s contains %s of %s, \ - should be only 1" % (result, number, item) ) diff --git a/portage_with_autodep/pym/portage/tests/util/test_varExpand.py b/portage_with_autodep/pym/portage/tests/util/test_varExpand.py deleted file mode 100644 index 7b528d6..0000000 --- a/portage_with_autodep/pym/portage/tests/util/test_varExpand.py +++ /dev/null @@ -1,92 +0,0 @@ -# test_varExpand.py -- Portage Unit Testing Functionality -# Copyright 2006-2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.util import varexpand - -class VarExpandTestCase(TestCase): - - def testVarExpandPass(self): - - varDict = { 
"a":"5", "b":"7", "c":"-5" } - for key in varDict: - result = varexpand( "$%s" % key, varDict ) - - self.assertFalse( result != varDict[key], - msg="Got %s != %s, from varexpand( %s, %s )" % \ - ( result, varDict[key], "$%s" % key, varDict ) ) - result = varexpand( "${%s}" % key, varDict ) - self.assertFalse( result != varDict[key], - msg="Got %s != %s, from varexpand( %s, %s )" % \ - ( result, varDict[key], "${%s}" % key, varDict ) ) - - def testVarExpandBackslashes(self): - """ - We want to behave like bash does when expanding a variable - assignment in a sourced file, in which case it performs - backslash removal for \\ and \$ but nothing more. It also - removes escaped newline characters. Note that we don't - handle escaped quotes here, since getconfig() uses shlex - to handle that earlier. - """ - - varDict = {} - tests = [ - ("\\", "\\"), - ("\\\\", "\\"), - ("\\\\\\", "\\\\"), - ("\\\\\\\\", "\\\\"), - ("\\$", "$"), - ("\\\\$", "\\$"), - ("\\a", "\\a"), - ("\\b", "\\b"), - ("\\n", "\\n"), - ("\\r", "\\r"), - ("\\t", "\\t"), - ("\\\n", ""), - ("\\\"", "\\\""), - ("\\'", "\\'"), - ] - for test in tests: - result = varexpand( test[0], varDict ) - self.assertFalse( result != test[1], - msg="Got %s != %s from varexpand( %s, %s )" \ - % ( result, test[1], test[0], varDict ) ) - - def testVarExpandDoubleQuotes(self): - - varDict = { "a":"5" } - tests = [ ("\"${a}\"", "\"5\"") ] - for test in tests: - result = varexpand( test[0], varDict ) - self.assertFalse( result != test[1], - msg="Got %s != %s from varexpand( %s, %s )" \ - % ( result, test[1], test[0], varDict ) ) - - def testVarExpandSingleQuotes(self): - - varDict = { "a":"5" } - tests = [ ("\'${a}\'", "\'${a}\'") ] - for test in tests: - result = varexpand( test[0], varDict ) - self.assertFalse( result != test[1], - msg="Got %s != %s from varexpand( %s, %s )" \ - % ( result, test[1], test[0], varDict ) ) - - def testVarExpandFail(self): - - varDict = { "a":"5", "b":"7", "c":"15" } - - testVars = [ "fail" ] - 
- for var in testVars: - result = varexpand( "$%s" % var, varDict ) - self.assertFalse( len(result), - msg="Got %s == %s, from varexpand( %s, %s )" \ - % ( result, var, "$%s" % var, varDict ) ) - - result = varexpand( "${%s}" % var, varDict ) - self.assertFalse( len(result), - msg="Got %s == %s, from varexpand( %s, %s )" \ - % ( result, var, "${%s}" % var, varDict ) ) diff --git a/portage_with_autodep/pym/portage/tests/versions/__init__.py b/portage_with_autodep/pym/portage/tests/versions/__init__.py deleted file mode 100644 index 2b14180..0000000 --- a/portage_with_autodep/pym/portage/tests/versions/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# tests/portage.versions/__init__.py -- Portage Unit Test functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 diff --git a/portage_with_autodep/pym/portage/tests/versions/__test__ b/portage_with_autodep/pym/portage/tests/versions/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/versions/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/versions/test_cpv_sort_key.py b/portage_with_autodep/pym/portage/tests/versions/test_cpv_sort_key.py deleted file mode 100644 index a223d78..0000000 --- a/portage_with_autodep/pym/portage/tests/versions/test_cpv_sort_key.py +++ /dev/null @@ -1,16 +0,0 @@ -# Copyright 2010 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.versions import cpv_sort_key - -class CpvSortKeyTestCase(TestCase): - - def testCpvSortKey(self): - - tests = [ (("a/b-2_alpha", "a", "b", "a/b-2", "a/a-1", "a/b-1"), - ( "a", "a/a-1", "a/b-1", "a/b-2_alpha", "a/b-2", "b")), - ] - - for test in tests: - self.assertEqual( tuple(sorted(test[0], key=cpv_sort_key())), test[1] ) diff --git a/portage_with_autodep/pym/portage/tests/versions/test_vercmp.py 
b/portage_with_autodep/pym/portage/tests/versions/test_vercmp.py deleted file mode 100644 index aa7969c..0000000 --- a/portage_with_autodep/pym/portage/tests/versions/test_vercmp.py +++ /dev/null @@ -1,80 +0,0 @@ -# test_vercmp.py -- Portage Unit Testing Functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 - -from portage.tests import TestCase -from portage.versions import vercmp - -class VerCmpTestCase(TestCase): - """ A simple testCase for portage.versions.vercmp() - """ - - def testVerCmpGreater(self): - - tests = [ ( "6.0", "5.0"), ("5.0","5"), - ("1.0-r1", "1.0-r0"), - ("1.0-r1", "1.0"), - ("cvs.9999", "9999"), - ("999999999999999999999999999999", "999999999999999999999999999998"), - ("1.0.0", "1.0"), - ("1.0.0", "1.0b"), - ("1b", "1"), - ("1b_p1", "1_p1"), - ("1.1b", "1.1"), - ("12.2.5", "12.2b"), - ] - for test in tests: - self.assertFalse( vercmp( test[0], test[1] ) <= 0, msg="%s < %s? Wrong!" % (test[0],test[1]) ) - - def testVerCmpLess(self): - """ - pre < alpha < beta < rc < p -> test each of these, they are inductive (or should be..) - """ - tests = [ ( "4.0", "5.0"), ("5", "5.0"), ("1.0_pre2","1.0_p2"), - ("1.0_alpha2", "1.0_p2"),("1.0_alpha1", "1.0_beta1"),("1.0_beta3","1.0_rc3"), - ("1.001000000000000000001", "1.001000000000000000002"), - ("1.00100000000", "1.0010000000000000001"), - ("9999", "cvs.9999"), - ("999999999999999999999999999998", "999999999999999999999999999999"), - ("1.01", "1.1"), - ("1.0-r0", "1.0-r1"), - ("1.0", "1.0-r1"), - ("1.0", "1.0.0"), - ("1.0b", "1.0.0"), - ("1_p1", "1b_p1"), - ("1", "1b"), - ("1.1", "1.1b"), - ("12.2b", "12.2.5"), - ] - for test in tests: - self.assertFalse( vercmp( test[0], test[1]) >= 0, msg="%s > %s? Wrong!" 
% (test[0],test[1])) - - - def testVerCmpEqual(self): - - tests = [ ("4.0", "4.0"), - ("1.0", "1.0"), - ("1.0-r0", "1.0"), - ("1.0", "1.0-r0"), - ("1.0-r0", "1.0-r0"), - ("1.0-r1", "1.0-r1")] - for test in tests: - self.assertFalse( vercmp( test[0], test[1]) != 0, msg="%s != %s? Wrong!" % (test[0],test[1])) - - def testVerNotEqual(self): - - tests = [ ("1","2"),("1.0_alpha","1.0_pre"),("1.0_beta","1.0_alpha"), - ("0", "0.0"), - ("cvs.9999", "9999"), - ("1.0-r0", "1.0-r1"), - ("1.0-r1", "1.0-r0"), - ("1.0", "1.0-r1"), - ("1.0-r1", "1.0"), - ("1.0", "1.0.0"), - ("1_p1", "1b_p1"), - ("1b", "1"), - ("1.1b", "1.1"), - ("12.2b", "12.2"), - ] - for test in tests: - self.assertFalse( vercmp( test[0], test[1]) == 0, msg="%s == %s? Wrong!" % (test[0],test[1])) diff --git a/portage_with_autodep/pym/portage/tests/xpak/__init__.py b/portage_with_autodep/pym/portage/tests/xpak/__init__.py deleted file mode 100644 index 9c3f524..0000000 --- a/portage_with_autodep/pym/portage/tests/xpak/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# tests/portage.dep/__init__.py -- Portage Unit Test functionality -# Copyright 2006 Gentoo Foundation -# Distributed under the terms of the GNU General Public License v2 diff --git a/portage_with_autodep/pym/portage/tests/xpak/__test__ b/portage_with_autodep/pym/portage/tests/xpak/__test__ deleted file mode 100644 index e69de29..0000000 --- a/portage_with_autodep/pym/portage/tests/xpak/__test__ +++ /dev/null diff --git a/portage_with_autodep/pym/portage/tests/xpak/test_decodeint.py b/portage_with_autodep/pym/portage/tests/xpak/test_decodeint.py deleted file mode 100644 index 2da5735..0000000 --- a/portage_with_autodep/pym/portage/tests/xpak/test_decodeint.py +++ /dev/null @@ -1,16 +0,0 @@ -# xpak/test_decodeint.py -# Copright Gentoo Foundation 2006 -# Portage Unit Testing Functionality - -from portage.tests import TestCase -from portage.xpak import decodeint, encodeint - -class testDecodeIntTestCase(TestCase): - - def testDecodeInt(self): - - for n in 
range(1000): - self.assertEqual(decodeint(encodeint(n)), n) - - for n in (2 ** 32 - 1,): - self.assertEqual(decodeint(encodeint(n)), n) diff --git a/portage_with_autodep/pym/portage/update.py b/portage_with_autodep/pym/portage/update.py index 52ab506..34e4663 100644 --- a/portage_with_autodep/pym/portage/update.py +++ b/portage_with_autodep/pym/portage/update.py @@ -86,10 +86,11 @@ def fixdbentries(update_iter, dbdir): mydata = {} for myfile in [f for f in os.listdir(dbdir) if f not in ignored_dbentries]: file_path = os.path.join(dbdir, myfile) - mydata[myfile] = io.open(_unicode_encode(file_path, + with io.open(_unicode_encode(file_path, encoding=_encodings['fs'], errors='strict'), mode='r', encoding=_encodings['repo.content'], - errors='replace').read() + errors='replace') as f: + mydata[myfile] = f.read() updated_items = update_dbentries(update_iter, mydata) for myfile, mycontent in updated_items.items(): file_path = os.path.join(dbdir, myfile) @@ -132,10 +133,11 @@ def grab_updates(updpath, prev_mtimes=None): if update_data or \ file_path not in prev_mtimes or \ long(prev_mtimes[file_path]) != mystat[stat.ST_MTIME]: - content = io.open(_unicode_encode(file_path, + f = io.open(_unicode_encode(file_path, encoding=_encodings['fs'], errors='strict'), - mode='r', encoding=_encodings['repo.content'], errors='replace' - ).read() + mode='r', encoding=_encodings['repo.content'], errors='replace') + content = f.read() + f.close() update_data.append((file_path, mystat, content)) return update_data @@ -155,6 +157,7 @@ def parse_updates(mycontent): if len(mysplit) != 3: errors.append(_("ERROR: Update command invalid '%s'") % myline) continue + valid = True for i in (1, 2): try: atom = Atom(mysplit[i]) @@ -168,7 +171,11 @@ def parse_updates(mycontent): else: errors.append( _("ERROR: Malformed update entry '%s'") % myline) + valid = False break + if not valid: + continue + if mysplit[0] == "slotmove": if len(mysplit)!=4: errors.append(_("ERROR: Update command invalid '%s'") % 
myline) @@ -252,14 +259,19 @@ def update_config_files(config_root, protect, protect_mask, update_iter, match_c recursivefiles.append(x) myxfiles = recursivefiles for x in myxfiles: + f = None try: - file_contents[x] = io.open( + f = io.open( _unicode_encode(os.path.join(abs_user_config, x), encoding=_encodings['fs'], errors='strict'), mode='r', encoding=_encodings['content'], - errors='replace').readlines() + errors='replace') + file_contents[x] = f.readlines() except IOError: continue + finally: + if f is not None: + f.close() # update /etc/portage/packages.* ignore_line_re = re.compile(r'^#|^\s*$') diff --git a/portage_with_autodep/pym/portage/update.pyo b/portage_with_autodep/pym/portage/update.pyo Binary files differnew file mode 100644 index 0000000..9628ea9 --- /dev/null +++ b/portage_with_autodep/pym/portage/update.pyo diff --git a/portage_with_autodep/pym/portage/util/ExtractKernelVersion.py b/portage_with_autodep/pym/portage/util/ExtractKernelVersion.py index 5cb9747..69bd58a 100644 --- a/portage_with_autodep/pym/portage/util/ExtractKernelVersion.py +++ b/portage_with_autodep/pym/portage/util/ExtractKernelVersion.py @@ -14,7 +14,7 @@ def ExtractKernelVersion(base_dir): @param base_dir: Path to sources (usually /usr/src/linux) @type base_dir: string @rtype: tuple( version[string], error[string]) - @returns: + @return: 1. 
tuple( version[string], error[string]) Either version or error is populated (but never both) @@ -37,6 +37,8 @@ def ExtractKernelVersion(base_dir): return (None, str(details)) except IOError as details: return (None, str(details)) + finally: + f.close() lines = [l.strip() for l in lines] diff --git a/portage_with_autodep/pym/portage/util/ExtractKernelVersion.pyo b/portage_with_autodep/pym/portage/util/ExtractKernelVersion.pyo Binary files differnew file mode 100644 index 0000000..d0302fd --- /dev/null +++ b/portage_with_autodep/pym/portage/util/ExtractKernelVersion.pyo diff --git a/portage_with_autodep/pym/portage/util/SlotObject.py b/portage_with_autodep/pym/portage/util/SlotObject.py new file mode 100644 index 0000000..a59dfc1 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/SlotObject.py @@ -0,0 +1,51 @@ +# Copyright 1999-2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +class SlotObject(object): + __slots__ = ("__weakref__",) + + def __init__(self, **kwargs): + classes = [self.__class__] + while classes: + c = classes.pop() + if c is SlotObject: + continue + classes.extend(c.__bases__) + slots = getattr(c, "__slots__", None) + if not slots: + continue + for myattr in slots: + myvalue = kwargs.pop(myattr, None) + if myvalue is None and getattr(self, myattr, None) is not None: + raise AssertionError( + "class '%s' duplicates '%s' value in __slots__ of base class '%s'" % + (self.__class__.__name__, myattr, c.__name__)) + setattr(self, myattr, myvalue) + + if kwargs: + raise TypeError( + "'%s' is an invalid keyword argument for this constructor" % + (next(iter(kwargs)),)) + + def copy(self): + """ + Create a new instance and copy all attributes + defined from __slots__ (including those from + inherited classes). 
+ """ + obj = self.__class__() + + classes = [self.__class__] + while classes: + c = classes.pop() + if c is SlotObject: + continue + classes.extend(c.__bases__) + slots = getattr(c, "__slots__", None) + if not slots: + continue + for myattr in slots: + setattr(obj, myattr, getattr(self, myattr)) + + return obj + diff --git a/portage_with_autodep/pym/portage/util/SlotObject.pyo b/portage_with_autodep/pym/portage/util/SlotObject.pyo Binary files differnew file mode 100644 index 0000000..11d0ec7 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/SlotObject.pyo diff --git a/portage_with_autodep/pym/portage/util/_ShelveUnicodeWrapper.py b/portage_with_autodep/pym/portage/util/_ShelveUnicodeWrapper.py new file mode 100644 index 0000000..adbd519 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_ShelveUnicodeWrapper.py @@ -0,0 +1,45 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +class ShelveUnicodeWrapper(object): + """ + Convert unicode to str and back again, since python-2.x shelve + module doesn't support unicode. 
+ """ + def __init__(self, shelve_instance): + self._shelve = shelve_instance + + def _encode(self, s): + if isinstance(s, unicode): + s = s.encode('utf_8') + return s + + def __len__(self): + return len(self._shelve) + + def __contains__(self, k): + return self._encode(k) in self._shelve + + def __iter__(self): + return self._shelve.__iter__() + + def items(self): + return self._shelve.iteritems() + + def __setitem__(self, k, v): + self._shelve[self._encode(k)] = self._encode(v) + + def __getitem__(self, k): + return self._shelve[self._encode(k)] + + def __delitem__(self, k): + del self._shelve[self._encode(k)] + + def get(self, k, *args): + return self._shelve.get(self._encode(k), *args) + + def close(self): + self._shelve.close() + + def clear(self): + self._shelve.clear() diff --git a/portage_with_autodep/pym/portage/util/__init__.py b/portage_with_autodep/pym/portage/util/__init__.py index 4aa63d5..2e0a32b 100644 --- a/portage_with_autodep/pym/portage/util/__init__.py +++ b/portage_with_autodep/pym/portage/util/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2004-2011 Gentoo Foundation +# Copyright 2004-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ['apply_permissions', 'apply_recursive_permissions', @@ -25,6 +25,7 @@ import stat import string import sys import traceback +import glob import portage portage.proxy.lazyimport.lazyimport(globals(), @@ -40,7 +41,7 @@ from portage import _os_merge from portage import _unicode_encode from portage import _unicode_decode from portage.exception import InvalidAtom, PortageException, FileNotFound, \ - OperationNotPermitted, PermissionDenied, ReadOnlyFileSystem + OperationNotPermitted, ParseError, PermissionDenied, ReadOnlyFileSystem from portage.localization import _ from portage.proxy.objectproxy import ObjectProxy from portage.cache.mappings import UserDict @@ -318,11 +319,11 @@ def stack_lists(lists, incremental=1, remember_source_file=False, for source_file, tokens in 
unmatched_removals.items(): if len(tokens) > 3: selected = [tokens.pop(), tokens.pop(), tokens.pop()] - writemsg(_("--- Unmatch removal atoms in %s: %s and %s more\n") % \ + writemsg(_("--- Unmatched removal atoms in %s: %s and %s more\n") % \ (source_file, ", ".join(selected), len(tokens)), noiselevel=-1) else: - writemsg(_("--- Unmatch removal atom(s) in %s: %s\n") % (source_file, ", ".join(tokens)), + writemsg(_("--- Unmatched removal atom(s) in %s: %s\n") % (source_file, ", ".join(tokens)), noiselevel=-1) if remember_source_file: @@ -345,12 +346,11 @@ def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1): @param incremental: Append to the return list, don't overwrite @type incremental: Boolean (integer) @rtype: Dictionary - @returns: + @return: 1. Returns the lines in a file in a dictionary, for example: 'sys-apps/portage x86 amd64 ppc' would return { "sys-apps/portage" : [ 'x86', 'amd64', 'ppc' ] - the line syntax is key : [list of values] """ newdict={} for x in grablines(myfilename, recursive): @@ -387,7 +387,9 @@ def read_corresponding_eapi_file(filename): default = "0" eapi_file = os.path.join(os.path.dirname(filename), "eapi") try: - f = open(eapi_file, "r") + f = io.open(_unicode_encode(eapi_file, + encoding=_encodings['fs'], errors='strict'), + mode='r', encoding=_encodings['repo.content'], errors='replace') lines = f.readlines() if len(lines) == 1: eapi = lines[0].rstrip("\n") @@ -503,14 +505,15 @@ def writedict(mydict,myfilename,writekey=True): def shlex_split(s): """ - This is equivalent to shlex.split but it temporarily encodes unicode - strings to bytes since shlex.split() doesn't handle unicode strings. + This is equivalent to shlex.split, but if the current interpreter is + python2, it temporarily encodes unicode strings to bytes since python2's + shlex.split() doesn't handle unicode strings. 
""" - is_unicode = sys.hexversion < 0x3000000 and isinstance(s, unicode) - if is_unicode: + convert_to_bytes = sys.hexversion < 0x3000000 and not isinstance(s, bytes) + if convert_to_bytes: s = _unicode_encode(s) rval = shlex.split(s) - if is_unicode: + if convert_to_bytes: rval = [_unicode_decode(x) for x in rval] return rval @@ -534,16 +537,18 @@ def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True): else: expand_map = {} mykeys = {} + f = None try: # NOTE: shlex doesn't support unicode objects with Python 2 # (produces spurious \0 characters). if sys.hexversion < 0x3000000: - content = open(_unicode_encode(mycfg, - encoding=_encodings['fs'], errors='strict'), 'rb').read() + f = open(_unicode_encode(mycfg, + encoding=_encodings['fs'], errors='strict'), 'rb') else: - content = open(_unicode_encode(mycfg, + f = open(_unicode_encode(mycfg, encoding=_encodings['fs'], errors='strict'), mode='r', - encoding=_encodings['content'], errors='replace').read() + encoding=_encodings['content'], errors='replace') + content = f.read() except IOError as e: if e.errno == PermissionDenied.errno: raise PermissionDenied(mycfg) @@ -552,6 +557,9 @@ def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True): if e.errno not in (errno.EISDIR,): raise return None + finally: + if f is not None: + f.close() # Workaround for avoiding a silent error in shlex that is # triggered by a source statement at the end of the file @@ -565,6 +573,7 @@ def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True): writemsg(("!!! 
" + _("Please use dos2unix to convert line endings " + \ "in config file: '%s'") + "\n") % mycfg, noiselevel=-1) + lex = None try: if tolerant: shlex_class = _tolerant_shlex @@ -588,64 +597,63 @@ def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True): break; equ=lex.get_token() if (equ==''): - #unexpected end of file - #lex.error_leader(self.filename,lex.lineno) + msg = lex.error_leader() + _("Unexpected EOF") if not tolerant: - writemsg(_("!!! Unexpected end of config file: variable %s\n") % key, - noiselevel=-1) - raise Exception(_("ParseError: Unexpected EOF: %s: on/before line %s") % (mycfg, lex.lineno)) + raise ParseError(msg) else: + writemsg("%s\n" % msg, noiselevel=-1) return mykeys elif (equ!='='): - #invalid token - #lex.error_leader(self.filename,lex.lineno) + msg = lex.error_leader() + \ + _("Invalid token '%s' (not '=')") % (equ,) if not tolerant: - raise Exception(_("ParseError: Invalid token " - "'%s' (not '='): %s: line %s") % \ - (equ, mycfg, lex.lineno)) + raise ParseError(msg) else: + writemsg("%s\n" % msg, noiselevel=-1) return mykeys val=lex.get_token() if val is None: - #unexpected end of file - #lex.error_leader(self.filename,lex.lineno) + msg = lex.error_leader() + \ + _("Unexpected end of config file: variable '%s'") % (key,) if not tolerant: - writemsg(_("!!! Unexpected end of config file: variable %s\n") % key, - noiselevel=-1) - raise portage.exception.CorruptionError(_("ParseError: Unexpected EOF: %s: line %s") % (mycfg, lex.lineno)) + raise ParseError(msg) else: + writemsg("%s\n" % msg, noiselevel=-1) return mykeys key = _unicode_decode(key) val = _unicode_decode(val) if _invalid_var_name_re.search(key) is not None: + msg = lex.error_leader() + \ + _("Invalid variable name '%s'") % (key,) if not tolerant: - raise Exception(_( - "ParseError: Invalid variable name '%s': line %s") % \ - (key, lex.lineno - 1)) - writemsg(_("!!! 
Invalid variable name '%s': line %s in %s\n") \ - % (key, lex.lineno - 1, mycfg), noiselevel=-1) + raise ParseError(msg) + writemsg("%s\n" % msg, noiselevel=-1) continue if expand: - mykeys[key] = varexpand(val, expand_map) + mykeys[key] = varexpand(val, mydict=expand_map, + error_leader=lex.error_leader) expand_map[key] = mykeys[key] else: mykeys[key] = val except SystemExit as e: raise except Exception as e: - raise portage.exception.ParseError(str(e)+" in "+mycfg) + if isinstance(e, ParseError) or lex is None: + raise + msg = _unicode_decode("%s%s") % (lex.error_leader(), e) + writemsg("%s\n" % msg, noiselevel=-1) + raise + return mykeys - -#cache expansions of constant strings -cexpand={} -def varexpand(mystring, mydict=None): + +_varexpand_word_chars = frozenset(string.ascii_letters + string.digits + "_") +_varexpand_unexpected_eof_msg = "unexpected EOF while looking for matching `}'" + +def varexpand(mystring, mydict=None, error_leader=None): if mydict is None: mydict = {} - newstring = cexpand.get(" "+mystring, None) - if newstring is not None: - return newstring """ new variable expansion code. Preserves quotes, handles \n, etc. @@ -653,36 +661,37 @@ def varexpand(mystring, mydict=None): This would be a good bunch of code to port to C. """ numvars=0 - mystring=" "+mystring #in single, double quotes insing=0 indoub=0 - pos=1 - newstring=" " - while (pos<len(mystring)): - if (mystring[pos]=="'") and (mystring[pos-1]!="\\"): + pos = 0 + length = len(mystring) + newstring = [] + while pos < length: + current = mystring[pos] + if current == "'": if (indoub): - newstring=newstring+"'" + newstring.append("'") else: - newstring += "'" # Quote removal is handled by shlex. + newstring.append("'") # Quote removal is handled by shlex. 
insing=not insing pos=pos+1 continue - elif (mystring[pos]=='"') and (mystring[pos-1]!="\\"): + elif current == '"': if (insing): - newstring=newstring+'"' + newstring.append('"') else: - newstring += '"' # Quote removal is handled by shlex. + newstring.append('"') # Quote removal is handled by shlex. indoub=not indoub pos=pos+1 continue if (not insing): #expansion time - if (mystring[pos]=="\n"): + if current == "\n": #convert newlines to spaces - newstring=newstring+" " - pos=pos+1 - elif (mystring[pos]=="\\"): + newstring.append(" ") + pos += 1 + elif current == "\\": # For backslash expansion, this function used to behave like # echo -e, but that's not needed for our purposes. We want to # behave like bash does when expanding a variable assignment @@ -692,19 +701,27 @@ def varexpand(mystring, mydict=None): # escaped quotes here, since getconfig() uses shlex # to handle that earlier. if (pos+1>=len(mystring)): - newstring=newstring+mystring[pos] + newstring.append(current) break else: - a = mystring[pos + 1] - pos = pos + 2 - if a in ("\\", "$"): - newstring = newstring + a - elif a == "\n": + current = mystring[pos + 1] + pos += 2 + if current == "$": + newstring.append(current) + elif current == "\\": + newstring.append(current) + # BUG: This spot appears buggy, but it's intended to + # be bug-for-bug compatible with existing behavior. 
+ if pos < length and \ + mystring[pos] in ("'", '"', "$"): + newstring.append(mystring[pos]) + pos += 1 + elif current == "\n": pass else: - newstring = newstring + mystring[pos-2:pos] + newstring.append(mystring[pos - 2:pos]) continue - elif (mystring[pos]=="$") and (mystring[pos-1]!="\\"): + elif current == "$": pos=pos+1 if mystring[pos]=="{": pos=pos+1 @@ -712,11 +729,13 @@ def varexpand(mystring, mydict=None): else: braced=False myvstart=pos - validchars=string.ascii_letters+string.digits+"_" - while mystring[pos] in validchars: + while mystring[pos] in _varexpand_word_chars: if (pos+1)>=len(mystring): if braced: - cexpand[mystring]="" + msg = _varexpand_unexpected_eof_msg + if error_leader is not None: + msg = error_leader() + msg + writemsg(msg + "\n", noiselevel=-1) return "" else: pos=pos+1 @@ -725,25 +744,33 @@ def varexpand(mystring, mydict=None): myvarname=mystring[myvstart:pos] if braced: if mystring[pos]!="}": - cexpand[mystring]="" + msg = _varexpand_unexpected_eof_msg + if error_leader is not None: + msg = error_leader() + msg + writemsg(msg + "\n", noiselevel=-1) return "" else: pos=pos+1 if len(myvarname)==0: - cexpand[mystring]="" + msg = "$" + if braced: + msg += "{}" + msg += ": bad substitution" + if error_leader is not None: + msg = error_leader() + msg + writemsg(msg + "\n", noiselevel=-1) return "" numvars=numvars+1 if myvarname in mydict: - newstring=newstring+mydict[myvarname] + newstring.append(mydict[myvarname]) else: - newstring=newstring+mystring[pos] - pos=pos+1 + newstring.append(current) + pos += 1 else: - newstring=newstring+mystring[pos] - pos=pos+1 - if numvars==0: - cexpand[mystring]=newstring[1:] - return newstring[1:] + newstring.append(current) + pos += 1 + + return "".join(newstring) # broken and removed, but can still be imported pickle_write = None @@ -1589,13 +1616,27 @@ def find_updated_config_files(target_root, config_protect): else: yield (x, None) +_ld_so_include_re = re.compile(r'^include\s+(\S.*)') + def 
getlibpaths(root, env=None): + def read_ld_so_conf(path): + for l in grabfile(path): + include_match = _ld_so_include_re.match(l) + if include_match is not None: + subpath = os.path.join(os.path.dirname(path), + include_match.group(1)) + for p in glob.glob(subpath): + for r in read_ld_so_conf(p): + yield r + else: + yield l + """ Return a list of paths that are used for library lookups """ if env is None: env = os.environ # the following is based on the information from ld.so(8) rval = env.get("LD_LIBRARY_PATH", "").split(":") - rval.extend(grabfile(os.path.join(root, "etc", "ld.so.conf"))) + rval.extend(read_ld_so_conf(os.path.join(root, "etc", "ld.so.conf"))) rval.append("/usr/lib") rval.append("/lib") diff --git a/portage_with_autodep/pym/portage/util/__init__.pyo b/portage_with_autodep/pym/portage/util/__init__.pyo Binary files differnew file mode 100644 index 0000000..941b286 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/__init__.pyo diff --git a/portage_with_autodep/pym/portage/util/_argparse.py b/portage_with_autodep/pym/portage/util/_argparse.py new file mode 100644 index 0000000..6ca7852 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_argparse.py @@ -0,0 +1,42 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +__all__ = ['ArgumentParser'] + +try: + from argparse import ArgumentParser +except ImportError: + # Compatibility with Python 2.6 and 3.1 + from optparse import OptionGroup, OptionParser + + from portage.localization import _ + + class ArgumentParser(object): + def __init__(self, **kwargs): + add_help = kwargs.pop("add_help", None) + if add_help is not None: + kwargs["add_help_option"] = add_help + parser = OptionParser(**kwargs) + self._parser = parser + self.add_argument = parser.add_option + self.print_help = parser.print_help + self.error = parser.error + + def add_argument_group(self, title=None, **kwargs): + optiongroup = OptionGroup(self._parser, title, **kwargs) 
+ self._parser.add_option_group(optiongroup) + return _ArgumentGroup(optiongroup) + + def parse_known_args(self, args=None, namespace=None): + return self._parser.parse_args(args, namespace) + + def parse_args(self, args=None, namespace=None): + args, argv = self.parse_known_args(args, namespace) + if argv: + msg = _('unrecognized arguments: %s') + self.error(msg % ' '.join(argv)) + return args + + class _ArgumentGroup(object): + def __init__(self, optiongroup): + self.add_argument = optiongroup.add_option diff --git a/portage_with_autodep/pym/portage/util/_async/AsyncScheduler.py b/portage_with_autodep/pym/portage/util/_async/AsyncScheduler.py new file mode 100644 index 0000000..9b96c6f --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_async/AsyncScheduler.py @@ -0,0 +1,102 @@ +# Copyright 2012-2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +from portage import os +from _emerge.AsynchronousTask import AsynchronousTask +from _emerge.PollScheduler import PollScheduler + +class AsyncScheduler(AsynchronousTask, PollScheduler): + + def __init__(self, max_jobs=None, max_load=None, **kwargs): + AsynchronousTask.__init__(self) + PollScheduler.__init__(self, **kwargs) + + if max_jobs is None: + max_jobs = 1 + self._max_jobs = max_jobs + self._max_load = max_load + self._error_count = 0 + self._running_tasks = set() + self._remaining_tasks = True + self._term_check_id = None + self._loadavg_check_id = None + + def _poll(self): + if not (self._is_work_scheduled() or self._keep_scheduling()): + self.wait() + return self.returncode + + def _cancel(self): + self._terminated.set() + self._termination_check() + + def _terminate_tasks(self): + for task in list(self._running_tasks): + task.cancel() + + def _next_task(self): + raise NotImplementedError(self) + + def _keep_scheduling(self): + return self._remaining_tasks and not self._terminated.is_set() + + def _running_job_count(self): + return len(self._running_tasks) + + def 
_schedule_tasks(self): + while self._keep_scheduling() and self._can_add_job(): + try: + task = self._next_task() + except StopIteration: + self._remaining_tasks = False + else: + self._running_tasks.add(task) + task.scheduler = self._sched_iface + task.addExitListener(self._task_exit) + task.start() + + # Triggers cleanup and exit listeners if there's nothing left to do. + self.poll() + + def _task_exit(self, task): + self._running_tasks.discard(task) + if task.returncode != os.EX_OK: + self._error_count += 1 + self._schedule() + + def _start(self): + self._term_check_id = self._event_loop.idle_add(self._termination_check) + if self._max_load is not None and \ + self._loadavg_latency is not None and \ + (self._max_jobs is True or self._max_jobs > 1): + # We have to schedule periodically, in case the load + # average has changed since the last call. + self._loadavg_check_id = self._event_loop.timeout_add( + self._loadavg_latency, self._schedule) + self._schedule() + + def _wait(self): + # Loop while there are jobs to be scheduled. + while self._keep_scheduling(): + self._event_loop.iteration() + + # Clean shutdown of previously scheduled jobs. In the + # case of termination, this allows for basic cleanup + # such as flushing of buffered output to logs. 
+ while self._is_work_scheduled(): + self._event_loop.iteration() + + if self._term_check_id is not None: + self._event_loop.source_remove(self._term_check_id) + self._term_check_id = None + + if self._loadavg_check_id is not None: + self._event_loop.source_remove(self._loadavg_check_id) + self._loadavg_check_id = None + + if self._error_count > 0: + self.returncode = 1 + else: + self.returncode = os.EX_OK + + return self.returncode diff --git a/portage_with_autodep/pym/portage/util/_async/FileCopier.py b/portage_with_autodep/pym/portage/util/_async/FileCopier.py new file mode 100644 index 0000000..27e5ab4 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_async/FileCopier.py @@ -0,0 +1,17 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +from portage import os +from portage import shutil +from portage.util._async.ForkProcess import ForkProcess + +class FileCopier(ForkProcess): + """ + Asynchronously copy a file. + """ + + __slots__ = ('src_path', 'dest_path') + + def _run(self): + shutil.copy(self.src_path, self.dest_path) + return os.EX_OK diff --git a/portage_with_autodep/pym/portage/util/_async/FileDigester.py b/portage_with_autodep/pym/portage/util/_async/FileDigester.py new file mode 100644 index 0000000..881c692 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_async/FileDigester.py @@ -0,0 +1,73 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +from portage import os +from portage.checksum import perform_multiple_checksums +from portage.util._async.ForkProcess import ForkProcess +from _emerge.PipeReader import PipeReader + +class FileDigester(ForkProcess): + """ + Asynchronously generate file digests. Pass in file_path and + hash_names, and after successful execution, the digests + attribute will be a dict containing all of the requested + digests. 
+ """ + + __slots__ = ('file_path', 'digests', 'hash_names', + '_digest_pipe_reader', '_digest_pw') + + def _start(self): + pr, pw = os.pipe() + self.fd_pipes = {} + self.fd_pipes[pw] = pw + self._digest_pw = pw + self._digest_pipe_reader = PipeReader( + input_files={"input":pr}, + scheduler=self.scheduler) + self._digest_pipe_reader.addExitListener(self._digest_pipe_reader_exit) + self._digest_pipe_reader.start() + ForkProcess._start(self) + os.close(pw) + + def _run(self): + digests = perform_multiple_checksums(self.file_path, + hashes=self.hash_names) + + buf = "".join("%s=%s\n" % item + for item in digests.items()).encode('utf_8') + + while buf: + buf = buf[os.write(self._digest_pw, buf):] + + return os.EX_OK + + def _parse_digests(self, data): + + digests = {} + for line in data.decode('utf_8').splitlines(): + parts = line.split('=', 1) + if len(parts) == 2: + digests[parts[0]] = parts[1] + + self.digests = digests + + def _pipe_logger_exit(self, pipe_logger): + # Ignore this event, since we want to ensure that we + # exit only after _digest_pipe_reader has reached EOF. 
+ self._pipe_logger = None + + def _digest_pipe_reader_exit(self, pipe_reader): + self._parse_digests(pipe_reader.getvalue()) + self._digest_pipe_reader = None + self._unregister() + self.wait() + + def _unregister(self): + ForkProcess._unregister(self) + + pipe_reader = self._digest_pipe_reader + if pipe_reader is not None: + self._digest_pipe_reader = None + pipe_reader.removeExitListener(self._digest_pipe_reader_exit) + pipe_reader.cancel() diff --git a/portage_with_autodep/pym/portage/util/_async/ForkProcess.py b/portage_with_autodep/pym/portage/util/_async/ForkProcess.py new file mode 100644 index 0000000..25f72d3 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_async/ForkProcess.py @@ -0,0 +1,65 @@ +# Copyright 2012-2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import signal +import sys +import traceback + +import portage +from portage import os +from _emerge.SpawnProcess import SpawnProcess + +class ForkProcess(SpawnProcess): + + __slots__ = () + + def _spawn(self, args, fd_pipes=None, **kwargs): + """ + Fork a subprocess, apply local settings, and call fetch(). + """ + + parent_pid = os.getpid() + pid = None + try: + pid = os.fork() + + if pid != 0: + if not isinstance(pid, int): + raise AssertionError( + "fork returned non-integer: %s" % (repr(pid),)) + return [pid] + + rval = 1 + try: + + # Use default signal handlers in order to avoid problems + # killing subprocesses as reported in bug #353239. + signal.signal(signal.SIGINT, signal.SIG_DFL) + signal.signal(signal.SIGTERM, signal.SIG_DFL) + + portage.locks._close_fds() + # We don't exec, so use close_fds=False + # (see _setup_pipes docstring). + portage.process._setup_pipes(fd_pipes, close_fds=False) + + rval = self._run() + except SystemExit: + raise + except: + traceback.print_exc() + # os._exit() skips stderr flush! 
+ sys.stderr.flush() + finally: + os._exit(rval) + + finally: + if pid == 0 or (pid is None and os.getpid() != parent_pid): + # Call os._exit() from a finally block in order + # to suppress any finally blocks from earlier + # in the call stack (see bug #345289). This + # finally block has to be setup before the fork + # in order to avoid a race condition. + os._exit(1) + + def _run(self): + raise NotImplementedError(self) diff --git a/portage_with_autodep/pym/portage/util/_async/PipeLogger.py b/portage_with_autodep/pym/portage/util/_async/PipeLogger.py new file mode 100644 index 0000000..aa605d9 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_async/PipeLogger.py @@ -0,0 +1,163 @@ +# Copyright 2008-2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import fcntl +import errno +import gzip +import sys + +import portage +from portage import os, _encodings, _unicode_encode +from _emerge.AbstractPollTask import AbstractPollTask + +class PipeLogger(AbstractPollTask): + + """ + This can be used for logging output of a child process, + optionally outputing to log_file_path and/or stdout_fd. It can + also monitor for EOF on input_fd, which may be used to detect + termination of a child process. If log_file_path ends with + '.gz' then the log file is written with compression. 
+ """ + + __slots__ = ("input_fd", "log_file_path", "stdout_fd") + \ + ("_log_file", "_log_file_real", "_reg_id") + + def _start(self): + + log_file_path = self.log_file_path + if log_file_path is not None: + + self._log_file = open(_unicode_encode(log_file_path, + encoding=_encodings['fs'], errors='strict'), mode='ab') + if log_file_path.endswith('.gz'): + self._log_file_real = self._log_file + self._log_file = gzip.GzipFile(filename='', mode='ab', + fileobj=self._log_file) + + portage.util.apply_secpass_permissions(log_file_path, + uid=portage.portage_uid, gid=portage.portage_gid, + mode=0o660) + + if isinstance(self.input_fd, int): + fd = self.input_fd + else: + fd = self.input_fd.fileno() + + fcntl.fcntl(fd, fcntl.F_SETFL, + fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK) + + # FD_CLOEXEC is enabled by default in Python >=3.4. + if sys.hexversion < 0x3040000: + try: + fcntl.FD_CLOEXEC + except AttributeError: + pass + else: + fcntl.fcntl(fd, fcntl.F_SETFD, + fcntl.fcntl(fd, fcntl.F_GETFD) | fcntl.FD_CLOEXEC) + + self._reg_id = self.scheduler.io_add_watch(fd, + self._registered_events, self._output_handler) + self._registered = True + + def _cancel(self): + self._unregister() + if self.returncode is None: + self.returncode = self._cancelled_returncode + + def _wait(self): + if self.returncode is not None: + return self.returncode + self._wait_loop() + self.returncode = os.EX_OK + return self.returncode + + def _output_handler(self, fd, event): + + background = self.background + stdout_fd = self.stdout_fd + log_file = self._log_file + + while True: + buf = self._read_buf(fd, event) + + if buf is None: + # not a POLLIN event, EAGAIN, etc... 
+ break + + if not buf: + # EOF + self._unregister() + self.wait() + break + + else: + if not background and stdout_fd is not None: + failures = 0 + stdout_buf = buf + while stdout_buf: + try: + stdout_buf = \ + stdout_buf[os.write(stdout_fd, stdout_buf):] + except OSError as e: + if e.errno != errno.EAGAIN: + raise + del e + failures += 1 + if failures > 50: + # Avoid a potentially infinite loop. In + # most cases, the failure count is zero + # and it's unlikely to exceed 1. + raise + + # This means that a subprocess has put an inherited + # stdio file descriptor (typically stdin) into + # O_NONBLOCK mode. This is not acceptable (see bug + # #264435), so revert it. We need to use a loop + # here since there's a race condition due to + # parallel processes being able to change the + # flags on the inherited file descriptor. + # TODO: When possible, avoid having child processes + # inherit stdio file descriptors from portage + # (maybe it can't be avoided with + # PROPERTIES=interactive). + fcntl.fcntl(stdout_fd, fcntl.F_SETFL, + fcntl.fcntl(stdout_fd, + fcntl.F_GETFL) ^ os.O_NONBLOCK) + + if log_file is not None: + log_file.write(buf) + log_file.flush() + + self._unregister_if_appropriate(event) + + return True + + def _unregister(self): + + if self._reg_id is not None: + self.scheduler.source_remove(self._reg_id) + self._reg_id = None + + if self.input_fd is not None: + if isinstance(self.input_fd, int): + os.close(self.input_fd) + else: + self.input_fd.close() + self.input_fd = None + + if self.stdout_fd is not None: + os.close(self.stdout_fd) + self.stdout_fd = None + + if self._log_file is not None: + self._log_file.close() + self._log_file = None + + if self._log_file_real is not None: + # Avoid "ResourceWarning: unclosed file" since python 3.2. 
+ self._log_file_real.close() + self._log_file_real = None + + self._registered = False diff --git a/portage_with_autodep/pym/portage/util/_async/PipeReaderBlockingIO.py b/portage_with_autodep/pym/portage/util/_async/PipeReaderBlockingIO.py new file mode 100644 index 0000000..b06adf6 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_async/PipeReaderBlockingIO.py @@ -0,0 +1,91 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +try: + import threading +except ImportError: + # dummy_threading will not suffice + threading = None + +from portage import os +from _emerge.AbstractPollTask import AbstractPollTask + +class PipeReaderBlockingIO(AbstractPollTask): + """ + Reads output from one or more files and saves it in memory, for + retrieval via the getvalue() method. This is driven by a thread + for each input file, in order to support blocking IO. This may + be useful for using threads to handle blocking IO with Jython, + since Jython lacks the fcntl module which is needed for + non-blocking IO (see http://bugs.jython.org/issue1074). 
+ """ + + __slots__ = ("input_files", "_read_data", "_terminate", + "_threads", "_thread_rlock") + + def _start(self): + self._terminate = threading.Event() + self._threads = {} + self._read_data = [] + + self._registered = True + self._thread_rlock = threading.RLock() + with self._thread_rlock: + for f in self.input_files.values(): + t = threading.Thread(target=self._reader_thread, args=(f,)) + t.daemon = True + t.start() + self._threads[f] = t + + def _reader_thread(self, f): + try: + terminated = self._terminate.is_set + except AttributeError: + # Jython 2.7.0a2 + terminated = self._terminate.isSet + bufsize = self._bufsize + while not terminated(): + buf = f.read(bufsize) + with self._thread_rlock: + if terminated(): + break + elif buf: + self._read_data.append(buf) + else: + del self._threads[f] + if not self._threads: + # Thread-safe callback to EventLoop + self.scheduler.idle_add(self._eof) + break + f.close() + + def _eof(self): + self._registered = False + if self.returncode is None: + self.returncode = os.EX_OK + self.wait() + return False + + def _cancel(self): + self._terminate.set() + self._registered = False + if self.returncode is None: + self.returncode = self._cancelled_returncode + self.wait() + + def _wait(self): + if self.returncode is not None: + return self.returncode + self._wait_loop() + self.returncode = os.EX_OK + return self.returncode + + def getvalue(self): + """Retrieve the entire contents""" + with self._thread_rlock: + return b''.join(self._read_data) + + def close(self): + """Free the memory buffer.""" + with self._thread_rlock: + self._read_data = None diff --git a/portage_with_autodep/pym/portage/util/_async/PopenProcess.py b/portage_with_autodep/pym/portage/util/_async/PopenProcess.py new file mode 100644 index 0000000..2fc56d2 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_async/PopenProcess.py @@ -0,0 +1,33 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + 
+from _emerge.SubProcess import SubProcess + +class PopenProcess(SubProcess): + + __slots__ = ("pipe_reader", "proc",) + + def _start(self): + + self.pid = self.proc.pid + self._registered = True + + if self.pipe_reader is None: + self._reg_id = self.scheduler.child_watch_add( + self.pid, self._child_watch_cb) + else: + try: + self.pipe_reader.scheduler = self.scheduler + except AttributeError: + pass + self.pipe_reader.addExitListener(self._pipe_reader_exit) + self.pipe_reader.start() + + def _pipe_reader_exit(self, pipe_reader): + self._reg_id = self.scheduler.child_watch_add( + self.pid, self._child_watch_cb) + + def _child_watch_cb(self, pid, condition, user_data=None): + self._reg_id = None + self._waitpid_cb(pid, condition) + self.wait() diff --git a/portage_with_autodep/pym/portage/util/_async/SchedulerInterface.py b/portage_with_autodep/pym/portage/util/_async/SchedulerInterface.py new file mode 100644 index 0000000..2ab668e --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_async/SchedulerInterface.py @@ -0,0 +1,79 @@ +# Copyright 2012-2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import gzip +import errno + +from portage import _encodings +from portage import _unicode_encode +from portage.util import writemsg_level +from ..SlotObject import SlotObject + +class SchedulerInterface(SlotObject): + + _event_loop_attrs = ("IO_ERR", "IO_HUP", "IO_IN", + "IO_NVAL", "IO_OUT", "IO_PRI", + "child_watch_add", "idle_add", "io_add_watch", + "iteration", "source_remove", "timeout_add") + + __slots__ = _event_loop_attrs + ("_event_loop", "_is_background") + + def __init__(self, event_loop, is_background=None, **kwargs): + SlotObject.__init__(self, **kwargs) + self._event_loop = event_loop + if is_background is None: + is_background = self._return_false + self._is_background = is_background + for k in self._event_loop_attrs: + setattr(self, k, getattr(event_loop, k)) + + @staticmethod + def _return_false(): + return 
False + + def output(self, msg, log_path=None, background=None, + level=0, noiselevel=-1): + """ + Output msg to stdout if not self._is_background(). If log_path + is not None then append msg to the log (appends with + compression if the filename extension of log_path corresponds + to a supported compression type). + """ + + global_background = self._is_background() + if background is None or global_background: + # Use the global value if the task does not have a local + # background value. For example, parallel-fetch tasks run + # in the background while other tasks concurrently run in + # the foreground. + background = global_background + + msg_shown = False + if not background: + writemsg_level(msg, level=level, noiselevel=noiselevel) + msg_shown = True + + if log_path is not None: + try: + f = open(_unicode_encode(log_path, + encoding=_encodings['fs'], errors='strict'), + mode='ab') + f_real = f + except IOError as e: + if e.errno not in (errno.ENOENT, errno.ESTALE): + raise + if not msg_shown: + writemsg_level(msg, level=level, noiselevel=noiselevel) + else: + + if log_path.endswith('.gz'): + # NOTE: The empty filename argument prevents us from + # triggering a bug in python3 which causes GzipFile + # to raise AttributeError if fileobj.name is bytes + # instead of unicode. + f = gzip.GzipFile(filename='', mode='ab', fileobj=f) + + f.write(_unicode_encode(msg)) + f.close() + if f_real is not f: + f_real.close() diff --git a/portage_with_autodep/pym/portage/util/_async/TaskScheduler.py b/portage_with_autodep/pym/portage/util/_async/TaskScheduler.py new file mode 100644 index 0000000..35b3875 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_async/TaskScheduler.py @@ -0,0 +1,20 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +from .AsyncScheduler import AsyncScheduler + +class TaskScheduler(AsyncScheduler): + + """ + A simple way to handle scheduling of AbstractPollTask instances. 
Simply + pass a task iterator into the constructor and call start(). Use the + poll, wait, or addExitListener methods to be notified when all of the + tasks have completed. + """ + + def __init__(self, task_iter, **kwargs): + AsyncScheduler.__init__(self, **kwargs) + self._task_iter = task_iter + + def _next_task(self): + return next(self._task_iter) diff --git a/portage_with_autodep/pym/portage/tests/locks/__init__.py b/portage_with_autodep/pym/portage/util/_async/__init__.py index 21a391a..418ad86 100644 --- a/portage_with_autodep/pym/portage/tests/locks/__init__.py +++ b/portage_with_autodep/pym/portage/util/_async/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2010 Gentoo Foundation +# Copyright 2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 diff --git a/portage_with_autodep/pym/portage/util/_async/run_main_scheduler.py b/portage_with_autodep/pym/portage/util/_async/run_main_scheduler.py new file mode 100644 index 0000000..10fed34 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_async/run_main_scheduler.py @@ -0,0 +1,41 @@ + +import signal + +def run_main_scheduler(scheduler): + """ + Start and run an AsyncScheduler (or compatible object), and handle + SIGINT or SIGTERM by calling its terminate() method and waiting + for it to clean up after itself. If SIGINT or SIGTERM is received, + return signum, else return None. Any previous SIGINT or SIGTERM + signal handlers are automatically saved and restored before + returning. 
+ """ + + received_signal = [] + + def sighandler(signum, frame): + signal.signal(signal.SIGINT, signal.SIG_IGN) + signal.signal(signal.SIGTERM, signal.SIG_IGN) + received_signal.append(signum) + scheduler.terminate() + + earlier_sigint_handler = signal.signal(signal.SIGINT, sighandler) + earlier_sigterm_handler = signal.signal(signal.SIGTERM, sighandler) + + try: + scheduler.start() + scheduler.wait() + finally: + # Restore previous handlers + if earlier_sigint_handler is not None: + signal.signal(signal.SIGINT, earlier_sigint_handler) + else: + signal.signal(signal.SIGINT, signal.SIG_DFL) + if earlier_sigterm_handler is not None: + signal.signal(signal.SIGTERM, earlier_sigterm_handler) + else: + signal.signal(signal.SIGTERM, signal.SIG_DFL) + + if received_signal: + return received_signal[0] + return None diff --git a/portage_with_autodep/pym/portage/util/_ctypes.py b/portage_with_autodep/pym/portage/util/_ctypes.py new file mode 100644 index 0000000..aeceebc --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_ctypes.py @@ -0,0 +1,47 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +try: + import ctypes + import ctypes.util +except ImportError: + ctypes = None +else: + try: + ctypes.cdll + except AttributeError: + ctypes = None + +_library_names = {} + +def find_library(name): + """ + Calls ctype.util.find_library() if the ctypes module is available, + and otherwise returns None. Results are cached for future invocations. + """ + filename = _library_names.get(name) + if filename is None: + if ctypes is not None: + filename = ctypes.util.find_library(name) + if filename is None: + filename = False + _library_names[name] = filename + + if filename is False: + return None + return filename + +_library_handles = {} + +def LoadLibrary(name): + """ + Calls ctypes.cdll.LoadLibrary(name) if the ctypes module is available, + and otherwise returns None. Results are cached for future invocations. 
+ """ + handle = _library_handles.get(name) + + if handle is None and ctypes is not None: + handle = ctypes.CDLL(name, use_errno=True) + _library_handles[name] = handle + + return handle diff --git a/portage_with_autodep/pym/portage/util/_desktop_entry.py b/portage_with_autodep/pym/portage/util/_desktop_entry.py new file mode 100644 index 0000000..7901780 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_desktop_entry.py @@ -0,0 +1,75 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import io +import subprocess +import sys + +try: + from configparser import Error as ConfigParserError, RawConfigParser +except ImportError: + from ConfigParser import Error as ConfigParserError, RawConfigParser + +from portage import _encodings, _unicode_encode, _unicode_decode + +def parse_desktop_entry(path): + """ + Parse the given file with RawConfigParser and return the + result. This may raise an IOError from io.open(), or a + ParsingError from RawConfigParser. 
+ """ + parser = RawConfigParser() + + # use read_file/readfp in order to control decoding of unicode + try: + # Python >=3.2 + read_file = parser.read_file + except AttributeError: + read_file = parser.readfp + + with io.open(_unicode_encode(path, + encoding=_encodings['fs'], errors='strict'), + mode='r', encoding=_encodings['repo.content'], + errors='replace') as f: + read_file(f) + + return parser + +_ignored_service_errors = ( + 'error: required key "Name" in group "Desktop Entry" is not present', + 'error: key "Actions" is present in group "Desktop Entry", but the type is "Service" while this key is only valid for type "Application"', + 'error: key "MimeType" is present in group "Desktop Entry", but the type is "Service" while this key is only valid for type "Application"', +) + +def validate_desktop_entry(path): + args = ["desktop-file-validate", path] + if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000: + # Python 3.1 does not support bytes in Popen args. + args = [_unicode_encode(x, errors='strict') for x in args] + proc = subprocess.Popen(args, + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + output_lines = _unicode_decode(proc.communicate()[0]).splitlines() + proc.wait() + + if output_lines: + try: + desktop_entry = parse_desktop_entry(path) + except ConfigParserError: + pass + else: + if desktop_entry.has_section("Desktop Entry"): + try: + entry_type = desktop_entry.get("Desktop Entry", "Type") + except ConfigParserError: + pass + else: + if entry_type == "Service": + # Filter false errors for Type=Service (bug #414125). 
+ filtered_output = [] + for line in output_lines: + if line[len(path)+2:] in _ignored_service_errors: + continue + filtered_output.append(line) + output_lines = filtered_output + + return output_lines diff --git a/portage_with_autodep/pym/portage/util/_desktop_entry.pyo b/portage_with_autodep/pym/portage/util/_desktop_entry.pyo Binary files differnew file mode 100644 index 0000000..7dec17a --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_desktop_entry.pyo diff --git a/portage_with_autodep/pym/portage/util/_dyn_libs/LinkageMapELF.py b/portage_with_autodep/pym/portage/util/_dyn_libs/LinkageMapELF.py index 52670d9..e71ac73 100644 --- a/portage_with_autodep/pym/portage/util/_dyn_libs/LinkageMapELF.py +++ b/portage_with_autodep/pym/portage/util/_dyn_libs/LinkageMapELF.py @@ -267,6 +267,7 @@ class LinkageMapELF(object): owner = plibs.pop(fields[1], None) lines.append((owner, "scanelf", ";".join(fields))) proc.wait() + proc.stdout.close() if plibs: # Preserved libraries that did not appear in the scanelf output. @@ -286,6 +287,13 @@ class LinkageMapELF(object): l = l.rstrip("\n") if not l: continue + if '\0' in l: + # os.stat() will raise "TypeError: must be encoded string + # without NULL bytes, not str" in this case. 
+ writemsg_level(_("\nLine contains null byte(s) " \ + "in %s: %s\n\n") % (location, l), + level=logging.ERROR, noiselevel=-1) + continue fields = l.split(";") if len(fields) < 5: writemsg_level(_("\nWrong number of fields " \ diff --git a/portage_with_autodep/pym/portage/util/_dyn_libs/LinkageMapELF.pyo b/portage_with_autodep/pym/portage/util/_dyn_libs/LinkageMapELF.pyo Binary files differnew file mode 100644 index 0000000..c1e5603 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_dyn_libs/LinkageMapELF.pyo diff --git a/portage_with_autodep/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py b/portage_with_autodep/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py index 602cf87..4bc64db 100644 --- a/portage_with_autodep/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py +++ b/portage_with_autodep/pym/portage/util/_dyn_libs/PreservedLibsRegistry.py @@ -1,8 +1,10 @@ -# Copyright 1998-2011 Gentoo Foundation +# Copyright 1998-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 import errno +import json import logging +import stat import sys try: @@ -27,6 +29,19 @@ if sys.hexversion >= 0x3000000: class PreservedLibsRegistry(object): """ This class handles the tracking of preserved library objects """ + + # JSON read support has been available since portage-2.2.0_alpha89. 
+ _json_write = True + + _json_write_opts = { + "ensure_ascii": False, + "indent": "\t", + "sort_keys": True + } + if sys.hexversion < 0x30200F0: + # indent only supports int number of spaces + _json_write_opts["indent"] = 4 + def __init__(self, root, filename): """ @param root: root used to check existence of paths in pruneNonExisting @@ -55,22 +70,51 @@ class PreservedLibsRegistry(object): def load(self): """ Reload the registry data from file """ self._data = None + f = None + content = None try: - self._data = pickle.load( - open(_unicode_encode(self._filename, - encoding=_encodings['fs'], errors='strict'), 'rb')) - except (ValueError, pickle.UnpicklingError) as e: - writemsg_level(_("!!! Error loading '%s': %s\n") % \ - (self._filename, e), level=logging.ERROR, noiselevel=-1) - except (EOFError, IOError) as e: - if isinstance(e, EOFError) or e.errno == errno.ENOENT: + f = open(_unicode_encode(self._filename, + encoding=_encodings['fs'], errors='strict'), 'rb') + content = f.read() + except EnvironmentError as e: + if not hasattr(e, 'errno'): + raise + elif e.errno == errno.ENOENT: pass elif e.errno == PermissionDenied.errno: raise PermissionDenied(self._filename) else: raise + finally: + if f is not None: + f.close() + + # content is empty if it's an empty lock file + if content: + try: + self._data = json.loads(_unicode_decode(content, + encoding=_encodings['repo.content'], errors='strict')) + except SystemExit: + raise + except Exception as e: + try: + self._data = pickle.loads(content) + except SystemExit: + raise + except Exception: + writemsg_level(_("!!! 
# Only count symlinks as preserved if they still point to a hardlink
the symlink to point to a hardlink + # in a different directory (see bug #406837). The unused hardlink + # is automatically found by _find_unused_preserved_libs, since the + # soname symlink no longer points to it. After the hardlink is + # removed by _remove_preserved_libs, it calls pruneNonExisting + # which eliminates the irrelevant symlink from the registry here. + for f, target in symlinks.items(): + if os.path.join(os.path.dirname(f), target) in hardlinks: + paths.append(f) + if len(paths) > 0: self._data[cps] = (cpv, counter, paths) else: @@ -161,7 +242,7 @@ class PreservedLibsRegistry(object): def getPreservedLibs(self): """ Return a mapping of packages->preserved objects. - @returns mapping of package instances to preserved objects + @return mapping of package instances to preserved objects @rtype Dict cpv->list-of-paths """ if self._data is None: diff --git a/portage_with_autodep/pym/portage/util/_dyn_libs/PreservedLibsRegistry.pyo b/portage_with_autodep/pym/portage/util/_dyn_libs/PreservedLibsRegistry.pyo Binary files differnew file mode 100644 index 0000000..8cdd7cb --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_dyn_libs/PreservedLibsRegistry.pyo diff --git a/portage_with_autodep/pym/portage/util/_dyn_libs/__init__.pyo b/portage_with_autodep/pym/portage/util/_dyn_libs/__init__.pyo Binary files differnew file mode 100644 index 0000000..960b66e --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_dyn_libs/__init__.pyo diff --git a/portage_with_autodep/pym/portage/util/_dyn_libs/display_preserved_libs.py b/portage_with_autodep/pym/portage/util/_dyn_libs/display_preserved_libs.py new file mode 100644 index 0000000..b16478d --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_dyn_libs/display_preserved_libs.py @@ -0,0 +1,98 @@ +# Copyright 2007-2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +from __future__ import print_function + +import logging + +import portage +from portage.output 
import colorize + +def display_preserved_libs(vardb): + + MAX_DISPLAY = 3 + + plibdata = vardb._plib_registry.getPreservedLibs() + linkmap = vardb._linkmap + consumer_map = {} + owners = {} + + try: + linkmap.rebuild() + except portage.exception.CommandNotFound as e: + portage.util.writemsg_level("!!! Command Not Found: %s\n" % (e,), + level=logging.ERROR, noiselevel=-1) + else: + search_for_owners = set() + for cpv in plibdata: + internal_plib_keys = set(linkmap._obj_key(f) \ + for f in plibdata[cpv]) + for f in plibdata[cpv]: + if f in consumer_map: + continue + consumers = [] + for c in linkmap.findConsumers(f, greedy=False): + # Filter out any consumers that are also preserved libs + # belonging to the same package as the provider. + if linkmap._obj_key(c) not in internal_plib_keys: + consumers.append(c) + consumers.sort() + consumer_map[f] = consumers + search_for_owners.update(consumers[:MAX_DISPLAY+1]) + + owners = {} + for f in search_for_owners: + owner_set = set() + for owner in linkmap.getOwners(f): + owner_dblink = vardb._dblink(owner) + if owner_dblink.exists(): + owner_set.add(owner_dblink) + if owner_set: + owners[f] = owner_set + + all_preserved = set() + all_preserved.update(*plibdata.values()) + + for cpv in plibdata: + print(colorize("WARN", ">>>") + " package: %s" % cpv) + samefile_map = {} + for f in plibdata[cpv]: + obj_key = linkmap._obj_key(f) + alt_paths = samefile_map.get(obj_key) + if alt_paths is None: + alt_paths = set() + samefile_map[obj_key] = alt_paths + alt_paths.add(f) + + for alt_paths in samefile_map.values(): + alt_paths = sorted(alt_paths) + for p in alt_paths: + print(colorize("WARN", " * ") + " - %s" % (p,)) + f = alt_paths[0] + consumers = consumer_map.get(f, []) + consumers_non_preserved = [c for c in consumers + if c not in all_preserved] + if consumers_non_preserved: + # Filter the consumers that are preserved libraries, since + # they don't need to be rebuilt (see bug #461908). 
+ consumers = consumers_non_preserved + + if len(consumers) == MAX_DISPLAY + 1: + # Display 1 extra consumer, instead of displaying + # "used by 1 other files". + max_display = MAX_DISPLAY + 1 + else: + max_display = MAX_DISPLAY + for c in consumers[:max_display]: + if c in all_preserved: + # The owner is displayed elsewhere due to having + # its libs preserved, so distinguish this special + # case (see bug #461908). + owners_desc = "preserved" + else: + owners_desc = ", ".join(x.mycpv for x in owners.get(c, [])) + print(colorize("WARN", " * ") + " used by %s (%s)" % \ + (c, owners_desc)) + if len(consumers) > max_display: + print(colorize("WARN", " * ") + " used by %d other files" % + (len(consumers) - max_display)) diff --git a/portage_with_autodep/pym/portage/util/_eventloop/EventLoop.py b/portage_with_autodep/pym/portage/util/_eventloop/EventLoop.py new file mode 100644 index 0000000..bbbce52 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_eventloop/EventLoop.py @@ -0,0 +1,490 @@ +# Copyright 1999-2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import errno +import fcntl +import logging +import os +import select +import signal +import time + +from portage.util import writemsg_level +from ..SlotObject import SlotObject +from .PollConstants import PollConstants +from .PollSelectAdapter import PollSelectAdapter + +class EventLoop(object): + + supports_multiprocessing = True + + # TODO: Find out why SIGCHLD signals aren't delivered during poll + # calls, forcing us to wakeup in order to receive them. 
can safely be used in a non-main thread (default is True, so
+ """ + + if timeout is None and \ + not self._poll_event_handlers: + raise StopIteration( + "timeout is None and there are no poll() event handlers") + + while True: + try: + self._poll_event_queue.extend(self._poll_obj.poll(timeout)) + break + except select.error as e: + # Silently handle EINTR, which is normal when we have + # received a signal such as SIGINT. + if not (e.args and e.args[0] == errno.EINTR): + writemsg_level("\n!!! select error: %s\n" % (e,), + level=logging.ERROR, noiselevel=-1) + del e + + # This typically means that we've received a SIGINT, so + # raise StopIteration in order to break out of our current + # iteration and respond appropriately to the signal as soon + # as possible. + raise StopIteration("interrupted") + + def iteration(self, *args): + """ + Like glib.MainContext.iteration(), runs a single iteration. + @type may_block: bool + @param may_block: if True the call may block waiting for an event + (default is True). + @rtype: bool + @return: True if events were dispatched. + """ + + may_block = True + + if args: + if len(args) > 1: + raise TypeError( + "expected at most 1 argument (%s given)" % len(args)) + may_block = args[0] + + event_queue = self._poll_event_queue + event_handlers = self._poll_event_handlers + events_handled = 0 + + if not event_handlers: + if self._run_timeouts(): + events_handled += 1 + if not event_handlers: + if not events_handled and may_block and \ + self._timeout_interval is not None: + # Block so that we don't waste cpu time by looping too + # quickly. This makes EventLoop useful for code that needs + # to wait for timeout callbacks regardless of whether or + # not any IO handlers are currently registered. + try: + self._poll(timeout=self._timeout_interval) + except StopIteration: + pass + if self._run_timeouts(): + events_handled += 1 + + # If any timeouts have executed, then return immediately, + # in order to minimize latency in termination of iteration + # loops that they may control. 
where pid is the child process id, condition is the status
+ @type pid: int
# Iterate over our local list, since self._idle_callbacks can be + # modified during the execution of these callbacks.
+ for x in list(self._idle_callbacks.values()): + if x.source_id not in self._idle_callbacks: + # it got cancelled while executing another callback + continue + if x.calling: + # don't call it recursively + continue + x.calling = True + try: + if not x.callback(*x.args): + self.source_remove(x.source_id) + finally: + x.calling = False + + def timeout_add(self, interval, function, *args): + """ + Like glib.timeout_add(), interval argument is the number of + milliseconds between calls to your function, and your function + should return False to stop being called, or True to continue + being called. Any additional positional arguments given here + are passed to your function when it's called. + """ + self._event_handler_id += 1 + source_id = self._event_handler_id + self._timeout_handlers[source_id] = \ + self._timeout_handler_class( + interval=interval, function=function, args=args, + source_id=source_id, timestamp=time.time()) + if self._timeout_interval is None or self._timeout_interval > interval: + self._timeout_interval = interval + return source_id + + def _run_timeouts(self): + + calls = 0 + if not self._use_signal: + if self._poll_child_processes(): + calls += 1 + + self._run_idle_callbacks() + + if not self._timeout_handlers: + return bool(calls) + + ready_timeouts = [] + current_time = time.time() + for x in self._timeout_handlers.values(): + elapsed_seconds = current_time - x.timestamp + # elapsed_seconds < 0 means the system clock has been adjusted + if elapsed_seconds < 0 or \ + (x.interval - 1000 * elapsed_seconds) <= 0: + ready_timeouts.append(x) + + # Iterate of our local list, since self._timeout_handlers can be + # modified during the exection of these callbacks. 
+ for x in ready_timeouts: + if x.source_id not in self._timeout_handlers: + # it got cancelled while executing another timeout + continue + if x.calling: + # don't call it recursively + continue + calls += 1 + x.calling = True + try: + x.timestamp = time.time() + if not x.function(*x.args): + self.source_remove(x.source_id) + finally: + x.calling = False + + return bool(calls) + + def io_add_watch(self, f, condition, callback, *args): + """ + Like glib.io_add_watch(), your function should return False to + stop being called, or True to continue being called. Any + additional positional arguments given here are passed to your + function when it's called. + + @type f: int or object with fileno() method + @param f: a file descriptor to monitor + @type condition: int + @param condition: a condition mask + @type callback: callable + @param callback: a function to call + @rtype: int + @return: an integer ID of the event source + """ + if f in self._poll_event_handlers: + raise AssertionError("fd %d is already registered" % f) + self._event_handler_id += 1 + source_id = self._event_handler_id + self._poll_event_handler_ids[source_id] = f + self._poll_event_handlers[f] = self._io_handler_class( + args=args, callback=callback, f=f, source_id=source_id) + self._poll_obj.register(f, condition) + return source_id + + def source_remove(self, reg_id): + """ + Like glib.source_remove(), this returns True if the given reg_id + is found and removed, and False if the reg_id is invalid or has + already been removed. 
+ """ + x = self._child_handlers.pop(reg_id, None) + if x is not None: + if not self._child_handlers and self._use_signal: + signal.signal(signal.SIGCHLD, signal.SIG_DFL) + self.source_remove(self._sigchld_src_id) + self._sigchld_src_id = None + return True + idle_callback = self._idle_callbacks.pop(reg_id, None) + if idle_callback is not None: + return True + timeout_handler = self._timeout_handlers.pop(reg_id, None) + if timeout_handler is not None: + if timeout_handler.interval == self._timeout_interval: + if self._timeout_handlers: + self._timeout_interval = \ + min(x.interval for x in self._timeout_handlers.values()) + else: + self._timeout_interval = None + return True + f = self._poll_event_handler_ids.pop(reg_id, None) + if f is None: + return False + self._poll_obj.unregister(f) + if self._poll_event_queue: + # Discard any unhandled events that belong to this file, + # in order to prevent these events from being erroneously + # delivered to a future handler that is using a reallocated + # file descriptor of the same numeric value (causing + # extremely confusing bugs). + remaining_events = [] + discarded_events = False + for event in self._poll_event_queue: + if event[0] == f: + discarded_events = True + else: + remaining_events.append(event) + + if discarded_events: + self._poll_event_queue[:] = remaining_events + + del self._poll_event_handlers[f] + return True + +_can_poll_device = None + +def can_poll_device(): + """ + Test if it's possible to use poll() on a device such as a pty. This + is known to fail on Darwin. + @rtype: bool + @return: True if poll() on a device succeeds, False otherwise. 
+ """ + + global _can_poll_device + if _can_poll_device is not None: + return _can_poll_device + + if not hasattr(select, "poll"): + _can_poll_device = False + return _can_poll_device + + try: + dev_null = open('/dev/null', 'rb') + except IOError: + _can_poll_device = False + return _can_poll_device + + p = select.poll() + p.register(dev_null.fileno(), PollConstants.POLLIN) + + invalid_request = False + for f, event in p.poll(): + if event & PollConstants.POLLNVAL: + invalid_request = True + break + dev_null.close() + + _can_poll_device = not invalid_request + return _can_poll_device + +def create_poll_instance(): + """ + Create an instance of select.poll, or an instance of + PollSelectAdapter there is no poll() implementation or + it is broken somehow. + """ + if can_poll_device(): + return select.poll() + return PollSelectAdapter() diff --git a/portage_with_autodep/pym/portage/util/_eventloop/EventLoop.pyo b/portage_with_autodep/pym/portage/util/_eventloop/EventLoop.pyo Binary files differnew file mode 100644 index 0000000..6ce2883 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_eventloop/EventLoop.pyo diff --git a/portage_with_autodep/pym/portage/util/_eventloop/GlibEventLoop.py b/portage_with_autodep/pym/portage/util/_eventloop/GlibEventLoop.py new file mode 100644 index 0000000..f2f5c5e --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_eventloop/GlibEventLoop.py @@ -0,0 +1,23 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +class GlibEventLoop(object): + + # TODO: Support multiprocessing by using a separate glib.MainContext + # instance for each process. 
+ supports_multiprocessing = False + + def __init__(self): + import gi.repository.GLib as glib + self.IO_ERR = glib.IO_ERR + self.IO_HUP = glib.IO_HUP + self.IO_IN = glib.IO_IN + self.IO_NVAL = glib.IO_NVAL + self.IO_OUT = glib.IO_OUT + self.IO_PRI = glib.IO_PRI + self.iteration = glib.main_context_default().iteration + self.child_watch_add = glib.child_watch_add + self.idle_add = glib.idle_add + self.io_add_watch = glib.io_add_watch + self.timeout_add = glib.timeout_add + self.source_remove = glib.source_remove diff --git a/portage_with_autodep/pym/portage/util/_eventloop/GlibEventLoop.pyo b/portage_with_autodep/pym/portage/util/_eventloop/GlibEventLoop.pyo Binary files differnew file mode 100644 index 0000000..d3453a4 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_eventloop/GlibEventLoop.pyo diff --git a/portage_with_autodep/pym/portage/util/_eventloop/PollConstants.py b/portage_with_autodep/pym/portage/util/_eventloop/PollConstants.py new file mode 100644 index 0000000..d0270a9 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_eventloop/PollConstants.py @@ -0,0 +1,18 @@ +# Copyright 1999-2009 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import select +class PollConstants(object): + + """ + Provides POLL* constants that are equivalent to those from the + select module, for use by PollSelectAdapter. 
+ """ + + names = ("POLLIN", "POLLPRI", "POLLOUT", "POLLERR", "POLLHUP", "POLLNVAL") + v = 1 + for k in names: + locals()[k] = getattr(select, k, v) + v *= 2 + del k, v + diff --git a/portage_with_autodep/pym/portage/util/_eventloop/PollConstants.pyo b/portage_with_autodep/pym/portage/util/_eventloop/PollConstants.pyo Binary files differnew file mode 100644 index 0000000..6c7c953 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_eventloop/PollConstants.pyo diff --git a/portage_with_autodep/pym/portage/util/_eventloop/PollSelectAdapter.py b/portage_with_autodep/pym/portage/util/_eventloop/PollSelectAdapter.py new file mode 100644 index 0000000..17e63d9 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_eventloop/PollSelectAdapter.py @@ -0,0 +1,74 @@ +# Copyright 1999-2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +from .PollConstants import PollConstants +import select + +class PollSelectAdapter(object): + + """ + Use select to emulate a poll object, for + systems that don't support poll(). + """ + + def __init__(self): + self._registered = {} + self._select_args = [[], [], []] + + def register(self, fd, *args): + """ + Only POLLIN is currently supported! 
+ """ + if len(args) > 1: + raise TypeError( + "register expected at most 2 arguments, got " + \ + repr(1 + len(args))) + + eventmask = PollConstants.POLLIN | \ + PollConstants.POLLPRI | PollConstants.POLLOUT + if args: + eventmask = args[0] + + self._registered[fd] = eventmask + self._select_args = None + + def unregister(self, fd): + self._select_args = None + del self._registered[fd] + + def poll(self, *args): + if len(args) > 1: + raise TypeError( + "poll expected at most 2 arguments, got " + \ + repr(1 + len(args))) + + timeout = None + if args: + timeout = args[0] + + select_args = self._select_args + if select_args is None: + select_args = [list(self._registered), [], []] + + if timeout is not None: + select_args = select_args[:] + # Translate poll() timeout args to select() timeout args: + # + # | units | value(s) for indefinite block + # ---------|--------------|------------------------------ + # poll | milliseconds | omitted, negative, or None + # ---------|--------------|------------------------------ + # select | seconds | omitted + # ---------|--------------|------------------------------ + + if timeout is not None and timeout < 0: + timeout = None + if timeout is not None: + select_args.append(timeout / 1000) + + select_events = select.select(*select_args) + poll_events = [] + for fd in select_events[0]: + poll_events.append((fd, PollConstants.POLLIN)) + return poll_events + diff --git a/portage_with_autodep/pym/portage/util/_eventloop/PollSelectAdapter.pyo b/portage_with_autodep/pym/portage/util/_eventloop/PollSelectAdapter.pyo Binary files differnew file mode 100644 index 0000000..e9ecc51 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_eventloop/PollSelectAdapter.pyo diff --git a/portage_with_autodep/pym/portage/tests/unicode/__init__.py b/portage_with_autodep/pym/portage/util/_eventloop/__init__.py index 21a391a..418ad86 100644 --- a/portage_with_autodep/pym/portage/tests/unicode/__init__.py +++ 
b/portage_with_autodep/pym/portage/util/_eventloop/__init__.py @@ -1,2 +1,2 @@ -# Copyright 2010 Gentoo Foundation +# Copyright 2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 diff --git a/portage_with_autodep/pym/portage/util/_eventloop/__init__.pyo b/portage_with_autodep/pym/portage/util/_eventloop/__init__.pyo Binary files differnew file mode 100644 index 0000000..69864a6 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_eventloop/__init__.pyo diff --git a/portage_with_autodep/pym/portage/util/_eventloop/global_event_loop.py b/portage_with_autodep/pym/portage/util/_eventloop/global_event_loop.py new file mode 100644 index 0000000..502dab8 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_eventloop/global_event_loop.py @@ -0,0 +1,35 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import os + +from .EventLoop import EventLoop + +_default_constructor = EventLoop +#from .GlibEventLoop import GlibEventLoop as _default_constructor + +# If _default_constructor doesn't support multiprocessing, +# then _multiprocessing_constructor is used in subprocesses. +_multiprocessing_constructor = EventLoop + +_MAIN_PID = os.getpid() +_instances = {} + +def global_event_loop(): + """ + Get a global EventLoop (or compatible object) instance which + belongs exclusively to the current process. 
+ """ + + pid = os.getpid() + instance = _instances.get(pid) + if instance is not None: + return instance + + constructor = _default_constructor + if not constructor.supports_multiprocessing and pid != _MAIN_PID: + constructor = _multiprocessing_constructor + + instance = constructor() + _instances[pid] = instance + return instance diff --git a/portage_with_autodep/pym/portage/util/_eventloop/global_event_loop.pyo b/portage_with_autodep/pym/portage/util/_eventloop/global_event_loop.pyo Binary files differnew file mode 100644 index 0000000..3d57192 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_eventloop/global_event_loop.pyo diff --git a/portage_with_autodep/pym/portage/util/_get_vm_info.py b/portage_with_autodep/pym/portage/util/_get_vm_info.py new file mode 100644 index 0000000..e8ad938 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_get_vm_info.py @@ -0,0 +1,80 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import os +import platform +import subprocess + +from portage import _unicode_decode + +def get_vm_info(): + + vm_info = {} + + if platform.system() == 'Linux': + try: + proc = subprocess.Popen(["free"], + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + except OSError: + pass + else: + output = _unicode_decode(proc.communicate()[0]) + if proc.wait() == os.EX_OK: + for line in output.splitlines(): + line = line.split() + if len(line) < 2: + continue + if line[0] == "Mem:": + try: + vm_info["ram.total"] = int(line[1]) * 1024 + except ValueError: + pass + if len(line) > 3: + try: + vm_info["ram.free"] = int(line[3]) * 1024 + except ValueError: + pass + elif line[0] == "Swap:": + try: + vm_info["swap.total"] = int(line[1]) * 1024 + except ValueError: + pass + if len(line) > 3: + try: + vm_info["swap.free"] = int(line[3]) * 1024 + except ValueError: + pass + + else: + + try: + proc = subprocess.Popen(["sysctl", "-a"], + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + 
except OSError: + pass + else: + output = _unicode_decode(proc.communicate()[0]) + if proc.wait() == os.EX_OK: + for line in output.splitlines(): + line = line.split(":", 1) + if len(line) != 2: + continue + line[1] = line[1].strip() + if line[0] == "hw.physmem": + try: + vm_info["ram.total"] = int(line[1]) + except ValueError: + pass + elif line[0] == "vm.swap_total": + try: + vm_info["swap.total"] = int(line[1]) + except ValueError: + pass + elif line[0] == "Free Memory Pages": + if line[1][-1] == "K": + try: + vm_info["ram.free"] = int(line[1][:-1]) * 1024 + except ValueError: + pass + + return vm_info diff --git a/portage_with_autodep/pym/portage/util/_info_files.py b/portage_with_autodep/pym/portage/util/_info_files.py new file mode 100644 index 0000000..fabf74b --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_info_files.py @@ -0,0 +1,138 @@ +# Copyright 1999-2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import errno +import logging +import re +import stat +import subprocess + +import portage +from portage import os + +def chk_updated_info_files(root, infodirs, prev_mtimes): + + if os.path.exists("/usr/bin/install-info"): + out = portage.output.EOutput() + regen_infodirs = [] + for z in infodirs: + if z == '': + continue + inforoot = portage.util.normalize_path(root + z) + if os.path.isdir(inforoot) and \ + not [x for x in os.listdir(inforoot) \ + if x.startswith('.keepinfodir')]: + infomtime = os.stat(inforoot)[stat.ST_MTIME] + if inforoot not in prev_mtimes or \ + prev_mtimes[inforoot] != infomtime: + regen_infodirs.append(inforoot) + + if not regen_infodirs: + portage.util.writemsg_stdout("\n") + if portage.util.noiselimit >= 0: + out.einfo("GNU info directory index is up-to-date.") + else: + portage.util.writemsg_stdout("\n") + if portage.util.noiselimit >= 0: + out.einfo("Regenerating GNU info directory index...") + + dir_extensions = ("", ".gz", ".bz2") + icount = 0 + badcount = 0 + errmsg = "" + 
for inforoot in regen_infodirs: + if inforoot == '': + continue + + if not os.path.isdir(inforoot) or \ + not os.access(inforoot, os.W_OK): + continue + + file_list = os.listdir(inforoot) + file_list.sort() + dir_file = os.path.join(inforoot, "dir") + moved_old_dir = False + processed_count = 0 + for x in file_list: + if x.startswith(".") or \ + os.path.isdir(os.path.join(inforoot, x)): + continue + if x.startswith("dir"): + skip = False + for ext in dir_extensions: + if x == "dir" + ext or \ + x == "dir" + ext + ".old": + skip = True + break + if skip: + continue + if processed_count == 0: + for ext in dir_extensions: + try: + os.rename(dir_file + ext, dir_file + ext + ".old") + moved_old_dir = True + except EnvironmentError as e: + if e.errno != errno.ENOENT: + raise + del e + processed_count += 1 + try: + proc = subprocess.Popen( + ['/usr/bin/install-info', + '--dir-file=%s' % os.path.join(inforoot, "dir"), + os.path.join(inforoot, x)], + env=dict(os.environ, LANG="C", LANGUAGE="C"), + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + except OSError: + myso = None + else: + myso = portage._unicode_decode( + proc.communicate()[0]).rstrip("\n") + proc.wait() + existsstr = "already exists, for file `" + if myso: + if re.search(existsstr, myso): + # Already exists... Don't increment the count for this. + pass + elif myso[:44] == "install-info: warning: no info dir entry in ": + # This info file doesn't contain a DIR-header: install-info produces this + # (harmless) warning (the --quiet switch doesn't seem to work). + # Don't increment the count for this. + pass + else: + badcount += 1 + errmsg += myso + "\n" + icount += 1 + + if moved_old_dir and not os.path.exists(dir_file): + # We didn't generate a new dir file, so put the old file + # back where it was originally found. 
+ for ext in dir_extensions: + try: + os.rename(dir_file + ext + ".old", dir_file + ext) + except EnvironmentError as e: + if e.errno != errno.ENOENT: + raise + del e + + # Clean dir.old cruft so that they don't prevent + # unmerge of otherwise empty directories. + for ext in dir_extensions: + try: + os.unlink(dir_file + ext + ".old") + except EnvironmentError as e: + if e.errno != errno.ENOENT: + raise + del e + + #update mtime so we can potentially avoid regenerating. + prev_mtimes[inforoot] = os.stat(inforoot)[stat.ST_MTIME] + + if badcount: + out.eerror("Processed %d info files; %d errors." % \ + (icount, badcount)) + portage.util.writemsg_level(errmsg, + level=logging.ERROR, noiselevel=-1) + else: + if icount > 0 and portage.util.noiselimit >= 0: + out.einfo("Processed %d info files." % (icount,)) diff --git a/portage_with_autodep/pym/portage/util/_path.py b/portage_with_autodep/pym/portage/util/_path.py new file mode 100644 index 0000000..6fbcb43 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_path.py @@ -0,0 +1,27 @@ +# Copyright 2013 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import stat + +from portage import os +from portage.exception import PermissionDenied + +def exists_raise_eaccess(path): + try: + os.stat(path) + except OSError as e: + if e.errno == PermissionDenied.errno: + raise PermissionDenied("stat('%s')" % path) + return False + else: + return True + +def isdir_raise_eaccess(path): + try: + st = os.stat(path) + except OSError as e: + if e.errno == PermissionDenied.errno: + raise PermissionDenied("stat('%s')" % path) + return False + else: + return stat.S_ISDIR(st.st_mode) diff --git a/portage_with_autodep/pym/portage/util/_pty.py b/portage_with_autodep/pym/portage/util/_pty.py index f45ff0a..11c8b92 100644 --- a/portage_with_autodep/pym/portage/util/_pty.py +++ b/portage_with_autodep/pym/portage/util/_pty.py @@ -1,144 +1,20 @@ # Copyright 2010-2011 Gentoo Foundation # Distributed under the 
terms of the GNU General Public License v2 -import array -import fcntl import platform import pty -import select -import sys import termios -from portage import os, _unicode_decode, _unicode_encode +from portage import os from portage.output import get_term_size, set_term_size -from portage.process import spawn_bash from portage.util import writemsg -def _can_test_pty_eof(): - """ - The _test_pty_eof() function seems to hang on most - kernels other than Linux. - This was reported for the following kernels which used to work fine - without this EOF test: Darwin, AIX, FreeBSD. They seem to hang on - the slave_file.close() call. Note that Python's implementation of - openpty on Solaris already caused random hangs without this EOF test - and hence is globally disabled. - @rtype: bool - @returns: True if _test_pty_eof() won't hang, False otherwise. - """ - return platform.system() in ("Linux",) - -def _test_pty_eof(fdopen_buffered=False): - """ - Returns True if this issues is fixed for the currently - running version of python: http://bugs.python.org/issue5380 - Raises an EnvironmentError from openpty() if it fails. - - NOTE: This issue is only problematic when array.fromfile() - is used, rather than os.read(). However, array.fromfile() - is preferred since it is approximately 10% faster. - - New development: It appears that array.fromfile() is usable - with python3 as long as fdopen is called with a bufsize - argument of 0. - """ - - use_fork = False - - test_string = 2 * "blah blah blah\n" - test_string = _unicode_decode(test_string, - encoding='utf_8', errors='strict') - - # may raise EnvironmentError - master_fd, slave_fd = pty.openpty() - - # Non-blocking mode is required for Darwin kernel. - fcntl.fcntl(master_fd, fcntl.F_SETFL, - fcntl.fcntl(master_fd, fcntl.F_GETFL) | os.O_NONBLOCK) - - # Disable post-processing of output since otherwise weird - # things like \n -> \r\n transformations may occur. 
- mode = termios.tcgetattr(slave_fd) - mode[1] &= ~termios.OPOST - termios.tcsetattr(slave_fd, termios.TCSANOW, mode) - - # Simulate a subprocess writing some data to the - # slave end of the pipe, and then exiting. - pid = None - if use_fork: - pids = spawn_bash(_unicode_encode("echo -n '%s'" % test_string, - encoding='utf_8', errors='strict'), env=os.environ, - fd_pipes={0:sys.stdin.fileno(), 1:slave_fd, 2:slave_fd}, - returnpid=True) - if isinstance(pids, int): - os.close(master_fd) - os.close(slave_fd) - raise EnvironmentError('spawn failed') - pid = pids[0] - else: - os.write(slave_fd, _unicode_encode(test_string, - encoding='utf_8', errors='strict')) - os.close(slave_fd) - - # If using a fork, we must wait for the child here, - # in order to avoid a race condition that would - # lead to inconsistent results. - if pid is not None: - os.waitpid(pid, 0) - - if fdopen_buffered: - master_file = os.fdopen(master_fd, 'rb') - else: - master_file = os.fdopen(master_fd, 'rb', 0) - eof = False - data = [] - iwtd = [master_file] - owtd = [] - ewtd = [] - - while not eof: - - events = select.select(iwtd, owtd, ewtd) - if not events[0]: - eof = True - break - - buf = array.array('B') - try: - buf.fromfile(master_file, 1024) - except (EOFError, IOError): - eof = True - - if not buf: - eof = True - else: - data.append(_unicode_decode(buf.tostring(), - encoding='utf_8', errors='strict')) - - master_file.close() - - return test_string == ''.join(data) - -# If _test_pty_eof() can't be used for runtime detection of -# http://bugs.python.org/issue5380, openpty can't safely be used -# unless we can guarantee that the current version of python has -# been fixed (affects all current versions of python3). When -# this issue is fixed in python3, we can add another sys.hexversion -# conditional to enable openpty support in the fixed versions. 
-if sys.hexversion >= 0x3000000 and not _can_test_pty_eof(): - _disable_openpty = True -else: - # Disable the use of openpty on Solaris as it seems Python's openpty - # implementation doesn't play nice on Solaris with Portage's - # behaviour causing hangs/deadlocks. - # Additional note for the future: on Interix, pipes do NOT work, so - # _disable_openpty on Interix must *never* be True - _disable_openpty = platform.system() in ("SunOS",) -_tested_pty = False - -if not _can_test_pty_eof(): - # Skip _test_pty_eof() on systems where it hangs. - _tested_pty = True +# Disable the use of openpty on Solaris as it seems Python's openpty +# implementation doesn't play nice on Solaris with Portage's +# behaviour causing hangs/deadlocks. +# Additional note for the future: on Interix, pipes do NOT work, so +# _disable_openpty on Interix must *never* be True +_disable_openpty = platform.system() in ("SunOS",) _fbsd_test_pty = platform.system() == 'FreeBSD' @@ -151,24 +27,14 @@ def _create_pty_or_pipe(copy_term_size=None): then the term size will be copied to the pty. @type copy_term_size: int @rtype: tuple - @returns: A tuple of (is_pty, master_fd, slave_fd) where + @return: A tuple of (is_pty, master_fd, slave_fd) where is_pty is True if a pty was successfully allocated, and False if a normal pipe was allocated. 
""" got_pty = False - global _disable_openpty, _fbsd_test_pty, _tested_pty - if not (_tested_pty or _disable_openpty): - try: - if not _test_pty_eof(): - _disable_openpty = True - except EnvironmentError as e: - _disable_openpty = True - writemsg("openpty failed: '%s'\n" % str(e), - noiselevel=-1) - del e - _tested_pty = True + global _disable_openpty, _fbsd_test_pty if _fbsd_test_pty and not _disable_openpty: # Test for python openpty breakage after freebsd7 to freebsd8 diff --git a/portage_with_autodep/pym/portage/util/_pty.pyo b/portage_with_autodep/pym/portage/util/_pty.pyo Binary files differnew file mode 100644 index 0000000..70b5eb0 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_pty.pyo diff --git a/portage_with_autodep/pym/portage/util/_urlopen.py b/portage_with_autodep/pym/portage/util/_urlopen.py new file mode 100644 index 0000000..307624b --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_urlopen.py @@ -0,0 +1,42 @@ +# Copyright 2012 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 + +import sys + +try: + from urllib.request import urlopen as _urlopen + import urllib.parse as urllib_parse + import urllib.request as urllib_request + from urllib.parse import splituser as urllib_parse_splituser +except ImportError: + from urllib import urlopen as _urlopen + import urlparse as urllib_parse + import urllib2 as urllib_request + from urllib import splituser as urllib_parse_splituser + +def urlopen(url): + try: + return _urlopen(url) + except SystemExit: + raise + except Exception: + if sys.hexversion < 0x3000000: + raise + parse_result = urllib_parse.urlparse(url) + if parse_result.scheme not in ("http", "https") or \ + not parse_result.username: + raise + + return _new_urlopen(url) + +def _new_urlopen(url): + # This is experimental code for bug #413983. 
+ parse_result = urllib_parse.urlparse(url) + netloc = urllib_parse_splituser(parse_result.netloc)[1] + url = urllib_parse.urlunparse((parse_result.scheme, netloc, parse_result.path, parse_result.params, parse_result.query, parse_result.fragment)) + password_manager = urllib_request.HTTPPasswordMgrWithDefaultRealm() + if parse_result.username is not None: + password_manager.add_password(None, url, parse_result.username, parse_result.password) + auth_handler = urllib_request.HTTPBasicAuthHandler(password_manager) + opener = urllib_request.build_opener(auth_handler) + return opener.open(url) diff --git a/portage_with_autodep/pym/portage/util/_urlopen.pyo b/portage_with_autodep/pym/portage/util/_urlopen.pyo Binary files differnew file mode 100644 index 0000000..9f51de8 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/_urlopen.pyo diff --git a/portage_with_autodep/pym/portage/util/digraph.py b/portage_with_autodep/pym/portage/util/digraph.py index 1bbe10f..f3ae658 100644 --- a/portage_with_autodep/pym/portage/util/digraph.py +++ b/portage_with_autodep/pym/portage/util/digraph.py @@ -317,16 +317,23 @@ class digraph(object): """ all_cycles = [] for node in self.nodes: + # If we have multiple paths of the same length, we have to + # return them all, so that we always get the same results + # even with PYTHONHASHSEED="random" enabled. 
shortest_path = None + candidates = [] for child in self.child_nodes(node, ignore_priority): path = self.shortest_path(child, node, ignore_priority) if path is None: continue - if not shortest_path or len(shortest_path) > len(path): + if not shortest_path or len(shortest_path) >= len(path): shortest_path = path - if shortest_path: - if not max_length or len(shortest_path) <= max_length: - all_cycles.append(shortest_path) + candidates.append(path) + if shortest_path and \ + (not max_length or len(shortest_path) <= max_length): + for path in candidates: + if len(path) == len(shortest_path): + all_cycles.append(path) return all_cycles # Backward compatibility diff --git a/portage_with_autodep/pym/portage/util/digraph.pyo b/portage_with_autodep/pym/portage/util/digraph.pyo Binary files differnew file mode 100644 index 0000000..8e503a6 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/digraph.pyo diff --git a/portage_with_autodep/pym/portage/util/env_update.py b/portage_with_autodep/pym/portage/util/env_update.py index eb8a0d9..ace4077 100644 --- a/portage_with_autodep/pym/portage/util/env_update.py +++ b/portage_with_autodep/pym/portage/util/env_update.py @@ -19,12 +19,14 @@ from portage.process import find_binary from portage.util import atomic_ofstream, ensure_dirs, getconfig, \ normalize_path, writemsg from portage.util.listdir import listdir +from portage.dbapi.vartree import vartree +from portage.package.ebuild.config import config if sys.hexversion >= 0x3000000: long = int def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None, - env=None, writemsg_level=None): + env=None, writemsg_level=None, vardbapi=None): """ Parse /etc/env.d and use it to generate /etc/profile.env, csh.env, ld.so.conf, and prelink.conf. Finally, run ldconfig. When ldconfig is @@ -39,6 +41,40 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None, defaults to portage.settings["ROOT"]. 
@type target_root: String (Path) """ + if vardbapi is None: + if isinstance(env, config): + vardbapi = vartree(settings=env).dbapi + else: + if target_root is None: + eprefix = portage.settings["EPREFIX"] + target_root = portage.settings["ROOT"] + target_eroot = portage.settings['EROOT'] + else: + eprefix = portage.const.EPREFIX + target_eroot = os.path.join(target_root, + eprefix.lstrip(os.sep)) + target_eroot = target_eroot.rstrip(os.sep) + os.sep + if hasattr(portage, "db") and target_eroot in portage.db: + vardbapi = portage.db[target_eroot]["vartree"].dbapi + else: + settings = config(config_root=target_root, + target_root=target_root, eprefix=eprefix) + target_root = settings["ROOT"] + if env is None: + env = settings + vardbapi = vartree(settings=settings).dbapi + + # Lock the config memory file to prevent symlink creation + # in merge_contents from overlapping with env-update. + vardbapi._fs_lock() + try: + return _env_update(makelinks, target_root, prev_mtimes, contents, + env, writemsg_level) + finally: + vardbapi._fs_unlock() + +def _env_update(makelinks, target_root, prev_mtimes, contents, env, + writemsg_level): if writemsg_level is None: writemsg_level = portage.util.writemsg_level if target_root is None: @@ -46,8 +82,13 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None, if prev_mtimes is None: prev_mtimes = portage.mtimedb["ldpath"] if env is None: - env = os.environ - envd_dir = os.path.join(target_root, "etc", "env.d") + settings = portage.settings + else: + settings = env + + eprefix = settings.get("EPREFIX", "") + eprefix_lstrip = eprefix.lstrip(os.sep) + envd_dir = os.path.join(target_root, eprefix_lstrip, "etc", "env.d") ensure_dirs(envd_dir, mode=0o755) fns = listdir(envd_dir, EmptyOnError=1) fns.sort() @@ -123,7 +164,8 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None, they won't be overwritten by this dict.update call.""" env.update(myconfig) - ldsoconf_path = 
os.path.join(target_root, "etc", "ld.so.conf") + ldsoconf_path = os.path.join( + target_root, eprefix_lstrip, "etc", "ld.so.conf") try: myld = io.open(_unicode_encode(ldsoconf_path, encoding=_encodings['fs'], errors='strict'), @@ -141,8 +183,6 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None, raise oldld = None - ld_cache_update=False - newld = specials["LDPATH"] if (oldld != newld): #ld.so.conf needs updating and ldconfig needs to be run @@ -152,12 +192,11 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None, for x in specials["LDPATH"]: myfd.write(x + "\n") myfd.close() - ld_cache_update=True # Update prelink.conf if we are prelink-enabled if prelink_capable: - newprelink = atomic_ofstream( - os.path.join(target_root, "etc", "prelink.conf")) + newprelink = atomic_ofstream(os.path.join( + target_root, eprefix_lstrip, "etc", "prelink.conf")) newprelink.write("# prelink.conf autogenerated by env-update; make all changes to\n") newprelink.write("# contents of /etc/env.d directory\n") @@ -193,7 +232,7 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None, lib_dirs = set() for lib_dir in set(specials["LDPATH"] + \ ['usr/lib','usr/lib64','usr/lib32','lib','lib64','lib32']): - x = os.path.join(target_root, lib_dir.lstrip(os.sep)) + x = os.path.join(target_root, eprefix_lstrip, lib_dir.lstrip(os.sep)) try: newldpathtime = os.stat(x)[stat.ST_MTIME] lib_dirs.add(normalize_path(x)) @@ -223,11 +262,8 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None, prev_mtimes[x] = newldpathtime mtime_changed = True - if mtime_changed: - ld_cache_update = True - if makelinks and \ - not ld_cache_update and \ + not mtime_changed and \ contents is not None: libdir_contents_changed = False for mypath, mydata in contents.items(): @@ -241,12 +277,12 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None, makelinks = False ldconfig = "/sbin/ldconfig" - if 
"CHOST" in env and "CBUILD" in env and \ - env["CHOST"] != env["CBUILD"]: - ldconfig = find_binary("%s-ldconfig" % env["CHOST"]) + if "CHOST" in settings and "CBUILD" in settings and \ + settings["CHOST"] != settings["CBUILD"]: + ldconfig = find_binary("%s-ldconfig" % settings["CHOST"]) # Only run ldconfig as needed - if (ld_cache_update or makelinks) and ldconfig: + if makelinks and ldconfig and not eprefix: # ldconfig has very different behaviour between FreeBSD and Linux if ostype == "Linux" or ostype.lower().endswith("gnu"): # We can't update links if we haven't cleaned other versions first, as @@ -272,7 +308,8 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None, cenvnotice += "# GO INTO /etc/csh.cshrc NOT /etc/csh.env\n\n" #create /etc/profile.env for bash support - outfile = atomic_ofstream(os.path.join(target_root, "etc", "profile.env")) + outfile = atomic_ofstream(os.path.join( + target_root, eprefix_lstrip, "etc", "profile.env")) outfile.write(penvnotice) env_keys = [ x for x in env if x != "LDPATH" ] @@ -286,7 +323,8 @@ def env_update(makelinks=1, target_root=None, prev_mtimes=None, contents=None, outfile.close() #create /etc/csh.env for (t)csh support - outfile = atomic_ofstream(os.path.join(target_root, "etc", "csh.env")) + outfile = atomic_ofstream(os.path.join( + target_root, eprefix_lstrip, "etc", "csh.env")) outfile.write(cenvnotice) for x in env_keys: outfile.write("setenv %s '%s'\n" % (x, env[x])) diff --git a/portage_with_autodep/pym/portage/util/env_update.pyo b/portage_with_autodep/pym/portage/util/env_update.pyo Binary files differnew file mode 100644 index 0000000..ee3b187 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/env_update.pyo diff --git a/portage_with_autodep/pym/portage/util/lafilefixer.py b/portage_with_autodep/pym/portage/util/lafilefixer.py index 2b093d8..54ff20d 100644 --- a/portage_with_autodep/pym/portage/util/lafilefixer.py +++ b/portage_with_autodep/pym/portage/util/lafilefixer.py @@ 
-80,7 +80,7 @@ def rewrite_lafile(contents): @param contents: the contents of a libtool archive file @type contents: bytes @rtype: tuple - @returns: (True, fixed_contents) if something needed to be + @return: (True, fixed_contents) if something needed to be fixed, (False, None) otherwise. """ #Parse the 'dependency_libs' and 'inherited_linker_flags' lines. diff --git a/portage_with_autodep/pym/portage/util/lafilefixer.pyo b/portage_with_autodep/pym/portage/util/lafilefixer.pyo Binary files differnew file mode 100644 index 0000000..a6e06ab --- /dev/null +++ b/portage_with_autodep/pym/portage/util/lafilefixer.pyo diff --git a/portage_with_autodep/pym/portage/util/listdir.py b/portage_with_autodep/pym/portage/util/listdir.py index 5753d2f..c2628cb 100644 --- a/portage_with_autodep/pym/portage/util/listdir.py +++ b/portage_with_autodep/pym/portage/util/listdir.py @@ -109,7 +109,7 @@ def listdir(mypath, recursive=False, filesonly=False, ignorecvs=False, ignorelis @param dirsonly: Only return directories. @type dirsonly: Boolean @rtype: List - @returns: A list of files and directories (or just files or just directories) or an empty list. + @return: A list of files and directories (or just files or just directories) or an empty list. 
""" list, ftype = cacheddir(mypath, ignorecvs, ignorelist, EmptyOnError, followSymlinks) diff --git a/portage_with_autodep/pym/portage/util/listdir.pyo b/portage_with_autodep/pym/portage/util/listdir.pyo Binary files differnew file mode 100644 index 0000000..0f02d6d --- /dev/null +++ b/portage_with_autodep/pym/portage/util/listdir.pyo diff --git a/portage_with_autodep/pym/portage/util/movefile.py b/portage_with_autodep/pym/portage/util/movefile.py index 30cb6f1..10577b5 100644 --- a/portage_with_autodep/pym/portage/util/movefile.py +++ b/portage_with_autodep/pym/portage/util/movefile.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ['movefile'] @@ -7,33 +7,98 @@ import errno import os as _os import shutil as _shutil import stat +import subprocess +import textwrap import portage from portage import bsd_chflags, _encodings, _os_overrides, _selinux, \ - _unicode_decode, _unicode_func_wrapper, _unicode_module_wrapper + _unicode_decode, _unicode_encode, _unicode_func_wrapper,\ + _unicode_module_wrapper from portage.const import MOVE_BINARY +from portage.exception import OperationNotSupported from portage.localization import _ from portage.process import spawn from portage.util import writemsg +def _apply_stat(src_stat, dest): + _os.chown(dest, src_stat.st_uid, src_stat.st_gid) + _os.chmod(dest, stat.S_IMODE(src_stat.st_mode)) + +if hasattr(_os, "getxattr"): + # Python >=3.3 and GNU/Linux + def _copyxattr(src, dest): + for attr in _os.listxattr(src): + try: + _os.setxattr(dest, attr, _os.getxattr(src, attr)) + raise_exception = False + except OSError: + raise_exception = True + if raise_exception: + raise OperationNotSupported("Filesystem containing file '%s' does not support extended attributes" % dest) +else: + try: + import xattr + except ImportError: + xattr = None + if xattr is not None: + def _copyxattr(src, dest): + for attr in 
xattr.list(src): + try: + xattr.set(dest, attr, xattr.get(src, attr)) + raise_exception = False + except IOError: + raise_exception = True + if raise_exception: + raise OperationNotSupported("Filesystem containing file '%s' does not support extended attributes" % dest) + else: + _devnull = open("/dev/null", "wb") + try: + subprocess.call(["getfattr", "--version"], stdout=_devnull) + subprocess.call(["setfattr", "--version"], stdout=_devnull) + _has_getfattr_and_setfattr = True + except OSError: + _has_getfattr_and_setfattr = False + _devnull.close() + if _has_getfattr_and_setfattr: + def _copyxattr(src, dest): + getfattr_process = subprocess.Popen(["getfattr", "-d", "--absolute-names", src], stdout=subprocess.PIPE) + getfattr_process.wait() + extended_attributes = getfattr_process.stdout.readlines() + getfattr_process.stdout.close() + if extended_attributes: + extended_attributes[0] = b"# file: " + _unicode_encode(dest) + b"\n" + setfattr_process = subprocess.Popen(["setfattr", "--restore=-"], stdin=subprocess.PIPE, stderr=subprocess.PIPE) + setfattr_process.communicate(input=b"".join(extended_attributes)) + if setfattr_process.returncode != 0: + raise OperationNotSupported("Filesystem containing file '%s' does not support extended attributes" % dest) + else: + def _copyxattr(src, dest): + pass + def movefile(src, dest, newmtime=None, sstat=None, mysettings=None, hardlink_candidates=None, encoding=_encodings['fs']): """moves a file from src to dest, preserving all permissions and attributes; mtime will be preserved even when moving across filesystems. Returns true on success and false on failure. 
Move is atomic.""" - #print "movefile("+str(src)+","+str(dest)+","+str(newmtime)+","+str(sstat)+")" if mysettings is None: mysettings = portage.settings + src_bytes = _unicode_encode(src, encoding=encoding, errors='strict') + dest_bytes = _unicode_encode(dest, encoding=encoding, errors='strict') + xattr_enabled = "xattr" in mysettings.features selinux_enabled = mysettings.selinux_enabled() if selinux_enabled: selinux = _unicode_module_wrapper(_selinux, encoding=encoding) + _copyfile = selinux.copyfile + _rename = selinux.rename + else: + _copyfile = _shutil.copyfile + _rename = _os.rename lchown = _unicode_func_wrapper(portage.data.lchown, encoding=encoding) os = _unicode_module_wrapper(_os, encoding=encoding, overrides=_os_overrides) - shutil = _unicode_module_wrapper(_shutil, encoding=encoding) try: if not sstat: @@ -42,8 +107,9 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None, except SystemExit as e: raise except Exception as e: - print(_("!!! Stating source file failed... movefile()")) - print("!!!",e) + writemsg("!!! %s\n" % _("Stating source file failed... movefile()"), + noiselevel=-1) + writemsg(_unicode_decode("!!! %s\n") % (e,), noiselevel=-1) return None destexists=1 @@ -75,9 +141,9 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None, if stat.S_ISLNK(sstat[stat.ST_MODE]): try: target=os.readlink(src) - if mysettings and mysettings["D"]: - if target.find(mysettings["D"])==0: - target=target[len(mysettings["D"]):] + if mysettings and "D" in mysettings and \ + target.startswith(mysettings["D"]): + target = target[len(mysettings["D"])-1:] if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]): os.unlink(dest) try: @@ -100,9 +166,10 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None, except SystemExit as e: raise except Exception as e: - print(_("!!! failed to properly create symlink:")) - print("!!!",dest,"->",target) - print("!!!",e) + writemsg("!!! 
%s\n" % _("failed to properly create symlink:"), + noiselevel=-1) + writemsg("!!! %s -> %s\n" % (dest, target), noiselevel=-1) + writemsg(_unicode_decode("!!! %s\n") % (e,), noiselevel=-1) return None hardlinked = False @@ -152,26 +219,40 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None, except OSError as e: if e.errno != errno.EXDEV: # Some random error. - print(_("!!! Failed to move %(src)s to %(dest)s") % {"src": src, "dest": dest}) - print("!!!",e) + writemsg("!!! %s\n" % _("Failed to move %(src)s to %(dest)s") % + {"src": src, "dest": dest}, noiselevel=-1) + writemsg(_unicode_decode("!!! %s\n") % (e,), noiselevel=-1) return None # Invalid cross-device-link 'bind' mounted or actually Cross-Device if renamefailed: - didcopy=0 if stat.S_ISREG(sstat[stat.ST_MODE]): + dest_tmp = dest + "#new" + dest_tmp_bytes = _unicode_encode(dest_tmp, encoding=encoding, + errors='strict') try: # For safety copy then move it over. - if selinux_enabled: - selinux.copyfile(src, dest + "#new") - selinux.rename(dest + "#new", dest) - else: - shutil.copyfile(src,dest+"#new") - os.rename(dest+"#new",dest) - didcopy=1 + _copyfile(src_bytes, dest_tmp_bytes) + if xattr_enabled: + try: + _copyxattr(src_bytes, dest_tmp_bytes) + except SystemExit: + raise + except: + msg = _("Failed to copy extended attributes. " + "In order to avoid this error, set " + "FEATURES=\"-xattr\" in make.conf.") + msg = textwrap.wrap(msg, 65) + for line in msg: + writemsg("!!! %s\n" % (line,), noiselevel=-1) + raise + _apply_stat(sstat, dest_tmp_bytes) + _rename(dest_tmp_bytes, dest_bytes) + _os.unlink(src_bytes) except SystemExit as e: raise except Exception as e: - print(_('!!! copy %(src)s -> %(dest)s failed.') % {"src": src, "dest": dest}) - print("!!!",e) + writemsg("!!! %s\n" % _('copy %(src)s -> %(dest)s failed.') % + {"src": src, "dest": dest}, noiselevel=-1) + writemsg(_unicode_decode("!!! 
%s\n") % (e,), noiselevel=-1) return None else: #we don't yet handle special, so we need to fall back to /bin/mv @@ -183,21 +264,6 @@ def movefile(src, dest, newmtime=None, sstat=None, mysettings=None, "dest": _unicode_decode(dest, encoding=encoding)}, noiselevel=-1) writemsg("!!! %s\n" % a, noiselevel=-1) return None # failure - try: - if didcopy: - if stat.S_ISLNK(sstat[stat.ST_MODE]): - lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID]) - else: - os.chown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID]) - os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown - os.unlink(src) - except SystemExit as e: - raise - except Exception as e: - print(_("!!! Failed to chown/chmod/unlink in movefile()")) - print("!!!",dest) - print("!!!",e) - return None # Always use stat_obj[stat.ST_MTIME] for the integral timestamp which # is returned, since the stat_obj.st_mtime float attribute rounds *up* diff --git a/portage_with_autodep/pym/portage/util/movefile.pyo b/portage_with_autodep/pym/portage/util/movefile.pyo Binary files differnew file mode 100644 index 0000000..1228ee7 --- /dev/null +++ b/portage_with_autodep/pym/portage/util/movefile.pyo diff --git a/portage_with_autodep/pym/portage/util/mtimedb.py b/portage_with_autodep/pym/portage/util/mtimedb.py index 67f93e8..30922a9 100644 --- a/portage_with_autodep/pym/portage/util/mtimedb.py +++ b/portage_with_autodep/pym/portage/util/mtimedb.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 __all__ = ['MtimeDB'] @@ -9,35 +9,77 @@ try: except ImportError: import pickle +import errno +import io +import json +import sys + import portage +from portage import _encodings +from portage import _unicode_decode from portage import _unicode_encode from portage.data import portage_gid, uid from portage.localization import _ from portage.util import apply_secpass_permissions, atomic_ofstream, writemsg class 
MtimeDB(dict): + + # JSON read support has been available since portage-2.1.10.49. + _json_write = True + + _json_write_opts = { + "ensure_ascii": False, + "indent": "\t", + "sort_keys": True + } + if sys.hexversion < 0x30200F0: + # indent only supports int number of spaces + _json_write_opts["indent"] = 4 + def __init__(self, filename): dict.__init__(self) self.filename = filename self._load(filename) def _load(self, filename): + f = None + content = None try: f = open(_unicode_encode(filename), 'rb') - mypickle = pickle.Unpickler(f) - try: - mypickle.find_global = None - except AttributeError: - # TODO: If py3k, override Unpickler.find_class(). + content = f.read() + except EnvironmentError as e: + if getattr(e, 'errno', None) in (errno.ENOENT, errno.EACCES): pass - d = mypickle.load() - f.close() - del f - except (IOError, OSError, EOFError, ValueError, pickle.UnpicklingError) as e: - if isinstance(e, pickle.UnpicklingError): + else: writemsg(_("!!! Error loading '%s': %s\n") % \ - (filename, str(e)), noiselevel=-1) - del e + (filename, e), noiselevel=-1) + finally: + if f is not None: + f.close() + + d = None + if content: + try: + d = json.loads(_unicode_decode(content, + encoding=_encodings['repo.content'], errors='strict')) + except SystemExit: + raise + except Exception as e: + try: + mypickle = pickle.Unpickler(io.BytesIO(content)) + try: + mypickle.find_global = None + except AttributeError: + # Python >=3 + pass + d = mypickle.load() + except SystemExit: + raise + except Exception: + writemsg(_("!!! 
Error loading '%s': %s\n") % \ + (filename, e), noiselevel=-1) + + if d is None: d = {} if "old" in d: @@ -74,7 +116,12 @@ class MtimeDB(dict): except EnvironmentError: pass else: - pickle.dump(d, f, protocol=2) + if self._json_write: + f.write(_unicode_encode( + json.dumps(d, **self._json_write_opts), + encoding=_encodings['repo.content'], errors='strict')) + else: + pickle.dump(d, f, protocol=2) f.close() apply_secpass_permissions(self.filename, uid=uid, gid=portage_gid, mode=0o644) diff --git a/portage_with_autodep/pym/portage/util/mtimedb.pyo b/portage_with_autodep/pym/portage/util/mtimedb.pyo Binary files differnew file mode 100644 index 0000000..fda479a --- /dev/null +++ b/portage_with_autodep/pym/portage/util/mtimedb.pyo diff --git a/portage_with_autodep/pym/portage/util/whirlpool.py b/portage_with_autodep/pym/portage/util/whirlpool.py new file mode 100644 index 0000000..c696f6f --- /dev/null +++ b/portage_with_autodep/pym/portage/util/whirlpool.py @@ -0,0 +1,794 @@ +## whirlpool.py - pure Python implementation of the Whirlpool algorithm. +## Bjorn Edstrom <be@bjrn.se> 16 december 2007. +## +## Copyrights +## ========== +## +## This code is based on the reference implementation by +## Paulo S.L.M. Barreto and Vincent Rijmen. The reference implementation +## is placed in the public domain but has the following headers: +## +## * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ''AS IS'' AND ANY EXPRESS +## * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +## * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +## * ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE +## * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +## * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +## * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +## * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +## * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +## * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +## * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +## * +## */ +## /* The code contained in this file (Whirlpool.c) is in the public domain. */ +## +## This Python implementation is therefore also placed in the public domain. + +import sys +if sys.hexversion >= 0x3000000: + xrange = range + +#block_size = 64 +digest_size = 64 +digestsize = 64 + +class Whirlpool: + """Return a new Whirlpool object. An optional string argument + may be provided; if present, this string will be automatically + hashed.""" + def __init__(self, arg=None): + self.ctx = WhirlpoolStruct() + if arg: + self.update(arg) + self.digest_status = 0 + + def update(self, arg): + """update(arg)""" + WhirlpoolAdd(arg, len(arg)*8, self.ctx) + self.digest_status = 0 + + def digest(self): + """digest()""" + if self.digest_status == 0: + self.dig = WhirlpoolFinalize(self.ctx) + self.digest_status = 1 + return self.dig + + def hexdigest(self): + """hexdigest()""" + dig = self.digest() + tempstr = '' + for d in dig: + xxx = '%02x' % (ord(d)) + tempstr = tempstr + xxx + return tempstr + + def copy(self): + """copy()""" + import copy + return copy.deepcopy(self) + + +def new(init=None): + """Return a new Whirlpool object. An optional string argument + may be provided; if present, this string will be automatically + hashed.""" + return Whirlpool(init) + +# +# Private. 
+# + +R = 10 + +C0 = [ +0x18186018c07830d8, 0x23238c2305af4626, 0xc6c63fc67ef991b8, 0xe8e887e8136fcdfb, +0x878726874ca113cb, 0xb8b8dab8a9626d11, 0x0101040108050209, 0x4f4f214f426e9e0d, +0x3636d836adee6c9b, 0xa6a6a2a6590451ff, 0xd2d26fd2debdb90c, 0xf5f5f3f5fb06f70e, +0x7979f979ef80f296, 0x6f6fa16f5fcede30, 0x91917e91fcef3f6d, 0x52525552aa07a4f8, +0x60609d6027fdc047, 0xbcbccabc89766535, 0x9b9b569baccd2b37, 0x8e8e028e048c018a, +0xa3a3b6a371155bd2, 0x0c0c300c603c186c, 0x7b7bf17bff8af684, 0x3535d435b5e16a80, +0x1d1d741de8693af5, 0xe0e0a7e05347ddb3, 0xd7d77bd7f6acb321, 0xc2c22fc25eed999c, +0x2e2eb82e6d965c43, 0x4b4b314b627a9629, 0xfefedffea321e15d, 0x575741578216aed5, +0x15155415a8412abd, 0x7777c1779fb6eee8, 0x3737dc37a5eb6e92, 0xe5e5b3e57b56d79e, +0x9f9f469f8cd92313, 0xf0f0e7f0d317fd23, 0x4a4a354a6a7f9420, 0xdada4fda9e95a944, +0x58587d58fa25b0a2, 0xc9c903c906ca8fcf, 0x2929a429558d527c, 0x0a0a280a5022145a, +0xb1b1feb1e14f7f50, 0xa0a0baa0691a5dc9, 0x6b6bb16b7fdad614, 0x85852e855cab17d9, +0xbdbdcebd8173673c, 0x5d5d695dd234ba8f, 0x1010401080502090, 0xf4f4f7f4f303f507, +0xcbcb0bcb16c08bdd, 0x3e3ef83eedc67cd3, 0x0505140528110a2d, 0x676781671fe6ce78, +0xe4e4b7e47353d597, 0x27279c2725bb4e02, 0x4141194132588273, 0x8b8b168b2c9d0ba7, +0xa7a7a6a7510153f6, 0x7d7de97dcf94fab2, 0x95956e95dcfb3749, 0xd8d847d88e9fad56, +0xfbfbcbfb8b30eb70, 0xeeee9fee2371c1cd, 0x7c7ced7cc791f8bb, 0x6666856617e3cc71, +0xdddd53dda68ea77b, 0x17175c17b84b2eaf, 0x4747014702468e45, 0x9e9e429e84dc211a, +0xcaca0fca1ec589d4, 0x2d2db42d75995a58, 0xbfbfc6bf9179632e, 0x07071c07381b0e3f, +0xadad8ead012347ac, 0x5a5a755aea2fb4b0, 0x838336836cb51bef, 0x3333cc3385ff66b6, +0x636391633ff2c65c, 0x02020802100a0412, 0xaaaa92aa39384993, 0x7171d971afa8e2de, +0xc8c807c80ecf8dc6, 0x19196419c87d32d1, 0x494939497270923b, 0xd9d943d9869aaf5f, +0xf2f2eff2c31df931, 0xe3e3abe34b48dba8, 0x5b5b715be22ab6b9, 0x88881a8834920dbc, +0x9a9a529aa4c8293e, 0x262698262dbe4c0b, 0x3232c8328dfa64bf, 0xb0b0fab0e94a7d59, +0xe9e983e91b6acff2, 
0x0f0f3c0f78331e77, 0xd5d573d5e6a6b733, 0x80803a8074ba1df4, +0xbebec2be997c6127, 0xcdcd13cd26de87eb, 0x3434d034bde46889, 0x48483d487a759032, +0xffffdbffab24e354, 0x7a7af57af78ff48d, 0x90907a90f4ea3d64, 0x5f5f615fc23ebe9d, +0x202080201da0403d, 0x6868bd6867d5d00f, 0x1a1a681ad07234ca, 0xaeae82ae192c41b7, +0xb4b4eab4c95e757d, 0x54544d549a19a8ce, 0x93937693ece53b7f, 0x222288220daa442f, +0x64648d6407e9c863, 0xf1f1e3f1db12ff2a, 0x7373d173bfa2e6cc, 0x12124812905a2482, +0x40401d403a5d807a, 0x0808200840281048, 0xc3c32bc356e89b95, 0xecec97ec337bc5df, +0xdbdb4bdb9690ab4d, 0xa1a1bea1611f5fc0, 0x8d8d0e8d1c830791, 0x3d3df43df5c97ac8, +0x97976697ccf1335b, 0x0000000000000000, 0xcfcf1bcf36d483f9, 0x2b2bac2b4587566e, +0x7676c57697b3ece1, 0x8282328264b019e6, 0xd6d67fd6fea9b128, 0x1b1b6c1bd87736c3, +0xb5b5eeb5c15b7774, 0xafaf86af112943be, 0x6a6ab56a77dfd41d, 0x50505d50ba0da0ea, +0x45450945124c8a57, 0xf3f3ebf3cb18fb38, 0x3030c0309df060ad, 0xefef9bef2b74c3c4, +0x3f3ffc3fe5c37eda, 0x55554955921caac7, 0xa2a2b2a2791059db, 0xeaea8fea0365c9e9, +0x656589650fecca6a, 0xbabad2bab9686903, 0x2f2fbc2f65935e4a, 0xc0c027c04ee79d8e, +0xdede5fdebe81a160, 0x1c1c701ce06c38fc, 0xfdfdd3fdbb2ee746, 0x4d4d294d52649a1f, +0x92927292e4e03976, 0x7575c9758fbceafa, 0x06061806301e0c36, 0x8a8a128a249809ae, +0xb2b2f2b2f940794b, 0xe6e6bfe66359d185, 0x0e0e380e70361c7e, 0x1f1f7c1ff8633ee7, +0x6262956237f7c455, 0xd4d477d4eea3b53a, 0xa8a89aa829324d81, 0x96966296c4f43152, +0xf9f9c3f99b3aef62, 0xc5c533c566f697a3, 0x2525942535b14a10, 0x59597959f220b2ab, +0x84842a8454ae15d0, 0x7272d572b7a7e4c5, 0x3939e439d5dd72ec, 0x4c4c2d4c5a619816, +0x5e5e655eca3bbc94, 0x7878fd78e785f09f, 0x3838e038ddd870e5, 0x8c8c0a8c14860598, +0xd1d163d1c6b2bf17, 0xa5a5aea5410b57e4, 0xe2e2afe2434dd9a1, 0x616199612ff8c24e, +0xb3b3f6b3f1457b42, 0x2121842115a54234, 0x9c9c4a9c94d62508, 0x1e1e781ef0663cee, +0x4343114322528661, 0xc7c73bc776fc93b1, 0xfcfcd7fcb32be54f, 0x0404100420140824, +0x51515951b208a2e3, 0x99995e99bcc72f25, 0x6d6da96d4fc4da22, 
0x0d0d340d68391a65, +0xfafacffa8335e979, 0xdfdf5bdfb684a369, 0x7e7ee57ed79bfca9, 0x242490243db44819, +0x3b3bec3bc5d776fe, 0xabab96ab313d4b9a, 0xcece1fce3ed181f0, 0x1111441188552299, +0x8f8f068f0c890383, 0x4e4e254e4a6b9c04, 0xb7b7e6b7d1517366, 0xebeb8beb0b60cbe0, +0x3c3cf03cfdcc78c1, 0x81813e817cbf1ffd, 0x94946a94d4fe3540, 0xf7f7fbf7eb0cf31c, +0xb9b9deb9a1676f18, 0x13134c13985f268b, 0x2c2cb02c7d9c5851, 0xd3d36bd3d6b8bb05, +0xe7e7bbe76b5cd38c, 0x6e6ea56e57cbdc39, 0xc4c437c46ef395aa, 0x03030c03180f061b, +0x565645568a13acdc, 0x44440d441a49885e, 0x7f7fe17fdf9efea0, 0xa9a99ea921374f88, +0x2a2aa82a4d825467, 0xbbbbd6bbb16d6b0a, 0xc1c123c146e29f87, 0x53535153a202a6f1, +0xdcdc57dcae8ba572, 0x0b0b2c0b58271653, 0x9d9d4e9d9cd32701, 0x6c6cad6c47c1d82b, +0x3131c43195f562a4, 0x7474cd7487b9e8f3, 0xf6f6fff6e309f115, 0x464605460a438c4c, +0xacac8aac092645a5, 0x89891e893c970fb5, 0x14145014a04428b4, 0xe1e1a3e15b42dfba, +0x16165816b04e2ca6, 0x3a3ae83acdd274f7, 0x6969b9696fd0d206, 0x09092409482d1241, +0x7070dd70a7ade0d7, 0xb6b6e2b6d954716f, 0xd0d067d0ceb7bd1e, 0xeded93ed3b7ec7d6, +0xcccc17cc2edb85e2, 0x424215422a578468, 0x98985a98b4c22d2c, 0xa4a4aaa4490e55ed, +0x2828a0285d885075, 0x5c5c6d5cda31b886, 0xf8f8c7f8933fed6b, 0x8686228644a411c2, +] +C1 = [ +0xd818186018c07830, 0x2623238c2305af46, 0xb8c6c63fc67ef991, 0xfbe8e887e8136fcd, +0xcb878726874ca113, 0x11b8b8dab8a9626d, 0x0901010401080502, 0x0d4f4f214f426e9e, +0x9b3636d836adee6c, 0xffa6a6a2a6590451, 0x0cd2d26fd2debdb9, 0x0ef5f5f3f5fb06f7, +0x967979f979ef80f2, 0x306f6fa16f5fcede, 0x6d91917e91fcef3f, 0xf852525552aa07a4, +0x4760609d6027fdc0, 0x35bcbccabc897665, 0x379b9b569baccd2b, 0x8a8e8e028e048c01, +0xd2a3a3b6a371155b, 0x6c0c0c300c603c18, 0x847b7bf17bff8af6, 0x803535d435b5e16a, +0xf51d1d741de8693a, 0xb3e0e0a7e05347dd, 0x21d7d77bd7f6acb3, 0x9cc2c22fc25eed99, +0x432e2eb82e6d965c, 0x294b4b314b627a96, 0x5dfefedffea321e1, 0xd5575741578216ae, +0xbd15155415a8412a, 0xe87777c1779fb6ee, 0x923737dc37a5eb6e, 0x9ee5e5b3e57b56d7, +0x139f9f469f8cd923, 
0x23f0f0e7f0d317fd, 0x204a4a354a6a7f94, 0x44dada4fda9e95a9, +0xa258587d58fa25b0, 0xcfc9c903c906ca8f, 0x7c2929a429558d52, 0x5a0a0a280a502214, +0x50b1b1feb1e14f7f, 0xc9a0a0baa0691a5d, 0x146b6bb16b7fdad6, 0xd985852e855cab17, +0x3cbdbdcebd817367, 0x8f5d5d695dd234ba, 0x9010104010805020, 0x07f4f4f7f4f303f5, +0xddcbcb0bcb16c08b, 0xd33e3ef83eedc67c, 0x2d0505140528110a, 0x78676781671fe6ce, +0x97e4e4b7e47353d5, 0x0227279c2725bb4e, 0x7341411941325882, 0xa78b8b168b2c9d0b, +0xf6a7a7a6a7510153, 0xb27d7de97dcf94fa, 0x4995956e95dcfb37, 0x56d8d847d88e9fad, +0x70fbfbcbfb8b30eb, 0xcdeeee9fee2371c1, 0xbb7c7ced7cc791f8, 0x716666856617e3cc, +0x7bdddd53dda68ea7, 0xaf17175c17b84b2e, 0x454747014702468e, 0x1a9e9e429e84dc21, +0xd4caca0fca1ec589, 0x582d2db42d75995a, 0x2ebfbfc6bf917963, 0x3f07071c07381b0e, +0xacadad8ead012347, 0xb05a5a755aea2fb4, 0xef838336836cb51b, 0xb63333cc3385ff66, +0x5c636391633ff2c6, 0x1202020802100a04, 0x93aaaa92aa393849, 0xde7171d971afa8e2, +0xc6c8c807c80ecf8d, 0xd119196419c87d32, 0x3b49493949727092, 0x5fd9d943d9869aaf, +0x31f2f2eff2c31df9, 0xa8e3e3abe34b48db, 0xb95b5b715be22ab6, 0xbc88881a8834920d, +0x3e9a9a529aa4c829, 0x0b262698262dbe4c, 0xbf3232c8328dfa64, 0x59b0b0fab0e94a7d, +0xf2e9e983e91b6acf, 0x770f0f3c0f78331e, 0x33d5d573d5e6a6b7, 0xf480803a8074ba1d, +0x27bebec2be997c61, 0xebcdcd13cd26de87, 0x893434d034bde468, 0x3248483d487a7590, +0x54ffffdbffab24e3, 0x8d7a7af57af78ff4, 0x6490907a90f4ea3d, 0x9d5f5f615fc23ebe, +0x3d202080201da040, 0x0f6868bd6867d5d0, 0xca1a1a681ad07234, 0xb7aeae82ae192c41, +0x7db4b4eab4c95e75, 0xce54544d549a19a8, 0x7f93937693ece53b, 0x2f222288220daa44, +0x6364648d6407e9c8, 0x2af1f1e3f1db12ff, 0xcc7373d173bfa2e6, 0x8212124812905a24, +0x7a40401d403a5d80, 0x4808082008402810, 0x95c3c32bc356e89b, 0xdfecec97ec337bc5, +0x4ddbdb4bdb9690ab, 0xc0a1a1bea1611f5f, 0x918d8d0e8d1c8307, 0xc83d3df43df5c97a, +0x5b97976697ccf133, 0x0000000000000000, 0xf9cfcf1bcf36d483, 0x6e2b2bac2b458756, +0xe17676c57697b3ec, 0xe68282328264b019, 0x28d6d67fd6fea9b1, 
0xc31b1b6c1bd87736, +0x74b5b5eeb5c15b77, 0xbeafaf86af112943, 0x1d6a6ab56a77dfd4, 0xea50505d50ba0da0, +0x5745450945124c8a, 0x38f3f3ebf3cb18fb, 0xad3030c0309df060, 0xc4efef9bef2b74c3, +0xda3f3ffc3fe5c37e, 0xc755554955921caa, 0xdba2a2b2a2791059, 0xe9eaea8fea0365c9, +0x6a656589650fecca, 0x03babad2bab96869, 0x4a2f2fbc2f65935e, 0x8ec0c027c04ee79d, +0x60dede5fdebe81a1, 0xfc1c1c701ce06c38, 0x46fdfdd3fdbb2ee7, 0x1f4d4d294d52649a, +0x7692927292e4e039, 0xfa7575c9758fbcea, 0x3606061806301e0c, 0xae8a8a128a249809, +0x4bb2b2f2b2f94079, 0x85e6e6bfe66359d1, 0x7e0e0e380e70361c, 0xe71f1f7c1ff8633e, +0x556262956237f7c4, 0x3ad4d477d4eea3b5, 0x81a8a89aa829324d, 0x5296966296c4f431, +0x62f9f9c3f99b3aef, 0xa3c5c533c566f697, 0x102525942535b14a, 0xab59597959f220b2, +0xd084842a8454ae15, 0xc57272d572b7a7e4, 0xec3939e439d5dd72, 0x164c4c2d4c5a6198, +0x945e5e655eca3bbc, 0x9f7878fd78e785f0, 0xe53838e038ddd870, 0x988c8c0a8c148605, +0x17d1d163d1c6b2bf, 0xe4a5a5aea5410b57, 0xa1e2e2afe2434dd9, 0x4e616199612ff8c2, +0x42b3b3f6b3f1457b, 0x342121842115a542, 0x089c9c4a9c94d625, 0xee1e1e781ef0663c, +0x6143431143225286, 0xb1c7c73bc776fc93, 0x4ffcfcd7fcb32be5, 0x2404041004201408, +0xe351515951b208a2, 0x2599995e99bcc72f, 0x226d6da96d4fc4da, 0x650d0d340d68391a, +0x79fafacffa8335e9, 0x69dfdf5bdfb684a3, 0xa97e7ee57ed79bfc, 0x19242490243db448, +0xfe3b3bec3bc5d776, 0x9aabab96ab313d4b, 0xf0cece1fce3ed181, 0x9911114411885522, +0x838f8f068f0c8903, 0x044e4e254e4a6b9c, 0x66b7b7e6b7d15173, 0xe0ebeb8beb0b60cb, +0xc13c3cf03cfdcc78, 0xfd81813e817cbf1f, 0x4094946a94d4fe35, 0x1cf7f7fbf7eb0cf3, +0x18b9b9deb9a1676f, 0x8b13134c13985f26, 0x512c2cb02c7d9c58, 0x05d3d36bd3d6b8bb, +0x8ce7e7bbe76b5cd3, 0x396e6ea56e57cbdc, 0xaac4c437c46ef395, 0x1b03030c03180f06, +0xdc565645568a13ac, 0x5e44440d441a4988, 0xa07f7fe17fdf9efe, 0x88a9a99ea921374f, +0x672a2aa82a4d8254, 0x0abbbbd6bbb16d6b, 0x87c1c123c146e29f, 0xf153535153a202a6, +0x72dcdc57dcae8ba5, 0x530b0b2c0b582716, 0x019d9d4e9d9cd327, 0x2b6c6cad6c47c1d8, +0xa43131c43195f562, 
0xf37474cd7487b9e8, 0x15f6f6fff6e309f1, 0x4c464605460a438c, +0xa5acac8aac092645, 0xb589891e893c970f, 0xb414145014a04428, 0xbae1e1a3e15b42df, +0xa616165816b04e2c, 0xf73a3ae83acdd274, 0x066969b9696fd0d2, 0x4109092409482d12, +0xd77070dd70a7ade0, 0x6fb6b6e2b6d95471, 0x1ed0d067d0ceb7bd, 0xd6eded93ed3b7ec7, +0xe2cccc17cc2edb85, 0x68424215422a5784, 0x2c98985a98b4c22d, 0xeda4a4aaa4490e55, +0x752828a0285d8850, 0x865c5c6d5cda31b8, 0x6bf8f8c7f8933fed, 0xc28686228644a411, +] +C2 = [ +0x30d818186018c078, 0x462623238c2305af, 0x91b8c6c63fc67ef9, 0xcdfbe8e887e8136f, +0x13cb878726874ca1, 0x6d11b8b8dab8a962, 0x0209010104010805, 0x9e0d4f4f214f426e, +0x6c9b3636d836adee, 0x51ffa6a6a2a65904, 0xb90cd2d26fd2debd, 0xf70ef5f5f3f5fb06, +0xf2967979f979ef80, 0xde306f6fa16f5fce, 0x3f6d91917e91fcef, 0xa4f852525552aa07, +0xc04760609d6027fd, 0x6535bcbccabc8976, 0x2b379b9b569baccd, 0x018a8e8e028e048c, +0x5bd2a3a3b6a37115, 0x186c0c0c300c603c, 0xf6847b7bf17bff8a, 0x6a803535d435b5e1, +0x3af51d1d741de869, 0xddb3e0e0a7e05347, 0xb321d7d77bd7f6ac, 0x999cc2c22fc25eed, +0x5c432e2eb82e6d96, 0x96294b4b314b627a, 0xe15dfefedffea321, 0xaed5575741578216, +0x2abd15155415a841, 0xeee87777c1779fb6, 0x6e923737dc37a5eb, 0xd79ee5e5b3e57b56, +0x23139f9f469f8cd9, 0xfd23f0f0e7f0d317, 0x94204a4a354a6a7f, 0xa944dada4fda9e95, +0xb0a258587d58fa25, 0x8fcfc9c903c906ca, 0x527c2929a429558d, 0x145a0a0a280a5022, +0x7f50b1b1feb1e14f, 0x5dc9a0a0baa0691a, 0xd6146b6bb16b7fda, 0x17d985852e855cab, +0x673cbdbdcebd8173, 0xba8f5d5d695dd234, 0x2090101040108050, 0xf507f4f4f7f4f303, +0x8bddcbcb0bcb16c0, 0x7cd33e3ef83eedc6, 0x0a2d050514052811, 0xce78676781671fe6, +0xd597e4e4b7e47353, 0x4e0227279c2725bb, 0x8273414119413258, 0x0ba78b8b168b2c9d, +0x53f6a7a7a6a75101, 0xfab27d7de97dcf94, 0x374995956e95dcfb, 0xad56d8d847d88e9f, +0xeb70fbfbcbfb8b30, 0xc1cdeeee9fee2371, 0xf8bb7c7ced7cc791, 0xcc716666856617e3, +0xa77bdddd53dda68e, 0x2eaf17175c17b84b, 0x8e45474701470246, 0x211a9e9e429e84dc, +0x89d4caca0fca1ec5, 0x5a582d2db42d7599, 0x632ebfbfc6bf9179, 
0x0e3f07071c07381b, +0x47acadad8ead0123, 0xb4b05a5a755aea2f, 0x1bef838336836cb5, 0x66b63333cc3385ff, +0xc65c636391633ff2, 0x041202020802100a, 0x4993aaaa92aa3938, 0xe2de7171d971afa8, +0x8dc6c8c807c80ecf, 0x32d119196419c87d, 0x923b494939497270, 0xaf5fd9d943d9869a, +0xf931f2f2eff2c31d, 0xdba8e3e3abe34b48, 0xb6b95b5b715be22a, 0x0dbc88881a883492, +0x293e9a9a529aa4c8, 0x4c0b262698262dbe, 0x64bf3232c8328dfa, 0x7d59b0b0fab0e94a, +0xcff2e9e983e91b6a, 0x1e770f0f3c0f7833, 0xb733d5d573d5e6a6, 0x1df480803a8074ba, +0x6127bebec2be997c, 0x87ebcdcd13cd26de, 0x68893434d034bde4, 0x903248483d487a75, +0xe354ffffdbffab24, 0xf48d7a7af57af78f, 0x3d6490907a90f4ea, 0xbe9d5f5f615fc23e, +0x403d202080201da0, 0xd00f6868bd6867d5, 0x34ca1a1a681ad072, 0x41b7aeae82ae192c, +0x757db4b4eab4c95e, 0xa8ce54544d549a19, 0x3b7f93937693ece5, 0x442f222288220daa, +0xc86364648d6407e9, 0xff2af1f1e3f1db12, 0xe6cc7373d173bfa2, 0x248212124812905a, +0x807a40401d403a5d, 0x1048080820084028, 0x9b95c3c32bc356e8, 0xc5dfecec97ec337b, +0xab4ddbdb4bdb9690, 0x5fc0a1a1bea1611f, 0x07918d8d0e8d1c83, 0x7ac83d3df43df5c9, +0x335b97976697ccf1, 0x0000000000000000, 0x83f9cfcf1bcf36d4, 0x566e2b2bac2b4587, +0xece17676c57697b3, 0x19e68282328264b0, 0xb128d6d67fd6fea9, 0x36c31b1b6c1bd877, +0x7774b5b5eeb5c15b, 0x43beafaf86af1129, 0xd41d6a6ab56a77df, 0xa0ea50505d50ba0d, +0x8a5745450945124c, 0xfb38f3f3ebf3cb18, 0x60ad3030c0309df0, 0xc3c4efef9bef2b74, +0x7eda3f3ffc3fe5c3, 0xaac755554955921c, 0x59dba2a2b2a27910, 0xc9e9eaea8fea0365, +0xca6a656589650fec, 0x6903babad2bab968, 0x5e4a2f2fbc2f6593, 0x9d8ec0c027c04ee7, +0xa160dede5fdebe81, 0x38fc1c1c701ce06c, 0xe746fdfdd3fdbb2e, 0x9a1f4d4d294d5264, +0x397692927292e4e0, 0xeafa7575c9758fbc, 0x0c3606061806301e, 0x09ae8a8a128a2498, +0x794bb2b2f2b2f940, 0xd185e6e6bfe66359, 0x1c7e0e0e380e7036, 0x3ee71f1f7c1ff863, +0xc4556262956237f7, 0xb53ad4d477d4eea3, 0x4d81a8a89aa82932, 0x315296966296c4f4, +0xef62f9f9c3f99b3a, 0x97a3c5c533c566f6, 0x4a102525942535b1, 0xb2ab59597959f220, +0x15d084842a8454ae, 
0xe4c57272d572b7a7, 0x72ec3939e439d5dd, 0x98164c4c2d4c5a61, +0xbc945e5e655eca3b, 0xf09f7878fd78e785, 0x70e53838e038ddd8, 0x05988c8c0a8c1486, +0xbf17d1d163d1c6b2, 0x57e4a5a5aea5410b, 0xd9a1e2e2afe2434d, 0xc24e616199612ff8, +0x7b42b3b3f6b3f145, 0x42342121842115a5, 0x25089c9c4a9c94d6, 0x3cee1e1e781ef066, +0x8661434311432252, 0x93b1c7c73bc776fc, 0xe54ffcfcd7fcb32b, 0x0824040410042014, +0xa2e351515951b208, 0x2f2599995e99bcc7, 0xda226d6da96d4fc4, 0x1a650d0d340d6839, +0xe979fafacffa8335, 0xa369dfdf5bdfb684, 0xfca97e7ee57ed79b, 0x4819242490243db4, +0x76fe3b3bec3bc5d7, 0x4b9aabab96ab313d, 0x81f0cece1fce3ed1, 0x2299111144118855, +0x03838f8f068f0c89, 0x9c044e4e254e4a6b, 0x7366b7b7e6b7d151, 0xcbe0ebeb8beb0b60, +0x78c13c3cf03cfdcc, 0x1ffd81813e817cbf, 0x354094946a94d4fe, 0xf31cf7f7fbf7eb0c, +0x6f18b9b9deb9a167, 0x268b13134c13985f, 0x58512c2cb02c7d9c, 0xbb05d3d36bd3d6b8, +0xd38ce7e7bbe76b5c, 0xdc396e6ea56e57cb, 0x95aac4c437c46ef3, 0x061b03030c03180f, +0xacdc565645568a13, 0x885e44440d441a49, 0xfea07f7fe17fdf9e, 0x4f88a9a99ea92137, +0x54672a2aa82a4d82, 0x6b0abbbbd6bbb16d, 0x9f87c1c123c146e2, 0xa6f153535153a202, +0xa572dcdc57dcae8b, 0x16530b0b2c0b5827, 0x27019d9d4e9d9cd3, 0xd82b6c6cad6c47c1, +0x62a43131c43195f5, 0xe8f37474cd7487b9, 0xf115f6f6fff6e309, 0x8c4c464605460a43, +0x45a5acac8aac0926, 0x0fb589891e893c97, 0x28b414145014a044, 0xdfbae1e1a3e15b42, +0x2ca616165816b04e, 0x74f73a3ae83acdd2, 0xd2066969b9696fd0, 0x124109092409482d, +0xe0d77070dd70a7ad, 0x716fb6b6e2b6d954, 0xbd1ed0d067d0ceb7, 0xc7d6eded93ed3b7e, +0x85e2cccc17cc2edb, 0x8468424215422a57, 0x2d2c98985a98b4c2, 0x55eda4a4aaa4490e, +0x50752828a0285d88, 0xb8865c5c6d5cda31, 0xed6bf8f8c7f8933f, 0x11c28686228644a4, +] +C3 = [ +0x7830d818186018c0, 0xaf462623238c2305, 0xf991b8c6c63fc67e, 0x6fcdfbe8e887e813, +0xa113cb878726874c, 0x626d11b8b8dab8a9, 0x0502090101040108, 0x6e9e0d4f4f214f42, +0xee6c9b3636d836ad, 0x0451ffa6a6a2a659, 0xbdb90cd2d26fd2de, 0x06f70ef5f5f3f5fb, +0x80f2967979f979ef, 0xcede306f6fa16f5f, 0xef3f6d91917e91fc, 
0x07a4f852525552aa, +0xfdc04760609d6027, 0x766535bcbccabc89, 0xcd2b379b9b569bac, 0x8c018a8e8e028e04, +0x155bd2a3a3b6a371, 0x3c186c0c0c300c60, 0x8af6847b7bf17bff, 0xe16a803535d435b5, +0x693af51d1d741de8, 0x47ddb3e0e0a7e053, 0xacb321d7d77bd7f6, 0xed999cc2c22fc25e, +0x965c432e2eb82e6d, 0x7a96294b4b314b62, 0x21e15dfefedffea3, 0x16aed55757415782, +0x412abd15155415a8, 0xb6eee87777c1779f, 0xeb6e923737dc37a5, 0x56d79ee5e5b3e57b, +0xd923139f9f469f8c, 0x17fd23f0f0e7f0d3, 0x7f94204a4a354a6a, 0x95a944dada4fda9e, +0x25b0a258587d58fa, 0xca8fcfc9c903c906, 0x8d527c2929a42955, 0x22145a0a0a280a50, +0x4f7f50b1b1feb1e1, 0x1a5dc9a0a0baa069, 0xdad6146b6bb16b7f, 0xab17d985852e855c, +0x73673cbdbdcebd81, 0x34ba8f5d5d695dd2, 0x5020901010401080, 0x03f507f4f4f7f4f3, +0xc08bddcbcb0bcb16, 0xc67cd33e3ef83eed, 0x110a2d0505140528, 0xe6ce78676781671f, +0x53d597e4e4b7e473, 0xbb4e0227279c2725, 0x5882734141194132, 0x9d0ba78b8b168b2c, +0x0153f6a7a7a6a751, 0x94fab27d7de97dcf, 0xfb374995956e95dc, 0x9fad56d8d847d88e, +0x30eb70fbfbcbfb8b, 0x71c1cdeeee9fee23, 0x91f8bb7c7ced7cc7, 0xe3cc716666856617, +0x8ea77bdddd53dda6, 0x4b2eaf17175c17b8, 0x468e454747014702, 0xdc211a9e9e429e84, +0xc589d4caca0fca1e, 0x995a582d2db42d75, 0x79632ebfbfc6bf91, 0x1b0e3f07071c0738, +0x2347acadad8ead01, 0x2fb4b05a5a755aea, 0xb51bef838336836c, 0xff66b63333cc3385, +0xf2c65c636391633f, 0x0a04120202080210, 0x384993aaaa92aa39, 0xa8e2de7171d971af, +0xcf8dc6c8c807c80e, 0x7d32d119196419c8, 0x70923b4949394972, 0x9aaf5fd9d943d986, +0x1df931f2f2eff2c3, 0x48dba8e3e3abe34b, 0x2ab6b95b5b715be2, 0x920dbc88881a8834, +0xc8293e9a9a529aa4, 0xbe4c0b262698262d, 0xfa64bf3232c8328d, 0x4a7d59b0b0fab0e9, +0x6acff2e9e983e91b, 0x331e770f0f3c0f78, 0xa6b733d5d573d5e6, 0xba1df480803a8074, +0x7c6127bebec2be99, 0xde87ebcdcd13cd26, 0xe468893434d034bd, 0x75903248483d487a, +0x24e354ffffdbffab, 0x8ff48d7a7af57af7, 0xea3d6490907a90f4, 0x3ebe9d5f5f615fc2, +0xa0403d202080201d, 0xd5d00f6868bd6867, 0x7234ca1a1a681ad0, 0x2c41b7aeae82ae19, +0x5e757db4b4eab4c9, 
0x19a8ce54544d549a, 0xe53b7f93937693ec, 0xaa442f222288220d, +0xe9c86364648d6407, 0x12ff2af1f1e3f1db, 0xa2e6cc7373d173bf, 0x5a24821212481290, +0x5d807a40401d403a, 0x2810480808200840, 0xe89b95c3c32bc356, 0x7bc5dfecec97ec33, +0x90ab4ddbdb4bdb96, 0x1f5fc0a1a1bea161, 0x8307918d8d0e8d1c, 0xc97ac83d3df43df5, +0xf1335b97976697cc, 0x0000000000000000, 0xd483f9cfcf1bcf36, 0x87566e2b2bac2b45, +0xb3ece17676c57697, 0xb019e68282328264, 0xa9b128d6d67fd6fe, 0x7736c31b1b6c1bd8, +0x5b7774b5b5eeb5c1, 0x2943beafaf86af11, 0xdfd41d6a6ab56a77, 0x0da0ea50505d50ba, +0x4c8a574545094512, 0x18fb38f3f3ebf3cb, 0xf060ad3030c0309d, 0x74c3c4efef9bef2b, +0xc37eda3f3ffc3fe5, 0x1caac75555495592, 0x1059dba2a2b2a279, 0x65c9e9eaea8fea03, +0xecca6a656589650f, 0x686903babad2bab9, 0x935e4a2f2fbc2f65, 0xe79d8ec0c027c04e, +0x81a160dede5fdebe, 0x6c38fc1c1c701ce0, 0x2ee746fdfdd3fdbb, 0x649a1f4d4d294d52, +0xe0397692927292e4, 0xbceafa7575c9758f, 0x1e0c360606180630, 0x9809ae8a8a128a24, +0x40794bb2b2f2b2f9, 0x59d185e6e6bfe663, 0x361c7e0e0e380e70, 0x633ee71f1f7c1ff8, +0xf7c4556262956237, 0xa3b53ad4d477d4ee, 0x324d81a8a89aa829, 0xf4315296966296c4, +0x3aef62f9f9c3f99b, 0xf697a3c5c533c566, 0xb14a102525942535, 0x20b2ab59597959f2, +0xae15d084842a8454, 0xa7e4c57272d572b7, 0xdd72ec3939e439d5, 0x6198164c4c2d4c5a, +0x3bbc945e5e655eca, 0x85f09f7878fd78e7, 0xd870e53838e038dd, 0x8605988c8c0a8c14, +0xb2bf17d1d163d1c6, 0x0b57e4a5a5aea541, 0x4dd9a1e2e2afe243, 0xf8c24e616199612f, +0x457b42b3b3f6b3f1, 0xa542342121842115, 0xd625089c9c4a9c94, 0x663cee1e1e781ef0, +0x5286614343114322, 0xfc93b1c7c73bc776, 0x2be54ffcfcd7fcb3, 0x1408240404100420, +0x08a2e351515951b2, 0xc72f2599995e99bc, 0xc4da226d6da96d4f, 0x391a650d0d340d68, +0x35e979fafacffa83, 0x84a369dfdf5bdfb6, 0x9bfca97e7ee57ed7, 0xb44819242490243d, +0xd776fe3b3bec3bc5, 0x3d4b9aabab96ab31, 0xd181f0cece1fce3e, 0x5522991111441188, +0x8903838f8f068f0c, 0x6b9c044e4e254e4a, 0x517366b7b7e6b7d1, 0x60cbe0ebeb8beb0b, +0xcc78c13c3cf03cfd, 0xbf1ffd81813e817c, 0xfe354094946a94d4, 
0x0cf31cf7f7fbf7eb, +0x676f18b9b9deb9a1, 0x5f268b13134c1398, 0x9c58512c2cb02c7d, 0xb8bb05d3d36bd3d6, +0x5cd38ce7e7bbe76b, 0xcbdc396e6ea56e57, 0xf395aac4c437c46e, 0x0f061b03030c0318, +0x13acdc565645568a, 0x49885e44440d441a, 0x9efea07f7fe17fdf, 0x374f88a9a99ea921, +0x8254672a2aa82a4d, 0x6d6b0abbbbd6bbb1, 0xe29f87c1c123c146, 0x02a6f153535153a2, +0x8ba572dcdc57dcae, 0x2716530b0b2c0b58, 0xd327019d9d4e9d9c, 0xc1d82b6c6cad6c47, +0xf562a43131c43195, 0xb9e8f37474cd7487, 0x09f115f6f6fff6e3, 0x438c4c464605460a, +0x2645a5acac8aac09, 0x970fb589891e893c, 0x4428b414145014a0, 0x42dfbae1e1a3e15b, +0x4e2ca616165816b0, 0xd274f73a3ae83acd, 0xd0d2066969b9696f, 0x2d12410909240948, +0xade0d77070dd70a7, 0x54716fb6b6e2b6d9, 0xb7bd1ed0d067d0ce, 0x7ec7d6eded93ed3b, +0xdb85e2cccc17cc2e, 0x578468424215422a, 0xc22d2c98985a98b4, 0x0e55eda4a4aaa449, +0x8850752828a0285d, 0x31b8865c5c6d5cda, 0x3fed6bf8f8c7f893, 0xa411c28686228644, +] +C4 = [ +0xc07830d818186018, 0x05af462623238c23, 0x7ef991b8c6c63fc6, 0x136fcdfbe8e887e8, +0x4ca113cb87872687, 0xa9626d11b8b8dab8, 0x0805020901010401, 0x426e9e0d4f4f214f, +0xadee6c9b3636d836, 0x590451ffa6a6a2a6, 0xdebdb90cd2d26fd2, 0xfb06f70ef5f5f3f5, +0xef80f2967979f979, 0x5fcede306f6fa16f, 0xfcef3f6d91917e91, 0xaa07a4f852525552, +0x27fdc04760609d60, 0x89766535bcbccabc, 0xaccd2b379b9b569b, 0x048c018a8e8e028e, +0x71155bd2a3a3b6a3, 0x603c186c0c0c300c, 0xff8af6847b7bf17b, 0xb5e16a803535d435, +0xe8693af51d1d741d, 0x5347ddb3e0e0a7e0, 0xf6acb321d7d77bd7, 0x5eed999cc2c22fc2, +0x6d965c432e2eb82e, 0x627a96294b4b314b, 0xa321e15dfefedffe, 0x8216aed557574157, +0xa8412abd15155415, 0x9fb6eee87777c177, 0xa5eb6e923737dc37, 0x7b56d79ee5e5b3e5, +0x8cd923139f9f469f, 0xd317fd23f0f0e7f0, 0x6a7f94204a4a354a, 0x9e95a944dada4fda, +0xfa25b0a258587d58, 0x06ca8fcfc9c903c9, 0x558d527c2929a429, 0x5022145a0a0a280a, +0xe14f7f50b1b1feb1, 0x691a5dc9a0a0baa0, 0x7fdad6146b6bb16b, 0x5cab17d985852e85, +0x8173673cbdbdcebd, 0xd234ba8f5d5d695d, 0x8050209010104010, 0xf303f507f4f4f7f4, +0x16c08bddcbcb0bcb, 
0xedc67cd33e3ef83e, 0x28110a2d05051405, 0x1fe6ce7867678167, +0x7353d597e4e4b7e4, 0x25bb4e0227279c27, 0x3258827341411941, 0x2c9d0ba78b8b168b, +0x510153f6a7a7a6a7, 0xcf94fab27d7de97d, 0xdcfb374995956e95, 0x8e9fad56d8d847d8, +0x8b30eb70fbfbcbfb, 0x2371c1cdeeee9fee, 0xc791f8bb7c7ced7c, 0x17e3cc7166668566, +0xa68ea77bdddd53dd, 0xb84b2eaf17175c17, 0x02468e4547470147, 0x84dc211a9e9e429e, +0x1ec589d4caca0fca, 0x75995a582d2db42d, 0x9179632ebfbfc6bf, 0x381b0e3f07071c07, +0x012347acadad8ead, 0xea2fb4b05a5a755a, 0x6cb51bef83833683, 0x85ff66b63333cc33, +0x3ff2c65c63639163, 0x100a041202020802, 0x39384993aaaa92aa, 0xafa8e2de7171d971, +0x0ecf8dc6c8c807c8, 0xc87d32d119196419, 0x7270923b49493949, 0x869aaf5fd9d943d9, +0xc31df931f2f2eff2, 0x4b48dba8e3e3abe3, 0xe22ab6b95b5b715b, 0x34920dbc88881a88, +0xa4c8293e9a9a529a, 0x2dbe4c0b26269826, 0x8dfa64bf3232c832, 0xe94a7d59b0b0fab0, +0x1b6acff2e9e983e9, 0x78331e770f0f3c0f, 0xe6a6b733d5d573d5, 0x74ba1df480803a80, +0x997c6127bebec2be, 0x26de87ebcdcd13cd, 0xbde468893434d034, 0x7a75903248483d48, +0xab24e354ffffdbff, 0xf78ff48d7a7af57a, 0xf4ea3d6490907a90, 0xc23ebe9d5f5f615f, +0x1da0403d20208020, 0x67d5d00f6868bd68, 0xd07234ca1a1a681a, 0x192c41b7aeae82ae, +0xc95e757db4b4eab4, 0x9a19a8ce54544d54, 0xece53b7f93937693, 0x0daa442f22228822, +0x07e9c86364648d64, 0xdb12ff2af1f1e3f1, 0xbfa2e6cc7373d173, 0x905a248212124812, +0x3a5d807a40401d40, 0x4028104808082008, 0x56e89b95c3c32bc3, 0x337bc5dfecec97ec, +0x9690ab4ddbdb4bdb, 0x611f5fc0a1a1bea1, 0x1c8307918d8d0e8d, 0xf5c97ac83d3df43d, +0xccf1335b97976697, 0x0000000000000000, 0x36d483f9cfcf1bcf, 0x4587566e2b2bac2b, +0x97b3ece17676c576, 0x64b019e682823282, 0xfea9b128d6d67fd6, 0xd87736c31b1b6c1b, +0xc15b7774b5b5eeb5, 0x112943beafaf86af, 0x77dfd41d6a6ab56a, 0xba0da0ea50505d50, +0x124c8a5745450945, 0xcb18fb38f3f3ebf3, 0x9df060ad3030c030, 0x2b74c3c4efef9bef, +0xe5c37eda3f3ffc3f, 0x921caac755554955, 0x791059dba2a2b2a2, 0x0365c9e9eaea8fea, +0x0fecca6a65658965, 0xb9686903babad2ba, 0x65935e4a2f2fbc2f, 
0x4ee79d8ec0c027c0, +0xbe81a160dede5fde, 0xe06c38fc1c1c701c, 0xbb2ee746fdfdd3fd, 0x52649a1f4d4d294d, +0xe4e0397692927292, 0x8fbceafa7575c975, 0x301e0c3606061806, 0x249809ae8a8a128a, +0xf940794bb2b2f2b2, 0x6359d185e6e6bfe6, 0x70361c7e0e0e380e, 0xf8633ee71f1f7c1f, +0x37f7c45562629562, 0xeea3b53ad4d477d4, 0x29324d81a8a89aa8, 0xc4f4315296966296, +0x9b3aef62f9f9c3f9, 0x66f697a3c5c533c5, 0x35b14a1025259425, 0xf220b2ab59597959, +0x54ae15d084842a84, 0xb7a7e4c57272d572, 0xd5dd72ec3939e439, 0x5a6198164c4c2d4c, +0xca3bbc945e5e655e, 0xe785f09f7878fd78, 0xddd870e53838e038, 0x148605988c8c0a8c, +0xc6b2bf17d1d163d1, 0x410b57e4a5a5aea5, 0x434dd9a1e2e2afe2, 0x2ff8c24e61619961, +0xf1457b42b3b3f6b3, 0x15a5423421218421, 0x94d625089c9c4a9c, 0xf0663cee1e1e781e, +0x2252866143431143, 0x76fc93b1c7c73bc7, 0xb32be54ffcfcd7fc, 0x2014082404041004, +0xb208a2e351515951, 0xbcc72f2599995e99, 0x4fc4da226d6da96d, 0x68391a650d0d340d, +0x8335e979fafacffa, 0xb684a369dfdf5bdf, 0xd79bfca97e7ee57e, 0x3db4481924249024, +0xc5d776fe3b3bec3b, 0x313d4b9aabab96ab, 0x3ed181f0cece1fce, 0x8855229911114411, +0x0c8903838f8f068f, 0x4a6b9c044e4e254e, 0xd1517366b7b7e6b7, 0x0b60cbe0ebeb8beb, +0xfdcc78c13c3cf03c, 0x7cbf1ffd81813e81, 0xd4fe354094946a94, 0xeb0cf31cf7f7fbf7, +0xa1676f18b9b9deb9, 0x985f268b13134c13, 0x7d9c58512c2cb02c, 0xd6b8bb05d3d36bd3, +0x6b5cd38ce7e7bbe7, 0x57cbdc396e6ea56e, 0x6ef395aac4c437c4, 0x180f061b03030c03, +0x8a13acdc56564556, 0x1a49885e44440d44, 0xdf9efea07f7fe17f, 0x21374f88a9a99ea9, +0x4d8254672a2aa82a, 0xb16d6b0abbbbd6bb, 0x46e29f87c1c123c1, 0xa202a6f153535153, +0xae8ba572dcdc57dc, 0x582716530b0b2c0b, 0x9cd327019d9d4e9d, 0x47c1d82b6c6cad6c, +0x95f562a43131c431, 0x87b9e8f37474cd74, 0xe309f115f6f6fff6, 0x0a438c4c46460546, +0x092645a5acac8aac, 0x3c970fb589891e89, 0xa04428b414145014, 0x5b42dfbae1e1a3e1, +0xb04e2ca616165816, 0xcdd274f73a3ae83a, 0x6fd0d2066969b969, 0x482d124109092409, +0xa7ade0d77070dd70, 0xd954716fb6b6e2b6, 0xceb7bd1ed0d067d0, 0x3b7ec7d6eded93ed, +0x2edb85e2cccc17cc, 
0x2a57846842421542, 0xb4c22d2c98985a98, 0x490e55eda4a4aaa4, +0x5d8850752828a028, 0xda31b8865c5c6d5c, 0x933fed6bf8f8c7f8, 0x44a411c286862286, +] +C5 = [ +0x18c07830d8181860, 0x2305af462623238c, 0xc67ef991b8c6c63f, 0xe8136fcdfbe8e887, +0x874ca113cb878726, 0xb8a9626d11b8b8da, 0x0108050209010104, 0x4f426e9e0d4f4f21, +0x36adee6c9b3636d8, 0xa6590451ffa6a6a2, 0xd2debdb90cd2d26f, 0xf5fb06f70ef5f5f3, +0x79ef80f2967979f9, 0x6f5fcede306f6fa1, 0x91fcef3f6d91917e, 0x52aa07a4f8525255, +0x6027fdc04760609d, 0xbc89766535bcbcca, 0x9baccd2b379b9b56, 0x8e048c018a8e8e02, +0xa371155bd2a3a3b6, 0x0c603c186c0c0c30, 0x7bff8af6847b7bf1, 0x35b5e16a803535d4, +0x1de8693af51d1d74, 0xe05347ddb3e0e0a7, 0xd7f6acb321d7d77b, 0xc25eed999cc2c22f, +0x2e6d965c432e2eb8, 0x4b627a96294b4b31, 0xfea321e15dfefedf, 0x578216aed5575741, +0x15a8412abd151554, 0x779fb6eee87777c1, 0x37a5eb6e923737dc, 0xe57b56d79ee5e5b3, +0x9f8cd923139f9f46, 0xf0d317fd23f0f0e7, 0x4a6a7f94204a4a35, 0xda9e95a944dada4f, +0x58fa25b0a258587d, 0xc906ca8fcfc9c903, 0x29558d527c2929a4, 0x0a5022145a0a0a28, +0xb1e14f7f50b1b1fe, 0xa0691a5dc9a0a0ba, 0x6b7fdad6146b6bb1, 0x855cab17d985852e, +0xbd8173673cbdbdce, 0x5dd234ba8f5d5d69, 0x1080502090101040, 0xf4f303f507f4f4f7, +0xcb16c08bddcbcb0b, 0x3eedc67cd33e3ef8, 0x0528110a2d050514, 0x671fe6ce78676781, +0xe47353d597e4e4b7, 0x2725bb4e0227279c, 0x4132588273414119, 0x8b2c9d0ba78b8b16, +0xa7510153f6a7a7a6, 0x7dcf94fab27d7de9, 0x95dcfb374995956e, 0xd88e9fad56d8d847, +0xfb8b30eb70fbfbcb, 0xee2371c1cdeeee9f, 0x7cc791f8bb7c7ced, 0x6617e3cc71666685, +0xdda68ea77bdddd53, 0x17b84b2eaf17175c, 0x4702468e45474701, 0x9e84dc211a9e9e42, +0xca1ec589d4caca0f, 0x2d75995a582d2db4, 0xbf9179632ebfbfc6, 0x07381b0e3f07071c, +0xad012347acadad8e, 0x5aea2fb4b05a5a75, 0x836cb51bef838336, 0x3385ff66b63333cc, +0x633ff2c65c636391, 0x02100a0412020208, 0xaa39384993aaaa92, 0x71afa8e2de7171d9, +0xc80ecf8dc6c8c807, 0x19c87d32d1191964, 0x497270923b494939, 0xd9869aaf5fd9d943, +0xf2c31df931f2f2ef, 0xe34b48dba8e3e3ab, 0x5be22ab6b95b5b71, 
0x8834920dbc88881a, +0x9aa4c8293e9a9a52, 0x262dbe4c0b262698, 0x328dfa64bf3232c8, 0xb0e94a7d59b0b0fa, +0xe91b6acff2e9e983, 0x0f78331e770f0f3c, 0xd5e6a6b733d5d573, 0x8074ba1df480803a, +0xbe997c6127bebec2, 0xcd26de87ebcdcd13, 0x34bde468893434d0, 0x487a75903248483d, +0xffab24e354ffffdb, 0x7af78ff48d7a7af5, 0x90f4ea3d6490907a, 0x5fc23ebe9d5f5f61, +0x201da0403d202080, 0x6867d5d00f6868bd, 0x1ad07234ca1a1a68, 0xae192c41b7aeae82, +0xb4c95e757db4b4ea, 0x549a19a8ce54544d, 0x93ece53b7f939376, 0x220daa442f222288, +0x6407e9c86364648d, 0xf1db12ff2af1f1e3, 0x73bfa2e6cc7373d1, 0x12905a2482121248, +0x403a5d807a40401d, 0x0840281048080820, 0xc356e89b95c3c32b, 0xec337bc5dfecec97, +0xdb9690ab4ddbdb4b, 0xa1611f5fc0a1a1be, 0x8d1c8307918d8d0e, 0x3df5c97ac83d3df4, +0x97ccf1335b979766, 0x0000000000000000, 0xcf36d483f9cfcf1b, 0x2b4587566e2b2bac, +0x7697b3ece17676c5, 0x8264b019e6828232, 0xd6fea9b128d6d67f, 0x1bd87736c31b1b6c, +0xb5c15b7774b5b5ee, 0xaf112943beafaf86, 0x6a77dfd41d6a6ab5, 0x50ba0da0ea50505d, +0x45124c8a57454509, 0xf3cb18fb38f3f3eb, 0x309df060ad3030c0, 0xef2b74c3c4efef9b, +0x3fe5c37eda3f3ffc, 0x55921caac7555549, 0xa2791059dba2a2b2, 0xea0365c9e9eaea8f, +0x650fecca6a656589, 0xbab9686903babad2, 0x2f65935e4a2f2fbc, 0xc04ee79d8ec0c027, +0xdebe81a160dede5f, 0x1ce06c38fc1c1c70, 0xfdbb2ee746fdfdd3, 0x4d52649a1f4d4d29, +0x92e4e03976929272, 0x758fbceafa7575c9, 0x06301e0c36060618, 0x8a249809ae8a8a12, +0xb2f940794bb2b2f2, 0xe66359d185e6e6bf, 0x0e70361c7e0e0e38, 0x1ff8633ee71f1f7c, +0x6237f7c455626295, 0xd4eea3b53ad4d477, 0xa829324d81a8a89a, 0x96c4f43152969662, +0xf99b3aef62f9f9c3, 0xc566f697a3c5c533, 0x2535b14a10252594, 0x59f220b2ab595979, +0x8454ae15d084842a, 0x72b7a7e4c57272d5, 0x39d5dd72ec3939e4, 0x4c5a6198164c4c2d, +0x5eca3bbc945e5e65, 0x78e785f09f7878fd, 0x38ddd870e53838e0, 0x8c148605988c8c0a, +0xd1c6b2bf17d1d163, 0xa5410b57e4a5a5ae, 0xe2434dd9a1e2e2af, 0x612ff8c24e616199, +0xb3f1457b42b3b3f6, 0x2115a54234212184, 0x9c94d625089c9c4a, 0x1ef0663cee1e1e78, +0x4322528661434311, 
0xc776fc93b1c7c73b, 0xfcb32be54ffcfcd7, 0x0420140824040410, +0x51b208a2e3515159, 0x99bcc72f2599995e, 0x6d4fc4da226d6da9, 0x0d68391a650d0d34, +0xfa8335e979fafacf, 0xdfb684a369dfdf5b, 0x7ed79bfca97e7ee5, 0x243db44819242490, +0x3bc5d776fe3b3bec, 0xab313d4b9aabab96, 0xce3ed181f0cece1f, 0x1188552299111144, +0x8f0c8903838f8f06, 0x4e4a6b9c044e4e25, 0xb7d1517366b7b7e6, 0xeb0b60cbe0ebeb8b, +0x3cfdcc78c13c3cf0, 0x817cbf1ffd81813e, 0x94d4fe354094946a, 0xf7eb0cf31cf7f7fb, +0xb9a1676f18b9b9de, 0x13985f268b13134c, 0x2c7d9c58512c2cb0, 0xd3d6b8bb05d3d36b, +0xe76b5cd38ce7e7bb, 0x6e57cbdc396e6ea5, 0xc46ef395aac4c437, 0x03180f061b03030c, +0x568a13acdc565645, 0x441a49885e44440d, 0x7fdf9efea07f7fe1, 0xa921374f88a9a99e, +0x2a4d8254672a2aa8, 0xbbb16d6b0abbbbd6, 0xc146e29f87c1c123, 0x53a202a6f1535351, +0xdcae8ba572dcdc57, 0x0b582716530b0b2c, 0x9d9cd327019d9d4e, 0x6c47c1d82b6c6cad, +0x3195f562a43131c4, 0x7487b9e8f37474cd, 0xf6e309f115f6f6ff, 0x460a438c4c464605, +0xac092645a5acac8a, 0x893c970fb589891e, 0x14a04428b4141450, 0xe15b42dfbae1e1a3, +0x16b04e2ca6161658, 0x3acdd274f73a3ae8, 0x696fd0d2066969b9, 0x09482d1241090924, +0x70a7ade0d77070dd, 0xb6d954716fb6b6e2, 0xd0ceb7bd1ed0d067, 0xed3b7ec7d6eded93, +0xcc2edb85e2cccc17, 0x422a578468424215, 0x98b4c22d2c98985a, 0xa4490e55eda4a4aa, +0x285d8850752828a0, 0x5cda31b8865c5c6d, 0xf8933fed6bf8f8c7, 0x8644a411c2868622, +] +C6 = [ +0x6018c07830d81818, 0x8c2305af46262323, 0x3fc67ef991b8c6c6, 0x87e8136fcdfbe8e8, +0x26874ca113cb8787, 0xdab8a9626d11b8b8, 0x0401080502090101, 0x214f426e9e0d4f4f, +0xd836adee6c9b3636, 0xa2a6590451ffa6a6, 0x6fd2debdb90cd2d2, 0xf3f5fb06f70ef5f5, +0xf979ef80f2967979, 0xa16f5fcede306f6f, 0x7e91fcef3f6d9191, 0x5552aa07a4f85252, +0x9d6027fdc0476060, 0xcabc89766535bcbc, 0x569baccd2b379b9b, 0x028e048c018a8e8e, +0xb6a371155bd2a3a3, 0x300c603c186c0c0c, 0xf17bff8af6847b7b, 0xd435b5e16a803535, +0x741de8693af51d1d, 0xa7e05347ddb3e0e0, 0x7bd7f6acb321d7d7, 0x2fc25eed999cc2c2, +0xb82e6d965c432e2e, 0x314b627a96294b4b, 0xdffea321e15dfefe, 
0x41578216aed55757, +0x5415a8412abd1515, 0xc1779fb6eee87777, 0xdc37a5eb6e923737, 0xb3e57b56d79ee5e5, +0x469f8cd923139f9f, 0xe7f0d317fd23f0f0, 0x354a6a7f94204a4a, 0x4fda9e95a944dada, +0x7d58fa25b0a25858, 0x03c906ca8fcfc9c9, 0xa429558d527c2929, 0x280a5022145a0a0a, +0xfeb1e14f7f50b1b1, 0xbaa0691a5dc9a0a0, 0xb16b7fdad6146b6b, 0x2e855cab17d98585, +0xcebd8173673cbdbd, 0x695dd234ba8f5d5d, 0x4010805020901010, 0xf7f4f303f507f4f4, +0x0bcb16c08bddcbcb, 0xf83eedc67cd33e3e, 0x140528110a2d0505, 0x81671fe6ce786767, +0xb7e47353d597e4e4, 0x9c2725bb4e022727, 0x1941325882734141, 0x168b2c9d0ba78b8b, +0xa6a7510153f6a7a7, 0xe97dcf94fab27d7d, 0x6e95dcfb37499595, 0x47d88e9fad56d8d8, +0xcbfb8b30eb70fbfb, 0x9fee2371c1cdeeee, 0xed7cc791f8bb7c7c, 0x856617e3cc716666, +0x53dda68ea77bdddd, 0x5c17b84b2eaf1717, 0x014702468e454747, 0x429e84dc211a9e9e, +0x0fca1ec589d4caca, 0xb42d75995a582d2d, 0xc6bf9179632ebfbf, 0x1c07381b0e3f0707, +0x8ead012347acadad, 0x755aea2fb4b05a5a, 0x36836cb51bef8383, 0xcc3385ff66b63333, +0x91633ff2c65c6363, 0x0802100a04120202, 0x92aa39384993aaaa, 0xd971afa8e2de7171, +0x07c80ecf8dc6c8c8, 0x6419c87d32d11919, 0x39497270923b4949, 0x43d9869aaf5fd9d9, +0xeff2c31df931f2f2, 0xabe34b48dba8e3e3, 0x715be22ab6b95b5b, 0x1a8834920dbc8888, +0x529aa4c8293e9a9a, 0x98262dbe4c0b2626, 0xc8328dfa64bf3232, 0xfab0e94a7d59b0b0, +0x83e91b6acff2e9e9, 0x3c0f78331e770f0f, 0x73d5e6a6b733d5d5, 0x3a8074ba1df48080, +0xc2be997c6127bebe, 0x13cd26de87ebcdcd, 0xd034bde468893434, 0x3d487a7590324848, +0xdbffab24e354ffff, 0xf57af78ff48d7a7a, 0x7a90f4ea3d649090, 0x615fc23ebe9d5f5f, +0x80201da0403d2020, 0xbd6867d5d00f6868, 0x681ad07234ca1a1a, 0x82ae192c41b7aeae, +0xeab4c95e757db4b4, 0x4d549a19a8ce5454, 0x7693ece53b7f9393, 0x88220daa442f2222, +0x8d6407e9c8636464, 0xe3f1db12ff2af1f1, 0xd173bfa2e6cc7373, 0x4812905a24821212, +0x1d403a5d807a4040, 0x2008402810480808, 0x2bc356e89b95c3c3, 0x97ec337bc5dfecec, +0x4bdb9690ab4ddbdb, 0xbea1611f5fc0a1a1, 0x0e8d1c8307918d8d, 0xf43df5c97ac83d3d, +0x6697ccf1335b9797, 
0x0000000000000000, 0x1bcf36d483f9cfcf, 0xac2b4587566e2b2b, +0xc57697b3ece17676, 0x328264b019e68282, 0x7fd6fea9b128d6d6, 0x6c1bd87736c31b1b, +0xeeb5c15b7774b5b5, 0x86af112943beafaf, 0xb56a77dfd41d6a6a, 0x5d50ba0da0ea5050, +0x0945124c8a574545, 0xebf3cb18fb38f3f3, 0xc0309df060ad3030, 0x9bef2b74c3c4efef, +0xfc3fe5c37eda3f3f, 0x4955921caac75555, 0xb2a2791059dba2a2, 0x8fea0365c9e9eaea, +0x89650fecca6a6565, 0xd2bab9686903baba, 0xbc2f65935e4a2f2f, 0x27c04ee79d8ec0c0, +0x5fdebe81a160dede, 0x701ce06c38fc1c1c, 0xd3fdbb2ee746fdfd, 0x294d52649a1f4d4d, +0x7292e4e039769292, 0xc9758fbceafa7575, 0x1806301e0c360606, 0x128a249809ae8a8a, +0xf2b2f940794bb2b2, 0xbfe66359d185e6e6, 0x380e70361c7e0e0e, 0x7c1ff8633ee71f1f, +0x956237f7c4556262, 0x77d4eea3b53ad4d4, 0x9aa829324d81a8a8, 0x6296c4f431529696, +0xc3f99b3aef62f9f9, 0x33c566f697a3c5c5, 0x942535b14a102525, 0x7959f220b2ab5959, +0x2a8454ae15d08484, 0xd572b7a7e4c57272, 0xe439d5dd72ec3939, 0x2d4c5a6198164c4c, +0x655eca3bbc945e5e, 0xfd78e785f09f7878, 0xe038ddd870e53838, 0x0a8c148605988c8c, +0x63d1c6b2bf17d1d1, 0xaea5410b57e4a5a5, 0xafe2434dd9a1e2e2, 0x99612ff8c24e6161, +0xf6b3f1457b42b3b3, 0x842115a542342121, 0x4a9c94d625089c9c, 0x781ef0663cee1e1e, +0x1143225286614343, 0x3bc776fc93b1c7c7, 0xd7fcb32be54ffcfc, 0x1004201408240404, +0x5951b208a2e35151, 0x5e99bcc72f259999, 0xa96d4fc4da226d6d, 0x340d68391a650d0d, +0xcffa8335e979fafa, 0x5bdfb684a369dfdf, 0xe57ed79bfca97e7e, 0x90243db448192424, +0xec3bc5d776fe3b3b, 0x96ab313d4b9aabab, 0x1fce3ed181f0cece, 0x4411885522991111, +0x068f0c8903838f8f, 0x254e4a6b9c044e4e, 0xe6b7d1517366b7b7, 0x8beb0b60cbe0ebeb, +0xf03cfdcc78c13c3c, 0x3e817cbf1ffd8181, 0x6a94d4fe35409494, 0xfbf7eb0cf31cf7f7, +0xdeb9a1676f18b9b9, 0x4c13985f268b1313, 0xb02c7d9c58512c2c, 0x6bd3d6b8bb05d3d3, +0xbbe76b5cd38ce7e7, 0xa56e57cbdc396e6e, 0x37c46ef395aac4c4, 0x0c03180f061b0303, +0x45568a13acdc5656, 0x0d441a49885e4444, 0xe17fdf9efea07f7f, 0x9ea921374f88a9a9, +0xa82a4d8254672a2a, 0xd6bbb16d6b0abbbb, 0x23c146e29f87c1c1, 
0x5153a202a6f15353, +0x57dcae8ba572dcdc, 0x2c0b582716530b0b, 0x4e9d9cd327019d9d, 0xad6c47c1d82b6c6c, +0xc43195f562a43131, 0xcd7487b9e8f37474, 0xfff6e309f115f6f6, 0x05460a438c4c4646, +0x8aac092645a5acac, 0x1e893c970fb58989, 0x5014a04428b41414, 0xa3e15b42dfbae1e1, +0x5816b04e2ca61616, 0xe83acdd274f73a3a, 0xb9696fd0d2066969, 0x2409482d12410909, +0xdd70a7ade0d77070, 0xe2b6d954716fb6b6, 0x67d0ceb7bd1ed0d0, 0x93ed3b7ec7d6eded, +0x17cc2edb85e2cccc, 0x15422a5784684242, 0x5a98b4c22d2c9898, 0xaaa4490e55eda4a4, +0xa0285d8850752828, 0x6d5cda31b8865c5c, 0xc7f8933fed6bf8f8, 0x228644a411c28686, +] +C7 = [ +0x186018c07830d818, 0x238c2305af462623, 0xc63fc67ef991b8c6, 0xe887e8136fcdfbe8, +0x8726874ca113cb87, 0xb8dab8a9626d11b8, 0x0104010805020901, 0x4f214f426e9e0d4f, +0x36d836adee6c9b36, 0xa6a2a6590451ffa6, 0xd26fd2debdb90cd2, 0xf5f3f5fb06f70ef5, +0x79f979ef80f29679, 0x6fa16f5fcede306f, 0x917e91fcef3f6d91, 0x525552aa07a4f852, +0x609d6027fdc04760, 0xbccabc89766535bc, 0x9b569baccd2b379b, 0x8e028e048c018a8e, +0xa3b6a371155bd2a3, 0x0c300c603c186c0c, 0x7bf17bff8af6847b, 0x35d435b5e16a8035, +0x1d741de8693af51d, 0xe0a7e05347ddb3e0, 0xd77bd7f6acb321d7, 0xc22fc25eed999cc2, +0x2eb82e6d965c432e, 0x4b314b627a96294b, 0xfedffea321e15dfe, 0x5741578216aed557, +0x155415a8412abd15, 0x77c1779fb6eee877, 0x37dc37a5eb6e9237, 0xe5b3e57b56d79ee5, +0x9f469f8cd923139f, 0xf0e7f0d317fd23f0, 0x4a354a6a7f94204a, 0xda4fda9e95a944da, +0x587d58fa25b0a258, 0xc903c906ca8fcfc9, 0x29a429558d527c29, 0x0a280a5022145a0a, +0xb1feb1e14f7f50b1, 0xa0baa0691a5dc9a0, 0x6bb16b7fdad6146b, 0x852e855cab17d985, +0xbdcebd8173673cbd, 0x5d695dd234ba8f5d, 0x1040108050209010, 0xf4f7f4f303f507f4, +0xcb0bcb16c08bddcb, 0x3ef83eedc67cd33e, 0x05140528110a2d05, 0x6781671fe6ce7867, +0xe4b7e47353d597e4, 0x279c2725bb4e0227, 0x4119413258827341, 0x8b168b2c9d0ba78b, +0xa7a6a7510153f6a7, 0x7de97dcf94fab27d, 0x956e95dcfb374995, 0xd847d88e9fad56d8, +0xfbcbfb8b30eb70fb, 0xee9fee2371c1cdee, 0x7ced7cc791f8bb7c, 0x66856617e3cc7166, +0xdd53dda68ea77bdd, 
0x175c17b84b2eaf17, 0x47014702468e4547, 0x9e429e84dc211a9e, +0xca0fca1ec589d4ca, 0x2db42d75995a582d, 0xbfc6bf9179632ebf, 0x071c07381b0e3f07, +0xad8ead012347acad, 0x5a755aea2fb4b05a, 0x8336836cb51bef83, 0x33cc3385ff66b633, +0x6391633ff2c65c63, 0x020802100a041202, 0xaa92aa39384993aa, 0x71d971afa8e2de71, +0xc807c80ecf8dc6c8, 0x196419c87d32d119, 0x4939497270923b49, 0xd943d9869aaf5fd9, +0xf2eff2c31df931f2, 0xe3abe34b48dba8e3, 0x5b715be22ab6b95b, 0x881a8834920dbc88, +0x9a529aa4c8293e9a, 0x2698262dbe4c0b26, 0x32c8328dfa64bf32, 0xb0fab0e94a7d59b0, +0xe983e91b6acff2e9, 0x0f3c0f78331e770f, 0xd573d5e6a6b733d5, 0x803a8074ba1df480, +0xbec2be997c6127be, 0xcd13cd26de87ebcd, 0x34d034bde4688934, 0x483d487a75903248, +0xffdbffab24e354ff, 0x7af57af78ff48d7a, 0x907a90f4ea3d6490, 0x5f615fc23ebe9d5f, +0x2080201da0403d20, 0x68bd6867d5d00f68, 0x1a681ad07234ca1a, 0xae82ae192c41b7ae, +0xb4eab4c95e757db4, 0x544d549a19a8ce54, 0x937693ece53b7f93, 0x2288220daa442f22, +0x648d6407e9c86364, 0xf1e3f1db12ff2af1, 0x73d173bfa2e6cc73, 0x124812905a248212, +0x401d403a5d807a40, 0x0820084028104808, 0xc32bc356e89b95c3, 0xec97ec337bc5dfec, +0xdb4bdb9690ab4ddb, 0xa1bea1611f5fc0a1, 0x8d0e8d1c8307918d, 0x3df43df5c97ac83d, +0x976697ccf1335b97, 0x0000000000000000, 0xcf1bcf36d483f9cf, 0x2bac2b4587566e2b, +0x76c57697b3ece176, 0x82328264b019e682, 0xd67fd6fea9b128d6, 0x1b6c1bd87736c31b, +0xb5eeb5c15b7774b5, 0xaf86af112943beaf, 0x6ab56a77dfd41d6a, 0x505d50ba0da0ea50, +0x450945124c8a5745, 0xf3ebf3cb18fb38f3, 0x30c0309df060ad30, 0xef9bef2b74c3c4ef, +0x3ffc3fe5c37eda3f, 0x554955921caac755, 0xa2b2a2791059dba2, 0xea8fea0365c9e9ea, +0x6589650fecca6a65, 0xbad2bab9686903ba, 0x2fbc2f65935e4a2f, 0xc027c04ee79d8ec0, +0xde5fdebe81a160de, 0x1c701ce06c38fc1c, 0xfdd3fdbb2ee746fd, 0x4d294d52649a1f4d, +0x927292e4e0397692, 0x75c9758fbceafa75, 0x061806301e0c3606, 0x8a128a249809ae8a, +0xb2f2b2f940794bb2, 0xe6bfe66359d185e6, 0x0e380e70361c7e0e, 0x1f7c1ff8633ee71f, +0x62956237f7c45562, 0xd477d4eea3b53ad4, 0xa89aa829324d81a8, 
0x966296c4f4315296, +0xf9c3f99b3aef62f9, 0xc533c566f697a3c5, 0x25942535b14a1025, 0x597959f220b2ab59, +0x842a8454ae15d084, 0x72d572b7a7e4c572, 0x39e439d5dd72ec39, 0x4c2d4c5a6198164c, +0x5e655eca3bbc945e, 0x78fd78e785f09f78, 0x38e038ddd870e538, 0x8c0a8c148605988c, +0xd163d1c6b2bf17d1, 0xa5aea5410b57e4a5, 0xe2afe2434dd9a1e2, 0x6199612ff8c24e61, +0xb3f6b3f1457b42b3, 0x21842115a5423421, 0x9c4a9c94d625089c, 0x1e781ef0663cee1e, +0x4311432252866143, 0xc73bc776fc93b1c7, 0xfcd7fcb32be54ffc, 0x0410042014082404, +0x515951b208a2e351, 0x995e99bcc72f2599, 0x6da96d4fc4da226d, 0x0d340d68391a650d, +0xfacffa8335e979fa, 0xdf5bdfb684a369df, 0x7ee57ed79bfca97e, 0x2490243db4481924, +0x3bec3bc5d776fe3b, 0xab96ab313d4b9aab, 0xce1fce3ed181f0ce, 0x1144118855229911, +0x8f068f0c8903838f, 0x4e254e4a6b9c044e, 0xb7e6b7d1517366b7, 0xeb8beb0b60cbe0eb, +0x3cf03cfdcc78c13c, 0x813e817cbf1ffd81, 0x946a94d4fe354094, 0xf7fbf7eb0cf31cf7, +0xb9deb9a1676f18b9, 0x134c13985f268b13, 0x2cb02c7d9c58512c, 0xd36bd3d6b8bb05d3, +0xe7bbe76b5cd38ce7, 0x6ea56e57cbdc396e, 0xc437c46ef395aac4, 0x030c03180f061b03, +0x5645568a13acdc56, 0x440d441a49885e44, 0x7fe17fdf9efea07f, 0xa99ea921374f88a9, +0x2aa82a4d8254672a, 0xbbd6bbb16d6b0abb, 0xc123c146e29f87c1, 0x535153a202a6f153, +0xdc57dcae8ba572dc, 0x0b2c0b582716530b, 0x9d4e9d9cd327019d, 0x6cad6c47c1d82b6c, +0x31c43195f562a431, 0x74cd7487b9e8f374, 0xf6fff6e309f115f6, 0x4605460a438c4c46, +0xac8aac092645a5ac, 0x891e893c970fb589, 0x145014a04428b414, 0xe1a3e15b42dfbae1, +0x165816b04e2ca616, 0x3ae83acdd274f73a, 0x69b9696fd0d20669, 0x092409482d124109, +0x70dd70a7ade0d770, 0xb6e2b6d954716fb6, 0xd067d0ceb7bd1ed0, 0xed93ed3b7ec7d6ed, +0xcc17cc2edb85e2cc, 0x4215422a57846842, 0x985a98b4c22d2c98, 0xa4aaa4490e55eda4, +0x28a0285d88507528, 0x5c6d5cda31b8865c, 0xf8c7f8933fed6bf8, 0x86228644a411c286, +] + +rc = [ +0x0000000000000000, +0x1823c6e887b8014f, +0x36a6d2f5796f9152, +0x60bc9b8ea30c7b35, +0x1de0d7c22e4bfe57, +0x157737e59ff04ada, +0x58c9290ab1a06b85, +0xbd5d10f4cb3e0567, 
# ---------------------------------------------------------------------------
# Round-constant table for the Whirlpool key schedule.  rc[r] is XORed into
# the first key-schedule word in round r; rc[0] is a placeholder because the
# reference algorithm numbers its rounds 1..R.  The values are fixed by the
# Whirlpool specification (derived from the S-box).
# ---------------------------------------------------------------------------
rc = [
    0x0000000000000000,
    0x1823c6e887b8014f,
    0x36a6d2f5796f9152,
    0x60bc9b8ea30c7b35,
    0x1de0d7c22e4bfe57,
    0x157737e59ff04ada,
    0x58c9290ab1a06b85,
    0xbd5d10f4cb3e0567,
    0xe427418ba77d95d8,
    0xfbee7c66dd17479e,
    0xca2dbf07ad5a8333,
]

# Whirlpool produces a 512-bit (64-byte) digest.
DIGESTBYTES = 64


class WhirlpoolStruct:
    """Mutable hashing state for one Whirlpool computation."""

    def __init__(self):
        # 256-bit total-message-length counter, big-endian, one byte per slot.
        self.bitLength = [0] * 32
        # 512-bit input buffer.  Slots can transiently hold values wider than
        # one byte (WhirlpoolAdd shifts without masking); every reader masks
        # with ``& 0xff``, so only the low byte of each slot is significant.
        self.buffer = [0] * 64
        self.bufferBits = 0  # number of bits currently held in self.buffer
        self.bufferPos = 0   # index of the next self.buffer slot to fill
        self.hash = [0] * 8  # intermediate hash: eight 64-bit big-endian words


def WhirlpoolInit(ctx):
    """Reset *ctx* to the initial all-zero hashing state, in place.

    Bug fix: the previous implementation rebound the local name ``ctx`` to a
    brand-new WhirlpoolStruct and returned, which left the caller's object
    untouched (a pure no-op).  Re-running __init__ on the instance that was
    passed in actually resets it, matching the reference NESSIE_init
    semantics while keeping the same call signature.
    """
    WhirlpoolStruct.__init__(ctx)


def WhirlpoolAdd(source, sourceBits, ctx):
    """Absorb *sourceBits* bits of *source* into the hashing state *ctx*.

    Mirrors the reference NESSIE_add: the input may be any bit length, so a
    bit-level write position (``bufferRem``) is tracked and source bytes are
    spliced across buffer byte boundaries.  Whenever the 512-bit buffer
    fills, one compression (processBuffer) is applied.

    :param source: bytes on Python 3, str on Python 2
    :param sourceBits: number of bits of *source* to absorb
    :param ctx: a WhirlpoolStruct, updated in place
    """
    if sys.hexversion < 0x3000000:
        # Python 2: turn the str into integer byte values (py3 bytes already
        # index as ints).
        source = [ord(s) & 0xff for s in source]

    # Add sourceBits into the 256-bit big-endian message-length counter.
    carry = 0
    value = sourceBits
    i = 31
    while i >= 0 and (carry != 0 or value != 0):
        carry += ctx.bitLength[i] + ((value % 0x100000000) & 0xff)
        ctx.bitLength[i] = carry % 0x100
        carry >>= 8
        value >>= 8
        i -= 1

    # Robustness fix: with no payload there is nothing to buffer; the
    # original fell through and indexed source[0], crashing on empty input.
    if sourceBits == 0:
        return

    bufferBits = ctx.bufferBits
    bufferPos = ctx.bufferPos
    sourcePos = 0
    # Unused low bits of the final source byte, and the count of partial
    # bits already sitting in the current buffer byte.
    sourceGap = (8 - (sourceBits & 7)) & 7
    bufferRem = ctx.bufferBits & 7
    buffr = ctx.buffer

    # Process all complete source bytes.
    while sourceBits > 8:
        # Next 8 source bits, re-aligned to a byte boundary.
        b = ((source[sourcePos] << sourceGap) & 0xff) | \
            ((source[sourcePos + 1] & 0xff) >> (8 - sourceGap))
        buffr[bufferPos] |= (b >> bufferRem) % 0x100
        bufferPos += 1
        bufferBits += 8 - bufferRem
        if bufferBits == 512:
            processBuffer(ctx)
            bufferBits = 0
            bufferPos = 0
        # Leftover bufferRem bits of b start the next buffer slot.  The
        # unshifted high bits stored here are deliberately not masked; they
        # are stripped by the ``& 0xff`` in processBuffer.
        buffr[bufferPos] = b << (8 - bufferRem)
        bufferBits += bufferRem
        sourceBits -= 8
        sourcePos += 1

    # Process the final (possibly partial) source byte.
    b = (source[sourcePos] << sourceGap) & 0xff
    buffr[bufferPos] |= b >> bufferRem
    if bufferRem + sourceBits < 8:
        # Everything fits inside the current buffer byte.
        bufferBits += sourceBits
    else:
        bufferPos += 1
        bufferBits += 8 - bufferRem
        sourceBits -= 8 - bufferRem
        if bufferBits == 512:
            processBuffer(ctx)
            bufferBits = 0
            bufferPos = 0
        buffr[bufferPos] = b << (8 - bufferRem)
        bufferBits += sourceBits

    ctx.bufferBits = bufferBits
    ctx.bufferPos = bufferPos


def WhirlpoolFinalize(ctx):
    """Apply MD-style padding, absorb the length block, and return the digest.

    Returns the 64-byte digest as a string built with chr() -- a byte string
    on Python 2; on Python 3 a str whose code points are the byte values,
    exactly as the original implementation produced (callers encode as
    needed).
    """
    bufferPos = ctx.bufferPos
    # Append the single '1' padding bit immediately after the data.
    ctx.buffer[bufferPos] |= 0x80 >> (ctx.bufferBits & 7)
    bufferPos += 1
    # If there is no room left for the 256-bit length field, zero-pad this
    # block and compress it first.
    if bufferPos > 32:
        if bufferPos < 64:
            for i in range(64 - bufferPos):
                ctx.buffer[bufferPos + i] = 0
        processBuffer(ctx)
        bufferPos = 0
    # Zero-pad up to the length field.
    if bufferPos < 32:
        for i in range(32 - bufferPos):
            ctx.buffer[bufferPos + i] = 0
    bufferPos = 32
    # Final 32 buffer bytes: the big-endian total message length in bits.
    for i in range(32):
        ctx.buffer[32 + i] = ctx.bitLength[i]
    processBuffer(ctx)
    # Serialize the eight 64-bit hash words big-endian, one byte at a time.
    digest = ''
    for i in range(8):
        for shift in (56, 48, 40, 32, 24, 16, 8, 0):
            digest += chr((ctx.hash[i] >> shift) % 0x100)
    ctx.bufferPos = bufferPos
    return digest


def CDo(buf, a0, a1, a2, a3, a4, a5, a6, a7):
    """Compute one output word of the Whirlpool round function.

    Each of the eight 64-bit words of *buf* (selected by the index
    arguments) contributes one byte -- extracted by the shrinking shifts
    56..0 -- which indexes the corresponding circulant lookup table C0..C7;
    the eight table entries are XORed together.  The ``% 0x100000000`` keeps
    Python 2 long values bounded before the byte mask.
    """
    return C0[((buf[a0] >> 56) % 0x100000000) & 0xff] ^ \
        C1[((buf[a1] >> 48) % 0x100000000) & 0xff] ^ \
        C2[((buf[a2] >> 40) % 0x100000000) & 0xff] ^ \
        C3[((buf[a3] >> 32) % 0x100000000) & 0xff] ^ \
        C4[((buf[a4] >> 24) % 0x100000000) & 0xff] ^ \
        C5[((buf[a5] >> 16) % 0x100000000) & 0xff] ^ \
        C6[((buf[a6] >> 8) % 0x100000000) & 0xff] ^ \
        C7[((buf[a7] >> 0) % 0x100000000) & 0xff]


def processBuffer(ctx):
    """One Whirlpool compression: absorb the 512-bit buffer into ctx.hash."""
    K = [0] * 8      # round keys
    block = [0] * 8  # the message block as eight 64-bit words
    state = [0] * 8
    L = [0] * 8
    buffr = ctx.buffer

    # Pack the 64 buffer bytes into eight big-endian 64-bit words.  The
    # ``& 0xff`` also strips any transient high bits left by WhirlpoolAdd.
    buf_cnt = 0
    for i in range(8):
        block[i] = ((buffr[buf_cnt + 0] & 0xff) << 56) ^ \
            ((buffr[buf_cnt + 1] & 0xff) << 48) ^ \
            ((buffr[buf_cnt + 2] & 0xff) << 40) ^ \
            ((buffr[buf_cnt + 3] & 0xff) << 32) ^ \
            ((buffr[buf_cnt + 4] & 0xff) << 24) ^ \
            ((buffr[buf_cnt + 5] & 0xff) << 16) ^ \
            ((buffr[buf_cnt + 6] & 0xff) << 8) ^ \
            ((buffr[buf_cnt + 7] & 0xff) << 0)
        buf_cnt += 8

    for i in range(8):
        K[i] = ctx.hash[i]
        state[i] = block[i] ^ K[i]

    # R rounds (R is the module-level round count defined with the tables):
    # first advance the key schedule, then transform the state with it.
    for r in range(1, R + 1):
        L[0] = CDo(K, 0, 7, 6, 5, 4, 3, 2, 1) ^ rc[r]
        L[1] = CDo(K, 1, 0, 7, 6, 5, 4, 3, 2)
        L[2] = CDo(K, 2, 1, 0, 7, 6, 5, 4, 3)
        L[3] = CDo(K, 3, 2, 1, 0, 7, 6, 5, 4)
        L[4] = CDo(K, 4, 3, 2, 1, 0, 7, 6, 5)
        L[5] = CDo(K, 5, 4, 3, 2, 1, 0, 7, 6)
        L[6] = CDo(K, 6, 5, 4, 3, 2, 1, 0, 7)
        L[7] = CDo(K, 7, 6, 5, 4, 3, 2, 1, 0)
        for i in range(8):
            K[i] = L[i]
        L[0] = CDo(state, 0, 7, 6, 5, 4, 3, 2, 1) ^ K[0]
        L[1] = CDo(state, 1, 0, 7, 6, 5, 4, 3, 2) ^ K[1]
        L[2] = CDo(state, 2, 1, 0, 7, 6, 5, 4, 3) ^ K[2]
        L[3] = CDo(state, 3, 2, 1, 0, 7, 6, 5, 4) ^ K[3]
        L[4] = CDo(state, 4, 3, 2, 1, 0, 7, 6, 5) ^ K[4]
        L[5] = CDo(state, 5, 4, 3, 2, 1, 0, 7, 6) ^ K[5]
        L[6] = CDo(state, 6, 5, 4, 3, 2, 1, 0, 7) ^ K[6]
        L[7] = CDo(state, 7, 6, 5, 4, 3, 2, 1, 0) ^ K[7]
        for i in range(8):
            state[i] = L[i]

    # Apply the Miyaguchi-Preneel compression function.
    for i in range(8):
        ctx.hash[i] ^= state[i] ^ block[i]
    return


#
# Tests (official Whirlpool test vectors).
#

if __name__ == '__main__':
    assert Whirlpool(b'The quick brown fox jumps over the lazy dog').hexdigest() == \
        'b97de512e91e3828b40d2b0fdce9ceb3c4a71f9bea8d88e75c4fa854df36725fd2b52eb6544edcacd6f8beddfea403cb55ae31f03ad62a5ef54e42ee82c3fb35'
    assert Whirlpool(b'The quick brown fox jumps over the lazy eog').hexdigest() == \
        'c27ba124205f72e6847f3e19834f925cc666d0974167af915bb462420ed40cc50900d85a1f923219d832357750492d5c143011a76988344c2635e69d06f2d38c'
    assert Whirlpool(b'').hexdigest() == \
        '19fa61d75522a4669b44e39c1d2e1726c530232130d407f89afee0964997f7a73e83be698b288febcf88e3e03c4f0757ea8964e59b63d93708b138cc42a66eb3'

# ---------------------------------------------------------------------------
# NOTE(review): the remainder of this chunk of the original diff dump is
# non-Python patch metadata; preserved verbatim below (commented out) so no
# content is lost:
#
# diff --git a/portage_with_autodep/pym/portage/util/whirlpool.pyo
#          b/portage_with_autodep/pym/portage/util/whirlpool.pyo
# Binary files differ
# new file mode 100644
# index 0000000..4bcf49a
# --- /dev/null
# +++ b/portage_with_autodep/pym/portage/util/whirlpool.pyo
# diff --git a/portage_with_autodep/pym/portage/versions.py
#          b/portage_with_autodep/pym/portage/versions.py
# index f8691d1..db14e99 100644
# --- a/portage_with_autodep/pym/portage/versions.py
# +++ b/portage_with_autodep/pym/portage/versions.py
# @@ -1,5 +1,5 @@
#  # versions.py -- core Portage functionality
# -# Copyright 1998-2010 Gentoo Foundation
# +# Copyright 1998-2012 Gentoo Foundation
#  # Distributed under the terms of the GNU General Public License v2
#  __all__ = [
# @@ -9,14 +9,26 @@ __all__ = [
#  ]
#  import re
# +import sys
#  import warnings
# +if sys.hexversion <
# ---------------------------------------------------------------------------
0x3000000: + _unicode = unicode +else: + _unicode = str + import portage portage.proxy.lazyimport.lazyimport(globals(), - 'portage.util:cmp_sort_key' + 'portage.repository.config:_gen_valid_repo', + 'portage.util:cmp_sort_key', ) +from portage import _unicode_decode +from portage.eapi import eapi_allows_dots_in_PN +from portage.exception import InvalidData from portage.localization import _ +_unknown_repo = "__unknown__" + # \w is [a-zA-Z0-9_] # 2.1.1 A category name may contain any of the characters [A-Za-z0-9+_.-]. @@ -26,15 +38,27 @@ _cat = r'[\w+][\w+.-]*' # 2.1.2 A package name may contain any of the characters [A-Za-z0-9+_-]. # It must not begin with a hyphen, # and must not end in a hyphen followed by one or more digits. -_pkg = r'[\w+][\w+-]*?' +_pkg = { + "dots_disallowed_in_PN": r'[\w+][\w+-]*?', + "dots_allowed_in_PN": r'[\w+][\w+.-]*?', +} _v = r'(cvs\.)?(\d+)((\.\d+)*)([a-z]?)((_(pre|p|beta|alpha|rc)\d*)*)' _rev = r'\d+' _vr = _v + '(-r(' + _rev + '))?' -_cp = '(' + _cat + '/' + _pkg + '(-' + _vr + ')?)' -_cpv = '(' + _cp + '-' + _vr + ')' -_pv = '(?P<pn>' + _pkg + '(?P<pn_inval>-' + _vr + ')?)' + '-(?P<ver>' + _v + ')(-r(?P<rev>' + _rev + '))?' 
+_cp = { + "dots_disallowed_in_PN": '(' + _cat + '/' + _pkg['dots_disallowed_in_PN'] + '(-' + _vr + ')?)', + "dots_allowed_in_PN": '(' + _cat + '/' + _pkg['dots_allowed_in_PN'] + '(-' + _vr + ')?)', +} +_cpv = { + "dots_disallowed_in_PN": '(' + _cp['dots_disallowed_in_PN'] + '-' + _vr + ')', + "dots_allowed_in_PN": '(' + _cp['dots_allowed_in_PN'] + '-' + _vr + ')', +} +_pv = { + "dots_disallowed_in_PN": '(?P<pn>' + _pkg['dots_disallowed_in_PN'] + '(?P<pn_inval>-' + _vr + ')?)' + '-(?P<ver>' + _v + ')(-r(?P<rev>' + _rev + '))?', + "dots_allowed_in_PN": '(?P<pn>' + _pkg['dots_allowed_in_PN'] + '(?P<pn_inval>-' + _vr + ')?)' + '-(?P<ver>' + _v + ')(-r(?P<rev>' + _rev + '))?', +} ver_regexp = re.compile("^" + _vr + "$") suffix_regexp = re.compile("^(alpha|beta|rc|pre|p)(\\d*)$") @@ -49,7 +73,6 @@ def ververify(myver, silent=1): print(_("!!! syntax error in version: %s") % myver) return 0 -vercmp_cache = {} def vercmp(ver1, ver2, silent=1): """ Compare two versions @@ -76,11 +99,7 @@ def vercmp(ver1, ver2, silent=1): if ver1 == ver2: return 0 - mykey=ver1+":"+ver2 - try: - return vercmp_cache[mykey] - except KeyError: - pass + match1 = ver_regexp.match(ver1) match2 = ver_regexp.match(ver2) @@ -96,10 +115,8 @@ def vercmp(ver1, ver2, silent=1): # shortcut for cvs ebuilds (new style) if match1.group(1) and not match2.group(1): - vercmp_cache[mykey] = 1 return 1 elif match2.group(1) and not match1.group(1): - vercmp_cache[mykey] = -1 return -1 # building lists of the version parts before the suffix @@ -153,16 +170,13 @@ def vercmp(ver1, ver2, silent=1): for i in range(0, max(len(list1), len(list2))): if len(list1) <= i: - vercmp_cache[mykey] = -1 return -1 elif len(list2) <= i: - vercmp_cache[mykey] = 1 return 1 elif list1[i] != list2[i]: a = list1[i] b = list2[i] rval = (a > b) - (a < b) - vercmp_cache[mykey] = rval return rval # main version is equal, so now compare the _suffix part @@ -183,7 +197,6 @@ def vercmp(ver1, ver2, silent=1): a = suffix_value[s1[0]] b = 
suffix_value[s2[0]] rval = (a > b) - (a < b) - vercmp_cache[mykey] = rval return rval if s1[1] != s2[1]: # it's possible that the s(1|2)[1] == '' @@ -198,7 +211,6 @@ def vercmp(ver1, ver2, silent=1): r2 = 0 rval = (r1 > r2) - (r1 < r2) if rval: - vercmp_cache[mykey] = rval return rval # the suffix part is equal to, so finally check the revision @@ -211,7 +223,6 @@ def vercmp(ver1, ver2, silent=1): else: r2 = 0 rval = (r1 > r2) - (r1 < r2) - vercmp_cache[mykey] = rval return rval def pkgcmp(pkg1, pkg2): @@ -240,16 +251,25 @@ def pkgcmp(pkg1, pkg2): return None return vercmp("-".join(pkg1[1:]), "-".join(pkg2[1:])) -_pv_re = re.compile('^' + _pv + '$', re.VERBOSE) +_pv_re = { + "dots_disallowed_in_PN": re.compile('^' + _pv['dots_disallowed_in_PN'] + '$', re.VERBOSE), + "dots_allowed_in_PN": re.compile('^' + _pv['dots_allowed_in_PN'] + '$', re.VERBOSE), +} -def _pkgsplit(mypkg): +def _get_pv_re(eapi): + if eapi is None or eapi_allows_dots_in_PN(eapi): + return _pv_re["dots_allowed_in_PN"] + else: + return _pv_re["dots_disallowed_in_PN"] + +def _pkgsplit(mypkg, eapi=None): """ @param mypkg: pv @return: 1. None if input is invalid. 2. (pn, ver, rev) if input is pv """ - m = _pv_re.match(mypkg) + m = _get_pv_re(eapi).match(mypkg) if m is None: return None @@ -266,8 +286,8 @@ def _pkgsplit(mypkg): _cat_re = re.compile('^%s$' % _cat) _missing_cat = 'null' -catcache={} -def catpkgsplit(mydata,silent=1): + +def catpkgsplit(mydata, silent=1, eapi=None): """ Takes a Category/Package-Version-Rev and returns a list of each. @@ -281,28 +301,65 @@ def catpkgsplit(mydata,silent=1): 2. If cat is not specificed in mydata, cat will be "null" 3. 
if rev does not exist it will be '-r0' """ - try: - return catcache[mydata] - except KeyError: + return mydata.cpv_split + except AttributeError: pass mysplit = mydata.split('/', 1) p_split=None if len(mysplit)==1: cat = _missing_cat - p_split = _pkgsplit(mydata) + p_split = _pkgsplit(mydata, eapi=eapi) elif len(mysplit)==2: cat = mysplit[0] if _cat_re.match(cat) is not None: - p_split = _pkgsplit(mysplit[1]) + p_split = _pkgsplit(mysplit[1], eapi=eapi) if not p_split: - catcache[mydata]=None return None retval = (cat, p_split[0], p_split[1], p_split[2]) - catcache[mydata]=retval return retval -def pkgsplit(mypkg, silent=1): +class _pkg_str(_unicode): + """ + This class represents a cpv. It inherits from str (unicode in python2) and + has attributes that cache results for use by functions like catpkgsplit and + cpv_getkey which are called frequently (especially in match_from_list). + Instances are typically created in dbapi.cp_list() or the Atom contructor, + and propagate from there. Generally, code that pickles these objects will + manually convert them to a plain unicode object first. + """ + + def __new__(cls, cpv, slot=None, repo=None, eapi=None): + return _unicode.__new__(cls, cpv) + + def __init__(self, cpv, slot=None, repo=None, eapi=None): + if not isinstance(cpv, _unicode): + # Avoid TypeError from _unicode.__init__ with PyPy. 
+ cpv = _unicode_decode(cpv) + _unicode.__init__(cpv) + self.__dict__['cpv_split'] = catpkgsplit(cpv, eapi=eapi) + if self.cpv_split is None: + raise InvalidData(cpv) + self.__dict__['cp'] = self.cpv_split[0] + '/' + self.cpv_split[1] + if self.cpv_split[-1] == "r0" and cpv[-3:] != "-r0": + self.__dict__['version'] = "-".join(self.cpv_split[2:-1]) + else: + self.__dict__['version'] = "-".join(self.cpv_split[2:]) + # for match_from_list introspection + self.__dict__['cpv'] = self + if slot is not None: + self.__dict__['slot'] = slot + if repo is not None: + repo = _gen_valid_repo(repo) + if not repo: + repo = _unknown_repo + self.__dict__['repo'] = repo + + def __setattr__(self, name, value): + raise AttributeError("_pkg_str instances are immutable", + self.__class__, name, value) + +def pkgsplit(mypkg, silent=1, eapi=None): """ @param mypkg: either a pv or cpv @return: @@ -310,7 +367,7 @@ def pkgsplit(mypkg, silent=1): 2. (pn, ver, rev) if input is pv 3. (cp, ver, rev) if input is a cpv """ - catpsplit = catpkgsplit(mypkg) + catpsplit = catpkgsplit(mypkg, eapi=eapi) if catpsplit is None: return None cat, pn, ver, rev = catpsplit @@ -319,9 +376,13 @@ def pkgsplit(mypkg, silent=1): else: return (cat + '/' + pn, ver, rev) -def cpv_getkey(mycpv): +def cpv_getkey(mycpv, eapi=None): """Calls catpkgsplit on a cpv and returns only the cp.""" - mysplit = catpkgsplit(mycpv) + try: + return mycpv.cp + except AttributeError: + pass + mysplit = catpkgsplit(mycpv, eapi=eapi) if mysplit is not None: return mysplit[0] + '/' + mysplit[1] @@ -330,7 +391,7 @@ def cpv_getkey(mycpv): DeprecationWarning, stacklevel=2) myslash = mycpv.split("/", 1) - mysplit = _pkgsplit(myslash[-1]) + mysplit = _pkgsplit(myslash[-1], eapi=eapi) if mysplit is None: return None mylen = len(myslash) @@ -339,14 +400,18 @@ def cpv_getkey(mycpv): else: return mysplit[0] -def cpv_getversion(mycpv): +def cpv_getversion(mycpv, eapi=None): """Returns the v (including revision) from an cpv.""" - cp = 
cpv_getkey(mycpv) + try: + return mycpv.version + except AttributeError: + pass + cp = cpv_getkey(mycpv, eapi=eapi) if cp is None: return None return mycpv[len(cp+"-"):] -def cpv_sort_key(): +def cpv_sort_key(eapi=None): """ Create an object for sorting cpvs, to be used as the 'key' parameter in places like list.sort() or sorted(). This calls catpkgsplit() once for @@ -365,39 +430,55 @@ def cpv_sort_key(): split1 = split_cache.get(cpv1, False) if split1 is False: - split1 = catpkgsplit(cpv1) - if split1 is not None: - split1 = (split1[:2], '-'.join(split1[2:])) + split1 = None + try: + split1 = cpv1.cpv + except AttributeError: + try: + split1 = _pkg_str(cpv1, eapi=eapi) + except InvalidData: + pass split_cache[cpv1] = split1 split2 = split_cache.get(cpv2, False) if split2 is False: - split2 = catpkgsplit(cpv2) - if split2 is not None: - split2 = (split2[:2], '-'.join(split2[2:])) + split2 = None + try: + split2 = cpv2.cpv + except AttributeError: + try: + split2 = _pkg_str(cpv2, eapi=eapi) + except InvalidData: + pass split_cache[cpv2] = split2 - if split1 is None or split2 is None or split1[0] != split2[0]: + if split1 is None or split2 is None or split1.cp != split2.cp: return (cpv1 > cpv2) - (cpv1 < cpv2) - return vercmp(split1[1], split2[1]) + return vercmp(split1.version, split2.version) return cmp_sort_key(cmp_cpv) def catsplit(mydep): return mydep.split("/", 1) -def best(mymatches): +def best(mymatches, eapi=None): """Accepts None arguments; assumes matches are valid.""" if not mymatches: return "" if len(mymatches) == 1: return mymatches[0] bestmatch = mymatches[0] - p2 = catpkgsplit(bestmatch)[1:] + try: + v2 = bestmatch.version + except AttributeError: + v2 = _pkg_str(bestmatch, eapi=eapi).version for x in mymatches[1:]: - p1 = catpkgsplit(x)[1:] - if pkgcmp(p1, p2) > 0: + try: + v1 = x.version + except AttributeError: + v1 = _pkg_str(x, eapi=eapi).version + if vercmp(v1, v2) > 0: bestmatch = x - p2 = catpkgsplit(bestmatch)[1:] + v2 = v1 return bestmatch 
diff --git a/portage_with_autodep/pym/portage/versions.pyo b/portage_with_autodep/pym/portage/versions.pyo Binary files differnew file mode 100644 index 0000000..eae2743 --- /dev/null +++ b/portage_with_autodep/pym/portage/versions.pyo diff --git a/portage_with_autodep/pym/portage/xml/__init__.pyo b/portage_with_autodep/pym/portage/xml/__init__.pyo Binary files differnew file mode 100644 index 0000000..15f1b77 --- /dev/null +++ b/portage_with_autodep/pym/portage/xml/__init__.pyo diff --git a/portage_with_autodep/pym/portage/xml/metadata.py b/portage_with_autodep/pym/portage/xml/metadata.py index 7acc1f3..25f801a 100644 --- a/portage_with_autodep/pym/portage/xml/metadata.py +++ b/portage_with_autodep/pym/portage/xml/metadata.py @@ -1,4 +1,4 @@ -# Copyright 2010-2011 Gentoo Foundation +# Copyright 2010-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 """Provides an easy-to-use python interface to Gentoo's metadata.xml file. @@ -30,16 +30,40 @@ __all__ = ('MetaDataXML',) -try: - import xml.etree.cElementTree as etree -except ImportError: +import sys + +if sys.hexversion < 0x2070000 or \ + (sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000): + # Our _MetadataTreeBuilder usage is incompatible with + # cElementTree in Python 2.6, 3.0, and 3.1: + # File "/usr/lib/python2.6/xml/etree/ElementTree.py", line 644, in findall + # assert self._root is not None import xml.etree.ElementTree as etree +else: + try: + import xml.etree.cElementTree as etree + except (ImportError, SystemError): + import xml.etree.ElementTree as etree + +try: + from xml.parsers.expat import ExpatError +except (ImportError, SystemError): + ExpatError = SyntaxError import re +import xml.etree.ElementTree import portage -from portage import os +from portage import os, _unicode_decode from portage.util import unique_everseen +class _MetadataTreeBuilder(xml.etree.ElementTree.TreeBuilder): + """ + Implements doctype() as required to avoid deprecation warnings 
with + Python >=2.7. + """ + def doctype(self, name, pubid, system): + pass + class _Maintainer(object): """An object for representing one maintainer. @@ -63,8 +87,7 @@ class _Maintainer(object): self.description = None self.restrict = node.get('restrict') self.status = node.get('status') - maint_attrs = node.getchildren() - for attr in maint_attrs: + for attr in node: setattr(self, attr.tag, attr.text) def __repr__(self): @@ -174,9 +197,12 @@ class MetaDataXML(object): self._xml_tree = None try: - self._xml_tree = etree.parse(metadata_xml_path) + self._xml_tree = etree.parse(metadata_xml_path, + parser=etree.XMLParser(target=_MetadataTreeBuilder())) except ImportError: pass + except ExpatError as e: + raise SyntaxError(_unicode_decode("%s") % (e,)) if isinstance(herds, etree.ElementTree): herds_etree = herds @@ -209,7 +235,8 @@ class MetaDataXML(object): if self._herdstree is None: try: - self._herdstree = etree.parse(self._herds_path) + self._herdstree = etree.parse(self._herds_path, + parser=etree.XMLParser(target=_MetadataTreeBuilder())) except (ImportError, IOError, SyntaxError): return None @@ -217,7 +244,13 @@ class MetaDataXML(object): if herd in ('no-herd', 'maintainer-wanted', 'maintainer-needed'): return None - for node in self._herdstree.getiterator('herd'): + try: + # Python 2.7 or >=3.2 + iterate = self._herdstree.iter + except AttributeError: + iterate = self._herdstree.getiterator + + for node in iterate('herd'): if node.findtext('name') == herd: return node.findtext('email') @@ -292,8 +325,13 @@ class MetaDataXML(object): if self._xml_tree is None: self._useflags = tuple() else: + try: + # Python 2.7 or >=3.2 + iterate = self._xml_tree.iter + except AttributeError: + iterate = self._xml_tree.getiterator self._useflags = tuple(_Useflag(node) \ - for node in self._xml_tree.getiterator('flag')) + for node in iterate('flag')) return self._useflags diff --git a/portage_with_autodep/pym/portage/xml/metadata.pyo 
b/portage_with_autodep/pym/portage/xml/metadata.pyo Binary files differnew file mode 100644 index 0000000..0103456 --- /dev/null +++ b/portage_with_autodep/pym/portage/xml/metadata.pyo diff --git a/portage_with_autodep/pym/portage/xpak.py b/portage_with_autodep/pym/portage/xpak.py index 7487d67..3262326 100644 --- a/portage_with_autodep/pym/portage/xpak.py +++ b/portage_with_autodep/pym/portage/xpak.py @@ -1,4 +1,4 @@ -# Copyright 2001-2010 Gentoo Foundation +# Copyright 2001-2012 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 @@ -22,11 +22,11 @@ __all__ = ['addtolist', 'decodeint', 'encodeint', 'getboth', import array import errno -import shutil import sys import portage from portage import os +from portage import shutil from portage import normalize_path from portage import _encodings from portage import _unicode_decode @@ -62,11 +62,15 @@ def encodeint(myint): """Takes a 4 byte integer and converts it into a string of 4 characters. Returns the characters in a string.""" a = array.array('B') - a.append((myint >> 24 ) & 0xff) - a.append((myint >> 16 ) & 0xff) - a.append((myint >> 8 ) & 0xff) + a.append((myint >> 24) & 0xff) + a.append((myint >> 16) & 0xff) + a.append((myint >> 8) & 0xff) a.append(myint & 0xff) - return a.tostring() + try: + # Python >= 3.2 + return a.tobytes() + except AttributeError: + return a.tostring() def decodeint(mystring): """Takes a 4 byte string and converts it into a 4 byte integer. @@ -80,12 +84,12 @@ def decodeint(mystring): myint += mystring[0] << 24 return myint -def xpak(rootdir,outfile=None): - """(rootdir,outfile) -- creates an xpak segment of the directory 'rootdir' +def xpak(rootdir, outfile=None): + """(rootdir, outfile) -- creates an xpak segment of the directory 'rootdir' and under the name 'outfile' if it is specified. 
Otherwise it returns the xpak segment.""" - mylist=[] + mylist = [] addtolist(mylist, rootdir) mylist.sort() @@ -95,7 +99,8 @@ def xpak(rootdir,outfile=None): # CONTENTS is generated during the merge process. continue x = _unicode_encode(x, encoding=_encodings['fs'], errors='strict') - mydata[x] = open(os.path.join(rootdir, x), 'rb').read() + with open(os.path.join(rootdir, x), 'rb') as f: + mydata[x] = f.read() xpak_segment = xpak_mem(mydata) if outfile: @@ -107,7 +112,7 @@ def xpak(rootdir,outfile=None): return xpak_segment def xpak_mem(mydata): - """Create an xpack segement from a map object.""" + """Create an xpack segment from a map object.""" mydata_encoded = {} for k, v in mydata.items(): @@ -120,21 +125,21 @@ def xpak_mem(mydata): del mydata_encoded indexglob = b'' - indexpos=0 + indexpos = 0 dataglob = b'' - datapos=0 + datapos = 0 for x, newglob in mydata.items(): - mydatasize=len(newglob) - indexglob=indexglob+encodeint(len(x))+x+encodeint(datapos)+encodeint(mydatasize) - indexpos=indexpos+4+len(x)+4+4 - dataglob=dataglob+newglob - datapos=datapos+mydatasize + mydatasize = len(newglob) + indexglob = indexglob + encodeint(len(x)) + x + encodeint(datapos) + encodeint(mydatasize) + indexpos = indexpos + 4 + len(x) + 4 + 4 + dataglob = dataglob + newglob + datapos = datapos + mydatasize return b'XPAKPACK' \ - + encodeint(len(indexglob)) \ - + encodeint(len(dataglob)) \ - + indexglob \ - + dataglob \ - + b'XPAKSTOP' + + encodeint(len(indexglob)) \ + + encodeint(len(dataglob)) \ + + indexglob \ + + dataglob \ + + b'XPAKSTOP' def xsplit(infile): """(infile) -- Splits the infile into two files. 
@@ -144,7 +149,7 @@ def xsplit(infile): encoding=_encodings['fs'], errors='strict') myfile = open(_unicode_encode(infile, encoding=_encodings['fs'], errors='strict'), 'rb') - mydat=myfile.read() + mydat = myfile.read() myfile.close() splits = xsplit_mem(mydat) @@ -166,35 +171,35 @@ def xsplit_mem(mydat): return None if mydat[-8:] != b'XPAKSTOP': return None - indexsize=decodeint(mydat[8:12]) - return (mydat[16:indexsize+16], mydat[indexsize+16:-8]) + indexsize = decodeint(mydat[8:12]) + return (mydat[16:indexsize + 16], mydat[indexsize + 16:-8]) def getindex(infile): """(infile) -- grabs the index segment from the infile and returns it.""" myfile = open(_unicode_encode(infile, encoding=_encodings['fs'], errors='strict'), 'rb') - myheader=myfile.read(16) + myheader = myfile.read(16) if myheader[0:8] != b'XPAKPACK': myfile.close() return - indexsize=decodeint(myheader[8:12]) - myindex=myfile.read(indexsize) + indexsize = decodeint(myheader[8:12]) + myindex = myfile.read(indexsize) myfile.close() return myindex def getboth(infile): """(infile) -- grabs the index and data segments from the infile. 
- Returns an array [indexSegment,dataSegment]""" + Returns an array [indexSegment, dataSegment]""" myfile = open(_unicode_encode(infile, encoding=_encodings['fs'], errors='strict'), 'rb') - myheader=myfile.read(16) + myheader = myfile.read(16) if myheader[0:8] != b'XPAKPACK': myfile.close() return - indexsize=decodeint(myheader[8:12]) - datasize=decodeint(myheader[12:16]) - myindex=myfile.read(indexsize) - mydata=myfile.read(datasize) + indexsize = decodeint(myheader[8:12]) + datasize = decodeint(myheader[12:16]) + myindex = myfile.read(indexsize) + mydata = myfile.read(datasize) myfile.close() return myindex, mydata @@ -205,83 +210,82 @@ def listindex(myindex): def getindex_mem(myindex): """Returns the filenames listed in the indexglob passed in.""" - myindexlen=len(myindex) - startpos=0 - myret=[] - while ((startpos+8)<myindexlen): - mytestlen=decodeint(myindex[startpos:startpos+4]) - myret=myret+[myindex[startpos+4:startpos+4+mytestlen]] - startpos=startpos+mytestlen+12 + myindexlen = len(myindex) + startpos = 0 + myret = [] + while ((startpos + 8) < myindexlen): + mytestlen = decodeint(myindex[startpos:startpos + 4]) + myret = myret + [myindex[startpos + 4:startpos + 4 + mytestlen]] + startpos = startpos + mytestlen + 12 return myret -def searchindex(myindex,myitem): - """(index,item) -- Finds the offset and length of the file 'item' in the +def searchindex(myindex, myitem): + """(index, item) -- Finds the offset and length of the file 'item' in the datasegment via the index 'index' provided.""" myitem = _unicode_encode(myitem, encoding=_encodings['repo.content'], errors='backslashreplace') - mylen=len(myitem) - myindexlen=len(myindex) - startpos=0 - while ((startpos+8)<myindexlen): - mytestlen=decodeint(myindex[startpos:startpos+4]) - if mytestlen==mylen: - if myitem==myindex[startpos+4:startpos+4+mytestlen]: + mylen = len(myitem) + myindexlen = len(myindex) + startpos = 0 + while ((startpos + 8) < myindexlen): + mytestlen = decodeint(myindex[startpos:startpos 
+ 4]) + if mytestlen == mylen: + if myitem == myindex[startpos + 4:startpos + 4 + mytestlen]: #found - datapos=decodeint(myindex[startpos+4+mytestlen:startpos+8+mytestlen]); - datalen=decodeint(myindex[startpos+8+mytestlen:startpos+12+mytestlen]); + datapos = decodeint(myindex[startpos + 4 + mytestlen:startpos + 8 + mytestlen]) + datalen = decodeint(myindex[startpos + 8 + mytestlen:startpos + 12 + mytestlen]) return datapos, datalen - startpos=startpos+mytestlen+12 + startpos = startpos + mytestlen + 12 -def getitem(myid,myitem): - myindex=myid[0] - mydata=myid[1] - myloc=searchindex(myindex,myitem) +def getitem(myid, myitem): + myindex = myid[0] + mydata = myid[1] + myloc = searchindex(myindex, myitem) if not myloc: return None - return mydata[myloc[0]:myloc[0]+myloc[1]] - -def xpand(myid,mydest): - myindex=myid[0] - mydata=myid[1] - try: - origdir=os.getcwd() - except SystemExit as e: - raise - except: - os.chdir("/") - origdir="/" - os.chdir(mydest) - myindexlen=len(myindex) - startpos=0 - while ((startpos+8)<myindexlen): - namelen=decodeint(myindex[startpos:startpos+4]) - datapos=decodeint(myindex[startpos+4+namelen:startpos+8+namelen]); - datalen=decodeint(myindex[startpos+8+namelen:startpos+12+namelen]); - myname=myindex[startpos+4:startpos+4+namelen] - dirname=os.path.dirname(myname) + return mydata[myloc[0]:myloc[0] + myloc[1]] + +def xpand(myid, mydest): + mydest = normalize_path(mydest) + os.sep + myindex = myid[0] + mydata = myid[1] + myindexlen = len(myindex) + startpos = 0 + while ((startpos + 8) < myindexlen): + namelen = decodeint(myindex[startpos:startpos + 4]) + datapos = decodeint(myindex[startpos + 4 + namelen:startpos + 8 + namelen]) + datalen = decodeint(myindex[startpos + 8 + namelen:startpos + 12 + namelen]) + myname = myindex[startpos + 4:startpos + 4 + namelen] + myname = _unicode_decode(myname, + encoding=_encodings['repo.content'], errors='replace') + filename = os.path.join(mydest, myname.lstrip(os.sep)) + filename = 
normalize_path(filename) + if not filename.startswith(mydest): + # myname contains invalid ../ component(s) + continue + dirname = os.path.dirname(filename) if dirname: if not os.path.exists(dirname): os.makedirs(dirname) - mydat = open(_unicode_encode(myname, + mydat = open(_unicode_encode(filename, encoding=_encodings['fs'], errors='strict'), 'wb') - mydat.write(mydata[datapos:datapos+datalen]) + mydat.write(mydata[datapos:datapos + datalen]) mydat.close() - startpos=startpos+namelen+12 - os.chdir(origdir) + startpos = startpos + namelen + 12 class tbz2(object): - def __init__(self,myfile): - self.file=myfile - self.filestat=None + def __init__(self, myfile): + self.file = myfile + self.filestat = None self.index = b'' - self.infosize=0 - self.xpaksize=0 - self.indexsize=None - self.datasize=None - self.indexpos=None - self.datapos=None - - def decompose(self,datadir,cleanup=1): + self.infosize = 0 + self.xpaksize = 0 + self.indexsize = None + self.datasize = None + self.indexpos = None + self.datapos = None + + def decompose(self, datadir, cleanup=1): """Alias for unpackinfo() --- Complement to recompose() but optionally deletes the destination directory. Extracts the xpak from the tbz2 into the directory provided. Raises IOError if scan() fails. @@ -293,9 +297,9 @@ class tbz2(object): if not os.path.exists(datadir): os.makedirs(datadir) return self.unpackinfo(datadir) - def compose(self,datadir,cleanup=0): + def compose(self, datadir, cleanup=0): """Alias for recompose().""" - return self.recompose(datadir,cleanup) + return self.recompose(datadir, cleanup) def recompose(self, datadir, cleanup=0, break_hardlinks=True): """Creates an xpak segment from the datadir provided, truncates the tbz2 @@ -333,9 +337,9 @@ class tbz2(object): encoding=_encodings['fs'], errors='strict'), 'ab+') if not myfile: raise IOError - myfile.seek(-self.xpaksize,2) # 0,2 or -0,2 just mean EOF. + myfile.seek(-self.xpaksize, 2) # 0,2 or -0,2 just mean EOF. 
myfile.truncate() - myfile.write(xpdata+encodeint(len(xpdata)) + b'STOP') + myfile.write(xpdata + encodeint(len(xpdata)) + b'STOP') myfile.flush() myfile.close() return 1 @@ -356,47 +360,47 @@ class tbz2(object): def scan(self): """Scans the tbz2 to locate the xpak segment and setup internal values. This function is called by relevant functions already.""" + a = None try: - mystat=os.stat(self.file) + mystat = os.stat(self.file) if self.filestat: - changed=0 + changed = 0 if mystat.st_size != self.filestat.st_size \ or mystat.st_mtime != self.filestat.st_mtime \ or mystat.st_ctime != self.filestat.st_ctime: changed = True if not changed: return 1 - self.filestat=mystat + self.filestat = mystat a = open(_unicode_encode(self.file, encoding=_encodings['fs'], errors='strict'), 'rb') - a.seek(-16,2) - trailer=a.read() - self.infosize=0 - self.xpaksize=0 + a.seek(-16, 2) + trailer = a.read() + self.infosize = 0 + self.xpaksize = 0 if trailer[-4:] != b'STOP': - a.close() return 0 if trailer[0:8] != b'XPAKSTOP': - a.close() return 0 - self.infosize=decodeint(trailer[8:12]) - self.xpaksize=self.infosize+8 - a.seek(-(self.xpaksize),2) - header=a.read(16) + self.infosize = decodeint(trailer[8:12]) + self.xpaksize = self.infosize + 8 + a.seek(-(self.xpaksize), 2) + header = a.read(16) if header[0:8] != b'XPAKPACK': - a.close() return 0 - self.indexsize=decodeint(header[8:12]) - self.datasize=decodeint(header[12:16]) - self.indexpos=a.tell() - self.index=a.read(self.indexsize) - self.datapos=a.tell() - a.close() + self.indexsize = decodeint(header[8:12]) + self.datasize = decodeint(header[12:16]) + self.indexpos = a.tell() + self.index = a.read(self.indexsize) + self.datapos = a.tell() return 2 - except SystemExit as e: + except SystemExit: raise except: return 0 + finally: + if a is not None: + a.close() def filelist(self): """Return an array of each file listed in the index.""" @@ -404,63 +408,60 @@ class tbz2(object): return None return getindex_mem(self.index) - def 
getfile(self,myfile,mydefault=None): + def getfile(self, myfile, mydefault=None): """Finds 'myfile' in the data segment and returns it.""" if not self.scan(): return None - myresult=searchindex(self.index,myfile) + myresult = searchindex(self.index, myfile) if not myresult: return mydefault a = open(_unicode_encode(self.file, encoding=_encodings['fs'], errors='strict'), 'rb') - a.seek(self.datapos+myresult[0],0) - myreturn=a.read(myresult[1]) + a.seek(self.datapos + myresult[0], 0) + myreturn = a.read(myresult[1]) a.close() return myreturn - def getelements(self,myfile): + def getelements(self, myfile): """A split/array representation of tbz2.getfile()""" - mydat=self.getfile(myfile) + mydat = self.getfile(myfile) if not mydat: return [] return mydat.split() - def unpackinfo(self,mydest): + def unpackinfo(self, mydest): """Unpacks all the files from the dataSegment into 'mydest'.""" if not self.scan(): return 0 - try: - origdir=os.getcwd() - except SystemExit as e: - raise - except: - os.chdir("/") - origdir="/" + mydest = normalize_path(mydest) + os.sep a = open(_unicode_encode(self.file, encoding=_encodings['fs'], errors='strict'), 'rb') if not os.path.exists(mydest): os.makedirs(mydest) - os.chdir(mydest) - startpos=0 - while ((startpos+8)<self.indexsize): - namelen=decodeint(self.index[startpos:startpos+4]) - datapos=decodeint(self.index[startpos+4+namelen:startpos+8+namelen]); - datalen=decodeint(self.index[startpos+8+namelen:startpos+12+namelen]); - myname=self.index[startpos+4:startpos+4+namelen] + startpos = 0 + while ((startpos + 8) < self.indexsize): + namelen = decodeint(self.index[startpos:startpos + 4]) + datapos = decodeint(self.index[startpos + 4 + namelen:startpos + 8 + namelen]) + datalen = decodeint(self.index[startpos + 8 + namelen:startpos + 12 + namelen]) + myname = self.index[startpos + 4:startpos + 4 + namelen] myname = _unicode_decode(myname, encoding=_encodings['repo.content'], errors='replace') - dirname=os.path.dirname(myname) + filename 
= os.path.join(mydest, myname.lstrip(os.sep)) + filename = normalize_path(filename) + if not filename.startswith(mydest): + # myname contains invalid ../ component(s) + continue + dirname = os.path.dirname(filename) if dirname: if not os.path.exists(dirname): os.makedirs(dirname) - mydat = open(_unicode_encode(myname, + mydat = open(_unicode_encode(filename, encoding=_encodings['fs'], errors='strict'), 'wb') - a.seek(self.datapos+datapos) + a.seek(self.datapos + datapos) mydat.write(a.read(datalen)) mydat.close() - startpos=startpos+namelen+12 + startpos = startpos + namelen + 12 a.close() - os.chdir(origdir) return 1 def get_data(self): @@ -470,28 +471,27 @@ class tbz2(object): a = open(_unicode_encode(self.file, encoding=_encodings['fs'], errors='strict'), 'rb') mydata = {} - startpos=0 - while ((startpos+8)<self.indexsize): - namelen=decodeint(self.index[startpos:startpos+4]) - datapos=decodeint(self.index[startpos+4+namelen:startpos+8+namelen]); - datalen=decodeint(self.index[startpos+8+namelen:startpos+12+namelen]); - myname=self.index[startpos+4:startpos+4+namelen] - a.seek(self.datapos+datapos) + startpos = 0 + while ((startpos + 8) < self.indexsize): + namelen = decodeint(self.index[startpos:startpos + 4]) + datapos = decodeint(self.index[startpos + 4 + namelen:startpos + 8 + namelen]) + datalen = decodeint(self.index[startpos + 8 + namelen:startpos + 12 + namelen]) + myname = self.index[startpos + 4:startpos + 4 + namelen] + a.seek(self.datapos + datapos) mydata[myname] = a.read(datalen) - startpos=startpos+namelen+12 + startpos = startpos + namelen + 12 a.close() return mydata def getboth(self): - """Returns an array [indexSegment,dataSegment]""" + """Returns an array [indexSegment, dataSegment]""" if not self.scan(): return None a = open(_unicode_encode(self.file, encoding=_encodings['fs'], errors='strict'), 'rb') a.seek(self.datapos) - mydata =a.read(self.datasize) + mydata = a.read(self.datasize) a.close() return self.index, mydata - diff --git 
a/portage_with_autodep/pym/portage/xpak.pyo b/portage_with_autodep/pym/portage/xpak.pyo Binary files differnew file mode 100644 index 0000000..361a709 --- /dev/null +++ b/portage_with_autodep/pym/portage/xpak.pyo |