author     Benda Xu <heroxbd@gentoo.org>  2017-01-07 23:27:50 +0900
committer  Benda Xu <heroxbd@gentoo.org>  2017-01-07 23:27:50 +0900
commit     7003d2b7a7b5f93d3eefc996c2863d5decbf2b39 (patch)
tree       d219a3e8c1e779ed070dc079c70894a835ac392b
parent     sys-apps/portage: prefixify 2.3.3. (diff)
track.
toolchain.eclass refresh before commit.
-rw-r--r--  eclass/R-packages.eclass      51
-rw-r--r--  eclass/java-utils-2.eclass  2888
-rw-r--r--  eclass/linux-info.eclass     925
-rw-r--r--  eclass/toolchain.eclass      106
4 files changed, 3930 insertions, 40 deletions
diff --git a/eclass/R-packages.eclass b/eclass/R-packages.eclass
new file mode 100644
index 0000000..cea7eba
--- /dev/null
+++ b/eclass/R-packages.eclass
@@ -0,0 +1,51 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Header: $
+
+inherit eutils
+
+EXPORT_FUNCTIONS src_unpack src_prepare src_compile src_install pkg_postinst
+
+SLOT="0"
+KEYWORDS="~amd64 ~x86"
+IUSE="byte-compile"
+
+DEPEND="dev-lang/R"
+RDEPEND="${DEPEND}"
+
+S="${WORKDIR}"
+
+R-packages_src_unpack() {
+ unpack ${A}
+ mv ${PN//_/.} ${P}
+}
+
+R-packages_src_prepare() {
+ cd ${P}
+ epatch_user
+}
+
+R-packages_src_compile() {
+ einfo "R CMD INSTALL ${S}/${P} -l . $(use byte-compile && echo --byte-compile)"
+ MAKEFLAGS="CFLAGS=${CFLAGS// /\\ } CXXFLAGS=${CXXFLAGS// /\\ } FFLAGS=${FFLAGS// /\\ } FCFLAGS=${FCFLAGS// /\\ } LDFLAGS=${LDFLAGS// /\\ }" \
+ R CMD INSTALL ${S}/${P} -l . $(use byte-compile && echo --byte-compile)
+}
+
+R-packages_src_install() {
+ insinto /usr/$(get_libdir)/R/site-library
+ doins -r ${PN//_/.}
+}
+
+R-packages_pkg_postinst() {
+ if [[ "${_UNRESOLVABLE_PACKAGES:-}" ]]; then
+ # _UNRESOLVABLE_PACKAGES is only set if it has more than zero items
+ local _max=${#_UNRESOLVABLE_PACKAGES[*]} i=
+
+ einfo "Dependency(-ies):"
+ for (( i=0; i<${_max}; i++ )); do
+ einfo "- ${_UNRESOLVABLE_PACKAGES[$i]}"
+ done
+ einfo 'are (is) suggested by upstream but could not be found.'
+ einfo 'Please install it manually from the R interpreter if you need it.'
+ fi
+}
diff --git a/eclass/java-utils-2.eclass b/eclass/java-utils-2.eclass
new file mode 100644
index 0000000..b4246b3
--- /dev/null
+++ b/eclass/java-utils-2.eclass
@@ -0,0 +1,2888 @@
+# Copyright 2004-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: java-utils-2.eclass
+# @MAINTAINER:
+# java@gentoo.org
+# @AUTHOR:
+# Thomas Matthijs <axxo@gentoo.org>, Karl Trygve Kalleberg <karltk@gentoo.org>
+# @BLURB: Base eclass for Java packages
+# @DESCRIPTION:
+# This eclass provides functionality which is used by java-pkg-2.eclass,
+# java-pkg-opt-2.eclass and java-ant-2 eclass, as well as from ebuilds.
+#
+# This eclass should not be inherited directly from an ebuild. Instead,
+# you should inherit java-pkg-2 for Java packages or java-pkg-opt-2 for packages
+# that have optional Java support. In addition you can inherit java-ant-2 for
+# Ant-based packages.
+inherit eutils versionator multilib
+
+IUSE="elibc_FreeBSD"
+
+# Make sure we use java-config-2
+export WANT_JAVA_CONFIG="2"
+
+# @VARIABLE: JAVA_PKG_PORTAGE_DEP
+# @INTERNAL
+# @DESCRIPTION:
+# The version of portage we need to function properly. Previously it was
+# portage with phase hooks support but now we use a version with proper env
+# saving. For EAPI 2 we have new enough stuff so let's have cleaner deps.
+has "${EAPI}" 0 1 && JAVA_PKG_PORTAGE_DEP=">=sys-apps/portage-2.1.2.7"
+
+# @VARIABLE: JAVA_PKG_E_DEPEND
+# @INTERNAL
+# @DESCRIPTION:
+# This is a convenience variable to be used from the other java eclasses. This is
+# the version of java-config we want to use. Usually the latest stable version
+# so that ebuilds can use new features without depending on specific versions.
+JAVA_PKG_E_DEPEND=">=dev-java/java-config-2.2.0-r3 ${JAVA_PKG_PORTAGE_DEP}"
+has source ${JAVA_PKG_IUSE} && JAVA_PKG_E_DEPEND="${JAVA_PKG_E_DEPEND} source? ( app-arch/zip )"
+
+# @ECLASS-VARIABLE: JAVA_PKG_WANT_BOOTCLASSPATH
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The version of bootclasspath the package needs to work. Translates to a proper
+# dependency. The bootclasspath can then be obtained by java-ant_rewrite-bootclasspath
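+#
+# A minimal usage sketch; "1.5" is the only value handled by the check below:
+# @CODE
+# JAVA_PKG_WANT_BOOTCLASSPATH="1.5"
+# @CODE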
+if [[ -n "${JAVA_PKG_WANT_BOOTCLASSPATH}" ]]; then
+ if [[ "${JAVA_PKG_WANT_BOOTCLASSPATH}" == "1.5" ]]; then
+ JAVA_PKG_E_DEPEND="${JAVA_PKG_E_DEPEND} >=dev-java/gnu-classpath-0.98-r1:0.98"
+ else
+ eerror "Unknown value of JAVA_PKG_WANT_BOOTCLASSPATH"
+ # since die in global scope doesn't work, this will make repoman fail
+ JAVA_PKG_E_DEPEND="${JAVA_PKG_E_DEPEND} BAD_JAVA_PKG_WANT_BOOTCLASSPATH"
+ fi
+fi
+
+# @ECLASS-VARIABLE: JAVA_PKG_ALLOW_VM_CHANGE
+# @DESCRIPTION:
+# Allow this eclass to change the active VM?
+# If your system VM isn't sufficient for the package, the build will fail
+# instead of trying to switch to another VM.
+#
+# Overriding the default can be useful for testing specific VMs locally, but
+# should not be used in the final ebuild.
+JAVA_PKG_ALLOW_VM_CHANGE=${JAVA_PKG_ALLOW_VM_CHANGE:="yes"}
+
+# @ECLASS-VARIABLE: JAVA_PKG_FORCE_VM
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Explicitly set a particular VM to use. If it's not valid, it'll fall back to
+# whatever /etc/java-config-2/build/jdk.conf would elect to use.
+#
+# Should only be used for testing and debugging.
+#
+# Example: use sun-jdk-1.5 to emerge foo:
+# @CODE
+# JAVA_PKG_FORCE_VM=sun-jdk-1.5 emerge foo
+# @CODE
+
+# @ECLASS-VARIABLE: JAVA_PKG_WANT_BUILD_VM
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# A list of VM handles to choose a build VM from. If the list contains the
+# currently active VM use that one, otherwise step through the list till a
+# usable/installed VM is found.
+#
+# This allows using an explicit list of JDKs in DEPEND instead of a virtual.
+# Users of this variable must make sure at least one of the listed handles is
+# covered by DEPEND.
+# Requires JAVA_PKG_WANT_SOURCE and JAVA_PKG_WANT_TARGET to be set as well.
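+#
+# Illustrative sketch (the VM handles shown are hypothetical examples):
+# @CODE
+# JAVA_PKG_WANT_BUILD_VM="oracle-jdk-bin-1.8 icedtea-8 icedtea-bin-8"
+# JAVA_PKG_WANT_SOURCE="1.8"
+# JAVA_PKG_WANT_TARGET="1.8"
+# @CODE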
+
+# @ECLASS-VARIABLE: JAVA_PKG_WANT_SOURCE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Specify a non-standard Java source version for compilation (via javac -source
+# parameter or Ant equivalent via build.xml rewriting done by java-ant-2 eclass).
+# Normally this is determined from the jdk version specified in DEPEND.
+# See java-pkg_get-source function below.
+#
+# Should generally only be used for testing and debugging.
+#
+# Use 1.4 source to emerge baz
+# @CODE
+# JAVA_PKG_WANT_SOURCE=1.4 emerge baz
+# @CODE
+
+# @ECLASS-VARIABLE: JAVA_PKG_WANT_TARGET
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Same as JAVA_PKG_WANT_SOURCE (see above) but for javac -target parameter,
+# which affects the version of generated bytecode.
+# Normally this is determined from the jre/jdk version specified in RDEPEND.
+# See java-pkg_get-target function below.
+#
+# Should generally only be used for testing and debugging.
+#
+# emerge bar to be compatible with 1.3
+# @CODE
+# JAVA_PKG_WANT_TARGET=1.3 emerge bar
+# @CODE
+
+# @ECLASS-VARIABLE: JAVA_PKG_DEBUG
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# A variable to be set to "yes", "y", or any other non-empty string. When set,
+# verbosity across the java eclasses is increased and extra logging is
+# displayed.
+# @CODE
+# JAVA_PKG_DEBUG="yes"
+# @CODE
+
+# @ECLASS-VARIABLE: JAVA_RM_FILES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An array containing a list of files to remove. If defined, this array will be
+# automatically handed over to java-pkg_rm_files for processing during the
+# src_prepare phase.
+#
+# @CODE
+# JAVA_RM_FILES=(
+# path/to/File1.java
+# DELETEME.txt
+# )
+# @CODE
+
+# @VARIABLE: JAVA_PKG_COMPILER_DIR
+# @INTERNAL
+# @DESCRIPTION:
+# Directory where compiler settings are saved, without trailing slash.
+# You probably shouldn't touch this variable except local testing.
+JAVA_PKG_COMPILER_DIR=${JAVA_PKG_COMPILER_DIR:="/usr/share/java-config-2/compiler"}
+
+# @VARIABLE: JAVA_PKG_COMPILERS_CONF
+# @INTERNAL
+# @DESCRIPTION:
+# Path to file containing information about which compiler to use.
+# Can be overloaded, but it should be overloaded only for local testing.
+JAVA_PKG_COMPILERS_CONF=${JAVA_PKG_COMPILERS_CONF:="/etc/java-config-2/build/compilers.conf"}
+
+# @ECLASS-VARIABLE: JAVA_PKG_FORCE_COMPILER
+# @INTERNAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Explicitly set a list of compilers to choose from. This is normally read from
+# JAVA_PKG_COMPILERS_CONF.
+#
+# Useful for local testing.
+#
+# Use jikes and javac, in that order
+# @CODE
+# JAVA_PKG_FORCE_COMPILER="jikes javac"
+# @CODE
+
+# @ECLASS-VARIABLE: JAVA_PKG_FORCE_ANT_TASKS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An $IFS separated list of ant tasks. Can be set in environment before calling
+# emerge/ebuild to override variables set in ebuild, mainly for testing before
+# putting the resulting (WANT_)ANT_TASKS into ebuild. Affects only ANT_TASKS in
+# eant() call, not the dependencies specified in WANT_ANT_TASKS.
+#
+# @CODE
+# JAVA_PKG_FORCE_ANT_TASKS="ant-junit ant-trax" \
+# ebuild foo.ebuild compile
+# @CODE
+
+# TODO document me
+JAVA_PKG_QA_VIOLATIONS=0
+
+# @FUNCTION: java-pkg_doexamples
+# @USAGE: [--subdir <subdir>] <file1/dir1> [<file2> ...]
+# @DESCRIPTION:
+# Installs given arguments to /usr/share/doc/${PF}/examples
+# If you give it only one parameter and it is a directory it will install
+# everything in that directory to the examples directory.
+#
+# @CODE
+# Parameters:
+# --subdir - If the examples need a certain directory structure
+# $* - list of files to install
+#
+# Examples:
+# java-pkg_doexamples demo
+# java-pkg_doexamples demo/* examples/*
+# @CODE
+java-pkg_doexamples() {
+ debug-print-function ${FUNCNAME} $*
+
+ [[ ${#} -lt 1 ]] && die "At least one argument needed"
+
+ java-pkg_check-phase install
+ java-pkg_init_paths_
+
+ local dest=/usr/share/doc/${PF}/examples
+ if [[ ${1} == --subdir ]]; then
+ local dest=${dest}/${2}
+ dodir ${dest}
+ shift 2
+ fi
+
+ if [[ ${#} = 1 && -d ${1} ]]; then
+ ( # don't want to pollute the calling env
+ insinto "${dest}"
+ doins -r ${1}/*
+ ) || die "Installing examples failed"
+ else
+ ( # don't want to pollute the calling env
+ insinto "${dest}"
+ doins -r "$@"
+ ) || die "Installing examples failed"
+ fi
+
+ # Let's make a symlink to the directory we have everything else under
+ dosym "${dest}" "${JAVA_PKG_SHAREPATH}/examples" || die
+}
+
+# @FUNCTION: java-pkg_addres
+# @USAGE: <jar> <dir> [<find arguments> ...]
+# @DESCRIPTION:
+# Adds resource files to an existing jar.
+# It is important that the directory given is actually the root of the
+# corresponding resource tree. The target directory as well as
+# sources.lst, MANIFEST.MF, *.class, *.jar, and *.java files are
+# automatically excluded. Symlinks are always followed. Additional
+# arguments are passed through to find.
+#
+# @CODE
+# java-pkg_addres ${PN}.jar resources ! -name "*.html"
+# @CODE
+#
+# @param $1 - jar file
+# @param $2 - resource tree directory
+# @param $* - arguments to pass to find
+java-pkg_addres() {
+ debug-print-function ${FUNCNAME} $*
+
+ [[ ${#} -lt 2 ]] && die "at least two arguments needed"
+
+ local jar=$(realpath "$1" || die "realpath $1 failed")
+ local dir="$2"
+ shift 2
+
+ pushd "${dir}" > /dev/null || die "pushd ${dir} failed"
+ find -L -type f ! -path "./target/*" ! -path "./sources.lst" ! -name "MANIFEST.MF" ! -regex ".*\.\(class\|jar\|java\)" "${@}" -print0 | xargs -r0 jar uf "${jar}" || die "jar failed"
+ popd > /dev/null || die "popd failed"
+}
+
+# @FUNCTION: java-pkg_rm_files
+# @USAGE: java-pkg_rm_files File1.java File2.java ...
+# @DESCRIPTION:
+# Remove unneeded files in ${S}.
+#
+# Every now and then, you'll run into situations whereby a file needs removing,
+# be it a unit test or a regular java class.
+#
+# You can use this function by either:
+# - calling it yourself in java_prepare() and feeding java-pkg_rm_files with
+# the list of files you wish to remove.
+# - defining an array in the ebuild named JAVA_RM_FILES with the list of files
+# you wish to remove.
+#
+# Both ways work and it is left to the developer's preference. If the
+# JAVA_RM_FILES array is defined, it will be automatically handed over to
+# java-pkg_rm_files during the src_prepare phase.
+#
+# See java-utils-2_src_prepare.
+#
+# @CODE
+# java-pkg_rm_files File1.java File2.java
+# @CODE
+#
+# @param $* - list of files to remove.
+java-pkg_rm_files() {
+ debug-print-function ${FUNCNAME} $*
+ local IFS=$'\n'
+ for filename in "$@"; do
+ [[ ! -f "${filename}" ]] && die "${filename} is not a regular file. Aborting."
+ einfo "Removing unneeded file ${filename}"
+ rm -f "${S}/${filename}" || die "cannot remove ${filename}"
+ eend $?
+ done
+}
+
+# @FUNCTION: java-pkg_dojar
+# @USAGE: <jar1> [<jar2> ...]
+# @DESCRIPTION:
+# Installs any number of jars.
+# Jars will be installed into /usr/share/${PN}(-${SLOT})/lib/ by default.
+# You can use java-pkg_jarinto to change this path.
+# You should never install a jar with a package version in the filename.
+# Instead, use java-pkg_newjar defined below.
+#
+# @CODE
+# java-pkg_dojar dist/${PN}.jar dist/${PN}-core.jar
+# @CODE
+#
+# @param $* - list of jars to install
+java-pkg_dojar() {
+ debug-print-function ${FUNCNAME} $*
+
+ [[ ${#} -lt 1 ]] && die "At least one argument needed"
+
+ java-pkg_check-phase install
+ java-pkg_init_paths_
+
+ # Create JARDEST if it doesn't exist
+ dodir ${JAVA_PKG_JARDEST}
+
+ local jar
+ # for each jar
+ for jar in "${@}"; do
+ local jar_basename=$(basename "${jar}")
+
+ java-pkg_check-versioned-jar ${jar_basename}
+
+ # check if it exists
+ if [[ -e "${jar}" ]] ; then
+ # Don't overwrite if jar has already been installed with the same
+ # name
+ local dest="${ED}${JAVA_PKG_JARDEST}/${jar_basename}"
+ if [[ -e "${dest}" ]]; then
+ ewarn "Overwriting ${dest}"
+ fi
+
+ # install it into JARDEST if it's a non-symlink
+ if [[ ! -L "${jar}" ]] ; then
+ # but first check the class version when in strict mode.
+ is-java-strict && java-pkg_verify-classes "${jar}"
+
+ (
+ insinto "${JAVA_PKG_JARDEST}"
+ doins "${jar}"
+ ) || die "failed to install ${jar}"
+ java-pkg_append_ JAVA_PKG_CLASSPATH "${EPREFIX}/${JAVA_PKG_JARDEST}/${jar_basename}"
+ debug-print "installed ${jar} to ${ED}${JAVA_PKG_JARDEST}"
+ # make a symlink to the original jar if it's symlink
+ else
+ # TODO use dosym, once we find something that could use it
+ # -nichoj
+ ln -s "$(readlink "${jar}")" "${ED}${JAVA_PKG_JARDEST}/${jar_basename}"
+ debug-print "${jar} is a symlink, linking accordingly"
+ fi
+ else
+ die "${jar} does not exist"
+ fi
+ done
+
+ # Extra logging if enabled.
+ if [[ -n ${JAVA_PKG_DEBUG} ]]; then
+ einfo "Verbose logging for \"${FUNCNAME}\" function"
+ einfo "Jar file(s) destination: ${JAVA_PKG_JARDEST}"
+ einfo "Jar file(s) created: ${@}"
+ einfo "Complete command:"
+ einfo "${FUNCNAME} ${@}"
+ fi
+
+ java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_regjar
+# @USAGE: </path/to/installed/jar>
+# @DESCRIPTION:
+# Records an already installed (in ${D}) jar in the package.env
+# This would mostly be used if the package has make or a custom script to
+# install things.
+#
+# WARNING:
+# if you want to use shell expansion, you have to use ${D}/... as the for loop
+# in this function will not be able to expand the path itself. Here's an example:
+#
+# @CODE
+# java-pkg_regjar ${D}/opt/my-java/lib/*.jar
+# @CODE
+#
+
+# TODO should we be making sure the jar is present on ${D} or wherever?
+java-pkg_regjar() {
+ debug-print-function ${FUNCNAME} $*
+
+ java-pkg_check-phase install
+
+ [[ ${#} -lt 1 ]] && die "at least one argument needed"
+
+ java-pkg_init_paths_
+
+ local jar jar_dir jar_file
+ for jar in "${@}"; do
+ # TODO use java-pkg_check-versioned-jar
+ if [[ -e "${jar}" || -e "${D}${jar}" ]]; then
+ [[ -d "${jar}" || -d "${D}${jar}" ]] \
+ && die "Called ${FUNCNAME} on a directory $*"
+
+ # check that the class version is correct when in strict mode
+ is-java-strict && java-pkg_verify-classes "${jar}"
+
+ # nelchael: we should strip ${D} in this case too, here's why:
+ # imagine such call:
+ # java-pkg_regjar ${D}/opt/java/*.jar
+ # such call will fall into this case (-e ${jar}) and will
+ # record paths with ${D} in package.env
+ java-pkg_append_ JAVA_PKG_CLASSPATH "${jar#${D}}"
+ else
+ if [[ ${jar} = *\** ]]; then
+ eerror "The argument ${jar} to ${FUNCNAME}"
+ eerror "has * in it. If you want it to glob in"
+ eerror '${D} add ${D} to the argument.'
+ fi
+ debug-print "${jar} or ${D}${jar} not found"
+ die "${jar} does not exist"
+ fi
+ done
+
+ java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_newjar
+# @USAGE: <path/to/oldname.jar> [<newname.jar>]
+# @DESCRIPTION:
+# Installs a jar with a new name (defaults to $PN.jar)
+#
+# For example, installs a versioned jar without the version
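+#
+# A minimal usage sketch (the jar path is hypothetical):
+# @CODE
+# java-pkg_newjar dist/${P}.jar
+# @CODE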
+java-pkg_newjar() {
+ debug-print-function ${FUNCNAME} $*
+
+ local original_jar="${1}"
+ local new_jar="${2:-${PN}.jar}"
+ local new_jar_dest="${T}/${new_jar}"
+
+ [[ -z ${original_jar} ]] && die "Must specify a jar to install"
+ [[ ! -f ${original_jar} ]] \
+ && die "${original_jar} does not exist or is not a file!"
+
+ rm -f "${new_jar_dest}" || die "Failed to remove ${new_jar_dest}"
+ cp "${original_jar}" "${new_jar_dest}" \
+ || die "Failed to copy ${original_jar} to ${new_jar_dest}"
+ java-pkg_dojar "${new_jar_dest}"
+}
+
+# @FUNCTION: java-pkg_addcp
+# @USAGE: <classpath>
+# @DESCRIPTION:
+# Add something to the package's classpath. For jars, you should use dojar,
+# newjar, or regjar. This is typically used to add directories to the classpath.
+# The parameters of this function are appended to JAVA_PKG_CLASSPATH
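+#
+# A minimal usage sketch (the directory shown is hypothetical):
+# @CODE
+# java-pkg_addcp "${EPREFIX}/usr/share/${PN}/classes"
+# @CODE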
+java-pkg_addcp() {
+ java-pkg_append_ JAVA_PKG_CLASSPATH "${@}"
+ java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_doso
+# @USAGE: <path/to/file1.so> [...]
+# @DESCRIPTION:
+# Installs any number of JNI libraries
+# They will be installed into /usr/lib by default, but java-pkg_sointo
+# can be used to change this path.
+#
+# @CODE
+# Example:
+# java-pkg_doso *.so
+# @CODE
+java-pkg_doso() {
+ debug-print-function ${FUNCNAME} $*
+
+ java-pkg_check-phase install
+
+ [[ ${#} -lt 1 ]] && die "${FUNCNAME} requires at least one argument"
+
+ java-pkg_init_paths_
+
+ local lib
+ # for each lib
+ for lib in "$@" ; do
+ # if the lib exists...
+ if [[ -e "${lib}" ]] ; then
+ # install if it isn't a symlink
+ if [[ ! -L "${lib}" ]] ; then
+ (
+ insinto "${JAVA_PKG_LIBDEST}"
+ insopts -m0755
+ doins "${lib}"
+ ) || die "failed to install ${lib}"
+ java-pkg_append_ JAVA_PKG_LIBRARY "${JAVA_PKG_LIBDEST}"
+ debug-print "Installing ${lib} to ${JAVA_PKG_LIBDEST}"
+ # otherwise make a symlink to the symlink's origin
+ else
+ dosym "$(readlink "${lib}")" "${JAVA_PKG_LIBDEST}/${lib##*/}"
+ debug-print "${lib} is a symlink, linking accordantly"
+ fi
+ # otherwise die
+ else
+ die "${lib} does not exist"
+ fi
+ done
+
+ java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_regso
+# @USAGE: <file1.so> [...]
+# @DESCRIPTION:
+# Registers an already installed JNI library in package.env.
+#
+# @CODE
+# Parameters:
+# $@ - JNI libraries to register
+#
+# Example:
+# java-pkg_regso *.so /path/*.so
+# @CODE
+java-pkg_regso() {
+ debug-print-function ${FUNCNAME} $*
+
+ java-pkg_check-phase install
+
+ [[ ${#} -lt 1 ]] && die "${FUNCNAME} requires at least one argument"
+
+ java-pkg_init_paths_
+
+ local lib target_dir
+ for lib in "$@" ; do
+ # Check the absolute path of the lib
+ if [[ -e "${lib}" ]] ; then
+ target_dir="$(java-pkg_expand_dir_ ${lib})"
+ java-pkg_append_ JAVA_PKG_LIBRARY "/${target_dir#${D}}"
+ # Check the path of the lib relative to ${D}
+ elif [[ -e "${D}${lib}" ]]; then
+ target_dir="$(java-pkg_expand_dir_ ${D}${lib})"
+ java-pkg_append_ JAVA_PKG_LIBRARY "${target_dir}"
+ else
+ die "${lib} does not exist"
+ fi
+ done
+
+ java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_jarinto
+# @USAGE: </path/to/install/jars/into>
+# @DESCRIPTION:
+# Changes the path jars are installed into via subsequent java-pkg_dojar calls.
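+#
+# A minimal usage sketch (the destination path is hypothetical):
+# @CODE
+# java-pkg_jarinto /usr/share/${PN}/lib
+# @CODE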
+java-pkg_jarinto() {
+ debug-print-function ${FUNCNAME} $*
+
+ JAVA_PKG_JARDEST="${1}"
+}
+
+# @FUNCTION: java-pkg_sointo
+# @USAGE: </path/to/install/sofiles/into>
+# @DESCRIPTION:
+# Changes the path that JNI libraries are installed into via subsequent
+# java-pkg_doso calls.
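+#
+# A minimal usage sketch (the destination path is hypothetical):
+# @CODE
+# java-pkg_sointo /usr/$(get_libdir)/${PN}
+# @CODE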
+java-pkg_sointo() {
+ debug-print-function ${FUNCNAME} $*
+
+ JAVA_PKG_LIBDEST="${1}"
+}
+
+# @FUNCTION: java-pkg_dohtml
+# @USAGE: <path/to/javadoc/documentation> [...]
+# @DESCRIPTION:
+# Install Javadoc HTML documentation. Usage of java-pkg_dojavadoc is preferred.
+#
+# @CODE
+# java-pkg_dohtml dist/docs/
+# @CODE
+java-pkg_dohtml() {
+ debug-print-function ${FUNCNAME} $*
+
+ [[ ${#} -lt 1 ]] && die "At least one argument required for ${FUNCNAME}"
+
+ # from /usr/lib/portage/bin/dohtml -h
+ # -f Set list of allowed extensionless file names.
+ dohtml -f package-list "$@"
+
+ # this probably shouldn't be here but it provides
+ # a reasonable way to catch docs for all of the
+ # old ebuilds.
+ java-pkg_recordjavadoc
+}
+
+# @FUNCTION: java-pkg_dojavadoc
+# @USAGE: [--symlink destination] <path/to/javadocs/root>
+# @DESCRIPTION:
+# Installs javadoc documentation. This should be controlled by the doc use flag.
+#
+# @CODE
+# Parameters:
+# $1: optional; --symlink <name> creates a symlink with the given name to the
+# installed api documentation, useful for html documentation bundles.
+# $2: The javadoc root directory.
+#
+# Examples:
+# java-pkg_dojavadoc docs/api
+# java-pkg_dojavadoc --symlink apidocs docs/api
+# @CODE
+java-pkg_dojavadoc() {
+ debug-print-function ${FUNCNAME} $*
+
+ # For html documentation bundles that link to Javadoc
+ local symlink
+ if [[ ${1} = --symlink ]]; then
+ symlink=${2}
+ shift 2
+ fi
+
+ local dir="$1"
+ local dest=/usr/share/doc/${PF}/html
+
+ # QA checks
+
+ java-pkg_check-phase install
+ java-pkg_init_paths_
+
+ [[ -z "${dir}" ]] && die "Must specify a directory!"
+ [[ ! -d "${dir}" ]] && die "${dir} does not exist, or isn't a directory!"
+ if [[ ! -e "${dir}/index.html" ]]; then
+ local msg="No index.html in javadoc directory"
+ ewarn "${msg}"
+ is-java-strict && die "${msg}"
+ fi
+
+ if [[ -e ${D}/${dest}/api ]]; then
+ eerror "${dest} already exists. Will not overwrite."
+ die "${dest}"
+ fi
+
+ # Renaming to match our directory layout
+
+ local dir_to_install="${dir}"
+ if [[ "$(basename "${dir}")" != "api" ]]; then
+ dir_to_install="${T}/api"
+ # TODO use doins
+ cp -r "${dir}" "${dir_to_install}" || die "cp failed"
+ fi
+
+ # Actual installation
+ java-pkg_dohtml -r "${dir_to_install}"
+
+ # Let's make a symlink to the directory we have everything else under
+ dosym ${dest}/api "${JAVA_PKG_SHAREPATH}/api" || die
+
+ if [[ ${symlink} ]]; then
+ debug-print "symlinking ${dest}/{api,${symlink}}"
+ dosym ${dest}/{api,${symlink}} || die
+ fi
+
+ # Extra logging if enabled.
+ if [[ -n ${JAVA_PKG_DEBUG} ]]; then
+ einfo "Verbose logging for \"${FUNCNAME}\" function"
+ einfo "Documentation destination: ${dest}"
+ einfo "Directory to install: ${dir_to_install}"
+ einfo "Complete command:"
+ einfo "${FUNCNAME} ${@}"
+ fi
+}
+
+# @FUNCTION: java-pkg_dosrc
+# @USAGE: <path/to/sources> [...]
+# @DESCRIPTION:
+# Installs a zip containing the source for a package, so it can be used
+# from IDEs like Eclipse and NetBeans.
+# Ebuild needs to DEPEND on app-arch/zip to use this. It also should be controlled by USE=source.
+#
+# @CODE
+# Example:
+# java-pkg_dosrc src/*
+# @CODE
+
+# TODO change so it the arguments it takes are the base directories containing
+# source -nichoj
+#
+# TODO should we be able to handle multiple calls to dosrc? -nichoj
+#
+# TODO maybe we can take an existing zip/jar? -nichoj
+#
+# FIXME apparently this fails if you give it an empty directory
+java-pkg_dosrc() {
+ debug-print-function ${FUNCNAME} $*
+
+ [ ${#} -lt 1 ] && die "At least one argument needed"
+
+ java-pkg_check-phase install
+
+ [[ ${#} -lt 1 ]] && die "At least one argument needed"
+
+ if ! [[ ${DEPEND} = *app-arch/zip* ]]; then
+ local msg="${FUNCNAME} called without app-arch/zip in DEPEND"
+ java-pkg_announce-qa-violation ${msg}
+ fi
+
+ java-pkg_init_paths_
+
+ local zip_name="${PN}-src.zip"
+ local zip_path="${T}/${zip_name}"
+ local dir
+ for dir in "${@}"; do
+ local dir_parent=$(dirname "${dir}")
+ local dir_name=$(basename "${dir}")
+ pushd ${dir_parent} > /dev/null || die "problem entering ${dir_parent}"
+ zip -q -r ${zip_path} ${dir_name} -i '*.java'
+ local result=$?
+ # 12 means zip has nothing to do
+ if [[ ${result} != 12 && ${result} != 0 ]]; then
+ die "failed to zip ${dir_name}"
+ fi
+ popd >/dev/null || die
+ done
+
+ # Install the zip
+ (
+ insinto "${JAVA_PKG_SOURCESPATH}"
+ doins ${zip_path}
+ ) || die "Failed to install source"
+
+ JAVA_SOURCES="${JAVA_PKG_SOURCESPATH}/${zip_name}"
+
+ # Extra logging if enabled.
+ if [[ -n ${JAVA_PKG_DEBUG} ]]; then
+ einfo "Verbose logging for \"${FUNCNAME}\" function"
+ einfo "Zip filename created: ${zip_name}"
+ einfo "Zip file destination: ${JAVA_PKG_SOURCESPATH}"
+ einfo "Directories zipped: ${@}"
+ einfo "Complete command:"
+ einfo "${FUNCNAME} ${@}"
+ fi
+
+ java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_dolauncher
+# @USAGE: <filename> [options]
+# @DESCRIPTION:
+# Make a wrapper script to launch/start this package.
+# If necessary, the wrapper will switch to the appropriate VM.
+#
+# Can be called without parameters if the package installs only one jar
+# that has the Main-class attribute set. The wrapper will be named ${PN}.
+#
+# @CODE
+# Parameters:
+# $1 - filename of launcher to create
+# $2 - options, as follows:
+# --main the.main.class.to.start
+# --jar /the/jar/to/launch.jar or just <name>.jar
+# --java_args 'Extra arguments to pass to java'
+# --pkg_args 'Extra arguments to pass to the package'
+# --pwd Directory the launcher changes to before executing java
+# -into Directory to install the launcher to, instead of /usr/bin
+# -pre Prepend contents of this file to the launcher
+# @CODE
+java-pkg_dolauncher() {
+ debug-print-function ${FUNCNAME} $*
+
+ java-pkg_check-phase install
+ java-pkg_init_paths_
+
+ if [[ ${#} = 0 ]]; then
+ local name="${PN}"
+ else
+ local name="${1}"
+ shift
+ fi
+
+ # TODO rename to launcher
+ local target="${T}/${name}"
+ local var_tmp="${T}/launcher_variables_tmp"
+ local target_dir pre
+
+ # Process the rest of the arguments
+ while [[ -n "${1}" && -n "${2}" ]]; do
+ local var="${1}" value="${2}"
+ if [[ "${var:0:2}" == "--" ]]; then
+ local var=${var:2}
+ echo "gjl_${var}=\"${value}\"" >> "${var_tmp}"
+ local gjl_${var}="${value}"
+ elif [[ "${var}" == "-into" ]]; then
+ target_dir="${value}"
+ elif [[ "${var}" == "-pre" ]]; then
+ pre="${value}"
+ fi
+ shift 2
+ done
+
+ # Test if no --jar and --main arguments were given and
+ # in that case check if the package only installs one jar
+ # and use that jar.
+ if [[ -z "${gjl_jar}" && -z "${gjl_main}" ]]; then
+ local cp="${JAVA_PKG_CLASSPATH}"
+ if [[ "${cp/:}" = "${cp}" && "${cp%.jar}" != "${cp}" ]]; then
+ echo "gjl_jar=\"${JAVA_PKG_CLASSPATH}\"" >> "${var_tmp}"
+ else
+ local msg="Not enough information to create a launcher given."
+ msg="${msg} Please give --jar or --main argument to ${FUNCNAME}."
+ die "${msg}"
+ fi
+ fi
+
+ # Write the actual script
+ echo "#!/bin/bash" > "${target}"
+ if [[ -n "${pre}" ]]; then
+ if [[ -f "${pre}" ]]; then
+ cat "${pre}" >> "${target}"
+ else
+ die "-pre specified file '${pre}' does not exist"
+ fi
+ fi
+ echo "gjl_package=${JAVA_PKG_NAME}" >> "${target}"
+ cat "${var_tmp}" >> "${target}"
+ rm -f "${var_tmp}"
+ echo "source ${EPREFIX}/usr/share/java-config-2/launcher/launcher.bash" >> "${target}"
+
+ if [[ -n "${target_dir}" ]]; then
+ (
+ into "${target_dir}"
+ dobin "${target}"
+ )
+ local ret=$?
+ return ${ret}
+ else
+ dobin "${target}"
+ fi
+}
+
+# @FUNCTION: java-pkg_dowar
+# @DESCRIPTION:
+# Install war files.
+# TODO document
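+#
+# A minimal usage sketch (the war path is hypothetical):
+# @CODE
+# java-pkg_dowar dist/${PN}.war
+# @CODE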
+java-pkg_dowar() {
+ debug-print-function ${FUNCNAME} $*
+
+ # Check for arguments
+ [[ ${#} -lt 1 ]] && die "At least one argument needed"
+ java-pkg_check-phase install
+
+ java-pkg_init_paths_
+
+ local war
+ for war in $* ; do
+ local warpath
+ # TODO evaluate if we want to handle symlinks differently -nichoj
+ # Check for symlink
+ if [[ -L "${war}" ]] ; then
+ cp "${war}" "${T}"
+ warpath="${T}$(basename "${war}")"
+ # Check for directory
+ # TODO evaluate if we want to handle directories differently -nichoj
+ elif [[ -d "${war}" ]] ; then
+ echo "dowar: warning, skipping directory ${war}"
+ continue
+ else
+ warpath="${war}"
+ fi
+
+ # Install those files like you mean it
+ (
+ insopts -m0644
+ insinto "${JAVA_PKG_WARDEST}"
+ doins ${warpath}
+ )
+ done
+}
+
+# @FUNCTION: java-pkg_recordjavadoc
+# @INTERNAL
+# @DESCRIPTION:
+# Scan for JavaDocs, and record their existence in the package.env file
+
+# TODO make sure this is in the proper section
+java-pkg_recordjavadoc()
+{
+ debug-print-function ${FUNCNAME} $*
+ # the find statement is important
+ # as some packages include multiple trees of javadoc
+ JAVADOC_PATH="$(find ${D}/usr/share/doc/ -name allclasses-frame.html -printf '%h:')"
+ # remove $D - TODO: check this is ok with all cases of the above
+ JAVADOC_PATH="${JAVADOC_PATH//${D}}"
+ if [[ -n "${JAVADOC_PATH}" ]] ; then
+ debug-print "javadocs found in ${JAVADOC_PATH%:}"
+ java-pkg_do_write_
+ else
+ debug-print "No javadocs found"
+ fi
+}
+
+
+# @FUNCTION: java-pkg_jar-from
+# @USAGE: [--build-only] [--with-dependencies] [--virtual] [--into dir] <package> [<package.jar>] [<destination.jar>]
+# @DESCRIPTION:
+# Makes a symlink to a jar from a certain package
+# A lot of java packages include dependencies in a lib/ directory
+# You can use this function to replace these bundled dependencies.
+# The dependency is recorded into package.env DEPEND line, unless "--build-only"
+# is passed as the very first argument, for jars that have to be present only
+# at build time and are not needed at runtime (junit testing etc.).
+#
+# @CODE
+# Example: get all jars from xerces slot 2
+# java-pkg_jar-from xerces-2
+#
+# Example: get a specific jar from xerces slot 2
+# java-pkg_jar-from xerces-2 xml-apis.jar
+#
+# Example: get a specific jar from xerces slot 2, and name it differently
+# java-pkg_jar-from xerces-2 xml-apis.jar xml.jar
+#
+# Example: get junit.jar which is needed only for building
+# java-pkg_jar-from --build-only junit junit.jar
+# @CODE
+#
+# @CODE
+# Parameters
+# --build-only - do not add the jar(s) to the package.env DEPEND line.
+# (assumed automatically when called inside src_test)
+# --with-dependencies - get jars also from requested package's dependencies
+# transitively.
+# --virtual - Packages passed to this function are to be handled as virtuals
+# and will not have individual jar dependencies recorded.
+# --into $dir - symlink jar(s) into $dir (must exist) instead of .
+# $1 - Package to get jars from, or comma-separated list of packages in
+# case other parameters are not used.
+# $2 - jar from package. If not specified, all jars will be used.
+# $3 - When a single jar is specified, destination filename of the
+# symlink. Defaults to the name of the jar.
+# @CODE
+
+# TODO could probably be cleaned up a little
+java-pkg_jar-from() {
+ debug-print-function ${FUNCNAME} $*
+
+ local build_only=""
+ local destdir="."
+ local deep=""
+ local virtual=""
+ local record_jar=""
+
+ [[ "${EBUILD_PHASE}" == "test" ]] && build_only="build"
+
+ while [[ "${1}" == --* ]]; do
+ if [[ "${1}" = "--build-only" ]]; then
+ build_only="build"
+ elif [[ "${1}" = "--with-dependencies" ]]; then
+ deep="--with-dependencies"
+ elif [[ "${1}" = "--virtual" ]]; then
+ virtual="true"
+ elif [[ "${1}" = "--into" ]]; then
+ destdir="${2}"
+ shift
+ else
+ die "java-pkg_jar-from called with unknown parameter: ${1}"
+ fi
+ shift
+ done
+
+ local target_pkg="${1}" target_jar="${2}" destjar="${3}"
+
+ [[ -z ${target_pkg} ]] && die "Must specify a package"
+
+ if [[ "${EAPI}" == "1" ]]; then
+ target_pkg="${target_pkg//:/-}"
+ fi
+
+ # default destjar to the target jar
+ [[ -z "${destjar}" ]] && destjar="${target_jar}"
+
+ local error_msg="There was a problem getting the classpath for ${target_pkg}."
+ local classpath
+ classpath="$(java-config ${deep} --classpath=${target_pkg})"
+ [[ $? != 0 ]] && die ${error_msg}
+
+ # When we have commas this function is called to bring jars from multiple
+ # packages. This affects the recording of dependencies performed later,
+ # which expects one package only, so we do it here.
+ if [[ ${target_pkg} = *,* ]]; then
+ for pkg in ${target_pkg//,/ }; do
+ java-pkg_ensure-dep "${build_only}" "${pkg}"
+ [[ -z "${build_only}" ]] && java-pkg_record-jar_ "${pkg}"
+ done
+ # setting this disables further record-jar_ calls later
+ record_jar="true"
+ else
+ java-pkg_ensure-dep "${build_only}" "${target_pkg}"
+ fi
+
+ # Record the entire virtual as a dependency so that
+ # no jars are missed.
+ if [[ -z "${build_only}" && -n "${virtual}" ]]; then
+ java-pkg_record-jar_ "${target_pkg}"
+ # setting this disables further record-jar_ calls later
+ record_jar="true"
+ fi
+
+ pushd ${destdir} > /dev/null \
+ || die "failed to change directory to ${destdir}"
+
+ local jar
+ for jar in ${classpath//:/ }; do
+ local jar_name=$(basename "${jar}")
+ if [[ ! -f "${jar}" ]] ; then
+ debug-print "${jar} from ${target_pkg} does not exist"
+ die "Installation problems with jars in ${target_pkg} - is it installed?"
+ fi
+ # If no specific target jar was indicated, link it
+ if [[ -z "${target_jar}" ]] ; then
+ [[ -f "${target_jar}" ]] && rm "${target_jar}"
+ ln -snf "${jar}" \
+ || die "Failed to make symlink from ${jar} to ${jar_name}"
+ if [[ -z "${record_jar}" ]]; then
+ if [[ -z "${build_only}" ]]; then
+ java-pkg_record-jar_ "${target_pkg}" "${jar}"
+ else
+ java-pkg_record-jar_ --build-only "${target_pkg}" "${jar}"
+ fi
+ fi
+ # otherwise, if the current jar is the target jar, link it
+ elif [[ "${jar_name}" == "${target_jar}" ]] ; then
+ [[ -f "${destjar}" ]] && rm "${destjar}"
+ ln -snf "${jar}" "${destjar}" \
+ || die "Failed to make symlink from ${jar} to ${destjar}"
+ if [[ -z "${record_jar}" ]]; then
+ if [[ -z "${build_only}" ]]; then
+ java-pkg_record-jar_ "${target_pkg}" "${jar}"
+ else
+ java-pkg_record-jar_ --build-only "${target_pkg}" "${jar}"
+ fi
+ fi
+ popd > /dev/null || die
+ return 0
+ fi
+ done
+ popd > /dev/null || die
+ # if no target was specified, we're ok
+ if [[ -z "${target_jar}" ]] ; then
+ return 0
+ # otherwise, die bitterly
+ else
+ die "Failed to find ${target_jar:-jar} in ${target_pkg}"
+ fi
+}
+
+# @FUNCTION: java-pkg_jarfrom
+# @DESCRIPTION:
+# See java-pkg_jar-from
+java-pkg_jarfrom() {
+ java-pkg_jar-from "$@"
+}
+
+# @FUNCTION: java-pkg_getjars
+# @USAGE: [--build-only] [--with-dependencies] <package1>[,<package2>...]
+# @DESCRIPTION:
+# Get the classpath provided by any number of packages
+# Among other things, this can be passed to 'javac -classpath' or 'ant -lib'.
+# The providing packages are recorded as dependencies into package.env DEPEND
+# line, unless "--build-only" is passed as the very first argument, for jars
+# that have to be present only at build time and are not needed at runtime
+# (junit testing etc).
+#
+# @CODE
+# Example: Get the classpath for xerces-2 and xalan,
+# java-pkg_getjars xerces-2,xalan
+#
+# Example Return:
+# /usr/share/xerces-2/lib/xml-apis.jar:/usr/share/xerces-2/lib/xmlParserAPIs.jar:/usr/share/xalan/lib/xalan.jar
+#
+#
+# Parameters:
+# --build-only - do not add the jar(s) to the package.env DEPEND line.
+# (assumed automatically when called inside src_test)
+# --with-dependencies - get jars also from requested package's dependencies
+# transitively.
+# $1 - list of packages to get jars from
+# (passed to java-config --classpath)
+# @CODE
+java-pkg_getjars() {
+ debug-print-function ${FUNCNAME} $*
+
+ local build_only=""
+ local deep=""
+
+ [[ "${EBUILD_PHASE}" == "test" ]] && build_only="build"
+
+ while [[ "${1}" == --* ]]; do
+ if [[ "${1}" = "--build-only" ]]; then
+ build_only="build"
+ elif [[ "${1}" = "--with-dependencies" ]]; then
+ deep="--with-dependencies"
+ else
+ die "java-pkg_jar-from called with unknown parameter: ${1}"
+ fi
+ shift
+ done
+
+ [[ ${#} -ne 1 ]] && die "${FUNCNAME} takes only one argument besides --*"
+
+
+ local pkgs="${1}"
+
+ if [[ "${EAPI}" == "1" ]]; then
+ pkgs="${pkgs//:/-}"
+ fi
+
+ jars="$(java-config ${deep} --classpath=${pkgs})"
+ [[ $? != 0 ]] && die "java-config --classpath=${pkgs} failed"
+ debug-print "${pkgs}:${jars}"
+
+ for pkg in ${pkgs//,/ }; do
+ java-pkg_ensure-dep "${build_only}" "${pkg}"
+ done
+
+ for pkg in ${pkgs//,/ }; do
+ if [[ -z "${build_only}" ]]; then
+ java-pkg_record-jar_ "${pkg}"
+ else
+ java-pkg_record-jar_ --build-only "${pkg}"
+ fi
+ done
+
+ echo "${jars}"
+}
+
+# @FUNCTION: java-pkg_getjar
+# @USAGE: [--build-only] [--virtual] <package> <jarfile>
+# @DESCRIPTION:
+# Get the complete path of a single jar from a package
+# The providing package is recorded as runtime dependency into package.env
+# DEPEND line, unless "--build-only" is passed as the very first argument, for
+# jars that have to be present only at build time and are not needed at runtime
+# (junit testing etc).
+#
+# @CODE
+# Example:
+# java-pkg_getjar xerces-2 xml-apis.jar
+# returns
+# /usr/share/xerces-2/lib/xml-apis.jar
+#
+# Parameters:
+# --build-only - do not add the jar to the package.env DEPEND line.
+# --virtual - Packages passed to this function are to be handled as virtuals
+# and will not have individual jar dependencies recorded.
+# $1 - package to use
+# $2 - jar to get
+# @CODE
+java-pkg_getjar() {
+ debug-print-function ${FUNCNAME} $*
+
+ local build_only=""
+ local virtual=""
+ local record_jar=""
+
+ [[ "${EBUILD_PHASE}" == "test" ]] && build_only="build"
+
+ while [[ "${1}" == --* ]]; do
+ if [[ "${1}" = "--build-only" ]]; then
+ build_only="build"
+ elif [[ "${1}" == "--virtual" ]]; then
+ virtual="true"
+ else
+ die "java-pkg_getjar called with unknown parameter: ${1}"
+ fi
+ shift
+ done
+
+ [[ ${#} -ne 2 ]] && die "${FUNCNAME} takes only two arguments besides --*"
+
+ local pkg="${1}" target_jar="${2}" jar
+
+ if [[ "${EAPI}" == "1" ]]; then
+ pkg="${pkg//:/-}"
+ fi
+
+ [[ -z ${pkg} ]] && die "Must specify package to get a jar from"
+ [[ -z ${target_jar} ]] && die "Must specify jar to get"
+
+ local error_msg="Could not find classpath for ${pkg}. Are you sure its installed?"
+ local classpath
+ classpath=$(java-config --classpath=${pkg})
+ [[ $? != 0 ]] && die ${error_msg}
+
+ java-pkg_ensure-dep "${build_only}" "${pkg}"
+
+ # Record the package (virtual) as a dependency and then set record_jar
+ # so that individual jars are not recorded.
+ if [[ -n "${virtual}" ]]; then
+ if [[ -z "${build_only}" ]]; then
+ java-pkg_record-jar_ "${pkg}"
+ else
+ java-pkg_record-jar_ --build-only "${pkg}"
+ fi
+ record_jar="true"
+ fi
+
+ for jar in ${classpath//:/ }; do
+ if [[ ! -f "${jar}" ]] ; then
+ die "Installation problem with jar ${jar} in ${pkg} - is it installed?"
+ fi
+
+ if [[ "$(basename ${jar})" == "${target_jar}" ]] ; then
+ # Only record jars that aren't build-only
+ if [[ -z "${record_jar}" ]]; then
+ if [[ -z "${build_only}" ]]; then
+ java-pkg_record-jar_ "${pkg}" "${jar}"
+ else
+ java-pkg_record-jar_ --build-only "${pkg}" "${jar}"
+ fi
+ fi
+ echo "${jar}"
+ return 0
+ fi
+ done
+
+ die "Could not find ${target_jar} in ${pkg}"
+ return 1
+}
+
+# @FUNCTION: java-pkg_register-dependency
+# @USAGE: <package>[,<package2>...] [<jarfile>]
+# @DESCRIPTION:
+# Registers runtime dependency on a package, list of packages, or a single jar
+# from a package, into package.env DEPEND line. Can only be called in
+# src_install phase.
+# Intended for binary packages where you don't need to symlink the jars or get
+# their classpath during build. As such, the dependencies only need to be
+# specified in ebuild's RDEPEND, and should be omitted in DEPEND.
+#
+# @CODE
+# Parameters:
+# $1 - comma-separated list of packages, or a single package
+# $2 - if param $1 is a single package, optionally specify the jar
+# to depend on
+#
+# Examples:
+# Record the dependency on whole xerces-2 and xalan,
+# java-pkg_register-dependency xerces-2,xalan
+#
+# Record the dependency on ant.jar from ant-core
+# java-pkg_register-dependency ant-core ant.jar
+# @CODE
+#
+# Note: Passing both a list of packages as the first parameter AND specifying the
+# jar as the second is not allowed and will cause the function to die. We assume
+# it is more likely that such a combination is passed by mistake than that
+# several packages provide an identically named jar without class
+# collisions.
+java-pkg_register-dependency() {
+ debug-print-function ${FUNCNAME} $*
+
+ java-pkg_check-phase install
+
+ [[ ${#} -gt 2 ]] && die "${FUNCNAME} takes at most two arguments"
+
+ local pkgs="${1}"
+ local jar="${2}"
+
+ [[ -z "${pkgs}" ]] && die "${FUNCNAME} called with no package(s) specified"
+
+ if [[ "${EAPI}" == "1" ]]; then
+ pkgs="${pkgs//:/-}"
+ fi
+
+ if [[ -z "${jar}" ]]; then
+ for pkg in ${pkgs//,/ }; do
+ java-pkg_ensure-dep runtime "${pkg}"
+ java-pkg_record-jar_ "${pkg}"
+ done
+ else
+ [[ ${pkgs} == *,* ]] && \
+ die "${FUNCNAME} called with both package list and jar name"
+ java-pkg_ensure-dep runtime "${pkgs}"
+ java-pkg_record-jar_ "${pkgs}" "${jar}"
+ fi
+
+ java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_register-optional-dependency
+# @USAGE: <package>[,<package2>...] [<jarfile>]
+# @DESCRIPTION:
+# Registers optional runtime dependency on a package, list of packages, or a
+# single jar from a package, into package.env OPTIONAL_DEPEND line. Can only be
+# called in src_install phase.
+# Intended for packages that can use other packages when those are on the
+# classpath. They will be put on the classpath by the launcher if they are
+# installed. A typical case is JDBC implementations for various databases. It's
+# better than having a USE flag for each implementation triggering a hard
+# dependency.
+#
+# @CODE
+# Parameters:
+# $1 - comma-separated list of packages, or a single package
+# $2 - if param $1 is a single package, optionally specify the jar to depend on
+#
+# Example:
+# Record the optional dependency on some jdbc providers
+# java-pkg_register-optional-dependency jdbc-jaybird,jtds-1.2,jdbc-mysql
+# @CODE
+#
+# Note: Passing both a list of packages as the first parameter AND specifying the
+# jar as the second is not allowed and will cause the function to die. We assume
+# it is more likely that such a combination is passed by mistake than that
+# several packages provide an identically named jar without class
+# collisions.
+java-pkg_register-optional-dependency() {
+ debug-print-function ${FUNCNAME} $*
+
+ java-pkg_check-phase install
+
+ [[ ${#} -gt 2 ]] && die "${FUNCNAME} takes at most two arguments"
+
+ local pkgs="${1}"
+ local jar="${2}"
+
+ [[ -z "${pkgs}" ]] && die "${FUNCNAME} called with no package(s) specified"
+
+ if [[ "${EAPI}" == "1" ]]; then
+ pkgs="${pkgs//:/-}"
+ fi
+
+ if [[ -z "${jar}" ]]; then
+ for pkg in ${pkgs//,/ }; do
+ java-pkg_record-jar_ --optional "${pkg}"
+ done
+ else
+ [[ ${pkgs} == *,* ]] && \
+ die "${FUNCNAME} called with both package list and jar name"
+ java-pkg_record-jar_ --optional "${pkgs}" "${jar}"
+ fi
+
+ java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_register-environment-variable
+# @USAGE: <name> <value>
+# @DESCRIPTION:
+# Register an arbitrary environment variable into package.env. The gjl launcher
+# for this package or any package depending on this will export it into
+# the environment before executing the java command.
+# Must only be called in src_install phase.
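+#
+# A minimal usage sketch (the variable name and value are hypothetical):
+# @CODE
+# java-pkg_register-environment-variable FOO_HOME "${EPREFIX}/usr/share/foo"
+# @CODE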
+JAVA_PKG_EXTRA_ENV="${T}/java-pkg-extra-env"
+JAVA_PKG_EXTRA_ENV_VARS=""
+java-pkg_register-environment-variable() {
+ debug-print-function ${FUNCNAME} $*
+
+ java-pkg_check-phase install
+
+ [[ ${#} != 2 ]] && die "${FUNCNAME} takes two arguments"
+
+ echo "${1}=\"${2}\"" >> ${JAVA_PKG_EXTRA_ENV}
+ JAVA_PKG_EXTRA_ENV_VARS="${JAVA_PKG_EXTRA_ENV_VARS} ${1}"
+
+ java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_get-bootclasspath
+# @USAGE: <version>
+# @DESCRIPTION:
+# Returns classpath of a given bootclasspath-providing package version.
+#
+# @param $1 - the version of bootclasspath (e.g. 1.5), 'auto' for bootclasspath
+# of the current JDK
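+#
+# A minimal usage sketch:
+# @CODE
+# local bcp="$(java-pkg_get-bootclasspath 1.5)"
+# @CODE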
+java-pkg_get-bootclasspath() {
+ local version="${1}"
+
+ local bcp
+ case "${version}" in
+ auto)
+ bcp="$(java-config -g BOOTCLASSPATH)"
+ ;;
+ 1.5)
+ bcp="$(java-pkg_getjars --build-only gnu-classpath-0.98)"
+ ;;
+ *)
+ eerror "unknown parameter of java-pkg_get-bootclasspath"
+ die "unknown parameter of java-pkg_get-bootclasspath"
+ ;;
+ esac
+
+ echo "${bcp}"
+}
+
+
+# This function reads stdin, and based on that input, figures out how to
+# populate jars from the filesystem.
+# Need to figure out a good way of making use of this, ie be able to use a
+# string that was built instead of stdin
+# NOTE: this isn't quite ready for primetime.
+#java-pkg_populate-jars() {
+# local line
+#
+# read line
+# while [[ -n "${line}" ]]; do
+# # Ignore comments
+# [[ ${line%%#*} == "" ]] && continue
+#
+# # get rid of any spaces
+# line="${line// /}"
+#
+# # format: path=jarinfo
+# local path=${line%%=*}
+# local jarinfo=${line##*=}
+#
+# # format: jar@package
+# local jar=${jarinfo%%@*}.jar
+# local package=${jarinfo##*@}
+# if [[ -n ${replace_only} ]]; then
+# [[ ! -f $path ]] && die "No jar exists at ${path}"
+# fi
+# if [[ -n ${create_parent} ]]; then
+# local parent=$(dirname ${path})
+# mkdir -p "${parent}"
+# fi
+# java-pkg_jar-from "${package}" "${jar}" "${path}"
+#
+# read line
+# done
+#}
+
+# @FUNCTION: java-pkg_find-normal-jars
+# @USAGE: [<path/to/directory>]
+# @DESCRIPTION:
+# Find files with the .jar suffix in the given directory (default: $WORKDIR).
+java-pkg_find-normal-jars() {
+ local dir=$1
+ [[ "${dir}" ]] || dir="${WORKDIR}"
+ local found
+ for jar in $(find "${dir}" -name "*.jar" -type f); do
+ echo "${jar}"
+ found="true"
+ done
+ [[ "${found}" ]]
+ return $?
+}
+
+# @FUNCTION: java-pkg_ensure-no-bundled-jars
+# @DESCRIPTION:
+# Try to locate bundled jar files in ${WORKDIR} and die if found.
+# This function should be called after WORKDIR has been populated with symlinks
+# to system jar files, or after bundled jars have been removed.
+java-pkg_ensure-no-bundled-jars() {
+ debug-print-function ${FUNCNAME} $*
+
+ local bundled_jars=$(java-pkg_find-normal-jars)
+ if [[ -n ${bundled_jars} ]]; then
+ echo "Bundled jars found:"
+ local jar
+ for jar in ${bundled_jars}; do
+ echo $(pwd)${jar/./}
+ done
+ die "Bundled jars found!"
+ fi
+}
+
+# @FUNCTION: java-pkg_ensure-vm-version-sufficient
+# @INTERNAL
+# @DESCRIPTION:
+# Checks if we have a sufficient VM and dies if we don't.
+java-pkg_ensure-vm-version-sufficient() {
+ debug-print-function ${FUNCNAME} $*
+
+ if ! java-pkg_is-vm-version-sufficient; then
+ debug-print "VM is not suffient"
+ eerror "Current Java VM cannot build this package"
+ einfo "Please use java-config -S to set the correct one"
+ die "Active Java VM cannot build this package"
+ fi
+}
+
+# @FUNCTION: java-pkg_is-vm-version-sufficient
+# @INTERNAL
+# @DESCRIPTION:
+# @RETURN: zero - VM is sufficient; non-zero - VM is not sufficient
+java-pkg_is-vm-version-sufficient() {
+ debug-print-function ${FUNCNAME} $*
+
+ depend-java-query --is-sufficient "${DEPEND}" > /dev/null
+ return $?
+}
+
+# @FUNCTION: java-pkg_ensure-vm-version-eq
+# @INTERNAL
+# @DESCRIPTION:
+# Die if the current VM is not equal to the argument passed.
+#
+# @param $@ - Desired VM version to ensure
+java-pkg_ensure-vm-version-eq() {
+ debug-print-function ${FUNCNAME} $*
+
+ if ! java-pkg_is-vm-version-eq $@ ; then
+ debug-print "VM is not suffient"
+ eerror "This package requires a Java VM version = $@"
+ einfo "Please use java-config -S to set the correct one"
+ die "Active Java VM too old"
+ fi
+}
+
+# @FUNCTION: java-pkg_is-vm-version-eq
+# @USAGE: <version>
+# @INTERNAL
+# @RETURN: zero - VM versions are equal; non-zero - VM versions are not equal
+java-pkg_is-vm-version-eq() {
+ debug-print-function ${FUNCNAME} $*
+
+ local needed_version="$@"
+
+ [[ -z "${needed_version}" ]] && die "need an argument"
+
+ local vm_version="$(java-pkg_get-vm-version)"
+
+ vm_version="$(get_version_component_range 1-2 "${vm_version}")"
+ needed_version="$(get_version_component_range 1-2 "${needed_version}")"
+
+ if [[ -z "${vm_version}" ]]; then
+ debug-print "Could not get JDK version from DEPEND"
+ return 1
+ else
+ if [[ "${vm_version}" == "${needed_version}" ]]; then
+ debug-print "Detected a JDK(${vm_version}) = ${needed_version}"
+ return 0
+ else
+ debug-print "Detected a JDK(${vm_version}) != ${needed_version}"
+ return 1
+ fi
+ fi
+}
+
+# @FUNCTION: java-pkg_ensure-vm-version-ge
+# @INTERNAL
+# @DESCRIPTION:
+# Die if the current VM is not greater than the desired version
+#
+# @param $@ - VM version to compare current to
+java-pkg_ensure-vm-version-ge() {
+ debug-print-function ${FUNCNAME} $*
+
+ if ! java-pkg_is-vm-version-ge "$@" ; then
+ debug-print "vm is not suffient"
+ eerror "This package requires a Java VM version >= $@"
+ einfo "Please use java-config -S to set the correct one"
+ die "Active Java VM too old"
+ fi
+}
+
+# @FUNCTION: java-pkg_is-vm-version-ge
+# @INTERNAL
+# @DESCRIPTION:
+# @CODE
+# Parameters:
+# $@ - VM version to compare current VM to
+# @CODE
+# @RETURN: zero - current VM version is greater than checked version;
+# non-zero - current VM version is not greater than checked version
+java-pkg_is-vm-version-ge() {
+ debug-print-function ${FUNCNAME} $*
+
+ local needed_version=$@
+ local vm_version=$(java-pkg_get-vm-version)
+ if [[ -z "${vm_version}" ]]; then
+ debug-print "Could not get JDK version from DEPEND"
+ return 1
+ else
+ if version_is_at_least "${needed_version}" "${vm_version}"; then
+ debug-print "Detected a JDK(${vm_version}) >= ${needed_version}"
+ return 0
+ else
+ debug-print "Detected a JDK(${vm_version}) < ${needed_version}"
+ return 1
+ fi
+ fi
+}
+
+java-pkg_set-current-vm() {
+ export GENTOO_VM=${1}
+}
+
+java-pkg_get-current-vm() {
+ echo ${GENTOO_VM}
+}
+
+java-pkg_current-vm-matches() {
+ has $(java-pkg_get-current-vm) ${@}
+ return $?
+}
+
+# @FUNCTION: java-pkg_get-source
+# @DESCRIPTION:
+# Determines what source version should be used, for passing to -source.
+# Unless you want to break things, you probably shouldn't set JAVA_PKG_WANT_SOURCE.
+#
+# @RETURN: string - Either the lowest possible source, or JAVA_PKG_WANT_SOURCE
+java-pkg_get-source() {
+ echo ${JAVA_PKG_WANT_SOURCE:-$(depend-java-query --get-lowest "${DEPEND} ${RDEPEND}")}
+}
+
+# @FUNCTION: java-pkg_get-target
+# @DESCRIPTION:
+# Determines what target version should be used, for passing to -target.
+# If you don't care about lower versions, you can set JAVA_PKG_WANT_TARGET to
+# the version of your JDK.
+#
+# @RETURN: string - Either the lowest possible target, or JAVA_PKG_WANT_TARGET
+java-pkg_get-target() {
+ echo ${JAVA_PKG_WANT_TARGET:-$(depend-java-query --get-lowest "${DEPEND} ${RDEPEND}")}
+}
+
+# @FUNCTION: java-pkg_get-javac
+# @DESCRIPTION:
+# Returns the compiler executable
+java-pkg_get-javac() {
+ debug-print-function ${FUNCNAME} $*
+
+ java-pkg_init-compiler_
+ local compiler="${GENTOO_COMPILER}"
+
+ local compiler_executable
+ if [[ "${compiler}" = "javac" ]]; then
+ # nothing fancy needs to be done for javac
+ compiler_executable="javac"
+ else
+ # for everything else, try to determine from an env file
+
+ local compiler_env="/usr/share/java-config-2/compiler/${compiler}"
+ if [[ -f ${compiler_env} ]]; then
+ local old_javac=${JAVAC}
+ unset JAVAC
+ # try to get value of JAVAC
+ compiler_executable="$(source ${compiler_env} 1>/dev/null 2>&1; echo ${JAVAC})"
+ export JAVAC=${old_javac}
+
+ if [[ -z ${compiler_executable} ]]; then
+ die "JAVAC is empty or undefined in ${compiler_env}"
+ fi
+
+ # check that it's executable
+ if [[ ! -x ${compiler_executable} ]]; then
+ die "${compiler_executable} doesn't exist, or isn't executable"
+ fi
+ else
+ die "Could not find environment file for ${compiler}"
+ fi
+ fi
+ echo ${compiler_executable}
+}
+
+# @FUNCTION: java-pkg_javac-args
+# @DESCRIPTION:
+# If an ebuild uses javac directly, instead of using ejavac, it should call this
+# to know what -source/-target to use.
+#
+# @RETURN: string - arguments to pass to javac, complete with -target and -source
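+#
+# A minimal usage sketch (the source layout shown is hypothetical):
+# @CODE
+# javac $(java-pkg_javac-args) -d build $(find src -name '*.java')
+# @CODE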
+java-pkg_javac-args() {
+ debug-print-function ${FUNCNAME} $*
+
+ local want_source="$(java-pkg_get-source)"
+ local want_target="$(java-pkg_get-target)"
+
+ local source_str="-source ${want_source}"
+ local target_str="-target ${want_target}"
+
+ debug-print "want source: ${want_source}"
+ debug-print "want target: ${want_target}"
+
+ if [[ -z "${want_source}" || -z "${want_target}" ]]; then
+ die "Could not find valid -source/-target values for javac"
+ else
+ echo "${source_str} ${target_str}"
+ fi
+}
+
+# @FUNCTION: java-pkg_get-jni-cflags
+# @DESCRIPTION:
+# Echoes the CFLAGS for JNI compilations.
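+#
+# A minimal usage sketch (the compiler invocation and file name are hypothetical):
+# @CODE
+# ${CC:-cc} ${CFLAGS} $(java-pkg_get-jni-cflags) -fPIC -c foo_jni.c
+# @CODE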
+java-pkg_get-jni-cflags() {
+ local flags="-I${JAVA_HOME}/include"
+
+ local platform="linux"
+ use elibc_FreeBSD && platform="freebsd"
+
+ # TODO do a check that the directories are valid
+ flags="${flags} -I${JAVA_HOME}/include/${platform}"
+
+ echo ${flags}
+}
+
+java-pkg_ensure-gcj() {
+ # was enforcing sys-devel/gcc[gcj]
+ die "${FUNCNAME} was removed. Use use-deps available as of EAPI 2 instead. #261562"
+}
+
+java-pkg_ensure-test() {
+ # was enforcing USE=test if FEATURES=test
+ die "${FUNCNAME} was removed. Package mangers handle this already. #278965"
+}
+
+# @FUNCTION: java-pkg_register-ant-task
+# @USAGE: [--version x.y] [<name>]
+# @DESCRIPTION:
+# Register this package as an ant task, so that ant will load it when no specific
+# ANT_TASKS are specified. Note that even without this registration, all packages
+# specified in ANT_TASKS will be loaded. Mostly used by the actual ant task
+# packages, but can also be used by other ebuilds that used to symlink their
+# .jar into /usr/share/ant-core/lib to get autoloaded, for backwards
+# compatibility.
+#
+# @CODE
+# Parameters
+# --version x.y Register only for ant version x.y (otherwise for any ant
+# version). Used by the ant-* packages to prevent loading of mismatched
+# ant-core ant tasks after core was updated, before the tasks are updated,
+# without a need for blockers.
+# $1 Name to register as. Defaults to JAVA_PKG_NAME ($PN[-$SLOT])
+# @CODE
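+#
+# A minimal usage sketch (the ant version shown is hypothetical):
+# @CODE
+# java-pkg_register-ant-task
+# java-pkg_register-ant-task --version 1.9 ${PN}
+# @CODE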
+java-pkg_register-ant-task() {
+ local TASKS_DIR="tasks"
+
+ # check for --version x.y parameters
+ while [[ -n "${1}" && -n "${2}" ]]; do
+ local var="${1#--}"
+ local val="${2}"
+ if [[ "${var}" == "version" ]]; then
+ TASKS_DIR="tasks-${val}"
+ else
+ die "Unknown parameter passed to java-pkg_register-ant-tasks: ${1} ${2}"
+ fi
+ shift 2
+ done
+
+ local TASK_NAME="${1:-${JAVA_PKG_NAME}}"
+
+ dodir /usr/share/ant/${TASKS_DIR}
+ touch "${ED}/usr/share/ant/${TASKS_DIR}/${TASK_NAME}"
+}
+
+# @FUNCTION: java-pkg_ant-tasks-depend
+# @INTERNAL
+# @DESCRIPTION:
+# Translates the WANT_ANT_TASKS variable into valid dependencies.
+java-pkg_ant-tasks-depend() {
+ debug-print-function ${FUNCNAME} ${WANT_ANT_TASKS}
+
+ if [[ -n "${WANT_ANT_TASKS}" ]]; then
+ local DEP=""
+ for i in ${WANT_ANT_TASKS}
+ do
+ if [[ ${i} = ant-* ]]; then
+ DEP="${DEP}dev-java/${i} "
+ elif [[ ${i} = */*:* ]]; then
+ DEP="${DEP}${i} "
+ else
+ echo "Invalid atom in WANT_ANT_TASKS: ${i}"
+ return 1
+ fi
+ done
+ echo ${DEP}
+ return 0
+ else
+ return 0
+ fi
+}
+
+
+# @FUNCTION: ejunit_
+# @INTERNAL
+# @DESCRIPTION:
+# Internal Junit wrapper function. Makes it easier to run the tests and checks for
+# dev-java/junit in DEPEND. Launches the tests using junit.textui.TestRunner for
+# junit, or org.junit.runner.JUnitCore for junit-4.
+# @CODE
+# Parameters:
+# $1 - junit package (junit or junit-4)
+# $2 - -cp or -classpath
+# $3 - classpath; junit and recorded dependencies get appended
+# $@ - the rest of the parameters are passed to java
+# @CODE
+ejunit_() {
+ debug-print-function ${FUNCNAME} $*
+
+ local pkgs
+ if [[ -f ${JAVA_PKG_DEPEND_FILE} ]]; then
+ for atom in $(cat ${JAVA_PKG_DEPEND_FILE} | tr : ' '); do
+ pkgs=${pkgs},$(echo ${atom} | sed -re "s/^.*@//")
+ done
+ fi
+
+ local junit=${1}
+ shift 1
+
+ local cp=$(java-pkg_getjars --with-dependencies ${junit}${pkgs})
+ if [[ ${1} = -cp || ${1} = -classpath ]]; then
+ cp="${2}:${cp}"
+ shift 2
+ else
+ cp=".:${cp}"
+ fi
+
+ local runner=junit.textui.TestRunner
+ if [[ "${junit}" == "junit-4" ]] ; then
+ runner=org.junit.runner.JUnitCore
+ fi
+ debug-print "Calling: java -cp \"${cp}\" -Djava.io.tmpdir=\"${T}\" -Djava.awt.headless=true ${runner} ${@}"
+ java -cp "${cp}" -Djava.io.tmpdir="${T}/" -Djava.awt.headless=true ${runner} "${@}" || die "Running junit failed"
+}
+
+# @FUNCTION: ejunit
+# @DESCRIPTION:
+# Junit wrapper function. Makes it easier to run the tests and checks for
+# dev-java/junit in DEPEND. Launches the tests using junit.textui.TestRunner.
+#
+# @CODE
+# Parameters:
+# $1 - -cp or -classpath
+# $2 - classpath; junit and recorded dependencies get appended
+# $@ - the rest of the parameters are passed to java
+#
+# Examples:
+# ejunit -cp build/classes org.blinkenlights.jid3.test.AllTests
+# ejunit org.blinkenlights.jid3.test.AllTests
+# ejunit org.blinkenlights.jid3.test.FirstTest org.blinkenlights.jid3.test.SecondTest
+# @CODE
+ejunit() {
+ debug-print-function ${FUNCNAME} $*
+
+ ejunit_ "junit" "${@}"
+}
+
+# @FUNCTION: ejunit4
+# @DESCRIPTION:
+# Junit4 wrapper function. Makes it easier to run the tests and checks for
+# dev-java/junit:4 in DEPEND. Launches the tests using org.junit.runner.JUnitCore.
+#
+# @CODE
+# Parameters:
+# $1 - -cp or -classpath
+# $2 - classpath; junit and recorded dependencies get appended
+# $@ - the rest of the parameters are passed to java
+#
+# Examples:
+# ejunit4 -cp build/classes org.blinkenlights.jid3.test.AllTests
+# ejunit4 org.blinkenlights.jid3.test.AllTests
+# ejunit4 org.blinkenlights.jid3.test.FirstTest \
+# org.blinkenlights.jid3.test.SecondTest
+# @CODE
+ejunit4() {
+ debug-print-function ${FUNCNAME} $*
+
+ ejunit_ "junit-4" "${@}"
+}
+
+# @FUNCTION: java-utils-2_src_prepare
+# @DESCRIPTION:
+# src_prepare Searches for bundled jars
+# Don't call directly, but via java-pkg-2_src_prepare!
+java-utils-2_src_prepare() {
+ java-pkg_func-exists java_prepare && java_prepare
+
+ # Check for files in JAVA_RM_FILES array.
+ if [[ ${JAVA_RM_FILES[@]} ]]; then
+ debug-print "$FUNCNAME: removing unneeded files"
+ java-pkg_rm_files "${JAVA_RM_FILES[@]}"
+ fi
+
+ if is-java-strict; then
+ echo "Searching for bundled jars:"
+ java-pkg_find-normal-jars || echo "None found."
+ echo "Searching for bundled classes (no output if none found):"
+ find "${WORKDIR}" -name "*.class"
+ echo "Search done."
+ fi
+}
+
+# @FUNCTION: java-utils-2_pkg_preinst
+# @DESCRIPTION:
+# pkg_preinst Searches for missing and unneeded dependencies
+# Don't call directly, but via java-pkg-2_pkg_preinst!
+java-utils-2_pkg_preinst() {
+ if is-java-strict; then
+ if [[ ! -e "${JAVA_PKG_ENV}" ]] || has ant-tasks ${INHERITED}; then
+ return
+ fi
+
+ if has_version dev-java/java-dep-check; then
+ local output=$(GENTOO_VM= java-dep-check --image "${D}" "${JAVA_PKG_ENV}")
+ [[ ${output} ]] && ewarn "${output}"
+ else
+ eerror "Install dev-java/java-dep-check for dependency checking"
+ fi
+ fi
+}
+
+# @FUNCTION: eant
+# @USAGE: <ant_build_target(s)>
+# @DESCRIPTION:
+# Ant wrapper function. Will use the appropriate compiler, based on the
+# user-defined compiler. Will also set up ANT_TASKS (defaulting to
+# WANT_ANT_TASKS when ANT_TASKS is not set explicitly). Supported variables:
+#
+# @CODE
+# Variables:
+# EANT_GENTOO_CLASSPATH - calls java-pkg_getjars for the value and adds to the
+# gentoo.classpath property. Be sure to call java-ant_rewrite-classpath in src_unpack.
+# EANT_NEEDS_TOOLS - add tools.jar to the gentoo.classpath. Should only be used
+#	for build-time purposes; the dependency is not recorded to
+# package.env!
+# ANT_TASKS - used to determine ANT_TASKS before calling Ant.
+# @CODE
+eant() {
+ debug-print-function ${FUNCNAME} $*
+
+ if [[ ${EBUILD_PHASE} = compile ]]; then
+ java-ant-2_src_configure
+ fi
+
+ if ! has java-ant-2 ${INHERITED}; then
+ local msg="You should inherit java-ant-2 when using eant"
+ java-pkg_announce-qa-violation "${msg}"
+ fi
+
+ local antflags="-Dnoget=true -Dmaven.mode.offline=true -Dbuild.sysclasspath=ignore"
+
+ java-pkg_init-compiler_
+ local compiler="${GENTOO_COMPILER}"
+
+ local compiler_env="${JAVA_PKG_COMPILER_DIR}/${compiler}"
+ local build_compiler="$(source ${compiler_env} 1>/dev/null 2>&1; echo ${ANT_BUILD_COMPILER})"
+ if [[ "${compiler}" != "javac" && -z "${build_compiler}" ]]; then
+ die "ANT_BUILD_COMPILER undefined in ${compiler_env}"
+ fi
+
+ if [[ ${compiler} != "javac" ]]; then
+ antflags="${antflags} -Dbuild.compiler=${build_compiler}"
+ # Figure out any extra stuff to put on the classpath for compilers aside
+ # from javac
+ # ANT_BUILD_COMPILER_DEPS should be something that could be passed to
+ # java-config -p
+ local build_compiler_deps="$(source ${JAVA_PKG_COMPILER_DIR}/${compiler} 1>/dev/null 2>&1; echo ${ANT_BUILD_COMPILER_DEPS})"
+ if [[ -n ${build_compiler_deps} ]]; then
+ antflags="${antflags} -lib $(java-config -p ${build_compiler_deps})"
+ fi
+ fi
+
+ for arg in "${@}"; do
+ if [[ ${arg} = -lib ]]; then
+ if is-java-strict; then
+ eerror "You should not use the -lib argument to eant because it will fail"
+ eerror "with JAVA_PKG_STRICT. Please use for example java-pkg_jar-from"
+ eerror "or ant properties to make dependencies available."
+ eerror "For ant tasks use WANT_ANT_TASKS or ANT_TASKS from."
+ eerror "split ant (>=dev-java/ant-core-1.7)."
+ die "eant -lib is deprecated/forbidden"
+ else
+ echo "eant -lib is deprecated. Turn JAVA_PKG_STRICT on for"
+ echo "more info."
+ fi
+ fi
+ done
+
+ # parse WANT_ANT_TASKS for atoms
+ local want_ant_tasks
+ for i in ${WANT_ANT_TASKS}; do
+ if [[ ${i} = */*:* ]]; then
+ i=${i#*/}
+ i=${i%:0}
+ want_ant_tasks+="${i/:/-} "
+ else
+ want_ant_tasks+="${i} "
+ fi
+ done
+ # default ANT_TASKS to WANT_ANT_TASKS, if ANT_TASKS is not set explicitly
+ ANT_TASKS="${ANT_TASKS:-${want_ant_tasks% }}"
+
+ # override ANT_TASKS with JAVA_PKG_FORCE_ANT_TASKS if it's set
+ ANT_TASKS="${JAVA_PKG_FORCE_ANT_TASKS:-${ANT_TASKS}}"
+
+ # if ant-tasks is not set by ebuild or forced, use none
+ ANT_TASKS="${ANT_TASKS:-none}"
+
+ # at this point, ANT_TASKS should be "all", "none" or explicit list
+ if [[ "${ANT_TASKS}" == "all" ]]; then
+ einfo "Using all available ANT_TASKS"
+ elif [[ "${ANT_TASKS}" == "none" ]]; then
+ einfo "Disabling all optional ANT_TASKS"
+ else
+ einfo "Using following ANT_TASKS: ${ANT_TASKS}"
+ fi
+
+ export ANT_TASKS
+
+ [[ -n ${JAVA_PKG_DEBUG} ]] && antflags="${antflags} --execdebug -debug"
+ [[ -n ${PORTAGE_QUIET} ]] && antflags="${antflags} -q"
+
+ local gcp="${EANT_GENTOO_CLASSPATH}"
+ local getjarsarg=""
+
+ if [[ ${EBUILD_PHASE} = "test" ]]; then
+ antflags="${antflags} -DJunit.present=true"
+ getjarsarg="--with-dependencies"
+
+ local re="\bant-junit4?([-:]\S+)?\b"
+ [[ ${ANT_TASKS} =~ ${re} ]] && gcp+=" ${BASH_REMATCH[0]}"
+ else
+ antflags="${antflags} -Dmaven.test.skip=true"
+ fi
+
+ local cp
+
+ for atom in ${gcp}; do
+ cp+=":$(java-pkg_getjars ${getjarsarg} ${atom})"
+ done
+
+ [[ ${EANT_NEEDS_TOOLS} ]] && cp+=":$(java-config --tools)"
+ [[ ${EANT_GENTOO_CLASSPATH_EXTRA} ]] && cp+=":${EANT_GENTOO_CLASSPATH_EXTRA}"
+
+ if [[ ${cp#:} ]]; then
+ # It seems ant does not like single quotes around ${cp}
+ antflags="${antflags} -Dgentoo.classpath=\"${cp#:}\""
+ fi
+
+ [[ -n ${JAVA_PKG_DEBUG} ]] && echo ant ${antflags} "${@}"
+ debug-print "Calling ant (GENTOO_VM: ${GENTOO_VM}): ${antflags} ${@}"
+ ant ${antflags} "${@}" || die "eant failed"
+}
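+
+# A minimal sketch of typical usage (hypothetical ebuild code; the build.xml
+# targets "build"/"test" and the dev-java/foo dependency are assumptions):
+#
+#	EANT_GENTOO_CLASSPATH="foo"
+#
+#	src_compile() {
+#		eant build
+#	}
+#
+#	src_test() {
+#		eant test
+#	}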
+
+# @FUNCTION: ejavac
+# @USAGE: <javac_arguments>
+# @DESCRIPTION:
+# Javac wrapper function. Will use the appropriate compiler, based on
+# /etc/java-config/compilers.conf
+ejavac() {
+ debug-print-function ${FUNCNAME} $*
+
+ local compiler_executable
+ compiler_executable=$(java-pkg_get-javac)
+
+ local javac_args
+ javac_args="$(java-pkg_javac-args)"
+
+ if [[ -n ${JAVA_PKG_DEBUG} ]]; then
+ einfo "Verbose logging for \"${FUNCNAME}\" function"
+ einfo "Compiler executable: ${compiler_executable}"
+ einfo "Extra arguments: ${javac_args}"
+ einfo "Complete command:"
+ einfo "${compiler_executable} ${javac_args} ${@}"
+ fi
+
+ ebegin "Compiling"
+ ${compiler_executable} ${javac_args} "${@}" || die "ejavac failed"
+}
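+
+# A minimal sketch (hypothetical): compiling a handful of sources directly
+# instead of going through ant; the src/ layout is an assumption:
+#
+#	src_compile() {
+#		mkdir -p build/classes || die
+#		ejavac -d build/classes $(find src -name '*.java')
+#	}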
+
+# @FUNCTION: ejavadoc
+# @USAGE: <javadoc_arguments>
+# @DESCRIPTION:
+# javadoc wrapper function. Will set some flags based on the VM version
+# due to strict javadoc rules in 1.8.
+ejavadoc() {
+ debug-print-function ${FUNCNAME} $*
+
+ local javadoc_args=""
+
+ if java-pkg_is-vm-version-ge "1.8" ; then
+ javadoc_args="-Xdoclint:none"
+ fi
+
+ if [[ -n ${JAVA_PKG_DEBUG} ]]; then
+ einfo "Verbose logging for \"${FUNCNAME}\" function"
+ einfo "Javadoc executable: javadoc"
+ einfo "Extra arguments: ${javadoc_args}"
+ einfo "Complete command:"
+ einfo "javadoc ${javadoc_args} ${@}"
+ fi
+
+ ebegin "Generating JavaDoc"
+ javadoc ${javadoc_args} "${@}" || die "ejavadoc failed"
+}
+
+# @FUNCTION: java-pkg_filter-compiler
+# @USAGE: <compiler(s)_to_filter>
+# @DESCRIPTION:
+# Used to prevent the use of some compilers. Should be used in src_compile.
+# Basically, it just appends onto JAVA_PKG_FILTER_COMPILER
+java-pkg_filter-compiler() {
+ JAVA_PKG_FILTER_COMPILER="${JAVA_PKG_FILTER_COMPILER} $@"
+}
+
+# @FUNCTION: java-pkg_force-compiler
+# @USAGE: <compiler(s)_to_force>
+# @DESCRIPTION:
+# Used to force the use of particular compilers. Should be used in src_compile.
+# A common use of this would be to force ecj-3.1 to be used on amd64, to avoid
+# OutOfMemoryErrors that may come up.
+java-pkg_force-compiler() {
+ JAVA_PKG_FORCE_COMPILER="$@"
+}
+
+# @FUNCTION: use_doc
+# @DESCRIPTION:
+#
+# Helper function for getting ant to build javadocs. If the user has USE=doc,
+# then 'javadoc' or the provided argument is returned. Otherwise, nothing is returned.
+#
+# The output of this should be passed to ant.
+# @CODE
+# Parameters:
+# $@ - Option value to return. Defaults to 'javadoc'
+#
+# Examples:
+# build javadocs by calling 'javadoc' target
+# eant $(use_doc)
+#
+# build javadocs by calling 'apidoc' target
+# eant $(use_doc apidoc)
+# @CODE
+# @RETURN string - Name of the target to create javadocs
+use_doc() {
+ use doc && echo ${@:-javadoc}
+}
+
+
+# @FUNCTION: java-pkg_init
+# @INTERNAL
+# @DESCRIPTION:
+# The purpose of this function, as the name might imply, is to initialize the
+# Java environment. It ensures that there aren't any environment variables
+# that'll muss things up. It initializes some variables, which are used
+# internally. And most importantly, it'll switch the VM if necessary.
+#
+# This shouldn't be used directly. Instead, java-pkg and java-pkg-opt will
+# call it during each of the phases of the merge process.
+java-pkg_init() {
+ debug-print-function ${FUNCNAME} $*
+
+ # Don't set up build environment if installing from binary. #206024 #258423
+ [[ "${MERGE_TYPE}" == "binary" ]] && return
+ # Also try Portage's nonstandard EMERGE_FROM for old EAPIs, if it doesn't
+ # work nothing is lost.
+ has ${EAPI:-0} 0 1 2 3 && [[ "${EMERGE_FROM}" == "binary" ]] && return
+
+ unset JAVAC
+ unset JAVA_HOME
+
+ java-config --help >/dev/null || {
+ eerror ""
+ eerror "Can't run java-config --help"
+ eerror "Have you upgraded python recently but haven't"
+ eerror "run python-updater yet?"
+ die "Can't run java-config --help"
+ }
+
+ # People do all kinds of weird things.
+ # https://forums.gentoo.org/viewtopic-p-3943166.html
+ local silence="${SILENCE_JAVA_OPTIONS_WARNING}"
+ local accept="${I_WANT_GLOBAL_JAVA_OPTIONS}"
+ if [[ -n ${_JAVA_OPTIONS} && -z ${accept} && -z ${silence} ]]; then
+ ewarn "_JAVA_OPTIONS changes what java -version outputs at least for"
+		ewarn "sun-jdk vms and as such breaks configure scripts that"
+ ewarn "use it (for example app-office/openoffice) so we filter it out."
+ ewarn "Use SILENCE_JAVA_OPTIONS_WARNING=true in the environment (use"
+ ewarn "make.conf for example) to silence this warning or"
+ ewarn "I_WANT_GLOBAL_JAVA_OPTIONS to not filter it."
+ fi
+
+ if [[ -z ${accept} ]]; then
+ # export _JAVA_OPTIONS= doesn't work because it will show up in java
+ # -version output
+ unset _JAVA_OPTIONS
+ # phase hooks make this run many times without this
+ I_WANT_GLOBAL_JAVA_OPTIONS="true"
+ fi
+
+ if java-pkg_func-exists ant_src_unpack; then
+ java-pkg_announce-qa-violation "Using old ant_src_unpack. Should be src_unpack"
+ fi
+
+ java-pkg_switch-vm
+ PATH=${JAVA_HOME}/bin:${PATH}
+
+ # TODO we will probably want to set JAVAC and JAVACFLAGS
+
+ # Do some QA checks
+ java-pkg_check-jikes
+
+ # Can't use unset here because Portage does not save the unset
+ # see https://bugs.gentoo.org/show_bug.cgi?id=189417#c11
+
+ # When users have crazy classpaths some packages can fail to compile.
+ # and everything should work with empty CLASSPATH.
+ # This also helps prevent unexpected dependencies on random things
+ # from the CLASSPATH.
+ export CLASSPATH=
+
+ # Unset external ANT_ stuff
+ export ANT_TASKS=
+ export ANT_OPTS=
+ export ANT_RESPECT_JAVA_HOME=
+}
+
+# @FUNCTION: java-pkg_init-compiler_
+# @INTERNAL
+# @DESCRIPTION:
+# This function attempts to figure out what compiler should be used. It does
+# this by reading the file at JAVA_PKG_COMPILERS_CONF, and checking the
+# COMPILERS variable defined there.
+# This can be overridden by a list in JAVA_PKG_FORCE_COMPILER
+#
+# It will go through the list of compilers, and verify that it supports the
+# target and source that are needed. If it is not suitable, then the next
+# compiler is checked. When JAVA_PKG_FORCE_COMPILER is defined, this checking
+# isn't done.
+#
+# Once the compiler to use has been figured out, it is set to
+# GENTOO_COMPILER.
+#
+# If you hadn't guessed, JAVA_PKG_FORCE_COMPILER is for testing only.
+#
+# If the user doesn't define anything in JAVA_PKG_COMPILERS_CONF, or no
+# suitable compiler was found there, then the default is to use javac provided
+# by the current VM.
+#
+#
+# @RETURN name of the compiler to use
+java-pkg_init-compiler_() {
+ debug-print-function ${FUNCNAME} $*
+
+ if [[ -n ${GENTOO_COMPILER} ]]; then
+ debug-print "GENTOO_COMPILER already set"
+ return
+ fi
+
+ local compilers;
+ if [[ -z ${JAVA_PKG_FORCE_COMPILER} ]]; then
+ compilers="$(source ${JAVA_PKG_COMPILERS_CONF} 1>/dev/null 2>&1; echo ${COMPILERS})"
+ else
+ compilers=${JAVA_PKG_FORCE_COMPILER}
+ fi
+
+ debug-print "Read \"${compilers}\" from ${JAVA_PKG_COMPILERS_CONF}"
+
+ # Figure out if we should announce what compiler we're using
+ local compiler
+ for compiler in ${compilers}; do
+ debug-print "Checking ${compiler}..."
+ # javac should always be alright
+ if [[ ${compiler} = "javac" ]]; then
+ debug-print "Found javac... breaking"
+ export GENTOO_COMPILER="javac"
+ break
+ fi
+
+ if has ${compiler} ${JAVA_PKG_FILTER_COMPILER}; then
+ if [[ -z ${JAVA_PKG_FORCE_COMPILER} ]]; then
+ einfo "Filtering ${compiler}" >&2
+ continue
+ fi
+ fi
+
+ # for non-javac, we need to make sure it supports the right target and
+ # source
+ local compiler_env="${JAVA_PKG_COMPILER_DIR}/${compiler}"
+ if [[ -f ${compiler_env} ]]; then
+ local desired_target="$(java-pkg_get-target)"
+ local desired_source="$(java-pkg_get-source)"
+
+
+ # Verify that the compiler supports target
+ local supported_target=$(source ${compiler_env} 1>/dev/null 2>&1; echo ${SUPPORTED_TARGET})
+ if ! has ${desired_target} ${supported_target}; then
+ ewarn "${compiler} does not support -target ${desired_target}, skipping"
+ continue
+ fi
+
+ # Verify that the compiler supports source
+ local supported_source=$(source ${compiler_env} 1>/dev/null 2>&1; echo ${SUPPORTED_SOURCE})
+ if ! has ${desired_source} ${supported_source}; then
+ ewarn "${compiler} does not support -source ${desired_source}, skipping"
+ continue
+ fi
+
+ # if you get here, then the compiler should be good to go
+ export GENTOO_COMPILER="${compiler}"
+ break
+ else
+ ewarn "Could not find configuration for ${compiler}, skipping"
+ ewarn "Perhaps it is not installed?"
+ continue
+ fi
+ done
+
+ # If it hasn't been defined already, default to javac
+ if [[ -z ${GENTOO_COMPILER} ]]; then
+ if [[ -n ${compilers} ]]; then
+ einfo "No suitable compiler found: defaulting to JDK default for compilation" >&2
+ else
+ # probably don't need to notify users about the default.
+ :;#einfo "Defaulting to javac for compilation" >&2
+ fi
+ if java-config -g GENTOO_COMPILER 2> /dev/null; then
+ export GENTOO_COMPILER=$(java-config -g GENTOO_COMPILER)
+ else
+ export GENTOO_COMPILER=javac
+ fi
+ else
+ einfo "Using ${GENTOO_COMPILER} for compilation" >&2
+ fi
+
+}
+
+# @FUNCTION: java-pkg_init_paths_
+# @INTERNAL
+# @DESCRIPTION:
+# Initializes some variables that will be used. These variables are mostly used
+# to determine where things will eventually get installed.
+java-pkg_init_paths_() {
+ debug-print-function ${FUNCNAME} $*
+
+ local pkg_name
+ if [[ "${SLOT%/*}" == "0" ]] ; then
+ JAVA_PKG_NAME="${PN}"
+ else
+ JAVA_PKG_NAME="${PN}-${SLOT%/*}"
+ fi
+
+ JAVA_PKG_SHAREPATH="/usr/share/${JAVA_PKG_NAME}"
+ JAVA_PKG_SOURCESPATH="${JAVA_PKG_SHAREPATH}/sources/"
+ JAVA_PKG_ENV="${ED}${JAVA_PKG_SHAREPATH}/package.env"
+ JAVA_PKG_VIRTUALS_PATH="/usr/share/java-config-2/virtuals"
+ JAVA_PKG_VIRTUAL_PROVIDER="${ED}${JAVA_PKG_VIRTUALS_PATH}/${JAVA_PKG_NAME}"
+
+ [[ -z "${JAVA_PKG_JARDEST}" ]] && JAVA_PKG_JARDEST="${JAVA_PKG_SHAREPATH}/lib"
+ [[ -z "${JAVA_PKG_LIBDEST}" ]] && JAVA_PKG_LIBDEST="/usr/$(get_libdir)/${JAVA_PKG_NAME}"
+ [[ -z "${JAVA_PKG_WARDEST}" ]] && JAVA_PKG_WARDEST="${JAVA_PKG_SHAREPATH}/webapps"
+
+ # TODO maybe only print once?
+ debug-print "JAVA_PKG_SHAREPATH: ${JAVA_PKG_SHAREPATH}"
+ debug-print "JAVA_PKG_ENV: ${JAVA_PKG_ENV}"
+ debug-print "JAVA_PKG_JARDEST: ${JAVA_PKG_JARDEST}"
+ debug-print "JAVA_PKG_LIBDEST: ${JAVA_PKG_LIBDEST}"
+ debug-print "JAVA_PKG_WARDEST: ${JAVA_PKG_WARDEST}"
+}
+
+# @FUNCTION: java-pkg_do_write_
+# @INTERNAL
+# @DESCRIPTION:
+# Writes the package.env out to disk.
+#
+# TODO change to do-write, to match everything else
+java-pkg_do_write_() {
+ debug-print-function ${FUNCNAME} $*
+ java-pkg_init_paths_
+ # Create directory for package.env
+ dodir "${JAVA_PKG_SHAREPATH}"
+
+ # Create package.env
+ (
+ echo "DESCRIPTION=\"${DESCRIPTION}\""
+ echo "GENERATION=\"2\""
+ echo "SLOT=\"${SLOT}\""
+ echo "CATEGORY=\"${CATEGORY}\""
+ echo "PVR=\"${PVR}\""
+
+ [[ -n "${JAVA_PKG_CLASSPATH}" ]] && echo "CLASSPATH=\"${JAVA_PKG_CLASSPATH}\""
+ [[ -n "${JAVA_PKG_LIBRARY}" ]] && echo "LIBRARY_PATH=\"${JAVA_PKG_LIBRARY}\""
+ [[ -n "${JAVA_PROVIDE}" ]] && echo "PROVIDES=\"${JAVA_PROVIDE}\""
+ [[ -f "${JAVA_PKG_DEPEND_FILE}" ]] \
+ && echo "DEPEND=\"$(sort -u "${JAVA_PKG_DEPEND_FILE}" | tr '\n' ':')\""
+ [[ -f "${JAVA_PKG_OPTIONAL_DEPEND_FILE}" ]] \
+ && echo "OPTIONAL_DEPEND=\"$(sort -u "${JAVA_PKG_OPTIONAL_DEPEND_FILE}" | tr '\n' ':')\""
+ echo "VM=\"$(echo ${RDEPEND} ${DEPEND} | sed -e 's/ /\n/g' | sed -n -e '/virtual\/\(jre\|jdk\)/ { p;q }')\"" # TODO cleanup !
+ [[ -f "${JAVA_PKG_BUILD_DEPEND_FILE}" ]] \
+ && echo "BUILD_DEPEND=\"$(sort -u "${JAVA_PKG_BUILD_DEPEND_FILE}" | tr '\n' ':')\""
+ ) > "${JAVA_PKG_ENV}"
+
+ # register target/source
+ local target="$(java-pkg_get-target)"
+ local source="$(java-pkg_get-source)"
+ [[ -n ${target} ]] && echo "TARGET=\"${target}\"" >> "${JAVA_PKG_ENV}"
+ [[ -n ${source} ]] && echo "SOURCE=\"${source}\"" >> "${JAVA_PKG_ENV}"
+
+ # register javadoc info
+ [[ -n ${JAVADOC_PATH} ]] && echo "JAVADOC_PATH=\"${JAVADOC_PATH}\"" \
+ >> ${JAVA_PKG_ENV}
+ # register source archives
+ [[ -n ${JAVA_SOURCES} ]] && echo "JAVA_SOURCES=\"${JAVA_SOURCES}\"" \
+ >> ${JAVA_PKG_ENV}
+
+ echo "MERGE_VM=\"${GENTOO_VM}\"" >> "${JAVA_PKG_ENV}"
+ [[ -n ${GENTOO_COMPILER} ]] && echo "MERGE_COMPILER=\"${GENTOO_COMPILER}\"" >> "${JAVA_PKG_ENV}"
+
+ # extra env variables
+ if [[ -n "${JAVA_PKG_EXTRA_ENV_VARS}" ]]; then
+ cat "${JAVA_PKG_EXTRA_ENV}" >> "${JAVA_PKG_ENV}" || die
+ # nested echo to remove leading/trailing spaces
+ echo "ENV_VARS=\"$(echo ${JAVA_PKG_EXTRA_ENV_VARS})\"" \
+ >> "${JAVA_PKG_ENV}" || die
+ fi
+
+ # Strip unnecessary leading and trailing colons
+ # TODO try to cleanup if possible
+ sed -e "s/=\":/=\"/" -e "s/:\"$/\"/" -i "${JAVA_PKG_ENV}" || die "Did you forget to call java_init ?"
+}
+
+# @FUNCTION: java-pkg_record-jar_
+# @INTERNAL
+# @DESCRIPTION:
+# Record an (optional) dependency to the package.env
+# @CODE
+# Parameters:
+# --optional - record dependency as optional
+# --build-only - record dependency as build-only
+# $1 - package to record
+# $2 - (optional) jar of package to record
+# @CODE
+JAVA_PKG_DEPEND_FILE="${T}/java-pkg-depend"
+JAVA_PKG_OPTIONAL_DEPEND_FILE="${T}/java-pkg-optional-depend"
+JAVA_PKG_BUILD_DEPEND_FILE="${T}/java-pkg-build-depend"
+
+java-pkg_record-jar_() {
+ debug-print-function ${FUNCNAME} $*
+
+ local depend_file="${JAVA_PKG_DEPEND_FILE}"
+ case "${1}" in
+ "--optional") depend_file="${JAVA_PKG_OPTIONAL_DEPEND_FILE}"; shift;;
+ "--build-only") depend_file="${JAVA_PKG_BUILD_DEPEND_FILE}"; shift;;
+ esac
+
+ local pkg=${1} jar=${2} append
+ if [[ -z "${jar}" ]]; then
+ append="${pkg}"
+ else
+ append="$(basename ${jar})@${pkg}"
+ fi
+
+ echo "${append}" >> "${depend_file}"
+}
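+
+# Illustrative behaviour (assumed values): "java-pkg_record-jar_ dev-java/foo
+# foo.jar" appends "foo.jar@dev-java/foo" to the regular depend file, while
+# "java-pkg_record-jar_ --build-only dev-java/foo" appends just "dev-java/foo"
+# to the build-only depend file.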
+
+# @FUNCTION: java-pkg_append_
+# @INTERNAL
+# @DESCRIPTION:
+# Appends a value to a variable
+#
+# @CODE
+# Parameters:
+# $1 variable name to modify
+# $2 value to append
+#
+# Examples:
+# java-pkg_append_ CLASSPATH foo.jar
+# @CODE
+java-pkg_append_() {
+ debug-print-function ${FUNCNAME} $*
+
+ local var="${1}" value="${2}"
+ if [[ -z "${!var}" ]] ; then
+ export ${var}="${value}"
+ else
+ local oldIFS=${IFS} cur haveit
+ IFS=':'
+ for cur in ${!var}; do
+ if [[ ${cur} == ${value} ]]; then
+ haveit="yes"
+ break
+ fi
+ done
+ [[ -z ${haveit} ]] && export ${var}="${!var}:${value}"
+ IFS=${oldIFS}
+ fi
+}
+
+# @FUNCTION: java-pkg_expand_dir_
+# @INTERNAL
+# @DESCRIPTION:
+# Gets the full path of the file/directory's parent.
+# @CODE
+# Parameters:
+# $1 - file/directory to find parent directory for
+# @CODE
+# @RETURN: path to $1's parent directory
+java-pkg_expand_dir_() {
+ pushd "$(dirname "${1}")" >/dev/null 2>&1 || die
+ pwd
+ popd >/dev/null 2>&1 || die
+}
+
+# @FUNCTION: java-pkg_func-exists
+# @INTERNAL
+# @DESCRIPTION:
+# Does the indicated function exist?
+# @RETURN: 0 - function is declared, 1 - function is undeclared
+java-pkg_func-exists() {
+ declare -F ${1} > /dev/null
+}
+
+# @FUNCTION: java-pkg_setup-vm
+# @INTERNAL
+# @DESCRIPTION:
+# Sets up the environment for a specific VM
+java-pkg_setup-vm() {
+ debug-print-function ${FUNCNAME} $*
+
+ local vendor="$(java-pkg_get-vm-vendor)"
+ if [[ "${vendor}" == "sun" ]] && java-pkg_is-vm-version-ge "1.5" ; then
+ addpredict "/dev/random"
+ elif [[ "${vendor}" == "ibm" ]]; then
+ addpredict "/proc/self/maps"
+ addpredict "/proc/cpuinfo"
+ addpredict "/proc/self/coredump_filter"
+ elif [[ "${vendor}" == "oracle" ]]; then
+ addpredict "/dev/random"
+ addpredict "/proc/self/coredump_filter"
+ elif [[ "${vendor}" == icedtea* ]] && java-pkg_is-vm-version-ge "1.7" ; then
+ addpredict "/dev/random"
+ addpredict "/proc/self/coredump_filter"
+ elif [[ "${vendor}" == "jrockit" ]]; then
+ addpredict "/proc/cpuinfo"
+ fi
+}
+
+# @FUNCTION: java-pkg_needs-vm
+# @INTERNAL
+# @DESCRIPTION:
+# Does the current package depend on virtual/jdk or does it set
+# JAVA_PKG_WANT_BUILD_VM?
+#
+# @RETURN: 0 - Package depends on virtual/jdk; 1 - Package does not depend on virtual/jdk
+java-pkg_needs-vm() {
+ debug-print-function ${FUNCNAME} $*
+
+ if [[ -n "$(echo ${JAVA_PKG_NV_DEPEND:-${DEPEND}} | sed -e '\:virtual/jdk:!d')" ]]; then
+ return 0
+ fi
+
+ [[ -n "${JAVA_PKG_WANT_BUILD_VM}" ]] && return 0
+
+ return 1
+}
+
+# @FUNCTION: java-pkg_get-current-vm
+# @INTERNAL
+# @RETURN - The current VM being used
+java-pkg_get-current-vm() {
+ java-config -f
+}
+
+# @FUNCTION: java-pkg_get-vm-vendor
+# @INTERNAL
+# @RETURN - The vendor of the current VM
+java-pkg_get-vm-vendor() {
+ debug-print-function ${FUNCNAME} $*
+
+ local vm="$(java-pkg_get-current-vm)"
+ vm="${vm/-*/}"
+ echo "${vm}"
+}
+
+# @FUNCTION: java-pkg_get-vm-version
+# @INTERNAL
+# @RETURN - The version of the current VM
+java-pkg_get-vm-version() {
+ debug-print-function ${FUNCNAME} $*
+
+ java-config -g PROVIDES_VERSION
+}
+
+# @FUNCTION: java-pkg_build-vm-from-handle
+# @INTERNAL
+# @DESCRIPTION:
+# Selects a build VM from a list of VM handles. First checks whether the
+# system VM is usable, then steps through the listed handles until a suitable
+# VM is found.
+#
+# @RETURN - VM handle of an available JDK
+java-pkg_build-vm-from-handle() {
+ debug-print-function ${FUNCNAME} "$*"
+
+ local vm
+ vm=$(java-pkg_get-current-vm 2>/dev/null)
+ if [[ $? -eq 0 ]]; then
+ if has ${vm} ${JAVA_PKG_WANT_BUILD_VM}; then
+ echo ${vm}
+ return 0
+ fi
+ fi
+
+ for vm in ${JAVA_PKG_WANT_BUILD_VM}; do
+ if java-config-2 --select-vm=${vm} 2>/dev/null; then
+ echo ${vm}
+ return 0
+ fi
+ done
+
+ eerror "${FUNCNAME}: No vm found for handles: ${JAVA_PKG_WANT_BUILD_VM}"
+ return 1
+}
+
+# @FUNCTION: java-pkg_switch-vm
+# @INTERNAL
+# @DESCRIPTION:
+# Switch VM if we're allowed to (controlled by JAVA_PKG_ALLOW_VM_CHANGE), and
+# verify that the current VM is sufficient.
+# Setup the environment for the VM being used.
+java-pkg_switch-vm() {
+ debug-print-function ${FUNCNAME} $*
+
+ if java-pkg_needs-vm; then
+ # Use the VM specified by JAVA_PKG_FORCE_VM
+ if [[ -n "${JAVA_PKG_FORCE_VM}" ]]; then
+			# If you're forcing the VM, I hope you know what you're doing...
+ debug-print "JAVA_PKG_FORCE_VM used: ${JAVA_PKG_FORCE_VM}"
+ export GENTOO_VM="${JAVA_PKG_FORCE_VM}"
+ # if we're allowed to switch the vm...
+ elif [[ "${JAVA_PKG_ALLOW_VM_CHANGE}" == "yes" ]]; then
+ # if there is an explicit list of handles to choose from
+ if [[ -n "${JAVA_PKG_WANT_BUILD_VM}" ]]; then
+ debug-print "JAVA_PKG_WANT_BUILD_VM used: ${JAVA_PKG_WANT_BUILD_VM}"
+ GENTOO_VM=$(java-pkg_build-vm-from-handle)
+ if [[ $? != 0 ]]; then
+ eerror "${FUNCNAME}: No VM found for handles: ${JAVA_PKG_WANT_BUILD_VM}"
+ die "${FUNCNAME}: Failed to determine VM for building"
+ fi
+ # JAVA_PKG_WANT_SOURCE and JAVA_PKG_WANT_TARGET are required as
+ # they can't be deduced from handles.
+ if [[ -z "${JAVA_PKG_WANT_SOURCE}" ]]; then
+ eerror "JAVA_PKG_WANT_BUILD_VM specified but not JAVA_PKG_WANT_SOURCE"
+ die "Specify JAVA_PKG_WANT_SOURCE"
+ fi
+ if [[ -z "${JAVA_PKG_WANT_TARGET}" ]]; then
+ eerror "JAVA_PKG_WANT_BUILD_VM specified but not JAVA_PKG_WANT_TARGET"
+ die "Specify JAVA_PKG_WANT_TARGET"
+ fi
+ # otherwise determine a vm from dep string
+ else
+ debug-print "depend-java-query: NV_DEPEND: ${JAVA_PKG_NV_DEPEND:-${DEPEND}}"
+ GENTOO_VM="$(depend-java-query --get-vm "${JAVA_PKG_NV_DEPEND:-${DEPEND}}")"
+ if [[ -z "${GENTOO_VM}" || "${GENTOO_VM}" == "None" ]]; then
+ eerror "Unable to determine VM for building from dependencies:"
+ echo "NV_DEPEND: ${JAVA_PKG_NV_DEPEND:-${DEPEND}}"
+ die "Failed to determine VM for building."
+ fi
+ fi
+ export GENTOO_VM
+ # otherwise just make sure the current VM is sufficient
+ else
+ java-pkg_ensure-vm-version-sufficient
+ fi
+ debug-print "Using: $(java-config -f)"
+
+ java-pkg_setup-vm
+
+ export JAVA=$(java-config --java)
+ export JAVAC=$(java-config --javac)
+ JAVACFLAGS="$(java-pkg_javac-args)"
+ [[ -n ${JAVACFLAGS_EXTRA} ]] && JAVACFLAGS="${JAVACFLAGS_EXTRA} ${JAVACFLAGS}"
+ export JAVACFLAGS
+
+ export JAVA_HOME="$(java-config -g JAVA_HOME)"
+ export JDK_HOME=${JAVA_HOME}
+
+ #TODO If you know a better solution let us know.
+ java-pkg_append_ LD_LIBRARY_PATH "$(java-config -g LDPATH)"
+
+ local tann="${T}/announced-vm"
+ # With the hooks we should only get here once from pkg_setup but better safe than sorry
+		# if people have for example modified eclasses somewhere
+ if [[ -n "${JAVA_PKG_DEBUG}" ]] || [[ ! -f "${tann}" ]] ; then
+ einfo "Using: $(java-config -f)"
+ [[ ! -f "${tann}" ]] && touch "${tann}"
+ fi
+
+ else
+ [[ -n "${JAVA_PKG_DEBUG}" ]] && ewarn "!!! This package inherits java-pkg but doesn't depend on a JDK. -bin or broken dependency!!!"
+ fi
+}
+
+# @FUNCTION: java-pkg_die
+# @INTERNAL
+# @DESCRIPTION:
+# Enhanced die for Java packages, which displays some information that may be
+# useful for debugging bugs on bugzilla.
+#register_die_hook java-pkg_die
+if ! has java-pkg_die ${EBUILD_DEATH_HOOKS}; then
+ EBUILD_DEATH_HOOKS="${EBUILD_DEATH_HOOKS} java-pkg_die"
+fi
+
+java-pkg_die() {
+ echo "!!! When you file a bug report, please include the following information:" >&2
+ echo "GENTOO_VM=${GENTOO_VM} CLASSPATH=\"${CLASSPATH}\" JAVA_HOME=\"${JAVA_HOME}\"" >&2
+ echo "JAVACFLAGS=\"${JAVACFLAGS}\" COMPILER=\"${GENTOO_COMPILER}\"" >&2
+ echo "and of course, the output of emerge --info =${P}" >&2
+}
+
+
+# TODO document
+# List jars in the source directory, ${S}
+java-pkg_jar-list() {
+ if [[ -n "${JAVA_PKG_DEBUG}" ]]; then
+ einfo "Linked Jars"
+ find "${S}" -type l -name '*.jar' -print0 | xargs -0 -r -n 500 ls -ald | sed -e "s,${WORKDIR},\${WORKDIR},"
+ einfo "Jars"
+ find "${S}" -type f -name '*.jar' -print0 | xargs -0 -r -n 500 ls -ald | sed -e "s,${WORKDIR},\${WORKDIR},"
+ einfo "Classes"
+ find "${S}" -type f -name '*.class' -print0 | xargs -0 -r -n 500 ls -ald | sed -e "s,${WORKDIR},\${WORKDIR},"
+ fi
+}
+
+# @FUNCTION: java-pkg_verify-classes
+# @INTERNAL
+# @DESCRIPTION:
+# Verify that the classes were compiled for the right source / target. Dies if
+# not.
+# @CODE
+# $1 (optional) - the file to check, otherwise checks whole ${D}
+# @CODE
+java-pkg_verify-classes() {
+ #$(find ${ED} -type f -name '*.jar' -o -name '*.class')
+
+ local version_verify="/usr/bin/class-version-verify.py"
+
+ if [[ ! -x "${version_verify}" ]]; then
+ version_verify="/usr/$(get_libdir)/javatoolkit/bin/class-version-verify.py"
+ fi
+
+ if [[ ! -x "${version_verify}" ]]; then
+ ewarn "Unable to perform class version checks as"
+ ewarn "class-version-verify.py is unavailable"
+ ewarn "Please install dev-java/javatoolkit."
+ return
+ fi
+
+ local target=$(java-pkg_get-target)
+ local result
+ local log="${T}/class-version-verify.log"
+ if [[ -n "${1}" ]]; then
+ ${version_verify} -v -t ${target} "${1}" > "${log}"
+ result=$?
+ else
+ ebegin "Verifying java class versions (target: ${target})"
+ ${version_verify} -v -t ${target} -r "${ED}" > "${log}"
+ result=$?
+ eend ${result}
+ fi
+ [[ -n ${JAVA_PKG_DEBUG} ]] && cat "${log}"
+ if [[ ${result} != 0 ]]; then
+ eerror "Incorrect bytecode version found"
+ [[ -n "${1}" ]] && eerror "in file: ${1}"
+ eerror "See ${log} for more details."
+ die "Incorrect bytecode found"
+ fi
+}
+
+# @FUNCTION: java-pkg_ensure-dep
+# @INTERNAL
+# @DESCRIPTION:
+# Check that a package being used in jarfrom, getjars and getjar is contained
+# within DEPEND or RDEPEND with the correct SLOT. See this mail for details:
+# https://archives.gentoo.org/gentoo-dev/message/dcb644f89520f4bbb61cc7bbe45fdf6e
+# @CODE
+# Parameters:
+# $1 - empty - check both vars; "runtime" or "build" - check only
+# RDEPEND, resp. DEPEND
+# $2 - Package name and slot.
+# @CODE
+java-pkg_ensure-dep() {
+ debug-print-function ${FUNCNAME} $*
+
+ local limit_to="${1}"
+ local target_pkg="${2}"
+ local dev_error=""
+
+ # Transform into a regular expression to look for a matching package
+ # and SLOT. SLOTs don't have to be numeric so foo-bar could either
+ # mean foo-bar:0 or foo:bar. So you want to get your head around the
+ # line below?
+ #
+ # * The target package first has any dots escaped, e.g. foo-1.2
+ # becomes foo-1\.2.
+ #
+ # * sed then looks at the component following the last - or :
+ # character, or the whole string if there is no - or :
+ # character. It uses this to build a new regexp with two
+ # significant branches.
+ #
+ # * The first checks for the whole target package string, optionally
+ # followed by a version number, and then :0.
+ #
+ # * The second checks for the first part of the target package
+ # string, optionally followed by a version number, followed by the
+ # aforementioned component, treating that as a SLOT.
+ #
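+	#
+	# Illustrative example (hypothetical package name): for target_pkg
+	# "foo-bar" the resulting pattern matches atoms such as
+	# "dev-java/foo-bar:0", "dev-java/foo-bar-1.2:0" or "dev-java/foo-1.2:bar"
+	# in the dependency strings.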
+ local stripped_pkg=/$(sed -r 's/[-:]?([^-:]+)$/(\0(-[^:]+)?:0|(-[^:]+)?:\1)/' <<< "${target_pkg//./\\.}")\\b
+
+ debug-print "Matching against: ${stripped_pkg}"
+
+ # Uncomment the lines below once we've dealt with more of these
+ # otherwise we'll be tempted to turn JAVA_PKG_STRICT off while
+ # getting hit with a wave of bug reports. :(
+
+ if [[ ${limit_to} != runtime && ! ( "${DEPEND}" =~ $stripped_pkg ) ]]; then
+ dev_error="The ebuild is attempting to use ${target_pkg}, which is not "
+ dev_error+="declared with a SLOT in DEPEND."
+# if is-java-strict; then
+# die "${dev_error}"
+# else
+ eqawarn "java-pkg_ensure-dep: ${dev_error}"
+# eerror "Because you have ${target_pkg} installed,"
+# eerror "the package will build without problems, but please"
+# eerror "report this to https://bugs.gentoo.org."
+# fi
+ elif [[ ${limit_to} != build && ! ( "${RDEPEND}${PDEPEND}" =~ ${stripped_pkg} ) ]]; then
+ dev_error="The ebuild is attempting to use ${target_pkg}, which is not "
+ dev_error+="declared with a SLOT in [RP]DEPEND and --build-only wasn't given."
+# if is-java-strict; then
+# die "${dev_error}"
+# else
+ eqawarn "java-pkg_ensure-dep: ${dev_error}"
+# eerror "The package will build without problems, but may fail to run"
+# eerror "if you don't have ${target_pkg} installed,"
+# eerror "so please report this to https://bugs.gentoo.org."
+# fi
+ fi
+}
+
+java-pkg_check-phase() {
+ local phase=${1}
+ local funcname=${FUNCNAME[1]}
+ if [[ ${EBUILD_PHASE} != ${phase} ]]; then
+ local msg="${funcname} used outside of src_${phase}"
+ java-pkg_announce-qa-violation "${msg}"
+ fi
+}
+
+java-pkg_check-versioned-jar() {
+ local jar=${1}
+
+ if [[ ${jar} =~ ${PV} ]]; then
+ java-pkg_announce-qa-violation "installing versioned jar '${jar}'"
+ fi
+}
+
+java-pkg_check-jikes() {
+ if has jikes ${IUSE}; then
+ java-pkg_announce-qa-violation "deprecated USE flag 'jikes' in IUSE"
+ fi
+}
+
+java-pkg_announce-qa-violation() {
+ local nodie
+ if [[ ${1} == "--nodie" ]]; then
+ nodie="true"
+ shift
+ fi
+ echo "Java QA Notice: $@" >&2
+ increment-qa-violations
+ [[ -z "${nodie}" ]] && is-java-strict && die "${@}"
+}
+
+increment-qa-violations() {
+ let "JAVA_PKG_QA_VIOLATIONS+=1"
+ export JAVA_PKG_QA_VIOLATIONS
+}
+
+is-java-strict() {
+ [[ -n ${JAVA_PKG_STRICT} ]]
+ return $?
+}
+
+# @FUNCTION: java-pkg_clean
+# @DESCRIPTION:
+# Java package cleaner function. This will remove all *.class and *.jar
+# files, removing any bundled dependencies.
+java-pkg_clean() {
+ if [[ -z "${JAVA_PKG_NO_CLEAN}" ]]; then
+ find "${@}" '(' -name '*.class' -o -name '*.jar' ')' -type f -delete -print || die
+ fi
+}
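+
+# A minimal sketch (hypothetical ebuild code): drop all bundled class and jar
+# files from the unpacked sources before building:
+#
+#	src_prepare() {
+#		default
+#		java-pkg_clean "${S}"
+#	}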
diff --git a/eclass/linux-info.eclass b/eclass/linux-info.eclass
new file mode 100644
index 0000000..5267bd1
--- /dev/null
+++ b/eclass/linux-info.eclass
@@ -0,0 +1,925 @@
+# Copyright 1999-2016 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: linux-info.eclass
+# @MAINTAINER:
+# kernel@gentoo.org
+# @AUTHOR:
+# Original author: John Mylchreest <johnm@gentoo.org>
+# @BLURB: eclass used for accessing kernel related information
+# @DESCRIPTION:
+# This eclass is used as a central eclass for accessing kernel
+# related information for source or binary already installed.
+# It is vital for linux-mod.eclass to function correctly, and is split
+# out so that any ebuild behaviour "templates" are abstracted out
+# using additional eclasses.
+#
+# "kernel config" in this file means:
+# The .config of the currently installed sources is used as the first
+# preference, with a fall-back to bundled config (/proc/config.gz) if available.
+#
+# Before using any of the config-handling functions in this eclass, you must
+# ensure that one of the following functions has been called (in order of
+# preference), otherwise you will get bugs like #364041:
+# linux-info_pkg_setup
+# linux-info_get_any_version
+# get_version
+# get_running_version
+
+# A couple of env vars are available to affect the usage of this eclass.
+# These are as follows:
+
+# @ECLASS-VARIABLE: KERNEL_DIR
+# @DESCRIPTION:
+# A string containing the directory of the target kernel sources. The default value is
+# "/usr/src/linux"
+
+# @ECLASS-VARIABLE: CONFIG_CHECK
+# @DESCRIPTION:
+# A string containing a list of .config options to check for before
+# proceeding with the install.
+#
+# e.g.: CONFIG_CHECK="MTRR"
+#
+# You can also check that an option doesn't exist by
+# prepending it with an exclamation mark (!).
+#
+# e.g.: CONFIG_CHECK="!MTRR"
+#
+# To simply warn about a missing option, prepend a '~'.
+# It may be combined with '!'.
+#
+# In general, most checks should be non-fatal. The only time fatal checks should
+# be used is for building kernel modules or cases where a compile will fail
+# without the option.
+#
+# This is to allow usage of binary kernels, and minimal systems without kernel
+# sources.
+
+# @ECLASS-VARIABLE: ERROR_<CFG>
+# @DESCRIPTION:
+# A string containing the error message to display when the check against CONFIG_CHECK
+# fails. <CFG> should reference the appropriate option used in CONFIG_CHECK.
+#
+# e.g.: ERROR_MTRR="MTRR exists in the .config but shouldn't!!"
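+#
+# A combined (illustrative) example:
+#
+# e.g.: CONFIG_CHECK="~TUN"
+#       ERROR_TUN="This package may not work properly without CONFIG_TUN enabled."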
+
+# @ECLASS-VARIABLE: KBUILD_OUTPUT
+# @DESCRIPTION:
+# A string passed on the command line, or set from the kernel makefile. It contains the directory
+# which is to be used as the kernel object directory.
+
+# There are also a couple of variables which are set by this, and shouldn't be
+# set by hand. These are as follows:
+
+# @ECLASS-VARIABLE: KV_FULL
+# @DESCRIPTION:
+# A read-only variable. It's a string containing the full kernel version. ie: 2.6.9-gentoo-johnm-r1
+
+# @ECLASS-VARIABLE: KV_MAJOR
+# @DESCRIPTION:
+# A read-only variable. It's an integer containing the kernel major version. ie: 2
+
+# @ECLASS-VARIABLE: KV_MINOR
+# @DESCRIPTION:
+# A read-only variable. It's an integer containing the kernel minor version. ie: 6
+
+# @ECLASS-VARIABLE: KV_PATCH
+# @DESCRIPTION:
+# A read-only variable. It's an integer containing the kernel patch version. ie: 9
+
+# @ECLASS-VARIABLE: KV_EXTRA
+# @DESCRIPTION:
+# A read-only variable. It's a string containing the kernel EXTRAVERSION. ie: -gentoo
+
+# @ECLASS-VARIABLE: KV_LOCAL
+# @DESCRIPTION:
+# A read-only variable. It's a string containing the kernel LOCALVERSION concatenation. ie: -johnm
+
+# @ECLASS-VARIABLE: KV_DIR
+# @DESCRIPTION:
+# A read-only variable. It's a string containing the kernel source directory, will be null if
+# KERNEL_DIR is invalid.
+
+# @ECLASS-VARIABLE: KV_OUT_DIR
+# @DESCRIPTION:
+# A read-only variable. It's a string containing the kernel object directory, will be KV_DIR unless
+# KBUILD_OUTPUT is used. This should be used for referencing .config.
+
+# And to handle all the weirdness with cross-compiling
+inherit toolchain-funcs versionator
+
+EXPORT_FUNCTIONS pkg_setup
+
+# Overridable environment variables
+# ---------------------------------------
+KERNEL_DIR="${KERNEL_DIR:-${EROOT}usr/src/linux}"
+
+
+# Bug fixes
+# fix to bug #75034
+case ${ARCH} in
+ ppc) BUILD_FIXES="${BUILD_FIXES} TOUT=${T}/.tmp_gas_check";;
+ ppc64) BUILD_FIXES="${BUILD_FIXES} TOUT=${T}/.tmp_gas_check";;
+esac
+
+# @FUNCTION: set_arch_to_kernel
+# @DESCRIPTION:
+# Set the env ARCH to match what the kernel expects.
+set_arch_to_kernel() { export ARCH=$(tc-arch-kernel); }
+# @FUNCTION: set_arch_to_portage
+# @DESCRIPTION:
+# Set the env ARCH to match what portage expects.
+set_arch_to_portage() { export ARCH=$(tc-arch); }
+
+# qeinfo "Message"
+# -------------------
+# qeinfo is a quiet einfo call when EBUILD_PHASE
+# should not have visible output.
+qout() {
+ local outputmsg type
+ type=${1}
+ shift
+ outputmsg="${@}"
+ case "${EBUILD_PHASE}" in
+ depend) unset outputmsg;;
+ clean) unset outputmsg;;
+ preinst) unset outputmsg;;
+ esac
+ [ -n "${outputmsg}" ] && ${type} "${outputmsg}"
+}
+
+qeinfo() { qout einfo "${@}" ; }
+qewarn() { qout ewarn "${@}" ; }
+qeerror() { qout eerror "${@}" ; }
+
+# File Functions
+# ---------------------------------------
+
+# @FUNCTION: getfilevar
+# @USAGE: variable configfile
+# @RETURN: the value of the variable
+# @DESCRIPTION:
+# It detects the value of the variable defined in the file configfile. This is
+# done by including the configfile, and printing the variable with Make.
+# It WILL break if your makefile has missing dependencies!
+getfilevar() {
+ local ERROR basefname basedname myARCH="${ARCH}" M="${S}"
+ ERROR=0
+
+ [ -z "${1}" ] && ERROR=1
+ [ ! -f "${2}" ] && ERROR=1
+
+ if [ "${ERROR}" = 1 ]
+ then
+ echo -e "\n"
+ eerror "getfilevar requires 2 variables, with the second a valid file."
+ eerror " getfilevar <VARIABLE> <CONFIGFILE>"
+ else
+ basefname="$(basename ${2})"
+ basedname="$(dirname ${2})"
+ unset ARCH
+
+ # We use nonfatal because we want the caller to take care of things #373151
+ [[ ${EAPI:-0} == [0123] ]] && nonfatal() { "$@"; }
+ case ${EBUILD_PHASE_FUNC} in
+ pkg_info|pkg_nofetch|pkg_pretend) M="${T}" ;;
+ esac
+ echo -e "e:\\n\\t@echo \$(${1})\\ninclude ${basefname}" | \
+ nonfatal emake -C "${basedname}" M="${M}" ${BUILD_FIXES} -s -f - 2>/dev/null
+
+ ARCH=${myARCH}
+ fi
+}
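+
+# Illustrative use (assuming KV_DIR points at a kernel tree): print the
+# kernel's PATCHLEVEL by letting make evaluate the top-level Makefile:
+#
+#	getfilevar PATCHLEVEL "${KV_DIR}/Makefile"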
+
+# @FUNCTION: getfilevar_noexec
+# @USAGE: variable configfile
+# @RETURN: the value of the variable
+# @DESCRIPTION:
+# It detects the value of the variable defined in the file configfile.
+# This is done with sed matching an expression only. If the variable is defined
+# indirectly (e.g. via other make variables), you will run into problems. See
+# getfilevar for those cases.
+getfilevar_noexec() {
+ local ERROR basefname basedname mycat myARCH="${ARCH}"
+ ERROR=0
+ mycat='cat'
+
+ [ -z "${1}" ] && ERROR=1
+ [ ! -f "${2}" ] && ERROR=1
+ [ "${2%.gz}" != "${2}" ] && mycat='zcat'
+
+ if [ "${ERROR}" = 1 ]
+ then
+ echo -e "\n"
+ eerror "getfilevar_noexec requires 2 variables, with the second a valid file."
+ eerror " getfilevar_noexec <VARIABLE> <CONFIGFILE>"
+ else
+ ${mycat} "${2}" | \
+ sed -n \
+ -e "/^[[:space:]]*${1}[[:space:]]*:\\?=[[:space:]]*\(.*\)\$/{
+ s,^[^=]*[[:space:]]*=[[:space:]]*,,g ;
+ s,[[:space:]]*\$,,g ;
+ p
+ }"
+ fi
+}
+
+# @ECLASS-VARIABLE: _LINUX_CONFIG_EXISTS_DONE
+# @INTERNAL
+# @DESCRIPTION:
+# This is only set if one of the linux_config_*exists functions has been called.
+# We use it for a QA warning that the check for a config has not been performed,
+# as linux_chkconfig* in non-legacy mode WILL return an undefined value if no
+# config is available at all.
+_LINUX_CONFIG_EXISTS_DONE=
+
+linux_config_qa_check() {
+ local f="$1"
+ if [ -z "${_LINUX_CONFIG_EXISTS_DONE}" ]; then
+ ewarn "QA: You called $f before any linux_config_exists!"
+		ewarn "QA: The return value of $f will NOT be guaranteed later!"
+ fi
+}
+
+# @FUNCTION: linux_config_src_exists
+# @RETURN: true or false
+# @DESCRIPTION:
+# It returns true if .config exists in a build directory, otherwise false
+linux_config_src_exists() {
+ export _LINUX_CONFIG_EXISTS_DONE=1
+ [[ -n ${KV_OUT_DIR} && -s ${KV_OUT_DIR}/.config ]]
+}
+
+# @FUNCTION: linux_config_bin_exists
+# @RETURN: true or false
+# @DESCRIPTION:
+# It returns true if .config exists in /proc, otherwise false
+linux_config_bin_exists() {
+ export _LINUX_CONFIG_EXISTS_DONE=1
+ [[ -s /proc/config.gz ]]
+}
+
+# @FUNCTION: linux_config_exists
+# @RETURN: true or false
+# @DESCRIPTION:
+# It returns true if .config exists otherwise false
+#
+# This function MUST be checked before using any of the linux_chkconfig_*
+# functions.
+linux_config_exists() {
+ linux_config_src_exists || linux_config_bin_exists
+}
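+
+# A minimal sketch (hypothetical) of the intended calling pattern; the
+# SQUASHFS option is only an example:
+#
+#	linux-info_get_any_version
+#	if linux_config_exists && linux_chkconfig_present SQUASHFS; then
+#		einfo "SQUASHFS support detected"
+#	fi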
+
+# @FUNCTION: linux_config_path
+# @DESCRIPTION:
+# Echo the name of the config file to use. If none are found,
+# then return false.
+linux_config_path() {
+ if linux_config_src_exists; then
+ echo "${KV_OUT_DIR}/.config"
+ elif linux_config_bin_exists; then
+ echo "/proc/config.gz"
+ else
+ return 1
+ fi
+}
+
+# @FUNCTION: require_configured_kernel
+# @DESCRIPTION:
+# This function verifies that the current kernel is configured (it checks against the existence of .config)
+# otherwise it dies.
+require_configured_kernel() {
+ if ! linux_config_src_exists; then
+ qeerror "Could not find a usable .config in the kernel source directory."
+ qeerror "Please ensure that ${KERNEL_DIR} points to a configured set of Linux sources."
+ qeerror "If you are using KBUILD_OUTPUT, please set the environment var so that"
+ qeerror "it points to the necessary object directory so that it might find .config."
+ die "Kernel not configured; no .config found in ${KV_OUT_DIR}"
+ fi
+}
+
+# @FUNCTION: linux_chkconfig_present
+# @USAGE: option
+# @RETURN: true or false
+# @DESCRIPTION:
+# It checks that CONFIG_<option>=y or CONFIG_<option>=m is present in the current kernel .config
+# If linux_config_exists returns false, the results of this are UNDEFINED. You
+# MUST call linux_config_exists first.
+linux_chkconfig_present() {
+ linux_config_qa_check linux_chkconfig_present
+ [[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == [my] ]]
+}
+
+# @FUNCTION: linux_chkconfig_module
+# @USAGE: option
+# @RETURN: true or false
+# @DESCRIPTION:
+# It checks that CONFIG_<option>=m is present in the current kernel .config
+# If linux_config_exists returns false, the results of this are UNDEFINED. You
+# MUST call linux_config_exists first.
+linux_chkconfig_module() {
+ linux_config_qa_check linux_chkconfig_module
+ [[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == m ]]
+}
+
+# @FUNCTION: linux_chkconfig_builtin
+# @USAGE: option
+# @RETURN: true or false
+# @DESCRIPTION:
+# It checks that CONFIG_<option>=y is present in the current kernel .config
+# If linux_config_exists returns false, the results of this are UNDEFINED. You
+# MUST call linux_config_exists first.
+linux_chkconfig_builtin() {
+ linux_config_qa_check linux_chkconfig_builtin
+ [[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == y ]]
+}
+
+# @FUNCTION: linux_chkconfig_string
+# @USAGE: option
+# @RETURN: CONFIG_<option>
+# @DESCRIPTION:
+# It prints the CONFIG_<option> value of the current kernel .config (it requires a configured kernel).
+# If linux_config_exists returns false, the results of this are UNDEFINED. You
+# MUST call linux_config_exists first.
+linux_chkconfig_string() {
+ linux_config_qa_check linux_chkconfig_string
+ getfilevar_noexec "CONFIG_$1" "$(linux_config_path)"
+}
+
+# Versioning Functions
+# ---------------------------------------
+
+# @FUNCTION: kernel_is
+# @USAGE: [-lt -gt -le -ge -eq] major_number [minor_number patch_number]
+# @RETURN: true or false
+# @DESCRIPTION:
+# It returns true when the current kernel version satisfies the comparison against the passed version.
+# -eq is the default comparison.
+#
+# @CODE
+# For Example where KV = 2.6.9
+# kernel_is 2 4 returns false
+# kernel_is 2 returns true
+# kernel_is 2 6 returns true
+# kernel_is 2 6 8 returns false
+# kernel_is 2 6 9 returns true
+# @CODE
+
+# Note: duplicated in kernel-2.eclass
+kernel_is() {
+ # if we haven't determined the version yet, we need to.
+ linux-info_get_any_version
+
+ # Now we can continue
+ local operator test value
+
+ case ${1#-} in
+ lt) operator="-lt"; shift;;
+ gt) operator="-gt"; shift;;
+ le) operator="-le"; shift;;
+ ge) operator="-ge"; shift;;
+ eq) operator="-eq"; shift;;
+ *) operator="-eq";;
+ esac
+ [[ $# -gt 3 ]] && die "Error in kernel-2_kernel_is(): too many parameters"
+
+ : $(( test = (KV_MAJOR << 16) + (KV_MINOR << 8) + KV_PATCH ))
+ : $(( value = (${1:-${KV_MAJOR}} << 16) + (${2:-${KV_MINOR}} << 8) + ${3:-${KV_PATCH}} ))
+ [ ${test} ${operator} ${value} ]
+}
+
+get_localversion() {
+ local lv_list i x
+
+ # ignore files with ~ in it.
+ for i in $(ls ${1}/localversion* 2>/dev/null); do
+ [[ -n ${i//*~*} ]] && lv_list="${lv_list} ${i}"
+ done
+
+ for i in ${lv_list}; do
+ x="${x}$(<${i})"
+ done
+ x=${x/ /}
+ echo ${x}
+}
+
+# Check if the Makefile is valid for direct parsing.
+# Check status results:
+# - PASS, use 'getfilevar' to extract values
+# - FAIL, use 'getfilevar_noexec' to extract values
+# The check may fail if:
+# - make is not present
+# - corruption exists in the kernel makefile
+get_makefile_extract_function() {
+ local a='' b='' mkfunc='getfilevar'
+ a="$(getfilevar VERSION ${KERNEL_MAKEFILE})"
+ b="$(getfilevar_noexec VERSION ${KERNEL_MAKEFILE})"
+ [[ "${a}" != "${b}" ]] && mkfunc='getfilevar_noexec'
+ echo "${mkfunc}"
+}
+
+# internal variable, so we know to only print the warning once
+get_version_warning_done=
+
+# @FUNCTION: get_version
+# @DESCRIPTION:
+# It gets the version of the kernel inside KERNEL_DIR and populates the KV_FULL variable
+# (if KV_FULL is already set it does nothing).
+#
+# The kernel version variables (KV_MAJOR, KV_MINOR, KV_PATCH, KV_EXTRA and KV_LOCAL) are also set.
+#
+# The KV_DIR is set using the KERNEL_DIR env var, the KV_DIR_OUT is set using a valid
+# KBUILD_OUTPUT (in a decreasing priority list, we look for the env var, makefile var or the
+# symlink /lib/modules/${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}/build).
+get_version() {
+ local tmplocal
+
+ # no need to execute this twice assuming KV_FULL is populated.
+ # we can force by unsetting KV_FULL
+ [ -n "${KV_FULL}" ] && return 0
+
+	# if we don't know KV_FULL, then we need to.
+	# make sure KV_DIR isn't set since we need to work it out via KERNEL_DIR
+ unset KV_DIR
+
+ # KV_DIR will contain the full path to the sources directory we should use
+ [ -z "${get_version_warning_done}" ] && \
+ qeinfo "Determining the location of the kernel source code"
+ [ -h "${KERNEL_DIR}" ] && KV_DIR="$(readlink -f ${KERNEL_DIR})"
+ [ -d "${KERNEL_DIR}" ] && KV_DIR="${KERNEL_DIR}"
+
+ if [ -z "${KV_DIR}" ]
+ then
+ if [ -z "${get_version_warning_done}" ]; then
+ get_version_warning_done=1
+ qewarn "Unable to find kernel sources at ${KERNEL_DIR}"
+ #qeinfo "This package requires Linux sources."
+ if [ "${KERNEL_DIR}" == "/usr/src/linux" ] ; then
+ qeinfo "Please make sure that ${KERNEL_DIR} points at your running kernel, "
+ qeinfo "(or the kernel you wish to build against)."
+ qeinfo "Alternatively, set the KERNEL_DIR environment variable to the kernel sources location"
+ else
+ qeinfo "Please ensure that the KERNEL_DIR environment variable points at full Linux sources of the kernel you wish to compile against."
+ fi
+ fi
+ return 1
+ fi
+
+ # See if the kernel dir is actually an output dir. #454294
+ if [ -z "${KBUILD_OUTPUT}" -a -L "${KERNEL_DIR}/source" ]; then
+ KBUILD_OUTPUT=${KERNEL_DIR}
+ KERNEL_DIR=$(readlink -f "${KERNEL_DIR}/source")
+ KV_DIR=${KERNEL_DIR}
+ fi
+
+ if [ -z "${get_version_warning_done}" ]; then
+ qeinfo "Found kernel source directory:"
+ qeinfo " ${KV_DIR}"
+ fi
+
+ if [ ! -s "${KV_DIR}/Makefile" ]
+ then
+ if [ -z "${get_version_warning_done}" ]; then
+ get_version_warning_done=1
+ qeerror "Could not find a Makefile in the kernel source directory."
+ qeerror "Please ensure that ${KERNEL_DIR} points to a complete set of Linux sources"
+ fi
+ return 1
+ fi
+
+ # OK so now we know our sources directory, but they might be using
+ # KBUILD_OUTPUT, and we need this for .config and localversions-*
+ # so we better find it eh?
+ # do we pass KBUILD_OUTPUT on the CLI?
+ local OUTPUT_DIR=${KBUILD_OUTPUT}
+
+ # keep track of it
+ KERNEL_MAKEFILE="${KV_DIR}/Makefile"
+
+ if [[ -z ${OUTPUT_DIR} ]]; then
+ # Decide the function used to extract makefile variables.
+ local mkfunc=$(get_makefile_extract_function "${KERNEL_MAKEFILE}")
+
+ # And if we didn't pass it, we can take a nosey in the Makefile.
+ OUTPUT_DIR=$(${mkfunc} KBUILD_OUTPUT "${KERNEL_MAKEFILE}")
+ fi
+
+ # And contrary to existing functions I feel we shouldn't trust the
+ # directory name to find version information as this seems insane.
+ # So we parse ${KERNEL_MAKEFILE}. We should be able to trust that
+ # the Makefile is simple enough to use the noexec extract function.
+ # This has been true for every release thus far, and it's faster
+ # than using make to evaluate the Makefile every time.
+ KV_MAJOR=$(getfilevar_noexec VERSION "${KERNEL_MAKEFILE}")
+ KV_MINOR=$(getfilevar_noexec PATCHLEVEL "${KERNEL_MAKEFILE}")
+ KV_PATCH=$(getfilevar_noexec SUBLEVEL "${KERNEL_MAKEFILE}")
+ KV_EXTRA=$(getfilevar_noexec EXTRAVERSION "${KERNEL_MAKEFILE}")
+
+ if [ -z "${KV_MAJOR}" -o -z "${KV_MINOR}" -o -z "${KV_PATCH}" ]
+ then
+ if [ -z "${get_version_warning_done}" ]; then
+ get_version_warning_done=1
+ qeerror "Could not detect kernel version."
+ qeerror "Please ensure that ${KERNEL_DIR} points to a complete set of Linux sources."
+ fi
+ return 1
+ fi
+
+ # and in newer versions we can also pull LOCALVERSION if it is set.
+ # but before we do this, we need to find if we use a different object directory.
+ # This *WILL* break if the user is using localversions, but we assume it was
+ # caught before this if they are.
+ if [[ -z ${OUTPUT_DIR} ]] ; then
+ # Try to locate a kernel that is most relevant for us.
+ for OUTPUT_DIR in "${SYSROOT}" "${ROOT}" "" ; do
+ OUTPUT_DIR+="/lib/modules/${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}/build"
+ if [[ -e ${OUTPUT_DIR} ]] ; then
+ break
+ fi
+ done
+ fi
+
+ [ -h "${OUTPUT_DIR}" ] && KV_OUT_DIR="$(readlink -f ${OUTPUT_DIR})"
+ [ -d "${OUTPUT_DIR}" ] && KV_OUT_DIR="${OUTPUT_DIR}"
+ if [ -n "${KV_OUT_DIR}" ];
+ then
+ qeinfo "Found kernel object directory:"
+ qeinfo " ${KV_OUT_DIR}"
+ fi
+ # and if we STILL have not got it, then we better just set it to KV_DIR
+ KV_OUT_DIR="${KV_OUT_DIR:-${KV_DIR}}"
+
+ # Grab the kernel release from the output directory.
+ # TODO: we MUST detect kernel.release being out of date, and 'return 1' from
+ # this function.
+ if [ -s "${KV_OUT_DIR}"/include/config/kernel.release ]; then
+ KV_LOCAL=$(<"${KV_OUT_DIR}"/include/config/kernel.release)
+ elif [ -s "${KV_OUT_DIR}"/.kernelrelease ]; then
+ KV_LOCAL=$(<"${KV_OUT_DIR}"/.kernelrelease)
+ else
+ KV_LOCAL=
+ fi
+
+ # KV_LOCAL currently contains the full release; discard the first bits.
+ tmplocal=${KV_LOCAL#${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}}
+
+ # If the updated local version was not changed, the tree is not prepared.
+ # Clear out KV_LOCAL in that case.
+ # TODO: this does not detect a change in the localversion part between
+ # kernel.release and the value that would be generated.
+ if [ "$KV_LOCAL" = "$tmplocal" ]; then
+ KV_LOCAL=
+ else
+ KV_LOCAL=$tmplocal
+ fi
+
+ # And we should set KV_FULL to the full expanded version
+ KV_FULL="${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}${KV_LOCAL}"
+
+ qeinfo "Found sources for kernel version:"
+ qeinfo " ${KV_FULL}"
+
+ return 0
+}
+
+# @FUNCTION: get_running_version
+# @DESCRIPTION:
+# It gets the version of the current running kernel and the result is the same as get_version() if the
+# function can find the sources.
+get_running_version() {
+ KV_FULL=$(uname -r)
+
+ if [[ -f ${ROOT}/lib/modules/${KV_FULL}/source/Makefile && -f ${ROOT}/lib/modules/${KV_FULL}/build/Makefile ]]; then
+ KERNEL_DIR=$(readlink -f ${ROOT}/lib/modules/${KV_FULL}/source)
+ KBUILD_OUTPUT=$(readlink -f ${ROOT}/lib/modules/${KV_FULL}/build)
+ unset KV_FULL
+ get_version
+ return $?
+ elif [[ -f ${ROOT}/lib/modules/${KV_FULL}/source/Makefile ]]; then
+ KERNEL_DIR=$(readlink -f ${ROOT}/lib/modules/${KV_FULL}/source)
+ unset KV_FULL
+ get_version
+ return $?
+ elif [[ -f ${ROOT}/lib/modules/${KV_FULL}/build/Makefile ]]; then
+ KERNEL_DIR=$(readlink -f ${ROOT}/lib/modules/${KV_FULL}/build)
+ unset KV_FULL
+ get_version
+ return $?
+ else
+ # This handles a variety of weird kernel versions. Make sure to update
+ # tests/linux-info_get_running_version.sh if you want to change this.
+ local kv_full=${KV_FULL//[-+_]*}
+ KV_MAJOR=$(get_version_component_range 1 ${kv_full})
+ KV_MINOR=$(get_version_component_range 2 ${kv_full})
+ KV_PATCH=$(get_version_component_range 3 ${kv_full})
+ KV_EXTRA="${KV_FULL#${KV_MAJOR}.${KV_MINOR}${KV_PATCH:+.${KV_PATCH}}}"
+ : ${KV_PATCH:=0}
+ fi
+ return 0
+}
+
+# This next function is named with the eclass prefix to avoid conflicts with
+# some old versionator-like eclass functions.
+
+# @FUNCTION: linux-info_get_any_version
+# @DESCRIPTION:
+# This attempts to find the version of the sources, and otherwise falls back to
+# the version of the running kernel.
+linux-info_get_any_version() {
+ get_version
+ if [[ $? -ne 0 ]]; then
+ ewarn "Unable to calculate Linux Kernel version for build, attempting to use running version"
+ get_running_version
+ fi
+}
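+
+# Illustrative use (hypothetical ebuild code): make version information
+# available early in pkg_setup, falling back to the running kernel if no
+# usable sources are found:
+#
+#	pkg_setup() {
+#		linux-info_get_any_version
+#		kernel_is -ge 4 9 || die "Linux 4.9 or newer is required"
+#	}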
+
+
+# ebuild check functions
+# ---------------------------------------
+
+# @FUNCTION: check_kernel_built
+# @DESCRIPTION:
+# This function verifies that the current kernel sources have already been prepared, otherwise it dies.
+check_kernel_built() {
+ # if we haven't determined the version yet, we need to
+ require_configured_kernel
+ get_version
+
+ local versionh_path
+ if kernel_is -ge 3 7; then
+ versionh_path="include/generated/uapi/linux/version.h"
+ else
+ versionh_path="include/linux/version.h"
+ fi
+
+ if [ ! -f "${KV_OUT_DIR}/${versionh_path}" ]
+ then
+ eerror "These sources have not yet been prepared."
+ eerror "We cannot build against an unprepared tree."
+ eerror "To resolve this, please type the following:"
+ eerror
+ eerror "# cd ${KV_DIR}"
+ eerror "# make oldconfig"
+ eerror "# make modules_prepare"
+ eerror
+ eerror "Then please try merging this module again."
+ die "Kernel sources need compiling first"
+ fi
+}
+
+# @FUNCTION: check_modules_supported
+# @DESCRIPTION:
+# This function verifies that the current kernel supports modules (it checks CONFIG_MODULES=y), otherwise it dies.
+check_modules_supported() {
+	# if we haven't determined the version yet, we need to.
+ require_configured_kernel
+ get_version
+
+ if ! linux_chkconfig_builtin "MODULES"; then
+ eerror "These sources do not support loading external modules."
+ eerror "to be able to use this module please enable \"Loadable modules support\""
+ eerror "in your kernel, recompile and then try merging this module again."
+ die "No support for external modules in ${KV_FULL} config"
+ fi
+}
+
+# @FUNCTION: check_extra_config
+# @DESCRIPTION:
+# Checks the kernel config options specified in CONFIG_CHECK. It dies only when
+# a required config option (i.e. one whose name is not prefixed with ~) does not
+# satisfy the directive.
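+#
+# A sketch of the CONFIG_CHECK syntax this function understands (the option
+# names and messages below are only examples):
+#
+# @CODE
+# CONFIG_CHECK="MODULES ~TUN !DEBUG_SLAB"
+# ERROR_MODULES="CONFIG_MODULES is required to build this package."
+# WARNING_TUN="Without CONFIG_TUN the tun feature will not work."
+# @CODE
+#
+# Unprefixed options are fatal; ~ marks a non-fatal (warn-only) check; ! requires
+# the option to be disabled. ERROR_<option> and WARNING_<option> variables
+# override the default messages.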
+check_extra_config() {
+ local config negate die error reworkmodulenames
+ local soft_errors_count=0 hard_errors_count=0 config_required=0
+	# Save the value of the config-exists QA flag so it can be restored on exit;
+	# otherwise calling check_extra_config would hide later direct calls to the
+	# linux_chkconfig_* helpers from that QA check.
+ local old_LINUX_CONFIG_EXISTS_DONE="${_LINUX_CONFIG_EXISTS_DONE}"
+
+	# if we haven't determined the version yet, we need to do so now
+ linux-info_get_any_version
+
+ # Determine if we really need a .config. The only time when we don't need
+ # one is when all of the CONFIG_CHECK options are prefixed with "~".
+ for config in ${CONFIG_CHECK}; do
+ if [[ "${config:0:1}" != "~" ]]; then
+ config_required=1
+ break
+ fi
+ done
+
+ if [[ ${config_required} == 0 ]]; then
+ # In the case where we don't require a .config, we can now bail out
+ # if the user has no .config as there is nothing to do. Otherwise
+ # code later will cause a failure due to missing .config.
+ if ! linux_config_exists; then
+ ewarn "Unable to check for the following kernel config options due"
+ ewarn "to absence of any configured kernel sources or compiled"
+ ewarn "config:"
+ for config in ${CONFIG_CHECK}; do
+ config=${config#\~}
+ config=${config#\!}
+ local_error="ERROR_${config}"
+ msg="${!local_error}"
+ if [[ -z ${msg} ]]; then
+ local_error="WARNING_${config}"
+ msg="${!local_error}"
+ fi
+ ewarn " - ${config}${msg:+ - }${msg}"
+ done
+ ewarn "You're on your own to make sure they are set if needed."
+			export _LINUX_CONFIG_EXISTS_DONE="${old_LINUX_CONFIG_EXISTS_DONE}"
+ return 0
+ fi
+ else
+ require_configured_kernel
+ fi
+
+ einfo "Checking for suitable kernel configuration options..."
+
+ for config in ${CONFIG_CHECK}
+ do
+		# options are fatal by default; the ~ and @ prefixes downgrade them below
+ die=1
+ error=0
+ negate=0
+ reworkmodulenames=0
+
+ if [[ ${config:0:1} == "~" ]]; then
+ die=0
+ config=${config:1}
+ elif [[ ${config:0:1} == "@" ]]; then
+ die=0
+ reworkmodulenames=1
+ config=${config:1}
+ fi
+ if [[ ${config:0:1} == "!" ]]; then
+ negate=1
+ config=${config:1}
+ fi
+
+ if [[ ${negate} == 1 ]]; then
+ linux_chkconfig_present ${config} && error=2
+ elif [[ ${reworkmodulenames} == 1 ]]; then
+ local temp_config="${config//*:}" i n
+ config="${config//:*}"
+ if linux_chkconfig_present ${config}; then
+ for i in ${MODULE_NAMES}; do
+ n="${i//${temp_config}}"
+ [[ -z ${n//\(*} ]] && \
+ MODULE_IGNORE="${MODULE_IGNORE} ${temp_config}"
+ done
+ error=2
+ fi
+ else
+ linux_chkconfig_present ${config} || error=1
+ fi
+
+		if [[ ${error} -gt 0 ]]; then
+ local report_func="eerror" local_error
+ local_error="ERROR_${config}"
+ local_error="${!local_error}"
+
+ if [[ -z "${local_error}" ]]; then
+ # using old, deprecated format.
+ local_error="${config}_ERROR"
+ local_error="${!local_error}"
+ fi
+ if [[ ${die} == 0 && -z "${local_error}" ]]; then
+ #soft errors can be warnings
+ local_error="WARNING_${config}"
+ local_error="${!local_error}"
+ if [[ -n "${local_error}" ]] ; then
+ report_func="ewarn"
+ fi
+ fi
+
+ if [[ -z "${local_error}" ]]; then
+ [[ ${error} == 1 ]] \
+ && local_error="is not set when it should be." \
+ || local_error="should not be set. But it is."
+ local_error="CONFIG_${config}:\t ${local_error}"
+ fi
+ if [[ ${die} == 0 ]]; then
+ ${report_func} " ${local_error}"
+				soft_errors_count=$((soft_errors_count + 1))
+ else
+ ${report_func} " ${local_error}"
+				hard_errors_count=$((hard_errors_count + 1))
+ fi
+ fi
+ done
+
+	if [[ ${hard_errors_count} -gt 0 ]]; then
+ eerror "Please check to make sure these options are set correctly."
+ eerror "Failure to do so may cause unexpected problems."
+ eerror "Once you have satisfied these options, please try merging"
+ eerror "this package again."
+		export _LINUX_CONFIG_EXISTS_DONE="${old_LINUX_CONFIG_EXISTS_DONE}"
+ die "Incorrect kernel configuration options"
+	elif [[ ${soft_errors_count} -gt 0 ]]; then
+ ewarn "Please check to make sure these options are set correctly."
+ ewarn "Failure to do so may cause unexpected problems."
+ else
+ eend 0
+ fi
+	export _LINUX_CONFIG_EXISTS_DONE="${old_LINUX_CONFIG_EXISTS_DONE}"
+}
+
+# @FUNCTION: check_zlibinflate
+# @DESCRIPTION:
+# Verifies that zlib inflate/deflate support will actually be linked into the
+# kernel: CONFIG_ZLIB_INFLATE and CONFIG_ZLIB_DEFLATE must be built in, and at
+# least one option that pulls the zlib routines into the kernel image must be
+# enabled. Dies otherwise.
+check_zlibinflate() {
+	# if we haven't determined the version yet, we need to do so now
+ require_configured_kernel
+ get_version
+
+	# although I restructured this code - I really really really don't support it!
+
+ # bug #27882 - zlib routines are only linked into the kernel
+ # if something compiled into the kernel calls them
+ #
+ # plus, for the cloop module, it appears that there's no way
+ # to get cloop.o to include a static zlib if CONFIG_MODVERSIONS
+ # is on
+
+ local INFLATE
+ local DEFLATE
+
+ einfo "Determining the usability of ZLIB_INFLATE support in your kernel"
+
+ ebegin "checking ZLIB_INFLATE"
+	linux_chkconfig_builtin ZLIB_INFLATE
+ eend $?
+ [ "$?" != 0 ] && die
+
+ ebegin "checking ZLIB_DEFLATE"
+	linux_chkconfig_builtin ZLIB_DEFLATE
+ eend $?
+ [ "$?" != 0 ] && die
+
+ local LINENO_START
+	local LINENO_END
+	local LINENO_AMOUNT
+ local SYMBOLS
+ local x
+
+ LINENO_END="$(grep -n 'CONFIG_ZLIB_INFLATE y' ${KV_DIR}/lib/Config.in | cut -d : -f 1)"
+ LINENO_START="$(head -n $LINENO_END ${KV_DIR}/lib/Config.in | grep -n 'if \[' | tail -n 1 | cut -d : -f 1)"
+ (( LINENO_AMOUNT = $LINENO_END - $LINENO_START ))
+ (( LINENO_END = $LINENO_END - 1 ))
+ SYMBOLS="$(head -n $LINENO_END ${KV_DIR}/lib/Config.in | tail -n $LINENO_AMOUNT | sed -e 's/^.*\(CONFIG_[^\" ]*\).*/\1/g;')"
+
+ # okay, now we have a list of symbols
+ # we need to check each one in turn, to see whether it is set or not
+ for x in $SYMBOLS ; do
+ if [ "${!x}" = "y" ]; then
+ # we have a winner!
+ einfo "${x} ensures zlib is linked into your kernel - excellent"
+ return 0
+ fi
+ done
+
+ eerror
+ eerror "This kernel module requires ZLIB library support."
+	eerror "You have enabled zlib support in your kernel, but haven't enabled"
+	eerror "any option that will ensure that zlib is linked into your"
+	eerror "kernel."
+ eerror
+ eerror "Please ensure that you enable at least one of these options:"
+ eerror
+
+ for x in $SYMBOLS ; do
+ eerror " * $x"
+ done
+
+ eerror
+ eerror "Please remember to recompile and install your kernel, and reboot"
+ eerror "into your new kernel before attempting to load this kernel module."
+
+ die "Kernel doesn't include zlib support"
+}
+
+# @FUNCTION: linux-info_pkg_setup
+# @DESCRIPTION:
+# Default pkg_setup. Determines the kernel version (from the sources, falling
+# back to the running kernel) and then checks that the kernel is configured to
+# support the options specified in CONFIG_CHECK (if non-empty).
+# Also used when inheriting linux-mod.eclass to force a version lookup.
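+#
+# A minimal hypothetical ebuild skeleton using this (names are illustrative):
+#
+# @CODE
+# inherit linux-info
+# CONFIG_CHECK="~NETFILTER"
+#
+# pkg_setup() {
+# 	linux-info_pkg_setup
+# }
+# @CODE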
+linux-info_pkg_setup() {
+ linux-info_get_any_version
+
+ if kernel_is 2 4; then
+ if [ "$( gcc-major-version )" -eq "4" ] ; then
+ echo
+ ewarn "Be warned !! >=sys-devel/gcc-4.0.0 isn't supported with"
+ ewarn "linux-2.4 (or modules building against a linux-2.4 kernel)!"
+ echo
+ ewarn "Either switch to another gcc-version (via gcc-config) or use a"
+ ewarn "newer kernel that supports gcc-4."
+ echo
+			ewarn "Also be aware that bug reports about gcc-4 not working"
+ ewarn "with linux-2.4 based ebuilds will be closed as INVALID!"
+ echo
+ epause 10
+ fi
+ fi
+
+ [ -n "${CONFIG_CHECK}" ] && check_extra_config;
+}
diff --git a/eclass/toolchain.eclass b/eclass/toolchain.eclass
index 27ff44a..ca64091 100644
--- a/eclass/toolchain.eclass
+++ b/eclass/toolchain.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2015 Gentoo Foundation
+# Copyright 1999-2016 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Id$
@@ -136,7 +136,7 @@ else
LICENSE="GPL-2+ LGPL-2.1+ FDL-1.1+"
fi
-IUSE="multislot regression-test vanilla"
+IUSE="regression-test vanilla"
IUSE_DEF=( nls nptl )
if [[ ${PN} != "kgcc64" && ${PN} != gcc-* ]] ; then
@@ -159,7 +159,7 @@ if [[ ${PN} != "kgcc64" && ${PN} != gcc-* ]] ; then
tc_version_is_at_least 4.8 && IUSE+=" graphite" IUSE_DEF+=( sanitize )
tc_version_is_at_least 4.9 && IUSE+=" cilk +vtv"
tc_version_is_at_least 5.0 && IUSE+=" jit mpx"
- tc_version_is_at_least 6.0 && IUSE+=" pie +ssp"
+ tc_version_is_at_least 6.0 && IUSE+=" pie ssp +pch"
fi
IUSE+=" ${IUSE_DEF[*]/#/+}"
@@ -631,6 +631,50 @@ do_gcc_PIE_patches() {
# configure to build with the hardened GCC specs as the default
make_gcc_hard() {
+
+ local gcc_hard_flags=""
+	# For gcc >= 6.x we can use configure options to turn on PIE/SSP by default
+ if tc_version_is_at_least 6.0 ; then
+ if use pie ; then
+ einfo "Updating gcc to use automatic PIE building ..."
+ fi
+ if use ssp ; then
+ einfo "Updating gcc to use automatic SSP building ..."
+ fi
+ if use hardened ; then
+			# Will add some optimizations by default.
+ gcc_hard_flags+=" -DEXTRA_OPTIONS"
+ # rebrand to make bug reports easier
+ BRANDING_GCC_PKGVERSION=${BRANDING_GCC_PKGVERSION/Gentoo/Gentoo Hardened}
+ fi
+ else
+ if use hardened ; then
+ # rebrand to make bug reports easier
+ BRANDING_GCC_PKGVERSION=${BRANDING_GCC_PKGVERSION/Gentoo/Gentoo Hardened}
+ if hardened_gcc_works ; then
+ einfo "Updating gcc to use automatic PIE + SSP building ..."
+ gcc_hard_flags+=" -DEFAULT_PIE_SSP"
+ elif hardened_gcc_works pie ; then
+ einfo "Updating gcc to use automatic PIE building ..."
+ ewarn "SSP has not been enabled by default"
+ gcc_hard_flags+=" -DEFAULT_PIE"
+ elif hardened_gcc_works ssp ; then
+ einfo "Updating gcc to use automatic SSP building ..."
+ ewarn "PIE has not been enabled by default"
+ gcc_hard_flags+=" -DEFAULT_SSP"
+ else
+ # do nothing if hardened isn't supported, but don't die either
+ ewarn "hardened is not supported for this arch in this gcc version"
+ return 0
+ fi
+ else
+ if hardened_gcc_works ssp ; then
+ einfo "Updating gcc to use automatic SSP building ..."
+ gcc_hard_flags+=" -DEFAULT_SSP"
+ fi
+ fi
+ fi
+
# we want to be able to control the pie patch logic via something other
# than ALL_CFLAGS...
sed -e '/^ALL_CFLAGS/iHARD_CFLAGS = ' \
@@ -639,36 +683,8 @@ make_gcc_hard() {
# Need to add HARD_CFLAGS to ALL_CXXFLAGS on >= 4.7
if tc_version_is_at_least 4.7 ; then
sed -e '/^ALL_CXXFLAGS/iHARD_CFLAGS = ' \
- -e 's|^ALL_CXXFLAGS = |ALL_CXXFLAGS = $(HARD_CFLAGS) |' \
- -i "${S}"/gcc/Makefile.in
- fi
-
- # defaults to enable for all toolchains
- local gcc_hard_flags=""
- if use hardened ; then
- if hardened_gcc_works ; then
- einfo "Updating gcc to use automatic PIE + SSP building ..."
- gcc_hard_flags+=" -DEFAULT_PIE_SSP"
- elif hardened_gcc_works pie ; then
- einfo "Updating gcc to use automatic PIE building ..."
- ewarn "SSP has not been enabled by default"
- gcc_hard_flags+=" -DEFAULT_PIE"
- elif hardened_gcc_works ssp ; then
- einfo "Updating gcc to use automatic SSP building ..."
- ewarn "PIE has not been enabled by default"
- gcc_hard_flags+=" -DEFAULT_SSP"
- else
- # do nothing if hardened isn't supported, but don't die either
- ewarn "hardened is not supported for this arch in this gcc version"
- return 0
- fi
- # rebrand to make bug reports easier
- BRANDING_GCC_PKGVERSION=${BRANDING_GCC_PKGVERSION/Gentoo/Gentoo Hardened}
- else
- if hardened_gcc_works ssp ; then
- einfo "Updating gcc to use automatic SSP building ..."
- gcc_hard_flags+=" -DEFAULT_SSP"
- fi
+ -e 's|^ALL_CXXFLAGS = |ALL_CXXFLAGS = $(HARD_CFLAGS) |' \
+ -i "${S}"/gcc/Makefile.in
fi
sed -i \
@@ -904,6 +920,11 @@ toolchain_src_configure() {
confgcc+=( --enable-libstdcxx-time )
fi
+	# Allow disabling precompiled headers (pch) when building libstdc++
+ if tc_version_is_at_least 6.0 && ! use pch ; then
+ confgcc+=( --disable-libstdcxx-pch )
+ fi
+
# The jit language requires this.
is_jit && confgcc+=( --enable-host-shared )
@@ -1862,7 +1883,7 @@ gcc_movelibs() {
if [[ ${FROMDIR} != "${TODIR}" && -d ${FROMDIR} ]] ; then
local files=$(find "${FROMDIR}" -maxdepth 1 ! -type d 2>/dev/null)
if [[ -n ${files} ]] ; then
- mv ${files} "${TODIR}"
+ mv ${files} "${TODIR}" || die
fi
fi
done
@@ -1940,7 +1961,7 @@ create_gcc_env_entry() {
local mdir mosdir abi ldpath
for abi in $(get_all_abis TARGET) ; do
mdir=$($(XGCC) $(get_abi_CFLAGS ${abi}) --print-multi-directory)
- ldpath="${LIBPATH}"
+ ldpath=${LIBPATH}
[[ ${mdir} != "." ]] && ldpath+="/${mdir}"
ldpaths="${ldpath}${ldpaths:+:${ldpaths}}"
@@ -1949,7 +1970,7 @@ create_gcc_env_entry() {
done
else
# Older gcc's didn't do multilib, so logic is simple.
- ldpaths="${LIBPATH}"
+ ldpaths=${LIBPATH}
fi
cat <<-EOF > ${gcc_envd_file}
@@ -1967,6 +1988,11 @@ create_gcc_env_entry() {
}
copy_minispecs_gcc_specs() {
+ # on gcc 6 we don't need minispecs
+ if tc_version_is_at_least 6.0 ; then
+ return 0
+ fi
+
# setup the hardenedno* specs files and the vanilla specs file.
if hardened_gcc_works ; then
create_gcc_env_entry hardenednopiessp
@@ -2148,10 +2174,6 @@ should_we_gcc_config() {
local curr_branch_ver=$(get_version_component_range 1-2 ${curr_config_ver})
- # If we're using multislot, just run gcc-config if we're installing
- # to the same profile as the current one.
- use multislot && return $([[ ${curr_config_ver} == ${GCC_CONFIG_VER} ]])
-
if [[ ${curr_branch_ver} == ${GCC_BRANCH_VER} ]] ; then
return 0
else
@@ -2312,6 +2334,10 @@ hardened_gcc_is_stable() {
}
want_minispecs() {
+ # on gcc 6 we don't need minispecs
+ if tc_version_is_at_least 6.0 ; then
+ return 0
+ fi
if tc_version_is_at_least 4.3.2 && use hardened ; then
if ! want_pie ; then
ewarn "PIE_VER or SPECS_VER is not defined in the GCC ebuild."