Diffstat (limited to 'eclass')
-rw-r--r--  eclass/acct-group.eclass | 48
-rw-r--r--  eclass/acct-user.eclass | 344
-rw-r--r--  eclass/ada.eclass | 108
-rw-r--r--  eclass/alternatives.eclass | 15
-rw-r--r--  eclass/ant-tasks.eclass | 27
-rw-r--r--  eclass/apache-2.eclass | 92
-rw-r--r--  eclass/apache-module.eclass | 11
-rw-r--r--  eclass/app-alternatives.eclass | 84
-rw-r--r--  eclass/aspell-dict-r1.eclass | 26
-rw-r--r--  eclass/autotools.eclass | 179
-rw-r--r--  eclass/bazel.eclass | 226
-rw-r--r--  eclass/bzr.eclass | 28
-rw-r--r--  eclass/cargo.eclass | 272
-rw-r--r--  eclass/cdrom.eclass | 4
-rw-r--r--  eclass/check-reqs.eclass | 18
-rw-r--r--  eclass/chromium-2.eclass | 6
-rw-r--r--  eclass/cmake-multilib.eclass | 34
-rw-r--r--  eclass/cmake-utils.eclass | 850
-rw-r--r--  eclass/cmake.eclass | 103
-rw-r--r--  eclass/common-lisp-3.eclass | 22
-rw-r--r--  eclass/cron.eclass | 85
-rw-r--r--  eclass/crossdev.eclass | 77
-rw-r--r--  eclass/cuda.eclass | 37
-rw-r--r--  eclass/cvs.eclass | 32
-rw-r--r--  eclass/db-use.eclass | 17
-rw-r--r--  eclass/db.eclass | 127
-rw-r--r--  eclass/depend.apache.eclass | 92
-rw-r--r--  eclass/desktop.eclass | 29
-rw-r--r--  eclass/dist-kernel-utils.eclass | 154
-rw-r--r--  eclass/distutils-r1.eclass | 951
-rw-r--r--  eclass/docs.eclass | 204
-rw-r--r--  eclass/dotnet-pkg-base.eclass | 654
-rw-r--r--  eclass/dotnet-pkg.eclass | 343
-rw-r--r--  eclass/dotnet.eclass | 29
-rw-r--r--  eclass/dune.eclass | 170
-rw-r--r--  eclass/eapi8-dosym.eclass | 14
-rw-r--r--  eclass/ecm.eclass | 209
-rw-r--r--  eclass/elisp-common.eclass | 299
-rw-r--r--  eclass/elisp.eclass | 45
-rw-r--r--  eclass/epatch.eclass | 108
-rw-r--r--  eclass/eqawarn.eclass | 26
-rw-r--r--  eclass/estack.eclass | 4
-rw-r--r--  eclass/eutils.eclass | 185
-rw-r--r--  eclass/fcaps.eclass | 16
-rw-r--r--  eclass/findlib.eclass | 23
-rw-r--r--  eclass/flag-o-matic.eclass | 248
-rw-r--r--  eclass/font-ebdftopcf.eclass | 8
-rw-r--r--  eclass/font.eclass | 53
-rw-r--r--  eclass/fortran-2.eclass | 32
-rw-r--r--  eclass/frameworks.kde.org.eclass | 92
-rw-r--r--  eclass/freedict.eclass | 20
-rw-r--r--  eclass/gap-pkg.eclass | 388
-rw-r--r--  eclass/gear.kde.org.eclass | 254
-rw-r--r--  eclass/ghc-package.eclass | 43
-rw-r--r--  eclass/git-r3.eclass | 228
-rw-r--r--  eclass/gkrellm-plugin.eclass | 55
-rw-r--r--  eclass/gnome.org.eclass | 23
-rw-r--r--  eclass/gnome2-utils.eclass | 23
-rw-r--r--  eclass/gnome2.eclass | 160
-rw-r--r--  eclass/gnustep-2.eclass | 24
-rw-r--r--  eclass/gnustep-base.eclass | 23
-rw-r--r--  eclass/go-env.eclass | 105
-rw-r--r--  eclass/go-module.eclass | 94
-rw-r--r--  eclass/golang-build.eclass | 24
-rw-r--r--  eclass/golang-vcs-snapshot.eclass | 26
-rw-r--r--  eclass/golang-vcs.eclass | 29
-rw-r--r--  eclass/gstreamer-meson.eclass | 134
-rw-r--r--  eclass/gstreamer.eclass | 269
-rw-r--r--  eclass/haskell-cabal.eclass | 512
-rw-r--r--  eclass/java-ant-2.eclass | 39
-rw-r--r--  eclass/java-osgi.eclass | 15
-rw-r--r--  eclass/java-pkg-2.eclass | 10
-rw-r--r--  eclass/java-pkg-opt-2.eclass | 13
-rw-r--r--  eclass/java-pkg-simple.eclass | 111
-rw-r--r--  eclass/java-utils-2.eclass | 165
-rw-r--r--  eclass/java-virtuals-2.eclass | 65
-rw-r--r--  eclass/java-vm-2.eclass | 49
-rw-r--r--  eclass/kde.org.eclass | 220
-rw-r--r--  eclass/kernel-2.eclass | 161
-rw-r--r--  eclass/kernel-build.eclass | 433
-rw-r--r--  eclass/kernel-install.eclass | 403
-rw-r--r--  eclass/kodi-addon.eclass | 28
-rw-r--r--  eclass/libretro-core.eclass | 27
-rw-r--r--  eclass/libtool.eclass | 10
-rw-r--r--  eclass/linux-info.eclass | 239
-rw-r--r--  eclass/linux-mod-r1.eclass | 1280
-rw-r--r--  eclass/linux-mod.eclass | 237
-rw-r--r--  eclass/llvm-r1.eclass | 250
-rw-r--r--  eclass/llvm-utils.eclass | 153
-rw-r--r--  eclass/llvm.eclass | 121
-rw-r--r--  eclass/llvm.org.eclass | 267
-rw-r--r--  eclass/lua-single.eclass | 21
-rw-r--r--  eclass/lua-utils.eclass | 13
-rw-r--r--  eclass/lua.eclass | 19
-rw-r--r--  eclass/mate-desktop.org.eclass | 26
-rw-r--r--  eclass/mate.eclass | 38
-rw-r--r--  eclass/mercurial.eclass | 16
-rw-r--r--  eclass/meson-multilib.eclass | 6
-rw-r--r--  eclass/meson.eclass | 216
-rw-r--r--  eclass/mono-env.eclass | 12
-rw-r--r--  eclass/mono.eclass | 4
-rw-r--r--  eclass/mount-boot.eclass | 8
-rw-r--r--  eclass/mozcoreconf-v5.eclass | 270
-rw-r--r--  eclass/mozcoreconf-v6.eclass | 48
-rw-r--r--  eclass/mozextension.eclass | 19
-rw-r--r--  eclass/mozlinguas-v2.eclass | 57
-rw-r--r--  eclass/multibuild.eclass | 35
-rw-r--r--  eclass/multilib-build.eclass | 39
-rw-r--r--  eclass/multilib.eclass | 94
-rw-r--r--  eclass/multiprocessing.eclass | 38
-rw-r--r--  eclass/myspell-r2.eclass | 32
-rw-r--r--  eclass/netsurf.eclass | 12
-rw-r--r--  eclass/ninja-utils.eclass | 71
-rw-r--r--  eclass/nuget.eclass | 290
-rw-r--r--  eclass/office-ext-r1.eclass | 26
-rw-r--r--  eclass/opam.eclass | 39
-rw-r--r--  eclass/optfeature.eclass | 8
-rw-r--r--  eclass/out-of-source-utils.eclass | 43
-rw-r--r--  eclass/out-of-source.eclass | 13
-rw-r--r--  eclass/pam.eclass | 79
-rw-r--r--  eclass/perl-functions.eclass | 74
-rw-r--r--  eclass/perl-module.eclass | 347
-rw-r--r--  eclass/php-ext-pecl-r3.eclass | 6
-rw-r--r--  eclass/php-ext-source-r3.eclass | 27
-rw-r--r--  eclass/php-pear-r2.eclass | 32
-rw-r--r--  eclass/plasma-mobile.kde.org.eclass | 48
-rw-r--r--  eclass/plasma.kde.org.eclass | 91
-rw-r--r--  eclass/plocale.eclass | 6
-rw-r--r--  eclass/portability.eclass | 17
-rw-r--r--  eclass/postgres-multi.eclass | 44
-rw-r--r--  eclass/postgres.eclass | 41
-rw-r--r--  eclass/preserve-libs.eclass | 6
-rw-r--r--  eclass/pypi.eclass | 283
-rw-r--r--  eclass/python-any-r1.eclass | 43
-rw-r--r--  eclass/python-r1.eclass | 65
-rw-r--r--  eclass/python-single-r1.eclass | 30
-rw-r--r--  eclass/python-utils-r1.eclass | 424
-rw-r--r--  eclass/qmail.eclass | 78
-rw-r--r--  eclass/qmake-utils.eclass | 102
-rw-r--r--  eclass/qt5-build.eclass | 121
-rw-r--r--  eclass/qt6-build.eclass | 294
-rw-r--r--  eclass/readme.gentoo-r1.eclass | 4
-rw-r--r--  eclass/rebar.eclass | 37
-rw-r--r--  eclass/rocm.eclass | 238
-rw-r--r--  eclass/ros-catkin.eclass | 229
-rw-r--r--  eclass/rpm.eclass | 66
-rw-r--r--  eclass/ruby-fakegem.eclass | 33
-rw-r--r--  eclass/ruby-ng-gnome2.eclass | 26
-rw-r--r--  eclass/ruby-ng.eclass | 270
-rw-r--r--  eclass/ruby-single.eclass | 34
-rw-r--r--  eclass/ruby-utils.eclass | 58
-rw-r--r--  eclass/rust-toolchain.eclass | 103
-rw-r--r--  eclass/s6.eclass | 48
-rw-r--r--  eclass/savedconfig.eclass | 19
-rw-r--r--  eclass/scons-utils.eclass | 258
-rw-r--r--  eclass/secureboot.eclass | 175
-rw-r--r--  eclass/selinux-policy-2.eclass | 90
-rw-r--r--  eclass/sgml-catalog-r1.eclass | 16
-rw-r--r--  eclass/shell-completion.eclass | 115
-rw-r--r--  eclass/ssl-cert.eclass | 33
-rw-r--r--  eclass/stardict.eclass | 46
-rw-r--r--  eclass/subversion.eclass | 46
-rw-r--r--  eclass/sword-module.eclass | 95
-rw-r--r--  eclass/systemd.eclass | 119
-rw-r--r--  eclass/tests/Makefile | 27
-rwxr-xr-x  eclass/tests/cargo-bench.sh | 114
-rwxr-xr-x  eclass/tests/dist-kernel-utils.sh | 28
-rwxr-xr-x  eclass/tests/distutils-r1.sh | 142
-rwxr-xr-x  eclass/tests/distutils-r1_single.sh | 122
-rwxr-xr-x  eclass/tests/eapi8-dosym.sh | 5
-rwxr-xr-x  eclass/tests/llvm-r1.sh | 151
-rwxr-xr-x  eclass/tests/llvm-utils.sh | 118
-rwxr-xr-x  eclass/tests/llvm.sh | 7
-rwxr-xr-x  eclass/tests/multiprocessing_makeopts_jobs.sh | 24
-rwxr-xr-x  eclass/tests/pypi-bench.sh | 69
-rwxr-xr-x  eclass/tests/pypi.sh | 97
-rwxr-xr-x  eclass/tests/python-utils-bench.sh | 53
-rwxr-xr-x  eclass/tests/python-utils-r1.sh | 45
-rwxr-xr-x  eclass/tests/scons-utils.sh | 63
-rwxr-xr-x  eclass/tests/systemd.sh | 50
-rw-r--r--  eclass/tests/tests-common.sh | 7
-rwxr-xr-x  eclass/tests/toolchain-funcs.sh | 96
-rwxr-xr-x  eclass/tests/toolchain.sh | 41
-rwxr-xr-x  eclass/tests/unpacker.sh | 430
-rwxr-xr-x  eclass/tests/verify-sig.sh | 94
-rw-r--r--  eclass/texlive-common.eclass | 201
-rw-r--r--  eclass/texlive-module.eclass | 128
-rw-r--r--  eclass/toolchain-autoconf.eclass | 82
-rw-r--r--  eclass/toolchain-funcs.eclass | 307
-rw-r--r--  eclass/toolchain.eclass | 1675
-rw-r--r--  eclass/tree-sitter-grammar.eclass | 185
-rw-r--r--  eclass/udev.eclass | 16
-rw-r--r--  eclass/unpacker.eclass | 286
-rw-r--r--  eclass/user-info.eclass | 8
-rw-r--r--  eclass/user.eclass | 684
-rw-r--r--  eclass/usr-ldscript.eclass | 53
-rw-r--r--  eclass/vala.eclass | 36
-rw-r--r--  eclass/vcs-snapshot.eclass | 3
-rw-r--r--  eclass/vdr-plugin-2.eclass | 72
-rw-r--r--  eclass/verify-sig.eclass | 150
-rw-r--r--  eclass/vim-doc.eclass | 72
-rw-r--r--  eclass/vim-plugin.eclass | 120
-rw-r--r--  eclass/vim-spell.eclass | 20
-rw-r--r--  eclass/virtualx.eclass | 89
-rw-r--r--  eclass/waf-utils.eclass | 48
-rw-r--r--  eclass/webapp.eclass | 56
-rw-r--r--  eclass/wxwidgets.eclass | 8
-rw-r--r--  eclass/xdg-utils.eclass | 9
-rw-r--r--  eclass/xdg.eclass | 65
-rw-r--r--  eclass/xemacs-packages.eclass | 28
-rw-r--r--  eclass/xorg-3.eclass | 108
211 files changed, 16227 insertions, 9584 deletions
diff --git a/eclass/acct-group.eclass b/eclass/acct-group.eclass
index 3d02e4f713b4..a0ad86066309 100644
--- a/eclass/acct-group.eclass
+++ b/eclass/acct-group.eclass
@@ -1,9 +1,10 @@
-# Copyright 2019-2022 Gentoo Authors
+# Copyright 2019-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: acct-group.eclass
# @MAINTAINER:
# Michał Górny <mgorny@gentoo.org>
+# Mike Gilbert <floppym@gentoo.org>
# @AUTHOR:
# Michael Orlitzky <mjo@gentoo.org>
# Michał Górny <mgorny@gentoo.org>
@@ -35,12 +36,12 @@
if [[ -z ${_ACCT_GROUP_ECLASS} ]]; then
_ACCT_GROUP_ECLASS=1
-case ${EAPI:-0} in
+case ${EAPI} in
7|8) ;;
- *) die "EAPI=${EAPI:-0} not supported";;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-inherit user
+inherit user-info
[[ ${CATEGORY} == acct-group ]] ||
die "Ebuild error: this eclass can be used only in acct-group category!"
@@ -60,7 +61,7 @@ readonly ACCT_GROUP_NAME
# @REQUIRED
# @DESCRIPTION:
# Preferred GID for the new group. This variable is obligatory, and its
-# value must be unique across all group packages. This can be overriden
+# value must be unique across all group packages. This can be overridden
# in make.conf through ACCT_GROUP_<UPPERCASE_USERNAME>_ID variable.
#
# Overlays should set this to -1 to dynamically allocate GID. Using -1
@@ -71,18 +72,17 @@ readonly ACCT_GROUP_NAME
# If set to a non-null value, the eclass will require the group to have
# specified GID. If the group already exists with another GID, or
# the GID is taken by another group, the install will fail.
-: ${ACCT_GROUP_ENFORCE_ID:=}
+: "${ACCT_GROUP_ENFORCE_ID:=}"
# << Boilerplate ebuild variables >>
-: ${DESCRIPTION:="System group: ${ACCT_GROUP_NAME}"}
-: ${SLOT:=0}
-: ${KEYWORDS:=alpha amd64 arm arm64 hppa ia64 ~loong m68k ~mips ppc ppc64 ~riscv s390 sparc x86 ~x64-cygwin ~amd64-linux ~x86-linux ~ppc-macos ~x64-macos ~sparc-solaris ~sparc64-solaris ~x64-solaris ~x86-solaris}
+: "${DESCRIPTION:="System group: ${ACCT_GROUP_NAME}"}"
+: "${SLOT:=0}"
+: "${KEYWORDS:=~alpha amd64 arm arm64 hppa ~ia64 ~loong ~m68k ~mips ppc ppc64 ~riscv ~s390 sparc x86 ~amd64-linux ~x86-linux ~arm64-macos ~ppc-macos ~x64-macos ~x64-solaris}"
S=${WORKDIR}
# << Phase functions >>
-EXPORT_FUNCTIONS pkg_pretend src_install pkg_preinst
# @FUNCTION: acct-group_pkg_pretend
# @DESCRIPTION:
@@ -156,8 +156,32 @@ acct-group_src_install() {
acct-group_pkg_preinst() {
debug-print-function ${FUNCNAME} "${@}"
- enewgroup ${ACCT_GROUP_ENFORCE_ID:+-F} "${ACCT_GROUP_NAME}" \
- "${_ACCT_GROUP_ID}"
+ if [[ ${EUID} -ne 0 || -n ${EPREFIX} ]]; then
+ einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
+ return
+ fi
+
+ if egetent group "${ACCT_GROUP_NAME}" >/dev/null; then
+ elog "Group ${ACCT_GROUP_NAME} already exists"
+ return
+ fi
+
+ local opts=( --system )
+
+ if [[ ${_ACCT_GROUP_ID} -ne -1 ]] &&
+ ! egetent group "${_ACCT_GROUP_ID}" >/dev/null
+ then
+ opts+=( --gid "${_ACCT_GROUP_ID}" )
+ fi
+
+ if [[ -n ${ROOT} ]]; then
+ opts+=( --prefix "${ROOT}" )
+ fi
+
+ elog "Adding group ${ACCT_GROUP_NAME}"
+ groupadd "${opts[@]}" "${ACCT_GROUP_NAME}" || die "groupadd failed with status $?"
}
fi
+
+EXPORT_FUNCTIONS pkg_pretend src_install pkg_preinst
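
With the user.eclass dependency dropped, acct-group now drives groupadd directly from pkg_preinst. For orientation, a consuming ebuild stays as small as before; the sketch below is purely illustrative (the package name and GID are made up, not allocated Gentoo IDs):

# acct-group/examplegrp/examplegrp-0.ebuild (hypothetical)
EAPI=8

inherit acct-group

# Preferred GID; overlays would set -1 to request dynamic allocation.
ACCT_GROUP_ID=799
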
diff --git a/eclass/acct-user.eclass b/eclass/acct-user.eclass
index c87b27f3ccaa..66a4d6667888 100644
--- a/eclass/acct-user.eclass
+++ b/eclass/acct-user.eclass
@@ -1,9 +1,10 @@
-# Copyright 2019-2022 Gentoo Authors
+# Copyright 2019-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: acct-user.eclass
# @MAINTAINER:
# Michał Górny <mgorny@gentoo.org>
+# Mike Gilbert <floppym@gentoo.org>
# @AUTHOR:
# Michael Orlitzky <mjo@gentoo.org>
# Michał Górny <mgorny@gentoo.org>
@@ -43,12 +44,12 @@
if [[ -z ${_ACCT_USER_ECLASS} ]]; then
_ACCT_USER_ECLASS=1
-case ${EAPI:-0} in
+case ${EAPI} in
7|8) ;;
- *) die "EAPI=${EAPI:-0} not supported";;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-inherit user
+inherit user-info
[[ ${CATEGORY} == acct-user ]] ||
die "Ebuild error: this eclass can be used only in acct-user category!"
@@ -68,61 +69,63 @@ readonly ACCT_USER_NAME
# @REQUIRED
# @DESCRIPTION:
# Preferred UID for the new user. This variable is obligatory, and its
-# value must be unique across all user packages. This can be overriden
+# value must be unique across all user packages. This can be overridden
# in make.conf through ACCT_USER_<UPPERCASE_USERNAME>_ID variable.
#
# Overlays should set this to -1 to dynamically allocate UID. Using -1
# in ::gentoo is prohibited by policy.
-# @ECLASS_VARIABLE: _ACCT_USER_ALREADY_EXISTS
-# @INTERNAL
-# @DESCRIPTION:
-# Status variable which indicates if user already exists.
-
# @ECLASS_VARIABLE: ACCT_USER_ENFORCE_ID
# @DESCRIPTION:
# If set to a non-null value, the eclass will require the user to have
# specified UID. If the user already exists with another UID, or
# the UID is taken by another user, the install will fail.
-: ${ACCT_USER_ENFORCE_ID:=}
+: "${ACCT_USER_ENFORCE_ID:=}"
# @ECLASS_VARIABLE: ACCT_USER_NO_MODIFY
# @DEFAULT_UNSET
# @DESCRIPTION:
# If set to a non-null value, the eclass will not make any changes
# to an already existing user.
-: ${ACCT_USER_NO_MODIFY:=}
+: "${ACCT_USER_NO_MODIFY:=}"
+
+# @ECLASS_VARIABLE: ACCT_USER_COMMENT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The comment to use for the user. If not specified, the package
+# DESCRIPTION will be used. This can be overridden in make.conf through
+# ACCT_USER_<UPPERCASE_USERNAME>_COMMENT variable.
# @ECLASS_VARIABLE: ACCT_USER_SHELL
# @DESCRIPTION:
# The shell to use for the user. If not specified, a 'nologin' variant
-# for the system is used. This can be overriden in make.conf through
+# for the system is used. This can be overridden in make.conf through
# ACCT_USER_<UPPERCASE_USERNAME>_SHELL variable.
-: ${ACCT_USER_SHELL:=-1}
+: "${ACCT_USER_SHELL:=/sbin/nologin}"
# @ECLASS_VARIABLE: ACCT_USER_HOME
# @DESCRIPTION:
# The home directory for the user. If not specified, /dev/null is used.
# The directory will be created with appropriate permissions if it does
# not exist. When updating, existing home directory will not be moved.
-# This can be overriden in make.conf through
+# This can be overridden in make.conf through
# ACCT_USER_<UPPERCASE_USERNAME>_HOME variable.
-: ${ACCT_USER_HOME:=/dev/null}
+: "${ACCT_USER_HOME:=/dev/null}"
# @ECLASS_VARIABLE: ACCT_USER_HOME_OWNER
# @DEFAULT_UNSET
# @DESCRIPTION:
# The ownership to use for the home directory, in chown ([user][:group])
# syntax. Defaults to the newly created user, and its primary group.
-# This can be overriden in make.conf through
+# This can be overridden in make.conf through
# ACCT_USER_<UPPERCASE_USERNAME>_HOME_OWNER variable.
# @ECLASS_VARIABLE: ACCT_USER_HOME_PERMS
# @DESCRIPTION:
# The permissions to use for the home directory, in chmod (octal
-# or verbose) form. This can be overriden in make.conf through
+# or verbose) form. This can be overridden in make.conf through
# ACCT_USER_<UPPERCASE_USERNAME>_HOME_PERMS variable.
-: ${ACCT_USER_HOME_PERMS:=0755}
+: "${ACCT_USER_HOME_PERMS:=0755}"
# @ECLASS_VARIABLE: ACCT_USER_GROUPS
# @REQUIRED
@@ -131,7 +134,7 @@ readonly ACCT_USER_NAME
# array. The first group specified is the user's primary group, while
# the remaining groups (if any) become supplementary groups.
#
-# This can be overriden in make.conf through
+# This can be overridden in make.conf through
# ACCT_USER_<UPPERCASE_USERNAME>_GROUPS variable, or appended to
# via ACCT_USER_<UPPERCASE_USERNAME>_GROUPS_ADD. Please note that
# due to technical limitations, the override variables are not arrays
@@ -139,9 +142,9 @@ readonly ACCT_USER_NAME
# << Boilerplate ebuild variables >>
-: ${DESCRIPTION:="System user: ${ACCT_USER_NAME}"}
-: ${SLOT:=0}
-: ${KEYWORDS:=alpha amd64 arm arm64 hppa ia64 ~loong m68k ~mips ppc ppc64 ~riscv s390 sparc x86 ~x64-cygwin ~amd64-linux ~x86-linux ~ppc-macos ~x64-macos ~sparc-solaris ~sparc64-solaris ~x64-solaris ~x86-solaris}
+: "${DESCRIPTION:="System user: ${ACCT_USER_NAME}"}"
+: "${SLOT:=0}"
+: "${KEYWORDS:=~alpha amd64 arm arm64 hppa ~ia64 ~loong ~m68k ~mips ppc ppc64 ~riscv ~s390 sparc x86 ~amd64-linux ~x86-linux ~arm64-macos ~ppc-macos ~x64-macos ~x64-solaris}"
S=${WORKDIR}
@@ -178,7 +181,7 @@ acct-user_add_deps() {
eislocked() {
[[ $# -eq 1 ]] || die "usage: ${FUNCNAME} <user>"
- if [[ ${EUID} -ne 0 ]]; then
+ if [[ ${EUID} -ne 0 || -n ${EPREFIX} ]]; then
einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
return 0
fi
@@ -208,115 +211,7 @@ eislocked() {
esac
}
-# @FUNCTION: elockuser
-# @USAGE: <user>
-# @INTERNAL
-# @DESCRIPTION:
-# Lock the specified user account, using the available platform-specific
-# functions. This should prevent any login to the account.
-#
-# Established lock can be reverted using eunlockuser.
-#
-# This function returns 0 if locking succeeded, 2 if it is not supported
-# by the platform code or dies if it fails.
-elockuser() {
- [[ $# -eq 1 ]] || die "usage: ${FUNCNAME} <user>"
-
- if [[ ${EUID} -ne 0 ]]; then
- einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
- return 0
- fi
-
- eislocked "$1"
- [[ $? -eq 0 ]] && return 0
-
- local opts
- [[ -n ${ROOT} ]] && opts=( --prefix "${ROOT}" )
-
- case ${CHOST} in
- *-freebsd*|*-dragonfly*)
- pw lock "${opts[@]}" "$1" || die "Locking account $1 failed"
- pw user mod "${opts[@]}" "$1" -e 1 || die "Expiring account $1 failed"
- ;;
-
- *-netbsd*)
- if [[ -n "${ROOT}" ]]; then
- ewarn "NetBSD's usermod does not support --prefix <dir> option."
- ewarn "Please use: usermod ${opts[@]} -e 1 -C yes \"$1\" in a chroot"
- else
- usermod "${opts[@]}" -e 1 -C yes "$1" || die "Locking account $1 failed"
- fi
- ;;
-
- *-openbsd*)
- return 2
- ;;
-
- *)
- usermod "${opts[@]}" -e 1 -L "$1" || die "Locking account $1 failed"
- ;;
- esac
-
- elog "User account $1 locked"
- return 0
-}
-
-# @FUNCTION: eunlockuser
-# @USAGE: <user>
-# @INTERNAL
-# @DESCRIPTION:
-# Unlock the specified user account, using the available platform-
-# specific functions.
-#
-# This function returns 0 if unlocking succeeded, 1 if it is not
-# supported by the platform code or dies if it fails.
-eunlockuser() {
- [[ $# -eq 1 ]] || die "usage: ${FUNCNAME} <user>"
-
- if [[ ${EUID} -ne 0 ]]; then
- einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
- return 0
- fi
-
- eislocked "$1"
- [[ $? -eq 1 ]] && return 0
-
- local opts
- [[ -n ${ROOT} ]] && opts=( --prefix "${ROOT}" )
-
- case ${CHOST} in
- *-freebsd*|*-dragonfly*)
- pw user mod "${opts[@]}" "$1" -e 0 || die "Unexpiring account $1 failed"
- pw unlock "${opts[@]}" "$1" || die "Unlocking account $1 failed"
- ;;
-
- *-netbsd*)
- if [[ -n "${ROOT}" ]]; then
- ewarn "NetBSD's usermod does not support --prefix <dir> option."
- ewarn "Please use: \"usermod ${opts[@]} -e 0 -C no $1\" in a chroot"
- else
- usermod "${opts[@]}" -e 0 -C no "$1" || die "Unlocking account $1 failed"
- fi
- ;;
-
- *-openbsd*)
- return 1
- ;;
-
- *)
- # silence warning if account does not have a password
- usermod "${opts[@]}" -e "" -U "$1" 2>/dev/null || die "Unlocking account $1 failed"
- ;;
- esac
-
- ewarn "User account $1 unlocked after reinstating."
- return 0
-}
-
-
# << Phase functions >>
-EXPORT_FUNCTIONS pkg_pretend src_install pkg_preinst pkg_postinst \
- pkg_prerm
# @FUNCTION: acct-user_pkg_pretend
# @DESCRIPTION:
@@ -373,6 +268,9 @@ acct-user_pkg_pretend() {
acct-user_src_install() {
debug-print-function ${FUNCNAME} "${@}"
+ # Replace reserved characters in comment
+ : "${ACCT_USER_COMMENT:=${DESCRIPTION//[:,=]/;}}"
+
# serialize for override support
local ACCT_USER_GROUPS=${ACCT_USER_GROUPS[*]}
@@ -380,7 +278,7 @@ acct-user_src_install() {
local override_name=${ACCT_USER_NAME^^}
override_name=${override_name//-/_}
local var
- for var in ACCT_USER_{ID,SHELL,HOME{,_OWNER,_PERMS},GROUPS}; do
+ for var in ACCT_USER_{ID,COMMENT,SHELL,HOME{,_OWNER,_PERMS},GROUPS}; do
local var_name=ACCT_USER_${override_name}_${var#ACCT_USER_}
if [[ -n ${!var_name} ]]; then
ewarn "${var_name}=${!var_name} override in effect, support will not be provided."
@@ -395,6 +293,10 @@ acct-user_src_install() {
_ACCT_USER_GROUPS+=" ${!var_name}"
fi
+ if [[ -n ${_ACCT_USER_COMMENT//[^:,=]} ]]; then
+ die "Invalid characters in user comment: '${_ACCT_USER_COMMENT//[^:,=]}'"
+ fi
+
# deserialize into an array
local groups=( ${_ACCT_USER_GROUPS} )
@@ -409,7 +311,7 @@ acct-user_src_install() {
printf "u\t%q\t%q\t%q\t%q\t%q\n" \
"${ACCT_USER_NAME}" \
"${_ACCT_USER_ID/#-*/-}:${groups[0]}" \
- "${DESCRIPTION//[:,=]/;}" \
+ "${_ACCT_USER_COMMENT}" \
"${_ACCT_USER_HOME}" \
"${_ACCT_USER_SHELL/#-*/-}"
if [[ ${#groups[@]} -gt 1 ]]; then
@@ -426,27 +328,53 @@ acct-user_src_install() {
acct-user_pkg_preinst() {
debug-print-function ${FUNCNAME} "${@}"
- # check if user already exists
- _ACCT_USER_ALREADY_EXISTS=
- if [[ -n $(egetent passwd "${ACCT_USER_NAME}") ]]; then
- _ACCT_USER_ALREADY_EXISTS=1
+ unset _ACCT_USER_ADDED
+
+ if [[ ${EUID} -ne 0 || -n ${EPREFIX} ]]; then
+ einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
+ return
fi
- readonly _ACCT_USER_ALREADY_EXISTS
- enewuser ${ACCT_USER_ENFORCE_ID:+-F} -M "${ACCT_USER_NAME}" \
- "${_ACCT_USER_ID}" "${_ACCT_USER_SHELL}" "${_ACCT_USER_HOME}" \
- "${_ACCT_USER_GROUPS// /,}"
+ if egetent passwd "${ACCT_USER_NAME}" >/dev/null; then
+ elog "User ${ACCT_USER_NAME} already exists"
+ else
+ local groups=( ${_ACCT_USER_GROUPS} )
+ local aux_groups=${groups[*]:1}
+ local opts=(
+ --system
+ --no-create-home
+ --no-user-group
+ --comment "${_ACCT_USER_COMMENT}"
+ --home-dir "${_ACCT_USER_HOME}"
+ --shell "${_ACCT_USER_SHELL}"
+ --gid "${groups[0]}"
+ --groups "${aux_groups// /,}"
+ )
+
+ if [[ ${_ACCT_USER_ID} -ne -1 ]] &&
+ ! egetent passwd "${_ACCT_USER_ID}" >/dev/null
+ then
+ opts+=( --uid "${_ACCT_USER_ID}" )
+ fi
+
+ if [[ -n ${ROOT} ]]; then
+ opts+=( --prefix "${ROOT}" )
+ fi
+
+ elog "Adding user ${ACCT_USER_NAME}"
+ useradd "${opts[@]}" "${ACCT_USER_NAME}" || die "useradd failed with status $?"
+ _ACCT_USER_ADDED=1
+ fi
if [[ ${_ACCT_USER_HOME} != /dev/null ]]; then
# default ownership to user:group
if [[ -z ${_ACCT_USER_HOME_OWNER} ]]; then
- local group_array=( ${_ACCT_USER_GROUPS} )
- if [[ -n "${ROOT}" ]]; then
+ if [[ -n ${ROOT} ]]; then
local euid=$(egetent passwd ${ACCT_USER_NAME} | cut -d: -f3)
local egid=$(egetent passwd ${ACCT_USER_NAME} | cut -d: -f4)
_ACCT_USER_HOME_OWNER=${euid}:${egid}
else
- _ACCT_USER_HOME_OWNER=${ACCT_USER_NAME}:${group_array[0]}
+ _ACCT_USER_HOME_OWNER=${ACCT_USER_NAME}:${groups[0]}
fi
fi
# Path might be missing due to INSTALL_MASK, etc.
@@ -469,26 +397,82 @@ acct-user_pkg_preinst() {
acct-user_pkg_postinst() {
debug-print-function ${FUNCNAME} "${@}"
- if [[ ${EUID} -ne 0 ]]; then
- einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
- return 0
+ if [[ -n ${_ACCT_USER_ADDED} ]]; then
+ # We just added the user; no need to update it
+ return
fi
- if [[ -n ${ACCT_USER_NO_MODIFY} && -n ${_ACCT_USER_ALREADY_EXISTS} ]]; then
- eunlockuser "${ACCT_USER_NAME}"
+ if [[ ${EUID} -ne 0 || -n ${EPREFIX} ]]; then
+ einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
+ return
+ fi
+ if [[ -n ${ACCT_USER_NO_MODIFY} ]]; then
ewarn "User ${ACCT_USER_NAME} already exists; Not touching existing user"
ewarn "due to set ACCT_USER_NO_MODIFY."
- return 0
+ return
fi
- # NB: eset* functions check current value
- esethome "${ACCT_USER_NAME}" "${_ACCT_USER_HOME}"
- esetshell "${ACCT_USER_NAME}" "${_ACCT_USER_SHELL}"
- esetgroups "${ACCT_USER_NAME}" "${_ACCT_USER_GROUPS// /,}"
- # comment field can not contain colons
- esetcomment "${ACCT_USER_NAME}" "${DESCRIPTION//[:,=]/;}"
- eunlockuser "${ACCT_USER_NAME}"
+ local groups=( ${_ACCT_USER_GROUPS} )
+ local aux_groups=${groups[*]:1}
+ local opts=(
+ --comment "${_ACCT_USER_COMMENT}"
+ --home "${_ACCT_USER_HOME}"
+ --shell "${_ACCT_USER_SHELL}"
+ --gid "${groups[0]}"
+ --groups "${aux_groups// /,}"
+ )
+
+ if eislocked "${ACCT_USER_NAME}"; then
+ opts+=( --expiredate "" --unlock )
+ fi
+
+ if [[ -n ${ROOT} ]]; then
+ opts+=( --prefix "${ROOT}" )
+ fi
+
+ local g old_groups del_groups=""
+ old_groups=$(egetgroups "${ACCT_USER_NAME}")
+ for g in ${old_groups//,/ }; do
+ has "${g}" "${groups[@]}" || del_groups+="${del_groups:+, }${g}"
+ done
+ if [[ -n ${del_groups} ]]; then
+ local override_name=${ACCT_USER_NAME^^}
+ override_name=${override_name//-/_}
+ ewarn "Removing user ${ACCT_USER_NAME} from group(s): ${del_groups}"
+ ewarn "To retain the user's group membership in the local system"
+ ewarn "config, override with ACCT_USER_${override_name}_GROUPS or"
+ ewarn "ACCT_USER_${override_name}_GROUPS_ADD in make.conf."
+ ewarn "Documentation reference:"
+ ewarn "https://wiki.gentoo.org/wiki/Practical_guide_to_the_GLEP_81_migration#Override_user_groups"
+ fi
+
+ elog "Updating user ${ACCT_USER_NAME}"
+ # usermod outputs a warning if unlocking the account would result in an
+ # empty password. Hide stderr in a text file and display it if usermod fails.
+ usermod "${opts[@]}" "${ACCT_USER_NAME}" 2>"${T}/usermod-error.log"
+ local status=$?
+ if [[ ${status} -ne 0 ]]; then
+ cat "${T}/usermod-error.log" >&2
+ if [[ ${status} -eq 8 ]]; then
+ # usermod refused to update the home directory
+ # for a uid with active processes.
+ eerror "Failed to update user ${ACCT_USER_NAME}"
+ eerror "This user currently has one or more running processes."
+ eerror "Please update this user manually with the following command:"
+
+ # Surround opts with quotes.
+ # With bash-5 (EAPI 8), we can use "${opts[@]@Q}" instead.
+ local q="'"
+ local optsq=( "${opts[@]/#/${q}}" )
+ optsq=( "${optsq[@]/%/${q}}" )
+
+ eerror " usermod ${optsq[*]} ${ACCT_USER_NAME}"
+ else
+ eerror "$(<"${T}/usermod-error.log")"
+ die "usermod failed with status ${status}"
+ fi
+ fi
}
# @FUNCTION: acct-user_pkg_prerm
@@ -497,28 +481,46 @@ acct-user_pkg_postinst() {
acct-user_pkg_prerm() {
debug-print-function ${FUNCNAME} "${@}"
- if [[ ${EUID} -ne 0 ]]; then
+ if [[ -n ${REPLACED_BY_VERSION} ]]; then
+ return
+ fi
+
+ if [[ ${EUID} -ne 0 || -n ${EPREFIX} ]]; then
einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
- return 0
+ return
fi
if [[ ${ACCT_USER_ID} -eq 0 ]]; then
elog "Refusing to lock out the superuser (UID 0)"
- return 0
+ return
fi
- if [[ -z ${REPLACED_BY_VERSION} ]]; then
- if [[ -z $(egetent passwd "${ACCT_USER_NAME}") ]]; then
- ewarn "User account not found: ${ACCT_USER_NAME}"
- ewarn "Locking process will be skipped."
- return
- fi
+ if [[ -n ${ACCT_USER_NO_MODIFY} ]]; then
+ elog "Not locking user ${ACCT_USER_NAME} due to ACCT_USER_NO_MODIFY"
+ return
+ fi
- esetshell "${ACCT_USER_NAME}" -1
- esetcomment "${ACCT_USER_NAME}" \
- "$(egetcomment "${ACCT_USER_NAME}"); user account removed @ $(date +%Y-%m-%d)"
- elockuser "${ACCT_USER_NAME}"
+ if ! egetent passwd "${ACCT_USER_NAME}" >/dev/null; then
+ ewarn "User account not found: ${ACCT_USER_NAME}"
+ ewarn "Locking process will be skipped."
+ return
fi
+
+ local opts=(
+ --expiredate 1
+ --lock
+ --comment "$(egetcomment "${ACCT_USER_NAME}"); user account removed @ $(date +%Y-%m-%d)"
+ --shell /sbin/nologin
+ )
+
+ if [[ -n ${ROOT} ]]; then
+ opts+=( --prefix "${ROOT}" )
+ fi
+
+ elog "Locking user ${ACCT_USER_NAME}"
+ usermod "${opts[@]}" "${ACCT_USER_NAME}" || die "usermod failed with status $?"
}
fi
+
+EXPORT_FUNCTIONS pkg_pretend src_install pkg_preinst pkg_postinst pkg_prerm
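
The acct-user rewrite follows the same pattern, calling useradd/usermod directly, with the new ACCT_USER_COMMENT knob replacing the DESCRIPTION-derived comment. A minimal consumer might look like the following sketch (user name, UID, groups and home are illustrative; acct-user_add_deps, defined earlier in the eclass, pulls in the matching acct-group packages):

# acct-user/exampled/exampled-0.ebuild (hypothetical)
EAPI=8

inherit acct-user

ACCT_USER_ID=799
ACCT_USER_HOME=/var/lib/exampled
ACCT_USER_GROUPS=( exampled )

acct-user_add_deps
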
diff --git a/eclass/ada.eclass b/eclass/ada.eclass
index 0bf3dcfe41cc..3c3fa3c01453 100644
--- a/eclass/ada.eclass
+++ b/eclass/ada.eclass
@@ -1,4 +1,4 @@
-# Copyright 2019 Gentoo Authors
+# Copyright 2019-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ada.eclass
@@ -6,7 +6,7 @@
# Ada team <ada@gentoo.org>
# @AUTHOR:
# Tupone Alfredo <tupone@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: An eclass for Ada packages
# @DESCRIPTION:
# This eclass set the IUSE and REQUIRED_USE to request the ADA_TARGET
@@ -23,19 +23,13 @@
#
# Mostly copied from python-single-r1.eclass
-case "${EAPI:-0}" in
- 0|1|2|3|4|5)
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- 6|7)
- # EAPI=5 is required for sane USE_EXPAND dependencies
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS pkg_setup
+if [[ -z ${_ADA_ECLASS} ]]; then
+_ADA_ECLASS=1
# @ECLASS_VARIABLE: ADA_DEPS
# @OUTPUT_VARIABLE
@@ -52,16 +46,58 @@ EXPORT_FUNCTIONS pkg_setup
# DEPEND="${RDEPEND}"
# @CODE
#
+# Example value:
+# @CODE
+# ada_target_gcc_12? ( sys-devel/gcc:12[ada] )
+# ada_target_gnat_2021? ( dev-lang/gnat-gps:2021[ada] )
+# @CODE
# @ECLASS_VARIABLE: _ADA_ALL_IMPLS
# @INTERNAL
# @DESCRIPTION:
# All supported Ada implementations, most preferred last.
_ADA_ALL_IMPLS=(
- gnat_2020 gnat_2021
+ gnat_2021 gcc_12 gcc_13
)
readonly _ADA_ALL_IMPLS
+# @ECLASS_VARIABLE: ADA_REQUIRED_USE
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# This is an eclass-generated required-use expression which ensures
+# that exactly one ADA_TARGET value has been enabled.
+#
+# This expression should be utilized in an ebuild by including it in
+# REQUIRED_USE, optionally behind a use flag.
+#
+# Example use:
+# @CODE
+# REQUIRED_USE="ada? ( ${ADA_REQUIRED_USE} )"
+# @CODE
+#
+# Example value:
+# @CODE
+# ^^ ( ada_target_gnat_2021 ada_target_gcc_12 )
+# @CODE
+
+# @ECLASS_VARIABLE: ADA_USEDEP
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# This is a placeholder variable,
+# in order to depend on ada packages built for the same ada
+# implementations.
+#
+# Example use:
+# @CODE
+# RDEPEND="$(ada_gen_cond_dep '
+# dev-ada/foo[${ADA_USEDEP}]
+# ')"
+# @CODE
+#
+# Example value:
+# @CODE
+# ada_targets_gcc_12(-)
+# @CODE
# @FUNCTION: _ada_impl_supported
# @USAGE: <impl>
@@ -72,7 +108,7 @@ readonly _ADA_ALL_IMPLS
#
# Returns 0 if the implementation is valid and supported. If it is
# unsupported, returns 1 -- and the caller should ignore the entry.
-# If it is invalid, dies with an appopriate error messages.
+# If it is invalid, dies with an appropriate error message.
_ada_impl_supported() {
debug-print-function ${FUNCNAME} "${@}"
@@ -83,7 +119,7 @@ _ada_impl_supported() {
# keep in sync with _ADA_ALL_IMPLS!
# (not using that list because inline patterns shall be faster)
case "${impl}" in
- gnat_202[01])
+ gnat_2021|gcc_12|gcc_13)
return 0
;;
*)
@@ -177,7 +213,7 @@ ada_export() {
local impl var
case "${1}" in
- gnat_202[01])
+ gnat_2021|gcc_12|gcc_13)
impl=${1}
shift
;;
@@ -193,14 +229,18 @@ ada_export() {
local gcc_pv
local slot
case "${impl}" in
- gnat_2020)
- gcc_pv=9.3.1
- slot=9.3.1
- ;;
gnat_2021)
- gcc_pv=10.3.1
+ gcc_pv=10
slot=10
;;
+ gcc_12)
+ gcc_pv=12
+ slot=12
+ ;;
+ gcc_13)
+ gcc_pv=13
+ slot=13
+ ;;
*)
gcc_pv="9.9.9"
slot=9.9.9
@@ -221,6 +261,10 @@ ada_export() {
export GCC_PV=${gcc_pv}
debug-print "${FUNCNAME}: GCC_PV = ${GCC_PV}"
;;
+ GCCPV)
+ export GCCPV=${gcc_pv}
+ debug-print "${FUNCNAME}: GCCPV = ${GCC_PV}"
+ ;;
GNAT)
export GNAT=${EPREFIX}/usr/bin/gnat-${gcc_pv}
debug-print "${FUNCNAME}: GNAT = ${GNAT}"
@@ -246,7 +290,17 @@ ada_export() {
debug-print "${FUNCNAME}: GNATCHOP = ${GNATCHOP}"
;;
ADA_PKG_DEP)
- ADA_PKG_DEP="dev-lang/gnat-gpl:${slot}[ada]"
+ case "${impl}" in
+ gnat_2021)
+ ADA_PKG_DEP="dev-lang/gnat-gpl:${slot}[ada]"
+ ;;
+ gcc_12|gcc_13)
+ ADA_PKG_DEP="sys-devel/gcc:${slot}[ada]"
+ ;;
+ *)
+ ADA_PKG_DEP="=sys-devel/gcc-${gcc_pv}*[ada]"
+ ;;
+ esac
# use-dep
if [[ ${ADA_REQ_USE} ]]; then
@@ -415,7 +469,7 @@ ada_setup() {
if [[ ${#_ADA_SUPPORTED_IMPLS[@]} -eq 1 ]]; then
if use "ada_target_${_ADA_SUPPORTED_IMPLS[0]}"; then
# Only one supported implementation, enable it explicitly
- ada_export "${_ADA_SUPPORTED_IMPLS[0]}" EADA GCC_PV GNAT GNATBIND GNATLS GNATMAKE
+ ada_export "${_ADA_SUPPORTED_IMPLS[0]}" EADA GCCPV GCC_PV GNAT GNATBIND GNATLS GNATMAKE
ada_wrapper_setup
fi
else
@@ -431,7 +485,7 @@ ada_setup() {
die "More than one implementation in ADA_TARGET."
fi
- ada_export "${impl}" EADA GCC_PV GNAT GNATBIND GNATLS GNATMAKE
+ ada_export "${impl}" EADA GCCPV GCC_PV GNAT GNATBIND GNATLS GNATMAKE
ada_wrapper_setup
fi
done
@@ -460,3 +514,7 @@ ada_pkg_setup() {
[[ ${MERGE_TYPE} != binary ]] && ada_setup
}
+
+fi
+
+EXPORT_FUNCTIONS pkg_setup
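
For reference, the generated ADA_DEPS/ADA_REQUIRED_USE variables documented above are meant to be consumed roughly as follows. This is a sketch only: it assumes the usual ADA_COMPAT listing of supported implementations carried over from the python-single-r1 model, and the compiler list is illustrative.

EAPI=8

ADA_COMPAT=( gnat_2021 gcc_12 gcc_13 )

inherit ada

IUSE="ada"
REQUIRED_USE="ada? ( ${ADA_REQUIRED_USE} )"
RDEPEND="ada? ( ${ADA_DEPS} )"
DEPEND="${RDEPEND}"
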
diff --git a/eclass/alternatives.eclass b/eclass/alternatives.eclass
index 155a2457bbdf..2489fc67359a 100644
--- a/eclass/alternatives.eclass
+++ b/eclass/alternatives.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2022 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: alternatives.eclass
@@ -6,7 +6,7 @@
# maintainer-needed@gentoo.org
# @AUTHOR:
# Alastair Tse <liquidx@gentoo.org> (03 Oct 2003)
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 7
# @BLURB: Creates symlink to the latest version of multiple slotted packages.
# @DESCRIPTION:
# When a package is SLOT'ed, very often we need to have a symlink to the
@@ -42,12 +42,10 @@
# consider using this unless you want to do something special.
case ${EAPI} in
- [5-7]) ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS pkg_postinst pkg_postrm
-
if [[ -z ${_ALTERNATIVES_ECLASS} ]]; then
_ALTERNATIVES_ECLASS=1
@@ -95,8 +93,7 @@ alternatives_makesym() {
# usage: alternatives_makesym <resulting symlink> [alternative targets..]
# make sure it is in the prefix, allow it already to be in the prefix
SYMLINK=${EPREFIX}/${1#${EPREFIX}}
- # this trick removes the trailing / from ${ROOT}
- pref=${ROOT%/}
+ pref=${ROOT}
shift
ALTERNATIVES=$@
@@ -154,3 +151,5 @@ alternatives_pkg_postrm() {
}
fi
+
+EXPORT_FUNCTIONS pkg_postinst pkg_postrm
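
For orientation, alternatives_makesym takes the resulting symlink followed by the candidate targets, per the usage comment in the eclass; a sketch with made-up paths:

pkg_postinst() {
	# point /usr/bin/foo at one of the slotted installs (illustrative paths)
	alternatives_makesym /usr/bin/foo /usr/bin/foo-5 /usr/bin/foo-4
}
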
diff --git a/eclass/ant-tasks.eclass b/eclass/ant-tasks.eclass
index a11716bf2b62..2b10e0f3e1e0 100644
--- a/eclass/ant-tasks.eclass
+++ b/eclass/ant-tasks.eclass
@@ -1,37 +1,32 @@
-# Copyright 2007-2021 Gentoo Authors
+# Copyright 2007-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+# @DEAD
# @ECLASS: ant-tasks.eclass
# @MAINTAINER:
# java@gentoo.org
# @AUTHOR:
# Vlastimil Babka <caster@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7
# @PROVIDES: java-utils-2
# @BLURB: Eclass for building dev-java/ant-* packages
# @DESCRIPTION:
# This eclass provides functionality and default ebuild variables for building
# dev-java/ant-* packages easily.
-case "${EAPI:-0}" in
- 0|1|2|3|4|5)
- die "ant-tasks.eclass: EAPI ${EAPI} is too old."
- ;;
- 6|7)
- ;;
- *)
- die "ant-tasks.eclass: EAPI ${EAPI} is not supported yet."
- ;;
+case ${EAPI} in
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ -z ${_ANT_TASKS_ECLASS} ]]; then
+_ANT_TASKS_ECLASS=1
+
# we set ant-core dep ourselves, restricted
JAVA_ANT_DISABLE_ANT_CORE_DEP=true
# rewriting build.xml for the testcases has no reason atm
JAVA_PKG_BSFIX_ALL=no
inherit java-pkg-2 java-ant-2
-[[ ${EAPI:-0} -eq 6 ]] && inherit eapi7-ver
-
-EXPORT_FUNCTIONS src_unpack src_compile src_install
# @ECLASS_VARIABLE: ANT_TASK_JDKVER
# @PRE_INHERIT
@@ -171,3 +166,7 @@ ant-tasks_src_install() {
dodir /usr/share/ant/lib
dosym /usr/share/${PN}/lib/${PN}.jar /usr/share/ant/lib/${PN}.jar
}
+
+fi
+
+EXPORT_FUNCTIONS src_unpack src_compile src_install
diff --git a/eclass/apache-2.eclass b/eclass/apache-2.eclass
index f59030f926f5..17b8b0e2a64a 100644
--- a/eclass/apache-2.eclass
+++ b/eclass/apache-2.eclass
@@ -1,25 +1,26 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: apache-2.eclass
# @MAINTAINER:
+# apache-bugs@gentoo.org
+# @AUTHOR:
# polynomial-c@gentoo.org
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7
# @BLURB: Provides a common set of functions for apache-2.x ebuilds
# @DESCRIPTION:
# This eclass handles apache-2.x ebuild functions such as LoadModule generation
# and inter-module dependency checking.
LUA_COMPAT=( lua5-{1..4} )
-inherit autotools flag-o-matic lua-single multilib ssl-cert user toolchain-funcs
+inherit autotools flag-o-matic lua-single multilib ssl-cert toolchain-funcs
[[ ${CATEGORY}/${PN} != www-servers/apache ]] \
&& die "Do not use this eclass with anything else than www-servers/apache ebuilds!"
-case ${EAPI:-0} in
- 0|1|2|3|4|5|6)
- die "This eclass is banned for EAPI<7"
- ;;
+case ${EAPI} in
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
# settings which are version specific go in here:
@@ -87,7 +88,7 @@ SRC_URI="mirror://apache/httpd/httpd-${PV}.tar.bz2
# built-in modules
IUSE_MPMS="${IUSE_MPMS_FORK} ${IUSE_MPMS_THREAD}"
-IUSE="${IUSE} debug doc gdbm ldap selinux ssl static suexec +suexec-caps suexec-syslog split-usr threads"
+IUSE="${IUSE} debug doc gdbm ldap selinux ssl static suexec +suexec-caps suexec-syslog threads"
for module in ${IUSE_MODULES} ; do
case ${module} in
@@ -131,39 +132,46 @@ _apache2_set_mpms() {
_apache2_set_mpms
unset -f _apache2_set_mpms
+NGHTTP2_VERSION=1.2.1
+if ver_test ${PV} -ge 2.4.55 ; then
+ NGHTTP2_VERSION=1.50.0
+fi
+
# Dependencies
RDEPEND="
+ acct-group/apache
+ acct-user/apache
dev-lang/perl
>=dev-libs/apr-1.5.1:=
=dev-libs/apr-util-1*:=[gdbm=,ldap?]
- dev-libs/libpcre
+ dev-libs/libpcre2
virtual/libcrypt:=
apache2_modules_brotli? ( >=app-arch/brotli-0.6.0:= )
apache2_modules_deflate? ( sys-libs/zlib )
apache2_modules_http2? (
- >=net-libs/nghttp2-1.2.1
+ >=net-libs/nghttp2-${NGHTTP2_VERSION}:=
kernel_linux? ( sys-apps/util-linux )
)
apache2_modules_lua? ( ${LUA_DEPS} )
- apache2_modules_md? ( >=dev-libs/jansson-2.10 )
+ apache2_modules_md? ( >=dev-libs/jansson-2.10:= )
apache2_modules_mime? ( app-misc/mime-types )
apache2_modules_proxy_http2? (
- >=net-libs/nghttp2-1.2.1
+ >=net-libs/nghttp2-${NGHTTP2_VERSION}:=
kernel_linux? ( sys-apps/util-linux )
)
apache2_modules_session_crypto? (
dev-libs/apr-util[openssl]
)
+ apache2_modules_tls? ( >=net-libs/rustls-ffi-0.9.2:= <net-libs/rustls-ffi-11 )
gdbm? ( sys-libs/gdbm:= )
- ldap? ( =net-nds/openldap-2* )
+ ldap? ( net-nds/openldap:= )
selinux? ( sec-policy/selinux-apache )
ssl? (
- >=dev-libs/openssl-1.0.2:0=
+ >=dev-libs/openssl-1.0.2:=
kernel_linux? ( sys-apps/util-linux )
)
systemd? ( sys-apps/systemd )
"
-
DEPEND="${RDEPEND}"
BDEPEND="
virtual/pkgconfig
@@ -289,7 +297,7 @@ setup_modules() {
mod_type="shared"
fi
- MY_CONF=( --enable-so=static )
+ MY_CONF=( --enable-so=static --disable-static )
MY_MODS=()
if use ldap ; then
@@ -375,7 +383,7 @@ setup_modules() {
# This internal function generates the LoadModule lines for httpd.conf based on
# the current module selection and MODULE_DEFINES
generate_load_module() {
- local def= endit=0 m= mod_lines= mod_dir="${ED%/}/usr/$(get_libdir)/apache2/modules"
+ local def= endit=0 m= mod_lines= mod_dir="${ED}/usr/$(get_libdir)/apache2/modules"
if use static; then
sed -i -e "/%%LOAD_MODULE%%/d" \
@@ -437,10 +445,6 @@ check_upgrade() {
apache-2_pkg_setup() {
check_upgrade
- # setup apache user and group
- enewgroup apache 81
- enewuser apache 81 -1 /var/www apache
-
setup_mpm
setup_modules
@@ -472,7 +476,7 @@ apache-2_src_prepare() {
# 03_all_gentoo-apache-tools.patch injects -Wl,-z,now, which is not a good
# idea for everyone
case ${CHOST} in
- *-linux-gnu|*-solaris*|*-freebsd*)
+ *-linux-gnu|*-solaris*)
# do nothing, these use GNU binutils
:
;;
@@ -520,8 +524,8 @@ apache-2_src_prepare() {
chmod g-w "${T}" || die
# This package really should upgrade to using pcre's .pc file.
- cat <<-\EOF >"${T}"/pcre-config
- #!/bin/bash
+ cat <<-\EOF > "${T}"/pcre2-config
+ #!/usr/bin/env bash
flags=()
for flag; do
if [[ ${flag} == "--version" ]]; then
@@ -530,9 +534,9 @@ apache-2_src_prepare() {
flags+=( "${flag}" )
fi
done
- exec ${PKG_CONFIG} libpcre "${flags[@]}"
+ exec ${PKG_CONFIG} libpcre2-8 "${flags[@]}"
EOF
- chmod a+x "${T}"/pcre-config || die
+ chmod a+x "${T}"/pcre2-config || die
}
# @FUNCTION: apache-2_src_configure
@@ -541,10 +545,11 @@ apache-2_src_prepare() {
# MY_CONF
apache-2_src_configure() {
tc-export PKG_CONFIG
+ export ac_cv_path_PKGCONFIG="${PKG_CONFIG}"
# Sanity check in case people have bad mounts/TPE settings. #500928
- if ! "${T}"/pcre-config --help >/dev/null ; then
- eerror "Could not execute ${T}/pcre-config; do you have bad mount"
+ if ! "${T}"/pcre2-config --help &>/dev/null ; then
+ eerror "Could not execute ${T}/pcre2-config; do you have bad mount"
eerror "permissions in ${T} or have TPE turned on in your kernel?"
die "check your runtime settings #500928"
fi
@@ -569,13 +574,19 @@ apache-2_src_configure() {
--with-mpm=${MY_MPM}
--with-apr="${SYSROOT}${EPREFIX}"/usr
--with-apr-util="${SYSROOT}${EPREFIX}"/usr
- --with-pcre="${T}"/pcre-config
--with-z="${EPREFIX}"/usr
--with-port=80
--with-program-name=apache2
--enable-layout=Gentoo
)
- ac_cv_path_PKGCONFIG=${PKG_CONFIG} \
+
+ export ac_cv_prog_ac_ct_PCRE_CONFIG="${T}"/pcre2-config
+
+ MY_CONF+=(
+ --without-pcre
+ --with-pcre2="${T}"/pcre2-config
+ )
+
econf "${MY_CONF[@]}"
sed -i -e 's:apache2\.conf:httpd.conf:' include/ap_config_auto.h || die
@@ -623,9 +634,6 @@ apache-2_src_install() {
dosym /etc/init.d/apache2 /usr/sbin/apache2ctl
fi
- # provide legacy symlink for apxs, bug 177697
- dosym apxs /usr/sbin/apxs2
-
# install some documentation
dodoc ABOUT_APACHE CHANGES LAYOUT README README.platforms VERSIONING
dodoc "${GENTOO_PATCHDIR}"/docs/*
@@ -633,23 +641,23 @@ apache-2_src_install() {
# drop in a convenient link to the manual
if use doc ; then
sed -i -e "s:VERSION:${PVR}:" \
- "${ED%/}/etc/apache2/modules.d/00_apache_manual.conf" \
+ "${ED}/etc/apache2/modules.d/00_apache_manual.conf" \
|| die
docompress -x /usr/share/doc/${PF}/manual # 503640
else
- rm -f "${ED%/}/etc/apache2/modules.d/00_apache_manual.conf" \
+ rm -f "${ED}/etc/apache2/modules.d/00_apache_manual.conf" \
|| die
- rm -Rf "${ED%/}/usr/share/doc/${PF}/manual" || die
+ rm -rf "${ED}/usr/share/doc/${PF}/manual" || die
fi
# the default icons and error pages get stored in
# /usr/share/apache2/{error,icons}
dodir /usr/share/apache2
- mv -f "${ED%/}/var/www/localhost/error" \
- "${ED%/}/usr/share/apache2/error" || die
- mv -f "${ED%/}/var/www/localhost/icons" \
- "${ED%/}/usr/share/apache2/icons" || die
- rm -rf "${ED%/}/var/www/localhost/" || die
+ mv -f "${ED}/var/www/localhost/error" \
+ "${ED}/usr/share/apache2/error" || die
+ mv -f "${ED}/var/www/localhost/icons" \
+ "${ED}/usr/share/apache2/icons" || die
+ rm -rf "${ED}/var/www/localhost/" || die
# set some sane permissions for suexec
if use suexec ; then
@@ -663,7 +671,7 @@ apache-2_src_install() {
# empty dirs
local i
- for i in /var/lib/dav /var/log/apache2 /var/cache/apache2 ; do
+ for i in /var/lib/dav /var/log/apache2 ; do
keepdir ${i}
fowners apache:apache ${i}
fperms 0750 ${i}
diff --git a/eclass/apache-module.eclass b/eclass/apache-module.eclass
index 60631171ed91..5a84ffedf71a 100644
--- a/eclass/apache-module.eclass
+++ b/eclass/apache-module.eclass
@@ -1,18 +1,17 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: apache-module.eclass
# @MAINTAINER:
# apache-bugs@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: Provides a common set of functions for apache modules
# @DESCRIPTION:
# This eclass handles apache modules in a sane way.
#
# To make use of this eclass simply call one of the need/want_apache functions
# described in depend.apache.eclass. Make sure you use the need/want_apache call
-# after you have defined DEPEND and RDEPEND. Also note that you can not rely on
-# the automatic RDEPEND=DEPEND that portage does if you use this eclass.
+# after you have defined DEPEND and RDEPEND.
#
# See Bug 107127 for more information.
#
@@ -46,8 +45,8 @@
# @CODE
case ${EAPI} in
- [5-7]) ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_APACHE_MODULE_ECLASS} ]]; then
diff --git a/eclass/app-alternatives.eclass b/eclass/app-alternatives.eclass
new file mode 100644
index 000000000000..c6924bfc6d2a
--- /dev/null
+++ b/eclass/app-alternatives.eclass
@@ -0,0 +1,84 @@
+# Copyright 2022 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: app-alternatives.eclass
+# @MAINTAINER:
+# Michał Górny <mgorny@gentoo.org>
+# @AUTHOR:
+# Michał Górny <mgorny@gentoo.org>
+# @SUPPORTED_EAPIS: 8
+# @BLURB: Common logic for app-alternatives/*
+# @DESCRIPTION:
+# This eclass provides common logic shared by app-alternatives/*
+# ebuilds. A global ALTERNATIVES variable needs to be declared
+# that lists available options and their respective dependencies.
+# HOMEPAGE, S, LICENSE, SLOT, IUSE, REQUIRED_USE and RDEPEND are set.
+# A get_alternative() function is provided that determines the selected
+# alternative and prints its respective flag name.
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} unsupported."
+esac
+
+if [[ ! ${_APP_ALTERNATIVES_ECLASS} ]]; then
+_APP_ALTERNATIVES_ECLASS=1
+
+# @ECLASS_VARIABLE: ALTERNATIVES
+# @PRE_INHERIT
+# @REQUIRED
+# @DESCRIPTION:
+# Array of "flag:dependency" pairs specifying the available
+# alternatives. The default provider must be listed first.
+
+# @FUNCTION: _app-alternatives_set_globals
+# @INTERNAL
+# @DESCRIPTION:
+# Set ebuild metadata variables.
+_app-alternatives_set_globals() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ if [[ ${ALTERNATIVES@a} != *a* ]]; then
+ die 'ALTERNATIVES must be an array.'
+ elif [[ ${#ALTERNATIVES[@]} -eq 0 ]]; then
+ die 'ALTERNATIVES must not be empty.'
+ fi
+
+ HOMEPAGE="https://wiki.gentoo.org/wiki/Project:Base/Alternatives"
+ S=${WORKDIR}
+
+ LICENSE="CC0-1.0"
+ SLOT="0"
+
+ # yep, that's a cheap hack adding '+' to the first flag
+ IUSE="+${ALTERNATIVES[*]%%:*}"
+ REQUIRED_USE="^^ ( ${ALTERNATIVES[*]%%:*} )"
+ RDEPEND=""
+
+ local flag dep
+ for flag in "${ALTERNATIVES[@]}"; do
+ [[ ${flag} != *:* ]] && die "Invalid ALTERNATIVES item: ${flag}"
+ dep=${flag#*:}
+ flag=${flag%%:*}
+ RDEPEND+="
+ ${flag}? ( ${dep} )
+ "
+ done
+}
+_app-alternatives_set_globals
+
+# @FUNCTION: get_alternative
+# @DESCRIPTION:
+# Get the flag name for the selected alternative (i.e. the USE flag set).
+get_alternative() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local flag
+ for flag in "${ALTERNATIVES[@]%%:*}"; do
+ usev "${flag}" && return
+ done
+
+ die "No selected alternative found (REQUIRED_USE ignored?!)"
+}
+
+fi
diff --git a/eclass/aspell-dict-r1.eclass b/eclass/aspell-dict-r1.eclass
index 4d2df961993b..1a3bd0b21b24 100644
--- a/eclass/aspell-dict-r1.eclass
+++ b/eclass/aspell-dict-r1.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: aspell-dict-r1.eclass
@@ -7,7 +7,7 @@
# @AUTHOR:
# Seemant Kulleen <seemant@gentoo.org> (original author)
# David Seifert <soap@gentoo.org> (-r1 author)
-# @SUPPORTED_EAPIS: 7 8
+# @SUPPORTED_EAPIS: 8
# @BLURB: An eclass to streamline the construction of ebuilds for new Aspell dictionaries.
# @DESCRIPTION:
# The aspell-dict-r1 eclass is designed to streamline the construction of ebuilds for
@@ -36,18 +36,13 @@ readonly ASPELL_SPELLANG=${PN/aspell-/}
# This value is used to construct SRC_URI strings.
# If the value needs to be overridden, it needs to be overridden before inheriting the eclass.
-case ${EAPI:-0} in
- [7-8])
- ;;
- *)
- die "${ECLASS}: EAPI ${EAPI:-0} not supported"
- ;;
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_configure src_install
-
-if [[ ! ${_ASPELL_DICT_R1} ]]; then
-_ASPELL_DICT_R1=1
+if [[ ! ${_ASPELL_DICT_R1_ECLASS} ]]; then
+_ASPELL_DICT_R1_ECLASS=1
# Most of those aspell packages have an idiosyncratic versioning scheme,
# where the last separating version separator is replaced by a '-'.
@@ -63,7 +58,6 @@ SLOT="0"
RDEPEND="app-text/aspell"
DEPEND="${RDEPEND}"
-BDEPEND="sys-apps/which"
_ASPELL_MAJOR_VERSION=${ASPELL_VERSION:-6}
[[ ${_ASPELL_MAJOR_VERSION} != [56] ]] && die "Unsupported ASPELL_VERSION=${ASPELL_VERSION}"
@@ -73,6 +67,10 @@ unset _ASPELL_MAJOR_VERSION
# @DESCRIPTION:
# The aspell-dict-r1 src_configure function which is exported.
aspell-dict-r1_src_configure() {
+ # configure generates lines like:
+ # `echo "ASPELL = `which $ASPELL`" > Makefile`
+ sed -i -e '/.* = `which/ s:`which:`command -v:' configure || die
+
# Since it's a non-autoconf based script, 'econf' cannot be used.
./configure || die
}
@@ -86,3 +84,5 @@ aspell-dict-r1_src_install() {
}
fi
+
+EXPORT_FUNCTIONS src_configure src_install
diff --git a/eclass/autotools.eclass b/eclass/autotools.eclass
index d6c5b7f0ec0d..1ced771c5345 100644
--- a/eclass/autotools.eclass
+++ b/eclass/autotools.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: autotools.eclass
# @MAINTAINER:
# base-system@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: Regenerates auto* build scripts
# @DESCRIPTION:
# This eclass is for safely handling autotooled software packages that need to
@@ -13,6 +13,11 @@
# Note: We require GNU m4, as does autoconf. So feel free to use any features
# from the GNU version of m4 without worrying about other variants (i.e. BSD).
+case ${EAPI} in
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
if [[ ${_AUTOTOOLS_AUTO_DEPEND+set} == "set" ]] ; then
# See if we were included already, but someone changed the value
# of AUTOTOOLS_AUTO_DEPEND on us. We could reload the entire
@@ -26,14 +31,7 @@ fi
if [[ -z ${_AUTOTOOLS_ECLASS} ]] ; then
_AUTOTOOLS_ECLASS=1
-case ${EAPI} in
- 5|6)
- # Needed for eqawarn
- inherit eutils
- ;;
- 7|8) ;;
- *) die "${ECLASS}: EAPI ${EAPI} not supported" ;;
-esac
+[[ ${EAPI} == 6 ]] && inherit eqawarn
inherit gnuconfig libtool
@@ -41,19 +39,40 @@ inherit gnuconfig libtool
# @PRE_INHERIT
# @DESCRIPTION:
# The major version of autoconf your package needs
-: ${WANT_AUTOCONF:=latest}
+: "${WANT_AUTOCONF:=latest}"
# @ECLASS_VARIABLE: WANT_AUTOMAKE
# @PRE_INHERIT
# @DESCRIPTION:
# The major version of automake your package needs
-: ${WANT_AUTOMAKE:=latest}
+: "${WANT_AUTOMAKE:=latest}"
# @ECLASS_VARIABLE: WANT_LIBTOOL
# @PRE_INHERIT
# @DESCRIPTION:
# Do you want libtool? Valid values here are "latest" and "none".
-: ${WANT_LIBTOOL:=latest}
+: "${WANT_LIBTOOL:=latest}"
+
+# @ECLASS_VARIABLE: _LATEST_AUTOCONF
+# @INTERNAL
+# @DESCRIPTION:
+# CONSTANT!
+# The latest major unstable and stable version/slot of autoconf available
+# on each arch.
+# Only add unstable version if it is in a different slot than latest stable
+# version.
+# List latest unstable version first to boost testing adoption rate because
+# most package manager dependency resolver will pick the first suitable
+# version.
+# If a newer slot is stable on any arch, and is NOT reflected in this list,
+# then circular dependencies may arise during emerge @system bootstraps.
+#
+# See bug #312315 and bug #465732 for further information and context.
+#
+# Do NOT change this variable in your ebuilds!
+# If you want to force a newer minor version, you can specify the correct
+# WANT value by using a colon: <PV>:<WANT_AUTOCONF>
+_LATEST_AUTOCONF=( 2.72-r1:2.72 2.71-r6:2.71 )
# @ECLASS_VARIABLE: _LATEST_AUTOMAKE
# @INTERNAL
@@ -76,32 +95,61 @@ inherit gnuconfig libtool
# WANT value by using a colon: <PV>:<WANT_AUTOMAKE>
_LATEST_AUTOMAKE=( 1.16.5:1.16 )
-_automake_atom="sys-devel/automake"
-_autoconf_atom="sys-devel/autoconf"
+_automake_atom="dev-build/automake"
+_autoconf_atom="dev-build/autoconf"
if [[ -n ${WANT_AUTOMAKE} ]] ; then
case ${WANT_AUTOMAKE} in
- # Even if the package doesn't use automake, we still need to depend
- # on it because we run aclocal to process m4 macros. This matches
- # the autoreconf tool, so this requirement is correct, bug #401605.
- none) ;;
- latest) _automake_atom="|| ( `printf '>=sys-devel/automake-%s:%s ' ${_LATEST_AUTOMAKE[@]/:/ }` )" ;;
- *) _automake_atom="=sys-devel/automake-${WANT_AUTOMAKE}*";;
+ none)
+ # Even if the package doesn't use automake, we still need to depend
+ # on it because we run aclocal to process m4 macros. This matches
+ # the autoreconf tool, so this requirement is correct, bug #401605.
+ ;;
+ latest)
+ printf -v _automake_atom_tmp '>=dev-build/automake-%s:%s ' ${_LATEST_AUTOMAKE[@]/:/ }
+ _automake_atom="|| ( ${_automake_atom_tmp} )"
+ unset _automake_atom_tmp
+ ;;
+ *)
+ _automake_atom="=dev-build/automake-${WANT_AUTOMAKE}*"
+ ;;
esac
export WANT_AUTOMAKE
fi
if [[ -n ${WANT_AUTOCONF} ]] ; then
+ # TODO: Fix the slot mess here and just have proper PV-as-SLOT?
+ # TODO: Make _LATEST_AUTOCONF an assoc. array and instead iterate over
+ # its keys.
case ${WANT_AUTOCONF} in
- none) _autoconf_atom="" ;; # some packages don't require autoconf at all
- 2.1) _autoconf_atom="~sys-devel/autoconf-2.13" ;;
- # if you change the "latest" version here, change also autotools_env_setup
- latest|2.5) _autoconf_atom=">=sys-devel/autoconf-2.71" ;;
- *) die "Invalid WANT_AUTOCONF value '${WANT_AUTOCONF}'" ;;
+ none)
+ # some packages don't require autoconf at all
+ _autoconf_atom=""
+ ;;
+ 2.1)
+ _autoconf_atom=">=dev-build/autoconf-2.13-r7:2.1"
+ ;;
+ 2.5)
+ _autoconf_atom=">=dev-build/autoconf-2.71-r6:2.71"
+ ;;
+ 2.69)
+ _autoconf_atom=">=dev-build/autoconf-2.69-r9:2.69"
+ ;;
+ 2.71)
+ _autoconf_atom=">=dev-build/autoconf-2.71-r6:2.71"
+ ;;
+ latest)
+ printf -v _autoconf_atom_tmp '>=dev-build/autoconf-%s:%s ' ${_LATEST_AUTOCONF[@]/:/ }
+ _autoconf_atom="|| ( ${_autoconf_atom_tmp} )"
+ unset _autoconf_atom_tmp
+ ;;
+ *)
+ die "Invalid WANT_AUTOCONF value '${WANT_AUTOCONF}'"
+ ;;
esac
export WANT_AUTOCONF
fi
-_libtool_atom=">=sys-devel/libtool-2.4"
+_libtool_atom=">=dev-build/libtool-2.4.7-r3"
if [[ -n ${WANT_LIBTOOL} ]] ; then
case ${WANT_LIBTOOL} in
none) _libtool_atom="" ;;
@@ -127,10 +175,10 @@ RDEPEND=""
# Set to 'no' to disable automatically adding to DEPEND. This lets
# ebuilds form conditional depends by using ${AUTOTOOLS_DEPEND} in
# their own DEPEND string.
-: ${AUTOTOOLS_AUTO_DEPEND:=yes}
+: "${AUTOTOOLS_AUTO_DEPEND:=yes}"
if [[ ${AUTOTOOLS_AUTO_DEPEND} != "no" ]] ; then
case ${EAPI} in
- 5|6) DEPEND=${AUTOTOOLS_DEPEND} ;;
+ 6) DEPEND=${AUTOTOOLS_DEPEND} ;;
*) BDEPEND=${AUTOTOOLS_DEPEND} ;;
esac
fi
@@ -143,14 +191,14 @@ unset _automake_atom _autoconf_atom
# @DESCRIPTION:
# Additional options to pass to automake during
# eautoreconf call.
-: ${AM_OPTS:=}
+: "${AM_OPTS:=}"
# @ECLASS_VARIABLE: AT_NOEAUTOHEADER
# @DEFAULT_UNSET
# @DESCRIPTION:
# Don't run eautoheader command if set to 'yes'; only used to work around
# packages that don't want their headers being modified.
-: ${AT_NOEAUTOHEADER:=}
+: "${AT_NOEAUTOHEADER:=}"
# @ECLASS_VARIABLE: AT_NOEAUTOMAKE
# @DEFAULT_UNSET
@@ -158,7 +206,7 @@ unset _automake_atom _autoconf_atom
# Don't run eautomake command if set to 'yes'; only used to workaround
# broken packages. Generally you should, instead, fix the package to
# not call AM_INIT_AUTOMAKE if it doesn't actually use automake.
-: ${AT_NOEAUTOMAKE:=}
+: "${AT_NOEAUTOMAKE:=}"
# @ECLASS_VARIABLE: AT_NOELIBTOOLIZE
# @DEFAULT_UNSET
@@ -166,13 +214,13 @@ unset _automake_atom _autoconf_atom
# Don't run elibtoolize command if set to 'yes',
# useful when elibtoolize needs to be ran with
# particular options
-: ${AT_NOELIBTOOLIZE:=}
+: "${AT_NOELIBTOOLIZE:=}"
# @ECLASS_VARIABLE: AT_M4DIR
# @DEFAULT_UNSET
# @DESCRIPTION:
# Additional director(y|ies) aclocal should search
-: ${AT_M4DIR:=}
+: "${AT_M4DIR:=}"
# @ECLASS_VARIABLE: AT_SYS_M4DIR
# @DEFAULT_UNSET
@@ -181,7 +229,7 @@ unset _automake_atom _autoconf_atom
# For system integrators, a list of additional aclocal search paths.
# This variable gets eval-ed, so you can use variables in the definition
# that may not be valid until eautoreconf & friends are run.
-: ${AT_SYS_M4DIR:=}
+: "${AT_SYS_M4DIR:=}"
# @FUNCTION: eautoreconf
# @DESCRIPTION:
@@ -327,7 +375,7 @@ eaclocal_amflags() {
# @FUNCTION: eaclocal
# @DESCRIPTION:
# These functions runs the autotools using autotools_run_tool with the
-# specified parametes. The name of the tool run is the same of the function
+# specified parameters. The name of the tool run is the same of the function
# without e prefix.
# They also force installing the support files for safety.
# Respects AT_M4DIR for additional directories to search for macros.
@@ -336,7 +384,7 @@ eaclocal() {
# - ${BROOT}/usr/share/aclocal
# - ${ESYSROOT}/usr/share/aclocal
# See bug #677002
- if [[ ${EAPI} != [56] ]] ; then
+ if [[ ${EAPI} != 6 ]] ; then
if [[ ! -f "${T}"/aclocal/dirlist ]] ; then
mkdir "${T}"/aclocal || die
cat <<- EOF > "${T}"/aclocal/dirlist || die
@@ -391,10 +439,9 @@ eautoconf() {
die "No configure.{ac,in} present!"
fi
-
if [[ ${WANT_AUTOCONF} != "2.1" && -e configure.in ]] ; then
case ${EAPI} in
- 5|6|7)
+ 6|7)
eqawarn "This package has a configure.in file which has long been deprecated. Please"
eqawarn "update it to use configure.ac instead as newer versions of autotools will die"
eqawarn "when it finds this file. See https://bugs.gentoo.org/426262 for details."
@@ -402,7 +449,7 @@ eautoconf() {
*)
# Move configure file to the new location only on newer EAPIs to ensure
# checks are done rather than retroactively breaking ebuilds.
- eqawarn "Moving configure.in to configure.ac (bug #426262)"
+ einfo "Moving configure.in to configure.ac (bug #426262)"
mv configure.{in,ac} || die
;;
esac
@@ -485,7 +532,7 @@ config_rpath_update() {
local dst src
case ${EAPI} in
- 5|6)
+ 6)
src="${EPREFIX}/usr/share/gettext/config.rpath"
;;
*)
@@ -516,19 +563,57 @@ autotools_env_setup() {
# Break on first hit to respect _LATEST_AUTOMAKE order.
local hv_args=""
case ${EAPI} in
- 5|6)
+ 6)
hv_args="--host-root"
;;
*)
hv_args="-b"
;;
esac
- has_version ${hv_args} "=sys-devel/automake-${pv}*" && export WANT_AUTOMAKE="${pv}" && break
+ has_version ${hv_args} "=dev-build/automake-${pv}*" && export WANT_AUTOMAKE="${pv}" && break
done
- [[ ${WANT_AUTOMAKE} == "latest" ]] && \
- die "Cannot find the latest automake! Tried ${_LATEST_AUTOMAKE[*]}"
+
+ # During bootstrap in prefix there might be no automake merged yet
+ # due to --nodeps, but still available somewhere in PATH.
+ # For example, ncurses needs local libtool on aix and hpux.
+ # So, make the check non-fatal where automake doesn't yet
+ # exist within BROOT. (We could possibly do better here
+ # and inspect PATH, but I'm not sure there's much point.)
+ if use prefix && [[ ! -x "${BROOT}"/usr/bin/automake ]] ; then
+ [[ ${WANT_AUTOMAKE} == "latest" ]] && ewarn "Ignoring missing automake during Prefix bootstrap! Tried ${_LATEST_AUTOMAKE[*]}"
+ else
+ [[ ${WANT_AUTOMAKE} == "latest" ]] && die "Cannot find the latest automake! Tried ${_LATEST_AUTOMAKE[*]}"
+ fi
+ fi
+
+ if [[ ${WANT_AUTOCONF} == "latest" ]] ; then
+ local pv
+ for pv in ${_LATEST_AUTOCONF[@]/#*:} ; do
+ # Break on first hit to respect _LATEST_AUTOCONF order.
+ local hv_args=""
+ case ${EAPI} in
+ 6)
+ hv_args="--host-root"
+ ;;
+ *)
+ hv_args="-b"
+ ;;
+ esac
+ has_version ${hv_args} "=dev-build/autoconf-${pv}*" && export WANT_AUTOCONF="${pv}" && break
+ done
+
+ # During bootstrap in prefix there might be no autoconf merged yet
+ # due to --nodeps, but still available somewhere in PATH.
+ # For example, ncurses needs local libtool on aix and hpux.
+ # So, make the check non-fatal where autoconf doesn't yet
+ # exist within BROOT. (We could possibly do better here
+ # and inspect PATH, but I'm not sure there's much point.)
+ if use prefix && [[ ! -x "${BROOT}"/usr/bin/autoconf ]] ; then
+ [[ ${WANT_AUTOCONF} == "latest" ]] && ewarn "Ignoring missing autoconf during Prefix bootstrap! Tried ${_LATEST_AUTOCONF[*]}"
+ else
+ [[ ${WANT_AUTOCONF} == "latest" ]] && die "Cannot find the latest autoconf! Tried ${_LATEST_AUTOCONF[*]}"
+ fi
fi
- [[ ${WANT_AUTOCONF} == "latest" ]] && export WANT_AUTOCONF=2.71
}
# @FUNCTION: autotools_run_tool
@@ -552,7 +637,7 @@ autotools_run_tool() {
shift
done
- if [[ ${EBUILD_PHASE_FUNC} != "src_unpack" && ${EBUILD_PHASE_FUNC} != "src_prepare" ]] ; then
+ if [[ ${EBUILD_PHASE_FUNC} != "src_prepare" ]] ; then
eqawarn "Running '${1}' in ${EBUILD_PHASE_FUNC} phase"
fi
diff --git a/eclass/bazel.eclass b/eclass/bazel.eclass
deleted file mode 100644
index 641da6194ca7..000000000000
--- a/eclass/bazel.eclass
+++ /dev/null
@@ -1,226 +0,0 @@
-# Copyright 1999-2022 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-# @ECLASS: bazel.eclass
-# @MAINTAINER:
-# Jason Zaman <perfinion@gentoo.org>
-# @AUTHOR:
-# Jason Zaman <perfinion@gentoo.org>
-# @SUPPORTED_EAPIS: 7 8
-# @BLURB: Utility functions for packages using Bazel Build
-# @DESCRIPTION:
-# A utility eclass providing functions to run the Bazel Build system.
-#
-# This eclass does not export any phase functions.
-
-case "${EAPI:-0}" in
- 0|1|2|3|4|5|6)
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- 7|8)
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
-esac
-
-if [[ ! ${_BAZEL_ECLASS} ]]; then
-
-inherit multiprocessing toolchain-funcs
-
-if [[ ${CATEGORY}/${PN} != "dev-util/bazel" ]]; then
- BDEPEND=">=dev-util/bazel-0.20"
-fi
-
-# @FUNCTION: bazel_get_flags
-# @DESCRIPTION:
-# Obtain and print the bazel flags for target and host *FLAGS.
-#
-# To add more flags to this, append the flags to the
-# appropriate variable before calling this function
-bazel_get_flags() {
- local i fs=()
- for i in ${CFLAGS}; do
- fs+=( "--conlyopt=${i}" )
- done
- for i in ${BUILD_CFLAGS}; do
- fs+=( "--host_conlyopt=${i}" )
- done
- for i in ${CXXFLAGS}; do
- fs+=( "--cxxopt=${i}" )
- done
- for i in ${BUILD_CXXFLAGS}; do
- fs+=( "--host_cxxopt=${i}" )
- done
- for i in ${CPPFLAGS}; do
- fs+=( "--conlyopt=${i}" "--cxxopt=${i}" )
- done
- for i in ${BUILD_CPPFLAGS}; do
- fs+=( "--host_conlyopt=${i}" "--host_cxxopt=${i}" )
- done
- for i in ${LDFLAGS}; do
- fs+=( "--linkopt=${i}" )
- done
- for i in ${BUILD_LDFLAGS}; do
- fs+=( "--host_linkopt=${i}" )
- done
- echo "${fs[*]}"
-}
-
-# @FUNCTION: bazel_setup_bazelrc
-# @DESCRIPTION:
-# Creates the bazelrc with common options that will be passed
-# to bazel. This will be called by ebazel automatically so
-# does not need to be called from the ebuild.
-bazel_setup_bazelrc() {
- if [[ -f "${T}/bazelrc" ]]; then
- return
- fi
-
- # F: fopen_wr
- # P: /proc/self/setgroups
- # Even with standalone enabled, the Bazel sandbox binary is run for feature test:
- # https://github.com/bazelbuild/bazel/blob/7b091c1397a82258e26ab5336df6c8dae1d97384/src/main/java/com/google/devtools/build/lib/sandbox/LinuxSandboxedSpawnRunner.java#L61
- # https://github.com/bazelbuild/bazel/blob/76555482873ffcf1d32fb40106f89231b37f850a/src/main/tools/linux-sandbox-pid1.cc#L113
- addpredict /proc
-
- mkdir -p "${T}/bazel-cache" || die
- mkdir -p "${T}/bazel-distdir" || die
-
- cat > "${T}/bazelrc" <<-EOF || die
- startup --batch
-
- # dont strip HOME, portage sets a temp per-package dir
- build --action_env HOME
-
- # make bazel respect MAKEOPTS
- build --jobs=$(makeopts_jobs)
- build --compilation_mode=opt --host_compilation_mode=opt
-
- # FLAGS
- build $(bazel_get_flags)
-
- # Use standalone strategy to deactivate the bazel sandbox, since it
- # conflicts with FEATURES=sandbox.
- build --spawn_strategy=standalone --genrule_strategy=standalone
- test --spawn_strategy=standalone --genrule_strategy=standalone
-
- build --strip=never
- build --verbose_failures --noshow_loading_progress
- test --verbose_test_summary --verbose_failures --noshow_loading_progress
-
- # make bazel only fetch distfiles from the cache
- fetch --repository_cache="${T}/bazel-cache/" --distdir="${T}/bazel-distdir/"
- build --repository_cache="${T}/bazel-cache/" --distdir="${T}/bazel-distdir/"
-
- build --define=PREFIX=${EPREFIX%/}/usr
- build --define=LIBDIR=\$(PREFIX)/$(get_libdir)
- build --define=INCLUDEDIR=\$(PREFIX)/include
- EOF
-
- if tc-is-cross-compiler; then
- echo "build --distinct_host_configuration" >> "${T}/bazelrc" || die
- else
- echo "build --nodistinct_host_configuration" >> "${T}/bazelrc" || die
- fi
-}
-
-# @FUNCTION: ebazel
-# @USAGE: [<args>...]
-# @DESCRIPTION:
-# Run bazel with the bazelrc and output_base.
-#
-# output_base will be specific to $BUILD_DIR (if unset, $S).
-# bazel_setup_bazelrc will be called and the created bazelrc
-# will be passed to bazel.
-#
-# Will automatically die if bazel does not exit cleanly.
-ebazel() {
- bazel_setup_bazelrc
-
- # Use different build folders for each multibuild variant.
- local output_base="${BUILD_DIR:-${S}}"
- output_base="${output_base%/}-bazel-base"
- mkdir -p "${output_base}" || die
-
- set -- bazel --bazelrc="${T}/bazelrc" --output_base="${output_base}" ${@}
- echo "${*}" >&2
- "${@}" || die "ebazel failed"
-}
-
-# @FUNCTION: bazel_load_distfiles
-# @USAGE: <distfiles>...
-# @DESCRIPTION:
-# Populate the bazel distdir to fetch from since it cannot use
-# the network. Bazel looks in distdir but will only look for the
-# original filename, not the possibly renamed one that portage
-# downloaded. If the line has -> we to rename it back. This also
-# handles use-conditionals that SRC_URI does.
-#
-# Example:
-# @CODE
-# bazel_external_uris="http://a/file-2.0.tgz
-# python? ( http://b/1.0.tgz -> foo-1.0.tgz )"
-# SRC_URI="http://c/${PV}.tgz
-# ${bazel_external_uris}"
-#
-# src_unpack() {
-# unpack ${PV}.tgz
-# bazel_load_distfiles "${bazel_external_uris}"
-# }
-# @CODE
-bazel_load_distfiles() {
- local file=""
- local rename=0
-
- [[ "${@}" ]] || die "Missing args"
- mkdir -p "${T}/bazel-distdir" || die
-
- for word in ${@}
- do
- if [[ "${word}" == "->" ]]; then
- # next word is a dest filename
- rename=1
- elif [[ "${word}" == ")" ]]; then
- # close conditional block
- continue
- elif [[ "${word}" == "(" ]]; then
- # open conditional block
- continue
- elif [[ "${word}" == ?(\!)[A-Za-z0-9]*([A-Za-z0-9+_@-])\? ]]; then
- # use-conditional block
- # USE-flags can contain [A-Za-z0-9+_@-], and start with alphanum
- # https://dev.gentoo.org/~ulm/pms/head/pms.html#x1-200003.1.4
- # ?(\!) matches zero-or-one !'s
- # *(...) zero-or-more characters
- # ends with a ?
- continue
- elif [[ ${rename} -eq 1 ]]; then
- # Make sure the distfile is used
- if [[ "${A}" == *"${word}"* ]]; then
- echo "Copying ${word} to bazel distdir as ${file}"
- ln -s "${DISTDIR}/${word}" "${T}/bazel-distdir/${file}" || die
- fi
- rename=0
- file=""
- else
- # another URL, current one may or may not be a rename
- # if there was a previous one, its not renamed so copy it now
- if [[ -n "${file}" && "${A}" == *"${file}"* ]]; then
- echo "Copying ${file} to bazel distdir"
- ln -s "${DISTDIR}/${file}" "${T}/bazel-distdir/${file}" || die
- fi
- # save the current URL, later we will find out if its a rename or not.
- file="${word##*/}"
- fi
- done
-
- # handle last file
- if [[ -n "${file}" ]]; then
- echo "Copying ${file} to bazel distdir"
- ln -s "${DISTDIR}/${file}" "${T}/bazel-distdir/${file}" || die
- fi
-}
-
-_BAZEL_ECLASS=1
-fi
diff --git a/eclass/bzr.eclass b/eclass/bzr.eclass
index ba4ddad45f18..d522326773e1 100644
--- a/eclass/bzr.eclass
+++ b/eclass/bzr.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: bzr.eclass
@@ -21,55 +21,53 @@
case ${EAPI} in
7|8) ;;
- *) die "${ECLASS}: EAPI ${EAPI:-0} is not supported" ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
PROPERTIES+=" live"
BDEPEND="dev-vcs/breezy"
-EXPORT_FUNCTIONS src_unpack
-
# @ECLASS_VARIABLE: EBZR_STORE_DIR
# @USER_VARIABLE
# @DESCRIPTION:
# The directory to store all fetched Bazaar live sources.
-: ${EBZR_STORE_DIR:=${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/bzr-src}
+: "${EBZR_STORE_DIR:=${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/bzr-src}"
# @ECLASS_VARIABLE: EBZR_UNPACK_DIR
# @DESCRIPTION:
# The working directory where the sources are copied to.
-: ${EBZR_UNPACK_DIR:=${WORKDIR}/${P}}
+: "${EBZR_UNPACK_DIR:=${WORKDIR}/${P}}"
# @ECLASS_VARIABLE: EBZR_INIT_REPO_CMD
# @DESCRIPTION:
# The Bazaar command to initialise a shared repository.
-: ${EBZR_INIT_REPO_CMD:="brz init-shared-repository --no-trees"}
+: "${EBZR_INIT_REPO_CMD:="brz init-shared-repository --no-trees"}"
# @ECLASS_VARIABLE: EBZR_FETCH_CMD
# @DESCRIPTION:
# The Bazaar command to fetch the sources.
-: ${EBZR_FETCH_CMD:="brz branch --no-tree"}
+: "${EBZR_FETCH_CMD:="brz branch --no-tree"}"
# @ECLASS_VARIABLE: EBZR_UPDATE_CMD
# @DESCRIPTION:
# The Bazaar command to update the sources.
-: ${EBZR_UPDATE_CMD:="brz pull --overwrite-tags"}
+: "${EBZR_UPDATE_CMD:="brz pull --overwrite-tags"}"
# @ECLASS_VARIABLE: EBZR_EXPORT_CMD
# @DESCRIPTION:
# The Bazaar command to export a branch.
-: ${EBZR_EXPORT_CMD:="brz export"}
+: "${EBZR_EXPORT_CMD:="brz export"}"
# @ECLASS_VARIABLE: EBZR_CHECKOUT_CMD
# @DESCRIPTION:
# The Bazaar command to checkout a branch.
-: ${EBZR_CHECKOUT_CMD:="brz checkout --lightweight -q"}
+: "${EBZR_CHECKOUT_CMD:="brz checkout --lightweight -q"}"
# @ECLASS_VARIABLE: EBZR_REVNO_CMD
# @DESCRIPTION:
# The Bazaar command to list a revision number of the branch.
-: ${EBZR_REVNO_CMD:="brz revno"}
+: "${EBZR_REVNO_CMD:="brz revno"}"
# @ECLASS_VARIABLE: EBZR_OPTIONS
# @DEFAULT_UNSET
@@ -90,7 +88,7 @@ EXPORT_FUNCTIONS src_unpack
# If EBZR_BRANCH is set (see below), then a shared repository will be
# created in that directory, and the branch will be located in
# ${EBZR_STORE_DIR}/${EBZR_PROJECT}/${EBZR_BRANCH}.
-: ${EBZR_PROJECT:=${PN}}
+: "${EBZR_PROJECT:=${PN}}"
# @ECLASS_VARIABLE: EBZR_BRANCH
# @DEFAULT_UNSET
@@ -118,7 +116,7 @@ EXPORT_FUNCTIONS src_unpack
# Set this variable to a non-empty value to disable automatic updating
# of a bzr source tree. This is intended to be set outside the ebuild
# by users.
-: ${EBZR_OFFLINE=${EVCS_OFFLINE}}
+: "${EBZR_OFFLINE=${EVCS_OFFLINE}}"
# @ECLASS_VARIABLE: EVCS_UMASK
# @USER_VARIABLE
@@ -255,3 +253,5 @@ bzr_fetch() {
bzr_src_unpack() {
bzr_fetch
}
+
+EXPORT_FUNCTIONS src_unpack
diff --git a/eclass/cargo.eclass b/eclass/cargo.eclass
index c46f48146aed..0f2da982f60c 100644
--- a/eclass/cargo.eclass
+++ b/eclass/cargo.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cargo.eclass
@@ -10,6 +10,11 @@
# @SUPPORTED_EAPIS: 7 8
# @BLURB: common functions and variables for cargo builds
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
if [[ -z ${_CARGO_ECLASS} ]]; then
_CARGO_ECLASS=1
@@ -17,15 +22,11 @@ _CARGO_ECLASS=1
# https://github.com/rust-lang/cargo/blob/master/CHANGELOG.md
RUST_DEPEND="virtual/rust"
-case "${EAPI:-0}" in
- 0|1|2|3|4|5|6)
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
+case ${EAPI} in
7)
# 1.37 added 'cargo vendor' subcommand and net.offline config knob
RUST_DEPEND=">=virtual/rust-1.37.0"
;;
-
8)
# 1.39 added --workspace
# 1.46 added --target dir
@@ -34,23 +35,12 @@ case "${EAPI:-0}" in
# 1.52 may need setting RUSTC_BOOTSTRAP envvar for some crates
# 1.53 added cargo update --offline, can be used to update vulnerable crates from pre-fetched registry without editing toml
RUST_DEPEND=">=virtual/rust-1.53"
-
- if [[ -z ${CRATES} && "${PV}" != *9999* ]]; then
- eerror "undefined CRATES variable in non-live EAPI=8 ebuild"
- die "CRATES variable not defined"
- fi
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
-inherit multiprocessing toolchain-funcs
+inherit flag-o-matic multiprocessing toolchain-funcs
-if [[ ! ${CARGO_OPTIONAL} ]]; then
- BDEPEND="${RUST_DEPEND}"
- EXPORT_FUNCTIONS src_unpack src_configure src_compile src_install src_test
-fi
+[[ ! ${CARGO_OPTIONAL} ]] && BDEPEND="${RUST_DEPEND}"
IUSE="${IUSE} debug"
@@ -61,40 +51,78 @@ ECARGO_VENDOR="${ECARGO_HOME}/gentoo"
# @DEFAULT_UNSET
# @PRE_INHERIT
# @DESCRIPTION:
-# bash string containing all crates package wants to download
-# used by cargo_crate_uris()
+# Bash string containing all crates that are to be downloaded.
+# It is used by cargo_crate_uris.
+#
+# Ideally, crate names and versions should be separated by a `@`
+# character. A legacy syntax using a hyphen is also supported, but it is
+# much slower.
+#
# Example:
# @CODE
# CRATES="
-# metal-1.2.3
-# bar-4.5.6
-# iron_oxide-0.0.1
+# metal@1.2.3
+# bar@4.5.6
+# iron_oxide@0.0.1
# "
# inherit cargo
# ...
-# SRC_URI="$(cargo_crate_uris)"
+# SRC_URI="${CARGO_CRATE_URIS}"
+# @CODE
+
+# @ECLASS_VARIABLE: GIT_CRATES
+# @DEFAULT_UNSET
+# @PRE_INHERIT
+# @DESCRIPTION:
+# Bash associative array containing all of the crates that are to be
+# fetched via git. It is used by cargo_crate_uris.
+# If this is defined, then cargo_src_install will add --frozen to "cargo install".
+# The key is a crate name, the value is a semicolon-separated list of:
+#
+# - the URI to fetch the crate from.
+# - This intelligently handles GitHub and GitLab URIs so that
+# just the repository path is needed.
+# - The string "%commit%" gets replaced with the commit's checksum.
+# - the checksum of the commit to use.
+# - optionally: the path to look for Cargo.toml in.
+# - This will also replace the string "%commit%" with the commit's checksum.
+# - Defaults to: "${crate}-%commit%"
+#
+# Example of a simple definition with no path to Cargo.toml:
+# @CODE
+# declare -A GIT_CRATES=(
+# [home]="https://github.com/rbtcollins/home;a243ee2fbee6022c57d56f5aa79aefe194eabe53"
+# )
+# @CODE
+#
+# Example with paths defined:
+# @CODE
+# declare -A GIT_CRATES=(
+# [rustpython-common]="https://github.com/RustPython/RustPython;4f38cb68e4a97aeea9eb19673803a0bd5f655383;RustPython-%commit%/common"
+# [rustpython-parser]="https://github.com/RustPython/RustPython;4f38cb68e4a97aeea9eb19673803a0bd5f655383;RustPython-%commit%/compiler/parser"
+# )
# @CODE
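Putting the two variables together, a hedged sketch of the top of a non-live ebuild (the crate names, versions and tarball URI are placeholders; the GIT_CRATES entry reuses the example above):

	CRATES="
		foo@1.2.3
		bar@4.5.6
	"
	declare -A GIT_CRATES=(
		[home]="https://github.com/rbtcollins/home;a243ee2fbee6022c57d56f5aa79aefe194eabe53"
	)

	inherit cargo

	SRC_URI="
		https://example.org/${P}.tar.gz
		${CARGO_CRATE_URIS}
	"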
# @ECLASS_VARIABLE: CARGO_OPTIONAL
# @DEFAULT_UNSET
# @PRE_INHERIT
# @DESCRIPTION:
-# If set to a non-null value, before inherit cargo part of the ebuild will
+# If set to a non-null value, the part of the ebuild before "inherit cargo" will
# be considered optional. No dependencies will be added and no phase
# functions will be exported.
#
# If you enable CARGO_OPTIONAL, you have to set BDEPEND on virtual/rust
# for your package and call at least cargo_gen_config manually before using
-# other src_ functions of this eclass.
-# note that cargo_gen_config is automatically called by cargo_src_unpack.
+# other src_ functions of this eclass.
+# Note that cargo_gen_config is automatically called by cargo_src_unpack.
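A minimal sketch of the optional mode (the 'rust' USE flag and its meaning are hypothetical):

	CARGO_OPTIONAL=1
	inherit cargo

	IUSE="rust"
	BDEPEND="rust? ( virtual/rust )"

	src_unpack() {
		if use rust; then
			cargo_src_unpack	# also runs cargo_gen_config
		else
			default
		fi
	}

	src_configure() {
		if use rust; then
			cargo_src_configure
		fi
		# ... configure the rest of the package here ...
	}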
# @ECLASS_VARIABLE: myfeatures
# @DEFAULT_UNSET
# @DESCRIPTION:
# Optional cargo features defined as bash array.
-# Should be defined before calling cargo_src_configure().
+# Should be defined before calling cargo_src_configure.
#
-# Example package that has x11 and wayland as features, and disables default.
+# Example of a package that has x11 and wayland features and disables default features.
# @CODE
# src_configure() {
# local myfeatures=(
@@ -114,7 +142,7 @@ ECARGO_VENDOR="${ECARGO_HOME}/gentoo"
# This is intended to be set by users.
# Ebuilds must not set it.
#
-# Defaults to "${DISTDIR}/cargo-registry" it not set.
+# Defaults to "${DISTDIR}/cargo-registry" if not set.
# @ECLASS_VARIABLE: ECARGO_OFFLINE
# @USER_VARIABLE
@@ -134,32 +162,92 @@ ECARGO_VENDOR="${ECARGO_HOME}/gentoo"
# group, and then switch over to building with FEATURES=userpriv.
# Or vice-versa.
-# @FUNCTION: cargo_crate_uris
+# @ECLASS_VARIABLE: CARGO_CRATE_URIS
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# List of URIs to put in SRC_URI, created from the CRATES variable.
+
+# @FUNCTION: _cargo_set_crate_uris
+# @USAGE: <crates>
# @DESCRIPTION:
# Generates the URIs to put in SRC_URI to help fetch dependencies.
-# Uses first argument as crate list.
-# If no argument provided, uses CRATES variable.
-cargo_crate_uris() {
+# Constructs a list of crates from its arguments.
+# If no arguments are provided, it uses the CRATES variable.
+# The value is set as CARGO_CRATE_URIS.
+_cargo_set_crate_uris() {
local -r regex='^([a-zA-Z0-9_\-]+)-([0-9]+\.[0-9]+\.[0-9]+.*)$'
- local crate crates
+ local crates=${1}
+ local crate
- if [[ -n ${@} ]]; then
- crates="$@"
- elif [[ -n ${CRATES} ]]; then
- crates="${CRATES}"
- else
+ CARGO_CRATE_URIS=
+ for crate in ${crates}; do
+ local name version url
+ if [[ ${crate} == *@* ]]; then
+ name=${crate%@*}
+ version=${crate##*@}
+ else
+ [[ ${crate} =~ ${regex} ]] ||
+ die "Could not parse name and version from crate: ${crate}"
+ name="${BASH_REMATCH[1]}"
+ version="${BASH_REMATCH[2]}"
+ fi
+ url="https://crates.io/api/v1/crates/${name}/${version}/download -> ${name}-${version}.crate"
+ CARGO_CRATE_URIS+="${url} "
+
+ # when invoked by pkgbump, avoid fetching all the crates
+ # we just output the first one, to avoid creating empty groups
+ # in SRC_URI
+ [[ ${PKGBUMPING} == ${PVR} ]] && return
+ done
+
+ if declare -p GIT_CRATES &>/dev/null; then
+ if [[ $(declare -p GIT_CRATES) == "declare -A"* ]]; then
+ local crate commit crate_uri crate_dir repo_ext feat_expr
+
+ for crate in "${!GIT_CRATES[@]}"; do
+ IFS=';' read -r crate_uri commit crate_dir <<< "${GIT_CRATES[${crate}]}"
+
+ case "${crate_uri}" in
+ https://github.com/*)
+ repo_ext=".gh"
+ repo_name="${crate_uri##*/}"
+ crate_uri="${crate_uri%/}/archive/%commit%.tar.gz"
+ ;;
+ https://gitlab.com/*)
+ repo_ext=".gl"
+ repo_name="${crate_uri##*/}"
+ crate_uri="${crate_uri%/}/-/archive/%commit%/${repo_name}-%commit%.tar.gz"
+ ;;
+ *)
+ repo_ext=
+ repo_name="${crate}"
+ ;;
+ esac
+
+ CARGO_CRATE_URIS+="${crate_uri//%commit%/${commit}} -> ${repo_name}-${commit}${repo_ext}.tar.gz "
+ done
+ else
+			die "GIT_CRATES must be declared as an associative array"
+ fi
+ fi
+}
+_cargo_set_crate_uris "${CRATES}"
+
+# @FUNCTION: cargo_crate_uris
+# @USAGE: [<crates>...]
+# @DESCRIPTION:
+# Generates the URIs to put in SRC_URI to help fetch dependencies.
+# Constructs a list of crates from its arguments.
+# If no arguments are provided, it uses the CRATES variable.
+cargo_crate_uris() {
+ local crates=${*-${CRATES}}
+ if [[ -z ${crates} ]]; then
eerror "CRATES variable is not defined and nothing passed as argument"
die "Can't generate SRC_URI from empty input"
fi
- for crate in ${crates}; do
- local name version url
- [[ $crate =~ $regex ]] || die "Could not parse name and version from crate: $crate"
- name="${BASH_REMATCH[1]}"
- version="${BASH_REMATCH[2]}"
- url="https://crates.io/api/v1/crates/${name}/${version}/download -> ${crate}.crate"
- echo "${url}"
- done
+ _cargo_set_crate_uris "${crates}"
+ echo "${CARGO_CRATE_URIS}"
}
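For reference, both spellings keep working: the variable is filled in once when the eclass is inherited, while the function form remains for existing ebuilds:

	# preferred: precomputed at inherit time
	SRC_URI="${CARGO_CRATE_URIS}"

	# legacy form, still supported
	SRC_URI="$(cargo_crate_uris)"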
# @FUNCTION: cargo_gen_config
@@ -183,7 +271,7 @@ cargo_gen_config() {
[source.crates-io]
replace-with = "gentoo"
- local-registry = "/nonexistant"
+ local-registry = "/nonexistent"
[net]
offline = true
@@ -195,15 +283,49 @@ cargo_gen_config() {
[term]
verbose = true
$([[ "${NOCOLOR}" = true || "${NOCOLOR}" = yes ]] && echo "color = 'never'")
+ $(_cargo_gen_git_config)
_EOF_
export CARGO_HOME="${ECARGO_HOME}"
_CARGO_GEN_CONFIG_HAS_RUN=1
}
+# @FUNCTION: _cargo_gen_git_config
+# @USAGE:
+# @INTERNAL
+# @DESCRIPTION:
+# Generate the cargo config for git crates. This outputs the
+# configuration needed to override the cargo config so that the local
+# git crates specified in GIT_CRATES are used rather than fetched
+# from git.
+#
+# Called by cargo_gen_config when generating the config.
+_cargo_gen_git_config() {
+ local git_crates_type
+ git_crates_type="$(declare -p GIT_CRATES 2>&-)"
+
+ if [[ ${git_crates_type} == "declare -A "* ]]; then
+ local crate commit crate_uri crate_dir
+ local -A crate_patches
+
+ for crate in "${!GIT_CRATES[@]}"; do
+ IFS=';' read -r crate_uri commit crate_dir <<< "${GIT_CRATES[${crate}]}"
+ : "${crate_dir:=${crate}-%commit%}"
+ crate_patches["${crate_uri}"]+="${crate} = { path = \"${WORKDIR}/${crate_dir//%commit%/${commit}}\" };;"
+ done
+
+ for crate_uri in "${!crate_patches[@]}"; do
+ printf -- "[patch.'%s']\\n%s\n" "${crate_uri}" "${crate_patches["${crate_uri}"]//;;/$'\n'}"
+ done
+
+ elif [[ -n ${git_crates_type} ]]; then
+		die "GIT_CRATES must be declared as an associative array"
+ fi
+}
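For illustration, with the RustPython GIT_CRATES example above this function would emit patch sections of roughly this shape (WORKDIR is expanded at build time; section order is unspecified):

	[patch.'https://github.com/RustPython/RustPython']
	rustpython-common = { path = "${WORKDIR}/RustPython-4f38cb68e4a97aeea9eb19673803a0bd5f655383/common" }
	rustpython-parser = { path = "${WORKDIR}/RustPython-4f38cb68e4a97aeea9eb19673803a0bd5f655383/compiler/parser" }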
+
# @FUNCTION: cargo_src_unpack
# @DESCRIPTION:
-# Unpacks the package and the cargo registry
+# Unpacks the package and the cargo registry.
cargo_src_unpack() {
debug-print-function ${FUNCNAME} "$@"
@@ -214,6 +336,9 @@ cargo_src_unpack() {
for archive in ${A}; do
case "${archive}" in
*.crate)
+ # when called by pkgdiff-mg, do not unpack crates
+ [[ ${PKGBUMPING} == ${PVR} ]] && continue
+
ebegin "Loading ${archive} into Cargo registry"
tar -xf "${DISTDIR}"/${archive} -C "${ECARGO_VENDOR}/" || die
# generate sha256sum of the crate itself as cargo needs this
@@ -243,7 +368,8 @@ cargo_src_unpack() {
# @FUNCTION: cargo_live_src_unpack
# @DESCRIPTION:
-# Runs 'cargo fetch' and vendors downloaded crates for offline use, used in live ebuilds
+# Runs 'cargo fetch' and vendors the downloaded crates for offline use; used in live ebuilds.
+# NOTE: might require passing --frozen to cargo_src_configure if git dependencies are used.
cargo_live_src_unpack() {
debug-print-function ${FUNCNAME} "$@"
@@ -255,7 +381,7 @@ cargo_live_src_unpack() {
mkdir -p "${ECARGO_HOME}" || die
local distdir=${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}
- : ${ECARGO_REGISTRY_DIR:=${distdir}/cargo-registry}
+ : "${ECARGO_REGISTRY_DIR:=${distdir}/cargo-registry}"
local offline="${ECARGO_OFFLINE:-${EVCS_OFFLINE}}"
@@ -284,7 +410,7 @@ cargo_live_src_unpack() {
pushd "${S}" > /dev/null || die
- # Respect user settings befire cargo_gen_config is called.
+ # Respect user settings before cargo_gen_config is called.
if [[ ! ${CARGO_TERM_COLOR} ]]; then
[[ "${NOCOLOR}" = true || "${NOCOLOR}" = yes ]] && export CARGO_TERM_COLOR=never
local unset_color=true
@@ -309,8 +435,8 @@ cargo_live_src_unpack() {
# Users may have git checkouts made by cargo.
# While cargo vendors the sources, it still needs git checkout to be present.
- # Copying full dir is an overkill, so just symlink it.
- if [[ -d ${ECARGO_REGISTRY_DIR}/git ]]; then
+ # Copying full dir is overkill, so just symlink it (guard w/ -L to keep idempotent).
+ if [[ -d ${ECARGO_REGISTRY_DIR}/git && ! -L "${ECARGO_HOME}/git" ]]; then
ln -sv "${ECARGO_REGISTRY_DIR}/git" "${ECARGO_HOME}/git" || die
fi
@@ -337,10 +463,10 @@ cargo_live_src_unpack() {
# will be passed to cargo in all phases.
# Make sure all cargo subcommands support flags passed here.
#
-# Example for package that explicitly builds only 'baz' binary and
+# Example of a package that explicitly builds only 'baz' binary and
# enables 'barfeature' and optional 'foo' feature.
-# will pass '--features barfeature --features foo --bin baz'
-# in src_{compile,test,install}
+# It will pass '--features barfeature --features foo --bin baz'
+# in src_{compile,test,install}.
#
# @CODE
# src_configure() {
@@ -352,9 +478,13 @@ cargo_live_src_unpack() {
# }
# @CODE
#
-# In some cases crates may need '--no-default-features' option,
-# as there is no way to disable single feature, except disabling all.
-# It can be passed directly to cargo_src_configure().
+# In some cases crates may need the '--no-default-features' option,
+# as there is no way to disable a single default feature, except disabling all.
+# It can be passed directly to cargo_src_configure.
+#
+# Some live/9999 ebuilds may need the '--frozen' option if git crates
+# are used.
+# Otherwise the src_install phase may query the network again and fail.
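A hedged sketch of passing the option through (the feature name reuses the wayland example above):

	src_configure() {
		local myfeatures=( $(usev wayland) )
		cargo_src_configure --frozen
	}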
cargo_src_configure() {
debug-print-function ${FUNCNAME} "$@"
@@ -379,13 +509,14 @@ cargo_src_configure() {
# @FUNCTION: cargo_src_compile
# @DESCRIPTION:
-# Build the package using cargo build
+# Build the package using cargo build.
cargo_src_compile() {
debug-print-function ${FUNCNAME} "$@"
[[ ${_CARGO_GEN_CONFIG_HAS_RUN} ]] || \
die "FATAL: please call cargo_gen_config before using ${FUNCNAME}"
+ filter-lto
tc-export AR CC CXX PKG_CONFIG
set -- cargo build $(usex debug "" --release) ${ECARGO_ARGS[@]} "$@"
@@ -395,10 +526,10 @@ cargo_src_compile() {
# @FUNCTION: cargo_src_install
# @DESCRIPTION:
-# Installs the binaries generated by cargo
-# In come case workspaces need alternative --path parameter
-# default is '--path ./' if nothing specified.
-# '--path ./somedir' can be passed directly to cargo_src_install()
+# Installs the binaries generated by cargo.
+# In some cases workspaces need an alternative --path parameter.
+# Defaults to '--path ./' if no path is specified.
+# '--path ./somedir' can be passed directly to cargo_src_install.
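A minimal sketch for a workspace whose installable crate lives in a subdirectory (the path is hypothetical):

	src_install() {
		cargo_src_install --path ./cli
	}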
cargo_src_install() {
debug-print-function ${FUNCNAME} "$@"
@@ -407,6 +538,7 @@ cargo_src_install() {
set -- cargo install $(has --path ${@} || echo --path ./) \
--root "${ED}/usr" \
+ ${GIT_CRATES[@]:+--frozen} \
$(usex debug --debug "") \
${ECARGO_ARGS[@]} "$@"
einfo "${@}"
@@ -429,7 +561,7 @@ cargo_src_install() {
# @FUNCTION: cargo_src_test
# @DESCRIPTION:
-# Test the package using cargo test
+# Test the package using cargo test.
cargo_src_test() {
debug-print-function ${FUNCNAME} "$@"
@@ -442,3 +574,7 @@ cargo_src_test() {
}
fi
+
+if [[ ! ${CARGO_OPTIONAL} ]]; then
+ EXPORT_FUNCTIONS src_unpack src_configure src_compile src_install src_test
+fi
diff --git a/eclass/cdrom.eclass b/eclass/cdrom.eclass
index 81539e8560ce..4e56db951196 100644
--- a/eclass/cdrom.eclass
+++ b/eclass/cdrom.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cdrom.eclass
@@ -200,7 +200,7 @@ cdrom_load_next_cd() {
while true ; do
local i cdset
- : CD_ROOT_${CDROM_CURRENT_CD}
+ : "CD_ROOT_${CDROM_CURRENT_CD}"
export CDROM_ROOT=${CD_ROOT:-${!_}}
local var="CDROM_CHECK_${CDROM_CURRENT_CD}"
IFS=: read -r -a cdset -d "" <<< "${!var}"
diff --git a/eclass/check-reqs.eclass b/eclass/check-reqs.eclass
index ddf0a47775ae..fac2f4553d74 100644
--- a/eclass/check-reqs.eclass
+++ b/eclass/check-reqs.eclass
@@ -1,4 +1,4 @@
-# Copyright 2004-2022 Gentoo Authors
+# Copyright 2004-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: check-reqs.eclass
@@ -40,12 +40,10 @@
case ${EAPI} in
6|7|8) ;;
- *) die "${ECLASS}: EAPI=${EAPI:-0} is not supported" ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS pkg_pretend pkg_setup
-
-if [[ ! ${_CHECK_REQS_ECLASS} ]]; then
+if [[ -z ${_CHECK_REQS_ECLASS} ]]; then
_CHECK_REQS_ECLASS=1
# @ECLASS_VARIABLE: CHECKREQS_MEMORY
@@ -293,9 +291,11 @@ _check-reqs_output() {
[[ ${EBUILD_PHASE} == "pretend" && -z ${CHECKREQS_DONOTHING} ]] && msg="eerror"
if [[ -n ${CHECKREQS_FAILED} ]]; then
${msg}
- ${msg} "Space constraints set in the ebuild were not met!"
- ${msg} "The build will most probably fail, you should enhance the space"
- ${msg} "as per failed tests."
+ ${msg} "Memory or space constraints set in the ebuild were not met!"
+ ${msg} "The build will most probably fail, you should:"
+ ${msg} "- enhance the memory (reduce MAKEOPTS, add swap), or"
+ ${msg} "- add more space"
+ ${msg} "as required depending on the failed tests."
${msg}
[[ ${EBUILD_PHASE} == "pretend" && -z ${CHECKREQS_DONOTHING} ]] && \
@@ -467,3 +467,5 @@ _check-reqs_unsatisfied() {
}
fi
+
+EXPORT_FUNCTIONS pkg_pretend pkg_setup
diff --git a/eclass/chromium-2.eclass b/eclass/chromium-2.eclass
index bf509d8ff0f8..0831f1f17aa5 100644
--- a/eclass/chromium-2.eclass
+++ b/eclass/chromium-2.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: chromium-2.eclass
@@ -11,7 +11,7 @@
case ${EAPI} in
7|8) ;;
- *) die "${ECLASS}: EAPI=${EAPI:-0} is not supported" ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
inherit linux-info
@@ -122,7 +122,7 @@ chromium_pkg_die() {
fi
# Prevent user problems like bug #348235.
- if ( shopt -s extglob; is-flagq '-g?(gdb)?([1-9])' ); then
+ if is-flagq '-g?(gdb)?([1-9])'; then
ewarn
ewarn "You have enabled debug info (i.e. -g or -ggdb in your CFLAGS/CXXFLAGS)."
ewarn "This produces very large build files causes the linker to consume large"
diff --git a/eclass/cmake-multilib.eclass b/eclass/cmake-multilib.eclass
index 61bacd3d60e3..826f0ffcd967 100644
--- a/eclass/cmake-multilib.eclass
+++ b/eclass/cmake-multilib.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cmake-multilib.eclass
@@ -7,7 +7,7 @@
# @AUTHOR:
# Author: Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 7 8
-# @PROVIDES: cmake cmake-utils multilib-minimal
+# @PROVIDES: cmake multilib-minimal
# @BLURB: cmake wrapper for multilib builds
# @DESCRIPTION:
# The cmake-multilib.eclass provides a glue between cmake.eclass(5)
@@ -20,33 +20,23 @@
# in multilib-minimal, yet they ought to call appropriate cmake
# phase rather than 'default'.
-[[ ${EAPI} == 7 ]] && : ${CMAKE_ECLASS:=cmake-utils}
# @ECLASS_VARIABLE: CMAKE_ECLASS
-# @PRE_INHERIT
+# @DEPRECATED: none
# @DESCRIPTION:
-# Only "cmake" is supported in EAPI-8 and later.
-# In EAPI-7, default is "cmake-utils" for compatibility. Specify "cmake" for
-# ebuilds that ported to cmake.eclass already.
-: ${CMAKE_ECLASS:=cmake}
-
-# @ECLASS_VARIABLE: _CMAKE_ECLASS_IMPL
-# @INTERNAL
-# @DESCRIPTION:
-# TODO: Cleanup once EAPI-7 support is gone.
-_CMAKE_ECLASS_IMPL=cmake
+# Only "cmake" is supported.
+: "${CMAKE_ECLASS:=cmake}"
case ${EAPI} in
7|8)
case ${CMAKE_ECLASS} in
- cmake-utils|cmake) ;;
+ cmake) ;;
*)
eerror "Unknown value for \${CMAKE_ECLASS}"
die "Value ${CMAKE_ECLASS} is not supported"
;;
esac
- _CMAKE_ECLASS_IMPL=${CMAKE_ECLASS}
;;
- *) die "${ECLASS}: EAPI=${EAPI:-0} is not supported" ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ ${CMAKE_IN_SOURCE_BUILD} ]]; then
@@ -56,7 +46,7 @@ fi
if [[ -z ${_CMAKE_MULTILIB_ECLASS} ]]; then
_CMAKE_MULTILIB_ECLASS=1
-inherit ${_CMAKE_ECLASS_IMPL} multilib-minimal
+inherit cmake multilib-minimal
cmake-multilib_src_configure() {
local _cmake_args=( "${@}" )
@@ -65,7 +55,7 @@ cmake-multilib_src_configure() {
}
multilib_src_configure() {
- ${_CMAKE_ECLASS_IMPL}_src_configure "${_cmake_args[@]}"
+ cmake_src_configure "${_cmake_args[@]}"
}
cmake-multilib_src_compile() {
@@ -75,7 +65,7 @@ cmake-multilib_src_compile() {
}
multilib_src_compile() {
- ${_CMAKE_ECLASS_IMPL}_src_compile "${_cmake_args[@]}"
+ cmake_src_compile "${_cmake_args[@]}"
}
cmake-multilib_src_test() {
@@ -85,7 +75,7 @@ cmake-multilib_src_test() {
}
multilib_src_test() {
- ${_CMAKE_ECLASS_IMPL}_src_test "${_cmake_args[@]}"
+ cmake_src_test "${_cmake_args[@]}"
}
cmake-multilib_src_install() {
@@ -95,7 +85,7 @@ cmake-multilib_src_install() {
}
multilib_src_install() {
- ${_CMAKE_ECLASS_IMPL}_src_install "${_cmake_args[@]}"
+ cmake_src_install "${_cmake_args[@]}"
}
fi
diff --git a/eclass/cmake-utils.eclass b/eclass/cmake-utils.eclass
deleted file mode 100644
index 4ec3b900edaf..000000000000
--- a/eclass/cmake-utils.eclass
+++ /dev/null
@@ -1,850 +0,0 @@
-# Copyright 1999-2022 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-# @ECLASS: cmake-utils.eclass
-# @MAINTAINER:
-# kde@gentoo.org
-# @AUTHOR:
-# Tomáš Chvátal <scarabeus@gentoo.org>
-# Maciej Mrozowski <reavertm@gentoo.org>
-# (undisclosed contributors)
-# Original author: Zephyrus (zephyrus@mirach.it)
-# @SUPPORTED_EAPIS: 5 6 7
-# @PROVIDES: ninja-utils
-# @BLURB: common ebuild functions for cmake-based packages
-# @DEPRECATED: cmake.eclass
-# @DESCRIPTION:
-# DEPRECATED: This no longer receives any changes. Everyone must port to cmake.eclass.
-# The cmake-utils eclass makes creating ebuilds for cmake-based packages much easier.
-# It provides all inherited features (DOCS, HTML_DOCS, PATCHES) along with out-of-source
-# builds (default), in-source builds and an implementation of the well-known use_enable
-# and use_with functions for CMake.
-
-if [[ -z ${_CMAKE_UTILS_ECLASS} ]]; then
-_CMAKE_UTILS_ECLASS=1
-
-# @ECLASS_VARIABLE: BUILD_DIR
-# @DESCRIPTION:
-# Build directory where all cmake processed files should be generated.
-# For in-source build it's fixed to ${CMAKE_USE_DIR}.
-# For out-of-source build it can be overridden, by default it uses
-# ${WORKDIR}/${P}_build.
-#
-# This variable has been called CMAKE_BUILD_DIR formerly.
-# It is set under that name for compatibility.
-
-# @ECLASS_VARIABLE: CMAKE_BINARY
-# @DESCRIPTION:
-# Eclass can use different cmake binary than the one provided in by system.
-: ${CMAKE_BINARY:=cmake}
-
-# @ECLASS_VARIABLE: CMAKE_BUILD_TYPE
-# @DESCRIPTION:
-# Set to override default CMAKE_BUILD_TYPE. Only useful for packages
-# known to make use of "if (CMAKE_BUILD_TYPE MATCHES xxx)".
-# If about to be set - needs to be set before invoking cmake-utils_src_configure.
-# You usualy do *NOT* want nor need to set it as it pulls CMake default build-type
-# specific compiler flags overriding make.conf.
-: ${CMAKE_BUILD_TYPE:=Gentoo}
-
-# @ECLASS_VARIABLE: CMAKE_IN_SOURCE_BUILD
-# @DEFAULT_UNSET
-# @DESCRIPTION:
-# Set to enable in-source build.
-
-# @ECLASS_VARIABLE: CMAKE_MAKEFILE_GENERATOR
-# @DEFAULT_UNSET
-# @DESCRIPTION:
-# Specify a makefile generator to be used by cmake.
-# At this point only "emake" and "ninja" are supported.
-# In EAPI 7 and above, the default is set to "ninja",
-# whereas in EAPIs below 7, it is set to "emake".
-
-# @ECLASS_VARIABLE: CMAKE_MIN_VERSION
-# @DESCRIPTION:
-# Specify the minimum required CMake version.
-: ${CMAKE_MIN_VERSION:=3.9.6}
-
-# @ECLASS_VARIABLE: CMAKE_REMOVE_MODULES
-# @DESCRIPTION:
-# Do we want to remove anything? yes or whatever else for no
-: ${CMAKE_REMOVE_MODULES:=yes}
-
-# @ECLASS_VARIABLE: CMAKE_REMOVE_MODULES_LIST
-# @DESCRIPTION:
-# Space-separated list of CMake modules that will be removed in $S during src_prepare,
-# in order to force packages to use the system version.
-: ${CMAKE_REMOVE_MODULES_LIST:=FindBLAS FindLAPACK}
-
-# @ECLASS_VARIABLE: CMAKE_USE_DIR
-# @DESCRIPTION:
-# Sets the directory where we are working with cmake.
-# For example when application uses autotools and only one
-# plugin needs to be done by cmake.
-# By default it uses ${S}.
-
-# @ECLASS_VARIABLE: CMAKE_VERBOSE
-# @DESCRIPTION:
-# Set to OFF to disable verbose messages during compilation
-: ${CMAKE_VERBOSE:=ON}
-
-# @ECLASS_VARIABLE: CMAKE_WARN_UNUSED_CLI
-# @DESCRIPTION:
-# Warn about variables that are declared on the command line
-# but not used. Might give false-positives.
-# "no" to disable (default) or anything else to enable.
-
-# @ECLASS_VARIABLE: CMAKE_EXTRA_CACHE_FILE
-# @USER_VARIABLE
-# @DEFAULT_UNSET
-# @DESCRIPTION:
-# Specifies an extra cache file to pass to cmake. This is the analog of EXTRA_ECONF
-# for econf and is needed to pass TRY_RUN results when cross-compiling.
-# Should be set by user in a per-package basis in /etc/portage/package.env.
-
-# @ECLASS_VARIABLE: CMAKE_UTILS_QA_SRC_DIR_READONLY
-# @USER_VARIABLE
-# @DEFAULT_UNSET
-# @DESCRIPTION:
-# After running cmake-utils_src_prepare, sets ${S} to read-only. This is
-# a user flag and should under _no circumstances_ be set in the ebuild.
-# Helps in improving QA of build systems that write to source tree.
-
-case ${EAPI} in
- 5) : ${CMAKE_WARN_UNUSED_CLI:=no} ;;
- 6|7) : ${CMAKE_WARN_UNUSED_CLI:=yes} ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
-esac
-
-inherit toolchain-funcs ninja-utils flag-o-matic multiprocessing xdg-utils
-
-case ${EAPI} in
- [56])
- : ${CMAKE_MAKEFILE_GENERATOR:=emake}
- inherit eutils multilib
- ;;
- *)
- : ${CMAKE_MAKEFILE_GENERATOR:=ninja}
- ;;
-esac
-
-EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
-
-if [[ ${WANT_CMAKE} ]]; then
- if [[ ${EAPI} != [56] ]]; then
- die "\${WANT_CMAKE} has been removed and is a no-op now"
- else
- eqawarn "\${WANT_CMAKE} has been removed and is a no-op now"
- fi
-fi
-[[ ${PREFIX} ]] && die "\${PREFIX} has been removed and is a no-op now"
-
-case ${CMAKE_MAKEFILE_GENERATOR} in
- emake)
- BDEPEND="sys-devel/make"
- ;;
- ninja)
- BDEPEND="dev-util/ninja"
- ;;
- *)
- eerror "Unknown value for \${CMAKE_MAKEFILE_GENERATOR}"
- die "Value ${CMAKE_MAKEFILE_GENERATOR} is not supported"
- ;;
-esac
-
-if [[ ${PN} != cmake ]]; then
- BDEPEND+=" >=dev-util/cmake-${CMAKE_MIN_VERSION}"
-fi
-
-case ${EAPI} in
- 7) ;;
- *) DEPEND=" ${BDEPEND}" ;;
-esac
-
-# Internal functions used by cmake-utils_use_*
-_cmake_use_me_now() {
- debug-print-function ${FUNCNAME} "$@"
-
- local arg=$2
- [[ ! -z $3 ]] && arg=$3
-
- [[ ${EAPI} == 5 ]] || die "${FUNCNAME[1]} is banned in EAPI 6 and later: use -D$1<related_CMake_variable>=\"\$(usex $2)\" instead"
-
- local uper capitalised x
- [[ -z $2 ]] && die "cmake-utils_use-$1 <USE flag> [<flag name>]"
- if [[ ! -z $3 ]]; then
- # user specified the use name so use it
- echo "-D$1$3=$(use $2 && echo ON || echo OFF)"
- else
- # use all various most used combinations
- uper=$(echo ${2} | tr '[:lower:]' '[:upper:]')
- capitalised=$(echo ${2} | sed 's/\<\(.\)\([^ ]*\)/\u\1\L\2/g')
- for x in $2 $uper $capitalised; do
- echo "-D$1$x=$(use $2 && echo ON || echo OFF) "
- done
- fi
-}
-_cmake_use_me_now_inverted() {
- debug-print-function ${FUNCNAME} "$@"
-
- local arg=$2
- [[ ! -z $3 ]] && arg=$3
-
- if [[ ${EAPI} != 5 && "${FUNCNAME[1]}" != cmake-utils_use_find_package ]] ; then
- die "${FUNCNAME[1]} is banned in EAPI 6 and later: use -D$1<related_CMake_variable>=\"\$(usex $2)\" instead"
- fi
-
- local uper capitalised x
- [[ -z $2 ]] && die "cmake-utils_use-$1 <USE flag> [<flag name>]"
- if [[ ! -z $3 ]]; then
- # user specified the use name so use it
- echo "-D$1$3=$(use $2 && echo OFF || echo ON)"
- else
- # use all various most used combinations
- uper=$(echo ${2} | tr '[:lower:]' '[:upper:]')
- capitalised=$(echo ${2} | sed 's/\<\(.\)\([^ ]*\)/\u\1\L\2/g')
- for x in $2 $uper $capitalised; do
- echo "-D$1$x=$(use $2 && echo OFF || echo ON) "
- done
- fi
-}
-
-# Determine using IN or OUT source build
-_cmake_check_build_dir() {
- : ${CMAKE_USE_DIR:=${S}}
- if [[ -n ${CMAKE_IN_SOURCE_BUILD} ]]; then
- # we build in source dir
- BUILD_DIR="${CMAKE_USE_DIR}"
- else
- # Respect both the old variable and the new one, depending
- # on which one was set by the ebuild.
- if [[ ! ${BUILD_DIR} && ${CMAKE_BUILD_DIR} ]]; then
- if [[ ${EAPI} != [56] ]]; then
- eerror "The CMAKE_BUILD_DIR variable has been renamed to BUILD_DIR."
- die "The ebuild must be migrated to BUILD_DIR."
- else
- eqawarn "The CMAKE_BUILD_DIR variable has been renamed to BUILD_DIR."
- eqawarn "Please migrate the ebuild to use the new one."
- fi
-
- # In the next call, both variables will be set already
- # and we'd have to know which one takes precedence.
- _RESPECT_CMAKE_BUILD_DIR=1
- fi
-
- if [[ ${_RESPECT_CMAKE_BUILD_DIR} ]]; then
- BUILD_DIR=${CMAKE_BUILD_DIR:-${WORKDIR}/${P}_build}
- else
- : ${BUILD_DIR:=${WORKDIR}/${P}_build}
- fi
- fi
-
- # Backwards compatibility for getting the value.
- [[ ${EAPI} == [56] ]] && CMAKE_BUILD_DIR=${BUILD_DIR}
-
- mkdir -p "${BUILD_DIR}" || die
- echo ">>> Working in BUILD_DIR: \"$BUILD_DIR\""
-}
-
-# Determine which generator to use
-_cmake_generator_to_use() {
- local generator_name
-
- case ${CMAKE_MAKEFILE_GENERATOR} in
- ninja)
- # if ninja is enabled but not installed, the build could fail
- # this could happen if ninja is manually enabled (eg. make.conf) but not installed
- case ${EAPI} in
- 5|6)
- if ! ROOT=/ has_version dev-util/ninja; then
- die "CMAKE_MAKEFILE_GENERATOR is set to ninja, but ninja is not installed. Please install dev-util/ninja or unset CMAKE_MAKEFILE_GENERATOR."
- fi
- ;;
- *)
- if ! has_version -b dev-util/ninja; then
- die "CMAKE_MAKEFILE_GENERATOR is set to ninja, but ninja is not installed. Please install dev-util/ninja or unset CMAKE_MAKEFILE_GENERATOR."
- fi
- ;;
- esac
- generator_name="Ninja"
- ;;
- emake)
- generator_name="Unix Makefiles"
- ;;
- *)
- eerror "Unknown value for \${CMAKE_MAKEFILE_GENERATOR}"
- die "Value ${CMAKE_MAKEFILE_GENERATOR} is not supported"
- ;;
- esac
-
- echo ${generator_name}
-}
-
-# @FUNCTION: cmake_comment_add_subdirectory
-# @USAGE: <subdirectory>
-# @DESCRIPTION:
-# Comment out one or more add_subdirectory calls in CMakeLists.txt in the current directory
-cmake_comment_add_subdirectory() {
- if [[ -z ${1} ]]; then
- die "comment_add_subdirectory must be passed at least one directory name to comment"
- fi
-
- if [[ -e "CMakeLists.txt" ]]; then
- local d
- for d in $@; do
- sed -e "/add_subdirectory[[:space:]]*([[:space:]]*${d//\//\\/}[[:space:]]*)/I s/^/#DONOTCOMPILE /" \
- -i CMakeLists.txt || die "failed to comment add_subdirectory(${d})"
- done
- fi
-}
-
-# @FUNCTION: comment_add_subdirectory
-# @USAGE: <subdirectory>
-# @DESCRIPTION:
-# Comment out an add_subdirectory call in CMakeLists.txt in the current directory
-# Banned in EAPI 6 and later - use cmake_comment_add_subdirectory instead.
-comment_add_subdirectory() {
- [[ ${EAPI} == 5 ]] || die "comment_add_subdirectory is banned in EAPI 6 and later - use cmake_comment_add_subdirectory instead"
-
- cmake_comment_add_subdirectory "$@"
-}
-
-# @FUNCTION: cmake-utils_use_with
-# @USAGE: <USE flag> [flag name]
-# @DESCRIPTION:
-# Based on use_with. See ebuild(5).
-#
-# `cmake-utils_use_with foo FOO` echoes -DWITH_FOO=ON if foo is enabled
-# and -DWITH_FOO=OFF if it is disabled.
-cmake-utils_use_with() { _cmake_use_me_now WITH_ "$@" ; }
-
-# @FUNCTION: cmake-utils_use_enable
-# @USAGE: <USE flag> [flag name]
-# @DESCRIPTION:
-# Based on use_enable. See ebuild(5).
-#
-# `cmake-utils_use_enable foo FOO` echoes -DENABLE_FOO=ON if foo is enabled
-# and -DENABLE_FOO=OFF if it is disabled.
-cmake-utils_use_enable() { _cmake_use_me_now ENABLE_ "$@" ; }
-
-# @FUNCTION: cmake-utils_use_find_package
-# @USAGE: <USE flag> <package name>
-# @DESCRIPTION:
-# Based on use_enable. See ebuild(5).
-#
-# `cmake-utils_use_find_package foo LibFoo` echoes -DCMAKE_DISABLE_FIND_PACKAGE_LibFoo=OFF
-# if foo is enabled and -DCMAKE_DISABLE_FIND_PACKAGE_LibFoo=ON if it is disabled.
-# This can be used to make find_package optional.
-cmake-utils_use_find_package() {
- if [[ ${EAPI} != 5 && "$#" != 2 ]] ; then
- die "Usage: cmake-utils_use_find_package <USE flag> <package name>"
- fi
-
- _cmake_use_me_now_inverted CMAKE_DISABLE_FIND_PACKAGE_ "$@" ;
-}
-
-# @FUNCTION: cmake_use_find_package
-# @USAGE: <USE flag> <package name>
-# @DESCRIPTION:
-# Alias for cmake-utils_use_find_package.
-cmake_use_find_package() {
- if [[ "$#" != 2 ]] ; then
- die "Usage: cmake_use_find_package <USE flag> <package name>"
- fi
-
- cmake-utils_use_find_package "$@" ;
-}
-
-# @FUNCTION: cmake-utils_use_disable
-# @USAGE: <USE flag> [flag name]
-# @DESCRIPTION:
-# Based on inversion of use_enable. See ebuild(5).
-#
-# `cmake-utils_use_enable foo FOO` echoes -DDISABLE_FOO=OFF if foo is enabled
-# and -DDISABLE_FOO=ON if it is disabled.
-cmake-utils_use_disable() { _cmake_use_me_now_inverted DISABLE_ "$@" ; }
-
-# @FUNCTION: cmake-utils_use_no
-# @USAGE: <USE flag> [flag name]
-# @DESCRIPTION:
-# Based on use_disable. See ebuild(5).
-#
-# `cmake-utils_use_no foo FOO` echoes -DNO_FOO=OFF if foo is enabled
-# and -DNO_FOO=ON if it is disabled.
-cmake-utils_use_no() { _cmake_use_me_now_inverted NO_ "$@" ; }
-
-# @FUNCTION: cmake-utils_use_want
-# @USAGE: <USE flag> [flag name]
-# @DESCRIPTION:
-# Based on use_enable. See ebuild(5).
-#
-# `cmake-utils_use_want foo FOO` echoes -DWANT_FOO=ON if foo is enabled
-# and -DWANT_FOO=OFF if it is disabled.
-cmake-utils_use_want() { _cmake_use_me_now WANT_ "$@" ; }
-
-# @FUNCTION: cmake-utils_use_build
-# @USAGE: <USE flag> [flag name]
-# @DESCRIPTION:
-# Based on use_enable. See ebuild(5).
-#
-# `cmake-utils_use_build foo FOO` echoes -DBUILD_FOO=ON if foo is enabled
-# and -DBUILD_FOO=OFF if it is disabled.
-cmake-utils_use_build() { _cmake_use_me_now BUILD_ "$@" ; }
-
-# @FUNCTION: cmake-utils_use_has
-# @USAGE: <USE flag> [flag name]
-# @DESCRIPTION:
-# Based on use_enable. See ebuild(5).
-#
-# `cmake-utils_use_has foo FOO` echoes -DHAVE_FOO=ON if foo is enabled
-# and -DHAVE_FOO=OFF if it is disabled.
-cmake-utils_use_has() { _cmake_use_me_now HAVE_ "$@" ; }
-
-# @FUNCTION: cmake-utils_use_use
-# @USAGE: <USE flag> [flag name]
-# @DESCRIPTION:
-# Based on use_enable. See ebuild(5).
-#
-# `cmake-utils_use_use foo FOO` echoes -DUSE_FOO=ON if foo is enabled
-# and -DUSE_FOO=OFF if it is disabled.
-cmake-utils_use_use() { _cmake_use_me_now USE_ "$@" ; }
-
-# @FUNCTION: cmake-utils_use
-# @USAGE: <USE flag> [flag name]
-# @DESCRIPTION:
-# Based on use_enable. See ebuild(5).
-#
-# `cmake-utils_use foo FOO` echoes -DFOO=ON if foo is enabled
-# and -DFOO=OFF if it is disabled.
-cmake-utils_use() { _cmake_use_me_now "" "$@" ; }
-
-# @FUNCTION: cmake-utils_useno
-# @USAGE: <USE flag> [flag name]
-# @DESCRIPTION:
-# Based on use_enable. See ebuild(5).
-#
-# `cmake-utils_useno foo NOFOO` echoes -DNOFOO=OFF if foo is enabled
-# and -DNOFOO=ON if it is disabled.
-cmake-utils_useno() { _cmake_use_me_now_inverted "" "$@" ; }
-
-# Internal function for modifying hardcoded definitions.
-# Removes dangerous definitions that override Gentoo settings.
-_cmake_modify-cmakelists() {
- debug-print-function ${FUNCNAME} "$@"
-
- # Only edit the files once
- grep -qs "<<< Gentoo configuration >>>" "${CMAKE_USE_DIR}"/CMakeLists.txt && return 0
-
- # Comment out all set (<some_should_be_user_defined_variable> value)
- find "${CMAKE_USE_DIR}" -name CMakeLists.txt -exec sed \
- -e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_BUILD_TYPE\([[:space:]].*)\|)\)/I{s/^/#_cmake_modify_IGNORE /g}' \
- -e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_COLOR_MAKEFILE[[:space:]].*)/I{s/^/#_cmake_modify_IGNORE /g}' \
- -e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_INSTALL_PREFIX[[:space:]].*)/I{s/^/#_cmake_modify_IGNORE /g}' \
- -e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_VERBOSE_MAKEFILE[[:space:]].*)/I{s/^/#_cmake_modify_IGNORE /g}' \
- -i {} + || die "${LINENO}: failed to disable hardcoded settings"
- local x
- for x in $(find "${CMAKE_USE_DIR}" -name CMakeLists.txt -exec grep -l "^#_cmake_modify_IGNORE" {} +;); do
- einfo "Hardcoded definition(s) removed in $(echo "${x}" | cut -c $((${#CMAKE_USE_DIR}+2))-):"
- einfo "$(grep -se '^#_cmake_modify_IGNORE' ${x} | cut -c 22-99)"
- done
-
- # NOTE Append some useful summary here
- cat >> "${CMAKE_USE_DIR}"/CMakeLists.txt <<- _EOF_ || die
-
- MESSAGE(STATUS "<<< Gentoo configuration >>>
- Build type \${CMAKE_BUILD_TYPE}
- Install path \${CMAKE_INSTALL_PREFIX}
- Compiler flags:
- C \${CMAKE_C_FLAGS}
- C++ \${CMAKE_CXX_FLAGS}
- Linker flags:
- Executable \${CMAKE_EXE_LINKER_FLAGS}
- Module \${CMAKE_MODULE_LINKER_FLAGS}
- Shared \${CMAKE_SHARED_LINKER_FLAGS}\n")
- _EOF_
-}
-
-# temporary function for moving cmake cleanups from from src_configure -> src_prepare.
-# bug #378850
-_cmake_cleanup_cmake() {
- : ${CMAKE_USE_DIR:=${S}}
-
- if [[ "${CMAKE_REMOVE_MODULES}" == "yes" ]] ; then
- local name
- for name in ${CMAKE_REMOVE_MODULES_LIST} ; do
- find "${S}" -name ${name}.cmake -exec rm -v {} + || die
- done
- fi
-
- # check if CMakeLists.txt exist and if no then die
- if [[ ! -e ${CMAKE_USE_DIR}/CMakeLists.txt ]] ; then
- eerror "Unable to locate CMakeLists.txt under:"
- eerror "\"${CMAKE_USE_DIR}/CMakeLists.txt\""
- eerror "Consider not inheriting the cmake eclass."
- die "FATAL: Unable to find CMakeLists.txt"
- fi
-
- # Remove dangerous things.
- _cmake_modify-cmakelists
-}
-
-# @FUNCTION: cmake-utils_src_prepare
-# @DESCRIPTION:
-# Apply ebuild and user patches.
-cmake-utils_src_prepare() {
- debug-print-function ${FUNCNAME} "$@"
-
- pushd "${S}" > /dev/null || die
-
- if [[ ${EAPI} != 5 ]]; then
- default_src_prepare
- _cmake_cleanup_cmake
- else
- debug-print "$FUNCNAME: PATCHES=$PATCHES"
- [[ ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
-
- debug-print "$FUNCNAME: applying user patches"
- epatch_user
- fi
-
- popd > /dev/null || die
-
- # make ${S} read-only in order to detect broken build-systems
- if [[ ${CMAKE_UTILS_QA_SRC_DIR_READONLY} && ! ${CMAKE_IN_SOURCE_BUILD} ]]; then
- chmod -R a-w "${S}"
- fi
-
- _CMAKE_UTILS_SRC_PREPARE_HAS_RUN=1
-}
-
-# @VARIABLE: mycmakeargs
-# @DEFAULT_UNSET
-# @DESCRIPTION:
-# Optional cmake defines as a bash array. Should be defined before calling
-# src_configure.
-# @CODE
-# src_configure() {
-# local mycmakeargs=(
-# $(cmake-utils_use_with openconnect)
-# )
-#
-# cmake-utils_src_configure
-# }
-# @CODE
-
-# @FUNCTION: cmake-utils_src_configure
-# @DESCRIPTION:
-# General function for configuring with cmake. Default behaviour is to start an
-# out-of-source build.
-cmake-utils_src_configure() {
- debug-print-function ${FUNCNAME} "$@"
-
- if [[ ! ${_CMAKE_UTILS_SRC_PREPARE_HAS_RUN} ]]; then
- if [[ ${EAPI} != [56] ]]; then
- die "FATAL: cmake-utils_src_prepare has not been run"
- else
- eqawarn "cmake-utils_src_prepare has not been run, please open a bug on https://bugs.gentoo.org/"
- fi
- fi
-
- [[ ${EAPI} == 5 ]] && _cmake_cleanup_cmake
-
- _cmake_check_build_dir
-
- # Fix xdg collision with sandbox
- xdg_environment_reset
-
- # @SEE CMAKE_BUILD_TYPE
- if [[ ${CMAKE_BUILD_TYPE} = Gentoo ]]; then
- # Handle release builds
- if ! has debug ${IUSE//+} || ! use debug; then
- local CPPFLAGS=${CPPFLAGS}
- append-cppflags -DNDEBUG
- fi
- fi
-
- # Prepare Gentoo override rules (set valid compiler, append CPPFLAGS etc.)
- local build_rules=${BUILD_DIR}/gentoo_rules.cmake
-
- cat > "${build_rules}" <<- _EOF_ || die
- SET (CMAKE_ASM_COMPILE_OBJECT "<CMAKE_ASM_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "ASM compile command" FORCE)
- SET (CMAKE_ASM-ATT_COMPILE_OBJECT "<CMAKE_ASM-ATT_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c -x assembler <SOURCE>" CACHE STRING "ASM-ATT compile command" FORCE)
- SET (CMAKE_ASM-ATT_LINK_FLAGS "-nostdlib" CACHE STRING "ASM-ATT link flags" FORCE)
- SET (CMAKE_C_COMPILE_OBJECT "<CMAKE_C_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "C compile command" FORCE)
- SET (CMAKE_CXX_COMPILE_OBJECT "<CMAKE_CXX_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "C++ compile command" FORCE)
- SET (CMAKE_Fortran_COMPILE_OBJECT "<CMAKE_Fortran_COMPILER> <DEFINES> <INCLUDES> ${FCFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "Fortran compile command" FORCE)
- _EOF_
-
- local myCC=$(tc-getCC) myCXX=$(tc-getCXX) myFC=$(tc-getFC)
-
- # !!! IMPORTANT NOTE !!!
- # Single slash below is intentional. CMake is weird and wants the
- # CMAKE_*_VARIABLES split into two elements: the first one with
- # compiler path, and the second one with all command-line options,
- # space separated.
- local toolchain_file=${BUILD_DIR}/gentoo_toolchain.cmake
- cat > ${toolchain_file} <<- _EOF_ || die
- SET (CMAKE_ASM_COMPILER "${myCC/ /;}")
- SET (CMAKE_ASM-ATT_COMPILER "${myCC/ /;}")
- SET (CMAKE_C_COMPILER "${myCC/ /;}")
- SET (CMAKE_CXX_COMPILER "${myCXX/ /;}")
- SET (CMAKE_Fortran_COMPILER "${myFC/ /;}")
- SET (CMAKE_AR $(type -P $(tc-getAR)) CACHE FILEPATH "Archive manager" FORCE)
- SET (CMAKE_RANLIB $(type -P $(tc-getRANLIB)) CACHE FILEPATH "Archive index generator" FORCE)
- SET (CMAKE_SYSTEM_PROCESSOR "${CHOST%%-*}")
- _EOF_
-
- # We are using the C compiler for assembly by default.
- local -x ASMFLAGS=${CFLAGS}
- local -x PKG_CONFIG=$(tc-getPKG_CONFIG)
-
- if tc-is-cross-compiler; then
- local sysname
- case "${KERNEL:-linux}" in
- Cygwin) sysname="CYGWIN_NT-5.1" ;;
- HPUX) sysname="HP-UX" ;;
- linux) sysname="Linux" ;;
- Winnt)
- sysname="Windows"
- cat >> "${toolchain_file}" <<- _EOF_ || die
- SET (CMAKE_RC_COMPILER $(tc-getRC))
- _EOF_
- ;;
- *) sysname="${KERNEL}" ;;
- esac
-
- cat >> "${toolchain_file}" <<- _EOF_ || die
- SET (CMAKE_SYSTEM_NAME "${sysname}")
- _EOF_
-
- if [ "${SYSROOT:-/}" != "/" ] ; then
- # When cross-compiling with a sysroot (e.g. with crossdev's emerge wrappers)
- # we need to tell cmake to use libs/headers from the sysroot but programs from / only.
- cat >> "${toolchain_file}" <<- _EOF_ || die
- SET (CMAKE_FIND_ROOT_PATH "${SYSROOT}")
- SET (CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
- SET (CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
- SET (CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
- _EOF_
- fi
- fi
-
- if use prefix-guest; then
- cat >> "${build_rules}" <<- _EOF_ || die
- # in Prefix we need rpath and must ensure cmake gets our default linker path
- # right ... except for Darwin hosts
- IF (NOT APPLE)
- SET (CMAKE_SKIP_RPATH OFF CACHE BOOL "" FORCE)
- SET (CMAKE_PLATFORM_REQUIRED_RUNTIME_PATH "${EPREFIX}/usr/${CHOST}/lib/gcc;${EPREFIX}/usr/${CHOST}/lib;${EPREFIX}/usr/$(get_libdir);${EPREFIX}/$(get_libdir)"
- CACHE STRING "" FORCE)
-
- ELSE ()
-
- SET (CMAKE_PREFIX_PATH "${EPREFIX}/usr" CACHE STRING "" FORCE)
- SET (CMAKE_MACOSX_RPATH ON CACHE BOOL "" FORCE)
- SET (CMAKE_SKIP_BUILD_RPATH OFF CACHE BOOL "" FORCE)
- SET (CMAKE_SKIP_RPATH OFF CACHE BOOL "" FORCE)
- SET (CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE CACHE BOOL "" FORCE)
-
- ENDIF (NOT APPLE)
- _EOF_
- fi
-
- # Common configure parameters (invariants)
- local common_config=${BUILD_DIR}/gentoo_common_config.cmake
- local libdir=$(get_libdir)
- cat > "${common_config}" <<- _EOF_ || die
- SET (CMAKE_GENTOO_BUILD ON CACHE BOOL "Indicate Gentoo package build")
- SET (LIB_SUFFIX ${libdir/lib} CACHE STRING "library path suffix" FORCE)
- SET (CMAKE_INSTALL_LIBDIR ${libdir} CACHE PATH "Output directory for libraries")
- SET (CMAKE_INSTALL_INFODIR "${EPREFIX}/usr/share/info" CACHE PATH "")
- SET (CMAKE_INSTALL_MANDIR "${EPREFIX}/usr/share/man" CACHE PATH "")
- SET (CMAKE_USER_MAKE_RULES_OVERRIDE "${build_rules}" CACHE FILEPATH "Gentoo override rules")
- _EOF_
-
- # See bug 689410
- if [[ "${ARCH}" == riscv ]]; then
- echo 'SET (CMAKE_FIND_LIBRARY_CUSTOM_LIB_SUFFIX '"${libdir#lib}"' CACHE STRING "library search suffix" FORCE)' >> "${common_config}" || die
- fi
-
- [[ "${NOCOLOR}" = true || "${NOCOLOR}" = yes ]] && echo 'SET (CMAKE_COLOR_MAKEFILE OFF CACHE BOOL "pretty colors during make" FORCE)' >> "${common_config}"
-
- if [[ ${EAPI} != [56] ]]; then
- cat >> "${common_config}" <<- _EOF_ || die
- SET (CMAKE_INSTALL_DOCDIR "${EPREFIX}/usr/share/doc/${PF}" CACHE PATH "")
- SET (BUILD_SHARED_LIBS ON CACHE BOOL "")
- _EOF_
- fi
-
- # Wipe the default optimization flags out of CMake
- if [[ ${CMAKE_BUILD_TYPE} != Gentoo && ${EAPI} != 5 ]]; then
- cat >> ${common_config} <<- _EOF_ || die
- SET (CMAKE_ASM_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
- SET (CMAKE_ASM-ATT_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
- SET (CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
- SET (CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
- SET (CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
- SET (CMAKE_EXE_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
- SET (CMAKE_MODULE_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
- SET (CMAKE_SHARED_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
- SET (CMAKE_STATIC_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
- _EOF_
- fi
-
- # Convert mycmakeargs to an array, for backwards compatibility
- # Make the array a local variable since <=portage-2.1.6.x does not
- # support global arrays (see bug #297255).
- local mycmakeargstype=$(declare -p mycmakeargs 2>&-)
- if [[ "${mycmakeargstype}" != "declare -a mycmakeargs="* ]]; then
- if [[ -n "${mycmakeargstype}" ]] ; then
- if [[ ${EAPI} == 5 ]]; then
- eqawarn "Declaring mycmakeargs as a variable is deprecated. Please use an array instead."
- else
- die "Declaring mycmakeargs as a variable is banned in EAPI=${EAPI}. Please use an array instead."
- fi
- fi
- local mycmakeargs_local=(${mycmakeargs})
- else
- local mycmakeargs_local=("${mycmakeargs[@]}")
- fi
-
- if [[ ${CMAKE_WARN_UNUSED_CLI} == no ]] ; then
- local warn_unused_cli="--no-warn-unused-cli"
- else
- local warn_unused_cli=""
- fi
-
- # Common configure parameters (overridable)
- # NOTE CMAKE_BUILD_TYPE can be only overridden via CMAKE_BUILD_TYPE eclass variable
- # No -DCMAKE_BUILD_TYPE=xxx definitions will be in effect.
- local cmakeargs=(
- ${warn_unused_cli}
- -C "${common_config}"
- -G "$(_cmake_generator_to_use)"
- -DCMAKE_INSTALL_PREFIX="${EPREFIX}/usr"
- "${mycmakeargs_local[@]}"
- -DCMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}"
- $([[ ${EAPI} == 5 ]] && echo -DCMAKE_INSTALL_DO_STRIP=OFF)
- -DCMAKE_TOOLCHAIN_FILE="${toolchain_file}"
- "${MYCMAKEARGS}"
- )
-
- if [[ -n "${CMAKE_EXTRA_CACHE_FILE}" ]] ; then
- cmakeargs+=( -C "${CMAKE_EXTRA_CACHE_FILE}" )
- fi
-
- pushd "${BUILD_DIR}" > /dev/null || die
- debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: mycmakeargs is ${mycmakeargs_local[*]}"
- echo "${CMAKE_BINARY}" "${cmakeargs[@]}" "${CMAKE_USE_DIR}"
- "${CMAKE_BINARY}" "${cmakeargs[@]}" "${CMAKE_USE_DIR}" || die "cmake failed"
- popd > /dev/null || die
-}
-
-# @FUNCTION: cmake-utils_src_compile
-# @DESCRIPTION:
-# General function for compiling with cmake.
-# Automatically detects the build type. All arguments are passed to emake.
-cmake-utils_src_compile() {
- debug-print-function ${FUNCNAME} "$@"
-
- cmake-utils_src_make "$@"
-}
-
-# @FUNCTION: _cmake_ninja_src_make
-# @INTERNAL
-# @DESCRIPTION:
-# Build the package using ninja generator
-_cmake_ninja_src_make() {
- debug-print-function ${FUNCNAME} "$@"
-
- [[ -e build.ninja ]] || die "build.ninja not found. Error during configure stage."
-
- eninja "$@"
-}
-
-# @FUNCTION: _cmake_emake_src_make
-# @INTERNAL
-# @DESCRIPTION:
-# Build the package using make generator
-_cmake_emake_src_make() {
- debug-print-function ${FUNCNAME} "$@"
-
- [[ -e Makefile ]] || die "Makefile not found. Error during configure stage."
-
- if [[ "${CMAKE_VERBOSE}" != "OFF" ]]; then
- emake VERBOSE=1 "$@" || die
- else
- emake "$@" || die
- fi
-
-}
-
-# @FUNCTION: cmake-utils_src_make
-# @DESCRIPTION:
-# Function for building the package. Automatically detects the build type.
-# All arguments are passed to emake.
-cmake-utils_src_make() {
- debug-print-function ${FUNCNAME} "$@"
-
- _cmake_check_build_dir
- pushd "${BUILD_DIR}" > /dev/null || die
-
- _cmake_${CMAKE_MAKEFILE_GENERATOR}_src_make "$@"
-
- popd > /dev/null || die
-}
-
-# @FUNCTION: cmake-utils_src_test
-# @DESCRIPTION:
-# Function for testing the package. Automatically detects the build type.
-cmake-utils_src_test() {
- debug-print-function ${FUNCNAME} "$@"
-
- _cmake_check_build_dir
- pushd "${BUILD_DIR}" > /dev/null || die
- [[ -e CTestTestfile.cmake ]] || { echo "No tests found. Skipping."; return 0 ; }
-
- [[ -n ${TEST_VERBOSE} ]] && myctestargs+=( --extra-verbose --output-on-failure )
-
- set -- ctest -j "$(makeopts_jobs "${MAKEOPTS}" 999)" \
- --test-load "$(makeopts_loadavg)" "${myctestargs[@]}" "$@"
- echo "$@" >&2
- if "$@" ; then
- einfo "Tests succeeded."
- popd > /dev/null || die
- return 0
- else
- if [[ -n "${CMAKE_YES_I_WANT_TO_SEE_THE_TEST_LOG}" ]] ; then
- # on request from Diego
- eerror "Tests failed. Test log ${BUILD_DIR}/Testing/Temporary/LastTest.log follows:"
- eerror "--START TEST LOG--------------------------------------------------------------"
- cat "${BUILD_DIR}/Testing/Temporary/LastTest.log"
- eerror "--END TEST LOG----------------------------------------------------------------"
- die "Tests failed."
- else
- die "Tests failed. When you file a bug, please attach the following file: \n\t${BUILD_DIR}/Testing/Temporary/LastTest.log"
- fi
-
- # die might not die due to nonfatal
- popd > /dev/null || die
- return 1
- fi
-}
-
-# @FUNCTION: cmake-utils_src_install
-# @DESCRIPTION:
-# Function for installing the package. Automatically detects the build type.
-cmake-utils_src_install() {
- debug-print-function ${FUNCNAME} "$@"
-
- _cmake_check_build_dir
- pushd "${BUILD_DIR}" > /dev/null || die
- DESTDIR="${D}" ${CMAKE_MAKEFILE_GENERATOR} install "$@" || die "died running ${CMAKE_MAKEFILE_GENERATOR} install"
- popd > /dev/null || die
-
- pushd "${S}" > /dev/null || die
- einstalldocs
- popd > /dev/null || die
-}
-
-fi
diff --git a/eclass/cmake.eclass b/eclass/cmake.eclass
index 59e5b60957c2..908e2356ead2 100644
--- a/eclass/cmake.eclass
+++ b/eclass/cmake.eclass
@@ -1,9 +1,10 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cmake.eclass
# @MAINTAINER:
# kde@gentoo.org
+# base-system@gentoo.org
# @AUTHOR:
# Tomáš Chvátal <scarabeus@gentoo.org>
# Maciej Mrozowski <reavertm@gentoo.org>
@@ -15,12 +16,11 @@
# @DESCRIPTION:
# The cmake eclass makes creating ebuilds for cmake-based packages much easier.
# It provides all inherited features (DOCS, HTML_DOCS, PATCHES) along with
-# out-of-source builds (default), in-source builds and an implementation of the
-# well-known use_enable function for CMake.
+# out-of-source builds (default) and in-source builds.
case ${EAPI} in
7|8) ;;
- *) die "${ECLASS}: EAPI=${EAPI:-0} is not supported" ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_CMAKE_ECLASS} ]]; then
@@ -35,15 +35,15 @@ inherit flag-o-matic multiprocessing ninja-utils toolchain-funcs xdg-utils
# For in-source build it's fixed to ${CMAKE_USE_DIR}.
# For out-of-source build it can be overridden, by default it uses
# ${CMAKE_USE_DIR}_build (in EAPI-7: ${WORKDIR}/${P}_build).
-[[ ${EAPI} == 7 ]] && : ${BUILD_DIR:=${WORKDIR}/${P}_build}
+[[ ${EAPI} == 7 ]] && : "${BUILD_DIR:=${WORKDIR}/${P}_build}"
# EAPI-8: set inside _cmake_check_build_dir
# @ECLASS_VARIABLE: CMAKE_BINARY
# @DESCRIPTION:
# Eclass can use different cmake binary than the one provided in by system.
-: ${CMAKE_BINARY:=cmake}
+: "${CMAKE_BINARY:=cmake}"
-[[ ${EAPI} == 7 ]] && : ${CMAKE_BUILD_TYPE:=Gentoo}
+[[ ${EAPI} == 7 ]] && : "${CMAKE_BUILD_TYPE:=Gentoo}"
# @ECLASS_VARIABLE: CMAKE_BUILD_TYPE
# @DESCRIPTION:
# Set to override default CMAKE_BUILD_TYPE. Only useful for packages
@@ -55,7 +55,7 @@ inherit flag-o-matic multiprocessing ninja-utils toolchain-funcs xdg-utils
# build type to achieve desirable results.
#
# In EAPI 7, the default was non-standard build type of Gentoo.
-: ${CMAKE_BUILD_TYPE:=RelWithDebInfo}
+: "${CMAKE_BUILD_TYPE:=RelWithDebInfo}"
# @ECLASS_VARIABLE: CMAKE_IN_SOURCE_BUILD
# @DEFAULT_UNSET
@@ -69,7 +69,7 @@ inherit flag-o-matic multiprocessing ninja-utils toolchain-funcs xdg-utils
# Specify a makefile generator to be used by cmake.
# At this point only "emake" and "ninja" are supported.
# The default is set to "ninja".
-: ${CMAKE_MAKEFILE_GENERATOR:=ninja}
+: "${CMAKE_MAKEFILE_GENERATOR:=ninja}"
# @ECLASS_VARIABLE: CMAKE_REMOVE_MODULES_LIST
# @PRE_INHERIT
@@ -97,16 +97,17 @@ fi
# By default it uses current working directory (in EAPI-7: ${S}).
# @ECLASS_VARIABLE: CMAKE_VERBOSE
+# @USER_VARIABLE
# @DESCRIPTION:
# Set to OFF to disable verbose messages during compilation
-: ${CMAKE_VERBOSE:=ON}
+: "${CMAKE_VERBOSE:=ON}"
# @ECLASS_VARIABLE: CMAKE_WARN_UNUSED_CLI
# @DESCRIPTION:
# Warn about variables that are declared on the command line
# but not used. Might give false-positives.
# "no" to disable (default) or anything else to enable.
-: ${CMAKE_WARN_UNUSED_CLI:=yes}
+: "${CMAKE_WARN_UNUSED_CLI:=yes}"
# @ECLASS_VARIABLE: CMAKE_EXTRA_CACHE_FILE
# @USER_VARIABLE
@@ -124,7 +125,12 @@ fi
# read-only. This is a user flag and should under _no circumstances_ be set in
# the ebuild. Helps in improving QA of build systems that write to source tree.
-[[ ${CMAKE_MIN_VERSION} ]] && die "CMAKE_MIN_VERSION is banned; if necessary, set BDEPEND=\">=dev-util/cmake-${CMAKE_MIN_VERSION}\" directly"
+# @ECLASS_VARIABLE: CMAKE_SKIP_TESTS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array of tests that should be skipped when running CTest.
+
+[[ ${CMAKE_MIN_VERSION} ]] && die "CMAKE_MIN_VERSION is banned; if necessary, set BDEPEND=\">=dev-build/cmake-${CMAKE_MIN_VERSION}\" directly"
[[ ${CMAKE_BUILD_DIR} ]] && die "The ebuild must be migrated to BUILD_DIR"
[[ ${CMAKE_REMOVE_MODULES} ]] && die "CMAKE_REMOVE_MODULES is banned, set CMAKE_REMOVE_MODULES_LIST array instead"
[[ ${CMAKE_UTILS_QA_SRC_DIR_READONLY} ]] && die "Use CMAKE_QA_SRC_DIR_READONLY instead"
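
A minimal sketch of how an ebuild might consume the new CMAKE_SKIP_TESTS
variable (the test names are made up); the eclass joins the entries into a
single ctest -E '(...)' exclusion, as the cmake_src_test hunk further down
shows:

    src_test() {
        # skip tests that need network access (hypothetical names)
        local CMAKE_SKIP_TESTS=(
            test_download
            test_dns
        )
        cmake_src_test
    }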
@@ -133,10 +139,10 @@ fi
case ${CMAKE_MAKEFILE_GENERATOR} in
emake)
- BDEPEND="sys-devel/make"
+ BDEPEND="dev-build/make"
;;
ninja)
- BDEPEND="dev-util/ninja"
+ BDEPEND="${NINJA_DEPEND}"
;;
*)
eerror "Unknown value for \${CMAKE_MAKEFILE_GENERATOR}"
@@ -145,7 +151,7 @@ case ${CMAKE_MAKEFILE_GENERATOR} in
esac
if [[ ${PN} != cmake ]]; then
- BDEPEND+=" >=dev-util/cmake-3.20.5"
+ BDEPEND+=" >=dev-build/cmake-3.20.5"
fi
# @FUNCTION: cmake_run_in
@@ -283,15 +289,24 @@ cmake-utils_useno() { _cmake_banned_func "" "$@" ; }
# Determine using IN or OUT source build
_cmake_check_build_dir() {
if [[ ${EAPI} == 7 ]]; then
- : ${CMAKE_USE_DIR:=${S}}
+ : "${CMAKE_USE_DIR:=${S}}"
else
- : ${CMAKE_USE_DIR:=${PWD}}
+ : "${CMAKE_USE_DIR:=${PWD}}"
fi
if [[ -n ${CMAKE_IN_SOURCE_BUILD} ]]; then
# we build in source dir
BUILD_DIR="${CMAKE_USE_DIR}"
else
- : ${BUILD_DIR:=${CMAKE_USE_DIR}_build}
+ : "${BUILD_DIR:=${CMAKE_USE_DIR}_build}"
+
+ # Avoid creating ${WORKDIR}_build (which is above WORKDIR).
+ # TODO: For EAPI > 8, we should ban S=WORKDIR for CMake.
+ # See bug #889420.
+ if [[ ${S} == "${WORKDIR}" && ${BUILD_DIR} == "${WORKDIR}_build" ]] ; then
+ eqawarn "QA notice: S=WORKDIR is deprecated for cmake.eclass."
+ eqawarn "Please relocate the sources in src_unpack."
+ BUILD_DIR="${WORKDIR}"/${P}_build
+ fi
fi
einfo "Source directory (CMAKE_USE_DIR): \"${CMAKE_USE_DIR}\""
@@ -349,13 +364,13 @@ cmake_src_prepare() {
if [[ ${EAPI} == 7 ]]; then
pushd "${S}" > /dev/null || die # workaround from cmake-utils
# in EAPI-8, we use current working directory instead, bug #704524
- # esp. test with 'special' pkgs like: app-arch/brotli, media-gfx/gmic, net-libs/quiche
+ # esp. test with 'special' pkgs like: app-arch/brotli, net-libs/quiche
fi
_cmake_check_build_dir
default_src_prepare
- # check if CMakeLists.txt exist and if no then die
+ # check if CMakeLists.txt exists and if not then die
if [[ ! -e ${CMAKE_USE_DIR}/CMakeLists.txt ]] ; then
eerror "Unable to locate CMakeLists.txt under:"
eerror "\"${CMAKE_USE_DIR}/CMakeLists.txt\""
@@ -363,13 +378,6 @@ cmake_src_prepare() {
die "FATAL: Unable to find CMakeLists.txt"
fi
- # if ninja is enabled but not installed, the build could fail
- # this could happen if ninja is manually enabled (eg. make.conf) but not installed
- if [[ ${CMAKE_MAKEFILE_GENERATOR} == ninja ]] && ! has_version -b dev-util/ninja; then
- eerror "CMAKE_MAKEFILE_GENERATOR is set to ninja, but ninja is not installed."
- die "Please install dev-util/ninja or unset CMAKE_MAKEFILE_GENERATOR."
- fi
-
local modules_list
if [[ ${EAPI} == 7 && $(declare -p CMAKE_REMOVE_MODULES_LIST) != "declare -a"* ]]; then
modules_list=( ${CMAKE_REMOVE_MODULES_LIST} )
@@ -490,17 +498,17 @@ cmake_src_configure() {
cat >> "${toolchain_file}" <<- _EOF_ || die
set(CMAKE_SYSTEM_NAME "${sysname}")
_EOF_
+ fi
- if [ "${SYSROOT:-/}" != "/" ] ; then
- # When cross-compiling with a sysroot (e.g. with crossdev's emerge wrappers)
- # we need to tell cmake to use libs/headers from the sysroot but programs from / only.
- cat >> "${toolchain_file}" <<- _EOF_ || die
- set(CMAKE_FIND_ROOT_PATH "${SYSROOT}")
- set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
- set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
- set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
- _EOF_
- fi
+ if [[ ${SYSROOT:-/} != / ]] ; then
+ # When building with a sysroot (e.g. with crossdev's emerge wrappers)
+ # we need to tell cmake to use libs/headers from the sysroot but programs from / only.
+ cat >> "${toolchain_file}" <<- _EOF_ || die
+ set(CMAKE_SYSROOT "${ESYSROOT}")
+ set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+ set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+ set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+ _EOF_
fi
if use prefix-guest; then
@@ -532,6 +540,9 @@ cmake_src_configure() {
set(CMAKE_USER_MAKE_RULES_OVERRIDE "${build_rules}" CACHE FILEPATH "Gentoo override rules")
set(CMAKE_INSTALL_DOCDIR "${EPREFIX}/usr/share/doc/${PF}" CACHE PATH "")
set(BUILD_SHARED_LIBS ON CACHE BOOL "")
+ set(Python3_FIND_UNVERSIONED_NAMES FIRST CACHE STRING "")
+ set(FETCHCONTENT_FULLY_DISCONNECTED ON CACHE BOOL "")
+ set(CMAKE_DISABLE_PRECOMPILE_HEADERS ON CACHE BOOL "")
_EOF_
if [[ -n ${_ECM_ECLASS} ]]; then
@@ -601,9 +612,9 @@ cmake_src_configure() {
-DCMAKE_TOOLCHAIN_FILE="${toolchain_file}"
)
- if [[ -n ${MYCMAKEARGS} ]] ; then
- cmakeargs+=( "${MYCMAKEARGS}" )
- fi
+ # Handle quoted whitespace
+ eval "local -a MYCMAKEARGS=( ${MYCMAKEARGS} )"
+ cmakeargs+=( "${MYCMAKEARGS[@]}" )
if [[ -n "${CMAKE_EXTRA_CACHE_FILE}" ]] ; then
cmakeargs+=( -C "${CMAKE_EXTRA_CACHE_FILE}" )
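
The eval above is there so that quoted whitespace inside a user-set
MYCMAKEARGS survives word splitting; a small, self-contained illustration
(the values are made up):

    demo() {
        local MYCMAKEARGS='-DFOO=ON -DCMAKE_CXX_FLAGS="-O2 -g"'
        eval "local -a args=( ${MYCMAKEARGS} )"
        printf '<%s>\n' "${args[@]}"
    }
    demo
    # <-DFOO=ON>
    # <-DCMAKE_CXX_FLAGS=-O2 -g>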
@@ -647,7 +658,10 @@ cmake_build() {
;;
ninja)
[[ -e build.ninja ]] || die "build.ninja not found. Error during configure stage."
- eninja "$@"
+ case ${CMAKE_VERBOSE} in
+ OFF) NINJA_VERBOSE=OFF eninja "$@" ;;
+ *) eninja "$@" ;;
+ esac
;;
esac
@@ -673,6 +687,7 @@ cmake_src_test() {
[[ -e CTestTestfile.cmake ]] || { echo "No tests found. Skipping."; return 0 ; }
[[ -n ${TEST_VERBOSE} ]] && myctestargs+=( --extra-verbose --output-on-failure )
+ [[ -n ${CMAKE_SKIP_TESTS} ]] && myctestargs+=( -E '('$( IFS='|'; echo "${CMAKE_SKIP_TESTS[*]}")')' )
set -- ctest -j "$(makeopts_jobs "${MAKEOPTS}" 999)" \
--test-load "$(makeopts_loadavg)" "${myctestargs[@]}" "$@"
@@ -705,11 +720,7 @@ cmake_src_test() {
cmake_src_install() {
debug-print-function ${FUNCNAME} "$@"
- _cmake_check_build_dir
- pushd "${BUILD_DIR}" > /dev/null || die
- DESTDIR="${D}" ${CMAKE_MAKEFILE_GENERATOR} install "$@" ||
- die "died running ${CMAKE_MAKEFILE_GENERATOR} install"
- popd > /dev/null || die
+ DESTDIR="${D}" cmake_build install "$@"
if [[ ${EAPI} == 7 ]]; then
pushd "${S}" > /dev/null || die
diff --git a/eclass/common-lisp-3.eclass b/eclass/common-lisp-3.eclass
index df624d51607c..26d31268a598 100644
--- a/eclass/common-lisp-3.eclass
+++ b/eclass/common-lisp-3.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: common-lisp-3.eclass
@@ -11,19 +11,17 @@
# to provide a simple way to write ebuilds with these characteristics.
case ${EAPI} in
- [67]) ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
+ 6|7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-inherit eutils
-
if [[ -z ${_COMMON_LISP_3_ECLASS} ]]; then
_COMMON_LISP_3_ECLASS=1
# @ECLASS_VARIABLE: CLIMPLEMENTATIONS
# @DESCRIPTION:
# Common Lisp implementations
-CLIMPLEMENTATIONS="sbcl clisp clozurecl cmucl ecls gcl abcl"
+CLIMPLEMENTATIONS="sbcl clisp clozurecl cmucl ecl gcl abcl"
# @ECLASS_VARIABLE: CLSOURCEROOT
# @DESCRIPTION:
@@ -128,8 +126,16 @@ common-lisp-install-sources() {
common-lisp-install-one-source ${fpredicate} "${path}" "$(dirname "${path}")"
elif [[ -d ${path} ]] ; then
local files
- readarray -d '' files < <(find "${path}" -type f -print0 || die "cannot traverse ${path}" )
- common-lisp-install-sources -t ${ftype} "${files[@]}" || die
+ # test can be dropped in EAPI 8 which guarantees bash-5.0
+ if [[ ${BASH_VERSINFO[0]} -ge 5 ]]; then
+ readarray -d '' files < <(find "${path}" -type f -print0 \
+ || die "cannot traverse ${path}")
+ else
+ # readarray has no -d option in bash-4.2
+ readarray -t files < <(find "${path}" -type f -print \
+ || die "cannot traverse ${path}")
+ fi
+ common-lisp-install-sources -t ${ftype} "${files[@]}"
else
die "${path} is neither a regular file nor a directory"
fi
diff --git a/eclass/cron.eclass b/eclass/cron.eclass
index 3198c181f21b..c185c0eaa032 100644
--- a/eclass/cron.eclass
+++ b/eclass/cron.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cron.eclass
@@ -6,11 +6,11 @@
# maintainer-needed@gentoo.org
# @AUTHOR:
# Original Author: Aaron Walker <ka0ttic@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Some functions for cron
# @DESCRIPTION:
# Purpose: The main motivation for this eclass was to simplify
-# the jungle known as src_install() in cron ebuilds. Using these
+# the jungle known as src_install() in cron ebuilds. Using these
# functions also ensures that permissions are *always* reset,
# preventing the accidental installation of files with wrong perms.
#
@@ -18,26 +18,23 @@
# chosen based on the most common setting among cron ebuilds.
case ${EAPI} in
- [67]) inherit eutils ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-inherit flag-o-matic
-
-EXPORT_FUNCTIONS pkg_postinst
-
if [[ -z ${_CRON_ECLASS} ]]; then
_CRON_ECLASS=1
-SLOT="0"
+inherit flag-o-matic
RDEPEND=">=sys-process/cronbase-0.3.2"
-for pn in vixie-cron bcron cronie dcron fcron; do
- [[ ${pn} == "${PN}" ]] || RDEPEND="${RDEPEND} !sys-process/${pn}"
+for pn in bcron cronie dcron fcron; do
+ [[ ${pn} == "${PN}" ]] || RDEPEND+=" !sys-process/${pn}"
done
+unset pn
# @FUNCTION: docrondir
-# @USAGE: [ dir ] [ perms ]
+# @USAGE: [dir] [perms]
# @DESCRIPTION:
# Creates crontab directory
#
@@ -49,12 +46,13 @@ done
# docrondir -m0700 (uses default dir)
docrondir() {
# defaults
- local perms="-m0750 -o 0 -g cron" dir="/var/spool/cron/crontabs"
+ local perms="-m0750 -o 0 -g cron"
+ local dir="/var/spool/cron/crontabs"
if [[ -n $1 ]] ; then
- case "$1" in
+ case $1 in
*/*)
- dir=$1
+ dir="$1"
shift
[[ -n $1 ]] && perms="$@"
;;
@@ -64,15 +62,14 @@ docrondir() {
esac
fi
- diropts ${perms}
- keepdir ${dir}
-
- # reset perms to default
- diropts -m0755
+ (
+ diropts ${perms}
+ keepdir ${dir}
+ )
}
# @FUNCTION: docron
-# @USAGE: [ exe ] [ perms ]
+# @USAGE: [exe] [perms]
# @DESCRIPTION:
# Install cron executable
#
@@ -81,40 +78,41 @@ docrondir() {
# ex: docron -m 0700 -o 0 -g root ('exe' defaults to "cron")
# docron crond -m 0110
docron() {
- local cron="cron" perms="-m 0750 -o 0 -g wheel"
+ local cron="cron"
+ local perms="-m 0750 -o 0 -g wheel"
if [[ -n $1 ]] ; then
- case "$1" in
+ case $1 in
-*)
perms="$@"
;;
*)
- cron=$1
+ cron="$1"
shift
[[ -n $1 ]] && perms="$@"
;;
esac
fi
- exeopts ${perms}
- exeinto /usr/sbin
- doexe ${cron} || die "failed to install ${cron}"
-
- # reset perms to default
- exeopts -m0755
+ (
+ exeopts ${perms}
+ exeinto /usr/sbin
+ doexe ${cron}
+ )
}
# @FUNCTION: docrontab
-# @USAGE: [ exe ] [ perms ]
+# @USAGE: [exe] [perms]
# @DESCRIPTION:
# Install crontab executable
#
# Uses same semantics as docron.
docrontab() {
- local crontab="crontab" perms="-m 4750 -o 0 -g cron"
+ local crontab="crontab"
+ local perms="-m 4750 -o 0 -g cron"
if [[ -n $1 ]] ; then
- case "$1" in
+ case $1 in
-*)
perms="$@"
;;
@@ -126,15 +124,14 @@ docrontab() {
esac
fi
- exeopts ${perms}
- exeinto /usr/bin
- doexe ${crontab} || die "failed to install ${crontab}"
-
- # reset perms to default
- exeopts -m0755
+ (
+ exeopts ${perms}
+ exeinto /usr/bin
+ doexe ${crontab}
+ )
# users expect /usr/bin/crontab to exist...
- if [[ "${crontab##*/}" != "crontab" ]] ; then
+ if [[ ${crontab##*/} != crontab ]] ; then
dosym ${crontab##*/} /usr/bin/crontab || \
die "failed to create /usr/bin/crontab symlink"
fi
@@ -145,9 +142,8 @@ docrontab() {
# Outputs a message about system crontabs
# daemons that have a true system crontab set CRON_SYSTEM_CRONTAB="yes"
cron_pkg_postinst() {
- echo
# daemons that have a true system crontab set CRON_SYSTEM_CRONTAB="yes"
- if [ "${CRON_SYSTEM_CRONTAB:-no}" != "yes" ] ; then
+ if [[ ${CRON_SYSTEM_CRONTAB:-no} != yes ]] ; then
einfo "To activate /etc/cron.{hourly|daily|weekly|monthly} please run:"
einfo " crontab /etc/crontab"
einfo
@@ -158,7 +154,8 @@ cron_pkg_postinst() {
einfo "You may wish to read the Gentoo Linux Cron Guide, which can be"
einfo "found online at:"
einfo " https://wiki.gentoo.org/wiki/Cron"
- echo
}
fi
+
+EXPORT_FUNCTIONS pkg_postinst
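
The switch to subshells above is what makes the old "reset perms to default"
calls unnecessary: diropts/exeopts state set inside ( ... ) does not leak back
into the ebuild environment. In a nutshell (defaults as in docrondir above):

    (
        diropts -m0750 -o 0 -g cron
        keepdir /var/spool/cron/crontabs
    )
    # back here diropts is at its default again (-m0755), so later
    # dodir/keepdir calls are unaffected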
diff --git a/eclass/crossdev.eclass b/eclass/crossdev.eclass
new file mode 100644
index 000000000000..d6c99e4f32b7
--- /dev/null
+++ b/eclass/crossdev.eclass
@@ -0,0 +1,77 @@
+# Copyright 1999-2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: crossdev.eclass
+# @MAINTAINER:
+# cat@catcream.org
+# @AUTHOR:
+# Alfred Persson Forsberg <cat@catcream.org> (21 Jul 2023)
+# @SUPPORTED_EAPIS: 7 8
+# @BLURB: Convenience wrappers for packages used by the Crossdev tool.
+
+inherit toolchain-funcs
+
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_CROSSDEV_ECLASS} ]]; then
+_CROSSDEV_ECLASS=1
+
+# @ECLASS_VARIABLE: _CROSS_CATEGORY_PREFIX
+# @INTERNAL
+# @DESCRIPTION:
+# This variable specifies the category prefix for a Crossdev
+# package. For GCC Crossdev it is "cross-", and for LLVM it is
+# "cross_llvm-"
+_CROSS_CATEGORY_PREFIX=""
+
+# @ECLASS_VARIABLE: _IS_CROSSPKG_LLVM
+# @INTERNAL
+# @DESCRIPTION:
+# Is true if the package is in an LLVM Crossdev category, otherwise false
+_IS_CROSSPKG_LLVM=0
+if [[ ${CATEGORY} == cross_llvm-* ]] ; then
+ _IS_CROSSPKG_LLVM=1
+ _CROSS_CATEGORY_PREFIX="cross_llvm-"
+fi
+
+# @ECLASS_VARIABLE: _IS_CROSSPKG_GCC
+# @INTERNAL
+# @DESCRIPTION:
+# Is true if the package is in a GCC Crossdev category, otherwise false
+_IS_CROSSPKG_GCC=0
+if [[ ${CATEGORY} == cross-* ]] ; then
+ _IS_CROSSPKG_GCC=1
+ _CROSS_CATEGORY_PREFIX="cross-"
+fi
+
+# @ECLASS_VARIABLE: _IS_CROSSPKG
+# @INTERNAL
+# @DESCRIPTION:
+# Is true if the package is in any Crossdev category, otherwise false
+[[ ${_IS_CROSSPKG_LLVM} == 1 || ${_IS_CROSSPKG_GCC} == 1 ]] && _IS_CROSSPKG=1
+
+# Default CBUILD and CTARGET to CHOST if unset.
+export CBUILD=${CBUILD:-${CHOST}}
+export CTARGET=${CTARGET:-${CHOST}}
+
+if [[ ${CTARGET} == ${CHOST} ]] ; then
+ # cross-aarch64-gentoo-linux-musl -> aarch64-gentoo-linux-musl
+ [[ ${_IS_CROSSPKG} == 1 ]] && export CTARGET=${CATEGORY#${_CROSS_CATEGORY_PREFIX}}
+fi
+
+# @FUNCTION: target_is_not_host
+# @RETURN: Shell true if we're targeting a triple other than the host
+target_is_not_host() {
+ [[ ${CHOST} != ${CTARGET} ]]
+}
+
+# @FUNCTION: is_crosspkg
+# @RETURN: Shell true if package belongs to any crossdev category
+is_crosspkg() {
+ [[ ${_IS_CROSSPKG} == 1 ]]
+}
+
+fi
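
A hypothetical consumer of the new eclass, e.g. a toolchain-style ebuild
living in a cross-* category (CTARGET is derived from the category name by
the eclass):

    inherit crossdev

    src_configure() {
        local myconf=()
        if is_crosspkg; then
            # e.g. CATEGORY=cross-aarch64-gentoo-linux-musl
            einfo "Building from a crossdev category for ${CTARGET}"
        fi
        target_is_not_host && myconf+=( --target="${CTARGET}" )
        econf "${myconf[@]}"
    }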
diff --git a/eclass/cuda.eclass b/eclass/cuda.eclass
index 3dc8299c275c..e54560197a8f 100644
--- a/eclass/cuda.eclass
+++ b/eclass/cuda.eclass
@@ -1,17 +1,6 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-case "${EAPI:-0}" in
- [0-6])
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- 7|8)
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
-esac
-
# @ECLASS: cuda.eclass
# @MAINTAINER:
# Gentoo Science Project <sci@gentoo.org>
@@ -21,25 +10,30 @@ esac
# This eclass contains functions to be used with cuda package. Currently it is
# setting and/or sanitizing NVCCFLAGS, the compiler flags for nvcc. This is
# automatically done and exported in src_prepare() or manually by calling
-# cuda_sanatize.
+# cuda_sanitize.
# @EXAMPLE:
# inherit cuda
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
if [[ -z ${_CUDA_ECLASS} ]]; then
+_CUDA_ECLASS=1
inherit flag-o-matic toolchain-funcs
-[[ ${EAPI} == [56] ]] && inherit eapi7-ver
# @ECLASS_VARIABLE: NVCCFLAGS
# @DESCRIPTION:
# nvcc compiler flags (see nvcc --help), which should be used like
# CFLAGS for c compiler
-: ${NVCCFLAGS:=-O2}
+: "${NVCCFLAGS:=-O2}"
# @ECLASS_VARIABLE: CUDA_VERBOSE
# @DESCRIPTION:
# Being verbose during compilation to see underlying commands
-: ${CUDA_VERBOSE:=true}
+: "${CUDA_VERBOSE:=true}"
# @FUNCTION: cuda_gccdir
# @USAGE: [-f]
@@ -87,7 +81,7 @@ cuda_gccdir() {
# Try the current gcc version first
ver=$(gcc-version)
if [[ -n "${ver}" ]] && [[ ${vers} =~ ${ver} ]]; then
- dirs=( ${EPREFIX}/usr/*pc-linux-gnu/gcc-bin/${ver}*/ )
+ dirs=( "${EPREFIX}"/usr/*pc-linux-gnu/gcc-bin/${ver%.*}*/ )
gcc_bindir="${dirs[${#dirs[@]}-1]}"
fi
@@ -96,14 +90,14 @@ cuda_gccdir() {
ver=$(ver_cut 1-2 "${ver##*sys-devel/gcc-}")
if [[ -n "${ver}" ]] && [[ ${vers} =~ ${ver} ]]; then
- dirs=( ${EPREFIX}/usr/*pc-linux-gnu/gcc-bin/${ver}*/ )
+ dirs=( "${EPREFIX}"/usr/*pc-linux-gnu/gcc-bin/${ver%.*}*/ )
gcc_bindir="${dirs[${#dirs[@]}-1]}"
fi
fi
for ver in ${vers}; do
if has_version "=sys-devel/gcc-${ver}*"; then
- dirs=( ${EPREFIX}/usr/*pc-linux-gnu/gcc-bin/${ver}*/ )
+ dirs=( "${EPREFIX}"/usr/*pc-linux-gnu/gcc-bin/${ver%.*}*/ )
gcc_bindir="${dirs[${#dirs[@]}-1]}"
fi
done
@@ -195,7 +189,6 @@ cuda_src_prepare() {
cuda_sanitize
}
-EXPORT_FUNCTIONS src_prepare
-
-_CUDA_ECLASS=1
fi
+
+EXPORT_FUNCTIONS src_prepare
diff --git a/eclass/cvs.eclass b/eclass/cvs.eclass
index a90a4d87f1c5..dbacc2c09cfe 100644
--- a/eclass/cvs.eclass
+++ b/eclass/cvs.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cvs.eclass
@@ -13,14 +13,14 @@
# cvs_src_unpack. If you find that you need to call the cvs_* functions
# directly, I'd be interested to hear about it.
-if [[ -z ${_CVS_ECLASS} ]]; then
-_CVS_ECLASS=1
-
case ${EAPI} in
7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ -z ${_CVS_ECLASS} ]]; then
+_CVS_ECLASS=1
+
# TODO:
# Implement more auth types (gserver?, kserver?)
@@ -37,13 +37,13 @@ esac
# @DESCRIPTION:
# Set the default compression level. Has no effect when ECVS_CVS_COMMAND
# is defined by ebuild/user.
-: ${ECVS_CVS_COMPRESS:=-z1}
+: "${ECVS_CVS_COMPRESS:=-z1}"
# @ECLASS_VARIABLE: ECVS_CVS_OPTIONS
# @DESCRIPTION:
# Additional options to the cvs commands. Has no effect when ECVS_CVS_COMMAND
# is defined by ebuild/user.
-: ${ECVS_CVS_OPTIONS:=-q -f}
+: "${ECVS_CVS_OPTIONS:=-q -f}"
# @ECLASS_VARIABLE: ECVS_CVS_COMMAND
# @DESCRIPTION:
@@ -53,13 +53,13 @@ esac
# on the cvs connection. The default of "cvs -q -f -z4" means to be
# quiet, to disregard the ~/.cvsrc config file and to use maximum
# compression.
-: ${ECVS_CVS_COMMAND:=cvs ${ECVS_CVS_OPTIONS} ${ECVS_CVS_COMPRESS}}
+: "${ECVS_CVS_COMMAND:=cvs ${ECVS_CVS_OPTIONS} ${ECVS_CVS_COMPRESS}}"
# @ECLASS_VARIABLE: ECVS_UP_OPTS
# @DESCRIPTION:
# CVS options given after the cvs update command. Don't remove "-dP" or things
# won't work.
-: ${ECVS_UP_OPTS:=-dP}
+: "${ECVS_UP_OPTS:=-dP}"
# @ECLASS_VARIABLE: ECVS_CO_OPTS
# @DEFAULT_UNSET
@@ -72,7 +72,7 @@ esac
# Set this variable to a non-empty value to disable the automatic updating of
# a CVS source tree. This is intended to be set outside the cvs source
# tree by users.
-: ${ECVS_OFFLINE:=${EVCS_OFFLINE}}
+: "${ECVS_OFFLINE:=${EVCS_OFFLINE}}"
# @ECLASS_VARIABLE: ECVS_LOCAL
# @DEFAULT_UNSET
@@ -97,7 +97,7 @@ esac
# @ECLASS_VARIABLE: ECVS_TOP_DIR
# @DESCRIPTION:
# The directory under which CVS modules are checked out.
-: ${ECVS_TOP_DIR:="${PORTAGE_ACTUAL_DISTDIR-${DISTDIR}}/cvs-src"}
+: "${ECVS_TOP_DIR:="${PORTAGE_ACTUAL_DISTDIR-${DISTDIR}}/cvs-src"}"
# @ECLASS_VARIABLE: ECVS_SERVER
# @DESCRIPTION:
@@ -110,7 +110,7 @@ esac
#
# Set this to "offline" to disable fetching (i.e. to assume the module
# is already checked out in ECVS_TOP_DIR).
-: ${ECVS_SERVER:="offline"}
+: "${ECVS_SERVER:="offline"}"
# @ECLASS_VARIABLE: ECVS_MODULE
# @REQUIRED
@@ -152,12 +152,12 @@ esac
# e.g.
# "cvs -danoncvs@savannah.gnu.org:/cvsroot/backbone co System"
# ( from gnustep-apps/textedit )
-: ${ECVS_AUTH:="pserver"}
+: "${ECVS_AUTH:="pserver"}"
# @ECLASS_VARIABLE: ECVS_USER
# @DESCRIPTION:
# Username to use for authentication on the remote server.
-: ${ECVS_USER:="anonymous"}
+: "${ECVS_USER:="anonymous"}"
# @ECLASS_VARIABLE: ECVS_PASS
# @DEFAULT_UNSET
@@ -423,7 +423,7 @@ EOF
# Make sure DISPLAY is set (SSH will not use SSH_ASKPASS
# if DISPLAY is not set)
- : ${DISPLAY:="DISPLAY"}
+ : "${DISPLAY:="DISPLAY"}"
export DISPLAY
# Create a dummy executable to echo ${ECVS_PASS}
@@ -536,6 +536,6 @@ cvs_src_unpack() {
einfo "CVS module ${ECVS_MODULE} is now in ${WORKDIR}"
}
-EXPORT_FUNCTIONS src_unpack
-
fi
+
+EXPORT_FUNCTIONS src_unpack
diff --git a/eclass/db-use.eclass b/eclass/db-use.eclass
index 55e72286fda4..99f31a17a738 100644
--- a/eclass/db-use.eclass
+++ b/eclass/db-use.eclass
@@ -8,7 +8,7 @@
# maintainer-needed@gentoo.org
# @AUTHOR:
# Paul de Vrieze <pauldv@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: This is a common location for functions that aid the use of sys-libs/db
# @DESCRIPTION:
# This eclass is designed to provide helpful functions for depending on
@@ -16,7 +16,6 @@
# multilib is used for get_libname in all EAPI
case ${EAPI} in
- 5|6) inherit eapi7-ver ;& # fallthrough
7|8) inherit multilib ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -53,7 +52,7 @@ db_findver() {
PKG="$(best_version $1)"
VER="$(ver_cut 1-2 "${PKG/*db-/}")"
- if [ -d "${EPREFIX}"/usr/include/db$(db_ver_to_slot "$VER") ]; then
+ if [ -d "${ESYSROOT}"/usr/include/db$(db_ver_to_slot "$VER") ]; then
#einfo "Found db version ${VER}" >&2
echo -n "$VER"
return 0
@@ -72,8 +71,8 @@ db_includedir() {
VER="$(db_findver sys-libs/db)" || return 1
VER="$(db_ver_to_slot "$VER")"
echo "include version ${VER}" >&2
- if [ -d "${EPREFIX}/usr/include/db${VER}" ]; then
- echo -n "${EPREFIX}/usr/include/db${VER}"
+ if [ -d "${ESYSROOT}/usr/include/db${VER}" ]; then
+ echo -n "${ESYSROOT}/usr/include/db${VER}"
return 0
else
eerror "sys-libs/db package requested, but headers not found" >&2
@@ -84,8 +83,8 @@ db_includedir() {
for x in $@
do
if VER=$(db_findver "=sys-libs/db-${x}*") &&
- [ -d "${EPREFIX}/usr/include/db$(db_ver_to_slot $VER)" ]; then
- echo -n "${EPREFIX}/usr/include/db$(db_ver_to_slot $VER)"
+ [ -d "${ESYSROOT}/usr/include/db$(db_ver_to_slot $VER)" ]; then
+ echo -n "${ESYSROOT}/usr/include/db$(db_ver_to_slot $VER)"
return 0
fi
done
@@ -103,7 +102,7 @@ db_includedir() {
db_libname() {
if [ $# -eq 0 ]; then
VER="$(db_findver sys-libs/db)" || return 1
- if [ -e "${EPREFIX}/usr/$(get_libdir)/libdb-${VER}$(get_libname)" ]; then
+ if [ -e "${ESYSROOT}/usr/$(get_libdir)/libdb-${VER}$(get_libname)" ]; then
echo -n "db-${VER}"
return 0
else
@@ -115,7 +114,7 @@ db_libname() {
for x in $@
do
if VER=$(db_findver "=sys-libs/db-${x}*"); then
- if [ -e "${EPREFIX}/usr/$(get_libdir)/libdb-${VER}$(get_libname)" ]; then
+ if [ -e "${ESYSROOT}/usr/$(get_libdir)/libdb-${VER}$(get_libname)" ]; then
echo -n "db-${VER}"
return 0
fi
diff --git a/eclass/db.eclass b/eclass/db.eclass
index 96669c6d8938..0c096dc48f13 100644
--- a/eclass/db.eclass
+++ b/eclass/db.eclass
@@ -1,24 +1,29 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: db.eclass
# @MAINTAINER:
# base-system@gentoo.org
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Internal eclass used by sys-libs/db ebuilds
-inherit eutils multilib multiprocessing
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
-IUSE="doc test examples"
+if [[ -z ${_DB_ECLASS} ]]; then
+_DB_ECLASS=1
-EXPORT_FUNCTIONS src_test
+inherit multilib multiprocessing
-DEPEND="test? ( >=dev-lang/tcl-8.4 )"
+IUSE="doc tcl test examples"
+REQUIRED_USE="test? ( tcl )"
-RDEPEND=""
+BDEPEND="test? ( >=dev-lang/tcl-8.4 )"
db_fix_so() {
- has "${EAPI:-0}" 0 1 2 && ! use prefix && EROOT="${ROOT}"
- LIB="${EROOT}/usr/$(get_libdir)"
+ local LIB="${EROOT}"/usr/$(get_libdir)
cd "${LIB}" || die
@@ -32,71 +37,53 @@ db_fix_so() {
# now rebuild all the correct ones
local ext
for ext in so dylib a; do
+ local name
for name in libdb libdb_{cxx,tcl,java,sql,stl}; do
- target="$(find . -maxdepth 1 -type f -name "${name}-*.${ext}" |sort -V |tail -n 1)"
- [[ -n "${target}" ]] && ln -sf ${target//.\//} ${name}.${ext}
- done;
- done;
-
- # db[23] gets some extra-special stuff
- if [[ -f libdb1$(get_libname 2) ]]; then
- ln -sf libdb1$(get_libname 2) libdb$(get_libname 2)
- ln -sf libdb1$(get_libname 2) libdb1$(get_libname)
- ln -sf libdb1$(get_libname 2) libdb-1$(get_libname)
- fi
- # what do we do if we ever get 3.3 ?
- local i
- for i in libdb libdb_{cxx,tcl,java,sql,stl}; do
- if [[ -f $i-3.2$(get_libname) ]]; then
- ln -sf $i-3.2$(get_libname) $i-3$(get_libname)
- ln -sf $i-3.2$(get_libname) $i$(get_libname 3)
- fi
+ target="$(find . -maxdepth 1 -type f -name "${name}-*.${ext}" | sort -V | tail -n 1)"
+ [[ -n ${target} ]] && ln -sf ${target//.\//} ${name}.${ext}
+ done
done
# do the same for headers now
# but since there are only two of them, just overwrite them
- cd "${EROOT}"/usr/include
- target="$(find . -maxdepth 1 -type d -name 'db[0-9]*' | sort -V |cut -d/ -f2- | tail -n1)"
- if [[ -n "${target}" ]] && [[ -e "${target}/db.h" ]] && ( ! [[ -e db.h ]] || [[ -h db.h ]] ); then
+ cd "${EROOT}"/usr/include || die
+ target="$(find . -maxdepth 1 -type d -name 'db[0-9]*' | sort -V | cut -d/ -f2- | tail -n1)"
+ if [[ -n ${target} && -e ${target}/db.h ]] && ( ! [[ -e db.h ]] || [[ -h db.h ]] ); then
einfo "Creating db.h symlinks to ${target}"
ln -sf "${target}"/db.h .
ln -sf "${target}"/db_185.h .
- elif [[ ! -e "${target}/db.h" ]]; then
- if [[ -n "${target}" ]]; then
+ elif [[ ! -e ${target}/db.h ]]; then
+ if [[ -n ${target} ]]; then
ewarn "Could not find ${target}/db.h"
elif [[ -h db.h ]]; then
einfo "Apparently you just removed the last instance of $PN. Removing the symlinks"
- rm -f db.h db_185.h
+ rm -f db.h db_185.h || die
fi
fi
}
db_src_install_doc() {
- has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
# not everybody wants this wad of documentation as it is primarily API docs
if use doc; then
dodir /usr/share/doc/${PF}/html
mv "${ED}"/usr/docs/* "${ED}"/usr/share/doc/${PF}/html/ || die
- rm -rf "${ED}"/usr/docs
+ rm -rf "${ED}"/usr/docs || die
else
- rm -rf "${ED}"/usr/docs
+ rm -rf "${ED}"/usr/docs || die
fi
db_src_install_examples
}
db_src_install_examples() {
- has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
- if use examples ; then
+ if use examples; then
local langs=( c cxx stl )
- [[ "${IUSE/java}" != "${IUSE}" ]] \
- && use java \
- && langs+=( java )
+ in_iuse java && use java && langs+=( java )
local i
- for i in ${langs[@]} ; do
- destdir="/usr/share/doc/${PF}/"
- src="${S}/../examples_${i}/"
- if [[ -f "${src}" ]]; then
+ for i in ${langs[@]}; do
+ local destdir="/usr/share/doc/${PF}/"
+ local src="${S}/../examples_${i}/"
+ if [[ -f ${src} ]]; then
dodir "${destdir}"
cp -ra "${src}" "${ED}${destdir}/" || die
fi
@@ -105,13 +92,12 @@ db_src_install_examples() {
}
db_src_install_usrbinslot() {
- has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
# slot all program names to avoid overwriting
local fname
- for fname in "${ED}"/usr/bin/db*
- do
- dn="$(dirname "${fname}")"
- bn="$(basename "${fname}")"
+
+ for fname in "${ED}"/usr/bin/db*; do
+ local dn="$(dirname "${fname}")"
+ local bn="$(basename "${fname}")"
bn="${bn/db/db${SLOT}}"
mv "${fname}" "${dn}/${bn}" || \
die "Failed to rename ${fname} to ${dn}/${bn}"
@@ -119,30 +105,28 @@ db_src_install_usrbinslot() {
}
db_src_install_headerslot() {
- has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
# install all headers in a slotted location
dodir /usr/include/db${SLOT}
mv "${ED}"/usr/include/*.h "${ED}"/usr/include/db${SLOT}/ || die
}
db_src_install_usrlibcleanup() {
- has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
- LIB="${ED}/usr/$(get_libdir)"
+ local LIB="${ED}"/usr/$(get_libdir)
# Clean out the symlinks so that they will not be recorded in the
# contents (bug #60732)
- if [[ "${ED}" = "" ]]; then
+ if [[ -z ${ED} ]]; then
die "Calling clean_links while \${ED} not defined"
fi
- if [[ -e "${LIB}"/libdb.a ]] && [[ ! -e "${LIB}"/libdb-${SLOT}.a ]]; then
+ if [[ -e "${LIB}"/libdb.a && ! -e "${LIB}"/libdb-${SLOT}.a ]]; then
einfo "Moving libdb.a to a versioned name"
- mv "${LIB}/libdb.a" "${LIB}/libdb-${SLOT}.a" || die
+ mv "${LIB}"/libdb.a "${LIB}"/libdb-${SLOT}.a || die
fi
- if [[ -e "${LIB}"/libdb_cxx.a ]] && [[ ! -e "${LIB}"/libdb_cxx-${SLOT}.a ]]; then
+ if [[ -e "${LIB}"/libdb_cxx.a && ! -e "${LIB}"/libdb_cxx-${SLOT}.a ]]; then
einfo "Moving libdb_cxx.a to a versioned name"
- mv "${LIB}/libdb_cxx.a" "${LIB}/libdb_cxx-${SLOT}.a" || die
+ mv "${LIB}"/libdb_cxx.a "${LIB}"/libdb_cxx-${SLOT}.a || die
fi
local soext=$(get_libname)
@@ -154,11 +138,11 @@ db_src_install_usrlibcleanup() {
rm -f \
"${ED}"/usr/include/{db,db_185}.h \
- "${LIB}"/libdb{,_{cxx,sql,stl,java,tcl}}.a
+ "${LIB}"/libdb{,_{cxx,sql,stl,java,tcl}}.a || die
}
db_src_test() {
- if [[ $UID -eq 0 ]]; then
+ if [[ ${UID} -eq 0 ]]; then
M="You must run the testsuite as non-root, skipping"
ewarn "${M}"
elog "${M}"
@@ -170,34 +154,39 @@ db_src_test() {
ewarn "This can take 6+ hours on modern machines"
# Fix stuff that fails with relative paths, and upstream moving files
# around...
- local test_parallel='' t
+ local t test_parallel
for t in \
"${S}"/test/parallel.tcl \
"${S}"/../test/parallel.tcl \
"${S}"/test/tcl/parallel.tcl \
"${S}"/../test/tcl/parallel.tcl \
; do
- [[ -f "${t}" ]] && test_parallel="${t}" && break
+ [[ -f ${t} ]] && test_parallel="${t}" && break
done
sed -ri \
-e '/regsub .test_path ./s,(regsub),#\1,g' \
-e '/regsub .src_root ./s,(regsub),#\1,g' \
-e '/regsub .tcl_utils ./s,(regsub),#\1,g' \
- "${test_parallel}"
- cd "${S}"
+ "${test_parallel}" || die
+ cd "${S}" || die
for t in \
../test/test.tcl \
../test/tcl/test.tcl \
; do
- [[ -f "${t}" ]] && testbase="${t}" && break
+ [[ -f ${t} ]] && testbase="${t}" && break
done
- echo "source ${t}" > testrunner.tcl
- echo "run_parallel $(makeopts_jobs) run_std" >> testrunner.tcl
- tclsh testrunner.tcl
+ cat > testrunner.tcl <<-EOF || die
+ source ${t}
+ run_parallel $(makeopts_jobs) run_std
+ EOF
+
+ tclsh testrunner.tcl || die
grep -Eqs '^FAIL' ALL.OUT* && die "Some tests failed, please see ${S}/ALL.OUT*"
- else
- eerror "You must have USE=tcl to run the sys-libs/db testsuite."
fi
}
+
+fi
+
+EXPORT_FUNCTIONS src_test
diff --git a/eclass/depend.apache.eclass b/eclass/depend.apache.eclass
index 51410265bbc5..8f0469931d2c 100644
--- a/eclass/depend.apache.eclass
+++ b/eclass/depend.apache.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: depend.apache.eclass
# @MAINTAINER:
# apache-bugs@gentoo.org
-# @SUPPORTED_EAPIS: 0 2 3 4 5 6 7
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: Functions to allow ebuilds to depend on apache
# @DESCRIPTION:
# This eclass handles depending on apache in a sane way and provides information
@@ -41,10 +41,7 @@
# @CODE
case ${EAPI:-0} in
- 0|2|3|4|5)
- inherit multilib
- ;;
- 6|7)
+ 6|7|8)
;;
*)
die "EAPI=${EAPI} is not supported by depend.apache.eclass"
@@ -78,8 +75,7 @@ esac
# @ECLASS_VARIABLE: APACHE_BASEDIR
# @DESCRIPTION:
# Path to the server root directory.
-# This variable is set by the want/need_apache functions (EAPI=0 through 5)
-# or depend.apache_pkg_setup (EAPI=6 and later).
+# This variable is set by depend.apache_pkg_setup.
# @ECLASS_VARIABLE: APACHE_CONFDIR
# @DESCRIPTION:
@@ -99,8 +95,7 @@ esac
# @ECLASS_VARIABLE: APACHE_MODULESDIR
# @DESCRIPTION:
# Path where we install modules.
-# This variable is set by the want/need_apache functions (EAPI=0 through 5)
-# or depend.apache_pkg_setup (EAPI=6 and later).
+# This variable is set by depend.apache_pkg_setup.
# @ECLASS_VARIABLE: APACHE_DEPEND
# @DESCRIPTION:
@@ -114,7 +109,7 @@ APACHE2_DEPEND="=www-servers/apache-2*"
# @ECLASS_VARIABLE: APACHE2_2_DEPEND
# @DESCRIPTION:
-# Dependencies for Apache 2.2.x
+# Dependencies for Apache 2.2.x. Deprecated and removed in EAPI 8.
APACHE2_2_DEPEND="=www-servers/apache-2.2*"
# @ECLASS_VARIABLE: APACHE2_4_DEPEND
@@ -140,12 +135,6 @@ _init_apache2() {
APACHE_CONFDIR="/etc/apache2"
APACHE_MODULES_CONFDIR="${APACHE_CONFDIR}/modules.d"
APACHE_VHOSTS_CONFDIR="${APACHE_CONFDIR}/vhosts.d"
-
- case ${EAPI:-0} in
- 0|2|3|4|5)
- _init_apache2_late
- ;;
- esac
}
_init_apache2_late() {
@@ -177,27 +166,14 @@ depend.apache_pkg_setup() {
local myiuse=${1:-apache2}
- case ${EAPI:-0} in
- 0|2|3|4|5)
- if has ${myiuse} ${IUSE}; then
- if use ${myiuse}; then
- _init_apache2
- else
- _init_no_apache
- fi
- fi
- ;;
- *)
- if in_iuse ${myiuse}; then
- if use ${myiuse}; then
- _init_apache2
- _init_apache2_late
- else
- _init_no_apache
- fi
- fi
- ;;
- esac
+ if in_iuse ${myiuse}; then
+ if use ${myiuse}; then
+ _init_apache2
+ _init_apache2_late
+ else
+ _init_no_apache
+ fi
+ fi
}
# @FUNCTION: want_apache
@@ -239,10 +215,17 @@ want_apache2() {
want_apache2_2() {
debug-print-function $FUNCNAME $*
- local myiuse=${1:-apache2}
- IUSE="${IUSE} ${myiuse}"
- DEPEND="${DEPEND} ${myiuse}? ( ${APACHE2_2_DEPEND} )"
- RDEPEND="${RDEPEND} ${myiuse}? ( ${APACHE2_2_DEPEND} )"
+ case ${EAPI:-0} in
+ 6|7)
+ local myiuse=${1:-apache2}
+ IUSE="${IUSE} ${myiuse}"
+ DEPEND="${DEPEND} ${myiuse}? ( ${APACHE2_2_DEPEND} )"
+ RDEPEND="${RDEPEND} ${myiuse}? ( ${APACHE2_2_DEPEND} )"
+ ;;
+ *)
+ errror "want-apache2_2 is no longer supported in EAPI 8"
+ ;;
+ esac
}
# @FUNCTION: want_apache2_4
@@ -287,9 +270,16 @@ need_apache2() {
need_apache2_2() {
debug-print-function $FUNCNAME $*
- DEPEND="${DEPEND} ${APACHE2_2_DEPEND}"
- RDEPEND="${RDEPEND} ${APACHE2_2_DEPEND}"
- _init_apache2
+ case ${EAPI:-0} in
+ 6|7)
+ DEPEND="${DEPEND} ${APACHE2_2_DEPEND}"
+ RDEPEND="${RDEPEND} ${APACHE2_2_DEPEND}"
+ _init_apache2
+ ;;
+ *)
+ error "need_apache2-2 is no longer supported in EAPI 8"
+ ;;
+ esac
}
# @FUNCTION: need_apache2_4
@@ -327,12 +317,6 @@ has_apache() {
has_apache_threads() {
debug-print-function $FUNCNAME $*
- case ${EAPI:-0} in
- 0|1)
- die "depend.apache.eclass: has_apache_threads is not supported for EAPI=${EAPI:-0}"
- ;;
- esac
-
if ! has_version 'www-servers/apache[threads]'; then
return
fi
@@ -356,12 +340,6 @@ has_apache_threads() {
has_apache_threads_in() {
debug-print-function $FUNCNAME $*
- case ${EAPI:-0} in
- 0|1)
- die "depend.apache.eclass: has_apache_threads_in is not supported for EAPI=${EAPI:-0}"
- ;;
- esac
-
if ! has_version 'www-servers/apache[threads]'; then
return
fi
diff --git a/eclass/desktop.eclass b/eclass/desktop.eclass
index 82e764e2a1a4..780971342ba1 100644
--- a/eclass/desktop.eclass
+++ b/eclass/desktop.eclass
@@ -1,11 +1,17 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: desktop.eclass
# @MAINTAINER:
# base-system@gentoo.org
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: support for desktop files, menus, and icons
+case ${EAPI} in
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
if [[ -z ${_DESKTOP_ECLASS} ]]; then
_DESKTOP_ECLASS=1
@@ -156,19 +162,18 @@ make_desktop_entry() {
;;
esac
fi
- local slot=${SLOT%/*}
- if [[ ${slot} == "0" ]] ; then
- local desktop_name="${PN}"
- else
- local desktop_name="${PN}-${slot}"
- fi
+
local desktop_exec="${exec%%[[:space:]]*}"
desktop_exec="${desktop_exec##*/}"
+ local desktop_suffix="-${PN}"
+ [[ ${SLOT%/*} != 0 ]] && desktop_suffix+="-${SLOT%/*}"
+ # Replace foo-foo.desktop by foo.desktop
+ [[ ${desktop_suffix#-} == "${desktop_exec}" ]] && desktop_suffix=""
# Prevent collisions if a file with the same name already exists #771708
- local desktop="${desktop_exec}-${desktop_name}" count=0
+ local desktop="${desktop_exec}${desktop_suffix}" count=0
while [[ -e ${ED}/usr/share/applications/${desktop}.desktop ]]; do
- desktop="${desktop_exec}-$((++count))-${desktop_name}"
+ desktop="${desktop_exec}-$((++count))${desktop_suffix}"
done
desktop="${T}/${desktop}.desktop"
@@ -306,9 +311,9 @@ _iconins() {
size=${2}
fi
case ${size} in
- 16|22|24|32|36|48|64|72|96|128|192|256|512)
+ 16|22|24|32|36|48|64|72|96|128|192|256|512|1024)
size=${size}x${size};;
- scalable)
+ symbolic|scalable)
;;
*)
eerror "${size} is an unsupported icon size!"
@@ -364,7 +369,7 @@ _iconins() {
# !!! must specify to install into /usr/share/icons/... !!!
# size of the icon, like 48 or 48x48
# supported icon sizes are:
-# 16 22 24 32 36 48 64 72 96 128 192 256 512 scalable
+# 16 22 24 32 36 48 64 72 96 128 192 256 512 1024 scalable
# -c, --context
# defaults to "apps"
# -t, --theme
diff --git a/eclass/dist-kernel-utils.eclass b/eclass/dist-kernel-utils.eclass
index d192c31db273..13137f8c863c 100644
--- a/eclass/dist-kernel-utils.eclass
+++ b/eclass/dist-kernel-utils.eclass
@@ -1,4 +1,4 @@
-# Copyright 2020-2022 Gentoo Authors
+# Copyright 2020-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: dist-kernel-utils.eclass
@@ -6,57 +6,27 @@
# Distribution Kernel Project <dist-kernel@gentoo.org>
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
-# @SUPPORTED_EAPIS: 7
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Utility functions related to Distribution Kernels
# @DESCRIPTION:
# This eclass provides various utility functions related to Distribution
# Kernels.
-if [[ ! ${_DIST_KERNEL_UTILS} ]]; then
-
-case "${EAPI:-0}" in
- 0|1|2|3|4|5|6)
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- 7)
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
-esac
-
-# @FUNCTION: dist-kernel_build_initramfs
-# @USAGE: <output> <version>
+# @ECLASS_VARIABLE: KERNEL_EFI_ZBOOT
+# @DEFAULT_UNSET
# @DESCRIPTION:
-# Build an initramfs for the kernel. <output> specifies the absolute
-# path where initramfs will be created, while <version> specifies
-# the kernel version, used to find modules.
-#
-# Note: while this function uses dracut at the moment, other initramfs
-# variants may be supported in the future.
-dist-kernel_build_initramfs() {
- debug-print-function ${FUNCNAME} "${@}"
-
- [[ ${#} -eq 2 ]] || die "${FUNCNAME}: invalid arguments"
- local output=${1}
- local version=${2}
+# If set to a non-null value, it is assumed the kernel was built with
+# CONFIG_EFI_ZBOOT enabled. This effects the name of the kernel image on
+# arm64 and riscv. Mainly useful for sys-kernel/gentoo-kernel-bin.
- local rel_image_path=$(dist-kernel_get_image_path)
- local image=${output%/*}/${rel_image_path##*/}
-
- local args=(
- --force
- # if uefi=yes is used, dracut needs to locate the kernel image
- --kernel-image "${image}"
+if [[ ! ${_DIST_KERNEL_UTILS} ]]; then
- # positional arguments
- "${output}" "${version}"
- )
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
- ebegin "Building initramfs via dracut"
- dracut "${args[@]}"
- eend ${?} || die -n "Building initramfs failed"
-}
+inherit toolchain-funcs
# @FUNCTION: dist-kernel_get_image_path
# @DESCRIPTION:
@@ -66,21 +36,29 @@ dist-kernel_get_image_path() {
amd64|x86)
echo arch/x86/boot/bzImage
;;
- arm64)
- echo arch/arm64/boot/Image.gz
+ arm64|riscv)
+ if [[ ${KERNEL_EFI_ZBOOT} ]]; then
+ echo arch/${ARCH}/boot/vmlinuz.efi
+ else
+ echo arch/${ARCH}/boot/Image.gz
+ fi
+ ;;
+ loong)
+ if [[ ${KERNEL_EFI_ZBOOT} ]]; then
+ echo arch/loongarch/boot/vmlinuz.efi
+ else
+ echo arch/loongarch/boot/vmlinux.elf
+ fi
;;
arm)
echo arch/arm/boot/zImage
;;
- hppa|ppc|ppc64)
+ hppa|ppc|ppc64|sparc)
# https://www.kernel.org/doc/html/latest/powerpc/bootwrapper.html
# ./ is required because of ${image_path%/*}
# substitutions in the code
echo ./vmlinux
;;
- riscv)
- echo arch/riscv/boot/Image.gz
- ;;
*)
die "${FUNCNAME}: unsupported ARCH=${ARCH}"
;;
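
For instance, on arm64 the case statement above now resolves to:

    # KERNEL_EFI_ZBOOT unset -> arch/arm64/boot/Image.gz
    # KERNEL_EFI_ZBOOT=1     -> arch/arm64/boot/vmlinuz.efi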
@@ -101,27 +79,10 @@ dist-kernel_install_kernel() {
local image=${2}
local map=${3}
- # if dracut is used in uefi=yes mode, initrd will actually
- # be a combined kernel+initramfs UEFI executable. we can easily
- # recognize it by PE magic (vs cpio for a regular initramfs)
- local initrd=${image%/*}/initrd
- local magic
- [[ -s ${initrd} ]] && read -n 2 magic < "${initrd}"
- if [[ ${magic} == MZ ]]; then
- einfo "Combined UEFI kernel+initramfs executable found"
- # install the combined executable in place of kernel
- image=${initrd}.uefi
- mv "${initrd}" "${image}" || die
- # put an empty file in place of initrd. installing a duplicate
- # file would waste disk space, and removing it entirely provokes
- # kernel-install to regenerate it via dracut.
- > "${initrd}"
- fi
-
ebegin "Installing the kernel via installkernel"
# note: .config is taken relatively to System.map;
# initrd relatively to bzImage
- installkernel "${version}" "${image}" "${map}"
+ ARCH=$(tc-arch-kernel) installkernel "${version}" "${image}" "${map}"
eend ${?} || die -n "Installing the kernel failed"
}
@@ -144,22 +105,69 @@ dist-kernel_reinstall_initramfs() {
local ver=${2}
local image_path=${kernel_dir}/$(dist-kernel_get_image_path)
- local initramfs_path=${image_path%/*}/initrd
if [[ ! -f ${image_path} ]]; then
eerror "Kernel install missing, image not found:"
eerror " ${image_path}"
eerror "Initramfs will not be updated. Please reinstall your kernel."
return
fi
- if [[ ! -f ${initramfs_path} ]]; then
- einfo "No initramfs found at ${initramfs_path}"
- return
- fi
- dist-kernel_build_initramfs "${initramfs_path}" "${ver}"
dist-kernel_install_kernel "${ver}" "${image_path}" \
"${kernel_dir}/System.map"
}
+# @FUNCTION: dist-kernel_PV_to_KV
+# @USAGE: <pv>
+# @DESCRIPTION:
+# Convert a Gentoo-style ebuild version to kernel "x.y.z[-rcN]" version.
+dist-kernel_PV_to_KV() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${#} -ne 1 ]] && die "${FUNCNAME}: invalid arguments"
+ local pv=${1}
+
+ local kv=${pv%%_*}
+ [[ -z $(ver_cut 3- "${kv}") ]] && kv+=".0"
+ [[ ${pv} == *_* ]] && kv+=-${pv#*_}
+ echo "${kv}"
+}
+
+# @FUNCTION: dist-kernel_compressed_module_cleanup
+# @USAGE: <path>
+# @DESCRIPTION:
+# Traverse path for duplicate (un)compressed modules and remove all
+# but the newest variant.
+dist-kernel_compressed_module_cleanup() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${#} -ne 1 ]] && die "${FUNCNAME}: invalid arguments"
+ local path=${1}
+ local basename f
+
+ while read -r basename; do
+ local prev=
+ for f in "${path}/${basename}"{,.gz,.xz,.zst}; do
+ if [[ ! -e ${f} ]]; then
+ continue
+ elif [[ -z ${prev} ]]; then
+ prev=${f}
+ elif [[ ${f} -nt ${prev} ]]; then
+ rm -v "${prev}" || die
+ prev=${f}
+ else
+ rm -v "${f}" || die
+ fi
+ done
+ done < <(
+ cd "${path}" &&
+ find -type f \
+ \( -name '*.ko' \
+ -o -name '*.ko.gz' \
+ -o -name '*.ko.xz' \
+ -o -name '*.ko.zst' \
+ \) | sed -e 's:[.]\(gz\|xz\|zst\)$::' | sort | uniq -d || die
+ )
+}
+
_DIST_KERNEL_UTILS=1
fi
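
A few sample conversions for the new dist-kernel_PV_to_KV helper, run from an
ebuild phase (ver_cut is an EAPI helper); dist-kernel_compressed_module_cleanup
would typically be pointed at a modules directory, e.g. (hypothetically)
"${ED}/lib/modules/${ver}":

    for pv in 6.6.8 6.7 6.8_rc2; do
        einfo "${pv} -> $(dist-kernel_PV_to_KV "${pv}")"
    done
    # 6.6.8   -> 6.6.8
    # 6.7     -> 6.7.0
    # 6.8_rc2 -> 6.8.0-rc2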
diff --git a/eclass/distutils-r1.eclass b/eclass/distutils-r1.eclass
index ed368da79896..a67122a59a33 100644
--- a/eclass/distutils-r1.eclass
+++ b/eclass/distutils-r1.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: distutils-r1.eclass
@@ -7,7 +7,7 @@
# @AUTHOR:
# Author: Michał Górny <mgorny@gentoo.org>
# Based on the work of: Krzysztof Pawlik <nelchael@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: python-r1 python-single-r1
# @BLURB: A simple eclass to build Python packages using distutils.
# @DESCRIPTION:
@@ -44,17 +44,29 @@
# For more information, please see the Python Guide:
# https://projects.gentoo.org/python/guide/
-case "${EAPI:-0}" in
- [0-5])
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- [6-8])
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+# @ECLASS_VARIABLE: DISTUTILS_EXT
+# @PRE_INHERIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Set this variable to a non-null value if the package (possibly
+# optionally) builds Python extensions (loadable modules written in C,
+# Cython, Rust, etc.).
+#
+# When enabled, the eclass:
+#
+# - adds PYTHON_DEPS to DEPEND (for cross-compilation support), unless
+# DISTUTILS_OPTIONAL is used
+#
+# - adds `debug` flag to IUSE that controls assertions (i.e. -DNDEBUG)
+#
+# - calls `build_ext` command if setuptools build backend is used
+# and there is potential benefit from parallel builds
+
# @ECLASS_VARIABLE: DISTUTILS_OPTIONAL
# @DEFAULT_UNSET
# @DESCRIPTION:
@@ -96,7 +108,7 @@ esac
# The variable specifies the build system used. Currently,
# the following values are supported:
#
-# - flit - flit_core backend
+# - flit - flit-core backend
#
# - flit_scm - flit_scm backend
#
@@ -106,21 +118,38 @@ esac
#
# - maturin - maturin backend
#
+# - meson-python - meson-python (mesonpy) backend
+#
+# - no - no PEP517 build system (see below)
+#
# - pbr - pbr backend
#
-# - pdm - pdm.pep517 backend
+# - pdm-backend - pdm.backend backend
#
# - poetry - poetry-core backend
#
+# - scikit-build-core - scikit-build-core backend
+#
# - setuptools - distutils or setuptools (incl. legacy mode)
#
# - sip - sipbuild backend
#
# - standalone - standalone build systems without external deps
-# (used for bootstrapping).
+# (used for bootstrapping).
#
# The variable needs to be set before the inherit line. The eclass
# adds appropriate build-time dependencies and verifies the value.
+#
+# The special value "no" indicates that the package has no build system.
+# This is not equivalent to unset DISTUTILS_USE_PEP517 (legacy mode).
+# It causes the eclass not to include any build system dependencies
+# and to disable default python_compile() and python_install()
+# implementations. Baseline Python deps and phase functions will still
+# be set (depending on the value of DISTUTILS_OPTIONAL). Most of
+# the other eclass functions will work. Testing venv will be provided
+# in ${BUILD_DIR}/install after python_compile(), and if any (other)
+# files are found in ${BUILD_DIR}/install after python_install(), they
+# will be merged into ${D}.
# @ECLASS_VARIABLE: DISTUTILS_USE_SETUPTOOLS
# @DEFAULT_UNSET
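
A hypothetical ebuild header combining the new DISTUTILS_EXT knob with one of
the PEP 517 backends listed above (both must be set before the inherit):

    DISTUTILS_EXT=1
    DISTUTILS_USE_PEP517=setuptools
    PYTHON_COMPAT=( python3_{10..12} )

    inherit distutils-r1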
@@ -135,11 +164,8 @@ esac
#
# - rdepend -- add it to BDEPEND+RDEPEND (e.g. when using pkg_resources)
#
-# - pyproject.toml -- use pyproject2setuptools to install a project
-# using pyproject.toml (flit, poetry...)
-#
# - manual -- do not add the dependency and suppress the checks
-# (assumes you will take care of doing it correctly)
+# (assumes you will take care of doing it correctly)
#
# This variable is effective only if DISTUTILS_OPTIONAL is disabled.
# It is available only in non-PEP517 mode. It needs to be set before
@@ -163,10 +189,15 @@ esac
# ${DISTUTILS_DEPS}"
# @CODE
-if [[ ! ${_DISTUTILS_R1} ]]; then
+if [[ -z ${_DISTUTILS_R1_ECLASS} ]]; then
+_DISTUTILS_R1_ECLASS=1
-[[ ${EAPI} == 6 ]] && inherit eutils xdg-utils
-inherit multibuild multiprocessing toolchain-funcs
+inherit flag-o-matic
+inherit multibuild multilib multiprocessing ninja-utils toolchain-funcs
+
+if [[ ${DISTUTILS_USE_PEP517} == meson-python ]]; then
+ inherit meson
+fi
if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
inherit python-r1
@@ -174,14 +205,6 @@ else
inherit python-single-r1
fi
-fi
-
-if [[ ! ${DISTUTILS_OPTIONAL} ]]; then
- EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
-fi
-
-if [[ ! ${_DISTUTILS_R1} ]]; then
-
_distutils_set_globals() {
local rdep bdep
if [[ ${DISTUTILS_USE_PEP517} ]]; then
@@ -190,48 +213,72 @@ _distutils_set_globals() {
fi
bdep='
- >=dev-python/gpep517-3[${PYTHON_USEDEP}]'
+ >=dev-python/gpep517-15[${PYTHON_USEDEP}]
+ '
case ${DISTUTILS_USE_PEP517} in
flit)
bdep+='
- >=dev-python/flit_core-3.7.1[${PYTHON_USEDEP}]'
+ >=dev-python/flit-core-3.9.0[${PYTHON_USEDEP}]
+ '
;;
flit_scm)
bdep+='
- dev-python/flit_scm[${PYTHON_USEDEP}]'
+ >=dev-python/flit_scm-1.7.0[${PYTHON_USEDEP}]
+ '
;;
hatchling)
bdep+='
- >=dev-python/hatchling-0.22.0[${PYTHON_USEDEP}]'
+ >=dev-python/hatchling-1.21.1[${PYTHON_USEDEP}]
+ '
;;
jupyter)
bdep+='
- >=dev-python/jupyter_packaging-0.11.1[${PYTHON_USEDEP}]'
+ >=dev-python/jupyter-packaging-0.12.3[${PYTHON_USEDEP}]
+ '
;;
maturin)
bdep+='
- >=dev-util/maturin-0.12.7[${PYTHON_USEDEP}]'
+ >=dev-util/maturin-1.4.0[${PYTHON_USEDEP}]
+ '
+ ;;
+ no)
+ # undo the generic deps added above
+ bdep=
+ ;;
+ meson-python)
+ bdep+='
+ >=dev-python/meson-python-0.15.0[${PYTHON_USEDEP}]
+ '
;;
pbr)
bdep+='
- >=dev-python/pbr-5.8.0-r1[${PYTHON_USEDEP}]'
+ >=dev-python/pbr-6.0.0[${PYTHON_USEDEP}]
+ '
;;
- pdm)
+ pdm-backend)
bdep+='
- >=dev-python/pdm-pep517-0.12.3[${PYTHON_USEDEP}]'
+ >=dev-python/pdm-backend-2.1.8[${PYTHON_USEDEP}]
+ '
;;
poetry)
bdep+='
- >=dev-python/poetry-core-1.0.8[${PYTHON_USEDEP}]'
+ >=dev-python/poetry-core-1.9.0[${PYTHON_USEDEP}]
+ '
+ ;;
+ scikit-build-core)
+ bdep+='
+ >=dev-python/scikit-build-core-0.8.2[${PYTHON_USEDEP}]
+ '
;;
setuptools)
bdep+='
- >=dev-python/setuptools-60.5.0[${PYTHON_USEDEP}]
- dev-python/wheel[${PYTHON_USEDEP}]'
+ >=dev-python/setuptools-69.0.3[${PYTHON_USEDEP}]
+ '
;;
sip)
bdep+='
- >=dev-python/sip-6.5.0-r1[${PYTHON_USEDEP}]'
+ >=dev-python/sip-6.8.3[${PYTHON_USEDEP}]
+ '
;;
standalone)
;;
@@ -245,7 +292,7 @@ _distutils_set_globals() {
eqawarn "is enabled."
fi
else
- local setuptools_dep='>=dev-python/setuptools-42.0.2[${PYTHON_USEDEP}]'
+ local setuptools_dep='>=dev-python/setuptools-69.0.3[${PYTHON_USEDEP}]'
case ${DISTUTILS_USE_SETUPTOOLS:-bdepend} in
no|manual)
@@ -258,7 +305,7 @@ _distutils_set_globals() {
rdep+=" ${setuptools_dep}"
;;
pyproject.toml)
- bdep+=' >=dev-python/pyproject2setuppy-22[${PYTHON_USEDEP}]'
+ die "DISTUTILS_USE_SETUPTOOLS=pyproject.toml is no longer supported, use DISTUTILS_USE_PEP517"
;;
*)
die "Invalid DISTUTILS_USE_SETUPTOOLS=${DISTUTILS_USE_SETUPTOOLS}"
@@ -290,12 +337,16 @@ _distutils_set_globals() {
if [[ ! ${DISTUTILS_OPTIONAL} ]]; then
RDEPEND="${PYTHON_DEPS} ${rdep}"
- if [[ ${EAPI} != 6 ]]; then
- BDEPEND="${PYTHON_DEPS} ${bdep}"
- else
- DEPEND="${PYTHON_DEPS} ${bdep}"
- fi
+ BDEPEND="${PYTHON_DEPS} ${bdep}"
REQUIRED_USE=${PYTHON_REQUIRED_USE}
+
+ if [[ ${DISTUTILS_EXT} ]]; then
+ DEPEND="${PYTHON_DEPS}"
+ fi
+ fi
+
+ if [[ ${DISTUTILS_EXT} ]]; then
+ IUSE="debug"
fi
}
_distutils_set_globals
@@ -397,6 +448,15 @@ unset -f _distutils_set_globals
# An array containing options to be passed to the build system.
# Supported by a subset of build systems used by the eclass.
#
+# For maturin, the arguments will be passed as `maturin build`
+# arguments.
+#
+# For meson-python, the arguments will be passed as `meson setup`
+# arguments.
+#
+# For scikit-build-core, the arguments will be passed as `cmake`
+# options (e.g. `-DFOO=BAR` form should be used).
+#
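+# For instance, a hypothetical scikit-build-core package could disable
+# a CMake option this way (the option name is only illustrative):
+#
+# @CODE
+# DISTUTILS_ARGS=( -DBUILD_TESTING=OFF )
+# @CODE
+#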
# For setuptools, the arguments will be passed as first parameters
# to setup.py invocations (via esetup.py), as well as to the PEP517
# backend. For future compatibility, only global options should be used
@@ -425,7 +485,7 @@ unset -f _distutils_set_globals
# This helper is meant for the most common case, that is a single Sphinx
# subdirectory with standard layout, building and installing HTML docs
# behind USE=doc. It assumes it's the only consumer of the three
-# aforementioned functions. If you need to use a custom implemention,
+# aforementioned functions. If you need to use a custom implementation,
# you can't use it.
#
# If your package uses additional Sphinx plugins, they should be passed
@@ -450,7 +510,7 @@ distutils_enable_sphinx() {
_DISTUTILS_SPHINX_PLUGINS=( "${@}" )
local deps autodoc=1 d
- deps=">=dev-python/sphinx-4.4.0[\${PYTHON_USEDEP}]"
+ deps=">=dev-python/sphinx-7.2.6[\${PYTHON_USEDEP}]"
for d; do
if [[ ${d} == --no-autodoc ]]; then
autodoc=
@@ -474,7 +534,7 @@ distutils_enable_sphinx() {
use doc || return 0
local p
- for p in ">=dev-python/sphinx-4.4.0" \
+ for p in ">=dev-python/sphinx-7.2.6" \
"${_DISTUTILS_SPHINX_PLUGINS[@]}"
do
python_has_version "${p}[${PYTHON_USEDEP}]" ||
@@ -482,7 +542,7 @@ distutils_enable_sphinx() {
done
}
else
- deps=">=dev-python/sphinx-4.4.0"
+ deps=">=dev-python/sphinx-7.2.6"
fi
sphinx_compile_all() {
@@ -507,11 +567,7 @@ distutils_enable_sphinx() {
python_compile_all() { sphinx_compile_all; }
IUSE+=" doc"
- if [[ ${EAPI} == 6 ]]; then
- DEPEND+=" doc? ( ${deps} )"
- else
- BDEPEND+=" doc? ( ${deps} )"
- fi
+ BDEPEND+=" doc? ( ${deps} )"
# we need to ensure successful return in case we're called last,
# otherwise Portage may wrongly assume sourcing failed
@@ -525,8 +581,6 @@ distutils_enable_sphinx() {
# with the specified test runner. Also copies the current value
# of RDEPEND to test?-BDEPEND. The test-runner argument must be one of:
#
-# - nose: nosetests (dev-python/nose)
-#
# - pytest: dev-python/pytest
#
# - setup.py: setup.py test (no deps included)
@@ -559,18 +613,35 @@ distutils_enable_tests() {
esac
[[ ${#} -eq 1 ]] || die "${FUNCNAME} takes exactly one argument: test-runner"
- local test_pkg
+
+ local test_deps=${RDEPEND}
case ${1} in
- nose)
- test_pkg=">=dev-python/nose-1.3.7-r4"
- ;;
pytest)
- test_pkg=">=dev-python/pytest-7.0.1"
+ local test_pkgs='>=dev-python/pytest-7.4.4[${PYTHON_USEDEP}]'
+ if [[ -n ${EPYTEST_TIMEOUT} ]]; then
+ test_pkgs+=' dev-python/pytest-timeout[${PYTHON_USEDEP}]'
+ fi
+ if [[ ${EPYTEST_XDIST} ]]; then
+ test_pkgs+=' dev-python/pytest-xdist[${PYTHON_USEDEP}]'
+ fi
+
+ if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
+ test_deps+=" ${test_pkgs//'${PYTHON_USEDEP}'/${PYTHON_USEDEP}}"
+ else
+ test_deps+=" $(python_gen_cond_dep "
+ ${test_pkgs}
+ ")"
+ fi
;;
setup.py)
;;
unittest)
- test_pkg="dev-python/unittest-or-fail"
+ # unittest-or-fail is needed in py<3.12
+ local test_pkgs="$(python_gen_cond_dep '
+ dev-python/unittest-or-fail[${PYTHON_USEDEP}]
+ ' 3.10 3.11
+ )"
+ [[ -n ${test_pkgs} ]] && test_deps+=" ${test_pkgs}"
;;
*)
die "${FUNCNAME}: unsupported argument: ${1}"
@@ -579,24 +650,10 @@ distutils_enable_tests() {
_DISTUTILS_TEST_RUNNER=${1}
python_test() { distutils-r1_python_test; }
- local test_deps=${RDEPEND}
- if [[ -n ${test_pkg} ]]; then
- if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
- test_deps+=" ${test_pkg}[${PYTHON_USEDEP}]"
- else
- test_deps+=" $(python_gen_cond_dep "
- ${test_pkg}[\${PYTHON_USEDEP}]
- ")"
- fi
- fi
if [[ -n ${test_deps} ]]; then
IUSE+=" test"
RESTRICT+=" !test? ( test )"
- if [[ ${EAPI} == 6 ]]; then
- DEPEND+=" test? ( ${test_deps} )"
- else
- BDEPEND+=" test? ( ${test_deps} )"
- fi
+ BDEPEND+=" test? ( ${test_deps} )"
fi
# we need to ensure successful return in case we're called last,
@@ -611,8 +668,11 @@ distutils_enable_tests() {
# (if ${EPYTHON} is set; fallback 'python' otherwise).
#
# setup.py will be passed the following, in order:
+#
# 1. ${DISTUTILS_ARGS[@]}
+#
# 2. ${mydistutilsargs[@]} (deprecated)
+#
# 3. additional arguments passed to the esetup.py function.
#
# Please note that setup.py will respect defaults (unless overridden
@@ -630,16 +690,20 @@ esetup.py() {
fi
local setup_py=( setup.py )
- if [[ ${DISTUTILS_USE_SETUPTOOLS} == pyproject.toml ]]; then
- setup_py=( -m pyproject2setuppy )
- elif [[ ! -f setup.py ]]; then
- if [[ ! -f setup.cfg ]]; then
+ if [[ ! -f setup.py ]]; then
+ # The following call can succeed even if the package does not
+ # feature any setuptools configuration. In non-PEP517 mode this
+ # could lead to installing an "empty" package. In PEP517 mode,
+ # we verify the build system when invoking the backend,
+ # rendering this check redundant (and broken for projects using
+ # pyproject.toml configuration).
+ if [[ ! ${DISTUTILS_USE_PEP517} && ! -f setup.cfg ]]; then
die "${FUNCNAME}: setup.py nor setup.cfg not found"
fi
setup_py=( -c "from setuptools import setup; setup()" )
fi
- if [[ ${EAPI} != [67] && ${mydistutilsargs[@]} ]]; then
+ if [[ ${EAPI} != 7 && ${mydistutilsargs[@]} ]]; then
die "mydistutilsargs is banned in EAPI ${EAPI} (use DISTUTILS_ARGS)"
fi
@@ -690,8 +754,8 @@ esetup.py() {
# to unmerge the package first.
#
# This function is not available in PEP517 mode. The eclass provides
-# a venv-style install unconditionally therefore, and therefore it
-# should no longer be necessary.
+# a venv-style install unconditionally and therefore it should no longer
+# be necessary.
distutils_install_for_testing() {
debug-print-function ${FUNCNAME} "${@}"
@@ -711,7 +775,7 @@ distutils_install_for_testing() {
local install_method=root
case ${1} in
--via-home)
- [[ ${EAPI} == [67] ]] || die "${*} is banned in EAPI ${EAPI}"
+ [[ ${EAPI} == 7 ]] || die "${*} is banned in EAPI ${EAPI}"
install_method=home
shift
;;
@@ -789,7 +853,7 @@ distutils_install_for_testing() {
distutils_write_namespace() {
debug-print-function ${FUNCNAME} "${@}"
- if [[ ! ${DISTUTILS_USE_PEP517} ]]; then
+ if [[ ${DISTUTILS_USE_PEP517:-no} == no ]]; then
die "${FUNCNAME} is available only in PEP517 mode"
fi
if [[ ${EBUILD_PHASE} != test || ! ${BUILD_DIR} ]]; then
@@ -835,7 +899,8 @@ _distutils-r1_disable_ez_setup() {
# @FUNCTION: _distutils-r1_handle_pyproject_toml
# @INTERNAL
# @DESCRIPTION:
-# Generate setup.py for pyproject.toml if requested.
+# Verify whether DISTUTILS_USE_SETUPTOOLS is set correctly
+# for pyproject.toml build systems (in non-PEP517 mode).
_distutils-r1_handle_pyproject_toml() {
if [[ ${DISTUTILS_USE_PEP517} ]]; then
die "${FUNCNAME} is not implemented in PEP517 mode"
@@ -844,12 +909,10 @@ _distutils-r1_handle_pyproject_toml() {
[[ ${DISTUTILS_USE_SETUPTOOLS} == manual ]] && return
if [[ ! -f setup.py && -f pyproject.toml ]]; then
- if [[ ${DISTUTILS_USE_SETUPTOOLS} != pyproject.toml ]]; then
- eerror "No setup.py found but pyproject.toml is present. In order to enable"
- eerror "pyproject.toml support in distutils-r1, set:"
- eerror " DISTUTILS_USE_SETUPTOOLS=pyproject.toml"
- die "No setup.py found and DISTUTILS_USE_SETUPTOOLS!=pyproject.toml"
- fi
+ eerror "No setup.py found but pyproject.toml is present. Please migrate"
+ eerror "the package to use DISTUTILS_USE_PEP517. See:"
+ eerror " https://projects.gentoo.org/python/guide/distutils.html"
+ die "No setup.py found and PEP517 mode not enabled"
fi
}
@@ -863,7 +926,7 @@ _distutils-r1_check_all_phase_mismatch() {
eqawarn "QA Notice: distutils-r1_python_${EBUILD_PHASE}_all called"
eqawarn "from python_${EBUILD_PHASE}. Did you mean to use"
eqawarn "python_${EBUILD_PHASE}_all()?"
- [[ ${EAPI} != [67] ]] &&
+ [[ ${EAPI} != 7 ]] &&
die "distutils-r1_python_${EBUILD_PHASE}_all called from python_${EBUILD_PHASE}."
fi
}
@@ -881,29 +944,36 @@ _distutils-r1_print_package_versions() {
dev-python/gpep517
dev-python/installer
)
+ if [[ ${DISTUTILS_EXT} ]]; then
+ packages+=(
+ dev-python/cython
+ )
+ fi
case ${DISTUTILS_USE_PEP517} in
flit)
packages+=(
- dev-python/flit_core
+ dev-python/flit-core
)
;;
flit_scm)
packages+=(
- dev-python/flit_core
+ dev-python/flit-core
dev-python/flit_scm
- dev-python/setuptools_scm
+ dev-python/setuptools-scm
)
;;
hatchling)
packages+=(
dev-python/hatchling
+ dev-python/hatch-fancy-pypi-readme
+ dev-python/hatch-vcs
)
;;
jupyter)
packages+=(
- dev-python/jupyter_packaging
+ dev-python/jupyter-packaging
dev-python/setuptools
- dev-python/setuptools_scm
+ dev-python/setuptools-scm
dev-python/wheel
)
;;
@@ -912,6 +982,14 @@ _distutils-r1_print_package_versions() {
dev-util/maturin
)
;;
+ no)
+ return
+ ;;
+ meson-python)
+ packages+=(
+ dev-python/meson-python
+ )
+ ;;
pbr)
packages+=(
dev-python/pbr
@@ -919,9 +997,9 @@ _distutils-r1_print_package_versions() {
dev-python/wheel
)
;;
- pdm)
+ pdm-backend)
packages+=(
- dev-python/pdm-pep517
+ dev-python/pdm-backend
dev-python/setuptools
)
;;
@@ -930,10 +1008,16 @@ _distutils-r1_print_package_versions() {
dev-python/poetry-core
)
;;
+ scikit-build-core)
+ packages+=(
+ dev-python/scikit-build-core
+ )
+ ;;
setuptools)
packages+=(
dev-python/setuptools
- dev-python/setuptools_scm
+ dev-python/setuptools-rust
+ dev-python/setuptools-scm
dev-python/wheel
)
;;
@@ -959,7 +1043,7 @@ _distutils-r1_print_package_versions() {
local pkg
einfo "Build system packages:"
for pkg in "${packages[@]}"; do
- local installed=$(best_version "${pkg}")
+ local installed=$(best_version -b "${pkg}")
einfo " $(printf '%-30s' "${pkg}"): ${installed#${pkg}-}"
done
}
@@ -1008,7 +1092,6 @@ distutils-r1_python_prepare_all() {
fi
python_export_utf8_locale
- [[ ${EAPI} == 6 ]] && xdg_environment_reset # Bug 577704
_distutils-r1_print_package_versions
_DISTUTILS_DEFAULT_CALLED=1
@@ -1114,15 +1197,21 @@ _distutils-r1_backend_to_key() {
maturin)
echo maturin
;;
+ mesonpy)
+ echo meson-python
+ ;;
pbr.build)
echo pbr
;;
- pdm.pep517.api)
- echo pdm
+ pdm.backend|pdm.pep517.api)
+ echo pdm-backend
;;
poetry.core.masonry.api|poetry.masonry.api)
echo poetry
;;
+ scikit_build_core.build)
+ echo scikit-build-core
+ ;;
setuptools.build_meta|setuptools.build_meta:__legacy__)
echo setuptools
;;
@@ -1143,17 +1232,18 @@ _distutils-r1_backend_to_key() {
_distutils-r1_get_backend() {
debug-print-function ${FUNCNAME} "${@}"
- local build_backend
+ local build_backend legacy_fallback
if [[ -f pyproject.toml ]]; then
# if pyproject.toml exists, try getting the backend from it
# NB: this could fail if pyproject.toml doesn't list one
- build_backend=$(gpep517 get-backend)
+ build_backend=$("${EPYTHON}" -m gpep517 get-backend)
fi
if [[ -z ${build_backend} && ${DISTUTILS_USE_PEP517} == setuptools &&
-f setup.py ]]
then
# use the legacy setuptools backend as a fallback
build_backend=setuptools.build_meta:__legacy__
+ legacy_fallback=1
fi
if [[ -z ${build_backend} ]]; then
die "Unable to obtain build-backend from pyproject.toml"
@@ -1176,15 +1266,23 @@ _distutils-r1_get_backend() {
flit.buildapi)
new_backend=flit_core.buildapi
;;
+ pdm.pep517.api)
+ new_backend=pdm.backend
+ ;;
poetry.masonry.api)
new_backend=poetry.core.masonry.api
;;
+ setuptools.build_meta:__legacy__)
+ # this backend should only be used as implicit fallback
+ [[ ! ${legacy_fallback} ]] &&
+ new_backend=setuptools.build_meta
+ ;;
esac
if [[ -n ${new_backend} ]]; then
if [[ ! -f ${T}/.distutils_deprecated_backend_warned ]]; then
eqawarn "${build_backend} backend is deprecated. Please see:"
- eqawarn "https://projects.gentoo.org/python/guide/distutils.html#deprecated-pep-517-backends"
+ eqawarn "https://projects.gentoo.org/python/guide/qawarn.html#deprecated-pep-517-backends"
eqawarn "The eclass will be using ${new_backend} instead."
> "${T}"/.distutils_deprecated_backend_warned || die
fi
@@ -1195,6 +1293,56 @@ _distutils-r1_get_backend() {
echo "${build_backend}"
}
+# @FUNCTION: distutils_wheel_install
+# @USAGE: <root> <wheel>
+# @DESCRIPTION:
+# Install the specified wheel into <root>.
+#
+# This function is intended for expert use only.
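+#
+# A hypothetical invocation (the wheel filename is illustrative):
+#
+# @CODE
+# distutils_wheel_install "${BUILD_DIR}/install" \
+#     "${WHEEL_BUILD_DIR}"/example-1.0-py3-none-any.whl
+# @CODE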
+distutils_wheel_install() {
+ debug-print-function ${FUNCNAME} "${@}"
+ if [[ ${#} -ne 2 ]]; then
+ die "${FUNCNAME} takes exactly two arguments: <root> <wheel>"
+ fi
+ if [[ -z ${PYTHON} ]]; then
+ die "PYTHON unset, invalid call context"
+ fi
+
+ local root=${1}
+ local wheel=${2}
+
+ einfo " Installing ${wheel##*/} to ${root}"
+ local cmd=(
+ "${EPYTHON}" -m gpep517 install-wheel
+ --destdir="${root}"
+ --interpreter="${PYTHON}"
+ --prefix="${EPREFIX}/usr"
+ --optimize=all
+ "${wheel}"
+ )
+ printf '%s\n' "${cmd[*]}"
+ "${cmd[@]}" || die "Wheel install failed"
+
+ # remove installed licenses and other junk
+ find "${root}$(python_get_sitedir)" -depth \
+ \( -ipath '*.dist-info/AUTHORS*' \
+ -o -ipath '*.dist-info/CHANGELOG*' \
+ -o -ipath '*.dist-info/CODE_OF_CONDUCT*' \
+ -o -ipath '*.dist-info/COPYING*' \
+ -o -ipath '*.dist-info/*LICEN[CS]E*' \
+ -o -ipath '*.dist-info/NOTICE*' \
+ -o -ipath '*.dist-info/*Apache*' \
+ -o -ipath '*.dist-info/*GPL*' \
+ -o -ipath '*.dist-info/*MIT*' \
+ -o -path '*.dist-info/RECORD' \
+ -o -path '*.dist-info/license_files/*' \
+ -o -path '*.dist-info/license_files' \
+ -o -path '*.dist-info/licenses/*' \
+ -o -path '*.dist-info/licenses' \
+ -o -path '*.dist-info/zip-safe' \
+ \) -delete || die
+}
+
# @FUNCTION: distutils_pep517_install
# @USAGE: <root>
# @DESCRIPTION:
@@ -1207,7 +1355,12 @@ distutils_pep517_install() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 1 ]] || die "${FUNCNAME} takes exactly one argument: root"
+ if [[ ${DISTUTILS_USE_PEP517:-no} == no ]]; then
+ die "${FUNCNAME} is available only in PEP517 mode"
+ fi
+
local root=${1}
+ export BUILD_DIR
local -x WHEEL_BUILD_DIR=${BUILD_DIR}/wheel
mkdir -p "${WHEEL_BUILD_DIR}" || die
@@ -1216,18 +1369,108 @@ distutils_pep517_install() {
fi
local config_settings=
- if [[ -n ${DISTUTILS_ARGS[@]} ]]; then
- case ${DISTUTILS_USE_PEP517} in
- setuptools)
+ case ${DISTUTILS_USE_PEP517} in
+ maturin)
+ # `maturin pep517 build-wheel --help` for options
+ local maturin_args=(
+ "${DISTUTILS_ARGS[@]}"
+ --jobs="$(makeopts_jobs)"
+ --skip-auditwheel # see bug #831171
+ $(in_iuse debug && usex debug '--profile=dev' '')
+ )
+
+ config_settings=$(
+ "${EPYTHON}" - "${maturin_args[@]}" <<-EOF || die
+ import json
+ import sys
+ print(json.dumps({"build-args": sys.argv[1:]}))
+ EOF
+ )
+ ;;
+ meson-python)
+ # variables defined by setup_meson_src_configure
+ local MESONARGS=() BOOST_INCLUDEDIR BOOST_LIBRARYDIR NM READELF
+ # it also calls filter-lto
+ local x
+ for x in $(all-flag-vars); do
+ local -x "${x}=${!x}"
+ done
+
+ setup_meson_src_configure "${DISTUTILS_ARGS[@]}"
+
+ local -x NINJAOPTS=$(get_NINJAOPTS)
+ config_settings=$(
+ "${EPYTHON}" - "${MESONARGS[@]}" <<-EOF || die
+ import json
+ import os
+ import shlex
+ import sys
+
+ ninjaopts = shlex.split(os.environ["NINJAOPTS"])
+ print(json.dumps({
+ "builddir": "${BUILD_DIR}",
+ "setup-args": sys.argv[1:],
+ "compile-args": ["-v"] + ninjaopts,
+ }))
+ EOF
+ )
+ ;;
+ scikit-build-core)
+ # TODO: split out the config/toolchain logic from cmake.eclass
+ # for now, we copy the most important bits
+ local CMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE:-RelWithDebInfo}
+ cat >> "${BUILD_DIR}"/config.cmake <<- _EOF_ || die
+ set(CMAKE_ASM_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
+ set(CMAKE_ASM-ATT_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
+ set(CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
+ set(CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
+ set(CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
+ set(CMAKE_EXE_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
+ set(CMAKE_MODULE_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
+ set(CMAKE_SHARED_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
+ set(CMAKE_STATIC_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
+ _EOF_
+
+ # hack around CMake ignoring CPPFLAGS
+ local -x CFLAGS="${CFLAGS} ${CPPFLAGS}"
+ local -x CXXFLAGS="${CXXFLAGS} ${CPPFLAGS}"
+
+ local cmake_args=(
+ "-C${BUILD_DIR}/config.cmake"
+ "${DISTUTILS_ARGS[@]}"
+ )
+
+ # NB: we need to pass strings for boolean fields
+ # https://github.com/scikit-build/scikit-build-core/issues/707
+ config_settings=$(
+ "${EPYTHON}" - "${cmake_args[@]}" <<-EOF || die
+ import json
+ import sys
+ print(json.dumps({
+ "cmake.args": ";".join(sys.argv[1:]),
+ "cmake.build-type": "${CMAKE_BUILD_TYPE}",
+ "cmake.verbose": "true",
+ "install.strip": "false",
+ }))
+ EOF
+ )
+ ;;
+ setuptools)
+ if in_iuse debug && use debug; then
+ local -x SETUPTOOLS_RUST_CARGO_PROFILE=dev
+ fi
+ if [[ -n ${DISTUTILS_ARGS[@]} ]]; then
config_settings=$(
"${EPYTHON}" - "${DISTUTILS_ARGS[@]}" <<-EOF || die
import json
import sys
- print(json.dumps({"--global-option": sys.argv[1:]}))
+ print(json.dumps({"--build-option": sys.argv[1:]}))
EOF
)
- ;;
- sip)
+ fi
+ ;;
+ sip)
+ if [[ -n ${DISTUTILS_ARGS[@]} ]]; then
# NB: for practical reasons, we support only --foo=bar,
# not --foo bar
local arg
@@ -1250,144 +1493,82 @@ distutils_pep517_install() {
print(json.dumps(args))
EOF
)
- ;;
- *)
+ fi
+ ;;
+ *)
+ [[ -n ${DISTUTILS_ARGS[@]} ]] &&
die "DISTUTILS_ARGS are not supported by ${DISTUTILS_USE_PEP517}"
- ;;
- esac
- fi
+ ;;
+ esac
local build_backend=$(_distutils-r1_get_backend)
einfo " Building the wheel for ${PWD#${WORKDIR}/} via ${build_backend}"
- local config_args=()
- [[ -n ${config_settings} ]] &&
- config_args+=( --config-json "${config_settings}" )
+ local cmd=(
+ "${EPYTHON}" -m gpep517 build-wheel
+ --prefix="${EPREFIX}/usr"
+ --backend "${build_backend}"
+ --output-fd 3
+ --wheel-dir "${WHEEL_BUILD_DIR}"
+ )
+ if [[ -n ${config_settings} ]]; then
+ cmd+=( --config-json "${config_settings}" )
+ fi
+ if [[ -n ${SYSROOT} ]]; then
+ cmd+=( --sysroot "${SYSROOT}" )
+ fi
+ printf '%s\n' "${cmd[*]}"
local wheel=$(
- gpep517 build-wheel --backend "${build_backend}" \
- --output-fd 3 \
- --wheel-dir "${WHEEL_BUILD_DIR}" \
- "${config_args[@]}" 3>&1 >&2 ||
- die "Wheel build failed"
+ "${cmd[@]}" 3>&1 >&2 || die "Wheel build failed"
)
[[ -n ${wheel} ]] || die "No wheel name returned"
- einfo " Installing the wheel to ${root}"
- gpep517 install-wheel --destdir="${root}" --interpreter="${PYTHON}" \
- --prefix="${EPREFIX}/usr" "${WHEEL_BUILD_DIR}/${wheel}" ||
- die "Wheel install failed"
-
- # remove installed licenses
- find "${root}$(python_get_sitedir)" -depth \
- \( -path '*.dist-info/COPYING*' \
- -o -path '*.dist-info/LICENSE*' \
- -o -path '*.dist-info/license_files/*' \
- -o -path '*.dist-info/license_files' \
- \) -delete || die
-
- # clean the build tree; otherwise we may end up with PyPy3
- # extensions duplicated into CPython dists
- if [[ ${DISTUTILS_USE_PEP517:-setuptools} == setuptools ]]; then
- rm -rf build || die
- fi
+ distutils_wheel_install "${root}" "${WHEEL_BUILD_DIR}/${wheel}"
}
# @FUNCTION: distutils-r1_python_compile
# @USAGE: [additional-args...]
# @DESCRIPTION:
-# The default python_compile(). Runs 'esetup.py build'. Any parameters
-# passed to this function will be appended to setup.py invocation,
-# i.e. passed as options to the 'build' command.
+# The default python_compile().
#
-# This phase also sets up initial setup.cfg with build directories
-# and copies upstream egg-info files if supplied.
+# If DISTUTILS_USE_PEP517 is set to "no", a no-op.
+#
+# If DISTUTILS_USE_PEP517 is set to any other value, builds a wheel
+# using the PEP517 backend and installs it into ${BUILD_DIR}/install.
+#
+# In legacy mode, runs 'esetup.py build'. Any parameters passed to this
+# function will be appended to setup.py invocation, i.e. passed
+# as options to the 'build' command.
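+#
+# A hypothetical legacy-mode override appending an extra 'build' option
+# (the option shown is only a placeholder):
+#
+# @CODE
+# python_compile() {
+#     distutils-r1_python_compile --some-build-option
+# }
+# @CODE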
distutils-r1_python_compile() {
debug-print-function ${FUNCNAME} "${@}"
_python_check_EPYTHON
- case ${DISTUTILS_USE_PEP517:-setuptools} in
- setuptools)
- # call setup.py build when using setuptools (either via PEP517
- # or in legacy mode)
-
- if [[ ${DISTUTILS_USE_PEP517} ]]; then
- if [[ -d build ]]; then
- eqawarn "A 'build' directory exists already. Artifacts from this directory may"
- eqawarn "be picked up by setuptools when building for another interpreter."
- eqawarn "Please remove this directory prior to building."
- fi
- else
- _distutils-r1_copy_egg_info
- fi
-
- # distutils is parallel-capable since py3.5
- local jobs=$(makeopts_jobs "${MAKEOPTS} ${*}")
-
- if [[ ${DISTUTILS_USE_PEP517} ]]; then
- # issue build_ext only if it looks like we have at least
- # two source files to build; setuptools is expensive
- # to start and parallel builds can only benefit us if we're
- # compiling at least two files
- #
- # see extension.py for list of suffixes
- # .pyx is added for Cython
- if [[ 1 -ne ${jobs} && 2 -eq $(
- find '(' -name '*.c' -o -name '*.cc' -o -name '*.cpp' \
- -o -name '*.cxx' -o -name '*.c++' -o -name '*.m' \
- -o -name '*.mm' -o -name '*.pyx' ')' -printf '\n' |
- head -n 2 | wc -l
- ) ]]; then
- esetup.py build_ext -j "${jobs}" "${@}"
- fi
- else
- esetup.py build -j "${jobs}" "${@}"
- fi
+ case ${DISTUTILS_USE_PEP517:-unset} in
+ no)
+ return
;;
- maturin)
- # auditwheel may attempt to auto-bundle libraries, bug #831171
- local -x MATURIN_PEP517_ARGS=--skip-auditwheel
-
- # support cargo.eclass' IUSE=debug if available
- in_iuse debug && use debug &&
- MATURIN_PEP517_ARGS+=" --cargo-extra-args=--profile=dev"
+ unset)
+ # legacy mode
+ _distutils-r1_copy_egg_info
+ esetup.py build -j "$(makeopts_jobs "${MAKEOPTS} ${*}")" "${@}"
+ ;;
+ *)
+ # we do this for all build systems, since other backends
+ # and custom hooks may wrap setuptools
+ mkdir -p "${BUILD_DIR}" || die
+ local -x DIST_EXTRA_CONFIG="${BUILD_DIR}/extra-setup.cfg"
+ cat > "${DIST_EXTRA_CONFIG}" <<-EOF || die
+ [build]
+ build_base = ${BUILD_DIR}/build
+
+ [build_ext]
+ parallel = $(makeopts_jobs "${MAKEOPTS} ${*}")
+ EOF
;;
esac
if [[ ${DISTUTILS_USE_PEP517} ]]; then
- # python likes to compile any module it sees, which triggers sandbox
- # failures if some packages haven't compiled their modules yet.
- addpredict "${EPREFIX}/usr/lib/${EPYTHON}"
- addpredict /usr/lib/pypy3.8
- addpredict /usr/lib/portage/pym
- addpredict /usr/local # bug 498232
-
- local root=${BUILD_DIR}/install
- distutils_pep517_install "${root}"
-
- # copy executables to python-exec directory
- # we do it early so that we can alter bindir recklessly
- local bindir=${root}${EPREFIX}/usr/bin
- local rscriptdir=${root}$(python_get_scriptdir)
- [[ -d ${rscriptdir} ]] &&
- die "${rscriptdir} should not exist!"
- if [[ -d ${bindir} ]]; then
- mkdir -p "${rscriptdir}" || die
- cp -a --reflink=auto "${bindir}"/. "${rscriptdir}"/ || die
- fi
-
- # enable venv magic inside the install tree
- mkdir -p "${bindir}" || die
- ln -s "${PYTHON}" "${bindir}/${EPYTHON}" || die
- ln -s "${EPYTHON}" "${bindir}/python3" || die
- ln -s "${EPYTHON}" "${bindir}/python" || die
- cat > "${bindir}"/pyvenv.cfg <<-EOF || die
- include-system-site-packages = true
- EOF
-
- # we need to change shebangs to point to the venv-python
- find "${bindir}" -type f -exec sed -i \
- -e "1s@^#!\(${EPREFIX}/usr/bin/\(python\|pypy\)\)@#!${root}\1@" \
- {} + || die
+ distutils_pep517_install "${BUILD_DIR}/install"
fi
}
@@ -1428,7 +1609,7 @@ _distutils-r1_wrap_scripts() {
debug-print "${FUNCNAME}: installing wrapper at ${bindir}/${basename}"
local dosym=dosym
- [[ ${EAPI} == [67] ]] && dosym=dosym8
+ [[ ${EAPI} == 7 ]] && dosym=dosym8
"${dosym}" -r /usr/lib/python-exec/python-exec2 \
"${bindir#${EPREFIX}}/${basename}"
done
@@ -1464,9 +1645,6 @@ distutils-r1_python_test() {
fi
case ${_DISTUTILS_TEST_RUNNER} in
- nose)
- "${EPYTHON}" -m nose -v "${@}"
- ;;
pytest)
epytest
;;
@@ -1482,36 +1660,72 @@ distutils-r1_python_test() {
esac
if [[ ${?} -ne 0 ]]; then
- die "Tests failed with ${EPYTHON}"
+ die -n "Tests failed with ${EPYTHON}"
fi
}
# @FUNCTION: distutils-r1_python_install
# @USAGE: [additional-args...]
# @DESCRIPTION:
-# The default python_install(). Runs 'esetup.py install', doing
-# intermediate root install and handling script wrapping afterwards.
+# The default python_install().
+#
+# In PEP517 mode, merges the files from ${BUILD_DIR}/install
+# (if present) to the image directory.
+#
+# In the legacy mode, calls `esetup.py install` to install the package.
# Any parameters passed to this function will be appended
# to the setup.py invocation (i.e. as options to the 'install' command).
-#
-# This phase updates the setup.cfg file with install directories.
distutils-r1_python_install() {
debug-print-function ${FUNCNAME} "${@}"
_python_check_EPYTHON
local scriptdir=${EPREFIX}/usr/bin
+ local merge_root=
if [[ ${DISTUTILS_USE_PEP517} ]]; then
local root=${BUILD_DIR}/install
+ local reg_scriptdir=${root}/${scriptdir}
+ local wrapped_scriptdir=${root}$(python_get_scriptdir)
+
+ # we are assuming that _distutils-r1_post_python_compile
+ # has been called and ${root} has not been altered since
+ # let's explicitly verify these assumptions
+
+ # remove files that we've created explicitly
+ rm "${reg_scriptdir}"/{"${EPYTHON}",python3,python,pyvenv.cfg} || die
+
+ # Automagically do the QA check to avoid issues when bootstrapping
+ # prefix.
+ if type diff &>/dev/null ; then
+ # verify that scriptdir & wrapped_scriptdir both contain
+ # the same files
+ (
+ cd "${reg_scriptdir}" && find . -mindepth 1
+ ) | sort > "${T}"/.distutils-files-bin
+ assert "listing ${reg_scriptdir} failed"
+ (
+ if [[ -d ${wrapped_scriptdir} ]]; then
+ cd "${wrapped_scriptdir}" && find . -mindepth 1
+ fi
+ ) | sort > "${T}"/.distutils-files-wrapped
+ assert "listing ${wrapped_scriptdir} failed"
+ if ! diff -U 0 "${T}"/.distutils-files-{bin,wrapped}; then
+ die "File lists for ${reg_scriptdir} and ${wrapped_scriptdir} differ (see diff above)"
+ fi
+ fi
+
# remove the altered bindir, executables from the package
# are already in scriptdir
- rm -r "${root}${scriptdir}" || die
+ rm -r "${reg_scriptdir}" || die
if [[ ${DISTUTILS_SINGLE_IMPL} ]]; then
- local wrapped_scriptdir=${root}$(python_get_scriptdir)
if [[ -d ${wrapped_scriptdir} ]]; then
- mv "${wrapped_scriptdir}" "${root}${scriptdir}" || die
+ mv "${wrapped_scriptdir}" "${reg_scriptdir}" || die
fi
fi
+ # prune empty directories to see if ${root} contains anything
+ # to merge
+ find "${BUILD_DIR}"/install -type d -empty -delete || die
+ [[ -d ${BUILD_DIR}/install ]] && merge_root=1
else
local root=${D%/}/_${EPYTHON}
[[ ${DISTUTILS_SINGLE_IMPL} ]] && root=${D%/}
@@ -1533,11 +1747,12 @@ distutils-r1_python_install() {
# python likes to compile any module it sees, which triggers sandbox
# failures if some packages haven't compiled their modules yet.
addpredict "${EPREFIX}/usr/lib/${EPYTHON}"
- addpredict /usr/lib/pypy3.8
- addpredict /usr/lib/portage/pym
- addpredict /usr/local # bug 498232
+ addpredict "${EPREFIX}/usr/lib/pypy3.10"
+ addpredict "${EPREFIX}/usr/local" # bug 498232
if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
+ merge_root=1
+
# user may override --install-scripts
# note: this is poor but distutils argv parsing is dumb
@@ -1566,36 +1781,8 @@ distutils-r1_python_install() {
esetup.py "${args[@]}"
fi
- local forbidden_package_names=(
- examples test tests
- .pytest_cache .hypothesis
- )
- local p
- for p in "${forbidden_package_names[@]}"; do
- if [[ -d ${root}$(python_get_sitedir)/${p} ]]; then
- die "Package installs '${p}' package which is forbidden and likely a bug in the build system."
- fi
- done
-
- local shopt_save=$(shopt -p nullglob)
- shopt -s nullglob
- local pypy_dirs=(
- "${root}${EPREFIX}/usr/$(get_libdir)"/pypy*/share
- "${root}${EPREFIX}/usr/lib"/pypy*/share
- )
- ${shopt_save}
-
- if [[ -n ${pypy_dirs} ]]; then
- die "Package installs 'share' in PyPy prefix, see bug #465546."
- fi
-
- if [[ ! ${DISTUTILS_SINGLE_IMPL} || ${DISTUTILS_USE_PEP517} ]]; then
+ if [[ ${merge_root} ]]; then
multibuild_merge_root "${root}" "${D%/}"
- if [[ ${DISTUTILS_USE_PEP517} ]]; then
- # we need to recompile everything here in order to embed
- # the correct paths
- python_optimize "${D%/}$(python_get_sitedir)"
- fi
fi
if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
_distutils-r1_wrap_scripts "${scriptdir}"
@@ -1629,6 +1816,8 @@ distutils-r1_run_phase() {
debug-print-function ${FUNCNAME} "${@}"
if [[ ${DISTUTILS_IN_SOURCE_BUILD} ]]; then
+ [[ ${DISTUTILS_USE_PEP517} ]] &&
+ die "DISTUTILS_IN_SOURCE_BUILD is not supported in PEP517 mode"
# only force BUILD_DIR if implementation is explicitly enabled
# for building; any-r1 API may select one that is not
# https://bugs.gentoo.org/701506
@@ -1649,9 +1838,11 @@ distutils-r1_run_phase() {
# and _all() already localizes it
local -x PATH=${PATH}
- # Undo the default switch in setuptools-60+ for the time being,
- # to avoid replacing .egg-info file with directory in-place.
- local -x SETUPTOOLS_USE_DISTUTILS="${SETUPTOOLS_USE_DISTUTILS:-stdlib}"
+ if _python_impl_matches "${EPYTHON}" 3.{9..11}; then
+ # Undo the default switch in setuptools-60+ for the time being,
+ # to avoid replacing .egg-info file with directory in-place.
+ local -x SETUPTOOLS_USE_DISTUTILS="${SETUPTOOLS_USE_DISTUTILS:-stdlib}"
+ fi
# Bug 559644
# using PYTHONPATH when the ${BUILD_DIR}/lib is not created yet might lead to
@@ -1666,11 +1857,36 @@ distutils-r1_run_phase() {
local -x AR=${AR} CC=${CC} CPP=${CPP} CXX=${CXX}
tc-export AR CC CPP CXX
+ # Perform additional environment modifications only for python_compile
+ # phase. This is the only phase where we expect to be calling the Python
+ # build system. We want to localize the altered variables to avoid them
+ # leaking to other parts of multi-language ebuilds. However, we want
+ # to avoid localizing them in other phases, particularly
+ # python_configure_all, where the ebuild may wish to alter them globally.
+ if [[ ${DISTUTILS_EXT} && ( ${1} == *compile* || ${1} == *test* ) ]]; then
+ local -x CPPFLAGS="${CPPFLAGS} $(usex debug '-UNDEBUG' '-DNDEBUG')"
+ # always generate .c files from .pyx files to ensure we get latest
+ # bug fixes from Cython (this works only when setup.py is using
+ # cythonize() but it's better than nothing)
+ local -x CYTHON_FORCE_REGEN=1
+
+ # Rust extensions are incompatible with C/C++ LTO compiler
+ # see e.g. https://bugs.gentoo.org/910220
+ if has cargo ${INHERITED}; then
+ local x
+ for x in $(all-flag-vars); do
+ local -x "${x}=${!x}"
+ done
+ filter-lto
+ fi
+ fi
+
+ # silence warnings when pydevd is loaded on Python 3.11+
+ local -x PYDEVD_DISABLE_FILE_VALIDATION=1
+
# How to build Python modules in different worlds...
local ldopts
case "${CHOST}" in
- # provided by haubi, 2014-07-08
- *-aix*) ldopts='-shared -Wl,-berok';; # good enough
# provided by grobian, 2014-06-22, bug #513664 c7
*-darwin*) ldopts='-bundle -undefined dynamic_lookup';;
*) ldopts='-shared';;
@@ -1687,6 +1903,11 @@ distutils-r1_run_phase() {
fi
cd "${_DISTUTILS_INITIAL_CWD}" || die
+ if [[ ! ${_DISTUTILS_IN_COMMON_IMPL} ]] &&
+ declare -f "_distutils-r1_post_python_${EBUILD_PHASE}" >/dev/null
+ then
+ "_distutils-r1_post_python_${EBUILD_PHASE}"
+ fi
return "${ret}"
}
@@ -1701,6 +1922,7 @@ distutils-r1_run_phase() {
# of sources made for the selected Python interpreter.
_distutils-r1_run_common_phase() {
local DISTUTILS_ORIG_BUILD_DIR=${BUILD_DIR}
+ local _DISTUTILS_IN_COMMON_IMPL=1
if [[ ${DISTUTILS_SINGLE_IMPL} ]]; then
# reuse the dedicated code branch
@@ -1731,6 +1953,7 @@ _distutils-r1_run_foreach_impl() {
set -- distutils-r1_run_phase "${@}"
if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
+ local _DISTUTILS_CALLING_FOREACH_IMPL=1
python_foreach_impl "${@}"
else
if [[ ! ${EPYTHON} ]]; then
@@ -1781,6 +2004,43 @@ distutils-r1_src_configure() {
return ${ret}
}
+# @FUNCTION: _distutils-r1_post_python_compile
+# @INTERNAL
+# @DESCRIPTION:
+# Post-phase function called after python_compile. In PEP517 mode,
+# it adjusts the install tree for venv-style usage.
+_distutils-r1_post_python_compile() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local root=${BUILD_DIR}/install
+ if [[ ${DISTUTILS_USE_PEP517} && -d ${root} ]]; then
+ # copy executables to python-exec directory
+ # we do it early so that we can alter bindir recklessly
+ local bindir=${root}${EPREFIX}/usr/bin
+ local rscriptdir=${root}$(python_get_scriptdir)
+ [[ -d ${rscriptdir} ]] &&
+ die "${rscriptdir} should not exist!"
+ if [[ -d ${bindir} ]]; then
+ mkdir -p "${rscriptdir}" || die
+ cp -a "${bindir}"/. "${rscriptdir}"/ || die
+ fi
+
+ # enable venv magic inside the install tree
+ mkdir -p "${bindir}" || die
+ ln -s "${PYTHON}" "${bindir}/${EPYTHON}" || die
+ ln -s "${EPYTHON}" "${bindir}/python3" || die
+ ln -s "${EPYTHON}" "${bindir}/python" || die
+ cat > "${bindir}"/pyvenv.cfg <<-EOF || die
+ include-system-site-packages = true
+ EOF
+
+ # we need to change shebangs to point to the venv-python
+ find "${bindir}" -type f -exec sed -i \
+ -e "1s@^#!\(${EPREFIX}/usr/bin/\(python\|pypy\)\)@#!${root}\1@" \
+ {} + || die
+ fi
+}
+
distutils-r1_src_compile() {
debug-print-function ${FUNCNAME} "${@}"
local ret=0
@@ -1812,15 +2072,24 @@ _distutils-r1_clean_egg_info() {
rm -rf "${BUILD_DIR}"/lib/*.egg-info || die
}
+# @FUNCTION: _distutils-r1_post_python_test
+# @INTERNAL
+# @DESCRIPTION:
+# Post-phase function called after python_test.
+_distutils-r1_post_python_test() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ if [[ ! ${DISTUTILS_USE_PEP517} ]]; then
+ _distutils-r1_clean_egg_info
+ fi
+}
+
distutils-r1_src_test() {
debug-print-function ${FUNCNAME} "${@}"
local ret=0
if declare -f python_test >/dev/null; then
_distutils-r1_run_foreach_impl python_test || ret=${?}
- if [[ ! ${DISTUTILS_USE_PEP517} ]]; then
- _distutils-r1_run_foreach_impl _distutils-r1_clean_egg_info
- fi
fi
if declare -f python_test_all >/dev/null; then
@@ -1830,6 +2099,93 @@ distutils-r1_src_test() {
return ${ret}
}
+# @FUNCTION: _distutils-r1_strip_namespace_packages
+# @USAGE: <sitedir>
+# @INTERNAL
+# @DESCRIPTION:
+# Find and remove setuptools-style namespaces in the specified
+# directory.
+_distutils-r1_strip_namespace_packages() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local sitedir=${1}
+ local f ns had_any=
+ while IFS= read -r -d '' f; do
+ while read -r ns; do
+ einfo "Stripping pkg_resources-style namespace ${ns}"
+ had_any=1
+ done < "${f}"
+
+ rm "${f}" || die
+ done < <(
+ # NB: this deliberately does not include .egg-info, in order
+ # to limit this to PEP517 mode.
+ find "${sitedir}" -path '*.dist-info/namespace_packages.txt' -print0
+ )
+
+ # If we had any namespace packages, remove .pth files as well.
+ if [[ ${had_any} ]]; then
+ find "${sitedir}" -name '*-nspkg.pth' -delete || die
+ fi
+}
+
+# @FUNCTION: _distutils-r1_post_python_install
+# @INTERNAL
+# @DESCRIPTION:
+# Post-phase function called after python_install. Performs QA checks.
+# In PEP517 mode, additionally optimizes installed Python modules.
+_distutils-r1_post_python_install() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local sitedir=${D%/}$(python_get_sitedir)
+ if [[ -d ${sitedir} ]]; then
+ _distutils-r1_strip_namespace_packages "${sitedir}"
+
+ local forbidden_package_names=(
+ examples test tests
+ .pytest_cache .hypothesis _trial_temp
+ )
+ local strays=()
+ local p
+ mapfile -d $'\0' -t strays < <(
+ find "${sitedir}" -maxdepth 1 -type f '!' '(' \
+ -name '*.egg-info' -o \
+ -name '*.pth' -o \
+ -name '*.py' -o \
+ -name '*.pyi' -o \
+ -name "*$(get_modname)" \
+ ')' -print0
+ )
+ for p in "${forbidden_package_names[@]}"; do
+ [[ -d ${sitedir}/${p} ]] && strays+=( "${sitedir}/${p}" )
+ done
+
+ if [[ -n ${strays[@]} ]]; then
+ eerror "The following unexpected files/directories were found top-level"
+ eerror "in the site-packages directory:"
+ eerror
+ for p in "${strays[@]}"; do
+ eerror " ${p#${ED}}"
+ done
+ eerror
+ eerror "This is most likely a bug in the build system. More information"
+ eerror "can be found in the Python Guide:"
+ eerror "https://projects.gentoo.org/python/guide/qawarn.html#stray-top-level-files-in-site-packages"
+ die "Failing install because of stray top-level files in site-packages"
+ fi
+
+ if [[ ! ${DISTUTILS_EXT} && ! ${_DISTUTILS_EXT_WARNED} ]]; then
+ if [[ $(find "${sitedir}" -name "*$(get_modname)" | head -n 1) ]]
+ then
+ eqawarn "Python extension modules (*$(get_modname)) found installed. Please set:"
+ eqawarn " DISTUTILS_EXT=1"
+ eqawarn "in the ebuild."
+ _DISTUTILS_EXT_WARNED=1
+ fi
+ fi
+ fi
+}
+
# @FUNCTION: _distutils-r1_check_namespace_pth
# @INTERNAL
# @DESCRIPTION:
@@ -1843,17 +2199,19 @@ _distutils-r1_check_namespace_pth() {
done < <(find "${ED%/}" -name '*-nspkg.pth' -print0)
if [[ ${pth[@]} ]]; then
- ewarn "The following *-nspkg.pth files were found installed:"
- ewarn
+ eerror "The following *-nspkg.pth files were found installed:"
+ eerror
for f in "${pth[@]}"; do
- ewarn " ${f#${ED%/}}"
+ eerror " ${f#${ED%/}}"
done
- ewarn
- ewarn "The presence of those files may break namespaces in Python 3.5+. Please"
- ewarn "read our documentation on reliable handling of namespaces and update"
- ewarn "the ebuild accordingly:"
- ewarn
- ewarn " https://projects.gentoo.org/python/guide/concept.html#namespace-packages"
+ eerror
+ eerror "The presence of those files may break namespaces in Python 3.5+. Please"
+ eerror "read our documentation on reliable handling of namespaces and update"
+ eerror "the ebuild accordingly:"
+ eerror
+ eerror " https://projects.gentoo.org/python/guide/concept.html#namespace-packages"
+
+ die "Installing *-nspkg.pth files is banned"
fi
}
@@ -1878,5 +2236,8 @@ distutils-r1_src_install() {
return ${ret}
}
-_DISTUTILS_R1=1
+fi
+
+if [[ ! ${DISTUTILS_OPTIONAL} ]]; then
+ EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
fi
diff --git a/eclass/docs.eclass b/eclass/docs.eclass
index 611485c227ff..edb3c8661ca2 100644
--- a/eclass/docs.eclass
+++ b/eclass/docs.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: docs.eclass
@@ -7,7 +7,7 @@
# @AUTHOR:
# Author: Andrew Ammerlaan <andrewammerlaan@gentoo.org>
# Based on the work of: Michał Górny <mgorny@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: A simple eclass to build documentation.
# @DESCRIPTION:
# A simple eclass providing basic functions and variables to build
@@ -57,15 +57,9 @@
# ...
# @CODE
-case "${EAPI:-0}" in
- 0|1|2|3|4|5)
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- 6|7|8)
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
# @ECLASS_VARIABLE: DOCS_BUILDER
@@ -143,13 +137,23 @@ esac
#
# Defaults to Doxyfile for doxygen
-if [[ ! ${_DOCS} ]]; then
+# @ECLASS_VARIABLE: DOCS_INITIALIZE_GIT
+# @DEFAULT_UNSET
+# @PRE_INHERIT
+# @DESCRIPTION:
+# Sometimes building the documentation will fail if this is not done
+# inside a git repository. If this variable is set the compile functions
+# will initialize a dummy git repository before compiling. A dependency
+# on dev-vcs/git is automatically added.
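+#
+# A hypothetical example for a package whose documentation build expects
+# a git checkout (the directory name is illustrative):
+#
+# @CODE
+# DOCS_BUILDER="sphinx"
+# DOCS_DIR="docs"
+# DOCS_INITIALIZE_GIT=1
+#
+# inherit docs
+# @CODE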
+
+if [[ -z ${_DOCS_ECLASS} ]]; then
+_DOCS_ECLASS=1
# For the python based DOCS_BUILDERS we need to inherit any python eclass
case ${DOCS_BUILDER} in
"sphinx"|"mkdocs")
# We need the python_gen_any_dep function
- if [[ ! ${_PYTHON_R1} && ! ${_PYTHON_ANY_R1} && ! ${_PYTHON_SINGLE_R1} ]]; then
+ if [[ ! ${_PYTHON_R1_ECLASS} && ! ${_PYTHON_ANY_R1_ECLASS} && ! ${_PYTHON_SINGLE_R1_ECLASS} ]]; then
die "distutils-r1, python-r1, python-single-r1 or python-any-r1 needs to be inherited to use python based documentation builders"
fi
;;
@@ -164,21 +168,54 @@ case ${DOCS_BUILDER} in
;;
esac
-# @FUNCTION: python_append_deps
+# @FUNCTION: initialize_git_repo
+# @DESCRIPTION:
+# Initializes a dummy git repository. This function is called by the
+# documentation compile functions if DOCS_INITIALIZE_GIT is set. It can
+# also be called manually.
+initialize_git_repo() {
+ # Only initialize if we are not already in a git repository
+ local git_is_initialized="$(git rev-parse --is-inside-work-tree 2> /dev/null)"
+ if [[ ! "${git_is_initialized}" ]]; then
+ git init -q || die
+ git config --global user.email "larry@gentoo.org" || die
+ git config --global user.name "Larry the Cow" || die
+ git add . || die
+ git commit -qm "init" || die
+ git tag -a "${PV}" -m "${PN} version ${PV}" || die
+ fi
+}
+
+# @FUNCTION: _docs_set_python_deps
# @INTERNAL
# @DESCRIPTION:
-# Appends [\${PYTHON_USEDEP}] to all dependencies
-# for python based DOCS_BUILDERs such as mkdocs or
-# sphinx.
-python_append_deps() {
+# Add python_gen_any_dep or python_gen_cond_dep
+# to DOCS_DEPEND and define python_check_deps
+_docs_set_python_deps() {
debug-print-function ${FUNCNAME}
- local temp
+ local deps=${@}
+ python_check_deps() {
+ use doc || return 0
+
+ local dep
+ for dep in ${deps[@]}; do
+ python_has_version "${dep}[${PYTHON_USEDEP}]" ||
+ return 1
+ done
+ }
+
+ local deps_appended
local dep
- for dep in ${DOCS_DEPEND[@]}; do
- temp+=" ${dep}[\${PYTHON_USEDEP}]"
+ for dep in ${deps[@]}; do
+ deps_appended+=" ${dep}[\${PYTHON_USEDEP}]"
done
- DOCS_DEPEND=${temp}
+
+ if [[ ${_PYTHON_SINGLE_R1_ECLASS} ]]; then
+ DOCS_DEPEND=$(python_gen_cond_dep "${deps_appended}")
+ else
+ DOCS_DEPEND=$(python_gen_any_dep "${deps_appended}")
+ fi
}
# @FUNCTION: sphinx_deps
@@ -188,10 +225,10 @@ python_append_deps() {
sphinx_deps() {
debug-print-function ${FUNCNAME}
- : ${DOCS_AUTODOC:=1}
+ : "${DOCS_AUTODOC:=1}"
- deps="dev-python/sphinx[\${PYTHON_USEDEP}]
- ${DOCS_DEPEND}"
+ deps="dev-python/sphinx
+ ${DOCS_DEPEND}"
if [[ ${DOCS_AUTODOC} == 0 ]]; then
if [[ -n "${DOCS_DEPEND}" ]]; then
die "${FUNCNAME}: do not set DOCS_AUTODOC to 0 if external plugins are used"
@@ -199,24 +236,22 @@ sphinx_deps() {
elif [[ ${DOCS_AUTODOC} != 0 && ${DOCS_AUTODOC} != 1 ]]; then
die "${FUNCNAME}: DOCS_AUTODOC should be set to 0 or 1"
fi
- if [[ ${_PYTHON_SINGLE_R1} ]]; then
- DOCS_DEPEND="$(python_gen_cond_dep "${deps}")"
- else
- DOCS_DEPEND="$(python_gen_any_dep "${deps}")"
- fi
+
+ _docs_set_python_deps ${deps}
}
# @FUNCTION: sphinx_compile
-# @INTERNAL
# @DESCRIPTION:
# Calls sphinx to build docs.
-#
-# If you overwrite python_compile_all do not call
-# this function, call docs_compile instead
sphinx_compile() {
debug-print-function ${FUNCNAME}
use doc || return
+ : "${DOCS_DIR:="${S}"}"
+ : "${DOCS_OUTDIR:="${S}/_build/html/sphinx"}"
+
+ [[ ${DOCS_INITIALIZE_GIT} ]] && initialize_git_repo
+
local confpy=${DOCS_DIR}/conf.py
[[ -f ${confpy} ]] ||
die "${FUNCNAME}: ${confpy} not found, DOCS_DIR=${DOCS_DIR} call wrong"
@@ -234,8 +269,24 @@ sphinx_compile() {
sed -i -e 's:^intersphinx_mapping:disabled_&:' \
"${DOCS_DIR}"/conf.py || die
# not all packages include the Makefile in pypi tarball
- sphinx-build -b html -d "${DOCS_OUTDIR}"/_build/doctrees "${DOCS_DIR}" \
- "${DOCS_OUTDIR}" || die "${FUNCNAME}: sphinx-build failed"
+ local command=( "${EPYTHON}" -m sphinx.cmd.build )
+ if ! "${EPYTHON}" -c "import sphinx.cmd.build" 2>/dev/null; then
+ command=( sphinx-build )
+ fi
+ command+=(
+ -b html
+ -d "${DOCS_OUTDIR}"/_build/doctrees
+ "${DOCS_DIR}"
+ "${DOCS_OUTDIR}"
+ )
+ echo "${command[@]}" >&2
+ "${command[@]}" || die "${FUNCNAME}: sphinx-build failed"
+
+ HTML_DOCS+=( "${DOCS_OUTDIR}" )
+
+ # We don't need these any more, unset them in case we want to call a
+ # second documentation builder.
+ unset DOCS_DIR DOCS_OUTDIR
}
# @FUNCTION: mkdocs_deps
@@ -245,46 +296,58 @@ sphinx_compile() {
mkdocs_deps() {
debug-print-function ${FUNCNAME}
- : ${DOCS_AUTODOC:=0}
+ : "${DOCS_AUTODOC:=0}"
- deps="dev-python/mkdocs[\${PYTHON_USEDEP}]
- ${DOCS_DEPEND}"
+ deps="dev-python/mkdocs
+ ${DOCS_DEPEND}"
if [[ ${DOCS_AUTODOC} == 1 ]]; then
- deps="dev-python/mkautodoc[\${PYTHON_USEDEP}]
+ deps="dev-python/mkautodoc
${deps}"
elif [[ ${DOCS_AUTODOC} != 0 && ${DOCS_AUTODOC} != 1 ]]; then
die "${FUNCNAME}: DOCS_AUTODOC should be set to 0 or 1"
fi
- if [[ ${_PYTHON_SINGLE_R1} ]]; then
- DOCS_DEPEND="$(python_gen_cond_dep "${deps}")"
- else
- DOCS_DEPEND="$(python_gen_any_dep "${deps}")"
- fi
+
+ _docs_set_python_deps ${deps}
}
# @FUNCTION: mkdocs_compile
-# @INTERNAL
# @DESCRIPTION:
# Calls mkdocs to build docs.
-#
-# If you overwrite python_compile_all do not call
-# this function, call docs_compile instead
mkdocs_compile() {
debug-print-function ${FUNCNAME}
use doc || return
+ : "${DOCS_DIR:="${S}"}"
+ : "${DOCS_OUTDIR:="${S}/_build/html/mkdocs"}"
+
+ [[ ${DOCS_INITIALIZE_GIT} ]] && initialize_git_repo
+
local mkdocsyml=${DOCS_DIR}/mkdocs.yml
[[ -f ${mkdocsyml} ]] ||
die "${FUNCNAME}: ${mkdocsyml} not found, DOCS_DIR=${DOCS_DIR} wrong"
- pushd "${DOCS_DIR}" || die
- mkdocs build -d "${DOCS_OUTDIR}" || die "${FUNCNAME}: mkdocs build failed"
- popd || die
+ pushd "${DOCS_DIR}" >/dev/null || die
+ local command=( "${EPYTHON}" -m mkdocs build )
+ if ! "${EPYTHON}" -c "import mkdocs" 2>/dev/null; then
+ command=( mkdocs build )
+ fi
+ command+=(
+ -d "${DOCS_OUTDIR}"
+ )
+ echo "${command[@]}" >&2
+ "${command[@]}" || die "${FUNCNAME}: mkdocs build failed"
+ popd >/dev/null || die
# remove generated .gz variants
# mkdocs currently has no option to disable this
# and portage complains: "Colliding files found by ecompress"
rm "${DOCS_OUTDIR}"/*.gz || die
+
+ HTML_DOCS+=( "${DOCS_OUTDIR}" )
+
+ # We don't need these any more, unset them in case we want to call a
+ # second documentation builder.
+ unset DOCS_DIR DOCS_OUTDIR
}
# @FUNCTION: doxygen_deps
@@ -294,19 +357,23 @@ mkdocs_compile() {
doxygen_deps() {
debug-print-function ${FUNCNAME}
- DOCS_DEPEND="app-doc/doxygen
+ DOCS_DEPEND="app-text/doxygen
${DOCS_DEPEND}"
}
# @FUNCTION: doxygen_compile
-# @INTERNAL
# @DESCRIPTION:
# Calls doxygen to build docs.
doxygen_compile() {
debug-print-function ${FUNCNAME}
use doc || return
- : ${DOCS_CONFIG_NAME:="Doxyfile"}
+ # This is the default name of the config file, upstream can change it.
+ : "${DOCS_CONFIG_NAME:="Doxyfile"}"
+ : "${DOCS_DIR:="${S}"}"
+ : "${DOCS_OUTDIR:="${S}/_build/html/doxygen"}"
+
+ [[ ${DOCS_INITIALIZE_GIT} ]] && initialize_git_repo
local doxyfile=${DOCS_DIR}/${DOCS_CONFIG_NAME}
[[ -f ${doxyfile} ]] ||
@@ -318,6 +385,12 @@ doxygen_compile() {
pushd "${DOCS_DIR}" || die
(cat "${DOCS_CONFIG_NAME}" ; echo "HTML_OUTPUT=${DOCS_OUTDIR}") | doxygen - || die "${FUNCNAME}: doxygen failed"
popd || die
+
+ HTML_DOCS+=( "${DOCS_OUTDIR}" )
+
+ # We don't need these any more, unset them in case we want to call a
+ # second documentation builder.
+ unset DOCS_DIR DOCS_OUTDIR DOCS_CONFIG_NAME
}
# @FUNCTION: docs_compile
@@ -343,16 +416,8 @@ docs_compile() {
debug-print-function ${FUNCNAME}
use doc || return
- # Set a sensible default as DOCS_DIR
- : ${DOCS_DIR:="${S}"}
-
- # Where to put the compiled files?
- : ${DOCS_OUTDIR:="${S}/_build/html"}
-
${DOCS_BUILDER}_compile
- HTML_DOCS+=( "${DOCS_OUTDIR}/." )
-
# we need to ensure successful return in case we're called last,
# otherwise Portage may wrongly assume sourcing failed
return 0
@@ -366,11 +431,9 @@ IUSE+=" doc"
# Call the correct setup function
case ${DOCS_BUILDER} in
"sphinx")
- python_append_deps
sphinx_deps
;;
"mkdocs")
- python_append_deps
mkdocs_deps
;;
"doxygen")
@@ -378,19 +441,16 @@ case ${DOCS_BUILDER} in
;;
esac
-if [[ ${EAPI} != 6 ]]; then
- BDEPEND+=" doc? ( ${DOCS_DEPEND} )"
-else
- DEPEND+=" doc? ( ${DOCS_DEPEND} )"
-fi
+[[ ${DOCS_INITIALIZE_GIT} ]] && DOCS_DEPEND+=" dev-vcs/git "
+
+BDEPEND+=" doc? ( ${DOCS_DEPEND} )"
# If this is a python package using distutils-r1
# then put the compile function in the specific
# python function, else docs_compile should be manually
# added to src_compile
-if [[ ${_DISTUTILS_R1} && ( ${DOCS_BUILDER}="mkdocs" || ${DOCS_BUILDER}="sphinx" ) ]]; then
+if [[ ${_DISTUTILS_R1_ECLASS} && ( ${DOCS_BUILDER} == "mkdocs" || ${DOCS_BUILDER} == "sphinx" ) ]]; then
python_compile_all() { docs_compile; }
fi
-_DOCS=1
fi
diff --git a/eclass/dotnet-pkg-base.eclass b/eclass/dotnet-pkg-base.eclass
new file mode 100644
index 000000000000..a2d95f15a2fb
--- /dev/null
+++ b/eclass/dotnet-pkg-base.eclass
@@ -0,0 +1,654 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: dotnet-pkg-base.eclass
+# @MAINTAINER:
+# Gentoo Dotnet project <dotnet@gentoo.org>
+# @AUTHOR:
+# Anna Figueiredo Gomes <navi@vlhl.dev>
+# Maciej Barć <xgqt@gentoo.org>
+# @SUPPORTED_EAPIS: 8
+# @PROVIDES: nuget
+# @BLURB: common functions and variables for builds using .NET SDK
+# @DESCRIPTION:
+# This eclass is designed to provide required ebuild definitions for .NET
+# packages. Beware that in addition to Gentoo-specific concepts also terms that
+# should be known to people familiar with the .NET ecosystem are used through
+# this one and similar eclasses.
+#
+# In ebuilds for software that only utilizes the .NET SDK, without special
+# cases, the "dotnet-pkg.eclass" is probably better suited.
+#
+# This eclass does not export any phase functions, for that see
+# the "dotnet-pkg" eclass.
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_DOTNET_PKG_BASE_ECLASS} ]] ; then
+_DOTNET_PKG_BASE_ECLASS=1
+
+inherit edo multiprocessing nuget
+
+# @ECLASS_VARIABLE: DOTNET_PKG_COMPAT
+# @REQUIRED
+# @PRE_INHERIT
+# @DESCRIPTION:
+# Allows choosing a slot for dotnet.
+#
+# Most .NET packages will lock onto one supported .NET major version.
+# DOTNET_PKG_COMPAT should specify which version was chosen by package upstream.
+# In case multiple .NET versions are specified in the project, then the highest
+# should be picked by the maintainer.
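+#
+# For example (the slot value is only illustrative):
+#
+# @CODE
+# DOTNET_PKG_COMPAT=8.0
+#
+# inherit dotnet-pkg
+# @CODE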
+
+# @ECLASS_VARIABLE: DOTNET_PKG_RDEPS
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# Populated with important dependencies on .NET ecosystem packages for running
+# .NET packages.
+#
+# "DOTNET_PKG_RDEPS" should appear (or conditionally appear) in "RDEPEND".
+DOTNET_PKG_RDEPS=""
+
+# @ECLASS_VARIABLE: DOTNET_PKG_BDEPS
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# Populated with important dependencies on .NET ecosystem packages for building
+# .NET packages.
+#
+# "DOTNET_PKG_BDEPS" should appear (or conditionally appear) in "BDEPEND".
+DOTNET_PKG_BDEPS=""
+
+# Have this guard to be sure that *DEPS are not added to
+# the "dev-dotnet/dotnet-runtime-nugets" package dependencies.
+if [[ ${CATEGORY}/${PN} != dev-dotnet/dotnet-runtime-nugets ]] ; then
+ if [[ -z ${DOTNET_PKG_COMPAT} ]] ; then
+ die "${ECLASS}: DOTNET_PKG_COMPAT not set"
+ fi
+
+ DOTNET_PKG_RDEPS+="
+ virtual/dotnet-sdk:${DOTNET_PKG_COMPAT}
+ "
+ DOTNET_PKG_BDEPS+="
+ ${DOTNET_PKG_RDEPS}
+ "
+
+ # Special package "dev-dotnet/csharp-gentoodotnetinfo" used for information
+ # gathering, example for usage see the "dotnet-pkg-base_info" function.
+ if [[ ${CATEGORY}/${PN} != dev-dotnet/csharp-gentoodotnetinfo ]] ; then
+ DOTNET_PKG_BDEPS+="
+ dev-dotnet/csharp-gentoodotnetinfo
+ "
+ fi
+
+ IUSE+=" debug "
+fi
+
+# Needed otherwise the binaries may break.
+RESTRICT+=" strip "
+
+# Everything is built by "dotnet".
+QA_PREBUILT=".*"
+
+# Special .NET SDK environment variables.
+# Setting them either prevents annoying information from being generated
+# or stops services that may interfere with a clean package build.
+export DOTNET_CLI_TELEMETRY_OPTOUT=1
+export DOTNET_NOLOGO=1
+export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
+export MSBUILDDISABLENODEREUSE=1
+export POWERSHELL_TELEMETRY_OPTOUT=1
+export POWERSHELL_UPDATECHECK=0
+# Overwrite selected MSBuild properties ("-p:XYZ").
+export UseSharedCompilation=false
+
+# @ECLASS_VARIABLE: DOTNET_PKG_RUNTIME
+# @DEFAULT_UNSET
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# Sets the runtime used to build a package.
+#
+# This variable is set automatically by the "dotnet-pkg-base_setup" function.
+
+# @ECLASS_VARIABLE: DOTNET_PKG_EXECUTABLE
+# @DEFAULT_UNSET
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# Sets path of a "dotnet" executable.
+#
+# This variable is set automatically by the "dotnet-pkg-base_setup" function.
+
+# @ECLASS_VARIABLE: DOTNET_PKG_CONFIGURATION
+# @DEFAULT_UNSET
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# Configuration value passed to "dotnet" in the compile phase.
+# Is either Debug or Release, depending on the "debug" USE flag.
+#
+# This variable is set automatically by the "dotnet-pkg-base_setup" function.
+
+# @ECLASS_VARIABLE: DOTNET_PKG_OUTPUT
+# @DEFAULT_UNSET
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# Path of the output directory, where the package artifacts are placed during
+# the building of packages with "dotnet-pkg-base_build" function.
+#
+# This variable is set automatically by the "dotnet-pkg-base_setup" function.
+
+# @VARIABLE: _DOTNET_PKG_LAUNCHERDEST
+# @INTERNAL
+# @DESCRIPTION:
+# Sets the path that .NET launchers are installed into by
+# the "dotnet-pkg-base_dolauncher" function.
+#
+# The function "dotnet-pkg-base_launcherinto" is able to manipulate this
+# variable.
+#
+# Defaults to "/usr/bin".
+_DOTNET_PKG_LAUNCHERDEST=/usr/bin
+
+# @VARIABLE: _DOTNET_PKG_LAUNCHERVARS
+# @INTERNAL
+# @DESCRIPTION:
+# Sets additional variables for .NET launchers created by
+# the "dotnet-pkg-base_dolauncher" function.
+#
+# The function "dotnet-pkg-base_append_launchervar" is able to manipulate this
+# variable.
+#
+# Defaults to an empty array.
+_DOTNET_PKG_LAUNCHERVARS=()
+
+# @FUNCTION: dotnet-pkg-base_get-configuration
+# @DESCRIPTION:
+# Return .NET configuration type of the current package.
+#
+# If the configuration value is needed, it is advised to refer to the
+# "DOTNET_PKG_CONFIGURATION" variable instead of calling this function.
+#
+# Used by "dotnet-pkg-base_setup".
+dotnet-pkg-base_get-configuration() {
+ if in_iuse debug && use debug ; then
+ echo Debug
+ else
+ echo Release
+ fi
+}
+
+# @FUNCTION: dotnet-pkg-base_get-output
+# @USAGE: <name>
+# @DESCRIPTION:
+# Return a specially constructed name of a directory for output of
+# "dotnet build" artifacts ("--output" flag, see "dotnet-pkg-base_build").
+#
+# It is very rare that a maintainer would use this function in an ebuild.
+#
+# This function is used inside "dotnet-pkg-base_setup".
+dotnet-pkg-base_get-output() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ [[ -z ${DOTNET_PKG_CONFIGURATION} ]] &&
+ die "${FUNCNAME[0]}: DOTNET_PKG_CONFIGURATION is not set."
+
+ echo "${WORKDIR}/${1}_net${DOTNET_PKG_COMPAT}_${DOTNET_PKG_CONFIGURATION}"
+}
+
+# @FUNCTION: dotnet-pkg-base_get-runtime
+# @DESCRIPTION:
+# Return the .NET runtime used for the current package.
+#
+# Used by "dotnet-pkg-base_setup".
+dotnet-pkg-base_get-runtime() {
+ local libc
+ libc="$(usex elibc_musl "-musl" "")"
+
+ if use amd64 ; then
+ echo "linux${libc}-x64"
+ elif use x86 ; then
+ echo "linux${libc}-x86"
+ elif use arm ; then
+ echo "linux${libc}-arm"
+ elif use arm64 ; then
+ echo "linux${libc}-arm64"
+ else
+ die "${FUNCNAME[0]}: Unsupported architecture: ${ARCH}"
+ fi
+}
+
+# @FUNCTION: dotnet-pkg-base_setup
+# @DESCRIPTION:
+# Sets up "DOTNET_PKG_EXECUTABLE" variable for later use in "edotnet".
+# Also sets up "DOTNET_PKG_CONFIGURATION" and "DOTNET_PKG_OUTPUT"
+# for "dotnet-pkg_src_configure" and "dotnet-pkg_src_compile".
+#
+# This function should be called from "pkg_setup".
+#
+# Used by "dotnet-pkg_pkg_setup" from the "dotnet-pkg" eclass.
+dotnet-pkg-base_setup() {
+ local -a impl_dirs=(
+ "${EPREFIX}/usr/$(get_libdir)/dotnet-sdk-${DOTNET_PKG_COMPAT}"
+ "${EPREFIX}/opt/dotnet-sdk-bin-${DOTNET_PKG_COMPAT}"
+ )
+ local impl_exe
+
+ local impl_dir
+ for impl_dir in "${impl_dirs[@]}" ; do
+ impl_exe="${impl_dir}/dotnet"
+
+ if [[ -d "${impl_dir}" ]] && [[ -x "${impl_exe}" ]] ; then
+ DOTNET_PKG_EXECUTABLE="${impl_exe}"
+ DOTNET_ROOT="${impl_dir}"
+
+ break
+ fi
+ done
+
+ einfo "Setting .NET SDK \"DOTNET_ROOT\" to \"${DOTNET_ROOT}\""
+ export DOTNET_ROOT
+ export PATH="${DOTNET_ROOT}:${PATH}"
+
+ DOTNET_PKG_RUNTIME="$(dotnet-pkg-base_get-runtime)"
+ DOTNET_PKG_CONFIGURATION="$(dotnet-pkg-base_get-configuration)"
+ DOTNET_PKG_OUTPUT="$(dotnet-pkg-base_get-output "${P}")"
+}
+
+# @FUNCTION: dotnet-pkg-base_remove-global-json
+# @USAGE: [directory]
+# @DESCRIPTION:
+# Remove the "global.json" if it exists.
+# The file in question might lock target package to a specified .NET
+# version, which might be unnecessary (as it is in most cases).
+#
+# Optional "directory" argument defaults to the current directory path.
+#
+# Used by "dotnet-pkg_src_prepare" from the "dotnet-pkg" eclass.
+dotnet-pkg-base_remove-global-json() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ local file="${1:-.}"/global.json
+
+ if [[ -f "${file}" ]] ; then
+ ebegin "Removing the global.json file"
+ rm "${file}"
+ eend ${?} || die "${FUNCNAME[0]}: failed to remove ${file}"
+ fi
+}
+
+# @FUNCTION: edotnet
+# @USAGE: <command> [args...]
+# @DESCRIPTION:
+# Call dotnet, passing the supplied arguments.
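+#
+# Example (illustrative):
+# @CODE
+# edotnet clean
+# @CODE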
+edotnet() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ if [[ -z ${DOTNET_PKG_EXECUTABLE} ]] ; then
+ die "${FUNCNAME[0]}: DOTNET_PKG_EXECUTABLE not set. Was dotnet-pkg-base_setup called?"
+ fi
+
+ edo "${DOTNET_PKG_EXECUTABLE}" "${@}"
+}
+
+# @FUNCTION: dotnet-pkg-base_info
+# @DESCRIPTION:
+# Show information about current .NET SDK that is being used.
+#
+# Depends upon the "gentoo-dotnet-info" program installed by
+# the "dev-dotnet/csharp-gentoodotnetinfo" package.
+#
+# Used by "dotnet-pkg_src_configure" from the "dotnet-pkg" eclass.
+dotnet-pkg-base_info() {
+ if [[ ${CATEGORY}/${PN} == dev-dotnet/csharp-gentoodotnetinfo ]] ; then
+ debug-print-function "${FUNCNAME[0]}: ${P} is a special package, skipping dotnet-pkg-base_info"
+ elif command -v gentoo-dotnet-info >/dev/null ; then
+ gentoo-dotnet-info || die "${FUNCNAME[0]}: failed to execute gentoo-dotnet-info"
+ else
+ ewarn "${FUNCNAME[0]}: gentoo-dotnet-info not available"
+ fi
+}
+
+# @FUNCTION: dotnet-pkg-base_sln-remove
+# @USAGE: <solution> <project>
+# @DESCRIPTION:
+# Remove a project from a given solution file.
+#
+# Used by "dotnet-pkg_remove-bad" from the "dotnet-pkg" eclass.
+dotnet-pkg-base_sln-remove() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ [[ -z ${1} ]] && die "${FUNCNAME[0]}: no solution file specified"
+ [[ -z ${2} ]] && die "${FUNCNAME[0]}: no project file specified"
+
+ edotnet sln "${1}" remove "${2}"
+}
+
+# @FUNCTION: dotnet-pkg-base_foreach-solution
+# @USAGE: <directory> <command> [args] ...
+# @DESCRIPTION:
+# Execute a function for each solution file (.sln) in a specified directory.
+# Because solutions are discovered automatically, this function may execute
+# nothing if no solution files are found in the given directory.
+#
+# Used by "dotnet-pkg_src_configure" and "dotnet-pkg_src_test" from
+# the "dotnet-pkg" eclass.
+dotnet-pkg-base_foreach-solution() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ local directory="${1}"
+ shift
+
+ local dotnet_solution
+ local dotnet_solution_name
+ while read -r dotnet_solution ; do
+ dotnet_solution_name="$(basename "${dotnet_solution}")"
+
+ ebegin "Running \"${@}\" for solution: \"${dotnet_solution_name}\""
+ "${@}" "${dotnet_solution}"
+ eend $? "${FUNCNAME[0]}: failed for solution: \"${dotnet_solution}\"" || die
+ done < <(find "${directory}" -maxdepth 1 -type f -name "*.sln")
+}
+
+# @FUNCTION: dotnet-pkg-base_restore
+# @USAGE: [args] ...
+# @DESCRIPTION:
+# Restore the package using "dotnet restore".
+# The restore is performed in the current directory unless a different
+# directory is passed via "args".
+#
+# Additionally, any number of "args" may be given; they are appended to
+# the "dotnet" command invocation.
+#
+# Used by "dotnet-pkg_src_configure" from the "dotnet-pkg" eclass.
+dotnet-pkg-base_restore() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ local -a restore_args=(
+ --runtime "${DOTNET_PKG_RUNTIME}"
+ --source "${NUGET_PACKAGES}"
+ -maxCpuCount:$(makeopts_jobs)
+ "${@}"
+ )
+
+ edotnet restore "${restore_args[@]}"
+}
+
+# @FUNCTION: dotnet-pkg-base_restore-tools
+# @USAGE: [config-file] [args] ...
+# @DESCRIPTION:
+# Restore dotnet tools for a project in the current directory.
+#
+# Optional "config-file" argument is used to specify a file for the
+# "--configfile" option which records what tools should be restored.
+#
+# Additionally, any number of "args" may be given; they are appended to
+# the "dotnet" command invocation.
+dotnet-pkg-base_restore-tools() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ local -a tool_restore_args=(
+ --add-source "${NUGET_PACKAGES}"
+ )
+
+ if [[ -n "${1}" ]] ; then
+ tool_restore_args+=( --configfile "${1}" )
+ shift
+ fi
+
+ tool_restore_args+=( "${@}" )
+
+ edotnet tool restore "${tool_restore_args[@]}"
+}
+
+# @FUNCTION: dotnet-pkg-base_restore_tools
+# @USAGE: [config-file] [args] ...
+# @DESCRIPTION:
+# DEPRECATED, use "dotnet-pkg-base_restore-tools" instead.
+dotnet-pkg-base_restore_tools() {
+ dotnet-pkg-base_restore-tools "${@}"
+}
+
+# @FUNCTION: dotnet-pkg-base_build
+# @USAGE: [args] ...
+# @DESCRIPTION:
+# Build the package using "dotnet build" in a specified directory.
+# The build is performed in the current directory unless a different
+# directory is passed via "args".
+#
+# Any number of "args" may be given; they are appended to the "dotnet"
+# command invocation.
+#
+# Used by "dotnet-pkg_src_compile" from the "dotnet-pkg" eclass.
+dotnet-pkg-base_build() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ local -a build_args=(
+ --configuration "${DOTNET_PKG_CONFIGURATION}"
+ --no-restore
+ --no-self-contained
+ --output "${DOTNET_PKG_OUTPUT}"
+ --runtime "${DOTNET_PKG_RUNTIME}"
+ -maxCpuCount:$(makeopts_jobs)
+ )
+
+ if ! use debug ; then
+ build_args+=(
+ -p:StripSymbols=true
+ -p:NativeDebugSymbols=false
+ )
+ fi
+
+ # And append "args" at the end.
+ build_args+=(
+ "${@}"
+ )
+
+ edotnet build "${build_args[@]}"
+}
+
+# @FUNCTION: dotnet-pkg-base_test
+# @USAGE: [args] ...
+# @DESCRIPTION:
+# Test the package using "dotnet test" in a specified directory.
+# Tests are run in the current directory unless a different directory is
+# passed via "args".
+#
+# Any number of "args" may be given; they are appended to the "dotnet"
+# command invocation.
+#
+# Used by "dotnet-pkg_src_test" from the "dotnet-pkg" eclass.
+dotnet-pkg-base_test() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ local -a test_args=(
+ --configuration "${DOTNET_PKG_CONFIGURATION}"
+ --no-restore
+ -maxCpuCount:$(makeopts_jobs)
+ "${@}"
+ )
+
+ edotnet test "${test_args[@]}"
+}
+
+# @FUNCTION: dotnet-pkg-base_install
+# @USAGE: [directory]
+# @DESCRIPTION:
+# Install the contents of "DOTNET_PKG_OUTPUT" into a given directory,
+# which defaults to "/usr/share/${P}".
+#
+# Installation directory is relative to "ED".
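+#
+# Example (install into "/usr/share/${PN}" instead of the default):
+# @CODE
+# dotnet-pkg-base_install /usr/share/${PN}
+# @CODE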
+dotnet-pkg-base_install() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ local installation_directory="${1:-/usr/share/${P}}"
+
+ dodir "${installation_directory}"
+ cp -r "${DOTNET_PKG_OUTPUT}"/* "${ED}/${installation_directory}/" || die
+}
+
+# @FUNCTION: dotnet-pkg-base_launcherinto
+# @USAGE: <directory>
+# @DESCRIPTION:
+# Changes the path .NET launchers are installed into via subsequent
+# "dotnet-pkg-base_dolauncher" calls.
+#
+# For more info see the "_DOTNET_PKG_LAUNCHERDEST" variable.
+dotnet-pkg-base_launcherinto() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ [[ -z ${1} ]] && die "${FUNCNAME[0]}: no directory specified"
+
+ _DOTNET_PKG_LAUNCHERDEST="${1}"
+}
+
+# @FUNCTION: dotnet-pkg-base_append-launchervar
+# @USAGE: <variable-setting>
+# @DESCRIPTION:
+# Appends a given variable setting to the "_DOTNET_PKG_LAUNCHERVARS".
+#
+# WARNING: This function modifies a global variable permanently!
+# This means that all launchers created in subsequent
+# "dotnet-pkg-base_dolauncher" calls of a given package will have
+# the given variable set.
+#
+# Example:
+# @CODE
+# dotnet-pkg-base_append-launchervar "DOTNET_EnableAlternateStackCheck=1"
+# @CODE
+#
+# For more info see the "_DOTNET_PKG_LAUNCHERVARS" variable.
+dotnet-pkg-base_append-launchervar() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ [[ -z ${1} ]] && die "${FUNCNAME[0]}: no variable setting specified"
+
+ _DOTNET_PKG_LAUNCHERVARS+=( "${1}" )
+}
+
+# @FUNCTION: dotnet-pkg-base_append_launchervar
+# @USAGE: <variable-setting>
+# @DESCRIPTION:
+# DEPRECATED, use "dotnet-pkg-base_append-launchervar" instead.
+dotnet-pkg-base_append_launchervar() {
+ dotnet-pkg-base_append-launchervar "${@}"
+}
+
+# @FUNCTION: dotnet-pkg-base_dolauncher
+# @USAGE: <executable-path> [filename]
+# @DESCRIPTION:
+# Make a wrapper script to launch an executable built from a .NET package.
+#
+# If no file name is given, the `basename` of the executable is used.
+#
+# Parameters:
+# ${1} - path of the executable to launch,
+# ${2} - filename of launcher to create (optional).
+#
+# Example:
+# @CODE
+# dotnet-pkg-base_install
+# dotnet-pkg-base_dolauncher /usr/share/${P}/${PN^}
+# @CODE
+#
+# The executable path is prefixed with "EPREFIX".
+dotnet-pkg-base_dolauncher() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ local executable_path executable_name
+
+ if [[ -n "${1}" ]] ; then
+ local executable_path="${1}"
+ shift
+ else
+ die "${FUNCNAME[0]}: No executable path given."
+ fi
+
+ if [[ ${#} -eq 0 ]] ; then
+ executable_name="$(basename "${executable_path}")"
+ else
+ executable_name="${1}"
+ shift
+ fi
+
+ local executable_target="${T}/${executable_name}"
+
+ cat <<-EOF > "${executable_target}" || die
+ #!/bin/sh
+
+ # Launcher script for ${executable_path} (${executable_name}),
+ # created from package "${CATEGORY}/${P}",
+ # compatible with dotnet version ${DOTNET_PKG_COMPAT}.
+
+ for __dotnet_root in \\
+ "${EPREFIX}/usr/$(get_libdir)/dotnet-sdk-${DOTNET_PKG_COMPAT}" \\
+ "${EPREFIX}/opt/dotnet-sdk-bin-${DOTNET_PKG_COMPAT}" ; do
+ [ -d "\${__dotnet_root}" ] && break
+ done
+
+ DOTNET_ROOT="\${__dotnet_root}"
+ export DOTNET_ROOT
+
+ $(for var in "${_DOTNET_PKG_LAUNCHERVARS[@]}" ; do
+ echo "${var}"
+ echo "export ${var%%=*}"
+ done)
+
+ exec "${EPREFIX}${executable_path}" "\${@}"
+ EOF
+
+ exeinto "${_DOTNET_PKG_LAUNCHERDEST}"
+ doexe "${executable_target}"
+}
+
+# @FUNCTION: dotnet-pkg-base_dolauncher-portable
+# @USAGE: <dll-path> <filename>
+# @DESCRIPTION:
+# Make a wrapper script to launch a .NET DLL file built from a .NET package.
+#
+# Parameters:
+# ${1} - path of the DLL to launch,
+# ${2} - filename of launcher to create.
+#
+# Example:
+# @CODE
+# dotnet-pkg-base_dolauncher-portable \
+# /usr/share/${P}/GentooDotnetInfo.dll gentoo-dotnet-info
+# @CODE
+#
+# The DLL path is prefixed with "EPREFIX".
+dotnet-pkg-base_dolauncher-portable() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ local dll_path="${1}"
+ local executable_name="${2}"
+ local executable_target="${T}/${executable_name}"
+
+ cat <<-EOF > "${executable_target}" || die
+ #!/bin/sh
+
+ # Launcher script for ${dll_path} (${executable_name}),
+ # created from package "${CATEGORY}/${P}",
+ # compatible with any dotnet version, built on ${DOTNET_PKG_COMPAT}.
+
+ $(for var in "${_DOTNET_PKG_LAUNCHERVARS[@]}" ; do
+ echo "${var}"
+ echo "export ${var%%=*}"
+ done)
+
+ exec dotnet exec "${EPREFIX}${dll_path}" "\${@}"
+ EOF
+
+ exeinto "${_DOTNET_PKG_LAUNCHERDEST}"
+ doexe "${executable_target}"
+}
+
+# @FUNCTION: dotnet-pkg-base_dolauncher_portable
+# @USAGE: <dll-path> <filename>
+# @DESCRIPTION:
+# DEPRECATED, use "dotnet-pkg-base_dolauncher-portable" instead.
+dotnet-pkg-base_dolauncher_portable() {
+ dotnet-pkg-base_dolauncher-portable "${@}"
+}
+
+fi
diff --git a/eclass/dotnet-pkg.eclass b/eclass/dotnet-pkg.eclass
new file mode 100644
index 000000000000..59a8ae799f86
--- /dev/null
+++ b/eclass/dotnet-pkg.eclass
@@ -0,0 +1,343 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: dotnet-pkg.eclass
+# @MAINTAINER:
+# Gentoo Dotnet project <dotnet@gentoo.org>
+# @AUTHOR:
+# Anna Figueiredo Gomes <navi@vlhl.dev>
+# Maciej Barć <xgqt@gentoo.org>
+# @SUPPORTED_EAPIS: 8
+# @PROVIDES: dotnet-pkg-base nuget
+# @BLURB: common functions and variables for .NET packages
+# @DESCRIPTION:
+# This eclass is designed to help with building and installing packages that
+# use the .NET SDK.
+# It provides the required phase functions and special variables that make
+# it easier to write ebuilds for .NET packages.
+# If you do not use the exported phase functions, then consider using
+# the "dotnet-pkg-base.eclass" instead.
+#
+# The .NET SDK is an open-source framework from Microsoft and the
+# cross-platform successor to the .NET Framework.
+#
+# .NET packages require proper inspection before packaging:
+# - the compatible .NET SDK version has to be declared;
+# this can be done by inspecting the package's "*.proj" files;
+# unlike Java, .NET packages tend to lock onto one exact .NET SDK
+# version, so building with other .NET versions is mostly unsupported,
+# - nugets, the package's .NET dependencies (similar to Java's JARs),
+# have to be listed using either the "NUGETS" variable or be bundled inside
+# a "prebuilt" archive; in the latter case the "NUGET_PACKAGES" variable
+# also has to be set explicitly,
+# - the main project files (".csproj", ".fsproj" or ".vbproj") that build
+# the package have to be specified by the "DOTNET_PKG_PROJECTS" variable.
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_DOTNET_PKG_ECLASS} ]] ; then
+_DOTNET_PKG_ECLASS=1
+
+inherit dotnet-pkg-base
+
+# Append to "RDEPEND" and "BDEPEND" "DOTNET_PKG_RDEPS" and "DOTNET_PKG_BDEPS"
+# generated by "dotnet-pkg-base" eclass.
+RDEPEND+=" ${DOTNET_PKG_RDEPS} "
+BDEPEND+=" ${DOTNET_PKG_BDEPS} "
+
+# @ECLASS_VARIABLE: DOTNET_PKG_BAD_PROJECTS
+# @DESCRIPTION:
+# List of projects to remove from all found solution (".sln") files.
+# The projects are removed in the "dotnet-pkg_src_prepare" function.
+#
+# This variable should be set after inheriting "dotnet-pkg.eclass".
+#
+# Default value is an empty array.
+#
+# Example:
+# @CODE
+# DOTNET_PKG_BAD_PROJECTS=( "${S}/BrokenTests" )
+# DOTNET_PKG_PROJECTS=( "${S}/DotnetProject" )
+# @CODE
+#
+# For more info see: "dotnet-pkg_remove-bad" function.
+DOTNET_PKG_BAD_PROJECTS=()
+
+# @ECLASS_VARIABLE: DOTNET_PKG_PROJECTS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Paths to the main .NET project files (".csproj", ".fsproj", ".vbproj")
+# used by default by the "dotnet-pkg_src_compile" phase function.
+#
+# In .NET version 6.0 and lower it was possible to build a project solution
+# (".sln") immediately with output to a specified directory ("--output DIR"),
+# but versions >= 7.0 deprecated this behavior. This means that
+# "dotnet-pkg-base_build" will fail when pointed to a solution or a directory
+# containing a solution file.
+#
+# This variable should be set after inheriting "dotnet-pkg.eclass",
+# it is also advised that it is set after the variable "${S}" is set.
+# "DOTNET_PKG_PROJECTS" can integrate with "S" (see the example below).
+#
+# Example:
+# @CODE
+# SRC_URI="..."
+# S="${WORKDIR}/${P}/src"
+#
+# LICENSE="MIT"
+# SLOT="0"
+# KEYWORDS="~amd64"
+#
+# DOTNET_PKG_PROJECTS=( "${S}/DotnetProject" )
+#
+# src_prepare() {
+# ...
+# @CODE
+
+# @ECLASS_VARIABLE: DOTNET_PKG_RESTORE_EXTRA_ARGS
+# @DESCRIPTION:
+# Extra arguments to pass to the package restore, in the "src_configure" phase.
+#
+# This is passed when restoring the projects listed in "DOTNET_PKG_PROJECTS"
+# as well as any solution files discovered by "dotnet-pkg_src_configure".
+#
+# This variable should be set after inheriting "dotnet-pkg.eclass",
+# it is also advised that it is set after the variable
+# "DOTNET_PROJECT" (from "dotnet-pkg-base" eclass) is set.
+#
+# Default value is an empty array.
+#
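+# Example (reuses the "RollForward" property also set by
+# "dotnet-pkg_force-compat"):
+# @CODE
+# DOTNET_PKG_RESTORE_EXTRA_ARGS=( -p:RollForward=Major )
+# @CODE
+#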
+# For more info see the "DOTNET_PROJECT" variable and "dotnet-pkg_src_configure".
+DOTNET_PKG_RESTORE_EXTRA_ARGS=()
+
+# @ECLASS_VARIABLE: DOTNET_PKG_BUILD_EXTRA_ARGS
+# @DESCRIPTION:
+# Extra arguments to pass to the package build, in the "src_compile" phase.
+#
+# This is passed only when building the projects listed in "DOTNET_PKG_PROJECTS".
+# Other build invocations do not use this variable.
+#
+# This variable should be set after inheriting "dotnet-pkg.eclass",
+# it is also advised that it is set after the variable
+# "DOTNET_PROJECT" (from "dotnet-pkg-base" eclass) is set.
+#
+# Default value is an empty array.
+#
+# Example:
+# @CODE
+# DOTNET_PKG_BUILD_EXTRA_ARGS=( -p:WarningLevel=0 )
+# @CODE
+#
+# For more info see the "DOTNET_PROJECT" variable and "dotnet-pkg_src_compile".
+DOTNET_PKG_BUILD_EXTRA_ARGS=()
+
+# @ECLASS_VARIABLE: DOTNET_PKG_TEST_EXTRA_ARGS
+# @DESCRIPTION:
+# Extra arguments to pass to the package test, in the "src_test" phase.
+#
+# This is passed only when testing found ".sln" solution files
+# (see also "dotnet-pkg-base_foreach-solution").
+# Other test invocations do not use this variable.
+#
+# This variable should be set after inheriting "dotnet-pkg.eclass",
+# it is also advised that it is set after the variable
+# "DOTNET_PROJECT" (from "dotnet-pkg-base" eclass) is set.
+#
+# Default value is an empty array.
+#
+# Example:
+# @CODE
+# DOTNET_PKG_TEST_EXTRA_ARGS=( -p:RollForward=Major )
+# @CODE
+#
+# For more info see the "DOTNET_PROJECT" variable and "dotnet-pkg_src_test".
+DOTNET_PKG_TEST_EXTRA_ARGS=()
+
+# @FUNCTION: dotnet-pkg_force-compat
+# @DESCRIPTION:
+# This function appends special options to all "DOTNET_PKG_*_EXTRA_ARGS"
+# variables in an attempt to force compatibility with the selected
+# "DOTNET_PKG_COMPAT" .NET SDK version.
+#
+# Call this function post-inherit.
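+#
+# Example (a minimal sketch; the "DOTNET_PKG_COMPAT" value is illustrative):
+# @CODE
+# DOTNET_PKG_COMPAT=8.0
+#
+# inherit dotnet-pkg
+#
+# dotnet-pkg_force-compat
+# @CODE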
+dotnet-pkg_force-compat() {
+ if [[ -z ${DOTNET_PKG_COMPAT} ]] ; then
+ die "DOTNET_PKG_COMPAT is not set"
+ fi
+
+ local -a force_extra_args=(
+ -p:RollForward=Major
+ -p:TargetFramework="net${DOTNET_PKG_COMPAT}"
+ -p:TargetFrameworks="net${DOTNET_PKG_COMPAT}"
+ )
+
+ DOTNET_PKG_RESTORE_EXTRA_ARGS+=( "${force_extra_args[@]}" )
+ DOTNET_PKG_BUILD_EXTRA_ARGS+=( "${force_extra_args[@]}" )
+ DOTNET_PKG_TEST_EXTRA_ARGS+=( "${force_extra_args[@]}" )
+}
+
+# @FUNCTION: dotnet-pkg_pkg_setup
+# @DESCRIPTION:
+# Default "pkg_setup" for the "dotnet-pkg" eclass.
+# Pre-build configuration and checks.
+#
+# Calls "dotnet-pkg-base_pkg_setup".
+dotnet-pkg_pkg_setup() {
+ [[ ${MERGE_TYPE} != binary ]] && dotnet-pkg-base_setup
+}
+
+# @FUNCTION: dotnet-pkg_src_unpack
+# @DESCRIPTION:
+# Default "src_unpack" for the "dotnet-pkg" eclass.
+# Unpack the package sources.
+#
+# Includes a special exception for nugets (".nupkg" files) - they are instead
+# linked into the "NUGET_PACKAGES" directory.
+dotnet-pkg_src_unpack() {
+ nuget_link-system-nugets
+ nuget_link-nuget-archives
+ nuget_unpack-non-nuget-archives
+}
+
+# @FUNCTION: dotnet-pkg_remove-bad
+# @USAGE: <solution>
+# @DESCRIPTION:
+# Remove all projects specified by "DOTNET_PKG_BAD_PROJECTS" from a given
+# solution file.
+#
+# Used by "dotnet-pkg_src_prepare".
+dotnet-pkg_remove-bad() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ [[ -z ${1} ]] && die "${FUNCNAME[0]}: no solution file specified"
+
+ local bad_project
+ for bad_project in "${DOTNET_PKG_BAD_PROJECTS[@]}" ; do
+ nonfatal dotnet-pkg-base_sln-remove "${1}" "${bad_project}"
+ done
+}
+
+# @FUNCTION: dotnet-pkg_src_prepare
+# @DESCRIPTION:
+# Default "src_prepare" for the "dotnet-pkg" eclass.
+# Prepare the package sources.
+#
+# Run "dotnet-pkg-base_remove-global-json", "dotnet-pkg-base_remove-bad"
+# for each found solution file and prepare for using Nuget.
+dotnet-pkg_src_prepare() {
+ dotnet-pkg-base_remove-global-json
+ dotnet-pkg-base_foreach-solution "$(pwd)" dotnet-pkg_remove-bad
+
+ find "$(pwd)" -maxdepth 1 -iname "nuget.config" -delete ||
+ die "${FUNCNAME[0]}: failed to remove unwanted \"NuGet.config\" config files"
+ nuget_writeconfig "$(pwd)/"
+
+ default
+}
+
+# @FUNCTION: dotnet-pkg_foreach-project
+# @USAGE: <command> [args] ...
+# @DESCRIPTION:
+# Run a specified command for each project listed inside the "DOTNET_PKG_PROJECTS"
+# variable.
+#
+# Used by "dotnet-pkg_src_configure" and "dotnet-pkg_src_compile".
+dotnet-pkg_foreach-project() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ local dotnet_project
+ for dotnet_project in "${DOTNET_PKG_PROJECTS[@]}" ; do
+ ebegin "Running \"${*}\" for project: \"${dotnet_project##*/}\""
+ "${@}" "${dotnet_project}"
+ eend $? "${FUNCNAME[0]}: failed for project: \"${dotnet_project}\"" || die
+ done
+}
+
+# @FUNCTION: dotnet-pkg_src_configure
+# @DESCRIPTION:
+# Default "src_configure" for the "dotnet-pkg" eclass.
+# Configure the package.
+#
+# First show information about the current .NET SDK that is being used,
+# then restore each project listed in "DOTNET_PKG_PROJECTS",
+# afterwards restore any found solutions.
+dotnet-pkg_src_configure() {
+ dotnet-pkg-base_info
+
+ dotnet-pkg_foreach-project \
+ dotnet-pkg-base_restore "${DOTNET_PKG_RESTORE_EXTRA_ARGS[@]}"
+
+ dotnet-pkg-base_foreach-solution \
+ "$(pwd)" \
+ dotnet-pkg-base_restore "${DOTNET_PKG_RESTORE_EXTRA_ARGS[@]}"
+}
+
+# @FUNCTION: dotnet-pkg_src_compile
+# @DESCRIPTION:
+# Default "src_compile" for the "dotnet-pkg" eclass.
+# Build the package.
+#
+# Build the package using "dotnet build" in the directory specified by either
+# "DOTNET_PROJECT" or "S" (temporary build directory) variables.
+#
+# For more info see: "DOTNET_PROJECT" variable
+# and "dotnet-pkg-base_get-project" function.
+dotnet-pkg_src_compile() {
+ dotnet-pkg_foreach-project \
+ dotnet-pkg-base_build "${DOTNET_PKG_BUILD_EXTRA_ARGS[@]}"
+}
+
+# @FUNCTION: dotnet-pkg_src_test
+# @DESCRIPTION:
+# Default "src_test" for the "dotnet-pkg" eclass.
+# Test the package.
+#
+# Test the package by testing any found solutions.
+#
+# It is very likely that this function will either not execute any tests or
+# will execute a wrong or incomplete test suite. Maintainers should verify
+# that the correct tests, if any, are actually run.
+dotnet-pkg_src_test() {
+ dotnet-pkg-base_foreach-solution \
+ "$(pwd)" \
+ dotnet-pkg-base_test "${DOTNET_PKG_TEST_EXTRA_ARGS[@]}"
+}
+
+# @FUNCTION: dotnet-pkg_src_install
+# @DESCRIPTION:
+# Default "src_install" for the "dotnet-pkg" eclass.
+# Install the package.
+#
+# This is the default package install:
+# - install the compiled .NET package artifacts,
+# for more info see "dotnet-pkg-base_install" and "DOTNET_PKG_OUTPUT",
+# - create a launcher in "/usr/bin" pointing into the .NET package directory;
+# the phase detects whether the executable name starts with a capital letter
+# (common among .NET packages) or not,
+# - call "einstalldocs".
+#
+# It is very likely that this function is either insufficient or has to be
+# redefined in an ebuild.
+dotnet-pkg_src_install() {
+ dotnet-pkg-base_install
+
+ # /usr/bin/Nake -> /usr/share/nake-3.0.0/Nake
+ if [[ -f "${D}/usr/share/${P}/${PN^}" ]] ; then
+ dotnet-pkg-base_dolauncher "/usr/share/${P}/${PN^}"
+
+ # Create a compatibility symlink, also for ease of use from the CLI.
+ dosym -r "/usr/bin/${PN^}" "/usr/bin/${PN}"
+
+ elif [[ -f "${D}/usr/share/${P}/${PN}" ]] ; then
+ dotnet-pkg-base_dolauncher "/usr/share/${P}/${PN}"
+ fi
+
+ einstalldocs
+}
+
+fi
+
+EXPORT_FUNCTIONS pkg_setup src_unpack src_prepare src_configure src_compile src_test src_install
diff --git a/eclass/dotnet.eclass b/eclass/dotnet.eclass
index 3affc00ba458..1fb288dd1094 100644
--- a/eclass/dotnet.eclass
+++ b/eclass/dotnet.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: dotnet.eclass
# @MAINTAINER:
# maintainer-needed@gentoo.org
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7
# @BLURB: common settings and functions for mono and dotnet related packages
# @DESCRIPTION:
# The dotnet eclass contains common environment settings that are useful for
@@ -12,19 +12,16 @@
# MONO_SHARED_DIR and sets LC_ALL in order to prevent errors during compilation
# of dotnet packages.
-case ${EAPI:-0} in
- 6)
- inherit eapi7-ver multilib
- DEPEND="dev-lang/mono"
- ;;
- 7)
- BDEPEND="dev-lang/mono"
- ;;
- *)
- die "${ECLASS}: EAPI ${EAPI:-0} not supported"
- ;;
+case ${EAPI} in
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ ! ${_DOTNET_ECLASS} ]]; then
+_DOTNET_ECLASS=1
+
+BDEPEND="dev-lang/mono"
+
inherit mono-env
# @ECLASS_VARIABLE: USE_DOTNET
@@ -98,7 +95,6 @@ exbuild() {
# @DESCRIPTION:
# Install package to GAC.
egacinstall() {
- use !prefix && has "${EAPI:-0}" 0 1 2 && ED="${D}"
gacutil -i "${1}" \
-root "${ED}"/usr/$(get_libdir) \
-gacdir /usr/$(get_libdir) \
@@ -110,7 +106,6 @@ egacinstall() {
# @DESCRIPTION:
# multilib comply
dotnet_multilib_comply() {
- use !prefix && has "${EAPI:-0}" 0 1 2 && ED="${D}"
local dir finddirs=() mv_command=${mv_command:-mv}
if [[ -d "${ED}/usr/lib" && "$(get_libdir)" != "lib" ]]
then
@@ -136,7 +131,7 @@ dotnet_multilib_comply() {
then
for exe in "${ED}/usr/bin"/*
do
- if [[ "$(file "${exe}")" == *"shell script text"* ]]
+ if [[ "$(file -S "${exe}")" == *"shell script text"* ]]
then
sed -r -i -e ":/lib(/|$): s:/lib(/|$):/$(get_libdir)\1:" \
"${exe}" || die "Sedding some sense into ${exe} failed"
@@ -147,4 +142,6 @@ dotnet_multilib_comply() {
fi
}
+fi
+
EXPORT_FUNCTIONS pkg_setup
diff --git a/eclass/dune.eclass b/eclass/dune.eclass
index 5009648114f5..f0faea007c9f 100644
--- a/eclass/dune.eclass
+++ b/eclass/dune.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: dune.eclass
@@ -8,52 +8,144 @@
# ML <ml@gentoo.org>
# @AUTHOR:
# Rafael Kitover <rkitover@gmail.com>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Provides functions for installing Dune packages.
# @DESCRIPTION:
-# Provides dependencies on dDne and OCaml and default src_compile, src_test and
+# Provides dependencies on Dune and OCaml and default src_compile, src_test and
# src_install for Dune-based packages.
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ ! ${_DUNE_ECLASS} ]]; then
+_DUNE_ECLASS=1
+
# @ECLASS_VARIABLE: DUNE_PKG_NAME
# @PRE_INHERIT
# @DESCRIPTION:
# Sets the actual Dune package name, if different from Gentoo package name.
# Set before inheriting the eclass.
-: ${DUNE_PKG_NAME:=${PN}}
+: "${DUNE_PKG_NAME:=${PN}}"
-case ${EAPI:-0} in
- 6|7|8) ;;
- *) die "${ECLASS}: EAPI ${EAPI} not supported" ;;
-esac
-
-inherit multiprocessing
+inherit edo multiprocessing
# Do not complain about CFLAGS etc since ml projects do not use them.
QA_FLAGS_IGNORED='.*'
-EXPORT_FUNCTIONS src_compile src_test src_install
+RDEPEND="
+ >=dev-lang/ocaml-4:=[ocamlopt?]
+ dev-ml/dune:=
+"
+DEPEND="${RDEPEND}"
+BDEPEND="
+ dev-lang/ocaml
+ dev-ml/dune
+"
-RDEPEND=">=dev-lang/ocaml-4:=[ocamlopt?] dev-ml/dune:="
-case ${EAPI:-0} in
- 6)
- DEPEND="${RDEPEND} dev-ml/dune"
- ;;
- *)
- BDEPEND="dev-ml/dune dev-lang/ocaml"
- DEPEND="${RDEPEND}"
- ;;
-esac
+# @FUNCTION: edune
+# @USAGE: <arg> ...
+# @DESCRIPTION:
+# A thin wrapper for the `dune` command.
+# Runs `dune` with given arguments and dies on failure.
+#
+# Example use:
+# @CODE
+# edune clean
+# @CODE
+edune() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ edo dune "${@}"
+}
+
+# @FUNCTION: dune-release
+# @USAGE: <subcommand> [--target target] [package] ...
+# @DESCRIPTION:
+# Run a selected subcommand either for all dune packages in the current
+# directory or only for the selected packages. In the former case package
+# detection is done by dune itself.
+# The `--target` option specifies a target for the selected subcommand;
+# it is primarily used for `dune build`, for more info see `man dune-build`.
+#
+# Example use:
+# @CODE
+# dune-release build --target @install menhir menhirLib menhirSdk
+# @CODE
+dune-release() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local subcommand
+ local target
+
+ # Get the subcommand.
+ if [[ -z "${1}" ]] ; then
+ die "dune-release: missing subcommand"
+ else
+ subcommand="${1}"
+ shift
+ fi
+
+ # Detect if the target is specified.
+ case "${1}" in
+ --target )
+ target="${2}"
+ shift
+ shift
+ ;;
+ esac
+
+ local -a myduneopts=(
+ --display=short
+ --profile release
+ -j $(makeopts_jobs)
+ )
+
+ # Resolve the package flag.
+ if [[ -n "${1}" ]] ; then
+ myduneopts+=( --for-release-of-packages="$(IFS="," ; echo "${*}")" )
+ fi
+
+ edune ${subcommand} ${target} "${myduneopts[@]}"
+}
+
+# @FUNCTION: dune-compile
+# @USAGE: [package] ...
+# @DESCRIPTION:
+# Builds either all or only the selected dune packages in the current directory.
+#
+# Example use:
+# @CODE
+# dune-compile menhir menhirLib menhirSdk
+# @CODE
+dune-compile() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ dune-release build --target @install "${@}"
+}
+
+# @FUNCTION: dune-test
+# @USAGE: [package] ...
+# @DESCRIPTION:
+# Tests either all or only the selected dune packages in the current directory.
+#
+# Example use:
+# @CODE
+# dune-test menhir menhirLib menhirSdk
+# @CODE
+dune-test() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ dune-release runtest "${@}"
+}
dune_src_compile() {
- ebegin "Building"
- dune build @install -j $(makeopts_jobs) --profile release
- eend $? || die
+ dune-compile
}
dune_src_test() {
- ebegin "Testing"
- dune runtest -j $(makeopts_jobs) --profile release
- eend $? || die
+ dune-test
}
# @FUNCTION: dune-install
@@ -67,27 +159,27 @@ dune_src_test() {
# dune-install menhir menhirLib menhirSdk
# @CODE
dune-install() {
+ debug-print-function ${FUNCNAME} "${@}"
+
local -a pkgs=( "${@}" )
[[ ${#pkgs[@]} -eq 0 ]] && pkgs=( "${DUNE_PKG_NAME}" )
local -a myduneopts=(
- --prefix="${ED%/}/usr"
- --libdir="${D%/}$(ocamlc -where)"
- --mandir="${ED%/}/usr/share/man"
+ --prefix="${ED}/usr"
+ --libdir="${D}$(ocamlc -where)"
+ --mandir="${ED}/usr/share/man"
)
local pkg
for pkg in "${pkgs[@]}" ; do
- ebegin "Installing ${pkg}"
- dune install ${myduneopts[@]} ${pkg}
- eend $? || die
+ edune install ${myduneopts[@]} ${pkg}
# Move docs to the appropriate place.
- if [ -d "${ED%/}/usr/doc/${pkg}" ] ; then
- mkdir -p "${ED%/}/usr/share/doc/${PF}/" || die
- mv "${ED%/}/usr/doc/${pkg}" "${ED%/}/usr/share/doc/${PF}/" || die
- rm -rf "${ED%/}/usr/doc" || die
+ if [[ -d "${ED}/usr/doc/${pkg}" ]] ; then
+ mkdir -p "${ED}/usr/share/doc/${PF}/" || die
+ mv "${ED}/usr/doc/${pkg}" "${ED}/usr/share/doc/${PF}/" || die
+ rm -rf "${ED}/usr/doc" || die
fi
done
}
@@ -95,3 +187,7 @@ dune-install() {
dune_src_install() {
dune-install ${1:-${DUNE_PKG_NAME}}
}
+
+fi
+
+EXPORT_FUNCTIONS src_compile src_test src_install
diff --git a/eclass/eapi8-dosym.eclass b/eclass/eapi8-dosym.eclass
index 52f0ffe3e62b..866c98c78d91 100644
--- a/eclass/eapi8-dosym.eclass
+++ b/eclass/eapi8-dosym.eclass
@@ -1,4 +1,4 @@
-# Copyright 2020 Gentoo Authors
+# Copyright 2020-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: eapi8-dosym.eclass
@@ -6,7 +6,7 @@
# PMS team <pms@gentoo.org>
# @AUTHOR:
# Ulrich Müller <ulm@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 7
# @BLURB: Testing implementation of EAPI 8 dosym -r option
# @DESCRIPTION:
# A stand-alone implementation of the dosym command aimed for EAPI 8.
@@ -17,8 +17,8 @@
# https://bugs.gentoo.org/708360
case ${EAPI} in
- 5|6|7) ;;
- *) die "${ECLASS}: EAPI=${EAPI:-0} not supported" ;;
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
# @FUNCTION: _dosym8_canonicalize
@@ -31,7 +31,7 @@ esac
_dosym8_canonicalize() {
local path slash i prev out IFS=/
- path=( $1 )
+ read -r -d '' -a path < <(printf '%s\0' "$1")
[[ $1 == /* ]] && slash=/
while true; do
@@ -39,7 +39,7 @@ _dosym8_canonicalize() {
# or as a special case, "/.." at the beginning of the path.
# Also drop empty and "." path components as we go along.
prev=
- for i in ${!path[@]}; do
+ for i in "${!path[@]}"; do
if [[ -z ${path[i]} || ${path[i]} == . ]]; then
unset "path[i]"
elif [[ ${path[i]} != .. ]]; then
@@ -56,7 +56,7 @@ _dosym8_canonicalize() {
done
out="${slash}${path[*]}"
- echo "${out:-.}"
+ printf "%s\n" "${out:-.}"
}
# @FUNCTION: dosym8
diff --git a/eclass/ecm.eclass b/eclass/ecm.eclass
index 3cc586b82d82..d83320e991ed 100644
--- a/eclass/ecm.eclass
+++ b/eclass/ecm.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ecm.eclass
# @MAINTAINER:
# kde@gentoo.org
-# @SUPPORTED_EAPIS: 7 8
+# @SUPPORTED_EAPIS: 8
# @PROVIDES: cmake virtualx
# @BLURB: Support eclass for packages that use KDE Frameworks with ECM.
# @DESCRIPTION:
@@ -22,21 +22,25 @@
# any phase functions are overridden the version here should also be called.
case ${EAPI} in
- 7|8) ;;
- *) die "${ECLASS}: EAPI=${EAPI:-0} is not supported" ;;
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_ECM_ECLASS} ]]; then
_ECM_ECLASS=1
+inherit cmake flag-o-matic toolchain-funcs
+
+if [[ ${EAPI} == 8 ]]; then
# @ECLASS_VARIABLE: VIRTUALX_REQUIRED
# @DESCRIPTION:
# For proper description see virtualx.eclass manpage.
# Here we redefine default value to be manual, if your package needs virtualx
# for tests you should proceed with setting VIRTUALX_REQUIRED=test.
-: ${VIRTUALX_REQUIRED:=manual}
+: "${VIRTUALX_REQUIRED:=manual}"
-inherit cmake flag-o-matic toolchain-funcs virtualx
+inherit virtualx
+fi
# @ECLASS_VARIABLE: ECM_NONGUI
# @DEFAULT_UNSET
@@ -46,9 +50,9 @@ inherit cmake flag-o-matic toolchain-funcs virtualx
# kde-frameworks/oxygen-icons and run the xdg.eclass routines for pkg_preinst,
# pkg_postinst and pkg_postrm. If set to "true", do nothing.
if [[ ${CATEGORY} = kde-frameworks ]] ; then
- : ${ECM_NONGUI:=true}
+ : "${ECM_NONGUI:=true}"
fi
-: ${ECM_NONGUI:=false}
+: "${ECM_NONGUI:=false}"
if [[ ${ECM_NONGUI} = false ]] ; then
inherit xdg
@@ -58,52 +62,53 @@ fi
# @DESCRIPTION:
# Assume the package is using KDEInstallDirs macro and switch
# KDE_INSTALL_USE_QT_SYS_PATHS to ON. If set to "false", do nothing.
-: ${ECM_KDEINSTALLDIRS:=true}
+: "${ECM_KDEINSTALLDIRS:=true}"
# @ECLASS_VARIABLE: ECM_DEBUG
# @DESCRIPTION:
# Add "debug" to IUSE. If !debug, add -DQT_NO_DEBUG to CPPFLAGS. If set to
# "false", do nothing.
-: ${ECM_DEBUG:=true}
+: "${ECM_DEBUG:=true}"
# @ECLASS_VARIABLE: ECM_DESIGNERPLUGIN
# @DESCRIPTION:
# If set to "true", add "designer" to IUSE to toggle build of designer plugins
# and add the necessary BDEPEND. If set to "false", do nothing.
-: ${ECM_DESIGNERPLUGIN:=false}
+: "${ECM_DESIGNERPLUGIN:=false}"
# @ECLASS_VARIABLE: ECM_EXAMPLES
# @DESCRIPTION:
# By default unconditionally ignore a top-level examples subdirectory.
# If set to "true", add "examples" to IUSE to toggle adding that subdirectory.
-: ${ECM_EXAMPLES:=false}
+: "${ECM_EXAMPLES:=false}"
# @ECLASS_VARIABLE: ECM_HANDBOOK
# @DESCRIPTION:
# Will accept "true", "false", "optional", "forceoptional". If set to "false",
# do nothing.
# Otherwise, add "+handbook" to IUSE, add the appropriate dependency, and let
-# KF5DocTools generate and install the handbook from docbook file(s) found in
-# ECM_HANDBOOK_DIR. However if !handbook, disable build of ECM_HANDBOOK_DIR
-# in CMakeLists.txt.
-# If set to "optional", build with -DCMAKE_DISABLE_FIND_PACKAGE_KF5DocTools=ON
-# when !handbook. In case package requires KF5KDELibs4Support, see next:
-# If set to "forceoptional", remove a KF5DocTools dependency from the root
-# CMakeLists.txt in addition to the above.
-: ${ECM_HANDBOOK:=false}
+# KF${_KFSLOT}DocTools generate and install the handbook from docbook file(s)
+# found in ECM_HANDBOOK_DIR. However if !handbook, disable build of
+# ECM_HANDBOOK_DIR in CMakeLists.txt.
+# If set to "optional", build with
+# -DCMAKE_DISABLE_FIND_PACKAGE_KF${_KFSLOT}DocTools=ON when !handbook. In case
+# package requires KF5KDELibs4Support, see next:
+# If set to "forceoptional", remove a KF${_KFSLOT}DocTools dependency from the
+# root CMakeLists.txt in addition to the above.
+: "${ECM_HANDBOOK:=false}"
# @ECLASS_VARIABLE: ECM_HANDBOOK_DIR
# @DESCRIPTION:
# Specifies the directory containing the docbook file(s) relative to ${S} to
-# be processed by KF5DocTools (kdoctools_install).
-: ${ECM_HANDBOOK_DIR:=doc}
+# be processed by KF${_KFSLOT}DocTools (kdoctools_install).
+: "${ECM_HANDBOOK_DIR:=doc}"
# @ECLASS_VARIABLE: ECM_PO_DIRS
# @DESCRIPTION:
# Specifies directories of l10n files relative to ${S} to be processed by
-# KF5I18n (ki18n_install). If IUSE nls exists and is disabled then disable
-# build of these directories in CMakeLists.txt.
-: ${ECM_PO_DIRS:="po poqm"}
+# KF${_KFSLOT}I18n (ki18n_install). If IUSE nls exists and is disabled then
+# disable build of these directories in CMakeLists.txt.
+: "${ECM_PO_DIRS:="po poqm"}"
# @ECLASS_VARIABLE: ECM_QTHELP
# @DEFAULT_UNSET
@@ -113,9 +118,9 @@ fi
# -DBUILD_QCH=ON generate and install Qt compressed help files when USE=doc.
# If set to "false", do nothing.
if [[ ${CATEGORY} = kde-frameworks ]]; then
- : ${ECM_QTHELP:=true}
+ : "${ECM_QTHELP:=true}"
fi
-: ${ECM_QTHELP:=false}
+: "${ECM_QTHELP:=false}"
# @ECLASS_VARIABLE: ECM_TEST
# @DEFAULT_UNSET
@@ -124,38 +129,50 @@ fi
# "forceoptional-recursive".
# Default value is "false", except for CATEGORY=kde-frameworks where it is
# set to "true". If set to "false", do nothing.
-# For any other value, add "test" to IUSE and DEPEND on dev-qt/qttest:5.
-# If set to "optional", build with -DCMAKE_DISABLE_FIND_PACKAGE_Qt5Test=ON
-# when USE=!test.
-# If set to "forceoptional", punt Qt5Test dependency and ignore "autotests",
+# For any other value, add "test" to IUSE (and for KF5 DEPEND on
+# dev-qt/qttest:5). If set to "optional", build with
+# -DCMAKE_DISABLE_FIND_PACKAGE_Qt${_KFSLOT}Test=ON when USE=!test. If set
+# to "forceoptional", punt Qt${_KFSLOT}Test dependency and ignore "autotests",
# "test", "tests" subdirs from top-level CMakeLists.txt when USE=!test.
-# If set to "forceoptional-recursive", punt Qt5Test dependencies and make
-# autotest(s), unittest(s) and test(s) subdirs from *any* CMakeLists.txt in
-# ${S} and below conditional on BUILD_TESTING when USE=!test. This is always
+# If set to "forceoptional-recursive", punt Qt${_KFSLOT}Test dependencies and
+# make autotest(s), unittest(s) and test(s) subdirs from *any* CMakeLists.txt
+# in ${S} and below conditional on BUILD_TESTING when USE=!test. This is always
# meant as a short-term fix and creates ${T}/${P}-tests-optional.patch to
# refine and submit upstream.
if [[ ${CATEGORY} = kde-frameworks ]]; then
- : ${ECM_TEST:=true}
+ : "${ECM_TEST:=true}"
fi
-: ${ECM_TEST:=false}
+: "${ECM_TEST:=false}"
# @ECLASS_VARIABLE: KFMIN
# @DEFAULT_UNSET
# @DESCRIPTION:
# Minimum version of Frameworks to require. Default value for kde-frameworks
-# is ${PV} and 5.64.0 baseline for everything else. This is not going to be
-# changed unless we also bump EAPI, which usually implies (rev-)bumping.
-# Version will later be used to differentiate between KF5/Qt5 and KF6/Qt6.
+# is ${PV} and 5.106.0 baseline for everything else.
+# If set to >=5.240, KF6/Qt6 is assumed thus SLOT=6 dependencies added and
+# -DQT_MAJOR_VERSION=6 added to cmake args.
if [[ ${CATEGORY} = kde-frameworks ]]; then
- : ${KFMIN:=$(ver_cut 1-2)}
+ : "${KFMIN:=$(ver_cut 1-2)}"
fi
-: ${KFMIN:=5.82.0}
+: "${KFMIN:=5.106.0}"
-# @ECLASS_VARIABLE: KFSLOT
+# @ECLASS_VARIABLE: _KFSLOT
# @INTERNAL
# @DESCRIPTION:
-# KDE Frameworks and Qt slot dependency, implied by KFMIN version.
-: ${KFSLOT:=5}
+# KDE Frameworks and Qt main slot dependency, implied by KFMIN version, *not*
+# necessarily the package's SLOT. This is being used throughout the eclass to
+# depend on either :5 or :6 Qt/KF packages as well as setting correctly
+# prefixed cmake args.
+: "${_KFSLOT:=5}"
+if [[ ${CATEGORY} == kde-frameworks ]]; then
+ if [[ ${PV} != 5.9999 ]] && $(ver_test ${KFMIN} -ge 5.240); then
+ _KFSLOT=6
+ fi
+else
+ if [[ ${KFMIN/.*} == 6 ]] || $(ver_test ${KFMIN} -ge 5.240); then
+ _KFSLOT=6
+ fi
+fi
case ${ECM_NONGUI} in
true) ;;
@@ -186,7 +203,11 @@ esac
case ${ECM_DESIGNERPLUGIN} in
true)
IUSE+=" designer"
- BDEPEND+=" designer? ( dev-qt/designer:${KFSLOT} )"
+ if [[ ${_KFSLOT} == 6 ]]; then
+ BDEPEND+=" designer? ( dev-qt/qttools:${_KFSLOT}[designer] )"
+ else
+ BDEPEND+=" designer? ( dev-qt/designer:${_KFSLOT} )"
+ fi
;;
false) ;;
*)
@@ -209,7 +230,7 @@ esac
case ${ECM_HANDBOOK} in
true|optional|forceoptional)
IUSE+=" +handbook"
- BDEPEND+=" handbook? ( >=kde-frameworks/kdoctools-${KFMIN}:${KFSLOT} )"
+ BDEPEND+=" handbook? ( >=kde-frameworks/kdoctools-${KFMIN}:${_KFSLOT} )"
;;
false) ;;
*)
@@ -221,11 +242,13 @@ esac
case ${ECM_QTHELP} in
true)
IUSE+=" doc"
- COMMONDEPEND+=" doc? ( dev-qt/qt-docs:${KFSLOT} )"
- BDEPEND+=" doc? (
- >=app-doc/doxygen-1.8.13-r1
- dev-qt/qthelp:${KFSLOT}
- )"
+ COMMONDEPEND+=" doc? ( dev-qt/qt-docs:${_KFSLOT} )"
+ BDEPEND+=" doc? ( >=app-text/doxygen-1.8.13-r1 )"
+ if [[ ${_KFSLOT} == 6 ]]; then
+ BDEPEND+=" dev-qt/qttools:${_KFSLOT}[assistant]"
+ else
+ BDEPEND+=" doc? ( dev-qt/qthelp:${_KFSLOT} )"
+ fi
;;
false) ;;
*)
@@ -237,7 +260,9 @@ esac
case ${ECM_TEST} in
true|optional|forceoptional|forceoptional-recursive)
IUSE+=" test"
- DEPEND+=" test? ( dev-qt/qttest:${KFSLOT} )"
+ if [[ ${_KFSLOT} == 5 ]]; then
+ DEPEND+=" test? ( dev-qt/qttest:${_KFSLOT} )"
+ fi
RESTRICT+=" !test? ( test )"
;;
false) ;;
@@ -247,9 +272,16 @@ case ${ECM_TEST} in
;;
esac
-BDEPEND+=" >=kde-frameworks/extra-cmake-modules-${KFMIN}:${KFSLOT}"
+BDEPEND+="
+ dev-libs/libpcre2:*
+ >=kde-frameworks/extra-cmake-modules-${KFMIN}:*
+"
RDEPEND+=" >=kde-frameworks/kf-env-4"
-COMMONDEPEND+=" dev-qt/qtcore:${KFSLOT}"
+if [[ ${_KFSLOT} == 6 ]]; then
+ COMMONDEPEND+=" dev-qt/qtbase:${_KFSLOT}"
+else
+ COMMONDEPEND+=" dev-qt/qtcore:${_KFSLOT}"
+fi
DEPEND+=" ${COMMONDEPEND}"
RDEPEND+=" ${COMMONDEPEND}"
@@ -325,10 +357,10 @@ _ecm_punt_kfqt_module() {
[[ ! -e "CMakeLists.txt" ]] && return
# FIXME: dep=WebKit will result in 'Widgets' over 'WebKitWidgets' (no regression)
- pcregrep -Mni "(?s)find_package\s*\(\s*${prefix}(\d+|\\$\{\w*\})[^)]*?${dep}.*?\)" \
+ pcre2grep -Mni "(?s)find_package\s*\(\s*${prefix}(\d+|\\$\{\w*\})[^)]*?${dep}.*?\)" \
CMakeLists.txt > "${T}/bogus${dep}"
- # pcregrep returns non-zero on no matches/error
+ # pcre2grep returns non-zero on no matches/error
[[ $? -ne 0 ]] && return
local length=$(wc -l "${T}/bogus${dep}" | cut -d " " -f 1)
@@ -362,21 +394,34 @@ ecm_punt_qt_module() {
}
# @FUNCTION: ecm_punt_bogus_dep
-# @USAGE: <prefix> <dependency>
+# @USAGE: <dependency> or <prefix> <dependency>
# @DESCRIPTION:
-# Removes a specified dependency from a find_package call with multiple
-# components.
+# Removes a specified dependency from a find_package call, optionally
+# supports prefix for find_package with multiple components.
ecm_punt_bogus_dep() {
- local prefix=${1}
- local dep=${2}
+
+ if [[ "$#" == 2 ]] ; then
+ local prefix=${1}
+ local dep=${2}
+ elif [[ "$#" == 1 ]] ; then
+ local dep=${1}
+ else
+ die "${FUNCNAME[0]} must be passed either one or two arguments"
+ fi
if [[ ! -e "CMakeLists.txt" ]]; then
return
fi
- pcregrep -Mni "(?s)find_package\s*\(\s*${prefix}[^)]*?${dep}.*?\)" CMakeLists.txt > "${T}/bogus${dep}"
+ if [[ -z ${prefix} ]]; then
+ sed -e "/find_package\s*(\s*${dep}\(\s\+\(REQUIRED\|CONFIG\|COMPONENTS\|\${[A-Z0-9_]*}\)\)\+\s*)/Is/^/# removed by ecm.eclass - /" \
+ -i CMakeLists.txt || die
+ return
+ else
+ pcre2grep -Mni "(?s)find_package\s*\(\s*${prefix}[^)]*?${dep}.*?\)" CMakeLists.txt > "${T}/bogus${dep}"
+ fi
- # pcregrep returns non-zero on no matches/error
+ # pcre2grep returns non-zero on no matches/error
if [[ $? -ne 0 ]] ; then
return
fi
@@ -446,7 +491,7 @@ ecm_src_prepare() {
# always install unconditionally for kconfigwidgets - if you use
# language X as system language, and there is a combobox with language
# names, the translated language name for language Y is taken from
- # /usr/share/locale/Y/kf5_entry.desktop
+ # /usr/share/locale/Y/kf${_KFSLOT}_entry.desktop
[[ ${PN} != kconfigwidgets ]] && _ecm_strip_handbook_translations
fi
@@ -500,16 +545,20 @@ ecm_src_configure() {
local cmakeargs
+ if [[ ${_KFSLOT} == 6 ]]; then
+ cmakeargs+=( -DQT_MAJOR_VERSION=6 )
+ fi
+
if in_iuse test && ! use test ; then
cmakeargs+=( -DBUILD_TESTING=OFF )
if [[ ${ECM_TEST} = optional ]] ; then
- cmakeargs+=( -DCMAKE_DISABLE_FIND_PACKAGE_Qt5Test=ON )
+ cmakeargs+=( -DCMAKE_DISABLE_FIND_PACKAGE_Qt${_KFSLOT}Test=ON )
fi
fi
if [[ ${ECM_HANDBOOK} = optional ]] ; then
- cmakeargs+=( -DCMAKE_DISABLE_FIND_PACKAGE_KF5DocTools=$(usex !handbook) )
+ cmakeargs+=( -DCMAKE_DISABLE_FIND_PACKAGE_KF${_KFSLOT}DocTools=$(usex !handbook) )
fi
if in_iuse designer && [[ ${ECM_DESIGNERPLUGIN} = true ]]; then
@@ -561,13 +610,15 @@ ecm_src_test() {
KDE_DEBUG=1 cmake_src_test
}
+ local -x QT_QPA_PLATFORM=offscreen
+
# When run as normal user during ebuild development with the ebuild command,
# tests tend to access the session DBUS. This however is not possible in a
# real emerge or on the tinderbox.
# make sure it does not happen, so bad tests can be recognized and disabled
unset DBUS_SESSION_BUS_ADDRESS DBUS_SESSION_BUS_PID
- if [[ ${VIRTUALX_REQUIRED} = always || ${VIRTUALX_REQUIRED} = test ]]; then
+ if [[ ${EAPI} == 8 ]] && [[ ${VIRTUALX_REQUIRED} = always || ${VIRTUALX_REQUIRED} = test ]]; then
virtx _test_runner
else
_test_runner
@@ -589,16 +640,14 @@ ecm_src_install() {
cmake_src_install
# bug 621970
- if [[ ${EAPI} != 7 ]]; then
- if [[ -d "${ED}"/usr/share/applications ]]; then
- local f
- for f in "${ED}"/usr/share/applications/*.desktop; do
- if [[ -x ${f} ]]; then
- einfo "Removing executable bit from ${f#${ED}}"
- fperms a-x "${f#${ED}}"
- fi
- done
- fi
+ if [[ -d "${ED}"/usr/share/applications ]]; then
+ local f
+ for f in "${ED}"/usr/share/applications/*.desktop; do
+ if [[ -x ${f} ]]; then
+ einfo "Removing executable bit from ${f#${ED}}"
+ fperms a-x "${f#${ED}}"
+ fi
+ done
fi
}
@@ -650,8 +699,4 @@ if [[ -v ${KDE_GCC_MINIMAL} ]]; then
EXPORT_FUNCTIONS pkg_pretend
fi
-EXPORT_FUNCTIONS pkg_setup src_prepare src_configure src_test pkg_preinst pkg_postinst pkg_postrm
-
-if [[ ${EAPI} != 7 ]]; then
- EXPORT_FUNCTIONS src_install
-fi
+EXPORT_FUNCTIONS pkg_setup src_prepare src_configure src_test src_install pkg_preinst pkg_postinst pkg_postrm
diff --git a/eclass/elisp-common.eclass b/eclass/elisp-common.eclass
index 47c8132192ca..3d99838a0221 100644
--- a/eclass/elisp-common.eclass
+++ b/eclass/elisp-common.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: elisp-common.eclass
@@ -10,7 +10,8 @@
# Mamoru Komachi <usata@gentoo.org>
# Christian Faulhammer <fauli@gentoo.org>
# Ulrich Müller <ulm@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# Maciej Barć <xgqt@gentoo.org>
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Emacs-related installation utilities
# @DESCRIPTION:
#
@@ -24,7 +25,7 @@
# When relying on the emacs USE flag, you need to add
#
# @CODE
-# emacs? ( >=app-editors/emacs-23.1:* )
+# emacs? ( >=app-editors/emacs-25.3:* )
# @CODE
#
# to your DEPEND/RDEPEND line and use the functions provided here to
@@ -131,6 +132,17 @@
# "50${PN}-gentoo.el". If your subdirectory is not named ${PN}, give
# the differing name as second argument.
#
+# For the simple case that only the package's subdirectory needs to be
+# added to the load-path, function elisp-make-site-file() will create
+# and install a site-init file that does just that:
+#
+# @CODE
+# elisp-make-site-file "${SITEFILE}"
+# @CODE
+#
+# Again, this must be called in src_install(). See the function's
+# documentation for more details on its usage.
+#
# @SUBSECTION pkg_setup() usage:
#
# If your ebuild uses the elisp-compile eclass function to compile
@@ -166,7 +178,6 @@
# to above calls of elisp-site-regen().
case ${EAPI} in
- 6) inherit eapi7-ver ;;
7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -206,7 +217,7 @@ BYTECOMPFLAGS="-L ."
# @ECLASS_VARIABLE: NEED_EMACS
# @DESCRIPTION:
# The minimum Emacs version required for the package.
-: ${NEED_EMACS:=23.1}
+: "${NEED_EMACS:=25.3}"
# @ECLASS_VARIABLE: _ELISP_EMACS_VERSION
# @INTERNAL
@@ -322,6 +333,7 @@ elisp-make-autoload-file() {
;; Local ${null}Variables:
;; version-control: never
;; no-byte-compile: t
+ ;; no-native-compile: t
;; no-update-autoloads: t
;; End:
@@ -329,6 +341,7 @@ elisp-make-autoload-file() {
EOF
${EMACS} ${EMACSFLAGS} \
+ --eval "(require 'autoload)" \
--eval "(setq make-backup-files nil)" \
--eval "(setq generated-autoload-file (expand-file-name \"${f}\"))" \
-f batch-update-autoloads "${@-.}"
@@ -336,6 +349,242 @@ elisp-make-autoload-file() {
eend $? "elisp-make-autoload-file: batch-update-autoloads failed" || die
}
+# @FUNCTION: elisp-org-export-to
+# @USAGE: <export file type> <Org file path>
+# @DESCRIPTION:
+# Use Emacs Org "export-to" functions to convert a given Org file to a
+# picked format.
+#
+# Example:
+# @CODE
+# elisp-org-export-to texinfo README.org
+# mv README.texi ${PN}.texi || die
+# @CODE
+
+elisp-org-export-to() {
+ local export_format="${1}"
+ local org_file_path="${2}"
+
+ local export_group
+ case ${export_format} in
+ info) export_group=texinfo ;; # Straight to ".info".
+ markdown) export_group=md ;;
+ pdf) export_group=latex ;;
+ *) export_group=${export_format} ;;
+ esac
+
+ # export_format = texinfo => org-texinfo-export-to-texinfo
+ # export_format = pdf => org-latex-export-to-pdf
+
+ local export_function=org-${export_group}-export-to-${export_format}
+
+ ${EMACS} ${EMACSFLAGS} "${org_file_path}" -f "${export_function}" \
+ || die "Org export to ${export_format} failed"
+}
+
+# @FUNCTION: elisp-test-buttercup
+# @USAGE: [test-subdirectory] [test-runner-opts] ...
+# @DESCRIPTION:
+# Run ELisp package tests using the "buttercup" test runner.
+#
+# The option "test-subdirectory" may be given any number of times,
+# it should be given as though it was passed to Emacs or the test tool,
+# not as a string.
+#
+# The options "test-subdirectory" and "test-runner-opts" are optional,
+# but if "test-runner-opts" needs to be provided also "test-subdirectory"
+# has to be specified.
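+#
+# Example (the "tests" subdirectory name is illustrative):
+# @CODE
+# elisp-test-buttercup tests
+# @CODE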
+
+elisp-test-buttercup() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local test_dir="${1:-$(pwd)}"
+ shift
+
+ local -a myopts=(
+ ${BYTECOMPFLAGS}
+ -L "${test_dir}"
+ --traceback full
+ "$@"
+ )
+ ebegin "Running buttercup tests"
+ buttercup "${myopts[@]}" "${test_dir}"
+ eend $? "${FUNCNAME}: tests failed" || die
+}
+
+# @FUNCTION: elisp-test-ert-runner
+# @USAGE: [test-subdirectory] [test-runner-opts] ...
+# @DESCRIPTION:
+# Run ELisp package tests using the "ert-runner" test runner.
+#
+# The option "test-subdirectory" may be given any number of times,
+# it should be given as though it was passed to Emacs or the test tool,
+# not as a string.
+#
+# The options "test-subdirectory" and "test-runner-opts" are optional,
+# but if "test-runner-opts" needs to be provided also "test-subdirectory"
+# has to be specified.
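+#
+# Example (the "test" subdirectory name and the tag option are illustrative):
+# @CODE
+# elisp-test-ert-runner test -t "!org"
+# @CODE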
+
+elisp-test-ert-runner() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local test_dir="${1:-$(pwd)}"
+ shift
+
+ local -a myopts=(
+ ${BYTECOMPFLAGS}
+ --reporter ert+duration
+ --script
+ -L "${test_dir}"
+ "$@"
+ )
+ ebegin "Running ert-runner tests"
+ ert-runner "${myopts[@]}" "${test_dir}"
+ eend $? "${FUNCNAME}: tests failed" || die
+}
+
+# @FUNCTION: elisp-test-ert
+# @USAGE: [test-subdirectory] [test-runner-opts] ...
+# @DESCRIPTION:
+# Run ELisp package tests using "ert", the Emacs's built-in test runner.
+#
+# The option "test-subdirectory" may be given any number of times,
+# it should be given as though it was passed to Emacs or the test tool,
+# not as a string.
+#
+# The options "test-subdirectory" and "test-runner-opts" are optional,
+# but if "test-runner-opts" needs to be provided also "test-subdirectory"
+# has to be specified.
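+#
+# Example (the "tests" subdirectory name is illustrative):
+# @CODE
+# elisp-test-ert "${S}"/tests
+# @CODE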
+
+elisp-test-ert() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local test_dir="${1:-$(pwd)}"
+ shift
+
+ local -a extra_load=()
+ local extra_load_file
+ for extra_load_file in "${test_dir}"/?*-test.el; do
+ if [[ -f "${extra_load_file}" ]]; then
+ extra_load+=( -l "${extra_load_file}" )
+ fi
+ done
+
+ local -a myopts=(
+ ${EMACSFLAGS}
+ ${BYTECOMPFLAGS}
+ -L "${test_dir}"
+ "${extra_load[@]}"
+ "$@"
+ -f ert-run-tests-batch-and-exit
+ )
+ ebegin "Running ert tests"
+ ${EMACS} "${myopts[@]}"
+ eend $? "${FUNCNAME}: tests failed" || die
+}
+
+# @FUNCTION: elisp-enable-tests
+# @USAGE: [--optional] <test-runner> [test-runner-options] ...
+# @DESCRIPTION:
+# Set up IUSE, RESTRICT, BDEPEND and test runner function for running
+# tests with the specified test runner.
+#
+# The test-runner argument must be one of:
+#
+# - buttercup: for "buttercup" provided via "app-emacs/buttercup"
+#
+# - ert-runner: for "ert-runner" provided via "app-emacs/ert-runner"
+#
+# - ert: for built-in GNU Emacs test utility
+#
+# If the "--optional" flag is passed (before specifying the test
+# runner), then it is assumed that the ELisp package is a part of some
+# some project that optionally enables GNU Emacs support. This will
+# correctly set up the test and Emacs dependencies.
+#
+# Notice that the first option passed to the "test-runner" is the
+# directory and the rest are miscellaneous options applicable to that
+# given runner.
+#
+# This function has to be called post inherit, specifically after
+# "IUSE", "RESTRICT" and "BDEPEND" variables are assigned.
+# It is advised to place this call right before (re)defining a given
+# ebuild's phases.
+#
+# Example:
+# @CODE
+# inherit elisp-common
+#
+# ...
+#
+# elisp-enable-tests --optional ert-runner "${S}"/elisp -t "!org"
+#
+# src_test() {
+# emake -C tests test
+# elisp-test
+# }
+# @CODE
+
+elisp-enable-tests() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local optional
+ if [[ ${1} = "--optional" ]] ; then
+ optional=YES
+ shift
+ fi
+
+ local test_pkg
+ local test_runner=${1}
+ shift
+
+ _ELISP_TEST_OPTS=( "$@" )
+
+ case ${test_runner} in
+ buttercup )
+ test_pkg="app-emacs/buttercup"
+ _ELISP_TEST_FUNCTION=elisp-test-buttercup
+ ;;
+ ert-runner )
+ test_pkg="app-emacs/ert-runner"
+ _ELISP_TEST_FUNCTION=elisp-test-ert-runner
+ ;;
+ ert )
+ _ELISP_TEST_FUNCTION=elisp-test-ert
+ ;;
+ * )
+ die "${FUNCNAME}: unknown test runner, given ${test_runner}"
+ ;;
+ esac
+
+ if [[ ${test_pkg} ]]; then
+ IUSE+=" test "
+ RESTRICT+=" !test? ( test ) "
+ if [[ ${optional} ]]; then
+ IUSE+=" emacs "
+ BDEPEND+=" test? ( emacs? ( ${test_pkg} ) ) "
+ else
+ BDEPEND+=" test? ( ${test_pkg} ) "
+ fi
+ fi
+
+ return 0
+}
+
+# @FUNCTION: elisp-test
+# @DESCRIPTION:
+# Test the package using an ELisp test runner.
+#
+# If called without executing "elisp-enable-tests" beforehand, this
+# function does nothing; otherwise the configured test runner is called
+# with the given "test-runner-options".
+
+elisp-test() {
+ if [[ ${_ELISP_TEST_FUNCTION} ]]; then
+ ${_ELISP_TEST_FUNCTION} "${_ELISP_TEST_OPTS[@]}"
+ fi
+}
+
# @FUNCTION: elisp-install
# @USAGE: <subdirectory> <list of files>
# @DESCRIPTION:
@@ -379,7 +628,13 @@ elisp-modules-install() {
elisp-site-file-install() {
local sf="${1##*/}" my_pn="${2:-${PN}}" modules ret
- local header=";;; ${PN} site-lisp configuration"
+ local add_header="1 {
+ # Find first non-empty line
+ :x; /^\$/ { n; bx; }
+ # Insert a header, unless we already look at one
+ /^;.*${PN}/I! s/^/;;; ${PN} site-lisp configuration\n\n/
+ 1 s/^/\n/
+ }"
[[ ${sf} == [0-9][0-9]*-gentoo*.el ]] \
|| ewarn "elisp-site-file-install: bad name of site-init file"
@@ -388,7 +643,7 @@ elisp-site-file-install() {
ebegin "Installing site initialisation file for GNU Emacs"
[[ $1 == "${sf}" ]] || cp "$1" "${sf}"
modules=${EMACSMODULES//@libdir@/$(get_libdir)}
- sed -i -e "1{:x;/^\$/{n;bx;};/^;.*${PN}/I!s:^:${header}\n\n:;1s:^:\n:;}" \
+ sed -i -e "${add_header}" \
-e "s:@SITELISP@:${EPREFIX}${SITELISP}/${my_pn}:g" \
-e "s:@SITEETC@:${EPREFIX}${SITEETC}/${my_pn}:g" \
-e "s:@EMACSMODULES@:${EPREFIX}${modules}/${my_pn}:g;\$q" "${sf}"
@@ -401,6 +656,30 @@ elisp-site-file-install() {
eend ${ret} "elisp-site-file-install: doins failed" || die
}
+# @FUNCTION: elisp-make-site-file
+# @USAGE: <filename> [subdirectory] [line]...
+# @DESCRIPTION:
+# Create and install a site-init file for the package. By default,
+# this will add the package's SITELISP subdirectory to Emacs' load-path:
+#
+# @CODE
+# (add-to-list 'load-path "@SITELISP@")
+# @CODE
+#
+# Additional arguments are appended as lines to the destination file.
+# Any @SITELISP@, @SITEETC@, and @EMACSMODULES@ tokens in these
+# arguments are replaced, as described for elisp-site-file-install.
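+#
+# Example (the extra line and the "foo-data-directory" variable are
+# hypothetical, shown only to illustrate token substitution; the
+# subdirectory argument defaults to ${PN}):
+#
+# @CODE
+# elisp-make-site-file "50${PN}-gentoo.el" "${PN}" \
+# "(setq foo-data-directory \"@SITEETC@\")"
+# @CODE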
+
+elisp-make-site-file() {
+ [[ $1 == [0-9][0-9]*-gentoo.el ]] \
+ || die "elisp-make-site-file: bad name of site-init file"
+
+ local f="${T}/$1" my_pn="${2:-${PN}}"
+ shift; shift
+ printf "%s\n" "(add-to-list 'load-path \"@SITELISP@\")" "$@" >"${f}" || die
+ elisp-site-file-install "${f}" "${my_pn}"
+}
+
# @FUNCTION: elisp-site-regen
# @DESCRIPTION:
# Regenerate the site-gentoo.el file, based on packages' site
@@ -408,7 +687,7 @@ elisp-site-file-install() {
# directory.
elisp-site-regen() {
- local sitelisp=${ROOT%/}${EPREFIX}${SITELISP}
+ local sitelisp=${EROOT}${SITELISP}
local sf i ret=0 null="" page=$'\f'
local -a sflist
@@ -420,9 +699,6 @@ elisp-site-regen() {
[[ -d ${sitelisp} ]] \
|| die "elisp-site-regen: Directory ${sitelisp} does not exist"
- [[ -d ${T} ]] \
- || die "elisp-site-regen: Temporary directory ${T} does not exist"
-
ebegin "Regenerating site-gentoo.el for GNU Emacs (${EBUILD_PHASE})"
for sf in "${sitelisp}"/site-gentoo.d/[0-9][0-9]*.el; do
@@ -447,6 +723,7 @@ elisp-site-regen() {
;; Local ${null}Variables:
;; no-byte-compile: t
+ ;; no-native-compile: t
;; buffer-read-only: t
;; End:
diff --git a/eclass/elisp.eclass b/eclass/elisp.eclass
index 6b6679df5c5d..20139491c976 100644
--- a/eclass/elisp.eclass
+++ b/eclass/elisp.eclass
@@ -1,4 +1,4 @@
-# Copyright 2002-2022 Gentoo Authors
+# Copyright 2002-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: elisp.eclass
@@ -9,7 +9,8 @@
# Jeremy Maitin-Shepard <jbms@attbi.com>
# Christian Faulhammer <fauli@gentoo.org>
# Ulrich Müller <ulm@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# Maciej Barć <xgqt@gentoo.org>
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: elisp-common
# @BLURB: Eclass for Emacs Lisp packages
# @DESCRIPTION:
@@ -31,9 +32,9 @@
# @PRE_INHERIT
# @DEFAULT_UNSET
# @DESCRIPTION:
-# If you need anything different from Emacs 23, use the NEED_EMACS
-# variable before inheriting elisp.eclass. Set it to the version your
-# package uses and the dependency will be adjusted.
+# If you need anything different from Emacs 25.3 (or newer), use the
+# NEED_EMACS variable before inheriting elisp.eclass. Set it to the
+# version your package uses and the dependency will be adjusted.
# @ECLASS_VARIABLE: ELISP_PATCHES
# @DEFAULT_UNSET
@@ -65,18 +66,12 @@
inherit elisp-common
case ${EAPI} in
- 6|7|8) ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_{unpack,prepare,configure,compile,install} \
- pkg_{setup,postinst,postrm}
-
RDEPEND=">=app-editors/emacs-${NEED_EMACS}:*"
-case ${EAPI} in
- 6) DEPEND="${RDEPEND}" ;;
- *) BDEPEND="${RDEPEND}" ;;
-esac
+BDEPEND="${RDEPEND}"
# @FUNCTION: elisp_pkg_setup
# @DESCRIPTION:
@@ -131,7 +126,7 @@ elisp_src_prepare() {
# @FUNCTION: elisp_src_configure
# @DESCRIPTION:
-# Do nothing, because Emacs packages seldomly bring a full build system.
+# Do nothing, because Emacs packages seldom bring a full build system.
elisp_src_configure() { :; }
@@ -148,6 +143,19 @@ elisp_src_compile() {
fi
}
+# @FUNCTION: elisp_src_test
+# @DESCRIPTION:
+# Call "elisp-test" to test the package if "elisp-enable-tests" was called
+# beforehand, otherwise execute the default test function - "src_test".
+
+elisp_src_test() {
+ if [[ ${_ELISP_TEST_FUNCTION} ]]; then
+ elisp-test
+ else
+ default_src_test
+ fi
+}
+
# @FUNCTION: elisp_src_install
# @DESCRIPTION:
# Call elisp-install to install all Emacs Lisp (*.el and *.elc) files.
@@ -158,7 +166,11 @@ elisp_src_compile() {
elisp_src_install() {
elisp-install ${PN} *.el *.elc
if [[ -n ${SITEFILE} ]]; then
- elisp-site-file-install "${FILESDIR}/${SITEFILE}"
+ if [[ -f "${FILESDIR}/${SITEFILE}" ]]; then
+ elisp-site-file-install "${FILESDIR}/${SITEFILE}"
+ else
+ elisp-make-site-file "${SITEFILE}"
+ fi
fi
if [[ -n ${ELISP_TEXINFO} ]]; then
set -- ${ELISP_TEXINFO}
@@ -192,3 +204,6 @@ elisp_pkg_postinst() {
elisp_pkg_postrm() {
elisp-site-regen
}
+
+EXPORT_FUNCTIONS src_{unpack,prepare,configure,compile,test,install} \
+ pkg_{setup,postinst,postrm}
diff --git a/eclass/epatch.eclass b/eclass/epatch.eclass
index 6a9c460da0a3..c42aef8e80d1 100644
--- a/eclass/epatch.eclass
+++ b/eclass/epatch.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: epatch.eclass
# @MAINTAINER:
# base-system@gentoo.org
-# @SUPPORTED_EAPIS: 0 1 2 3 4 5 6
+# @SUPPORTED_EAPIS: 6
# @BLURB: easy patch application functions
# @DEPRECATED: eapply from EAPI 7
# @DESCRIPTION:
@@ -13,11 +13,9 @@
if [[ -z ${_EPATCH_ECLASS} ]]; then
-case ${EAPI:-0} in
- 0|1|2|3|4|5|6)
- ;;
- *)
- die "${ECLASS}: banned in EAPI=${EAPI}; use eapply* instead";;
+case ${EAPI} in
+ 6) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
inherit estack
@@ -52,10 +50,6 @@ EPATCH_COMMON_OPTS="-g0 -E --no-backup-if-mismatch"
# List of patches not to apply. Note this is only file names,
# and not the full path. Globs accepted.
EPATCH_EXCLUDE=""
-# @VARIABLE: EPATCH_SINGLE_MSG
-# @DESCRIPTION:
-# Change the printed message for a single patch.
-EPATCH_SINGLE_MSG=""
# @VARIABLE: EPATCH_MULTI_MSG
# @DESCRIPTION:
# Change the printed message for multiple patches.
@@ -196,7 +190,7 @@ epatch() {
local patchname=${x##*/}
# Apply single patches, or forced sets of patches, or
- # patches with ARCH dependant names.
+ # patches with ARCH dependent names.
# ???_arch_foo.patch
# Else, skip this input altogether
local a=${patchname#*_} # strip the ???_
@@ -235,13 +229,9 @@ epatch() {
fi
if [[ ${SINGLE_PATCH} == "yes" ]] ; then
- if [[ -n ${EPATCH_SINGLE_MSG} ]] ; then
- einfo "${EPATCH_SINGLE_MSG}"
- else
- einfo "Applying ${patchname} ..."
- fi
+ ebegin "Applying ${patchname}"
else
- einfo " ${patchname} ..."
+ ebegin " ${patchname}"
fi
# Handle aliased patch command #404447 #461568
@@ -282,7 +272,7 @@ epatch() {
fi
# Check for absolute paths in patches. If sandbox is disabled,
- # people could (accidently) patch files in the root filesystem.
+ # people could (accidentally) patch files in the root filesystem.
# Or trigger other unpleasantries #237667. So disallow -p0 on
# such patches.
local abs_paths=$(grep -E -n '^[-+]{3} /' "${PATCH_TARGET}" | awk '$2 != "/dev/null" { print }')
@@ -386,85 +376,5 @@ epatch() {
: # everything worked
}
-case ${EAPI:-0} in
-0|1|2|3|4|5)
-
-# @ECLASS_VARIABLE: EPATCH_USER_SOURCE
-# @USER_VARIABLE
-# @DESCRIPTION:
-# Location for user patches, see the epatch_user function.
-# Should be set by the user. Don't set this in ebuilds.
-: ${EPATCH_USER_SOURCE:=${PORTAGE_CONFIGROOT%/}/etc/portage/patches}
-
-# @FUNCTION: epatch_user
-# @USAGE:
-# @DESCRIPTION:
-# Applies user-provided patches to the source tree. The patches are
-# taken from /etc/portage/patches/<CATEGORY>/<P-PR|P|PN>[:SLOT]/, where the first
-# of these three directories to exist will be the one to use, ignoring
-# any more general directories which might exist as well. They must end
-# in ".patch" to be applied.
-#
-# User patches are intended for quick testing of patches without ebuild
-# modifications, as well as for permanent customizations a user might
-# desire. Obviously, there can be no official support for arbitrarily
-# patched ebuilds. So whenever a build log in a bug report mentions that
-# user patches were applied, the user should be asked to reproduce the
-# problem without these.
-#
-# Not all ebuilds do call this function, so placing patches in the
-# stated directory might or might not work, depending on the package and
-# the eclasses it inherits and uses. It is safe to call the function
-# repeatedly, so it is always possible to add a call at the ebuild
-# level. The first call is the time when the patches will be
-# applied.
-#
-# Ideally, this function should be called after gentoo-specific patches
-# have been applied, so that their code can be modified as well, but
-# before calls to e.g. eautoreconf, as the user patches might affect
-# autotool input files as well.
-epatch_user() {
- [[ $# -ne 0 ]] && die "epatch_user takes no options"
-
- # Allow multiple calls to this function; ignore all but the first
- local applied="${T}/epatch_user.log"
- [[ -e ${applied} ]] && return 2
-
- # don't clobber any EPATCH vars that the parent might want
- local EPATCH_SOURCE check
- for check in ${CATEGORY}/{${P}-${PR},${P},${PN}}{,:${SLOT%/*}}; do
- EPATCH_SOURCE=${EPATCH_USER_SOURCE}/${CTARGET}/${check}
- [[ -r ${EPATCH_SOURCE} ]] || EPATCH_SOURCE=${EPATCH_USER_SOURCE}/${CHOST}/${check}
- [[ -r ${EPATCH_SOURCE} ]] || EPATCH_SOURCE=${EPATCH_USER_SOURCE}/${check}
- if [[ -d ${EPATCH_SOURCE} ]] ; then
- local old_n_applied_patches=${EPATCH_N_APPLIED_PATCHES:-0}
- EPATCH_SOURCE=${EPATCH_SOURCE} \
- EPATCH_SUFFIX="patch" \
- EPATCH_FORCE="yes" \
- EPATCH_MULTI_MSG="Applying user patches from ${EPATCH_SOURCE} ..." \
- epatch
- echo "${EPATCH_SOURCE}" > "${applied}"
- if [[ ${old_n_applied_patches} -lt ${EPATCH_N_APPLIED_PATCHES} ]]; then
- has epatch_user_death_notice ${EBUILD_DEATH_HOOKS} || \
- EBUILD_DEATH_HOOKS+=" epatch_user_death_notice"
- fi
- return 0
- fi
- done
- echo "none" > "${applied}"
- return 1
-}
-
-# @FUNCTION: epatch_user_death_notice
-# @INTERNAL
-# @DESCRIPTION:
-# Include an explicit notice in the die message itself that user patches were
-# applied to this build.
-epatch_user_death_notice() {
- ewarn "!!! User patches were applied to this build!"
-}
-
-esac
-
_EPATCH_ECLASS=1
fi #_EPATCH_ECLASS
diff --git a/eclass/eqawarn.eclass b/eclass/eqawarn.eclass
new file mode 100644
index 000000000000..288976182fb3
--- /dev/null
+++ b/eclass/eqawarn.eclass
@@ -0,0 +1,26 @@
+# Copyright 1999-2022 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: eqawarn.eclass
+# @MAINTAINER:
+# base-system@gentoo.org
+# @SUPPORTED_EAPIS: 6
+# @BLURB: output a QA warning
+
+case ${EAPI} in
+ 6) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+# @FUNCTION: eqawarn
+# @USAGE: [message]
+# @DESCRIPTION:
+# Proxy to ewarn for package managers that don't provide eqawarn;
+# the package manager implementation is used if available. Reuses
+# PORTAGE_ELOG_CLASSES as set by the dev profile.
+if ! declare -F eqawarn >/dev/null ; then
+ eqawarn() {
+ has qa ${PORTAGE_ELOG_CLASSES} && ewarn "$@"
+ :
+ }
+fi
diff --git a/eclass/estack.eclass b/eclass/estack.eclass
index c0823adb03f7..d00c931e117d 100644
--- a/eclass/estack.eclass
+++ b/eclass/estack.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2020 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: estack.eclass
@@ -156,7 +156,7 @@ evar_pop() {
eshopts_push() {
# Save both "shopt" and "set -o" option sets, because otherwise
# restoring posix would disable expand_aliases by side effect. #662586
- estack_push eshopts "$(shopt -p -o) $(shopt -p)"
+ estack_push eshopts "$(shopt -p -o; shopt -p)"
if [[ $1 == -[su] ]] ; then
[[ $# -le 1 ]] && return 0
shopt "$@" || die "${FUNCNAME}: bad options to shopt: $*"
diff --git a/eclass/eutils.eclass b/eclass/eutils.eclass
index 207d05e7f975..1e36c78f7780 100644
--- a/eclass/eutils.eclass
+++ b/eclass/eutils.eclass
@@ -1,194 +1,21 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+# @DEAD
# @ECLASS: eutils.eclass
# @MAINTAINER:
# base-system@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 6
# @BLURB: many extra (but common) functions that are used in ebuilds
-# @DESCRIPTION:
-# The eutils eclass contains a suite of functions that complement
-# the ones that ebuild.sh already contain. The idea is that the functions
-# are not required in all ebuilds but enough utilize them to have a common
-# home rather than having multiple ebuilds implementing the same thing.
-#
-# Due to the nature of this eclass, some functions may have maintainers
-# different from the overall eclass!
-#
-# This eclass is DEPRECATED and must not be inherited by any new ebuilds
-# or eclasses. Use the more specific split eclasses instead, or native
-# package manager functions when available.
+# @DEPRECATED: native package manager functions, more specific eclasses
if [[ -z ${_EUTILS_ECLASS} ]]; then
_EUTILS_ECLASS=1
# implicitly inherited (now split) eclasses
case ${EAPI} in
- 5|6)
- inherit desktop edos2unix epatch estack ltprune multilib \
- preserve-libs strip-linguas toolchain-funcs vcs-clean wrapper
- ;;
- 7) inherit edos2unix strip-linguas wrapper ;;
+ 6) inherit desktop edos2unix epatch eqawarn estack ltprune multilib \
+ preserve-libs strip-linguas toolchain-funcs vcs-clean wrapper ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-
-# @FUNCTION: emktemp
-# @USAGE: [temp dir]
-# @DESCRIPTION:
-# Cheap replacement for when coreutils (and thus mktemp) does not exist
-# on the user's system.
-emktemp() {
- eqawarn "emktemp is deprecated. Create a temporary file in \${T} instead."
-
- local exe="touch"
- [[ $1 == -d ]] && exe="mkdir" && shift
- local topdir=$1
-
- if [[ -z ${topdir} ]] ; then
- [[ -z ${T} ]] \
- && topdir="/tmp" \
- || topdir=${T}
- fi
-
- if ! type -P mktemp > /dev/null ; then
- # system lacks `mktemp` so we have to fake it
- local tmp=/
- while [[ -e ${tmp} ]] ; do
- tmp=${topdir}/tmp.${RANDOM}.${RANDOM}.${RANDOM}
- done
- ${exe} "${tmp}" || ${exe} -p "${tmp}"
- echo "${tmp}"
- else
- # the args here will give slightly wierd names on BSD,
- # but should produce a usable file on all userlands
- if [[ ${exe} == "touch" ]] ; then
- TMPDIR="${topdir}" mktemp -t tmp.XXXXXXXXXX
- else
- TMPDIR="${topdir}" mktemp -dt tmp.XXXXXXXXXX
- fi
- fi
-}
-
-path_exists() {
- eerror "path_exists has been removed. Please see the following post"
- eerror "for a replacement snippet:"
- eerror "https://blogs.gentoo.org/mgorny/2018/08/09/inlining-path_exists/"
- die "path_exists is banned"
-}
-
-# @FUNCTION: use_if_iuse
-# @USAGE: <flag>
-# @DESCRIPTION:
-# Return true if the given flag is in USE and IUSE.
-#
-# Note that this function should not be used in the global scope.
-use_if_iuse() {
- eqawarn "use_if_iuse is deprecated."
- eqawarn "Define it as a local function, or inline it:"
- eqawarn " in_iuse foo && use foo"
- in_iuse $1 || return 1
- use $1
-}
-
-if [[ ${EAPI} == 5 ]] ; then
-
-# @FUNCTION: einstalldocs
-# @DESCRIPTION:
-# Install documentation using DOCS and HTML_DOCS, in EAPIs that do not
-# provide this function. When available (i.e., in EAPI 6 or later),
-# the package manager implementation should be used instead.
-#
-# If DOCS is declared and non-empty, all files listed in it are
-# installed. The files must exist, otherwise the function will fail.
-# In EAPI 4 and 5, DOCS may specify directories as well; in earlier
-# EAPIs using directories is unsupported.
-#
-# If DOCS is not declared, the files matching patterns given
-# in the default EAPI implementation of src_install will be installed.
-# If this is undesired, DOCS can be set to empty value to prevent any
-# documentation from being installed.
-#
-# If HTML_DOCS is declared and non-empty, all files and/or directories
-# listed in it are installed as HTML docs (using dohtml).
-#
-# Both DOCS and HTML_DOCS can either be an array or a whitespace-
-# separated list. Whenever directories are allowed, '<directory>/.' may
-# be specified in order to install all files within the directory
-# without creating a sub-directory in docdir.
-#
-# Passing additional options to dodoc and dohtml is not supported.
-# If you needed such a thing, you need to call those helpers explicitly.
-einstalldocs() {
- debug-print-function ${FUNCNAME} "${@}"
-
- local dodoc_opts=-r
-
- if ! declare -p DOCS &>/dev/null ; then
- local d
- for d in README* ChangeLog AUTHORS NEWS TODO CHANGES \
- THANKS BUGS FAQ CREDITS CHANGELOG ; do
- if [[ -s ${d} ]] ; then
- dodoc "${d}" || die
- fi
- done
- elif [[ $(declare -p DOCS) == "declare -a"* ]] ; then
- if [[ ${DOCS[@]} ]] ; then
- dodoc ${dodoc_opts} "${DOCS[@]}" || die
- fi
- else
- if [[ ${DOCS} ]] ; then
- dodoc ${dodoc_opts} ${DOCS} || die
- fi
- fi
-
- if [[ $(declare -p HTML_DOCS 2>/dev/null) == "declare -a"* ]] ; then
- if [[ ${HTML_DOCS[@]} ]] ; then
- dohtml -r "${HTML_DOCS[@]}" || die
- fi
- else
- if [[ ${HTML_DOCS} ]] ; then
- dohtml -r ${HTML_DOCS} || die
- fi
- fi
-
- return 0
-}
-
-# @FUNCTION: in_iuse
-# @USAGE: <flag>
-# @DESCRIPTION:
-# Determines whether the given flag is in IUSE. Strips IUSE default
-# prefixes as necessary. In EAPIs where it is available (i.e., EAPI 6
-# or later), the package manager implementation should be used instead.
-#
-# Note that this function must not be used in the global scope.
-in_iuse() {
- debug-print-function ${FUNCNAME} "${@}"
- [[ ${#} -eq 1 ]] || die "Invalid args to ${FUNCNAME}()"
-
- local flag=${1}
- local liuse=( ${IUSE} )
-
- has "${flag}" "${liuse[@]#[+-]}"
-}
-
-fi # EAPI 5
-
-if [[ ${EAPI} == [56] ]] ; then
-
-# @FUNCTION: eqawarn
-# @USAGE: [message]
-# @DESCRIPTION:
-# Proxy to ewarn for package managers that don't provide eqawarn and use the PM
-# implementation if available. Reuses PORTAGE_ELOG_CLASSES as set by the dev
-# profile.
-if ! declare -F eqawarn >/dev/null ; then
- eqawarn() {
- has qa ${PORTAGE_ELOG_CLASSES} && ewarn "$@"
- :
- }
-fi
-
-fi # EAPI [56]
-
fi
diff --git a/eclass/fcaps.eclass b/eclass/fcaps.eclass
index d1860f5ac9a3..477e1e954ab8 100644
--- a/eclass/fcaps.eclass
+++ b/eclass/fcaps.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: fcaps.eclass
# @MAINTAINER:
# base-system@gentoo.org
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: function to set POSIX file-based capabilities
# @DESCRIPTION:
# This eclass provides a function to set file-based capabilities on binaries.
@@ -30,8 +30,8 @@
# @CODE
case ${EAPI} in
- 6|7|8) ;;
- *) die "EAPI ${EAPI:-0} is unsupported" ;;
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_FCAPS_ECLASS} ]]; then
@@ -41,8 +41,8 @@ IUSE="+filecaps"
# Since it is needed in pkg_postinst() it must be in IDEPEND
case ${EAPI} in
- 7) BDEPEND="filecaps? ( sys-libs/libcap )" ;& # fallthrough
- 6) RDEPEND="filecaps? ( sys-libs/libcap )" ;;
+ 7) BDEPEND="filecaps? ( sys-libs/libcap )"
+ RDEPEND="filecaps? ( sys-libs/libcap )" ;;
*) IDEPEND="filecaps? ( sys-libs/libcap )" ;;
esac
@@ -191,6 +191,6 @@ fcaps_pkg_postinst() {
done
}
-EXPORT_FUNCTIONS pkg_postinst
-
fi
+
+EXPORT_FUNCTIONS pkg_postinst
diff --git a/eclass/findlib.eclass b/eclass/findlib.eclass
index 0e14514e298f..fdcaa0c1b77c 100644
--- a/eclass/findlib.eclass
+++ b/eclass/findlib.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: findlib.eclass
@@ -6,13 +6,13 @@
# ML <ml@gentoo.org>
# @AUTHOR:
# Original author: Matthieu Sozeau <mattam@gentoo.org> (retired)
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: ocamlfind (a.k.a. findlib) eclass
# @DESCRIPTION:
# ocamlfind (a.k.a. findlib) eclass
-case ${EAPI:-0} in
- [67]) ;;
+case ${EAPI} in
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -26,13 +26,16 @@ QA_FLAGS_IGNORED='.*'
IUSE="+ocamlopt"
# From this findlib version, there is proper stublibs support.
-DEPEND=">=dev-ml/findlib-1.0.4-r1"
+DEPEND=">=dev-ml/findlib-1.0.4-r1[ocamlopt?]"
[[ ${FINDLIB_USE} ]] && DEPEND="${FINDLIB_USE}? ( ${DEPEND} )"
RDEPEND="dev-lang/ocaml:=[ocamlopt?]"
[[ ${FINDLIB_USE} ]] && RDEPEND="${FINDLIB_USE}? ( ${RDEPEND} )"
+# @FUNCTION: check_ocamlfind
+# @DESCRIPTION:
+# Die if ocamlfind is not found
check_ocamlfind() {
- if [ ! -x "${EPREFIX}"/usr/bin/ocamlfind ] ; then
+ if [[ ! -x ${EPREFIX}/usr/bin/ocamlfind ]] ; then
eerror "In ${ECLASS}: could not find the ocamlfind executable"
eerror "Please report this bug on Gentoo's Bugzilla, assigning to ml@gentoo.org"
die "ocamlfind executable not found"
@@ -45,21 +48,19 @@ check_ocamlfind() {
# We use the stublibs style, so no ld.conf needs to be
# updated when a package installs C shared libraries.
findlib_src_preinst() {
- has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
- has "${EAPI:-0}" 0 1 2 && use !prefix && ED="${D}"
check_ocamlfind
# destdir is the ocaml sitelib
- local destdir=`ocamlfind printconf destdir`
+ local destdir=$(ocamlfind printconf destdir)
# strip off prefix
destdir=${destdir#${EPREFIX}}
- dodir ${destdir} || die "dodir failed"
+ dodir "${destdir}"
export OCAMLFIND_DESTDIR=${ED}${destdir}
# stublibs style
- dodir ${destdir}/stublibs || die "dodir failed"
+ dodir "${destdir}"/stublibs
export OCAMLFIND_LDCONF=ignore
}
diff --git a/eclass/flag-o-matic.eclass b/eclass/flag-o-matic.eclass
index 50caa401bacf..0e5271c7824f 100644
--- a/eclass/flag-o-matic.eclass
+++ b/eclass/flag-o-matic.eclass
@@ -1,19 +1,18 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: flag-o-matic.eclass
# @MAINTAINER:
# toolchain@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: common functions to manipulate and query toolchain flags
# @DESCRIPTION:
# This eclass contains a suite of functions to help developers sanely
# and safely manage toolchain flags in their builds.
-case ${EAPI:-0} in
- 0|1|2|3|4) die "flag-o-matic.eclass: EAPI ${EAPI} is too old." ;;
- 5|6|7|8) ;;
- *) die "EAPI ${EAPI} is not supported by flag-o-matic.eclass." ;;
+case ${EAPI} in
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_FLAG_O_MATIC_ECLASS} ]]; then
@@ -21,11 +20,11 @@ _FLAG_O_MATIC_ECLASS=1
inherit toolchain-funcs
-[[ ${EAPI} == [567] ]] && inherit eutils
+[[ ${EAPI} == 6 ]] && inherit eqawarn
# @FUNCTION: all-flag-vars
# @DESCRIPTION:
-# Return all the flag variables that our high level funcs operate on.
+# Return all the flag variables that our high level functions operate on.
all-flag-vars() {
echo {ADA,C,CPP,CXX,CCAS,F,FC,LD}FLAGS
}
@@ -36,7 +35,7 @@ all-flag-vars() {
# {C,CPP,CXX,CCAS,F,FC,LD}FLAGS that we allow in strip-flags
# Note: shell globs and character lists are allowed
setup-allowed-flags() {
- [[ ${EAPI} == [567] ]] ||
+ [[ ${EAPI} == [67] ]] ||
die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."
_setup-allowed-flags "$@"
}
@@ -48,20 +47,26 @@ setup-allowed-flags() {
# Note: shell globs and character lists are allowed
_setup-allowed-flags() {
ALLOWED_FLAGS=(
- -pipe -O '-O[12sg]' '-mcpu=*' '-march=*' '-mtune=*'
+ -pipe -O '-O[123szg]' '-mcpu=*' '-march=*' '-mtune=*' '-mfpmath=*'
+ -flto '-flto=*' -fno-lto
# Hardening flags
'-fstack-protector*'
- '-fstack-check*' -fno-stack-check
-fstack-clash-protection
'-fcf-protection=*'
-fbounds-check -fbounds-checking
-fno-PIE -fno-pie -nopie -no-pie
+ -fharden-compares -fharden-conditional-branches
+ -fharden-control-flow-redundancy -fno-harden-control-flow-redundancy
+ -fhardcfr-skip-leaf -fhardcfr-check-exceptions -fhardcfr-check-returning-calls
+ '-fhardcfr-check-noreturn-calls=*'
# Spectre mitigations, bug #646076
'-mindirect-branch=*'
-mindirect-branch-register
'-mfunction-return=*'
-mretpoline
+ '-mharden-sls=*'
+ '-mbranch-protection=*'
# Misc
-fno-unit-at-a-time -fno-strict-overflow
@@ -76,6 +81,7 @@ _setup-allowed-flags() {
-gstabs -gstabs+
-gz
-glldb
+ '-fdebug-default-version=*'
# Cosmetic/output related, see e.g. bug #830534
-fno-diagnostics-color '-fmessage-length=*'
@@ -96,6 +102,7 @@ _setup-allowed-flags() {
'-fno-stack-protector*' '-fabi-version=*'
-fno-strict-aliasing -fno-bounds-check -fno-bounds-checking -fstrict-overflow
-fno-omit-frame-pointer '-fno-builtin*'
+ -mno-omit-leaf-frame-pointer
)
ALLOWED_FLAGS+=(
'-mregparm=*' -mno-app-regs -mapp-regs -mno-mmx -mno-sse
@@ -108,8 +115,15 @@ _setup-allowed-flags() {
-mno-faster-structs -mfaster-structs -m32 -m64 -mx32 '-mabi=*'
-mlittle-endian -mbig-endian -EL -EB -fPIC -mlive-g0 '-mcmodel=*'
-mstack-bias -mno-stack-bias -msecure-plt '-m*-toc' '-mfloat-abi=*'
+
+ # This is default on for a bunch of arches except amd64 in GCC
+ # already, and amd64 itself is planned too.
+ '-mtls-dialect=*'
+
+ # MIPS errata
-mfix-r4000 -mno-fix-r4000 -mfix-r4400 -mno-fix-r4400
- -mfix-rm7000 -mno-fix-rm7000 -mfix-r10000 -mno-fix-r10000
+ -mfix-r10000 -mno-fix-r10000
+
'-mr10k-cache-barrier=*' -mthumb -marm
# needed for arm64 (and in particular SCS)
@@ -132,6 +146,14 @@ _setup-allowed-flags() {
# Allow explicit stack realignment to run non-conformant
# binaries: bug #677852
-mstackrealign
+ '-mpreferred-stack-boundary=*'
+ '-mincoming-stack-boundary=*'
+ )
+ ALLOWED_FLAGS+=(
+ # Clang-only
+ '--unwindlib=*'
+ '--rtlib=*'
+ '--stdlib=*'
)
}
@@ -148,7 +170,10 @@ _filter-hardened() {
# not -fPIC or -fpic, but too many places filter -fPIC without
# thinking about -fPIE.
-fPIC|-fpic|-fPIE|-fpie|-Wl,pie|-pie)
- gcc-specs-pie || continue
+ if ! gcc-specs-pie && ! tc-enables-pie ; then
+ continue
+ fi
+
if ! is-flagq -nopie && ! is-flagq -no-pie ; then
# Support older Gentoo form first (-nopie) before falling
# back to the official gcc-6+ form (-no-pie).
@@ -159,15 +184,36 @@ _filter-hardened() {
fi
fi
;;
- -fstack-protector)
- gcc-specs-ssp || continue
- is-flagq -fno-stack-protector || append-flags $(test-flags -fno-stack-protector);;
+
+ -fstack-protector|-fstack-protector-strong)
+ if ! gcc-specs-ssp && ! tc-enables-ssp && ! tc-enables-ssp-strong ; then
+ continue
+ fi
+
+ is-flagq -fno-stack-protector || append-flags $(test-flags -fno-stack-protector)
+ ;;
-fstack-protector-all)
- gcc-specs-ssp-to-all || continue
- is-flagq -fno-stack-protector-all || append-flags $(test-flags -fno-stack-protector-all);;
+ if ! gcc-specs-ssp-to-all && ! tc-enables-ssp-all ; then
+ continue
+ fi
+
+ is-flagq -fno-stack-protector-all || append-flags $(test-flags -fno-stack-protector-all)
+ ;;
-fno-strict-overflow)
gcc-specs-nostrict || continue
- is-flagq -fstrict-overflow || append-flags $(test-flags -fstrict-overflow);;
+
+ is-flagq -fstrict-overflow || append-flags $(test-flags -fstrict-overflow)
+ ;;
+ -D_GLIBCXX_ASSERTIONS|-D_LIBCPP_ENABLE_ASSERTIONS|-D_LIBCPP_ENABLE_HARDENED_MODE)
+ tc-enables-cxx-assertions || continue
+
+ append-cppflags -U_GLIBCXX_ASSERTIONS -U_LIBCPP_ENABLE_ASSERTIONS -U_LIBCPP_ENABLE_HARDENED_MODE
+ ;;
+ -D_FORTIFY_SOURCE=*)
+ tc-enables-fortify-source || continue
+
+ append-cppflags -U_FORTIFY_SOURCE
+ ;;
esac
done
}
@@ -210,6 +256,7 @@ filter-flags() {
# Remove flags that enable Large File Support.
filter-lfs-flags() {
[[ $# -ne 0 ]] && die "filter-lfs-flags takes no arguments"
+
# http://www.gnu.org/s/libc/manual/html_node/Feature-Test-Macros.html
# _LARGEFILE_SOURCE: enable support for new LFS funcs (ftello/etc...)
# _LARGEFILE64_SOURCE: enable support for 64bit variants (off64_t/fseeko64/etc...)
@@ -218,6 +265,14 @@ filter-lfs-flags() {
filter-flags -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE -D_TIME_BITS=64
}
+# @FUNCTION: filter-lto
+# @DESCRIPTION:
+# Remove flags that enable LTO and those that depend on it
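+#
+# Example (a minimal sketch for a package that is known to misbehave
+# when built with LTO):
+#
+# @CODE
+# src_configure() {
+# filter-lto
+# default
+# }
+# @CODE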
+filter-lto() {
+ [[ $# -ne 0 ]] && die "filter-lto takes no arguments"
+ filter-flags '-flto*' -fwhole-program-vtables '-fsanitize=cfi*'
+}
+
# @FUNCTION: filter-ldflags
# @USAGE: <flags>
# @DESCRIPTION:
@@ -247,7 +302,7 @@ append-cppflags() {
# @CODE
append-cflags() {
[[ $# -eq 0 ]] && return 0
- # Do not do automatic flag testing ourselves. #417047
+ # Do not do automatic flag testing ourselves, bug #417047
export CFLAGS+=" $*"
return 0
}
@@ -262,7 +317,7 @@ append-cflags() {
# @CODE
append-cxxflags() {
[[ $# -eq 0 ]] && return 0
- # Do not do automatic flag testing ourselves. #417047
+ # Do not do automatic flag testing ourselves, bug #417047
export CXXFLAGS+=" $*"
return 0
}
@@ -277,7 +332,7 @@ append-cxxflags() {
# @CODE
append-fflags() {
[[ $# -eq 0 ]] && return 0
- # Do not do automatic flag testing ourselves. #417047
+ # Do not do automatic flag testing ourselves, bug #417047
export FFLAGS+=" $*"
export FCFLAGS+=" $*"
return 0
@@ -288,7 +343,8 @@ append-fflags() {
# Add flags that enable Large File Support.
append-lfs-flags() {
[[ $# -ne 0 ]] && die "append-lfs-flags takes no arguments"
- # see comments in filter-lfs-flags func for meaning of these
+
+ # See comments in filter-lfs-flags func for meaning of these
append-cppflags -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE
}
@@ -315,9 +371,9 @@ append-ldflags() {
append-flags() {
[[ $# -eq 0 ]] && return 0
case " $* " in
- *' '-[DIU]*) eqawarn 'please use append-cppflags for preprocessor flags' ;;
+ *' '-[DIU]*) eqawarn 'Please use append-cppflags for preprocessor flags' ;;
*' '-L*|\
- *' '-Wl,*) eqawarn 'please use append-ldflags for linker flags' ;;
+ *' '-Wl,*) eqawarn 'Please use append-ldflags for linker flags' ;;
esac
append-cflags "$@"
append-cxxflags "$@"
@@ -504,7 +560,7 @@ strip-flags() {
# Returns shell true if <flag> is supported by given <compiler>,
# else returns shell false.
test-flag-PROG() {
- [[ ${EAPI} == [567] ]] ||
+ [[ ${EAPI} == [67] ]] ||
die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."
_test-flag-PROG "$@"
}
@@ -565,6 +621,15 @@ _test-flag-PROG() {
c+ld)
in_ext='c'
in_src='int main(void) { return 0; }'
+
+ if is-ldflagq -fuse-ld=* ; then
+ # Respect linker chosen by user so we don't
+ # end up giving false results by checking
+ # with default linker. bug #832377
+ fuse_ld_value=$(get-flag -fuse-ld=*)
+ cmdline_extra+=(${fuse_ld_value})
+ fi
+
cmdline_extra+=(-xc)
;;
esac
@@ -574,7 +639,7 @@ _test-flag-PROG() {
printf "%s\n" "${in_src}" > "${test_in}" || die "Failed to create '${test_in}'"
# Currently we rely on warning-free output of a compiler
- # before the flag to see if a flag prduces any warnings.
+ # before the flag to see if a flag produces any warnings.
# This has a few drawbacks:
# - if compiler already generates warnings we filter out
# every single flag: bug #712488
@@ -583,11 +648,20 @@ _test-flag-PROG() {
#
# We can add more selective detection of no-op flags via
# '-Werror=ignored-optimization-argument' and similar error options
- # similar to what we are doing with '-Qunused-arguments'.
+ # or accept unused flags with '-Qunused-arguments' like we
+ # used to for bug #627474. Since we now invoke the linker
+ # for testing linker flags, unused argument warnings aren't
+ # ignored; linker flags may no longer be accepted in CFLAGS.
+ #
+ # However, warnings emitted by a compiler for a clean source
+ # can break feature detection by CMake or autoconf since
+ # many checks use -Werror internally. See e.g. bug #714742.
local cmdline=(
"${comp[@]}"
# Clang will warn about unknown gcc flags but exit 0.
# Need -Werror to force it to exit non-zero.
+ #
+ # See also bug #712488 and bug #714742.
-Werror
"$@"
# -x<lang> options need to go before first source file
@@ -596,14 +670,7 @@ _test-flag-PROG() {
"${test_in}" -o "${test_out}"
)
- if ! "${cmdline[@]}" &>/dev/null; then
- # -Werror makes clang bail out on unused arguments as well;
- # try to add -Qunused-arguments to work-around that
- # other compilers don't support it but then, it's failure like
- # any other
- cmdline+=( -Qunused-arguments )
- "${cmdline[@]}" &>/dev/null
- fi
+ "${cmdline[@]}" &>/dev/null
}
# @FUNCTION: test-flag-CC
@@ -643,7 +710,7 @@ test-flag-CCLD() { _test-flag-PROG CC c+ld "$@"; }
# Returns shell true if <flags> are supported by given <compiler>,
# else returns shell false.
test-flags-PROG() {
- [[ ${EAPI} == [567] ]] ||
+ [[ ${EAPI} == [67] ]] ||
die "Internal function ${FUNCNAME} is not available in EAPI ${EAPI}."
_test-flags-PROG "$@"
}
@@ -665,7 +732,7 @@ _test-flags-PROG() {
while (( $# )); do
case "$1" in
- # '-B /foo': bug # 687198
+ # '-B /foo': bug #687198
--param|-B)
if test-flag-${comp} "$1" "$2"; then
flags+=( "$1" "$2" )
@@ -851,7 +918,7 @@ raw-ldflags() {
x=${x#-Wl,}
set -- "$@" ${x//,/ }
;;
- *) # Assume it's a compiler driver flag, so throw it away #441808
+ *) # Assume it's a compiler driver flag, so throw it away, bug #441808
;;
esac
done
@@ -868,4 +935,107 @@ no-as-needed() {
esac
}
+# @FUNCTION: test-compile
+# @USAGE: <language> <code>
+# @DESCRIPTION:
+# Attempts to compile (and possibly link) the given program. The first
+# <language> parameter corresponds to the standard -x compiler argument.
+# If the program should additionally be linked, append the string
+# "+ld" to the <language> parameter.
+test-compile() {
+ local lang=$1
+ local code=$2
+ shift 2
+
+ [[ -z "${lang}" ]] && return 1
+ [[ -z "${code}" ]] && return 1
+
+ local compiler filename_in filename_out args=() libs=()
+ case "${lang}" in
+ c)
+ compiler="$(tc-getCC)"
+ filename_in="${T}/test.c"
+ filename_out="${T}/test.o"
+ args+=(${CFLAGS[@]} -xc -c)
+ ;;
+ c++)
+ compiler="$(tc-getCXX)"
+ filename_in="${T}/test.cc"
+ filename_out="${T}/test.o"
+ args+=(${CXXFLAGS[@]} -xc++ -c)
+ ;;
+ f77)
+ compiler="$(tc-getF77)"
+ filename_in="${T}/test.f"
+ filename_out="${T}/test.o"
+ args+=(${FFLAGS[@]} -xf77 -c)
+ ;;
+ f95)
+ compiler="$(tc-getFC)"
+ filename_in="${T}/test.f90"
+ filename_out="${T}/test.o"
+ args+=(${FCFLAGS[@]} -xf95 -c)
+ ;;
+ c+ld)
+ compiler="$(tc-getCC)"
+ filename_in="${T}/test.c"
+ filename_out="${T}/test.exe"
+ args+=(${CFLAGS[@]} ${LDFLAGS[@]} -xc)
+ libs+=(${LIBS[@]})
+ ;;
+ c+++ld)
+ compiler="$(tc-getCXX)"
+ filename_in="${T}/test.cc"
+ filename_out="${T}/test.exe"
+ args+=(${CXXFLAGS[@]} ${LDFLAGS[@]} -xc++)
+ libs+=(${LIBS[@]})
+ ;;
+ f77+ld)
+ compiler="$(tc-getF77)"
+ filename_in="${T}/test.f"
+ filename_out="${T}/test.exe"
+ args+=(${FFLAGS[@]} ${LDFLAGS[@]} -xf77)
+ libs+=(${LIBS[@]})
+ ;;
+ f95+ld)
+ compiler="$(tc-getFC)"
+ filename_in="${T}/test.f90"
+ filename_out="${T}/test.exe"
+ args+=(${FCFLAGS[@]} ${LDFLAGS[@]} -xf95)
+ libs+=(${LIBS[@]})
+ ;;
+ *)
+ die "Unknown compiled language ${lang}"
+ ;;
+ esac
+
+ printf "%s\n" "${code}" > "${filename_in}" || die "Failed to create '${test_in}'"
+
+ "${compiler}" ${args[@]} "${filename_in}" -o "${filename_out}" ${libs[@]} &>/dev/null
+}
+
+# @FUNCTION: append-atomic-flags
+# @DESCRIPTION:
+# Attempts to detect if appending -latomic works, and does so.
+append-atomic-flags() {
+ # Make sure that the flag is actually valid. If it isn't, then maybe the
+ # library both doesn't exist and is redundant, or maybe the toolchain is
+ # broken, but let the build succeed or fail on its own.
+ test-flags-CCLD "-latomic" &>/dev/null || return
+
+ # We unconditionally append this flag. In the case that it's needed, the
+ # flag is, well, needed. In the case that it's not needed, it causes no
+ # harm, because we ensure that this specific library is definitely
+ # certainly linked with as-needed.
+ #
+ # Really, this should be implemented directly in the compiler, including
+ # the use of push/pop for as-needed. It's exactly what the gcc spec file
+ # does for e.g. -lgcc_s, but gcc is concerned about doing so due to build
+ # system internals and as a result all users have to deal with this mess
+ # instead.
+ #
+ # See https://gcc.gnu.org/bugzilla/show_bug.cgi?id=81358
+ append-libs "-Wl,--push-state,--as-needed,-latomic,--pop-state"
+}
+
fi
diff --git a/eclass/font-ebdftopcf.eclass b/eclass/font-ebdftopcf.eclass
index 88256c3231bd..afd77e083bee 100644
--- a/eclass/font-ebdftopcf.eclass
+++ b/eclass/font-ebdftopcf.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: font-ebdftopcf.eclass
@@ -14,11 +14,9 @@
case ${EAPI} in
7) ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_compile
-
if [[ -z ${_FONT_EBDFTOPCF_ECLASS} ]]; then
_FONT_EBDFTOPCF_ECLASS=1
@@ -57,3 +55,5 @@ font-ebdftopcf_src_compile() {
}
fi
+
+EXPORT_FUNCTIONS src_compile
diff --git a/eclass/font.eclass b/eclass/font.eclass
index 83636ac3fed5..3d2578172bc9 100644
--- a/eclass/font.eclass
+++ b/eclass/font.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: font.eclass
@@ -7,16 +7,14 @@
# @SUPPORTED_EAPIS: 7 8
# @BLURB: Eclass to make font installation uniform
-case ${EAPI:-0} in
- [7-8]) ;;
- *) die "EAPI ${EAPI} is not supported by font.eclass." ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_FONT_ECLASS} ]]; then
+if [[ -z ${_FONT_ECLASS} ]]; then
_FONT_ECLASS=1
-EXPORT_FUNCTIONS pkg_setup src_install pkg_postinst pkg_postrm
-
# @ECLASS_VARIABLE: FONT_SUFFIX
# @DEFAULT_UNSET
# @REQUIRED
@@ -46,6 +44,12 @@ FONTDIR=${FONTDIR:-/usr/share/fonts/${FONT_PN}}
# Array containing fontconfig conf files to install.
FONT_CONF=( "" )
+# @ECLASS_VARIABLE: FONT_OPENTYPE_COMPAT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Determines whether detected BDF and PCF font files should be converted
+# to an SFNT wrapper, for use with newer Pango.
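+#
+# Example (the variable is checked at inherit time, so set it before
+# inheriting font.eclass):
+#
+# @CODE
+# FONT_OPENTYPE_COMPAT="yes"
+#
+# inherit font
+# @CODE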
+
if [[ ${CATEGORY}/${PN} != media-fonts/encodings ]]; then
IUSE="X"
BDEPEND="X? (
@@ -54,6 +58,31 @@ if [[ ${CATEGORY}/${PN} != media-fonts/encodings ]]; then
)"
fi
+if [[ -n ${FONT_OPENTYPE_COMPAT} ]] ; then
+ IUSE+=" +opentype-compat"
+ BDEPEND+=" opentype-compat? ( x11-apps/fonttosfnt )"
+fi
+
+# @FUNCTION: font_wrap_opentype_compat
+# @DESCRIPTION:
+# Converts .bdf and .pcf fonts detected within ${ED} to the OTB wrapper format
+# using x11-apps/fonttosfnt. Handles optional .gz extension.
+font_wrap_opentype_compat() {
+ local file tmpfile
+
+ while IFS= read -rd '' file; do
+ if [[ ${file} == *.gz ]] ; then
+ tmpfile=${file%.*}
+
+ gzip -cd -- "${file}" > "${tmpfile}" \
+ && fonttosfnt -v -o "${file%.*}.otb" -- "${tmpfile}" \
+ && rm -- "${tmpfile}"
+ else
+ fonttosfnt -v -o "${file%.*}.otb" -- "${file}"
+ fi || ! break
+ done < <(find "${ED}" \( -name '*.bdf' -o -name '*.bdf.gz' -o -name '*.pcf' -o -name '*.pcf.gz' \) -type f ! -type l -print0) || die
+}
+
# @FUNCTION: font_xfont_config
# @DESCRIPTION:
# Generate Xorg font files (mkfontscale/mkfontdir).
@@ -150,6 +179,10 @@ font_pkg_setup() {
font_src_install() {
local dir suffix commondoc
+ if [[ -n ${FONT_OPENTYPE_COMPAT} ]] && in_iuse opentype-compat && use opentype-compat ; then
+ font_wrap_opentype_compat
+ fi
+
if [[ $(declare -p FONT_S 2>/dev/null) == "declare -a"* ]]; then
# recreate the directory structure if FONT_S is an array
for dir in "${FONT_S[@]}"; do
@@ -186,10 +219,6 @@ font_src_install() {
# @DESCRIPTION:
# Updates fontcache if !prefix and media-libs/fontconfig installed
_update_fontcache() {
- # unreadable font files = fontconfig segfaults
- find "${EROOT}"/usr/share/fonts/ -type f '!' -perm 0644 \
- -exec chmod -v 0644 2>/dev/null {} + || die "failed to fix font files perms"
-
if [[ -z ${ROOT} ]] ; then
if has_version media-libs/fontconfig ; then
ebegin "Updating global fontcache"
@@ -233,3 +262,5 @@ font_pkg_postrm() {
}
fi
+
+EXPORT_FUNCTIONS pkg_setup src_install pkg_postinst pkg_postrm
diff --git a/eclass/fortran-2.eclass b/eclass/fortran-2.eclass
index e26dd700f7cf..855dcba59a39 100644
--- a/eclass/fortran-2.eclass
+++ b/eclass/fortran-2.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: fortran-2.eclass
@@ -26,24 +26,21 @@
#
# FORTRAN_NEED_OPENMP=1
-inherit toolchain-funcs
-
-case ${EAPI:-0} in
- # not used in the eclass, but left for backward compatibility with legacy users
- 5|6) inherit eutils ;;
- 7|8) ;;
- *) die "EAPI=${EAPI} is not supported" ;;
+case ${EAPI} in
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS pkg_setup
+if [[ -z ${_FORTRAN_2_ECLASS} ]]; then
+_FORTRAN_2_ECLASS=1
-if [[ ! ${_FORTRAN_2_CLASS} ]]; then
+inherit toolchain-funcs
# @ECLASS_VARIABLE: FORTRAN_NEED_OPENMP
# @DESCRIPTION:
# Set to "1" in order to automatically have the eclass abort if the fortran
# compiler lacks openmp support.
-: ${FORTRAN_NEED_OPENMP:=0}
+: "${FORTRAN_NEED_OPENMP:=0}"
# @ECLASS_VARIABLE: FORTRAN_STANDARD
# @DESCRIPTION:
@@ -51,7 +48,7 @@ if [[ ! ${_FORTRAN_2_CLASS} ]]; then
# Generally not needed as default is sufficient.
#
# Valid settings are any combination of: 77 90 95 2003
-: ${FORTRAN_STANDARD:=77}
+: "${FORTRAN_STANDARD:=77}"
# @ECLASS_VARIABLE: FORTRAN_NEEDED
# @DESCRIPTION:
@@ -64,7 +61,7 @@ if [[ ! ${_FORTRAN_2_CLASS} ]]; then
# DEPEND="lapack? ( virtual/fortran )"
#
# If unset, we always depend on virtual/fortran.
-: ${FORTRAN_NEEDED:=always}
+: "${FORTRAN_NEEDED:=always}"
for _f_use in ${FORTRAN_NEEDED}; do
case ${_f_use} in
@@ -110,7 +107,7 @@ fortran_int64_abi_fflags() {
elif [[ ${_FC} == ifort ]]; then
echo "-integer-size 64"
else
- die "Compiler flag for 64bit interger for ${_FC} unknown"
+ die "Compiler flag for 64bit integer for ${_FC} unknown"
fi
}
@@ -224,9 +221,9 @@ _fortran_test_function() {
local dialect
- : ${F77:=$(tc-getFC)}
+ : "${F77:=$(tc-getFC)}"
- : ${FORTRAN_STANDARD:=77}
+ : "${FORTRAN_STANDARD:=77}"
for dialect in ${FORTRAN_STANDARD}; do
case ${dialect} in
77) _fortran_compile_test "$(tc-getF77)" || \
@@ -293,5 +290,6 @@ fortran-2_pkg_setup() {
fi
}
-_FORTRAN_2_ECLASS=1
fi
+
+EXPORT_FUNCTIONS pkg_setup
diff --git a/eclass/frameworks.kde.org.eclass b/eclass/frameworks.kde.org.eclass
new file mode 100644
index 000000000000..53811e339976
--- /dev/null
+++ b/eclass/frameworks.kde.org.eclass
@@ -0,0 +1,92 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: frameworks.kde.org.eclass
+# @MAINTAINER:
+# kde@gentoo.org
+# @SUPPORTED_EAPIS: 8
+# @PROVIDES: kde.org
+# @BLURB: Support eclass for KDE Frameworks packages.
+# @DESCRIPTION:
+# This eclass extends kde.org.eclass for Frameworks release group to assemble
+# default SRC_URI for tarballs, set up git-r3.eclass for stable/master branch
+# versions or restrict access to unreleased (packager access only) tarballs
+# in Gentoo KDE overlay.
+#
+# This eclass unconditionally inherits kde.org.eclass and all its public
+# variables and helper functions (not phase functions) may be considered as
+# part of this eclass's API.
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_FRAMEWORKS_KDE_ORG_ECLASS} ]]; then
+_FRAMEWORKS_KDE_ORG_ECLASS=1
+
+# @ECLASS_VARIABLE: KDE_PV_UNRELEASED
+# @INTERNAL
+# @DESCRIPTION:
+# For proper description see kde.org.eclass manpage.
+KDE_PV_UNRELEASED=( )
+
+inherit kde.org
+
+HOMEPAGE="https://develop.kde.org/products/frameworks/"
+
+SLOT=6
+if ver_test ${PV} -lt 5.240; then
+ SLOT=5
+fi
+if [[ ${PN} == extra-cmake-modules ]]; then
+ SLOT=0
+else
+ if [[ ${KDE_BUILD_TYPE} == release ]]; then
+ SLOT=${SLOT}/$(ver_cut 1-2)
+ else
+ SLOT=${SLOT}/9999
+ fi
+fi
+
+# @ECLASS_VARIABLE: KDE_ORG_SCHEDULE_URI
+# @INTERNAL
+# @DESCRIPTION:
+# For proper description see kde.org.eclass manpage.
+KDE_ORG_SCHEDULE_URI+="/Frameworks"
+
+# @ECLASS_VARIABLE: _KDE_SRC_URI
+# @INTERNAL
+# @DESCRIPTION:
+# Helper variable to construct release group specific SRC_URI.
+_KDE_SRC_URI="mirror://kde/"
+
+case ${KDE_BUILD_TYPE} in
+ live)
+ if [[ ${PV} == 5.239.9999 ]]; then
+ EGIT_BRANCH="kf5"
+ fi
+ ;;
+ *)
+ if [[ -z ${KDE_ORG_COMMIT} ]]; then
+ _KDE_SRC_URI+="stable/frameworks/$(ver_cut 1-2)/"
+ case ${KDE_ORG_NAME} in
+ kdelibs4support | \
+ kdesignerplugin | \
+ kdewebkit | \
+ khtml | \
+ kjs | \
+ kjsembed | \
+ kmediaplayer | \
+ kross | \
+ kxmlrpcclient)
+ _KDE_SRC_URI+="portingAids/"
+ ;;
+ esac
+
+ SRC_URI="${_KDE_SRC_URI}${KDE_ORG_TAR_PN}-${PV}.tar.xz"
+ fi
+ ;;
+esac
+
+fi
diff --git a/eclass/freedict.eclass b/eclass/freedict.eclass
index f38ce93edc82..b649afc7c1af 100644
--- a/eclass/freedict.eclass
+++ b/eclass/freedict.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: freedict.eclass
@@ -12,6 +12,11 @@
# This eclass exists to ease the installation of freedict translation
# dictionaries.
+case ${EAPI} in
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
if [[ -z ${_FREEDICT_ECLASS} ]]; then
_FREEDICT_ECLASS=1
@@ -20,16 +25,11 @@ _FREEDICT_ECLASS=1
# Strips PN of 'freedict' prefix, to be used in SRC_URI and doins
FREEDICT_P=${PN/freedict-/}
-case ${EAPI:-0} in
- 7) ;;
- *) die "${ECLASS}.eclass is banned in EAPI=${EAPI}" ;;
-esac
-
[[ ${FORLANG} ]] && die "FORLANG is banned, set DESCRIPTION instead"
[[ ${TOLANG} ]] && die "TOLANG is banned, set DESCRIPTION instead"
-HOMEPAGE="http://freedict.sourceforge.net/en/"
-SRC_URI="http://freedict.sourceforge.net/download/linux/${FREEDICT_P}.tar.gz"
+HOMEPAGE="https://freedict.sourceforge.net/en/"
+SRC_URI="https://freedict.sourceforge.net/download/linux/${FREEDICT_P}.tar.gz"
LICENSE="GPL-2+"
SLOT="0"
@@ -47,6 +47,6 @@ freedict_src_install() {
doins ${FREEDICT_P}.index
}
-EXPORT_FUNCTIONS src_install
-
fi
+
+EXPORT_FUNCTIONS src_install
diff --git a/eclass/gap-pkg.eclass b/eclass/gap-pkg.eclass
new file mode 100644
index 000000000000..e242cc92e8a3
--- /dev/null
+++ b/eclass/gap-pkg.eclass
@@ -0,0 +1,388 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: gap-pkg.eclass
+# @MAINTAINER:
+# François Bissey <frp.bissey@gmail.com>
+# Michael Orlitzky <mjo@gentoo.org>
+# Gentoo Mathematics Project <sci-mathematics@gentoo.org>
+# @AUTHOR:
+# François Bissey <frp.bissey@gmail.com>
+# Michael Orlitzky <mjo@gentoo.org>
+# @SUPPORTED_EAPIS: 8
+# @BLURB: Simplify the installation of GAP packages.
+# @DESCRIPTION:
+# The main purpose of this eclass is to build and install GAP packages
+# that typically occupy the dev-gap category. Most GAP packages do not
+# support an install target out of the box, so the default installation
+# is "by hand," with attention paid to those directories that are part
+# of the recommended layout. The prepare, configure, and compile phases
+# do however try to support packages having a real build system.
+#
+# GAP itself has four "required" packages that are packaged separately,
+# making dependencies between them somewhat weird. The four required
+# packages are,
+#
+# * dev-gap/gapdoc
+# * dev-gap/primgrp
+# * dev-gap/smallgrp
+# * dev-gap/transgrp
+#
+# Those four packages will have only sci-mathematics/gap added to
+# RDEPEND. All other packages will have the four required packages above
+# added to RDEPEND in addition to sci-mathematics/gap. In theory it
+# would be better to list all dependencies explicitly rather than
+# grouping together the "required" four, but this is how upstream GAP
+# works, and is what all GAP packages expect; for example, most test
+# suites fail without the required packages but make no attempt to load
+# them.
+#
+# If you need a version constraint on sci-mathematics/gap, you'll have
+# to specify it yourself. Compiled packages will likely need
+# sci-mathematics/gap in DEPEND as well, and may also want a subslot
+# dependency.
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+# For eshopts_push and eshopts_pop
+inherit estack
+
+# Some packages have additional homepages, but pretty much every GAP
+# package can be found at this URL.
+HOMEPAGE="https://www.gap-system.org/Packages/${PN}.html"
+
+# _GAP_PKG_IS_REQUIRED is an internal variable that indicates whether or
+# not $PN is one of the four "required" GAP packages that are always
+# loaded, even when GAP is started with the "-A" flag. We treat these
+# four somewhat differently since they are implicit dependencies of
+# everything else in the GAP ecosystem.
+_GAP_PKG_IS_REQUIRED=no
+case ${CATEGORY}/${PN} in
+ dev-gap/gapdoc|dev-gap/smallgrp|dev-gap/primgrp|dev-gap/transgrp)
+ _GAP_PKG_IS_REQUIRED=yes
+ ;;
+ *)
+ ;;
+esac
+
+# _GAP_PKG_RDEPEND is an internal variable to hold the RDEPEND entries
+# added by this eclass. We use a separate variable for this because we
+# need its contents later in gap-pkg_enable_tests, and that function is
+# called from an ebuild context where the list of RDEPEND is maintained
+# separately. Basically: the values we add to RDEPEND here do not appear
+# in RDEPEND when gap-pkg_enable_tests is called.
+_GAP_PKG_RDEPEND="sci-mathematics/gap"
+
+# The four "required" packages depend only on GAP itself, while every
+# other package depends (also) on the four required ones.
+if [[ "${_GAP_PKG_IS_REQUIRED}" = "no" ]]; then
+ _GAP_PKG_RDEPEND+="
+ dev-gap/gapdoc
+ dev-gap/smallgrp
+ dev-gap/primgrp
+ dev-gap/transgrp"
+fi
+RDEPEND="${_GAP_PKG_RDEPEND}"
+
+# @FUNCTION: gap-pkg_dir
+# @DESCRIPTION:
+# The directory into which the gap package should be installed. The
+# accepted current location is /usr/$(get_libdir)/gap/pkg, but
+# technically this depends on the econf call in sci-mathematics/gap.
+gap-pkg_dir() {
+ echo "/usr/$(get_libdir)/gap/pkg/${PN}"
+}
+
+# @FUNCTION: _gap-pkg_gaproot
+# @INTERNAL
+# @DESCRIPTION:
+# The directory containing sysinfo.gap. This is frequently passed to GAP
+# packages via ./configure --with-gaproot or as a positional argument to
+# hand-written configure scripts. We also use it to find the value of
+# $GAParch, which is contained in sysinfo.gap. The "gaproot" is
+# implicitly determined by the econf call in sci-mathematics/gap. As a
+# result, calling this function requires sci-mathematics/gap at
+# build-time.
+_gap-pkg_gaproot() {
+ echo "${ESYSROOT}/usr/$(get_libdir)/gap"
+}
+
+# @FUNCTION: gap-pkg_econf
+# @USAGE: [extra econf args]
+# @DESCRIPTION:
+# Call econf, passing the value of _gap-pkg_gaproot to --with-gaproot.
+# All arguments to gap-pkg_econf are passed through to econf.
+#
+# @EXAMPLE
+# src_configure() {
+# gap-pkg_econf --with-external-libsemigroups
+# }
+#
+gap-pkg_econf() {
+ econf --with-gaproot="$(_gap-pkg_gaproot)" "${@}"
+}
+
+# @FUNCTION: gap-pkg_src_configure
+# @DESCRIPTION:
+# Handle both autoconf configure scripts and the hand-written ones used
+# by many GAP packages. We determine which one we're dealing with by
+# running ./configure --help; an autoconf configure script will mention
+# "PREFIX" in the output, the others will not.
+#
+# Autoconf configure scripts are configured using gap-pkg_econf, while
+# hand-written ones are executed directly with _gap-pkg_gaproot as their
+# sole positional argument.
+gap-pkg_src_configure() {
+ local _configure="${ECONF_SOURCE:-.}/configure"
+ if [[ -x ${_configure} ]] ; then
+ if ${_configure} --help | grep PREFIX &>/dev/null; then
+ # This is an autoconf ./configure script
+ gap-pkg_econf
+ else
+ # It's an "old-style" handwritten script that does
+ # not print usage information with --help.
+ ${_configure} $(_gap-pkg_gaproot) || die
+ fi
+ fi
+}
+
+# @FUNCTION: gap-pkg_src_compile
+# @DESCRIPTION:
+# The default src_compile with the addition of V=1 to emake. The
+# Makefile.gappkg used to build most C packages defaults to a quiet
+# build without this.
+gap-pkg_src_compile() {
+ if [[ -f Makefile ]] || [[ -f GNUmakefile ]] || [[ -f makefile ]]; then
+ emake V=1 || die "emake failed"
+ fi
+}
+
+# @FUNCTION: gap-pkg_enable_tests
+# @DESCRIPTION:
+# Amend IUSE, RESTRICT, and BDEPEND for a package with a test suite.
+# This is modeled on similar functions in the distutils-r1 and
+# elisp-common eclasses, except here only a single default testing
+# strategy is supported. All runtime and post-merge dependencies are
+# added as build dependencies if USE=test is set.
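+#
+# Example (a minimal sketch; the RDEPEND value is illustrative). Call
+# this after RDEPEND and PDEPEND are assigned, since both are folded
+# into the test dependencies:
+#
+# @CODE
+# inherit gap-pkg
+#
+# RDEPEND="dev-gap/polycyclic"
+#
+# gap-pkg_enable_tests
+# @CODE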
+gap-pkg_enable_tests() {
+ IUSE+=" test "
+ RESTRICT+=" !test? ( test ) "
+
+ # Use the internal variable here, too, because the RDEPEND list from
+ # the ebuild is maintained separately by the package manager. We add
+ # PDEPEND too because we use it to break some circular dependencies
+ # between e.g. polycyclic and alnuth.
+ BDEPEND+=" test? ( ${_GAP_PKG_RDEPEND} ${RDEPEND} ${PDEPEND} ) "
+}
+
+# @FUNCTION: gap-pkg_src_test
+# @DESCRIPTION:
+# Run this package's test suite if it has one. The GAP TestPackage
+# function is the standard way to do this, but it does rely on the
+# package itself to get a few things right, like running the tests
+# verbosely and exiting with the appropriate code. The alternative would
+# be to run TestDirectory ourselves on "tst", but that has its own issues;
+# in particular many packages have set-up code that is run only with
+# TestPackage. YMMV.
+gap-pkg_src_test() {
+ [[ -f PackageInfo.g ]] || return
+
+ # We would prefer --bare to -A so that we can test (say) primgrp
+ # after installing only gapdoc and not smallgrp or transgrp. But,
+ # that would cause problems for basically every non-required
+ # package, because they usually don't explicitly load the four
+ # "required" packages in their test suites. So we use -A unless
+ # this is one of the chosen four.
+ local bareflag="--bare"
+ if [[ "${_GAP_PKG_IS_REQUIRED}" = "no" ]]; then
+ bareflag="-A"
+ fi
+
+ # Run GAP non-interactively to test the just-built package. We omit
+ # the "-r" flag here because we use the UserGapRoot directory to
+ # store AtlasRep data, and without it, the atlasrep tests (and the
+ # tests of any packages depending on it) will fail.
+ local gapcmd="gap -R ${bareflag} --nointeract"
+
+ # ForceQuitGap translates a boolean return value to the expected
+ # zero or one, useful for packages that set a single *.tst file as
+ # their TestFile.
+ gapcmd+=" -c ForceQuitGap(TestPackage(\"${PN}\"));"
+
+ # Fake the directory structure that GAP needs to be able to find
+ # packages with a symlink under ${T}, then prepend ${T} to the list
+ # of search paths so that if this package is already installed, we
+ # load the just-built copy first.
+ ln -s "${WORKDIR}" "${T}/pkg" || die
+ gapcmd+=" --roots ${T}/; "
+
+ # False negatives can occur if GAP fails to start, or if there are
+ # syntax errors:
+ #
+ # https://github.com/gap-system/gap/issues/5541
+ #
+ # There's nothing you can do about that, but now you know.
+ #
+ # The pipe to tee is more important than it looks. Any test suite
+ # involving dev-gap/browse is likely to bork the user's terminal.
+ # The "browse" package is however smart enough to figure out when
+ # stdout is not a tty, and avoids breaking it in that case. So by
+ # piping to tee, we encourage it not to do anything too crazy.
+ eshopts_push -o pipefail
+ ${gapcmd} | tee test-suite.log \
+ || die "test suite failed, see test-suite.log"
+ eshopts_pop
+}
+
+# @ECLASS_VARIABLE: GAP_PKG_EXTRA_INSTALL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# A bash array of extra files and directories to install recursively at
+# the root of this package's directory tree. For example, if you have a
+# package that mostly follows the suggested layout (described in the
+# gap-pkg_src_install documentation) but also includes a "data"
+# directory, you should set
+#
+# GAP_PKG_EXTRA_INSTALL=( data )
+#
+# to install the data directory without having to override the entire
+# src_install phase.
+
+# @ECLASS_VARIABLE: GAP_PKG_HTML_DOCDIR
+# @DESCRIPTION:
+# The directory inside the tarball where the HTML documentation is
+# located. This is _usually_ "doc", which conforms to the suggested
+# GAPDoc layout and is the default value of this variable. Many
+# packages however use a top-level "htm" directory instead. The named
+# directory will be installed to gap-pkg_dir and symlinked to the usual
+# location under /usr/share/doc. As a result, you should only use this
+# for directories referenced by PackageInfo.g or by some other part of
+# the package. HTML documentation whose location doesn't need to be
+# known to the package at runtime should instead be installed with
+# HTML_DOCS or a similar mechanism.
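+#
+# For example, a package shipping its HTML manual in a top-level "htm"
+# directory (a common deviation; the name here is illustrative) would set:
+#
+# @CODE
+# GAP_PKG_HTML_DOCDIR="htm"
+# @CODE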
+: "${GAP_PKG_HTML_DOCDIR:=doc}"
+
+# @FUNCTION: gap-pkg_src_install
+# @DESCRIPTION:
+# Install a GAP package that follows the suggested layout,
+#
+# https://docs.gap-system.org/doc/ref/chap76.html
+#
+# In particular:
+#
+# 1. All GAP source files (*.g) in $S are installed.
+#
+# 2. If a library directory named "gap" or "lib" exists,
+# it is installed.
+#
+# 3. If a binary directory "bin" exists, it is installed.
+#
+# 4. If a "doc" directory exists, we assume GAPDoc conventions
+# (https://docs.gap-system.org/pkg/gapdoc/doc/chap5.html) and install
+# what we find there. Unfortunately for us, each package's
+# PackageInfo.g contains a "PackageDoc" section that points to this
+# documentation, and we can't break the paths it references. Instead,
+# we try to dosym the human-readable parts of the documentation (PDF
+# manuals) into appropriate Gentoo locations.
+#
+# 5. We consult GAP_PKG_HTML_DOCDIR for the HTML documentation and repeat
+# the process above.
+#
+# A few GAP packages have autotools build systems with working "make
+# install" routines, but most don't. So for the time being we omit that
+# step. It's harder to work around the packages that don't support it
+# than the other way around.
+gap-pkg_src_install() {
+ einstalldocs
+
+ # So we don't have to "test -f" on the result of every glob.
+ eshopts_push -s nullglob
+
+ # Install the "normal" documentation from the doc directory. This
+ # includes anything the interactive GAP help might need in addition
+ # to the documentation intended for direct user consumption.
+ if [[ -d doc ]]; then
+ pushd doc > /dev/null || die
+
+ local docdir="$(gap-pkg_dir)/doc"
+ insinto "${docdir}"
+
+ # These files are needed by the GAP interface. We don't symlink
+ # these because they're not meant for direct human consumption;
+ # the text files are not *plain* text -- they contain color
+ # codes. I'm not sure if the BibTeX files are actually used,
+ # but the GAP packaging documentation mentions specifically
+ # that they should be included. XML files are included in case
+ # the bibliography is in BibXMLext format, but you may wind up
+ # with some additional GAPDoc (XML) source files as a result.
+ for f in *.{bib,lab,six,tex,txt,xml}; do
+ doins "${f}"
+ done
+
+ # The PDF docs are also potentially used by the interface, since
+ # they appear in PackageInfo.g, so we install them "as is." But
+ # then afterwards we symlink them to their proper Gentoo
+ # locations.
+ for f in *.pdf; do
+ doins "${f}"
+ dosym -r "${docdir}/${f}" "/usr/share/doc/${PF}/${f}"
+ done
+
+ popd > /dev/null || die
+ fi
+
+ # Install the HTML documentation. The procedure is basically the
+ # same as for the PDF docs.
+ if [[ -d "${GAP_PKG_HTML_DOCDIR}" ]]; then
+ pushd "${GAP_PKG_HTML_DOCDIR}" > /dev/null || die
+
+ local docdir="$(gap-pkg_dir)/${GAP_PKG_HTML_DOCDIR}"
+ insinto "${docdir}"
+
+ # See above
+ for f in *.{htm,html,css,js,png}; do
+ doins "${f}"
+ dosym -r "${docdir}/${f}" "/usr/share/doc/${PF}/html/${f}"
+ done
+
+ popd > /dev/null || die
+ fi
+
+ # Any GAP source files that live in the top-level directory.
+ insinto $(gap-pkg_dir)
+ for f in *.g; do
+ doins "${f}"
+ done
+
+ # We're done globbing
+ eshopts_pop
+
+ # The gap and lib dirs, which usually also contain GAP code.
+ [[ -d gap ]] && doins -r gap
+ [[ -d lib ]] && doins -r lib
+
+ # Any additional user-specified files or directories.
+ for f in "${GAP_PKG_EXTRA_INSTALL[@]}"; do
+ doins -r "${f}"
+ done
+
+ # The bin dir, which contains shared libraries but also sometimes
+ # regular executables in an arch-specific subdirectory. We do this
+ # last because it messes with insopts -- doexe doesn't work
+ # recursively and we don't care what the subdirectory structure is.
+ if [[ -d bin ]]; then
+ insopts -m0755
+ doins -r bin
+
+ # Find and remove .la files from this package's bindir. We limit
+ # the search to bin/ rather than the whole image because
+ # occasionally GAP packages ship *.la files that are not libtool
+ # archives and should not be deleted.
+ find "${ED%/}$(gap-pkg_dir)/bin" -type f -name '*.la' -delete || die
+ fi
+}
+
+EXPORT_FUNCTIONS src_configure src_compile src_test src_install
diff --git a/eclass/gear.kde.org.eclass b/eclass/gear.kde.org.eclass
new file mode 100644
index 000000000000..f3a3c56369ad
--- /dev/null
+++ b/eclass/gear.kde.org.eclass
@@ -0,0 +1,254 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: gear.kde.org.eclass
+# @MAINTAINER:
+# kde@gentoo.org
+# @SUPPORTED_EAPIS: 8
+# @PROVIDES: kde.org
+# @BLURB: Support eclass for KDE Gear packages.
+# @DESCRIPTION:
+# This eclass extends kde.org.eclass for KDE Gear release group to assemble
+# default SRC_URI for tarballs, set up git-r3.eclass for stable/master branch
+# versions or restrict access to unreleased (packager access only) tarballs
+# in Gentoo KDE overlay.
+#
+# This eclass unconditionally inherits kde.org.eclass and all its public
+# variables and helper functions (not phase functions) may be considered as
+# part of this eclass's API.
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_GEAR_KDE_ORG_ECLASS} ]]; then
+_GEAR_KDE_ORG_ECLASS=1
+
+# @ECLASS_VARIABLE: KDE_PV_UNRELEASED
+# @INTERNAL
+# @DESCRIPTION:
+# For proper description see kde.org.eclass manpage.
+KDE_PV_UNRELEASED=( )
+
+inherit kde.org
+
+HOMEPAGE="https://apps.kde.org/"
+
+# @ECLASS_VARIABLE: KDE_ORG_SCHEDULE_URI
+# @INTERNAL
+# @DESCRIPTION:
+# For proper description see kde.org.eclass manpage.
+KDE_ORG_SCHEDULE_URI+="/KDE_Gear_${PV:0:5}_Schedule"
+
+# @ECLASS_VARIABLE: _KDE_SRC_URI
+# @INTERNAL
+# @DESCRIPTION:
+# Helper variable to construct release group specific SRC_URI.
+_KDE_SRC_URI="mirror://kde/"
+
+if [[ ${KDE_BUILD_TYPE} == live ]]; then
+ if [[ ${PV} == ??.??.49.9999 ]]; then
+ EGIT_BRANCH="release/$(ver_cut 1-2)"
+ fi
+elif [[ -z ${KDE_ORG_COMMIT} ]]; then
+ case ${PV} in
+ ??.??.[6-9]? )
+ _KDE_SRC_URI+="unstable/release-service/${PV}/src/"
+ RESTRICT+=" mirror"
+ ;;
+ *) _KDE_SRC_URI+="stable/release-service/${PV}/src/" ;;
+ esac
+
+ SRC_URI="${_KDE_SRC_URI}${KDE_ORG_TAR_PN}-${PV}.tar.xz"
+fi
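+
+# Illustration (version values are examples only): PV=24.05.2 resolves to
+#   mirror://kde/stable/release-service/24.05.2/src/${KDE_ORG_TAR_PN}-24.05.2.tar.xz
+# while a pre-release such as PV=24.04.90 matches ??.??.[6-9]? above and is
+# fetched from unstable/release-service/ with mirroring restricted.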
+
+# list of applications ported to KF6 in SLOT=6 that have to block SLOT=5
+if ver_test -gt 24.01.75; then
+ case ${PN} in
+ akonadi | \
+ akonadi-calendar | \
+ akonadi-contacts | \
+ akonadi-import-wizard | \
+ akonadi-mime | \
+ akonadi-notes | \
+ akonadi-search | \
+ akonadiconsole | \
+ akregator | \
+ analitza | \
+ ark | \
+ audiocd-kio | \
+ baloo-widgets | \
+ blinken | \
+ bomber | \
+ bovo | \
+ calendarjanitor | \
+ calendarsupport | \
+ colord-kde | \
+ dolphin | \
+ dolphin-plugins-dropbox | \
+ dolphin-plugins-git | \
+ dolphin-plugins-mercurial | \
+ dolphin-plugins-mountiso | \
+ dolphin-plugins-subversion | \
+ dragon | \
+ elisa | \
+ eventviews | \
+ ffmpegthumbs | \
+ filelight | \
+ granatier | \
+ grantlee-editor | \
+ grantleetheme | \
+ gwenview | \
+ incidenceeditor | \
+ isoimagewriter | \
+ juk | \
+ kaccounts-integration | \
+ kaccounts-providers | \
+ kaddressbook | \
+ kajongg | \
+ kalarm | \
+ kalgebra | \
+ kamera | \
+ kanagram | \
+ kapman | \
+ kapptemplate | \
+ kate | \
+ kate-addons | \
+ kate-lib | \
+ katomic | \
+ kbackup | \
+ kblackbox | \
+ kblocks | \
+ kbounce | \
+ kbreakout | \
+ kbruch | \
+ kcachegrind | \
+ kcalc | \
+ kcalutils | \
+ kcharselect | \
+ kcolorchooser | \
+ kcron | \
+ kde-dev-utils | \
+ kdebugsettings | \
+ kdeconnect | \
+ kdegraphics-mobipocket | \
+ kdenetwork-filesharing | \
+ kdenlive | \
+ kdepim-addons | \
+ kdepim-runtime | \
+ kdf | \
+ kdialog | \
+ kdiamond | \
+ keditbookmarks | \
+ kfind | \
+ kfourinline | \
+ kgeography | \
+ kget | \
+ kgoldrunner | \
+ kgpg | \
+ khangman | \
+ khelpcenter | \
+ kidentitymanagement | \
+ kigo | \
+ killbots | \
+ kimap | \
+ kiriki | \
+ kiten | \
+ kitinerary | \
+ kjumpingcube | \
+ kldap | \
+ kleopatra | \
+ klettres | \
+ klickety | \
+ klines | \
+ kmag | \
+ kmahjongg | \
+ kmail | \
+ kmail-account-wizard | \
+ kmailtransport | \
+ kmbox | \
+ kmime | \
+ kmines | \
+ kmousetool | \
+ kmouth | \
+ knavalbattle | \
+ knetwalk | \
+ knights | \
+ knotes | \
+ kolf | \
+ kollision | \
+ konqueror | \
+ konquest | \
+ konsole | \
+ konsolekalendar | \
+ kontact | \
+ kontactinterface | \
+ kontrast | \
+ konversation | \
+ korganizer | \
+ kopeninghours | \
+ kosmindoormap | \
+ kpat | \
+ kpimtextedit | \
+ kpkpass | \
+ kpmcore | \
+ kpublictransport | \
+ kreversi | \
+ krfb | \
+ kruler | \
+ kshisen | \
+ ksirk | \
+ ksmtp | \
+ ksnakeduel | \
+ kspaceduel | \
+ ksquares | \
+ ksudoku | \
+ ksystemlog | \
+ kteatime | \
+ ktimer | \
+ ktorrent | \
+ ktuberling | \
+ kturtle | \
+ kubrick | \
+ kwalletmanager | \
+ kweather | \
+ kwordquiz | \
+ kwrite | \
+ libgravatar | \
+ libkeduvocdocument | \
+ libkdegames | \
+ libkdepim | \
+ libkleo | \
+ libkmahjongg | \
+ libksieve | \
+ libktnef | \
+ libktorrent | \
+ lskat | \
+ mailcommon | \
+ mailimporter | \
+ markdownpart | \
+ mbox-importer | \
+ merkuro | \
+ messagelib | \
+ okular | \
+ palapeli | \
+ parley | \
+ partitionmanager | \
+ picmi | \
+ pim-data-exporter | \
+ pim-sieve-editor | \
+ pimcommon | \
+ skanpage | \
+ spectacle | \
+ svgpart | \
+ sweeper | \
+ thumbnailers | \
+ yakuake | \
+ zanshin)
+ RDEPEND+=" !${CATEGORY}/${PN}:5" ;;
+ *) ;;
+ esac
+fi
+
+fi
diff --git a/eclass/ghc-package.eclass b/eclass/ghc-package.eclass
index 8c77ad5bc3cb..1d9483e012fb 100644
--- a/eclass/ghc-package.eclass
+++ b/eclass/ghc-package.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ghc-package.eclass
@@ -6,7 +6,7 @@
# "Gentoo's Haskell Language team" <haskell@gentoo.org>
# @AUTHOR:
# Original Author: Andres Loeh <kosmikus@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: This eclass helps with the Glasgow Haskell Compiler's package configuration utility.
# @DESCRIPTION:
# Helper eclass to handle ghc installation/upgrade/deinstallation process.
@@ -16,7 +16,6 @@ inherit multiprocessing
# Maintain version-testing compatibility with ebuilds not using EAPI 7.
case ${EAPI} in
7|8) ;;
- 6) inherit eapi7-ver ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -24,7 +23,7 @@ esac
# QA check generates false positive because it assumes
# presence of GCC-specific sections.
#
-# Workaround false positiove by disabling the check completely.
+# Workaround false positive by disabling the check completely.
# bug #722078, bug #677600
QA_FLAGS_IGNORED='.*'
@@ -39,9 +38,8 @@ ghc-getghc() {
}
# @FUNCTION: ghc-getghcpkg
-# @INTERNAL
# @DESCRIPTION:
-# Internal function determines returns the name of the ghc-pkg executable
+# returns the name of the ghc-pkg executable
ghc-getghcpkg() {
if ! type -P ${HC_PKG:-ghc-pkg}; then
ewarn "ghc-pkg not found"
@@ -58,11 +56,11 @@ ghc-getghcpkg() {
# because for some reason the global package file
# must be specified
ghc-getghcpkgbin() {
- local empty_db="${T}/empty.conf.d" ghc_pkg="$(ghc-libdir)/bin/ghc-pkg"
+ local empty_db="${T}/empty.conf.d" ghc_pkg="$(ghc-bindir)/ghc-pkg"
if [[ ! -d ${empty_db} ]]; then
"${ghc_pkg}" init "${empty_db}" || die "Failed to initialize empty global db"
fi
- echo "$(ghc-libdir)/bin/ghc-pkg" "--global-package-db=${empty_db}"
+ echo "$(ghc-bindir)/ghc-pkg" "--global-package-db=${empty_db}"
}
# @FUNCTION: ghc-version
@@ -176,6 +174,24 @@ ghc-libdir() {
echo "${_GHC_LIBDIR_CACHE}"
}
+# @FUNCTION: ghc-bindir
+# @DESCRIPTION:
+# returns the directory where ghc binaries live
+_GHC_BINDIR_CACHE=""
+ghc-bindir() {
+ if [[ -z "${_GHC_BINDIR_CACHE}" ]]; then
+ local bindir
+ if [[ "$(basename $(ghc-libdir))" == "lib" ]]; then
+ bindir="$(ghc-libdir)/../bin/"
+ else
+ bindir="$(ghc-libdir)/bin/"
+ fi
+ bindir="$(realpath "${bindir}")" || die "Cannot find ghc bindir: ${bindir}"
+ _GHC_BINDIR_CACHE="${bindir}"
+ fi
+ echo "${_GHC_BINDIR_CACHE}"
+}
+
# @FUNCTION: ghc-make-args
# @DESCRIPTION:
# Returns default arguments passed along 'ghc --make'
@@ -273,9 +289,14 @@ ghc-install-pkg() {
mkdir -p "${hint_db}" || die
for pkg_config_file in "$@"; do
- local pkg_name="gentoo-${CATEGORY}-${PF}-"$(basename "${pkg_config_file}")
- cp "${pkg_config_file}" "${hint_db}/${pkg_name}" || die
- chmod 0644 "${hint_db}/${pkg_name}" || die
+ # 'haskell-updater' relies on '.conf' presence when it scans gentoo/.
+ # Passed files can either already have .conf (single-file style DB)
+ # or not have a .conf suffix (directory-style).
+ # Here we always normalize file names to have a single .conf suffix.
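+ # For example (hypothetical names), both "foo-1.0.conf" and
+ # "foo-1.0" end up installed as
+ # "gentoo-${CATEGORY}-${PF}-foo-1.0.conf".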
+ local base_name=$(basename "${pkg_config_file}")
+ local pkg_name="gentoo-${CATEGORY}-${PF}-${base_name%.conf}"
+ cp "${pkg_config_file}" "${hint_db}/${pkg_name}.conf" || die
+ chmod 0644 "${hint_db}/${pkg_name}.conf" || die
done
}
diff --git a/eclass/git-r3.eclass b/eclass/git-r3.eclass
index 59d4f9a0038f..565f6ada8382 100644
--- a/eclass/git-r3.eclass
+++ b/eclass/git-r3.eclass
@@ -1,30 +1,46 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: git-r3.eclass
# @MAINTAINER:
# Michał Górny <mgorny@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: Eclass for fetching and unpacking git repositories.
# @DESCRIPTION:
# Third generation eclass for easing maintenance of live ebuilds using
# git as remote repository.
-case ${EAPI:-0} in
- 5|6|7|8) ;;
+# @ECLASS_VARIABLE: EGIT_LFS
+# @PRE_INHERIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set, git lfs support will be enabled.
+# Set before inheriting this eclass.
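+#
+# A minimal usage sketch (the repository URI is illustrative):
+#
+# @CODE
+# EGIT_LFS=1
+# EGIT_REPO_URI="https://example.org/foo.git"
+# inherit git-r3
+# @CODE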
+
+# @ECLASS_VARIABLE: _EGIT_LFS_FILTERS_FOUND
+# @INTERNAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# This is used to provide QA warnings if a repo has git lfs filters
+# defined but EGIT_LFS is not turned on and vice versa.
+# If non-empty, then the repo likely needs EGIT_LFS to clone properly.
+
+case ${EAPI} in
+ 6|7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_unpack
-
-if [[ ! ${_GIT_R3} ]]; then
+if [[ -z ${_GIT_R3_ECLASS} ]]; then
+_GIT_R3_ECLASS=1
PROPERTIES+=" live"
-if [[ ${EAPI} != [56] ]]; then
+if [[ ${EAPI} != 6 ]]; then
BDEPEND=">=dev-vcs/git-1.8.2.1[curl]"
+ [[ ${EGIT_LFS} ]] && BDEPEND+=" dev-vcs/git-lfs"
else
DEPEND=">=dev-vcs/git-1.8.2.1[curl]"
+ [[ ${EGIT_LFS} ]] && DEPEND+=" dev-vcs/git-lfs"
fi
# @ECLASS_VARIABLE: EGIT_CLONE_TYPE
@@ -63,7 +79,7 @@ fi
# unavailable calls like 'git describe' will not reference prior tags.
# No purging of old references is done. This mode is intended mostly for
# embedded systems with limited disk space.
-: ${EGIT_CLONE_TYPE:=single}
+: "${EGIT_CLONE_TYPE:=single}"
# @ECLASS_VARIABLE: EGIT_MIN_CLONE_TYPE
# @DESCRIPTION:
@@ -80,7 +96,37 @@ fi
# or a similar remote is used that does not support shallow clones
# and fetching tags along with commits. Please use sparingly, and to fix
# fatal errors rather than 'non-pretty versions'.
-: ${EGIT_MIN_CLONE_TYPE:=shallow}
+: "${EGIT_MIN_CLONE_TYPE:=shallow}"
+
+# @ECLASS_VARIABLE: EGIT_LFS_CLONE_TYPE
+# @USER_VARIABLE
+# @DESCRIPTION:
+# Type of lfs clone that should be used against the remote repository.
+# This can be either of: 'mirror', 'single', 'shallow'.
+#
+# This works a bit differently than EGIT_CLONE_TYPE.
+#
+# The 'mirror' type clones all LFS files that are available from the
+# cloned repo. It is mostly useful for backup or rehosting purposes, as
+# the disk usage will be excessive.
+#
+# The 'single' type clones only the LFS files from the current commit.
+# However, unlike 'shallow', it will not clean up stale LFS files.
+#
+# The 'shallow' type clones only the LFS files from the current commit.
+# LFS files that are not referenced by the current commit and more than
+# a few days old will be automatically removed to save disk space.
+# This is the recommended mode for LFS repos to prevent excessive disk
+# usage.
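+#
+# For example, a user who wants a full local copy of all LFS objects could
+# set this in make.conf (sketch):
+#
+# @CODE
+# EGIT_LFS_CLONE_TYPE="mirror"
+# @CODE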
+: "${EGIT_LFS_CLONE_TYPE:=shallow}"
+
+# @ECLASS_VARIABLE: EVCS_STORE_DIRS
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# Record of the store directories of all repositories cloned into git3-src.
+# This is useful for ebuilds that fetch multiple repos, and it can be used
+# by eclean to clean them up.
+EVCS_STORE_DIRS=()
# @ECLASS_VARIABLE: EGIT3_STORE_DIR
# @USER_VARIABLE
@@ -116,7 +162,7 @@ fi
# read the manpage for git-clone(1).
#
# URIs should be using https:// whenever possible. http:// and git://
-# URIs are completely unsecured and their use (even if only as
+# URIs are completely insecure and their use (even if only as
# a fallback) renders the ebuild completely vulnerable to MITM attacks.
#
# Can be a whitespace-separated list or an array.
@@ -318,10 +364,12 @@ _git-r3_set_gitdir() {
repo_name=${repo_name//\//_}
local distdir=${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}
- : ${EGIT3_STORE_DIR:=${distdir}/git3-src}
+ : "${EGIT3_STORE_DIR:=${distdir}/git3-src}"
GIT_DIR=${EGIT3_STORE_DIR}/${repo_name}
+ EVCS_STORE_DIRS+=( "${GIT_DIR}" )
+
if [[ ! -d ${EGIT3_STORE_DIR} && ! ${EVCS_OFFLINE} ]]; then
(
addwrite /
@@ -345,7 +393,7 @@ _git-r3_set_gitdir() {
umask "${EVCS_UMASK}" || die "Bad options to umask: ${EVCS_UMASK}"
fi
mkdir "${GIT_DIR}" || die
- git init --bare || die
+ git init --bare -b __init__ || die
if [[ ${saved_umask} ]]; then
umask "${saved_umask}" || die
fi
@@ -381,6 +429,7 @@ _git-r3_set_submodules() {
l=${l#submodule.}
local subname=${l%%.url=*}
+ local is_manually_specified=
# filter out on EGIT_SUBMODULES
if declare -p EGIT_SUBMODULES &>/dev/null; then
@@ -401,13 +450,14 @@ _git-r3_set_submodules() {
continue
else
einfo "Using submodule ${parent_path}${subname}"
+ is_manually_specified=1
fi
fi
# skip modules that have 'update = none', bug #487262.
local upd=$(echo "${data}" | git config -f /dev/fd/0 \
submodule."${subname}".update)
- [[ ${upd} == none ]] && continue
+ [[ ${upd} == none && ! ${is_manually_specified} ]] && continue
# https://github.com/git/git/blob/master/refs.c#L31
# we are more restrictive than git itself but that should not
@@ -534,7 +584,7 @@ git-r3_fetch() {
local r
for r in "${repos[@]}"; do
if [[ ${r} == git:* || ${r} == http:* ]]; then
- ewarn "git-r3: ${r%%:*} protocol is completely unsecure and may render the ebuild"
+ ewarn "git-r3: ${r%%:*} protocol is completely insecure and may render the ebuild"
ewarn "easily susceptible to MITM attacks (even if used only as fallback). Please"
ewarn "use https instead."
ewarn "[URI: ${r}]"
@@ -559,49 +609,46 @@ git-r3_fetch() {
local commit_id=${2:-${EGIT_COMMIT}}
local commit_date=${4:-${EGIT_COMMIT_DATE}}
- # support new override API for EAPI 6+
- if [[ ${EAPI} != 5 ]]; then
- # get the name and do some more processing:
- # 1) kill .git suffix,
- # 2) underscore (remaining) non-variable characters,
- # 3) add preceding underscore if it starts with a digit,
- # 4) uppercase.
- local override_name=${GIT_DIR##*/}
- override_name=${override_name%.git}
- override_name=${override_name//[^a-zA-Z0-9_]/_}
- override_name=${override_name^^}
-
- local varmap=(
- REPO:repos
- BRANCH:branch_name
- COMMIT:commit_id
- COMMIT_DATE:commit_date
- )
-
- local localvar livevar live_warn= override_vars=()
- for localvar in "${varmap[@]}"; do
- livevar=EGIT_OVERRIDE_${localvar%:*}_${override_name}
- localvar=${localvar#*:}
- override_vars+=( "${livevar}" )
-
- if [[ -n ${!livevar} ]]; then
- [[ ${localvar} == repos ]] && repos=()
- live_warn=1
- ewarn "Using ${livevar}=${!livevar}"
- declare "${localvar}=${!livevar}"
- fi
- done
+ # get the name and do some more processing:
+ # 1) kill .git suffix,
+ # 2) underscore (remaining) non-variable characters,
+ # 3) add preceding underscore if it starts with a digit,
+ # 4) uppercase.
+ local override_name=${GIT_DIR##*/}
+ override_name=${override_name%.git}
+ override_name=${override_name//[^a-zA-Z0-9_]/_}
+ override_name=${override_name^^}
+
+ local varmap=(
+ REPO:repos
+ BRANCH:branch_name
+ COMMIT:commit_id
+ COMMIT_DATE:commit_date
+ )
- if [[ ${live_warn} ]]; then
- ewarn "No support will be provided."
- else
- einfo "To override fetched repository properties, use:"
- local x
- for x in "${override_vars[@]}"; do
- einfo " ${x}"
- done
- einfo
+ local localvar livevar live_warn= override_vars=()
+ for localvar in "${varmap[@]}"; do
+ livevar=EGIT_OVERRIDE_${localvar%:*}_${override_name}
+ localvar=${localvar#*:}
+ override_vars+=( "${livevar}" )
+
+ if [[ -n ${!livevar} ]]; then
+ [[ ${localvar} == repos ]] && repos=()
+ live_warn=1
+ ewarn "Using ${livevar}=${!livevar}"
+ declare "${localvar}=${!livevar}"
fi
+ done
+
+ if [[ ${live_warn} ]]; then
+ ewarn "No support will be provided."
+ else
+ einfo "To override fetched repository properties, use:"
+ local x
+ for x in "${override_vars[@]}"; do
+ einfo " ${x}"
+ done
+ einfo
fi
# set final variables after applying overrides
@@ -637,6 +684,8 @@ git-r3_fetch() {
# and HEAD in case we need the default branch
# (we keep it in refs/git-r3 since otherwise --prune interferes)
"+HEAD:refs/git-r3/HEAD"
+ # fetch the specific remote_ref to deal with orphan commits
+ "${remote_ref}"
)
else # single or shallow
local fetch_l fetch_r
@@ -764,6 +813,39 @@ git-r3_fetch() {
fi
fi
+ if [[ ${EGIT_LFS} ]]; then
+ # Fetch the LFS files from the current ref (if any)
+ local lfs_fetch_command=( git lfs fetch "${r}" "${remote_ref}" )
+
+ case "${EGIT_LFS_CLONE_TYPE}" in
+ shallow)
+ if [[ -d ${GIT_DIR}/lfs/objects ]] && ! rmdir "${GIT_DIR}"/lfs/objects 2> /dev/null; then
+ # Only prune if the lfs directory is not empty.
+ # The prune command can take a very long time to resolve even if there are no lfs objects.
+ lfs_fetch_command+=(
+ --prune
+ )
+ fi
+ ;;
+ single)
+ ;;
+ mirror)
+ lfs_fetch_command+=(
+ --all
+ )
+ ;;
+ *)
+ die "Invalid EGIT_LFS_CLONE_TYPE=${EGIT_LFS_CLONE_TYPE}"
+ esac
+
+ set -- "${lfs_fetch_command[@]}"
+ echo "${@}" >&2
+ "${@}" || die
+ elif [[ -d ${GIT_DIR}/lfs && ${EGIT_LFS_CLONE_TYPE} == shallow ]]; then
+ # Clean up the LFS files from old checkouts if LFS support has been turned off.
+ rm -fr "${GIT_DIR}"/lfs || die
+ fi
+
success=1
break
done
@@ -773,7 +855,7 @@ git-r3_fetch() {
[[ ${success} ]] || die "Unable to fetch from any of EGIT_REPO_URI"
# submodules can reference commits in any branch
- # always use the 'mirror' mode to accomodate that, bug #503332
+ # always use the 'mirror' mode to accommodate that, bug #503332
local EGIT_CLONE_TYPE=mirror
# recursively fetch submodules
@@ -878,7 +960,12 @@ git-r3_checkout() {
# use git init+fetch instead of clone since the latter doesn't like
# non-empty directories.
- git init --quiet || die
+ git init --quiet -b __init__ || die
+ if [[ ${EGIT_LFS} ]]; then
+ # The "skip-repo" flag will just skip the installation of the pre-push hooks.
+ # We don't use these hooks as we don't do any pushes
+ git lfs install --local --skip-repo || die
+ fi
# setup 'alternates' to avoid copying objects
echo "${orig_repo}/objects" > "${GIT_DIR}"/objects/info/alternates || die
# now copy the refs
@@ -887,6 +974,11 @@ git-r3_checkout() {
cp "${orig_repo}"/packed-refs "${GIT_DIR}"/packed-refs || die
fi
+ # mark this directory as "safe" so that src_install() can access it
+ # https://bugs.gentoo.org/879353
+ git config --global --add safe.directory \
+ "$(cd "${out_dir}" && echo "${PWD}")" || die
+
# (no need to copy HEAD, we will set it via checkout)
if [[ -f ${orig_repo}/shallow ]]; then
@@ -904,6 +996,16 @@ git-r3_checkout() {
fi
echo "${@}" >&2
"${@}" || die "git checkout ${remote_ref:-${new_commit_id}} failed"
+
+ # If any filters in any of the ".gitattributes" files specify lfs,
+ # then this repo is most likely storing files with git lfs.
+ local has_git_lfs_filters=$(
+ git grep "filter=lfs" -- ".gitattributes" "**/.gitattributes"
+ )
+ if [[ $has_git_lfs_filters ]]; then
+ # This is used for issuing QA warnings regarding LFS files in the repo (or lack thereof)
+ _EGIT_LFS_FILTERS_FOUND="yes"
+ fi
}
git-r3_sub_checkout
unset -f git-r3_sub_checkout
@@ -1053,6 +1155,13 @@ git-r3_src_unpack() {
_git-r3_env_setup
git-r3_src_fetch
git-r3_checkout
+
+ if [[ ! ${EGIT_LFS} && ${_EGIT_LFS_FILTERS_FOUND} ]]; then
+ eqawarn "QA Notice: There are Git LFS filters setup in the cloned repo, consider using EGIT_LFS!"
+ fi
+ if [[ ${EGIT_LFS} && ! ${_EGIT_LFS_FILTERS_FOUND} ]]; then
+ eqawarn "QA Notice: There are no Git LFS filters setup in the cloned repo. EGIT_LFS will do nothing!"
+ fi
}
# https://bugs.gentoo.org/show_bug.cgi?id=482666
@@ -1074,5 +1183,6 @@ git-r3_pkg_needrebuild() {
# 'export' locally until this gets into EAPI
pkg_needrebuild() { git-r3_pkg_needrebuild; }
-_GIT_R3=1
fi
+
+EXPORT_FUNCTIONS src_unpack
diff --git a/eclass/gkrellm-plugin.eclass b/eclass/gkrellm-plugin.eclass
index c1e4bcc1310a..1424fdfe53f9 100644
--- a/eclass/gkrellm-plugin.eclass
+++ b/eclass/gkrellm-plugin.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gkrellm-plugin.eclass
@@ -8,56 +8,40 @@
# Original author: Jim Ramsay <lack@gentoo.org>
# EAPI 6 author: David Seifert <soap@gentoo.org>
# EAPI 8 author: Thomas Bracht Laumann Jespersen <t@laumann.xyz>
-# @SUPPORTED_EAPIS: 6 8
-# @PROVIDES: multilib
+# @SUPPORTED_EAPIS: 8
# @BLURB: Provides src_install used by (almost) all gkrellm plugins
# @DESCRIPTION:
# - Sets up default dependencies
# - Provides a common src_install method to avoid code duplication
-#
-# Changelog:
-# 17 March 2022: Thomas Bracht Laumann Jespersen <t@laumann.xyz>
-# - Port to EAPI 8
-# 03 January 2018: David Seifert <soap@gentoo.org>
-# - Port to EAPI 6, remove built_with_use, simplify a lot
-# 12 March 2007: Jim Ramsay <lack@gentoo.org>
-# - Added server plugin support
-# 09 March 2007: Jim Ramsay <lack@gentoo.org>
-# - Initial commit
-#
# @ECLASS_VARIABLE: PLUGIN_SO
# @DESCRIPTION:
# The name of the plugin's .so file which will be installed in
-# the plugin dir. Defaults to "${PN}$(get_modname)". Has to be a bash array.
+# the plugin dir. Defaults to "${PN}$(get_modname)". Has to be a bash array.
# @ECLASS_VARIABLE: PLUGIN_SERVER_SO
# @DEFAULT_UNSET
# @DESCRIPTION:
# The name of the plugin's server plugin $(get_modname) portion.
-# Unset by default. Has to be a bash array.
+# Unset by default. Has to be a bash array.
# @ECLASS_VARIABLE: PLUGIN_DOCS
# @DEFAULT_UNSET
# @DESCRIPTION:
# An optional list of docs to be installed, in addition to the default
-# DOCS variable which is respected too. Has to be a bash array.
+# DOCS variable which is respected too. Has to be a bash array.
case ${EAPI} in
- 6|8) ;;
+ 8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-inherit multilib
+if [[ ! ${_GKRELLM_PLUGIN_ECLASS} ]]; then
+_GKRELLM_PLUGIN_ECLASS=1
-if [[ ! ${_GKRELLM_PLUGIN_R1} ]]; then
-_GKRELLM_PLUGIN_R1=1
+inherit multilib
-if [[ ${EAPI} == 6 ]]; then
- DEPEND="virtual/pkgconfig"
-else
- BDEPEND="virtual/pkgconfig"
-fi
+BDEPEND="virtual/pkgconfig"
# @FUNCTION: gkrellm-plugin_src_install
# @USAGE:
@@ -68,20 +52,13 @@ gkrellm-plugin_src_install() {
if ! declare -p PLUGIN_SO >/dev/null 2>&1 ; then
doexe ${PN}$(get_modname)
- elif declare -p PLUGIN_SO | grep -q "^declare -a " ; then
- doexe "${PLUGIN_SO[@]}"
else
- die "PLUGIN_SO has to be a bash array!"
+ doexe "${PLUGIN_SO[@]}"
fi
if [[ -n ${PLUGIN_SERVER_SO} ]]; then
exeinto /usr/$(get_libdir)/gkrellm2/plugins-gkrellmd
-
- if declare -p PLUGIN_SERVER_SO | grep -q "^declare -a " ; then
- doexe "${PLUGIN_SERVER_SO[@]}"
- else
- die "PLUGIN_SERVER_SO has to be a bash array!"
- fi
+ doexe "${PLUGIN_SERVER_SO[@]}"
fi
einstalldocs
@@ -90,13 +67,7 @@ gkrellm-plugin_src_install() {
[[ -s "${d}" ]] && dodoc "${d}"
done
- if [[ -n ${PLUGIN_DOCS} ]]; then
- if declare -p PLUGIN_DOCS | grep -q "^declare -a " ; then
- dodoc "${PLUGIN_DOCS[@]}"
- else
- die "PLUGIN_DOCS has to be a bash array!"
- fi
- fi
+ [[ -n ${PLUGIN_DOCS} ]] && dodoc "${PLUGIN_DOCS[@]}"
}
fi
diff --git a/eclass/gnome.org.eclass b/eclass/gnome.org.eclass
index 05025f5f58fa..760dc2ba0b66 100644
--- a/eclass/gnome.org.eclass
+++ b/eclass/gnome.org.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gnome.org.eclass
@@ -29,7 +29,7 @@ _GNOME_ORG_ECLASS=1
# Most projects hosted on gnome.org mirrors provide tarballs as tar.bz2 or
# tar.xz. This eclass defaults to xz. This is because the GNOME mirrors are
# moving to only have xz tarballs for new releases.
-: ${GNOME_TARBALL_SUFFIX:="xz"}
+: "${GNOME_TARBALL_SUFFIX:="xz"}"
# Even though xz-utils are in @system, they must still be added to BDEPEND; see
# https://archives.gentoo.org/gentoo-dev/msg_a0d4833eb314d1be5d5802a3b710e0a4.xml
@@ -45,20 +45,27 @@ fi
# @DESCRIPTION:
# Name of the module as hosted on gnome.org mirrors.
# Leave unset if package name matches module name.
-: ${GNOME_ORG_MODULE:=$PN}
+: "${GNOME_ORG_MODULE:=$PN}"
-# @ECLASS_VARIABLE: GNOME_ORG_PVP
+# @ECLASS_VARIABLE: GNOME_ORG_RELEASE
# @INTERNAL
# @DESCRIPTION:
# Components of the version number that correspond to a 6 month release.
if ver_test -ge 40.0; then
- : ${GNOME_ORG_PVP:=$(ver_cut 1)}
+ : "${GNOME_ORG_RELEASE:=$(ver_cut 1)}"
else
- : ${GNOME_ORG_PVP:=$(ver_cut 1-2)}
+ : "${GNOME_ORG_RELEASE:=$(ver_cut 1-2)}"
fi
-SRC_URI="mirror://gnome/sources/${GNOME_ORG_MODULE}/${GNOME_ORG_PVP}/${GNOME_ORG_MODULE}-${PV}.tar.${GNOME_TARBALL_SUFFIX}"
+# @ECLASS_VARIABLE: GNOME_ORG_PV
+# @DESCRIPTION:
+# PV in the GNOME version scheme format.
+# The package version in the format used upstream by GNOME projects.
+# See https://discourse.gnome.org/t/new-gnome-versioning-scheme/4235
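+#
+# For example (hypothetical version), PV="46_rc" becomes GNOME_ORG_PV="46.rc",
+# matching upstream tarball names such as ${GNOME_ORG_MODULE}-46.rc.tar.xz.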
+: "${GNOME_ORG_PV:=$(ver_rs 1- .)}"
+
+SRC_URI="mirror://gnome/sources/${GNOME_ORG_MODULE}/${GNOME_ORG_RELEASE}/${GNOME_ORG_MODULE}-${GNOME_ORG_PV}.tar.${GNOME_TARBALL_SUFFIX}"
-S="${WORKDIR}/${GNOME_ORG_MODULE}-${PV}"
+S="${WORKDIR}/${GNOME_ORG_MODULE}-${GNOME_ORG_PV}"
fi
diff --git a/eclass/gnome2-utils.eclass b/eclass/gnome2-utils.eclass
index 6c8d222a712e..bbee2a419dfc 100644
--- a/eclass/gnome2-utils.eclass
+++ b/eclass/gnome2-utils.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gnome2-utils.eclass
@@ -29,25 +29,29 @@ esac
# @INTERNAL
# @DESCRIPTION:
# Path to gconftool-2
-: ${GCONFTOOL_BIN:="/usr/bin/gconftool-2"}
+: "${GCONFTOOL_BIN:="/usr/bin/gconftool-2"}"
# @ECLASS_VARIABLE: SCROLLKEEPER_DIR
# @INTERNAL
# @DESCRIPTION:
# Directory where scrollkeeper-update should do its work
-: ${SCROLLKEEPER_DIR:="/var/lib/scrollkeeper"}
+: "${SCROLLKEEPER_DIR:="/var/lib/scrollkeeper"}"
# @ECLASS_VARIABLE: SCROLLKEEPER_UPDATE_BIN
# @INTERNAL
# @DESCRIPTION:
# Path to scrollkeeper-update
-: ${SCROLLKEEPER_UPDATE_BIN:="/usr/bin/scrollkeeper-update"}
+: "${SCROLLKEEPER_UPDATE_BIN:="/usr/bin/scrollkeeper-update"}"
+
+# @ECLASS_VARIABLE: GLIB_COMPILE_RESOURCES
+# @DESCRIPTION:
+# Path to glib-compile-resources
+: "${GLIB_COMPILE_RESOURCES:="/usr/bin/glib-compile-resources"}"
# @ECLASS_VARIABLE: GLIB_COMPILE_SCHEMAS
-# @INTERNAL
# @DESCRIPTION:
# Path to glib-compile-schemas
-: ${GLIB_COMPILE_SCHEMAS:="/usr/bin/glib-compile-schemas"}
+: "${GLIB_COMPILE_SCHEMAS:="/usr/bin/glib-compile-schemas"}"
# @ECLASS_VARIABLE: GNOME2_ECLASS_SCHEMAS
# @INTERNAL
@@ -82,7 +86,7 @@ esac
# @FUNCTION: gnome2_environment_reset
# @DESCRIPTION:
-# Reset various variables inherited from root's evironment to a reasonable
+# Reset various variables inherited from root's environment to a reasonable
# default for ebuilds to help avoid access violations and test failures.
gnome2_environment_reset() {
xdg_environment_reset
@@ -359,11 +363,6 @@ gnome2_gdk_pixbuf_update() {
local updater="${EROOT%/}/usr/bin/${CHOST}-gdk-pixbuf-query-loaders"
[[ -x ${updater} ]] || updater="${EROOT%/}/usr/bin/gdk-pixbuf-query-loaders"
- if [[ -z ${GNOME2_ECLASS_GDK_PIXBUF_LOADERS} ]]; then
- debug-print "gdk-pixbuf loader cache does not need an update"
- return
- fi
-
if tc-is-cross-compiler ; then
ewarn "Updating of gdk-pixbuf loader cache skipped due to cross-compilation."
ewarn "You might want to run gdk-pixbuf-query-loaders manually on the target"
diff --git a/eclass/gnome2.eclass b/eclass/gnome2.eclass
index e80a517ee0da..66de2df97f9f 100644
--- a/eclass/gnome2.eclass
+++ b/eclass/gnome2.eclass
@@ -1,35 +1,34 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gnome2.eclass
# @MAINTAINER:
# gnome@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @PROVIDES: gnome2-utils
# @BLURB: Provides phases for Gnome/Gtk+ based packages.
# @DESCRIPTION:
# Exports portage base functions used by ebuilds written for packages using the
# GNOME framework. For additional functions, see gnome2-utils.eclass.
+case ${EAPI} in
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_GNOME2_ECLASS} ]]; then
+_GNOME2_ECLASS=1
+
# @ECLASS_VARIABLE: GNOME2_EAUTORECONF
# @DEFAULT_UNSET
# @DESCRIPTION:
# Run eautoreconf instead of only elibtoolize
GNOME2_EAUTORECONF=${GNOME2_EAUTORECONF:-""}
-[[ ${GNOME2_EAUTORECONF} == 'yes' ]] && inherit autotools
-[[ ${EAPI} == [56] ]] && inherit eutils ltprune
-inherit libtool gnome.org gnome2-utils xdg
+[[ ${GNOME2_EAUTORECONF} == yes ]] && inherit autotools
+[[ ${EAPI} == 6 ]] && inherit ltprune
-case ${EAPI} in
- 5)
- EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_install pkg_preinst pkg_postinst pkg_postrm
- ;;
- 6|7|8)
- EXPORT_FUNCTIONS src_prepare src_configure src_compile src_install pkg_preinst pkg_postinst pkg_postrm
- ;;
- *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
-esac
+inherit libtool gnome.org gnome2-utils xdg
# @ECLASS_VARIABLE: ELTCONF
# @DEFAULT_UNSET
@@ -37,34 +36,6 @@ esac
# Extra options passed to elibtoolize
ELTCONF=${ELTCONF:-""}
-# @ECLASS_VARIABLE: G2CONF
-# @DEFAULT_UNSET
-# @DESCRIPTION:
-# Extra configure opts passed to econf.
-# Deprecated, pass extra arguments to gnome2_src_configure.
-# Banned in eapi6 and newer.
-if has ${EAPI} 5; then
- G2CONF=${G2CONF:-""}
-fi
-
-# @ECLASS_VARIABLE: GCONF_DEBUG
-# @DEFAULT_UNSET
-# @DESCRIPTION:
-# Whether to handle debug or not.
-# Some gnome applications support various levels of debugging (yes, no, minimum,
-# etc), but using --disable-debug also removes g_assert which makes debugging
-# harder. This variable should be set to yes for such packages for the eclass
-# to handle it properly. It will enable minimal debug with USE=-debug.
-# Note that this is most commonly found in configure.ac as GNOME_DEBUG_CHECK.
-#
-# Banned since eapi6 as upstream is moving away from this obsolete macro in favor
-# of autoconf-archive macros, that do not expose this issue (bug #270919)
-if has ${EAPI} 5; then
- if [[ ${GCONF_DEBUG} != "no" ]]; then
- IUSE="debug"
- fi
-fi
-
# @ECLASS_VARIABLE: GNOME2_ECLASS_GIO_MODULES
# @INTERNAL
# @DESCRIPTION:
@@ -72,47 +43,29 @@ fi
# @ECLASS_VARIABLE: GNOME2_LA_PUNT
# @DESCRIPTION:
-# In EAPIs 5 and 6, it relies on prune_libtool_files (from ltprune.eclass) for
+# In EAPI 6, it relies on prune_libtool_files (from ltprune.eclass) for
# this. Later EAPIs use find ... -delete. Available values for GNOME2_LA_PUNT:
# - "no": will not clean any .la files
# - "yes": will run prune_libtool_files --modules
# - If it is not set, it will run prune_libtool_files
GNOME2_LA_PUNT=${GNOME2_LA_PUNT:-""}
-# @FUNCTION: gnome2_src_unpack
-# @DESCRIPTION:
-# Stub function for old EAPI.
-gnome2_src_unpack() {
- if has ${EAPI} 5; then
- unpack ${A}
- cd "${S}"
- else
- die "gnome2_src_unpack is banned since eapi6"
- fi
-}
-
# @FUNCTION: gnome2_src_prepare
# @DESCRIPTION:
# Prepare environment for build, fix build of scrollkeeper documentation,
# run elibtoolize.
gnome2_src_prepare() {
- [[ ${EAPI} != 5 ]] && default
+ default
# Prevent assorted access violations and test failures
gnome2_environment_reset
- # Prevent scrollkeeper access violations
- # We stop to run it from eapi6 as scrollkeeper helpers from
- # rarian are not running anything and, then, access violations
- # shouldn't occur.
- has ${EAPI} 5 && gnome2_omf_fix
-
# Disable all deprecation warnings
gnome2_disable_deprecation_warning
# Run libtoolize or eautoreconf, bug #591584
# https://bugzilla.gnome.org/show_bug.cgi?id=655517
- if [[ ${GNOME2_EAUTORECONF} == 'yes' ]]; then
+ if [[ ${GNOME2_EAUTORECONF} == yes ]]; then
eautoreconf
else
elibtoolize ${ELTCONF}
@@ -123,31 +76,10 @@ gnome2_src_prepare() {
# @DESCRIPTION:
# Gnome specific configure handling
gnome2_src_configure() {
- # Deprecated for a long time now and banned since eapi6, see Gnome team policies
- if [[ -n ${G2CONF} ]] ; then
- if has ${EAPI} 5; then
- eqawarn "G2CONF set, please review documentation at https://wiki.gentoo.org/wiki/Project:GNOME/Gnome_Team_Ebuild_Policies#G2CONF_and_src_configure"
- else
- die "G2CONF set, please review documentation at https://wiki.gentoo.org/wiki/Project:GNOME/Gnome_Team_Ebuild_Policies#G2CONF_and_src_configure"
- fi
- fi
-
local g2conf=()
- if has ${EAPI} 5; then
- if [[ ${GCONF_DEBUG} != 'no' ]] ; then
- if use debug ; then
- g2conf+=( --enable-debug=yes )
- fi
- fi
- else
- if [[ -n ${GCONF_DEBUG} ]] ; then
- die "GCONF_DEBUG is banned since eapi6 in favor of each ebuild taking care of the proper handling of debug configure option"
- fi
- fi
-
# We consider packages installing gtk-doc to be handled by adding
- # DEPEND="dev-util/gtk-doc-am" which provides tools to relink URLs in
+ # DEPEND="dev-build/gtk-doc-am" which provides tools to relink URLs in
# documentation to already installed documentation. This decision also
# greatly helps with constantly broken doc generation.
# Remember to drop 'doc' USE flag from your package if it was only used to
@@ -187,36 +119,17 @@ gnome2_src_configure() {
g2conf+=( --enable-compile-warnings=minimum )
fi
- # Pass --docdir with proper directory, bug #482646 (not needed since eapi6)
- if has ${EAPI} 5; then
- if grep -q "^ *--docdir=" "${ECONF_SOURCE:-.}"/configure; then
- g2conf+=( --docdir="${EPREFIX}"/usr/share/doc/${PF} )
- fi
- fi
-
# Avoid sandbox violations caused by gnome-vfs (bug #128289 and #345659)
- if has ${EAPI} 5; then
- addwrite "$(unset HOME; echo ~)/.gnome2"
- else
- addpredict "$(unset HOME; echo ~)/.gnome2"
- fi
+ addpredict "$(unset HOME; echo ~)/.gnome2"
- if has ${EAPI} 5; then
- econf ${g2conf[@]} ${G2CONF} "$@"
- else
- econf ${g2conf[@]} "$@"
- fi
+ econf ${g2conf[@]} "$@"
}
# @FUNCTION: gnome2_src_compile
# @DESCRIPTION:
# Only default src_compile for now
gnome2_src_compile() {
- if has ${EAPI} 5; then
- emake
- else
- default
- fi
+ default
}
# @FUNCTION: gnome2_src_install
@@ -233,40 +146,31 @@ gnome2_src_install() {
#
# if this is not present, scrollkeeper-update may segfault and
# create bogus directories in /var/lib/
- if has ${EAPI} 5; then
- dodir "${sk_tmp_dir}" || die "dodir failed"
- emake DESTDIR="${D}" "scrollkeeper_localstate_dir=${ED}${sk_tmp_dir} " "$@" install || die "install failed"
- else
- default
- fi
+ default
unset GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL
# Handle documentation as 'default' for eapi5, bug #373131
# Since eapi6 this is handled by default on its own plus MAINTAINERS and HACKING
# files that are really common in gnome packages (bug #573390)
- if has ${EAPI} 5; then
- einstalldocs
- else
- local d
- for d in HACKING MAINTAINERS; do
- [[ -s "${d}" ]] && dodoc "${d}"
- done
- fi
+ local d
+ for d in HACKING MAINTAINERS; do
+ [[ -s ${d} ]] && dodoc "${d}"
+ done
# Do not keep /var/lib/scrollkeeper because:
# 1. The scrollkeeper database is regenerated at pkg_postinst()
# 2. ${ED}/var/lib/scrollkeeper contains only indexes for the current pkg
# thus it makes no sense if pkg_postinst ISN'T run for some reason.
- rm -rf "${ED}${sk_tmp_dir}"
+ rm -rf "${ED}${sk_tmp_dir}" || die
rmdir "${ED}/var/lib" 2>/dev/null
rmdir "${ED}/var" 2>/dev/null
# Make sure this one doesn't get in the portage db
- rm -fr "${ED}/usr/share/applications/mimeinfo.cache"
+ rm -rf "${ED}/usr/share/applications/mimeinfo.cache" || die
# Delete all .la files
- if has ${EAPI} 5 6; then
+ if has ${EAPI} 6; then
case "${GNOME2_LA_PUNT}" in
yes) prune_libtool_files --modules;;
no) ;;
@@ -310,7 +214,9 @@ gnome2_pkg_postinst() {
gnome2_schemas_update
fi
gnome2_scrollkeeper_update
- gnome2_gdk_pixbuf_update
+ if [[ -n ${GNOME2_ECLASS_GDK_PIXBUF_LOADERS} ]]; then
+ gnome2_gdk_pixbuf_update
+ fi
if [[ ${#GNOME2_ECLASS_GIO_MODULES[@]} -gt 0 ]]; then
gnome2_giomodule_cache_update
@@ -336,3 +242,7 @@ gnome2_pkg_postrm() {
gnome2_giomodule_cache_update
fi
}
+
+fi
+
+EXPORT_FUNCTIONS src_prepare src_configure src_compile src_install pkg_preinst pkg_postinst pkg_postrm
diff --git a/eclass/gnustep-2.eclass b/eclass/gnustep-2.eclass
index 68c15bbc62e1..3797dd80151e 100644
--- a/eclass/gnustep-2.eclass
+++ b/eclass/gnustep-2.eclass
@@ -1,18 +1,18 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gnustep-2.eclass
# @MAINTAINER:
# GNUstep Herd <gnustep@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: gnustep-base
# @BLURB: eclass for GNUstep Apps, Frameworks, and Bundles build
# @DESCRIPTION:
# This eclass sets up GNUstep environment to properly install
# GNUstep packages
-case ${EAPI:-0} in
- [5678]) ;;
+case ${EAPI} in
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -21,19 +21,11 @@ _GNUSTEP_2_ECLASS=1
inherit gnustep-base
-case ${EAPI:-0} in
- [56])
- DEPEND=">=gnustep-base/gnustep-make-2.0"
- ;;
- *)
- BDEPEND=">=gnustep-base/gnustep-make-2.0"
- ;;
-esac
-
-DEPEND+=" virtual/gnustep-back"
-RDEPEND="${DEPEND}"
+RDEPEND="virtual/gnustep-back"
+DEPEND="${RDEPEND}"
+BDEPEND=">=gnustep-base/gnustep-make-2.0"
-# The following gnustep-based EXPORT_FUNCTIONS are available:
+# The following gnustep-based exported functions are available:
# * gnustep-base_pkg_setup
# * gnustep-base_src_prepare
# * gnustep-base_src_configure
diff --git a/eclass/gnustep-base.eclass b/eclass/gnustep-base.eclass
index d37ecad102e0..a0a877bf8e13 100644
--- a/eclass/gnustep-base.eclass
+++ b/eclass/gnustep-base.eclass
@@ -1,17 +1,17 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gnustep-base.eclass
# @MAINTAINER:
# GNUstep Herd <gnustep@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
-# @BLURB: Internal handling of GNUstep pacakges
+# @SUPPORTED_EAPIS: 7 8
+# @BLURB: Internal handling of GNUstep packages
# @DESCRIPTION:
# Inner gnustep eclass, should only be inherited directly by gnustep-base
# packages
-case ${EAPI:-0} in
- [5678]) inherit eutils ;;
+case ${EAPI} in
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -64,7 +64,7 @@ gnustep-base_src_prepare() {
eend $?
fi
- ! has ${EAPI} 5 && default
+ default
}
gnustep-base_src_configure() {
@@ -127,7 +127,6 @@ egnustep_env() {
-i "${WORKDIR}"/GNUstep.conf || die "GNUstep.conf sed failed"
fi
-
if [[ ! -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]]; then
# Set rpath in ldflags when available
case ${CHOST} in
@@ -171,7 +170,7 @@ egnustep_env() {
# Make utilizing GNUstep Makefiles
egnustep_make() {
if [[ -f ./Makefile || -f ./makefile || -f ./GNUmakefile ]] ; then
- emake ${*} "${GS_ENV[@]}" all || die "package make failed"
+ emake ${*} "${GS_ENV[@]}" all
return 0
fi
die "no Makefile found"
@@ -184,7 +183,7 @@ egnustep_install() {
mkdir -p "${D}"${GNUSTEP_SYSTEM_TOOLS}
fi
if [[ -f ./[mM]akefile || -f ./GNUmakefile ]] ; then
- emake ${*} "${GS_ENV[@]}" install || die "package install failed"
+ emake ${*} "${GS_ENV[@]}" install
return 0
fi
die "no Makefile found"
@@ -196,8 +195,8 @@ egnustep_doc() {
# Check documentation presence
pushd "${S}"/Documentation || die
if [[ -f ./[mM]akefile || -f ./GNUmakefile ]] ; then
- emake "${GS_ENV[@]}" all || die "doc make failed"
- emake "${GS_ENV[@]}" install || die "doc install failed"
+ emake "${GS_ENV[@]}" all
+ emake "${GS_ENV[@]}" install
fi
popd || die
fi
@@ -255,7 +254,7 @@ EOF
if [[ -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]]; then
exeinto /usr/bin
else
- exeinto ${GNUSTEP_SYSTEM_TOOLS#${EPREFIX}}/Gentoo
+ exeinto "${GNUSTEP_SYSTEM_TOOLS#${EPREFIX}}"/Gentoo
fi
doexe "${T}"/${cfile}
}
diff --git a/eclass/go-env.eclass b/eclass/go-env.eclass
new file mode 100644
index 000000000000..baba0d3ee8b3
--- /dev/null
+++ b/eclass/go-env.eclass
@@ -0,0 +1,105 @@
+# Copyright 2023-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: go-env.eclass
+# @MAINTAINER:
+# Flatcar Linux Maintainers <infra@flatcar-linux.org>
+# @AUTHOR:
+# Flatcar Linux Maintainers <infra@flatcar-linux.org>
+# @BLURB: Helper eclass for setting the Go compile environment. Required for cross-compiling.
+# @DESCRIPTION:
+# This eclass includes helper functions for setting the compile environment for Go ebuilds.
+# Intended to be called by other Go eclasses in an early build stage, e.g. src_unpack.
+
+if [[ -z ${_GO_ENV_ECLASS} ]]; then
+_GO_ENV_ECLASS=1
+
+inherit flag-o-matic toolchain-funcs
+
+# @FUNCTION: go-env_set_compile_environment
+# @DESCRIPTION:
+# Set up basic compile environment: CC, CXX, and GOARCH.
+# Necessary platform-specific settings such as GOARM or GO386 are also set
+# according to the Portage configuration when building for those architectures.
+# Also carry over CFLAGS, LDFLAGS and friends.
+# Required for cross-compiling with crossdev.
+# If these variables are not set, host defaults will be used and the resulting
+# binaries will target the host arch (e.g. "emerge-aarch64-cross-linux-gnu foo"
+# run on x86_64 will emerge "foo" for x86_64 instead of aarch64).
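+#
+# A minimal consumer sketch (mirroring what go-module.eclass does in
+# src_unpack):
+#
+# @CODE
+# src_unpack() {
+#	default
+#	go-env_set_compile_environment
+# }
+# @CODE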
+go-env_set_compile_environment() {
+ tc-export CC CXX PKG_CONFIG
+
+ export GOARCH="$(go-env_goarch)"
+ use arm && export GOARM=$(go-env_goarm)
+ use x86 && export GO386=$(go-env_go386)
+
+ # XXX: Hack for checking ICE (bug #912152, gcc PR113204)
+ [[ $(gcc-fullversion) == 14.0.1 ]] && filter-lto
+
+ export CGO_CFLAGS="${CGO_CFLAGS:-$CFLAGS}"
+ export CGO_CPPFLAGS="${CGO_CPPFLAGS:-$CPPFLAGS}"
+ export CGO_CXXFLAGS="${CGO_CXXFLAGS:-$CXXFLAGS}"
+ export CGO_LDFLAGS="${CGO_LDFLAGS:-$LDFLAGS}"
+}
+
+# @FUNCTION: go-env_goarch
+# @USAGE: [toolchain prefix]
+# @DESCRIPTION:
+# Returns the appropriate GOARCH setting for the target architecture.
+go-env_goarch() {
+ # By chance most portage arch names match Go
+ local tc_arch=$(tc-arch $@)
+ case "${tc_arch}" in
+ x86) echo 386;;
+ x64-*) echo amd64;;
+ loong) echo loong64;;
+ mips) if use abi_mips_o32; then
+ [[ $(tc-endian $@) = big ]] && echo mips || echo mipsle
+ elif use abi_mips_n64; then
+ [[ $(tc-endian $@) = big ]] && echo mips64 || echo mips64le
+ fi ;;
+ ppc64) [[ $(tc-endian $@) = big ]] && echo ppc64 || echo ppc64le ;;
+ riscv) echo riscv64 ;;
+ s390) echo s390x ;;
+ *) echo "${tc_arch}";;
+ esac
+}
+
+# @FUNCTION: go-env_go386
+# @DESCRIPTION:
+# Returns the appropriate GO386 setting for the CFLAGS in use.
+go-env_go386() {
+ # Piggy-back off any existing CPU_FLAGS_X86 usage in the ebuild if
+ # it's there.
+ if in_iuse cpu_flags_x86_sse2 && use cpu_flags_x86_sse2 ; then
+ echo 'sse2'
+ return
+ fi
+
+ if tc-cpp-is-true "defined(__SSE2__)" ${CFLAGS} ${CXXFLAGS} ; then
+ echo 'sse2'
+ return
+ fi
+
+ # Go 1.16 dropped explicit support for 386 FP and relies on software
+ # emulation instead in the absence of SSE2.
+ echo 'softfloat'
+}
+
+# @FUNCTION: go-env_goarm
+# @USAGE: [CHOST-value]
+# @DESCRIPTION:
+# Returns the appropriate GOARM setting for the CHOST given, or the default
+# CHOST.
+go-env_goarm() {
+ case "${1:-${CHOST}}" in
+ armv5*) echo 5;;
+ armv6*) echo 6;;
+ armv7*) echo 7;;
+ *)
+ die "unknown GOARM for ${1:-${CHOST}}"
+ ;;
+ esac
+}
+
+fi
diff --git a/eclass/go-module.eclass b/eclass/go-module.eclass
index a5dafb45cab8..cad63ee6d0f0 100644
--- a/eclass/go-module.eclass
+++ b/eclass/go-module.eclass
@@ -1,4 +1,4 @@
-# Copyright 2019-2022 Gentoo Authors
+# Copyright 2019-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: go-module.eclass
@@ -14,7 +14,7 @@
# written in the go programming language that uses modules.
# If the software you are packaging has a file named go.mod in its top level
# directory, it uses modules.
-#
+#
# Modules have been the preferred method of tracking dependencies in software
# written in Go since version 1.16,
# so if the software isn't using modules, it should be updated.
@@ -26,7 +26,9 @@
# If the software has a directory named vendor in its
# top level directory, the only thing you need to do is inherit the
# eclass. If it doesn't, you need to also create a dependency tarball and
-# host it somewhere, for example in your dev space.
+# host it somewhere, for example in your dev space. It's recommended to use
+# a format that supports parallel decompression and a higher compression
+# level such as '-9' for xz.
#
# Here is an example of how to create a dependency tarball.
# The base directory in the GOMODCACHE setting must be go-mod in order
@@ -36,14 +38,14 @@
#
# $ cd /path/to/project
# $ GOMODCACHE="${PWD}"/go-mod go mod download -modcacherw
-# $ tar -acf project-1.0-deps.tar.xz go-mod
+# $ XZ_OPT='-T0 -9' tar -acf project-1.0-deps.tar.xz go-mod
#
# @CODE
#
# Since Go programs are statically linked, it is important that your ebuild's
# LICENSE= setting includes the licenses of all statically linked
# dependencies. So please make sure it is accurate.
-# You can use a utility like dev-go/golicense (network connectivity is
+# You can use a utility like dev-go/lichen (network connectivity is
# required) to extract this information from the compiled binary.
#
# @EXAMPLE:
@@ -63,20 +65,19 @@ case ${EAPI} in
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ -z ${_GO_MODULE} ]]; then
+if [[ -z ${_GO_MODULE_ECLASS} ]]; then
+_GO_MODULE_ECLASS=1
-_GO_MODULE=1
+inherit multiprocessing toolchain-funcs go-env
if [[ ! ${GO_OPTIONAL} ]]; then
- BDEPEND=">=dev-lang/go-1.16"
+ BDEPEND=">=dev-lang/go-1.20:="
# Workaround for pkgcheck false positive: https://github.com/pkgcore/pkgcheck/issues/214
# MissingUnpackerDep: version ...: missing BDEPEND="app-arch/unzip"
# Added here rather than to each affected package, so it can be cleaned up just
# once when pkgcheck is improved.
BDEPEND+=" app-arch/unzip"
-
- EXPORT_FUNCTIONS src_unpack
fi
# Force go to build in module mode.
@@ -93,10 +94,12 @@ export GOCACHE="${T}/go-build"
export GOMODCACHE="${WORKDIR}/go-mod"
# The following go flags should be used for all builds.
+# -buildmode=pie builds position independent executables
+# -buildvcs=false omits version control information
# -modcacherw makes the build cache read/write
# -v prints the names of packages as they are compiled
# -x prints commands as they are executed
-export GOFLAGS="-modcacherw -v -x"
+export GOFLAGS="-buildvcs=false -modcacherw -v -x"
# Do not complain about CFLAGS etc since go projects do not use them.
QA_FLAGS_IGNORED='.*'
@@ -259,7 +262,22 @@ go-module_set_globals() {
continue
fi
- _dir=$(_go-module_gomod_encode "${module}")
+ # Encode the name (path) of a Golang module in the format expected by Goproxy.
+ # Uppercase letters are replaced by their lowercase versions with a '!' prefix.
+ # The transformed result of 'module' is stored in the '_dir' variable.
+ #
+ ## Python:
+ # return re.sub('([A-Z]{1})', r'!\1', s).lower()
+ ## Sed:
+ ## This uses GNU Sed extension \l to downcase the match
+ # echo "${module}" |sed 's,[A-Z],!\l&,g'
+ local re _dir lower
+ _dir="${module}"
+ re='(.*)([A-Z])(.*)'
+ while [[ ${_dir} =~ ${re} ]]; do
+ lower='!'"${BASH_REMATCH[2],}"
+ _dir="${BASH_REMATCH[1]}${lower}${BASH_REMATCH[3]}"
+ done
for _ext in "${exts[@]}" ; do
# Relative URI within a GOPROXY for a file
@@ -340,11 +358,18 @@ go-module_setup_proxy() {
# @FUNCTION: go-module_src_unpack
# @DESCRIPTION:
-# If EGO_SUM is set, unpack the base tarball(s) and set up the
-# local go proxy. Also warn that this usage is deprecated.
-# - Otherwise, if EGO_VENDOR is set, bail out.
-# - Otherwise do a normal unpack.
+# Sets up GOFLAGS for the system and then unpacks based on the following rules:
+# 1. If EGO_SUM is set, unpack the base tarball(s) and set up the
+# local go proxy. This mode is deprecated.
+# 2. Otherwise, if EGO_VENDOR is set, bail out, as this functionality was removed.
+# 3. Otherwise, do a normal unpack and then call 'ego mod verify'
+#    (skipped when a vendor directory is present).
+# Set compile env via go-env.
go-module_src_unpack() {
+ if use amd64 || use arm || use arm64 ||
+ ( use ppc64 && [[ $(tc-endian) == "little" ]] ) || use s390 || use x86; then
+ GOFLAGS="-buildmode=pie ${GOFLAGS}"
+ fi
+ GOFLAGS="${GOFLAGS} -p=$(makeopts_jobs)"
if [[ "${#EGO_SUM[@]}" -gt 0 ]]; then
eqawarn "This ebuild uses EGO_SUM which is deprecated"
eqawarn "Please migrate to a dependency tarball"
@@ -355,7 +380,15 @@ go-module_src_unpack() {
die "Please update this ebuild"
else
default
+ if [[ ! -d "${S}"/vendor ]]; then
+ cd "${S}"
+ local nf
+ [[ -n ${NONFATAL_VERIFY} ]] && nf=nonfatal
+ ${nf} ego mod verify
+ fi
fi
+
+ go-env_set_compile_environment
}
# @FUNCTION: _go-module_src_unpack_gosum
@@ -481,31 +514,8 @@ go-module_live_vendor() {
popd >& /dev/null || die
}
-# @FUNCTION: _go-module_gomod_encode
-# @DEPRECATED: none
-# @DESCRIPTION:
-# Encode the name(path) of a Golang module in the format expected by Goproxy.
-#
-# Upper letters are replaced by their lowercase version with a '!' prefix.
-#
-_go-module_gomod_encode() {
- ## Python:
- # return re.sub('([A-Z]{1})', r'!\1', s).lower()
-
- ## Sed:
- ## This uses GNU Sed extension \l to downcase the match
- #echo "${module}" |sed 's,[A-Z],!\l&,g'
- #
- # Bash variant:
- debug-print-function "${FUNCNAME}" "$@"
- #local re input lower
- re='(.*)([A-Z])(.*)'
- input="${1}"
- while [[ ${input} =~ ${re} ]]; do
- lower='!'"${BASH_REMATCH[2],}"
- input="${BASH_REMATCH[1]}${lower}${BASH_REMATCH[3]}"
- done
- echo "${input}"
-}
+fi
+if [[ ! ${GO_OPTIONAL} ]]; then
+ EXPORT_FUNCTIONS src_unpack
fi
diff --git a/eclass/golang-build.eclass b/eclass/golang-build.eclass
index f24029a1a9fd..235313bd70f5 100644
--- a/eclass/golang-build.eclass
+++ b/eclass/golang-build.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2019 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: golang-build.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 6 7
# @PROVIDES: golang-base
# @BLURB: Eclass for compiling go packages.
# @DEPRECATED: go-module.eclass
@@ -12,21 +12,15 @@
# This eclass provides default src_compile, src_test and src_install
# functions for software written in the Go programming language.
-inherit golang-base
-
-case "${EAPI:-0}" in
- 5|6|7)
- ;;
- *)
- die "${ECLASS}: Unsupported eapi (EAPI=${EAPI})"
- ;;
+case ${EAPI} in
+ 6|7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_compile src_install src_test
-
-if [[ -z ${_GOLANG_BUILD} ]]; then
+if [[ -z ${_GOLANG_BUILD_ECLASS} ]]; then
+_GOLANG_BUILD_ECLASS=1
-_GOLANG_BUILD=1
+inherit golang-base
# @ECLASS_VARIABLE: EGO_BUILD_FLAGS
# @DEFAULT_UNSET
@@ -85,3 +79,5 @@ golang-build_src_test() {
}
fi
+
+EXPORT_FUNCTIONS src_compile src_install src_test
diff --git a/eclass/golang-vcs-snapshot.eclass b/eclass/golang-vcs-snapshot.eclass
index 5140064a651e..d34b8a6e913d 100644
--- a/eclass/golang-vcs-snapshot.eclass
+++ b/eclass/golang-vcs-snapshot.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: golang-vcs-snapshot.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 6 7
# @PROVIDES: golang-base
# @BLURB: eclass to unpack VCS snapshot tarballs for Go software
# @DEPRECATED: go-module.eclass
@@ -44,14 +44,15 @@
# ${WORKDIR}/${P}/src/github.com/user/package
# and add the vendored tarballs to ${WORKDIR}/src/${EGO_PN}/vendor
-inherit golang-base
-
-case ${EAPI:-0} in
- 5|6|7) ;;
- *) die "${ECLASS} API in EAPI ${EAPI} not yet established."
+case ${EAPI} in
+ 6|7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_unpack
+if [[ -z ${_GOLANG_VCS_SNAPSHOT_ECLASS} ]]; then
+_GOLANG_VCS_SNAPSHOT_ECLASS=1
+
+inherit golang-base go-env
# @ECLASS_VARIABLE: EGO_VENDOR
# @DESCRIPTION:
@@ -82,7 +83,7 @@ unset -f _golang-vcs-snapshot_set_vendor_uri
_golang-vcs-snapshot_dovendor() {
local VENDOR_PATH=$1 VENDORPN=$2 TARBALL=$3
- rm -fr "${VENDOR_PATH}/${VENDORPN}" || die
+ rm -rf "${VENDOR_PATH}/${VENDORPN}" || die
mkdir -p "${VENDOR_PATH}/${VENDORPN}" || die
tar -C "${VENDOR_PATH}/${VENDORPN}" -x --strip-components 1\
-f "${DISTDIR}"/${TARBALL} || die
@@ -91,6 +92,7 @@ _golang-vcs-snapshot_dovendor() {
# @FUNCTION: golang-vcs-snapshot_src_unpack
# @DESCRIPTION:
# Extract the first archive from ${A} to the appropriate location for GOPATH.
+# Set compile env via go-env.
golang-vcs-snapshot_src_unpack() {
local lib vendor_path x
ego_pn_check
@@ -116,4 +118,10 @@ golang-vcs-snapshot_src_unpack() {
fi
done
fi
+
+ go-env_set_compile_environment
}
+
+fi
+
+EXPORT_FUNCTIONS src_unpack
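
A consumer of golang-vcs-snapshot normally only sets EGO_PN before the inherit; the exported src_unpack then places the sources under ${WORKDIR}/${P}/src/${EGO_PN} as described above. A hypothetical sketch, with an invented upstream project, URIs and build commands:

EAPI=7

EGO_PN=github.com/example/mytool
inherit golang-vcs-snapshot

DESCRIPTION="Example tool written in Go"
HOMEPAGE="https://github.com/example/mytool"
SRC_URI="https://github.com/example/mytool/archive/v${PV}.tar.gz -> ${P}.tar.gz"

LICENSE="MIT"
SLOT="0"
KEYWORDS="~amd64"

src_compile() {
	# build from the GOPATH layout the eclass created during src_unpack
	GO111MODULE=off GOPATH="${WORKDIR}/${P}" \
		go build -v -o mytool "${EGO_PN}" || die
}

src_install() {
	dobin mytool
}
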
diff --git a/eclass/golang-vcs.eclass b/eclass/golang-vcs.eclass
index 595412ea0c88..6f7a837bc15f 100644
--- a/eclass/golang-vcs.eclass
+++ b/eclass/golang-vcs.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: golang-vcs.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 6 7
# @PROVIDES: golang-base
# @BLURB: Eclass for fetching and unpacking go repositories.
# @DEPRECATED: go-module.eclass
@@ -12,21 +12,15 @@
# This eclass is written to ease the maintenance of live ebuilds
# of software written in the Go programming language.
-inherit estack eutils golang-base
-
-case "${EAPI:-0}" in
- 5|6|7)
- ;;
- *)
- die "${ECLASS}: Unsupported eapi (EAPI=${EAPI})"
- ;;
+case ${EAPI} in
+ 6|7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_unpack
-
-if [[ -z ${_GOLANG_VCS} ]]; then
+if [[ -z ${_GOLANG_VCS_ECLASS} ]]; then
+_GOLANG_VCS_ECLASS=1
-_GOLANG_VCS=1
+inherit estack golang-base go-env
PROPERTIES+=" live"
@@ -69,11 +63,12 @@ PROPERTIES+=" live"
# @INTERNAL
# @DESCRIPTION:
# Create EGO_STORE_DIR if necessary.
+# Set compile env via go-env.
_golang-vcs_env_setup() {
debug-print-function ${FUNCNAME} "$@"
local distdir=${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}
- : ${EGO_STORE_DIR:=${distdir}/go-src}
+ : "${EGO_STORE_DIR:=${distdir}/go-src}"
[[ -n ${EVCS_UMASK} ]] && eumask_push $EVCS_UMASK
@@ -90,6 +85,8 @@ _golang-vcs_env_setup() {
mkdir -p "${WORKDIR}/${P}/src" ||
die "${ECLASS}: unable to create ${WORKDIR}/${P}"
+ go-env_set_compile_environment
+
return 0
}
# @FUNCTION: _golang-vcs_fetch
@@ -139,3 +136,5 @@ golang-vcs_src_unpack() {
}
fi
+
+EXPORT_FUNCTIONS src_unpack
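
As the hunk above shows, golang-vcs keeps its source checkouts under EGO_STORE_DIR, which defaults to ${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/go-src. Users who prefer another location can override the variable, for example in make.conf; the path below is purely illustrative:

# /etc/portage/make.conf
EGO_STORE_DIR="/var/cache/portage/go-src"
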
diff --git a/eclass/gstreamer-meson.eclass b/eclass/gstreamer-meson.eclass
index f81c2beeccd8..a26b06ba4595 100644
--- a/eclass/gstreamer-meson.eclass
+++ b/eclass/gstreamer-meson.eclass
@@ -1,10 +1,11 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gstreamer-meson.eclass
# @MAINTAINER:
# gstreamer@gentoo.org
# @AUTHOR:
+# Mart Raudsepp <leio@gentoo.org>
# Haelwenn (lanodan) Monnier <contact@hacktivis.me>
# Michał Górny <mgorny@gentoo.org>
# Gilles Dartiguelongue <eva@gentoo.org>
@@ -12,7 +13,7 @@
# foser <foser@gentoo.org>
# zaheerm <zaheerm@gentoo.org>
# Steven Newbury
-# @SUPPORTED_EAPIS: 7
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: meson multilib-minimal
# @BLURB: Helps building core & split gstreamer plugins
# @DESCRIPTION:
@@ -26,22 +27,30 @@
# plugin, consider adding media-plugins/gst-plugins-meta dependency, but
# also list any packages that provide explicitly requested plugins.
-# multilib-minimal goes last
-inherit meson multilib toolchain-funcs virtualx xdg-utils multilib-minimal
-
case "${EAPI:-0}" in
- 7)
+ 7|8)
;;
*)
die "EAPI=\"${EAPI}\" is not supported"
;;
esac
+PYTHON_COMPAT=( python3_{10..12} )
+[[ ${EAPI} == 8 ]] && inherit python-any-r1
+
+# TODO: Remove after all older versions are gone from tree
+if ver_test ${PV} -lt 1.22.10 ; then
+ inherit virtualx
+fi
+
+# multilib-minimal goes last
+inherit meson multilib toolchain-funcs xdg-utils multilib-minimal
+
# @ECLASS_VARIABLE: GST_PLUGINS_ENABLED
# @DESCRIPTION:
# Defines the plugins to be built.
# May be set by an ebuild and contain more than one identifier, space
-# separated (only src_configure can handle mutiple plugins at this time).
+# separated (only src_configure can handle multiple plugins at this time).
# @ECLASS_VARIABLE: GST_PLUGINS_NOAUTO
# @DESCRIPTION:
@@ -79,15 +88,29 @@ gstreamer_get_plugins() {
"${S}/meson_options.txt" || die "Failed to extract options for plugins with external deps"
)
- # opencv and hls in gst-plugins-bad are split, can't be properly detected
- if grep -q "option('opencv'" "${EMESON_SOURCE}"/meson_options.txt ; then
- GST_PLUGINS_EXT_DEPS="${GST_PLUGINS_EXT_DEPS}
-opencv"
- fi
- if grep -q "option('hls'" "${EMESON_SOURCE}"/meson_options.txt ; then
- GST_PLUGINS_EXT_DEPS="${GST_PLUGINS_EXT_DEPS}
-hls"
- fi
+ # meson_options that should be in GST_PLUGINS_EXT_DEPS but automatic parsing above can't catch
+ local extra_options
+ extra_options=(
+ # gst-plugins-base
+ gl
+ # gst-plugins-good
+ qt5
+ qt6
+ soup
+ v4l2
+ ximagesrc
+ # gst-plugins-bad
+ hls
+ opencv
+ wayland
+ )
+
+ for option in ${extra_options[@]} ; do
+ if grep -q "option('${option}'" "${EMESON_SOURCE}"/meson_options.txt ; then
+ GST_PLUGINS_EXT_DEPS="${GST_PLUGINS_EXT_DEPS}
+${option}"
+ fi
+ done
}
# @FUNCTION: gstreamer_system_package
@@ -110,6 +133,10 @@ gstreamer_system_package() {
pc=${tuple#*:}-${SLOT}
sed -e "1i${dependency} = dependency('${pc}', required : true)" \
-i "${pdir}"/meson.build || die
+ # TODO: Remove conditional applying once older versions are all gone
+ if ver_test ${PV} -gt 1.22.5 ; then
+ sed -e "/meson\.override_dependency[(]pkg_name, ${dependency}[)]/d" -i "${S}"/gst-libs/gst/*/meson.build || die
+ fi
done
done
}
@@ -143,7 +170,7 @@ gstreamer_system_library() {
# Actual build directories of the plugins.
# Most often the same as the configure switch name.
# FIXME: Change into a bash array
-: ${GST_PLUGINS_BUILD_DIR:=${PN/gst-plugins-/}}
+: "${GST_PLUGINS_BUILD_DIR:=${PN/gst-plugins-/}}"
# @ECLASS_VARIABLE: GST_TARBALL_SUFFIX
# @DESCRIPTION:
@@ -151,7 +178,7 @@ gstreamer_system_library() {
# tarballs as tar.bz2 or tar.xz. This eclass defaults to xz. This is
# because the gstreamer mirrors are moving to only have xz tarballs for
# new releases.
-: ${GST_TARBALL_SUFFIX:="xz"}
+: "${GST_TARBALL_SUFFIX:="xz"}"
# Even though xz-utils are in @system, they must still be added to BDEPEND; see
# https://archives.gentoo.org/gentoo-dev/msg_a0d4833eb314d1be5d5802a3b710e0a4.xml
@@ -163,13 +190,13 @@ fi
# @DESCRIPTION:
# Name of the module as hosted on gstreamer.freedesktop.org mirrors.
# Leave unset if package name matches module name.
-: ${GST_ORG_MODULE:=${PN}}
+: "${GST_ORG_MODULE:=${PN}}"
# @ECLASS_VARIABLE: GST_ORG_PVP
# @INTERNAL
# @DESCRIPTION:
# Major and minor numbers of the version number.
-: ${GST_ORG_PVP:=$(ver_cut 1-2)}
+: "${GST_ORG_PVP:=$(ver_cut 1-2)}"
DESCRIPTION="${BUILD_GST_PLUGINS} plugin for gstreamer"
@@ -184,10 +211,16 @@ RDEPEND="
>=dev-libs/glib-2.40.0:2[${MULTILIB_USEDEP}]
"
BDEPEND="
- >=sys-apps/sed-4
virtual/pkgconfig
virtual/perl-JSON-PP
"
+[[ ${EAPI} == 8 ]] && BDEPEND="${BDEPEND} ${PYTHON_DEPS}"
+# gst-plugins-{base,good} splits all require glib-utils due to gnome.mkenums_simple meson calls in gst-libs
+# The alternative would be to patch out the subdir calls, but some packages need glib-utils themselves anyway,
+# so something in a full upgrade path will require it at build time regardless; patching it out is not worth the risk.
+if [[ "${GST_ORG_MODULE}" == "gst-plugins-base" ]] || [[ "${GST_ORG_MODULE}" == "gst-plugins-bad" ]]; then
+ BDEPEND="${BDEPEND} dev-util/glib-utils"
+fi
if [[ "${PN}" != "gstreamer" ]]; then
RDEPEND="
@@ -210,7 +243,7 @@ if [[ "${PN}" != "${GST_ORG_MODULE}" ]]; then
# Export multilib phases used for split builds.
multilib_src_install_all() { gstreamer_multilib_src_install_all; }
else
- local extra_deps=""
+ inherit virtualx
IUSE="nls test"
RESTRICT="!test? ( test )"
@@ -256,8 +289,7 @@ gstreamer_get_plugin_dir() {
# @INTERNAL
# @DESCRIPTION:
# Contains false-positives.
-# - gst-plugins-bad puts "shm" in external deps
-GST_PLUGINS_ENOAUTO="shm"
+GST_PLUGINS_ENOAUTO=""
# @FUNCTION: gstreamer_multilib_src_configure
# @DESCRIPTION:
@@ -283,9 +315,14 @@ gstreamer_multilib_src_configure() {
if grep -q "option('orc'" "${EMESON_SOURCE}"/meson_options.txt ; then
if in_iuse orc ; then
gst_conf+=( -Dorc=$(usex orc enabled disabled) )
+ if [[ "${PN}" != "${GST_ORG_MODULE}" ]] && ! _gstreamer_get_has_orc_dep; then
+ eqawarn "QA: IUSE=orc is present while plugin does not seem to support it"
+ fi
else
gst_conf+=( -Dorc=disabled )
- eqawarn "QA: IUSE=orc is missing while plugin supports it"
+ if [[ "${PN}" == "${GST_ORG_MODULE}" ]] || _gstreamer_get_has_orc_dep; then
+ eqawarn "QA: IUSE=orc is missing while plugin supports it"
+ fi
fi
else
if in_iuse orc ; then
@@ -298,7 +335,9 @@ gstreamer_multilib_src_configure() {
gst_conf+=( -Dintrospection=$(multilib_native_usex introspection enabled disabled) )
else
gst_conf+=( -Dintrospection=disabled )
- eqawarn "QA: IUSE=introspection is missing while plugin supports it"
+ if [[ "${PN}" == "${GST_ORG_MODULE}" ]]; then
+ eqawarn "QA: IUSE=introspection is missing while package supports it"
+ fi
fi
else
if in_iuse introspection ; then
@@ -376,6 +415,23 @@ EOF
|| die "Failed to extract target filenames from meson-info"
}
+# @FUNCTION: _gstreamer_get_has_orc_dep
+# @INTERNAL
+# @DESCRIPTION:
+# Finds whether plugin appears to use dev-lang/orc or not.
+_gstreamer_get_has_orc_dep() {
+ local has_orc_dep pdir plugin_dir
+ has_orc_dep=0
+
+ for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
+ pdir=$(gstreamer_get_plugin_dir ${plugin_dir})
+ if grep -q "orc_dep" "${S}/${pdir}"/meson.build ; then
+ has_orc_dep=1
+ fi
+ done
+ [[ ${has_orc_dep} -ne 0 ]]
+}
+
# @FUNCTION: gstreamer_multilib_src_compile
# @DESCRIPTION:
# Compiles requested gstreamer plugin.
@@ -393,7 +449,7 @@ gstreamer_multilib_src_compile() {
# https://github.com/ninja-build/ninja/issues/1251
# https://github.com/ninja-build/ninja/issues/1330
- build_dir=$(readlink -f ${BUILD_DIR})
+ build_dir=$(readlink -f "${BUILD_DIR}")
plugin_path="${plugin%%:*}"
eninja "${plugin_path/"${build_dir}/"/}"
@@ -401,11 +457,19 @@ gstreamer_multilib_src_compile() {
fi
}
+# @FUNCTION: gstreamer-meson_pkg_setup
+# @DESCRIPTION:
+# Proxies python-any-r1_pkg_setup to future-proof any pkg_setup needs.
+# Only exported for EAPI-8.
+gstreamer-meson_pkg_setup() {
+ python-any-r1_pkg_setup
+}
+
# @FUNCTION: gstreamer_multilib_src_test
# @DESCRIPTION:
# Tests the gstreamer plugin (non-split)
gstreamer_multilib_src_test() {
- GST_GL_WINDOW=x11 virtx eninja test
+ GST_GL_WINDOW=x11 virtx meson test --timeout-multiplier 5
}
# @FUNCTION: gstreamer_multilib_src_install
@@ -420,6 +484,7 @@ gstreamer_multilib_src_install() {
for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
for plugin in $(_gstreamer_get_target_filename $(gstreamer_get_plugin_dir ${plugin_dir})); do
local install_filename="${plugin##*:}"
+ install_filename="${install_filename#${EPREFIX}}"
insinto "${install_filename%/*}"
doins "${plugin%%:*}"
done
@@ -429,12 +494,23 @@ gstreamer_multilib_src_install() {
# @FUNCTION: gstreamer_multilib_src_install_all
# @DESCRIPTION:
-# Installs documentation for requested gstreamer plugin
+# Installs documentation and presets for requested gstreamer plugin
gstreamer_multilib_src_install_all() {
local plugin_dir
for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
local dir=$(gstreamer_get_plugin_dir ${plugin_dir})
[[ -e ${dir}/README ]] && dodoc "${dir}"/README
+ if [[ ${EAPI} == 8 ]]; then
+ local presets=( "${dir}"/*.prs )
+ if [[ -e ${presets[0]} ]]; then
+ insinto /usr/share/gstreamer-${SLOT}/presets
+ doins "${presets[@]}"
+ fi
+ fi
done
}
+
+if [[ ${EAPI} == 8 ]]; then
+ EXPORT_FUNCTIONS pkg_setup
+fi
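
Split plugin ebuilds consuming gstreamer-meson stay very small: they point GST_ORG_MODULE at the upstream module and let the eclass derive the plugin build directory from ${PN}. A hypothetical EAPI-8 sketch; the module, dependency and keywords are placeholders, and a real ebuild may additionally need GST_PLUGINS_ENABLED or further RDEPEND entries for the plugin's external libraries:

EAPI=8

GST_ORG_MODULE=gst-plugins-bad
inherit gstreamer-meson

DESCRIPTION="Example split plugin for GStreamer"
KEYWORDS="~amd64 ~x86"

RDEPEND="net-libs/libexample[${MULTILIB_USEDEP}]"
DEPEND="${RDEPEND}"
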
diff --git a/eclass/gstreamer.eclass b/eclass/gstreamer.eclass
deleted file mode 100644
index 28db8b1e035f..000000000000
--- a/eclass/gstreamer.eclass
+++ /dev/null
@@ -1,269 +0,0 @@
-# Copyright 1999-2020 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-# @ECLASS: gstreamer.eclass
-# @MAINTAINER:
-# gstreamer@gentoo.org
-# @AUTHOR:
-# Michał Górny <mgorny@gentoo.org>
-# Gilles Dartiguelongue <eva@gentoo.org>
-# Saleem Abdulrasool <compnerd@gentoo.org>
-# foser <foser@gentoo.org>
-# zaheerm <zaheerm@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6
-# @PROVIDES: multilib-minimal
-# @BLURB: Helps building core & split gstreamer plugins.
-# @DESCRIPTION:
-# Eclass to make external gst-plugins emergable on a per-plugin basis
-# and to solve the problem with gst-plugins generating far too much
-# unneeded dependencies.
-#
-# GStreamer consuming applications should depend on the specific plugins
-# they need as defined in their source code. Usually you can find that
-# out by grepping the source tree for 'factory_make'. If it uses playbin
-# plugin, consider adding media-plugins/gst-plugins-meta dependency, but
-# also list any packages that provide explicitly requested plugins.
-
-inherit eutils ltprune multilib multilib-minimal toolchain-funcs versionator xdg-utils
-
-case "${EAPI:-0}" in
- 5|6)
- ;;
- 0|1|2|3|4)
- die "EAPI=\"${EAPI:-0}\" is not supported anymore"
- ;;
- *)
- die "EAPI=\"${EAPI}\" is not supported yet"
- ;;
-esac
-
-# @ECLASS_VARIABLE: GST_PLUGINS_BUILD
-# @DESCRIPTION:
-# Defines the plugins to be built.
-# May be set by an ebuild and contain more than one identifier, space
-# separated (only src_configure can handle mutiple plugins at this time).
-: ${GST_PLUGINS_BUILD:=${PN/gst-plugins-/}}
-
-# @ECLASS_VARIABLE: GST_PLUGINS_BUILD_DIR
-# @DESCRIPTION:
-# Actual build directory of the plugin.
-# Most often the same as the configure switch name.
-: ${GST_PLUGINS_BUILD_DIR:=${PN/gst-plugins-/}}
-
-# @ECLASS_VARIABLE: GST_TARBALL_SUFFIX
-# @DESCRIPTION:
-# Most projects hosted on gstreamer.freedesktop.org mirrors provide
-# tarballs as tar.bz2 or tar.xz. This eclass defaults to xz. This is
-# because the gstreamer mirrors are moving to only have xz tarballs for
-# new releases.
-: ${GST_TARBALL_SUFFIX:="xz"}
-
-# Even though xz-utils are in @system, they must still be added to DEPEND; see
-# https://archives.gentoo.org/gentoo-dev/msg_a0d4833eb314d1be5d5802a3b710e0a4.xml
-if [[ ${GST_TARBALL_SUFFIX} == "xz" ]]; then
- DEPEND="${DEPEND} app-arch/xz-utils"
-fi
-
-# @ECLASS_VARIABLE: GST_ORG_MODULE
-# @DESCRIPTION:
-# Name of the module as hosted on gstreamer.freedesktop.org mirrors.
-# Leave unset if package name matches module name.
-: ${GST_ORG_MODULE:=$PN}
-
-# @ECLASS_VARIABLE: GST_ORG_PVP
-# @INTERNAL
-# @DESCRIPTION:
-# Major and minor numbers of the version number.
-: ${GST_ORG_PVP:=$(get_version_component_range 1-2)}
-
-
-DESCRIPTION="${BUILD_GST_PLUGINS} plugin for gstreamer"
-HOMEPAGE="https://gstreamer.freedesktop.org/"
-SRC_URI="https://gstreamer.freedesktop.org/src/${GST_ORG_MODULE}/${GST_ORG_MODULE}-${PV}.tar.${GST_TARBALL_SUFFIX}"
-
-LICENSE="GPL-2"
-case ${GST_ORG_PVP} in
- 0.10) SLOT="0.10"; GST_MIN_PV="0.10.36-r2" ;;
- 1.*) SLOT="1.0"; GST_MIN_PV="1.2.4-r1" ;;
- *) die "Unkown gstreamer release."
-esac
-
-S="${WORKDIR}/${GST_ORG_MODULE}-${PV}"
-
-RDEPEND="
- >=dev-libs/glib-2.38.2-r1:2[${MULTILIB_USEDEP}]
- >=media-libs/gstreamer-${GST_MIN_PV}:${SLOT}[${MULTILIB_USEDEP}]
-"
-DEPEND="
- >=sys-apps/sed-4
- virtual/pkgconfig
-"
-
-# Export common multilib phases.
-multilib_src_configure() { gstreamer_multilib_src_configure; }
-
-if [[ ${PN} != ${GST_ORG_MODULE} ]]; then
- # Do not run test phase for individual plugin ebuilds.
- RESTRICT="test"
- RDEPEND="${RDEPEND}
- >=media-libs/${GST_ORG_MODULE}-${PV}:${SLOT}[${MULTILIB_USEDEP}]"
-
- # Export multilib phases used for split builds.
- multilib_src_compile() { gstreamer_multilib_src_compile; }
- multilib_src_install() { gstreamer_multilib_src_install; }
- multilib_src_install_all() { gstreamer_multilib_src_install_all; }
-else
- IUSE="nls"
- DEPEND="${DEPEND} nls? ( >=sys-devel/gettext-0.17 )"
-fi
-
-DEPEND="${DEPEND} ${RDEPEND}"
-
-# @FUNCTION: gstreamer_environment_reset
-# @INTERNAL
-# @DESCRIPTION:
-# Clean up environment for clean builds.
-# >=dev-lang/orc-0.4.23 rely on environment variables to find a place to
-# allocate files to mmap.
-gstreamer_environment_reset() {
- xdg_environment_reset
-}
-
-# @FUNCTION: gstreamer_get_plugins
-# @INTERNAL
-# @DESCRIPTION:
-# Get the list of plugins requiring external dependencies.
-gstreamer_get_plugins() {
- # Must be called from src_prepare/src_configure
- GST_PLUGINS_LIST=$(sed -rn 's/^AG_GST_CHECK_FEATURE\((\w+),.*/ \1 /p' \
- "${ECONF_SOURCE:-${S}}"/configure.* | LC_ALL='C' tr '[:upper:]' '[:lower:]')
-}
-
-# @FUNCTION: gstreamer_get_plugin_dir
-# @USAGE: [build_dir]
-# @INTERNAL
-# @DESCRIPTION:
-# Finds plugin build directory and output it.
-# Defaults to ${GST_PLUGINS_BUILD_DIR} if argument is not provided
-gstreamer_get_plugin_dir() {
- local build_dir=${1:-${GST_PLUGINS_BUILD_DIR}}
-
- if [[ ! -d ${S}/ext/${build_dir} ]]; then
- if [[ ! -d ${S}/sys/${build_dir} ]]; then
- ewarn "No such plugin directory"
- die
- fi
- einfo "Building system plugin in ${build_dir}..." >&2
- echo sys/${build_dir}
- else
- einfo "Building external plugin in ${build_dir}..." >&2
- echo ext/${build_dir}
- fi
-}
-
-# @FUNCTION: gstreamer_system_link
-# @USAGE: <gst-libs/gst/audio:gstreamer-audio> [...]
-# @DESCRIPTION:
-# Walks through makefiles in order to make sure build will link against system
-# libraries.
-# Takes a list of path fragments and corresponding pkgconfig libraries
-# separated by colon (:). Will replace the path fragment by the output of
-# pkgconfig.
-gstreamer_system_link() {
- local pdir directory libs pkgconfig pc tuple
- pkgconfig=$(tc-getPKG_CONFIG)
-
- for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
- pdir=$(gstreamer_get_plugin_dir ${plugin_dir})
-
- for tuple in $@ ; do
- directory=${tuple%:*}
- pc=${tuple#*:}-${SLOT}
- libs="$(${pkgconfig} --libs-only-l ${pc} || die)"
- sed -e "s:\$(top_builddir)/${directory}/.*\.la:${libs}:" \
- -i "${pdir}"/Makefile.{am,in} || die
- done
- done
-}
-
-# @FUNCTION: gstreamer_multilib_src_configure
-# @DESCRIPTION:
-# Handles logic common to configuring gstreamer plugins
-gstreamer_multilib_src_configure() {
- local plugin gst_conf=() ECONF_SOURCE=${ECONF_SOURCE:-${S}}
-
- gstreamer_get_plugins
- gstreamer_environment_reset
-
- for plugin in ${GST_PLUGINS_LIST} ; do
- if has ${plugin} ${GST_PLUGINS_BUILD} ; then
- gst_conf+=( --enable-${plugin} )
- else
- gst_conf+=( --disable-${plugin} )
- fi
- done
-
- if grep -q "ORC_CHECK" "${ECONF_SOURCE}"/configure.* ; then
- if in_iuse orc ; then
- gst_conf+=( $(use_enable orc) )
- else
- gst_conf+=( --disable-orc )
- fi
- fi
-
- if grep -q "AM_MAINTAINER_MODE" "${ECONF_SOURCE}"/configure.* ; then
- gst_conf+=( --disable-maintainer-mode )
- fi
-
- if grep -q "disable-schemas-compile" "${ECONF_SOURCE}"/configure ; then
- gst_conf+=( --disable-schemas-compile )
- fi
-
- if [[ ${PN} == ${GST_ORG_MODULE} ]]; then
- gst_conf+=( $(use_enable nls) )
- fi
-
- einfo "Configuring to build ${GST_PLUGINS_BUILD} plugin(s) ..."
- econf \
- --with-package-name="Gentoo GStreamer ebuild" \
- --with-package-origin="https://www.gentoo.org" \
- "${gst_conf[@]}" "${@}"
-}
-
-# @FUNCTION: gstreamer_multilib_src_compile
-# @DESCRIPTION:
-# Compiles requested gstreamer plugin.
-gstreamer_multilib_src_compile() {
- local plugin_dir
-
- for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
- emake -C "$(gstreamer_get_plugin_dir ${plugin_dir})"
- done
-}
-
-# @FUNCTION: gstreamer_multilib_src_install
-# @DESCRIPTION:
-# Installs requested gstreamer plugin.
-gstreamer_multilib_src_install() {
- local plugin_dir
-
- for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
- emake -C "$(gstreamer_get_plugin_dir ${plugin_dir})" \
- DESTDIR="${D}" install
- done
-}
-
-# @FUNCTION: gstreamer_multilib_src_install_all
-# @DESCRIPTION:
-# Installs documentation for requested gstreamer plugin, and removes .la
-# files.
-gstreamer_multilib_src_install_all() {
- local plugin_dir
-
- for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
- local dir=$(gstreamer_get_plugin_dir ${plugin_dir})
- [[ -e ${dir}/README ]] && dodoc "${dir}"/README
- done
-
- prune_libtool_files --modules
-}
diff --git a/eclass/haskell-cabal.eclass b/eclass/haskell-cabal.eclass
index 541bb4a90ee5..7895d9256eba 100644
--- a/eclass/haskell-cabal.eclass
+++ b/eclass/haskell-cabal.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: haskell-cabal.eclass
@@ -7,8 +7,7 @@
# @AUTHOR:
# Original author: Andres Loeh <kosmikus@gentoo.org>
# Original author: Duncan Coutts <dcoutts@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
-# @PROVIDES: ghc-package
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: for packages that make use of the Haskell Common Architecture for Building Applications and Libraries (cabal)
# @DESCRIPTION:
# Basic instructions:
@@ -27,30 +26,28 @@
# nocabaldep -- don't add dependency on cabal.
# only used for packages that _must_ not pull the dependency
# on cabal, but still use this eclass (e.g. haskell-updater).
-# ghcdeps -- constraint dependency on package to ghc onces
+# ghcdeps -- constraint dependency on package to ghc once
# only used for packages that use libghc internally and _must_
# not pull upper versions
# test-suite -- add support for cabal test-suites (introduced in Cabal-1.8)
-# rebuild-after-doc-workaround -- enable doctest test failue workaround.
+# rebuild-after-doc-workaround -- enable doctest test failure workaround.
# Symptom: when `./setup haddock` is run in a `build-type: Custom`
# package it might cause the test-suite to fail with
# errors like:
# > <command line>: cannot satisfy -package-id singletons-2.7-3Z7pnljD8tU1NrslJodXmr
-# Workaround re-reginsters the package to avoid the failure
+# Workaround re-registers the package to avoid the failure
# (and rebuilds changes).
# FEATURE can be removed once https://github.com/haskell/cabal/issues/7213
# is fixed.
case ${EAPI} in
- # eutils is for eqawarn
- 6|7) inherit eutils ;;
- 8) ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
inherit ghc-package multilib toolchain-funcs
-EXPORT_FUNCTIONS pkg_setup src_configure src_compile src_test src_install pkg_postinst pkg_postrm
+EXPORT_FUNCTIONS pkg_setup src_prepare src_configure src_compile src_test src_install pkg_postinst pkg_postrm
# @ECLASS_VARIABLE: CABAL_EXTRA_CONFIGURE_FLAGS
# @USER_VARIABLE
@@ -58,14 +55,14 @@ EXPORT_FUNCTIONS pkg_setup src_configure src_compile src_test src_install pkg_po
# User-specified additional parameters passed to 'setup configure'.
# example: /etc/portage/make.conf:
# CABAL_EXTRA_CONFIGURE_FLAGS="--enable-shared --enable-executable-dynamic"
-: ${CABAL_EXTRA_CONFIGURE_FLAGS:=}
+: "${CABAL_EXTRA_CONFIGURE_FLAGS:=}"
# @ECLASS_VARIABLE: CABAL_EXTRA_BUILD_FLAGS
# @USER_VARIABLE
# @DESCRIPTION:
# User-specified additional parameters passed to 'setup build'.
# example: /etc/portage/make.conf: CABAL_EXTRA_BUILD_FLAGS=-v
-: ${CABAL_EXTRA_BUILD_FLAGS:=}
+: "${CABAL_EXTRA_BUILD_FLAGS:=}"
# @ECLASS_VARIABLE: GHC_BOOTSTRAP_FLAGS
# @USER_VARIABLE
@@ -74,7 +71,7 @@ EXPORT_FUNCTIONS pkg_setup src_configure src_compile src_test src_install pkg_po
# _only_ 'setup' binary bootstrap.
# example: /etc/portage/make.conf: GHC_BOOTSTRAP_FLAGS=-dynamic to make
# linking 'setup' faster.
-: ${GHC_BOOTSTRAP_FLAGS:=}
+: "${GHC_BOOTSTRAP_FLAGS:=}"
# @ECLASS_VARIABLE: CABAL_EXTRA_HADDOCK_FLAGS
# @USER_VARIABLE
@@ -82,7 +79,7 @@ EXPORT_FUNCTIONS pkg_setup src_configure src_compile src_test src_install pkg_po
# User-specified additional parameters passed to 'setup haddock'.
# example: /etc/portage/make.conf:
# CABAL_EXTRA_HADDOCK_FLAGS="--haddock-options=--latex --haddock-options=--pretty-html"
-: ${CABAL_EXTRA_HADDOCK_FLAGS:=}
+: "${CABAL_EXTRA_HADDOCK_FLAGS:=}"
# @ECLASS_VARIABLE: CABAL_EXTRA_HOOGLE_FLAGS
# @USER_VARIABLE
@@ -90,7 +87,7 @@ EXPORT_FUNCTIONS pkg_setup src_configure src_compile src_test src_install pkg_po
# User-specified additional parameters passed to 'setup haddock --hoogle'.
# example: /etc/portage/make.conf:
# CABAL_EXTRA_HOOGLE_FLAGS="--haddock-options=--show-all"
-: ${CABAL_EXTRA_HOOGLE_FLAGS:=}
+: "${CABAL_EXTRA_HOOGLE_FLAGS:=}"
# @ECLASS_VARIABLE: CABAL_EXTRA_HSCOLOUR_FLAGS
# @USER_VARIABLE
@@ -98,8 +95,7 @@ EXPORT_FUNCTIONS pkg_setup src_configure src_compile src_test src_install pkg_po
# User-specified additional parameters passed to 'setup hscolour'.
# example: /etc/portage/make.conf:
# CABAL_EXTRA_HSCOLOUR_FLAGS="--executables --tests"
-: ${CABAL_EXTRA_HSCOLOUR_FLAGS:=}
-
+: "${CABAL_EXTRA_HSCOLOUR_FLAGS:=}"
# @ECLASS_VARIABLE: CABAL_EXTRA_TEST_FLAGS
# @USER_VARIABLE
@@ -107,13 +103,13 @@ EXPORT_FUNCTIONS pkg_setup src_configure src_compile src_test src_install pkg_po
# User-specified additional parameters passed to 'setup test'.
# example: /etc/portage/make.conf:
# CABAL_EXTRA_TEST_FLAGS="-v3 --show-details=streaming"
-: ${CABAL_EXTRA_TEST_FLAGS:=}
+: "${CABAL_EXTRA_TEST_FLAGS:=}"
# @ECLASS_VARIABLE: CABAL_DEBUG_LOOSENING
# @DESCRIPTION:
# Show debug output for 'cabal_chdeps' function if set.
# Needs working 'diff'.
-: ${CABAL_DEBUG_LOOSENING:=}
+: "${CABAL_DEBUG_LOOSENING:=}"
# @ECLASS_VARIABLE: CABAL_REPORT_OTHER_BROKEN_PACKAGES
# @DESCRIPTION:
@@ -121,7 +117,127 @@ EXPORT_FUNCTIONS pkg_setup src_configure src_compile src_test src_install pkg_po
# It should be normally enabled unless you know you are about
# to try to compile a lot of broken packages. Default value: 'yes'
# Set to anything else to disable.
-: ${CABAL_REPORT_OTHER_BROKEN_PACKAGES:=yes}
+: "${CABAL_REPORT_OTHER_BROKEN_PACKAGES:=yes}"
+
+# @ECLASS_VARIABLE: CABAL_HACKAGE_REVISION
+# @PRE_INHERIT
+# @DESCRIPTION:
+# Set the upstream revision number from Hackage. This will automatically
+# add the upstream cabal revision to SRC_URI and apply it in src_prepare.
+: "${CABAL_HACKAGE_REVISION:=0}"
+
+# @ECLASS_VARIABLE: CABAL_PN
+# @PRE_INHERIT
+# @DESCRIPTION:
+# Set the name of the package as it is recorded in the Hackage database. This
+# is mostly used when packages use CamelCase names upstream, but we want them
+# to be lowercase in portage.
+: "${CABAL_PN:=${PN}}"
+
+# @ECLASS_VARIABLE: CABAL_PV
+# @PRE_INHERIT
+# @DESCRIPTION:
+# Set the version of the package as it is recorded in the Hackage database.
+# This can be useful if we use a different versioning scheme in Portage than
+# the one from upstream
+: "${CABAL_PV:=${PV}}"
+
+# @ECLASS_VARIABLE: CABAL_P
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# The combined $CABAL_PN and $CABAL_PV variables, analogous to $P
+CABAL_P="${CABAL_PN}-${CABAL_PV}"
+
+S="${WORKDIR}/${CABAL_P}"
+
+# @ECLASS_VARIABLE: CABAL_FILE
+# @DESCRIPTION:
+# The location of the .cabal file for the Haskell package. This defaults to
+# "${S}/${CABAL_PN}.cabal".
+#
+# NOTE: If $S is redefined in the ebuild after inheriting this eclass,
+# $CABAL_FILE will also need to be redefined as well.
+: "${CABAL_FILE:="${S}/${CABAL_PN}.cabal"}"
+
+# @ECLASS_VARIABLE: CABAL_DISTFILE
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# The name of the .cabal file downloaded from Hackage. This filename does not
+# include $DISTDIR
+if [[ ${CABAL_HACKAGE_REVISION} -ge 1 ]]; then
+ CABAL_DISTFILE="${P}-rev${CABAL_HACKAGE_REVISION}.cabal"
+fi
+
+# @ECLASS_VARIABLE: CABAL_CHDEPS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Specifies changes to be made to the .cabal file.
+# Accepts argument list as pairs of substitutions: <from-string> <to-string>...
+# Uses the cabal_chdeps function internally and shares the same syntax.
+#
+# Example:
+#
+# CABAL_CHDEPS=(
+# 'base >= 4.2 && < 4.6' 'base >= 4.2 && < 4.7'
+# 'containers ==0.4.*' 'containers >= 0.4 && < 0.6'
+# )
+: "${CABAL_CHDEPS:=}"
+
+# @ECLASS_VARIABLE: CABAL_LIVE_VERSION
+# @PRE_INHERIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Set this to any value to prevent SRC_URI from being set automatically.
+: "${CABAL_LIVE_VERSION:=}"
+
+# @ECLASS_VARIABLE: GHC_BOOTSTRAP_PACKAGES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Extra packages that need to be exposed when compiling Setup.hs
+# @EXAMPLE:
+# GHC_BOOTSTRAP_PACKAGES=(
+# cabal-doctest
+# )
+: "${GHC_BOOTSTRAP_PACKAGES:=}"
+
+# @ECLASS_VARIABLE: CABAL_TEST_REQUIRED_BINS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Binaries included in this package which are needed during testing. This
+# adjusts PATH during src_test() so that the binaries can be found, even if
+# they have not been installed yet.
+#
+# Example:
+#
+# CABAL_TEST_REQUIRED_BINS=( arbtt-{capture,dump,import,recover,stats} )
+: "${CABAL_TEST_REQUIRED_BINS:=}"
+
+# @ECLASS_VARIABLE: CABAL_HADDOCK_TARGETS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Manually set the targets for haddock/hoogle. This is occasionally needed
+# when './setup haddock' cannot calculate the transient dependencies.
+#
+# Example:
+#
+# CABAL_HADDOCK_TARGETS="lib:${CABAL_PN}"
+: "${CABAL_HADDOCK_TARGETS:=}"
+
+# @ECLASS_VARIABLE: CABAL_CHBINS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Renames executables that are installed with the package.
+# Accepts argument list as pairs of substitutions: <from-string> <to-string>...
+#
+# Example:
+#
+# CABAL_CHBINS=(
+# 'demo' 'byline-demo'
+# 'simple' 'byline-simple'
+# 'menu' 'byline-menu'
+# 'shell' 'byline-shell'
+# )
+: "${CABAL_CHBINS:=}"
# 'dev-haskell/cabal' passes those options with ./configure-based
# configuration, but most packages don't need/don't accept it:
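
The CABAL_* variables introduced above are all meant to be set before the inherit. A hypothetical ebuild fragment showing how they fit together; the package name, versions and substitution strings are invented for illustration:

EAPI=8

CABAL_PN="ExampleLib"        # Hackage name is CamelCase, Portage name stays lowercase
CABAL_HACKAGE_REVISION=2     # fetch and apply the revised .cabal file from Hackage
CABAL_CHDEPS=(
	'base >= 4.14 && < 4.18' 'base >= 4.14 && < 4.19'
)
CABAL_CHBINS=(
	'demo' 'examplelib-demo'
)

inherit haskell-cabal

DESCRIPTION="Example Haskell library packaged with haskell-cabal.eclass"
HOMEPAGE="https://hackage.haskell.org/package/ExampleLib"
LICENSE="BSD"
SLOT="0/${PV}"
KEYWORDS="~amd64"

SRC_URI is deliberately omitted from the sketch because the eclass now derives it from CABAL_P and CABAL_HACKAGE_REVISION (see the SRC_URI handling added a little further below).
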
@@ -156,12 +272,13 @@ fi
if [[ -n "${CABAL_USE_HOOGLE}" ]]; then
# enabled only in ::haskell
- #IUSE="${IUSE} hoogle"
+ # IUSE="${IUSE} hoogle"
CABAL_USE_HOOGLE=
fi
if [[ -n "${CABAL_USE_PROFILE}" ]]; then
IUSE="${IUSE} profile"
+ RDEPEND+=" dev-lang/ghc:=[profile?]"
fi
if [[ -n "${CABAL_TEST_SUITE}" ]]; then
@@ -169,9 +286,31 @@ if [[ -n "${CABAL_TEST_SUITE}" ]]; then
RESTRICT+=" !test? ( test )"
fi
+# If SRC_URI is defined in the ebuild without appending, it will overwrite
+# the value set here. This will not be set on packages whose versions end in "9999"
+# or if CABAL_LIVE_VERSION is set.
+case $PV in
+ *9999) ;;
+ *)
+ if [[ -z "${CABAL_LIVE_VERSION}" ]]; then
+ # Without this if/then/else block, pkgcheck gives a
+ # RedundantUriRename warning for every package
+ if [[ "${CABAL_P}" == "${P}" ]]; then
+ SRC_URI="https://hackage.haskell.org/package/${P}/${P}.tar.gz"
+ else
+ SRC_URI="https://hackage.haskell.org/package/${CABAL_P}/${CABAL_P}.tar.gz -> ${P}.tar.gz"
+ fi
+ if [[ -n ${CABAL_DISTFILE} ]]; then
+ SRC_URI+=" https://hackage.haskell.org/package/${CABAL_P}/revision/${CABAL_HACKAGE_REVISION}.cabal -> ${CABAL_DISTFILE}"
+ fi
+ fi ;;
+esac
+
+BDEPEND="${BDEPEND} app-text/dos2unix"
+
# returns the version of cabal currently in use.
# Rarely it's handy to pin cabal version from outside.
-: ${_CABAL_VERSION_CACHE:=""}
+: "${_CABAL_VERSION_CACHE:=""}"
cabal-version() {
if [[ -z "${_CABAL_VERSION_CACHE}" ]]; then
if [[ "${CABAL_BOOTSTRAP}" ]]; then
@@ -184,6 +323,10 @@ cabal-version() {
# We ask portage, not ghc, so that we only pick up
# portage-installed cabal versions.
_CABAL_VERSION_CACHE="$(ghc-extract-pm-version dev-haskell/cabal)"
+ # exception for live (9999) version
+ if [[ "${_CABAL_VERSION_CACHE}" == 9999 ]]; then
+ _CABAL_VERSION_CACHE="$(ghc-cabal-version)"
+ fi
fi
fi
echo "${_CABAL_VERSION_CACHE}"
@@ -216,8 +359,44 @@ cabal-bootstrap() {
setup_bootstrap_args+=(-threaded)
fi
+ # The packages available when compiling Setup.hs need to be controlled,
+ # otherwise module name collisions are possible.
+ local -a bootstrap_pkg_args=(-hide-all-packages)
+
+ # Expose common packages bundled with GHC
+ # See: <https://gitlab.haskell.org/ghc/ghc/-/wikis/commentary/libraries/version-history>
+ local default_exposed_pkgs="
+ Cabal
+ array
+ base
+ binary
+ bytestring
+ containers
+ deepseq
+ directory
+ exceptions
+ filepath
+ haskeline
+ mtl
+ parsec
+ pretty
+ process
+ stm
+ template-haskell
+ terminfo
+ text
+ time
+ transformers
+ unix
+ xhtml
+ "
+
+ for pkg in $default_exposed_pkgs ${GHC_BOOTSTRAP_PACKAGES[*]}; do
+ bootstrap_pkg_args+=(-package "$pkg")
+ done
+
make_setup() {
- set -- -package "${cabalpackage}" --make "${setupmodule}" \
+ set -- "${bootstrap_pkg_args[@]}" --make "${setupmodule}" \
$(ghc-make-args) \
"${setup_bootstrap_args[@]}" \
${HCFLAGS} \
@@ -228,20 +407,6 @@ cabal-bootstrap() {
$(ghc-getghc) "$@"
}
if $(ghc-supports-shared-libraries); then
- # # some custom build systems might use external libraries,
- # # for which we don't have shared libs, so keep static fallback
- # bug #411789, http://hackage.haskell.org/trac/ghc/ticket/5743#comment:3
- # http://hackage.haskell.org/trac/ghc/ticket/7062
- # http://hackage.haskell.org/trac/ghc/ticket/3072
- # ghc does not set RPATH for extralibs, thus we do it ourselves by hands
- einfo "Prepending $(ghc-libdir) to LD_LIBRARY_PATH"
- if [[ ${CHOST} != *-darwin* ]]; then
- LD_LIBRARY_PATH="$(ghc-libdir)${LD_LIBRARY_PATH:+:}${LD_LIBRARY_PATH}"
- export LD_LIBRARY_PATH
- else
- DYLD_LIBRARY_PATH="$(ghc-libdir)${DYLD_LIBRARY_PATH:+:}${DYLD_LIBRARY_PATH}"
- export DYLD_LIBRARY_PATH
- fi
{ make_setup -dynamic "$@" && ./setup --help >/dev/null; } ||
make_setup "$@" || die "compiling ${setupmodule} failed"
else
@@ -254,7 +419,7 @@ cabal-mksetup() {
local setup_src=${setupdir}/Setup.hs
rm -vf "${setupdir}"/Setup.{lhs,hs}
- elog "Creating 'Setup.hs' for 'Simple' build type."
+ einfo "Creating 'Setup.hs' for 'Simple' build type."
echo 'import Distribution.Simple; main = defaultMain' \
> "${setup_src}" || die "failed to create default Setup.hs"
@@ -270,7 +435,7 @@ cabal-hscolour() {
}
cabal-haddock() {
- haskell-cabal-run_verbose ./setup haddock "$@"
+ haskell-cabal-run_verbose ./setup haddock ${CABAL_HADDOCK_TARGETS[*]} "$@"
}
cabal-die-if-nonempty() {
@@ -319,9 +484,9 @@ cabal-configure() {
# it generates for ghc's base and other packages.
local p=${EPREFIX}/usr/bin/haddock-ghc-$(ghc-version)
if [[ -f $p ]]; then
- cabalconf+=(--with-haddock="${p}")
+ cabalconf+=( --with-haddock="${p}" )
else
- cabalconf+=(--with-haddock=${EPREFIX}/usr/bin/haddock)
+ cabalconf+=( --with-haddock="${EPREFIX}"/usr/bin/haddock )
fi
fi
if [[ -n "${CABAL_USE_PROFILE}" ]] && use profile; then
@@ -396,7 +561,7 @@ cabal-configure() {
cabalconf+=(--verbose)
# We build shared version of our Cabal where ghc ships it's shared
- # version of it. We will link ./setup as dynamic binary againt Cabal later.
+ # version of it. We will link ./setup as dynamic binary against Cabal later.
[[ ${CATEGORY}/${PN} == "dev-haskell/cabal" ]] && \
$(ghc-supports-shared-libraries) && \
cabalconf+=(--enable-shared)
@@ -448,7 +613,7 @@ cabal-build() {
}
cabal-copy() {
- set -- copy --destdir="${D}" "$@"
+ set -- copy "$@" --destdir="${D}"
echo ./setup "$@"
./setup "$@" || die "setup copy failed"
@@ -510,6 +675,41 @@ haskell-cabal_pkg_setup() {
fi
}
+haskell-cabal_src_prepare() {
+ # Needed for packages that are still using MY_PN
+ if [[ -n ${MY_PN} ]]; then
+ local cabal_file="${S}/${MY_PN}.cabal"
+ else
+ local cabal_file="${CABAL_FILE}"
+ fi
+
+ if [[ -n ${CABAL_DISTFILE} ]]; then
+ # pull revised cabal from upstream
+ einfo "Using revised .cabal file from Hackage: revision ${CABAL_HACKAGE_REVISION}"
+ cp "${DISTDIR}/${CABAL_DISTFILE}" "${cabal_file}" || die
+ fi
+
+ # Convert to unix line endings
+ dos2unix "${cabal_file}" || die
+
+ # Apply patches *after* pulling the revised cabal
+ default
+
+ if [[ -n "${CABAL_CHBINS}" ]]; then
+ for b in "${CABAL_CHBINS[@]}"; do
+ export CABAL_CHDEPS=( "${CABAL_CHDEPS[@]}" "executable ${b}" )
+ done
+ fi
+
+ # Clean CABAL_CHDEPS of any blank entries
+ local chdeps=()
+ for d in "${CABAL_CHDEPS[@]}"; do
+ [[ -n "${d}" ]] && export chdeps+=( "${d}" )
+ done
+
+ [[ -n "${chdeps[@]}" ]] && cabal_chdeps "${chdeps[@]}"
+}
+
haskell-cabal_src_configure() {
cabal-is-dummy-lib && return
@@ -527,6 +727,19 @@ cabal_src_configure() {
haskell-cabal_src_configure "$@"
}
+# Run this to search for directories in "${S}/dist/build/" which contain
+# libraries, and add them to LD_LIBRARY_PATH
+cabal-export-dist-libs() {
+ local so lib_dir
+ while read -r lib_dir; do
+ export LD_LIBRARY_PATH="${lib_dir}${LD_LIBRARY_PATH+:}${LD_LIBRARY_PATH}"
+ done < <(
+ find "${S}/dist/build" -name "*.so" \
+ | while read -r so; do dirname "$so"; done \
+ | sort -u \
+ )
+}
+
# exported function: cabal-style bootstrap configure and compile
cabal_src_compile() {
cabal-is-dummy-lib && return
@@ -534,9 +747,10 @@ cabal_src_compile() {
cabal-build
if [[ -n "$CABAL_USE_HADDOCK" ]] && use doc; then
+
if [[ -n "$CABAL_USE_HSCOLOUR" ]] && use hscolour; then
# --hyperlink-source implies calling 'setup hscolour'
- haddock_args+=(--hyperlink-source)
+ local haddock_args=(--hyperlink-source)
fi
cabal-haddock "${haddock_args[@]}" $CABAL_EXTRA_HADDOCK_FLAGS
@@ -546,7 +760,7 @@ cabal_src_compile() {
fi
if [[ -n "${CABAL_REBUILD_AFTER_DOC_WORKAROUND}" ]]; then
ewarn "rebuild-after-doc-workaround is enabled. This is a"
- ewarn "temporary worakround to deal with https://github.com/haskell/cabal/issues/7213"
+ ewarn "temporary workaround to deal with https://github.com/haskell/cabal/issues/7213"
ewarn "until the upstream issue can be resolved."
cabal-build
fi
@@ -559,6 +773,10 @@ cabal_src_compile() {
ewarn "hoogle USE flag requires doc USE flag, building without hoogle"
fi
fi
+
+ # Export built libraries to LD_LIBRARY_PATH so they can be used in the
+ # test and install phases.
+ cabal-export-dist-libs
}
haskell-cabal_src_compile() {
@@ -579,6 +797,15 @@ haskell-cabal_src_test() {
else
einfo ">>> Test phase [cabal test]: ${CATEGORY}/${PF}"
+ cabal-register-inplace
+
+ # Add binary build paths to PATH so just-built binaries can be found
+ # during testing.
+ local bin
+ for bin in ${CABAL_TEST_REQUIRED_BINS[*]}; do
+ export PATH="${S}/dist/build/${bin}${PATH+:}${PATH}"
+ done
+
# '--show-details=streaming' appeared in Cabal-1.20
if ./setup test --help | grep -q -- "'streaming'"; then
cabaltest+=(--show-details=streaming)
@@ -599,7 +826,8 @@ haskell-cabal_src_test() {
# exported function: cabal-style copy and register
cabal_src_install() {
if ! cabal-is-dummy-lib; then
- cabal-copy
+ # Pass arguments to cabal-copy
+ cabal-copy "$@"
cabal-pkg
fi
@@ -607,23 +835,43 @@ cabal_src_install() {
# if it does not exist (dummy libraries and binaries w/o libraries)
local ghc_confdir_with_prefix="$(ghc-confdir)"
# remove EPREFIX
- dodir ${ghc_confdir_with_prefix#${EPREFIX}}
+ dodir "${ghc_confdir_with_prefix#${EPREFIX}}"
local hint_db="${D}/$(ghc-confdir)"
local hint_file="${hint_db}/gentoo-empty-${CATEGORY}-${PF}.conf"
mkdir -p "${hint_db}" || die
touch "${hint_file}" || die
}
+# Arguments passed to this function will make their way to `cabal-copy`
+# and eventually `./setup copy`. This allows you to specify which
+# components will be installed.
+# e.g. `haskell-cabal_src_install "lib:${CABAL_PN}"` will only install the library
haskell-cabal_src_install() {
pushd "${S}" > /dev/null || die
- cabal_src_install
+ cabal_src_install "$@"
popd > /dev/null || die
}
haskell-cabal_pkg_postinst() {
ghc-package_pkg_postinst
+
+ if [[ -n "${CABAL_CHBINS}" ]]; then
+ elog "The following executables installed with this package have been renamed to help"
+ elog "prevent name collisions:"
+ elog ""
+
+ local from
+ for b in "${CABAL_CHBINS[@]}"; do
+ if [[ -z "${from}" ]]; then
+ from="${b}"
+ else
+ elog "${from} -> ${b}"
+ from=""
+ fi
+ done
+ fi
}
haskell-cabal_pkg_postrm() {
@@ -661,7 +909,10 @@ cabal_flag() {
}
# @FUNCTION: cabal_chdeps
+# @DEPRECATED: CABAL_CHDEPS
# @DESCRIPTION:
+# See the CABAL_CHDEPS variable for the preferred way to use this function.
+#
# Allows easier patching of $CABAL_FILE (${S}/${PN}.cabal by default)
# depends
#
@@ -669,32 +920,23 @@ cabal_flag() {
#
# Dies on error.
#
-# Usage examples:
-#
-# src_prepare() {
-# cabal_chdeps \
-# 'base >= 4.2 && < 4.6' 'base >= 4.2 && < 4.7' \
-# 'containers ==0.4.*' 'containers >= 0.4 && < 0.6'
-#}
-# or
-# src_prepare() {
-# CABAL_FILE=${S}/${MY_PN}.cabal cabal_chdeps \
-# 'base >= 4.2 && < 4.6' 'base >= 4.2 && < 4.7'
-# CABAL_FILE=${S}/${MY_PN}-tools.cabal cabal_chdeps \
-# 'base == 3.*' 'base >= 4.2 && < 4.7'
-#}
-#
cabal_chdeps() {
- local cabal_fn=${MY_PN:-${PN}}.cabal
- local cf=${CABAL_FILE:-${S}/${cabal_fn}}
+ # Needed for compatibility with ebuilds still using MY_PN
+ if [[ -n ${MY_PN} ]]; then
+ local cabal_file="${S}/${MY_PN}.cabal"
+ else
+ local cabal_file="${CABAL_FILE}"
+ fi
+
local from_ss # ss - substring
local to_ss
local orig_c # c - contents
local new_c
- [[ -f $cf ]] || die "cabal file '$cf' does not exist"
+ [[ -f "${cabal_file}" ]] || die "cabal file '${cabal_file}' does not exist"
- orig_c=$(< "$cf")
+ orig_c=$(< "${cabal_file}")
+ local next_c=${orig_c}
while :; do
from_pat=$1
@@ -709,27 +951,34 @@ cabal_chdeps() {
from_pat=${from_pat//\*/\\*}
from_pat=${from_pat//\[/\\[}
- new_c=${orig_c//${from_pat}/${to_str}}
+ # escape ampersands in the 'to' part
+ to_str=$(sed -e 's%&%\\\&%g' <<< "${to_str}")
- if [[ -n $CABAL_DEBUG_LOOSENING ]]; then
- echo "${orig_c}" >"${T}/${cf}".pre
- echo "${new_c}" >"${T}/${cf}".post
- diff -u "${T}/${cf}".{pre,post}
- fi
+ # use sed instead of bash to make sure things are consistent in the presence
+ # of the patsub_replacement shell option
+ # See: <https://github.com/gentoo-haskell/gentoo-haskell/issues/1363>
+ new_c="$(sed -e "s%${from_pat}%${to_str}%g" <<< "${next_c}")"
- [[ "${orig_c}" == "${new_c}" ]] && die "no trigger for '${from_pat}'"
- orig_c=${new_c}
+ [[ "${next_c}" == "${new_c}" ]] && die "no trigger for '${from_pat}'"
+ next_c=${new_c}
shift
shift
done
- echo "${new_c}" > "$cf" ||
+ if [[ -n $CABAL_DEBUG_LOOSENING ]]; then
+ local cabal_base="${T}/$(basename "${cabal_file}")"
+ echo "${orig_c}" > "${cabal_base}.pre"
+ echo "${new_c}" > "${cabal_base}.post"
+ diff -u --color=always "${cabal_base}".{pre,post}
+ fi
+
+ echo "${new_c}" > "$cabal_file" ||
die "failed to update"
}
# @FUNCTION: cabal-constraint
# @DESCRIPTION:
-# Allowes to set contraint to the libraries that are
+# Allows setting a constraint on the libraries that are
# used by the specified package
cabal-constraint() {
while read p v ; do
@@ -758,3 +1007,112 @@ replace-hcflags() {
return 0
}
+
+# @FUNCTION: cabal-register-inplace
+# @DESCRIPTION:
+# Register the package library with the in-place package DB, located in
+# "${S}/dist/package.conf.inplace/". This is sometimes needed for tests when
+# the package is not yet installed. Unfortunately, prebuilt solutions to this
+# problem, such as './setup register --inplace', do not seem to work correctly.
+#
+# This function will not run unless CABAL_HAS_LIBRARIES is set to a nonempty
+# value.
+#
+# You can set SKIP_REGISTER_INPLACE to a nonempty value to skip this function
+# (useful since it is automatically called from within haskell-cabal_src_test).
+#
+# The environment variables TEST_CABAL_PN and TEST_PN can be manually set in
+# case the test suite is within a separate haskell package.
+#
+# The environment variable EXTRA_PACKAGE_DBS can be used to set extra databases
+# for ghc-pkg to read.
+cabal-register-inplace() {
+ if [[ -n ${CABAL_HAS_LIBRARIES} ]] && [[ -z ${SKIP_REGISTER_INPLACE} ]]; then
+ # It is assumed that the package-id is either registered in the global
+ # DB or in an "in-place" DB, local to the build dir. cabal-doctest is an
+ # example of something that makes this assumption.
+ local inplace_db="${S}/dist/package.conf.inplace/"
+
+ # Set test-specific CABAL_PN/PN values if they are not set already
+ : ${TEST_CABAL_PN:="$(
+ if [[ -n $MY_PN ]]; then
+ echo "${MY_PN}"
+ else
+ echo "${CABAL_PN}"
+ fi
+ )"}
+ : ${TEST_PN:="${PN}"}
+
+ local cabal_file="${S}/${TEST_CABAL_PN}.cabal"
+ local conf="${S}/${TEST_CABAL_PN}.conf"
+
+ # Generate the package conf
+ local ipid="$(./setup register --gen-pkg-config="${conf}" --print-ipid || die)"
+
+ # In the case that the package has multiple libraries (one "normal" and
+ # one or more "private" libraries) './setup register' will create a
+ # folder instead of a file, containing one conf file per library.
+ # The main library's conf file will end with the string captured by the
+ # 'ipid' variable.
+ if [[ -d "${S}/${TEST_CABAL_PN}.conf" ]]; then
+ local pkg_conf="$(find "${conf}" -maxdepth 1 -type f -name "*${ipid}")"
+ [[ -z $pkg_conf ]] && die "Failed to find package conf file in ${conf}"
+ elif [[ -f "${conf}" ]]; then
+ local pkg_conf="${conf}"
+ else
+ die "Package conf was not created by './setup register'"
+ fi
+
+ # Modify the package conf so that it points to directories within the build
+ # dir.
+ local sed=( sed -ri ) k
+ for k in import-dirs library-dirs dynamic-library-dirs; do
+ sed+=( -e "s%(^${k}:\s+)\S.*%\1${S}/dist/build%" )
+ done
+ sed+=( -e "s%/usr/share/doc/${P}/html%${S}/dist/doc/html/${TEST_CABAL_PN}%" )
+ sed+=( "${pkg_conf}" )
+ "${sed[@]}" || die "sed command failed"
+
+ local extra_pkg_dbs=() db
+ for db in ${EXTRA_PACKAGE_DBS[*]}; do
+ extra_pkg_dbs+=( --package-db="${db}" )
+ done
+
+ # The package-id may already be registered in the global DB, which will
+ # cause ghc-pkg to fail. However, we don't want to 'die' in this case, as
+ # the package registration in the global DB will be used instead.
+ /usr/bin/ghc-pkg "${extra_pkg_dbs[@]}" --package-db="${inplace_db}" register "${pkg_conf}"
+
+ local ret="$?"
+
+ case ${ret} in
+ 0) return 0 ;;
+ 1) einfo "Package is already registered in global DB"; return 0 ;;
+ *) die "ghc-pkg returned unusual code: ${ret}" ;;
+ esac
+ fi
+}
+
+# @FUNCTION: cabal-run-dist-bin
+# @USAGE: <bin> [args]
+# @DESCRIPTION:
+# Run an executable that was built but has not been installed to the system.
+# These live in "${S}/dist/build/", which also includes libraries that are
+# needed by the executable. (Needed libraries are automatically added to
+# LD_LIBRARY_PATH by haskell-cabal_src_compile().)
+#
+# This is only intended to be run in the test and install phases.
+cabal-run-dist-bin() {
+ einfo "LD_LIBRARY_PATH: ${LD_LIBRARY_PATH}"
+ case "$EBUILD_PHASE_FUNC" in
+ src_test|src_install)
+ local bin="$1"
+ shift
+ "${S}/dist/build/${bin}/${bin}" "$@"
+ ;;
+ *)
+ ewarn "cabal-run-dist-bin() called from ${EBUILD_PHASE_FUNC} (ignoring)"
+ false
+ ;;
+ esac
+}
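
The test helpers added at the end of the file are opt-in from the ebuild side. A hypothetical src_test override combining CABAL_TEST_REQUIRED_BINS with cabal-run-dist-bin; the binary name is a placeholder:

CABAL_TEST_REQUIRED_BINS=( examplelib-demo )

src_test() {
	# exported phase: registers the in-place package DB and prepends the
	# dist/build/<bin> directories listed above to PATH
	haskell-cabal_src_test

	# smoke-test a freshly built, not yet installed executable
	cabal-run-dist-bin examplelib-demo --version || die
}
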
diff --git a/eclass/java-ant-2.eclass b/eclass/java-ant-2.eclass
index 9898d5f7df6f..b0d2fb216b10 100644
--- a/eclass/java-ant-2.eclass
+++ b/eclass/java-ant-2.eclass
@@ -1,4 +1,4 @@
-# Copyright 2004-2021 Gentoo Authors
+# Copyright 2004-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-ant-2.eclass
@@ -7,7 +7,7 @@
# @AUTHOR:
# kiorky <kiorky@cryptelium.net>
# Petteri Räty <betelgeuse@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: java-utils-2
# @BLURB: eclass for ant based Java packages
# @DESCRIPTION:
@@ -15,18 +15,16 @@
# manual manipulation of build.xml files. Should be inherited after java-pkg-2
# or java-pkg-opt-2 eclass.
-inherit java-utils-2 multilib
-
-case ${EAPI:-0} in
- [5678]) ;;
+case ${EAPI} in
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_configure
-
if [[ -z ${_JAVA_ANT_2_ECLASS} ]] ; then
_JAVA_ANT_2_ECLASS=1
+inherit java-utils-2 multilib
+
# This eclass provides functionality for Java packages which use
# ant to build. In particular, it will attempt to fix build.xml files, so that
# they use the appropriate 'target' and 'source' attributes.
@@ -50,18 +48,14 @@ _JAVA_ANT_2_ECLASS=1
#The implementation of dependencies is handled by java-utils-2.eclass
#WANT_ANT_TASKS
-# @ECLASS_VARIABLE: JAVA_ANT_DISABLE_ANT_CORE_DEP
-# @DEFAULT_UNSET
+# @VARIABLE: JAVA_ANT_E_DEPEND
+# @INTERNAL
# @DESCRIPTION:
-# Setting this variable non-empty before inheriting java-ant-2 disables adding
-# dev-java/ant-core into DEPEND.
-if [[ -z "${JAVA_ANT_DISABLE_ANT_CORE_DEP}" ]]; then
- JAVA_ANT_E_DEPEND+=" >=dev-java/ant-core-1.8.2"
- [[ "${EAPI:-0}" != 0 ]] && JAVA_ANT_E_DEPEND+=":0"
-fi
+# Convenience variable adding packages to DEPEND so they need not be added
+# in the ebuild.
+JAVA_ANT_E_DEPEND+=" >=dev-java/ant-1.10.14-r2:0"
# add ant tasks specified in WANT_ANT_TASKS to DEPEND
-local ANT_TASKS_DEPEND;
ANT_TASKS_DEPEND="$(java-pkg_ant-tasks-depend)"
# check that java-pkg_ant-tasks-depend didn't fail
if [[ $? != 0 ]]; then
@@ -72,8 +66,9 @@ fi
# We need some tools from javatoolkit. We also need ant dependencies
# constructed above.
JAVA_ANT_E_DEPEND="${JAVA_ANT_E_DEPEND}
- ${ANT_TASKS_DEPEND}
- dev-java/javatoolkit"
+ ${ANT_TASKS_DEPEND}
+ dev-java/javatoolkit"
+unset ANT_TASKS_DEPEND
# this eclass must be inherited after java-pkg-2 or java-pkg-opt-2
# if it's java-pkg-opt-2, ant dependencies are pulled based on USE flag
@@ -123,7 +118,7 @@ JAVA_ANT_CLASSPATH_TAGS="javac xjavac"
# @FUNCTION: java-ant-2_src_configure
# @DESCRIPTION:
-# src_configure rewrites the build.xml files automatically, unless EAPI is undefined, 0 or 1.
+# src_configure rewrites the build.xml files automatically.
java-ant-2_src_configure() {
# if java support is optional, don't perform this when the USE flag is off
if has java-pkg-opt-2 ${INHERITED}; then
@@ -277,7 +272,7 @@ java-ant_bsfix_files() {
for dir in ${JAVA_ANT_JAVADOC_INPUT_DIRS};do
if [[ ! -d ${dir} ]]; then
- eerror "This dir: ${dir} doesnt' exists"
+ eerror "Directory ${dir} doesn't exist"
die "You must specify directories for javadoc input/output dirs."
fi
done
@@ -439,3 +434,5 @@ java-ant_rewrite-bootclasspath() {
}
fi
+
+EXPORT_FUNCTIONS src_configure
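
Ebuilds built on java-ant-2 usually just declare the ant tasks they need via WANT_ANT_TASKS before the inherit and let the exported src_configure rewrite build.xml. A hypothetical fragment; the task name and classpath atom are placeholders:

EAPI=8

JAVA_PKG_IUSE="doc source"
WANT_ANT_TASKS="ant-junit"

inherit java-pkg-2 java-ant-2

DESCRIPTION="Example ant-built Java package"
LICENSE="Apache-2.0"
SLOT="0"
KEYWORDS="~amd64"

DEPEND=">=virtual/jdk-1.8:*"
RDEPEND=">=virtual/jre-1.8:*"

EANT_BUILD_TARGET="jar"
EANT_GENTOO_CLASSPATH="commons-lang-3.6"
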
diff --git a/eclass/java-osgi.eclass b/eclass/java-osgi.eclass
index 2043cbfb2d89..7019fab7b203 100644
--- a/eclass/java-osgi.eclass
+++ b/eclass/java-osgi.eclass
@@ -1,4 +1,4 @@
-# Copyright 2007-2021 Gentoo Authors
+# Copyright 2007-2022 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-osgi.eclass
@@ -6,7 +6,7 @@
# java@gentoo.org
# @AUTHOR:
# Java maintainers <java@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: java-utils-2
# @BLURB: Java OSGi eclass
# @DESCRIPTION:
@@ -15,8 +15,8 @@
# in their manifests. Currently this is used only by Eclipse-3.3 - later we
# could extend this so that Gentoo Java system would be fully OSGi compliant.
-case ${EAPI:-0} in
- [5678]) ;;
+case ${EAPI} in
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -28,12 +28,9 @@ inherit java-utils-2
# @ECLASS_VARIABLE: _OSGI_T
# @INTERNAL
# @DESCRIPTION:
-# We define _OSGI_T so that it does not contain a slash at the end.
-# According to Paludis guys, there is currently a proposal for EAPIs that
-# would require all variables to end with a slash.
-_OSGI_T="${T/%\//}"
+_OSGI_T="${T}"
-# must get Diego to commit something like this to portability.eclass
+# TODO add to portability.eclass
_canonicalise() {
if type -p realpath > /dev/null; then
realpath "${@}"
diff --git a/eclass/java-pkg-2.eclass b/eclass/java-pkg-2.eclass
index ef4670a110f4..c17a9db26b3b 100644
--- a/eclass/java-pkg-2.eclass
+++ b/eclass/java-pkg-2.eclass
@@ -1,4 +1,4 @@
-# Copyright 2004-2021 Gentoo Authors
+# Copyright 2004-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-pkg-2.eclass
@@ -6,15 +6,15 @@
# java@gentoo.org
# @AUTHOR:
# Thomas Matthijs <axxo@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @PROVIDES: java-utils-2
# @BLURB: Eclass for Java Packages
# @DESCRIPTION:
# This eclass should be inherited for pure Java packages, or by packages which
# need to use Java.
-case ${EAPI:-0} in
- [5678]) ;;
+case ${EAPI} in
+ 6|7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -73,7 +73,7 @@ java-pkg-2_src_prepare() {
# EANT_BUILD_TARGET - the ant target/targets to execute (default: jar)
# EANT_DOC_TARGET - the target to build extra docs under the doc use flag
# (default: javadoc; declare empty to disable completely)
-# EANT_GENTOO_CLASSPATH - @see eant documention in java-utils-2.eclass
+# EANT_GENTOO_CLASSPATH - @see eant documentation in java-utils-2.eclass
# EANT_EXTRA_ARGS - extra arguments to pass to eant
# EANT_ANT_TASKS - modifies the ANT_TASKS variable in the eant environment
# @CODE
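
The knobs listed in the comment block above are plain variables set in the ebuild before the exported src_compile runs. A short hypothetical fragment; the target names and extra arguments are placeholders:

# Build the "dist" target instead of the default "jar", skip javadoc
# generation, and pass an extra property through to ant.
EANT_BUILD_TARGET="dist"
EANT_DOC_TARGET=""
EANT_EXTRA_ARGS="-Dskip.native=true"
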
diff --git a/eclass/java-pkg-opt-2.eclass b/eclass/java-pkg-opt-2.eclass
index b4461d7f1491..7b3e79749fb6 100644
--- a/eclass/java-pkg-opt-2.eclass
+++ b/eclass/java-pkg-opt-2.eclass
@@ -1,4 +1,4 @@
-# Copyright 2004-2021 Gentoo Authors
+# Copyright 2004-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-pkg-opt-2.eclass
@@ -6,15 +6,15 @@
# java@gentoo.org
# @AUTHOR:
# Thomas Matthijs <axxo@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: java-utils-2
# @BLURB: Eclass for package with optional Java support
# @DESCRIPTION:
# Inherit this eclass instead of java-pkg-2 if you only need optional Java
# support.
-case ${EAPI:-0} in
- [5678]) ;;
+case ${EAPI} in
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -49,10 +49,7 @@ java-pkg-opt-2_pkg_setup() {
java-pkg-opt-2_src_prepare() {
use ${JAVA_PKG_OPT_USE} && java-utils-2_src_prepare
- case "${EAPI:-0}" in
- [0-5]) ;;
- *) use ${JAVA_PKG_OPT_USE} || eapply_user ;;
- esac
+ use ${JAVA_PKG_OPT_USE} || eapply_user
}
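
java-pkg-opt-2 gates its exported phases on the USE flag named by JAVA_PKG_OPT_USE, and an ebuild doing extra Java-only work follows the same pattern. A hypothetical fragment, assuming the default flag name "java" and an invented jar path:

src_install() {
	default

	if use java; then
		# install the optional Java bindings only when USE=java is enabled
		java-pkg_dojar bindings/${PN}.jar
	fi
}
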
diff --git a/eclass/java-pkg-simple.eclass b/eclass/java-pkg-simple.eclass
index 47499c7870a9..6b473ed768ed 100644
--- a/eclass/java-pkg-simple.eclass
+++ b/eclass/java-pkg-simple.eclass
@@ -1,4 +1,4 @@
-# Copyright 2004-2022 Gentoo Authors
+# Copyright 2004-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-pkg-simple.eclass
@@ -6,7 +6,7 @@
# java@gentoo.org
# @AUTHOR:
# Java maintainers <java@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Eclass for packaging Java software with ease.
# @DESCRIPTION:
# This class is intended to build pure Java packages from Java sources
@@ -16,8 +16,7 @@
# addressed by an ebuild by putting corresponding files into the target
# directory before calling the src_compile function of this eclass.
-case ${EAPI:-0} in
- 5|6) inherit eutils ;; # eutils for eqawarn
+case ${EAPI} in
7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -36,7 +35,7 @@ S="${WORKDIR}"
# handle dependencies for testing frameworks
if has test ${JAVA_PKG_IUSE}; then
- local test_deps
+ test_deps=
for framework in ${JAVA_TESTING_FRAMEWORKS}; do
case ${framework} in
junit)
@@ -47,10 +46,12 @@ if has test ${JAVA_PKG_IUSE}; then
test_deps+=" amd64? ( dev-util/pkgdiff
dev-util/japi-compliance-checker )";;
testng)
- test_deps+=" dev-java/testng:0";;
+ [[ ${PN} != testng ]] && \
+ test_deps+=" dev-java/testng:0";;
esac
done
[[ ${test_deps} ]] && DEPEND="test? ( ${test_deps} )"
+ unset test_deps
fi
# @ECLASS_VARIABLE: JAVA_GENTOO_CLASSPATH
@@ -116,7 +117,7 @@ fi
# @ECLASS_VARIABLE: JAVA_ENCODING
# @DESCRIPTION:
# The character encoding used in the source files.
-: ${JAVA_ENCODING:=UTF-8}
+: "${JAVA_ENCODING:=UTF-8}"
# @ECLASS_VARIABLE: JAVAC_ARGS
# @DEFAULT_UNSET
@@ -134,6 +135,12 @@ fi
# JAVA_MAIN_CLASS="org.gentoo.java.ebuilder.Main"
# @CODE
+# @ECLASS_VARIABLE: JAVA_AUTOMATIC_MODULE_NAME
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The value of the Automatic-Module-Name entry, which is going to be added to
+# MANIFEST.MF.
+
# @ECLASS_VARIABLE: JAVADOC_ARGS
# @DEFAULT_UNSET
# @DESCRIPTION:
@@ -142,7 +149,7 @@ fi
# @ECLASS_VARIABLE: JAVA_JAR_FILENAME
# @DESCRIPTION:
# The name of the jar file to create and install.
-: ${JAVA_JAR_FILENAME:=${PN}.jar}
+: "${JAVA_JAR_FILENAME:=${PN}.jar}"
# @ECLASS_VARIABLE: JAVA_BINJAR_FILENAME
# @DEFAULT_UNSET
@@ -155,7 +162,11 @@ fi
# If ${JAVA_MAIN_CLASS} is set, we will create a launcher to
# execute the jar, and ${JAVA_LAUNCHER_FILENAME} will be the
# name of the script.
-: ${JAVA_LAUNCHER_FILENAME:=${PN}-${SLOT}}
+if [[ ${SLOT} = 0 ]]; then
+ : "${JAVA_LAUNCHER_FILENAME:=${PN}}"
+else
+ : "${JAVA_LAUNCHER_FILENAME:=${PN}-${SLOT}}"
+fi
# @ECLASS_VARIABLE: JAVA_TESTING_FRAMEWORKS
# @DEFAULT_UNSET
@@ -338,9 +349,6 @@ java-pkg-simple_prepend_resources() {
java-pkg-simple_src_compile() {
local sources=sources.lst classes=target/classes apidoc=target/api moduleinfo
- # auto generate classpath
- java-pkg_gen-cp JAVA_GENTOO_CLASSPATH
-
# do not compile if we decide to install binary jar
if has binary ${JAVA_PKG_IUSE} && use binary; then
# register the runtime dependencies
@@ -351,17 +359,21 @@ java-pkg-simple_src_compile() {
cp "${DISTDIR}"/${JAVA_BINJAR_FILENAME} ${JAVA_JAR_FILENAME}\
|| die "Could not copy the binary jar file to ${S}"
return 0
+ else
+ # auto generate classpath
+ java-pkg_gen-cp JAVA_GENTOO_CLASSPATH
fi
# gather sources
# if target < 9, we need to compile module-info.java separately
# as this feature is not supported before Java 9
- if [[ java-pkg_get-target -lt 9 ]]; then
+ local target="$(java-pkg_get-target)"
+ if [[ ${target#1.} -lt 9 ]]; then
find "${JAVA_SRC_DIR[@]}" -name \*.java ! -name module-info.java > ${sources}
- moduleinfo=$(find "${JAVA_SRC_DIR[@]}" -name module-info.java)
else
find "${JAVA_SRC_DIR[@]}" -name \*.java > ${sources}
fi
+ moduleinfo=$(find "${JAVA_SRC_DIR[@]}" -name module-info.java)
# create the target directory
mkdir -p ${classes} || die "Could not create target directory"
@@ -371,7 +383,7 @@ java-pkg-simple_src_compile() {
java-pkg-simple_getclasspath
java-pkg-simple_prepend_resources ${classes} "${JAVA_RESOURCE_DIRS[@]}"
- if [[ -n ${moduleinfo} ]] || [[ java-pkg_get-target -lt 9 ]]; then
+ if [[ -z ${moduleinfo} ]] || [[ ${target#1.} -lt 9 ]]; then
ejavac -d ${classes} -encoding ${JAVA_ENCODING}\
${classpath:+-classpath ${classpath}} ${JAVAC_ARGS} @${sources}
else
@@ -381,7 +393,7 @@ java-pkg-simple_src_compile() {
fi
# handle module-info.java separately as it needs at least JDK 9
- if [[ -n ${moduleinfo} ]]; then
+ if [[ -n ${moduleinfo} ]] && [[ ${target#1.} -lt 9 ]]; then
if java-pkg_is-vm-version-ge "9" ; then
local tmp_source=${JAVA_PKG_WANT_SOURCE} tmp_target=${JAVA_PKG_WANT_TARGET}
@@ -401,30 +413,53 @@ java-pkg-simple_src_compile() {
# javadoc
if has doc ${JAVA_PKG_IUSE} && use doc; then
- mkdir -p ${apidoc}
- ejavadoc -d ${apidoc} \
- -encoding ${JAVA_ENCODING} -docencoding UTF-8 -charset UTF-8 \
- ${classpath:+-classpath ${classpath}} ${JAVADOC_ARGS:- -quiet} \
- @${sources} || die "javadoc failed"
+ if [[ ${JAVADOC_SRC_DIRS} ]]; then
+ einfo "JAVADOC_SRC_DIRS exists, you need to call ejavadoc separately"
+ else
+ mkdir -p ${apidoc}
+ if [[ -z ${moduleinfo} ]] || [[ ${target#1.} -lt 9 ]]; then
+ ejavadoc -d ${apidoc} \
+ -encoding ${JAVA_ENCODING} -docencoding UTF-8 -charset UTF-8 \
+ ${classpath:+-classpath ${classpath}} ${JAVADOC_ARGS:- -quiet} \
+ @${sources} || die "javadoc failed"
+ else
+ ejavadoc -d ${apidoc} \
+ -encoding ${JAVA_ENCODING} -docencoding UTF-8 -charset UTF-8 \
+ ${classpath:+--module-path ${classpath}} ${JAVADOC_ARGS:- -quiet} \
+ @${sources} || die "javadoc failed"
+ fi
+ fi
fi
# package
local jar_args
if [[ -e ${classes}/META-INF/MANIFEST.MF ]]; then
+ sed '/Created-By: /Id' -i ${classes}/META-INF/MANIFEST.MF
jar_args="cfm ${JAVA_JAR_FILENAME} ${classes}/META-INF/MANIFEST.MF"
- elif [[ ${JAVA_MAIN_CLASS} ]]; then
- jar_args="cfe ${JAVA_JAR_FILENAME} ${JAVA_MAIN_CLASS}"
else
jar_args="cf ${JAVA_JAR_FILENAME}"
fi
jar ${jar_args} -C ${classes} . || die "jar failed"
+ if [[ -n "${JAVA_AUTOMATIC_MODULE_NAME}" ]]; then
+ echo "Automatic-Module-Name: ${JAVA_AUTOMATIC_MODULE_NAME}" \
+ >> "${T}/add-to-MANIFEST.MF" || die "adding module name failed"
+ fi
+ if [[ -n "${JAVA_MAIN_CLASS}" ]]; then
+ echo "Main-Class: ${JAVA_MAIN_CLASS}" \
+ >> "${T}/add-to-MANIFEST.MF" || die "adding main class failed"
+ fi
+ if [[ -f "${T}/add-to-MANIFEST.MF" ]]; then
+ jar ufmv ${JAVA_JAR_FILENAME} "${T}/add-to-MANIFEST.MF" \
+ || die "updating MANIFEST.MF failed"
+ rm -f "${T}/add-to-MANIFEST.MF" || die "cannot remove"
+ fi
}
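Outside the eclass, the manifest-update step above boils down to something like the following sketch; the jar name, module name and main class are made up for illustration:

    printf '%s\n' \
        "Automatic-Module-Name: org.example.foo" \
        "Main-Class: org.example.foo.Main" > add-to-MANIFEST.MF
    jar ufmv foo.jar add-to-MANIFEST.MF   # merge the extra entries into the jar's manifest
    rm -f add-to-MANIFEST.MF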
# @FUNCTION: java-pkg-simple_src_install
# @DESCRIPTION:
# src_install for simple single jar java packages. Simply installs
# ${JAVA_JAR_FILENAME}. It will also install a launcher if
-# ${JAVA_MAIN_CLASS} is set.
+# ${JAVA_MAIN_CLASS} is set. Also invokes einstalldocs.
java-pkg-simple_src_install() {
local sources=sources.lst classes=target/classes apidoc=target/api
@@ -455,13 +490,18 @@ java-pkg-simple_src_install() {
fi
java-pkg_dosrc ${srcdirs}
fi
+
+ einstalldocs
}
# @FUNCTION: java-pkg-simple_src_test
# @DESCRIPTION:
# src_test for simple single java jar file.
-# It will perform test with frameworks that are defined in
-# ${JAVA_TESTING_FRAMEWORKS}.
+# It will compile test classes from test sources using ejavac and perform tests
+# with frameworks that are defined in ${JAVA_TESTING_FRAMEWORKS}.
+# Test classes compiled with alternative compilers such as groovyc need to be
+# placed in the "generated-test" directory, as the content of that directory is
+# preserved, whereas the content of target/test-classes is removed.
java-pkg-simple_src_test() {
local test_sources=test_sources.lst classes=target/test-classes moduleinfo
local tests_to_run classpath
@@ -476,9 +516,18 @@ java-pkg-simple_src_test() {
return
fi
+ # https://bugs.gentoo.org/906311
+ # This will remove target/test-classes. Do not put any test-classes there manually.
+ rm -rf ${classes} || die
+
# create the target directory
mkdir -p ${classes} || die "Could not create target directory for testing"
+ # generated test classes should get compiled into "generated-test" directory
+ if [[ -d generated-test ]]; then
+ cp -r generated-test/* "${classes}" || die "cannot copy generated test classes"
+ fi
+
# get classpath
classpath="${classes}:${JAVA_JAR_FILENAME}"
java-pkg-simple_getclasspath
@@ -487,17 +536,17 @@ java-pkg-simple_src_test() {
# gathering sources for testing
# if target < 9, we need to compile module-info.java separately
# as this feature is not supported before Java 9
- if [[ java-pkg_get-target -lt 9 ]]; then
+ local target="$(java-pkg_get-target)"
+ if [[ ${target#1.} -lt 9 ]]; then
find "${JAVA_TEST_SRC_DIR[@]}" -name \*.java ! -name module-info.java > ${test_sources}
- moduleinfo=$(find "${JAVA_TEST_SRC_DIR[@]}" -name module-info.java)
else
find "${JAVA_TEST_SRC_DIR[@]}" -name \*.java > ${test_sources}
fi
-
+ moduleinfo=$(find "${JAVA_TEST_SRC_DIR[@]}" -name module-info.java)
# compile
if [[ -s ${test_sources} ]]; then
- if [[ -n ${moduleinfo} ]] || [[ java-pkg_get-target -lt 9 ]]; then
+ if [[ -z ${moduleinfo} ]] || [[ ${target#1.} -lt 9 ]]; then
ejavac -d ${classes} -encoding ${JAVA_ENCODING}\
${classpath:+-classpath ${classpath}} ${JAVAC_ARGS} @${test_sources}
else
@@ -508,7 +557,7 @@ java-pkg-simple_src_test() {
fi
# handle module-info.java separately as it needs at least JDK 9
- if [[ -n ${moduleinfo} ]]; then
+ if [[ -n ${moduleinfo} ]] && [[ ${target#1.} -lt 9 ]]; then
if java-pkg_is-vm-version-ge "9" ; then
local tmp_source=${JAVA_PKG_WANT_SOURCE} tmp_target=${JAVA_PKG_WANT_TARGET}
diff --git a/eclass/java-utils-2.eclass b/eclass/java-utils-2.eclass
index 5b4783f350f2..47123287ce70 100644
--- a/eclass/java-utils-2.eclass
+++ b/eclass/java-utils-2.eclass
@@ -1,4 +1,4 @@
-# Copyright 2004-2022 Gentoo Authors
+# Copyright 2004-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-utils-2.eclass
@@ -6,7 +6,7 @@
# java@gentoo.org
# @AUTHOR:
# Thomas Matthijs <axxo@gentoo.org>, Karl Trygve Kalleberg <karltk@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: Base eclass for Java packages
# @DESCRIPTION:
# This eclass provides functionality which is used by java-pkg-2.eclass,
@@ -17,17 +17,17 @@
# that have optional Java support. In addition you can inherit java-ant-2 for
# Ant-based packages.
-case ${EAPI:-0} in
- [5678]) ;;
+case ${EAPI} in
+ 6|7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_JAVA_UTILS_2_ECLASS} ]] ; then
_JAVA_UTILS_2_ECLASS=1
-# EAPI 7 has version functions built-in. Use eapi7-ver for all earlier eclasses.
+# EAPI 7 has version functions built-in. Use eapi7-ver for all earlier EAPIs.
# Keep versionator inheritance in case consumers are using it implicitly.
-[[ ${EAPI} == [56] ]] && inherit eapi7-ver eutils multilib versionator
+[[ ${EAPI} == 6 ]] && inherit eapi7-ver eqawarn multilib versionator
# Make sure we use java-config-2
export WANT_JAVA_CONFIG="2"
@@ -37,7 +37,7 @@ has test ${JAVA_PKG_IUSE} && RESTRICT+=" !test? ( test )"
# @VARIABLE: JAVA_PKG_E_DEPEND
# @INTERNAL
# @DESCRIPTION:
-# This is a convience variable to be used from the other java eclasses. This is
+# This is a convenience variable to be used from the other java eclasses. This is
# the version of java-config we want to use. Usually the latest stable version
# so that ebuilds can use new features without depending on specific versions.
JAVA_PKG_E_DEPEND=">=dev-java/java-config-2.2.0-r3"
@@ -66,6 +66,21 @@ JAVA_PKG_ALLOW_VM_CHANGE=${JAVA_PKG_ALLOW_VM_CHANGE:="yes"}
# JAVA_PKG_FORCE_VM=openjdk-11 emerge foo
# @CODE
+# @ECLASS_VARIABLE: JAVA_PKG_NO_CLEAN
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An array of expressions to match *.class or *.jar files in order to protect
+# them against deletion by java-pkg_clean.
+#
+# @CODE
+# JAVA_PKG_NO_CLEAN=(
+# "*/standard.jar"
+# "*/launch4j.jar"
+# "*/apps/jetty/apache-tomcat*"
+# "*/lib/jetty*"
+# )
+# @CODE
+
# @ECLASS_VARIABLE: JAVA_PKG_WANT_BUILD_VM
# @DEFAULT_UNSET
# @DESCRIPTION:
@@ -124,7 +139,7 @@ JAVA_PKG_ALLOW_VM_CHANGE=${JAVA_PKG_ALLOW_VM_CHANGE:="yes"}
# )
# @CODE
-# @ECLASS-VARIABLE: JAVA_TEST_RUNNER_EXTRA_ARGS
+# @ECLASS_VARIABLE: JAVA_TEST_RUNNER_EXTRA_ARGS
# @DEFAULT_UNSET
# @DESCRIPTION:
# Array of extra arguments that should be passed to the test runner when running tests.
@@ -203,6 +218,46 @@ JAVA_PKG_COMPILERS_CONF=${JAVA_PKG_COMPILERS_CONF:="/etc/java-config-2/build/com
# ebuild foo.ebuild compile
# @CODE
+# @ECLASS_VARIABLE: JAVADOC_CLASSPATH
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Comma or space separated list of java packages that are needed for generating
+# javadocs. Can be used to avoid overloading the compile classpath in multi-jar
+# packages if there are jar files which have different dependencies.
+#
+# @CODE
+# Example:
+# JAVADOC_CLASSPATH="
+# jna-4
+# jsch
+# "
+# @CODE
+
+# @ECLASS_VARIABLE: JAVADOC_SRC_DIRS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An array of directories relative to ${S} which contain the sources of
+# the application. It needs to be set in global scope; setting it inside
+# src_compile() would not work.
+# It is needed by the java-pkg-simple.eclass to decide whether to call ejavadoc
+# or not. If this variable is defined then java-pkg-simple_src_compile will not
+# call ejavadoc automatically. ejavadoc then has to be called explicitly from
+# the ebuild. It is meant for usage in multi-jar packages in order to avoid an
+# extra compilation run only for producing the javadocs.
+#
+# @CODE
+# Example:
+# JAVADOC_SRC_DIRS=(
+# "${PN}-core"
+# "${PN}-jsch"
+# "${PN}-pageant"
+# "${PN}-sshagent"
+# "${PN}-usocket-jna"
+# "${PN}-usocket-nc"
+# "${PN}-connector-factory"
+# )
+# @CODE
+
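A hedged sketch of how an ebuild might combine JAVADOC_CLASSPATH and JAVADOC_SRC_DIRS; the directory and package names are illustrative only:

    # illustrative multi-jar ebuild snippet
    JAVADOC_CLASSPATH="jsch"
    JAVADOC_SRC_DIRS=( "${PN}-core" "${PN}-jsch" )

    src_compile() {
        java-pkg-simple_src_compile    # skips javadoc because JAVADOC_SRC_DIRS is set
        use doc && ejavadoc            # one javadoc run over all listed directories
    }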
# TODO document me
JAVA_PKG_QA_VIOLATIONS=0
@@ -316,7 +371,6 @@ java-pkg_rm_files() {
[[ ! -f "${filename}" ]] && die "${filename} is not a regular file. Aborting."
einfo "Removing unneeded file ${filename}"
rm -f "${S}/${filename}" || die "cannot remove ${filename}"
- eend $?
done
}
@@ -563,7 +617,7 @@ java-pkg_regso() {
java-pkg_append_ JAVA_PKG_LIBRARY "/${target_dir#${D}}"
# Check the path of the lib relative to ${D}
elif [[ -e "${D}${lib}" ]]; then
- target_dir="$(java-pkg_expand_dir_ ${D}${lib})"
+ target_dir="$(java-pkg_expand_dir_ "${D}${lib}")"
java-pkg_append_ JAVA_PKG_LIBRARY "${target_dir}"
else
die "${lib} does not exist"
@@ -769,7 +823,7 @@ java-pkg_dosrc() {
# @FUNCTION: java-pkg_dolauncher
# @USAGE: <filename> [options]
# @DESCRIPTION:
-# Make a wrapper script to lauch/start this package
+# Make a wrapper script to launch/start this package
# If necessary, the wrapper will switch to the appropriate VM.
#
# Can be called without parameters if the package installs only one jar
@@ -910,7 +964,7 @@ java-pkg_recordjavadoc()
debug-print-function ${FUNCNAME} $*
# the find statement is important
# as some packages include multiple trees of javadoc
- JAVADOC_PATH="$(find ${D}/usr/share/doc/ -name allclasses-frame.html -printf '%h:')"
+ JAVADOC_PATH="$(find "${D}"/usr/share/doc/ -name allclasses-frame.html -printf '%h:')"
# remove $D - TODO: check this is ok with all cases of the above
JAVADOC_PATH="${JAVADOC_PATH//${D}}"
if [[ -n "${JAVADOC_PATH}" ]] ; then
@@ -939,7 +993,7 @@ java-pkg_recordjavadoc()
# Example: get a specific jar from xerces slot 2
# java-pkg_jar-from xerces-2 xml-apis.jar
#
-# Example: get a specific jar from xerces slot 2, and name it diffrently
+# Example: get a specific jar from xerces slot 2, and name it differently
# java-pkg_jar-from xerces-2 xml-apis.jar xml.jar
#
# Example: get junit.jar which is needed only for building
@@ -1350,7 +1404,7 @@ java-pkg_register-optional-dependency() {
# @DESCRIPTION:
# Register an arbitrary environment variable into package.env. The gjl launcher
# for this package or any package depending on this will export it into
-# environement before executing java command.
+# environment before executing java command.
# Must only be called in src_install phase.
JAVA_PKG_EXTRA_ENV="${T}/java-pkg-extra-env"
JAVA_PKG_EXTRA_ENV_VARS=""
@@ -1689,16 +1743,6 @@ java-pkg_get-jni-cflags() {
echo ${flags}
}
-java-pkg_ensure-gcj() {
- # was enforcing sys-devel/gcc[gcj]
- die "${FUNCNAME} was removed. Use use-deps available as of EAPI 2 instead. #261562"
-}
-
-java-pkg_ensure-test() {
- # was enforcing USE=test if FEATURES=test
- die "${FUNCNAME} was removed. Package mangers handle this already. #278965"
-}
-
# @FUNCTION: java-pkg_register-ant-task
# @USAGE: [--version x.y] [<name>]
# @DESCRIPTION:
@@ -1872,13 +1916,17 @@ ejunit4() {
# @CODE
# $1 - -cp or -classpath
# $2 - the classpath passed to it
-# $@ - test classes for testng to run.
+# $@ - test classes or testng.xml for testng to run.
# @CODE
etestng() {
debug-print-function ${FUNCNAME} $*
local runner=org.testng.TestNG
- local cp=$(java-pkg_getjars --with-dependencies testng)
+ if [[ ${PN} != testng ]]; then
+ local cp=$(java-pkg_getjars --with-dependencies testng)
+ else
+ local cp=testng.jar
+ fi
local tests
if [[ ${1} = -cp || ${1} = -classpath ]]; then
@@ -1896,14 +1944,24 @@ etestng() {
-cp ${cp}
-Djava.io.tmpdir="${T}"
-Djava.awt.headless=true
+ -Dtest.resources.dir="${JAVA_TEST_RESOURCE_DIRS}"
${JAVA_TEST_EXTRA_ARGS[@]}
${runner}
${JAVA_TEST_RUNNER_EXTRA_ARGS[@]}
)
- [[ ! "${JAVA_TEST_RUNNER_EXTRA_ARGS[@]}" =~ "-usedefaultlisteners" ]] && args+=( -usedefaultlisteners false )
+ if [[ ! "${JAVA_TEST_RUNNER_EXTRA_ARGS[@]}" =~ "-usedefaultlisteners" ]]; then
+ args+=(
+ -verbose 3
+ -usedefaultlisteners true
+ )
+ fi
- args+=( -testclass ${tests} )
+ if [[ "${test%.xml}" == "${test}" ]]; then
+ args+=( -testclass ${tests} )
+ else
+ args+=( ${tests%,} )
+ fi
debug-print "java ${args[@]}"
java ${args[@]} || die "Running TestNG failed."
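Two hedged invocation sketches for the updated runner; the class names and suite path are placeholders:

    # run individual test classes
    etestng -cp "${classpath}" org.example.FooTest org.example.BarTest

    # or hand TestNG a suite definition instead of class names
    etestng -cp "${classpath}" src/test/resources/testng.xml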
@@ -1914,13 +1972,9 @@ etestng() {
# src_prepare Searches for bundled jars
# Don't call directly, but via java-pkg-2_src_prepare!
java-utils-2_src_prepare() {
- case ${EAPI:-0} in
- 5)
- java-pkg_func-exists java_prepare && java_prepare ;;
- *)
- java-pkg_func-exists java_prepare &&
- eqawarn "java_prepare is no longer called, define src_prepare instead."
- eapply_user ;;
+ case ${EAPI} in
+ [678]) eapply_user ;;
+ *) default_src_prepare ;;
esac
# Check for files in JAVA_RM_FILES array.
@@ -2141,9 +2195,27 @@ ejavadoc() {
einfo "javadoc ${javadoc_args} ${@}"
fi
- local args=( javadoc ${javadoc_args} "${@}" )
- echo "${args[@]}" >&2
- "${args[@]}" || die "ejavadoc failed"
+ if [[ "${JAVADOC_SRC_DIRS[@]}" ]]; then
+ mkdir -p target/api || die "cannot create target/api"
+ local dependency
+ for dependency in ${JAVADOC_CLASSPATH}; do
+ classpath="${classpath}:$(java-pkg_getjars \
+ --build-only \
+ --with-dependencies \
+ ${dependency})"
+ done
+ find "${JAVADOC_SRC_DIRS[@]}" -name '*.java' > sources
+ javadoc \
+ "${javadoc_args}" \
+ -d target/api \
+ -cp "${classpath}" \
+ -quiet \
+ @sources || die "ejavadoc failed"
+ else
+ local args=( javadoc ${javadoc_args} "${@}" )
+ echo "${args[@]}" >&2
+ "${args[@]}" || die "ejavadoc failed"
+ fi
}
# @FUNCTION: java-pkg_filter-compiler
@@ -2369,7 +2441,7 @@ java-pkg_init-compiler_() {
}
-# @FUNCTION: init_paths_
+# @FUNCTION: java-pkg_init_paths_
# @INTERNAL
# @DESCRIPTION:
# Initializes some variables that will be used. These variables are mostly used
@@ -2421,6 +2493,9 @@ java-pkg_do_write_() {
echo "SLOT=\"${SLOT}\""
echo "CATEGORY=\"${CATEGORY}\""
echo "PVR=\"${PVR}\""
+ # Record LIBDIR so that gjl can set java.library.path
+ # accordingly. Bug #917326.
+ echo "LIBDIR=\"$(get_libdir)\""
[[ -n "${JAVA_PKG_CLASSPATH}" ]] && echo "CLASSPATH=\"${JAVA_PKG_CLASSPATH}\""
[[ -n "${JAVA_PKG_LIBRARY}" ]] && echo "LIBRARY_PATH=\"${JAVA_PKG_LIBRARY}\""
@@ -2619,7 +2694,7 @@ java-pkg_get-vm-version() {
# @RETURN: VM handle of an available JDK
# @DESCRIPTION:
# Selects a build vm from a list of vm handles. First checks for the system-vm
-# beeing usable, then steps through the listed handles till a suitable vm is
+# being usable, then steps through the listed handles till a suitable vm is
# found.
#
java-pkg_build-vm-from-handle() {
@@ -2922,11 +2997,13 @@ is-java-strict() {
# @FUNCTION: java-pkg_clean
# @DESCRIPTION:
# Java package cleaner function. This will remove all *.class and *.jar
-# files, removing any bundled dependencies.
+# files, except those specified by expressions in JAVA_PKG_NO_CLEAN.
java-pkg_clean() {
- if [[ -z "${JAVA_PKG_NO_CLEAN}" ]]; then
- find "${@}" '(' -name '*.class' -o -name '*.jar' ')' -type f -delete -print || die
- fi
+ NO_DELETE=()
+ for keep in ${JAVA_PKG_NO_CLEAN[@]}; do
+ NO_DELETE+=( '!' '-path' ${keep} )
+ done
+ find "${@}" '(' -name '*.class' -o -name '*.jar' ${NO_DELETE[@]} ')' -type f -delete -print || die
}
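With the JAVA_PKG_NO_CLEAN example from the documentation above, the find invocation assembled here would expand along these lines (mirroring the code literally):

    find . '(' -name '*.class' -o -name '*.jar' \
        '!' -path '*/standard.jar' '!' -path '*/launch4j.jar' ')' \
        -type f -delete -print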
# @FUNCTION: java-pkg_gen-cp
diff --git a/eclass/java-virtuals-2.eclass b/eclass/java-virtuals-2.eclass
deleted file mode 100644
index d827342e9ba2..000000000000
--- a/eclass/java-virtuals-2.eclass
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright 1999-2022 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-# @ECLASS: java-virtuals-2.eclass
-# @MAINTAINER:
-# java@gentoo.org
-# @AUTHOR:
-# Original Author: Alistair John Bush <ali_bush@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
-# @BLURB: Java virtuals eclass
-# @DESCRIPTION:
-# To provide a default (and only) src_install function for ebuilds in the
-# java-virtuals category.
-
-case ${EAPI:-0} in
- [5678]) ;;
- *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
-esac
-
-if [[ -z ${_JAVA_VIRTUALS_2_ECLASS} ]] ; then
-_JAVA_VIRTUALS_2_ECLASS=1
-
-inherit java-utils-2
-
-DEPEND="dev-java/java-config"
-RDEPEND="${DEPEND}"
-
-S="${WORKDIR}"
-
-# @FUNCTION: java-virtuals-2_src_install
-# @DESCRIPTION:
-# default src_install
-
-java-virtuals-2_src_install() {
- java-virtuals-2_do_write
-}
-
-# @FUNCTION: java-pkg_do_virtuals_write
-# @INTERNAL
-# @DESCRIPTION:
-# Writes the virtual env file out to disk.
-
-java-virtuals-2_do_write() {
- java-pkg_init_paths_
-
- dodir "${JAVA_PKG_VIRTUALS_PATH}"
- {
- if [[ -n "${JAVA_VIRTUAL_PROVIDES}" ]]; then
- echo "PROVIDERS=\"${JAVA_VIRTUAL_PROVIDES}\""
- fi
-
- if [[ -n "${JAVA_VIRTUAL_VM}" ]]; then
- echo "VM=\"${JAVA_VIRTUAL_VM}\""
- fi
-
- if [[ -n "${JAVA_VIRTUAL_VM_CLASSPATH}" ]]; then
- echo "VM_CLASSPATH=\"${JAVA_VIRTUAL_VM_CLASSPATH}\""
- fi
- echo "MULTI_PROVIDER=\"${JAVA_VIRTUAL_MULTI=FALSE}\""
- } > "${JAVA_PKG_VIRTUAL_PROVIDER}"
-}
-
-fi
-
-EXPORT_FUNCTIONS src_install
diff --git a/eclass/java-vm-2.eclass b/eclass/java-vm-2.eclass
index 7c63e63fad76..e5d3159f2854 100644
--- a/eclass/java-vm-2.eclass
+++ b/eclass/java-vm-2.eclass
@@ -1,29 +1,32 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-vm-2.eclass
# @MAINTAINER:
# java@gentoo.org
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Java Virtual Machine eclass
# @DESCRIPTION:
# This eclass provides functionality which assists with installing
# virtual machines, and ensures that they are recognized by java-config.
-case ${EAPI:-0} in
- [678]) ;;
- *) die "EAPI=${EAPI} is not supported" ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-inherit multilib pax-utils prefix xdg-utils
+if [[ -z ${_JAVA_VM_2_ECLASS} ]]; then
+_JAVA_VM_2_ECLASS=1
-EXPORT_FUNCTIONS pkg_setup pkg_postinst pkg_prerm pkg_postrm
+inherit multilib pax-utils prefix xdg-utils
RDEPEND="
dev-java/java-config
app-eselect/eselect-java
"
DEPEND="${RDEPEND}"
+BDEPEND="app-arch/unzip"
+IDEPEND="app-eselect/eselect-java"
export WANT_JAVA_CONFIG=2
@@ -83,14 +86,32 @@ java-vm-2_pkg_postinst() {
xdg_desktop_database_update
}
+# @FUNCTION: has_eselect_java-vm_update
+# @INTERNAL
+# @DESCRIPTION:
+# Checks if an eselect-java version providing "eselect java-vm update"
+# is available.
+# @RETURN: 0 if >=app-eselect/eselect-java-0.5 is installed, 1 otherwise.
+has_eselect_java-vm_update() {
+ local has_version_args="-b"
+
+ has_version "${has_version_args}" ">=app-eselect/eselect-java-0.5"
+}
# @FUNCTION: java-vm-2_pkg_prerm
# @DESCRIPTION:
# default pkg_prerm
#
-# Warn user if removing system-vm.
+# Does nothing if eselect-java-0.5 or newer is available. Otherwise,
+# warn the user if removing the system-vm.
java-vm-2_pkg_prerm() {
+ if has_eselect_java-vm_update; then
+ # We will potentially switch to a new Java system VM in
+ # pkg_postrm().
+ return
+ fi
+
if [[ $(GENTOO_VM= java-config -f 2>/dev/null) == ${VMHANDLE} && -z ${REPLACED_BY_VERSION} ]]; then
ewarn "It appears you are removing your system-vm! Please run"
ewarn "\"eselect java-vm list\" to list available VMs, then use"
@@ -103,10 +124,14 @@ java-vm-2_pkg_prerm() {
# @DESCRIPTION:
# default pkg_postrm
#
-# Update mime database.
+# Invoke "eselect java-vm update" if eselect-java 0.5, or newer, is
+# available. Also update the mime database.
java-vm-2_pkg_postrm() {
xdg_desktop_database_update
+ if has_eselect_java-vm_update; then
+ eselect java-vm update
+ fi
}
@@ -312,10 +337,14 @@ java-vm_sandbox-predict() {
[[ -z "${1}" ]] && die "${FUNCNAME} takes at least one argument"
local path path_arr=("$@")
- # subshell this to prevent IFS bleeding out dependant on bash version.
+ # subshell this to prevent IFS bleeding out dependent on bash version.
# could use local, which *should* work, but that requires a lot of testing.
path=$(IFS=":"; echo "${path_arr[*]}")
dodir /etc/sandbox.d
echo "SANDBOX_PREDICT=\"${path}\"" > "${ED}/etc/sandbox.d/20${VMHANDLE}" \
|| die "Failed to write sandbox control file"
}
+
+fi
+
+EXPORT_FUNCTIONS pkg_setup pkg_postinst pkg_prerm pkg_postrm
diff --git a/eclass/kde.org.eclass b/eclass/kde.org.eclass
index b38ca5a024ab..b9d68ccbf5c2 100644
--- a/eclass/kde.org.eclass
+++ b/eclass/kde.org.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: kde.org.eclass
# @MAINTAINER:
# kde@gentoo.org
-# @SUPPORTED_EAPIS: 7 8
+# @SUPPORTED_EAPIS: 8
# @BLURB: Support eclass for packages that are hosted on kde.org infrastructure.
# @DESCRIPTION:
# This eclass is mainly providing facilities for the three upstream release
@@ -16,8 +16,8 @@
# particular build system.
case ${EAPI} in
- 7|8) ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_KDE_ORG_ECLASS} ]]; then
@@ -84,7 +84,7 @@ readonly KDE_ORG_CATEGORIES
# @DESCRIPTION:
# If unset, default value is mapped from ${CATEGORY} to corresponding upstream
# category on invent.kde.org, with "kde" as fallback value.
-: ${KDE_ORG_CATEGORY:=${KDE_ORG_CATEGORIES[${CATEGORY}]:-kde}}
+: "${KDE_ORG_CATEGORY:=${KDE_ORG_CATEGORIES[${CATEGORY}]:-kde}}"
# @ECLASS_VARIABLE: KDE_ORG_COMMIT
# @PRE_INHERIT
@@ -98,20 +98,13 @@ readonly KDE_ORG_CATEGORIES
# @PRE_INHERIT
# @DESCRIPTION:
# If unset, default value is set to ${PN}.
-# Name of the package as hosted on kde.org mirrors.
-: ${KDE_ORG_NAME:=$PN}
+# Name of the package (repository) as hosted on invent.kde.org.
+: "${KDE_ORG_NAME:=$PN}"
-# @ECLASS_VARIABLE: KDE_GEAR
-# @PRE_INHERIT
+# @ECLASS_VARIABLE: KDE_ORG_SCHEDULE_URI
# @DESCRIPTION:
-# Mark package is being part of KDE Gear release schedule.
-# By default, this is set to "false" and does nothing.
-# If CATEGORY equals kde-apps, this is automatically set to "true".
-# If set to "true", set SRC_URI accordingly and apply KDE_UNRELEASED.
-: ${KDE_GEAR:=false}
-if [[ ${CATEGORY} == kde-apps ]]; then
- KDE_GEAR=true
-fi
+# Known schedule URI of package or release group.
+: "${KDE_ORG_SCHEDULE_URI:="https://community.kde.org/Schedules"}"
# @ECLASS_VARIABLE: KDE_SELINUX_MODULE
# @PRE_INHERIT
@@ -119,7 +112,16 @@ fi
# If set to "none", do nothing.
# For any other value, add selinux to IUSE, and depending on that useflag
# add a dependency on sec-policy/selinux-${KDE_SELINUX_MODULE} to (R)DEPEND.
-: ${KDE_SELINUX_MODULE:=none}
+: "${KDE_SELINUX_MODULE:=none}"
+
+# @ECLASS_VARIABLE: KDE_ORG_TAR_PN
+# @PRE_INHERIT
+# @DESCRIPTION:
+# If unset, default value is set to ${KDE_ORG_NAME}.
+# Filename sans version of the tarball as hosted on kde.org download mirrors.
+# This is used e.g. when upstream's tarball name differs from the repository name,
+# especially after repository moves.
+: "${KDE_ORG_TAR_PN:=$KDE_ORG_NAME}"
case ${KDE_SELINUX_MODULE} in
none) ;;
@@ -129,13 +131,21 @@ case ${KDE_SELINUX_MODULE} in
;;
esac
-# @ECLASS_VARIABLE: KDE_UNRELEASED
+# @ECLASS_VARIABLE: KDE_PV_UNRELEASED
# @INTERNAL
+# @DEFAULT_UNSET
# @DESCRIPTION:
-# An array of $CATEGORY-$PV pairs of packages that are unreleased upstream.
+# An array of package versions that are unreleased upstream.
# Any package matching this will have fetch restriction enabled, and receive
# a proper error message via pkg_nofetch.
-KDE_UNRELEASED=( )
+
+# @ECLASS_VARIABLE: KDE_ORG_UNRELEASED
+# @DESCRIPTION:
+# If set to "true" fetch restriction will be enabled, and a proper error
+# message displayed via pkg_nofetch.
+KDE_ORG_UNRELEASED=false
+has ${PV} "${KDE_PV_UNRELEASED[*]}" && KDE_ORG_UNRELEASED=true
+[[ ${KDE_ORG_UNRELEASED} == true ]] && RESTRICT+=" fetch"
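A hedged illustration of how the renamed variables interact; the version below is invented:

    # set before this eclass logic runs, e.g. by a release-group eclass
    KDE_PV_UNRELEASED=( 1.2.80 )
    # For a matching ${PV}, kde.org.eclass then sets KDE_ORG_UNRELEASED=true,
    # adds RESTRICT+=" fetch", and pkg_nofetch points at ${KDE_ORG_SCHEDULE_URI}.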
# @ECLASS_VARIABLE: EGIT_MIRROR
# @DESCRIPTION:
@@ -149,143 +159,36 @@ KDE_UNRELEASED=( )
HOMEPAGE="https://kde.org/"
-case ${CATEGORY} in
- dev-qt)
- KDE_ORG_NAME=${QT5_MODULE:-${PN}}
- HOMEPAGE="https://community.kde.org/Qt5PatchCollection
- https://invent.kde.org/qt/qt/ https://www.qt.io/"
- ;;
- kde-plasma)
- HOMEPAGE="https://kde.org/plasma-desktop"
- ;;
- kde-frameworks)
- HOMEPAGE="https://kde.org/products/frameworks/"
- SLOT=5/${PV}
- [[ ${KDE_BUILD_TYPE} == release ]] && SLOT=$(ver_cut 1)/$(ver_cut 1-2)
- ;;
- *) ;;
-esac
-
-# @FUNCTION: _kde.org_is_unreleased
-# @INTERNAL
-# @DESCRIPTION:
-# Return true if $CATEGORY-$PV matches against an entry in KDE_UNRELEASED array.
-_kde.org_is_unreleased() {
- local pair
- for pair in "${KDE_UNRELEASED[@]}" ; do
- if [[ "${pair}" == "${CATEGORY}-${PV}" ]]; then
- return 0
- elif [[ ${KDE_GEAR} == true ]]; then
- if [[ "${pair/kde-apps/${CATEGORY}}" == "${CATEGORY}-${PV}" ]]; then
- return 0
- fi
- fi
- done
-
- return 1
-}
-
-# @FUNCTION: _kde.org_calculate_src_uri
-# @INTERNAL
-# @DESCRIPTION:
-# Determine fetch location for released tarballs
-_kde.org_calculate_src_uri() {
- debug-print-function ${FUNCNAME} "$@"
-
- local _src_uri="mirror://kde/"
-
- if [[ ${KDE_GEAR} == true ]]; then
- case ${PV} in
- ??.??.[6-9]? )
- _src_uri+="unstable/release-service/${PV}/src/"
- RESTRICT+=" mirror"
- ;;
- *) _src_uri+="stable/release-service/${PV}/src/" ;;
- esac
- fi
-
- case ${CATEGORY} in
- kde-frameworks)
- _src_uri+="stable/frameworks/$(ver_cut 1-2)/"
- case ${PN} in
- countryflags | \
- kdelibs4support | \
- kdesignerplugin | \
- kdewebkit | \
- khtml | \
- kjs | \
- kjsembed | \
- kmediaplayer | \
- kross | \
- kxmlrpcclient)
- _src_uri+="portingAids/"
- ;;
- esac
- ;;
- kde-plasma)
- case ${PV} in
- 5.??.[6-9]?* )
- _src_uri+="unstable/plasma/$(ver_cut 1-3)/"
- RESTRICT+=" mirror"
- ;;
- *) _src_uri+="stable/plasma/$(ver_cut 1-3)/" ;;
- esac
- ;;
- esac
-
- if [[ ${PN} == kdevelop* && ${PV} == 5.6.2 ]]; then
- _src_uri+="stable/kdevelop/${PV}/src/"
- fi
-
- if [[ -n ${KDE_ORG_COMMIT} ]]; then
- SRC_URI="https://invent.kde.org/${KDE_ORG_CATEGORY}/${KDE_ORG_NAME}/-/"
- SRC_URI+="archive/${KDE_ORG_COMMIT}/${KDE_ORG_NAME}-${KDE_ORG_COMMIT}.tar.gz"
- SRC_URI+=" -> ${KDE_ORG_NAME}-${PV}-${KDE_ORG_COMMIT:0:8}.tar.gz"
- else
- SRC_URI="${_src_uri}${KDE_ORG_NAME}-${PV}.tar.xz"
- fi
-
- if _kde.org_is_unreleased ; then
- RESTRICT+=" fetch"
- fi
-}
-
-# @FUNCTION: _kde.org_calculate_live_repo
-# @INTERNAL
-# @DESCRIPTION:
-# Determine fetch location for live sources
-_kde.org_calculate_live_repo() {
- debug-print-function ${FUNCNAME} "$@"
-
- SRC_URI=""
-
- EGIT_MIRROR=${EGIT_MIRROR:=https://invent.kde.org/${KDE_ORG_CATEGORY}}
-
- if [[ ${PV} == 5.??(.?)*.9999 && ${CATEGORY} == dev-qt ]]; then
- EGIT_BRANCH="kde/$(ver_cut 1-2)"
- fi
-
- if [[ ${PV} == ??.??.49.9999 && ${KDE_GEAR} == true ]]; then
- EGIT_BRANCH="release/$(ver_cut 1-2)"
- fi
-
- if [[ ${PV} != 9999 && ${CATEGORY} == kde-plasma ]]; then
- EGIT_BRANCH="Plasma/$(ver_cut 1-2)"
- fi
-
- EGIT_REPO_URI="${EGIT_MIRROR}/${EGIT_REPONAME:=$KDE_ORG_NAME}.git"
-}
+if [[ ${CATEGORY} == dev-qt ]]; then
+ KDE_ORG_NAME=${QT5_MODULE:-${PN}}
+ HOMEPAGE="https://community.kde.org/Qt5PatchCollection
+ https://invent.kde.org/qt/qt/ https://www.qt.io/"
+fi
case ${KDE_BUILD_TYPE} in
- live) _kde.org_calculate_live_repo ;;
+ live)
+ EGIT_MIRROR=${EGIT_MIRROR:=https://invent.kde.org/${KDE_ORG_CATEGORY}}
+ EGIT_REPO_URI="${EGIT_MIRROR}/${EGIT_REPONAME:=$KDE_ORG_NAME}.git"
+
+ if [[ ${PV} == 5.15.*.9999 && ${CATEGORY} == dev-qt ]]; then
+ EGIT_BRANCH="kde/$(ver_cut 1-2)"
+ fi
+ ;;
*)
- _kde.org_calculate_src_uri
+ if [[ -n ${KDE_ORG_COMMIT} ]]; then
+ _KDE_ORG_TARFILE="${KDE_ORG_NAME}-${PV}-${KDE_ORG_COMMIT:0:8}.tar.gz"
+ SRC_URI="mirror://gentoo/${_KDE_ORG_TARFILE}"
+ SRC_URI+=" https://invent.kde.org/${KDE_ORG_CATEGORY}/${KDE_ORG_NAME}/-/"
+ SRC_URI+="archive/${KDE_ORG_COMMIT}/${KDE_ORG_NAME}-${KDE_ORG_COMMIT}.tar.gz"
+ SRC_URI+=" -> ${_KDE_ORG_TARFILE}"
+ fi
+ [[ ${KDE_ORG_UNRELEASED} == true ]] && RESTRICT+=" fetch"
debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: SRC_URI is ${SRC_URI}"
if [[ -n ${KDE_ORG_COMMIT} ]]; then
S=${WORKDIR}/${KDE_ORG_NAME}-${KDE_ORG_COMMIT}
[[ ${CATEGORY} == dev-qt ]] && QT5_BUILD_DIR="${S}_build"
else
- S=${WORKDIR}/${KDE_ORG_NAME}-${PV}
+ S=${WORKDIR}/${KDE_ORG_TAR_PN}-${PV}
fi
;;
esac
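For a hypothetical commit snapshot (names and hash invented), the branch above would yield values roughly like:

    # KDE_ORG_NAME="foo"  PV="1.2.3"  KDE_ORG_COMMIT="0123456789abcdef..."
    # _KDE_ORG_TARFILE = foo-1.2.3-01234567.tar.gz
    # SRC_URI          = mirror://gentoo/foo-1.2.3-01234567.tar.gz plus the
    #                    invent.kde.org archive URL renamed to the same file
    # S                = ${WORKDIR}/foo-${KDE_ORG_COMMIT}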
@@ -296,20 +199,7 @@ esac
# KDE_UNRELEASED, display a giant warning that the package has not yet been
# released upstream and should not be used.
kde.org_pkg_nofetch() {
- if ! _kde.org_is_unreleased ; then
- return
- fi
-
- local sched_uri="https://community.kde.org/Schedules"
- case ${CATEGORY} in
- kde-frameworks) sched_uri+="/Frameworks" ;;
- kde-plasma) sched_uri+="/Plasma_5" ;;
- *)
- [[ ${KDE_GEAR} == true ]] &&
- sched_uri+="/KDE_Gear_$(ver_cut 1-2)_Schedule"
- ;;
- esac
-
+ [[ ${KDE_ORG_UNRELEASED} == true ]] || return
eerror " _ _ _ _ ____ _____ _ _____ _ ____ _____ ____ "
eerror "| | | | \ | | _ \| ____| | | ____| / \ / ___|| ____| _ \ "
eerror "| | | | \| | |_) | _| | | | _| / _ \ \___ \| _| | | | |"
@@ -329,7 +219,7 @@ kde.org_pkg_nofetch() {
eerror ""
eerror "Please consult the upstream release schedule to see when this "
eerror "package is scheduled to be released:"
- eerror "${sched_uri}"
+ eerror "${KDE_ORG_SCHEDULE_URI}"
}
# @FUNCTION: kde.org_src_unpack
diff --git a/eclass/kernel-2.eclass b/eclass/kernel-2.eclass
index b3fb5cef76c3..ffbe986f3e87 100644
--- a/eclass/kernel-2.eclass
+++ b/eclass/kernel-2.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: kernel-2.eclass
@@ -22,11 +22,11 @@
# @DESCRIPTION:
# Utilized for 32-bit userland on ppc64.
-# @ECLASS_VARIABLE: CKV
+# @ECLASS_VARIABLE: CKV
# @DEFAULT_UNSET
# @DESCRIPTION:
# Used as a comparison kernel version, which is used when
-# PV doesnt reflect the genuine kernel version.
+# PV doesn't reflect the genuine kernel version.
# This gets set to the portage style versioning. ie:
# CKV=2.6.11_rc4
@@ -87,7 +87,7 @@
# @ECLASS_VARIABLE: K_EXTRAEINFO
# @DEFAULT_UNSET
# @DESCRIPTION:
-# this is a new-line seperated list of einfo displays in
+# this is a new-line separated list of einfo displays in
# postinst and can be used to carry additional postinst
# messages
@@ -171,7 +171,7 @@
# @ECLASS_VARIABLE: K_SYMLINK
# @DEFAULT_UNSET
# @DESCRIPTION:
-# if this is set, then forcably create symlink anyway
+# if this is set, then forcibly create symlink anyway
# @ECLASS_VARIABLE: K_USEPV
# @DEFAULT_UNSET
@@ -187,36 +187,36 @@
# Apply genpatches to kernel source. Provide any
# combination of "base", "extras" or "experimental".
-# @ECLASS_VARIABLE: KERNEL_URI
+# @ECLASS_VARIABLE: KERNEL_URI
# @DEFAULT_UNSET
# @DESCRIPTION:
# Upstream kernel src URI
-# @ECLASS_VARIABLE: KV
+# @ECLASS_VARIABLE: KV
# @DEFAULT_UNSET
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# Kernel Version (2.6.0-gentoo/2.6.0-test11-gentoo-r1)
-# @ECLASS_VARIABLE: KV_FULL
+# @ECLASS_VARIABLE: KV_FULL
# @DEFAULT_UNSET
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# Kernel full version
-# @ECLASS_VARIABLE: KV_MAJOR
+# @ECLASS_VARIABLE: KV_MAJOR
# @DEFAULT_UNSET
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# Kernel major version from <KV_MAJOR>.<KV_MINOR>.<KV_PATCH
-# @ECLASS_VARIABLE: KV_MINOR
+# @ECLASS_VARIABLE: KV_MINOR
# @DEFAULT_UNSET
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# Kernel minor version from <KV_MAJOR>.<KV_MINOR>.<KV_PATCH
-# @ECLASS_VARIABLE: KV_PATCH
+# @ECLASS_VARIABLE: KV_PATCH
# @DEFAULT_UNSET
# @OUTPUT_VARIABLE
# @DESCRIPTION:
@@ -227,12 +227,12 @@
# @DESCRIPTION:
# Default cflags if not already set
-# @ECLASS_VARIABLE: OKV
+# @ECLASS_VARIABLE: OKV
# @DEFAULT_UNSET
# @DESCRIPTION:
# Original Kernel Version (2.6.0/2.6.0-test11)
-# @ECLASS_VARIABLE: RELEASE
+# @ECLASS_VARIABLE: RELEASE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Representative of the kernel release tag (-rc3/-git3)
@@ -251,7 +251,7 @@
# @ECLASS_VARIABLE: UNIPATCH_EXCLUDE
# @DEFAULT_UNSET
# @DESCRIPTION:
-# An addition var to support exlusion based completely
+# An additional variable to support exclusion based completely
# on "<passedstring>*" and not "<passedno#>_*"
# this should _NOT_ be used from the ebuild as this is
# reserved for end users passing excludes from the cli
@@ -261,15 +261,15 @@
# @DESCRIPTION:
# space delimetered list of patches to be applied to the kernel
-# @ECLASS_VARIABLE: UNIPATCH_LIST_DEFAULT
+# @ECLASS_VARIABLE: UNIPATCH_LIST_DEFAULT
# @INTERNAL
# @DESCRIPTION:
# Upstream kernel patch archive
-# @ECLASS_VARIABLE: UNIPATCH_LIST_GENPATCHES
+# @ECLASS_VARIABLE: UNIPATCH_LIST_GENPATCHES
# @INTERNAL
# @DESCRIPTION:
-# List of genpatches archives to apply to the kernel
+# List of genpatches archives to apply to the kernel
# @ECLASS_VARIABLE: UNIPATCH_STRICTORDER
# @DEFAULT_UNSET
@@ -281,32 +281,28 @@
# If you do change them, there is a chance that we will not fix resulting bugs;
# that of course does not mean we're not willing to help.
-inherit estack toolchain-funcs
-
-case ${EAPI} in
- 7|8) ;;
- *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
-esac
-
# Added by Daniel Ostrow <dostrow@gentoo.org>
# This is an ugly hack to get around an issue with a 32-bit userland on ppc64.
# I will remove it when I come up with something more reasonable.
+# Alfred Persson Forsberg <cat@catcream.org>
+# Moved this above inherit as crossdev.eclass uses CHOST internally.
[[ ${PROFILE_ARCH} == ppc64 ]] && CHOST="powerpc64-${CHOST#*-}"
-export CTARGET=${CTARGET:-${CHOST}}
-if [[ ${CTARGET} == ${CHOST} && ${CATEGORY/cross-} != ${CATEGORY} ]]; then
- export CTARGET=${CATEGORY/cross-}
-fi
+inherit crossdev estack multiprocessing optfeature toolchain-funcs
+
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
HOMEPAGE="https://www.kernel.org/ https://wiki.gentoo.org/wiki/Kernel ${HOMEPAGE}"
-: ${LICENSE:="GPL-2"}
+: "${LICENSE:="GPL-2"}"
# No need to run scanelf/strip on kernel sources/headers (bug #134453).
RESTRICT="binchecks strip"
# set LINUX_HOSTCFLAGS if not already set
-: ${LINUX_HOSTCFLAGS:="-Wall -Wstrict-prototypes -Os -fomit-frame-pointer -I${S}/include"}
-
+: "${LINUX_HOSTCFLAGS:="-Wall -Wstrict-prototypes -Os -fomit-frame-pointer -I${S}/include"}"
# @FUNCTION: debug-print-kernel2-variables
# @USAGE:
@@ -377,7 +373,7 @@ handle_genpatches() {
UNIPATCH_LIST_GENPATCHES+=" ${DISTDIR}/${tarball}"
debug-print "genpatches tarball: ${tarball}"
fi
- GENPATCHES_URI+=" ${use_cond_start}$(echo https://dev.gentoo.org/~{alicef,mpagano,whissi}/dist/genpatches/${tarball})${use_cond_end}"
+ GENPATCHES_URI+=" ${use_cond_start}$(echo https://dev.gentoo.org/~{alicef,mpagano}/dist/genpatches/${tarball})${use_cond_end}"
done
}
@@ -393,7 +389,7 @@ detect_version() {
[[ -n ${KV_FULL} ]] && return 0
# CKV is used as a comparison kernel version, which is used when
- # PV doesnt reflect the genuine kernel version.
+ # PV doesn't reflect the genuine kernel version.
# this gets set to the portage style versioning. ie:
# CKV=2.6.11_rc4
CKV=${CKV:-${PV}}
@@ -487,7 +483,7 @@ detect_version() {
RELEASE=${RELEASE/_beta}
RELEASE=${RELEASE/_rc/-rc}
RELEASE=${RELEASE/_pre/-pre}
- # We cannot trivally call kernel_is here, because it calls us to detect the
+ # We cannot trivially call kernel_is here, because it calls us to detect the
# version
#kernel_is ge 2 6 && RELEASE=${RELEASE/-pre/-git}
(( KV_MAJOR * 1000 + ${KV_MINOR:-0} >= 2006 )) && RELEASE=${RELEASE/-pre/-git}
@@ -646,7 +642,7 @@ kernel_is() {
eq) operator="-eq"; shift;;
*) operator="-eq";;
esac
- [[ $# -gt 3 ]] && die "Error in kernel-2_kernel_is(): too many parameters"
+ [[ $# -gt 3 ]] && die "Error in ${ECLASS}_${FUNCNAME}(): too many parameters"
ver_test \
"${KV_MAJOR:-0}.${KV_MINOR:-0}.${KV_PATCH:-0}" \
@@ -656,42 +652,32 @@ kernel_is() {
# Capture the sources type and set DEPENDs
if [[ ${ETYPE} == sources ]]; then
- BDEPEND="!build? ( sys-apps/sed )"
RDEPEND="!build? (
- app-arch/cpio
+ app-alternatives/cpio
dev-lang/perl
- sys-devel/bc
+ app-alternatives/bc
+ dev-build/make
sys-devel/bison
sys-devel/flex
- sys-devel/make
>=sys-libs/ncurses-5.2
virtual/libelf
virtual/pkgconfig
)"
- SLOT="${PVR}"
+ SLOT=${SLOT:=${PVR}}
DESCRIPTION="Sources based on the Linux Kernel"
IUSE="symlink build"
# Bug #266157, deblob for libre support
if [[ -z ${K_PREDEBLOBBED} ]]; then
- # deblob less than 5.10 require python 2.7
- if kernel_is lt 5 10; then
- K_DEBLOB_AVAILABLE=0
- fi
if [[ ${K_DEBLOB_AVAILABLE} == 1 ]]; then
- PYTHON_COMPAT=( python3_{8..10} )
-
- inherit python-any-r1
-
IUSE="${IUSE} deblob"
# Reflect that kernels contain firmware blobs unless otherwise
# stripped. Starting with version 4.14, the whole firmware
# tree has been dropped from the kernel.
- kernel_is lt 4 14 && LICENSE+=" !deblob? ( linux-firmware )"
-
- BDEPEND+=" deblob? ( ${PYTHON_DEPS} )"
+ kernel_is lt 4 14 &&
+ LICENSE+=" !deblob? ( linux-fw-redistributable all-rights-reserved )"
if [[ -n KV_MINOR ]]; then
DEBLOB_PV="${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}"
@@ -721,7 +707,7 @@ if [[ ${ETYPE} == sources ]]; then
elif kernel_is lt 4 14; then
# Deblobbing is not available, so just mark kernels older
# than 4.14 as tainted with non-libre materials.
- LICENSE+=" linux-firmware"
+ LICENSE+=" linux-fw-redistributable all-rights-reserved"
fi
fi
@@ -756,25 +742,35 @@ cross_pre_c_headers() {
use headers-only && [[ ${CHOST} != ${CTARGET} ]]
}
-# @FUNCTION: env_setup_xmakeopts
+# @FUNCTION: env_setup_kernel_makeopts
# @USAGE:
# @DESCRIPTION:
-# set the ARCH/CROSS_COMPILE when cross compiling
+# Set the toolchain variables, as well as ARCH and CROSS_COMPILE when
+# cross-compiling.
-env_setup_xmakeopts() {
+env_setup_kernel_makeopts() {
# Kernel ARCH != portage ARCH
export KARCH=$(tc-arch-kernel)
# When cross-compiling, we need to set the ARCH/CROSS_COMPILE
# variables properly or bad things happen !
- xmakeopts="ARCH=${KARCH}"
+ KERNEL_MAKEOPTS=( ARCH="${KARCH}" )
if [[ ${CTARGET} != ${CHOST} ]] && ! cross_pre_c_headers; then
- xmakeopts="${xmakeopts} CROSS_COMPILE=${CTARGET}-"
+ KERNEL_MAKEOPTS+=( CROSS_COMPILE="${CTARGET}-" )
elif type -p ${CHOST}-ar >/dev/null; then
- xmakeopts="${xmakeopts} CROSS_COMPILE=${CHOST}-"
+ KERNEL_MAKEOPTS+=( CROSS_COMPILE="${CHOST}-" )
fi
- xmakeopts="${xmakeopts} HOSTCC=$(tc-getBUILD_CC) CC=$(tc-getCC) LD=$(tc-getLD) AR=$(tc-getAR) NM=$(tc-getNM) OBJCOPY=$(tc-getOBJCOPY) READELF=$(tc-getREADELF) STRIP=$(tc-getSTRIP)"
- export xmakeopts
+ KERNEL_MAKEOPTS+=(
+ HOSTCC="$(tc-getBUILD_CC)"
+ CC="$(tc-getCC)"
+ LD="$(tc-getLD)"
+ AR="$(tc-getAR)"
+ NM="$(tc-getNM)"
+ OBJCOPY="$(tc-getOBJCOPY)"
+ READELF="$(tc-getREADELF)"
+ STRIP="$(tc-getSTRIP)"
+ )
+ export KERNEL_MAKEOPTS
}
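Usage stays a one-liner for consumers; a rough sketch (mirroring the headers_install call below), with the array form preserving quoting of values that contain spaces:

    env_setup_kernel_makeopts
    emake headers_install INSTALL_HDR_PATH="${ED}/usr" "${KERNEL_MAKEOPTS[@]}"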
# @FUNCTION: universal_unpack
@@ -860,8 +856,8 @@ install_universal() {
install_headers() {
local ddir=$(kernel_header_destdir)
- env_setup_xmakeopts
- emake headers_install INSTALL_HDR_PATH="${ED}"${ddir}/.. ${xmakeopts}
+ env_setup_kernel_makeopts
+ emake headers_install INSTALL_HDR_PATH="${ED}"${ddir}/.. "${KERNEL_MAKEOPTS[@]}"
# let other packages install some of these headers
rm -rf "${ED}"${ddir}/scsi || die #glibc/uclibc/etc...
@@ -880,7 +876,7 @@ install_sources() {
dodir /usr/src
einfo ">>> Copying sources ..."
- file="$(find ${WORKDIR} -iname "docs" -type d)"
+ file="$(find "${WORKDIR}" -iname "docs" -type d)"
if [[ -n ${file} ]]; then
for file in $(find ${file} -type f); do
echo "${file//*docs\/}" >> "${S}"/patches.txt
@@ -891,7 +887,7 @@ install_sources() {
done
fi
- mv "${WORKDIR}"/linux* "${ED}"/usr/src || die
+ cp -R "${WORKDIR}"/linux* "${ED}"/usr/src || die
if [[ -n ${UNIPATCH_DOCS} ]]; then
for i in ${UNIPATCH_DOCS}; do
@@ -929,7 +925,7 @@ postinst_sources() {
# use deblob && \
# K_SECURITY_UNSUPPORTED=deblob
- # if we are to forcably symlink, delete it if it already exists first.
+ # if we are to forcibly symlink, delete it if it already exists first.
if [[ ${K_SYMLINK} -gt 0 ]]; then
if [[ -e ${EROOT}/usr/src/linux && ! -L ${EROOT}/usr/src/linux ]] ; then
die "${EROOT}/usr/src/linux exists and is not a symlink"
@@ -996,6 +992,9 @@ postinst_sources() {
fi
fi
fi
+
+ optfeature "versioned kernel image installation and optionally automating tasks such as generating an initramfs or unified kernel image" \
+ "sys-kernel/installkernel"
}
# pkg_setup functions
@@ -1025,7 +1024,7 @@ setup_headers() {
# Universal function that will apply patches to source
unipatch() {
- local i x y z extention PIPE_CMD UNIPATCH_DROP KPATCH_DIR PATCH_DEPTH ELINE
+ local i x y z extension PIPE_CMD UNIPATCH_DROP KPATCH_DIR PATCH_DEPTH ELINE
local STRICT_COUNT PATCH_LEVEL myLC_ALL myLANG
# set to a standard locale to ensure sorts are ordered properly.
@@ -1071,11 +1070,11 @@ unipatch() {
[[ ${i} == *:* ]] && elog ">>> Strict patch levels not currently supported for tarballed patchsets"
else
- extention=${i/*./}
- extention=${extention/:*/}
+ extension=${i/*./}
+ extension=${extension/:*/}
PIPE_CMD=""
- case ${extention} in
- xz) PIPE_CMD="xz -dc";;
+ case ${extension} in
+ xz) PIPE_CMD="xz -T$(makeopts_jobs) -dc";;
lzma) PIPE_CMD="lzma -dc";;
bz2) PIPE_CMD="bzip2 -dc";;
patch*) PIPE_CMD="cat";;
@@ -1088,7 +1087,7 @@ unipatch() {
PATCH_LEVEL=${i/*([^:])?(:)}
i=${i/:*/}
x=${i/*\//}
- x=${x/\.${extention}/}
+ x=${x/\.${extension}/}
if [[ -n ${PIPE_CMD} ]]; then
if [[ ! -r ${i} ]]; then
@@ -1155,7 +1154,7 @@ unipatch() {
UNIPATCH_DROP+=" 5011_enable-cpu-optimizations-for-gcc8.patch"
UNIPATCH_DROP+=" 5012_enable-cpu-optimizations-for-gcc91.patch"
UNIPATCH_DROP+=" 5013_enable-cpu-optimizations-for-gcc10.patch"
- if [[ ${GCC_MAJOR_VER} -lt 9 ]]; then
+ if [[ ${GCC_MAJOR_VER} -lt 9 ]] && ! tc-is-clang; then
UNIPATCH_DROP+=" 5010_enable-cpu-optimizations-universal.patch"
fi
# this legacy section should be targeted for removal
@@ -1201,14 +1200,14 @@ unipatch() {
fi
done
- #populate KPATCH_DIRS so we know where to look to remove the excludes
+ # Populate KPATCH_DIRS so we know where to look to remove the excludes
x=${KPATCH_DIR}
KPATCH_DIR=""
for i in $(find ${x} -type d | sort -n); do
KPATCH_DIR="${KPATCH_DIR} ${i}"
done
- #so now lets get rid of the patchno's we want to exclude
+ # So now lets get rid of the patch numbers we want to exclude
UNIPATCH_DROP="${UNIPATCH_EXCLUDE} ${UNIPATCH_DROP}"
for i in ${UNIPATCH_DROP}; do
ebegin "Excluding Patch #${i}"
@@ -1235,7 +1234,7 @@ unipatch() {
# addition of a file with the same name as the symlink in the #
# same location; this causes the dry-run to fail, see bug #507656. #
# #
- # https://bugs.gentoo.org/show_bug.cgi?id=507656 #
+ # https://bugs.gentoo.org/507656 #
####################################################################
if [[ -n ${K_NODRYRUN} ]]; then
ebegin "Applying ${i/*\//} (-p1)"
@@ -1343,7 +1342,7 @@ getfilevar() {
# @USAGE:
# @DESCRIPTION:
# This function sets ARCH_URI and ARCH_PATCH
-# with the neccessary info for the arch sepecific compatibility
+# with the necessary info for the arch specific compatibility
# patchsets.
detect_arch() {
@@ -1427,8 +1426,8 @@ kernel-2_src_unpack() {
[[ -z ${K_NOSETEXTRAVERSION} ]] && unpack_set_extraversion
unpack_fix_install_path
- # Setup xmakeopts and cd into sourcetree.
- env_setup_xmakeopts
+ # Setup KERNEL_MAKEOPTS and cd into sourcetree.
+ env_setup_kernel_makeopts
cd "${S}" || die
if [[ ${K_DEBLOB_AVAILABLE} == 1 ]] && use deblob; then
@@ -1470,8 +1469,10 @@ kernel-2_src_compile() {
cd "${S}" || die
if [[ ${K_DEBLOB_AVAILABLE} == 1 ]] && use deblob; then
+ einfo ">>> Patching deblob script for forcing awk ..."
+ sed -i '/check="\/bin\/sh $check"/a \ check="$check --use-awk"' \
+ "${T}/${DEBLOB_A}" || die "Failed to patch ${DEBLOB_A}"
einfo ">>> Running deblob script ..."
- python_setup
sh "${T}/${DEBLOB_A}" --force || die "Deblob script failed to run!!!"
fi
}
@@ -1498,7 +1499,7 @@ kernel-2_pkg_preinst() {
# @FUNCTION: kernel-2_src_install
# @USAGE:
# @DESCRIPTION:
-# Install headers or sources dependant on ETYPE
+# Install headers or sources dependent on ETYPE
kernel-2_src_install() {
install_universal
diff --git a/eclass/kernel-build.eclass b/eclass/kernel-build.eclass
index 0a49533e48d3..7922638be6e1 100644
--- a/eclass/kernel-build.eclass
+++ b/eclass/kernel-build.eclass
@@ -1,4 +1,4 @@
-# Copyright 2020-2022 Gentoo Authors
+# Copyright 2020-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: kernel-build.eclass
@@ -6,7 +6,7 @@
# Distribution Kernel Project <dist-kernel@gentoo.org>
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
-# @SUPPORTED_EAPIS: 7
+# @SUPPORTED_EAPIS: 8
# @PROVIDES: kernel-install
# @BLURB: Build mechanics for Distribution Kernels
# @DESCRIPTION:
@@ -20,30 +20,120 @@
# the kernel and installing it along with its modules and subset
# of sources needed to build external modules.
-if [[ ! ${_KERNEL_BUILD_ECLASS} ]]; then
-
-case "${EAPI:-0}" in
- 0|1|2|3|4|5|6)
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- 7)
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-PYTHON_COMPAT=( python3_{8..10} )
+if [[ ! ${_KERNEL_BUILD_ECLASS} ]]; then
+_KERNEL_BUILD_ECLASS=1
-inherit python-any-r1 savedconfig toolchain-funcs kernel-install
+PYTHON_COMPAT=( python3_{10..12} )
+if [[ ${KERNEL_IUSE_MODULES_SIGN} ]]; then
+ inherit secureboot
+fi
+
+inherit multiprocessing python-any-r1 savedconfig toolchain-funcs kernel-install
BDEPEND="
${PYTHON_DEPS}
- app-arch/cpio
- sys-devel/bc
+ app-alternatives/cpio
+ app-alternatives/bc
+ sys-devel/bison
sys-devel/flex
virtual/libelf
- virtual/yacc"
+ arm? ( sys-apps/dtc )
+ arm64? ( sys-apps/dtc )
+ riscv? ( sys-apps/dtc )
+"
+
+IUSE="+strip"
+
+# @ECLASS_VARIABLE: KERNEL_IUSE_MODULES_SIGN
+# @PRE_INHERIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set to a non-null value, inherits secureboot.eclass, adds
+# IUSE=modules-sign and required logic to manipulate the kernel
+# config while respecting the MODULES_SIGN_HASH, MODULES_SIGN_CERT,
+# and MODULES_SIGN_KEY user variables.
+
+# @ECLASS_VARIABLE: MODULES_SIGN_HASH
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Used with USE=modules-sign. Can be set to the hash algorithm to use
+# during signature generation (CONFIG_MODULE_SIG_SHA256).
+#
+# Valid values: sha512,sha384,sha256,sha224,sha1
+#
+# Default if unset: sha512
+
+# @ECLASS_VARIABLE: MODULES_SIGN_KEY
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Used with USE=modules-sign. Can be set to the path of the private
+# key in PEM format to use, or a PKCS#11 URI (CONFIG_MODULE_SIG_KEY).
+#
+# If path is relative (e.g. "certs/name.pem"), it is assumed to be
+# relative to the kernel build directory being used.
+#
+# If the key requires a passphrase or PIN, the kernel's sign-file
+# utility recognizes the KBUILD_SIGN_PIN environment variable. Be
+# warned that the package manager may store this value in binary
+# packages, database files, temporary files, and possibly logs. This
+# eclass unsets the variable after use to mitigate the issue (notably
+# for shared binary packages), but use this with care.
+#
+# Default if unset: certs/signing_key.pem
+
+# @ECLASS_VARIABLE: MODULES_SIGN_CERT
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Used with USE=modules-sign. Can be set to the path of the public
+# key in PEM format to use. Must be specified if MODULES_SIGN_KEY
+# is set to a path of a file that only contains the private key.
+
+# @ECLASS_VARIABLE: KERNEL_GENERIC_UKI_CMDLINE
+# @USER_VARIABLE
+# @DESCRIPTION:
+# If KERNEL_IUSE_GENERIC_UKI is set, this variable allows setting the
+# built-in kernel command line for the UKI. If unset, the default is
+# root=/dev/gpt-auto-root ro
+: "${KERNEL_GENERIC_UKI_CMDLINE:="root=/dev/gpt-auto-root ro"}"
+
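A hedged make.conf-style sketch for a user who enables USE=modules-sign with their own key; every path here is a placeholder:

    # /etc/portage/make.conf (illustrative)
    MODULES_SIGN_HASH="sha256"
    MODULES_SIGN_KEY="/var/lib/kernel-keys/modules.pem"
    MODULES_SIGN_CERT="/var/lib/kernel-keys/modules.crt"   # only needed if the key file lacks the certificate
    KERNEL_GENERIC_UKI_CMDLINE="root=/dev/gpt-auto-root ro quiet"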
+if [[ ${KERNEL_IUSE_MODULES_SIGN} ]]; then
+ IUSE+=" modules-sign"
+ REQUIRED_USE="secureboot? ( modules-sign )"
+ BDEPEND+="
+ modules-sign? ( dev-libs/openssl )
+ "
+fi
+
+if [[ ${KERNEL_IUSE_GENERIC_UKI} ]]; then
+ BDEPEND+="
+ generic-uki? ( ${!INITRD_PACKAGES[@]} )
+ "
+fi
+
+# @FUNCTION: kernel-build_pkg_setup
+# @DESCRIPTION:
+# Call python-any-r1 and secureboot pkg_setup
+kernel-build_pkg_setup() {
+ python-any-r1_pkg_setup
+ if [[ ${KERNEL_IUSE_MODULES_SIGN} ]]; then
+ secureboot_pkg_setup
+ if [[ -e ${MODULES_SIGN_KEY} && ${MODULES_SIGN_KEY} != pkcs11:* ]]; then
+ if [[ -e ${MODULES_SIGN_CERT} && ${MODULES_SIGN_CERT} != ${MODULES_SIGN_KEY} ]]; then
+ MODULES_SIGN_KEY_CONTENTS="$(cat "${MODULES_SIGN_CERT}" "${MODULES_SIGN_KEY}" || die)"
+ else
+ MODULES_SIGN_KEY_CONTENTS="$(< "${MODULES_SIGN_KEY}")"
+ fi
+ fi
+ fi
+}
# @FUNCTION: kernel-build_src_configure
# @DESCRIPTION:
@@ -87,7 +177,7 @@ kernel-build_src_configure() {
LD="${LD}"
AR="$(tc-getAR)"
NM="$(tc-getNM)"
- STRIP=":"
+ STRIP="$(tc-getSTRIP)"
OBJCOPY="$(tc-getOBJCOPY)"
OBJDUMP="$(tc-getOBJDUMP)"
@@ -95,6 +185,27 @@ kernel-build_src_configure() {
ARCH=$(tc-arch-kernel)
)
+ if type -P xz &>/dev/null ; then
+ export XZ_OPT="-T$(makeopts_jobs) --memlimit-compress=50% -q"
+ fi
+
+ if type -P zstd &>/dev/null ; then
+ export ZSTD_NBTHREADS="$(makeopts_jobs)"
+ fi
+
+ # pigz/pbzip2/lbzip2 all need to take an argument, not an env var,
+ # for their options, which won't work because of how the kernel build system
+ # uses the variables (e.g. passes directly to tar as an executable).
+ if type -P pigz &>/dev/null ; then
+ MAKEARGS+=( KGZIP="pigz" )
+ fi
+
+ if type -P pbzip2 &>/dev/null ; then
+ MAKEARGS+=( KBZIP2="pbzip2" )
+ elif type -P lbzip2 &>/dev/null ; then
+ MAKEARGS+=( KBZIP2="lbzip2" )
+ fi
+
restore_config .config
[[ -f .config ]] || die "Ebuild error: please copy default config into .config"
@@ -103,6 +214,12 @@ kernel-build_src_configure() {
.config)
fi
+ # If CONFIG_EFI_ZBOOT is enabled by USE=secureboot or by the user's config,
+ # it changes the name of the output image. Set this variable to track the setting.
+ if grep -q "CONFIG_EFI_ZBOOT=y" .config; then
+ KERNEL_EFI_ZBOOT=1
+ fi
+
mkdir -p "${WORKDIR}"/modprep || die
mv .config "${WORKDIR}"/modprep/ || die
emake O="${WORKDIR}"/modprep "${MAKEARGS[@]}" olddefconfig
@@ -125,19 +242,26 @@ kernel-build_src_compile() {
# from kernel-install.eclass with the correct paths.
kernel-build_src_test() {
debug-print-function ${FUNCNAME} "${@}"
- local targets=( modules_install )
- # on arm or arm64 you also need dtb
- if use arm || use arm64; then
- targets+=( dtbs_install )
+
+ # Use the kernel build system to strip, this ensures the modules
+ # are stripped *before* they are signed or compressed.
+ local strip_args
+ if use strip; then
+ strip_args="--strip-unneeded"
fi
emake O="${WORKDIR}"/build "${MAKEARGS[@]}" \
- INSTALL_MOD_PATH="${T}" "${targets[@]}"
+ INSTALL_MOD_PATH="${T}" INSTALL_MOD_STRIP="${strip_args}" \
+ modules_install
+
+ local dir_ver=${PV}${KV_LOCALVERSION}
+ local relfile=${WORKDIR}/build/include/config/kernel.release
+ local module_ver
+ module_ver=$(<"${relfile}") || die
- local ver="${PV}${KV_LOCALVERSION}"
- kernel-install_test "${ver}" \
+ kernel-install_test "${module_ver}" \
"${WORKDIR}/build/$(dist-kernel_get_image_path)" \
- "${T}/lib/modules/${ver}"
+ "${T}/lib/modules/${module_ver}"
}
# @FUNCTION: kernel-build_src_install
@@ -151,24 +275,52 @@ kernel-build_src_install() {
# on what kind of installkernel is installed
local targets=( modules_install )
# on arm or arm64 you also need dtb
- if use arm || use arm64; then
+ if use arm || use arm64 || use riscv; then
targets+=( dtbs_install )
fi
+ # Use the kernel build system to strip, this ensures the modules
+ # are stripped *before* they are signed or compressed.
+ local strip_args
+ if use strip; then
+ strip_args="--strip-unneeded"
+ fi
+ # Modules were already stripped by the kernel build system
+ dostrip -x /lib/modules
+
+ local compress=()
+ if [[ ${KERNEL_IUSE_GENERIC_UKI} ]] && ! use modules-compress; then
+ compress+=(
+ # force installing uncompressed modules even if compression
+ # is enabled via config
+ suffix-y=
+ )
+ fi
+
emake O="${WORKDIR}"/build "${MAKEARGS[@]}" \
- INSTALL_MOD_PATH="${ED}" INSTALL_PATH="${ED}/boot" "${targets[@]}"
+ INSTALL_MOD_PATH="${ED}" INSTALL_MOD_STRIP="${strip_args}" \
+ INSTALL_PATH="${ED}/boot" "${compress[@]}" "${targets[@]}"
# note: we're using mv rather than doins to save space and time
# install main and arch-specific headers first, and scripts
local kern_arch=$(tc-arch-kernel)
- local ver="${PV}${KV_LOCALVERSION}"
- dodir "/usr/src/linux-${ver}/arch/${kern_arch}"
- mv include scripts "${ED}/usr/src/linux-${ver}/" || die
+ local dir_ver=${PV}${KV_LOCALVERSION}
+ local kernel_dir=/usr/src/linux-${dir_ver}
+
+ if use sparc ; then
+ # We don't want tc-arch-kernel's sparc64, even though we do
+ # need to pass ARCH=sparc64 to the build system. It's a quasi-alias
+ # in Kbuild.
+ kern_arch=sparc
+ fi
+
+ dodir "${kernel_dir}/arch/${kern_arch}"
+ mv include scripts "${ED}${kernel_dir}/" || die
mv "arch/${kern_arch}/include" \
- "${ED}/usr/src/linux-${ver}/arch/${kern_arch}/" || die
+ "${ED}${kernel_dir}/arch/${kern_arch}/" || die
# some arches need module.lds linker script to build external modules
if [[ -f arch/${kern_arch}/kernel/module.lds ]]; then
- insinto "/usr/src/linux-${ver}/arch/${kern_arch}/kernel"
+ insinto "${kernel_dir}/arch/${kern_arch}/kernel"
doins "arch/${kern_arch}/kernel/module.lds"
fi
@@ -176,7 +328,7 @@ kernel-build_src_install() {
find -type f '!' '(' -name 'Makefile*' -o -name 'Kconfig*' ')' \
-delete || die
find -type l -delete || die
- cp -p -R * "${ED}/usr/src/linux-${ver}/" || die
+ cp -p -R * "${ED}${kernel_dir}/" || die
cd "${WORKDIR}" || die
# strip out-of-source build stuffs from modprep
@@ -187,23 +339,155 @@ kernel-build_src_install() {
'(' -name '.*' -a -not -name '.config' ')' \
')' -delete || die
rm modprep/source || die
- cp -p -R modprep/. "${ED}/usr/src/linux-${ver}"/ || die
+ cp -p -R modprep/. "${ED}${kernel_dir}"/ || die
+ # If CONFIG_MODULES=y, then kernel.release will be found in modprep as well,
+ # but not if CONFIG_MODULES is unset.
+ # The one in build is exactly the same as the one in modprep, but the one in build
+ # always exists, so it can just be copied unconditionally.
+ cp "${WORKDIR}/build/include/config/kernel.release" \
+ "${ED}${kernel_dir}/include/config/" || die
# install the kernel and files needed for module builds
- insinto "/usr/src/linux-${ver}"
- doins build/{System.map,Module.symvers}
+ insinto "${kernel_dir}"
+ doins build/System.map
+ # build/Module.symvers does not exist if CONFIG_MODULES is not set.
+ [[ -f build/Module.symvers ]] && doins build/Module.symvers
local image_path=$(dist-kernel_get_image_path)
- cp -p "build/${image_path}" "${ED}/usr/src/linux-${ver}/${image_path}" || die
+ local image=${ED}${kernel_dir}/${image_path}
+ cp -p "build/${image_path}" "${image}" || die
+
+ # If a key was generated, copy it so external modules can be signed
+ local suffix
+ for suffix in pem x509; do
+ if [[ -f "build/certs/signing_key.${suffix}" ]]; then
+ cp -p "build/certs/signing_key.${suffix}" "${ED}${kernel_dir}/certs" || die
+ fi
+ done
# building modules fails with 'vmlinux has no symtab?' if stripped
- use ppc64 && dostrip -x "/usr/src/linux-${ver}/${image_path}"
+ use ppc64 && dostrip -x "${kernel_dir}/${image_path}"
+
+ # Install vmlinux with debuginfo when requested
+ if use debug; then
+ if [[ "${image_path}" != "vmlinux" ]]; then
+ mv "build/vmlinux" "${ED}${kernel_dir}/vmlinux" || die
+ fi
+ dostrip -x "${kernel_dir}/vmlinux"
+ fi
# strip empty directories
find "${D}" -type d -empty -exec rmdir {} + || die
+ local relfile=${ED}${kernel_dir}/include/config/kernel.release
+ local module_ver
+ module_ver=$(<"${relfile}") || die
+
# fix source tree and build dir symlinks
- dosym ../../../usr/src/linux-${ver} /lib/modules/${ver}/build
- dosym ../../../usr/src/linux-${ver} /lib/modules/${ver}/source
+ dosym "../../../${kernel_dir}" "/lib/modules/${module_ver}/build"
+ dosym "../../../${kernel_dir}" "/lib/modules/${module_ver}/source"
+ if [[ "${image_path}" == *vmlinux* ]]; then
+ dosym "../../../${kernel_dir}/${image_path}" "/lib/modules/${module_ver}/vmlinux"
+ else
+ dosym "../../../${kernel_dir}/${image_path}" "/lib/modules/${module_ver}/vmlinuz"
+ fi
+
+ if [[ ${KERNEL_IUSE_MODULES_SIGN} ]]; then
+ secureboot_sign_efi_file "${image}"
+ fi
+
+ if [[ ${KERNEL_IUSE_GENERIC_UKI} ]]; then
+ if use generic-uki; then
+ # NB: if you pass a path that does not exist or is not a regular
+ # file/directory, dracut will silently ignore it and use the default
+ # https://github.com/dracutdevs/dracut/issues/1136
+ > "${T}"/empty-file || die
+ mkdir -p "${T}"/empty-directory || die
+
+ local dracut_modules=(
+ base bash btrfs cifs crypt crypt-gpg crypt-loop dbus dbus-daemon
+ dm dmraid dracut-systemd fido2 i18n fs-lib kernel-modules
+ kernel-network-modules kernel-modules-extra lunmask lvm nbd
+ mdraid modsign network network-manager nfs nvdimm nvmf pcsc
+ pkcs11 qemu qemu-net resume rngd rootfs-block shutdown
+ systemd systemd-ac-power systemd-ask-password systemd-initrd
+ systemd-integritysetup systemd-pcrphase systemd-sysusers
+ systemd-udevd systemd-veritysetup terminfo tpm2-tss udev-rules
+ uefi-lib usrmount virtiofs
+ )
+
+ local dracut_args=(
+ --conf "${T}/empty-file"
+ --confdir "${T}/empty-directory"
+ --kernel-image "${image}"
+ --kmoddir "${ED}/lib/modules/${dir_ver}"
+ --kver "${dir_ver}"
+ --verbose
+ --compress="xz -9e --check=crc32"
+ --no-hostonly
+ --no-hostonly-cmdline
+ --no-hostonly-i18n
+ --no-machineid
+ --nostrip
+ --no-uefi
+ --early-microcode
+ --reproducible
+ --ro-mnt
+ --modules "${dracut_modules[*]}"
+ # Pulls in huge firmware files
+ --omit-drivers "nfp"
+ )
+
+ # Tries to update ld cache
+ addpredict /etc/ld.so.cache~
+ dracut "${dracut_args[@]}" "${image%/*}/initrd" ||
+ die "Failed to generate initramfs"
+
+ local ukify_args=(
+ --linux="${image}"
+ --initrd="${image%/*}/initrd"
+ --cmdline="${KERNEL_GENERIC_UKI_CMDLINE}"
+ --uname="${dir_ver}"
+ --output="${image%/*}/uki.efi"
+ )
+
+ if [[ ${KERNEL_IUSE_SECUREBOOT} ]] && use secureboot; then
+ ukify_args+=(
+ --signtool=sbsign
+ --secureboot-private-key="${SECUREBOOT_SIGN_KEY}"
+ --secureboot-certificate="${SECUREBOOT_SIGN_CERT}"
+ )
+ if [[ ${SECUREBOOT_SIGN_KEY} == pkcs11:* ]]; then
+ ukify_args+=(
+ --signing-engine="pkcs11"
+ )
+ else
+					# systemd-measure does not currently support pkcs11
+ ukify_args+=(
+ --measure
+ --pcrpkey="${ED}${kernel_dir}/certs/signing_key.x509"
+ --pcr-private-key="${SECUREBOOT_SIGN_KEY}"
+ --phases="enter-initrd"
+ --pcr-private-key="${SECUREBOOT_SIGN_KEY}"
+ --phases="enter-initrd:leave-initrd enter-initrd:leave-initrd:sysinit enter-initrd:leave-initrd:sysinit:ready"
+ )
+ fi
+ fi
+
+ # systemd<255 does not install ukify in /usr/bin
+ PATH="${PATH}:${BROOT}/usr/lib/systemd:${BROOT}/lib/systemd" \
+ ukify build "${ukify_args[@]}" || die "Failed to generate UKI"
+
+ # Overwrite unnecessary image types to save space
+ > "${image}" || die
+ else
+ # Placeholders to ensure we own these files
+ > "${image%/*}/uki.efi" || die
+ fi
+ > "${image%/*}/initrd" || die
+ fi
+
+	# unset so it is at least kept out of the environment file in, e.g. shared binpkgs
+ unset KBUILD_SIGN_PIN
save_config build/.config
}
@@ -214,6 +498,26 @@ kernel-build_src_install() {
kernel-build_pkg_postinst() {
kernel-install_pkg_postinst
savedconfig_pkg_postinst
+
+ if [[ ${KERNEL_IUSE_MODULES_SIGN} ]]; then
+ if use modules-sign && [[ -z ${MODULES_SIGN_KEY} ]]; then
+ ewarn
+ ewarn "MODULES_SIGN_KEY was not set, this means the kernel build system"
+ ewarn "automatically generated the signing key. This key was installed"
+ ewarn "in ${EROOT}/usr/src/linux-${PV}${KV_LOCALVERSION}/certs"
+ ewarn "and will also be included in any binary packages."
+ ewarn "Please take appropriate action to protect the key!"
+ ewarn
+ ewarn "Recompiling this package causes a new key to be generated. As"
+ ewarn "a result any external kernel modules will need to be resigned."
+ ewarn "Use emerge @module-rebuild, or manually sign the modules as"
+ ewarn "described on the wiki [1]"
+ ewarn
+ ewarn "Consider using the MODULES_SIGN_KEY variable to use an external key."
+ ewarn
+ ewarn "[1]: https://wiki.gentoo.org/wiki/Signed_kernel_module_support"
+ fi
+ fi
}
# @FUNCTION: kernel-build_merge_configs
@@ -236,19 +540,58 @@ kernel-build_merge_configs() {
local user_configs=( "${BROOT}"/etc/kernel/config.d/*.config )
shopt -u nullglob
+ local merge_configs=( "${@}" )
+
+ if [[ ${KERNEL_IUSE_MODULES_SIGN} ]]; then
+ if use modules-sign; then
+ : "${MODULES_SIGN_HASH:=sha512}"
+ cat <<-EOF > "${WORKDIR}/modules-sign.config" || die
+ ## Enable module signing
+ CONFIG_MODULE_SIG=y
+ CONFIG_MODULE_SIG_ALL=y
+ CONFIG_MODULE_SIG_FORCE=y
+ CONFIG_MODULE_SIG_${MODULES_SIGN_HASH^^}=y
+ EOF
+ if [[ -n ${MODULES_SIGN_KEY_CONTENTS} ]]; then
+ (umask 066 && touch "${T}/kernel_key.pem" || die)
+ echo "${MODULES_SIGN_KEY_CONTENTS}" > "${T}/kernel_key.pem" || die
+ unset MODULES_SIGN_KEY_CONTENTS
+ export MODULES_SIGN_KEY="${T}/kernel_key.pem"
+ fi
+ if [[ ${MODULES_SIGN_KEY} == pkcs11:* || -r ${MODULES_SIGN_KEY} ]]; then
+ echo "CONFIG_MODULE_SIG_KEY=\"${MODULES_SIGN_KEY}\"" \
+ >> "${WORKDIR}/modules-sign.config"
+ elif [[ -n ${MODULES_SIGN_KEY} ]]; then
+ die "MODULES_SIGN_KEY=${MODULES_SIGN_KEY} not found or not readable!"
+ fi
+ merge_configs+=( "${WORKDIR}/modules-sign.config" )
+ fi
+ fi
+
+ # Only semi-related but let's use that to avoid changing stable ebuilds.
+ if [[ ${KERNEL_IUSE_GENERIC_UKI} ]]; then
+ # NB: we enable this even with USE=-modules-compress, in order
+ # to support both uncompressed and compressed modules in prebuilt
+ # kernels
+ cat <<-EOF > "${WORKDIR}/module-compress.config" || die
+ CONFIG_MODULE_COMPRESS_XZ=y
+ EOF
+ merge_configs+=( "${WORKDIR}/module-compress.config" )
+ fi
+
if [[ ${#user_configs[@]} -gt 0 ]]; then
elog "User config files are being applied:"
local x
for x in "${user_configs[@]}"; do
elog "- ${x}"
done
+ merge_configs+=( "${user_configs[@]}" )
fi
./scripts/kconfig/merge_config.sh -m -r \
- .config "${@}" "${user_configs[@]}" || die
+ .config "${merge_configs[@]}" || die
}
-_KERNEL_BUILD_ECLASS=1
fi
-EXPORT_FUNCTIONS src_configure src_compile src_test src_install pkg_postinst
+EXPORT_FUNCTIONS pkg_setup src_configure src_compile src_test src_install pkg_postinst
diff --git a/eclass/kernel-install.eclass b/eclass/kernel-install.eclass
index 08f631b5e865..f512d815fe09 100644
--- a/eclass/kernel-install.eclass
+++ b/eclass/kernel-install.eclass
@@ -1,4 +1,4 @@
-# Copyright 2020-2022 Gentoo Authors
+# Copyright 2020-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: kernel-install.eclass
@@ -6,7 +6,7 @@
# Distribution Kernel Project <dist-kernel@gentoo.org>
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
-# @SUPPORTED_EAPIS: 7
+# @SUPPORTED_EAPIS: 8
# @PROVIDES: dist-kernel-utils
# @BLURB: Installation mechanics for Distribution Kernels
# @DESCRIPTION:
@@ -21,6 +21,13 @@
# Additionally, the inherited mount-boot eclass exports pkg_pretend.
# It also stubs out pkg_preinst and pkg_prerm defined by mount-boot.
+# @ECLASS_VARIABLE: KERNEL_IUSE_GENERIC_UKI
+# @PRE_INHERIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set to a non-null value, adds IUSE=generic-uki and required
+# logic to install a generic unified kernel image.
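+#
+# A minimal sketch of opting in from an ebuild, set before inherit
+# (whether kernel-build or kernel-install is inherited depends on the
+# package):
+# @CODE
+# KERNEL_IUSE_GENERIC_UKI=1
+# inherit kernel-build
+# @CODE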
+
# @ECLASS_VARIABLE: KV_LOCALVERSION
# @DEFAULT_UNSET
# @DESCRIPTION:
@@ -28,20 +35,22 @@
# Needs to be set only when installing binary kernels,
# kernel-build.eclass obtains it from kernel config.
+# @ECLASS_VARIABLE: INITRD_PACKAGES
+# @INTERNAL
+# @DESCRIPTION:
+# Used with KERNEL_IUSE_GENERIC_UKI. The eclass sets this to an associative
+# array mapping the packages needed to build the generic UKI to their
+# licenses. Used in kernel-build.eclass.
+
if [[ ! ${_KERNEL_INSTALL_ECLASS} ]]; then
+_KERNEL_INSTALL_ECLASS=1
-case "${EAPI:-0}" in
- 0|1|2|3|4|5|6)
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- 7)
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-inherit dist-kernel-utils mount-boot toolchain-funcs
+inherit dist-kernel-utils mount-boot multiprocessing toolchain-funcs
SLOT="${PV}"
IUSE="+initramfs test"
@@ -51,14 +60,149 @@ RESTRICT+="
arm? ( test )
"
-# install-DEPEND actually
-# note: we need installkernel with initramfs support!
-RDEPEND="
- || (
- sys-kernel/installkernel-gentoo
- sys-kernel/installkernel-systemd-boot
+_IDEPEND_BASE="
+ !initramfs? (
+ >=sys-kernel/installkernel-14
+ )
+ initramfs? (
+ >=sys-kernel/installkernel-14[dracut(-)]
+ )
+"
+
+LICENSE="GPL-2"
+if [[ ${KERNEL_IUSE_GENERIC_UKI} ]]; then
+ IUSE+=" generic-uki modules-compress"
+ # https://github.com/AndrewAmmerlaan/dist-kernel-log-to-licenses
+	# This script can help with generating the array below; keep in mind
+	# that it is not a fully automatic solution, i.e. USE flags will
+	# still have to be handled manually.
+ declare -gA INITRD_PACKAGES=(
+ ["app-alternatives/awk"]="CC0-1.0"
+ ["app-alternatives/gzip"]="CC0-1.0"
+ ["app-alternatives/sh"]="CC0-1.0"
+ ["app-arch/bzip2"]="BZIP2"
+ ["app-arch/gzip"]="GPL-3+"
+ ["app-arch/lz4"]="BSD-2 GPL-2"
+ ["app-arch/xz-utils"]="public-domain LGPL-2.1+ GPL-2+"
+ ["app-arch/zstd"]="|| ( BSD GPL-2 )"
+ ["app-crypt/argon2"]="|| ( Apache-2.0 CC0-1.0 )"
+ ["app-crypt/gnupg[smartcard,tpm(-)]"]="GPL-3+"
+ ["app-crypt/p11-kit"]="MIT"
+ ["app-crypt/tpm2-tools"]="BSD"
+ ["app-crypt/tpm2-tss"]="BSD-2"
+ ["app-misc/ddcutil"]="GPL-2"
+ ["app-misc/jq"]="MIT CC-BY-3.0"
+ ["app-shells/bash"]="GPL-3+"
+ ["dev-db/sqlite"]="public-domain"
+ ["dev-libs/cyrus-sasl"]="BSD-with-attribution"
+ ["dev-libs/expat"]="MIT"
+ ["dev-libs/glib"]="LGPL-2.1+"
+ ["dev-libs/hidapi"]="|| ( BSD GPL-3 HIDAPI )"
+ ["dev-libs/icu"]="BSD"
+ ["dev-libs/json-c"]="MIT"
+ ["dev-libs/libaio"]="LGPL-2"
+ ["dev-libs/libassuan"]="GPL-3 LGPL-2.1"
+ ["dev-libs/libevent"]="BSD"
+ ["dev-libs/libffi"]="MIT"
+ ["dev-libs/libgcrypt"]="LGPL-2.1 MIT"
+ ["dev-libs/libgpg-error"]="GPL-2 LGPL-2.1"
+ ["dev-libs/libp11"]="LGPL-2.1"
+ ["dev-libs/libpcre2"]="BSD"
+ ["dev-libs/libtasn1"]="LGPL-2.1+"
+ ["dev-libs/libunistring"]="|| ( LGPL-3+ GPL-2+ ) || ( FDL-1.2 GPL-3+ )"
+ ["dev-libs/libusb"]="LGPL-2.1"
+ ["dev-libs/lzo"]="GPL-2+"
+ ["dev-libs/npth"]="LGPL-2.1+"
+ ["dev-libs/nss"]="|| ( MPL-2.0 GPL-2 LGPL-2.1 )"
+ ["dev-libs/oniguruma"]="BSD-2"
+ ["dev-libs/opensc"]="LGPL-2.1"
+ ["dev-libs/openssl"]="Apache-2.0"
+ ["dev-libs/userspace-rcu"]="LGPL-2.1"
+ ["media-libs/libmtp"]="LGPL-2.1"
+ ["media-libs/libv4l"]="LGPL-2.1+"
+ ["net-dns/c-ares"]="MIT ISC"
+ ["net-dns/libidn2"]="|| ( GPL-2+ LGPL-3+ ) GPL-3+ unicode"
+ ["net-fs/cifs-utils"]="GPL-3"
+ ["net-fs/nfs-utils"]="GPL-2"
+ ["net-fs/samba"]="GPL-3"
+ ["net-libs/libmnl"]="LGPL-2.1"
+ ["net-libs/libndp"]="LGPL-2.1+"
+ ["net-libs/libtirpc"]="BSD BSD-2 BSD-4 LGPL-2.1+"
+ ["net-libs/nghttp2"]="MIT"
+ ["net-misc/curl"]="BSD curl ISC"
+ ["net-misc/networkmanager[iwd]"]="GPL-2+ LGPL-2.1+"
+ ["net-nds/openldap"]="OPENLDAP GPL-2"
+ ["net-wireless/bluez"]="GPL-2+ LGPL-2.1+"
+ ["net-wireless/iwd"]="GPL-2"
+ ["sys-apps/acl"]="LGPL-2.1"
+ ["sys-apps/attr"]="LGPL-2.1"
+ ["sys-apps/baselayout"]="GPL-2"
+ ["sys-apps/coreutils"]="GPL-3+"
+ ["sys-apps/dbus"]="|| ( AFL-2.1 GPL-2 )"
+ ["sys-apps/fwupd"]="LGPL-2.1+"
+ ["sys-apps/gawk"]="GPL-3+"
+ ["sys-apps/hwdata"]="GPL-2+"
+ ["sys-apps/iproute2"]="GPL-2"
+ ["sys-apps/kbd"]="GPL-2"
+ ["sys-apps/keyutils"]="GPL-2 LGPL-2.1"
+ ["sys-apps/kmod"]="LGPL-2"
+ ["sys-apps/less"]="|| ( GPL-3 BSD-2 )"
+ ["sys-apps/nvme-cli"]="GPL-2 GPL-2+"
+ ["sys-apps/pcsc-lite"]="BSD ISC MIT GPL-3+ GPL-2"
+ ["sys-apps/rng-tools"]="GPL-2"
+ ["sys-apps/sed"]="GPL-3+"
+ ["sys-apps/shadow"]="BSD GPL-2"
+ ["sys-apps/systemd[boot(-),cryptsetup,pkcs11,policykit,tpm,ukify(-)]"]="GPL-2 LGPL-2.1 MIT public-domain"
+ ["sys-apps/util-linux"]="GPL-2 GPL-3 LGPL-2.1 BSD-4 MIT public-domain"
+ ["sys-auth/polkit"]="LGPL-2"
+ ["sys-block/nbd"]="GPL-2"
+ ["sys-devel/gcc"]="GPL-3+ LGPL-3+ || ( GPL-3+ libgcc libstdc++ gcc-runtime-library-exception-3.1 ) FDL-1.3+"
+ ["sys-fs/btrfs-progs"]="GPL-2"
+ ["sys-fs/cryptsetup"]="GPL-2+"
+ ["sys-fs/dmraid"]="GPL-2"
+ ["sys-fs/dosfstools"]="GPL-3"
+ ["sys-fs/e2fsprogs"]="GPL-2 BSD"
+ ["sys-fs/lvm2[lvm]"]="GPL-2"
+ ["sys-fs/mdadm"]="GPL-2"
+ ["sys-fs/multipath-tools"]="GPL-2"
+ ["sys-fs/xfsprogs"]="LGPL-2.1"
+ ["sys-kernel/dracut"]="GPL-2"
+ ["sys-kernel/linux-firmware[redistributable,-unknown-license]"]="GPL-2 GPL-2+ GPL-3 BSD MIT || ( MPL-1.1 GPL-2 ) linux-fw-redistributable BSD-2 BSD BSD-4 ISC MIT"
+ ["sys-libs/glibc"]="LGPL-2.1+ BSD HPND ISC inner-net rc PCRE"
+ ["sys-libs/libapparmor"]="GPL-2 LGPL-2.1"
+ ["sys-libs/libcap"]="|| ( GPL-2 BSD )"
+ ["sys-libs/libcap-ng"]="LGPL-2.1"
+ ["sys-libs/libnvme"]="LGPL-2.1+"
+ ["sys-libs/libseccomp"]="LGPL-2.1"
+ ["sys-libs/libxcrypt"]="LGPL-2.1+ public-domain BSD BSD-2"
+ ["sys-libs/ncurses"]="MIT"
+ ["sys-libs/pam"]="|| ( BSD GPL-2 )"
+ ["sys-libs/readline"]="GPL-3+"
+ ["sys-libs/zlib"]="ZLIB"
+ ["sys-process/procps"]="GPL-2+ LGPL-2+ LGPL-2.1+"
+ ["amd64? ( sys-firmware/intel-microcode )"]="amd64? ( intel-ucode )"
+ ["x86? ( sys-firmware/intel-microcode )"]="x86? ( intel-ucode )"
)
- initramfs? ( >=sys-kernel/dracut-049-r3 )"
+ LICENSE+="
+ generic-uki? ( ${INITRD_PACKAGES[@]} )
+ "
+
+ RDEPEND+="
+ sys-apps/kmod[lzma]
+ "
+ IDEPEND="
+ generic-uki? (
+ >=sys-kernel/installkernel-14[-dracut(-),-ukify(-)]
+ )
+ !generic-uki? (
+ ${_IDEPEND_BASE}
+ )
+ "
+else
+ IDEPEND="${_IDEPEND_BASE}"
+fi
+unset _IDEPEND_BASE
+
# needed by objtool that is installed along with the kernel and used
# to build external modules
# NB: linux-mod.eclass also adds this dep but it's cleaner to have
@@ -75,6 +219,7 @@ BDEPEND="
arm64? ( app-emulation/qemu[qemu_softmmu_targets_aarch64] )
ppc? ( app-emulation/qemu[qemu_softmmu_targets_ppc] )
ppc64? ( app-emulation/qemu[qemu_softmmu_targets_ppc64] )
+ sparc? ( app-emulation/qemu[qemu_softmmu_targets_sparc,qemu_softmmu_targets_sparc64] )
x86? ( app-emulation/qemu[qemu_softmmu_targets_i386] )
)"
@@ -156,20 +301,14 @@ kernel-install_get_qemu_arch() {
x86)
echo i386
;;
- arm)
- echo arm
- ;;
+ arm|ppc|ppc64|riscv|sparc|sparc64)
+ echo ${ARCH}
+ ;;
arm64)
echo aarch64
;;
- ppc)
- echo ppc
- ;;
- ppc64)
- echo ppc64
- ;;
- riscv)
- echo riscv
+ loong)
+ echo loongarch64
;;
*)
die "${FUNCNAME}: unsupported ARCH=${ARCH}"
@@ -235,6 +374,8 @@ kernel-install_create_qemu_image() {
# some layout needed to pass dracut's usable_root() validation
mkdir -p "${imageroot}"/{bin,dev,etc,lib,proc,root,sbin,sys} || die
touch "${imageroot}/lib/ld-fake.so" || die
+ # Initrd images with systemd require some os-release file
+ cp "${BROOT}/etc/os-release" "${imageroot}/etc/os-release" || die
kernel-install_create_init "${imageroot}/sbin/init"
@@ -267,6 +408,7 @@ kernel-install_test() {
plymouth # hangs, or sometimes steals output
rngd # hangs or segfaults sometimes
i18n # copies all the fonts from /usr/share/consolefonts
+ dracut-systemd systemd systemd-initrd # gets stuck in boot loop
)
# NB: if you pass a path that does not exist or is not a regular
@@ -275,6 +417,12 @@ kernel-install_test() {
> "${T}"/empty-file || die
mkdir -p "${T}"/empty-directory || die
+ local compress="gzip"
+ if [[ ${KERNEL_IUSE_GENERIC_UKI} ]] && use generic-uki; then
+ # Test with same compression method as the generic initrd
+ compress="xz -9e --check=crc32"
+ fi
+
dracut \
--conf "${T}"/empty-file \
--confdir "${T}"/empty-directory \
@@ -284,6 +432,7 @@ kernel-install_test() {
--omit "${omit_mods[*]}" \
--nostrip \
--no-early-microcode \
+ --compress="${compress}" \
"${T}/initrd" "${version}" || die
kernel-install_create_qemu_image "${T}/fs.img"
@@ -315,6 +464,10 @@ kernel-install_test() {
;;
esac
+ if [[ ${KERNEL_IUSE_MODULES_SIGN} ]]; then
+ use modules-sign && qemu_extra_append+=" module.sig_enforce=1"
+ fi
+
cat > run.sh <<-EOF || die
#!/bin/sh
exec qemu-system-${qemu_arch} \
@@ -379,14 +532,25 @@ kernel-install_pkg_pretend() {
ewarn "for your hardware to work. If in doubt, it is recommended"
ewarn "to pause or abort the build process and install it before"
ewarn "resuming."
+ elog
+ elog "If you decide to install linux-firmware later, you can rebuild"
+ elog "the initramfs via issuing a command equivalent to:"
+ elog
+ elog " emerge --config ${CATEGORY}/${PN}:${SLOT}"
+ fi
- if use initramfs; then
- elog
- elog "If you decide to install linux-firmware later, you can rebuild"
- elog "the initramfs via issuing a command equivalent to:"
- elog
- elog " emerge --config ${CATEGORY}/${PN}:${SLOT}"
- fi
+ if ! use initramfs && ! has_version "${CATEGORY}/${PN}[-initramfs]"; then
+ ewarn
+ ewarn "WARNING: The standard configuration of the Gentoo distribution"
+ ewarn "kernels requires an initramfs! You have disabled the initramfs"
+ ewarn "USE flag and as a result dracut was not pulled in as a dependency."
+ ewarn "Please ensure that you are either overriding the standard"
+ ewarn "configuration or that an alternative initramfs generation plugin"
+ ewarn "is installed for your installkernel implementation!"
+ ewarn
+ ewarn "This is an advanced use case, you are on your own to ensure"
+ ewarn "that your system is bootable!"
+ ewarn
fi
}
@@ -405,50 +569,109 @@ kernel-install_src_test() {
kernel-install_pkg_preinst() {
debug-print-function ${FUNCNAME} "${@}"
- local ver="${PV}${KV_LOCALVERSION}"
- local kdir="${ED}/usr/src/linux-${ver}"
- local relfile="${kdir}/include/config/kernel.release"
- [[ ! -d ${kdir} ]] && die "Kernel directory ${kdir} not installed!"
- [[ ! -f ${relfile} ]] && die "Release file ${relfile} not installed!"
- local release="$(<"${relfile}")"
- if [[ ${release} != ${PV}* ]]; then
- eerror "Kernel release mismatch!"
- eerror " expected (PV): ${PV}*"
- eerror " found: ${release}"
- eerror "Please verify that you are applying the correct patches."
- die "Kernel release mismatch (${release} instead of ${PV}*)"
+ local dir_ver=${PV}${KV_LOCALVERSION}
+ local kernel_dir=${ED}/usr/src/linux-${dir_ver}
+ local relfile=${kernel_dir}/include/config/kernel.release
+ local image_path=$(dist-kernel_get_image_path)
+ [[ ! -d ${kernel_dir} ]] &&
+ die "Kernel directory ${kernel_dir} not installed!"
+ [[ ! -f ${relfile} ]] &&
+ die "Release file ${relfile} not installed!"
+ local release
+ release="$(<"${relfile}")" || die
+ DIST_KERNEL_RELEASE="${release}"
+
+ # perform the version check for release ebuilds only
+ if [[ ${PV} != *9999 ]]; then
+ local expected_ver=$(dist-kernel_PV_to_KV "${PV}")
+
+ if [[ ${release} != ${expected_ver}* ]]; then
+ eerror "Kernel release mismatch!"
+ eerror " expected (PV): ${expected_ver}*"
+ eerror " found: ${release}"
+ eerror "Please verify that you are applying the correct patches."
+ die "Kernel release mismatch (${release} instead of ${expected_ver}*)"
+ fi
+ fi
+
+ if [[ -L ${EROOT}/lib && ${EROOT}/lib -ef ${EROOT}/usr/lib ]]; then
+ # Adjust symlinks for merged-usr.
+ rm "${ED}/lib/modules/${release}"/{build,source} || die
+ dosym "../../../src/linux-${dir_ver}" "/usr/lib/modules/${release}/build"
+ dosym "../../../src/linux-${dir_ver}" "/usr/lib/modules/${release}/source"
+ for file in vmlinux vmlinuz; do
+ if [[ -L "${ED}/lib/modules/${release}/${file}" ]]; then
+ rm "${ED}/lib/modules/${release}/${file}" || die
+ dosym "../../../src/linux-${dir_ver}/${image_path}" "/usr/lib/modules/${release}/${file}"
+ fi
+ done
fi
}
+# @FUNCTION: kernel-install_extract_from_uki
+# @USAGE: <type> <input> <output>
+# @DESCRIPTION:
+# Extracts the kernel image or initrd from a UKI. <type> must be "linux"
+# or "initrd".
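+#
+# A usage sketch (paths are illustrative):
+# @CODE
+# kernel-install_extract_from_uki initrd /boot/uki.efi "${T}/initrd"
+# @CODE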
+kernel-install_extract_from_uki() {
+ [[ ${#} -eq 3 ]] || die "${FUNCNAME}: invalid arguments"
+ local extract_type=${1}
+ local uki=${2}
+ local out=${3}
+
+ $(tc-getOBJCOPY) -O binary "-j.${extract_type}" "${uki}" "${out}" ||
+ die "Failed to extract ${extract_type}"
+ chmod 644 "${out}" || die
+}
+
# @FUNCTION: kernel-install_install_all
# @USAGE: <ver>
# @DESCRIPTION:
-# Build an initramfs for the kernel and install the kernel. This is
-# called from pkg_postinst() and pkg_config(). <ver> is the full
-# kernel version.
+# Install the kernel, initramfs/uki generation is optionally handled by
+# installkernel. This is called from pkg_postinst() and pkg_config().
+# <ver> is the full kernel version.
kernel-install_install_all() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 1 ]] || die "${FUNCNAME}: invalid arguments"
- local ver=${1}
+ local dir_ver=${1}
+ local kernel_dir=${EROOT}/usr/src/linux-${dir_ver}
+ local relfile=${kernel_dir}/include/config/kernel.release
+ local image_path=$(dist-kernel_get_image_path)
+ local image_dir=${image_path%/*}
+ local module_ver
+ module_ver=$(<"${relfile}") || die
+
+ if [[ ${KERNEL_IUSE_GENERIC_UKI} ]]; then
+ if use generic-uki; then
+ # Populate placeholders
+ kernel-install_extract_from_uki linux \
+ "${kernel_dir}/${image_dir}"/uki.efi \
+ "${kernel_dir}/${image_path}"
+ kernel-install_extract_from_uki initrd \
+ "${kernel_dir}/${image_dir}"/uki.efi \
+ "${kernel_dir}/${image_dir}"/initrd
+ if [[ -L ${EROOT}/lib && ${EROOT}/lib -ef ${EROOT}/usr/lib ]]; then
+ ln -sf "../../../src/linux-${dir_ver}/${image_dir}/initrd" "${EROOT}/usr/lib/modules/${module_ver}/initrd" || die
+ ln -sf "../../../src/linux-${dir_ver}/${image_dir}/uki.efi" "${EROOT}/usr/lib/modules/${module_ver}/uki.efi" || die
+ else
+ ln -sf "../../../usr/src/linux-${dir_ver}/${image_dir}/initrd" "${EROOT}/lib/modules/${module_ver}/initrd" || die
+ ln -sf "../../../usr/src/linux-${dir_ver}/${image_dir}/uki.efi" "${EROOT}/lib/modules/${module_ver}/uki.efi" || die
+ fi
+ else
+ # Remove placeholders, -f because these have already been removed
+ # when doing emerge --config.
+ rm -f "${kernel_dir}/${image_dir}"/{initrd,uki.efi} || die
+ fi
+ fi
local success=
# not an actual loop but allows error handling with 'break'
while :; do
nonfatal mount-boot_check_status || break
- local image_path=$(dist-kernel_get_image_path)
- if use initramfs; then
- # putting it alongside kernel image as 'initrd' makes
- # kernel-install happier
- nonfatal dist-kernel_build_initramfs \
- "${EROOT}/usr/src/linux-${ver}/${image_path%/*}/initrd" \
- "${ver}" || break
- fi
-
- nonfatal dist-kernel_install_kernel "${ver}" \
- "${EROOT}/usr/src/linux-${ver}/${image_path}" \
- "${EROOT}/usr/src/linux-${ver}/System.map" || break
+ nonfatal dist-kernel_install_kernel "${module_ver}" \
+ "${kernel_dir}/${image_path}" "${kernel_dir}/System.map" || break
success=1
break
@@ -472,11 +695,26 @@ kernel-install_install_all() {
kernel-install_pkg_postinst() {
debug-print-function ${FUNCNAME} "${@}"
- local ver="${PV}${KV_LOCALVERSION}"
- kernel-install_update_symlink "${EROOT}/usr/src/linux" "${ver}"
+ local dir_ver=${PV}${KV_LOCALVERSION}
+ kernel-install_update_symlink "${EROOT}/usr/src/linux" "${dir_ver}"
+ dist-kernel_compressed_module_cleanup \
+ "${EROOT}/lib/modules/${DIST_KERNEL_RELEASE}"
if [[ -z ${ROOT} ]]; then
- kernel-install_install_all "${ver}"
+ kernel-install_install_all "${dir_ver}"
+ fi
+
+ if [[ ${KERNEL_IUSE_GENERIC_UKI} ]] && use generic-uki; then
+ ewarn "The prebuilt initramfs and unified kernel image are highly experimental!"
+ ewarn "These images may not work on your system. Please ensure that a working"
+ ewarn "alternative kernel(+initramfs) or UKI is also installed before rebooting!"
+ ewarn
+ ewarn "Note that when secureboot is enabled in the firmware settings any kernel"
+ ewarn "command line arguments supplied to the UKI by the bootloader are ignored."
+ ewarn "To ensure the root partition can be found, systemd-gpt-auto-generator must"
+ ewarn "be used. See [1] for more information."
+ ewarn
+ ewarn "[1]: https://wiki.gentoo.org/wiki/Systemd#Automatic_mounting_of_partitions_at_boot"
fi
}
@@ -495,12 +733,13 @@ kernel-install_pkg_prerm() {
kernel-install_pkg_postrm() {
debug-print-function ${FUNCNAME} "${@}"
- if [[ -z ${ROOT} ]] && use initramfs; then
- local ver="${PV}${KV_LOCALVERSION}"
+ if [[ -z ${ROOT} && ! ${KERNEL_IUSE_GENERIC_UKI} ]]; then
+ local dir_ver=${PV}${KV_LOCALVERSION}
+ local kernel_dir=${EROOT}/usr/src/linux-${dir_ver}
local image_path=$(dist-kernel_get_image_path)
ebegin "Removing initramfs"
- rm -f "${EROOT}/usr/src/linux-${ver}/${image_path%/*}"/initrd{,.uefi} &&
- find "${EROOT}/usr/src/linux-${ver}" -depth -type d -empty -delete
+ rm -f "${kernel_dir}/${image_path%/*}"/{initrd,uki.efi} &&
+ find "${kernel_dir}" -depth -type d -empty -delete
eend ${?}
fi
}
@@ -514,7 +753,25 @@ kernel-install_pkg_config() {
kernel-install_install_all "${PV}${KV_LOCALVERSION}"
}
-_KERNEL_INSTALL_ECLASS=1
+# @FUNCTION: kernel-install_compress_modules
+# @DESCRIPTION:
+# Compress modules installed in ED, if USE=modules-compress is enabled.
+kernel-install_compress_modules() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ if use modules-compress; then
+ einfo "Compressing kernel modules ..."
+ # xz options taken from scripts/Makefile.modinst
+ # we don't do 'xz -T' because it applies multithreading per file,
+ # so it works only for big files, and we have lots of small files
+ # instead
+ find "${ED}/lib" -name '*.ko' -print0 |
+ xargs -0 -P "$(makeopts_jobs)" -n 128 \
+ xz --check=crc32 --lzma2=dict=1MiB
+ assert "Compressing kernel modules failed"
+ fi
+}
+
fi
EXPORT_FUNCTIONS src_test pkg_preinst pkg_postinst pkg_prerm pkg_postrm
diff --git a/eclass/kodi-addon.eclass b/eclass/kodi-addon.eclass
index 8cbbad9224fc..b7678485795e 100644
--- a/eclass/kodi-addon.eclass
+++ b/eclass/kodi-addon.eclass
@@ -1,26 +1,21 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: kodi-addon.eclass
# @MAINTAINER:
# candrews@gentoo.org
-# @SUPPORTED_EAPIS: 4 5 6 7
-# @PROVIDES: cmake cmake-utils
+# @SUPPORTED_EAPIS: 7
+# @PROVIDES: cmake
# @BLURB: Helper for correct building and (importantly) installing Kodi addon packages.
# @DESCRIPTION:
# Provides a src_configure function for correct CMake configuration
-case "${EAPI:-0}" in
- 4|5|6)
- inherit cmake-utils multilib
- ;;
- 7)
- inherit cmake
- ;;
- *) die "EAPI=${EAPI} is not supported" ;;
+case ${EAPI} in
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_configure
+inherit cmake
# @FUNCTION: kodi-addon_src_configure
# @DESCRIPTION:
@@ -28,11 +23,10 @@ EXPORT_FUNCTIONS src_configure
kodi-addon_src_configure() {
mycmakeargs+=(
- -DCMAKE_INSTALL_LIBDIR=${EPREFIX%/}/usr/$(get_libdir)/kodi
+ -DCMAKE_INSTALL_LIBDIR="${EPREFIX}/usr/$(get_libdir)/kodi"
)
- case ${EAPI} in
- 4|5|6) cmake-utils_src_configure ;;
- 7) cmake_src_configure ;;
- esac
+ cmake_src_configure
}
+
+EXPORT_FUNCTIONS src_configure
diff --git a/eclass/libretro-core.eclass b/eclass/libretro-core.eclass
index e4f7221a5408..906526e32436 100644
--- a/eclass/libretro-core.eclass
+++ b/eclass/libretro-core.eclass
@@ -1,4 +1,4 @@
-# Copyright 2018-2021 Gentoo Authors
+# Copyright 2018-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: libretro-core.eclass
@@ -7,7 +7,7 @@
# @AUTHOR:
# Cecil Curry <leycec@gmail.com>
# Craig Andrews <candrews@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7
# @BLURB: Simplify libretro core ebuilds
# @DESCRIPTION:
# The libretro eclass is designed to streamline the construction of
@@ -34,6 +34,11 @@
# SLOT="0"
# @CODE
+case ${EAPI} in
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
if [[ -z ${_LIBRETRO_CORE_ECLASS} ]]; then
_LIBRETRO_CORE_ECLASS=1
@@ -64,28 +69,20 @@ fi
# @DESCRIPTION:
# Contains the real repo name of the core formatted as "repouser/reponame".
# Needs to be set before inherit. Otherwise defaults to "libretro/${PN}"
-: ${LIBRETRO_REPO_NAME:="libretro/libretro-${LIBRETRO_CORE_NAME}"}
+: "${LIBRETRO_REPO_NAME:="libretro/libretro-${LIBRETRO_CORE_NAME}"}"
-: ${HOMEPAGE:="https://github.com/${LIBRETRO_REPO_NAME}"}
+: "${HOMEPAGE:="https://github.com/${LIBRETRO_REPO_NAME}"}"
if [[ ${PV} == *9999 ]]; then
- : ${EGIT_REPO_URI:="https://github.com/${LIBRETRO_REPO_NAME}.git"}
+ : "${EGIT_REPO_URI:="https://github.com/${LIBRETRO_REPO_NAME}.git"}"
inherit git-r3
else
[[ -z "${LIBRETRO_COMMIT_SHA}" ]] && die "LIBRETRO_COMMIT_SHA must be set before inherit."
S="${WORKDIR}/${LIBRETRO_REPO_NAME##*/}-${LIBRETRO_COMMIT_SHA}"
- : ${SRC_URI:="https://github.com/${LIBRETRO_REPO_NAME}/archive/${LIBRETRO_COMMIT_SHA}.tar.gz -> ${P}.tar.gz"}
+ : "${SRC_URI:="https://github.com/${LIBRETRO_REPO_NAME}/archive/${LIBRETRO_COMMIT_SHA}.tar.gz -> ${P}.tar.gz"}"
fi
inherit flag-o-matic toolchain-funcs
-case "${EAPI:-0}" in
- 6|7)
- EXPORT_FUNCTIONS src_unpack src_prepare src_compile src_install
- ;;
- *)
- die "EAPI=${EAPI} is not supported" ;;
-esac
-
# @FUNCTION: libretro-core_src_unpack
# @DESCRIPTION:
# The libretro-core src_unpack function which is exported.
@@ -210,3 +207,5 @@ libretro-core_src_install() {
}
fi # end _LIBRETRO_CORE_ECLASS guard
+
+EXPORT_FUNCTIONS src_unpack src_prepare src_compile src_install
diff --git a/eclass/libtool.eclass b/eclass/libtool.eclass
index a38f41588289..bd6141e1ede9 100644
--- a/eclass/libtool.eclass
+++ b/eclass/libtool.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: libtool.eclass
@@ -17,10 +17,10 @@
if [[ -z ${_LIBTOOL_ECLASS} ]]; then
_LIBTOOL_ECLASS=1
-case ${EAPI:-0} in
- 5|6) DEPEND=">=app-portage/elt-patches-20170815" ;;
- 7|8) BDEPEND=">=app-portage/elt-patches-20170815" ;;
- *) die "${ECLASS}: EAPI ${EAPI} not supported" ;;
+case ${EAPI} in
+ 6) DEPEND=">=app-portage/elt-patches-20240116" ;;
+ 7|8) BDEPEND=">=app-portage/elt-patches-20240116" ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
inherit toolchain-funcs
diff --git a/eclass/linux-info.eclass b/eclass/linux-info.eclass
index 7e130062a6c1..864594f607ca 100644
--- a/eclass/linux-info.eclass
+++ b/eclass/linux-info.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: linux-info.eclass
@@ -6,6 +6,7 @@
# kernel@gentoo.org
# @AUTHOR:
# Original author: John Mylchreest <johnm@gentoo.org>
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: eclass used for accessing kernel related information
# @DESCRIPTION:
# This eclass is used as a central eclass for accessing kernel
@@ -26,9 +27,26 @@
# get_version
# get_running_version
+case ${EAPI} in
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_LINUX_INFO_ECLASS} ]]; then
+_LINUX_INFO_ECLASS=1
+
# A Couple of env vars are available to effect usage of this eclass
# These are as follows:
+
+# @ECLASS_VARIABLE: CHECKCONFIG_DONOTHING
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Do not error out in check_extra_config if CONFIG settings are not met.
+# This is a user flag and should under _no circumstances_ be set in the ebuild.
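+#
+# For example, a user could set it for a single command (the package
+# name is illustrative):
+# @CODE
+# CHECKCONFIG_DONOTHING=1 emerge app-misc/some-module-package
+# @CODE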
+: "${CHECKCONFIG_DONOTHING:=""}"
+
# @ECLASS_VARIABLE: KERNEL_DIR
# @DESCRIPTION:
# A string containing the directory of the target kernel sources. The default value is
@@ -67,8 +85,8 @@ KERNEL_DIR="${KERNEL_DIR:-${ROOT%/}/usr/src/linux}"
# e.g.: ERROR_MTRR="MTRR exists in the .config but shouldn't!!"
#
# CONFIG_CHECK="CFG" with ERROR_<CFG>="Error Message" will die
-# CONFIG_CHECK="~CFG" with ERROR_<CFG>="Error Message" calls eerror without dieing
-# CONFIG_CHECK="~CFG" with WARNING_<CFG>="Warning Message" calls ewarn without dieing
+# CONFIG_CHECK="~CFG" with ERROR_<CFG>="Error Message" calls eerror without dying
+# CONFIG_CHECK="~CFG" with WARNING_<CFG>="Warning Message" calls ewarn without dying
# @ECLASS_VARIABLE: KBUILD_OUTPUT
@@ -87,7 +105,7 @@ KERNEL_DIR="${KERNEL_DIR:-${ROOT%/}/usr/src/linux}"
# the following names, in order: GNUmakefile, makefile and Makefile. Set this variable to the
# proper Makefile name or the eclass will search in this order for it.
# See https://www.gnu.org/software/make/manual/make.html
-: ${KERNEL_MAKEFILE:=""}
+: "${KERNEL_MAKEFILE:=""}"
# @ECLASS_VARIABLE: KV_FULL
# @OUTPUT_VARIABLE
@@ -131,14 +149,20 @@ KERNEL_DIR="${KERNEL_DIR:-${ROOT%/}/usr/src/linux}"
# A read-only variable. It's a string containing the kernel object directory, will be KV_DIR unless
# KBUILD_OUTPUT is used. This should be used for referencing .config.
+# @ECLASS_VARIABLE: SKIP_KERNEL_CHECK
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Do not check for kernel sources or a running kernel version.
+# Main use-case is for chroots.
+# This is a user flag and should under _no circumstances_ be set in the ebuild.
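+#
+# For example, inside a chroot without kernel sources (the package name
+# is illustrative):
+# @CODE
+# SKIP_KERNEL_CHECK=1 emerge app-misc/some-module-package
+# @CODE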
+: "${SKIP_KERNEL_CHECK:=""}"
+
# And to ensure all the weirdness with crosscompile
inherit toolchain-funcs
-[[ ${EAPI:-0} == [0123456] ]] && inherit eapi7-ver
+[[ ${EAPI} == 6 ]] && inherit eapi7-ver
-EXPORT_FUNCTIONS pkg_setup
-
-# Bug fixes
-# fix to bug #75034
+# bug #75034
case ${ARCH} in
ppc) BUILD_FIXES="${BUILD_FIXES} TOUT=${T}/.tmp_gas_check";;
ppc64) BUILD_FIXES="${BUILD_FIXES} TOUT=${T}/.tmp_gas_check";;
@@ -149,21 +173,6 @@ esac
# Set the env ARCH to match what the kernel expects.
set_arch_to_kernel() { export ARCH=$(tc-arch-kernel); }
-# @FUNCTION: set_arch_to_portage
-# @DESCRIPTION:
-# Set the env ARCH to match what portage expects.
-set_arch_to_portage() {
-
- ewarn "The function name: set_arch_to_portage is being deprecated and"
- ewarn "being changed to: set_arch_to_pkgmgr to comply with pms policy."
- ewarn "See bug #843686"
- ewarn "The old function name will be removed on or about July 1st, 2022."
- ewarn "Please update your ebuild or eclass before this date."
- ewarn ""
-
- export ARCH=$(tc-arch);
-}
-
# @FUNCTION: set_arch_to_pkgmgr
# @DESCRIPTION:
# Set the env ARCH to match what the package manager expects.
@@ -171,7 +180,7 @@ set_arch_to_pkgmgr() { export ARCH=$(tc-arch); }
# @FUNCTION: qout
# @DESCRIPTION:
-# qout <einfo | ewarn | eerror> is a quiet call when EBUILD_PHASE should not have visible output.
+# qout <einfo | ewarn | eerror> is a quiet call when EBUILD_PHASE should not have visible output.
qout() {
local outputmsg type
type=${1}
@@ -182,7 +191,7 @@ qout() {
clean) unset outputmsg;;
preinst) unset outputmsg;;
esac
- [ -n "${outputmsg}" ] && ${type} "${outputmsg}"
+ [[ -n "${outputmsg}" ]] && ${type} "${outputmsg}"
}
# @FUNCTION: qeinfo
@@ -192,14 +201,12 @@ qeinfo() { qout einfo "${@}" ; }
# @FUNCTION: qewarn
# @DESCRIPTION:
-# qewarn is a quiet ewarn call when EBUILD_PHASE
-# should not have visible output.
+# qewarn is a quiet ewarn call when EBUILD_PHASE should not have visible output.
qewarn() { qout ewarn "${@}" ; }
# @FUNCTION: qeerror
# @DESCRIPTION:
-# qeerror is a quiet error call when EBUILD_PHASE
-# should not have visible output.
+# qeerror is a quiet error call when EBUILD_PHASE should not have visible output.
qeerror() { qout eerror "${@}" ; }
# File Functions
@@ -209,18 +216,17 @@ qeerror() { qout eerror "${@}" ; }
# @USAGE: <variable> <configfile>
# @RETURN: the value of the variable
# @DESCRIPTION:
-# It detects the value of the variable defined in the file configfile. This is
-# done by including the configfile, and printing the variable with Make.
+# It detects the value of the variable defined in the file 'configfile'. This is
+# done by including the 'configfile', and printing the variable with Make.
# It WILL break if your makefile has missing dependencies!
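+#
+# e.g.: getfilevar VERSION "${KERNEL_MAKEFILE}" prints the kernel's
+# VERSION as Make would expand it.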
getfilevar() {
local ERROR basefname basedname myARCH="${ARCH}"
ERROR=0
- [ -z "${1}" ] && ERROR=1
- [ ! -f "${2}" ] && ERROR=1
+ [[ -z "${1}" ]] && ERROR=1
+ [[ ! -f "${2}" ]] && ERROR=1
- if [ "${ERROR}" = 1 ]
- then
+ if [[ "${ERROR}" = 1 ]]; then
echo -e "\n"
eerror "getfilevar requires 2 variables, with the second a valid file."
eerror " getfilevar <VARIABLE> <CONFIGFILE>"
@@ -232,9 +238,10 @@ getfilevar() {
# We use nonfatal because we want the caller to take care of things #373151
# Pass need-config= to make to avoid config check in kernel Makefile.
# Pass dot-config=0 to avoid the config check in kernels prior to 5.4.
- [[ ${EAPI:-0} == [0123] ]] && nonfatal() { "$@"; }
echo -e "e:\\n\\t@echo \$(${1})\\ninclude ${basefname}" | \
- nonfatal emake -C "${basedname}" --no-print-directory M="${T}" dot-config=0 need-config= ${BUILD_FIXES} -s -f - 2>/dev/null
+ nonfatal emake -C "${basedname}" --no-print-directory M="${T}" \
+ dot-config=0 need-config= need-compiler= \
+ ${BUILD_FIXES} -s -f - 2>/dev/null
ARCH=${myARCH}
fi
@@ -244,7 +251,7 @@ getfilevar() {
# @USAGE: <variable> <configfile>
# @RETURN: the value of the variable
# @DESCRIPTION:
-# It detects the value of the variable defined in the file configfile.
+# It detects the value of the variable defined in the file 'configfile'.
# This is done with sed matching an expression only. If the variable is defined,
# you will run into problems. See getfilevar for those cases.
getfilevar_noexec() {
@@ -252,12 +259,11 @@ getfilevar_noexec() {
ERROR=0
mycat='cat'
- [ -z "${1}" ] && ERROR=1
- [ ! -f "${2}" ] && ERROR=1
- [ "${2%.gz}" != "${2}" ] && mycat='zcat'
+ [[ -z "${1}" ]] && ERROR=1
+ [[ ! -f "${2}" ]] && ERROR=1
+ [[ "${2%.gz}" != "${2}" ]] && mycat='zcat'
- if [ "${ERROR}" = 1 ]
- then
+ if [[ "${ERROR}" = 1 ]]; then
echo -e "\n"
eerror "getfilevar_noexec requires 2 variables, with the second a valid file."
eerror " getfilevar_noexec <VARIABLE> <CONFIGFILE>"
@@ -284,10 +290,11 @@ _LINUX_CONFIG_EXISTS_DONE=
# @FUNCTION: linux_config_qa_check
# @INTERNAL
# @DESCRIPTION:
-# Helper funciton which returns an error before the function argument is run if no config exists
+# Helper function which returns an error before the function argument is run if no config exists
linux_config_qa_check() {
local f="$1"
- if [ -z "${_LINUX_CONFIG_EXISTS_DONE}" ]; then
+
+ if [[ -z "${_LINUX_CONFIG_EXISTS_DONE}" ]]; then
ewarn "QA: You called $f before any linux_config_exists!"
 		ewarn "QA: The return value of $f will NOT be guaranteed later!"
fi
@@ -345,6 +352,8 @@ linux_config_path() {
# This function verifies that the current kernel is configured (it checks against the existence of .config)
# otherwise it dies.
require_configured_kernel() {
+ [[ -n ${SKIP_KERNEL_CHECK} ]] && return
+
if ! use kernel_linux; then
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
@@ -356,6 +365,7 @@ require_configured_kernel() {
qeerror "it points to the necessary object directory so that it might find .config."
die "Kernel not configured; no .config found in ${KV_OUT_DIR}"
fi
+
get_version || die "Unable to determine configured kernel version"
}
@@ -367,6 +377,7 @@ require_configured_kernel() {
# If linux_config_exists returns false, the results of this are UNDEFINED. You
# MUST call linux_config_exists first.
linux_chkconfig_present() {
+ [[ -n ${SKIP_KERNEL_CHECK} ]] && return
linux_config_qa_check linux_chkconfig_present
[[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == [my] ]]
}
@@ -379,6 +390,7 @@ linux_chkconfig_present() {
# If linux_config_exists returns false, the results of this are UNDEFINED. You
# MUST call linux_config_exists first.
linux_chkconfig_module() {
+ [[ -n ${SKIP_KERNEL_CHECK} ]] && return
linux_config_qa_check linux_chkconfig_module
[[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == m ]]
}
@@ -391,6 +403,7 @@ linux_chkconfig_module() {
# If linux_config_exists returns false, the results of this are UNDEFINED. You
# MUST call linux_config_exists first.
linux_chkconfig_builtin() {
+ [[ -n ${SKIP_KERNEL_CHECK} ]] && return
linux_config_qa_check linux_chkconfig_builtin
[[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == y ]]
}
@@ -403,6 +416,7 @@ linux_chkconfig_builtin() {
# If linux_config_exists returns false, the results of this are UNDEFINED. You
# MUST call linux_config_exists first.
linux_chkconfig_string() {
+ [[ -n ${SKIP_KERNEL_CHECK} ]] && return
linux_config_qa_check linux_chkconfig_string
getfilevar_noexec "CONFIG_$1" "$(linux_config_path)"
}
@@ -432,7 +446,7 @@ kernel_is() {
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
- # if we haven't determined the version yet, we need to.
+ # If we haven't determined the version yet, we need to.
linux-info_get_any_version
# Now we can continue
@@ -446,7 +460,7 @@ kernel_is() {
eq) operator="-eq"; shift;;
*) operator="-eq";;
esac
- [[ $# -gt 3 ]] && die "Error in kernel-2_kernel_is(): too many parameters"
+ [[ $# -gt 3 ]] && die "Error in ${ECLASS}_${FUNCNAME}(): too many parameters"
ver_test \
"${KV_MAJOR:-0}.${KV_MINOR:-0}.${KV_PATCH:-0}" \
@@ -465,6 +479,7 @@ kernel_is() {
# - make is not present
# - corruption exists in the kernel makefile
get_makefile_extract_function() {
+ [[ -n ${SKIP_KERNEL_CHECK} ]] && return
local a='' b='' mkfunc='getfilevar'
a="$(getfilevar VERSION ${KERNEL_MAKEFILE})"
b="$(getfilevar_noexec VERSION ${KERNEL_MAKEFILE})"
@@ -474,7 +489,7 @@ get_makefile_extract_function() {
# @ECLASS_VARIABLE: get_version_warning_done
# @INTERNAL
-# @DESCRIPTION:
+# @DESCRIPTION:
# Internal variable, so we know to only print the warning once.
get_version_warning_done=
@@ -495,26 +510,27 @@ get_version() {
local tmplocal
- # no need to execute this twice assuming KV_FULL is populated.
- # we can force by unsetting KV_FULL
- [ -n "${KV_FULL}" ] && return 0
+ [[ -n ${SKIP_KERNEL_CHECK} ]] && return
+
+ # No need to execute this twice assuming KV_FULL is populated.
+ # We can force by unsetting KV_FULL.
+ [[ -n "${KV_FULL}" ]] && return 0
- # if we dont know KV_FULL, then we need too.
- # make sure KV_DIR isnt set since we need to work it out via KERNEL_DIR
+ # If we don't know KV_FULL, then we need to.
+ # Make sure KV_DIR isn't set since we need to work it out via KERNEL_DIR.
unset KV_DIR
# KV_DIR will contain the full path to the sources directory we should use
- [ -z "${get_version_warning_done}" ] && \
+ [[ -z "${get_version_warning_done}" ]] && \
qeinfo "Determining the location of the kernel source code"
- [ -d "${KERNEL_DIR}" ] && KV_DIR="${KERNEL_DIR}"
+ [[ -d "${KERNEL_DIR}" ]] && KV_DIR="${KERNEL_DIR}"
- if [ -z "${KV_DIR}" ]
- then
- if [ -z "${get_version_warning_done}" ]; then
+ if [[ -z "${KV_DIR}" ]]; then
+ if [[ -z "${get_version_warning_done}" ]]; then
get_version_warning_done=1
qewarn "Unable to find kernel sources at ${KERNEL_DIR}"
#qeinfo "This package requires Linux sources."
- if [ "${KERNEL_DIR}" == "/usr/src/linux" ] ; then
+ if [[ "${KERNEL_DIR}" == "/usr/src/linux" ]] ; then
qeinfo "Please make sure that ${KERNEL_DIR} points at your running kernel, "
qeinfo "(or the kernel you wish to build against)."
qeinfo "Alternatively, set the KERNEL_DIR environment variable to the kernel sources location"
@@ -526,22 +542,21 @@ get_version() {
fi
# See if the kernel dir is actually an output dir. #454294
- if [ -z "${KBUILD_OUTPUT}" -a -L "${KERNEL_DIR}/source" ]; then
+ if [[ -z "${KBUILD_OUTPUT}" && -L "${KERNEL_DIR}/source" ]]; then
KBUILD_OUTPUT=${KERNEL_DIR}
KERNEL_DIR=$(readlink -f "${KERNEL_DIR}/source")
KV_DIR=${KERNEL_DIR}
fi
- if [ -z "${get_version_warning_done}" ]; then
+ if [[ -z "${get_version_warning_done}" ]]; then
qeinfo "Found kernel source directory:"
qeinfo " ${KV_DIR}"
fi
kernel_get_makefile
- if [[ ! -s ${KERNEL_MAKEFILE} ]]
- then
- if [ -z "${get_version_warning_done}" ]; then
+ if [[ ! -s ${KERNEL_MAKEFILE} ]]; then
+ if [[ -z "${get_version_warning_done}" ]]; then
get_version_warning_done=1
qeerror "Could not find a Makefile in the kernel source directory."
qeerror "Please ensure that ${KERNEL_DIR} points to a complete set of Linux sources"
@@ -551,8 +566,9 @@ get_version() {
# OK so now we know our sources directory, but they might be using
# KBUILD_OUTPUT, and we need this for .config and localversions-*
- # so we better find it eh?
- # do we pass KBUILD_OUTPUT on the CLI?
+ # so we better find it, eh?
+ #
+ # Do we pass KBUILD_OUTPUT on the CLI?
local OUTPUT_DIR=${KBUILD_OUTPUT}
if [[ -z ${OUTPUT_DIR} ]]; then
@@ -563,17 +579,16 @@ get_version() {
OUTPUT_DIR=$(${mkfunc} KBUILD_OUTPUT "${KERNEL_MAKEFILE}")
fi
- # And contrary to existing functions I feel we shouldn't trust the
+ # And contrary to existing functions, I feel we shouldn't trust the
# directory name to find version information as this seems insane.
- # So we parse ${KERNEL_MAKEFILE}.
+ # So we parse ${KERNEL_MAKEFILE}.
KV_MAJOR=$(getfilevar VERSION "${KERNEL_MAKEFILE}")
KV_MINOR=$(getfilevar PATCHLEVEL "${KERNEL_MAKEFILE}")
KV_PATCH=$(getfilevar SUBLEVEL "${KERNEL_MAKEFILE}")
KV_EXTRA=$(getfilevar EXTRAVERSION "${KERNEL_MAKEFILE}")
- if [ -z "${KV_MAJOR}" -o -z "${KV_MINOR}" -o -z "${KV_PATCH}" ]
- then
- if [ -z "${get_version_warning_done}" ]; then
+ if [[ -z "${KV_MAJOR}" || -z "${KV_MINOR}" || -z "${KV_PATCH}" ]]; then
+ if [[ -z "${get_version_warning_done}" ]]; then
get_version_warning_done=1
qeerror "Could not detect kernel version."
qeerror "Please ensure that ${KERNEL_DIR} points to a complete set of Linux sources."
@@ -581,9 +596,8 @@ get_version() {
return 1
fi
- [ -d "${OUTPUT_DIR}" ] && KV_OUT_DIR="${OUTPUT_DIR}"
- if [ -n "${KV_OUT_DIR}" ];
- then
+ [[ -d "${OUTPUT_DIR}" ]] && KV_OUT_DIR="${OUTPUT_DIR}"
+ if [[ -n "${KV_OUT_DIR}" ]]; then
qeinfo "Found kernel object directory:"
qeinfo " ${KV_OUT_DIR}"
fi
@@ -593,9 +607,9 @@ get_version() {
# Grab the kernel release from the output directory.
# TODO: we MUST detect kernel.release being out of date, and 'return 1' from
# this function.
- if [ -s "${KV_OUT_DIR}"/include/config/kernel.release ]; then
+ if [[ -s "${KV_OUT_DIR}"/include/config/kernel.release ]]; then
KV_LOCAL=$(<"${KV_OUT_DIR}"/include/config/kernel.release)
- elif [ -s "${KV_OUT_DIR}"/.kernelrelease ]; then
+ elif [[ -s "${KV_OUT_DIR}"/.kernelrelease ]]; then
KV_LOCAL=$(<"${KV_OUT_DIR}"/.kernelrelease)
else
KV_LOCAL=
@@ -608,13 +622,13 @@ get_version() {
# Clear out KV_LOCAL in that case.
# TODO: this does not detect a change in the localversion part between
# kernel.release and the value that would be generated.
- if [ "$KV_LOCAL" = "$tmplocal" ]; then
+ if [[ "${KV_LOCAL}" = "${tmplocal}" ]]; then
KV_LOCAL=
else
- KV_LOCAL=$tmplocal
+ KV_LOCAL=${tmplocal}
fi
- # and in newer versions we can also pull LOCALVERSION if it is set.
+ # and in newer versions, we can also pull LOCALVERSION if it is set.
# but before we do this, we need to find if we use a different object directory.
# This *WILL* break if the user is using localversions, but we assume it was
# caught before this if they are.
@@ -665,7 +679,7 @@ get_running_version() {
KV_MINOR=$(ver_cut 2 ${kv_full})
KV_PATCH=$(ver_cut 3 ${kv_full})
KV_EXTRA="${KV_FULL#${KV_MAJOR}.${KV_MINOR}${KV_PATCH:+.${KV_PATCH}}}"
- : ${KV_PATCH:=0}
+ : "${KV_PATCH:=0}"
return 0
}
@@ -682,11 +696,17 @@ linux-info_get_any_version() {
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
- if ! get_version; then
+ if [[ ${MERGE_TYPE} == binary && -z ${LINUX_INFO_BINARY_RESET} ]]; then
+ unset KV_FULL _LINUX_CONFIG_EXISTS_DONE KV_OUT_DIR
+ LINUX_INFO_BINARY_RESET=1
+ fi
+
+ if [[ ${MERGE_TYPE} != binary ]] && ! get_version; then
ewarn "Unable to calculate Linux Kernel version for build, attempting to use running version"
- if ! get_running_version; then
- die "Unable to determine any Linux Kernel version, please report a bug"
- fi
+ fi
+
+ if [[ -z ${KV_FULL} ]] && ! get_running_version; then
+ die "Unable to determine any Linux Kernel version, please report a bug"
fi
}
@@ -702,7 +722,10 @@ check_kernel_built() {
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
- # if we haven't determined the version yet, we need to
+ # If we haven't determined the version yet, we need to
+
+ [[ -n ${SKIP_KERNEL_CHECK} ]] && return
+
require_configured_kernel
local versionh_path
@@ -712,8 +735,7 @@ check_kernel_built() {
versionh_path="include/linux/version.h"
fi
- if [ ! -f "${KV_OUT_DIR}/${versionh_path}" ]
- then
+ if [[ ! -f "${KV_OUT_DIR}/${versionh_path}" ]]; then
eerror "These sources have not yet been prepared."
eerror "We cannot build against an unprepared tree."
eerror "To resolve this, please type the following:"
@@ -735,7 +757,7 @@ check_modules_supported() {
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
- # if we haven't determined the version yet, we need too.
+ # If we haven't determined the version yet, we need to.
require_configured_kernel
if ! linux_chkconfig_builtin "MODULES"; then
@@ -755,12 +777,12 @@ check_extra_config() {
local config negate die error reworkmodulenames
local soft_errors_count=0 hard_errors_count=0 config_required=0
- # store the value of the QA check, because otherwise we won't catch usages
+ # Store the value of the QA check, because otherwise we won't catch usages
# after if check_extra_config is called AND other direct calls are done
# later.
local old_LINUX_CONFIG_EXISTS_DONE="${_LINUX_CONFIG_EXISTS_DONE}"
- # if we haven't determined the version yet, we need to
+ # If we haven't determined the version yet, we need to.
linux-info_get_any_version
# Determine if we really need a .config. The only time when we don't need
@@ -795,15 +817,20 @@ check_extra_config() {
export LINUX_CONFIG_EXISTS_DONE="${old_LINUX_CONFIG_EXISTS_DONE}"
return 0
fi
- else
- require_configured_kernel
+ elif ! linux_config_exists; then
+		qeerror "Could not find either a usable .config in the kernel source directory"
+		qeerror "or a /proc/config.gz file."
+ qeerror "Please ensure that ${KERNEL_DIR} points to a configured set of Linux sources."
+		qeerror "If you are using KBUILD_OUTPUT, please set the environment variable so that"
+		qeerror "it points to the necessary object directory where .config can be found,"
+		qeerror "or have a properly configured kernel (CONFIG_IKCONFIG) to produce a config.gz file."
+		die "Kernel not configured; no .config found in ${KV_OUT_DIR} or /proc/config.gz"
fi
ebegin "Checking for suitable kernel configuration options"
- for config in ${CONFIG_CHECK}
- do
- # if we specify any fatal, ensure we honor them
+ for config in ${CONFIG_CHECK}; do
+ # If we specify any fatal, ensure we honor them
die=1
error=0
negate=0
@@ -900,7 +927,7 @@ check_zlibinflate() {
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
- # if we haven't determined the version yet, we need to
+ # If we haven't determined the version yet, we need to.
require_configured_kernel
# although I restructured this code - I really really really dont support it!
@@ -932,15 +959,15 @@ check_zlibinflate() {
LINENO_END="$(grep -n 'CONFIG_ZLIB_INFLATE y' ${KV_DIR}/lib/Config.in | cut -d : -f 1)"
LINENO_START="$(head -n $LINENO_END ${KV_DIR}/lib/Config.in | grep -n 'if \[' | tail -n 1 | cut -d : -f 1)"
- (( LINENO_AMOUNT = $LINENO_END - $LINENO_START ))
- (( LINENO_END = $LINENO_END - 1 ))
+ (( LINENO_AMOUNT = ${LINENO_END} - ${LINENO_START} ))
+ (( LINENO_END = ${LINENO_END} - 1 ))
SYMBOLS="$(head -n $LINENO_END ${KV_DIR}/lib/Config.in | tail -n $LINENO_AMOUNT | sed -e 's/^.*\(CONFIG_[^\" ]*\).*/\1/g;')"
# okay, now we have a list of symbols
# we need to check each one in turn, to see whether it is set or not
- for x in $SYMBOLS ; do
- if [ "${!x}" = "y" ]; then
- # we have a winner!
+ for x in ${SYMBOLS} ; do
+ if [[ "${!x}" = "y" ]]; then
+ # We have a winner!
einfo "${x} ensures zlib is linked into your kernel - excellent"
return 0
fi
@@ -955,7 +982,7 @@ check_zlibinflate() {
eerror "Please ensure that you enable at least one of these options:"
eerror
- for x in $SYMBOLS ; do
+ for x in ${SYMBOLS} ; do
eerror " * $x"
done
@@ -978,7 +1005,7 @@ linux-info_pkg_setup() {
linux-info_get_any_version
- [ -n "${CONFIG_CHECK}" ] && check_extra_config;
+ [[ -n "${CONFIG_CHECK}" && -z ${CHECKCONFIG_DONOTHING} ]] && check_extra_config
}
# @FUNCTION: kernel_get_makefile
@@ -994,3 +1021,7 @@ kernel_get_makefile() {
[[ -s ${KV_DIR}/Makefile ]] && KERNEL_MAKEFILE="${KV_DIR}/Makefile" && return
}
+
+fi
+
+EXPORT_FUNCTIONS pkg_setup
diff --git a/eclass/linux-mod-r1.eclass b/eclass/linux-mod-r1.eclass
new file mode 100644
index 000000000000..8d384c2b30c8
--- /dev/null
+++ b/eclass/linux-mod-r1.eclass
@@ -0,0 +1,1280 @@
+# Copyright 2023-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: linux-mod-r1.eclass
+# @MAINTAINER:
+# Ionen Wolkens <ionen@gentoo.org>
+# Gentoo Kernel project <kernel@gentoo.org>
+# @AUTHOR:
+# Ionen Wolkens <ionen@gentoo.org>
+# @SUPPORTED_EAPIS: 8
+# @PROVIDES: linux-info
+# @BLURB: Functions for installing out-of-tree Linux kernel modules
+# @DESCRIPTION:
+# See the linux-mod-r1_src_compile function documentation for in-depth
+# usage, and see the example further down for a quick overview.
+#
+# @SUBSECTION linux-mod -> linux-mod-r1 migration notes
+# 0. Define a src_compile if missing, local variables below go there.
+# 1. MODULE_NAMES="name(libdir:srcdir:objdir)"
+# BUILD_TARGETS="target"
+# -> local modlist=( name=libdir:srcdir:objdir:target(s) )
+# - try without :target first, it is now almost always unnecessary
+# - srcdir defaults to the current directory, and note that paths
+# can be relative to that (should typically *not* pass ${S})
+# 2. BUILD_PARAMS and/or BUILD_FIXES
+# -> local modargs=( VAR="${KV_OUT_DIR}" ... )
+# - CC/LD and similar are unneeded, always passed (V=1 too)
+# - eval (aka eval "${BUILD_PARAMS}") is /not/ used for this anymore
+# 3. s/linux-mod_/linux-mod-r1/g
+# 4. _preinst+_postrm can be dropped, keep linux-mod-r1_pkg_postinst
+# 5. linux-mod-r1_src_install now runs einstalldocs, adjust as needed
+# 6. if *not* using linux-mod-r1_src_compile/install, then refer to
+# the eclass' 2nd example and ensure using modules_post_process
+# 7. Any custom clang<->gcc switching workarounds can be dropped
+# 8. See MODULES_KERNEL_MAX/_MIN if you had or need kernel version checks.
+#
+# This is not an exhaustive list; verify that no installed files are
+# missing afterward. Look for "command not found" errors in the build
+# log too.
+#
+# Revision bumps are not strictly needed to migrate unless you want to
+# keep the old eclass as a fallback for regressions; kernel upgrades or
+# the new IUSE=+strip will typically cause rebuilds either way.
+#
+# @EXAMPLE:
+#
+# If source directory S had a layout such as:
+# - Makefile (builds a gentoo.ko in current directory)
+# - gamepad/Makefile (want to install to kernel/drivers/hid)
+# - gamepad/obj/ (the built gamepad.ko ends up here)
+#
+# ...and the Makefile uses the NIH_SOURCE variable to find where the
+# kernel build directory is (aka KV_OUT_DIR, see linux-info.eclass)
+#
+# then:
+#
+# @CODE
+# CONFIG_CHECK="INPUT_FF_MEMLESS" # gamepad needs it to rumble
+# MODULES_KERNEL_MIN=5.4 # needs features introduced in 5.4
+#
+# src_compile() {
+# local modlist=(
+# gentoo
+# gamepad=kernel/drivers/hid:gamepad:gamepad/obj
+# )
+# local modargs=( NIH_SOURCE="${KV_OUT_DIR}" )
+#
+# linux-mod-r1_src_compile
+# }
+# @CODE
+#
+# Alternatively, if using the package's build system directly is
+# more convenient, a typical example could be:
+#
+# @CODE
+# src_compile() {
+# MODULES_MAKEARGS+=(
+# NIH_KDIR="${KV_OUT_DIR}"
+# NIH_KSRC="${KV_DIR}"
+# )
+#
+# emake "${MODULES_MAKEARGS[@]}"
+# }
+#
+# src_install() {
+# emake "${MODULES_MAKEARGS[@]}" DESTDIR="${ED}" install
+# modules_post_process # strip->sign->compress
+#
+# einstalldocs
+# }
+# @CODE
+#
+# Some extra make variables may be of interest:
+# - INSTALL_MOD_PATH: sometimes used as DESTDIR
+# - INSTALL_MOD_DIR: equivalent to linux_moduleinto
+#
+# MODULES_MAKEARGS is set by the eclass to handle the toolchain and,
+# when installing, it also attempts to disable automatic stripping,
+# compression, signing, and depmod so that the eclass can handle these.
+#
+# linux_domodule can alternatively be used to install a single module,
+# as sketched below.
+#
+# (remember to ensure that linux-mod-r1_pkg_postinst is run for depmod)
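+#
+# A rough sketch of a manual src_install using these helpers (the module
+# path and install directory are illustrative):
+# @CODE
+# src_install() {
+#	linux_moduleinto kernel/drivers/hid
+#	linux_domodule gamepad/obj/gamepad.ko
+#	modules_post_process
+#
+#	einstalldocs
+# }
+# @CODE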
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ ! ${_LINUX_MOD_R1_ECLASS} ]]; then
+_LINUX_MOD_R1_ECLASS=1
+
+inherit dist-kernel-utils edo linux-info multiprocessing toolchain-funcs
+
+IUSE="dist-kernel modules-compress modules-sign +strip ${MODULES_OPTIONAL_IUSE}"
+
+RDEPEND="
+ sys-apps/kmod[tools]
+ dist-kernel? ( virtual/dist-kernel:= )
+"
+DEPEND="
+ virtual/linux-sources
+"
+BDEPEND="
+ sys-apps/kmod[tools]
+ modules-sign? (
+ dev-libs/openssl
+ virtual/pkgconfig
+ )
+"
+IDEPEND="
+ sys-apps/kmod[tools]
+"
+
+if [[ -n ${MODULES_OPTIONAL_IUSE} ]]; then
+ : "${MODULES_OPTIONAL_IUSE#+}? ( | )"
+ RDEPEND=${_/|/${RDEPEND}} DEPEND=${_/|/${DEPEND}} \
+ BDEPEND=${_/|/${BDEPEND}} IDEPEND=${_/|/${IDEPEND}}
+fi
+
+# @ECLASS_VARIABLE: KERNEL_CHOST
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Can be set to the CHOST value to use when selecting the toolchain
+# for building kernel modules. This is similar to setting the kernel
+# build system's CROSS_COMPILE variable minus the trailing dash.
+#
+# If this does not auto-select the desired toolchain, finer control
+# can be achieved by setting the not directly documented (but valid)
+# variables:
+#
+# KERNEL_{CC,CXX,LD,AR,NM,OBJCOPY,OBJDUMP,READELF,STRIP}
+#
+# If in doubt, do not set any of these.
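+#
+# As a minimal sketch (the CHOST value is purely illustrative), it could
+# be set in make.conf or the environment when the kernel was built with
+# a toolchain other than the current CHOST's:
+# @CODE
+# KERNEL_CHOST="aarch64-unknown-linux-gnu"
+# @CODE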
+#
+# Default if unset: auto-detection, typically same as the current CHOST
+
+# @ECLASS_VARIABLE: MODULES_EXTRA_EMAKE
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Extra arguments to pass to emake when building modules.
+# Can contain arguments with quoted spaces, e.g.
+# @CODE
+# ..._EMAKE="KCFLAGS='-fzomg-optimize -fsuper-strict-aliasing' ..."
+# @CODE
+
+# @ECLASS_VARIABLE: MODULES_I_WANT_FULL_CONTROL
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# When set to a non-empty value, disables passing most of the eclass'
+# toolchain defaults to emake when building modules. Basic eclass
+# requirements, ebuilds' modargs, and users' MODULES_EXTRA_EMAKE are
+# still used.
+#
+# Primarily intended for expert users with modified kernel Makefiles
+# that want the Makefile's values to be used by default.
+#
+# May want to look at KERNEL_CHOST before considering this.
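+#
+# e.g. in make.conf or the environment (any non-empty value enables it):
+# @CODE
+# MODULES_I_WANT_FULL_CONTROL=1
+# @CODE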
+
+# @ECLASS_VARIABLE: MODULES_SIGN_HASH
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Used with USE=modules-sign. Can be set to the hash algorithm to use
+# during signature generation.
+#
+# Rather than setting this, it is recommended to select the hash in the
+# kernel's configuration to ensure proper support (e.g. CONFIG_MODULE_SIG_SHA256),
+# in which case it will be auto-detected here.
+#
+# Valid values: sha512, sha384, sha256, sha224, sha1
+#
+# Default if unset: kernel CONFIG_MODULE_SIG_HASH's value
+
+# @ECLASS_VARIABLE: MODULES_SIGN_KEY
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Used with USE=modules-sign. Can be set to the path of the private
+# key in PEM format to use, or a PKCS#11 URI.
+#
+# If path is relative (e.g. "certs/name.pem"), it is assumed to be
+# relative to the kernel build directory being used.
+#
+# If the key requires a passphrase or PIN, the kernel's sign-file
+# utility (used here) recognizes the KBUILD_SIGN_PIN environment
+# variable. Be warned that the package manager may store this value in
+# binary packages, database files, temporary files, and possibly logs.
+# This eclass unsets the variable after use to mitigate the issue
+# (notably for shared binary packages), but use this with care.
+#
+# Default if unset: kernel CONFIG_MODULE_SIG_KEY's value, which itself
+# defaults to certs/signing_key.pem
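+#
+# As a minimal sketch (the paths are hypothetical examples rather than
+# values provided by this eclass), using a self-managed key could look
+# like this in make.conf or the environment:
+# @CODE
+# MODULES_SIGN_KEY="/var/lib/module-signing/kernel_key.pem"
+# MODULES_SIGN_CERT="/var/lib/module-signing/kernel_key.x509"
+# @CODE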
+
+# @ECLASS_VARIABLE: MODULES_SIGN_CERT
+# @USER_VARIABLE
+# @DESCRIPTION:
+# Used with USE=modules-sign. Can be set to the path of the X.509
+# public key certificate to use.
+#
+# If path is relative (e.g. "certs/name.x509"), it is assumed to be
+# relative to the kernel build directory being used.
+: "${MODULES_SIGN_CERT:=certs/signing_key.x509}"
+
+# @ECLASS_VARIABLE: MODULES_KERNEL_MAX
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set to a kernel version (format: 1, 1.2, or 1.2.3), will print a
+# warning if the used version is greater than (ver_test -gt) this
+# value, comparing the same number of version components (i.e. MAX=1.2
+# allows 1.2.3, but MAX=1.2.2 does not).
+#
+# This should *only* be used for modules that are known to break
+# frequently on kernel upgrades. If setting this to a non-LTS kernel,
+# also take care to test and update this value regularly with new major
+# kernel releases so that the warning does not become stale and ignored
+# by users.
+#
+# Not fatal, to allow users to try or self-patch easily, but the (large)
+# warning is difficult to miss. If a fatal check is needed for more
+# serious issues (e.g. runtime filesystem corruption), please do it manually.
+#
+# This is intended to reduce the number of bug reports for recurring
+# expected issues that can be easily mitigated by using LTS kernels
+# and waiting for new releases.
+#
+# If used, must be set before linux-mod-r1_pkg_setup is called.
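+#
+# For instance (version values are purely illustrative):
+# @CODE
+# MODULES_KERNEL_MAX=6.6 # warn with 6.7 and later, any 6.6.x is allowed
+# @CODE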
+
+# @ECLASS_VARIABLE: MODULES_KERNEL_MIN
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set to a kernel version (format: 1, 1.2, or 1.2.3), will abort if
+# the used version is less than (ver_test -lt) this value.
+#
+# Should only be used if older kernels are known to be broken, or if
+# upstream recommends a sane minimum. Not particularly necessary for
+# kernels that are no longer in the tree.
+#
+# If used, must be set before linux-mod-r1_pkg_setup is called.
+
+# @ECLASS_VARIABLE: MODULES_OPTIONAL_IUSE
+# @PRE_INHERIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# May contain a single flag to be added to IUSE, optionally prefixed
+# with a + sign to enable it by default. Doing so makes *all* of
+# linux-mod-r1's functions and dependencies no-ops unless the flag
+# is enabled. This includes phases, e.g. linux-mod-r1_pkg_setup will
+# not process CONFIG_CHECK unless the flag is set.
+#
+# The typical recommended value is "+modules" (global IUSE).
+#
+# Note that modules being optional can be useful even if user space
+# tools require them (e.g. installing in a chroot or prefix where the
+# modules are loaded on the host saves setting up kernel sources).
+# However, if the tools are non-trivial to build, it may be preferable
+# to split into two packages rather than use this variable, given it
+# requires rebuilds on every kernel upgrade.
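+#
+# A minimal sketch of typical usage (being @PRE_INHERIT, it must be set
+# before inheriting):
+# @CODE
+# MODULES_OPTIONAL_IUSE=+modules
+# inherit linux-mod-r1
+# @CODE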
+
+# @ECLASS_VARIABLE: MODULES_MAKEARGS
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# Will be set after linux-mod-r1_pkg_setup has been called. Contains
+# arguments that should be passed to emake when building or installing
+# modules.
+#
+# Modifying this variable is acceptable (e.g. to append kernel source
+# arguments) but, if using linux-mod-r1_src_compile, setting modargs
+# is the intended method as it is cleaner and less error-prone.
+
+# @FUNCTION: linux-mod-r1_pkg_setup
+# @DESCRIPTION:
+# Required before using other functions from this eclass, and will:
+# 1. run linux-info_pkg_setup (see linux-info.eclass)
+# -> implies processing CONFIG_CHECK, and providing KV_ variables
+# (MODULES and TRIM_UNUSED_KSYMS are always checked)
+# 2. prepare toolchain to match the kernel
+# -> sets KERNEL_{CHOST,CC,CXX,LD,AR,NM,OBJCOPY,OBJDUMP,READELF,STRIP}
+# -> also sets MODULES_MAKEARGS array with, e.g. CC="${KERNEL_CC}"
+# (normally these should not be used directly; they are for custom builds)
+# 3. perform various sanity checks to fail early on issues
+linux-mod-r1_pkg_setup() {
+ debug-print-function ${FUNCNAME[0]} "${@}"
+ [[ ${MERGE_TYPE} != binary ]] || return 0
+ _MODULES_GLOBAL[ran:pkg_setup]=1
+ _modules_check_function ${#} 0 0 || return 0
+ _modules_check_migration
+
+ _modules_prepare_kernel
+ _modules_prepare_sign
+ _modules_prepare_toolchain
+
+ _modules_set_makeargs
+
+ _modules_sanity_gccplugins
+}
+
+# @FUNCTION: linux-mod-r1_src_compile
+# @DESCRIPTION:
+# Builds modules, see the eclass' example for a quick overview.
+# Uses the variables modlist and modargs as described below:
+#
+# * local modlist=( ... ) - list of modules to build, set as:
+#
+# module-name=install-dir:source-dir:build-dir:make-target
+#
+# > module-name: Resulting name, aka <module-name>.ko (required).
+#
+# > install-dir: Kernel modules sub-directory to install the module
+# to (/lib/modules/version/<install-dir>/name.ko). Will be used when
+# linux-mod-r1_src_install is run. May want to consider the values of
+# INSTALL_MOD_DIR(Makefile) or DEST_MODULE_LOCATION(dkms.conf) if they
+# exist, but it can be anything.
+# -> Default: extra
+#
+# Warning: Changing this location may leave stale modules until a
+# kernel upgrade, as the package manager does not typically delete
+# old modules and only overwrites them on rebuilds.
+#
+# > source-dir: Directory containing the Makefile to build the module.
+# Path can be relative to the current directory or absolute.
+# -> Default: current directory
+#
+# > build-dir: Directory that will hold the built module-name.ko.
+# -> Default: same as source-dir's value
+#
+# > make-target: Almost always unneeded but, if the defaults are not right,
+# the Makefile's target(s) to build the module/extras can be specified.
+# Multiple targets can be used with spaces, e.g. :"first second".
+# -> Default: specially tries modules, module, <name>.ko, default,
+# all, empty target, and runs the first found usable
+#
+# Missing elements result in defaults being used, e.g. this is valid:
+# modlist=( name1 name2=:source name3=install::build )
+#
+# * local modargs=( ... ) - extra arguments to pass to emake
+#
+# The Makefile should notably be inspected for which variable it uses
+# to find the kernel's build directory, then e.g. KDIR="${KV_OUT_DIR}"
+# passed as appropriate. Note that one typically wants to pass
+# KV_OUT_DIR(build) rather than KV_DIR(sources), if not both. This
+# allows users to do out-of-source kernel builds and still build modules.
+#
+# Passing common toolchain variables such as CC or LD is not needed
+# here as they are passed by default.
+#
+# ---
+#
+# Allowed to be called multiple times with a different modlist if
+# different make arguments are needed per module or for intermediate
+# steps -- albeit, if the build is atypical, it may be preferable to
+# build manually (see the eclass' example).
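+#
+# As an extra illustrative sketch (the module name, directories, targets,
+# and the KDIR variable are hypothetical) combining the fields above:
+# @CODE
+# src_compile() {
+# 	local modlist=(
+# 		mymod=kernel/drivers/misc::build:"prepare modules"
+# 	)
+# 	local modargs=( KDIR="${KV_OUT_DIR}" )
+#
+# 	linux-mod-r1_src_compile
+# }
+# @CODE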
+linux-mod-r1_src_compile() {
+ debug-print-function ${FUNCNAME[0]} "${@}"
+ _modules_check_function ${#} 0 0 || return 0
+
+ [[ ${modlist@a} == *a* && ${#modlist[@]} -gt 0 ]] ||
+ die "${FUNCNAME[0]} was called without a 'modlist' array"
+
+	# run this again to verify access to the built files with src_compile's user
+ _modules_sanity_kernelbuilt
+
+ local -a emakeargs=( "${MODULES_MAKEARGS[@]}" )
+ [[ ${modargs@a} == *a* ]] && emakeargs+=( "${modargs[@]}" )
+
+ local -A built=()
+ local build mod name target
+ for mod in "${modlist[@]}"; do
+ # note modlist was not made an associative array ([name]=) to preserve
+ # ordering, but is still using = to improve readability
+ name=${mod%%=*}
+ [[ -n ${name} && ${name} != *:* ]] || die "invalid mod entry '${mod}'"
+
+ # 0:install-dir 1:source-dir 2:build-dir 3:make-target(s)
+ mod=${mod#"${name}"}
+ IFS=: read -ra mod <<<"${mod#=}"
+ [[ ${#mod[@]} -le 4 ]] || die "too many ':' in ${name}'s modlist"
+
+ [[ ${mod[1]:=${PWD}} != /* ]] && mod[1]=${PWD}/${mod[1]}
+ [[ ${mod[2]:=${mod[1]}} != /* ]] && mod[2]=${PWD}/${mod[2]}
+ _MODULES_INSTALL[${mod[2]}/${name}.ko]=${mod[0]:-extra}
+
+ pushd "${mod[1]}" >/dev/null || die
+
+ if [[ -z ${mod[3]} ]]; then
+ # guess between commonly used targets if none given, fallback to
+ # an empty target without trying to see the error output
+ for target in module{s,} "${name}".ko default all; do
+ nonfatal emake "${emakeargs[@]}" -q "${target}" &>/dev/null
+ if [[ ${?} -eq 1 ]]; then
+ mod[3]=${target}
+ break
+ fi
+ done
+ fi
+
+		# sometimes modules are all from the same source dir and built all at
+		# once; make will not rebuild either way, but we can skip the unnecessary noise
+ build=
+ for target in ${mod[3]:-&}; do
+ if ! has "${target}" ${built[${mod[1]}]}; then
+ build=1
+ built[${mod[1]}]+=" ${target} "
+ fi
+ done
+
+ if [[ ${build} ]]; then
+ einfo "Building ${name} module in ${mod[1]} ..."
+
+ # allow word splitting for rare cases of multiple targets
+ emake "${emakeargs[@]}" ${mod[3]}
+ else
+ einfo "Building ${name} module in ${mod[1]} ... already done."
+ fi
+
+ popd >/dev/null || die
+ done
+}
+
+# @FUNCTION: linux-mod-r1_src_install
+# @DESCRIPTION:
+# Installs modules built by linux-mod-r1_src_compile using
+# linux_domodule, then runs modules_post_process and einstalldocs.
+linux-mod-r1_src_install() {
+ debug-print-function ${FUNCNAME[0]} "${@}"
+ _modules_check_function ${#} 0 0 || return 0
+
+ (( ${#_MODULES_INSTALL[@]} )) ||
+ die "${FUNCNAME[0]} was called without running linux-mod-r1_src_compile"
+
+ (
+ for mod in "${!_MODULES_INSTALL[@]}"; do
+ linux_moduleinto "${_MODULES_INSTALL[${mod}]}"
+ linux_domodule "${mod}"
+ done
+ )
+
+ modules_post_process
+
+ einstalldocs
+}
+
+# @FUNCTION: linux-mod-r1_pkg_postinst
+# @DESCRIPTION:
+# Updates module dependencies using depmod.
+linux-mod-r1_pkg_postinst() {
+ debug-print-function ${FUNCNAME[0]} "${@}"
+ _modules_check_function ${#} 0 0 || return 0
+
+ dist-kernel_compressed_module_cleanup "${EROOT}/lib/modules/${KV_FULL}"
+ _modules_update_depmod
+
+ # post_process ensures modules were installed and that the eclass' USE
+ # are likely not no-ops (unfortunately postinst itself may be missed)
+ [[ -v _MODULES_GLOBAL[ran:post_process] ]] ||
+ eqawarn "QA Notice: neither linux-mod-r1_src_install nor modules_post_process were used"
+}
+
+# @FUNCTION: linux_domodule
+# @USAGE: <module>...
+# @DESCRIPTION:
+# Installs Linux modules (.ko files).
+#
+# See also linux_moduleinto.
+linux_domodule() {
+ debug-print-function ${FUNCNAME[0]} "${@}"
+ _modules_check_function ${#} 1 '' "<module>..." || return 0
+ (
+		# linux-mod-r0 formerly supported INSTALL_MOD_PATH (bug #642240), but
+		# this has been judged messy to integrate consistently as not everything
+		# uses this function and build systems sometimes mix it with DESTDIR
+		# (try ROOT instead if needing to install somewhere else)
+ insinto "/lib/modules/${KV_FULL}/${_MODULES_GLOBAL[moduleinto]:-extra}"
+ doins "${@}"
+ )
+}
+
+# @FUNCTION: linux_moduleinto
+# @USAGE: <install-dir>
+# @DESCRIPTION:
+# Directory to install modules into when calling linux_domodule.
+# Relative to kernel modules path as in:
+# ${ED}/lib/modules/${KV_FULL}/<install-dir>
+#
+# Can contain subdirectories, e.g. kernel/fs.
+#
+# If not called, defaults to "extra". On the kernel build system,
+# this is like setting INSTALL_MOD_DIR which has the same default
+# for external modules.
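+#
+# A minimal sketch of installing a single module manually (the module
+# path and install directory are hypothetical):
+# @CODE
+# src_install() {
+# 	linux_moduleinto kernel/fs
+# 	linux_domodule build/myfs.ko
+#
+# 	modules_post_process
+# 	einstalldocs
+# }
+# @CODE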
+linux_moduleinto() {
+ debug-print-function ${FUNCNAME[0]} "${@}"
+ _modules_check_function ${#} 1 1 "<install-dir>" || return 0
+ _MODULES_GLOBAL[moduleinto]=${1}
+}
+
+# @FUNCTION: modules_post_process
+# @USAGE: [<path>]
+# @DESCRIPTION:
+# Strip, sign, verify, and compress all .ko modules found under
+# <path>. Should typically *not* be called directly as it will
+# be run by linux-mod-r1_src_install. This is intended for use
+# when modules were installed some other way.
+#
+# <path> should exist under ${ED}.
+# Defaults to /lib/modules/${KV_FULL}.
+#
+# Filenames may change due to compression, so any operations on
+# these should be performed beforehand.
+#
+# Warning: This will abort if no modules are found, which can happen
+# if modules were unexpectedly pre-compressed possibly due to using
+# make install without passing MODULES_MAKEARGS to disable it.
+modules_post_process() {
+ debug-print-function ${FUNCNAME[0]} "${@}"
+ _modules_check_function ${#} 0 1 '[<path>]' || return 0
+ [[ ${EBUILD_PHASE} == install ]] ||
+ die "${FUNCNAME[0]} can only be called in the src_install phase"
+
+ local path=${ED}${1-/lib/modules/${KV_FULL}}
+ local -a mods
+ [[ -d ${path} ]] && mapfile -td '' mods < <(
+ find "${path}" -type f -name '*.ko' -print0 || die
+ )
+ (( ${#mods[@]} )) ||
+ die "${FUNCNAME[0]} was called with no installed modules under ${path}"
+
+ # TODO?: find way for sane use with dracut (its 90kernel-modules-extra
+ # parses depmod.d files directly and assumes should include its modules
+ # which can lead to unnecessarily increased size or stale modules)
+# _modules_process_depmod.d "${mods[@]#"${path}/"}"
+
+ _modules_process_strip "${mods[@]}"
+ _modules_process_sign "${mods[@]}"
+ _modules_sanity_modversion "${mods[@]}" # after strip/sign in case broke it
+ _modules_process_compress "${mods[@]}"
+
+ _MODULES_GLOBAL[ran:post_process]=1
+}
+
+# @ECLASS_VARIABLE: _MODULES_GLOBAL
+# @INTERNAL
+# @DESCRIPTION:
+# General use associative array to avoid defining separate globals.
+declare -gA _MODULES_GLOBAL=()
+
+# @ECLASS_VARIABLE: _MODULES_INSTALL
+# @INTERNAL
+# @DESCRIPTION:
+# List of modules from linux-mod-r1_src_compile to be installed.
+declare -gA _MODULES_INSTALL=()
+
+# @FUNCTION: _modules_check_function
+# @USAGE: [<args-count> <args-min> <args-max> [<usage-string>]]
+# @RETURN: 0 or 1 if caller should do nothing
+# @INTERNAL
+# @DESCRIPTION:
+# Checks for MODULES_OPTIONAL_IUSE, and aborts if the number of arguments
+# does not add up or if it was called before linux-mod-r1_pkg_setup.
+_modules_check_function() {
+ [[ -z ${MODULES_OPTIONAL_IUSE} ]] ||
+ use "${MODULES_OPTIONAL_IUSE#+}" || return 1
+
+ [[ ${#} == 0 || ${1} -ge ${2} && ( ! ${3} || ${1} -le ${3} ) ]] ||
+ die "Usage: ${FUNCNAME[1]} ${4-(no arguments)}"
+
+ [[ -v _MODULES_GLOBAL[ran:pkg_setup] ]] ||
+ die "${FUNCNAME[1]} was called without running linux-mod-r1_pkg_setup"
+}
+
+# @FUNCTION: _modules_check_migration
+# @INTERNAL
+# @DESCRIPTION:
+# Aborts if obsolete variables from the linux-mod-r0 eclass are seen
+# in use, likely due to an incomplete migration. This function should
+# eventually be removed after linux-mod-r0 is @DEAD, so as not to fail
+# needlessly if users happen to have these in their environment, given
+# the naming for some is a bit generic.
+_modules_check_migration() {
+ _modules_check_var() {
+ [[ -z ${!1} ]] ||
+ die "${1} is obsolete, see ${2} in linux-mod-r1 eclass docs"
+ }
+ # the 'I' on this one is notably sneaky and could silently be ignored
+ _modules_check_var MODULES_OPTIONAL_USE MODULES_OPTIONAL_IUSE
+ _modules_check_var MODULES_OPTIONAL_USE_IUSE_DEFAULT MODULES_OPTIONAL_IUSE
+ _modules_check_var BUILD_PARAMS modargs
+ _modules_check_var BUILD_TARGETS modlist
+ _modules_check_var MODULE_NAMES modlist
+ [[ -z ${!MODULESD_*} ]] ||
+ die "MODULESD_* variables are no longer supported, replace by handcrafted .conf files if needed"
+
+ # Ignored variables:
+	# - BUILD_FIXES: seen in some ebuilds but was undocumented, and linux-info
+	#   still sets it, preventing it from being blocked entirely
+ # - ECONF_PARAMS: documented but was a no-op in linux-mod too
+}
+
+# @FUNCTION: _modules_prepare_kernel
+# @INTERNAL
+# @DESCRIPTION:
+# Handles linux-info bits to provide usable sources, KV_ variables,
+# and CONFIG_CHECK use.
+_modules_prepare_kernel() {
+ get_version
+
+ # linux-info allows skipping checks if SKIP_KERNEL_CHECK is set and
+ # then require_configured_kernel will not abort, but no sources means
+ # 100% failure for building modules and so just abort now (the proper
+ # way to allow skipping sources here is MODULES_OPTIONAL_IUSE)
+ [[ -n ${KV_FULL} ]] ||
+ die "kernel sources are required to build kernel modules"
+
+ require_configured_kernel
+
+ _modules_sanity_kernelbuilt
+ _modules_sanity_kernelversion
+
+ # note: modules-specific check_modules_supported could probably be
+ # removed from linux-info in the future as this is a sufficient check
+ local CONFIG_CHECK="${CONFIG_CHECK} MODULES"
+
+ # kernel will not typically know about symbols we use (bug #591832),
+ # but stay non-fatal if kernel has an exception list set (bug #759238)
+ # note: possible to bypass either way with CHECKCONFIG_DONOTHING=1
+ if [[ $(linux_chkconfig_string UNUSED_KSYMS_WHITELIST) == \"+(?)\" ]]; then
+ CONFIG_CHECK+=" ~!TRIM_UNUSED_KSYMS"
+ else
+ CONFIG_CHECK+=" !TRIM_UNUSED_KSYMS"
+ fi
+
+ linux-info_pkg_setup
+
+ if use dist-kernel &&
+ ! has_version "~virtual/dist-kernel-${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}"
+ then
+ ewarn
+ ewarn "The kernel modules in ${CATEGORY}/${PN} are being built for"
+ ewarn "kernel version ${KV_FULL}. But this does not match the"
+ ewarn "installed version of virtual/dist-kernel."
+ ewarn
+ ewarn "If this is not intentional, the problem may be corrected by"
+ ewarn "using \"eselect kernel\" to set the default kernel version to"
+ ewarn "the same version as the installed version of virtual/dist-kernel."
+ ewarn
+ ewarn "If the distribution kernel is being downgraded, ensure that"
+ ewarn "virtual/dist-kernel is also downgraded to the same version"
+ ewarn "before rebuilding external kernel modules."
+ ewarn
+ fi
+}
+
+# @FUNCTION: _modules_prepare_sign
+# @INTERNAL
+# @DESCRIPTION:
+# Determines arguments to pass to sign-file (hash/keys), and performs
+# basic sanity checks to abort early if signing does not look possible.
+_modules_prepare_sign() {
+ use modules-sign || return 0
+
+ _modules_sign_die() {
+ eerror "USE=modules-sign requires additional configuration, please see the"
+ eerror "kernel[1] documentation and the linux-mod-r1 eclass[2] user variables."
+ eerror "[1] https://www.kernel.org/doc/html/v${KV_MAJOR}.${KV_MINOR}/admin-guide/module-signing.html"
+ eerror "[2] https://devmanual.gentoo.org/eclass-reference/linux-mod-r1.eclass/index.html"
+ die "USE=modules-sign is set but ${*}"
+ }
+
+ linux_chkconfig_present MODULE_SIG ||
+ _modules_sign_die "CONFIG_MODULE_SIG is not set in the kernel"
+
+ if [[ -z ${MODULES_SIGN_HASH} ]]; then
+ : "$(linux_chkconfig_string MODULE_SIG_HASH)"
+ MODULES_SIGN_HASH=${_//\"}
+ [[ -n ${MODULES_SIGN_HASH} ]] ||
+ _modules_sign_die "CONFIG_MODULE_SIG_HASH is not set in the kernel"
+ fi
+
+ if [[ -z ${MODULES_SIGN_KEY} ]]; then
+ : "$(linux_chkconfig_string MODULE_SIG_KEY)"
+ MODULES_SIGN_KEY=${_//\"}
+ [[ -n ${MODULES_SIGN_KEY} ]] ||
+ _modules_sign_die "CONFIG_MODULE_SIG_KEY is not set in the kernel"
+ fi
+
+ [[ ${MODULES_SIGN_KEY} != @(/|pkcs11:)* ]] &&
+ MODULES_SIGN_KEY=${KV_OUT_DIR}/${MODULES_SIGN_KEY}
+ [[ ${MODULES_SIGN_CERT} != /* ]] &&
+ MODULES_SIGN_CERT=${KV_OUT_DIR}/${MODULES_SIGN_CERT}
+
+ # assumes users know what they are doing if using a pkcs11 URI
+ [[ ${MODULES_SIGN_KEY} == pkcs11:* || -f ${MODULES_SIGN_KEY} ]] ||
+ _modules_sign_die "the private key '${MODULES_SIGN_KEY}' was not found"
+ [[ -f ${MODULES_SIGN_CERT} ]] ||
+ _modules_sign_die "the public key certificate '${MODULES_SIGN_CERT}' was not found"
+}
+
+# @FUNCTION: _modules_prepare_toolchain
+# @INTERNAL
+# @DESCRIPTION:
+# Sets KERNEL_{CC,CXX,LD,AR,NM,OBJCOPY,OBJDUMP,READELF,STRIP} based on
+# the kernel configuration and KERNEL_CHOST (also set if missing) that
+# *should* be usable to build modules.
+#
+# Tries to match compiler type (gcc or clang), and major version. Will
+# inform if matching was not possible, likely due to the compiler not
+# being installed. Users can set KERNEL_ variables themselves to override.
+#
+# These variables are normally manipulated by the kernel's LLVM=1 with
+# the exception of CXX, which is included anyway given *some* out-of-tree
+# modules use it, e.g. nvidia-drivers[kernel-open].
+_modules_prepare_toolchain() {
+ # note that the kernel adds -m32/-m64/-m elf_x86_64/etc... for, e.g.
+	# toolchains defaulting to x32, but may need automagic here if a
+	# different toolchain such as sys-devel/kgcc64 is needed
+ [[ -z ${KERNEL_CHOST} ]] && linux_chkconfig_present 64BIT &&
+ case ${CHOST} in
+ # matching kernel-build.eclass, see for details
+ hppa2.0-*) KERNEL_CHOST=${CHOST/2.0/64};;
+ esac
+
+ # recognizing KERNEL_CHOST given CROSS_COMPILE seems too generic here,
+ # but should rarely be necessary unless different userland and kernel
+ : "${KERNEL_CHOST:=${CHOST}}"
+
+ einfo "Preparing ${KERNEL_CHOST} toolchain for kernel modules (override with KERNEL_CHOST) ..."
+
+ _modules_tc_best() {
+ [[ -z ${!1} ]] && read -r ${1} < <(type -P -- "${@:2}")
+ }
+
+ local gccv clangv tool
+ if linux_chkconfig_present CC_IS_GCC; then
+ gccv=$(linux_chkconfig_string GCC_VERSION)
+ gccv=${gccv::2} # major version, will break on gcc-100...
+ # chost-gcc-ver > chost-gcc > gcc-ver > gcc
+ _modules_tc_best KERNEL_CC {"${KERNEL_CHOST}-",}gcc{"-${gccv}",}
+ _modules_tc_best KERNEL_CXX {"${KERNEL_CHOST}-",}g++{"-${gccv}",}
+ # unknown what was used exactly here, but prefer non-llvm with gcc
+ for tool in AR NM OBJCOPY OBJDUMP READELF STRIP; do
+ _modules_tc_best KERNEL_${tool} \
+ {"${KERNEL_CHOST}-",}{gcc-,}${tool,,}
+ done
+ elif linux_chkconfig_present CC_IS_CLANG; then
+ clangv=$(linux_chkconfig_string CLANG_VERSION)
+ clangv=${clangv::2}
+ # like gcc, but try directories to get same version on all tools
+ # (not using get_llvm_prefix to avoid conflicts with ebuilds using
+ # llvm slots for non-modules reasons, e.g. sets llvm_check_deps)
+ _modules_tc_best KERNEL_CC \
+ {"${BROOT}/usr/lib/llvm/${clangv}/bin/",}{"${KERNEL_CHOST}-",}clang{"-${clangv}",}
+ _modules_tc_best KERNEL_CXX \
+ {"${BROOT}/usr/lib/llvm/${clangv}/bin/",}{"${KERNEL_CHOST}-",}clang++{"-${clangv}",}
+ for tool in AR NM OBJCOPY OBJDUMP READELF STRIP; do
+ _modules_tc_best KERNEL_${tool} \
+ {"${BROOT}/usr/lib/llvm/${clangv}/bin/",}{"${KERNEL_CHOST}-",}{llvm-,}${tool,,}
+ done
+ fi
+
+ if linux_chkconfig_present LD_IS_BFD; then
+ _modules_tc_best KERNEL_LD {"${KERNEL_CHOST}-",}ld.bfd
+ elif linux_chkconfig_present LD_IS_LLD; then
+ # also match with clang if it was used
+ _modules_tc_best KERNEL_LD \
+ {${clangv+"${BROOT}/usr/lib/llvm/${clangv}/bin/"},}{"${KERNEL_CHOST}-",}ld.lld
+ fi
+
+ # if any variables are still empty, fallback to normal defaults
+ local CHOST=${KERNEL_CHOST}
+ : "${KERNEL_CC:=$(tc-getCC)}"
+ : "${KERNEL_CXX:=$(tc-getCXX)}"
+ : "${KERNEL_LD:=$(tc-getLD)}"
+ : "${KERNEL_AR:=$(tc-getAR)}"
+ : "${KERNEL_NM:=$(tc-getNM)}"
+ : "${KERNEL_OBJCOPY:=$(tc-getOBJCOPY)}"
+ : "${KERNEL_OBJDUMP:=$(tc-getOBJDUMP)}"
+ : "${KERNEL_READELF:=$(tc-getREADELF)}"
+ : "${KERNEL_STRIP:=$(tc-getSTRIP)}"
+
+ # for toolchain-funcs, uses CPP > CC but set both not to make assumptions
+ local CC=${KERNEL_CC} CPP="${KERNEL_CC} -E" LD=${KERNEL_LD}
+
+ # show results, skip line wrap to avoid standing out too much
+ einfo "Toolchain picked for kernel modules (override with KERNEL_CC, _LD, ...):"\
+ "'${KERNEL_CC}' '${KERNEL_CXX}' '${KERNEL_LD}' '${KERNEL_AR}'"\
+ "'${KERNEL_NM}' '${KERNEL_OBJCOPY}' '${KERNEL_OBJDUMP}'"\
+ "'${KERNEL_READELF}' '${KERNEL_STRIP}'"
+
+ # hack: kernel adds --thinlto-cache-dir to KBUILD_LDFLAGS with ThinLTO
+ # resulting in sandbox violations and we cannot safely override that
+ # variable, using *both* {LDFLAGS_MODULE,ldflags-y}=--thinlto-cache-dir=
+ # can work but raises concerns about breaking packages that may use these
+ if linux_chkconfig_present LTO_CLANG_THIN && tc-ld-is-lld; then
+ KERNEL_LD=${T}/linux-mod-r1_ld.lld
+ printf '#!/usr/bin/env sh\nexec %s "${@}" --thinlto-cache-dir=\n' \
+ "${LD}" > "${KERNEL_LD}" || die
+ chmod +x -- "${KERNEL_LD}" || die
+ fi
+
+ # warn if final picked CC type or major version is mismatching, arguably
+ # should be fatal but not forcing given it is not *always* an issue
+ local warn
+ if [[ -v gccv ]]; then
+ if ! tc-is-gcc; then
+ warn="gcc-${gccv} but\n '${KERNEL_CC}' is not gcc"
+ elif [[ $(gcc-major-version) -ne "${gccv}" ]]; then
+ warn="gcc-${gccv} but\n '${KERNEL_CC}' is gcc-$(gcc-major-version)"
+ fi
+ elif [[ -v clangv ]]; then
+ if ! tc-is-clang; then
+ warn="clang-${clangv} but\n '${KERNEL_CC}' is not clang"
+ elif [[ $(clang-major-version) -ne "${clangv}" ]]; then
+ warn="clang-${clangv} but\n '${KERNEL_CC}' is clang-$(clang-major-version)"
+ fi
+ fi
+
+ if [[ -v warn ]]; then
+ ewarn
+ ewarn "Warning: kernel ${KV_FULL} is built with ${warn}"
+ ewarn "This *could* result in build issues or other incompatibilities."
+ ewarn "It is recommended to either \`make clean\` and rebuild the kernel"
+ ewarn "with the current toolchain (for distribution kernels, re-installing"
+ ewarn "will do the same), or set the KERNEL_CC variable to point to the"
+ ewarn "same compiler. Note that when it is available, auto-selection is"
+ ewarn "attempted making the latter rarely needed."
+ ewarn
+ fi
+}
+
+# @FUNCTION: _modules_process_compress
+# @USAGE: <module>...
+# @INTERNAL
+# @DESCRIPTION:
+# If enabled in the kernel configuration, this compresses the given
+# modules using the same format.
+_modules_process_compress() {
+ use modules-compress || return
+
+ local -a compress
+ if linux_chkconfig_present MODULE_COMPRESS_XZ; then
+ compress=(
+ xz -q
+ --memlimit-compress=50%
+ --threads="$(makeopts_jobs)"
+ # match options from kernel's Makefile.modinst (bug #920837)
+ --check=crc32
+ --lzma2=dict=1MiB
+ )
+ elif linux_chkconfig_present MODULE_COMPRESS_GZIP; then
+ if type -P pigz &>/dev/null; then
+ compress=(pigz -p"$(makeopts_jobs)")
+ else
+ compress=(gzip)
+ fi
+ elif linux_chkconfig_present MODULE_COMPRESS_ZSTD; then
+ compress=(zstd -qT"$(makeopts_jobs)" --rm)
+ else
+ die "USE=modules-compress enabled but no MODULE_COMPRESS* configured"
+ fi
+
+	# could fail; assumes the commands that were needed for the kernel are available
+ einfo "Compressing modules (matching the kernel configuration) ..."
+ edob "${compress[@]}" -- "${@}"
+}
+
+# @FUNCTION: _modules_process_depmod.d
+# @USAGE: <relative-module-path>...
+# @INTERNAL
+# @DESCRIPTION:
+# Generate a depmod.d file to ensure priority if duplicate modules
+# exist, such as stale modules in different directories, or to
+# override the kernel's own modules.
+_modules_process_depmod.d() {
+ (
+ [[ ${SLOT%/*} == 0 ]] && slot= || slot=-${SLOT%/*}
+ insinto /lib/depmod.d
+ newins - ${PN}${slot}.conf < <(
+ echo "# Automatically generated by linux-mod-r1.eclass for ${CATEGORY}/${PN}"
+ for mod; do
+ [[ ${mod} =~ ^(.+)/(.+).ko$ ]] &&
+ echo "override ${BASH_REMATCH[2]} ${KV_FULL} ${BASH_REMATCH[1]}"
+ done
+ )
+ )
+}
+
+# @FUNCTION: _modules_process_sign
+# @USAGE: <module>...
+# @INTERNAL
+# @DESCRIPTION:
+# Cryptographically signs the given modules when USE=modules-sign is
+# enabled.
+_modules_process_sign() {
+ use modules-sign || return 0
+
+ # scripts/sign-file used to be a perl script but is now written in C,
+ # and it could either be missing or broken given it links with openssl
+ # (no subslot rebuilds on kernel sources), trivial to compile regardless
+ local sign=
+ if [[ -f ${KV_DIR}/scripts/sign-file.c ]]; then
+ sign=${T}/linux-mod-r1_sign-file
+ (
+ # unfortunately using the kernel's Makefile is inconvenient (no
+ # simple build target for this), may need revisiting on changes
+ einfo "Compiling sign-file ..."
+ tc-export_build_env
+ nonfatal edob $(tc-getBUILD_CC) ${BUILD_CFLAGS} ${BUILD_CPPFLAGS} \
+ $($(tc-getBUILD_PKG_CONFIG) --cflags libcrypto) \
+ ${BUILD_LDFLAGS} -o "${sign}" "${KV_DIR}"/scripts/sign-file.c \
+ $($(tc-getBUILD_PKG_CONFIG) --libs libcrypto || echo -lcrypto)
+ ) || {
+ einfo "Trying fallback ..."
+ sign=
+ }
+ fi
+
+ if [[ -z ${sign} ]]; then
+ if [[ -x ${KV_OUT_DIR}/scripts/sign-file ]]; then
+ sign=${KV_OUT_DIR}/scripts/sign-file # try if built
+ elif [[ -x ${KV_DIR}/scripts/sign-file ]]; then
+ sign=${KV_DIR}/scripts/sign-file # old kernel (<linux-4.4)
+ else
+ die "USE=modules-sign is set but '${KV_DIR}/scripts/sign-file.c' is not usable"
+ fi
+ fi
+
+ einfo "Signing modules ..."
+
+ # good odds the private key has limited access, and with the kernel's
+ # automated method it is likely to be -rw------- root:root (but is
+ # rarely an issue given src_install *normally* runs as root)
+ [[ ${MODULES_SIGN_KEY} == pkcs11:* || -r ${MODULES_SIGN_KEY} ]] ||
+ die "USE=modules-sign is set but lacking read access to the signing key at '${MODULES_SIGN_KEY}'"
+
+ local mod
+ for mod; do
+ edob "${sign}" "${MODULES_SIGN_HASH}" "${MODULES_SIGN_KEY}" \
+ "${MODULES_SIGN_CERT}" "${mod}"
+ done
+
+ # unset to at least be out of the environment file in, e.g. shared binpkgs
+ unset KBUILD_SIGN_PIN
+}
+
+# @FUNCTION: _modules_process_strip
+# @USAGE: <module>...
+# @INTERNAL
+# @DESCRIPTION:
+# Strips the given modules of unneeded symbols when USE=strip is
+# enabled, and informs the package manager not to regardless.
+_modules_process_strip() {
+ # letting the package manager handle this complicates scenarios
+ # where we want to either compress the pre-stripped module, or
+ # sign the module without its signature becoming invalid on merge
+ dostrip -x "${@#"${ED}"}"
+
+ if use strip; then
+ einfo "Stripping modules ..."
+ edob "${KERNEL_STRIP}" --strip-unneeded -- "${@}"
+ fi
+}
+
+# @FUNCTION: _modules_sanity_gccplugins
+# @INTERNAL
+# @DESCRIPTION:
+# Performs a basic build test to detect GCC plugins mismatch issues
+# and, if so, aborts with explanation given it often confuses users.
+#
+# Using mismatching gcc can sometime work to build modules, but if
+# GCC plugins are enabled it will almost always be an error.
+#
+# Note: may need occasional review to ensure the test still works by:
+# enabling a GCC plugin in the kernel, building with older GCC,
+# then building a module by setting KERNEL_CC=gcc-<major-version+1>.
+_modules_sanity_gccplugins() {
+ linux_chkconfig_present GCC_PLUGINS || return 0
+
+ local tmp=${T}/linux-mod-r1_gccplugins
+ mkdir -p -- "${tmp}" || die
+
+ echo "obj-m += test.o" > "${tmp}"/Kbuild || die
+ :> "${tmp}"/test.c || die
+
+ # always fails, but interested in the stderr messages
+ local output=$(
+ cd -- "${KV_OUT_DIR}" && # fwiw skip non-POSIX -C in eclasses
+ LC_ALL=C nonfatal emake "${MODULES_MAKEARGS[@]}" M="${tmp}" \
+ 2>&1 >/dev/null
+ )
+
+ if [[ ${output} == *"error: incompatible gcc/plugin version"* ]]; then
+ eerror "GCC_PLUGINS is enabled in the kernel and plugin version mismatch issues"
+ eerror "have been detected. Please \`make clean\` and rebuild the kernel using"
+ eerror "the current version of GCC (or re-install for distribution kernels)."
+ die "kernel ${KV_FULL} needs to be rebuilt"
+ fi
+}
+
+# @FUNCTION: _modules_sanity_kernelbuilt
+# @INTERNAL
+# @DESCRIPTION:
+# Checks if the kernel seems fully built by having a Module.symvers
+# that is also readable, abort otherwise.
+#
+# About readability: occasionally users build their kernel as root with
+# umask 0077, and then the package manager's user cannot read the built
+# files, leaving them confused.
+#
+# Given the user and access can vary between phases (notably src_compile),
+# it makes sense to run this check more than once.
+#
+# Note:
+# This is an alternate version of linux-info's check_kernel_built
+# which probably will not need to exist there once linux-mod-r0 is
+# gone; the error it gives is also modules-specific and fits better here.
+#
+# The old check_kernel_built checks version.h and suggests running
+# modules_prepare if missing, but that does not create Module.symvers.
+# Nowadays the kernel makes unresolved symbols fatal by default,
+# meaning that all modules will fail unless KBUILD_MODPOST_WARN=1,
+# which seems questionable to support. So rather than version.h, this
+# checks and requires Module.symvers, and suggests a full build if
+# missing (if they really must, users can bypass by touching the file).
+# nvidia-drivers (for one) further checks this file directly to do
+# configure tests that will break badly without it.
+_modules_sanity_kernelbuilt() {
+ local symvers=${KV_OUT_DIR}/Module.symvers
+
+ if [[ ! -f ${symvers} ]]; then
+ eerror "'${symvers}' was not found implying that the"
+ eerror "linux-${KV_FULL} tree at that location has not been built."
+ eerror
+ eerror "Please verify that this is the intended kernel version, then perform"
+ eerror "a full build[1] (i.e. make && make modules_install && make install)."
+ eerror
+ eerror "Alternatively, consider a distribution kernel[2] that does not need"
+ eerror "these manual steps (e.g. sys-kernel/gentoo-kernel or gentoo-kernel-bin)."
+ eerror
+ eerror "[1] https://wiki.gentoo.org/wiki/Kernel/Configuration#Build"
+ eerror "[2] https://wiki.gentoo.org/wiki/Project:Distribution_Kernel"
+ die "built kernel sources are required to build kernel modules"
+ fi
+
+ if [[ ! -r ${symvers} ]]; then
+ eerror "'${symvers}' exists but cannot be read by the"
+ eerror "user id(${EUID}) of the package manager, likely implying no world"
+ eerror "read access permissions:"
+ eerror
+ eerror " $(ls -l -- "${symvers}")"
+ eerror
+ eerror "Causes may vary, but a common one is building the kernel with a umask"
+ eerror "value of '0077' rather than the more typical '0022' (run the \`umask\`"
+		eerror "command to confirm, as root if it was used to build the kernel)."
+ eerror
+ eerror "Many other files are likely affected and will lead to build failures."
+ eerror "It is recommended to clean the sources and rebuild with \`umask 0022\`"
+ eerror "rather than attempt to fix the permissions manually."
+ die "no read access permission to the generated kernel files"
+ fi
+}
+
+# @FUNCTION: _modules_sanity_kernelversion
+# @INTERNAL
+# @DESCRIPTION:
+# Prints a warning if the kernel version is greater than
+# MODULES_KERNEL_MAX (while only considering the same number of version
+# components), or aborts if it is less than MODULES_KERNEL_MIN.
+_modules_sanity_kernelversion() {
+ local kv=${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}
+
+ if [[ -n ${MODULES_KERNEL_MIN} ]] &&
+ ver_test "${kv}" -lt "${MODULES_KERNEL_MIN}"
+ then
+		eerror "${P} requires a kernel version of at least ${MODULES_KERNEL_MIN},"
+ eerror "but the current kernel is ${KV_FULL}. Please update."
+ die "kernel ${KV_FULL} is too old"
+ fi
+
+ if [[ -n ${MODULES_KERNEL_MAX} ]]; then
+ : "${MODULES_KERNEL_MAX//[^.]/}"
+ local -i count=${#_}
+
+ if ver_test "$(ver_cut 1-$((count+1)) "${kv}")" \
+ -gt "${MODULES_KERNEL_MAX}"
+ then
+ # add .x to 1 missing component to make, e.g. <=1.2.x more natural,
+ # not <1.3 given users sometimes see it as 1.3 support at a glance
+ local max=${MODULES_KERNEL_MAX}
+ [[ ${count} -lt 2 ]] && max+=.x
+
+ ewarn
+ ewarn " *** WARNING *** "
+ ewarn
+ ewarn "${PN} is known to break easily with new kernel versions and,"
+ ewarn "with the current kernel (${KV_FULL}), it was either hardly"
+ ewarn "tested or is known broken. It is recommended to use one of:"
+ ewarn
+ # fwiw we do not know what is *actually* used or wanted even with
+ # the USE, so stay a bit vague and always mention both dist+sources
+ if use dist-kernel; then
+ ewarn " <=virtual/dist-kernel-${max} or"
+ else
+ ewarn " <=sys-kernel/gentoo-kernel-${max} or"
+ fi
+ ewarn " <=sys-kernel/gentoo-sources-${max}"
+ ewarn
+			ewarn "or equivalent rather than filing downstream bug reports if running"
+			ewarn "into issues, then wait for upstream fixes and a new release. Ideally,"
+ ewarn "with out-of-tree modules, use an LTS (Long Term Support) kernel"
+ ewarn "branch[1]. If in doubt, Gentoo's stable kernels are always LTS"
+ ewarn "and can be easily used even on ~testing systems."
+ ewarn
+ ewarn "[1] https://www.kernel.org/category/releases.html"
+ ewarn
+ fi
+ fi
+}
+
+# @FUNCTION: _modules_sanity_modversion
+# @USAGE: <module>...
+# @INTERNAL
+# @DESCRIPTION:
+# Checks that the passed module(s) do not seem obviously broken and that
+# the built-in versions match ${KV_FULL}, otherwise dies with an explanation.
+#
+# If receiving a bug report with a version error, an easy way to reproduce
+# is to set KERNEL_DIR to the sources of a different kernel version than
+# both the one pointed to by /usr/src/linux and `uname -r`. Refer to
+# linux-mod-r1_src_compile's modargs in the eclass docs for fixing it.
+_modules_sanity_modversion() {
+ local mod ver
+ for mod; do
+ # modinfo can read different-arch modules, being fatal *should* be safe
+ # and serve as a basic sanity check to ensure the module is valid
+ read -rd ' ' ver < <(
+ LC_ALL=C modinfo -F vermagic -- "${mod}" ||
+ die "modinfo failed to read module '${mod}' (broken module?)"
+ )
+ [[ -n ${ver} ]] ||
+ die "modinfo found no kernel version in '${mod}' (broken module?)"
+
+ if [[ ${ver} != "${KV_FULL}" ]]; then
+			eerror "A module seems to have been built for kernel version '${ver}'"
+ eerror "while it was meant for '${KV_FULL}'. This may indicate an"
+ eerror "ebuild issue (e.g. used runtime \`uname -r\` kernel rather than"
+ eerror "the chosen sources). Please report this to the ebuild's maintainer."
+ die "module and source version mismatch in '${mod}'"
+ fi
+ done
+}
+
+# @FUNCTION: _modules_set_makeargs
+# @INTERNAL
+# @DESCRIPTION:
+# Sets the MODULES_MAKEARGS global array.
+_modules_set_makeargs() {
+ MODULES_MAKEARGS=(
+ ARCH="$(tc-arch-kernel)"
+
+ V=1
+ # normally redundant with V, but some custom Makefiles override it
+ KBUILD_VERBOSE=1
+
+ # unrealistic when building modules that often have slow releases,
+ # but note that the kernel will still pass some -Werror=bad-thing
+ CONFIG_WERROR=
+
+		# these are only needed if using these arguments for installing; let
+		# the eclass handle strip, sign, compress, and depmod (CONFIG_ should
+		# have no impact on building, it is only used by Makefile.modinst)
+ CONFIG_MODULE_{SIG_ALL,COMPRESS_{GZIP,XZ,ZSTD}}=
+ DEPMOD=true #916587
+ STRIP=true
+ )
+
+ if [[ ! ${MODULES_I_WANT_FULL_CONTROL} ]]; then
+		# many of these are unlikely to be useful here, but still trying to be
+		# complete given we never know what out-of-tree modules may use
+ MODULES_MAKEARGS+=(
+ # wrt bug #550428, given most toolchain variables are being passed to
+ # make, setting CROSS in the environment would change very little
+ # (instead set KERNEL_CHOST which will affect other variables,
+			# or MODULES_I_WANT_FULL_CONTROL if none of this is wanted)
+ CROSS_COMPILE="${KERNEL_CHOST}-"
+
+ HOSTCC="$(tc-getBUILD_CC)"
+ HOSTCXX="$(tc-getBUILD_CXX)"
+
+ # fwiw this function is not meant to pollute the environment
+ HOSTCFLAGS="$(tc-export_build_env; echo "${BUILD_CFLAGS}")"
+ HOSTCXXFLAGS="$(tc-export_build_env; echo "${BUILD_CXXFLAGS}")"
+ HOSTLDFLAGS="$(tc-export_build_env; echo "${BUILD_LDFLAGS}")"
+
+ HOSTPKG_CONFIG="$(tc-getBUILD_PKG_CONFIG)"
+
+ CC="${KERNEL_CC}"
+ CXX="${KERNEL_CXX}"
+ LD="${KERNEL_LD}"
+ AR="${KERNEL_AR}"
+ NM="${KERNEL_NM}"
+ OBJCOPY="${KERNEL_OBJCOPY}"
+ OBJDUMP="${KERNEL_OBJDUMP}"
+ READELF="${KERNEL_READELF}"
+ )
+ fi
+
+ # eval is to handle quoted spaces, die is for syntax errors
+ eval "MODULES_MAKEARGS+=( ${MODULES_EXTRA_EMAKE} )" || die
+}
+
+# @FUNCTION: _modules_update_depmod
+# @INTERNAL
+# @DESCRIPTION:
+# If possible, update module dependencies using depmod and System.map,
+# otherwise prompt the user to handle it. System.map may notably no longer
+# be available on binary merges.
+_modules_update_depmod() {
+ # prefer /lib/modules' path given it is what depmod operates on,
+ # and is mostly foolproof when it comes to ROOT (relative symlink)
+ local map=${EROOT}/lib/modules/${KV_FULL}/build/System.map
+
+ if [[ ! -f ${map} ]]; then
+ # KV_OUT_DIR may still be right even on a different system, but state
+ # of (E)ROOT is unknown, e.g. could be from KERNEL_DIR=${OLDROOT}/...
+ map=${KV_OUT_DIR}/System.map
+
+ # last resort, typical but may not be mounted/readable/installed
+ [[ ! -f ${map} ]] &&
+ map=${EROOT}/boot/System.map-${KV_FULL}
+ fi
+
+ einfo "Updating module dependencies for kernel ${KV_FULL} ..."
+ if [[ -f ${map} ]]; then
+ local depmodargs=( -ae -F "${map}" "${KV_FULL}" )
+
+ # for nicer postinst display, keep command shorter if EROOT is unset
+ [[ ${EROOT} ]] &&
+ depmodargs+=(
+ -b "${EROOT}"
+
+ # EROOT from -b is not used when looking for configuration
+ # directories, so pass the whole list from kmod's tools/depmod.c
+ --config="${EROOT}"/{etc,run,usr/local/lib,lib}/depmod.d
+ )
+
+ nonfatal edob depmod "${depmodargs[@]}" && return 0
+ else
+ eerror
+		eerror "System.map for kernel ${KV_FULL} was not found, which may be due to the"
+ eerror "built kernel sources no longer being available and lacking the fallback:"
+ eerror
+ eerror "${EROOT}/boot/System.map-${KV_FULL}"
+ fi
+ eerror
+ eerror "Some modules may not load without updating manually using depmod."
+}
+
+fi
+
+EXPORT_FUNCTIONS pkg_setup src_compile src_install pkg_postinst
diff --git a/eclass/linux-mod.eclass b/eclass/linux-mod.eclass
index 6a820371b767..c71ace53aa00 100644
--- a/eclass/linux-mod.eclass
+++ b/eclass/linux-mod.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: linux-mod.eclass
@@ -7,9 +7,10 @@
# @AUTHOR:
# John Mylchreest <johnm@gentoo.org>,
# Stefan Schweizer <genstef@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: linux-info
# @BLURB: It provides the functionality required to install external modules against a kernel source tree.
+# @DEPRECATED: linux-mod-r1.eclass
# @DESCRIPTION:
# This eclass is used to interface with linux-info.eclass in such a way
# to provide the functionality and initial functions
@@ -38,7 +39,7 @@
# @DESCRIPTION:
# A string containing the directory of the target kernel sources. The default value is
# "/usr/src/linux"
-: ${KERNEL_DIR:=/usr/src/linux}
+: "${KERNEL_DIR:=/usr/src/linux}"
# @ECLASS_VARIABLE: ECONF_PARAMS
# @DEFAULT_UNSET
@@ -54,7 +55,7 @@
# @ECLASS_VARIABLE: BUILD_TARGETS
# @DESCRIPTION:
# It's a string with the build targets to pass to make. The default value is "clean module"
-: ${BUILD_TARGETS:=clean module}
+: "${BUILD_TARGETS:=clean module}"
# @ECLASS_VARIABLE: MODULE_NAMES
# @DEFAULT_UNSET
@@ -149,12 +150,14 @@
# @DESCRIPTION:
# It's a read-only variable. It contains the extension of the kernel modules.
-case ${EAPI:-0} in
- [67])
- inherit eutils
- ;;
- 8)
- ;;
+# @ECLASS_VARIABLE: KV_OBJ_COMPRESS_EXT
+# @INTERNAL
+# @DESCRIPTION:
+# Read-only variable. It contains the compression extension of the kernel
+# modules (.xz, .gz, .zst)
+
+case ${EAPI} in
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -163,17 +166,13 @@ _LINUX_MOD_ECLASS=1
# TODO: When adding support for future EAPIs, please audit this list
# for unused inherits and conditionalise them.
-inherit linux-info multilib toolchain-funcs
+inherit linux-info multilib multiprocessing toolchain-funcs
case ${MODULES_OPTIONAL_USE_IUSE_DEFAULT:-n} in
[nNfF]*|[oO][fF]*|0|-) _modules_optional_use_iuse_default='' ;;
*) _modules_optional_use_iuse_default='+' ;;
esac
-[[ -n "${_modules_optional_use_iuse_default}" ]] && case ${EAPI:-0} in
- 0) die "EAPI=${EAPI} is not supported with MODULES_OPTIONAL_USE_IUSE_DEFAULT due to lack of IUSE defaults" ;;
-esac
-
IUSE="dist-kernel
${MODULES_OPTIONAL_USE:+${_modules_optional_use_iuse_default}}${MODULES_OPTIONAL_USE}"
SLOT="0"
@@ -185,8 +184,7 @@ RDEPEND="
)
${MODULES_OPTIONAL_USE:+)}"
DEPEND="${RDEPEND}
- ${MODULES_OPTIONAL_USE}${MODULES_OPTIONAL_USE:+? (}
- sys-apps/sed
+ ${MODULES_OPTIONAL_USE}${MODULES_OPTIONAL_USE:+? (}
kernel_linux? ( virtual/linux-sources virtual/libelf )
${MODULES_OPTIONAL_USE:+)}"
@@ -200,14 +198,13 @@ DEPEND="${RDEPEND}
use_m() {
debug-print-function ${FUNCNAME} $*
- # if we haven't determined the version yet, we need too.
- get_version;
+ # If we haven't determined the version yet, we need to.
+ get_version
- # if the kernel version is greater than 2.6.6 then we should use
+ # If the kernel version is greater than 2.6.6 then we should use
# M= instead of SUBDIRS=
- [ ${KV_MAJOR} -ge 3 ] && return 0
- [ ${KV_MAJOR} -eq 2 -a ${KV_MINOR} -gt 5 -a ${KV_PATCH} -gt 5 ] && \
- return 0 || return 1
+ [[ ${KV_MAJOR} -ge 3 ]] && return 0
+ [[ ${KV_MAJOR} -eq 2 && ${KV_MINOR} -gt 5 && ${KV_PATCH} -gt 5 ]]
}
# @FUNCTION: convert_to_m
@@ -217,10 +214,10 @@ use_m() {
convert_to_m() {
debug-print-function ${FUNCNAME} $*
- if use_m
- then
- [ ! -f "${1}" ] && \
+ if use_m; then
+ [[ ! -f "${1}" ]] && \
die "convert_to_m() requires a filename as an argument"
+
ebegin "Converting ${1/${WORKDIR}\//} to use M= instead of SUBDIRS="
sed -i 's:SUBDIRS=:M=:g' "${1}"
eend $?
@@ -234,12 +231,11 @@ convert_to_m() {
update_depmod() {
debug-print-function ${FUNCNAME} $*
- # if we haven't determined the version yet, we need too.
- get_version;
+ # If we haven't determined the version yet, we need to.
+ get_version
ebegin "Updating module dependencies for ${KV_FULL}"
- if [ -r "${KV_OUT_DIR}"/System.map ]
- then
+ if [[ -r "${KV_OUT_DIR}"/System.map ]]; then
depmod -ae -F "${KV_OUT_DIR}"/System.map -b "${ROOT:-/}" ${KV_FULL}
eend $?
else
@@ -258,8 +254,8 @@ update_depmod() {
move_old_moduledb() {
debug-print-function ${FUNCNAME} $*
- local OLDDIR="${ROOT%/}"/usr/share/module-rebuild
- local NEWDIR="${ROOT%/}"/var/lib/module-rebuild
+ local OLDDIR="${ROOT}"/usr/share/module-rebuild
+ local NEWDIR="${ROOT}"/var/lib/module-rebuild
if [[ -f "${OLDDIR}"/moduledb ]]; then
[[ ! -d "${NEWDIR}" ]] && mkdir -p "${NEWDIR}"
@@ -276,7 +272,7 @@ move_old_moduledb() {
update_moduledb() {
debug-print-function ${FUNCNAME} $*
- local MODULEDB_DIR="${ROOT%/}"/var/lib/module-rebuild
+ local MODULEDB_DIR="${ROOT}"/var/lib/module-rebuild
move_old_moduledb
if [[ ! -f "${MODULEDB_DIR}"/moduledb ]]; then
@@ -296,7 +292,7 @@ update_moduledb() {
remove_moduledb() {
debug-print-function ${FUNCNAME} $*
- local MODULEDB_DIR="${ROOT%/}"/var/lib/module-rebuild
+ local MODULEDB_DIR="${ROOT}"/var/lib/module-rebuild
move_old_moduledb
if grep -qs ${CATEGORY}/${PN}-${PVR} "${MODULEDB_DIR}"/moduledb ; then
@@ -311,15 +307,15 @@ remove_moduledb() {
set_kvobj() {
debug-print-function ${FUNCNAME} $*
- if kernel_is ge 2 6
- then
+ if kernel_is ge 2 6; then
KV_OBJ="ko"
else
KV_OBJ="o"
fi
+
# Do we really need to know this?
- # Lets silence it.
- # einfo "Using KV_OBJ=${KV_OBJ}"
+ # Let's silence it.
+ #einfo "Using KV_OBJ=${KV_OBJ}"
}
# @FUNCTION: get-KERNEL_CC
@@ -335,7 +331,7 @@ get-KERNEL_CC() {
fi
local kernel_cc
- if [ -n "${KERNEL_ABI}" ]; then
+ if [[ -n "${KERNEL_ABI}" ]]; then
# In future, an arch might want to define CC_$ABI
#kernel_cc="$(get_abi_CC)"
#[ -z "${kernel_cc}" ] &&
@@ -351,7 +347,7 @@ get-KERNEL_CC() {
# @USAGE: /path/to/the/modulename_without_extension
# @RETURN: A file in /etc/modprobe.d
# @DESCRIPTION:
-# This function will generate and install the neccessary modprobe.d file from the
+# This function will generate and install the necessary modprobe.d file from the
# information contained in the modules exported parms.
# (see the variables MODULESD_<modulename>_ENABLED, MODULESD_<modulename>_EXAMPLES,
# MODULESD_<modulename>_ALIASES, MODULESD_<modulename>_ADDITION and MODULESD_<modulename>_DOCS).
@@ -359,14 +355,13 @@ get-KERNEL_CC() {
# At the end the documentation specified with MODULESD_<modulename>_DOCS is installed.
generate_modulesd() {
debug-print-function ${FUNCNAME} $*
- [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+ [[ -n "${MODULES_OPTIONAL_USE}" ]] && use !${MODULES_OPTIONAL_USE} && return
- local currm_path currm currm_t t myIFS myVAR
- local module_docs module_enabled module_aliases \
+ local currm_path currm currm_t t myIFS myVAR
+ local module_docs module_enabled module_aliases \
module_additions module_examples module_modinfo module_opts
- for currm_path in ${@}
- do
+ for currm_path in ${@}; do
currm=${currm_path//*\/}
currm=$(echo ${currm} | tr '[:lower:]' '[:upper:]')
currm_t=${currm}
@@ -380,17 +375,16 @@ generate_modulesd() {
module_additions="$(eval echo \${#MODULESD_${currm_t}_ADDITIONS[*]})"
module_examples="$(eval echo \${#MODULESD_${currm_t}_EXAMPLES[*]})"
- [[ ${module_aliases} -eq 0 ]] && unset module_aliases
+ [[ ${module_aliases} -eq 0 ]] && unset module_aliases
[[ ${module_additions} -eq 0 ]] && unset module_additions
- [[ ${module_examples} -eq 0 ]] && unset module_examples
+ [[ ${module_examples} -eq 0 ]] && unset module_examples
# If we specify we dont want it, then lets exit, otherwise we assume
# that if its set, we do want it.
[[ ${module_enabled} == no ]] && return 0
# unset any unwanted variables.
- for t in ${!module_*}
- do
+ for t in ${!module_*}; do
[[ -z ${!t} ]] && unset ${t}
done
@@ -398,7 +392,7 @@ generate_modulesd() {
# OK so now if we have got this far, then we know we want to continue
# and generate the modprobe.d file.
- module_modinfo="$(modinfo -p ${currm_path}.${KV_OBJ})"
+ module_modinfo="$(modinfo -p ${currm_path}.${KV_OBJ}${KV_OBJ_COMPRESS_EXT})"
module_config="${T}/modulesd-${currm}"
ebegin "Preparing file for modprobe.d"
@@ -421,26 +415,23 @@ generate_modulesd() {
for((t=0; t<${module_aliases}; t++))
do
- echo "alias $(eval echo \${MODULESD_${currm}_ALIASES[$t]})" \
+ echo "alias $(eval echo \${MODULESD_${currm_t}_ALIASES[$t]})" \
>> "${module_config}"
done
echo '' >> "${module_config}"
fi
#-----------------------------------------------------------------------
- if [[ -n ${module_modinfo} ]]
- then
+ if [[ -n ${module_modinfo} ]]; then
echo >> "${module_config}"
echo "# Configurable module parameters" >> "${module_config}"
echo "# ------------------------------" >> "${module_config}"
myIFS="${IFS}"
IFS="$(echo -en "\n\b")"
- for t in ${module_modinfo}
- do
+ for t in ${module_modinfo}; do
myVAR="$(echo ${t#*:} | grep -o "[^ ]*[0-9][ =][^ ]*" | tail -1 | grep -o "[0-9]")"
- if [[ -n ${myVAR} ]]
- then
+ if [[ -n ${myVAR} ]]; then
module_opts="${module_opts} ${t%%:*}:${myVAR}"
fi
echo -e "# ${t%%:*}:\t${t#*:}" >> "${module_config}"
@@ -450,11 +441,9 @@ generate_modulesd() {
fi
#-----------------------------------------------------------------------
- if [[ $(eval echo \${MODULESD_${currm}_ALIASES[0]}) == guess ]]
- then
- # So lets do some guesswork eh?
- if [[ -n ${module_opts} ]]
- then
+ if [[ $(eval echo \${MODULESD_${currm_t}_ALIASES[0]}) == guess ]]; then
+ # So, let's do some guesswork, eh?
+ if [[ -n ${module_opts} ]]; then
echo "# For Example..." >> "${module_config}"
echo "# --------------" >> "${module_config}"
for t in ${module_opts}
@@ -463,24 +452,20 @@ generate_modulesd() {
done
echo '' >> "${module_config}"
fi
- elif [[ ${module_examples} -gt 0 ]]
- then
+ elif [[ ${module_examples} -gt 0 ]]; then
echo "# For Example..." >> "${module_config}"
echo "# --------------" >> "${module_config}"
- for((t=0; t<${module_examples}; t++))
- do
- echo "options $(eval echo \${MODULESD_${currm}_EXAMPLES[$t]})" \
+ for ((t=0; t<${module_examples}; t++)); do
+ echo "options $(eval echo \${MODULESD_${currm_t}_EXAMPLES[$t]})" \
>> "${module_config}"
done
echo '' >> "${module_config}"
fi
#-----------------------------------------------------------------------
- if [[ ${module_additions} -gt 0 ]]
- then
- for((t=0; t<${module_additions}; t++))
- do
- echo "$(eval echo \${MODULESD_${currm}_ADDITIONS[$t]})" \
+ if [[ ${module_additions} -gt 0 ]]; then
+ for ((t=0; t<${module_additions}; t++)); do
+ echo "$(eval echo \${MODULESD_${currm_t}_ADDITIONS[$t]})" \
>> "${module_config}"
done
echo '' >> "${module_config}"
@@ -511,8 +496,7 @@ find_module_params() {
local matched_offset=0 matched_opts=0 test="${@}" temp_var result
local i=0 y=0 z=0
- for((i=0; i<=${#test}; i++))
- do
+ for ((i=0; i<=${#test}; i++)); do
case ${test:${i}:1} in
\() matched_offset[0]=${i};;
\:) matched_opts=$((${matched_opts} + 1));
@@ -522,8 +506,7 @@ find_module_params() {
esac
done
- for((i=0; i<=${matched_opts}; i++))
- do
+ for ((i=0; i<=${matched_opts}; i++)); do
# i = offset were working on
# y = last offset
# z = current offset - last offset
@@ -557,7 +540,7 @@ find_module_params() {
# in the kernel and sets the object extension KV_OBJ.
linux-mod_pkg_setup() {
debug-print-function ${FUNCNAME} $*
- [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+ [[ -n "${MODULES_OPTIONAL_USE}" ]] && use !${MODULES_OPTIONAL_USE} && return
local is_bin="${MERGE_TYPE}"
@@ -570,12 +553,12 @@ linux-mod_pkg_setup() {
# External modules use kernel symbols (bug #591832)
CONFIG_CHECK+=" !TRIM_UNUSED_KSYMS"
- linux-info_pkg_setup;
+ linux-info_pkg_setup
require_configured_kernel
- check_kernel_built;
- strip_modulenames;
+ check_kernel_built
+ strip_modulenames
[[ -n ${MODULE_NAMES} ]] && check_modules_supported
- set_kvobj;
+ set_kvobj
}
# @FUNCTION: linux-mod_pkg_setup_binary
@@ -587,13 +570,13 @@ linux-mod_pkg_setup_binary() {
debug-print-function ${FUNCNAME} $*
local new_CONFIG_CHECK
# ~ needs always to be quoted, else bash expands it.
- for config in $CONFIG_CHECK ; do
+ for config in ${CONFIG_CHECK} ; do
optional='~'
[[ ${config:0:1} == "~" ]] && optional=''
new_CONFIG_CHECK="${new_CONFIG_CHECK} ${optional}${config}"
done
CONFIG_CHECK="${new_CONFIG_CHECK}"
- linux-info_pkg_setup;
+ linux-info_pkg_setup
}
# @FUNCTION: strip_modulenames
@@ -618,7 +601,7 @@ strip_modulenames() {
# Look at the description of these variables for more details.
linux-mod_src_compile() {
debug-print-function ${FUNCNAME} $*
- [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+ [[ -n "${MODULES_OPTIONAL_USE}" ]] && use !${MODULES_OPTIONAL_USE} && return
local modulename libdir srcdir objdir i n myABI="${ABI}"
set_arch_to_kernel
@@ -632,30 +615,22 @@ linux-mod_src_compile() {
local -x CROSS_COMPILE=${CROSS_COMPILE-${CHOST}-}
BUILD_TARGETS=${BUILD_TARGETS:-clean module}
- strip_modulenames;
- cd "${S}"
- touch Module.symvers
- for i in ${MODULE_NAMES}
- do
+ strip_modulenames
+ cd "${S}" || die
+ touch Module.symvers || die
+ for i in ${MODULE_NAMES}; do
unset libdir srcdir objdir
- for n in $(find_module_params ${i})
- do
+ for n in $(find_module_params ${i}); do
eval ${n/:*}=${n/*:/}
done
libdir=${libdir:-misc}
srcdir=${srcdir:-${S}}
objdir=${objdir:-${srcdir}}
- if [ ! -f "${srcdir}/.built" ];
- then
- cd "${srcdir}"
- ln -s "${S}"/Module.symvers Module.symvers
+ if [[ ! -f "${srcdir}/.built" ]]; then
+ cd "${srcdir}" || die
+ ln -s "${S}"/Module.symvers Module.symvers # no die for bug #888679
einfo "Preparing ${modulename} module"
- if [[ -n ${ECONF_PARAMS} ]]
- then
- econf ${ECONF_PARAMS} || \
- die "Unable to run econf ${ECONF_PARAMS}"
- fi
# This looks messy, but it is needed to handle multiple variables
# being passed in the BUILD_* stuff where the variables also have
@@ -668,8 +643,8 @@ linux-mod_src_compile() {
${BUILD_PARAMS} \
${BUILD_TARGETS} " \
|| die "Unable to emake HOSTCC="$(tc-getBUILD_CC)" LDFLAGS="$(get_abi_LDFLAGS)" ${BUILD_FIXES} ${BUILD_PARAMS} ${BUILD_TARGETS}"
- cd "${OLDPWD}"
- touch "${srcdir}"/.built
+ cd "${OLDPWD}" || die
+ touch "${srcdir}"/.built || die
fi
done
@@ -690,18 +665,16 @@ linux-mod_src_compile() {
# Look at the description of these variables for more details.
linux-mod_src_install() {
debug-print-function ${FUNCNAME} $*
- [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+ [[ -n "${MODULES_OPTIONAL_USE}" ]] && use !${MODULES_OPTIONAL_USE} && return
local modulename libdir srcdir objdir i n
[[ -n ${KERNEL_DIR} ]] && addpredict "${KERNEL_DIR}/null.dwo"
- strip_modulenames;
- for i in ${MODULE_NAMES}
- do
+ strip_modulenames
+ for i in ${MODULE_NAMES}; do
unset libdir srcdir objdir
- for n in $(find_module_params ${i})
- do
+ for n in $(find_module_params ${i}); do
eval ${n/:*}=${n/*:/}
done
libdir=${libdir:-misc}
@@ -711,8 +684,32 @@ linux-mod_src_install() {
einfo "Installing ${modulename} module"
cd "${objdir}" || die "${objdir} does not exist"
insinto "${INSTALL_MOD_PATH}"/lib/modules/${KV_FULL}/${libdir}
- doins ${modulename}.${KV_OBJ} || die "doins ${modulename}.${KV_OBJ} failed"
- cd "${OLDPWD}"
+
+ # check here for CONFIG_MODULE_COMPRESS_<compression option> (NONE, GZIP, XZ, ZSTD)
+ # and similarly compress the module being built if != NONE.
+
+ if linux_chkconfig_present MODULE_COMPRESS_XZ; then
+ # match kernel compression options for compatibility
+ # https://bugs.gentoo.org/920837
+ xz -T$(makeopts_jobs) --memlimit-compress=50% -q --check=crc32 --lzma2=dict=1MiB ${modulename}.${KV_OBJ} || die "Compressing ${modulename}.${KV_OBJ} with xz failed"
+ doins ${modulename}.${KV_OBJ}.xz
+ KV_OBJ_COMPRESS_EXT=".xz"
+ elif linux_chkconfig_present MODULE_COMPRESS_GZIP; then
+ if type -P pigz &>/dev/null ; then
+ pigz -p$(makeopts_jobs) ${modulename}.${KV_OBJ} || die "Compressing ${modulename}.${KV_OBJ} with pigz failed"
+ else
+ gzip ${modulename}.${KV_OBJ} || die "Compressing ${modulename}.${KV_OBJ} with gzip failed"
+ fi
+ doins ${modulename}.${KV_OBJ}.gz
+ KV_OBJ_COMPRESS_EXT=".gz"
+ elif linux_chkconfig_present MODULE_COMPRESS_ZSTD; then
+ zstd -T$(makeopts_jobs) ${modulename}.${KV_OBJ} || die "Compressing ${modulename}.${KV_OBJ} with zstd failed"
+ doins ${modulename}.${KV_OBJ}.zst
+ KV_OBJ_COMPRESS_EXT=".zst"
+ else
+ doins ${modulename}.${KV_OBJ}
+ fi
+ cd "${OLDPWD}" || die "${OLDPWD} does not exist"
generate_modulesd "${objdir}/${modulename}"
done
@@ -723,32 +720,32 @@ linux-mod_src_install() {
# It checks what to do after having merged the package.
linux-mod_pkg_preinst() {
debug-print-function ${FUNCNAME} $*
- [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+ [[ -n ${MODULES_OPTIONAL_USE} ]] && use !${MODULES_OPTIONAL_USE} && return
- [ -d "${D%/}/lib/modules" ] && UPDATE_DEPMOD=true || UPDATE_DEPMOD=false
- [ -d "${D%/}/lib/modules" ] && UPDATE_MODULEDB=true || UPDATE_MODULEDB=false
+ [[ -d ${D}/lib/modules ]] && UPDATE_DEPMOD=true || UPDATE_DEPMOD=false
+ [[ -d ${D}/lib/modules ]] && UPDATE_MODULEDB=true || UPDATE_MODULEDB=false
}
# @FUNCTION: linux-mod_pkg_postinst
# @DESCRIPTION:
# It executes /sbin/depmod and adds the package to the /var/lib/module-rebuild/moduledb
-# database (if ${D}/lib/modules is created)"
+# database (if ${D}/lib/modules is created)
linux-mod_pkg_postinst() {
debug-print-function ${FUNCNAME} $*
- [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+ [[ -n "${MODULES_OPTIONAL_USE}" ]] && use !${MODULES_OPTIONAL_USE} && return
- ${UPDATE_DEPMOD} && update_depmod;
- ${UPDATE_MODULEDB} && update_moduledb;
+ ${UPDATE_DEPMOD} && update_depmod
+ ${UPDATE_MODULEDB} && update_moduledb
}
# @FUNCTION: linux-mod_pkg_postrm
# @DESCRIPTION:
-# It removes the package from the /var/lib/module-rebuild/moduledb database but it doens't
+# It removes the package from the /var/lib/module-rebuild/moduledb database but it doesn't
# call /sbin/depmod because the modules are still installed.
linux-mod_pkg_postrm() {
debug-print-function ${FUNCNAME} $*
- [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
- remove_moduledb;
+ [[ -n "${MODULES_OPTIONAL_USE}" ]] && use !${MODULES_OPTIONAL_USE} && return
+ remove_moduledb
}
fi
diff --git a/eclass/llvm-r1.eclass b/eclass/llvm-r1.eclass
new file mode 100644
index 000000000000..658946a1ecbd
--- /dev/null
+++ b/eclass/llvm-r1.eclass
@@ -0,0 +1,250 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: llvm-r1.eclass
+# @MAINTAINER:
+# Michał Górny <mgorny@gentoo.org>
+# @AUTHOR:
+# Michał Górny <mgorny@gentoo.org>
+# @SUPPORTED_EAPIS: 8
+# @PROVIDES: llvm-utils
+# @BLURB: Provide LLVM_SLOT to build against slotted LLVM
+# @DESCRIPTION:
+# An eclass to reliably depend on a set of LLVM-related packages
+# in a matching slot. To use the eclass:
+#
+# 1. Set LLVM_COMPAT to the list of supported LLVM slots.
+# 2. Use llvm_gen_dep and/or LLVM_USEDEP to add appropriate
+# dependencies.
+# 3. Use llvm-r1_pkg_setup, get_llvm_prefix or LLVM_SLOT.
+#
+# The eclass sets IUSE and REQUIRED_USE. The flag corresponding
+# to the newest supported stable LLVM slot (or the newest testing,
+# if no stable slots are supported) is enabled by default.
+#
+# Example:
+# @CODE
+# LLVM_COMPAT=( {16..18} )
+#
+# inherit llvm-r1
+#
+# DEPEND="
+# dev-libs/libfoo[${LLVM_USEDEP}]
+# $(llvm_gen_dep '
+# sys-devel/clang:${LLVM_SLOT}
+# sys-devel/llvm:${LLVM_SLOT}
+# ')
+# "
+# @CODE
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ ! ${_LLVM_R1_ECLASS} ]]; then
+_LLVM_R1_ECLASS=1
+
+inherit llvm-utils
+
+# == internal control knobs ==
+
+# @ECLASS_VARIABLE: _LLVM_OLDEST_SLOT
+# @INTERNAL
+# @DESCRIPTION:
+# Oldest supported LLVM slot. This is used to automatically filter out
+# unsupported LLVM_COMPAT values.
+_LLVM_OLDEST_SLOT=15
+
+# @ECLASS_VARIABLE: _LLVM_NEWEST_STABLE
+# @INTERNAL
+# @DESCRIPTION:
+# The newest stable LLVM version. Versions newer than that won't
+# be automatically enabled via USE defaults.
+_LLVM_NEWEST_STABLE=17
+
+# == control variables ==
+
+# @ECLASS_VARIABLE: LLVM_COMPAT
+# @PRE_INHERIT
+# @REQUIRED
+# @DESCRIPTION:
+# A list of LLVM slots supported by the package, oldest to newest.
+#
+# Example:
+# @CODE
+# LLVM_COMPAT=( {15..17} )
+# @CODE
+
+# @ECLASS_VARIABLE: LLVM_OPTIONAL
+# @PRE_INHERIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set to a non-empty value, disables setting REQUIRED_USE
+# and exporting pkg_setup. You have to add LLVM_REQUIRED_USE and call
+# pkg_setup manually, with appropriate USE conditions.
+
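For illustration, a minimal sketch of an ebuild that keeps LLVM support behind a USE flag; the "clang" flag name is a placeholder and the dependency atom is borrowed from the example above:

    LLVM_COMPAT=( {16..18} )
    LLVM_OPTIONAL=1
    inherit llvm-r1

    IUSE="clang"
    # the eclass skips REQUIRED_USE and EXPORT_FUNCTIONS in this mode,
    # so wire both up manually under the USE condition
    REQUIRED_USE="clang? ( ${LLVM_REQUIRED_USE} )"
    DEPEND="
        clang? (
            $(llvm_gen_dep 'sys-devel/clang:${LLVM_SLOT}')
        )
    "

    pkg_setup() {
        use clang && llvm-r1_pkg_setup
    }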
+# == global metadata ==
+
+# @ECLASS_VARIABLE: LLVM_REQUIRED_USE
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# An eclass-generated REQUIRED_USE string that enforces selecting
+# exactly one slot. If LLVM_OPTIONAL is set, it needs to be copied
+# into REQUIRED_USE, under appropriate USE conditions. Otherwise,
+# it is added automatically.
+
+# @ECLASS_VARIABLE: LLVM_USEDEP
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# An eclass-generated USE dependency string that can be applied to other
+# packages using the same eclass, to enforce a LLVM slot match.
+
+_llvm_set_globals() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ if [[ ${LLVM_COMPAT@a} != *a* ]]; then
+ die "LLVM_COMPAT must be set to an array before inheriting ${ECLASS}"
+ fi
+
+ local stable=() unstable=()
+ local x
+ for x in "${LLVM_COMPAT[@]}"; do
+ if [[ ${x} -gt ${_LLVM_NEWEST_STABLE} ]]; then
+ unstable+=( "${x}" )
+ elif [[ ${x} -ge ${_LLVM_OLDEST_SLOT} ]]; then
+ stable+=( "${x}" )
+ fi
+ done
+
+ _LLVM_SLOTS=( "${stable[@]}" "${unstable[@]}" )
+ if [[ ! ${_LLVM_SLOTS[@]} ]]; then
+ die "LLVM_COMPAT does not contain any valid versions (all older than ${_LLVM_OLDEST_SLOT}?)"
+ fi
+
+ if [[ ${stable[@]} ]]; then
+ IUSE="+llvm_slot_${stable[-1]}"
+ unset 'stable[-1]'
+ else
+ IUSE="+llvm_slot_${unstable[-1]}"
+ unset 'unstable[-1]'
+ fi
+ local nondefault=( "${stable[@]}" "${unstable[@]}" )
+ IUSE+=" ${nondefault[*]/#/llvm_slot_}"
+
+ local flags=( "${_LLVM_SLOTS[@]/#/llvm_slot_}" )
+ LLVM_REQUIRED_USE="^^ ( ${flags[*]} )"
+ local usedep_flags=${flags[*]/%/(-)?}
+ LLVM_USEDEP=${usedep_flags// /,}
+ readonly LLVM_REQUIRED_USE LLVM_USEDEP
+
+ if [[ ! ${LLVM_OPTIONAL} ]]; then
+ REQUIRED_USE=${LLVM_REQUIRED_USE}
+ fi
+}
+_llvm_set_globals
+unset -f _llvm_set_globals
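As a worked illustration of the logic above (with the current _LLVM_OLDEST_SLOT=15 and _LLVM_NEWEST_STABLE=17), an ebuild declaring LLVM_COMPAT=( {15..18} ) ends up with metadata roughly equivalent to:

    # slot 18 is "unstable", so the newest stable slot (17) gets the USE default
    IUSE="+llvm_slot_17 llvm_slot_15 llvm_slot_16 llvm_slot_18"
    LLVM_REQUIRED_USE="^^ ( llvm_slot_15 llvm_slot_16 llvm_slot_17 llvm_slot_18 )"
    LLVM_USEDEP="llvm_slot_15(-)?,llvm_slot_16(-)?,llvm_slot_17(-)?,llvm_slot_18(-)?"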
+
+# == metadata helpers ==
+
+# @FUNCTION: llvm_gen_dep
+# @USAGE: <dependency>
+# @DESCRIPTION:
+# Output a dependency block, repeating "<dependency>" conditionally
+# to all llvm_slot_* USE flags. Any occurrences of '${LLVM_SLOT}'
+# within the block will be substituted for the respective slot.
+#
+# Example:
+# @CODE
+# DEPEND="
+# $(llvm_gen_dep '
+# sys-devel/clang:${LLVM_SLOT}
+# sys-devel/llvm:${LLVM_SLOT}
+# ')
+# "
+# @CODE
+llvm_gen_dep() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${#} -ne 1 ]] && die "Usage: ${FUNCNAME} <dependency>"
+
+ local dep=${1}
+
+ local slot
+ for slot in "${_LLVM_SLOTS[@]}"; do
+ echo "llvm_slot_${slot}? ( ${dep//\$\{LLVM_SLOT\}/${slot}} )"
+ done
+}
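For example, with LLVM_COMPAT=( {16..18} ) the call shown in the block above expands to one USE-conditional group per slot (whitespace condensed for readability):

    llvm_slot_16? ( sys-devel/clang:16 sys-devel/llvm:16 )
    llvm_slot_17? ( sys-devel/clang:17 sys-devel/llvm:17 )
    llvm_slot_18? ( sys-devel/clang:18 sys-devel/llvm:18 )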
+
+# == ebuild helpers ==
+
+# @FUNCTION: get_llvm_prefix
+# @USAGE: [-b|-d]
+# @DESCRIPTION:
+# Output the path to the selected LLVM slot.
+#
+# With no option or "-d", the path is prefixed by ESYSROOT. LLVM
+# dependencies should be in DEPEND then.
+#
+# With "-b" option, the path is prefixed by BROOT. LLVM dependencies
+# should be in BDEPEND then.
+get_llvm_prefix() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${#} -gt 1 ]] && die "Usage: ${FUNCNAME} [-b|-d]"
+
+ local prefix
+ case ${1--d} in
+ -d)
+ prefix=${ESYSROOT}
+ ;;
+ -b)
+ prefix=${BROOT}
+ ;;
+ *)
+ die "${FUNCNAME}: invalid option: ${1}"
+ ;;
+ esac
+
+ echo "${prefix}/usr/lib/llvm/${LLVM_SLOT}"
+}
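A minimal usage sketch, assuming the package locates LLVM through an LLVM_CONFIG environment variable (check what the build system actually honors); since llvm-config is executed at build time, the -b/BDEPEND variant applies:

    BDEPEND="$(llvm_gen_dep 'sys-devel/llvm:${LLVM_SLOT}')"

    src_configure() {
        export LLVM_CONFIG="$(get_llvm_prefix -b)/bin/llvm-config"
        default
    }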
+
+# @FUNCTION: llvm-r1_pkg_setup
+# @DESCRIPTION:
+# Prepend the appropriate executable directory for the selected LLVM
+# slot to PATH.
+#
+# The PATH manipulation is only done for source builds. The function
+# is a no-op when installing a binary package.
+#
+# If any other behavior is desired, the contents of the function
+# should be inlined into the ebuild and modified as necessary.
+#
+# Note that this function is not exported if LLVM_OPTIONAL is set.
+# In that case, it needs to be called manually.
+llvm-r1_pkg_setup() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ if [[ ${MERGE_TYPE} != binary ]]; then
+ [[ -z ${LLVM_SLOT} ]] && die "LLVM_SLOT unset (broken USE_EXPAND?)"
+
+ llvm_fix_clang_version CC CPP CXX
+ # keep in sync with profiles/features/llvm/make.defaults!
+ llvm_fix_tool_path ADDR2LINE AR AS LD NM OBJCOPY OBJDUMP RANLIB
+ llvm_fix_tool_path READELF STRINGS STRIP
+
+ # Set LLVM_CONFIG to help Meson (bug #907965) but only do it
+ # for empty ESYSROOT (as a proxy for "are we cross-compiling?").
+ if [[ -z ${ESYSROOT} ]] ; then
+ llvm_fix_tool_path LLVM_CONFIG
+ fi
+
+ llvm_prepend_path "${LLVM_SLOT}"
+ fi
+}
+
+fi
+
+if [[ ! ${LLVM_OPTIONAL} ]]; then
+ EXPORT_FUNCTIONS pkg_setup
+fi
diff --git a/eclass/llvm-utils.eclass b/eclass/llvm-utils.eclass
new file mode 100644
index 000000000000..532e609679b8
--- /dev/null
+++ b/eclass/llvm-utils.eclass
@@ -0,0 +1,153 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: llvm-utils.eclass
+# @MAINTAINER:
+# Michał Górny <mgorny@gentoo.org>
+# @AUTHOR:
+# Michał Górny <mgorny@gentoo.org>
+# @SUPPORTED_EAPIS: 7 8
+# @BLURB: Common utility functions for building against installed LLVM
+# @DESCRIPTION:
+# The utility eclass providing shared functions reused between
+# llvm.eclass and llvm-r1.eclass. It may also be used directly
+# in ebuilds.
+
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ ! ${_LLVM_UTILS_ECLASS} ]]; then
+_LLVM_UTILS_ECLASS=1
+
+# @FUNCTION: llvm_tuple_to_target
+# @USAGE: [<tuple>]
+# @DESCRIPTION:
+# Translate a tuple into a target suitable for LLVM_TARGETS.
+# Defaults to ${CHOST} if not specified.
+llvm_tuple_to_target() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${#} -gt 1 ]] && die "Usage: ${FUNCNAME} [<tuple>]"
+
+ case ${1:-${CHOST}} in
+ aarch64*) echo "AArch64";;
+ amdgcn*) echo "AMDGPU";;
+ arc*) echo "ARC";;
+ arm*) echo "ARM";;
+ avr*) echo "AVR";;
+ bpf*) echo "BPF";;
+ csky*) echo "CSKY";;
+ loong*) echo "LoongArch";;
+ m68k*) echo "M68k";;
+ mips*) echo "Mips";;
+ msp430*) echo "MSP430";;
+ nvptx*) echo "NVPTX";;
+ powerpc*) echo "PowerPC";;
+ riscv*) echo "RISCV";;
+ sparc*) echo "Sparc";;
+ s390*) echo "SystemZ";;
+ x86_64*|i?86*) echo "X86";;
+ xtensa*) echo "Xtensa";;
+ *) die "Unknown LLVM target for tuple ${1:-${CHOST}}"
+ esac
+}
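Illustrative invocations; the mapping follows the case table above, and the dependency string is a hypothetical example of tying the host target to an LLVM_TARGETS USE flag:

    llvm_tuple_to_target x86_64-pc-linux-gnu        # prints: X86
    llvm_tuple_to_target aarch64-unknown-linux-gnu  # prints: AArch64

    # e.g. requiring the matching backend on the host:
    DEPEND="sys-devel/llvm:${LLVM_SLOT}[llvm_targets_$(llvm_tuple_to_target)]"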
+
+# @FUNCTION: llvm_fix_clang_version
+# @USAGE: <variable-name>...
+# @DESCRIPTION:
+# Fix the clang compiler name in the specified variables to include
+# the major version, to prevent PATH alterations from forcing an older
+# clang version to be used.
+llvm_fix_clang_version() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local shopt_save=$(shopt -p -o noglob)
+ set -f
+ local var
+ for var; do
+ local split=( ${!var} )
+ case ${split[0]} in
+ *clang|*clang++|*clang-cpp)
+ local version=()
+ read -r -a version < <("${split[0]}" --version)
+ local major=${version[-1]%%.*}
+ if [[ -n ${major//[0-9]} ]]; then
+ die "${var}=${!var} produced invalid --version: ${version[*]}"
+ fi
+
+ split[0]+=-${major}
+ if ! type -P "${split[0]}" &>/dev/null; then
+ die "${split[0]} does not seem to exist"
+ fi
+ declare -g "${var}=${split[*]}"
+ ;;
+ esac
+ done
+ ${shopt_save}
+}
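Illustrative effect, assuming the selected clang reports version 17.x:

    CC="clang"
    CXX="clang++ -stdlib=libc++"
    llvm_fix_clang_version CC CXX
    # afterwards, roughly:
    #   CC=clang-17
    #   CXX="clang++-17 -stdlib=libc++"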
+
+# @FUNCTION: llvm_fix_tool_path
+# @USAGE: <variable-name>...
+# @DESCRIPTION:
+# Fix the LLVM tools referenced in the specified variables to their
+# current location, to prevent PATH alterations from forcing older
+# versions to be used.
+llvm_fix_tool_path() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local shopt_save=$(shopt -p -o noglob)
+ set -f
+ local var
+ for var; do
+ local split=( ${!var} )
+ local path=$(type -P ${split[0]} 2>/dev/null)
+ # if it resides in one of the LLVM prefixes, it's an LLVM tool!
+ if [[ ${path} == "${BROOT}/usr/lib/llvm"* ]]; then
+ split[0]=${path}
+ declare -g "${var}=${split[*]}"
+ fi
+ done
+ ${shopt_save}
+}
+
+# @FUNCTION: llvm_prepend_path
+# @USAGE: <slot>
+# @DESCRIPTION:
+# Prepend the path to the specified LLVM slot to the PATH variable,
+# and reexport it.
+llvm_prepend_path() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${#} -ne 1 ]] && die "Usage: ${FUNCNAME} <slot>"
+ local slot=${1}
+
+ local llvm_path=${ESYSROOT}/usr/lib/llvm/${slot}/bin
+ local IFS=:
+ local split_path=( ${PATH} )
+ local new_path=()
+ local x added=
+
+ for x in "${split_path[@]}"; do
+ if [[ ${x} == */usr/lib/llvm/*/bin ]]; then
+ # prepend new path in front of the first LLVM version found
+ if [[ ! ${added} ]]; then
+ new_path+=( "${llvm_path}" )
+ added=1
+ fi
+ # remove duplicate copies of the same path
+ if [[ ${x} == ${llvm_path} ]]; then
+ # deduplicate
+ continue
+ fi
+ fi
+ new_path+=( "${x}" )
+ done
+ # ...or to the end of PATH
+ [[ ${added} ]] || new_path+=( "${llvm_path}" )
+
+ export PATH=${new_path[*]}
+}
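For illustration, with an empty ESYSROOT and an older slot already present in PATH:

    PATH="/usr/lib/llvm/15/bin:/usr/bin:/bin"
    llvm_prepend_path 17
    # PATH is now:
    #   /usr/lib/llvm/17/bin:/usr/lib/llvm/15/bin:/usr/bin:/bin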
+
+fi
diff --git a/eclass/llvm.eclass b/eclass/llvm.eclass
index 5f2c445913cc..e297fe992c9f 100644
--- a/eclass/llvm.eclass
+++ b/eclass/llvm.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: llvm.eclass
@@ -6,7 +6,8 @@
# Michał Górny <mgorny@gentoo.org>
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
+# @PROVIDES: llvm-utils
# @BLURB: Utility functions to build against slotted LLVM
# @DESCRIPTION:
# The llvm.eclass provides utility functions that can be used to build
@@ -56,20 +57,15 @@
# }
# @CODE
-case "${EAPI:-0}" in
- 0|1|2|3|4|5)
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- 6|7|8)
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS pkg_setup
-
if [[ ! ${_LLVM_ECLASS} ]]; then
+_LLVM_ECLASS=1
+
+inherit llvm-utils
# make sure that the versions installing straight into /usr/bin
# are uninstalled
@@ -85,17 +81,24 @@ DEPEND="!!sys-devel/llvm:0"
# @INTERNAL
# @DESCRIPTION:
# Correct values of LLVM slots, newest first.
-declare -g -r _LLVM_KNOWN_SLOTS=( {15..8} )
+declare -g -r _LLVM_KNOWN_SLOTS=( {19..8} )
-# @FUNCTION: get_llvm_prefix
+# @ECLASS_VARIABLE: LLVM_ECLASS_SKIP_PKG_SETUP
+# @INTERNAL
+# @DESCRIPTION:
+# If set to a non-empty value, llvm_pkg_setup will not perform LLVM version
+# check, nor set PATH. Useful for bootstrap-prefix.sh, where AppleClang has
+# unparseable version numbers, which are irrelevant anyway.
+
+# @FUNCTION: get_llvm_slot
# @USAGE: [-b|-d] [<max_slot>]
# @DESCRIPTION:
# Find the newest LLVM install that is acceptable for the package,
-# and print an absolute path to it.
+# and print its major version number (i.e. slot).
#
# If -b is specified, the checks are performed relative to BROOT,
# and BROOT-path is returned. This is appropriate when your package
-# calls llvm-config executable. -b is supported since EAPI 7.
+# calls llvm-config executable.
#
# If -d is specified, the checks are performed relative to ESYSROOT,
# and ESYSROOT-path is returned. This is appropriate when your package
@@ -114,7 +117,7 @@ declare -g -r _LLVM_KNOWN_SLOTS=( {15..8} )
# is acceptable, false otherwise. If llvm_check_deps() is not defined,
# the function defaults to checking whether sys-devel/llvm:${LLVM_SLOT}
# is installed.
-get_llvm_prefix() {
+get_llvm_slot() {
debug-print-function ${FUNCNAME} "${@}"
local hv_switch=-d
@@ -126,28 +129,6 @@ get_llvm_prefix() {
shift
done
- local prefix=
- if [[ ${EAPI} != 6 ]]; then
- case ${hv_switch} in
- -b)
- prefix=${BROOT}
- ;;
- -d)
- prefix=${ESYSROOT}
- ;;
- esac
- else
- case ${hv_switch} in
- -b)
- die "${FUNCNAME} -b is not supported in EAPI ${EAPI}"
- ;;
- -d)
- prefix=${EPREFIX}
- hv_switch=
- ;;
- esac
- fi
-
local max_slot=${1}
local slot
for slot in "${_LLVM_KNOWN_SLOTS[@]}"; do
@@ -168,7 +149,7 @@ get_llvm_prefix() {
has_version ${hv_switch} "sys-devel/llvm:${slot}" || continue
fi
- echo "${prefix}/usr/lib/llvm/${slot}"
+ echo "${slot}"
return
done
@@ -180,6 +161,22 @@ get_llvm_prefix() {
die "No LLVM slot${1:+ <= ${1}} satisfying the package's dependencies found installed!"
}
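A sketch of the llvm_check_deps() hook mentioned above, for a package that also needs a matching clang at build time (atoms illustrative):

    llvm_check_deps() {
        has_version -b "sys-devel/clang:${LLVM_SLOT}" &&
            has_version -d "sys-devel/llvm:${LLVM_SLOT}"
    }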
+# @FUNCTION: get_llvm_prefix
+# @USAGE: [-b|-d] [<max_slot>]
+# @DESCRIPTION:
+# Find the newest LLVM install that is acceptable for the package,
+# and print an absolute path to it.
+#
+# The options and behavior are the same as for get_llvm_slot.
+get_llvm_prefix() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local prefix=${ESYSROOT}
+ [[ ${1} == -b ]] && prefix=${BROOT}
+
+ echo "${prefix}/usr/lib/llvm/$(get_llvm_slot "${@}")"
+}
+
# @FUNCTION: llvm_pkg_setup
# @DESCRIPTION:
# Prepend the appropriate executable directory for the newest
@@ -197,32 +194,28 @@ get_llvm_prefix() {
llvm_pkg_setup() {
debug-print-function ${FUNCNAME} "${@}"
+ if [[ ${LLVM_ECLASS_SKIP_PKG_SETUP} ]]; then
+ return
+ fi
+
if [[ ${MERGE_TYPE} != binary ]]; then
- local llvm_path=$(get_llvm_prefix "${LLVM_MAX_SLOT}")/bin
- local IFS=:
- local split_path=( ${PATH} )
- local new_path=()
- local x added=
-
- # prepend new path before first LLVM version found
- for x in "${split_path[@]}"; do
- if [[ ${x} == */usr/lib/llvm/*/bin ]]; then
- if [[ ${x} != ${llvm_path} ]]; then
- new_path+=( "${llvm_path}" )
- elif [[ ${added} && ${x} == ${llvm_path} ]]; then
- # deduplicate
- continue
- fi
- added=1
- fi
- new_path+=( "${x}" )
- done
- # ...or to the end of PATH
- [[ ${added} ]] || new_path+=( "${llvm_path}" )
+ LLVM_SLOT=$(get_llvm_slot "${LLVM_MAX_SLOT}")
- export PATH=${new_path[*]}
+ llvm_fix_clang_version CC CPP CXX
+ # keep in sync with profiles/features/llvm/make.defaults!
+ llvm_fix_tool_path ADDR2LINE AR AS LD NM OBJCOPY OBJDUMP RANLIB
+ llvm_fix_tool_path READELF STRINGS STRIP
+
+ # Set LLVM_CONFIG to help Meson (bug #907965) but only do it
+ # for empty ESYSROOT (as a proxy for "are we cross-compiling?").
+ if [[ -z ${ESYSROOT} ]] ; then
+ llvm_fix_tool_path LLVM_CONFIG
+ fi
+
+ llvm_prepend_path "${LLVM_SLOT}"
fi
}
-_LLVM_ECLASS=1
fi
+
+EXPORT_FUNCTIONS pkg_setup
diff --git a/eclass/llvm.org.eclass b/eclass/llvm.org.eclass
index 0ddd48b19e46..49e600bdaf04 100644
--- a/eclass/llvm.org.eclass
+++ b/eclass/llvm.org.eclass
@@ -1,4 +1,4 @@
-# Copyright 2019-2022 Gentoo Authors
+# Copyright 2019-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: llvm.org.eclass
@@ -7,6 +7,7 @@
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 7 8
+# @PROVIDES: git-r3
# @BLURB: Common bits for fetching & unpacking llvm.org projects
# @DESCRIPTION:
# The llvm.org eclass provides common code to fetch and unpack parts
@@ -29,43 +30,77 @@
# llvm.org_set_globals
# @CODE
-case "${EAPI:-0}" in
- 7|8)
- ;;
- *)
- die "Unsupported EAPI=${EAPI} for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+# == version substrings ==
+
+# @ECLASS_VARIABLE: LLVM_MAJOR
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# The major LLVM version.
+LLVM_MAJOR=$(ver_cut 1)
+
+# @ECLASS_VARIABLE: LLVM_VERSION
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# The full 3-component LLVM version without suffixes or .9999.
+LLVM_VERSION=$(ver_cut 1-3)
+
# == internal control bits ==
-# @ECLASS_VARIABLE: _LLVM_MASTER_MAJOR
+# @ECLASS_VARIABLE: _LLVM_MAIN_MAJOR
# @INTERNAL
# @DESCRIPTION:
# The major version of current LLVM trunk. Used to determine
# the correct branch to use.
-_LLVM_MASTER_MAJOR=15
+_LLVM_MAIN_MAJOR=19
# @ECLASS_VARIABLE: _LLVM_SOURCE_TYPE
# @INTERNAL
# @DESCRIPTION:
-# Source type to use: 'git' or 'tar'.
+# Source type to use: 'git', 'tar' or 'snapshot'.
if [[ -z ${_LLVM_SOURCE_TYPE+1} ]]; then
- if [[ ${PV} == *.9999 ]]; then
- _LLVM_SOURCE_TYPE=git
- else
- _LLVM_SOURCE_TYPE=tar
- fi
+ case ${PV} in
+ *.9999)
+ _LLVM_SOURCE_TYPE=git
+ ;;
+ *_pre*)
+ _LLVM_SOURCE_TYPE=snapshot
+
+ case ${PV} in
+ 19.0.0_pre20240420)
+ EGIT_COMMIT=f03cd2db91956456f1c5e2da86d3c50183eebd28
+ ;;
+ 19.0.0_pre20240410)
+ EGIT_COMMIT=ee284d2da0720dc21191d6f545504cbfcf5dcbcf
+ ;;
+ *)
+ die "Unknown snapshot: ${PV}"
+ ;;
+ esac
+ export EGIT_VERSION=${EGIT_COMMIT}
+ ;;
+ *)
+ _LLVM_SOURCE_TYPE=tar
+ ;;
+ esac
fi
[[ ${_LLVM_SOURCE_TYPE} == git ]] && inherit git-r3
-[[ ${PV} == ${_LLVM_MASTER_MAJOR}.* && ${_LLVM_SOURCE_TYPE} == tar ]] &&
- die "${ECLASS}: Release ebuild for master branch?!"
+[[ ${LLVM_MAJOR} == ${_LLVM_MAIN_MAJOR} && ${_LLVM_SOURCE_TYPE} == tar ]] &&
+ die "${ECLASS}: Release ebuild for main branch?!"
inherit multiprocessing
+if [[ ${_LLVM_SOURCE_TYPE} == tar ]]; then
+ inherit verify-sig
+fi
+
# == control variables ==
@@ -86,9 +121,10 @@ inherit multiprocessing
# @ECLASS_VARIABLE: LLVM_MANPAGES
# @DEFAULT_UNSET
# @DESCRIPTION:
-# Set to 'build', include the dependency on dev-python/sphinx to build
-# the manpages. If set to 'pregenerated', fetch and install
-# pregenerated manpages from the archive.
+# Set to a non-empty value in ebuilds that build manpages via Sphinx.
+# The eclass will either include the dependency on dev-python/sphinx
+# or pull the pregenerated manpage tarball depending on the package
+# version.
# @ECLASS_VARIABLE: LLVM_PATCHSET
# @DEFAULT_UNSET
@@ -131,29 +167,31 @@ inherit multiprocessing
# The list of USE flags corresponding to all LLVM targets in this LLVM
# version. The value depends on ${PV}.
-case ${PV} in
- 10*|11*|12*)
- # this API is not present for old LLVM versions
- ;;
- 13*)
- ALL_LLVM_EXPERIMENTAL_TARGETS=( ARC CSKY M68k VE )
+case ${LLVM_MAJOR} in
+ 14)
+ ALL_LLVM_EXPERIMENTAL_TARGETS=( ARC CSKY M68k )
ALL_LLVM_PRODUCTION_TARGETS=(
AArch64 AMDGPU ARM AVR BPF Hexagon Lanai Mips MSP430 NVPTX
- PowerPC RISCV Sparc SystemZ WebAssembly X86 XCore
+ PowerPC RISCV Sparc SystemZ VE WebAssembly X86 XCore
)
;;
- 14*)
- ALL_LLVM_EXPERIMENTAL_TARGETS=( ARC CSKY M68k )
+ 15)
+ ALL_LLVM_EXPERIMENTAL_TARGETS=(
+ ARC CSKY DirectX LoongArch M68k SPIRV
+ )
ALL_LLVM_PRODUCTION_TARGETS=(
AArch64 AMDGPU ARM AVR BPF Hexagon Lanai Mips MSP430 NVPTX
PowerPC RISCV Sparc SystemZ VE WebAssembly X86 XCore
)
;;
*)
- ALL_LLVM_EXPERIMENTAL_TARGETS=( ARC CSKY LoongArch M68k )
+ ALL_LLVM_EXPERIMENTAL_TARGETS=(
+ ARC CSKY DirectX M68k SPIRV Xtensa
+ )
ALL_LLVM_PRODUCTION_TARGETS=(
- AArch64 AMDGPU ARM AVR BPF Hexagon Lanai Mips MSP430 NVPTX
- PowerPC RISCV Sparc SystemZ VE WebAssembly X86 XCore
+ AArch64 AMDGPU ARM AVR BPF Hexagon Lanai LoongArch Mips
+ MSP430 NVPTX PowerPC RISCV Sparc SystemZ VE WebAssembly X86
+ XCore
)
;;
esac
@@ -163,6 +201,18 @@ ALL_LLVM_TARGET_FLAGS=(
"${ALL_LLVM_EXPERIMENTAL_TARGETS[@]/#/llvm_targets_}"
)
+# @ECLASS_VARIABLE: LLVM_SOABI
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# The current ABI version of LLVM dylib, in a form suitable for use
+# as a subslot.
+if [[ ${LLVM_MAJOR} == ${_LLVM_MAIN_MAJOR} ]]; then
+ LLVM_SOABI=${PV}
+elif ver_test ${PV} -ge 18.1.0_rc3; then
+ LLVM_SOABI=$(ver_cut 1-2)
+else
+ LLVM_SOABI=${LLVM_MAJOR}
+fi
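Worked examples of the resulting subslot component (version numbers are illustrative):

    # PV=19.0.0.9999 (main branch)  -> LLVM_SOABI=19.0.0.9999
    # PV=18.1.5 (>= 18.1.0_rc3)     -> LLVM_SOABI=18.1
    # PV=17.0.6                     -> LLVM_SOABI=17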
# == global scope logic ==
@@ -180,17 +230,36 @@ llvm.org_set_globals() {
fi
fi
- if [[ ${_LLVM_SOURCE_TYPE} == git ]]; then
- EGIT_REPO_URI="https://github.com/llvm/llvm-project.git"
+ case ${_LLVM_SOURCE_TYPE} in
+ git)
+ EGIT_REPO_URI="https://github.com/llvm/llvm-project.git"
- [[ ${PV} != ${_LLVM_MASTER_MAJOR}.* ]] &&
- EGIT_BRANCH="release/${PV%%.*}.x"
- elif [[ ${_LLVM_SOURCE_TYPE} == tar ]]; then
- SRC_URI+="
- https://github.com/llvm/llvm-project/archive/llvmorg-${PV/_/-}.tar.gz"
- else
- die "Invalid _LLVM_SOURCE_TYPE: ${LLVM_SOURCE_TYPE}"
- fi
+ [[ ${LLVM_MAJOR} != ${_LLVM_MAIN_MAJOR} ]] &&
+ EGIT_BRANCH="release/${LLVM_MAJOR}.x"
+ ;;
+ tar)
+ SRC_URI+="
+ https://github.com/llvm/llvm-project/releases/download/llvmorg-${PV/_/-}/llvm-project-${PV/_/}.src.tar.xz
+ verify-sig? (
+ https://github.com/llvm/llvm-project/releases/download/llvmorg-${PV/_/-}/llvm-project-${PV/_/}.src.tar.xz.sig
+ )
+ "
+ BDEPEND+="
+ verify-sig? (
+ >=sec-keys/openpgp-keys-llvm-16.0.4
+ )
+ "
+ VERIFY_SIG_OPENPGP_KEY_PATH=/usr/share/openpgp-keys/llvm.asc
+ ;;
+ snapshot)
+ SRC_URI+="
+ https://github.com/llvm/llvm-project/archive/${EGIT_COMMIT}.tar.gz
+ -> llvm-project-${EGIT_COMMIT}.tar.gz
+ "
+ ;;
+ *)
+ die "Invalid _LLVM_SOURCE_TYPE: ${LLVM_SOURCE_TYPE}"
+ esac
S=${WORKDIR}/${LLVM_COMPONENTS[0]}
@@ -199,25 +268,38 @@ llvm.org_set_globals() {
RESTRICT+=" !test? ( test )"
fi
- case ${LLVM_MANPAGES:-__unset__} in
- __unset__)
- # no manpage support
- ;;
- build)
- IUSE+=" doc"
- # NB: this is not always the correct dep but it does no harm
- BDEPEND+=" dev-python/sphinx"
- ;;
- pregenerated)
- IUSE+=" doc"
+ if [[ ${LLVM_MANPAGES} ]]; then
+ # @ECLASS_VARIABLE: LLVM_MANPAGE_DIST
+ # @OUTPUT_VARIABLE
+ # @DESCRIPTION:
+ # The filename of the prebuilt manpage tarball for this version.
+ LLVM_MANPAGE_DIST=
+ if [[ ${_LLVM_SOURCE_TYPE} == tar && ${PV} != *_rc* ]]; then
+ case ${PV} in
+ 14*|15*|16.0.[0-3])
+ LLVM_MANPAGE_DIST="llvm-${PV}-manpages.tar.bz2"
+ ;;
+ 16*)
+ LLVM_MANPAGE_DIST="llvm-16.0.4-manpages.tar.bz2"
+ ;;
+ 17*)
+ LLVM_MANPAGE_DIST="llvm-17.0.1-manpages.tar.bz2"
+ ;;
+ 18*)
+ LLVM_MANPAGE_DIST="llvm-18.1.0-manpages.tar.bz2"
+ ;;
+ esac
+ fi
+
+ IUSE+=" doc"
+ if [[ -n ${LLVM_MANPAGE_DIST} ]]; then
SRC_URI+="
!doc? (
- https://dev.gentoo.org/~mgorny/dist/llvm/llvm-${PV}-manpages.tar.bz2
- )"
- ;;
- *)
- die "Invalid LLVM_MANPAGES=${LLVM_MANPAGES}"
- esac
+ https://dev.gentoo.org/~mgorny/dist/llvm/${LLVM_MANPAGE_DIST}
+ )
+ "
+ fi
+ fi
if [[ -n ${LLVM_PATCHSET} ]]; then
SRC_URI+="
@@ -254,8 +336,6 @@ llvm.org_set_globals() {
# == phase functions ==
-EXPORT_FUNCTIONS src_unpack src_prepare
-
# @FUNCTION: llvm.org_src_unpack
# @DESCRIPTION:
# Unpack or checkout requested LLVM components.
@@ -269,24 +349,43 @@ llvm.org_src_unpack() {
components+=( "${LLVM_TEST_COMPONENTS[@]}" )
fi
- if [[ ${_LLVM_SOURCE_TYPE} == git ]]; then
- git-r3_fetch
- git-r3_checkout '' . '' "${components[@]}"
- default_src_unpack
- else
- local archive=llvmorg-${PV/_/-}.tar.gz
- ebegin "Unpacking from ${archive}"
- tar -x -z -o --strip-components 1 \
- -f "${DISTDIR}/${archive}" \
- "${components[@]/#/llvm-project-${archive%.tar*}/}" || die
- eend ${?}
-
- # unpack all remaining distfiles
- local x
- for x in ${A}; do
- [[ ${x} != ${archive} ]] && unpack "${x}"
- done
- fi
+ local archive=
+ case ${_LLVM_SOURCE_TYPE} in
+ git)
+ git-r3_fetch
+ git-r3_checkout '' . '' "${components[@]}"
+ ;;
+ tar)
+ archive=llvm-project-${PV/_/}.src.tar.xz
+ if use verify-sig; then
+ verify-sig_verify_detached \
+ "${DISTDIR}/${archive}" "${DISTDIR}/${archive}.sig"
+ fi
+
+ ebegin "Unpacking from ${archive}"
+ tar -x -J -o --strip-components 1 \
+ -f "${DISTDIR}/${archive}" \
+ "${components[@]/#/${archive%.tar*}/}" || die
+ eend ${?}
+ ;;
+ snapshot)
+ archive=llvm-project-${EGIT_COMMIT}.tar.gz
+ ebegin "Unpacking from ${archive}"
+ tar -x -z -o --strip-components 1 \
+ -f "${DISTDIR}/${archive}" \
+ "${components[@]/#/${archive%.tar*}/}" || die
+ eend ${?}
+ ;;
+ *)
+ die "Invalid _LLVM_SOURCE_TYPE: ${LLVM_SOURCE_TYPE}"
+ ;;
+ esac
+
+ # unpack all remaining distfiles
+ local x
+ for x in ${A}; do
+ [[ ${x} != ${archive} ]] && unpack "${x}"
+ done
if [[ -n ${LLVM_PATCHSET} ]]; then
local nocomp=$(grep -r -L "^Gentoo-Component:" \
@@ -299,7 +398,7 @@ llvm.org_src_unpack() {
local IFS='|'
grep -E -r -L "^Gentoo-Component:.*(${components[*]})" \
"${WORKDIR}/llvm-gentoo-patchset-${LLVM_PATCHSET}" |
- xargs rm
+ xargs -r rm
local status=( "${PIPESTATUS[@]}" )
[[ ${status[1]} -ne 0 ]] && die "rm failed"
[[ ${status[0]} -ne 0 ]] &&
@@ -359,7 +458,7 @@ get_lit_flags() {
# Return true (0) if manpages are going to be built from source,
# false (1) if preinstalled manpages will be used.
llvm_are_manpages_built() {
- use doc || [[ ${LLVM_MANPAGES} == build ]]
+ use doc || [[ -z ${LLVM_MANPAGE_DIST} ]]
}
# @FUNCTION: llvm_install_manpages
@@ -369,7 +468,9 @@ llvm_install_manpages() {
# install pre-generated manpages
if ! llvm_are_manpages_built; then
# (doman does not support custom paths)
- insinto "/usr/lib/llvm/${SLOT}/share/man/man1"
- doins "${WORKDIR}/llvm-${PV}-manpages/${LLVM_COMPONENTS[0]}"/*.1
+ insinto "/usr/lib/llvm/${LLVM_MAJOR}/share/man/man1"
+ doins "${WORKDIR}"/llvm-*-manpages/"${LLVM_COMPONENTS[0]}"/*.1
fi
}
+
+EXPORT_FUNCTIONS src_unpack src_prepare
diff --git a/eclass/lua-single.eclass b/eclass/lua-single.eclass
index 12eba57f4a89..8432df0583bb 100644
--- a/eclass/lua-single.eclass
+++ b/eclass/lua-single.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: lua-single.eclass
@@ -52,7 +52,7 @@
# "
# BDEPEND="virtual/pkgconfig"
#
-# # Only neeed if the setup phase has to do more than just call lua-single_pkg_setup
+# # Only needed if the setup phase has to do more than just call lua-single_pkg_setup
# pkg_setup() {
# lua-single_pkg_setup
# [...]
@@ -64,23 +64,19 @@
# @CODE
case ${EAPI} in
- 7|8)
- ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_LUA_SINGLE_R0} ]]; then
+if [[ -z ${_LUA_SINGLE_ECLASS} ]]; then
+_LUA_SINGLE_ECLASS=1
-if [[ ${_LUA_R0} ]]; then
+if [[ ${_LUA_ECLASS} ]]; then
die 'lua-single.eclass cannot be used with lua.eclass.'
fi
inherit lua-utils
-fi
-
-EXPORT_FUNCTIONS pkg_setup
-
# @ECLASS_VARIABLE: LUA_COMPAT
# @REQUIRED
# @PRE_INHERIT
@@ -276,8 +272,6 @@ _lua_single_set_globals() {
_lua_single_set_globals
unset -f _lua_single_set_globals
-if [[ ! ${_LUA_SINGLE_R0} ]]; then
-
# @FUNCTION: _lua_gen_usedep
# @USAGE: [<pattern>...]
# @INTERNAL
@@ -532,5 +526,6 @@ lua-single_pkg_setup() {
[[ ${MERGE_TYPE} != binary ]] && lua_setup
}
-_LUA_SINGLE_R0=1
fi
+
+EXPORT_FUNCTIONS pkg_setup
diff --git a/eclass/lua-utils.eclass b/eclass/lua-utils.eclass
index 475bd993894b..0ff36734dc8f 100644
--- a/eclass/lua-utils.eclass
+++ b/eclass/lua-utils.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: lua-utils.eclass
@@ -18,12 +18,12 @@
# functions. It can be inherited safely.
case ${EAPI} in
- 7|8)
- ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_LUA_UTILS_R0} ]]; then
+if [[ -z ${_LUA_UTILS_ECLASS} ]]; then
+_LUA_UTILS_ECLASS=1
inherit toolchain-funcs
@@ -384,7 +384,7 @@ lua_enable_tests() {
busted)
test_directory="${2:-spec}"
test_pkg="dev-lua/busted"
- if [[ ! ${_LUA_SINGLE_R0} ]]; then
+ if [[ ! ${_LUA_SINGLE_ECLASS} ]]; then
eval "lua_src_test() {
busted --lua=\"\${ELUA}\" --output=\"plainTerminal\" \"${test_directory}\" || die \"Tests fail with \${ELUA}\"
}"
@@ -403,7 +403,7 @@ lua_enable_tests() {
local test_deps=${RDEPEND}
if [[ -n ${test_pkg} ]]; then
- if [[ ! ${_LUA_SINGLE_R0} ]]; then
+ if [[ ! ${_LUA_SINGLE_ECLASS} ]]; then
test_deps+=" ${test_pkg}[${LUA_USEDEP}]"
else
test_deps+=" $(lua_gen_cond_dep "
@@ -536,5 +536,4 @@ lua_get_version() {
echo "${LUA_VERSION}"
}
-_LUA_UTILS_R0=1
fi
diff --git a/eclass/lua.eclass b/eclass/lua.eclass
index 29b8b6ec6348..bb2e41ef1061 100644
--- a/eclass/lua.eclass
+++ b/eclass/lua.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: lua.eclass
@@ -52,21 +52,19 @@
# @CODE
case ${EAPI} in
- 7|8)
- ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_LUA_R0} ]]; then
+if [[ -z ${_LUA_ECLASS} ]]; then
+_LUA_ECLASS=1
-if [[ ${_LUA_SINGLE_R0} ]]; then
+if [[ ${_LUA_SINGLE_ECLASS} ]]; then
die 'lua.eclass cannot be used with lua-single.eclass.'
fi
inherit multibuild lua-utils
-fi
-
# @ECLASS_VARIABLE: LUA_COMPAT
# @REQUIRED
# @PRE_INHERIT
@@ -197,8 +195,6 @@ fi
# lua_targets_lua5-1(-)?,lua_targets_lua5-3(-)?
# @CODE
-if [[ ! ${_LUA_R0} ]]; then
-
# @FUNCTION: _lua_validate_useflags
# @INTERNAL
# @DESCRIPTION:
@@ -314,9 +310,6 @@ lua_foreach_impl() {
multibuild_foreach_variant _lua_multibuild_wrapper "${@}"
}
-_LUA_R0=1
-fi
-
# @FUNCTION: _lua_set_globals
# @INTERNAL
# @DESCRIPTION:
@@ -375,3 +368,5 @@ _lua_set_globals() {
_lua_set_globals
unset -f _lua_set_globals
+
+fi
diff --git a/eclass/mate-desktop.org.eclass b/eclass/mate-desktop.org.eclass
index dfa66155936b..28d704395b9a 100644
--- a/eclass/mate-desktop.org.eclass
+++ b/eclass/mate-desktop.org.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: mate-desktop.org.eclass
@@ -6,48 +6,48 @@
# mate@gentoo.org
# @AUTHOR:
# Authors: NP-Hardass <NP-Hardass@gentoo.org> based upon the gnome.org eclass.
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Helper eclass for mate-desktop.org hosted archives
# @DESCRIPTION:
# Provide a default SRC_URI and EGIT_REPO_URI for MATE packages as well as
# exporting some useful values like the MATE_BRANCH
-# EAPIs < 6 are banned.
-case "${EAPI:-0}" in
- 6|7) ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ -z ${_MATE_DESKTOP_ORG_ECLASS} ]]; then
+_MATE_DESKTOP_ORG_ECLASS=1
+
if [[ ${PV} == 9999 ]]; then
inherit git-r3
fi
-[[ ${EAPI:-0} -eq 6 ]] && inherit eapi7-ver
-
# @ECLASS_VARIABLE: MATE_TARBALL_SUFFIX
# @INTERNAL
# @DESCRIPTION:
# All projects hosted on mate-desktop.org provide tarballs as tar.xz.
# Undefined in live ebuilds.
-[[ ${PV} != 9999 ]] && : ${MATE_TARBALL_SUFFIX:="xz"}
+[[ ${PV} != 9999 ]] && : "${MATE_TARBALL_SUFFIX:="xz"}"
# @ECLASS_VARIABLE: MATE_DESKTOP_ORG_PN
# @DESCRIPTION:
# Name of the package as hosted on mate-desktop.org.
# Leave unset if package name matches PN.
-: ${MATE_DESKTOP_ORG_PN:=${PN}}
+: "${MATE_DESKTOP_ORG_PN:=${PN}}"
# @ECLASS_VARIABLE: MATE_DESKTOP_ORG_PV
# @DESCRIPTION:
# Package version string as listed on mate-desktop.org.
# Leave unset if package version string matches PV.
-: ${MATE_DESKTOP_ORG_PV:=${PV}}
+: "${MATE_DESKTOP_ORG_PV:=${PV}}"
# @ECLASS_VARIABLE: MATE_BRANCH
# @DESCRIPTION:
# Major and minor numbers of the version number, unless live.
# If live ebuild, will be set to '9999'.
-: ${MATE_BRANCH:=$(ver_cut 1-2)}
+: "${MATE_BRANCH:=$(ver_cut 1-2)}"
# Set SRC_URI or EGIT_REPO_URI based on whether live
if [[ ${PV} == 9999 ]]; then
@@ -59,3 +59,5 @@ fi
# Set HOMEPAGE for all ebuilds
HOMEPAGE="https://mate-desktop.org"
+
+fi
diff --git a/eclass/mate.eclass b/eclass/mate.eclass
index a734b4c4dc7e..d9158a79d611 100644
--- a/eclass/mate.eclass
+++ b/eclass/mate.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2020 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: mate.eclass
@@ -7,29 +7,28 @@
# @AUTHOR:
# Authors: NP-Hardass <NP-Hardass@gentoo.org> based upon the gnome2
# and autotools-utils eclasses
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7 8
+# @PROVIDES: mate-desktop.org
# @BLURB: Provides phases for MATE based packages.
# @DESCRIPTION:
# Exports portage base functions used by ebuilds written for packages using the
-# MATE framework. Occassionally acts as a wrapper to gnome2 due to the
+# MATE framework. Occasionally acts as a wrapper to gnome2 due to the
# fact that MATE is a GNOME fork. For additional functions, see gnome2-utils.eclass.
-# Check EAPI only
-case "${EAPI:-0}" in
- 6|7) ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ -z ${_MATE_ECLASS} ]]; then
+_MATE_ECLASS=1
+
# Inherit happens below after declaration of GNOME2_LA_PUNT
# @ECLASS_VARIABLE: MATE_LA_PUNT
# @DESCRIPTION:
# Available values for MATE_LA_PUNT:
# - "no": will not clean any .la files
-# - In EAPI < 7:
-# - "yes": will run prune_libtool_files --modules
-# - If it is not set, it will run prune_libtool_files
-# - In EAPI 7:
# - Any non-"no" value will run
# find "${ED}" -name '*.la' -delete || die
# MATE_LA_PUNT is a stub to GNOME2_LA_PUNT
@@ -38,13 +37,8 @@ GNOME2_LA_PUNT="${MATE_LA_PUNT}"
inherit gnome2 autotools mate-desktop.org
-case "${EAPI:-0}" in
- 6|7) EXPORT_FUNCTIONS src_prepare src_configure src_install pkg_preinst pkg_postinst pkg_postrm ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
-esac
-
# Autotools requires our MATE m4 files
-DEPEND=">=mate-base/mate-common-${MATE_BRANCH}"
+BDEPEND=">=mate-base/mate-common-${MATE_BRANCH}"
# @FUNCTION: mate_py_cond_func_wrap
# @DESCRIPTION:
@@ -53,8 +47,8 @@ DEPEND=">=mate-base/mate-common-${MATE_BRANCH}"
# This function should only be used if the ebuild also inherits the
# python-r1 eclass
mate_py_cond_func_wrap() {
- if [[ ! ${_PYTHON_R1} ]]; then
- die "This function requires the inheritence of the python-r1 eclass"
+ if [[ ! ${_PYTHON_R1_ECLASS} ]]; then
+ die "This function requires the inheritance of the python-r1 eclass"
fi
if use python; then
python_foreach_impl run_in_build_dir "$@"
@@ -69,7 +63,7 @@ mate_py_cond_func_wrap() {
# - true: will always run eautoreconf
# - false: will default to automatic detect
# - If it is not set, it will default to false
-: ${MATE_FORCE_AUTORECONF:="false"}
+: "${MATE_FORCE_AUTORECONF:="false"}"
# @FUNCTION: ematedocize
# @DESCRIPTION:
@@ -164,3 +158,7 @@ mate_pkg_postinst() {
mate_pkg_postrm() {
gnome2_pkg_postrm "$@"
}
+
+fi
+
+EXPORT_FUNCTIONS src_prepare src_configure src_install pkg_preinst pkg_postinst pkg_postrm
diff --git a/eclass/mercurial.eclass b/eclass/mercurial.eclass
index 2616b88311cf..16d9fc87cb0a 100644
--- a/eclass/mercurial.eclass
+++ b/eclass/mercurial.eclass
@@ -1,9 +1,9 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: mercurial.eclass
# @MAINTAINER:
-# Christoph Junghans <junghans@gentoo.org>
+# No maintainer <maintainer-needed@gentoo.org>
# @AUTHOR:
# Next gen author: Krzysztof Pawlik <nelchael@gentoo.org>
# Original author: Aron Griffis <agriffis@gentoo.org>
@@ -25,14 +25,6 @@ _MERCURIAL_ECLASS=1
PROPERTIES+=" live"
-case ${EAPI:-0} in
- 7)
- # For compatibiilty only (indirect inherits).
- # Eclass itself doesn't need it.
- inherit eutils
- ;;
-esac
-
BDEPEND="dev-vcs/mercurial"
# @ECLASS_VARIABLE: EHG_REPO_URI
@@ -45,7 +37,7 @@ BDEPEND="dev-vcs/mercurial"
#
# EHG_REVISION is passed as a value for --updaterev parameter, so it can be more
# than just a revision, please consult `hg help revisions' for more details.
-: ${EHG_REVISION:="default"}
+: "${EHG_REVISION:="default"}"
# @ECLASS_VARIABLE: EHG_STORE_DIR
# @USER_VARIABLE
@@ -70,7 +62,7 @@ BDEPEND="dev-vcs/mercurial"
# @ECLASS_VARIABLE: EHG_QUIET
# @DESCRIPTION:
# Suppress some extra noise from mercurial, set it to 'ON' to be quiet.
-: ${EHG_QUIET:="OFF"}
+: "${EHG_QUIET:="OFF"}"
[[ "${EHG_QUIET}" == "ON" ]] && EHG_QUIET_CMD_OPT="--quiet"
# @ECLASS_VARIABLE: EHG_CONFIG
diff --git a/eclass/meson-multilib.eclass b/eclass/meson-multilib.eclass
index 49c64418727e..0c069af11930 100644
--- a/eclass/meson-multilib.eclass
+++ b/eclass/meson-multilib.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: meson-multilib.eclass
@@ -31,8 +31,6 @@ _MESON_MULTILIB_ECLASS=1
inherit meson multilib-minimal
-EXPORT_FUNCTIONS src_configure src_compile src_test src_install
-
# @FUNCTION: meson_native_use_bool
# @USAGE: <USE flag> [option name]
# @DESCRIPTION:
@@ -130,3 +128,5 @@ multilib_src_install() {
}
fi
+
+EXPORT_FUNCTIONS src_configure src_compile src_test src_install
diff --git a/eclass/meson.eclass b/eclass/meson.eclass
index 905c4d89f501..a22a85887584 100644
--- a/eclass/meson.eclass
+++ b/eclass/meson.eclass
@@ -1,11 +1,10 @@
-# Copyright 2017-2022 Gentoo Authors
+# Copyright 2017-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: meson.eclass
# @MAINTAINER:
-# William Hubbs <williamh@gentoo.org>
-# Mike Gilbert <floppym@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# base-system@gentoo.org
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: common ebuild functions for meson-based packages
# @DESCRIPTION:
# This eclass contains the default phase functions for packages which
@@ -35,39 +34,36 @@
# @CODE
case ${EAPI} in
- 6|7|8) ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_MESON_ECLASS} ]]; then
_MESON_ECLASS=1
-[[ ${EAPI} == 6 ]] && inherit eapi7-ver
-inherit multiprocessing ninja-utils python-utils-r1 toolchain-funcs
+inherit flag-o-matic multiprocessing ninja-utils python-utils-r1 toolchain-funcs
-EXPORT_FUNCTIONS src_configure src_compile src_test src_install
-
-_MESON_DEPEND=">=dev-util/meson-0.59.4
+BDEPEND=">=dev-build/meson-1.2.3
${NINJA_DEPEND}
- dev-util/meson-format-array
+ dev-build/meson-format-array
"
-if [[ ${EAPI} == 6 ]]; then
- DEPEND=${_MESON_DEPEND}
-else
- BDEPEND=${_MESON_DEPEND}
-fi
-
# @ECLASS_VARIABLE: BUILD_DIR
# @DEFAULT_UNSET
# @DESCRIPTION:
# Build directory, location where all generated files should be placed.
# If this isn't set, it defaults to ${WORKDIR}/${P}-build.
+# @ECLASS_VARIABLE: MESON_VERBOSE
+# @USER_VARIABLE
+# @DESCRIPTION:
+# Set to OFF to disable verbose messages during compilation
+: "${MESON_VERBOSE:=ON}"
+
# @ECLASS_VARIABLE: EMESON_BUILDTYPE
# @DESCRIPTION:
# The buildtype value to pass to meson setup.
-: ${EMESON_BUILDTYPE=plain}
+: "${EMESON_BUILDTYPE=plain}"
# @ECLASS_VARIABLE: EMESON_SOURCE
# @DEFAULT_UNSET
@@ -121,10 +117,7 @@ _meson_get_machine_info() {
# system roughly corresponds to uname -s (lowercase)
case ${tuple} in
- *-aix*) system=aix ;;
- *-cygwin*) system=cygwin ;;
*-darwin*) system=darwin ;;
- *-freebsd*) system=freebsd ;;
*-linux*) system=linux ;;
mingw*|*-mingw*) system=windows ;;
*-solaris*) system=sunos ;;
@@ -168,7 +161,10 @@ _meson_create_cross_file() {
objc = $(_meson_env_array "$(tc-getPROG OBJC cc)")
objcopy = $(_meson_env_array "$(tc-getOBJCOPY)")
objcpp = $(_meson_env_array "$(tc-getPROG OBJCXX c++)")
+ # TODO: Cleanup 'pkgconfig' and keep just 'pkg-config' once we require
+ # >=1.3.0.
pkgconfig = '$(tc-getPKG_CONFIG)'
+ pkg-config = '$(tc-getPKG_CONFIG)'
strip = $(_meson_env_array "$(tc-getSTRIP)")
windres = $(_meson_env_array "$(tc-getRC)")
@@ -222,7 +218,10 @@ _meson_create_native_file() {
objc = $(_meson_env_array "$(tc-getBUILD_PROG OBJC cc)")
objcopy = $(_meson_env_array "$(tc-getBUILD_OBJCOPY)")
objcpp = $(_meson_env_array "$(tc-getBUILD_PROG OBJCXX c++)")
+ # TODO: Cleanup 'pkgconfig' and keep just 'pkg-config' once we require
+ # >=1.3.0.
pkgconfig = '$(tc-getBUILD_PKG_CONFIG)'
+ pkg-config = '$(tc-getBUILD_PKG_CONFIG)'
strip = $(_meson_env_array "$(tc-getBUILD_STRIP)")
windres = $(_meson_env_array "$(tc-getBUILD_PROG RC windres)")
@@ -278,14 +277,43 @@ meson_feature() {
usex "$1" "-D${2-$1}=enabled" "-D${2-$1}=disabled"
}
-# @FUNCTION: meson_src_configure
-# @USAGE: [extra meson arguments]
+# @FUNCTION: setup_meson_src_configure
# @DESCRIPTION:
-# This is the meson_src_configure function.
-meson_src_configure() {
- debug-print-function ${FUNCNAME} "$@"
-
- [[ -n "${NINJA_DEPEND}" ]] || ewarn "Unknown value '${NINJA}' for \${NINJA}"
+# Calculate the command line which meson should use, and other relevant
+# variables. Invoke via "${MESONARGS[@]}" in the calling environment.
+# This function is called from meson_src_configure.
+setup_meson_src_configure() {
+ MESONARGS=()
+ if tc-is-lto; then
+ # We want to connect -flto in *FLAGS to the dedicated meson option,
+ # to ensure that meson has visibility into what the user set. Although
+ # it is unlikely projects will check `get_option('b_lto')` and change
+ # their behavior, individual targets which are broken with LTO can
+ # disable it per target. Injecting via *FLAGS means that meson cannot
+ # strip -flto from that target.
+ MESONARGS+=( -Db_lto=true )
+
+ # respect -flto value, e.g. -flto=8, -flto=thin
+ local v=$(get-flag flto)
+ case ${v} in
+ thin)
+ MESONARGS+=( -Db_lto_mode=thin )
+ ;;
+ ''|*[!0-9]*)
+ ;;
+ *)
+ MESONARGS+=( -Db_lto_threads=${v} )
+ ;;
+ esac
+ # finally, remove it from *FLAGS to avoid passing it:
+ # - twice, with potentially different values
+ # - on excluded targets
+ filter-lto
+ else
+ # Prevent projects from enabling LTO by default. In Gentoo, LTO is
+ # enabled via setting *FLAGS appropriately.
+ MESONARGS+=( -Db_lto=false )
+ fi
local BUILD_CFLAGS=${BUILD_CFLAGS}
local BUILD_CPPFLAGS=${BUILD_CPPFLAGS}
@@ -298,25 +326,24 @@ meson_src_configure() {
local BUILD_PKG_CONFIG_PATH=${BUILD_PKG_CONFIG_PATH}
if tc-is-cross-compiler; then
- : ${BUILD_CFLAGS:=-O1 -pipe}
- : ${BUILD_CXXFLAGS:=-O1 -pipe}
- : ${BUILD_FCFLAGS:=-O1 -pipe}
- : ${BUILD_OBJCFLAGS:=-O1 -pipe}
- : ${BUILD_OBJCXXFLAGS:=-O1 -pipe}
+ : "${BUILD_CFLAGS:=-O1 -pipe}"
+ : "${BUILD_CXXFLAGS:=-O1 -pipe}"
+ : "${BUILD_FCFLAGS:=-O1 -pipe}"
+ : "${BUILD_OBJCFLAGS:=-O1 -pipe}"
+ : "${BUILD_OBJCXXFLAGS:=-O1 -pipe}"
else
- : ${BUILD_CFLAGS:=${CFLAGS}}
- : ${BUILD_CPPFLAGS:=${CPPFLAGS}}
- : ${BUILD_CXXFLAGS:=${CXXFLAGS}}
- : ${BUILD_FCFLAGS:=${FCFLAGS}}
- : ${BUILD_LDFLAGS:=${LDFLAGS}}
- : ${BUILD_OBJCFLAGS:=${OBJCFLAGS}}
- : ${BUILD_OBJCXXFLAGS:=${OBJCXXFLAGS}}
- : ${BUILD_PKG_CONFIG_LIBDIR:=${PKG_CONFIG_LIBDIR}}
- : ${BUILD_PKG_CONFIG_PATH:=${PKG_CONFIG_PATH}}
+ : "${BUILD_CFLAGS:=${CFLAGS}}"
+ : "${BUILD_CPPFLAGS:=${CPPFLAGS}}"
+ : "${BUILD_CXXFLAGS:=${CXXFLAGS}}"
+ : "${BUILD_FCFLAGS:=${FCFLAGS}}"
+ : "${BUILD_LDFLAGS:=${LDFLAGS}}"
+ : "${BUILD_OBJCFLAGS:=${OBJCFLAGS}}"
+ : "${BUILD_OBJCXXFLAGS:=${OBJCXXFLAGS}}"
+ : "${BUILD_PKG_CONFIG_LIBDIR:=${PKG_CONFIG_LIBDIR}}"
+ : "${BUILD_PKG_CONFIG_PATH:=${PKG_CONFIG_PATH}}"
fi
- local mesonargs=(
- meson setup
+ MESONARGS+=(
--libdir "$(get_libdir)"
--localstatedir "${EPREFIX}/var/lib"
--prefix "${EPREFIX}/usr"
@@ -335,22 +362,22 @@ meson_src_configure() {
# It's Gentoo policy to not have builds die on blanket -Werror, as it's
# an upstream development matter. bug #754279.
-Dwerror=false
+
+ "${ltoflags[@]}"
)
if [[ -n ${EMESON_BUILDTYPE} ]]; then
- mesonargs+=( --buildtype "${EMESON_BUILDTYPE}" )
+ MESONARGS+=( -Dbuildtype="${EMESON_BUILDTYPE}" )
fi
if tc-is-cross-compiler; then
- mesonargs+=( --cross-file "$(_meson_create_cross_file)" )
+ MESONARGS+=( --cross-file "$(_meson_create_cross_file)" )
fi
- BUILD_DIR="${BUILD_DIR:-${WORKDIR}/${P}-build}"
-
# Handle quoted whitespace
eval "local -a MYMESONARGS=( ${MYMESONARGS} )"
- mesonargs+=(
+ MESONARGS+=(
# Arguments from ebuild
"${emesonargs[@]}"
@@ -359,12 +386,6 @@ meson_src_configure() {
# Arguments from user
"${MYMESONARGS[@]}"
-
- # Source directory
- "${EMESON_SOURCE:-${S}}"
-
- # Build directory
- "${BUILD_DIR}"
)
# Used by symbolextractor.py
@@ -372,18 +393,42 @@ meson_src_configure() {
tc-export NM
tc-getPROG READELF readelf >/dev/null
+ # https://bugs.gentoo.org/721786
+ export BOOST_INCLUDEDIR="${BOOST_INCLUDEDIR-${EPREFIX}/usr/include}"
+ export BOOST_LIBRARYDIR="${BOOST_LIBRARYDIR-${EPREFIX}/usr/$(get_libdir)}"
+}
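A sketch of reusing this helper from a custom src_configure, per the note above that MESONARGS is meant to be consumed in the calling environment; the subdirectory layout here is hypothetical:

    src_configure() {
        setup_meson_src_configure "${@}"
        BUILD_DIR="${WORKDIR}/${P}-build"
        MESONARGS+=(
            "${S}/meson-subproject"   # hypothetical source directory
            "${BUILD_DIR}"
        )
        echo meson setup "${MESONARGS[@]}" >&2
        meson setup "${MESONARGS[@]}" || die
    }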
+
+# @FUNCTION: meson_src_configure
+# @USAGE: [extra meson arguments]
+# @DESCRIPTION:
+# This is the meson_src_configure function.
+meson_src_configure() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ [[ -n "${NINJA_DEPEND}" ]] || ewarn "Unknown value '${NINJA}' for \${NINJA}"
+
+ BUILD_DIR="${BUILD_DIR:-${WORKDIR}/${P}-build}"
+
# https://bugs.gentoo.org/625396
python_export_utf8_locale
- # https://bugs.gentoo.org/721786
- local -x BOOST_INCLUDEDIR="${BOOST_INCLUDEDIR-${EPREFIX}/usr/include}"
- local -x BOOST_LIBRARYDIR="${BOOST_LIBRARYDIR-${EPREFIX}/usr/$(get_libdir)}"
-
(
+ setup_meson_src_configure "$@"
+ MESONARGS+=(
+ # Source directory
+ "${EMESON_SOURCE:-${S}}"
+
+ # Build directory
+ "${BUILD_DIR}"
+ )
+
export -n {C,CPP,CXX,F,OBJC,OBJCXX,LD}FLAGS PKG_CONFIG_{LIBDIR,PATH}
- echo "${mesonargs[@]}" >&2
- "${mesonargs[@]}"
- ) || die
+ echo meson setup "${MESONARGS[@]}" >&2
+ meson setup "${MESONARGS[@]}"
+ )
+ local rv=$?
+ [[ ${rv} -eq 0 ]] || die -n "configure failed"
+ return ${rv}
}
# @FUNCTION: meson_src_compile
@@ -393,17 +438,28 @@ meson_src_configure() {
meson_src_compile() {
debug-print-function ${FUNCNAME} "$@"
+ pushd "${BUILD_DIR}" > /dev/null || die
+
local mesoncompileargs=(
- -C "${BUILD_DIR}"
- --jobs "$(makeopts_jobs "${MAKEOPTS}" 0)"
- --load-average "$(makeopts_loadavg "${MAKEOPTS}" 0)"
- --verbose
- "$@"
+ --jobs "$(get_makeopts_jobs 0)"
+ --load-average "$(get_makeopts_loadavg 0)"
)
+ case ${MESON_VERBOSE} in
+ OFF) ;;
+ *) mesoncompileargs+=( --verbose ) ;;
+ esac
+
+ mesoncompileargs+=( "$@" )
+
set -- meson compile "${mesoncompileargs[@]}"
echo "$@" >&2
- "$@" || die "compile failed"
+ "$@"
+ local rv=$?
+ [[ ${rv} -eq 0 ]] || die -n "compile failed"
+
+ popd > /dev/null || die
+ return ${rv}
}
# @FUNCTION: meson_src_test
@@ -413,15 +469,22 @@ meson_src_compile() {
meson_src_test() {
debug-print-function ${FUNCNAME} "$@"
+ pushd "${BUILD_DIR}" > /dev/null || die
+
local mesontestargs=(
- -C "${BUILD_DIR}"
+ --print-errorlogs
--num-processes "$(makeopts_jobs "${MAKEOPTS}")"
"$@"
)
set -- meson test "${mesontestargs[@]}"
echo "$@" >&2
- "$@" || die "tests failed"
+ "$@"
+ local rv=$?
+ [[ ${rv} -eq 0 ]] || die -n "tests failed"
+
+ popd > /dev/null || die
+ return ${rv}
}
# @FUNCTION: meson_install
@@ -431,15 +494,22 @@ meson_src_test() {
meson_install() {
debug-print-function ${FUNCNAME} "$@"
+ pushd "${BUILD_DIR}" > /dev/null || die
+
local mesoninstallargs=(
- -C "${BUILD_DIR}"
--destdir "${D}"
+ --no-rebuild
"$@"
)
set -- meson install "${mesoninstallargs[@]}"
echo "$@" >&2
- "$@" || die "install failed"
+ "$@"
+ local rv=$?
+ [[ ${rv} -eq 0 ]] || die -n "install failed"
+
+ popd > /dev/null || die
+ return ${rv}
}
# @FUNCTION: meson_src_install
@@ -454,3 +524,5 @@ meson_src_install() {
}
fi
+
+EXPORT_FUNCTIONS src_configure src_compile src_test src_install
diff --git a/eclass/mono-env.eclass b/eclass/mono-env.eclass
index 8cec214e615f..48712587ff3e 100644
--- a/eclass/mono-env.eclass
+++ b/eclass/mono-env.eclass
@@ -1,21 +1,19 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: mono-env.eclass
# @MAINTAINER:
# maintainer-needed@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 6 7
# @BLURB: Set environment variables commonly used by dotnet packages.
# @DESCRIPTION:
# Set environment variables commonly used by dotnet packages.
-case ${EAPI:-0} in
- [567]) ;;
+case ${EAPI} in
+ 6|7) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS pkg_setup
-
if [[ -z ${_MONO_ENV_ECLASS} ]] ; then
_MONO_ENV_ECLASS=1
@@ -48,3 +46,5 @@ mono-env_pkg_setup() {
}
fi
+
+EXPORT_FUNCTIONS pkg_setup
diff --git a/eclass/mono.eclass b/eclass/mono.eclass
index ddea2d4b9c64..c096acc8c40e 100644
--- a/eclass/mono.eclass
+++ b/eclass/mono.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: mono.eclass
@@ -76,7 +76,7 @@ mono_multilib_comply() {
then
for exe in "${ED}/usr/bin"/*
do
- if [[ "$(file "${exe}")" == *"shell script text"* ]]
+ if [[ "$(file -S "${exe}")" == *"shell script text"* ]]
then
sed -r -i -e ":/lib(/|$): s:/lib(/|$):/$(get_libdir)\1:" \
"${exe}" || die "Sedding some sense into ${exe} failed"
diff --git a/eclass/mount-boot.eclass b/eclass/mount-boot.eclass
index 3111d9dcb9b5..73beb9adea67 100644
--- a/eclass/mount-boot.eclass
+++ b/eclass/mount-boot.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: mount-boot.eclass
@@ -13,13 +13,11 @@
# function tries to ensure that it's mounted in rw mode, exiting with an
# error if it can't. It does nothing if /boot isn't a separate partition.
-case ${EAPI:-0} in
+case ${EAPI} in
6|7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS pkg_pretend pkg_preinst pkg_postinst pkg_prerm pkg_postrm
-
# @FUNCTION: mount-boot_is_disabled
# @INTERNAL
# @DESCRIPTION:
@@ -111,3 +109,5 @@ mount-boot_pkg_prerm() {
mount-boot_pkg_postinst() { :; }
mount-boot_pkg_postrm() { :; }
+
+EXPORT_FUNCTIONS pkg_pretend pkg_preinst pkg_postinst pkg_prerm pkg_postrm
diff --git a/eclass/mozcoreconf-v5.eclass b/eclass/mozcoreconf-v5.eclass
deleted file mode 100644
index 8da12ab0f939..000000000000
--- a/eclass/mozcoreconf-v5.eclass
+++ /dev/null
@@ -1,270 +0,0 @@
-# Copyright 1999-2022 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-#
-# @ECLASS: mozcoreconf-v5.eclass
-# @MAINTAINER:
-# Mozilla team <mozilla@gentoo.org>
-# @BLURB: core options and configuration functions for mozilla
-# @DESCRIPTION:
-#
-# inherit mozconfig-v6.* or above for mozilla configuration support
-
-# @ECLASS_VARIABLE: MOZILLA_FIVE_HOME
-# @DESCRIPTION:
-# This is an eclass-generated variable that defines the rpath that the mozilla
-# product will be installed in. Read-only
-
-if [[ ! ${_MOZCORECONF} ]]; then
-
-PYTHON_COMPAT=( python2_7 )
-PYTHON_REQ_USE='ncurses,sqlite,ssl,threads(+)'
-
-inherit multilib toolchain-funcs flag-o-matic python-any-r1 versionator
-
-IUSE="${IUSE} custom-cflags custom-optimization"
-
-DEPEND="virtual/pkgconfig
- ${PYTHON_DEPS}"
-
-# @FUNCTION: mozconfig_annotate
-# @DESCRIPTION:
-# add an annotated line to .mozconfig
-#
-# Example:
-# mozconfig_annotate "building on ultrasparc" --enable-js-ultrasparc
-# => ac_add_options --enable-js-ultrasparc # building on ultrasparc
-mozconfig_annotate() {
- declare reason=$1 x ; shift
- [[ $# -gt 0 ]] || die "mozconfig_annotate missing flags for ${reason}\!"
- for x in ${*}; do
- echo "ac_add_options ${x} # ${reason}" >>.mozconfig
- done
-}
-
-# @FUNCTION: mozconfig_use_enable
-# @DESCRIPTION:
-# add a line to .mozconfig based on a USE-flag
-#
-# Example:
-# mozconfig_use_enable truetype freetype2
-# => ac_add_options --enable-freetype2 # +truetype
-mozconfig_use_enable() {
- declare flag=$(use_enable "$@")
- mozconfig_annotate "$(use $1 && echo +$1 || echo -$1)" "${flag}"
-}
-
-# @FUNCTION: mozconfig_use_with
-# @DESCRIPTION:
-# add a line to .mozconfig based on a USE-flag
-#
-# Example:
-# mozconfig_use_with kerberos gss-api /usr/$(get_libdir)
-# => ac_add_options --with-gss-api=/usr/lib # +kerberos
-mozconfig_use_with() {
- declare flag=$(use_with "$@")
- mozconfig_annotate "$(use $1 && echo +$1 || echo -$1)" "${flag}"
-}
-
-# @FUNCTION: mozconfig_use_extension
-# @DESCRIPTION:
-# enable or disable an extension based on a USE-flag
-#
-# Example:
-# mozconfig_use_extension gnome gnomevfs
-# => ac_add_options --enable-extensions=gnomevfs
-mozconfig_use_extension() {
- declare minus=$(use $1 || echo -)
- mozconfig_annotate "${minus:-+}$1" --enable-extensions=${minus}${2}
-}
-
-moz_pkgsetup() {
- # Ensure we use C locale when building
- export LANG="C"
- export LC_ALL="C"
- export LC_MESSAGES="C"
- export LC_CTYPE="C"
-
- # Ensure we use correct toolchain
- export HOST_CC="$(tc-getBUILD_CC)"
- export HOST_CXX="$(tc-getBUILD_CXX)"
- tc-export CC CXX LD PKG_CONFIG
-
- # Ensure that we have a sane build enviroment
- export MOZILLA_CLIENT=1
- export BUILD_OPT=1
- export NO_STATIC_LIB=1
- export USE_PTHREADS=1
- export ALDFLAGS=${LDFLAGS}
- # ensure MOZCONFIG is not defined
- unset MOZCONFIG
-
- # set MOZILLA_FIVE_HOME
- export MOZILLA_FIVE_HOME="/usr/$(get_libdir)/${PN}"
-
- # nested configure scripts in mozilla products generate unrecognized options
- # false positives when toplevel configure passes downwards.
- export QA_CONFIGURE_OPTIONS=".*"
-
- python-any-r1_pkg_setup
-}
-
-# @FUNCTION: mozconfig_init
-# @DESCRIPTION:
-# Initialize mozilla configuration and populate with core settings.
-# This should be called in src_configure before any other mozconfig_* functions.
-mozconfig_init() {
- declare enable_optimize pango_version myext x
- declare XUL=$([[ ${PN} == xulrunner ]] && echo true || echo false)
- declare FF=$([[ ${PN} == firefox ]] && echo true || echo false)
- declare SM=$([[ ${PN} == seamonkey ]] && echo true || echo false)
- declare TB=$([[ ${PN} == thunderbird ]] && echo true || echo false)
- declare WF=$([[ ${PN} == waterfox* ]] && echo true || echo false)
-
- ####################################
- #
- # Setup the initial .mozconfig
- # See http://www.mozilla.org/build/configure-build.html
- #
- ####################################
-
- case ${PN} in
- *xulrunner)
- cp xulrunner/config/mozconfig .mozconfig \
- || die "cp xulrunner/config/mozconfig failed" ;;
- *firefox|waterfox*)
- cp browser/config/mozconfig .mozconfig \
- || die "cp browser/config/mozconfig failed" ;;
- seamonkey)
- # Must create the initial mozconfig to enable application
- : >.mozconfig || die "initial mozconfig creation failed"
- mozconfig_annotate "" --enable-application=suite ;;
- *thunderbird)
- # Must create the initial mozconfig to enable application
- : >.mozconfig || die "initial mozconfig creation failed"
- mozconfig_annotate "" --enable-application=mail ;;
- esac
-
- ####################################
- #
- # CFLAGS setup and ARCH support
- #
- ####################################
-
- # Set optimization level
- mozconfig_annotate "Workaround known breakage" --enable-optimize=-O2
-
- if [[ ${ARCH} == hppa ]]; then
- mozconfig_annotate "more than -O0 causes a segfault on hppa" --enable-optimize=-O0
- elif [[ ${ARCH} == x86 ]]; then
- mozconfig_annotate "less then -O2 causes a segfault on x86" --enable-optimize=-O2
- elif use custom-optimization || [[ ${ARCH} =~ (alpha|ia64) ]]; then
- # Set optimization level based on CFLAGS
- if is-flag -O0; then
- mozconfig_annotate "from CFLAGS" --enable-optimize=-O0
- elif [[ ${ARCH} == ppc ]] && has_version '>=sys-libs/glibc-2.8'; then
- mozconfig_annotate "more than -O1 segfaults on ppc with glibc-2.8" --enable-optimize=-O1
- elif is-flag -O4; then
- mozconfig_annotate "from CFLAGS" --enable-optimize=-O4
- elif is-flag -O3; then
- mozconfig_annotate "from CFLAGS" --enable-optimize=-O3
- elif is-flag -O1; then
- mozconfig_annotate "from CFLAGS" --enable-optimize=-O1
- elif is-flag -Os; then
- mozconfig_annotate "from CFLAGS" --enable-optimize=-Os
- else
- mozconfig_annotate "Gentoo's default optimization" --enable-optimize=-O2
- fi
- else
- # Enable Mozilla's default
- mozconfig_annotate "mozilla default" --enable-optimize
- fi
-
- # Strip optimization so it does not end up in compile string
- filter-flags '-O*'
-
- # Strip over-aggressive CFLAGS
- use custom-cflags || strip-flags
-
- # Additional ARCH support
- case "${ARCH}" in
- arm)
- # Reduce the memory requirements for linking
- append-ldflags -Wl,--no-keep-memory -Wl,--reduce-memory-overheads
- ;;
- alpha)
- # Historically we have needed to add -fPIC manually for 64-bit.
- # Additionally, alpha should *always* build with -mieee for correct math
- # operation
- append-flags -fPIC -mieee
- ;;
- ia64)
- # Historically we have needed to add this manually for 64-bit
- append-flags -fPIC
- ;;
- ppc64)
- append-flags -fPIC -mminimal-toc
- # Reduce the memory requirements for linking
- append-ldflags -Wl,--no-keep-memory -Wl,--reduce-memory-overheads
- ;;
- esac
-
- # We need to append flags for >= gcc-6 support
- append-cxxflags -fno-delete-null-pointer-checks -fno-lifetime-dse -fno-schedule-insns2
-
- # Use the MOZILLA_FIVE_HOME for the rpath
- append-ldflags -Wl,-rpath="${MOZILLA_FIVE_HOME}",--enable-new-dtags
- # Set MOZILLA_FIVE_HOME in mozconfig
- mozconfig_annotate '' --with-default-mozilla-five-home=${MOZILLA_FIVE_HOME}
-
- ####################################
- #
- # mozconfig setup
- #
- ####################################
-
- mozconfig_annotate disable_update_strip \
- --disable-updater \
- --disable-strip \
- --disable-install-strip
-
- # jemalloc won't build with older glibc
- ! has_version ">=sys-libs/glibc-2.4" && mozconfig_annotate "we have old glibc" --disable-jemalloc
-}
-
-# @FUNCTION: mozconfig_final
-# @DESCRIPTION:
-# Apply EXTRA_ECONF values to .mozconfig
-# Display a table describing all configuration options paired
-# with reasons, then clean up extensions list.
-# This should be called in src_configure at the end of all other mozconfig_* functions.
-mozconfig_final() {
- declare ac opt hash reason
-
- # Apply EXTRA_ECONF entries to .mozconfig
- if [[ -n ${EXTRA_ECONF} ]]; then
- IFS=\! read -a ac <<<${EXTRA_ECONF// --/\!}
- for opt in "${ac[@]}"; do
- mozconfig_annotate "EXTRA_ECONF" --${opt#--}
- done
- fi
-
- echo
- echo "=========================================================="
- echo "Building ${PF} with the following configuration"
- grep ^ac_add_options .mozconfig | while read ac opt hash reason; do
- [[ -z ${hash} || ${hash} == \# ]] \
- || die "error reading mozconfig: ${ac} ${opt} ${hash} ${reason}"
- printf " %-30s %s\n" "${opt}" "${reason:-mozilla.org default}"
- done
- echo "=========================================================="
- echo
-
- # Resolve multiple --enable-extensions down to one
- declare exts=$(sed -n 's/^ac_add_options --enable-extensions=\([^ ]*\).*/\1/p' \
- .mozconfig | xargs)
- sed -i '/^ac_add_options --enable-extensions/d' .mozconfig
- echo "ac_add_options --enable-extensions=${exts// /,}" >> .mozconfig
-}
-
-_MOZCORECONF=1
-fi
diff --git a/eclass/mozcoreconf-v6.eclass b/eclass/mozcoreconf-v6.eclass
index 8cea230a959c..71dbc6802d71 100644
--- a/eclass/mozcoreconf-v6.eclass
+++ b/eclass/mozcoreconf-v6.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-#
+
# @ECLASS: mozcoreconf-v6.eclass
# @MAINTAINER:
# Mozilla team <mozilla@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 8
# @BLURB: core options and configuration functions for mozilla
# @DESCRIPTION:
#
@@ -15,26 +15,19 @@
# This is an eclass-generated variable that defines the rpath that the mozilla
# product will be installed in. Read-only
-if [[ ! ${_MOZCORECONF} ]]; then
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ ! ${_MOZCORECONF_V6_ECLASS} ]]; then
+_MOZCORECONF_V6_ECLASS=1
inherit toolchain-funcs flag-o-matic python-any-r1
BDEPEND="virtual/pkgconfig
- dev-lang/python:2.7[ncurses,sqlite,ssl,threads(+)]
${PYTHON_DEPS}"
-case "${EAPI:-0}" in
- 6)
- inherit multilib versionator
- DEPEND+=" ${BDEPEND}"
- ;;
- 7|8)
- ;;
- *)
- die "EAPI ${EAPI} is not supported, contact eclass maintainers"
- ;;
-esac
-
IUSE="${IUSE} custom-cflags custom-optimization"
# @FUNCTION: mozconfig_annotate
@@ -78,17 +71,17 @@ mozconfig_use_with() {
moz_pkgsetup() {
# Ensure we use C locale when building
- export LANG="C"
- export LC_ALL="C"
- export LC_MESSAGES="C"
- export LC_CTYPE="C"
+ export LANG="C.UTF-8"
+ export LC_ALL="C.UTF-8"
+ export LC_MESSAGES="C.UTF-8"
+ export LC_CTYPE="C.UTF-8"
# Ensure we use correct toolchain
export HOST_CC="$(tc-getBUILD_CC)"
export HOST_CXX="$(tc-getBUILD_CXX)"
tc-export CC CXX LD PKG_CONFIG AR RANLIB
- # Ensure that we have a sane build enviroment
+ # Ensure that we have a sane build environment
export MOZILLA_CLIENT=1
export BUILD_OPT=1
export NO_STATIC_LIB=1
@@ -105,12 +98,6 @@ moz_pkgsetup() {
export QA_CONFIGURE_OPTIONS=".*"
python-any-r1_pkg_setup
- # workaround to set python3 into PYTHON3 until mozilla doesn't need py2
- if [[ "${PYTHON_COMPAT[@]}" != "${PYTHON_COMPAT[@]#python3*}" ]]; then
- export PYTHON3=${PYTHON}
- export PYTHON=python2.7
- export EPYTHON="${EPREFIX}"/usr/bin/python2.7
- fi
}
# @FUNCTION: mozconfig_init
@@ -195,10 +182,6 @@ mozconfig_init() {
# Strip optimization so it does not end up in compile string
filter-flags '-O*'
- if is-flagq '-g*' ; then
- mozconfig_annotate 'elf-hack broken with -g* flags' --disable-elf-hack
- fi
-
# Strip over-aggressive CFLAGS
use custom-cflags || strip-flags
@@ -279,5 +262,4 @@ mozconfig_final() {
echo
}
-_MOZCORECONF=1
fi
diff --git a/eclass/mozextension.eclass b/eclass/mozextension.eclass
index 25089aaeaf93..52fe26280e6e 100644
--- a/eclass/mozextension.eclass
+++ b/eclass/mozextension.eclass
@@ -1,13 +1,19 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-#
# @ECLASS: mozextension.eclass
# @MAINTAINER:
# Mozilla team <mozilla@gentoo.org>
+# @SUPPORTED_EAPIS: 8
# @BLURB: Install extensions for use in Mozilla products.
-if [[ ! ${_MOZEXTENSION} ]]; then
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ ! ${_MOZEXTENSION_ECLASS} ]]; then
+_MOZEXTENSION_ECLASS=1
# @ECLASS_VARIABLE: MOZEXTENSION_TARGET
# @DESCRIPTION:
@@ -15,9 +21,9 @@ if [[ ! ${_MOZEXTENSION} ]]; then
# to be overridden from the default app-global extensions path.
# Default is empty, which installs to predetermined hard-coded
# paths specified in the eclass.
-: ${MOZEXTENSION_TARGET:=""}
+: "${MOZEXTENSION_TARGET:=""}"
-DEPEND="app-arch/unzip"
+BDEPEND="app-arch/unzip"
mozversion_extension_location() {
case ${PN} in
@@ -117,8 +123,7 @@ xpi_copy() {
insinto "${MOZILLA_FIVE_HOME}"/extensions
fi
- newins "${DISTDIR%/}"/${x##*/}.xpi ${emid}.xpi
+ newins "${DISTDIR}"/${x##*/}.xpi ${emid}.xpi
}
-_MOZEXTENSION=1
fi
diff --git a/eclass/mozlinguas-v2.eclass b/eclass/mozlinguas-v2.eclass
index 155b894edc53..81e00275a8f6 100644
--- a/eclass/mozlinguas-v2.eclass
+++ b/eclass/mozlinguas-v2.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: mozlinguas-v2.eclass
@@ -7,25 +7,22 @@
# @AUTHOR:
# Nirbheek Chauhan <nirbheek@gentoo.org>
# Ian Stakenvicius <axs@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 8
# @BLURB: Handle language packs for mozilla products
# @DESCRIPTION:
# Sets IUSE according to MOZ_LANGS (language packs available). Also exports
# src_unpack, src_compile and src_install for use in ebuilds, and provides
# supporting functions for langpack generation and installation.
-inherit mozextension
-
-case "${EAPI:-0}" in
- 6)
- inherit eapi7-ver ;;
- 7|8)
- ;;
- *)
- die "EAPI ${EAPI} is not supported, contact eclass maintainers" ;;
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_unpack src_compile src_install
+if [[ ! ${_MOZLINGUAS_V2_ECLASS} ]]; then
+_MOZLINGUAS_V2_ECLASS=1
+
+inherit mozextension
# @ECLASS_VARIABLE: MOZ_LANGS
# @DEFAULT_UNSET
@@ -33,59 +30,59 @@ EXPORT_FUNCTIONS src_unpack src_compile src_install
# Array containing the list of language pack xpis available for
# this release. The list can be updated with scripts/get_langs.sh from the
# mozilla overlay.
-: ${MOZ_LANGS:=()}
+: "${MOZ_LANGS:=()}"
# @ECLASS_VARIABLE: MOZ_PV
# @DEFAULT_UNSET
# @DESCRIPTION:
# Ebuild package version converted to equivalent upstream version.
# Defaults to ${PV}, and should be overridden for alphas, betas, and RCs
-: ${MOZ_PV:="${PV}"}
+: "${MOZ_PV:="${PV}"}"
# @ECLASS_VARIABLE: MOZ_PN
# @DEFAULT_UNSET
# @DESCRIPTION:
# Ebuild package name converted to equivalent upstream name.
# Defaults to ${PN}, and should be overridden for binary ebuilds.
-: ${MOZ_PN:="${PN}"}
+: "${MOZ_PN:="${PN}"}"
# @ECLASS_VARIABLE: MOZ_P
# @DEFAULT_UNSET
# @DESCRIPTION:
# Ebuild package name + version converted to upstream equivalent.
# Defaults to ${MOZ_PN}-${MOZ_PV}
-: ${MOZ_P:="${MOZ_PN}-${MOZ_PV}"}
+: "${MOZ_P:="${MOZ_PN}-${MOZ_PV}"}"
# @ECLASS_VARIABLE: MOZ_FTP_URI
# @DEFAULT_UNSET
# @DESCRIPTION:
# The ftp URI prefix for the release tarballs and language packs.
-: ${MOZ_FTP_URI:=""}
+: "${MOZ_FTP_URI:=""}"
# @ECLASS_VARIABLE: MOZ_HTTP_URI
# @PRE_INHERIT
# @DESCRIPTION:
# The http URI prefix for the release tarballs and language packs.
-: ${MOZ_HTTP_URI:=""}
+: "${MOZ_HTTP_URI:=""}"
# @ECLASS_VARIABLE: MOZ_LANGPACK_HTTP_URI
# @PRE_INHERIT
# @DESCRIPTION:
# An alternative http URI if it differs from official mozilla URI.
# Defaults to whatever MOZ_HTTP_URI was set to.
-: ${MOZ_LANGPACK_HTTP_URI:=${MOZ_HTTP_URI}}
+: "${MOZ_LANGPACK_HTTP_URI:=${MOZ_HTTP_URI}}"
# @ECLASS_VARIABLE: MOZ_LANGPACK_PREFIX
# @DESCRIPTION:
# The relative path up to the lang code in the langpack file URI.
# Defaults to ${MOZ_PV}/linux-i686/xpi/
-: ${MOZ_LANGPACK_PREFIX:="${MOZ_PV}/linux-i686/xpi/"}
+: "${MOZ_LANGPACK_PREFIX:="${MOZ_PV}/linux-i686/xpi/"}"
# @ECLASS_VARIABLE: MOZ_LANGPACK_SUFFIX
# @DESCRIPTION:
# The suffix after the lang code in the langpack file URI.
# Defaults to '.xpi'
-: ${MOZ_LANGPACK_SUFFIX:=".xpi"}
+: "${MOZ_LANGPACK_SUFFIX:=".xpi"}"
# @ECLASS_VARIABLE: MOZ_LANGPACK_UNOFFICIAL
# @DESCRIPTION:
@@ -95,7 +92,7 @@ EXPORT_FUNCTIONS src_unpack src_compile src_install
# unofficially by others (ie the Gentoo mozilla team). When
# this var is set, the distfile will have a .unofficial.xpi
# suffix.
-: ${MOZ_LANGPACK_UNOFFICIAL:=""}
+: "${MOZ_LANGPACK_UNOFFICIAL:=""}"
# @ECLASS_VARIABLE: MOZ_GENERATE_LANGPACKS
# @PRE_INHERIT
@@ -106,13 +103,13 @@ EXPORT_FUNCTIONS src_unpack src_compile src_install
# being downloaded and installed from upstream pre-built
# extensions. Primarily it supports pre-release builds.
# Defaults to empty.
-: ${MOZ_GENERATE_LANGPACKS:=""}
+: "${MOZ_GENERATE_LANGPACKS:=""}"
# @ECLASS_VARIABLE: MOZ_L10N_SOURCEDIR
# @DESCRIPTION:
# The path that l10n sources can be found at, once unpacked.
# Defaults to ${WORKDIR}/l10n-sources
-: ${MOZ_L10N_SOURCEDIR:="${WORKDIR}/l10n-sources"}
+: "${MOZ_L10N_SOURCEDIR:="${WORKDIR}/l10n-sources"}"
# @ECLASS_VARIABLE: MOZ_L10N_URI_PREFIX
# @PRE_INHERIT
@@ -123,21 +120,21 @@ EXPORT_FUNCTIONS src_unpack src_compile src_install
# SRC_URI when MOZ_GENERATE_LANGPACKS is set. If empty, nothing will
# be added to SRC_URI.
# Defaults to empty.
-: ${MOZ_L10N_URI_PREFIX:=""}
+: "${MOZ_L10N_URI_PREFIX:=""}"
# @ECLASS_VARIABLE: MOZ_L10N_URI_SUFFIX
# @DEFAULT_UNSET
# @DESCRIPTION:
# The suffix of l10n source distfiles.
# Defaults to '.tar.xz'
-: ${MOZ_L10N_URI_SUFFIX:=".tar.xz"}
+: "${MOZ_L10N_URI_SUFFIX:=".tar.xz"}"
# @ECLASS_VARIABLE: MOZ_FORCE_UPSTREAM_L10N
# @DEFAULT_UNSET
# @DESCRIPTION:
# Set this to use upstream langpacks even if the package normally
# shouldn't (i.e. it is an alpha or beta package)
-: ${MOZ_FORCE_UPSTREAM_L10N:=""}
+: "${MOZ_FORCE_UPSTREAM_L10N:=""}"
# @ECLASS_VARIABLE: MOZ_TOO_REGIONALIZED_FOR_L10N
# @INTERNAL
@@ -149,7 +146,7 @@ MOZ_TOO_REGIONALIZED_FOR_L10N=( fy-NL ga-IE gu-IN hi-IN hy-AM nb-NO nn-NO pa-IN
# @DESCRIPTION:
# Install langpacks as .xpi file instead of unpacked directory.
# Leave unset to install unpacked
-: ${MOZ_INSTALL_L10N_XPIFILE:=""}
+: "${MOZ_INSTALL_L10N_XPIFILE:=""}"
# Add l10n_* to IUSE according to available language packs
# No language packs for alphas and betas
@@ -402,3 +399,7 @@ mozlinguas_src_install() {
mozlinguas-v2_src_install() {
mozlinguas_src_install
}
+
+fi
+
+EXPORT_FUNCTIONS src_unpack src_compile src_install
diff --git a/eclass/multibuild.eclass b/eclass/multibuild.eclass
index 7ae03adbe18c..f15d3327c7dd 100644
--- a/eclass/multibuild.eclass
+++ b/eclass/multibuild.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: multibuild.eclass
@@ -14,7 +14,10 @@
# implementations).
case ${EAPI} in
- 6|7|8) ;;
+ 6|7|8)
+ # backwards compatibility for run_in_build_dir
+ inherit out-of-source-utils
+ ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -126,8 +129,8 @@ multibuild_foreach_variant() {
_multibuild_run "${@}" \
> >(exec tee -a "${T}/build-${MULTIBUILD_ID}.log") 2>&1
lret=${?}
+ [[ ${ret} -eq 0 && ${lret} -ne 0 ]] && ret=${lret}
done
- [[ ${ret} -eq 0 && ${lret} -ne 0 ]] && ret=${lret}
return ${ret}
}
@@ -167,34 +170,13 @@ multibuild_copy_sources() {
_multibuild_create_source_copy() {
einfo "${MULTIBUILD_VARIANT}: copying to ${BUILD_DIR}"
- # enable reflinking if possible to make this faster
- cp -p -R --reflink=auto \
+ cp -p -R \
"${_MULTIBUILD_INITIAL_BUILD_DIR}" "${BUILD_DIR}" || die
}
multibuild_foreach_variant _multibuild_create_source_copy
}
-# @FUNCTION: run_in_build_dir
-# @USAGE: <argv>...
-# @DESCRIPTION:
-# Run the given command in the directory pointed by BUILD_DIR.
-run_in_build_dir() {
- debug-print-function ${FUNCNAME} "${@}"
- local ret
-
- [[ ${#} -ne 0 ]] || die "${FUNCNAME}: no command specified."
- [[ ${BUILD_DIR} ]] || die "${FUNCNAME}: BUILD_DIR not set."
-
- mkdir -p "${BUILD_DIR}" || die
- pushd "${BUILD_DIR}" >/dev/null || die
- "${@}"
- ret=${?}
- popd >/dev/null || die
-
- return ${ret}
-}
-
# @FUNCTION: multibuild_merge_root
# @USAGE: <src-root> <dest-root>
# @DESCRIPTION:
@@ -207,8 +189,7 @@ multibuild_merge_root() {
local src=${1}
local dest=${2}
- # enable reflinking if possible to make this faster
- cp -a --reflink=auto "${src}"/. "${dest}"/ || die "${MULTIBUILD_VARIANT:-(unknown)}: merging image failed"
+ cp -a "${src}"/. "${dest}"/ || die "${MULTIBUILD_VARIANT:-(unknown)}: merging image failed"
rm -rf "${src}" || die
}
diff --git a/eclass/multilib-build.eclass b/eclass/multilib-build.eclass
index 42efdc9def61..1774ad057430 100644
--- a/eclass/multilib-build.eclass
+++ b/eclass/multilib-build.eclass
@@ -1,4 +1,4 @@
-# Copyright 2013-2022 Gentoo Authors
+# Copyright 2013-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: multilib-build.eclass
@@ -7,7 +7,6 @@
# @AUTHOR:
# Author: Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 6 7 8
-# @PROVIDES: multibuild
# @BLURB: flags and utility functions for building multilib packages
# @DESCRIPTION:
# The multilib-build.eclass exports USE flags and utility functions
@@ -140,7 +139,7 @@ unset -f _multilib_build_set_globals
# If multilib is disabled, the default ABI will be returned
# in order to enforce consistent testing with multilib code.
multilib_get_enabled_abis() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
local pairs=( $(multilib_get_enabled_abi_pairs) )
echo "${pairs[@]#*.}"
@@ -155,7 +154,7 @@ multilib_get_enabled_abis() {
# If multilib is disabled, the default ABI will be returned
# along with empty <use-flag>.
multilib_get_enabled_abi_pairs() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
local abis=( $(get_all_abis) )
@@ -198,7 +197,7 @@ multilib_get_enabled_abi_pairs() {
# @DESCRIPTION:
# Initialize the environment for ABI selected for multibuild.
_multilib_multibuild_wrapper() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
local ABI=${MULTIBUILD_VARIANT#*.}
local -r MULTILIB_ABI_FLAG=${MULTIBUILD_VARIANT%.*}
@@ -218,7 +217,7 @@ _multilib_multibuild_wrapper() {
# If multilib support is disabled, it just runs the commands. No setup
# is done.
multilib_foreach_abi() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
local MULTIBUILD_VARIANTS=( $(multilib_get_enabled_abi_pairs) )
multibuild_foreach_variant _multilib_multibuild_wrapper "${@}"
@@ -237,7 +236,7 @@ multilib_foreach_abi() {
# This function used to run multiple commands in parallel. Now it's just
# a deprecated alias to multilib_foreach_abi.
multilib_parallel_foreach_abi() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
local MULTIBUILD_VARIANTS=( $(multilib_get_enabled_abi_pairs) )
multibuild_foreach_variant _multilib_multibuild_wrapper "${@}"
@@ -295,7 +294,7 @@ multilib_check_headers() {
# to ABI-specific build directory matching BUILD_DIR used by
# multilib_foreach_abi().
multilib_copy_sources() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
local MULTIBUILD_VARIANTS=( $(multilib_get_enabled_abi_pairs) )
multibuild_copy_sources
@@ -373,7 +372,7 @@ multilib_copy_sources() {
# After all wrappers are prepared, multilib_install_wrappers shall
# be called to commit them to the installation tree.
multilib_prepare_wrappers() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
[[ ${#} -le 1 ]] || die "${FUNCNAME}: too many arguments"
@@ -537,7 +536,7 @@ _EOF_
# between the calls to multilib_prepare_wrappers
# and multilib_install_wrappers.
multilib_install_wrappers() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
[[ ${#} -le 1 ]] || die "${FUNCNAME}: too many arguments"
@@ -558,13 +557,31 @@ multilib_install_wrappers() {
# Determine whether the currently built ABI is the profile native.
# Return true status (0) if that is true, otherwise false (1).
multilib_is_native_abi() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
[[ ${#} -eq 0 ]] || die "${FUNCNAME}: too many arguments"
[[ ${COMPLETE_MULTILIB} == yes || ${ABI} == ${DEFAULT_ABI} ]]
}
+# @FUNCTION: multilib_native_use
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Like the standard use command, but only yields true if
+# multilib_is_native_abi and use <flag> are true, otherwise false.
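+#
+# Example (illustrative sketch; "doc" stands for any USE flag of the package):
+# @CODE
+# if multilib_native_use doc; then
+#     emake -C docs html
+# fi
+# @CODE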
+multilib_native_use() {
+ multilib_is_native_abi && use "$@"
+}
+
+# @FUNCTION: multilib_native_usev
+# @USAGE: <flag> [<opt-value>]
+# @DESCRIPTION:
+# Like the standard usev command, but only prints output
+# if multilib_is_native_abi and usev <flag> are true.
+multilib_native_usev() {
+ multilib_is_native_abi && usev "$@"
+}
+
# @FUNCTION: multilib_native_use_with
# @USAGE: <flag> [<opt-name> [<opt-value>]]
# @DESCRIPTION:
diff --git a/eclass/multilib.eclass b/eclass/multilib.eclass
index 8590bbdfbff0..bf9c88f7e6a4 100644
--- a/eclass/multilib.eclass
+++ b/eclass/multilib.eclass
@@ -1,17 +1,16 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: multilib.eclass
# @MAINTAINER:
# toolchain@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: This eclass is for all functions pertaining to handling multilib configurations.
# @DESCRIPTION:
# This eclass is for all functions pertaining to handling multilib configurations.
-case ${EAPI:-0} in
- # EAPI=0 is still used by crossdev, bug #797367
- 0|5|6|7|8) ;;
+case ${EAPI} in
+ 6|7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -27,7 +26,7 @@ export CFLAGS_default
export LDFLAGS_default
export CHOST_default=${CHOST_default:-${CHOST}}
export CTARGET_default=${CTARGET_default:-${CTARGET:-${CHOST_default}}}
-export LIBDIR_default=${CONF_LIBDIR:-"lib"}
+export LIBDIR_default="lib"
export KERNEL_ABI=${KERNEL_ABI:-${DEFAULT_ABI}}
# @FUNCTION: has_multilib_profile
@@ -40,32 +39,6 @@ has_multilib_profile() {
[ -n "${MULTILIB_ABIS}" -a "${MULTILIB_ABIS}" != "${MULTILIB_ABIS/ /}" ]
}
-# @FUNCTION: get_libdir
-# @RETURN: the libdir for the selected ABI
-# @DESCRIPTION:
-# This function simply returns the desired lib directory. With portage
-# 2.0.51, we now have support for installing libraries to lib32/lib64
-# to accomidate the needs of multilib systems. It's no longer a good idea
-# to assume all libraries will end up in lib. Replace any (sane) instances
-# where lib is named directly with $(get_libdir) if possible.
-#
-# Jeremy Huddleston <eradicator@gentoo.org> (23 Dec 2004):
-# Added support for ${ABI} and ${DEFAULT_ABI}. If they're both not set,
-# fall back on old behavior. Any profile that has these set should also
-# depend on a newer version of portage (not yet released) which uses these
-# over CONF_LIBDIR in econf, dolib, etc...
-if [[ ${EAPI} == [05] ]] ; then
- get_libdir() {
- local CONF_LIBDIR
- if [ -n "${CONF_LIBDIR_OVERRIDE}" ] ; then
- # if there is an override, we want to use that... always.
- echo ${CONF_LIBDIR_OVERRIDE}
- else
- get_abi_LIBDIR
- fi
- }
-fi
-
# @FUNCTION: get_abi_var
# @USAGE: <VAR> [ABI]
# @RETURN: returns the value of ${<VAR>_<ABI>} which should be set in make.defaults
@@ -74,7 +47,7 @@ fi
# ex:
# CFLAGS=$(get_abi_var CFLAGS sparc32) # CFLAGS=-m32
#
-# Note that the prefered method is to set CC="$(tc-getCC) $(get_abi_CFLAGS)"
+# Note that the preferred method is to set CC="$(tc-getCC) $(get_abi_CFLAGS)"
# This will hopefully be added to portage soon...
#
# If <ABI> is not specified, ${ABI} is used.
@@ -240,7 +213,7 @@ number_abis() {
# Returns: null string (almost everywhere) || .exe (mingw*) || ...
get_exeext() {
case ${CHOST} in
- *-cygwin*|mingw*|*-mingw*) echo ".exe";;
+ mingw*|*-mingw*) echo ".exe";;
esac
}
@@ -257,11 +230,8 @@ get_libname() {
local libname
local ver=$1
case ${CHOST} in
- *-cygwin*) libname="dll.a";; # import lib
mingw*|*-mingw*) libname="dll";;
*-darwin*) libname="dylib";;
- *-mint*) libname="irrelevant";;
- hppa*-hpux*) libname="sl";;
*) libname="so";;
esac
@@ -270,9 +240,7 @@ get_libname() {
else
for ver in "$@" ; do
case ${CHOST} in
- *-cygwin*) echo ".${ver}.${libname}";;
*-darwin*) echo ".${ver}.${libname}";;
- *-mint*) echo ".${libname}";;
*) echo ".${libname}.${ver}";;
esac
done
@@ -315,8 +283,8 @@ multilib_env() {
# - https://bugs.gentoo.org/675954
# - https://gcc.gnu.org/PR90077
# - https://github.com/gentoo/musl/issues/245
- : ${MULTILIB_ABIS=default}
- : ${DEFAULT_ABI=default}
+ : "${MULTILIB_ABIS=default}"
+ : "${DEFAULT_ABI=default}"
export MULTILIB_ABIS DEFAULT_ABI
return
fi
@@ -338,8 +306,8 @@ multilib_env() {
export CTARGET_arm64=${CHOST_arm64}
export LIBDIR_arm64="lib64"
- : ${MULTILIB_ABIS=arm64}
- : ${DEFAULT_ABI=arm64}
+ : "${MULTILIB_ABIS=arm64}"
+ : "${DEFAULT_ABI=arm64}"
;;
x86_64*)
export CFLAGS_x86=${CFLAGS_x86--m32}
@@ -364,12 +332,12 @@ multilib_env() {
case ${CTARGET} in
*-gnux32)
- : ${MULTILIB_ABIS=x32 amd64 x86}
- : ${DEFAULT_ABI=x32}
+ : "${MULTILIB_ABIS=x32 amd64 x86}"
+ : "${DEFAULT_ABI=x32}"
;;
*)
- : ${MULTILIB_ABIS=amd64 x86}
- : ${DEFAULT_ABI=amd64}
+ : "${MULTILIB_ABIS=amd64 x86}"
+ : "${DEFAULT_ABI=amd64}"
;;
esac
;;
@@ -379,8 +347,8 @@ multilib_env() {
export CTARGET_lp64d=${CTARGET}
export LIBDIR_lp64d=${LIBDIR_lp64d-lib64}
- : ${MULTILIB_ABIS=lp64d}
- : ${DEFAULT_ABI=lp64d}
+ : "${MULTILIB_ABIS=lp64d}"
+ : "${DEFAULT_ABI=lp64d}"
;;
mips64*|mipsisa64*)
export CFLAGS_o32=${CFLAGS_o32--mabi=32}
@@ -399,8 +367,8 @@ multilib_env() {
export CTARGET_n64=${CHOST_n64}
export LIBDIR_n64="lib64"
- : ${MULTILIB_ABIS=n64 n32 o32}
- : ${DEFAULT_ABI=n32}
+ : "${MULTILIB_ABIS=n64 n32 o32}"
+ : "${DEFAULT_ABI=n32}"
;;
powerpc64*)
export CFLAGS_ppc=${CFLAGS_ppc--m32}
@@ -413,12 +381,12 @@ multilib_env() {
export CTARGET_ppc64=${CHOST_ppc64}
export LIBDIR_ppc64="lib64"
- : ${MULTILIB_ABIS=ppc64 ppc}
- : ${DEFAULT_ABI=ppc64}
+ : "${MULTILIB_ABIS=ppc64 ppc}"
+ : "${DEFAULT_ABI=ppc64}"
;;
riscv64*)
- : ${MULTILIB_ABIS=lp64d lp64 ilp32d ilp32}
- : ${DEFAULT_ABI=lp64d}
+ : "${MULTILIB_ABIS=lp64d lp64 ilp32d ilp32}"
+ : "${DEFAULT_ABI=lp64d}"
# the default abi is set to the 1-level libdir default
@@ -449,8 +417,8 @@ multilib_env() {
export LIBDIR_ilp32=${LIBDIR_ilp32-lib32/ilp32}
;;
riscv32*)
- : ${MULTILIB_ABIS=ilp32d ilp32}
- : ${DEFAULT_ABI=ilp32d}
+ : "${MULTILIB_ABIS=ilp32d ilp32}"
+ : "${DEFAULT_ABI=ilp32d}"
# the default abi is set to the 1-level libdir default
@@ -481,8 +449,8 @@ multilib_env() {
export CTARGET_s390x=${CHOST_s390x}
export LIBDIR_s390x="lib64"
- : ${MULTILIB_ABIS=s390x s390}
- : ${DEFAULT_ABI=s390x}
+ : "${MULTILIB_ABIS=s390x s390}"
+ : "${DEFAULT_ABI=s390x}"
;;
sparc64*)
export CFLAGS_sparc32=${CFLAGS_sparc32--m32}
@@ -495,12 +463,12 @@ multilib_env() {
export CTARGET_sparc64=${CHOST_sparc64}
export LIBDIR_sparc64="lib64"
- : ${MULTILIB_ABIS=sparc64 sparc32}
- : ${DEFAULT_ABI=sparc64}
+ : "${MULTILIB_ABIS=sparc64 sparc32}"
+ : "${DEFAULT_ABI=sparc64}"
;;
*)
- : ${MULTILIB_ABIS=default}
- : ${DEFAULT_ABI=default}
+ : "${MULTILIB_ABIS=default}"
+ : "${DEFAULT_ABI=default}"
;;
esac
diff --git a/eclass/multiprocessing.eclass b/eclass/multiprocessing.eclass
index e55be636a02c..13d6a92f2f2e 100644
--- a/eclass/multiprocessing.eclass
+++ b/eclass/multiprocessing.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: multiprocessing.eclass
@@ -64,17 +64,35 @@ get_nproc() {
fi
}
+# @FUNCTION: _get_all_makeopts
+# @INTERNAL
+# @DESCRIPTION:
+# Returns ${MAKEOPTS} ${GNUMAKEFLAGS} ${MAKEFLAGS}.
+_get_all_makeopts() {
+ echo "${MAKEOPTS} ${GNUMAKEFLAGS} ${MAKEFLAGS}"
+}
+
+# @FUNCTION: get_makeopts_jobs
+# @USAGE: [default-jobs]
+# @DESCRIPTION:
+# Return the number of jobs extracted from the make options (MAKEOPTS,
+# GNUMAKEFLAGS, MAKEFLAGS). If the make options do not specify a number,
+# then either the provided default is returned, or 1.
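+#
+# Example (illustrative; assumes the user set MAKEOPTS="-j8"):
+# @CODE
+# jobs=$(get_makeopts_jobs)    # yields "8"
+# jobs=$(get_makeopts_jobs 4)  # still "8"; "4" is only used when -j carries no number
+# @CODE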
+get_makeopts_jobs() {
+ makeopts_jobs "$(_get_all_makeopts)" "${1:-1}"
+}
+
# @FUNCTION: makeopts_jobs
# @USAGE: [${MAKEOPTS}] [${inf:-$(( $(get_nproc) + 1 ))}]
# @DESCRIPTION:
-# Searches the arguments (defaults to ${MAKEOPTS}) and extracts the jobs number
+# Searches the arguments (or sensible defaults) and extracts the jobs number
# specified therein. Useful for running non-make tools in parallel too.
# i.e. if the user has MAKEOPTS=-j9, this will echo "9" -- we can't return the
# number as bash normalizes it to [0, 255]. If the flags haven't specified a
# -j flag, then "1" is shown as that is the default `make` uses. If the flags
# specify -j without a number, ${inf} is returned (defaults to nproc).
makeopts_jobs() {
- [[ $# -eq 0 ]] && set -- "${MAKEOPTS}"
+ [[ $# -eq 0 ]] && set -- "$(_get_all_makeopts)"
# This assumes the first .* will be more greedy than the second .*
# since POSIX doesn't specify a non-greedy match (i.e. ".*?").
local jobs=$(echo " $* " | sed -r -n \
@@ -83,10 +101,20 @@ makeopts_jobs() {
echo ${jobs:-1}
}
+# @FUNCTION: get_makeopts_loadavg
+# @USAGE: [default-loadavg]
+# @DESCRIPTION:
+# Return the value for the load-average extracted from the make options (MAKEOPTS,
+# GNUMAKEFLAGS, MAKEFLAGS). If the make options do not specify a value, then
+# either the optional provided default is returned, or 999.
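+#
+# Example (illustrative; assumes the user set MAKEOPTS="-j8 -l4"):
+# @CODE
+# lavg=$(get_makeopts_loadavg)     # yields "4"
+# lavg=$(get_makeopts_loadavg 16)  # still "4"; "16" would apply only if no load limit were set
+# @CODE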
+get_makeopts_loadavg() {
+ makeopts_loadavg "$(_get_all_makeopts)" "${1:-999}"
+}
+
# @FUNCTION: makeopts_loadavg
# @USAGE: [${MAKEOPTS}] [${inf:-999}]
# @DESCRIPTION:
-# Searches the arguments (defaults to ${MAKEOPTS}) and extracts the value set
+# Searches the arguments (or sensible defaults) and extracts the value set
# for load-average. For make and ninja based builds this will mean new jobs are
# not only limited by the jobs-value, but also by the current load - which might
# get excessive due to I/O and not just due to CPU load.
@@ -95,7 +123,7 @@ makeopts_jobs() {
# If no limit is specified or --load-average is used without a number, ${inf}
# (defaults to 999) is returned.
makeopts_loadavg() {
- [[ $# -eq 0 ]] && set -- "${MAKEOPTS}"
+ [[ $# -eq 0 ]] && set -- "$(_get_all_makeopts)"
# This assumes the first .* will be more greedy than the second .*
# since POSIX doesn't specify a non-greedy match (i.e. ".*?").
local lavg=$(echo " $* " | sed -r -n \
diff --git a/eclass/myspell-r2.eclass b/eclass/myspell-r2.eclass
index 6dbd1e19e133..05d48dc97dee 100644
--- a/eclass/myspell-r2.eclass
+++ b/eclass/myspell-r2.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: myspell-r2.eclass
@@ -6,7 +6,7 @@
# Conrad Kostecki <conikost@gentoo.org>
# @AUTHOR:
# Tomáš Chvátal <scarabeus@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: An eclass to streamline the construction of ebuilds for new Myspell dictionaries.
# @DESCRIPTION:
# The myspell-r2 eclass is designed to streamline the construction of ebuilds for
@@ -16,39 +16,35 @@
# @DEFAULT_UNSET
# @DESCRIPTION:
# Array variable containing list of all dictionary files.
+# @CODE
# MYSPELL_DICT=( "file.dic" "dir/file2.aff" )
+# @CODE
# @ECLASS_VARIABLE: MYSPELL_HYPH
# @DEFAULT_UNSET
# @DESCRIPTION:
# Array variable containing list of all hyphenation files.
+# @CODE
# MYSPELL_HYPH=( "file.dic" "dir/file2.dic" )
+# @CODE
# @ECLASS_VARIABLE: MYSPELL_THES
# @DEFAULT_UNSET
# @DESCRIPTION:
# Array variable containing list of all thesaurus files.
+# @CODE
# MYSPELL_THES=( "file.dat" "dir/file2.idx" )
+# @CODE
-case ${EAPI:-0} in
- [5-8])
- ;;
- *)
- die "${ECLASS}: EAPI ${EAPI:-0} not supported"
- ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_unpack src_install
-
# Basically no extra deps needed.
# Unzip is required for .oxt libreoffice extensions
# which are just fancy zip files.
-if [[ ${EAPI:-0} != [56] ]]; then
- BDEPEND="app-arch/unzip"
-else
- DEPEND="app-arch/unzip"
- RDEPEND=""
-fi
+BDEPEND="app-arch/unzip"
# by default this stuff does not have any folder in the pack
S="${WORKDIR}"
@@ -65,7 +61,7 @@ myspell-r2_src_unpack() {
case ${f} in
*.oxt)
echo ">>> Unpacking "${DISTDIR}/${f}" to ${PWD}"
- unzip -qoj ${DISTDIR}/${f}
+ unzip -qoj "${DISTDIR}"/${f}
assert "failed unpacking ${DISTDIR}/${f}"
;;
*) unpack ${f} ;;
@@ -132,3 +128,5 @@ myspell-r2_src_install() {
fi
done
}
+
+EXPORT_FUNCTIONS src_unpack src_install
diff --git a/eclass/netsurf.eclass b/eclass/netsurf.eclass
index 07b5ab3b5284..b69c93d87ca3 100644
--- a/eclass/netsurf.eclass
+++ b/eclass/netsurf.eclass
@@ -1,17 +1,17 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: netsurf.eclass
# @MAINTAINER:
-# maintainer-needed@gentoo.org
-# @SUPPORTED_EAPIS: 7
+# mjo@gentoo.org
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Handle buildsystem of www.netsurf-browser.org components
# @DESCRIPTION:
# Handle setting up the build environment for the netsurf build system
-case "${EAPI:-0}" in
- 7) ;;
- *) die "EAPI=${EAPI} is not supported" ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
inherit toolchain-funcs
diff --git a/eclass/ninja-utils.eclass b/eclass/ninja-utils.eclass
index 0dffd2eb86ff..f07c7d410987 100644
--- a/eclass/ninja-utils.eclass
+++ b/eclass/ninja-utils.eclass
@@ -1,25 +1,24 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ninja-utils.eclass
# @MAINTAINER:
-# Michał Górny <mgorny@gentoo.org>
-# Mike Gilbert <floppym@gentoo.org>
+# base-system@gentoo.org
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
# Mike Gilbert <floppym@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
-# @BLURB: common bits to run dev-util/ninja builder
+# @SUPPORTED_EAPIS: 7 8
+# @BLURB: common bits to run app-alternatives/ninja builder
# @DESCRIPTION:
# This eclass provides a single function -- eninja -- that can be used
# to run the ninja builder alike emake. It does not define any
-# dependencies, you need to depend on dev-util/ninja yourself. Since
+# dependencies, you need to depend on app-alternatives/ninja yourself. Since
# ninja is rarely used stand-alone, most of the time this eclass will
# be used indirectly by the eclasses for other build systems (CMake,
# Meson).
case ${EAPI} in
- 5|6|7|8) ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -34,12 +33,12 @@ _NINJA_UTILS_ECLASS=1
# but other values can be set where NINJA_DEPEND will then be set
# to a blank variable.
# The default is set to "ninja".
-: ${NINJA:=ninja}
+: "${NINJA:=ninja}"
# @ECLASS_VARIABLE: NINJA_DEPEND
# @OUTPUT_VARIABLE
# @DESCRIPTION:
-# Contains a set of build-time depenendencies based on the NINJA setting.
+# Contains a set of build-time dependencies based on the NINJA setting.
# @ECLASS_VARIABLE: NINJAOPTS
# @DEFAULT_UNSET
@@ -48,37 +47,49 @@ _NINJA_UTILS_ECLASS=1
# supposed to be set in make.conf. If unset, eninja() will convert
# MAKEOPTS instead.
+# @ECLASS_VARIABLE: NINJA_VERBOSE
+# @USER_VARIABLE
+# @DESCRIPTION:
+# Set to OFF to disable verbose messages during compilation
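+#
+# Example (illustrative, e.g. in make.conf):
+# @CODE
+# NINJA_VERBOSE=OFF
+# @CODE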
+: "${NINJA_VERBOSE:=ON}"
+
inherit multiprocessing
-case "${NINJA}" in
- ninja)
- NINJA_DEPEND=">=dev-util/ninja-1.8.2"
- ;;
- samu)
- NINJA_DEPEND="dev-util/samurai"
- ;;
- *)
- NINJA_DEPEND=""
- ;;
-esac
+NINJA_DEPEND="app-alternatives/ninja"
+
+# @FUNCTION: get_NINJAOPTS
+# @DESCRIPTION:
+# Get the value of NINJAOPTS, inferring them from MAKEOPTS if unset.
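+#
+# Example (illustrative; with NINJAOPTS unset and MAKEOPTS="-j8 -l4"
+# this expands to roughly "-j8 -l4"):
+# @CODE
+# ${NINJA} $(get_NINJAOPTS) -C "${BUILD_DIR}"
+# @CODE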
+get_NINJAOPTS() {
+ if [[ -z ${NINJAOPTS+set} ]]; then
+ NINJAOPTS="-j$(get_makeopts_jobs 999) -l$(get_makeopts_loadavg 0)"
+ fi
+ echo "${NINJAOPTS}"
+}
# @FUNCTION: eninja
# @USAGE: [<args>...]
# @DESCRIPTION:
# Call Ninja, passing the NINJAOPTS (or converted MAKEOPTS), followed
-# by the supplied arguments. This function dies if ninja fails. Starting
-# with EAPI 6, it also supports being called via 'nonfatal'.
+# by the supplied arguments. This function dies if ninja fails. It
+# also supports being called via 'nonfatal'.
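+#
+# Example (illustrative):
+# @CODE
+# src_compile() {
+#     eninja -C "${BUILD_DIR}"
+# }
+# @CODE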
eninja() {
- local nonfatal_args=()
- [[ ${EAPI} != 5 ]] && nonfatal_args+=( -n )
+ case "${NINJA}" in
+ ninja|samu)
+ ;;
+ *)
+ ewarn "Unknown value '${NINJA}' for \${NINJA}"
+ ;;
+ esac
- if [[ -z ${NINJAOPTS+set} ]]; then
- NINJAOPTS="-j$(makeopts_jobs "${MAKEOPTS}" 999) -l$(makeopts_loadavg "${MAKEOPTS}" 0)"
- fi
- [[ -n "${NINJA_DEPEND}" ]] || ewarn "Unknown value '${NINJA}' for \${NINJA}"
- set -- "${NINJA}" -v ${NINJAOPTS} "$@"
+ local v
+ case "${NINJA_VERBOSE}" in
+ OFF) ;;
+ *) v="-v"
+ esac
+ set -- "${NINJA}" ${v} $(get_NINJAOPTS) "$@"
echo "$@" >&2
- "$@" || die "${nonfatal_args[@]}" "${*} failed"
+ "$@" || die -n "${*} failed"
}
fi
diff --git a/eclass/nuget.eclass b/eclass/nuget.eclass
new file mode 100644
index 000000000000..ac8629848eca
--- /dev/null
+++ b/eclass/nuget.eclass
@@ -0,0 +1,290 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: nuget.eclass
+# @MAINTAINER:
+# Gentoo Dotnet project <dotnet@gentoo.org>
+# @AUTHOR:
+# Anna Figueiredo Gomes <navi@vlhl.dev>
+# Maciej Barć <xgqt@gentoo.org>
+# @SUPPORTED_EAPIS: 8
+# @BLURB: common functions and variables for handling .NET NuGets
+# @DESCRIPTION:
+# This eclass is designed to provide support for .NET NuGet's ".nupkg" files.
+# It is used to handle NuGets installation and usage.
+# "dotnet-pkg" and "dotnet-pkg-utils" inherit this eclass.
+#
+# This eclass does not export any phase functions, for that see
+# the "dotnet-pkg" eclass.
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_NUGET_ECLASS} ]] ; then
+_NUGET_ECLASS=1
+
+# @ECLASS_VARIABLE: NUGET_SYSTEM_NUGETS
+# @DESCRIPTION:
+# Location of the system NuGet packages directory.
+readonly NUGET_SYSTEM_NUGETS=/opt/dotnet-nugets
+
+# @ECLASS_VARIABLE: NUGET_APIS
+# @PRE_INHERIT
+# @DESCRIPTION:
+# NuGet API URLs to use for precompiled NuGet package ".nupkg" downloads.
+# Set this variable pre-inherit.
+#
+# Defaults to an array of one item:
+# "https://api.nuget.org/v3-flatcontainer"
+#
+# Example:
+# @CODE
+# NUGET_APIS+=( "https://api.nuget.org/v3-flatcontainer" )
+# inherit nuget
+# SRC_URI="https://example.com/example.tar.xz"
+# SRC_URI+=" ${NUGET_URIS} "
+# @CODE
+if [[ -z "${NUGET_APIS}" ]] ; then
+ NUGET_APIS=( "https://api.nuget.org/v3-flatcontainer" )
+fi
+
+# @ECLASS_VARIABLE: NUGET_PACKAGES
+# @DEFAULT_UNSET
+# @PRE_INHERIT
+# @DESCRIPTION:
+# Path from which NuGets will be restored.
+# This is a special variable that modifies the behavior of "dotnet".
+#
+# Defaults to ${T}/nugets for use with "NUGETS" but may be set to a custom
+# location to, for example, restore NuGets extracted from a prepared archive.
+# Do not set this variable in conjunction with non-empty "NUGETS".
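+#
+# Example (illustrative; the path is arbitrary):
+# @CODE
+# NUGET_PACKAGES="${WORKDIR}/nuget-cache"
+#
+# inherit nuget
+# @CODE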
+if [[ -n "${NUGETS}" || -z "${NUGET_PACKAGES}" ]] ; then
+ NUGET_PACKAGES="${T}"/nugets
+fi
+export NUGET_PACKAGES
+
+# @ECLASS_VARIABLE: NUGETS
+# @DEFAULT_UNSET
+# @PRE_INHERIT
+# @DESCRIPTION:
+# String containing all NuGet packages that need to be downloaded.
+#
+# Used by "_nuget_uris".
+#
+# Example:
+# @CODE
+# NUGETS="
+# ImGui.NET@1.87.2
+# Config.Net@4.19.0
+# "
+#
+# inherit dotnet-pkg
+#
+# ...
+#
+# SRC_URI+=" ${NUGET_URIS} "
+# @CODE
+
+# @ECLASS_VARIABLE: NUGET_URIS
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# List of URIs to put in SRC_URI created from NUGETS variable.
+
+# @FUNCTION: _nuget_set_nuget_uris
+# @USAGE: <nugets>
+# @DESCRIPTION:
+# Generates the URIs to put in SRC_URI to help fetch dependencies.
+# Constructs a list of NuGets from its arguments.
+# The value is set as "NUGET_URIS".
+_nuget_set_nuget_uris() {
+ local nugets="${1}"
+
+ NUGET_URIS=""
+
+ local nuget
+ local name version
+ local nuget_api url
+ for nuget in ${nugets} ; do
+ name="${nuget%@*}"
+ version="${nuget##*@}"
+
+ for nuget_api in "${NUGET_APIS[@]}" ; do
+ case ${nuget_api%/} in
+ *dev.gentoo.org/~* )
+ url="${nuget_api}/${name}.${version}.nupkg"
+ ;;
+ */v2 )
+ url="${nuget_api}/package/${name}/${version}
+ -> ${name}.${version}.nupkg"
+ ;;
+ * )
+ url="${nuget_api}/${name}/${version}/${name}.${version}.nupkg"
+ ;;
+ esac
+
+ NUGET_URIS+="${url} "
+ done
+ done
+}
+
+_nuget_set_nuget_uris "${NUGETS}"
+
+# @FUNCTION: nuget_link
+# @USAGE: <nuget-path>
+# @DESCRIPTION:
+# Link a specified NuGet package at "nuget-path" to the "NUGET_PACKAGES"
+# directory.
+#
+# Example:
+# @CODE
+# nuget_link "${DISTDIR}"/pkg.0.nupkg
+# @CODE
+#
+# This function is used inside "dotnet-pkg_src_unpack"
+# from the "dotnet-pkg" eclass.
+nuget_link() {
+ [[ -z "${1}" ]] && die "${FUNCNAME[0]}: no nuget path given"
+
+ mkdir -p "${NUGET_PACKAGES}" || die
+
+ local nuget_name="${1##*/}"
+
+ if [[ -f "${NUGET_PACKAGES}/${nuget_name}" ]] ; then
+ eqawarn "QA Notice: \"${nuget_name}\" already exists, not linking it"
+ else
+ ln -s "${1}" "${NUGET_PACKAGES}/${nuget_name}" || die
+ fi
+}
+
+# @FUNCTION: nuget_link-system-nugets
+# @DESCRIPTION:
+# Link all system NuGet packages to the "NUGET_PACKAGES" directory.
+#
+# Example:
+# @CODE
+# src_unpack() {
+# nuget_link-system-nugets
+# default
+# }
+# @CODE
+#
+# This function is used inside "dotnet-pkg_src_unpack"
+# from the "dotnet-pkg" eclass.
+nuget_link-system-nugets() {
+ local runtime_nuget
+ for runtime_nuget in "${EPREFIX}${NUGET_SYSTEM_NUGETS}"/*.nupkg ; do
+ if [[ -f "${runtime_nuget}" ]] ; then
+ nuget_link "${runtime_nuget}"
+ fi
+ done
+}
+
+# @FUNCTION: nuget_link-nuget-archives
+# @DESCRIPTION:
+# Link NuGet packages from package source files to the "NUGET_PACKAGES"
+# directory.
+#
+# This is a complementary function to "nuget_unpack-non-nuget-archives".
+#
+# This function is used inside "dotnet-pkg_src_unpack"
+# from the "dotnet-pkg" eclass.
+nuget_link-nuget-archives() {
+ local archive
+ for archive in ${A} ; do
+ case "${archive}" in
+ *.nupkg )
+ nuget_link "${DISTDIR}/${archive}"
+ ;;
+ * )
+ :
+ ;;
+ esac
+ done
+}
+
+# @FUNCTION: nuget_unpack-non-nuget-archives
+# @DESCRIPTION:
+# Unpack all package source files that are not NuGet packages.
+#
+# This is a complementary function to "nuget_link-nuget-archives".
+#
+# This function is used inside "dotnet-pkg_src_unpack"
+# from the "dotnet-pkg" eclass.
+nuget_unpack-non-nuget-archives() {
+ local archive
+ for archive in ${A} ; do
+ case "${archive}" in
+ *.nupkg )
+ :
+ ;;
+ * )
+ unpack "${archive}"
+ ;;
+ esac
+ done
+}
+
+# @FUNCTION: nuget_writeconfig
+# @USAGE: <path>
+# @DESCRIPTION:
+# Create a "NuGet.config" config file that can be used to overwrite any other
+# Nuget configuration file in order to prevent Nuget executable from accessing
+# the network or undesired NuPkg package sources.
+#
+# If given path ends with a slash, a file name "NuGet.config" is assumed,
+# otherwise contents are written to specified file path exactly.
+#
+# Created configuration file clears all other NuPkg sources and inserts
+# "NUGET_PACKAGES" as the only one source.
+#
+# This function is used inside "dotnet-pkg_src_prepare"
+# from the "dotnet-pkg" eclass.
+nuget_writeconfig() {
+ debug-print-function "${FUNCNAME[0]}" "${@}"
+
+ case "${1}" in
+ "" ) die "${FUNCNAME[0]}: no directory/file path specified" ;;
+ */ ) mkdir -p "${1}" || die ;;
+ esac
+
+ local nuget_config_path
+
+ if [[ -d "${1}" ]] ; then
+ nuget_config_path="${1}/NuGet.config"
+ else
+ nuget_config_path="${1}"
+ fi
+
+ cat <<-EOF > "${nuget_config_path}" || die
+ <?xml version="1.0" encoding="utf-8"?>
+ <configuration>
+ <packageSources>
+ <clear />
+ <add key="nuget" value="${NUGET_PACKAGES}" />
+ </packageSources>
+ </configuration>
+ EOF
+}
+
+# @FUNCTION: nuget_donuget
+# @USAGE: <nuget-path> ...
+# @DESCRIPTION:
+# Install NuGet package(s) at "nuget-path" to the system nugets directory.
+#
+# Example:
+# @CODE
+# src_install() {
+# nuget_donuget my-pkg.nupkg
+# }
+# @CODE
+nuget_donuget() {
+ insinto "${NUGET_SYSTEM_NUGETS}"
+ doins "${@}"
+}
+
+fi
diff --git a/eclass/office-ext-r1.eclass b/eclass/office-ext-r1.eclass
index 8cf472fe5bcc..7e4d894483fe 100644
--- a/eclass/office-ext-r1.eclass
+++ b/eclass/office-ext-r1.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: office-ext-r1.eclass
@@ -6,18 +6,18 @@
# The office team <office@gentoo.org>
# @AUTHOR:
# Tomáš Chvátal <scarabeus@gentoo.org>
-# @SUPPORTED_EAPIS: 5 7
+# @SUPPORTED_EAPIS: 7
# @BLURB: Eclass for installing libreoffice extensions
# @DESCRIPTION:
# Eclass for easing maintenance of libreoffice extensions.
-case "${EAPI:-0}" in
- 5) inherit eutils multilib ;;
- 7) inherit eutils ;;
- *) die "EAPI=${EAPI} is not supported" ;;
+case ${EAPI} in
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_unpack src_install
+if [[ -z ${_OFFICE_EXT_R1_ECLASS} ]]; then
+_OFFICE_EXT_R1_ECLASS=1
# @ECLASS_VARIABLE: OFFICE_REQ_USE
# @PRE_INHERIT
@@ -29,7 +29,7 @@ EXPORT_FUNCTIONS src_unpack src_install
# OFFICE_REQ_USE="java,jemalloc(-)?"
# @CODE
if [[ ${OFFICE_REQ_USE} ]]; then
- # Append the brackets for the depend bellow
+ # Append the brackets for the depend below
OFFICE_REQ_USE="[${OFFICE_REQ_USE}]"
fi
@@ -68,7 +68,7 @@ fi
# @CODE
# OFFICE_EXTENSIONS_LOCATION="${S}/unpacked/"
# @CODE
-: ${OFFICE_EXTENSIONS_LOCATION:=${DISTDIR}}
+: "${OFFICE_EXTENSIONS_LOCATION:=${DISTDIR}}"
IUSE=""
RDEPEND=""
@@ -112,7 +112,7 @@ office-ext-r1_src_unpack() {
for i in ${OFFICE_EXTENSIONS[@]}; do
# Unpack the extensions where required and add case for oxt
# which should be most common case for the extensions.
- if [[ -f "${OFFICE_EXTENSIONS_LOCATION}/${i}" ]] ; then
+ if [[ -f ${OFFICE_EXTENSIONS_LOCATION}/${i} ]] ; then
case ${i} in
*.oxt)
mkdir -p "${WORKDIR}/${i}/" || die
@@ -142,9 +142,13 @@ office-ext-r1_src_install() {
for j in ${OFFICE_EXTENSIONS[@]}; do
pushd "${WORKDIR}/${j}/" > /dev/null || die
insinto /usr/$(get_libdir)/${i}/share/extensions/${j/.oxt/}
- doins -r *
+ doins -r .
popd > /dev/null || die
done
fi
done
}
+
+fi
+
+EXPORT_FUNCTIONS src_unpack src_install
diff --git a/eclass/opam.eclass b/eclass/opam.eclass
index dee40a4e9f96..49e3c426b062 100644
--- a/eclass/opam.eclass
+++ b/eclass/opam.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: opam.eclass
@@ -7,15 +7,15 @@
# ML <ml@gentoo.org>
# @AUTHOR:
# Alexis Ballier <aballier@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Provides functions for installing opam packages.
# @DESCRIPTION:
# Provides dependencies on opam and ocaml, opam-install and a default
# src_install for opam-based packages.
-case ${EAPI:-0} in
- 5|6|7) ;;
- *) die "${ECLASS}: EAPI ${EAPI} not supported" ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
# Do not complain about CFLAGS etc since ml projects do not use them.
@@ -25,23 +25,16 @@ QA_FLAGS_IGNORED='.*'
# @PRE_INHERIT
# @DESCRIPTION:
# Override dependency for OPAM_INSTALLER
-: ${OPAM_INSTALLER_DEP:="dev-ml/opam-installer"}
+: "${OPAM_INSTALLER_DEP:="dev-ml/opam-installer"}"
RDEPEND=">=dev-lang/ocaml-4:="
-case ${EAPI:-0} in
- 5|6)
- DEPEND="${RDEPEND} ${OPAM_INSTALLER_DEP}"
- ;;
- *)
- BDEPEND="${OPAM_INSTALLER_DEP} dev-lang/ocaml"
- DEPEND="${RDEPEND}"
- ;;
-esac
+BDEPEND="${OPAM_INSTALLER_DEP} dev-lang/ocaml"
+DEPEND="${RDEPEND}"
# @ECLASS_VARIABLE: OPAM_INSTALLER
# @DESCRIPTION:
# Eclass can use different opam-installer binary than the one provided in by system.
-: ${OPAM_INSTALLER:=opam-installer}
+: "${OPAM_INSTALLER:=opam-installer}"
# @FUNCTION: opam-install
# @USAGE: <list of packages>
@@ -52,10 +45,10 @@ opam-install() {
local pkg
for pkg ; do
${OPAM_INSTALLER} -i \
- --prefix="${ED%/}/usr" \
- --libdir="${D%/}/$(ocamlc -where)" \
- --docdir="${ED%/}/usr/share/doc/${PF}" \
- --mandir="${ED%/}/usr/share/man" \
+ --prefix="${ED}/usr" \
+ --libdir="${D}/$(ocamlc -where)" \
+ --docdir="${ED}/usr/share/doc/${PF}" \
+ --mandir="${ED}/usr/share/man" \
"${pkg}.install" || die
done
}
@@ -64,9 +57,9 @@ opam_src_install() {
local pkg="${1:-${PN}}"
opam-install "${pkg}"
# Handle opam putting doc in a subdir
- if [ -d "${ED%/}/usr/share/doc/${PF}/${pkg}" ] ; then
- mv "${ED%/}/usr/share/doc/${PF}/${pkg}/"* "${ED%/}/usr/share/doc/${PF}/" || die
- rmdir "${ED%/}/usr/share/doc/${PF}/${pkg}" || die
+ if [[ -d ${ED}/usr/share/doc/${PF}/${pkg} ]] ; then
+ mv "${ED}/usr/share/doc/${PF}/${pkg}/"* "${ED}/usr/share/doc/${PF}/" || die
+ rmdir "${ED}/usr/share/doc/${PF}/${pkg}" || die
fi
}
diff --git a/eclass/optfeature.eclass b/eclass/optfeature.eclass
index acf8584e6dbc..c8b4911320d3 100644
--- a/eclass/optfeature.eclass
+++ b/eclass/optfeature.eclass
@@ -1,15 +1,15 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: optfeature.eclass
# @MAINTAINER:
# base-system@gentoo.org
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Advertise optional functionality that might be useful to users
case ${EAPI} in
- 6|7|8) ;;
- *) die "${ECLASS}: EAPI=${EAPI:-0} is not supported" ;;
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_OPTFEATURE_ECLASS} ]]; then
diff --git a/eclass/out-of-source-utils.eclass b/eclass/out-of-source-utils.eclass
new file mode 100644
index 000000000000..d68b21088995
--- /dev/null
+++ b/eclass/out-of-source-utils.eclass
@@ -0,0 +1,43 @@
+# Copyright 2022-2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: out-of-source-utils.eclass
+# @MAINTAINER:
+# Michał Górny <mgorny@gentoo.org>
+# @AUTHOR:
+# Michał Górny <mgorny@gentoo.org>
+# @SUPPORTED_EAPIS: 6 7 8
+# @BLURB: Utility functions for building packages out-of-source
+# @DESCRIPTION:
+# This eclass provides a run_in_build_dir() helper that can be used
+# to execute the specified command inside BUILD_DIR.
+
+case ${EAPI} in
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ ! ${_OUT_OF_SOURCE_UTILS_ECLASS} ]]; then
+_OUT_OF_SOURCE_UTILS_ECLASS=1
+
+# @FUNCTION: run_in_build_dir
+# @USAGE: <argv>...
+# @DESCRIPTION:
+# Run the given command in the directory pointed to by BUILD_DIR.
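+#
+# Example (illustrative):
+# @CODE
+# BUILD_DIR="${WORKDIR}/${P}_build"
+# run_in_build_dir emake check
+# @CODE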
+run_in_build_dir() {
+ debug-print-function ${FUNCNAME} "${@}"
+ local ret
+
+ [[ ${#} -eq 0 ]] && die "${FUNCNAME}: no command specified."
+ [[ -z ${BUILD_DIR} ]] && die "${FUNCNAME}: BUILD_DIR not set."
+
+ mkdir -p "${BUILD_DIR}" || die
+ pushd "${BUILD_DIR}" >/dev/null || die
+ "${@}"
+ ret=${?}
+ popd >/dev/null || die
+
+ return ${ret}
+}
+
+fi
diff --git a/eclass/out-of-source.eclass b/eclass/out-of-source.eclass
index 81e03f3894db..dd54c8933d4e 100644
--- a/eclass/out-of-source.eclass
+++ b/eclass/out-of-source.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: out-of-source.eclass
@@ -34,12 +34,19 @@
case ${EAPI} in
7|8);;
- *) die "EAPI ${EAPI:-0} unsupported (too old)";;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_OUT_OF_SOURCE_ECLASS} ]]; then
+if [[ -z ${_OUT_OF_SOURCE_ECLASS} ]]; then
_OUT_OF_SOURCE_ECLASS=1
+# @ECLASS_VARIABLE: BUILD_DIR
+# @OUTPUT_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The current build directory. Defaults to ${WORKDIR}/${P}_build
+# if unset.
+
# @FUNCTION: out-of-source_src_configure
# @DESCRIPTION:
# The default src_configure() implementation establishes a BUILD_DIR,
diff --git a/eclass/pam.eclass b/eclass/pam.eclass
index 9928e746e41e..2516fa896587 100644
--- a/eclass/pam.eclass
+++ b/eclass/pam.eclass
@@ -1,19 +1,19 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: pam.eclass
# @MAINTAINER:
-# Mikle Kolyada <zlogene@gentoo.org>
+# base-system@gentoo.org
# @AUTHOR:
# Diego Pettenò <flameeyes@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Handles pam related tasks
# @DESCRIPTION:
# This eclass contains functions to install pamd configuration files and
# pam modules.
-case ${EAPI:-0} in
- [678]) ;;
+case ${EAPI} in
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -22,6 +22,14 @@ _PAM_ECLASS=1
inherit flag-o-matic
+# @FUNCTION: _pam_flag_disabled
+# @INTERNAL
+# @DESCRIPTION:
+# Check whether pam support is disabled.
+_pam_flag_disabled() {
+ in_iuse pam && ! use pam
+}
+
# @FUNCTION: dopamd
# @USAGE: <file> [more files]
# @DESCRIPTION:
@@ -29,15 +37,13 @@ inherit flag-o-matic
dopamd() {
[[ -z $1 ]] && die "dopamd requires at least one argument"
- if has pam ${IUSE} && ! use pam; then
- return 0;
- fi
+ _pam_flag_disabled && return 0
( # dont want to pollute calling env
insinto /etc/pam.d
insopts -m 0644
doins "$@"
- ) || die "failed to install $@"
+ )
cleanpamd "$@"
}
@@ -48,15 +54,13 @@ dopamd() {
newpamd() {
[[ $# -ne 2 ]] && die "newpamd requires two arguments"
- if has pam ${IUSE} && ! use pam; then
- return 0;
- fi
+ _pam_flag_disabled && return 0
( # dont want to pollute calling env
insinto /etc/pam.d
insopts -m 0644
newins "$1" "$2"
- ) || die "failed to install $1 as $2"
+ )
cleanpamd $2
}
@@ -67,15 +71,13 @@ newpamd() {
dopamsecurity() {
[[ $# -lt 2 ]] && die "dopamsecurity requires at least two arguments"
- if has pam ${IUSE} && ! use pam; then
- return 0
- fi
+ _pam_flag_disabled && return 0
( # dont want to pollute calling env
insinto /etc/security/$1
insopts -m 0644
doins "${@:2}"
- ) || die "failed to install ${@:2}"
+ )
}
# @FUNCTION: newpamsecurity
@@ -85,15 +87,13 @@ dopamsecurity() {
newpamsecurity() {
[[ $# -ne 3 ]] && die "newpamsecurity requires three arguments"
- if has pam ${IUSE} && ! use pam; then
- return 0;
- fi
+ _pam_flag_disabled && return 0
( # dont want to pollute calling env
insinto /etc/security/$1
insopts -m 0644
newins "$2" "$3"
- ) || die "failed to install $2 as $3"
+ )
}
# @FUNCTION: getpam_mod_dir
@@ -129,12 +129,10 @@ EOF
dopammod() {
[[ -z $1 ]] && die "dopammod requires at least one argument"
- if has pam ${IUSE} && ! use pam; then
- return 0;
- fi
+ _pam_flag_disabled && return 0
exeinto $(getpam_mod_dir)
- doexe "$@" || die "failed to install $@"
+ doexe "$@"
}
# @FUNCTION: newpammod
@@ -143,14 +141,12 @@ dopammod() {
# Install pam module file <old name> as <new name> in the pam
# modules' dir for current implementation
newpammod() {
- [[ $# -ne 2 ]] && die "newpammod requires two arguements"
+ [[ $# -ne 2 ]] && die "newpammod requires two arguments"
- if has pam ${IUSE} && ! use pam; then
- return 0;
- fi
+ _pam_flag_disabled && return 0
exeinto $(getpam_mod_dir)
- newexe "$1" "$2" || die "failed to install $1 as $2"
+ newexe "$1" "$2"
}
# @FUNCTION: pamd_mimic_system
@@ -159,7 +155,7 @@ newpammod() {
# This function creates a pamd file which mimics system-auth file
# for the given levels in the /etc/pam.d directory.
pamd_mimic_system() {
- [[ $# -lt 2 ]] && die "pamd_mimic_system requires at least two argments"
+ [[ $# -lt 2 ]] && die "pamd_mimic_system requires at least two arguments"
pamd_mimic system-auth "$@"
}
@@ -169,28 +165,25 @@ pamd_mimic_system() {
# This function creates a pamd file which mimics the given stack
# for the given levels in the /etc/pam.d directory.
pamd_mimic() {
- [[ $# -lt 3 ]] && die "pamd_mimic requires at least three argments"
+ [[ $# -lt 3 ]] && die "pamd_mimic requires at least three arguments"
- if has pam ${IUSE} && ! use pam; then
- return 0;
- fi
+ _pam_flag_disabled && return 0
dodir /etc/pam.d
- pamdfile=${D}/etc/pam.d/$2
- echo -e "# File autogenerated by pamd_mimic in pam eclass\n\n" >> \
- $pamdfile
+ local pamdfile="${ED}/etc/pam.d/$2"
+ echo -e "# File autogenerated by pamd_mimic in pam eclass\n\n" \
+ >> "${pamdfile}" || die
- originalstack=$1
- authlevels="auth account password session"
+ local authlevels="auth account password session"
- mimic="\tsubstack\t\t${originalstack}"
+ local mimic="\tsubstack\t\t$1"
shift; shift
while [[ -n $1 ]]; do
has $1 ${authlevels} || die "unknown level type"
- echo -e "$1${mimic}" >> ${pamdfile}
+ echo -e "$1${mimic}" >> "${pamdfile}" || die
shift
done
@@ -204,7 +197,7 @@ pamd_mimic() {
cleanpamd() {
while [[ -n $1 ]]; do
if ! has_version sys-libs/pam; then
- sed -i -e '/pam_shells\|pam_console/s:^:#:' "${D}/etc/pam.d/$1" || die
+ sed -i -e '/pam_shells\|pam_console/s:^:#:' "${ED}/etc/pam.d/$1" || die
fi
shift
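The pam.eclass hunks above factor the repeated "has pam ${IUSE} && ! use pam" test into the new _pam_flag_disabled helper and switch installs from ${D} to ${ED}. As orientation, here is a minimal hedged sketch of how a consumer ebuild typically calls these helpers; the package, file names and stack levels are invented for illustration and are not taken from the diff.

# Hypothetical consumer fragment: install pam.d files only when USE="pam"
# is enabled (the helpers now short-circuit via _pam_flag_disabled).
IUSE="pam"

src_install() {
	default

	dopamd "${FILESDIR}"/foo.pamd
	newpamd "${FILESDIR}"/foo-remote.pamd foo-remote
	# Create /etc/pam.d/foo-login delegating to the system-auth stack:
	pamd_mimic_system foo-login auth account session
}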
diff --git a/eclass/perl-functions.eclass b/eclass/perl-functions.eclass
index 4adba921485e..142fdeb8cfbd 100644
--- a/eclass/perl-functions.eclass
+++ b/eclass/perl-functions.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2019 Gentoo Authors
+# Copyright 1999-2022 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: perl-functions.eclass
@@ -8,7 +8,7 @@
# Seemant Kulleen <seemant@gentoo.org>
# Andreas K. Huettel <dilfridge@gentoo.org>
# Kent Fredric <kentnl@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: helper functions eclass for perl modules
# @DESCRIPTION:
# The perl-functions eclass is designed to allow easier installation of perl
@@ -16,16 +16,16 @@
# It provides helper functions, no phases or variable manipulation in
# global scope.
-[[ ${CATEGORY} == "perl-core" ]] && inherit alternatives
-
-case "${EAPI:-0}" in
- 5|6|7|8)
+case ${EAPI} in
+ 7|8)
;;
*)
- die "EAPI=${EAPI} is not supported by perl-functions.eclass"
+ die "${ECLASS}: EAPI ${EAPI:-0} not supported"
;;
esac
+[[ ${CATEGORY} == "perl-core" ]] && inherit alternatives
+
perlinfo_done=false
# @FUNCTION: perl_set_version
@@ -44,7 +44,8 @@ perlinfo_done=false
# echo $PERL_VERSION
# @CODE
perl_set_version() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
+
debug-print "$FUNCNAME: perlinfo_done=${perlinfo_done}"
${perlinfo_done} && return 0
perlinfo_done=true
@@ -65,7 +66,7 @@ perl_set_version() {
#
# This function used to be called fixlocalpod as well.
perl_delete_localpod() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
find "${D}" -type f -name perllocal.pod -delete
find "${D}" -depth -mindepth 1 -type d -empty -delete
@@ -75,7 +76,7 @@ perl_delete_localpod() {
# @DESCRIPTION:
# Look through ${S} for AppleDouble encoded files and get rid of them.
perl_fix_osx_extra() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
local f
find "${S}" -type f -name "._*" -print0 | while read -rd '' f ; do
@@ -92,7 +93,7 @@ perl_fix_osx_extra() {
# Bump off manpages installed by the current module such as *.3pm files as well
# as empty directories.
perl_delete_module_manpages() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
if [[ -d "${ED}"/usr/share/man ]] ; then
find "${ED}"/usr/share/man -type f -name "*.3pm" -delete
@@ -105,7 +106,8 @@ perl_delete_module_manpages() {
# Look through ${D} for .packlist files, empty .bs files and empty directories,
# and get rid of items found.
perl_delete_packlist() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
+
perl_set_version
if [[ -d ${D}/${VENDOR_ARCH} ]] ; then
find "${D}/${VENDOR_ARCH}" -type f -a -name .packlist -delete
@@ -118,7 +120,8 @@ perl_delete_packlist() {
# Look through ${D} for empty .bs files and empty directories,
# and get rid of items found.
perl_delete_emptybsdir() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
+
perl_set_version
if [[ -d ${D}/${VENDOR_ARCH} ]] ; then
find "${D}/${VENDOR_ARCH}" -type f \
@@ -132,7 +135,8 @@ perl_delete_emptybsdir() {
# Make all of ${D} user-writable, since EU::MM does silly things with
# the w bit. See bug 554346.
perl_fix_permissions() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
+
perl_set_version
einfo Fixing installed file permissions
fperms -R u+w /
@@ -145,7 +149,7 @@ perl_fix_permissions() {
# Remove duplicate entries; then validate all entries in the packlist against ${D}
# and prune entries that do not correspond to installed files.
perl_fix_packlist() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
local packlist_temp="${T}/.gentoo_packlist_temp"
find "${D}" -type f -name '.packlist' -print0 | while read -rd '' f ; do
@@ -161,7 +165,7 @@ perl_fix_packlist() {
# remove files that dont exist
cat "${f}" | while read -r entry; do
- if [ ! -e "${D}/${entry}" ]; then
+ if [[ ! -e ${D}/${entry} ]]; then
einfo "Pruning surplus packlist entry ${entry}"
grep -v -x -F "${entry}" "${f}" > "${packlist_temp}"
mv "${packlist_temp}" "${f}"
@@ -176,7 +180,7 @@ perl_fix_packlist() {
# Look through ${D} for text files containing the temporary installation
# folder (i.e. ${D}). If the pattern is found, replace it with `/' and warn.
perl_remove_temppath() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
find "${D}" -type f -not -name '*.so' -print0 | while read -rd '' f ; do
if file "${f}" | grep -q -i " text" ; then
@@ -209,7 +213,8 @@ perl_remove_temppath() {
# }
# @CODE
perl_rm_files() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
+
local skipfile="${T}/.gentoo_makefile_skip"
local manifile="${S}/MANIFEST"
local manitemp="${T}/.gentoo_manifest_temp"
@@ -236,10 +241,9 @@ perl_rm_files() {
# lead to file collisions. Mainly for use in pkg_postinst and pkg_postrm, and makes
# only sense for perl-core packages.
perl_link_duallife_scripts() {
- debug-print-function $FUNCNAME "$@"
- if [[ ${CATEGORY} != perl-core ]] || ! has_version ">=dev-lang/perl-5.8.8-r8" ; then
- return 0
- fi
+ debug-print-function ${FUNCNAME} "$@"
+
+ [[ ${CATEGORY} != perl-core ]] && return 0
local i ff
if has "${EBUILD_PHASE:-none}" "postinst" "postrm" ; then
@@ -278,12 +282,12 @@ perl_check_env() {
for i in PERL_MM_OPT PERL5LIB PERL5OPT PERL_MB_OPT PERL_CORE PERLPREFIX; do
# Next unless match
- [ -v $i ] || continue;
+ [[ -v $i ]] || continue;
# Warn only once, and warn only when one of the bad values are set.
# record failure here.
- if [ ${errored:-0} == 0 ]; then
- if [ -n "${I_KNOW_WHAT_I_AM_DOING}" ]; then
+ if [[ ${errored:-0} == 0 ]]; then
+ if [[ -n ${I_KNOW_WHAT_I_AM_DOING} ]]; then
elog "perl-module.eclass: Suspicious environment values found.";
else
eerror "perl-module.eclass: Suspicious environment values found.";
@@ -295,7 +299,7 @@ perl_check_env() {
value=${!i};
# Print ENV name/value pair
- if [ -n "${I_KNOW_WHAT_I_AM_DOING}" ]; then
+ if [[ -n ${I_KNOW_WHAT_I_AM_DOING} ]]; then
elog " $i=\"$value\"";
else
eerror " $i=\"$value\"";
@@ -303,10 +307,10 @@ perl_check_env() {
done
# Return if there were no failures
- [ ${errored:-0} == 0 ] && return;
+ [[ ${errored:-0} == 0 ]] && return;
# Return if user knows what they're doing
- if [ -n "${I_KNOW_WHAT_I_AM_DOING}" ]; then
+ if [[ -n ${I_KNOW_WHAT_I_AM_DOING} ]]; then
elog "Continuing anyway, seems you know what you're doing."
return
fi
@@ -330,7 +334,7 @@ perl_check_env() {
# }
# @CODE
perl_doexamples() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
einfo "Installing examples into /usr/share/doc/${PF}/examples"
@@ -364,7 +368,7 @@ perl_doexamples() {
# @CODE
perl_has_module() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
[[ $# -gt 0 ]] || die "${FUNCNAME}: No module name provided"
[[ $# -lt 2 ]] || die "${FUNCNAME}: Too many parameters ($#)"
@@ -402,7 +406,7 @@ perl_has_module() {
# && echo "Test::Tester 0.017 or greater installed"
# @CODE
perl_has_module_version() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
[[ $# -gt 0 ]] || die "${FUNCNAME}: No module name provided"
[[ $# -gt 1 ]] || die "${FUNCNAME}: No module version provided"
@@ -443,7 +447,7 @@ perl_has_module_version() {
# @CODE
perl_get_module_version() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
[[ $# -gt 0 ]] || die "${FUNCNAME}: No module name provided"
[[ $# -lt 2 ]] || die "${FUNCNAME}: Too many parameters ($#)"
@@ -489,7 +493,7 @@ perl_get_module_version() {
# @CODE
perl_get_raw_vendorlib() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
[[ $# -lt 1 ]] || die "${FUNCNAME}: Too many parameters ($#)"
@@ -510,7 +514,7 @@ perl_get_raw_vendorlib() {
# @CODE
perl_get_vendorlib() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
[[ $# -lt 1 ]] || die "${FUNCNAME}: Too many parameters ($#)"
@@ -609,7 +613,7 @@ perl_domodule() {
# @CODE
perl_get_wikiurl() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
if [[ -z "${1}" ]]; then
echo "https://wiki.gentoo.org/wiki/Project:Perl/maint-notes/${CATEGORY}/${PN}"
diff --git a/eclass/perl-module.eclass b/eclass/perl-module.eclass
index 273cc2bc8059..7bb02abed8c5 100644
--- a/eclass/perl-module.eclass
+++ b/eclass/perl-module.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: perl-module.eclass
@@ -7,7 +7,7 @@
# @AUTHOR:
# Seemant Kulleen <seemant@gentoo.org>
# Andreas K. Hüttel <dilfridge@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: perl-functions
# @BLURB: eclass for installing Perl module distributions
# @DESCRIPTION:
@@ -19,21 +19,17 @@
# ExtUtils::MakeMaker or Module::Build), we recommend to use perl-functions.eclass
# instead.
-case ${EAPI:-0} in
- 5)
- inherit eutils multiprocessing unpacker perl-functions
- PERL_EXPF="src_unpack src_prepare src_configure src_compile src_test src_install"
- ;;
- 6|7)
- inherit multiprocessing perl-functions
+case ${EAPI} in
+ 7)
+ inherit multiprocessing perl-functions toolchain-funcs
PERL_EXPF="src_prepare src_configure src_compile src_test src_install"
;;
8)
- inherit multiprocessing perl-functions readme.gentoo-r1
+ inherit multiprocessing perl-functions readme.gentoo-r1 toolchain-funcs
PERL_EXPF="src_prepare src_configure src_compile src_test src_install"
;;
*)
- die "EAPI=${EAPI} is not supported by perl-module.eclass"
+ die "${ECLASS}: EAPI ${EAPI:-0} not supported"
;;
esac
@@ -43,69 +39,12 @@ esac
# This variable controls whether a runtime and build time dependency on
# dev-lang/perl is automatically added by the eclass. It defaults to yes.
# Set to no to disable, set to noslotop to add a perl dependency without
-# slot operator (EAPI=6). All packages installing into the vendor_perl
+# slot operator. All packages installing into the vendor_perl
# path must use yes here. (EAPI=8 and later) Also adds a test useflag,
# a use-conditional build time dependency on virtual/perl-Test-Simple, and
# the required RESTRICT setting.
-case ${EAPI:-0} in
- 5)
- [[ ${CATEGORY} == perl-core ]] && \
- PERL_EXPF+=" pkg_postinst pkg_postrm"
-
- case "${GENTOO_DEPEND_ON_PERL:-yes}" in
- yes)
- case "${GENTOO_DEPEND_ON_PERL_SUBSLOT:-yes}" in
- yes)
- DEPEND="dev-lang/perl:=[-build(-)]"
- ;;
- *)
- DEPEND="dev-lang/perl[-build(-)]"
- ;;
- esac
- RDEPEND="${DEPEND}"
- ;;
- esac
-
- case "${PERL_EXPORT_PHASE_FUNCTIONS:-yes}" in
- yes)
- EXPORT_FUNCTIONS ${PERL_EXPF}
- ;;
- no)
- debug-print "PERL_EXPORT_PHASE_FUNCTIONS=no"
- ;;
- *)
- die "PERL_EXPORT_PHASE_FUNCTIONS=${PERL_EXPORT_PHASE_FUNCTIONS} is not supported by perl-module.eclass"
- ;;
- esac
- ;;
- 6)
- [[ ${CATEGORY} == perl-core ]] && \
- PERL_EXPF+=" pkg_postinst pkg_postrm"
-
- case "${GENTOO_DEPEND_ON_PERL:-yes}" in
- yes)
- DEPEND="dev-lang/perl"
- RDEPEND="dev-lang/perl:="
- ;;
- noslotop)
- DEPEND="dev-lang/perl"
- RDEPEND="dev-lang/perl"
- ;;
- esac
-
- if [[ "${GENTOO_DEPEND_ON_PERL_SUBSLOT:-yes}" != "yes" ]]; then
- eerror "GENTOO_DEPEND_ON_PERL_SUBSLOT=no is banned in EAPI=6 and later. If you don't want a slot operator"
- die "set GENTOO_DEPEND_ON_PERL=noslotop instead."
- fi
-
- if [[ "${PERL_EXPORT_PHASE_FUNCTIONS}" ]]; then
- eerror "PERL_EXPORT_PHASE_FUNCTIONS is banned in EAPI=6 and later. Use perl-module.eclass if you need"
- die "phase functions, perl-functions.eclass if not."
- fi
-
- EXPORT_FUNCTIONS ${PERL_EXPF}
- ;;
+case ${EAPI} in
7)
[[ ${CATEGORY} == perl-core ]] && \
PERL_EXPF+=" pkg_postinst pkg_postrm"
@@ -124,11 +63,11 @@ case ${EAPI:-0} in
esac
if [[ "${GENTOO_DEPEND_ON_PERL_SUBSLOT:-yes}" != "yes" ]]; then
- die "GENTOO_DEPEND_ON_PERL_SUBSLOT=no is banned in EAPI=6 and later."
+ die "GENTOO_DEPEND_ON_PERL_SUBSLOT=no is banned."
fi
if [[ "${PERL_EXPORT_PHASE_FUNCTIONS}" ]]; then
- die "PERL_EXPORT_PHASE_FUNCTIONS is banned in EAPI=6 and later."
+ die "PERL_EXPORT_PHASE_FUNCTIONS is banned."
fi
EXPORT_FUNCTIONS ${PERL_EXPF}
@@ -141,7 +80,7 @@ case ${EAPI:-0} in
yes|noslotop)
DEPEND="dev-lang/perl"
BDEPEND="dev-lang/perl
- test? ( virtual/perl-Test-Simple )"
+ test? ( >=virtual/perl-Test-Simple-1 )"
IUSE="test"
RESTRICT="!test? ( test )"
;;&
@@ -154,11 +93,11 @@ case ${EAPI:-0} in
esac
if [[ "${GENTOO_DEPEND_ON_PERL_SUBSLOT:-yes}" != "yes" ]]; then
- die "GENTOO_DEPEND_ON_PERL_SUBSLOT=no is banned in EAPI=6 and later."
+ die "GENTOO_DEPEND_ON_PERL_SUBSLOT=no is banned."
fi
if [[ "${PERL_EXPORT_PHASE_FUNCTIONS}" ]]; then
- die "PERL_EXPORT_PHASE_FUNCTIONS is banned in EAPI=6 and later."
+ die "PERL_EXPORT_PHASE_FUNCTIONS is banned."
fi
EXPORT_FUNCTIONS ${PERL_EXPF}
@@ -170,46 +109,43 @@ LICENSE="${LICENSE:-|| ( Artistic GPL-1+ )}"
# @ECLASS_VARIABLE: DIST_NAME
# @DEFAULT_UNSET
# @DESCRIPTION:
-# (EAPI=6 and later) This variable provides a way to override PN for the calculation of S,
+# This variable provides a way to override PN for the calculation of S,
# SRC_URI, and HOMEPAGE. If unset, defaults to PN.
# @ECLASS_VARIABLE: DIST_VERSION
# @DEFAULT_UNSET
# @DESCRIPTION:
-# (EAPI=6 and later) This variable provides a way to override PV for the calculation of S and SRC_URI.
+# This variable provides a way to override PV for the calculation of S and SRC_URI.
# Use it to provide the non-normalized, upstream version number. If unset, defaults to PV.
-# Named MODULE_VERSION in EAPI=5.
# @ECLASS_VARIABLE: DIST_A_EXT
# @DEFAULT_UNSET
# @DESCRIPTION:
-# (EAPI=6 and later) This variable provides a way to override the distfile extension for the calculation of
-# SRC_URI. If unset, defaults to tar.gz. Named MODULE_A_EXT in EAPI=5.
+# This variable provides a way to override the distfile extension for the calculation of
+# SRC_URI. If unset, defaults to tar.gz.
# @ECLASS_VARIABLE: DIST_A
# @DEFAULT_UNSET
# @DESCRIPTION:
-# (EAPI=6 and later) This variable provides a way to override the distfile name for the calculation of
-# SRC_URI. If unset, defaults to ${DIST_NAME}-${DIST_VERSION}.${DIST_A_EXT} Named MODULE_A in EAPI=5.
+# This variable provides a way to override the distfile name for the calculation of
+# SRC_URI. If unset, defaults to ${DIST_NAME}-${DIST_VERSION}.${DIST_A_EXT}.
# @ECLASS_VARIABLE: DIST_AUTHOR
# @DEFAULT_UNSET
# @DESCRIPTION:
-# (EAPI=6 and later) This variable sets the module author name for the calculation of
-# SRC_URI. Named MODULE_AUTHOR in EAPI=5.
+# This variable sets the module author name for the calculation of SRC_URI.
# @ECLASS_VARIABLE: DIST_SECTION
# @DEFAULT_UNSET
# @DESCRIPTION:
-# (EAPI=6 and later) This variable sets the module section for the calculation of
+# This variable sets the module section for the calculation of
# SRC_URI. Only required in rare cases for very special snowflakes.
-# Named MODULE_SECTION in EAPI=5.
# @ECLASS_VARIABLE: DIST_EXAMPLES
# @PRE_INHERIT
# @DEFAULT_UNSET
# @DESCRIPTION:
-# (EAPI=6 and later) This Bash array allows passing a list of example files to be installed
+# This Bash array allows passing a list of example files to be installed
# in /usr/share/doc/${PF}/examples. If set before inherit, automatically adds
# a use-flag examples, if not you'll have to add the useflag in your ebuild.
# Examples are installed only if the useflag examples exists and is activated.
@@ -233,67 +169,31 @@ if [[ $(declare -p DIST_MAKE 2>&-) != "declare -a DIST_MAKE="* ]]; then
DIST_MAKE=( OPTIMIZE="${CFLAGS}" )
fi
+DIST_NAME=${DIST_NAME:-${PN}}
+DIST_P=${DIST_NAME}-${DIST_VERSION:-${PV}}
+S=${WORKDIR}/${DIST_P}
-if [[ ${EAPI:-0} == 5 ]]; then
- if [[ -n ${MY_PN} || -n ${MY_PV} || -n ${MODULE_VERSION} ]] ; then
- : ${MY_P:=${MY_PN:-${PN}}-${MY_PV:-${MODULE_VERSION:-${PV}}}}
- S=${MY_S:-${WORKDIR}/${MY_P}}
- fi
- MODULE_NAME=${MY_PN:-${PN}}
- MODULE_P=${MY_P:-${P}}
-
- [[ -z "${SRC_URI}" && -z "${MODULE_A}" ]] && \
- MODULE_A="${MODULE_P}.${MODULE_A_EXT:-tar.gz}"
- [[ -z "${SRC_URI}" && -n "${MODULE_AUTHOR}" ]] && \
- SRC_URI="mirror://cpan/authors/id/${MODULE_AUTHOR:0:1}/${MODULE_AUTHOR:0:2}/${MODULE_AUTHOR}/${MODULE_SECTION:+${MODULE_SECTION}/}${MODULE_A}"
- [[ -z "${HOMEPAGE}" ]] && \
- HOMEPAGE="https://metacpan.org/release/${MODULE_NAME}"
-
- SRC_TEST="skip"
-else
- DIST_NAME=${DIST_NAME:-${PN}}
- DIST_P=${DIST_NAME}-${DIST_VERSION:-${PV}}
- S=${WORKDIR}/${DIST_P}
-
- [[ -z "${SRC_URI}" && -z "${DIST_A}" ]] && \
- DIST_A="${DIST_P}.${DIST_A_EXT:-tar.gz}"
- [[ -z "${SRC_URI}" && -n "${DIST_AUTHOR}" ]] && \
- SRC_URI="mirror://cpan/authors/id/${DIST_AUTHOR:0:1}/${DIST_AUTHOR:0:2}/${DIST_AUTHOR}/${DIST_SECTION:+${DIST_SECTION}/}${DIST_A}"
- [[ -z "${HOMEPAGE}" ]] && \
- HOMEPAGE="https://metacpan.org/release/${DIST_NAME}"
-
- [[ -z "${DIST_EXAMPLES}" ]] || IUSE+=" examples"
-fi
+[[ -z "${SRC_URI}" && -z "${DIST_A}" ]] &&
+ DIST_A="${DIST_P}.${DIST_A_EXT:-tar.gz}"
+[[ -z "${SRC_URI}" && -n "${DIST_AUTHOR}" ]] &&
+ SRC_URI="mirror://cpan/authors/id/${DIST_AUTHOR:0:1}/${DIST_AUTHOR:0:2}/${DIST_AUTHOR}/${DIST_SECTION:+${DIST_SECTION}/}${DIST_A}"
+[[ -z "${HOMEPAGE}" ]] &&
+ HOMEPAGE="https://metacpan.org/release/${DIST_NAME}"
+
+[[ -z "${DIST_EXAMPLES}" ]] || IUSE+=" examples"
-SRC_PREP="no"
PREFER_BUILDPL="yes"
pm_echovar=""
-# @FUNCTION: perl-module_src_unpack
-# @DESCRIPTION:
-# Unpack the ebuild tarball(s).
-# This function is to be called during the ebuild src_unpack() phase.
-perl-module_src_unpack() {
- debug-print-function $FUNCNAME "$@"
- [[ ${EAPI:-0} == 5 ]] || die "perl-module_src_unpack is banned in EAPI=6 or later"
- unpacker_src_unpack
-}
-
# @FUNCTION: perl-module_src_prepare
# @DESCRIPTION:
# Get the ebuild sources ready.
# This function is to be called during the ebuild src_prepare() phase.
perl-module_src_prepare() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
- if [[ ${EAPI:-0} == 5 ]] ; then
- [[ ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
- debug-print "$FUNCNAME: applying user patches"
- epatch_user
- else
- default
- fi
+ default
if [[ ${PERL_RM_FILES[@]} ]]; then
debug-print "$FUNCNAME: stripping unneeded files"
@@ -307,12 +207,11 @@ perl-module_src_prepare() {
# Configure the ebuild sources.
# This function is to be called during the ebuild src_configure() phase.
perl-module_src_configure() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
- if [[ ${EAPI:-0} == 5 && ${SRC_PREP} == yes ]]; then
- return 0
- fi
- SRC_PREP="yes"
+ # Perl runs LD with LDFLAGS
+ export CCLD=$(tc-getCC)
+ unset LD
perl_check_env
@@ -321,6 +220,8 @@ perl-module_src_configure() {
[[ -z ${pm_echovar} ]] && export PERL_MM_USE_DEFAULT=1
# Disable ExtUtils::AutoInstall from prompting
export PERL_EXTUTILS_AUTOINSTALL="--skipdeps"
+ # Noisy and not really appropriate to show to the user in a PM
+ export PERL_CANARY_STABILITY_DISABLE=1
if [[ $(declare -p myconf 2>&-) != "declare -a myconf="* ]]; then
local myconf_local=(${myconf})
@@ -329,38 +230,20 @@ perl-module_src_configure() {
fi
if [[ ( ${PREFER_BUILDPL} == yes || ! -f Makefile.PL ) && -f Build.PL ]] ; then
- case ${EAPI:-0} in
- 5|6)
- if grep -q '\(use\|require\)\s*Module::Build::Tiny' Build.PL ; then
- einfo "Using Module::Build::Tiny"
- if [[ ${DEPEND} != *dev-perl/Module-Build-Tiny* && ${PN} != Module-Build-Tiny ]]; then
- eerror "QA Notice: The ebuild uses Module::Build::Tiny but doesn't depend on it."
- die " Add dev-perl/Module-Build-Tiny to DEPEND!"
- fi
- else
- einfo "Using Module::Build"
- if [[ ${DEPEND} != *virtual/perl-Module-Build* && ${DEPEND} != *dev-perl/Module-Build* && ${PN} != Module-Build ]] ; then
- eerror "QA Notice: The ebuild uses Module::Build but doesn't depend on it."
- die " Add dev-perl/Module-Build to DEPEND!"
- fi
- fi
- ;;
- *)
- if grep -q '\(use\|require\)\s*Module::Build::Tiny' Build.PL ; then
- einfo "Using Module::Build::Tiny"
- if [[ ${BDEPEND} != *dev-perl/Module-Build-Tiny* && ${PN} != Module-Build-Tiny ]]; then
- eerror "QA Notice: The ebuild uses Module::Build::Tiny but doesn't depend on it."
- eerror " Add dev-perl/Module-Build-Tiny to BDEPEND!"
- fi
- else
- einfo "Using Module::Build"
- if [[ ${BDEPEND} != *virtual/perl-Module-Build* && ${BDEPEND} != *dev-perl/Module-Build* && ${PN} != Module-Build ]] ; then
- eerror "QA Notice: The ebuild uses Module::Build but doesn't depend on it."
- eerror " Add dev-perl/Module-Build to BDEPEND!"
- fi
- fi
- ;;
- esac
+ if grep -q '\(use\|require\)\s*Module::Build::Tiny' Build.PL ; then
+ einfo "Using Module::Build::Tiny"
+ if [[ ${BDEPEND} != *dev-perl/Module-Build-Tiny* && ${PN} != Module-Build-Tiny ]]; then
+ eerror "QA Notice: The ebuild uses Module::Build::Tiny but doesn't depend on it."
+ eerror " Add dev-perl/Module-Build-Tiny to BDEPEND!"
+ fi
+ else
+ einfo "Using Module::Build"
+ if [[ ${BDEPEND} != *virtual/perl-Module-Build* && ${BDEPEND} != *dev-perl/Module-Build* && ${PN} != Module-Build ]] ; then
+ eerror "QA Notice: The ebuild uses Module::Build but doesn't depend on it."
+ eerror " Add dev-perl/Module-Build to BDEPEND!"
+ fi
+ fi
+
set -- \
--installdirs=vendor \
--libdoc= \
@@ -373,7 +256,7 @@ perl-module_src_configure() {
elif [[ -f Makefile.PL ]] ; then
einfo "Using ExtUtils::MakeMaker"
set -- \
- PREFIX=${EPREFIX}/usr \
+ PREFIX="${EPREFIX}"/usr \
INSTALLDIRS=vendor \
INSTALLMAN3DIR='none' \
DESTDIR="${D}" \
@@ -393,11 +276,12 @@ perl-module_src_configure() {
# Compile the ebuild sources.
# This function is to be called during the ebuild src_compile() phase.
perl-module_src_compile() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
+
perl_set_version
case ${EAPI} in
- 5|6|7)
+ 7)
if [[ $(declare -p mymake 2>&-) != "declare -a mymake="* ]]; then
local mymake_local=(${mymake})
else
@@ -416,21 +300,16 @@ perl-module_src_compile() {
set -- \
OTHERLDFLAGS="${LDFLAGS}" \
"${mymake_local[@]}"
- einfo "emake" "$@"
- emake "$@" \
- || die "Compilation failed"
-# OPTIMIZE="${CFLAGS}" \
+ emake "$@"
fi
}
# @ECLASS_VARIABLE: DIST_TEST
# @DEFAULT_UNSET
# @DESCRIPTION:
-# (EAPI=6 and later) Variable that controls if tests are run in the test phase
+# Variable that controls if tests are run in the test phase
# at all, and if yes under which conditions. If unset, defaults to "do parallel"
# If neither "do" nor "parallel" is recognized, tests are skipped.
-# (In EAPI=5 the variable is called SRC_TEST, defaults to "skip", and
-# recognizes fewer options.)
# The following space-separated keywords are recognized:
# do : run tests
# parallel : run tests in parallel
@@ -441,61 +320,60 @@ perl-module_src_compile() {
# @USER_VARIABLE
# @DEFAULT_UNSET
# @DESCRIPTION:
-# (EAPI=6 and later) Variable that controls if tests are run in the test phase
+# Variable that controls if tests are run in the test phase
# at all, and if yes under which conditions. It is intended for use in
# make.conf or the environment by ebuild authors during testing, and
# accepts the same values as DIST_TEST. If set, it overrides DIST_TEST
# completely. DO NOT USE THIS IN EBUILDS!
-# @FUNCTION: perl-module_src-test
+# @FUNCTION: perl-module_src_test
# @DESCRIPTION:
# This code attempts to work out your threadingness and runs tests
# according to the settings of DIST_TEST using Test::Harness.
perl-module_src_test() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
+
local my_test_control
local my_test_verbose
+ local my_test_makeopts
- if [[ ${EAPI} == 5 ]] ; then
- my_test_control=${SRC_TEST}
- my_test_verbose=${TEST_VERBOSE:-0}
- if has 'do' ${my_test_control} || has 'parallel' ${my_test_control} ; then
- if has "${my_test_verbose}" 0 && has 'parallel' ${my_test_control} ; then
- export HARNESS_OPTIONS=j$(makeopts_jobs)
- einfo "Test::Harness Jobs=$(makeopts_jobs)"
- fi
- else
- einfo Skipping tests due to SRC_TEST=${SRC_TEST}
- return 0
- fi
- else
- [[ -n "${DIST_TEST_OVERRIDE}" ]] && ewarn DIST_TEST_OVERRIDE is set to ${DIST_TEST_OVERRIDE}
- my_test_control=${DIST_TEST_OVERRIDE:-${DIST_TEST:-do parallel}}
+ [[ -n "${DIST_TEST_OVERRIDE}" ]] && ewarn "DIST_TEST_OVERRIDE is set to ${DIST_TEST_OVERRIDE}"
+ my_test_control=${DIST_TEST_OVERRIDE:-${DIST_TEST:-do parallel}}
- if ! has 'do' ${my_test_control} && ! has 'parallel' ${my_test_control} ; then
- einfo Skipping tests due to DIST_TEST=${my_test_control}
- return 0
- fi
+ if ! has 'do' ${my_test_control} && ! has 'parallel' ${my_test_control} ; then
+ einfo Skipping tests due to DIST_TEST=${my_test_control}
+ return 0
+ fi
- if has verbose ${my_test_control} ; then
- my_test_verbose=1
- else
- my_test_verbose=0
- fi
+ if has 'do' ${my_test_control} && ! has 'parallel' ${my_test_control} ; then
+ my_test_makeopts="-j1"
+ fi
- if has parallel ${my_test_control} ; then
- export HARNESS_OPTIONS=j$(makeopts_jobs)
- einfo "Test::Harness Jobs=$(makeopts_jobs)"
- fi
+ if has verbose ${my_test_control} ; then
+ my_test_verbose=1
+ else
+ my_test_verbose=0
+ fi
- # this might sometimes work...
- if ! has network ${my_test_control} ; then
- export NO_NETWORK_TESTING=1
- fi
+ if has parallel ${my_test_control} ; then
+ export HARNESS_OPTIONS=j$(makeopts_jobs)
+ einfo "Test::Harness Jobs=$(makeopts_jobs)"
fi
+ # this might sometimes work...
+ if ! has network ${my_test_control} ; then
+ export NO_NETWORK_TESTING=1
+ fi
+
+ # See https://www.perlmonks.org/?node_id=1225311
+ # * AUTOMATED_TESTING appears inappropriate for us, as it affects
+ # exit codes and might mask failures if configuration is wrong.
+ # * EXTENDED_TESTING is something we could consider if we had
+ # some way to opt-in to expensive tests.
+ export NONINTERACTIVE_TESTING=1
+
case ${EAPI} in
- 5|6|7)
+ 7)
;;
*)
if has 'tests' ${DIST_WIKI} ; then
@@ -510,7 +388,7 @@ perl-module_src_test() {
if [[ -f Build ]] ; then
./Build test verbose=${my_test_verbose} || die "test failed"
elif [[ -f Makefile ]] ; then
- emake test TEST_VERBOSE=${my_test_verbose} || die "test failed"
+ emake ${my_test_makeopts} test TEST_VERBOSE=${my_test_verbose}
fi
}
@@ -519,7 +397,7 @@ perl-module_src_test() {
# Install a Perl ebuild.
# This function is to be called during the ebuild src_install() phase.
perl-module_src_install() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
perl_set_version
@@ -541,12 +419,11 @@ perl-module_src_install() {
else
local myinst_local=("${myinst[@]}")
fi
- emake "${myinst_local[@]}" ${mytargets} \
- || die "emake ${myinst_local[@]} ${mytargets} failed"
+ emake "${myinst_local[@]}" ${mytargets}
fi
case ${EAPI} in
- 5|6|7)
+ 7)
;;
*)
perl_fix_permissions
@@ -555,28 +432,22 @@ perl-module_src_install() {
perl_delete_module_manpages
perl_delete_localpod
- if [[ ${EAPI} == 5 ]] ; then
- perl_delete_packlist
- else
- perl_fix_packlist
- perl_delete_emptybsdir
- fi
+ perl_fix_packlist
+ perl_delete_emptybsdir
perl_remove_temppath
for f in Change* CHANGES README* TODO FAQ ${mydoc}; do
[[ -s ${f} ]] && dodoc ${f}
done
- if [[ ${EAPI} != 5 ]] ; then
- if in_iuse examples && use examples ; then
- [[ ${#DIST_EXAMPLES[@]} -eq 0 ]] || perl_doexamples "${DIST_EXAMPLES[@]}"
- fi
+ if in_iuse examples && use examples ; then
+ [[ ${#DIST_EXAMPLES[@]} -eq 0 ]] || perl_doexamples "${DIST_EXAMPLES[@]}"
fi
perl_link_duallife_scripts
case ${EAPI} in
- 5|6|7)
+ 7)
;;
*)
if has 'features' ${DIST_WIKI} ; then
@@ -599,7 +470,8 @@ perl-module_src_install() {
# links that prevent file collisions for dual-life packages installing scripts.
# In any other category it immediately exits.
perl-module_pkg_postinst() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
+
if [[ ${CATEGORY} != perl-core ]] ; then
eerror "perl-module.eclass: You are calling perl-module_pkg_postinst outside the perl-core category."
die " This does not do anything; the call can be removed."
@@ -614,7 +486,8 @@ perl-module_pkg_postinst() {
# links that prevent file collisions for dual-life packages installing scripts.
# In any other category it immediately exits.
perl-module_pkg_postrm() {
- debug-print-function $FUNCNAME "$@"
+ debug-print-function ${FUNCNAME} "$@"
+
if [[ ${CATEGORY} != perl-core ]] ; then
eerror "perl-module.eclass: You are calling perl-module_pkg_postrm outside the perl-core category."
die " This does not do anything; the call can be removed."
diff --git a/eclass/php-ext-pecl-r3.eclass b/eclass/php-ext-pecl-r3.eclass
index b1b9429e8a26..11ae34429fbb 100644
--- a/eclass/php-ext-pecl-r3.eclass
+++ b/eclass/php-ext-pecl-r3.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2022 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: php-ext-pecl-r3.eclass
# @MAINTAINER:
# Gentoo PHP team <php-bugs@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: php-ext-source-r3
# @BLURB: A uniform way to install PECL extensions
# @DESCRIPTION:
@@ -13,7 +13,7 @@
# see https://pecl.php.net/
case ${EAPI:-0} in
- 6|7|8) ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
diff --git a/eclass/php-ext-source-r3.eclass b/eclass/php-ext-source-r3.eclass
index b60d5528d526..0d58db5031c9 100644
--- a/eclass/php-ext-source-r3.eclass
+++ b/eclass/php-ext-source-r3.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: php-ext-source-r3.eclass
# @MAINTAINER:
# Gentoo PHP team <php-bugs@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Compile and install standalone PHP extensions.
# @DESCRIPTION:
# A unified interface for compiling and installing standalone PHP
@@ -12,11 +12,9 @@
inherit autotools
-case ${EAPI:-0} in
- 6) inherit eapi7-ver ;;
+case ${EAPI} in
7|8) ;;
- *)
- die "${ECLASS} is not compatible with EAPI=${EAPI}"
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
# @ECLASS_VARIABLE: PHP_EXT_NAME
@@ -83,7 +81,7 @@ esac
# @CODE
# PHP_INI_NAME="40-foo"
# @CODE
-: ${PHP_INI_NAME:=${PHP_EXT_NAME}}
+: "${PHP_INI_NAME:=${PHP_EXT_NAME}}"
# @ECLASS_VARIABLE: PHP_EXT_NEEDED_USE
# @PRE_INHERIT
@@ -128,14 +126,17 @@ unset _php_slot _php_target
# the USE-conditional if PHP_EXT_OPTIONAL_USE is non-null.
REQUIRED_USE+=") ${PHP_EXT_OPTIONAL_USE:+ )}"
PHPDEPEND+=" ${PHP_EXT_OPTIONAL_USE:+ )}"
-TOOLDEPS="sys-devel/m4 sys-devel/libtool"
+TOOLDEPS="
+ sys-devel/m4
+ dev-build/libtool
+"
RDEPEND="${PHPDEPEND}"
-
-case ${EAPI:-0} in
- 6) DEPEND="${TOOLDEPS} ${PHPDEPEND}" ;;
- 7|8) DEPEND="${PHPDEPEND}" ; BDEPEND="${TOOLDEPS} ${PHPDEPEND}" ;;
-esac
+DEPEND="${PHPDEPEND}"
+BDEPEND="
+ ${TOOLDEPS}
+ ${PHPDEPEND}
+"
unset PHPDEPEND TOOLDEPS
diff --git a/eclass/php-pear-r2.eclass b/eclass/php-pear-r2.eclass
index 2d204ed24c3c..9882c7dcc700 100644
--- a/eclass/php-pear-r2.eclass
+++ b/eclass/php-pear-r2.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: php-pear-r2.eclass
@@ -14,31 +14,27 @@
# Note that this eclass doesn't handle dependencies of PEAR packages
# on purpose; please use (R)DEPEND to define them correctly!
-EXPORT_FUNCTIONS src_install pkg_postinst pkg_postrm
-
-case "${EAPI:-0}" in
- 6|7)
- ;;
- 8)
- IDEPEND=">=dev-php/pear-1.8.1"
- ;;
- *)
- die "Unsupported EAPI=${EAPI} for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ -z ${_PHP_PEAR_R2_ECLASS} ]]; then
+_PHP_PEAR_R2_ECLASS=1
+
RDEPEND=">=dev-php/pear-1.8.1"
+[[ ${EAPI} != [67] ]] && IDEPEND=">=dev-php/pear-1.8.1"
# @ECLASS_VARIABLE: PHP_PEAR_PKG_NAME
# @DESCRIPTION:
# Set this if the PEAR package name differs from ${PN/PEAR-/}
# (generally shouldn't be the case).
-: ${PHP_PEAR_PKG_NAME:=${PN/PEAR-/}}
+: "${PHP_PEAR_PKG_NAME:=${PN/PEAR-/}}"
# @ECLASS_VARIABLE: PEAR_PV
# @DESCRIPTION:
# Set in ebuild if the ${PV} breaks SRC_URI for alpha/beta/rc versions
-: ${PEAR_PV:=${PV}}
+: "${PEAR_PV:=${PV}}"
# @ECLASS_VARIABLE: PEAR-P
# @INTERNAL
@@ -50,7 +46,7 @@ PEAR_P="${PHP_PEAR_PKG_NAME}-${PEAR_PV}"
# @DESCRIPTION:
# Set in ebuild to the domain name of the channel if not pear.php.net
# When the domain is not pear.php.net, setting the SRC_URI is required
-: ${PHP_PEAR_DOMAIN:=pear.php.net}
+: "${PHP_PEAR_DOMAIN:=pear.php.net}"
# @ECLASS_VARIABLE: PHP_PEAR_CHANNEL
# @DEFAULT_UNSET
@@ -64,7 +60,7 @@ if [[ "${PHP_PEAR_DOMAIN}" == "pear.php.net" ]] ; then
SRC_URI="https://pear.php.net/get/${PEAR_P}.tgz"
fi
-: ${HOMEPAGE:=https://${PHP_PEAR_DOMAIN}/package/${PHP_PEAR_PKG_NAME}}
+: "${HOMEPAGE:=https://${PHP_PEAR_DOMAIN}/package/${PHP_PEAR_PKG_NAME}}"
S="${WORKDIR}/${PEAR_P}"
@@ -129,3 +125,7 @@ php-pear-r2_pkg_postrm() {
# Uninstall known dependency
"${EROOT%/}/usr/bin/peardev" uninstall -nrO "${PHP_PEAR_DOMAIN}/${PHP_PEAR_PKG_NAME}"
}
+
+fi
+
+EXPORT_FUNCTIONS src_install pkg_postinst pkg_postrm
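php-pear-r2.eclass above mainly gains an inherit guard and moves EXPORT_FUNCTIONS to the end. As a reminder of how its variables interact, here is a hedged sketch of a PEAR ebuild using a non-default channel domain; the domain and package are made up.

EAPI=8

PHP_PEAR_DOMAIN="pear.example.org"
inherit php-pear-r2

DESCRIPTION="Illustrative PEAR package"
# The eclass only sets SRC_URI for pear.php.net, so for any other domain
# the ebuild must provide it itself:
SRC_URI="https://${PHP_PEAR_DOMAIN}/get/${PHP_PEAR_PKG_NAME}-${PEAR_PV}.tgz"
SLOT="0"
KEYWORDS="~amd64"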
diff --git a/eclass/plasma-mobile.kde.org.eclass b/eclass/plasma-mobile.kde.org.eclass
new file mode 100644
index 000000000000..9fd54100d24e
--- /dev/null
+++ b/eclass/plasma-mobile.kde.org.eclass
@@ -0,0 +1,48 @@
+# Copyright 1999-2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: plasma-mobile.kde.org.eclass
+# @MAINTAINER:
+# kde@gentoo.org
+# @SUPPORTED_EAPIS: 8
+# @PROVIDES: kde.org
+# @BLURB: Support eclass for KDE Plasma Mobile packages.
+# @DESCRIPTION:
+# This eclass extends kde.org.eclass for the Plasma Mobile release group to
+# assemble a default SRC_URI for tarballs, set up git-r3.eclass for
+# stable/master branch versions, or restrict access to unreleased (packager
+# access only) tarballs in the Gentoo KDE overlay.

+#
+# This eclass unconditionally inherits kde.org.eclass and all its public
+# variables and helper functions (not phase functions) may be considered as
+# part of this eclass's API.
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_PLASMA_MOBILE_KDE_ORG_ECLASS} ]]; then
+_PLASMA_MOBILE_KDE_ORG_ECLASS=1
+
+# @ECLASS_VARIABLE: KDE_ORG_CATEGORY
+# @PRE_INHERIT
+# @DESCRIPTION:
+# For proper description see kde.org.eclass manpage.
+: "${KDE_ORG_CATEGORY:=plasma-mobile}"
+
+inherit kde.org
+
+HOMEPAGE="https://plasma-mobile.org/"
+
+# @ECLASS_VARIABLE: KDE_ORG_SCHEDULE_URI
+# @INTERNAL
+# @DESCRIPTION:
+# For proper description see kde.org.eclass manpage.
+KDE_ORG_SCHEDULE_URI="https://invent.kde.org/plasma/plasma-mobile/-/wikis/Release-Schedule"
+
+if [[ ${KDE_BUILD_TYPE} != live && -z ${KDE_ORG_COMMIT} ]]; then
+ SRC_URI="mirror://kde/stable/plasma-mobile/$(ver_cut 1-2)/${KDE_ORG_NAME}-${PV}.tar.xz"
+fi
+
+fi
diff --git a/eclass/plasma.kde.org.eclass b/eclass/plasma.kde.org.eclass
new file mode 100644
index 000000000000..491f1fcecc8c
--- /dev/null
+++ b/eclass/plasma.kde.org.eclass
@@ -0,0 +1,91 @@
+# Copyright 1999-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: plasma.kde.org.eclass
+# @MAINTAINER:
+# kde@gentoo.org
+# @SUPPORTED_EAPIS: 8
+# @PROVIDES: kde.org
+# @BLURB: Support eclass for KDE Plasma packages.
+# @DESCRIPTION:
+# This eclass extends kde.org.eclass for the KDE Plasma release group to
+# assemble a default SRC_URI for tarballs, set up git-r3.eclass for
+# stable/master branch versions, or restrict access to unreleased (packager
+# access only) tarballs in the Gentoo KDE overlay.
+#
+# This eclass unconditionally inherits kde.org.eclass and all its public
+# variables and helper functions (not phase functions) may be considered as
+# part of this eclass's API.
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_PLASMA_KDE_ORG_ECLASS} ]]; then
+_PLASMA_KDE_ORG_ECLASS=1
+
+# @ECLASS_VARIABLE: KDE_PV_UNRELEASED
+# @INTERNAL
+# @DESCRIPTION:
+# For proper description see kde.org.eclass manpage.
+KDE_PV_UNRELEASED=( )
+
+# @ECLASS_VARIABLE: _PSLOT
+# @INTERNAL
+# @DESCRIPTION:
+# KDE Plasma major version mapping, implied by package version. This is being
+# used throughout the eclass as a switch between Plasma 5 and 6 packages.
+_PSLOT=6
+if $(ver_test -lt 5.27.50); then
+ _PSLOT=5
+fi
+
+inherit kde.org
+
+HOMEPAGE="https://kde.org/plasma-desktop"
+
+# @ECLASS_VARIABLE: KDE_ORG_SCHEDULE_URI
+# @INTERNAL
+# @DESCRIPTION:
+# For proper description see kde.org.eclass manpage.
+KDE_ORG_SCHEDULE_URI+="/Plasma_${_PSLOT}"
+
+# @ECLASS_VARIABLE: _KDE_SRC_URI
+# @INTERNAL
+# @DESCRIPTION:
+# Helper variable to construct release group specific SRC_URI.
+_KDE_SRC_URI="mirror://kde/"
+
+if [[ ${KDE_BUILD_TYPE} == live ]]; then
+ if [[ ${PV} != 9999 ]]; then
+ EGIT_BRANCH="Plasma/$(ver_cut 1-2)"
+ fi
+elif [[ -z ${KDE_ORG_COMMIT} ]]; then
+ case ${PV} in
+ 5.??.[6-9][05]* )
+ _KDE_SRC_URI+="unstable/plasma/$(ver_cut 1-3)/"
+ RESTRICT+=" mirror"
+ ;;
+ *) _KDE_SRC_URI+="stable/plasma/$(ver_cut 1-3)/" ;;
+ esac
+
+ SRC_URI="${_KDE_SRC_URI}${KDE_ORG_TAR_PN}-${PV}.tar.xz"
+fi
+
+if [[ ${_PSLOT} == 6 ]]; then
+ case ${PN} in
+ kglobalacceld | \
+ kwayland | \
+ kwayland-integration | \
+ libplasma | \
+ ocean-sound-theme | \
+ plasma-activities | \
+ plasma-activities-stats | \
+ plasma5support | \
+ print-manager) ;;
+ *) RDEPEND+=" !kde-plasma/${PN}:5" ;;
+ esac
+fi
+
+fi
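The version handling in the new plasma.kde.org.eclass is compact, so a standalone illustration may help: versions below 5.27.50 select _PSLOT=5 (so 5.27.80 and anything 6.x already select slot 6), and 5.x versions whose micro component starts with a digit 6-9 followed by 0 or 5 are fetched from the unstable tree with mirroring restricted. The snippet below only exercises that glob and is not part of the eclass.

# Standalone check of the unstable-version glob (assumes three-component
# versions, for which $(ver_cut 1-3) equals ${PV}):
for pv in 5.27.5 5.27.80 6.0.5; do
	case ${pv} in
		5.??.[6-9][05]* ) echo "${pv}: unstable/plasma/${pv}/ (mirror restricted)" ;;
		*) echo "${pv}: stable/plasma/${pv}/" ;;
	esac
done
# Prints: 5.27.5 -> stable, 5.27.80 -> unstable, 6.0.5 -> stable.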
diff --git a/eclass/plocale.eclass b/eclass/plocale.eclass
index 979b1dca09b4..954b06585746 100644
--- a/eclass/plocale.eclass
+++ b/eclass/plocale.eclass
@@ -1,4 +1,4 @@
-# Copyright 2012-2021 Gentoo Authors
+# Copyright 2012-2022 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: plocale.eclass
@@ -6,7 +6,7 @@
# Ulrich Müller <ulm@gentoo.org>
# @AUTHOR:
# Ben de Groot <yngwin@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: convenience functions to handle localizations
# @DESCRIPTION:
# The plocale (localization) eclass offers a number of functions to more
@@ -52,7 +52,7 @@
# translations and check if the ebuild's PLOCALES are still up to date.
case ${EAPI} in
- 6|7|8) ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
diff --git a/eclass/portability.eclass b/eclass/portability.eclass
index 8df8fcebc47b..78da440e22dd 100644
--- a/eclass/portability.eclass
+++ b/eclass/portability.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: portability.eclass
@@ -6,11 +6,11 @@
# base-system@gentoo.org
# @AUTHOR:
# Diego Pettenò <flameeyes@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: This eclass is created to avoid using non-portable GNUisms inside ebuilds
-case ${EAPI:-0} in
- [567]) ;;
+case ${EAPI} in
+ 6|7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -90,9 +90,8 @@ dlopen_lib() {
# - Darwin needs nothing
# - *BSD needs nothing
# - Linux needs -ldl (glibc and uclibc)
- # - Interix needs -ldl
case "${CHOST}" in
- *-linux-gnu*|*-linux-uclibc|*-interix*)
+ *-linux-gnu*|*-linux-uclibc)
echo "-ldl"
;;
esac
@@ -106,7 +105,7 @@ dlopen_lib() {
#
# This will return make (provided by system packages) for BSD userlands,
# or bsdmake for Darwin userlands and pmake for the rest of userlands,
-# both of which are provided by sys-devel/pmake package.
+# both of which are provided by dev-build/pmake package.
#
# Note: the bsdmake for Darwin userland is with compatibility with MacOSX
# default name.
@@ -139,11 +138,11 @@ get_mounts() {
fi
# OK, pray we have a -p option that outputs mounts in fstab format
- # using tabs as the seperator.
+ # using tabs as the separator.
# Then pray that there are no tabs in the either.
# Currently only FreeBSD supports this and the other BSDs will
# have to be patched.
- # Athough the BSD's may support /proc, they do NOT put \040 in place
+ # Although the BSDs may support /proc, they do NOT put \040 in place
# of the spaces and we should not force a /proc either.
local IFS=$'\t'
LC_ALL=C mount -p | while read node point fs foo ; do
diff --git a/eclass/postgres-multi.eclass b/eclass/postgres-multi.eclass
index c73f33c71b49..d93df851cd24 100644
--- a/eclass/postgres-multi.eclass
+++ b/eclass/postgres-multi.eclass
@@ -1,16 +1,12 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2022 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-inherit multibuild postgres
-EXPORT_FUNCTIONS pkg_setup src_prepare src_compile src_install src_test
-
-
# @ECLASS: postgres-multi.eclass
# @MAINTAINER:
# PostgreSQL <pgsql-bugs@gentoo.org>
# @AUTHOR:
# Aaron W. Swenson <titanofold@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 7
# @PROVIDES: multibuild postgres
# @BLURB: An eclass to build PostgreSQL-related packages against multiple slots
# @DESCRIPTION:
@@ -18,12 +14,15 @@ EXPORT_FUNCTIONS pkg_setup src_prepare src_compile src_install src_test
# build and install for one or more PostgreSQL slots as specified by
# POSTGRES_TARGETS use flags.
-
-case ${EAPI:-0} in
- 5|6|7) ;;
- *) die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}" ;;
+case ${EAPI} in
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ ! ${_POSTGRES_MULTI_ECLASS} ]]; then
+_POSTGRES_MULTI_ECLASS=1
+
+inherit multibuild postgres
# @ECLASS_VARIABLE: POSTGRES_COMPAT
# @PRE_INHERIT
@@ -58,7 +57,7 @@ export _POSTGRES_INTERSECT_SLOTS=( )
_postgres-multi_multibuild_wrapper() {
debug-print-function ${FUNCNAME} "${@}"
export PG_SLOT=${MULTIBUILD_VARIANT}
- export PG_CONFIG=$(which pg_config${MULTIBUILD_VARIANT//./})
+ export PG_CONFIG=$(type -P pg_config${MULTIBUILD_VARIANT//./})
if [[ -n ${PKG_CONFIG_PATH} ]] ; then
PKG_CONFIG_PATH="$(${PG_CONFIG} --libdir)/pkgconfig:${PKG_CONFIG_PATH}"
else
@@ -139,7 +138,7 @@ postgres-multi_src_prepare() {
# Portage, but won't be caught by /usr/bin/ebuild)
local slot
for slot in ${_POSTGRES_INTERSECT_SLOTS[@]} ; do
- if [[ -z $(which pg_config${slot/.} 2> /dev/null) ]] ; then
+ if [[ -z $(type -P pg_config${slot/.} 2> /dev/null) ]] ; then
eerror
eerror "postgres_targets_postgres${slot/.} use flag is enabled, but hasn't been emerged."
eerror
@@ -147,10 +146,7 @@ postgres-multi_src_prepare() {
fi
done
- case ${EAPI:-0} in
- 0|1|2|3|4|5) epatch_user ;;
- 6|7) eapply_user ;;
- esac
+ eapply_user
local MULTIBUILD_VARIANT
local MULTIBUILD_VARIANTS=("${_POSTGRES_INTERSECT_SLOTS[@]}")
@@ -164,6 +160,13 @@ postgres-multi_src_compile() {
postgres-multi_foreach emake
}
+# @FUNCTION: postgres-multi_src_test
+# @DESCRIPTION:
+# Runs `emake installcheck' in each build directory.
+postgres-multi_src_test() {
+ postgres-multi_foreach emake installcheck
+}
+
# @FUNCTION: postgres-multi_src_install
# @DESCRIPTION:
# Runs `emake install DESTDIR="${D}"' in each build directory.
@@ -171,9 +174,6 @@ postgres-multi_src_install() {
postgres-multi_foreach emake install DESTDIR="${D}"
}
-# @FUNCTION: postgres-multi_src_test
-# @DESCRIPTION:
-# Runs `emake installcheck' in each build directory.
-postgres-multi_src_test() {
- postgres-multi_foreach emake installcheck
-}
+fi
+
+EXPORT_FUNCTIONS pkg_setup src_prepare src_compile src_install src_test
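postgres-multi.eclass above is mostly reordered around the new inherit guard, but the per-variant wrapper (exporting PG_SLOT and PG_CONFIG, now located via type -P) is what consumer ebuilds rely on. A hedged sketch follows; the slot list and configure option are illustrative. Note that PG_CONFIG must be expanded inside the per-slot function, not in the postgres-multi_foreach argument list, or it would be expanded once before the wrapper sets it.

# Hypothetical consumer fragment:
POSTGRES_COMPAT=( 15 16 )
inherit postgres-multi

my_configure() {
	# PG_SLOT and PG_CONFIG are exported per variant by the wrapper,
	# so expand them here, inside the per-slot call.
	econf --with-pg-config="${PG_CONFIG}"
}

src_configure() {
	postgres-multi_foreach my_configure
}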
diff --git a/eclass/postgres.eclass b/eclass/postgres.eclass
index ec5242be59f8..9bf84fe13118 100644
--- a/eclass/postgres.eclass
+++ b/eclass/postgres.eclass
@@ -1,14 +1,12 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-EXPORT_FUNCTIONS pkg_setup
-
# @ECLASS: postgres.eclass
# @MAINTAINER:
# PostgreSQL <pgsql-bugs@gentoo.org>
# @AUTHOR:
# Aaron W. Swenson <titanofold@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: An eclass for PostgreSQL-related packages
# @DESCRIPTION:
# This eclass provides common utility functions that many
@@ -16,18 +14,20 @@ EXPORT_FUNCTIONS pkg_setup
# currently selected PostgreSQL slot is within a range, adding a system
# user to the postgres system group, and generating dependencies.
-
-case ${EAPI:-0} in
- 5|6|7) ;;
- *) die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}" ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ ! ${_POSTGRES_ECLASS} ]]; then
+_POSTGRES_ECLASS=1
+
# @ECLASS_VARIABLE: _POSTGRES_ALL_VERSIONS
# @INTERNAL
# @DESCRIPTION:
# List of versions to reverse sort POSTGRES_COMPAT slots
-_POSTGRES_ALL_VERSIONS=( 9999 14 13 12 11 10 )
+_POSTGRES_ALL_VERSIONS=( 9999 16 15 14 13 12 )
@@ -65,6 +65,17 @@ declare -p POSTGRES_USEDEP &>/dev/null && POSTGRES_DEP+="[${POSTGRES_USEDEP}]"
# required if the package must build against one of the PostgreSQL slots
# declared in POSTGRES_COMPAT.
+# @ECLASS_VARIABLE: PG_SLOT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# PG_SLOT is the chosen PostgreSQL slot that is used for the build.
+
+# @ECLASS_VARIABLE: PG_CONFIG
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# PG_CONFIG is the path to pg_config for the chosen PostgreSQL slot.
+# For example, PG_CONFIG="pg_config15"
+
# @ECLASS_VARIABLE: _POSTGRES_COMPAT
# @INTERNAL
# @DESCRIPTION:
@@ -106,7 +117,7 @@ postgres_check_slot() {
fi
# Don't die because we can't run postgresql-config during pretend.
- [[ "$EBUILD_PHASE" = "pretend" && -z "$(which postgresql-config 2> /dev/null)" ]] \
+ [[ "$EBUILD_PHASE" = "pretend" && -z "$(type -P postgresql-config 2> /dev/null)" ]] \
&& return 0
if has $(postgresql-config show 2> /dev/null) "${POSTGRES_COMPAT[@]}"; then
@@ -148,7 +159,11 @@ postgres_pkg_setup() {
fi
export PG_SLOT=${best_slot}
- export PG_CONFIG=$(which pg_config${best_slot//./})
+ export PG_CONFIG=$(type -P pg_config${best_slot//./})
+
+ if [[ -z ${PG_CONFIG} ]] ; then
+ die "Could not find pg_config for ${PG_SLOT}. Is dev-db/postgresql:${PG_SLOT} installed?"
+ fi
local pg_pkg_config_path="$(${PG_CONFIG} --libdir)/pkgconfig"
if [[ -n "${PKG_CONFIG_PATH}" ]]; then
@@ -159,3 +174,7 @@ postgres_pkg_setup() {
elog "PostgreSQL Target: ${best_slot}"
}
+
+fi
+
+EXPORT_FUNCTIONS pkg_setup
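For the single-slot case, postgres.eclass' exported pkg_setup now dies early when pg_config for the chosen slot is missing and publishes PG_SLOT/PG_CONFIG for later phases; unlike the multibuild case, these are stable across phases and can be expanded directly. A hedged sketch; the slot list and configure option are illustrative.

# Hypothetical consumer fragment:
POSTGRES_COMPAT=( 13 14 15 16 )
inherit postgres

DEPEND="${POSTGRES_DEP}"
RDEPEND="${POSTGRES_DEP}"

src_configure() {
	# pkg_setup (exported by the eclass) has already exported PG_CONFIG:
	econf --with-pg-config="${PG_CONFIG}"
}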
diff --git a/eclass/preserve-libs.eclass b/eclass/preserve-libs.eclass
index df07e511c130..35c65ef4436f 100644
--- a/eclass/preserve-libs.eclass
+++ b/eclass/preserve-libs.eclass
@@ -1,14 +1,14 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: preserve-libs.eclass
# @MAINTAINER:
# base-system@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 5 6 7 8
# @BLURB: preserve libraries after SONAME changes
case ${EAPI} in
- 5|6|7) ;;
+ 5|6|7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
diff --git a/eclass/pypi.eclass b/eclass/pypi.eclass
new file mode 100644
index 000000000000..b80ff9c95d36
--- /dev/null
+++ b/eclass/pypi.eclass
@@ -0,0 +1,283 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: pypi.eclass
+# @MAINTAINER:
+# Michał Górny <mgorny@gentoo.org>
+# @AUTHOR:
+# Michał Górny <mgorny@gentoo.org>
+# @SUPPORTED_EAPIS: 7 8
+# @BLURB: A helper eclass to generate PyPI source URIs
+# @DESCRIPTION:
+# The pypi.eclass can be used to easily obtain URLs for artifacts
+# uploaded to PyPI.org. When inherited, the eclass defaults SRC_URI
+# and S to fetch .tar.gz sdist. The project filename is normalized
+# by default (unless PYPI_NO_NORMALIZE is set prior to inheriting
+# the eclass), and the version is translated using
+# pypi_translate_version.
+#
+# If necessary, SRC_URI and S can be overridden by the ebuild. Two
+# helper functions, pypi_sdist_url and pypi_wheel_url are provided
+# to generate URLs to artifacts of specified type, with customizable
+# URL components. Additionally, pypi_wheel_name can be used to generate
+# wheel filename.
+#
+# pypi_normalize_name can be used to normalize an arbitrary project name
+# according to sdist/wheel normalization rules. pypi_translate_version
+# can be used to translate a Gentoo version string into its PEP 440
+# equivalent.
+#
+# @EXAMPLE:
+# @CODE
+# inherit pypi
+#
+# SRC_URI="$(pypi_sdist_url "${PN^}" "${PV}")"
+# S=${WORKDIR}/${P^}
+# @CODE
+
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ ! ${_PYPI_ECLASS} ]]; then
+_PYPI_ECLASS=1
+
+# @ECLASS_VARIABLE: PYPI_NO_NORMALIZE
+# @PRE_INHERIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# When set to a non-empty value, disables project name normalization
+# for the default SRC_URI and S values.
+
+# @ECLASS_VARIABLE: PYPI_PN
+# @PRE_INHERIT
+# @DESCRIPTION:
+# The PyPI project name. It rarely needs to be overridden, generally only
+# when the upstream project name does not conform to Gentoo naming rules,
+# e.g. when it contains dots or uppercase letters.
+#
+# Example use:
+# @CODE
+# PYPI_PN=${PN/-/.}
+# @CODE
+: "${PYPI_PN:=${PN}}"
+
+# @FUNCTION: _pypi_normalize_name
+# @INTERNAL
+# @USAGE: <name>
+# @DESCRIPTION:
+# Internal normalization function, returns the result
+# via _PYPI_NORMALIZED_NAME variable.
+_pypi_normalize_name() {
+ # NB: it's fine to alter it unconditionally since this function is
+ # always called from a subshell or in global scope
+ # (via _pypi_set_globals)
+ shopt -s extglob
+ local name=${1//+([._-])/_}
+ shopt -u extglob
+ _PYPI_NORMALIZED_NAME="${name,,}"
+}
+
+# @FUNCTION: pypi_normalize_name
+# @USAGE: <name>
+# @DESCRIPTION:
+# Normalize the project name according to sdist/wheel normalization
+# rules. That is, convert to lowercase and replace runs of [._-]
+# with a single underscore.
+#
+# Based on the spec, as of 2023-02-10:
+# https://packaging.python.org/en/latest/specifications/#package-distribution-file-formats
+pypi_normalize_name() {
+ [[ ${#} -ne 1 ]] && die "Usage: ${FUNCNAME} <name>"
+
+ local _PYPI_NORMALIZED_NAME
+ _pypi_normalize_name "${@}"
+ echo "${_PYPI_NORMALIZED_NAME}"
+}
+
+# @FUNCTION: _pypi_translate_version
+# @USAGE: <version>
+# @DESCRIPTION:
+# Internal version translation function, returns the result
+# via _PYPI_TRANSLATED_VERSION variable.
+_pypi_translate_version() {
+ local version=${1}
+ version=${version/_alpha/a}
+ version=${version/_beta/b}
+ version=${version/_pre/.dev}
+ version=${version/_rc/rc}
+ _PYPI_TRANSLATED_VERSION=${version/_p/.post}
+}
+
+# @FUNCTION: pypi_translate_version
+# @USAGE: <version>
+# @DESCRIPTION:
+# Translate the specified Gentoo version into the usual Python
+# counterpart. Assumes PEP 440 versions.
+#
+# Note that we do not have clear counterparts for the epoch segment,
+# nor for the development release segment.
+pypi_translate_version() {
+ [[ ${#} -ne 1 ]] && die "Usage: ${FUNCNAME} <version>"
+
+ local _PYPI_TRANSLATED_VERSION
+ _pypi_translate_version "${@}"
+ echo "${_PYPI_TRANSLATED_VERSION}"
+}
+
+# @FUNCTION: _pypi_sdist_url
+# @INTERNAL
+# @USAGE: [--no-normalize] [<project> [<version> [<suffix>]]]
+# @DESCRIPTION:
+# Internal sdist URL generator, returns the result via _PYPI_SDIST_URL
+# variable.
+_pypi_sdist_url() {
+ local normalize=1
+ if [[ ${1} == --no-normalize ]]; then
+ normalize=
+ shift
+ fi
+
+ if [[ ${#} -gt 3 ]]; then
+ die "Usage: ${FUNCNAME} [--no-normalize] <project> [<version> [<suffix>]]"
+ fi
+
+ local project=${1-"${PYPI_PN}"}
+ local version=${2-"$(pypi_translate_version "${PV}")"}
+ local suffix=${3-.tar.gz}
+ local _PYPI_NORMALIZED_NAME=${project}
+ [[ ${normalize} ]] && _pypi_normalize_name "${_PYPI_NORMALIZED_NAME}"
+ _PYPI_SDIST_URL="https://files.pythonhosted.org/packages/source/${project::1}/${project}/${_PYPI_NORMALIZED_NAME}-${version}${suffix}"
+}
+
+# @FUNCTION: pypi_sdist_url
+# @USAGE: [--no-normalize] [<project> [<version> [<suffix>]]]
+# @DESCRIPTION:
+# Output the URL to PyPI sdist for specified project/version tuple.
+#
+# The `--no-normalize` option disables project name normalization
+# for sdist filename. This may be necessary when dealing with distfiles
+# generated using build systems that did not follow PEP 625
+# (i.e. the sdist name contains uppercase letters, hyphens or dots).
+#
+# If <project> is unspecified, it defaults to ${PYPI_PN}. The project
+# name is normalized according to the specification unless
+# `--no-normalize` is passed.
+#
+# If <version> is unspecified, it defaults to ${PV} translated
+# via pypi_translate_version. If it is specified, then it is used
+# verbatim (the function can be called explicitly to translate custom
+# version number).
+#
+# If <suffix> is unspecified, it defaults to ".tar.gz". Another valid
+# value is ".zip" (please remember to add a BDEPEND on app-arch/unzip).
+pypi_sdist_url() {
+ local _PYPI_SDIST_URL
+ _pypi_sdist_url "${@}"
+ echo "${_PYPI_SDIST_URL}"
+}
+
+# @FUNCTION: pypi_wheel_name
+# @USAGE: [<project> [<version> [<python-tag> [<abi-platform-tag>]]]]
+# @DESCRIPTION:
+# Output the wheel filename for the specified project/version tuple.
+#
+# If <project> is unspecified, it defaults to ${PYPI_PN}. The project
+# name is normalized according to the wheel specification.
+#
+# If <version> is unspecified, it defaults to ${PV} translated
+# via pypi_translate_version. If it is specified, then it is used
+# verbatim (the function can be called explicitly to translate custom
+# version number).
+#
+# If <python-tag> is unspecified, it defaults to "py3". It can also be
+# "py2.py3", or a specific version in case of non-pure wheels.
+#
+# If <abi-platform-tag> is unspecified, it defaults to "none-any".
+# You need to specify the correct value for non-pure wheels,
+# e.g. "abi3-linux_x86_64".
+pypi_wheel_name() {
+ if [[ ${#} -gt 4 ]]; then
+ die "Usage: ${FUNCNAME} <project> [<version> [<python-tag> [<abi-platform-tag>]]]"
+ fi
+
+ local _PYPI_NORMALIZED_NAME
+ _pypi_normalize_name "${1:-"${PYPI_PN}"}"
+ local version=${2-"$(pypi_translate_version "${PV}")"}
+ local pytag=${3-py3}
+ local abitag=${4-none-any}
+ echo "${_PYPI_NORMALIZED_NAME}-${version}-${pytag}-${abitag}.whl"
+}
+
+# @FUNCTION: pypi_wheel_url
+# @USAGE: [--unpack] [<project> [<version> [<python-tag> [<abi-platform-tag>]]]]
+# @DESCRIPTION:
+# Output the URL to PyPI wheel for specified project/version tuple.
+#
+# The `--unpack` option causes a SRC_URI with an arrow operator to
+# be generated, that adds a .zip suffix to the fetched distfile,
+# so that it is unpacked in default src_unpack(). Note that
+# the wheel contents will be unpacked straight into ${WORKDIR}.
+# You need to add a BDEPEND on app-arch/unzip.
+#
+# If <project> is unspecified, it defaults to ${PYPI_PN}.
+#
+# If <version> is unspecified, it defaults to ${PV} translated
+# via pypi_translate_version. If it is specified, then it is used
+# verbatim (the function can be called explicitly to translate custom
+# version number).
+#
+# If <python-tag> is unspecified, it defaults to "py3". It can also be
+# "py2.py3", or a specific version in case of non-pure wheels.
+#
+# If <abi-platform-tag> is unspecified, it defaults to "none-any".
+# You need to specify the correct value for non-pure wheels,
+# e.g. "abi3-linux_x86_64".
+pypi_wheel_url() {
+ local unpack=
+ if [[ ${1} == --unpack ]]; then
+ unpack=1
+ shift
+ fi
+
+ if [[ ${#} -gt 4 ]]; then
+ die "Usage: ${FUNCNAME} [--unpack] <project> [<version> [<python-tag> [<abi-platform-tag>]]]"
+ fi
+
+ local filename=$(pypi_wheel_name "${@}")
+ local project=${1-"${PYPI_PN}"}
+ local version=${2-"$(pypi_translate_version "${PV}")"}
+ local pytag=${3-py3}
+ printf "https://files.pythonhosted.org/packages/%s" \
+ "${pytag}/${project::1}/${project}/${filename}"
+
+ if [[ ${unpack} ]]; then
+ echo " -> ${filename}.zip"
+ fi
+}
+
+# @FUNCTION: _pypi_set_globals
+# @INTERNAL
+# @DESCRIPTION:
+# Set global variables, SRC_URI and S.
+_pypi_set_globals() {
+ local _PYPI_SDIST_URL _PYPI_TRANSLATED_VERSION
+ _pypi_translate_version "${PV}"
+
+ if [[ ${PYPI_NO_NORMALIZE} ]]; then
+ _pypi_sdist_url --no-normalize "${PYPI_PN}" "${_PYPI_TRANSLATED_VERSION}"
+ S="${WORKDIR}/${PYPI_PN}-${_PYPI_TRANSLATED_VERSION}"
+ else
+ local _PYPI_NORMALIZED_NAME
+ _pypi_normalize_name "${PYPI_PN}"
+ _pypi_sdist_url "${PYPI_PN}" "${_PYPI_TRANSLATED_VERSION}"
+ S="${WORKDIR}/${_PYPI_NORMALIZED_NAME}-${_PYPI_TRANSLATED_VERSION}"
+ fi
+
+ SRC_URI=${_PYPI_SDIST_URL}
+}
+
+_pypi_set_globals
+
+fi
diff --git a/eclass/python-any-r1.eclass b/eclass/python-any-r1.eclass
index 2051b5e89b81..a21df8e89498 100644
--- a/eclass/python-any-r1.eclass
+++ b/eclass/python-any-r1.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: python-any-r1.eclass
@@ -7,7 +7,7 @@
# @AUTHOR:
# Author: Michał Górny <mgorny@gentoo.org>
# Based on work of: Krzysztof Pawlik <nelchael@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: python-utils-r1
# @BLURB: An eclass for packages having build-time dependency on Python.
# @DESCRIPTION:
@@ -38,26 +38,22 @@
# For more information, please see the Python Guide:
# https://projects.gentoo.org/python/guide/
-case "${EAPI:-0}" in
- [0-5]) die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}" ;;
- [6-8]) ;;
- *) die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}" ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_PYTHON_ANY_R1} ]]; then
+if [[ ! ${_PYTHON_ANY_R1_ECLASS} ]]; then
+_PYTHON_ANY_R1_ECLASS=1
-if [[ ${_PYTHON_R1} ]]; then
+if [[ ${_PYTHON_R1_ECLASS} ]]; then
die 'python-any-r1.eclass can not be used with python-r1.eclass.'
-elif [[ ${_PYTHON_SINGLE_R1} ]]; then
+elif [[ ${_PYTHON_SINGLE_R1_ECLASS} ]]; then
die 'python-any-r1.eclass can not be used with python-single-r1.eclass.'
fi
inherit python-utils-r1
-fi
-
-EXPORT_FUNCTIONS pkg_setup
-
# @ECLASS_VARIABLE: PYTHON_COMPAT
# @REQUIRED
# @DESCRIPTION:
@@ -139,7 +135,7 @@ EXPORT_FUNCTIONS pkg_setup
# Example use:
# @CODE
# python_check_deps() {
-# has_version "dev-python/foo[${PYTHON_USEDEP}]"
+# python_has_version "dev-python/foo[${PYTHON_USEDEP}]"
# }
# @CODE
#
@@ -161,7 +157,7 @@ EXPORT_FUNCTIONS pkg_setup
# Example use:
# @CODE
# python_check_deps() {
-# has_version "dev-python/bar[${PYTHON_SINGLE_USEDEP}]"
+# python_has_version "dev-python/bar[${PYTHON_SINGLE_USEDEP}]"
# }
# @CODE
#
@@ -174,14 +170,13 @@ _python_any_set_globals() {
local usestr deps i PYTHON_PKG_DEP
[[ ${PYTHON_REQ_USE} ]] && usestr="[${PYTHON_REQ_USE}]"
- _PYTHON_ALLOW_PY27=1 \
_python_set_impls
for i in "${_PYTHON_SUPPORTED_IMPLS[@]}"; do
_python_export "${i}" PYTHON_PKG_DEP
# note: need to strip '=' slot operator for || deps
- deps="${PYTHON_PKG_DEP/:0=/:0} ${deps}"
+ deps="${PYTHON_PKG_DEP/:=} ${deps}"
done
deps="|| ( ${deps})"
@@ -206,8 +201,6 @@ _python_any_set_globals() {
_python_any_set_globals
unset -f _python_any_set_globals
-if [[ ! ${_PYTHON_ANY_R1} ]]; then
-
# @FUNCTION: python_gen_any_dep
# @USAGE: <dependency-block>
# @DESCRIPTION:
@@ -228,9 +221,9 @@ if [[ ! ${_PYTHON_ANY_R1} ]]; then
# dev-python/baz[${PYTHON_USEDEP}] )')"
#
# python_check_deps() {
-# has_version "dev-python/foo[${PYTHON_SINGLE_USEDEP}]" \
-# && { has_version "dev-python/bar[${PYTHON_USEDEP}]" \
-# || has_version "dev-python/baz[${PYTHON_USEDEP}]"; }
+# python_has_version "dev-python/foo[${PYTHON_SINGLE_USEDEP}]" &&
+# { python_has_version "dev-python/bar[${PYTHON_USEDEP}]" ||
+# python_has_version "dev-python/baz[${PYTHON_USEDEP}]"; }
# }
# @CODE
#
@@ -266,7 +259,7 @@ python_gen_any_dep() {
local i_depstr=${depstr//\$\{PYTHON_USEDEP\}/${PYTHON_USEDEP}}
i_depstr=${i_depstr//\$\{PYTHON_SINGLE_USEDEP\}/${PYTHON_SINGLE_USEDEP}}
# note: need to strip '=' slot operator for || deps
- out="( ${PYTHON_PKG_DEP%=} ${i_depstr} ) ${out}"
+ out="( ${PYTHON_PKG_DEP%:=} ${i_depstr} ) ${out}"
done
echo "|| ( ${out})"
}
@@ -317,6 +310,7 @@ python_setup() {
# fallback to the best installed impl.
# (reverse iteration over _PYTHON_SUPPORTED_IMPLS)
+ local i
for (( i = ${#_PYTHON_SUPPORTED_IMPLS[@]} - 1; i >= 0; i-- )); do
local impl=${_PYTHON_SUPPORTED_IMPLS[i]}
# avoid checking EPYTHON twice
@@ -348,5 +342,6 @@ python-any-r1_pkg_setup() {
[[ ${MERGE_TYPE} != binary ]] && python_setup
}
-_PYTHON_ANY_R1=1
fi
+
+EXPORT_FUNCTIONS pkg_setup
diff --git a/eclass/python-r1.eclass b/eclass/python-r1.eclass
index 2f8cfa0316d9..fbc6082a1d92 100644
--- a/eclass/python-r1.eclass
+++ b/eclass/python-r1.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: python-r1.eclass
@@ -7,8 +7,8 @@
# @AUTHOR:
# Author: Michał Górny <mgorny@gentoo.org>
# Based on work of: Krzysztof Pawlik <nelchael@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
-# @PROVIDES: multibuild python-utils-r1
+# @SUPPORTED_EAPIS: 7 8
+# @PROVIDES: python-utils-r1
# @BLURB: A common, simple eclass for Python packages.
# @DESCRIPTION:
# A common eclass providing helper functions to build and install
@@ -30,29 +30,22 @@
# For more information, please see the Python Guide:
# https://projects.gentoo.org/python/guide/
-case "${EAPI:-0}" in
- [0-5])
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- [6-8])
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_PYTHON_R1} ]]; then
+if [[ -z ${_PYTHON_R1_ECLASS} ]]; then
+_PYTHON_R1_ECLASS=1
-if [[ ${_PYTHON_SINGLE_R1} ]]; then
+if [[ ${_PYTHON_SINGLE_R1_ECLASS} ]]; then
die 'python-r1.eclass can not be used with python-single-r1.eclass.'
-elif [[ ${_PYTHON_ANY_R1} ]]; then
+elif [[ ${_PYTHON_ANY_R1_ECLASS} ]]; then
die 'python-r1.eclass can not be used with python-any-r1.eclass.'
fi
inherit multibuild python-utils-r1
-fi
-
# @ECLASS_VARIABLE: PYTHON_COMPAT
# @REQUIRED
# @DESCRIPTION:
@@ -163,7 +156,7 @@ fi
# Example use:
# @CODE
# python_check_deps() {
-# has_version "dev-python/bar[${PYTHON_SINGLE_USEDEP}]"
+# python_has_version "dev-python/bar[${PYTHON_SINGLE_USEDEP}]"
# }
# @CODE
#
@@ -243,8 +236,6 @@ _python_set_globals() {
_python_set_globals
unset -f _python_set_globals
-if [[ ! ${_PYTHON_R1} ]]; then
-
# @FUNCTION: _python_validate_useflags
# @INTERNAL
# @DESCRIPTION:
@@ -483,9 +474,9 @@ python_gen_impl_dep() {
# dev-python/baz[${PYTHON_USEDEP}] )' -2)"
#
# python_check_deps() {
-# has_version "dev-python/foo[${PYTHON_SINGLE_USEDEP}]" \
-# && { has_version "dev-python/bar[${PYTHON_USEDEP}]" \
-# || has_version "dev-python/baz[${PYTHON_USEDEP}]"; }
+# python_has_version "dev-python/foo[${PYTHON_SINGLE_USEDEP}]" &&
+# { python_has_version "dev-python/bar[${PYTHON_USEDEP}]" ||
+# python_has_version "dev-python/baz[${PYTHON_USEDEP}]"; }
# }
#
# src_compile() {
@@ -531,7 +522,7 @@ python_gen_any_dep() {
local i_depstr=${depstr//\$\{PYTHON_USEDEP\}/${PYTHON_USEDEP}}
i_depstr=${i_depstr//\$\{PYTHON_SINGLE_USEDEP\}/${PYTHON_SINGLE_USEDEP}}
# note: need to strip '=' slot operator for || deps
- out="( ${PYTHON_PKG_DEP/:0=/:0} ${i_depstr} ) ${out}"
+ out="( ${PYTHON_PKG_DEP/:=} ${i_depstr} ) ${out}"
fi
done
echo "|| ( ${out})"
@@ -625,6 +616,24 @@ _python_multibuild_wrapper() {
python_foreach_impl() {
debug-print-function ${FUNCNAME} "${@}"
+ if [[ ${_DISTUTILS_R1_ECLASS} ]]; then
+ if has "${EBUILD_PHASE}" prepare configure compile test install &&
+ [[ ! ${_DISTUTILS_CALLING_FOREACH_IMPL} &&
+ ! ${_DISTUTILS_FOREACH_IMPL_WARNED} ]]
+ then
+ eqawarn "python_foreach_impl has been called directly while using distutils-r1."
+ eqawarn "Please redefine python_*() phase functions to meet your expectations"
+ eqawarn "instead."
+ _DISTUTILS_FOREACH_IMPL_WARNED=1
+
+ if ! has "${EAPI}" 7 8; then
+ die "Calling python_foreach_impl from distutils-r1 is banned in EAPI ${EAPI}"
+ fi
+ fi
+ # undo the eclass-set value to catch nested calls
+ local _DISTUTILS_CALLING_FOREACH_IMPL=
+ fi
+
local MULTIBUILD_VARIANTS
_python_obtain_impls
@@ -683,7 +692,8 @@ python_foreach_impl() {
# $(python_gen_any_dep 'dev-python/epydoc[${PYTHON_USEDEP}]' 'python2*') )"
#
# python_check_deps() {
-# has_version "dev-python/epydoc[${PYTHON_USEDEP}]"
+# ! use doc && return 0
+# python_has_version "dev-python/epydoc[${PYTHON_USEDEP}]"
# }
#
# src_compile() {
@@ -710,7 +720,7 @@ python_setup() {
fi
# (reverse iteration -- newest impl first)
- local found
+ local found i
_python_verify_patterns "${@}"
for (( i = ${#_PYTHON_SUPPORTED_IMPLS[@]} - 1; i >= 0; i-- )); do
local impl=${_PYTHON_SUPPORTED_IMPLS[i]}
@@ -786,10 +796,9 @@ python_replicate_script() {
local f
for f; do
local dosym=dosym
- [[ ${EAPI} == [67] ]] && dosym=dosym8
+ [[ ${EAPI} == 7 ]] && dosym=dosym8
"${dosym}" -r /usr/lib/python-exec/python-exec2 "${f#${ED}}"
done
}
-_PYTHON_R1=1
fi
diff --git a/eclass/python-single-r1.eclass b/eclass/python-single-r1.eclass
index 740c3283d1b6..4d61f08c06f0 100644
--- a/eclass/python-single-r1.eclass
+++ b/eclass/python-single-r1.eclass
@@ -7,7 +7,7 @@
# @AUTHOR:
# Author: Michał Górny <mgorny@gentoo.org>
# Based on work of: Krzysztof Pawlik <nelchael@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: python-utils-r1
# @BLURB: An eclass for Python packages not installed for multiple implementations.
# @DESCRIPTION:
@@ -37,31 +37,22 @@
# For more information, please see the Python Guide:
# https://projects.gentoo.org/python/guide/
-case "${EAPI:-0}" in
- [0-5])
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- [6-8])
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_PYTHON_SINGLE_R1} ]]; then
+if [[ ! ${_PYTHON_SINGLE_R1_ECLASS} ]]; then
+_PYTHON_SINGLE_R1_ECLASS=1
-if [[ ${_PYTHON_R1} ]]; then
+if [[ ${_PYTHON_R1_ECLASS} ]]; then
die 'python-single-r1.eclass can not be used with python-r1.eclass.'
-elif [[ ${_PYTHON_ANY_R1} ]]; then
+elif [[ ${_PYTHON_ANY_R1_ECLASS} ]]; then
die 'python-single-r1.eclass can not be used with python-any-r1.eclass.'
fi
inherit python-utils-r1
-fi
-
-EXPORT_FUNCTIONS pkg_setup
-
# @ECLASS_VARIABLE: PYTHON_COMPAT
# @REQUIRED
# @DESCRIPTION:
@@ -257,8 +248,6 @@ _python_single_set_globals() {
_python_single_set_globals
unset -f _python_single_set_globals
-if [[ ! ${_PYTHON_SINGLE_R1} ]]; then
-
# @FUNCTION: python_gen_useflags
# @USAGE: [<pattern>...]
# @DESCRIPTION:
@@ -463,5 +452,6 @@ python-single-r1_pkg_setup() {
[[ ${MERGE_TYPE} != binary ]] && python_setup
}
-_PYTHON_SINGLE_R1=1
fi
+
+EXPORT_FUNCTIONS pkg_setup
diff --git a/eclass/python-utils-r1.eclass b/eclass/python-utils-r1.eclass
index 67dc5bf754d6..bbf751399476 100644
--- a/eclass/python-utils-r1.eclass
+++ b/eclass/python-utils-r1.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: python-utils-r1.eclass
@@ -7,7 +7,7 @@
# @AUTHOR:
# Author: Michał Górny <mgorny@gentoo.org>
# Based on work of: Krzysztof Pawlik <nelchael@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Utility functions for packages with Python parts.
# @DESCRIPTION:
# A utility eclass providing functions to query Python implementations,
@@ -22,19 +22,16 @@
# NOTE: When dropping support for EAPIs here, we need to update
# metadata/install-qa-check.d/60python-pyc
# See bug #704286, bug #781878
-case "${EAPI:-0}" in
- [0-5]) die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}" ;;
- [6-8]) ;;
- *) die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}" ;;
-esac
-if [[ ${_PYTHON_ECLASS_INHERITED} ]]; then
- die 'python-r1 suite eclasses can not be used with python.eclass.'
-fi
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
-if [[ ! ${_PYTHON_UTILS_R1} ]]; then
+if [[ ! ${_PYTHON_UTILS_R1_ECLASS} ]]; then
+_PYTHON_UTILS_R1_ECLASS=1
-[[ ${EAPI} == [67] ]] && inherit eapi8-dosym
+[[ ${EAPI} == 7 ]] && inherit eapi8-dosym
inherit multiprocessing toolchain-funcs
# @ECLASS_VARIABLE: _PYTHON_ALL_IMPLS
@@ -43,7 +40,7 @@ inherit multiprocessing toolchain-funcs
# All supported Python implementations, most preferred last.
_PYTHON_ALL_IMPLS=(
pypy3
- python3_{8..11}
+ python3_{10..12}
)
readonly _PYTHON_ALL_IMPLS
@@ -55,7 +52,7 @@ _PYTHON_HISTORICAL_IMPLS=(
jython2_7
pypy pypy1_{8,9} pypy2_0
python2_{5..7}
- python3_{1..7}
+ python3_{1..9}
)
readonly _PYTHON_HISTORICAL_IMPLS
@@ -83,7 +80,7 @@ _python_verify_patterns() {
local impl pattern
for pattern; do
case ${pattern} in
- -[23]|3.[89]|3.1[01])
+ -[23]|3.[89]|3.1[012])
continue
;;
esac
@@ -117,11 +114,18 @@ _python_verify_patterns() {
_python_set_impls() {
local i
- if ! declare -p PYTHON_COMPAT &>/dev/null; then
- die 'PYTHON_COMPAT not declared.'
+ # TODO: drop BASH_VERSINFO check when we require EAPI 8
+ if [[ ${BASH_VERSINFO[0]} -ge 5 ]]; then
+ [[ ${PYTHON_COMPAT@a} == *a* ]]
+ else
+ [[ $(declare -p PYTHON_COMPAT) == "declare -a"* ]]
fi
- if [[ $(declare -p PYTHON_COMPAT) != "declare -a"* ]]; then
- die 'PYTHON_COMPAT must be an array.'
+ if [[ ${?} -ne 0 ]]; then
+ if ! declare -p PYTHON_COMPAT &>/dev/null; then
+ die 'PYTHON_COMPAT not declared.'
+ else
+ die 'PYTHON_COMPAT must be an array.'
+ fi
fi
local obsolete=()
@@ -132,9 +136,9 @@ _python_set_impls() {
# please keep them in sync with _PYTHON_ALL_IMPLS
# and _PYTHON_HISTORICAL_IMPLS
case ${i} in
- pypy3|python2_7|python3_[89]|python3_1[01])
+ pypy3|python3_9|python3_1[0-2])
;;
- jython2_7|pypy|pypy1_[89]|pypy2_0|python2_[5-6]|python3_[1-7])
+ jython2_7|pypy|pypy1_[89]|pypy2_0|python2_[5-7]|python3_[1-9])
obsolete+=( "${i}" )
;;
*)
@@ -149,17 +153,6 @@ _python_set_impls() {
done
fi
- if [[ -n ${obsolete[@]} && ${EBUILD_PHASE} == setup ]]; then
- # complain if people don't clean up old impls while touching
- # the ebuilds recently. use the copyright year to infer last
- # modification
- # NB: this check doesn't have to work reliably
- if [[ $(head -n 1 "${EBUILD}" 2>/dev/null) == *2022* ]]; then
- eqawarn "Please clean PYTHON_COMPAT of obsolete implementations:"
- eqawarn " ${obsolete[*]}"
- fi
- fi
-
local supp=() unsupp=()
for i in "${_PYTHON_ALL_IMPLS[@]}"; do
@@ -171,13 +164,7 @@ _python_set_impls() {
done
if [[ ! ${supp[@]} ]]; then
- # special-case python2_7 for python-any-r1
- if [[ ${_PYTHON_ALLOW_PY27} ]] && has python2_7 "${PYTHON_COMPAT[@]}"
- then
- supp+=( python2_7 )
- else
- die "No supported implementation in PYTHON_COMPAT."
- fi
+ die "No supported implementation in PYTHON_COMPAT."
fi
if [[ ${_PYTHON_SUPPORTED_IMPLS[@]} ]]; then
@@ -223,7 +210,7 @@ _python_impl_matches() {
for pattern; do
case ${pattern} in
-2|python2*|pypy)
- if [[ ${EAPI} != [67] ]]; then
+ if [[ ${EAPI} != 7 ]]; then
eerror
eerror "Python 2 is no longer supported in Gentoo, please remove Python 2"
eerror "${FUNCNAME[1]} calls."
@@ -232,7 +219,7 @@ _python_impl_matches() {
;;
-3)
# NB: "python3*" is fine, as "not pypy3"
- if [[ ${EAPI} != [67] ]]; then
+ if [[ ${EAPI} != 7 ]]; then
eerror
eerror "Python 2 is no longer supported in Gentoo, please remove Python 2"
eerror "${FUNCNAME[1]} calls."
@@ -240,12 +227,11 @@ _python_impl_matches() {
fi
return 0
;;
- 3.9)
- # the only unmasked pypy3 version is pypy3.9 atm
+ 3.10)
[[ ${impl} == python${pattern/./_} || ${impl} == pypy3 ]] &&
return 0
;;
- 3.8|3.1[01])
+ 3.8|3.9|3.1[1-2])
[[ ${impl} == python${pattern/./_} ]] && return 0
;;
*)
@@ -341,15 +327,17 @@ _python_export() {
debug-print "${FUNCNAME}: EPYTHON = ${EPYTHON}"
;;
PYTHON)
- export PYTHON=${EPREFIX}/usr/bin/${impl}
+ # Under EAPI 7+, this should just use ${BROOT}, but Portage
+ # <3.0.50 was buggy, and prefix users need this to update.
+ export PYTHON=${BROOT-${EPREFIX}}/usr/bin/${impl}
debug-print "${FUNCNAME}: PYTHON = ${PYTHON}"
;;
PYTHON_SITEDIR)
[[ -n ${PYTHON} ]] || die "PYTHON needs to be set for ${var} to be exported, or requested before it"
PYTHON_SITEDIR=$(
- "${PYTHON}" - <<-EOF || die
- import sysconfig
- print(sysconfig.get_path("purelib"))
+ "${PYTHON}" - "${EPREFIX}/usr" <<-EOF || die
+ import sys, sysconfig
+ print(sysconfig.get_path("purelib", vars={"base": sys.argv[1]}))
EOF
)
export PYTHON_SITEDIR
@@ -358,9 +346,9 @@ _python_export() {
PYTHON_INCLUDEDIR)
[[ -n ${PYTHON} ]] || die "PYTHON needs to be set for ${var} to be exported, or requested before it"
PYTHON_INCLUDEDIR=$(
- "${PYTHON}" - <<-EOF || die
- import sysconfig
- print(sysconfig.get_path("platinclude"))
+ "${PYTHON}" - "${ESYSROOT}/usr" <<-EOF || die
+ import sys, sysconfig
+ print(sysconfig.get_path("platinclude", vars={"installed_platbase": sys.argv[1]}))
EOF
)
export PYTHON_INCLUDEDIR
@@ -411,10 +399,6 @@ _python_export() {
local val
case "${impl}" in
- python2*|python3.6|python3.7*)
- # python* up to 3.7
- val=$($(tc-getPKG_CONFIG) --libs ${impl/n/n-}) || die
- ;;
python*)
# python3.8+
val=$($(tc-getPKG_CONFIG) --libs ${impl/n/n-}-embed) || die
@@ -453,22 +437,12 @@ _python_export() {
PYTHON_PKG_DEP)
local d
case ${impl} in
- python2.7)
- PYTHON_PKG_DEP='>=dev-lang/python-2.7.5-r2:2.7';;
- python3.8)
- PYTHON_PKG_DEP=">=dev-lang/python-3.8.12_p1-r1:3.8";;
- python3.9)
- PYTHON_PKG_DEP=">=dev-lang/python-3.9.9-r1:3.9";;
- python3.10)
- PYTHON_PKG_DEP=">=dev-lang/python-3.10.0_p1-r1:3.10";;
- python3.11)
- PYTHON_PKG_DEP=">=dev-lang/python-3.11.0_beta1-r1:3.11";;
python*)
- PYTHON_PKG_DEP="dev-lang/python:${impl#python}";;
- pypy)
- PYTHON_PKG_DEP='>=dev-python/pypy-7.3.0:0=';;
+ PYTHON_PKG_DEP="dev-lang/python:${impl#python}"
+ ;;
pypy3)
- PYTHON_PKG_DEP='>=dev-python/pypy3-7.3.7-r1:0=';;
+ PYTHON_PKG_DEP="dev-python/${impl}:="
+ ;;
*)
die "Invalid implementation: ${impl}"
esac
@@ -642,22 +616,22 @@ python_optimize() {
einfo "Optimize Python modules for ${instpath}"
case "${EPYTHON}" in
- python2.7|python3.[34])
- "${PYTHON}" -m compileall -q -f -d "${instpath}" "${d}"
- "${PYTHON}" -OO -m compileall -q -f -d "${instpath}" "${d}"
- ;;
- python3.[5678]|pypy3)
+ python3.8)
# both levels of optimization are separate since 3.5
"${PYTHON}" -m compileall -j "${jobs}" -q -f -d "${instpath}" "${d}"
"${PYTHON}" -O -m compileall -j "${jobs}" -q -f -d "${instpath}" "${d}"
"${PYTHON}" -OO -m compileall -j "${jobs}" -q -f -d "${instpath}" "${d}"
;;
- python*)
+ python*|pypy3)
+ # Python 3.9+
"${PYTHON}" -m compileall -j "${jobs}" -o 0 -o 1 -o 2 --hardlink-dupes -q -f -d "${instpath}" "${d}"
;;
- *)
+ pypy|jython2.7)
"${PYTHON}" -m compileall -q -f -d "${instpath}" "${d}"
;;
+ *)
+ die "${FUNCNAME}: unexpected EPYTHON=${EPYTHON}"
+ ;;
esac
done
}
@@ -696,6 +670,9 @@ python_scriptinto() {
python_doexe() {
debug-print-function ${FUNCNAME} "${@}"
+ [[ ${EBUILD_PHASE} != install ]] &&
+ die "${FUNCNAME} can only be used in src_install"
+
local f
for f; do
python_newexe "${f}" "${f##*/}"
@@ -714,6 +691,8 @@ python_doexe() {
python_newexe() {
debug-print-function ${FUNCNAME} "${@}"
+ [[ ${EBUILD_PHASE} != install ]] &&
+ die "${FUNCNAME} can only be used in src_install"
[[ ${EPYTHON} ]] || die 'No Python implementation set (EPYTHON is null).'
[[ ${#} -eq 2 ]] || die "Usage: ${FUNCNAME} <path> <new-name>"
@@ -734,7 +713,7 @@ python_newexe() {
# install the wrapper
local dosym=dosym
- [[ ${EAPI} == [67] ]] && dosym=dosym8
+ [[ ${EAPI} == 7 ]] && dosym=dosym8
"${dosym}" -r /usr/lib/python-exec/python-exec2 "${wrapd}/${newfn}"
# don't use this at home, just call python_doscript() instead
@@ -762,6 +741,9 @@ python_newexe() {
python_doscript() {
debug-print-function ${FUNCNAME} "${@}"
+ [[ ${EBUILD_PHASE} != install ]] &&
+ die "${FUNCNAME} can only be used in src_install"
+
local _PYTHON_REWRITE_SHEBANG=1
python_doexe "${@}"
}
@@ -786,6 +768,9 @@ python_doscript() {
python_newscript() {
debug-print-function ${FUNCNAME} "${@}"
+ [[ ${EBUILD_PHASE} != install ]] &&
+ die "${FUNCNAME} can only be used in src_install"
+
local _PYTHON_REWRITE_SHEBANG=1
python_newexe "${@}"
}
@@ -805,10 +790,10 @@ python_newscript() {
# site-packages directory.
#
# In the relative case, the exact path is determined directly
-# by each python_doscript/python_newscript function. Therefore,
-# python_moduleinto can be safely called before establishing the Python
-# interpreter and/or a single call can be used to set the path correctly
-# for multiple implementations, as can be seen in the following example.
+# by each python_domodule invocation. Therefore, python_moduleinto
+# can be safely called before establishing the Python interpreter and/or
+# a single call can be used to set the path correctly for multiple
+# implementations, as can be seen in the following example.
#
# Example:
# @CODE
@@ -832,6 +817,10 @@ python_moduleinto() {
# and packages (directories). All listed files will be installed
# for all enabled implementations, and compiled afterwards.
#
+# The files are installed into ${D} when run in the src_install() phase.
+# Otherwise, they are installed into the ${BUILD_DIR}/install location,
+# which is suitable for being picked up by distutils-r1 in PEP 517 mode.
+#
# Example:
# @CODE
# src_install() {
@@ -854,13 +843,24 @@ python_domodule() {
d=${sitedir#${EPREFIX}}/${_PYTHON_MODULEROOT//.//}
fi
- (
- insopts -m 0644
- insinto "${d}"
- doins -r "${@}" || return ${?}
- )
-
- python_optimize "${ED%/}/${d}"
+ if [[ ${EBUILD_PHASE} == install ]]; then
+ (
+ insopts -m 0644
+ insinto "${d}"
+ doins -r "${@}" || return ${?}
+ )
+ python_optimize "${ED%/}/${d}"
+ elif [[ -n ${BUILD_DIR} ]]; then
+ local dest=${BUILD_DIR}/install${EPREFIX}/${d}
+ mkdir -p "${dest}" || die
+ cp -pR "${@}" "${dest}/" || die
+ (
+ cd "${dest}" &&
+ chmod -R a+rX "${@##*/}"
+ ) || die
+ else
+ die "${FUNCNAME} can only be used in src_install or with BUILD_DIR set"
+ fi
}
# @FUNCTION: python_doheader
@@ -879,10 +879,12 @@ python_domodule() {
python_doheader() {
debug-print-function ${FUNCNAME} "${@}"
+ [[ ${EBUILD_PHASE} != install ]] &&
+ die "${FUNCNAME} can only be used in src_install"
[[ ${EPYTHON} ]] || die 'No Python implementation set (EPYTHON is null).'
local includedir=$(python_get_includedir)
- local d=${includedir#${EPREFIX}}
+ local d=${includedir#${ESYSROOT}}
(
insopts -m 0644
@@ -926,15 +928,6 @@ _python_wrapper_setup() {
local EPYTHON PYTHON
_python_export "${impl}" EPYTHON PYTHON
- local pyver pyother
- if [[ ${EPYTHON} != python2* ]]; then
- pyver=3
- pyother=2
- else
- pyver=2
- pyother=3
- fi
-
# Python interpreter
# note: we don't use symlinks because python likes to do some
# symlink reading magic that breaks stuff
@@ -943,10 +936,10 @@ _python_wrapper_setup() {
#!/bin/sh
exec "${PYTHON}" "\${@}"
_EOF_
- cp "${workdir}/bin/python" "${workdir}/bin/python${pyver}" || die
- chmod +x "${workdir}/bin/python" "${workdir}/bin/python${pyver}" || die
+ cp "${workdir}/bin/python" "${workdir}/bin/python3" || die
+ chmod +x "${workdir}/bin/python" "${workdir}/bin/python3" || die
- local nonsupp=( "python${pyother}" "python${pyother}-config" )
+ local nonsupp=( python2 python2-config )
# CPython-specific
if [[ ${EPYTHON} == python* ]]; then
@@ -955,24 +948,22 @@ _python_wrapper_setup() {
exec "${PYTHON}-config" "\${@}"
_EOF_
cp "${workdir}/bin/python-config" \
- "${workdir}/bin/python${pyver}-config" || die
+ "${workdir}/bin/python3-config" || die
chmod +x "${workdir}/bin/python-config" \
- "${workdir}/bin/python${pyver}-config" || die
+ "${workdir}/bin/python3-config" || die
# Python 2.6+.
ln -s "${PYTHON/python/2to3-}" "${workdir}"/bin/2to3 || die
# Python 2.7+.
ln -s "${EPREFIX}"/usr/$(get_libdir)/pkgconfig/${EPYTHON/n/n-}.pc \
- "${workdir}"/pkgconfig/python${pyver}.pc || die
+ "${workdir}"/pkgconfig/python3.pc || die
# Python 3.8+.
- if [[ ${EPYTHON} != python[23].[67] ]]; then
- ln -s "${EPREFIX}"/usr/$(get_libdir)/pkgconfig/${EPYTHON/n/n-}-embed.pc \
- "${workdir}"/pkgconfig/python${pyver}-embed.pc || die
- fi
+ ln -s "${EPREFIX}"/usr/$(get_libdir)/pkgconfig/${EPYTHON/n/n-}-embed.pc \
+ "${workdir}"/pkgconfig/python3-embed.pc || die
else
- nonsupp+=( 2to3 python-config "python${pyver}-config" )
+ nonsupp+=( 2to3 python-config python3-config )
fi
local x
@@ -1023,8 +1014,6 @@ python_fix_shebang() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${EPYTHON} ]] || die "${FUNCNAME}: EPYTHON unset (pkg_setup not called?)"
- local PYTHON
- _python_export "${EPYTHON}" PYTHON
local force quiet
while [[ ${@} ]]; do
@@ -1069,11 +1058,10 @@ python_fix_shebang() {
"${EPYTHON}")
match=1
;;
- python|python[23])
+ python|python3)
match=1
- [[ ${in_path##*/} == python2 ]] && error=1
;;
- python[23].[0-9]|python3.[1-9][0-9]|pypy|pypy3|jython[23].[0-9])
+ python2|python[23].[0-9]|python3.[1-9][0-9]|pypy|pypy3|jython[23].[0-9])
# Explicit mismatch.
match=1
error=1
@@ -1098,7 +1086,7 @@ python_fix_shebang() {
if [[ ! ${error} ]]; then
debug-print "${FUNCNAME}: in file ${f#${D%/}}"
debug-print "${FUNCNAME}: rewriting shebang: ${shebang}"
- sed -i -e "1s@${from}@#!${PYTHON}@" "${f}" || die
+ sed -i -e "1s@${from}@#!${EPREFIX}/usr/bin/${EPYTHON}@" "${f}" || die
any_fixed=1
else
eerror "The file has incompatible shebang:"
@@ -1234,6 +1222,62 @@ _python_check_EPYTHON() {
fi
}
+# @FUNCTION: _python_check_occluded_packages
+# @INTERNAL
+# @DESCRIPTION:
+# Check that the current directory does not contain any incomplete
+# package sources that would block installed packages from being used
+# (and effectively e.g. make it impossible to load compiled extensions).
+_python_check_occluded_packages() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ -z ${BUILD_DIR} || ! -d ${BUILD_DIR}/install ]] && return
+
+ local sitedir="${BUILD_DIR}/install$(python_get_sitedir)"
+ # avoid unnecessarily checking if we are inside install dir
+ [[ ${sitedir} -ef . ]] && return
+
+ local f fn diff l
+ for f in "${sitedir}"/*/; do
+ f=${f%/}
+ fn=${f##*/}
+
+ # skip metadata directories
+ [[ ${fn} == *.dist-info || ${fn} == *.egg-info ]] && continue
+
+ if [[ -d ${fn} ]]; then
+ diff=$(
+ comm -1 -3 <(
+ find "${fn}" -type f -not -path '*/__pycache__/*' |
+ sort
+ assert
+ ) <(
+ cd "${sitedir}" &&
+ find "${fn}" -type f -not -path '*/__pycache__/*' |
+ sort
+ assert
+ )
+ )
+
+ if [[ -n ${diff} ]]; then
+ eqawarn "The directory ${fn} occludes package installed for ${EPYTHON}."
+ eqawarn "The installed package includes additional files:"
+ eqawarn
+ while IFS= read -r l; do
+ eqawarn " ${l}"
+ done <<<"${diff}"
+ eqawarn
+
+ if [[ ! ${_PYTHON_WARNED_OCCLUDED_PACKAGES} ]]; then
+ eqawarn "For more information on occluded packages, please see:"
+ eqawarn "https://projects.gentoo.org/python/guide/test.html#importerrors-for-c-extensions"
+ _PYTHON_WARNED_OCCLUDED_PACKAGES=1
+ fi
+ fi
+ fi
+ done
+}
+
# @VARIABLE: EPYTEST_DESELECT
# @DEFAULT_UNSET
# @DESCRIPTION:
@@ -1252,6 +1296,31 @@ _python_check_EPYTHON() {
# parameter, when calling epytest. The listed files will be entirely
# skipped from test collection.
+# @ECLASS_VARIABLE: EPYTEST_TIMEOUT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set to a non-empty value, enables the pytest-timeout plugin and sets
+# the test timeout to the specified value (in seconds). This variable can
+# either be set in ebuilds whose tests are known to hang, or by the user
+# to prevent hangs in automated test environments. If this variable is
+# set prior to calling distutils_enable_tests in distutils-r1, a test
+# dependency on dev-python/pytest-timeout is added automatically.
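+#
+# Example (a sketch; set before distutils_enable_tests):
+# @CODE
+# EPYTEST_TIMEOUT=1800
+# distutils_enable_tests pytest
+# @CODE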
+
+# @ECLASS_VARIABLE: EPYTEST_XDIST
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set to a non-empty value, enables running tests in parallel
+# via the pytest-xdist plugin. If this variable is set prior to calling
+# distutils_enable_tests in distutils-r1, a test dependency
+# on dev-python/pytest-xdist is added automatically.
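+#
+# Example (a sketch; set before distutils_enable_tests):
+# @CODE
+# EPYTEST_XDIST=1
+# distutils_enable_tests pytest
+# @CODE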
+
+# @ECLASS_VARIABLE: EPYTEST_JOBS
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Specifies the number of jobs for parallel (pytest-xdist) test runs.
+# When unset, defaults to -j from MAKEOPTS, or the current nproc.
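+#
+# Example (a sketch; e.g. in make.conf or the calling environment):
+# @CODE
+# EPYTEST_JOBS=4
+# @CODE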
+
# @FUNCTION: epytest
# @USAGE: [<args>...]
# @DESCRIPTION:
@@ -1264,16 +1333,10 @@ epytest() {
debug-print-function ${FUNCNAME} "${@}"
_python_check_EPYTHON
+ _python_check_occluded_packages
- local color
- case ${NOCOLOR} in
- true|yes)
- color=no
- ;;
- *)
- color=yes
- ;;
- esac
+ local color=yes
+ [[ ${NO_COLOR} ]] && color=no
local args=(
# verbose progress reporting and tracebacks
@@ -1286,23 +1349,82 @@ epytest() {
# override filterwarnings=error, we do not really want -Werror
# for end users, as it tends to fail on new warnings from deps
-Wdefault
+ # however, do error out if the package failed to load
+ # an appropriate async plugin
+ -Werror::pytest.PytestUnhandledCoroutineWarning
# override color output
"--color=${color}"
# count is more precise when we're dealing with a large number
# of tests
-o console_output_style=count
- # disable the undesirable-dependency plugins by default to
- # trigger missing argument strips. strip options that require
- # them from config files. enable them explicitly via "-p ..."
- # if you *really* need them.
- -p no:cov
- -p no:flake8
- -p no:flakes
- -p no:pylint
- # sterilize pytest-markdown as it runs code snippets from all
- # *.md files found without any warning
- -p no:markdown
+		# minimize temporary directory retention; the test suites
+		# of some packages can grow them pretty large and normally
+		# we don't need to preserve them
+ -o tmp_path_retention_count=0
+ -o tmp_path_retention_policy=failed
)
+
+ if [[ ! ${PYTEST_DISABLE_PLUGIN_AUTOLOAD} ]]; then
+ args+=(
+ # disable the undesirable-dependency plugins by default to
+ # trigger missing argument strips. strip options that require
+ # them from config files. enable them explicitly via "-p ..."
+ # if you *really* need them.
+ -p no:cov
+ -p no:flake8
+ -p no:flakes
+ -p no:pylint
+ # sterilize pytest-markdown as it runs code snippets from all
+ # *.md files found without any warning
+ -p no:markdown
+ # pytest-sugar undoes everything that's good about pytest output
+ # and makes it hard to read logs
+ -p no:sugar
+ # pytest-xvfb automatically spawns Xvfb for every test suite,
+ # effectively forcing it even when we'd prefer the tests
+ # not to have DISPLAY at all, causing crashes sometimes
+ # and causing us to miss missing virtualx usage
+ -p no:xvfb
+ # intrusive packages that break random test suites
+ -p no:pytest-describe
+ -p no:plus
+ -p no:tavern
+ # does something to logging
+ -p no:salt-factories
+ )
+ fi
+
+ if [[ -n ${EPYTEST_TIMEOUT} ]]; then
+ if [[ ${PYTEST_PLUGINS} != *pytest_timeout* ]]; then
+ args+=(
+ -p timeout
+ )
+ fi
+
+ args+=(
+ "--timeout=${EPYTEST_TIMEOUT}"
+ )
+ fi
+
+ if [[ ${EPYTEST_XDIST} ]]; then
+ local jobs=${EPYTEST_JOBS:-$(makeopts_jobs)}
+ if [[ ${jobs} -gt 1 ]]; then
+ if [[ ${PYTEST_PLUGINS} != *xdist.plugin* ]]; then
+ args+=(
+ # explicitly enable the plugin, in case the ebuild was
+ # using PYTEST_DISABLE_PLUGIN_AUTOLOAD=1
+ -p xdist
+ )
+ fi
+ args+=(
+ -n "${jobs}"
+ # worksteal ensures that workers don't end up idle when heavy
+ # jobs are unevenly distributed
+ --dist=worksteal
+ )
+ fi
+ fi
+
local x
for x in "${EPYTEST_DESELECT[@]}"; do
args+=( --deselect "${x}" )
@@ -1318,6 +1440,11 @@ epytest() {
# remove common temporary directories left over by pytest plugins
rm -rf .hypothesis .pytest_cache || die
+ # pytest plugins create additional .pyc files while testing
+ # see e.g. https://bugs.gentoo.org/847235
+ if [[ -n ${BUILD_DIR} && -d ${BUILD_DIR} ]]; then
+ find "${BUILD_DIR}" -name '*-pytest-*.pyc' -delete || die
+ fi
return ${ret}
}
@@ -1333,8 +1460,14 @@ eunittest() {
debug-print-function ${FUNCNAME} "${@}"
_python_check_EPYTHON
+ _python_check_occluded_packages
- set -- "${EPYTHON}" -m unittest_or_fail discover -v "${@}"
+ # unittest fails with "no tests" correctly since Python 3.12
+ local runner=unittest
+ if _python_impl_matches "${EPYTHON}" 3.{9..11}; then
+ runner=unittest_or_fail
+ fi
+ set -- "${EPYTHON}" -m "${runner}" discover -v "${@}"
echo "${@}" >&2
"${@}" || die -n "Tests failed with ${EPYTHON}"
@@ -1352,15 +1485,13 @@ _python_run_check_deps() {
debug-print-function ${FUNCNAME} "${@}"
local impl=${1}
- local hasv_args=( -b )
- [[ ${EAPI} == 6 ]] && hasv_args=( --host-root )
einfo "Checking whether ${impl} is suitable ..."
local PYTHON_PKG_DEP
_python_export "${impl}" PYTHON_PKG_DEP
ebegin " ${PYTHON_PKG_DEP}"
- has_version "${hasv_args[@]}" "${PYTHON_PKG_DEP}"
+ has_version -b "${PYTHON_PKG_DEP}"
eend ${?} || return 1
declare -f python_check_deps >/dev/null || return 0
@@ -1377,10 +1508,8 @@ _python_run_check_deps() {
# A convenience wrapper for has_version() with verbose output and better
# defaults for use in python_check_deps().
#
-# The wrapper accepts EAPI 7+-style -b/-d/-r options to indicate
-# the root to perform the lookup on. Unlike has_version, the default
-# is -b. In EAPI 6, -b and -d are translated to --host-root
-# for compatibility.
+# The wrapper accepts -b/-d/-r options to indicate the root to perform
+# the lookup on. Unlike has_version, the default is -b.
#
# The wrapper accepts multiple package specifications. For the check
# to succeed, *all* specified atoms must match.
@@ -1395,14 +1524,6 @@ python_has_version() {
;;
esac
- if [[ ${EAPI} == 6 ]]; then
- if [[ ${root_arg} == -r ]]; then
- root_arg=()
- else
- root_arg=( --host-root )
- fi
- fi
-
local pkg
for pkg; do
ebegin " ${pkg}"
@@ -1413,5 +1534,4 @@ python_has_version() {
return 0
}
-_PYTHON_UTILS_R1=1
fi
diff --git a/eclass/qmail.eclass b/eclass/qmail.eclass
index ed53bca56fae..0e47aa55fa25 100644
--- a/eclass/qmail.eclass
+++ b/eclass/qmail.eclass
@@ -1,18 +1,18 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: qmail.eclass
# @MAINTAINER:
# Rolf Eike Beer <eike@sf-mail.de>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: common qmail functions
-case ${EAPI:-0} in
- [678]) ;;
+case ${EAPI} in
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ -z ${_QMAIL_ECLASS} ]] ; then
+if [[ -z ${_QMAIL_ECLASS} ]]; then
_QMAIL_ECLASS=1
inherit flag-o-matic toolchain-funcs fixheadtails
@@ -32,7 +32,7 @@ QMAIL_SPP_S="${WORKDIR}"/qmail-spp-${QMAIL_SPP_PV}
# @FUNCTION: is_prime
# @USAGE: <number>
# @DESCRIPTION:
-# Checks wether a number is a valid prime number for queue split
+# Checks whether a number is a valid prime number for queue split
is_prime() {
local number=${1} i
@@ -58,7 +58,7 @@ is_prime() {
dospp() {
exeinto "${QMAIL_HOME}"/plugins/
- newexe ${1} ${2:-$(basename ${1})}
+ newexe ${1} ${2:-${1##*/}}
}
# @FUNCTION: dosupervise
@@ -68,7 +68,7 @@ dospp() {
dosupervise() {
local service=$1
local runfile=${2:-${service}} logfile=${3:-${service}-log}
- [[ -z "${service}" ]] && die "no service given"
+ [[ -z ${service} ]] && die "no service given"
dodir ${SUPERVISE_DIR}/${service}{,/log}
fperms +t ${SUPERVISE_DIR}/${service}{,/log}
@@ -92,16 +92,21 @@ qmail_set_cc() {
echo "${cc} ${CFLAGS} ${CPPFLAGS}" > ./conf-cc || die 'Patching conf-cc failed.'
echo "${ld} ${LDFLAGS}" > ./conf-ld || die 'Patching conf-ld failed.'
- sed -e "s#'ar #'$(tc-getAR) #" -e "s#'ranlib #'$(tc-getRANLIB) #" -i make-makelib.sh
+
+ # This function is used also by sys-apps/ucspi-tcp and sys-process/daemontools-encore
+ # but they don't have make-makelib.sh script, see bugs #902009 and #902019
+ if [[ -f make-makelib.sh ]]; then
+ sed -e "s#'ar #'$(tc-getAR) #" -e "s#'ranlib #'$(tc-getRANLIB) #" -i make-makelib.sh || die
+ fi
}
genqmail_src_unpack() {
- cd "${WORKDIR}"
+ cd "${WORKDIR}" || die
[[ -n ${GENQMAIL_PV} ]] && unpack "${GENQMAIL_F}"
}
qmail_spp_src_unpack() {
- cd "${WORKDIR}"
+ cd "${WORKDIR}" || die
[[ -n ${QMAIL_SPP_PV} ]] && unpack "${QMAIL_SPP_F}"
}
@@ -109,24 +114,24 @@ qmail_spp_src_unpack() {
# @DESCRIPTION:
# Unpack common config files, and set built configuration (CFLAGS, LDFLAGS, etc)
qmail_src_postunpack() {
- cd "${S}"
+ cd "${S}" || die
qmail_set_cc
mysplit=${QMAIL_CONF_SPLIT:-23}
is_prime ${mysplit} || die "QMAIL_CONF_SPLIT is not a prime number."
einfo "Using conf-split value of ${mysplit}."
- echo -n ${mysplit} > "${S}"/conf-split
+ echo -n ${mysplit} > "${S}"/conf-split || die
}
qmail_src_compile() {
- cd "${S}"
- emake it man "$@" || die "make failed"
+ cd "${S}" || die
+ emake it man "$@"
}
qmail_spp_src_compile() {
- cd "${GENQMAIL_S}"/spp/
- emake || die "make spp failed"
+ cd "${GENQMAIL_S}"/spp/ || die
+ emake
}
qmail_base_install() {
@@ -152,6 +157,7 @@ qmail_base_install() {
qreceipt sendmail tcp-env
# obsolete tools, install if they are still present
+ local i
for i in elq maildirwatch pinq qail qsmhook; do
[[ -x ${i} ]] && doexe ${i}
done
@@ -167,7 +173,6 @@ qmail_base_install() {
exeopts -o qmailq -g qmail -m 4711
doexe qmail-queue
-
)
}
@@ -189,7 +194,7 @@ qmail_man_install() {
dodoc BLURB* INSTALL* PIC* README* REMOVE* \
SENDMAIL* TEST* THANKS* VERSION*
# notqmail converted the files to markdown
- if [ -f CHANGES ]; then
+ if [[ -f CHANGES ]]; then
dodoc CHANGES FAQ SECURITY THOUGHTS UPGRADE
else
dodoc CHANGES.md FAQ.md SECURITY.md THOUGHTS.md UPGRADE.md
@@ -225,13 +230,13 @@ qmail_maildir_install() {
"${MAILDIRMAKE}" "${D}${QMAIL_HOME}"/alias/.maildir
keepdir "${QMAIL_HOME}"/alias/.maildir/{cur,new,tmp}
+ local i
for i in "${QMAIL_HOME}"/alias/.qmail-{mailer-daemon,postmaster,root}; do
- if [[ ! -f "${ROOT}${i}" ]]; then
+ if [[ ! -f ${ROOT}${i} ]]; then
touch "${D}${i}"
fowners alias:qmail "${i}"
fi
done
-
)
}
@@ -240,7 +245,7 @@ qmail_tcprules_install() {
insinto "${TCPRULES_DIR}"
doins "${GENQMAIL_S}"/tcprules/Makefile.qmail
doins "${GENQMAIL_S}"/tcprules/tcp.qmail-*
- use ssl && use pop3 || rm -f "${D}${TCPRULES_DIR}"/tcp.qmail-pop3sd
+ rm -f "${D}${TCPRULES_DIR}"/tcp.qmail-pop3sd || die
}
qmail_supervise_install_one() {
@@ -255,15 +260,15 @@ qmail_supervise_install_one() {
qmail_supervise_install() {
einfo "Installing supervise scripts"
- cd "${GENQMAIL_S}"/supervise
+ cd "${GENQMAIL_S}"/supervise || die
+ local i
for i in qmail-{send,smtpd,qmtpd,qmqpd}; do
qmail_supervise_install_one ${i}
done
if use pop3; then
qmail_supervise_install_one qmail-pop3d
- use ssl && qmail_supervise_install_one qmail-pop3sd
fi
}
@@ -274,6 +279,7 @@ qmail_spp_install() {
einfo "Installing qmail-spp plugins"
keepdir "${QMAIL_HOME}"/plugins/
+ local i
for i in authlog mfdnscheck ifauthnext tarpit; do
dospp "${GENQMAIL_S}"/spp/${i}
done
@@ -331,7 +337,7 @@ qmail_queue_setup() {
qmail_rootmail_fixup() {
local TMPCMD="ln -sf ${QMAIL_HOME}/alias/.maildir/ ${ROOT}/root/.maildir"
- if [[ -d "${ROOT}"/root/.maildir && ! -L "${ROOT}"/root/.maildir ]] ; then
+ if [[ -d ${ROOT}/root/.maildir && ! -L ${ROOT}/root/.maildir ]] ; then
elog "Previously the qmail ebuilds created /root/.maildir/ but not"
elog "every mail was delivered there. If the directory does not"
elog "contain any mail, please delete it and run:"
@@ -344,13 +350,14 @@ qmail_rootmail_fixup() {
}
qmail_tcprules_build() {
- for f in tcp.qmail-{smtp,qmtp,qmqp,pop3,pop3s}; do
+ local f
+ for f in tcp.qmail-{smtp,qmtp,qmqp,pop3}; do
# please note that we don't check if it exists
# as we want it to make the cdb files anyway!
- src="${ROOT}${TCPRULES_DIR}/${f}"
- cdb="${ROOT}${TCPRULES_DIR}/${f}.cdb"
- tmp="${ROOT}${TCPRULES_DIR}/.${f}.tmp"
- [[ -e "${src}" ]] && tcprules "${cdb}" "${tmp}" < "${src}"
+ local src="${ROOT}${TCPRULES_DIR}/${f}"
+ local cdb="${ROOT}${TCPRULES_DIR}/${f}.cdb"
+ local tmp="${ROOT}${TCPRULES_DIR}/.${f}.tmp"
+ [[ -e ${src} ]] && tcprules "${cdb}" "${tmp}" < "${src}"
done
}
@@ -371,11 +378,6 @@ qmail_supervise_config_notice() {
elog "To start the pop3 server as well, create the following link:"
elog "ln -s ${SUPERVISE_DIR}/qmail-pop3d /service/qmail-pop3d"
elog
- if use ssl; then
- elog "To start the pop3s server as well, create the following link:"
- elog "ln -s ${SUPERVISE_DIR}/qmail-pop3sd /service/qmail-pop3sd"
- elog
- fi
fi
elog "Additionally, the QMTP and QMQP protocols are supported, "
elog "and can be started as:"
@@ -388,10 +390,10 @@ qmail_supervise_config_notice() {
}
qmail_config_fast() {
- if [[ ${ROOT} = / ]]; then
+ if [[ -z ${ROOT} ]]; then
local host=$(hostname --fqdn)
- if [[ -z "${host}" ]]; then
+ if [[ -z ${host} ]]; then
eerror
eerror "Cannot determine your fully-qualified hostname"
eerror "Please setup your /etc/hosts as described in"
@@ -400,7 +402,7 @@ qmail_config_fast() {
die "cannot determine FQDN"
fi
- if [[ ! -f "${ROOT}${QMAIL_HOME}"/control/me ]]; then
+ if [[ ! -f ${ROOT}${QMAIL_HOME}/control/me ]]; then
"${ROOT}${QMAIL_HOME}"/bin/config-fast ${host}
fi
else
diff --git a/eclass/qmake-utils.eclass b/eclass/qmake-utils.eclass
index a86ce1fbabb8..5c5fa8dcb047 100644
--- a/eclass/qmake-utils.eclass
+++ b/eclass/qmake-utils.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: qmake-utils.eclass
@@ -9,7 +9,7 @@
# @SUPPORTED_EAPIS: 7 8
# @BLURB: Common functions for qmake-based packages.
# @DESCRIPTION:
-# Utility eclass providing wrapper functions for Qt5 qmake.
+# Utility eclass providing wrapper functions for Qt qmake.
#
# This eclass does not set any metadata variables nor export any phase
# functions. It can be inherited safely.
@@ -78,12 +78,15 @@ qt5_get_qmake_args() {
QMAKE_CFLAGS="${CFLAGS}"
QMAKE_CFLAGS_RELEASE=
QMAKE_CFLAGS_DEBUG=
+ QMAKE_CFLAGS_RELEASE_WITH_DEBUGINFO=
QMAKE_CXXFLAGS="${CXXFLAGS}"
QMAKE_CXXFLAGS_RELEASE=
QMAKE_CXXFLAGS_DEBUG=
+ QMAKE_CXXFLAGS_RELEASE_WITH_DEBUGINFO=
QMAKE_LFLAGS="${LDFLAGS}"
QMAKE_LFLAGS_RELEASE=
QMAKE_LFLAGS_DEBUG=
+ QMAKE_LFLAGS_RELEASE_WITH_DEBUGINFO=
EOF
}
@@ -116,4 +119,99 @@ eqmake5() {
fi
}
+# @FUNCTION: qt6_get_bindir
+# @DESCRIPTION:
+# Echoes the directory where Qt6 binaries are installed.
+# EPREFIX is already prepended to the returned path.
+qt6_get_bindir() {
+ echo ${EPREFIX}$(qt6_get_libdir)/qt6/bin
+}
+
+# @FUNCTION: qt6_get_headerdir
+# @DESCRIPTION:
+# Echoes the directory where Qt6 headers are installed.
+qt6_get_headerdir() {
+ echo /usr/include/qt6
+}
+
+# @FUNCTION: qt6_get_libdir
+# @DESCRIPTION:
+# Echoes the directory where Qt6 libraries are installed.
+qt6_get_libdir() {
+ echo /usr/$(get_libdir)
+}
+
+# @FUNCTION: qt6_get_mkspecsdir
+# @DESCRIPTION:
+# Echoes the directory where Qt6 mkspecs are installed.
+qt6_get_mkspecsdir() {
+ echo $(qt6_get_libdir)/qt6/mkspecs
+}
+
+# @FUNCTION: qt6_get_plugindir
+# @DESCRIPTION:
+# Echoes the directory where Qt6 plugins are installed.
+qt6_get_plugindir() {
+ echo $(qt6_get_libdir)/qt6/plugins
+}
+
+# @FUNCTION: qt6_get_qmake_args
+# @DESCRIPTION:
+# Echoes a multi-line string containing arguments to pass to qmake.
+qt6_get_qmake_args() {
+ cat <<-EOF
+ QMAKE_AR="$(tc-getAR) cqs"
+ QMAKE_CC="$(tc-getCC)"
+ QMAKE_LINK_C="$(tc-getCC)"
+ QMAKE_LINK_C_SHLIB="$(tc-getCC)"
+ QMAKE_CXX="$(tc-getCXX)"
+ QMAKE_LINK="$(tc-getCXX)"
+ QMAKE_LINK_SHLIB="$(tc-getCXX)"
+ QMAKE_OBJCOPY="$(tc-getOBJCOPY)"
+ QMAKE_RANLIB=
+ QMAKE_STRIP=
+ QMAKE_CFLAGS="${CFLAGS}"
+ QMAKE_CFLAGS_RELEASE=
+ QMAKE_CFLAGS_DEBUG=
+ QMAKE_CFLAGS_RELEASE_WITH_DEBUGINFO=
+ QMAKE_CXXFLAGS="${CXXFLAGS}"
+ QMAKE_CXXFLAGS_RELEASE=
+ QMAKE_CXXFLAGS_DEBUG=
+ QMAKE_CXXFLAGS_RELEASE_WITH_DEBUGINFO=
+ QMAKE_LFLAGS="${LDFLAGS}"
+ QMAKE_LFLAGS_RELEASE=
+ QMAKE_LFLAGS_DEBUG=
+ QMAKE_LFLAGS_RELEASE_WITH_DEBUGINFO=
+ EOF
+}
+
+# @FUNCTION: eqmake6
+# @USAGE: [arguments for qmake]
+# @DESCRIPTION:
+# Wrapper for Qt6's qmake. All arguments are passed to qmake.
+#
+# For recursive build systems, i.e. those based on the subdirs template,
+# you should run eqmake6 on the top-level project file only, unless you
+# have a valid reason to do otherwise. During the build, qmake will
+# be automatically re-invoked with the right arguments in every directory
+# specified inside the top-level project file.
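+#
+# Example (a minimal sketch):
+# @CODE
+# src_configure() {
+#	eqmake6
+# }
+# @CODE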
+eqmake6() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ ebegin "Running qmake"
+
+ local -a args
+ mapfile -t args <<<"$(qt6_get_qmake_args)"
+ # NB: we're passing literal quotes in but qmake doesn't seem to mind
+ "$(qt6_get_bindir)"/qmake -makefile "${args[@]}" "$@"
+
+ if ! eend $? ; then
+ echo
+ eerror "Running qmake has failed! (see above for details)"
+ eerror "This shouldn't happen - please send a bug report to https://bugs.gentoo.org/"
+ echo
+ die "eqmake6 failed"
+ fi
+}
+
fi
diff --git a/eclass/qt5-build.eclass b/eclass/qt5-build.eclass
index 6da97387c1ab..76a089b2908d 100644
--- a/eclass/qt5-build.eclass
+++ b/eclass/qt5-build.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: qt5-build.eclass
@@ -11,15 +11,17 @@
# @DESCRIPTION:
# This eclass contains various functions that are used when building Qt5.
-if [[ ${CATEGORY} != dev-qt ]]; then
- die "${ECLASS} is only to be used for building Qt 5"
-fi
-
case ${EAPI} in
8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ -z ${_QT5_BUILD_ECLASS} ]]; then
+_QT5_BUILD_ECLASS=1
+
+[[ ${CATEGORY} != dev-qt ]] &&
+ die "${ECLASS} is only to be used for building Qt 5"
+
# @ECLASS_VARIABLE: QT5_BUILD_TYPE
# @DESCRIPTION:
# Default value is "release".
@@ -49,7 +51,7 @@ readonly QT5_BUILD_TYPE
# @DESCRIPTION:
# The upstream name of the module this package belongs to. Used for
# SRC_URI and EGIT_REPO_URI. Must be set before inheriting the eclass.
-: ${QT5_MODULE:=${PN}}
+: "${QT5_MODULE:=${PN}}"
# @ECLASS_VARIABLE: QT5_PV
# @DESCRIPTION:
@@ -63,6 +65,13 @@ readonly QT5_PV
# The upstream package name of the module this package belongs to.
# Used for SRC_URI and S.
+# @ECLASS_VARIABLE: _QT5_GENTOOPATCHSET_REV
+# @DEFAULT_UNSET
+# @INTERNAL
+# @DESCRIPTION:
+# Gentoo downstream patchset version applied over qtbase. Used for SRC_URI and
+# applied in src_prepare.
+
# @ECLASS_VARIABLE: QT5_TARGET_SUBDIRS
# @DEFAULT_UNSET
# @DESCRIPTION:
@@ -91,7 +100,7 @@ readonly QT5_PV
# For proper description see virtualx.eclass man page.
# Here we redefine default value to be manual, if your package needs virtualx
# for tests you should proceed with setting VIRTUALX_REQUIRED=test.
-: ${VIRTUALX_REQUIRED:=manual}
+: "${VIRTUALX_REQUIRED:=manual}"
inherit estack flag-o-matic toolchain-funcs virtualx
@@ -100,7 +109,7 @@ if [[ ${PN} != qtwebengine ]]; then
*9999 )
inherit kde.org # kde/5.15 branch
;;
- 5.15.[3-9]* )
+ 5.15.[5-9]* | 5.15.??* )
# official stable release
_QT5_P=${QT5_MODULE}-everywhere-opensource-src-${PV}
HOMEPAGE="https://www.qt.io/"
@@ -115,11 +124,23 @@ if [[ ${PN} != qtwebengine ]]; then
esac
fi
+if [[ ${QT5_MODULE} == qtbase ]]; then
+ case ${PV} in
+ 5.15.11)
+ _QT5_GENTOOPATCHSET_REV=4
+ ;;
+ *)
+ _QT5_GENTOOPATCHSET_REV=5
+ ;;
+ esac
+ SRC_URI+=" https://dev.gentoo.org/~asturm/distfiles/qtbase-5.15-gentoo-patchset-${_QT5_GENTOOPATCHSET_REV}.tar.xz"
+fi
+
# @ECLASS_VARIABLE: QT5_BUILD_DIR
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# Build directory for out-of-source builds.
-: ${QT5_BUILD_DIR:=${S}_build}
+: "${QT5_BUILD_DIR:=${S}_build}"
LICENSE="|| ( GPL-2 GPL-3 LGPL-3 ) FDL-1.3"
@@ -152,14 +173,21 @@ fi
###### Phase functions ######
-EXPORT_FUNCTIONS src_prepare src_configure src_compile src_install src_test pkg_postinst pkg_postrm
-
# @FUNCTION: qt5-build_src_prepare
# @DESCRIPTION:
# Prepares the environment and patches the sources if necessary.
qt5-build_src_prepare() {
qt5_prepare_env
+ # Workaround for bug #915203
+ # Upstream: https://bugreports.qt.io/browse/QTBUG-111514
+ if [[ ${PN} != qtcore ]]; then
+ append-ldflags $(test-flags-CCLD -Wl,--undefined-version)
+ fi
+
+ # many bugs, no one to fix
+ filter-lto
+
if [[ ${QT5_BUILD_TYPE} == live ]] || [[ -n ${KDE_ORG_COMMIT} ]]; then
if [[ -n ${KDE_ORG_COMMIT} ]]; then
einfo "Preparing KDE Qt5PatchCollection snapshot at ${KDE_ORG_COMMIT}"
@@ -174,15 +202,6 @@ qt5-build_src_prepare() {
sed -i -e "/Creating qmake/i if [ '!' -e \"\$outpath/bin/qmake\" ]; then" \
-e '/echo "Done."/a fi' configure || die "sed failed (skip qmake bootstrap)"
- # Respect CC, CXX, *FLAGS, MAKEOPTS and EXTRA_EMAKE when bootstrapping qmake
- sed -i -e "/outpath\/qmake\".*\"\$MAKE\")/ s|)| \
- ${MAKEOPTS} ${EXTRA_EMAKE} 'CC=$(tc-getCC)' 'CXX=$(tc-getCXX)' \
- 'QMAKE_CFLAGS=${CFLAGS}' 'QMAKE_CXXFLAGS=${CXXFLAGS}' 'QMAKE_LFLAGS=${LDFLAGS}'&|" \
- -e 's/\(setBootstrapVariable\s\+\|EXTRA_C\(XX\)\?FLAGS=.*\)QMAKE_C\(XX\)\?FLAGS_\(DEBUG\|RELEASE\).*/:/' \
- configure || die "sed failed (respect env for qmake build)"
- sed -i -e '/^CPPFLAGS\s*=/ s/-g //' \
- qmake/Makefile.unix || die "sed failed (CPPFLAGS for qmake build)"
-
# Respect CXX in bsymbolic_functions, fvisibility, precomp, and a few other tests
sed -i -e "/^QMAKE_CONF_COMPILER=/ s:=.*:=\"$(tc-getCXX)\":" \
configure || die "sed failed (QMAKE_CONF_COMPILER)"
@@ -193,6 +212,19 @@ qt5-build_src_prepare() {
[[ -n ${QT5_KDEPATCHSET_REV} ]] && eapply "${WORKDIR}/${QT5_MODULE}-${PV}-gentoo-kde-${QT5_KDEPATCHSET_REV}"
+ if [[ ${QT5_MODULE} == qtbase ]]; then
+ [[ -n ${_QT5_GENTOOPATCHSET_REV} ]] && eapply "${WORKDIR}/qtbase-5.15-gentoo-patchset-${_QT5_GENTOOPATCHSET_REV}"
+
+ # Respect CC, CXX, *FLAGS, MAKEOPTS and EXTRA_EMAKE when bootstrapping qmake
+ sed -i -e "/outpath\/qmake\".*\"*\$MAKE\"*)/ s|)| \
+ ${MAKEOPTS} ${EXTRA_EMAKE} 'CC=$(tc-getCC)' 'CXX=$(tc-getCXX)' \
+ 'QMAKE_CFLAGS=${CFLAGS}' 'QMAKE_CXXFLAGS=${CXXFLAGS}' 'QMAKE_LFLAGS=${LDFLAGS}'&|" \
+ -e 's/\(setBootstrapVariable\s\+\|EXTRA_C\(XX\)\?FLAGS=.*\)QMAKE_C\(XX\)\?FLAGS_\(DEBUG\|RELEASE\).*/:/' \
+ configure || die "sed failed (respect env for qmake build)"
+ sed -i -e '/^CPPFLAGS\s*=/ s/-g //' \
+ qmake/Makefile.unix || die "sed failed (CPPFLAGS for qmake build)"
+ fi
+
default
}
@@ -303,6 +335,47 @@ qt5-build_pkg_postrm() {
###### Public helpers ######
+# @FUNCTION: qt5_configure_oos_quirk
+# @USAGE: <file> or <file> <path>
+# @DESCRIPTION:
+# Quirk for out-of-source builds. Runs qmake in the root directory and
+# copies the generated pri <file> from the source <path> to the same
+# <path> under the build dir.
+# If no <path> is given, <file> is copied to ${QT5_BUILD_DIR}.
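+#
+# Example (as used by qt5_tools_configure in this eclass):
+# @CODE
+# qt5_configure_oos_quirk qttools-config.pri
+# @CODE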
+qt5_configure_oos_quirk() {
+ if [[ "$#" == 2 ]]; then
+ local source="${2}/${1}"
+ local dest="${QT5_BUILD_DIR}/${2}"
+ elif [[ "$#" == 1 ]]; then
+ local source="${1}"
+ local dest="${QT5_BUILD_DIR}"
+ else
+ die "${FUNCNAME[0]} must be passed either one or two arguments"
+ fi
+
+ mkdir -p "${dest}" || die
+ qt5_qmake "${QT5_BUILD_DIR}"
+ cp "${source}" "${dest}" || die
+}
+
+# @FUNCTION: qt5_syncqt_version
+# @DESCRIPTION:
+# Wrapper for Qt5 syncqt.pl to sync header files for ${PV} (required to run if
+# headers are added/removed by patching)
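+#
+# Example (a sketch; typically run from src_prepare after patching):
+# @CODE
+# src_prepare() {
+#	qt5-build_src_prepare
+#	qt5_syncqt_version
+# }
+# @CODE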
+qt5_syncqt_version() {
+ if [[ ${PV} == *9999* ]]; then
+ return
+ fi
+
+ local syncqt
+ if [[ ${PN} == qtcore ]]; then
+ syncqt=bin/syncqt.pl
+ else
+ syncqt=${QT5_BINDIR}/syncqt.pl
+ fi
+
+ perl ${syncqt} -version ${PV} || die
+}
+
# @FUNCTION: qt5_symlink_binary_to_path
# @USAGE: <target binary name> [suffix]
# @DESCRIPTION:
@@ -697,9 +770,7 @@ qt5_tools_configure() {
# allow the ebuild to override what we set here
myqmakeargs=( "${qmakeargs[@]}" "${myqmakeargs[@]}" )
- mkdir -p "${QT5_BUILD_DIR}" || die
- qt5_qmake "${QT5_BUILD_DIR}"
- cp qttools-config.pri "${QT5_BUILD_DIR}" || die
+ qt5_configure_oos_quirk qttools-config.pri
}
# @FUNCTION: qt5_qmake_args
@@ -950,3 +1021,7 @@ qt5_regenerate_global_configs() {
ewarn "${qmodule_pri} or ${qmodule_pri_orig} does not exist or is not a regular file"
fi
}
+
+fi
+
+EXPORT_FUNCTIONS src_prepare src_configure src_compile src_install src_test pkg_postinst pkg_postrm
diff --git a/eclass/qt6-build.eclass b/eclass/qt6-build.eclass
new file mode 100644
index 000000000000..4fc838c478af
--- /dev/null
+++ b/eclass/qt6-build.eclass
@@ -0,0 +1,294 @@
+# Copyright 2021-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: qt6-build.eclass
+# @MAINTAINER:
+# qt@gentoo.org
+# @SUPPORTED_EAPIS: 8
+# @PROVIDES: cmake
+# @BLURB: Eclass for Qt6 split ebuilds.
+# @DESCRIPTION:
+# This eclass contains various functions that are used when building Qt6.
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_QT6_BUILD_ECLASS} ]]; then
+_QT6_BUILD_ECLASS=1
+
+[[ ${CATEGORY} != dev-qt ]] &&
+ die "${ECLASS} is only to be used for building Qt6"
+
+inherit cmake flag-o-matic toolchain-funcs
+
+# @ECLASS_VARIABLE: QT6_BUILD_TYPE
+# @DESCRIPTION:
+# Read only variable set based on PV to one of:
+# - release: official 6.x.x releases
+# - pre-release: development 6.x.x_rc/beta/alpha releases
+# - live: *.9999 (dev branch), 6.x.9999 (stable branch)
+
+# @ECLASS_VARIABLE: QT6_MODULE
+# @PRE_INHERIT
+# @DESCRIPTION:
+# The upstream name of the module this package belongs to.
+# Used for SRC_URI and EGIT_REPO_URI.
+: "${QT6_MODULE:=${PN}}"
+
+# @ECLASS_VARIABLE: QT6_RESTRICT_TESTS
+# @DEFAULT_UNSET
+# @PRE_INHERIT
+# @DESCRIPTION:
+# If set to a non-empty value, IUSE="test" will not be added and
+# RESTRICT="test" will be set instead. Primarily intended for ebuilds
+# whose tests are unmaintained (or missing) rather than merely
+# restricted temporarily, so that a broken USE=test flag is not
+# exposed (bug #930266).
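+#
+# Example (a sketch; being @PRE_INHERIT, it must be set before inherit):
+# @CODE
+# QT6_RESTRICT_TESTS=1
+# inherit qt6-build
+# @CODE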
+
+if [[ ${PV} == *.9999 ]]; then
+ inherit git-r3
+ EGIT_REPO_URI=(
+ "https://code.qt.io/qt/${QT6_MODULE}.git"
+ "https://github.com/qt/${QT6_MODULE}.git"
+ )
+
+ QT6_BUILD_TYPE=live
+ EGIT_BRANCH=dev
+ [[ ${PV} == 6.*.9999 ]] && EGIT_BRANCH=${PV%.9999}
+else
+ QT6_BUILD_TYPE=release
+ _QT6_SRC=official
+
+ if [[ ${PV} == *_@(alpha|beta|rc)* ]]; then
+ QT6_BUILD_TYPE=pre-release
+ _QT6_SRC=development
+ fi
+
+ _QT6_P=${QT6_MODULE}-everywhere-src-${PV/_/-}
+ SRC_URI="https://download.qt.io/${_QT6_SRC}_releases/qt/${PV%.*}/${PV/_/-}/submodules/${_QT6_P}.tar.xz"
+ S=${WORKDIR}/${_QT6_P}
+
+ unset _QT6_P _QT6_SRC
+fi
+readonly QT6_BUILD_TYPE
+
+HOMEPAGE="https://www.qt.io/"
+LICENSE="|| ( GPL-2 GPL-3 LGPL-3 ) FDL-1.3"
+SLOT=6/${PV%%_*}
+
+if [[ ${QT6_RESTRICT_TESTS} ]]; then
+ RESTRICT="test"
+else
+ IUSE="test"
+ RESTRICT="!test? ( test )"
+fi
+
+BDEPEND="
+ dev-lang/perl
+ virtual/pkgconfig
+"
+
+###### Phase functions ######
+
+# @FUNCTION: qt6-build_src_unpack
+# @DESCRIPTION:
+# Run git-r3_src_unpack if needed (live), then default to unpack
+# e.g. patchsets in live ebuilds.
+qt6-build_src_unpack() {
+ [[ ${QT6_BUILD_TYPE} == live ]] && git-r3_src_unpack
+
+ default
+}
+
+# @FUNCTION: qt6-build_src_prepare
+# @DESCRIPTION:
+# Run cmake_src_prepare, prepare the environment (such as set
+# QT6_PREFIX, QT6_LIBDIR, and others), and handle anything else
+# generic as needed.
+qt6-build_src_prepare() {
+ cmake_src_prepare
+
+ if [[ -e CMakeLists.txt ]]; then
+ # throw an error rather than skip if *required* conditions are not met
+ sed -e '/message(NOTICE.*Skipping/s/NOTICE/FATAL_ERROR/' \
+ -i CMakeLists.txt || die
+ fi
+
+ if in_iuse test && use test && [[ -e tests/auto/CMakeLists.txt ]]; then
+		# tests for .cmake files cause a self-dependency in many modules,
+		# and sometimes install additional test junk
+ sed -i '/add_subdirectory(cmake)/d' tests/auto/CMakeLists.txt || die
+ fi
+
+ _qt6-build_prepare_env
+ _qt6-build_match_cpu_flags
+
+	# LTO causes test failures in several components (e.g. qtcharts,
+ # multimedia, scxml, wayland, webchannel, ...).
+ #
+ # Exact extent/causes unknown, but for some related-sounding bugs:
+ # https://bugreports.qt.io/browse/QTBUG-112332
+ # https://bugreports.qt.io/browse/QTBUG-115731
+ #
+ # Does not manifest itself with clang:16 (did with gcc-13.2.0), but
+ # still assumed to be generally unsafe either way in current state.
+ in_iuse custom-cflags && use custom-cflags || filter-lto
+}
+
+# @FUNCTION: qt6-build_src_configure
+# @DESCRIPTION:
+# Run cmake_src_configure and handle anything else generic as needed.
+qt6-build_src_configure() {
+ if [[ ${PN} == qttranslations ]]; then
+ # does not compile anything, further options would be unrecognized
+ cmake_src_configure
+ return
+ fi
+
+ local defaultcmakeargs=(
+ # see _qt6-build_create_user_facing_links
+ -DINSTALL_PUBLICBINDIR="${QT6_PREFIX}"/bin
+ # note that if qtbase was built with tests, this is default ON
+ -DQT_BUILD_TESTS=$(in_iuse test && use test && echo ON || echo OFF)
+ # avoid appending -O2 after user's C(XX)FLAGS (bug #911822)
+ -DQT_USE_DEFAULT_CMAKE_OPTIMIZATION_FLAGS=ON
+ )
+
+ if [[ ${mycmakeargs@a} == *a* ]]; then
+ local mycmakeargs=("${defaultcmakeargs[@]}" "${mycmakeargs[@]}")
+ else
+ local mycmakeargs=("${defaultcmakeargs[@]}")
+ fi
+
+ cmake_src_configure
+}
+
+# @FUNCTION: qt6-build_src_test
+# @USAGE: [<cmake_src_test argument>...]
+# @DESCRIPTION:
+# Run cmake_src_test and handle anything else generic as-needed.
+qt6-build_src_test() {
+ local -x QML_IMPORT_PATH=${BUILD_DIR}${QT6_QMLDIR#"${QT6_PREFIX}"}
+ local -x QTEST_FUNCTION_TIMEOUT=900000 #914737
+ local -x QT_QPA_PLATFORM=offscreen
+
+ # TODO?: CMAKE_SKIP_TESTS skips a whole group of tests; when one
+ # only wants to skip a specific sub-test, the BLACKLIST files
+ # could potentially be modified instead by implementing a QT6_SKIP_TESTS
+
+ cmake_src_test "${@}"
+}
+
+# @FUNCTION: qt6-build_src_install
+# @DESCRIPTION:
+# Run cmake_src_install and handle anything else generic as needed.
+qt6-build_src_install() {
+ cmake_src_install
+
+ _qt6-build_create_user_facing_links
+
+ # hack: trim typical junk that primarily happens with tests and for
+ # which there is currently no known "proper" way to avoid it (e.g.
+ # qt5compat and qtsvg tests, but qtbase[gui,-test] currently does some too)
+ rm -rf -- "${D}${QT6_PREFIX}"/tests \
+ "${D}${QT6_LIBDIR}/objects-${CMAKE_BUILD_TYPE}" || die
+}
+
+###### Public helpers ######
+
+# @FUNCTION: qt_feature
+# @USAGE: <flag> [feature]
+# @DESCRIPTION:
+# <flag> is the name of a flag in IUSE, and [feature] is the name of the
+# Qt feature to control (defaults to <flag>). Outputs the corresponding
+# -DQT_FEATURE_<feature>=ON/OFF cmake argument based on the flag's state.
+qt_feature() {
+ [[ ${#} -ge 1 ]] || die "${FUNCNAME}() requires at least one argument"
+
+ echo "-DQT_FEATURE_${2:-${1}}=$(usex ${1} ON OFF)"
+}
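
For illustration, a minimal consumer sketch of qt_feature together with the
mycmakeargs handling above (the USE flags and feature names are hypothetical,
not taken from any particular ebuild):

  src_configure() {
      local mycmakeargs=(
          # forwards USE=dbus to -DQT_FEATURE_dbus=ON/OFF
          $(qt_feature dbus)
          # maps USE=ssl to the differently named "openssl" feature
          $(qt_feature ssl openssl)
      )
      qt6-build_src_configure
  }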
+
+###### Internal functions ######
+
+# @FUNCTION: _qt6-build_create_user_facing_links
+# @INTERNAL
+# @DESCRIPTION:
+# Create links for user facing tools (bug #863395) as suggested in:
+# https://doc.qt.io/qt-6/packaging-recommendations.html
+_qt6-build_create_user_facing_links() {
+ # user_facing_tool_links.txt is always created (except for qttranslations),
+ # even if it contains no links (empty); if it is missing, assume an error
+ [[ ${PN} == qttranslations ]] && return
+
+ # loop and match using paths (upstream suggests `xargs ln -s < ${links}`
+ # but, for what it is worth, that will fail if paths have spaces)
+ local link
+ while IFS= read -r link; do
+ if [[ -z ${link} ]]; then
+ continue
+ elif [[ ${link} =~ ^("${QT6_PREFIX}"/.+)\ ("${QT6_PREFIX}"/bin/.+) ]]
+ then
+ dosym -r "${BASH_REMATCH[1]#"${EPREFIX}"}" \
+ "${BASH_REMATCH[2]#"${EPREFIX}"}"
+ else
+ die "unrecognized line '${link}' in '${links}'"
+ fi
+ done < "${BUILD_DIR}"/user_facing_tool_links.txt || die
+}
+
+# @FUNCTION: _qt6-build_match_cpu_flags
+# @INTERNAL
+# @DESCRIPTION:
+# Try to adjust -m* cpu CXXFLAGS so that they match a configuration
+# accepted by Qt's headers, see bug #908420.
+_qt6-build_match_cpu_flags() {
+ use amd64 || use x86 || return 0
+
+ local flags=() intrin intrins
+ while IFS=' ' read -ra intrins; do
+ [[ ${intrins[*]} == *=[^_]* && ${intrins[*]} == *=_* ]] &&
+ for intrin in "${intrins[@]%=*}"; do
+ [[ ${intrin} ]] && flags+=( -mno-${intrin} )
+ done
+ done < <(
+ $(tc-getCXX) -E -P ${CXXFLAGS} ${CPPFLAGS} - <<-EOF | tail -n 2
+ avx2=__AVX2__ =__BMI__ =__BMI2__ =__F16C__ =__FMA__ =__LZCNT__ =__POPCNT__
+ avx512f=__AVX512F__ avx512bw=__AVX512BW__ avx512cd=__AVX512CD__ avx512dq=__AVX512DQ__ avx512vl=__AVX512VL__
+ EOF
+ assert
+ )
+
+ if (( ${#flags[@]} )); then
+ einfo "Adjusting CXXFLAGS for https://bugs.gentoo.org/908420 with: ${flags[*]}"
+ append-cxxflags "${flags[@]}"
+ fi
+}
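
As a rough illustration of the probe above (a sketch under assumed flags and
with toolchain-funcs' tc-getCXX available, as in the eclass itself; not real
build output): enabling AVX2 without the rest of its feature group yields a
"mixed" line, which is what triggers the matching -mno-* flag.

  # hypothetical user flags
  CXXFLAGS="-O2 -mavx2"
  echo 'avx2=__AVX2__ =__BMI__' | $(tc-getCXX) -E -P ${CXXFLAGS} -
  # prints "avx2=1 =__BMI__": one macro expanded, one did not, so the loop
  # above appends -mno-avx2 to keep CXXFLAGS consistent with Qt's headers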
+
+# @FUNCTION: _qt6-build_prepare_env
+# @INTERNAL
+# @DESCRIPTION:
+# Prepares the environment for building Qt.
+_qt6-build_prepare_env() {
+ # setup installation directories
+ # note: keep paths in sync with qmake-utils.eclass
+ readonly QT6_PREFIX=${EPREFIX}/usr
+ readonly QT6_DATADIR=${QT6_PREFIX}/share/qt6
+ readonly QT6_LIBDIR=${QT6_PREFIX}/$(get_libdir)
+
+ readonly QT6_ARCHDATADIR=${QT6_LIBDIR}/qt6
+
+ readonly QT6_BINDIR=${QT6_ARCHDATADIR}/bin
+ readonly QT6_DOCDIR=${QT6_PREFIX}/share/qt6-doc
+ readonly QT6_EXAMPLESDIR=${QT6_DATADIR}/examples
+ readonly QT6_HEADERDIR=${QT6_PREFIX}/include/qt6
+ readonly QT6_IMPORTDIR=${QT6_ARCHDATADIR}/imports
+ readonly QT6_LIBEXECDIR=${QT6_ARCHDATADIR}/libexec
+ readonly QT6_MKSPECSDIR=${QT6_ARCHDATADIR}/mkspecs
+ readonly QT6_PLUGINDIR=${QT6_ARCHDATADIR}/plugins
+ readonly QT6_QMLDIR=${QT6_ARCHDATADIR}/qml
+ readonly QT6_SYSCONFDIR=${EPREFIX}/etc/xdg
+ readonly QT6_TRANSLATIONDIR=${QT6_DATADIR}/translations
+}
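
For orientation, a sketch of the resulting layout on a typical amd64 system
(assuming get_libdir prints lib64 and EPREFIX is empty; other profiles differ):

  QT6_PREFIX=/usr
  QT6_LIBDIR=/usr/lib64
  QT6_ARCHDATADIR=/usr/lib64/qt6
  QT6_PLUGINDIR=/usr/lib64/qt6/plugins
  QT6_QMLDIR=/usr/lib64/qt6/qml
  QT6_HEADERDIR=/usr/include/qt6
  QT6_MKSPECSDIR=/usr/lib64/qt6/mkspecs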
+
+fi
+
+EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_test src_install
diff --git a/eclass/readme.gentoo-r1.eclass b/eclass/readme.gentoo-r1.eclass
index baf4b57bef76..202ba31f4f70 100644
--- a/eclass/readme.gentoo-r1.eclass
+++ b/eclass/readme.gentoo-r1.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: readme.gentoo-r1.eclass
@@ -45,7 +45,7 @@ esac
# @ECLASS_VARIABLE: README_GENTOO_SUFFIX
# @DESCRIPTION:
# If you want to specify a suffix for README.gentoo file please export it.
-: ${README_GENTOO_SUFFIX:=""}
+: "${README_GENTOO_SUFFIX:=""}"
# @FUNCTION: readme.gentoo_create_doc
# @DESCRIPTION:
diff --git a/eclass/rebar.eclass b/eclass/rebar.eclass
index e7a64bb7a7e2..c1a3aca67b54 100644
--- a/eclass/rebar.eclass
+++ b/eclass/rebar.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: rebar.eclass
@@ -6,31 +6,26 @@
# maintainer-needed@gentoo.org
# @AUTHOR:
# Amadeusz Żołnowski <aidecoe@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: Build Erlang/OTP projects using dev-util/rebar.
# @DESCRIPTION:
# An eclass providing functions to build Erlang/OTP projects using
# dev-util/rebar.
#
# rebar is a tool which tries to resolve dependencies itself which is by
-# cloning remote git repositories. Dependant projects are usually expected to
+# cloning remote git repositories. Dependent projects are usually expected to
# be in sub-directory 'deps' rather than looking at system Erlang lib
# directory. Projects relying on rebar usually don't have 'install' make
# targets. The eclass workarounds some of these problems. It handles
# installation in a generic way for Erlang/OTP structured projects.
-case "${EAPI:-0}" in
- 0|1|2|3|4|5)
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- 6|7)
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_prepare src_compile src_test src_install
+if [[ -z ${_REBAR_ECLASS} ]]; then
+_REBAR_ECLASS=1
RDEPEND="dev-lang/erlang:="
DEPEND="${RDEPEND}"
@@ -39,7 +34,7 @@ BDEPEND="
>=sys-apps/gawk-4.1
"
if [[ ${EAPI} == 6 ]]; then
- DEPEND+="${BDEPEND}"
+ DEPEND+=" ${BDEPEND}"
fi
# @ECLASS_VARIABLE: REBAR_APP_SRC
@@ -68,7 +63,7 @@ _rebar_find_dep() {
local p
local result
- pushd "${EPREFIX%/}/$(get_erl_libs)" >/dev/null || return 1
+ pushd "${EPREFIX}/$(get_erl_libs)" >/dev/null || return 1
for p in ${pn} ${pn}-*; do
if [[ -d ${p} ]]; then
# Ensure there's at most one matching.
@@ -107,7 +102,7 @@ erebar() {
(( $# > 0 )) || die "erebar: at least one target is required"
- local -x ERL_LIBS="${EPREFIX%/}/$(get_erl_libs)"
+ local -x ERL_LIBS="${EPREFIX}/$(get_erl_libs)"
[[ ${1} == eunit ]] && local -x ERL_LIBS="."
rebar -v skip_deps=true "$@" || die -n "rebar $@ failed"
@@ -116,7 +111,7 @@ erebar() {
# @FUNCTION: rebar_fix_include_path
# @USAGE: <project_name> [<rebar_config>]
# @DESCRIPTION:
-# Fix path in rebar.config to 'include' directory of dependant project/package,
+# Fix path in rebar.config to 'include' directory of dependent project/package,
# so it points to installation in system Erlang lib rather than relative 'deps'
# directory.
#
@@ -128,7 +123,7 @@ rebar_fix_include_path() {
local pn="${1}"
local rebar_config="${2:-rebar.config}"
- local erl_libs="${EPREFIX%/}/$(get_erl_libs)"
+ local erl_libs="${EPREFIX}/$(get_erl_libs)"
local p
p="$(_rebar_find_dep "${pn}")" \
@@ -217,7 +212,7 @@ rebar_src_prepare() {
rebar_src_configure() {
debug-print-function ${FUNCNAME} "${@}"
- local -x ERL_LIBS="${EPREFIX%/}/$(get_erl_libs)"
+ local -x ERL_LIBS="${EPREFIX}/$(get_erl_libs)"
default
}
@@ -262,3 +257,7 @@ rebar_src_install() {
einstalldocs
}
+
+fi
+
+EXPORT_FUNCTIONS src_prepare src_compile src_test src_install
diff --git a/eclass/rocm.eclass b/eclass/rocm.eclass
new file mode 100644
index 000000000000..9804ecde97d0
--- /dev/null
+++ b/eclass/rocm.eclass
@@ -0,0 +1,238 @@
+# Copyright 2022-2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: rocm.eclass
+# @MAINTAINER:
+# Gentoo Science Project <sci@gentoo.org>
+# @AUTHOR:
+# Yiyang Wu <xgreenlandforwyy@gmail.com>
+# @SUPPORTED_EAPIS: 8
+# @BLURB: Common functions and variables for ROCm packages written in HIP
+# @DESCRIPTION:
+# ROCm packages such as sci-libs/<roc|hip>*, and packages built on top of ROCm
+# libraries, can utilize variables and functions provided by this eclass.
+# It handles the AMDGPU_TARGETS variable via USE_EXPAND, so users can
+# set USE flags to control which GPU architectures to compile for. Using
+# ${ROCM_USEDEP} ensures coherence among dependencies. Ebuilds can call the
+# function get_amdgpu_flags to translate the activated targets into GPU
+# compile flags and pass them to the configuration. The function check_amdgpu
+# can help the ebuild ensure read and write permissions to the GPU device in
+# the src_test phase, dying with a friendly error message if unavailable.
+#
+# @EXAMPLE:
+# Example ebuild for a ROCm library in https://github.com/ROCmSoftwarePlatform
+# which uses cmake to build and test, and depends on rocBLAS:
+# @CODE
+# ROCM_VERSION=${PV}
+# inherit cmake rocm
+# # ROCm libraries' SRC_URI is usually in the form of:
+# SRC_URI="https://github.com/ROCmSoftwarePlatform/${PN}/archive/rocm-${PV}.tar.gz -> ${P}.tar.gz"
+# S=${WORKDIR}/${PN}-rocm-${PV}
+# SLOT="0/$(ver_cut 1-2)"
+# IUSE="test"
+# REQUIRED_USE="${ROCM_REQUIRED_USE}"
+# RESTRICT="!test? ( test )"
+#
+# RDEPEND="
+# dev-util/hip
+# sci-libs/rocBLAS:${SLOT}[${ROCM_USEDEP}]
+# "
+#
+# src_configure() {
+# # avoid sandbox violation
+# addpredict /dev/kfd
+# addpredict /dev/dri/
+# local mycmakeargs=(
+# -DAMDGPU_TARGETS="$(get_amdgpu_flags)"
+# -DBUILD_CLIENTS_TESTS=$(usex test ON OFF)
+# )
+# CXX=hipcc cmake_src_configure
+# }
+#
+# src_test() {
+# check_amdgpu
+# # export LD_LIBRARY_PATH=<path to built lib dir> if necessary
+# cmake_src_test # for packages using the cmake test
+# # For packages using a standalone test binary rather than cmake test,
+# # just execute it (or using edob)
+# }
+# @CODE
+#
+# Example for a package that depends on ROCm libraries -- it depends on
+# rocBLAS, uses the comma-separated ${HCC_AMDGPU_TARGET} to determine GPU
+# architectures, and requires ROCm version >=5.1
+# @CODE
+# ROCM_VERSION=5.1
+# inherit rocm
+# IUSE="rocm"
+# REQUIRED_USE="rocm? ( ${ROCM_REQUIRED_USE} )"
+# DEPEND="rocm? ( >=dev-util/hip-${ROCM_VERSION}
+# >=sci-libs/rocBLAS-${ROCM_VERSION}[${ROCM_USEDEP}] )"
+#
+# src_configure() {
+# if use rocm; then
+# local amdgpu_flags=$(get_amdgpu_flags)
+# export HCC_AMDGPU_TARGET=${amdgpu_flags//;/,}
+# fi
+# default
+# }
+# src_test() {
+# use rocm && check_amdgpu
+# default
+# }
+# @CODE
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ ! ${_ROCM_ECLASS} ]]; then
+_ROCM_ECLASS=1
+
+# @ECLASS_VARIABLE: ROCM_VERSION
+# @REQUIRED
+# @PRE_INHERIT
+# @DESCRIPTION:
+# The ROCm version of the current package. For ROCm libraries, it should be
+# ${PV}; for other packages that depend on ROCm libraries, this can be set to
+# the ROCm version they require.
+
+# @ECLASS_VARIABLE: ROCM_REQUIRED_USE
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# Requires at least one AMDGPU target to be compiled.
+# Example use for ROCm libraries:
+# @CODE
+# REQUIRED_USE="${ROCM_REQUIRED_USE}"
+# @CODE
+# Example use for packages that depend on ROCm libraries:
+# @CODE
+# IUSE="rocm"
+# REQUIRED_USE="rocm? ( ${ROCM_REQUIRED_USE} )"
+# @CODE
+
+# @ECLASS_VARIABLE: ROCM_USEDEP
+# @OUTPUT_VARIABLE
+# @DESCRIPTION:
+# This is an eclass-generated USE-dependency string which can be used to
+# depend on another ROCm package being built for the same AMDGPU architecture.
+#
+# The generated USE-flag list is compatible with packages using rocm.eclass.
+#
+# Example use:
+# @CODE
+# DEPEND="sci-libs/rocBLAS[${ROCM_USEDEP}]"
+# @CODE
+
+# @ECLASS_VARIABLE: ROCM_SKIP_GLOBALS
+# @DESCRIPTION:
+# Controls whether _rocm_set_globals() is executed. This variable is for
+# ebuilds that call check_amdgpu() without the need to define amdgpu_targets_*
+# USE-flags, such as dev-util/hip and dev-libs/rocm-opencl-runtime.
+#
+# Example use:
+# @CODE
+# ROCM_SKIP_GLOBALS=1
+# inherit rocm
+# @CODE
+
+# @FUNCTION: _rocm_set_globals
+# @DESCRIPTION:
+# Set global variables useful to ebuilds: IUSE, ROCM_REQUIRED_USE, and
+# ROCM_USEDEP, unless ROCM_SKIP_GLOBALS is set.
+
+_rocm_set_globals() {
+ [[ -n ${ROCM_SKIP_GLOBALS} ]] && return
+
+ # Two lists of AMDGPU_TARGETS for a given ROCm version. Official support
+ # matrix:
+ # https://docs.amd.com/bundle/ROCm-Installation-Guide-v${ROCM_VERSION}/page/Prerequisite_Actions.html.
+ # There is no well-known unofficial support matrix, but
+ # https://github.com/Bengt/ROCm/blob/patch-2/README.md#library-target-matrix
+ # may help. Gentoo has patches to enable gfx1031 as well.
+ local unofficial_amdgpu_targets official_amdgpu_targets
+ case ${ROCM_VERSION} in
+ 5.[0-3].*)
+ unofficial_amdgpu_targets=(
+ gfx803 gfx900 gfx1010 gfx1011 gfx1012 gfx1031
+ )
+ official_amdgpu_targets=(
+ gfx906 gfx908 gfx90a gfx1030
+ )
+ ;;
+ 5.*|9999)
+ unofficial_amdgpu_targets=(
+ gfx803 gfx900 gfx1010 gfx1011 gfx1012
+ gfx1031 gfx1100 gfx1101 gfx1102
+ )
+ official_amdgpu_targets=(
+ gfx906 gfx908 gfx90a gfx1030
+ )
+ ;;
+ *)
+ die "Unknown ROCm major version! Please update rocm.eclass before bumping to new ebuilds"
+ ;;
+ esac
+
+ local iuse_flags=(
+ "${official_amdgpu_targets[@]/#/+amdgpu_targets_}"
+ "${unofficial_amdgpu_targets[@]/#/amdgpu_targets_}"
+ )
+ IUSE="${iuse_flags[*]}"
+
+ local all_amdgpu_targets=(
+ "${official_amdgpu_targets[@]}"
+ "${unofficial_amdgpu_targets[@]}"
+ )
+ local allflags=( "${all_amdgpu_targets[@]/#/amdgpu_targets_}" )
+ ROCM_REQUIRED_USE=" || ( ${allflags[*]} )"
+
+ local optflags=${allflags[@]/%/(-)?}
+ ROCM_USEDEP=${optflags// /,}
+}
+_rocm_set_globals
+unset -f _rocm_set_globals
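
As a sketch of what these globals expand to (abbreviated, assuming a
hypothetical ROCM_VERSION=5.4.x; the exact target list follows the case
statement above):

  # ROCM_REQUIRED_USE is roughly:
  #   || ( amdgpu_targets_gfx906 amdgpu_targets_gfx908 ... amdgpu_targets_gfx1102 )
  # ROCM_USEDEP is roughly:
  #   amdgpu_targets_gfx906(-)?,amdgpu_targets_gfx908(-)?,...,amdgpu_targets_gfx1102(-)?
  DEPEND="sci-libs/rocBLAS:${SLOT}[${ROCM_USEDEP}]"
  REQUIRED_USE="${ROCM_REQUIRED_USE}"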
+
+# @FUNCTION: get_amdgpu_flags
+# @USAGE: get_amdgpu_flags
+# @DESCRIPTION:
+# Convert the activated amdgpu_targets USE flags into compilation flags,
+# appending the default target feature to each GPU arch. See
+# https://llvm.org/docs/AMDGPUUsage.html#target-features
+get_amdgpu_flags() {
+ local amdgpu_target_flags
+ for gpu_target in ${AMDGPU_TARGETS}; do
+ local target_feature=
+ case ${gpu_target} in
+ gfx906|gfx908)
+ target_feature=:xnack-
+ ;;
+ gfx90a)
+ target_feature=:xnack+
+ ;;
+ *)
+ ;;
+ esac
+ amdgpu_target_flags+="${gpu_target}${target_feature};"
+ done
+ echo "${amdgpu_target_flags}"
+}
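
For example, a sketch with a hypothetical target selection: with
AMDGPU_TARGETS="gfx906 gfx90a gfx1030" (i.e. the matching amdgpu_targets_*
USE flags enabled), the function prints "gfx906:xnack-;gfx90a:xnack+;gfx1030;".

  src_configure() {
      local mycmakeargs=( -DAMDGPU_TARGETS="$(get_amdgpu_flags)" )
      cmake_src_configure
  }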
+
+# @FUNCTION: check_amdgpu
+# @USAGE: check_amdgpu
+# @DESCRIPTION:
+# Grant and check read-write permissions on AMDGPU devices; die if they are not available.
+check_amdgpu() {
+ for device in /dev/kfd /dev/dri/render*; do
+ addwrite ${device}
+ if [[ ! -r ${device} || ! -w ${device} ]]; then
+ eerror "Cannot read or write ${device}!"
+ eerror "Make sure it is present and check the permission."
+ ewarn "By default render group have access to it. Check if portage user is in render group."
+ die "${device} inaccessible"
+ fi
+ done
+}
+
+fi
diff --git a/eclass/ros-catkin.eclass b/eclass/ros-catkin.eclass
deleted file mode 100644
index feded640abfb..000000000000
--- a/eclass/ros-catkin.eclass
+++ /dev/null
@@ -1,229 +0,0 @@
-# Copyright 1999-2021 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-# @ECLASS: ros-catkin.eclass
-# @MAINTAINER:
-# ros@gentoo.org
-# @AUTHOR:
-# Alexis Ballier <aballier@gentoo.org>
-# @SUPPORTED_EAPIS: 7
-# @PROVIDES: cmake python-single-r1
-# @BLURB: Template eclass for catkin based ROS packages.
-# @DESCRIPTION:
-# Provides function for building ROS packages on Gentoo.
-# It supports selectively building messages, single-python installation, live ebuilds (git only).
-
-case "${EAPI:-0}" in
- 7) ;;
- *) die "EAPI='${EAPI}' is not supported" ;;
-esac
-
-# @ECLASS_VARIABLE: ROS_REPO_URI
-# @DESCRIPTION:
-# URL of the upstream repository. Usually on github.
-# Serves for fetching tarballs, live ebuilds and inferring the meta-package name.
-EGIT_REPO_URI="${ROS_REPO_URI}"
-
-# @ECLASS_VARIABLE: ROS_SUBDIR
-# @DEFAULT_UNSET
-# @DESCRIPTION:
-# Subdir in which current packages is located.
-# Usually, a repository contains several packages, hence a typical value is:
-# ROS_SUBDIR=${PN}
-
-# @ECLASS_VARIABLE: CATKIN_IN_SOURCE_BUILD
-# @DEFAULT_UNSET
-# @DESCRIPTION:
-# Set to enable in-source build.
-
-SCM=""
-if [ "${PV#9999}" != "${PV}" ] ; then
- SCM="git-r3"
-fi
-
-# ROS only really works with one global python version and the target
-# version depends on the release. Noetic targets 3.7 and 3.8.
-# py3.9 or later are ok to add there as long as dev-ros/* have their deps satisfied.
-PYTHON_COMPAT=( python3_{8..10} )
-
-inherit ${SCM} python-single-r1 cmake flag-o-matic
-
-REQUIRED_USE="${PYTHON_REQUIRED_USE}"
-
-IUSE="test"
-RESTRICT="!test? ( test )"
-RDEPEND="${PYTHON_DEPS}"
-DEPEND="${RDEPEND}
- $(python_gen_cond_dep "dev-util/catkin[\${PYTHON_USEDEP}]")
- $(python_gen_cond_dep "dev-python/empy[\${PYTHON_USEDEP}]")
-"
-
-# @ECLASS_VARIABLE: CATKIN_HAS_MESSAGES
-# @PRE_INHERIT
-# @DESCRIPTION:
-# Set it to a non-empty value before inherit to tell the eclass the package has messages to build.
-# Messages will be built based on ROS_MESSAGES USE_EXPANDed variable.
-
-# @ECLASS_VARIABLE: CATKIN_MESSAGES_TRANSITIVE_DEPS
-# @PRE_INHERIT
-# @DESCRIPTION:
-# Some messages have dependencies on other messages.
-# In that case, CATKIN_MESSAGES_TRANSITIVE_DEPS should contain a space-separated list of atoms
-# representing those dependencies. The eclass uses it to ensure proper dependencies on these packages.
-if [ -n "${CATKIN_HAS_MESSAGES}" ] ; then
- IUSE="${IUSE} +ros_messages_python +ros_messages_cxx ros_messages_eus ros_messages_lisp ros_messages_nodejs"
- RDEPEND="${RDEPEND}
- ros_messages_cxx? ( dev-ros/gencpp:=[${PYTHON_SINGLE_USEDEP}] )
- ros_messages_eus? ( dev-ros/geneus:=[${PYTHON_SINGLE_USEDEP}] )
- ros_messages_python? ( dev-ros/genpy:=[${PYTHON_SINGLE_USEDEP}] )
- ros_messages_lisp? ( dev-ros/genlisp:=[${PYTHON_SINGLE_USEDEP}] )
- ros_messages_nodejs? ( dev-ros/gennodejs:=[${PYTHON_SINGLE_USEDEP}] )
- dev-ros/message_runtime
- "
- DEPEND="${DEPEND} ${RDEPEND}
- dev-ros/message_generation
- dev-ros/genmsg[${PYTHON_SINGLE_USEDEP}]
- "
- if [ -n "${CATKIN_MESSAGES_TRANSITIVE_DEPS}" ] ; then
- for i in ${CATKIN_MESSAGES_TRANSITIVE_DEPS} ; do
- ds="${i}[ros_messages_python(-)?,ros_messages_cxx(-)?,ros_messages_lisp(-)?,ros_messages_eus(-)?,ros_messages_nodejs(-)?] ros_messages_python? ( ${i}[${PYTHON_SINGLE_USEDEP}] )"
- RDEPEND="${RDEPEND} ${ds}"
- DEPEND="${DEPEND} ${ds}"
- done
- fi
-fi
-
-# @ECLASS_VARIABLE: CATKIN_MESSAGES_CXX_USEDEP
-# @DESCRIPTION:
-# Use it as cat/pkg[${CATKIN_MESSAGES_CXX_USEDEP}] to indicate a dependency on the C++ messages of cat/pkg.
-CATKIN_MESSAGES_CXX_USEDEP="ros_messages_cxx(-)"
-
-# @ECLASS_VARIABLE: CATKIN_MESSAGES_PYTHON_USEDEP
-# @DESCRIPTION:
-# Use it as cat/pkg[${CATKIN_MESSAGES_PYTHON_USEDEP}] to indicate a dependency on the Python messages of cat/pkg.
-CATKIN_MESSAGES_PYTHON_USEDEP="ros_messages_python(-),${PYTHON_SINGLE_USEDEP}"
-
-# @ECLASS_VARIABLE: CATKIN_MESSAGES_LISP_USEDEP
-# @DESCRIPTION:
-# Use it as cat/pkg[${CATKIN_MESSAGES_LISP_USEDEP}] to indicate a dependency on the Common-Lisp messages of cat/pkg.
-CATKIN_MESSAGES_LISP_USEDEP="ros_messages_lisp(-)"
-
-# @ECLASS_VARIABLE: CATKIN_MESSAGES_EUS_USEDEP
-# @DESCRIPTION:
-# Use it as cat/pkg[${CATKIN_MESSAGES_EUS_USEDEP}] to indicate a dependency on the EusLisp messages of cat/pkg.
-CATKIN_MESSAGES_EUS_USEDEP="ros_messages_eus(-)"
-
-# @ECLASS_VARIABLE: CATKIN_MESSAGES_NODEJS_USEDEP
-# @DESCRIPTION:
-# Use it as cat/pkg[${CATKIN_MESSAGES_NODEJS_USEDEP}] to indicate a dependency on the nodejs messages of cat/pkg.
-CATKIN_MESSAGES_NODEJS_USEDEP="ros_messages_nodejs(-)"
-
-if [ "${PV#9999}" != "${PV}" ] ; then
- SRC_URI=""
- KEYWORDS=""
- S=${WORKDIR}/${P}/${ROS_SUBDIR}
-else
- SRC_URI="${ROS_REPO_URI}/archive/${VER_PREFIX}${PV%_*}${VER_SUFFIX}.tar.gz -> ${ROS_REPO_URI##*/}-${PV}.tar.gz"
- S=${WORKDIR}/${VER_PREFIX}${ROS_REPO_URI##*/}-${PV}${VER_SUFFIX}/${ROS_SUBDIR}
-fi
-
-HOMEPAGE="https://wiki.ros.org/${PN} ${ROS_REPO_URI}"
-
-# @FUNCTION: ros-catkin_src_prepare
-# @DESCRIPTION:
-# Calls cmake_src_prepare (so that PATCHES array is handled there) and initialises the workspace
-# by installing a recursive CMakeLists.txt to handle bundles.
-ros-catkin_src_prepare() {
- # If no multibuild, just use cmake IN_SOURCE support
- [ -n "${CATKIN_IN_SOURCE_BUILD}" ] && export CMAKE_IN_SOURCE_BUILD=yes
-
- cmake_src_prepare
-
- if [ ! -f "${S}/CMakeLists.txt" ] ; then
- catkin_init_workspace || die
- fi
-
- # Most packages require C++11 these days. Do it here, in src_prepare so that
- # ebuilds can override it in src_configure.
- append-cxxflags '-std=c++14'
-}
-
-# @VARIABLE: mycatkincmakeargs
-# @DEFAULT_UNSET
-# @DESCRIPTION:
-# Optional cmake defines as a bash array. Should be defined before calling
-# src_configure.
-
-# @FUNCTION: ros-catkin_src_configure
-# @DESCRIPTION:
-# Configures a catkin-based package.
-ros-catkin_src_configure() {
- export CATKIN_PREFIX_PATH="${EPREFIX}/usr"
- export ROS_ROOT="${EPREFIX}/usr/share/ros"
- export ROS_PYTHON_VERSION="${EPYTHON#python}"
-
- if [ -n "${CATKIN_HAS_MESSAGES}" ] ; then
- ROS_LANG_DISABLE=""
- use ros_messages_cxx || ROS_LANG_DISABLE="${ROS_LANG_DISABLE}:gencpp"
- use ros_messages_eus || ROS_LANG_DISABLE="${ROS_LANG_DISABLE}:geneus"
- use ros_messages_lisp || ROS_LANG_DISABLE="${ROS_LANG_DISABLE}:genlisp"
- use ros_messages_python || ROS_LANG_DISABLE="${ROS_LANG_DISABLE}:genpy"
- use ros_messages_nodejs || ROS_LANG_DISABLE="${ROS_LANG_DISABLE}:gennodejs"
- export ROS_LANG_DISABLE
- fi
-
- local mycmakeargs=(
- "-DCATKIN_ENABLE_TESTING=$(usex test)"
- "-DCATKIN_BUILD_BINARY_PACKAGE=ON"
- "-DCATKIN_PREFIX_PATH=${SYSROOT:-${EPREFIX}}/usr"
- "${mycatkincmakeargs[@]}"
- )
-
- local sitedir="$(python_get_sitedir)"
- mycmakeargs+=(
- -DPYTHON_EXECUTABLE="${PYTHON}"
- -DPYTHON_INSTALL_DIR="${sitedir#${EPREFIX}/usr/}"
- )
- if [ -n "${CATKIN_IN_SOURCE_BUILD}" ] ; then
- export CMAKE_USE_DIR="${BUILD_DIR}"
- fi
-
- cmake_src_configure "${@}"
-}
-
-# @FUNCTION: ros-catkin_src_compile
-# @DESCRIPTION:
-# Builds a catkin-based package.
-ros-catkin_src_compile() {
- cmake_src_compile "${@}"
-}
-
-# @FUNCTION: ros-catkin_src_test
-# @DESCRIPTION:
-# Run the tests of a catkin-based package.
-ros-catkin_src_test() {
- cd "${BUILD_DIR}" || die
-
- # Regenerate env for tests, PYTHONPATH is not set properly otherwise...
- if [ -f catkin_generated/generate_cached_setup.py ] ; then
- einfo "Regenerating setup_cached.sh for tests"
- ${PYTHON:-python} catkin_generated/generate_cached_setup.py || die
- fi
-
- nonfatal cmake_build tests
- cmake_src_test "${@}"
-}
-
-# @FUNCTION: ros-catkin_src_install
-# @DESCRIPTION:
-# Installs a catkin-based package.
-ros-catkin_src_install() {
- if [ -n "${CATKIN_IN_SOURCE_BUILD}" ] ; then
- export CMAKE_USE_DIR="${BUILD_DIR}"
- fi
-
- cmake_src_install "${@}"
- python_optimize
-}
-
-EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
diff --git a/eclass/rpm.eclass b/eclass/rpm.eclass
index c97e23144c2b..c9c21aa6ebbd 100644
--- a/eclass/rpm.eclass
+++ b/eclass/rpm.eclass
@@ -1,16 +1,14 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: rpm.eclass
# @MAINTAINER:
# base-system@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: convenience class for extracting RPMs
case ${EAPI} in
- 5|6) inherit epatch eutils ;; # eutils for eqawarn
- 7) inherit eutils ;; # not needed, but ebuilds may still rely on it
- 8) ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -19,10 +17,7 @@ _RPM_ECLASS=1
inherit estack
-case ${EAPI} in
- 5|6) DEPEND="app-arch/rpm2targz" ;;
- *) BDEPEND="app-arch/rpm2targz" ;;
-esac
+BDEPEND="app-arch/rpm2targz"
# @FUNCTION: rpm_unpack
# @USAGE: <rpms>
@@ -89,59 +84,6 @@ rpm_src_unpack() {
done
}
-# @FUNCTION: rpm_spec_epatch
-# @USAGE: [spec]
-# @DEPRECATED: none
-# @DESCRIPTION:
-# Read the specified spec (defaults to ${PN}.spec) and attempt to apply
-# all the patches listed in it. If the spec does funky things like moving
-# files around, well this won't handle that.
-rpm_spec_epatch() {
- # no epatch in EAPI 7 and later
- [[ ${EAPI} == [56] ]] || die "${FUNCNAME} is banned in EAPI ${EAPI}"
-
- local p spec=$1
- local dir
-
- if [[ -z ${spec} ]] ; then
- # search likely places for the spec file
- for spec in "${PWD}" "${S}" "${WORKDIR}" ; do
- spec+="/${PN}.spec"
- [[ -e ${spec} ]] && break
- done
- fi
- [[ ${spec} == */* ]] \
- && dir=${spec%/*} \
- || dir=
-
- ebegin "Applying patches from ${spec}"
-
- grep '^%patch' "${spec}" | \
- while read line ; do
- # expand the %patch line
- set -- ${line}
- p=$1
- shift
-
- # process the %patch arguments
- local arg
- EPATCH_OPTS=
- for arg in "$@" ; do
- case ${arg} in
- -b) EPATCH_OPTS+=" --suffix" ;;
- *) EPATCH_OPTS+=" ${arg}" ;;
- esac
- done
-
- # extract the patch name from the Patch# line
- set -- $(grep "^P${p#%p}: " "${spec}")
- shift
- epatch "${dir:+${dir}/}$*"
- done
-
- eend
-}
-
fi
EXPORT_FUNCTIONS src_unpack
diff --git a/eclass/ruby-fakegem.eclass b/eclass/ruby-fakegem.eclass
index 6f561f4f6a2f..40ff76ce900e 100644
--- a/eclass/ruby-fakegem.eclass
+++ b/eclass/ruby-fakegem.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ruby-fakegem.eclass
@@ -44,6 +44,7 @@ RUBY_FAKEGEM_TASK_DOC="${RUBY_FAKEGEM_TASK_DOC-rdoc}"
# - rspec3 (calls ruby-ng_rspec, adds dev-ruby/rspec:3 to the dependencies)
# - cucumber (calls ruby-ng_cucumber, adds dev-util/cucumber to the
# dependencies)
+# - sus (calls ruby-ng_sus, adds dev-ruby/sus to the dependencies)
# - none
RUBY_FAKEGEM_RECIPE_TEST="${RUBY_FAKEGEM_RECIPE_TEST-rake}"
@@ -193,7 +194,15 @@ case ${RUBY_FAKEGEM_RECIPE_TEST} in
RESTRICT+=" !test? ( test )"
ruby_add_bdepend "test? ( dev-util/cucumber )"
;;
+ sus)
+ IUSE+=" test"
+ RESTRICT+=" !test? ( test )"
+ ruby_add_bdepend "test? ( dev-ruby/sus )"
+ ;;
+ none)
+ ;;
*)
+ eqawarn "${CATEGORY}/${PF}: Unknown test recipe '${RUBY_FAKEGEM_RECIPE_TEST}' specified, using 'none'"
RUBY_FAKEGEM_RECIPE_TEST="none"
;;
esac
@@ -391,10 +400,10 @@ ruby_fakegem_binwrapper() {
# in the shebang, and we can actually avoid errors when
# calling the script by default.
local rubycmd=
- for implementation in $(_ruby_get_all_impls); do
+ for implementation in "${_RUBY_GET_ALL_IMPLS[@]}"; do
# ignore non-enabled implementations
use ruby_targets_${implementation} || continue
- if [ -z $rubycmd ]; then
+ if [[ -z ${rubycmd} ]]; then
# if no other implementation was set before, set it.
rubycmd="$(ruby_implementation_command ${implementation})"
else
@@ -432,7 +441,7 @@ each_fakegem_configure() {
tc-export PKG_CONFIG
for extension in "${RUBY_FAKEGEM_EXTENSIONS[@]}" ; do
- CC=$(tc-getCC) ${RUBY} --disable=did_you_mean -C ${extension%/*} ${extension##*/} --with-cflags="${CFLAGS}" --with-ldflags="${LDFLAGS}" ${RUBY_FAKEGM_EXTENSION_OPTIONS} || die
+ CC=$(tc-getCC) ${RUBY} --disable=did_you_mean -C ${extension%/*} ${extension##*/} --with-cflags="${CFLAGS}" --with-ldflags="${LDFLAGS}" ${RUBY_FAKEGEM_EXTENSION_OPTIONS} || die
done
}
@@ -448,7 +457,7 @@ each_ruby_configure() {
# @FUNCTION: all_fakegem_compile
# @DESCRIPTION:
# Build documentation for the package if indicated by the doc USE flag
-# and if there is a documetation task defined.
+# and if there is a documentation task defined.
all_fakegem_compile() {
debug-print-function ${FUNCNAME} "${@}"
@@ -552,7 +561,7 @@ each_fakegem_test() {
case ${RUBY_FAKEGEM_RECIPE_TEST} in
rake)
- ${RUBY} --disable=did_you_mean -S rake ${RUBY_FAKEGEM_TASK_TEST} || die "tests failed"
+ MT_NO_PLUGINS=true ${RUBY} --disable=did_you_mean -S rake ${RUBY_FAKEGEM_TASK_TEST} || die -n "tests failed"
;;
rspec)
RSPEC_VERSION=2 ruby-ng_rspec
@@ -563,6 +572,9 @@ each_fakegem_test() {
cucumber)
ruby-ng_cucumber
;;
+ sus)
+ ruby-ng_sus
+ ;;
none)
ewarn "each_fakegem_test called, but \${RUBY_FAKEGEM_RECIPE_TEST} is 'none'"
;;
@@ -573,9 +585,12 @@ each_fakegem_test() {
# @DESCRIPTION:
# Run the tests for this package.
if [[ ${RUBY_FAKEGEM_RECIPE_TEST} != none ]]; then
- each_ruby_test() {
- each_fakegem_test
- }
+ # Avoid autoloading all minitest plugins present in any gem.
+ export MT_NO_PLUGINS=true
+
+ each_ruby_test() {
+ each_fakegem_test
+ }
fi
# @FUNCTION: ruby_fakegem_extensions_installed
diff --git a/eclass/ruby-ng-gnome2.eclass b/eclass/ruby-ng-gnome2.eclass
index 93ab89263593..e6636811aacc 100644
--- a/eclass/ruby-ng-gnome2.eclass
+++ b/eclass/ruby-ng-gnome2.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ruby-ng-gnome2.eclass
@@ -6,21 +6,21 @@
# Ruby herd <ruby@gentoo.org>
# @AUTHOR:
# Author: Hans de Graaff <graaff@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7
# @PROVIDES: ruby-ng
# @BLURB: An eclass to simplify handling of various ruby-gnome2 parts.
# @DESCRIPTION:
# This eclass simplifies installation of the various pieces of
# ruby-gnome2 since they share a very common installation procedure.
-case "${EAPI:-0}" in
- 6) inherit eapi7-ver ;;
- 7) ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ ! ${_RUBY_NG_GNOME2_ECLASS} ]]; then
+_RUBY_NG_GNOME2_ECLASS=1
+
RUBY_FAKEGEM_NAME="${RUBY_FAKEGEM_NAME:-${PN#ruby-}}"
RUBY_FAKEGEM_TASK_TEST=""
RUBY_FAKEGEM_TASK_DOC=""
@@ -30,7 +30,7 @@ RUBY_FAKEGEM_TASK_DOC=""
# @DESCRIPTION:
# If set to 'yes', the test is run with virtx. Set before inheriting this
# eclass.
-: ${RUBY_GNOME2_NEED_VIRTX:="no"}
+: "${RUBY_GNOME2_NEED_VIRTX:="no"}"
inherit ruby-fakegem
if [[ ${RUBY_GNOME2_NEED_VIRTX} == yes ]]; then
@@ -40,12 +40,12 @@ fi
IUSE="test"
RESTRICT+=" !test? ( test )"
-DEPEND="virtual/pkgconfig"
+BDEPEND="virtual/pkgconfig"
ruby_add_bdepend "
dev-ruby/pkg-config
test? ( >=dev-ruby/test-unit-2 )"
SRC_URI="mirror://sourceforge/ruby-gnome2/ruby-gnome2-all-${PV}.tar.gz"
-HOMEPAGE="https://ruby-gnome2.osdn.jp/"
+HOMEPAGE="https://ruby-gnome.github.io/"
LICENSE="LGPL-2.1+"
SLOT="0"
if ver_test -ge "3.4.0"; then
@@ -118,7 +118,7 @@ each_ruby_install() {
if [[ -e Makefile ]]; then
# Create the directories, or the package will create them as files.
local archdir=$(ruby_rbconfig_value "sitearchdir")
- dodir ${archdir#${EPREFIX}} /usr/$(get_libdir)/pkgconfig
+ dodir "${archdir#${EPREFIX}}" /usr/$(get_libdir)/pkgconfig
emake DESTDIR="${D}" install
fi
@@ -157,3 +157,5 @@ each_ruby_test() {
${RUBY} test/run-test.rb || die
fi
}
+
+fi
diff --git a/eclass/ruby-ng.eclass b/eclass/ruby-ng.eclass
index f0d6c4f6f6c4..d80ae96dd40b 100644
--- a/eclass/ruby-ng.eclass
+++ b/eclass/ruby-ng.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ruby-ng.eclass
@@ -8,7 +8,7 @@
# Author: Diego E. Pettenò <flameeyes@gentoo.org>
# Author: Alex Legler <a3li@gentoo.org>
# Author: Hans de Graaff <graaff@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: An eclass for installing Ruby packages with proper support for multiple Ruby slots.
# @DESCRIPTION:
# The Ruby eclass is designed to allow an easier installation of Ruby packages
@@ -67,29 +67,19 @@
# passed to "grep -E" to remove reporting of these shared objects.
case ${EAPI} in
- 5)
- inherit eutils toolchain-funcs
- ;;
- 6)
- inherit estack toolchain-funcs
- ;;
- *)
- inherit estack
- ;;
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-inherit multilib ruby-utils
+if [[ -z ${_RUBY_NG_ECLASS} ]]; then
+_RUBY_NG_ECLASS=1
-EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_test src_install pkg_setup
+[[ ${EAPI} == 6 ]] && inherit eqawarn toolchain-funcs
+inherit multilib ruby-utils
# S is no longer automatically assigned when it doesn't exist.
S="${WORKDIR}"
-case ${EAPI} in
- 5|6|7|8) ;;
- *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
-esac
-
# @FUNCTION: ruby_implementation_depend
# @USAGE: target [comparator [version]]
# @RETURN: Package atom of a Ruby implementation to be used in dependencies.
@@ -113,16 +103,26 @@ ruby_implementation_depend() {
# Return a list of valid implementations in USE_RUBY, skipping the old
# implementations that are no longer supported.
_ruby_get_all_impls() {
- local i
+ _RUBY_GET_ALL_IMPLS=()
+
+ # XXX: Please update _ruby_get_use_targets if adding a non-'ruby*'
+ # target.
+ local i found_valid_impl
for i in ${USE_RUBY}; do
case ${i} in
# removed implementations
- ruby19|ruby20|ruby21|ruby22|ruby23|ruby24|ruby25|jruby)
+ ruby19|ruby2[0-7]|ruby30|jruby)
;;
*)
- echo ${i};;
+ found_valid_impl=1
+ _RUBY_GET_ALL_IMPLS+=( ${i} )
+ ;;
esac
done
+
+ if [[ -z ${found_valid_impl} ]] ; then
+ die "No supported implementation in USE_RUBY."
+ fi
}
# @FUNCTION: ruby_samelib
@@ -135,8 +135,10 @@ _ruby_get_all_impls() {
ruby_samelib() {
debug-print-function ${FUNCNAME} "${@}"
+ _ruby_set_globals_invalidate_if_stale
+
local res=
- for _ruby_implementation in $(_ruby_get_all_impls); do
+ for _ruby_implementation in "${_RUBY_GET_ALL_IMPLS[@]}"; do
has -${_ruby_implementation} $@ || \
res="${res}ruby_targets_${_ruby_implementation}(-)?,"
done
@@ -144,23 +146,6 @@ ruby_samelib() {
echo "[${res%,}]"
}
-_ruby_atoms_samelib_generic() {
- eshopts_push -o noglob
- echo "RUBYTARGET? ("
- for token in $*; do
- case "$token" in
- "||" | "(" | ")" | *"?")
- echo "${token}" ;;
- *])
- echo "${token%[*}[RUBYTARGET(-),${token/*[}" ;;
- *)
- echo "${token}[RUBYTARGET(-)]" ;;
- esac
- done
- echo ")"
- eshopts_pop
-}
-
# @FUNCTION: ruby_implementation_command
# @RETURN: the path to the given ruby implementation
# @DESCRIPTION:
@@ -176,11 +161,31 @@ ruby_implementation_command() {
echo $(type -p ${_ruby_name} 2>/dev/null)
}
+_RUBY_ATOMS_SAMELIB_RESULT=""
_ruby_atoms_samelib() {
- local atoms=$(_ruby_atoms_samelib_generic "$*")
+ _RUBY_ATOMS_SAMELIB_RESULT=""
+
+ local shopt_save=$(shopt -p -o noglob)
+ set -f
+ local token
+ local atoms=" RUBYTARGET? ("
+ for token in $*; do
+ case "${token}" in
+ "||" | "(" | ")" | *"?")
+ atoms+=" ${token}" ;;
+ *])
+ atoms+=" ${token%[*}[RUBYTARGET(-),${token/*[}" ;;
+ *)
+ atoms+=" ${token}[RUBYTARGET(-)]" ;;
+ esac
+ done
+ atoms+=" ) "
+ ${shopt_save}
- for _ruby_implementation in $(_ruby_get_all_impls); do
- echo "${atoms//RUBYTARGET/ruby_targets_${_ruby_implementation}}"
+ _ruby_set_globals_invalidate_if_stale
+ local _ruby_implementation
+ for _ruby_implementation in "${_RUBY_GET_ALL_IMPLS[@]}"; do
+ _RUBY_ATOMS_SAMELIB_RESULT+="${atoms//RUBYTARGET/ruby_targets_${_ruby_implementation}}"
done
}
@@ -188,11 +193,11 @@ _ruby_wrap_conditions() {
local conditions="$1"
local atoms="$2"
- for condition in $conditions; do
+ for condition in ${conditions}; do
atoms="${condition}? ( ${atoms} )"
done
- echo "$atoms"
+ echo "${atoms}"
}
# @FUNCTION: ruby_add_rdepend
@@ -214,7 +219,7 @@ ruby_add_rdepend() {
1) ;;
2)
case ${EAPI} in
- 5|6)
+ 6)
[[ "${GENTOO_DEV}" == "yes" ]] && eqawarn "You can now use the usual syntax in ruby_add_rdepend for $CATEGORY/$PF"
ruby_add_rdepend "$(_ruby_wrap_conditions "$1" "$2")"
return
@@ -229,15 +234,16 @@ ruby_add_rdepend() {
;;
esac
- local dependency=$(_ruby_atoms_samelib "$1")
+ _ruby_set_globals_invalidate_if_stale
+ _ruby_atoms_samelib "$1"
- RDEPEND="${RDEPEND} $dependency"
+ RDEPEND+=" ${_RUBY_ATOMS_SAMELIB_RESULT}"
# Add the dependency as a test-dependency since we're going to
# execute the code during test phase.
case ${EAPI} in
- 5|6) DEPEND="${DEPEND} test? ( ${dependency} )" ;;
- *) BDEPEND="${BDEPEND} test? ( ${dependency} )" ;;
+ 6) DEPEND+=" test? ( ${_RUBY_ATOMS_SAMELIB_RESULT} )" ;;
+ *) BDEPEND+=" test? ( ${_RUBY_ATOMS_SAMELIB_RESULT} )" ;;
esac
if ! has test "$IUSE"; then
IUSE+=" test"
@@ -261,7 +267,7 @@ ruby_add_bdepend() {
1) ;;
2)
case ${EAPI} in
- 5|6)
+ 6)
[[ "${GENTOO_DEV}" == "yes" ]] && eqawarn "You can now use the usual syntax in ruby_add_bdepend for $CATEGORY/$PF"
ruby_add_bdepend "$(_ruby_wrap_conditions "$1" "$2")"
return
@@ -276,13 +282,13 @@ ruby_add_bdepend() {
;;
esac
- local dependency=$(_ruby_atoms_samelib "$1")
+ _ruby_set_globals_invalidate_if_stale
+ _ruby_atoms_samelib "$1"
case ${EAPI} in
- 5|6) DEPEND="${DEPEND} $dependency" ;;
- *) BDEPEND="${BDEPEND} $dependency" ;;
+ 6) DEPEND+=" ${_RUBY_ATOMS_SAMELIB_RESULT}" ;;
+ *) BDEPEND+=" ${_RUBY_ATOMS_SAMELIB_RESULT}" ;;
esac
- RDEPEND="${RDEPEND}"
}
# @FUNCTION: ruby_add_depend
@@ -294,7 +300,7 @@ ruby_add_depend() {
debug-print-function ${FUNCNAME} "${@}"
case ${EAPI} in
- 5|6) die "only available in EAPI 7 and newer" ;;
+ 6) die "only available in EAPI 7 and newer" ;;
*) ;;
esac
@@ -303,9 +309,10 @@ ruby_add_depend() {
*) die "bad number of arguments to $0" ;;
esac
- local dependency=$(_ruby_atoms_samelib "$1")
+ _ruby_set_globals_invalidate_if_stale
+ _ruby_atoms_samelib "$1"
- DEPEND="${DEPEND} $dependency"
+ DEPEND+=" ${_RUBY_ATOMS_SAMELIB_RESULT}"
}
# @FUNCTION: ruby_get_use_implementations
@@ -314,8 +321,10 @@ ruby_add_depend() {
ruby_get_use_implementations() {
debug-print-function ${FUNCNAME} "${@}"
+ _ruby_set_globals_invalidate_if_stale
+
local i implementation
- for implementation in $(_ruby_get_all_impls); do
+ for implementation in "${_RUBY_GET_ALL_IMPLS[@]}"; do
use ruby_targets_${implementation} && i+=" ${implementation}"
done
echo $i
@@ -327,11 +336,24 @@ ruby_get_use_implementations() {
ruby_get_use_targets() {
debug-print-function ${FUNCNAME} "${@}"
- local t implementation
- for implementation in $(_ruby_get_all_impls); do
- t+=" ruby_targets_${implementation}"
- done
- echo $t
+ _ruby_set_globals_invalidate_if_stale
+ _ruby_get_use_targets
+ echo "${_RUBY_GET_USE_TARGETS}"
+}
+
+# @FUNCTION: _ruby_get_use_targets
+# @INTERNAL
+# @DESCRIPTION:
+# Computes the list of ruby_targets_* USE flags derived from the ebuild's USE_RUBY
+_RUBY_GET_USE_TARGETS=""
+_ruby_get_use_targets() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ _ruby_set_globals_invalidate_if_stale
+
+ local impls="${_RUBY_GET_ALL_IMPLS[@]}"
+ # XXX: This assumes all targets begin with 'ruby'.
+ _RUBY_GET_USE_TARGETS="${impls//ruby/ruby_targets_ruby}"
}
# @FUNCTION: ruby_implementations_depend
@@ -350,32 +372,60 @@ ruby_get_use_targets() {
# ...
# DEPEND="ruby? ( $(ruby_implementations_depend) )"
# RDEPEND="${DEPEND}"
+_RUBY_IMPLEMENTATIONS_DEPEND=""
ruby_implementations_depend() {
debug-print-function ${FUNCNAME} "${@}"
- local depend
- for _ruby_implementation in $(_ruby_get_all_impls); do
+ _ruby_set_globals_invalidate_if_stale
+ _ruby_implementations_depend
+ echo "${_RUBY_IMPLEMENTATIONS_DEPEND}"
+}
+
+_ruby_implementations_depend() {
+ _ruby_set_globals_invalidate_if_stale
+
+ local depend _ruby_implementation
+ for _ruby_implementation in "${_RUBY_GET_ALL_IMPLS[@]}"; do
depend="${depend}${depend+ }ruby_targets_${_ruby_implementation}? ( $(ruby_implementation_depend $_ruby_implementation) )"
done
- echo "${depend}"
+ _RUBY_IMPLEMENTATIONS_DEPEND="${depend}"
+}
+
+_ruby_set_globals() {
+ _RUBY_SET_GLOBALS_USE_RUBY="${USE_RUBY}"
+ _ruby_get_all_impls
+ _ruby_get_use_targets
+ _ruby_implementations_depend
+}
+
+_ruby_set_globals_invalidate_if_stale() {
+ # Packages may try to restrict their test dependencies to ease bootstrapping/porting
+ # if they're not yet available for a newer Ruby implementation by setting
+ # USE_RUBY="<some subset of original USE_RUBY>" ruby_add_bdepend ...
+ if [[ ${_RUBY_SET_GLOBALS_USE_RUBY} != ${USE_RUBY} && -z ${_RUBY_SET_GLOBALS_INVALIDATING} ]] ; then
+ local _RUBY_SET_GLOBALS_INVALIDATING=1
+ _ruby_set_globals
+ fi
}
-IUSE+=" $(ruby_get_use_targets)"
+_ruby_set_globals
+
+IUSE+=" ${_RUBY_GET_USE_TARGETS}"
# If you specify RUBY_OPTIONAL you also need to take care of
# ruby useflag and dependency.
if [[ ${RUBY_OPTIONAL} != yes ]]; then
- DEPEND="${DEPEND} $(ruby_implementations_depend)"
- RDEPEND="${RDEPEND} $(ruby_implementations_depend)"
- REQUIRED_USE+=" || ( $(ruby_get_use_targets) )"
+ DEPEND+=" ${_RUBY_IMPLEMENTATIONS_DEPEND}"
+ RDEPEND+=" ${_RUBY_IMPLEMENTATIONS_DEPEND}"
+ REQUIRED_USE+=" || ( ${_RUBY_GET_USE_TARGETS} )"
case ${EAPI} in
- 5|6) ;;
- *) BDEPEND="${BDEPEND} $(ruby_implementations_depend)" ;;
+ 6) ;;
+ *) BDEPEND+=" ${_RUBY_IMPLEMENTATIONS_DEPEND}" ;;
esac
fi
_ruby_invoke_environment() {
old_S=${S}
- if [ -z "${RUBY_S}" ]; then
+ if [[ -z ${RUBY_S} ]]; then
sub_S=${P}
else
sub_S=${RUBY_S}
@@ -410,21 +460,29 @@ _ruby_invoke_environment() {
pushd "${WORKDIR}" &>/dev/null || die
fi
- ebegin "Running ${_PHASE:-${EBUILD_PHASE}} phase for $environment"
+ einfo "Running ${_PHASE:-${EBUILD_PHASE}} phase for $environment"
"$@"
- eend $?
popd &>/dev/null || die
S=${old_S}
}
_ruby_each_implementation() {
+ _ruby_set_globals_invalidate_if_stale
+
local invoked=no
- for _ruby_implementation in $(_ruby_get_all_impls); do
+ for _ruby_implementation in "${_RUBY_GET_ALL_IMPLS[@]}"; do
# only proceed if it's requested
use ruby_targets_${_ruby_implementation} || continue
RUBY=$(ruby_implementation_command ${_ruby_implementation})
+
+ if [[ -z ${RUBY} ]]; then
+ eerror "Failed to determine a path for \${RUBY} for USE=ruby_targets_${_ruby_implementation}:"
+ eerror " ruby_implementation_command returned an empty RUBY for ${_ruby_implementation}"
+ die "Could not find RUBY for ${_ruby_implementation}. Is $(_ruby_implementation_depend ${_ruby_implementation}) installed?"
+ fi
+
invoked=yes
if [[ -n "$1" ]]; then
@@ -436,7 +494,7 @@ _ruby_each_implementation() {
if [[ ${invoked} == "no" ]]; then
eerror "You need to select at least one compatible Ruby installation target via RUBY_TARGETS in make.conf."
- eerror "Compatible targets for this package are: $(_ruby_get_all_impls)"
+ eerror "Compatible targets for this package are: ${_RUBY_GET_ALL_IMPLS[@]}"
eerror
eerror "See https://www.gentoo.org/proj/en/prog_lang/ruby/index.xml#doc_chap3 for more information."
eerror
@@ -477,17 +535,6 @@ ruby-ng_src_unpack() {
_ruby_apply_patches() {
case ${EAPI} in
- 5)
- for patch in "${RUBY_PATCHES[@]}"; do
- if [ -f "${patch}" ]; then
- epatch "${patch}"
- elif [ -f "${FILESDIR}/${patch}" ]; then
- epatch "${FILESDIR}/${patch}"
- else
- die "Cannot find patch ${patch}"
- fi
- done
- ;;
6)
if [[ -n ${RUBY_PATCHES[@]} ]]; then
eqawarn "RUBY_PATCHES is no longer supported, use PATCHES instead"
@@ -526,13 +573,7 @@ ruby-ng_src_prepare() {
find . -name '._*' -delete
# Handle PATCHES and user supplied patches via the default phase
- case ${EAPI} in
- 5)
- ;;
- *)
- _ruby_invoke_environment all default
- ;;
- esac
+ _ruby_invoke_environment all default
_ruby_invoke_environment all _ruby_apply_patches
@@ -651,7 +692,7 @@ doruby() {
[[ -z ${RUBY} ]] && die "\$RUBY is not set"
( # don't want to pollute calling env
sitelibdir=$(ruby_rbconfig_value 'sitelibdir')
- insinto ${sitelibdir#${EPREFIX}}
+ insinto "${sitelibdir#${EPREFIX}}"
insopts -m 0644
doins "$@"
) || die "failed to install $@"
@@ -722,12 +763,12 @@ ruby-ng_rspec() {
# Explicitly pass the expected spec directory since the versioned
# rspec wrappers don't handle this automatically.
- if [ ${#@} -eq 0 ]; then
+ if [[ $# -eq 0 ]]; then
files="spec"
fi
if [[ "${DEPEND}${BDEPEND}" != *"dev-ruby/rspec"* ]]; then
- ewarn "Missing test dependency dev-ruby/rspec"
+ eqawarn "Missing test dependency dev-ruby/rspec"
fi
local rspec_params=
@@ -749,7 +790,7 @@ ruby-ng_rspec() {
;;
esac
- ${RUBY} -S rspec-${version} ${rspec_params} ${files} || die "rspec failed"
+ ${RUBY} -S rspec-${version} ${rspec_params} ${files} || die -n "rspec failed"
}
# @FUNCTION: ruby-ng_cucumber
@@ -782,7 +823,32 @@ ruby-ng_cucumber() {
;;
esac
- CUCUMBER_PUBLISH_QUIET=true ${RUBY} -S cucumber ${cucumber_params} "$@" || die "cucumber failed"
+ CUCUMBER_PUBLISH_QUIET=true ${RUBY} -S cucumber ${cucumber_params} "$@" || die -n "cucumber failed"
+}
+
+# @FUNCTION: ruby-ng_sus
+# @DESCRIPTION:
+# This is simply a wrapper around the sus-parallel command (executed by ${RUBY})
+# which also respects TEST_VERBOSE and NOCOLOR environment variables.
+ruby-ng_sus() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ if [[ "${DEPEND}${BDEPEND}" != *"dev-ruby/sus"* ]]; then
+ ewarn "Missing test dependency dev-ruby/sus"
+ fi
+
+ local sus_params=
+
+ # sus has a --verbose argument but it does not seem to impact the output (yet?)
+ case ${TEST_VERBOSE} in
+ 1|yes|true)
+ sus_params+=" --verbose"
+ ;;
+ *)
+ ;;
+ esac
+
+ ${RUBY} -S sus-parallel ${sus_params} "$@" || die -n "sus failed"
}
# @FUNCTION: ruby-ng_testrb-2
@@ -820,3 +886,7 @@ ruby-ng_testrb-2() {
${RUBY} -S testrb-2 ${testrb_params} "$@" || die "testrb-2 failed"
}
+
+fi
+
+EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_test src_install pkg_setup
diff --git a/eclass/ruby-single.eclass b/eclass/ruby-single.eclass
index 1e390b9163ad..effdbd3e96a0 100644
--- a/eclass/ruby-single.eclass
+++ b/eclass/ruby-single.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ruby-single.eclass
@@ -7,7 +7,7 @@
# @AUTHOR:
# Author: Hans de Graaff <graaff@gentoo.org>
# Based on python-single-r1 by: Michał Górny <mgorny@gentoo.org>
-# @SUPPORTED_EAPIS: 4 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @PROVIDES: ruby-utils
# @BLURB: An eclass for Ruby packages not installed for multiple implementations.
# @DESCRIPTION:
@@ -18,23 +18,18 @@
# pull in the dependency on the requested ruby targets.
#
# @CODE
-# USE_RUBY="ruby26 ruby27"
+# USE_RUBY="ruby27 ruby30"
# inherit ruby-single
# RDEPEND="${RUBY_DEPS}"
# @CODE
-case "${EAPI:-0}" in
- 0|1|2|3)
- die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
- ;;
- 4|5|6|7|8)
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_RUBY_SINGLE} ]]; then
+if [[ ! ${_RUBY_SINGLE_ECLASS} ]]; then
+_RUBY_SINGLE_ECLASS=1
inherit ruby-utils
@@ -67,20 +62,23 @@ inherit ruby-utils
#
# Example value:
# @CODE
-# || ( dev-lang/ruby:2.7 dev-lang/ruby:2.6 ) virtual/rubygems
+# || ( dev-lang/ruby:3.0 dev-lang/ruby:2.7 ) virtual/rubygems
# @CODE
#
# The order of dependencies will change over time to best match the
# current state of ruby targets, e.g. stable version first.
_ruby_single_implementations_depend() {
- local depend
+ local _ruby_implementation depend
for _ruby_implementation in ${RUBY_TARGETS_PREFERENCE}; do
if [[ ${USE_RUBY} =~ ${_ruby_implementation} ]]; then
- depend="${depend} $(_ruby_implementation_depend $_ruby_implementation)"
+ depend+=" ("
+ depend+=" $(_ruby_implementation_depend ${_ruby_implementation})"
+ depend+=" virtual/rubygems[ruby_targets_${_ruby_implementation}(-)]"
+ depend+=" )"
fi
done
- echo "|| ( ${depend} ) virtual/rubygems"
+ echo "|| ( ${depend} )"
}
_ruby_single_set_globals() {
@@ -88,6 +86,4 @@ _ruby_single_set_globals() {
}
_ruby_single_set_globals
-
-_RUBY_SINGLE=1
fi
diff --git a/eclass/ruby-utils.eclass b/eclass/ruby-utils.eclass
index fb033d64201c..789f57ce25f6 100644
--- a/eclass/ruby-utils.eclass
+++ b/eclass/ruby-utils.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ruby-utils.eclass
@@ -22,7 +22,6 @@ esac
if [[ ! ${_RUBY_UTILS} ]]; then
-
# @ECLASS_VARIABLE: RUBY_TARGETS_PREFERENCE
# @INTERNAL
# @DESCRIPTION:
@@ -33,64 +32,19 @@ if [[ ! ${_RUBY_UTILS} ]]; then
# provide for a better first installation experience.
# All stable RUBY_TARGETS
-RUBY_TARGETS_PREFERENCE="ruby26 "
+RUBY_TARGETS_PREFERENCE="ruby31 "
# All other active ruby targets
-RUBY_TARGETS_PREFERENCE+="ruby27 ruby30 ruby31"
-
+RUBY_TARGETS_PREFERENCE+="ruby32 ruby33"
_ruby_implementation_depend() {
local rubypn=
local rubyslot=
case $1 in
- ruby18)
- rubypn="dev-lang/ruby"
- rubyslot=":1.8"
- ;;
- ruby19)
- rubypn="dev-lang/ruby"
- rubyslot=":1.9"
- ;;
- ruby20)
+ ruby1[89]|ruby2[0-7]|ruby3[0-3])
rubypn="dev-lang/ruby"
- rubyslot=":2.0"
- ;;
- ruby21)
- rubypn="dev-lang/ruby"
- rubyslot=":2.1"
- ;;
- ruby22)
- rubypn="dev-lang/ruby"
- rubyslot=":2.2"
- ;;
- ruby23)
- rubypn="dev-lang/ruby"
- rubyslot=":2.3"
- ;;
- ruby24)
- rubypn="dev-lang/ruby"
- rubyslot=":2.4"
- ;;
- ruby25)
- rubypn="dev-lang/ruby"
- rubyslot=":2.5"
- ;;
- ruby26)
- rubypn="dev-lang/ruby"
- rubyslot=":2.6"
- ;;
- ruby27)
- rubypn="dev-lang/ruby"
- rubyslot=":2.7"
- ;;
- ruby30)
- rubypn="dev-lang/ruby"
- rubyslot=":3.0"
- ;;
- ruby31)
- rubypn="dev-lang/ruby"
- rubyslot=":3.1"
+ rubyslot=":${1:4:1}.${1:5}"
;;
ree18)
rubypn="dev-lang/ruby-enterprise"
@@ -110,7 +64,5 @@ _ruby_implementation_depend() {
echo "$2${rubypn}$3${rubyslot}"
}
-
-
_RUBY_UTILS=1
fi
diff --git a/eclass/rust-toolchain.eclass b/eclass/rust-toolchain.eclass
index 75b15a7088b4..5824a48734f9 100644
--- a/eclass/rust-toolchain.eclass
+++ b/eclass/rust-toolchain.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: rust-toolchain.eclass
# @MAINTAINER:
# Rust Project <rust@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 8
# @BLURB: helps map gentoo arches to rust ABIs
# @DESCRIPTION:
# This eclass contains a src_unpack default phase function, and
@@ -12,10 +12,8 @@
# gentoo arches.
case ${EAPI} in
- 6) : ;;
- 7) : ;;
- 8) : ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
inherit multilib-build
@@ -25,7 +23,7 @@ inherit multilib-build
# This variable specifies the base URL used by the
# rust_arch_uri and rust_all_arch_uris functions when
# generating the URI output list.
-: ${RUST_TOOLCHAIN_BASEURL:=https://static.rust-lang.org/dist/}
+: "${RUST_TOOLCHAIN_BASEURL:=https://static.rust-lang.org/dist/}"
# @FUNCTION: rust_abi
# @USAGE: [CHOST-value]
@@ -38,21 +36,23 @@ rust_abi() {
case ${CTARGET%%*-} in
aarch64*gnu) echo aarch64-unknown-linux-gnu;;
aarch64*musl) echo aarch64-unknown-linux-musl;;
- mips64*) echo mips64-unknown-linux-gnuabi64;;
+ armv6j*h*) echo arm-unknown-linux-gnueabihf;;
+ armv6j*s*) echo arm-unknown-linux-gnueabi;;
+ armv7a*h*) echo armv7-unknown-linux-gnueabihf;;
+ i?86*) echo i686-unknown-linux-gnu;;
+ loongarch64*) echo loongarch64-unknown-linux-gnu;;
+ mips64el*) echo mips64el-unknown-linux-gnuabi64;;
+ mips64*) echo mips64-unknown-linux-gnuabi64;;
+ mipsel*) echo mipsel-unknown-linux-gnu;;
+ mips*) echo mips-unknown-linux-gnu;;
powerpc64le*) echo powerpc64le-unknown-linux-gnu;;
powerpc64*) echo powerpc64-unknown-linux-gnu;;
- x86_64*gnu) echo x86_64-unknown-linux-gnu;;
+ powerpc*) echo powerpc-unknown-linux-gnu;;
+ riscv64*) echo riscv64gc-unknown-linux-gnu;;
+ s390x*) echo s390x-unknown-linux-gnu;;
+ x86_64*gnu) echo x86_64-unknown-linux-gnu;;
x86_64*musl) echo x86_64-unknown-linux-musl;;
- armv6j*s*) echo arm-unknown-linux-gnueabi;;
- armv6j*h*) echo arm-unknown-linux-gnueabihf;;
- armv7a*h*) echo armv7-unknown-linux-gnueabihf;;
- i?86*) echo i686-unknown-linux-gnu;;
- mipsel*) echo mipsel-unknown-linux-gnu;;
- mips*) echo mips-unknown-linux-gnu;;
- powerpc*) echo powerpc-unknown-linux-gnu;;
- s390x*) echo s390x-unknown-linux-gnu;;
- riscv64*) echo riscv64gc-unknown-linux-gnu;;
- *) echo ${CTARGET};;
+ *) echo ${CTARGET};;
esac
}
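
To illustrate the mapping (a sketch; the CHOST values are just examples):

  rust_abi x86_64-pc-linux-gnu              # -> x86_64-unknown-linux-gnu
  rust_abi armv7a-hardfloat-linux-gnueabihf # -> armv7-unknown-linux-gnueabihf
  rust_abi riscv64-unknown-linux-gnu        # -> riscv64gc-unknown-linux-gnu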
@@ -107,22 +107,51 @@ rust_arch_uri() {
#
rust_all_arch_uris()
{
- local uris=""
- uris+="abi_x86_64? ( elibc_glibc? ( $(rust_arch_uri x86_64-unknown-linux-gnu "$@") )
- elibc_musl? ( $(rust_arch_uri x86_64-unknown-linux-musl "$@") ) ) "
- uris+="arm? ( $(rust_arch_uri arm-unknown-linux-gnueabi "$@")
- $(rust_arch_uri arm-unknown-linux-gnueabihf "$@")
- $(rust_arch_uri armv7-unknown-linux-gnueabihf "$@") ) "
- uris+="arm64? ( elibc_glibc? ( $(rust_arch_uri aarch64-unknown-linux-gnu "$@") )
- elibc_musl? ( $(rust_arch_uri aarch64-unknown-linux-musl "$@") ) ) "
- uris+="mips? ( $(rust_arch_uri mips-unknown-linux-gnu "$@")
- $(rust_arch_uri mipsel-unknown-linux-gnu "$@")
- $(rust_arch_uri mips64-unknown-linux-gnuabi64 "$@") ) "
- uris+="ppc? ( $(rust_arch_uri powerpc-unknown-linux-gnu "$@") ) "
- uris+="ppc64? ( $(rust_arch_uri powerpc64-unknown-linux-gnu "$@")
- $(rust_arch_uri powerpc64le-unknown-linux-gnu "$@") ) "
- uris+="s390? ( $(rust_arch_uri s390x-unknown-linux-gnu "$@") ) "
- uris+="abi_x86_32? ( $(rust_arch_uri i686-unknown-linux-gnu "$@") ) "
- uris+="riscv? ( $(rust_arch_uri riscv64gc-unknown-linux-gnu "$@") ) "
- echo "${uris}"
+ echo "
+ abi_x86_32? ( $(rust_arch_uri i686-unknown-linux-gnu "$@") )
+ abi_x86_64? (
+ elibc_glibc? ( $(rust_arch_uri x86_64-unknown-linux-gnu "$@") )
+ elibc_musl? ( $(rust_arch_uri x86_64-unknown-linux-musl "$@") )
+ )
+ arm? (
+ $(rust_arch_uri arm-unknown-linux-gnueabi "$@")
+ $(rust_arch_uri arm-unknown-linux-gnueabihf "$@")
+ $(rust_arch_uri armv7-unknown-linux-gnueabihf "$@")
+ )
+ arm64? (
+ elibc_glibc? ( $(rust_arch_uri aarch64-unknown-linux-gnu "$@") )
+ elibc_musl? ( $(rust_arch_uri aarch64-unknown-linux-musl "$@") )
+ )
+ ppc? ( $(rust_arch_uri powerpc-unknown-linux-gnu "$@") )
+ ppc64? (
+ big-endian? ( $(rust_arch_uri powerpc64-unknown-linux-gnu "$@") )
+ !big-endian? ( $(rust_arch_uri powerpc64le-unknown-linux-gnu "$@") )
+ )
+ riscv? ( $(rust_arch_uri riscv64gc-unknown-linux-gnu "$@") )
+ s390? ( $(rust_arch_uri s390x-unknown-linux-gnu "$@") )
+ "
+
+ # Upstream did not gain support for loong until v1.71.0.
+ # NOTE: Merge this into the block above after every <1.71.0 version is
+ # gone from tree.
+ local arg_version="${1##*-}"
+ arg_version="${arg_version:-$PV}"
+ if ver_test "${arg_version}" -ge 1.71.0; then
+ echo "loong? ( $(rust_arch_uri loongarch64-unknown-linux-gnu "$@") )"
+ fi
+
+ # until https://github.com/rust-lang/rust/pull/113274 is resolved, there
+ # will not be upstream-built mips artifacts
+ if ver_test "${arg_version}" -lt 1.72.0; then
+ echo "mips? (
+ abi_mips_o32? (
+ big-endian? ( $(rust_arch_uri mips-unknown-linux-gnu "$@") )
+ !big-endian? ( $(rust_arch_uri mipsel-unknown-linux-gnu "$@") )
+ )
+ abi_mips_n64? (
+ big-endian? ( $(rust_arch_uri mips64-unknown-linux-gnuabi64 "$@") )
+ !big-endian? ( $(rust_arch_uri mips64el-unknown-linux-gnuabi64 "$@") )
+ )
+ )"
+ fi
}
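
A hedged sketch of a typical call site (the "rust-${PV}" base name is an
assumption for illustration; its trailing version component is what the
arg_version parsing above relies on):

  SRC_URI="$(rust_all_arch_uris rust-${PV})"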
diff --git a/eclass/s6.eclass b/eclass/s6.eclass
index 25960ba4a1db..56b321f178f6 100644
--- a/eclass/s6.eclass
+++ b/eclass/s6.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2019 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: s6.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 8
# @BLURB: helper functions to install s6 services
# @DESCRIPTION:
# This eclass provides helpers to install s6 services.
@@ -26,7 +26,7 @@
# @CODE
case ${EAPI} in
- 5|6|7|8) ;;
+ 8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -42,7 +42,7 @@ _s6_get_servicedir() {
# @DESCRIPTION:
# Output the path for the s6 service directory (not including ${D}).
s6_get_servicedir() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
echo "${EPREFIX}$(_s6_get_servicedir)"
}
@@ -55,22 +55,22 @@ s6_get_servicedir() {
# run is the run script for the service.
# finish is the optional finish script for the service.
s6_install_service() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
local name="$1"
local run="$2"
local finish="$3"
- [[ $name ]] ||
+ [[ -z ${name} ]] &&
die "${ECLASS}.eclass: you must specify the s6 service name"
- [[ $run ]] ||
+ [[ -z ${run} ]] &&
die "${ECLASS}.eclass: you must specify the s6 service run script"
(
- local servicepath="$(_s6_get_servicedir)/$name"
- exeinto "$servicepath"
- newexe "$run" run
- [[ $finish ]] && newexe "$finish" finish
+ local servicepath="$(_s6_get_servicedir)/${name}"
+ exeinto "${servicepath}"
+ newexe "${run}" run
+ [[ -n ${finish} ]] && newexe "${finish}" finish
)
}
@@ -81,18 +81,17 @@ s6_install_service() {
# default.
# servicename is the name of the service.
s6_service_down() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
local name="$1"
-
- [[ $name ]] ||
+ [[ -z ${name} ]] &&
die "${ECLASS}.eclass: you must specify the s6 service name"
(
- touch "$T"/down || die
- local servicepath="$(_s6_get_servicedir)/$name"
- insinto "$servicepath"
- doins "$T"/down
+ touch "${T}"/down || die
+ local servicepath="$(_s6_get_servicedir)/${name}"
+ insinto "${servicepath}"
+ doins "${T}"/down
)
}
@@ -103,17 +102,16 @@ s6_service_down() {
# leader.
# servicename is the name of the service.
s6_service_nosetsid() {
- debug-print-function ${FUNCNAME} "${@}"
+ debug-print-function ${FUNCNAME} "$@"
local name="$1"
-
- [[ $name ]] ||
+ [[ -z ${name} ]] &&
die "${ECLASS}.eclass: you must specify the s6 service name"
(
- touch "$T"/nosetsid || die
- local servicepath="$(_s6_get_servicedir)/$name"
- insinto "$servicepath"
- doins "$T"/nosetsid
+ touch "${T}"/nosetsid || die
+ local servicepath="$(_s6_get_servicedir)/${name}"
+ insinto "${servicepath}"
+ doins "${T}"/nosetsid
)
}
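
A minimal sketch of an ebuild consuming the s6 helpers above; the service name and the run/finish scripts under FILESDIR are illustrative:

    src_install() {
        default
        # install the run script (and optional finish script) into the servicedir
        s6_install_service myservice "${FILESDIR}/myservice.run" "${FILESDIR}/myservice.finish"
        # ship the service with a 'down' file so it is not started by default
        s6_service_down myservice
    }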
diff --git a/eclass/savedconfig.eclass b/eclass/savedconfig.eclass
index 20669c08b338..cc5748543078 100644
--- a/eclass/savedconfig.eclass
+++ b/eclass/savedconfig.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: savedconfig.eclass
# @MAINTAINER:
# base-system@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: common API for saving/restoring complex configuration files
# @DESCRIPTION:
# It is not uncommon to come across a package which has a very fine
@@ -30,15 +30,18 @@
# 4. Emerge the package with just USE=savedconfig to get the custom
# build.
+case ${EAPI} in
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_SAVEDCONFIG_ECLASS} ]]; then
+_SAVEDCONFIG_ECLASS=1
+
inherit portability
IUSE="savedconfig"
-case ${EAPI} in
- [5-7]) ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
-esac
-
# @FUNCTION: save_config
# @USAGE: <config files to save>
# @DESCRIPTION:
@@ -152,4 +155,6 @@ savedconfig_pkg_postinst() {
fi
}
+fi
+
EXPORT_FUNCTIONS pkg_postinst
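
A hedged sketch of the usual savedconfig workflow; restore_config is the counterpart helper provided by the same eclass (not shown in this hunk), and the config file name is illustrative:

    src_prepare() {
        default
        # with USE=savedconfig, replace the shipped config with the user's saved copy
        restore_config config.mk
    }

    src_install() {
        default
        # record the build-time config so the user can edit it and re-emerge
        save_config config.mk
    }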
diff --git a/eclass/scons-utils.eclass b/eclass/scons-utils.eclass
index ff6550ae048a..b8663d34b1a2 100644
--- a/eclass/scons-utils.eclass
+++ b/eclass/scons-utils.eclass
@@ -1,27 +1,28 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: scons-utils.eclass
# @MAINTAINER:
# mgorny@gentoo.org
-# @SUPPORTED_EAPIS: 0 1 2 3 4 5 6 7
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: helper functions to deal with SCons buildsystem
# @DESCRIPTION:
# This eclass provides a set of function to help developers sanely call
-# dev-util/scons and pass parameters to it.
+# dev-build/scons and pass parameters to it.
#
-# As of dev-util/scons-3.0.1-r100, SCons supports Python 3. Since
+# As of dev-build/scons-3.0.1-r100, SCons supports Python 3. Since
# SCons* files in build systems are written as Python, all packages
# need to explicitly verify which versions of Python are supported
# and use appropriate Python suite eclass to select the implementation.
# The eclass needs to be inherited before scons-utils, and scons-utils
# will automatically take advantage of it. For more details, please see:
-# https://wiki.gentoo.org/wiki/Project:Python/scons-utils_integration
+# https://projects.gentoo.org/python/guide/buildsys.html#scons
#
# Please note that SCons is more like a 'build system creation kit',
# and requires a lot of upstream customization to be used sanely.
-# You will often need to request fixes upstream and/or patch the build
-# system. In particular:
+# We attempt to force sane behavior via custom patching but this is not
+# guaranteed to work. You will sometimes need to request fixes upstream
+# and/or patch the build system. In particular, normally:
#
# 1. There are no 'standard' variables. To respect CC, CXX, CFLAGS,
# CXXFLAGS, CPPFLAGS, LDFLAGS, upstream needs to define appropriate
@@ -35,10 +36,10 @@
#
# @EXAMPLE:
# @CODE
-# PYTHON_COMPAT=( python2_7 )
+# PYTHON_COMPAT=( python3_{8..11} )
# inherit python-any-r1 scons-utils toolchain-funcs
#
-# EAPI=5
+# EAPI=8
#
# src_configure() {
# MYSCONS=(
@@ -63,22 +64,15 @@
# @DEFAULT_UNSET
# @DESCRIPTION:
# The minimal version of SCons required for the build to work.
-
-# @VARIABLE: myesconsargs
-# @DEFAULT_UNSET
-# @DESCRIPTION:
-# DEPRECATED, EAPI 0..5 ONLY: pass options to escons instead
-#
-# List of package-specific options to pass to all SCons calls. Supposed to be
-# set in src_configure().
+: "${SCONS_MIN_VERSION:=4.4.0}"
# @ECLASS_VARIABLE: SCONSOPTS
# @USER_VARIABLE
# @DEFAULT_UNSET
# @DESCRIPTION:
# The default set of options to pass to scons. Similar to MAKEOPTS,
-# supposed to be set in make.conf. If unset, escons() will use cleaned
-# up MAKEOPTS instead.
+# supposed to be set in make.conf. If unset, escons() will set -j
+# based on MAKEOPTS.
# @ECLASS_VARIABLE: EXTRA_ESCONS
# @USER_VARIABLE
@@ -88,24 +82,10 @@
# Much like EXTRA_EMAKE, this is not supposed to be used in make.conf
# and not in ebuilds!
-# @ECLASS_VARIABLE: USE_SCONS_TRUE
-# @DESCRIPTION:
-# DEPRECATED: use usex instead
-#
-# The default value for truth in scons-use() (1 by default).
-: ${USE_SCONS_TRUE:=1}
-
-# @ECLASS_VARIABLE: USE_SCONS_FALSE
-# @DESCRIPTION:
-# DEPRECATED: use usex instead
-#
-# The default value for false in scons-use() (0 by default).
-: ${USE_SCONS_FALSE:=0}
-
# -- EAPI support check --
case ${EAPI:-0} in
- 0|1|2|3|4|5|6|7) ;;
+ 7|8) ;;
*) die "EAPI ${EAPI} unsupported."
esac
@@ -113,35 +93,28 @@ inherit multiprocessing
# -- ebuild variables setup --
-if [[ -n ${SCONS_MIN_VERSION} ]]; then
- SCONS_DEPEND=">=dev-util/scons-${SCONS_MIN_VERSION}"
-else
- SCONS_DEPEND="dev-util/scons"
-fi
+SCONS_DEPEND=">=dev-build/scons-${SCONS_MIN_VERSION}"
-if [[ ${_PYTHON_ANY_R1} ]]; then
+if [[ ${_PYTHON_ANY_R1_ECLASS} ]]; then
# when using python-any-r1, use any-of dep API
BDEPEND="$(python_gen_any_dep "${SCONS_DEPEND}[\${PYTHON_USEDEP}]")"
scons-utils_python_check_deps() {
- has_version "${SCONS_DEPEND}[${PYTHON_USEDEP}]"
+ python_has_version "${SCONS_DEPEND}[${PYTHON_USEDEP}]"
}
python_check_deps() { scons-utils_python_check_deps; }
-elif [[ ${_PYTHON_SINGLE_R1} ]]; then
+elif [[ ${_PYTHON_SINGLE_R1_ECLASS} ]]; then
# when using python-single-r1, use PYTHON_USEDEP API
BDEPEND="
$(python_gen_cond_dep "${SCONS_DEPEND}[\${PYTHON_USEDEP}]")
${PYTHON_DEPS}"
-elif [[ ${EAPI:-0} == [0123456] ]]; then
- # in older EAPIs, just force Python 2.7
- BDEPEND="${SCONS_DEPEND}[python_targets_python2_7]"
-elif [[ ${_PYTHON_R1} ]]; then
+elif [[ ${_PYTHON_R1_ECLASS} ]]; then
# when using python-r1, you need to depend on scons yourself
# (depending on whether you need any-r1 or full -r1 API)
# -- since this is a breaking API change, it applies to EAPI 7+ only
BDEPEND=""
-elif [[ ${EAPI:-0} != [0123456] ]]; then
- # in EAPI 7+, require appropriate eclass use
+else
+ # require appropriate eclass use
eerror "Using scons-utils.eclass without any python-r1 suite eclass is not supported."
eerror "Please make sure to configure and inherit appropriate -r1 eclass."
eerror "For more information and examples, please see:"
@@ -149,200 +122,39 @@ elif [[ ${EAPI:-0} != [0123456] ]]; then
die "Invalid use of scons-utils.eclass"
fi
-if [[ ${EAPI:-0} == [0123456] ]]; then
- DEPEND=${BDEPEND}
- unset BDEPEND
-fi
-
# -- public functions --
# @FUNCTION: escons
# @USAGE: [<args>...]
# @DESCRIPTION:
# Call scons, passing the supplied arguments. Like emake, this function
-# does die on failure in EAPI 4. Respects nonfatal in EAPI 6 and newer.
+# dies on failure, unless nonfatal is used.
escons() {
local ret
debug-print-function ${FUNCNAME} "${@}"
if [[ ! ${EPYTHON} ]]; then
- if [[ ${EAPI:-0} != [0123456] ]]; then
- eerror "EPYTHON is unset while calling escons. This most likely means that"
- eerror "the ebuild did not call the appropriate eclass function before calling scons."
- if [[ ${_PYTHON_ANY_R1} ]]; then
- eerror "Please ensure that python-any-r1_pkg_setup is called in pkg_setup()."
- elif [[ ${_PYTHON_SINGLE_R1} ]]; then
- eerror "Please ensure that python-single-r1_pkg_setup is called in pkg_setup()."
- else # python-r1
- eerror "Please ensure that python_setup is called before escons, or that escons"
- eerror "is used within python_foreach_impl as appropriate."
- fi
- die "EPYTHON unset in escons"
- else
- local -x EPYTHON=python2.7
+ eerror "EPYTHON is unset while calling escons. This most likely means that"
+ eerror "the ebuild did not call the appropriate eclass function before calling scons."
+ if [[ ${_PYTHON_ANY_R1_ECLASS} ]]; then
+ eerror "Please ensure that python-any-r1_pkg_setup is called in pkg_setup()."
+ elif [[ ${_PYTHON_SINGLE_R1_ECLASS} ]]; then
+ eerror "Please ensure that python-single-r1_pkg_setup is called in pkg_setup()."
+ else # python-r1
+ eerror "Please ensure that python_setup is called before escons, or that escons"
+ eerror "is used within python_foreach_impl as appropriate."
fi
+ die "EPYTHON unset in escons"
fi
- # Use myesconsargs in EAPI 5 and older
- if [[ ${EAPI} == [012345] ]]; then
- set -- "${myesconsargs[@]}" "${@}"
- fi
-
- # if SCONSOPTS are _unset_, use cleaned MAKEOPTS
- if [[ ! ${SCONSOPTS+set} ]]; then
- local SCONSOPTS
- _scons_clean_makeopts
- fi
+ # if SCONSOPTS are unset, grab -j from MAKEOPTS
+ : "${SCONSOPTS:=-j$(makeopts_jobs)}"
# pass ebuild environment variables through!
local -x GENTOO_SCONS_ENV_PASSTHROUGH=1
set -- scons ${SCONSOPTS} ${EXTRA_ESCONS} "${@}"
echo "${@}" >&2
- "${@}"
- ret=${?}
-
- if [[ ${ret} -ne 0 ]]; then
- case "${EAPI:-0}" in
- 0|1|2|3) # nonfatal in EAPIs 0 through 3
- ;;
- 4|5) # 100% fatal in 4 & 5
- die "escons failed."
- ;;
- *) # respect nonfatal in 6 onwards
- die -n "escons failed."
- ;;
- esac
- fi
- return ${ret}
-}
-
-# @FUNCTION: _scons_clean_makeopts
-# @USAGE: [makeflags] [...]
-# @INTERNAL
-# @DESCRIPTION:
-# Strip the supplied makeflags (or ${MAKEOPTS} if called without
-# an argument) of options not supported by SCons and make sure --jobs
-# gets an argument. Output the resulting flag list (suitable
-# for an assignment to SCONSOPTS).
-_scons_clean_makeopts() {
- local new_makeopts=()
-
- debug-print-function ${FUNCNAME} "${@}"
-
- if [[ ${#} -eq 0 ]]; then
- debug-print "Using MAKEOPTS: [${MAKEOPTS}]"
- set -- ${MAKEOPTS}
- else
- # unquote if necessary
- set -- ${*}
- fi
-
- # empty MAKEOPTS give out empty SCONSOPTS
- # thus, we do need to worry about the initial setup
- if [[ ${*} = ${_SCONS_CACHE_MAKEOPTS} ]]; then
- SCONSOPTS=${_SCONS_CACHE_SCONSOPTS}
- debug-print "Cache hit: [${SCONSOPTS}]"
- return
- fi
- _SCONS_CACHE_MAKEOPTS=${*}
-
- while [[ ${#} -gt 0 ]]; do
- case ${1} in
- # clean, simple to check -- we like that
- --jobs=*|--keep-going)
- new_makeopts+=( ${1} )
- ;;
- # need to take a look at the next arg and guess
- --jobs)
- if [[ ${#} -gt 1 && ${2} =~ ^[0-9]+$ ]]; then
- new_makeopts+=( ${1} ${2} )
- shift
- else
- # no value means no limit, let's pass a default instead
- new_makeopts+=( ${1}=$(( $(get_nproc) + 1 )) )
- fi
- ;;
- # strip other long options
- --*)
- ;;
- # short option hell
- -*)
- local str new_optstr
- new_optstr=
- str=${1#-}
-
- while [[ -n ${str} ]]; do
- case ${str} in
- k*)
- new_optstr+=k
- ;;
- # -j needs to come last
- j)
- if [[ ${#} -gt 1 && ${2} =~ ^[0-9]+$ ]]; then
- new_optstr+="j ${2}"
- shift
- else
- new_optstr+="j $(( $(get_nproc) + 1 ))"
- fi
- ;;
- # otherwise, everything after -j is treated as an arg
- j*)
- new_optstr+=${str}
- break
- ;;
- esac
- str=${str#?}
- done
-
- if [[ -n ${new_optstr} ]]; then
- new_makeopts+=( -${new_optstr} )
- fi
- ;;
- esac
- shift
- done
-
- SCONSOPTS=${new_makeopts[*]}
- _SCONS_CACHE_SCONSOPTS=${SCONSOPTS}
- debug-print "New SCONSOPTS: [${SCONSOPTS}]"
-}
-
-# @FUNCTION: use_scons
-# @USAGE: <use-flag> [var-name] [var-opt-true] [var-opt-false]
-# @DESCRIPTION:
-# DEPRECATED, EAPI 0..5 ONLY: use usex instead
-#
-# Output a SCons parameter with value depending on the USE flag state.
-# If the USE flag is set, output <var-name>=<var-opt-true>; otherwise
-# <var-name>=<var-opt-false>.
-#
-# If <var-name> is omitted, <use-flag> will be used instead. However,
-# if <use-flag> starts with an exclamation mark (!flag), 'no' will be
-# prepended to the name (e.g. noflag).
-#
-# If <var-opt-true> and/or <var-opt-false> are omitted,
-# ${USE_SCONS_TRUE} and/or ${USE_SCONS_FALSE} will be used instead.
-use_scons() {
- [[ ${EAPI} == [012345] ]] \
- || die "${FUNCNAME} is banned in EAPI ${EAPI}, use usex instead"
-
- local flag=${1}
- local varname=${2:-${flag/\!/no}}
- local vartrue=${3:-${USE_SCONS_TRUE}}
- local varfalse=${4:-${USE_SCONS_FALSE}}
-
- debug-print-function ${FUNCNAME} "${@}"
-
- if [[ ${#} -eq 0 ]]; then
- eerror "Usage: scons-use <use-flag> [var-name] [var-opt-true] [var-opt-false]"
- die 'scons-use(): not enough arguments'
- fi
-
- if use "${flag}"; then
- echo "${varname}=${vartrue}"
- else
- echo "${varname}=${varfalse}"
- fi
+ "${@}" || die -n "escons failed."
}
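
A minimal sketch matching the @EXAMPLE skeleton above, assuming python-any-r1 and toolchain-funcs are inherited first as the eclass now requires; the MYSCONS entries are illustrative, since every SCons project defines its own knobs:

    src_configure() {
        MYSCONS=(
            CC="$(tc-getCC)"         # only honoured if the SConstruct wires it up
            prefix="${EPREFIX}"/usr
        )
    }

    src_compile() {
        escons "${MYSCONS[@]}"
    }

    src_install() {
        # DESTDIR is not a SCons standard; shown only as a typical project knob
        escons "${MYSCONS[@]}" DESTDIR="${D}" install
    }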
diff --git a/eclass/secureboot.eclass b/eclass/secureboot.eclass
new file mode 100644
index 000000000000..a9ba514cb7a0
--- /dev/null
+++ b/eclass/secureboot.eclass
@@ -0,0 +1,175 @@
+# Copyright 1999-2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: secureboot.eclass
+# @MAINTAINER:
+# Andrew Ammerlaan <andrewammerlaan@gentoo.org>
+# @AUTHOR:
+# Author: Andrew Ammerlaan <andrewammerlaan@gentoo.org>
+# @SUPPORTED_EAPIS: 7 8
+# @BLURB: A small eclass to sign efi files for Secure Boot
+# @DESCRIPTION:
+# Eclass for packages that install .efi files. A use flag and two user
+# variables allow signing these .efi files for use on systems with Secure Boot
+# enabled.
+#
+# Signing the files during emerge ensures that any tooling that actually
+# installs the bootloaders and kernels to ESP always uses a signed version.
+# This prevents Secure Boot from accidentally breaking when upgrading the
+# kernel or the bootloader.
+#
+# Example use
+# @CODE
+# src_install() {
+# default
+# secureboot_sign_efi_file in.efi out.efi.signed
+# }
+# @CODE
+#
+# Or
+# @CODE
+# src_install() {
+# default
+# secureboot_auto_sign
+# }
+# @CODE
+#
+# Some tools will automatically detect and use EFI executables with the .signed
+# suffix. For tools that do not do this, the --in-place argument for
+# secureboot_auto_sign can be used to ensure that the signed version is used.
+
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+IUSE="secureboot"
+BDEPEND="secureboot? ( app-crypt/sbsigntools )"
+
+# @ECLASS_VARIABLE: SECUREBOOT_SIGN_KEY
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Used with USE=secureboot. Should be set to the path of the private
+# key in PEM format to use, or a PKCS#11 URI.
+
+# @ECLASS_VARIABLE: SECUREBOOT_SIGN_CERT
+# @USER_VARIABLE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Used with USE=secureboot. Should be set to the path of the public
+# key certificate in PEM format to use.
+
+if [[ -z ${_SECUREBOOT_ECLASS} ]]; then
+_SECUREBOOT_ECLASS=1
+
+# @FUNCTION: _secureboot_die_if_unset
+# @INTERNAL
+# @DESCRIPTION:
+# If USE=secureboot is enabled die if the required user variables are unset
+# and die if the keys can't be found.
+_secureboot_die_if_unset() {
+ debug-print-function ${FUNCNAME[0]} "${@}"
+ use secureboot || return
+
+ if [[ -z ${SECUREBOOT_SIGN_KEY} || -z ${SECUREBOOT_SIGN_CERT} ]]; then
+ die "USE=secureboot enabled but SECUREBOOT_SIGN_KEY and/or SECUREBOOT_SIGN_CERT not set."
+ fi
+ if [[ ! ${SECUREBOOT_SIGN_KEY} == pkcs11:* && ! -r ${SECUREBOOT_SIGN_KEY} ]]; then
+ die "SECUREBOOT_SIGN_KEY=${SECUREBOOT_SIGN_KEY} not found or not readable!"
+ fi
+ if [[ ! -r ${SECUREBOOT_SIGN_CERT} ]]; then
+ die "SECUREBOOT_SIGN_CERT=${SECUREBOOT_SIGN_CERT} not found or not readable!"
+ fi
+}
+
+# @FUNCTION: secureboot_pkg_setup
+# @DESCRIPTION:
+# Checks if required user variables are set before starting the build
+secureboot_pkg_setup() {
+ debug-print-function ${FUNCNAME[0]} "${@}"
+ use secureboot || return
+
+ # If we are merging a binary then the files in this binary
+ # are already signed, no need to check the variables.
+ if [[ ${MERGE_TYPE} != binary ]]; then
+ _secureboot_die_if_unset
+ fi
+}
+
+# @FUNCTION: secureboot_sign_efi_file
+# @USAGE: <input file> [<output file>]
+# @DESCRIPTION:
+# Sign a file using sbsign and the requested key/certificate.
+# If the file is already signed with our key then the file is skipped.
+# If no output file is specified the output file will be the same
+# as the input file, i.e. the file will be overwritten.
+secureboot_sign_efi_file() {
+ debug-print-function ${FUNCNAME[0]} "${@}"
+ use secureboot || return
+
+ local input_file=${1}
+ local output_file=${2:-${1}}
+
+ _secureboot_die_if_unset
+
+ ebegin "Signing ${input_file}"
+ local return=1
+ if sbverify "${input_file}" --cert "${SECUREBOOT_SIGN_CERT}" &> /dev/null; then
+ ewarn "${input_file} already signed, skipping"
+ return=0
+ else
+ local args=(
+ "--key=${SECUREBOOT_SIGN_KEY}"
+ "--cert=${SECUREBOOT_SIGN_CERT}"
+ )
+ if [[ ${SECUREBOOT_SIGN_KEY} == pkcs11:* ]]; then
+ args+=( --engine=pkcs11 )
+ fi
+
+ sbsign "${args[@]}" "${input_file}" --output "${output_file}"
+ return=${?}
+ fi
+ eend ${return} || die "Signing ${input_file} failed"
+}
+
+# @FUNCTION: secureboot_auto_sign
+# @USAGE: [--in-place]
+# @DESCRIPTION:
+# Automatically discover and sign efi files in the image directory.
+#
+# By default signed files gain the .signed suffix. If the --in-place
+# argument is given the efi files are replaced with a signed version in place.
+secureboot_auto_sign() {
+ debug-print-function ${FUNCNAME[0]} "${@}"
+ use secureboot || return
+
+ [[ ${EBUILD_PHASE} == install ]] ||
+ die "${FUNCNAME[0]} can only be called in the src_install phase"
+
+ local -a efi_execs
+ mapfile -td '' efi_execs < <(
+ find "${ED}" -type f \
+ \( -iname '*.efi' -o -iname '*.efi32' -o -iname '*.efi64' \) \
+ -print0 || die
+ )
+ (( ${#efi_execs[@]} )) ||
+ die "${FUNCNAME[0]} was called but no efi executables were found"
+
+ local suffix
+ if [[ ${1} == --in-place ]]; then
+ suffix=""
+ elif [[ -n ${1} ]]; then
+ die "Invalid argument ${1}"
+ else
+ suffix=".signed"
+ fi
+
+ for efi_exec in "${efi_execs[@]}"; do
+ secureboot_sign_efi_file "${efi_exec}" "${efi_exec}${suffix}"
+ done
+}
+
+fi
+
+EXPORT_FUNCTIONS pkg_setup
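
A short usage sketch drawn from the documentation above; the key and certificate paths are placeholders for the user's own Secure Boot signing material:

    # make.conf (user side)
    SECUREBOOT_SIGN_KEY="/path/to/db.key"      # PEM key or a pkcs11: URI
    SECUREBOOT_SIGN_CERT="/path/to/db.crt"
    USE="secureboot"

    # ebuild side
    inherit secureboot

    src_install() {
        default
        # sign every *.efi/*.efi32/*.efi64 under ${ED}, replacing the originals
        secureboot_auto_sign --in-place
    }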
diff --git a/eclass/selinux-policy-2.eclass b/eclass/selinux-policy-2.eclass
index 81d982f749c1..ad760673cab1 100644
--- a/eclass/selinux-policy-2.eclass
+++ b/eclass/selinux-policy-2.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# Eclass for installing SELinux policy, and optionally
@@ -7,7 +7,7 @@
# @ECLASS: selinux-policy-2.eclass
# @MAINTAINER:
# selinux@gentoo.org
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7
# @BLURB: This eclass supports the deployment of the various SELinux modules in sec-policy
# @DESCRIPTION:
# The selinux-policy-2.eclass supports deployment of the various SELinux modules
@@ -18,25 +18,33 @@
# Also, it supports for bundling patches to make the whole thing just a bit more
# manageable.
+case ${EAPI} in
+ 7) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ ! ${_SELINUX_POLICY_2_ECLASS} ]]; then
+_SELINUX_POLICY_2_ECLASS=1
+
# @ECLASS_VARIABLE: MODS
# @DESCRIPTION:
# This variable contains the (upstream) module name for the SELinux module.
# This name is only the module name, not the category!
-: ${MODS:="_illegal"}
+: "${MODS:="_illegal"}"
# @ECLASS_VARIABLE: BASEPOL
# @DESCRIPTION:
# This variable contains the version string of the selinux-base-policy package
# that this module build depends on. It is used to patch with the appropriate
# patch bundle(s) that are part of selinux-base-policy.
-: ${BASEPOL:=${PVR}}
+: "${BASEPOL:=${PVR}}"
# @ECLASS_VARIABLE: POLICY_PATCH
# @DESCRIPTION:
# This variable contains the additional patch(es) that need to be applied on top
# of the patchset already contained within the BASEPOL variable. The variable
# can be both a simple string (space-separated) or a bash array.
-: ${POLICY_PATCH:=""}
+: "${POLICY_PATCH:=""}"
# @ECLASS_VARIABLE: POLICY_FILES
# @DESCRIPTION:
@@ -45,7 +53,7 @@
# Generally, users would want to include at least a .te and .fc file, but .if
# files are supported as well. The variable can be both a simple string
# (space-separated) or a bash array.
-: ${POLICY_FILES:=""}
+: "${POLICY_FILES:=""}"
# @ECLASS_VARIABLE: POLICY_TYPES
# @DESCRIPTION:
@@ -54,7 +62,7 @@
# This variable is the same POLICY_TYPES variable that we tell SELinux
# users to set in make.conf. Therefore, it is not the module that should
# override it, but the user.
-: ${POLICY_TYPES:="targeted strict mcs mls"}
+: "${POLICY_TYPES:="targeted strict mcs mls"}"
# @ECLASS_VARIABLE: SELINUX_GIT_REPO
# @DESCRIPTION:
@@ -63,7 +71,7 @@
# using a single variable, rather than having to set the packagename_LIVE_REPO
# variable for each and every SELinux policy module package they want to install.
# The default value is Gentoo's hardened-refpolicy repository.
-: ${SELINUX_GIT_REPO:="https://anongit.gentoo.org/git/proj/hardened-refpolicy.git"};
+: "${SELINUX_GIT_REPO:="https://anongit.gentoo.org/git/proj/hardened-refpolicy.git"}"
# @ECLASS_VARIABLE: SELINUX_GIT_BRANCH
# @DESCRIPTION:
@@ -72,13 +80,7 @@
# SELinux policy packages, rather than having to override them one by one with the
# packagename_LIVE_BRANCH variable.
# The default value is the 'master' branch.
-: ${SELINUX_GIT_BRANCH:="master"};
-
-case "${EAPI:-0}" in
- 0|1|2|3|4|5) die "EAPI<6 is not supported";;
- 6|7) : ;;
- *) die "unknown EAPI" ;;
-esac
+: "${SELINUX_GIT_BRANCH:="master"}"
case ${BASEPOL} in
9999) inherit git-r3
@@ -113,17 +115,12 @@ else
RDEPEND=">=sys-apps/policycoreutils-2.0.82
>=sec-policy/selinux-base-policy-${PV}"
fi
-if [[ ${EAPI} == 6 ]]; then
- DEPEND="${RDEPEND}
- sys-devel/m4
- >=sys-apps/checkpolicy-2.0.21"
-else
- DEPEND="${RDEPEND}"
- BDEPEND="sys-devel/m4
- >=sys-apps/checkpolicy-2.0.21"
-fi
-EXPORT_FUNCTIONS src_unpack src_prepare src_compile src_install pkg_postinst pkg_postrm
+DEPEND="${RDEPEND}"
+BDEPEND="
+ sys-devel/m4
+ >=sys-apps/checkpolicy-2.0.21
+"
# @FUNCTION: selinux-policy-2_src_unpack
# @DESCRIPTION:
@@ -159,7 +156,7 @@ selinux-policy-2_src_prepare() {
if [[ -n ${BASEPOL} ]] && [[ "${BASEPOL}" != "9999" ]]; then
cd "${S}"
einfo "Applying SELinux policy updates ... "
- eapply -p0 "${WORKDIR}/0001-full-patch-against-stable-release.patch"
+ eapply -p0 -- "${WORKDIR}/0001-full-patch-against-stable-release.patch"
fi
# Call in eapply_user. We do this early on as we start moving
@@ -169,7 +166,7 @@ selinux-policy-2_src_prepare() {
# Copy additional files to the 3rd_party/ location
if [[ "$(declare -p POLICY_FILES 2>/dev/null 2>&1)" == "declare -a"* ]] ||
[[ -n ${POLICY_FILES} ]]; then
- add_interfaces=1;
+ add_interfaces=1;
cd "${S}/refpolicy/policy/modules"
for POLFILE in ${POLICY_FILES[@]};
do
@@ -177,21 +174,21 @@ selinux-policy-2_src_prepare() {
done
fi
- # Apply the additional patches refered to by the module ebuild.
+ # Apply the additional patches referred to by the module ebuild.
# But first some magic to differentiate between bash arrays and strings
if [[ "$(declare -p POLICY_PATCH 2>/dev/null 2>&1)" == "declare -a"* ]]; then
- [[ -n ${POLICY_PATCH[*]} ]] && eapply -d "${S}/refpolicy/policy/modules" "${POLICY_PATCH[@]}"
+ [[ -n ${POLICY_PATCH[*]} ]] && eapply -d "${S}/refpolicy/policy/modules" -- "${POLICY_PATCH[@]}"
else
- [[ -n ${POLICY_PATCH} ]] && eapply -d "${S}/refpolicy/policy/modules" ${POLICY_PATCH}
+ [[ -n ${POLICY_PATCH} ]] && eapply -d "${S}/refpolicy/policy/modules" -- ${POLICY_PATCH}
fi
# Collect only those files needed for this particular module
for i in ${MODS}; do
- modfiles="$(find ${S}/refpolicy/policy/modules -iname $i.te) $modfiles"
- modfiles="$(find ${S}/refpolicy/policy/modules -iname $i.fc) $modfiles"
- modfiles="$(find ${S}/refpolicy/policy/modules -iname $i.cil) $modfiles"
+ modfiles="$(find "${S}/refpolicy/policy/modules" -iname $i.te) $modfiles"
+ modfiles="$(find "${S}/refpolicy/policy/modules" -iname $i.fc) $modfiles"
+ modfiles="$(find "${S}/refpolicy/policy/modules" -iname $i.cil) $modfiles"
if [[ ${add_interfaces} -eq 1 ]]; then
- modfiles="$(find ${S}/refpolicy/policy/modules -iname $i.if) $modfiles"
+ modfiles="$(find "${S}/refpolicy/policy/modules" -iname $i.if) $modfiles"
fi
done
@@ -219,7 +216,7 @@ selinux-policy-2_src_compile() {
for i in ${POLICY_TYPES}; do
# Support USE flags in builds
export M4PARAM="${makeuse}"
- emake NAME=$i SHAREDIR="${ROOT%/}"/usr/share/selinux -C "${S}"/${i} || die "${i} compile failed"
+ emake NAME=$i SHAREDIR="${EPREFIX}"/usr/share/selinux -C "${S}"/${i}
done
}
@@ -255,8 +252,8 @@ selinux-policy-2_src_install() {
selinux-policy-2_pkg_postinst() {
# Set root path and don't load policy into the kernel when cross compiling
local root_opts=""
- if [[ "${ROOT%/}" != "" ]]; then
- root_opts="-p ${ROOT%/} -n"
+ if [[ -n ${ROOT} ]]; then
+ root_opts="-p ${ROOT} -n"
fi
# build up the command in the case of multiple modules
@@ -274,7 +271,7 @@ selinux-policy-2_pkg_postinst() {
einfo "Inserting the following modules into the $i module store: ${MODS}"
- cd "${ROOT%/}/usr/share/selinux/${i}" || die "Could not enter /usr/share/selinux/${i}"
+ cd "${ROOT}/usr/share/selinux/${i}" || die "Could not enter /usr/share/selinux/${i}"
for j in ${MODS} ; do
if [[ -f "${j}.pp" ]] ; then
COMMAND="${j}.pp ${COMMAND}"
@@ -306,7 +303,7 @@ selinux-policy-2_pkg_postinst() {
ewarn "If it is the last SELinux module package being installed however,"
ewarn "then it is advised to look at the error above and take appropriate"
ewarn "action since the new SELinux policies are not loaded until the"
- ewarn "command finished succesfully."
+ ewarn "command finished successfully."
ewarn ""
ewarn "To reload, run the following command from within /usr/share/selinux/${i}:"
ewarn " semodule ${COMMAND_base} -i \$(ls *.pp | grep -v base.pp)"
@@ -314,16 +311,16 @@ selinux-policy-2_pkg_postinst() {
ewarn " semodule ${COMMAND_base} -i \$(ls *.pp | grep -v base.pp | grep -v unconfined.pp)"
ewarn "depending on if you need the unconfined domain loaded as well or not."
else
- einfo "SELinux modules reloaded succesfully."
+ einfo "SELinux modules reloaded successfully."
fi
else
- einfo "SELinux modules loaded succesfully."
+ einfo "SELinux modules loaded successfully."
fi
COMMAND="";
done
# Don't relabel when cross compiling
- if [[ "${ROOT%/}" == "" ]]; then
+ if [[ -z ${ROOT} ]]; then
# Relabel depending packages
local PKGSET="";
if [[ -x /usr/bin/qdepends ]] ; then
@@ -346,8 +343,8 @@ selinux-policy-2_pkg_postrm() {
if [[ -z "${REPLACED_BY_VERSION}" ]]; then
# Set root path and don't load policy into the kernel when cross compiling
local root_opts=""
- if [[ "${ROOT%/}" != "" ]]; then
- root_opts="-p ${ROOT%/} -n"
+ if [[ -n ${ROOT} ]]; then
+ root_opts="-p ${ROOT} -n"
fi
# build up the command in the case of multiple modules
@@ -363,9 +360,12 @@ selinux-policy-2_pkg_postrm() {
if [[ $? -ne 0 ]]; then
ewarn "SELinux module unload failed.";
else
- einfo "SELinux modules unloaded succesfully."
+ einfo "SELinux modules unloaded successfully."
fi
done
fi
}
+fi
+
+EXPORT_FUNCTIONS src_unpack src_prepare src_compile src_install pkg_postinst pkg_postrm
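
A skeleton sec-policy module ebuild under the tightened EAPI 7 requirement; the module name and metadata are illustrative:

    EAPI=7

    MODS="mymodule"
    inherit selinux-policy-2

    DESCRIPTION="SELinux policy for mymodule"
    KEYWORDS="~amd64 ~x86"
    # BASEPOL defaults to ${PVR}; POLICY_PATCH/POLICY_FILES can layer local changes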
diff --git a/eclass/sgml-catalog-r1.eclass b/eclass/sgml-catalog-r1.eclass
index 9f8bb13d6095..eff6db31062f 100644
--- a/eclass/sgml-catalog-r1.eclass
+++ b/eclass/sgml-catalog-r1.eclass
@@ -1,4 +1,4 @@
-# Copyright 2019-2021 Gentoo Authors
+# Copyright 2019-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: sgml-catalog-r1.eclass
@@ -12,14 +12,13 @@
# This eclass regenerates /etc/sgml/catalog as necessary for the DocBook
# tooling. This is done via exported pkg_postinst and pkg_postrm phases.
-case ${EAPI:-0} in
+case ${EAPI} in
7) ;;
- *) die "Unsupported EAPI=${EAPI} for ${ECLASS}";;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS pkg_postinst pkg_postrm
-
-if [[ ! ${_SGML_CATALOG_R1} ]]; then
+if [[ -z ${_SGML_CATALOG_R1_ECLASS} ]]; then
+_SGML_CATALOG_R1_ECLASS=1
if [[ ${CATEGORY}/${PN} != app-text/sgml-common ]]; then
RDEPEND=">=app-text/sgml-common-0.6.3-r7"
@@ -36,7 +35,7 @@ sgml-catalog-r1_update_catalog() {
if [[ ${#cats[@]} -gt 0 ]]; then
ebegin "Updating ${EROOT}/etc/sgml/catalog"
- printf 'CATALOG "%s"\n' "${cats[@]}" > "${T}"/catalog &&
+ printf 'CATALOG "%s"\n' "${cats[@]#${ROOT}}" > "${T}"/catalog &&
mv "${T}"/catalog "${EROOT}"/etc/sgml/catalog
eend "${?}"
else
@@ -64,5 +63,6 @@ sgml-catalog-r1_pkg_postrm() {
sgml-catalog-r1_update_env
}
-_SGML_CATALOG_R1=1
fi
+
+EXPORT_FUNCTIONS pkg_postinst pkg_postrm
diff --git a/eclass/shell-completion.eclass b/eclass/shell-completion.eclass
new file mode 100644
index 000000000000..d582028847b4
--- /dev/null
+++ b/eclass/shell-completion.eclass
@@ -0,0 +1,115 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+# @ECLASS: shell-completion.eclass
+# @MAINTAINER:
+# Jonas Frei <freijon@pm.me>
+# Florian Schmaus <flow@gentoo.org>
+# @AUTHOR:
+# Alfred Wingate <parona@protonmail.com>
+# @SUPPORTED_EAPIS: 8
+# @PROVIDES: bash-completion-r1
+# @BLURB: a few quick functions to install various shell completion files
+# @DESCRIPTION:
+# This eclass provides a standardised way to install shell completions
+# for popular shells. It inherits the already widely adopted
+# 'bash-completion-r1', thus extending on its functionality.
+
+case ${EAPI} in
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported"
+esac
+
+if [[ ! ${_SHELL_COMPLETION_ECLASS} ]]; then
+_SHELL_COMPLETION_ECLASS=1
+
+# Extend bash-completion-r1
+inherit bash-completion-r1
+
+# @FUNCTION: _shell-completion_get_fishcompdir
+# @INTERNAL
+# @RETURN: unprefixed fish completions directory
+_shell-completion_get_fishcompdir() {
+ echo "/usr/share/fish/vendor_completions.d"
+}
+
+# @FUNCTION: _shell-completion_get_zshcompdir
+# @INTERNAL
+# @RETURN: unprefixed zsh completions directory
+_shell-completion_get_zshcompdir() {
+ echo "/usr/share/zsh/site-functions"
+}
+
+# @FUNCTION: get_fishcompdir
+# @RETURN: the fish completions directory (with EPREFIX)
+get_fishcompdir() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ echo "${EPREFIX}$(_shell-completion_get_fishcompdir)"
+}
+
+# @FUNCTION: get_zshcompdir
+# @RETURN: the zsh completions directory (with EPREFIX)
+get_zshcompdir() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ echo "${EPREFIX}$(_shell-completion_get_zshcompdir)"
+}
+
+# @FUNCTION: dofishcomp
+# @USAGE: <file...>
+# @DESCRIPTION:
+# Install fish completion files passed as args.
+dofishcomp() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ (
+ insopts -m 0644
+ insinto "$(_shell-completion_get_fishcompdir)"
+ doins "${@}"
+ )
+}
+
+# @FUNCTION: dozshcomp
+# @USAGE: <file...>
+# @DESCRIPTION:
+# Install zsh completion files passed as args.
+dozshcomp() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ (
+ insopts -m 0644
+ insinto "$(_shell-completion_get_zshcompdir)"
+ doins "${@}"
+ )
+}
+
+# @FUNCTION: newfishcomp
+# @USAGE: <file> <newname>
+# @DESCRIPTION:
+# Install fish file under a new name.
+newfishcomp() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ (
+ insopts -m 0644
+ insinto "$(_shell-completion_get_fishcompdir)"
+ newins "${@}"
+ )
+}
+
+# @FUNCTION: newzshcomp
+# @USAGE: <file> <newname>
+# @DESCRIPTION:
+# Install zsh file under a new name.
+newzshcomp() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ (
+ insopts -m 0644
+ insinto "$(_shell-completion_get_zshcompdir)"
+ newins "${@}"
+ )
+}
+
+fi
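
A minimal sketch installing completions for all three shells; file names are illustrative, and dobashcomp comes from the inherited bash-completion-r1:

    src_install() {
        default
        dobashcomp completions/foo.bash
        dofishcomp completions/foo.fish
        # install under a new name, e.g. when upstream ships a generically named file
        newzshcomp completions/zsh-completion _foo
    }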
diff --git a/eclass/ssl-cert.eclass b/eclass/ssl-cert.eclass
index 4b016ea82c87..240f9f12cc91 100644
--- a/eclass/ssl-cert.eclass
+++ b/eclass/ssl-cert.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ssl-cert.eclass
@@ -6,7 +6,7 @@
# maintainer-needed@gentoo.org
# @AUTHOR:
# Max Kalika <max@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Eclass for SSL certificates
# @DESCRIPTION:
# This eclass implements a standard installation procedure for installing
@@ -14,50 +14,39 @@
# @EXAMPLE:
# "install_cert /foo/bar" installs ${ROOT}/foo/bar.{key,csr,crt,pem}
-case "${EAPI}" in
- 6|7|8) ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_SSL_CERT_ECLASS} ]]; then
+if [[ -z ${_SSL_CERT_ECLASS} ]]; then
_SSL_CERT_ECLASS=1
# @ECLASS_VARIABLE: SSL_CERT_MANDATORY
# @PRE_INHERIT
# @DESCRIPTION:
# Set to non zero if ssl-cert is mandatory for ebuild.
-: ${SSL_CERT_MANDATORY:=0}
+: "${SSL_CERT_MANDATORY:=0}"
# @ECLASS_VARIABLE: SSL_CERT_USE
# @PRE_INHERIT
# @DESCRIPTION:
# Use flag to append dependency to.
-: ${SSL_CERT_USE:=ssl}
+: "${SSL_CERT_USE:=ssl}"
# @ECLASS_VARIABLE: SSL_DEPS_SKIP
# @PRE_INHERIT
# @DESCRIPTION:
# Set to non zero to skip adding to DEPEND and IUSE.
-: ${SSL_DEPS_SKIP:=0}
+: "${SSL_DEPS_SKIP:=0}"
if [[ "${SSL_DEPS_SKIP}" == "0" ]]; then
if [[ "${SSL_CERT_MANDATORY}" == "0" ]]; then
- SSL_DEPEND="${SSL_CERT_USE}? ( dev-libs/openssl:0 )"
+ BDEPEND="${SSL_CERT_USE}? ( dev-libs/openssl )"
IUSE="${SSL_CERT_USE}"
else
- SSL_DEPEND="dev-libs/openssl:0"
+ BDEPEND="dev-libs/openssl"
fi
-
- case "${EAPI}" in
- 6)
- DEPEND="${SSL_DEPEND}"
- ;;
- *)
- BDEPEND="${SSL_DEPEND}"
- ;;
- esac
-
- unset SSL_DEPEND
fi
# @FUNCTION: gen_cnf
diff --git a/eclass/stardict.eclass b/eclass/stardict.eclass
index 74a93b3f7c56..080c01f92470 100644
--- a/eclass/stardict.eclass
+++ b/eclass/stardict.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: stardict.eclass
@@ -6,7 +6,7 @@
# No maintainer <maintainer-needed@gentoo.org>
# @AUTHOR:
# Alastair Tse <liquidx@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 8
# @BLURB: Convenience class to do stardict dictionary installations.
# @DESCRIPTION:
# Usage:
@@ -14,42 +14,51 @@
# * FROM_LANG - From this language
# * TO_LANG - To this language
# * DICT_PREFIX - SRC_URI prefix, like "dictd_www.mova.org_"
-# * DICT_SUFFIX - SRC_URI after the prefix.
+# * DICT_SUFFIX - SRC_URI after the prefix.
-case ${EAPI:-0} in
- [67]) ;;
+case ${EAPI} in
+ 8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_compile src_install
-
if [[ -z ${_STARDICT_ECLASS} ]] ; then
_STARDICT_ECLASS=1
+inherit edo
+
RESTRICT="strip"
-[ -z "${DICT_SUFFIX}" ] && DICT_SUFFIX=${PN#stardict-[[:lower:]]*-}
-[ -z "${DICT_P}" ] && DICT_P=stardict-${DICT_PREFIX}${DICT_SUFFIX}-${PV}
+# @ECLASS_VARIABLE: DICT_SUFFIX
+# @PRE_INHERIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Suffix used for dictionaries.
+: "${DICT_SUFFIX:=${PN#stardict-[[:lower:]]*-}}"
+
+# @ECLASS_VARIABLE: DICT_P
+# @PRE_INHERIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The filestem used for downloading dictionaries from SourceForge.
+: "${DICT_P:=stardict-${DICT_PREFIX}${DICT_SUFFIX}-${PV}}"
-if [ -n "${FROM_LANG}" -a -n "${TO_LANG}" ]; then
+: "${DESCRIPTION:="Another Stardict Dictionary"}"
+if [[ -n ${FROM_LANG} && -n ${TO_LANG} ]]; then
DESCRIPTION="Stardict Dictionary ${FROM_LANG} to ${TO_LANG}"
-elif [ -z "${DESCRIPTION}" ]; then
- DESCRIPTION="Another Stardict Dictionary"
fi
HOMEPAGE="http://stardict.sourceforge.net/"
SRC_URI="mirror://sourceforge/stardict/${DICT_P}.tar.bz2"
-S="${WORKDIR}"/${DICT_P}
+S="${WORKDIR}/${DICT_P}"
LICENSE="GPL-2"
SLOT="0"
IUSE="+zlib"
-DEPEND="
+BDEPEND="
|| (
>=app-text/stardict-2.4.2
app-text/sdcv
- app-text/goldendict
)
zlib? (
app-arch/gzip
@@ -57,12 +66,13 @@ DEPEND="
)"
stardict_src_compile() {
+ local file
if use zlib; then
for file in *.idx; do
- [[ -f $file ]] && gzip ${file}
+ [[ -f ${file} ]] && edo gzip "${file}"
done
for file in *.dict; do
- [[ -f $file ]] && dictzip ${file}
+ [[ -f ${file} ]] && edo dictzip "${file}"
done
fi
}
@@ -75,3 +85,5 @@ stardict_src_install() {
}
fi
+
+EXPORT_FUNCTIONS src_compile src_install
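
A skeleton dictionary ebuild for the reworked eclass; the language pair is illustrative and DICT_PREFIX reuses the example value from the documentation above:

    EAPI=8

    FROM_LANG="English"
    TO_LANG="Russian"
    DICT_PREFIX="dictd_www.mova.org_"

    inherit stardict

    KEYWORDS="~amd64 ~x86"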
diff --git a/eclass/subversion.eclass b/eclass/subversion.eclass
index 349878618427..7a787a438ad0 100644
--- a/eclass/subversion.eclass
+++ b/eclass/subversion.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: subversion.eclass
@@ -6,28 +6,26 @@
# Akinori Hattori <hattya@gentoo.org>
# @AUTHOR:
# Original Author: Akinori Hattori <hattya@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Fetch software sources from subversion repositories
# @DESCRIPTION:
# The subversion eclass provides functions to fetch software sources
# from subversion repositories.
case ${EAPI} in
- 6|7|8) inherit estack ;;
- *) die "${ECLASS}: EAPI ${EAPI:-0} is not supported" ;;
+ 7|8) inherit estack ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ -z ${_SUBVERSION_ECLASS} ]]; then
+_SUBVERSION_ECLASS=1
+
PROPERTIES+=" live"
-DEPEND="
+BDEPEND="
dev-vcs/subversion[http(+)]
net-misc/rsync"
-case ${EAPI} in
- 6) ;;
- *) BDEPEND="${DEPEND}"; DEPEND="" ;;
-esac
-
# @ECLASS_VARIABLE: ESVN_STORE_DIR
# @USER_VARIABLE
# @DESCRIPTION:
@@ -147,7 +145,7 @@ ESVN_UMASK="${ESVN_UMASK:-${EVCS_UMASK}}"
# @DESCRIPTION:
# Set the minimum number of hours between svn up'ing in any given svn module. This is particularly
# useful for split KDE ebuilds where we want to ensure that all submodules are compiled for the same
-# revision. It should also be kept user overrideable.
+# revision. It should also be kept user overridable.
ESVN_UP_FREQ="${ESVN_UP_FREQ:=}"
# @ECLASS_VARIABLE: ESCM_LOGDIR
@@ -177,7 +175,7 @@ subversion_fetch() {
die "${ECLASS}: ESVN_REPO_URI (or specified URI) is empty."
fi
- [[ -n "${ESVN_REVISION}" ]] && revision="${ESVN_REVISION}"
+ [[ -n ${ESVN_REVISION} ]] && revision="${ESVN_REVISION}"
# check for the scheme
local scheme="${repo_uri%%:*}"
@@ -196,7 +194,7 @@ subversion_fetch() {
addread "/etc/subversion"
addwrite "${ESVN_STORE_DIR}"
- if [[ -n "${ESVN_UMASK}" ]]; then
+ if [[ -n ${ESVN_UMASK} ]]; then
eumask_push "${ESVN_UMASK}"
fi
@@ -210,9 +208,9 @@ subversion_fetch() {
local wc_path="$(subversion__get_wc_path "${repo_uri}")"
local options="${ESVN_OPTIONS} --config-dir ${ESVN_STORE_DIR}/.subversion"
- [[ -n "${revision}" ]] && options="${options} -r ${revision}"
+ [[ -n ${revision} ]] && options="${options} -r ${revision}"
- if [[ "${ESVN_OPTIONS}" = *-r* ]]; then
+ if [[ ${ESVN_OPTIONS} == *-r* ]]; then
ewarn "\${ESVN_OPTIONS} contains -r, this usage is unsupported. Please"
ewarn "see \${ESVN_REPO_URI}"
fi
@@ -237,7 +235,7 @@ subversion_fetch() {
mkdir -m 775 -p "${ESVN_PROJECT}" || die "${ECLASS}: can't mkdir ${ESVN_PROJECT}."
cd "${ESVN_PROJECT}" || die "${ECLASS}: can't chdir to ${ESVN_PROJECT}"
- if [[ -n "${ESVN_USER}" ]]; then
+ if [[ -n ${ESVN_USER} ]]; then
${ESVN_FETCH_CMD} ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" "${repo_uri}" || die "${ECLASS}: can't fetch to ${wc_path} from ${repo_uri}."
else
${ESVN_FETCH_CMD} ${options} "${repo_uri}" || die "${ECLASS}: can't fetch to ${wc_path} from ${repo_uri}."
@@ -276,13 +274,13 @@ subversion_fetch() {
einfo " new UUID: $(subversion__svn_info "${repo_uri}" "Repository UUID")"
einfo " repository: ${repo_uri}${revision:+@}${revision}"
- rm -fr "${ESVN_PROJECT}" || die
+ rm -rf "${ESVN_PROJECT}" || die
debug-print "${FUNCNAME}: ${ESVN_FETCH_CMD} ${options} ${repo_uri}"
mkdir -m 775 -p "${ESVN_PROJECT}" || die "${ECLASS}: can't mkdir ${ESVN_PROJECT}."
cd "${ESVN_PROJECT}" || die "${ECLASS}: can't chdir to ${ESVN_PROJECT}"
- if [[ -n "${ESVN_USER}" ]]; then
+ if [[ -n ${ESVN_USER} ]]; then
${ESVN_FETCH_CMD} ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" "${repo_uri}" || die "${ECLASS}: can't fetch to ${wc_path} from ${repo_uri}."
else
${ESVN_FETCH_CMD} ${options} "${repo_uri}" || die "${ECLASS}: can't fetch to ${wc_path} from ${repo_uri}."
@@ -295,7 +293,7 @@ subversion_fetch() {
debug-print "${FUNCNAME}: ${ESVN_SWITCH_CMD} ${options} ${repo_uri}"
cd "${wc_path}" || die "${ECLASS}: can't chdir to ${wc_path}"
- if [[ -n "${ESVN_USER}" ]]; then
+ if [[ -n ${ESVN_USER} ]]; then
${ESVN_SWITCH_CMD} ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" ${repo_uri} || die "${ECLASS}: can't update ${wc_path} from ${repo_uri}."
else
${ESVN_SWITCH_CMD} ${options} ${repo_uri} || die "${ECLASS}: can't update ${wc_path} from ${repo_uri}."
@@ -308,7 +306,7 @@ subversion_fetch() {
debug-print "${FUNCNAME}: ${ESVN_UPDATE_CMD} ${options}"
cd "${wc_path}" || die "${ECLASS}: can't chdir to ${wc_path}"
- if [[ -n "${ESVN_USER}" ]]; then
+ if [[ -n ${ESVN_USER} ]]; then
${ESVN_UPDATE_CMD} ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" || die "${ECLASS}: can't update ${wc_path} from ${repo_uri}."
else
${ESVN_UPDATE_CMD} ${options} || die "${ECLASS}: can't update ${wc_path} from ${repo_uri}."
@@ -320,7 +318,7 @@ subversion_fetch() {
fi
fi
- if [[ -n "${ESVN_UMASK}" ]]; then
+ if [[ -n ${ESVN_UMASK} ]]; then
eumask_pop
fi
@@ -384,7 +382,7 @@ subversion_src_unpack() {
subversion_pkg_preinst() {
local pkgdate=$(date "+%Y%m%d %H:%M:%S")
if [[ -n ${ESCM_LOGDIR} ]]; then
- local dir="${EROOT%/}${ESCM_LOGDIR}/${CATEGORY}"
+ local dir="${EROOT}${ESCM_LOGDIR}/${CATEGORY}"
if [[ ! -d ${dir} ]]; then
mkdir -p "${dir}" || eerror "Failed to create '${dir}' for logging svn revision"
fi
@@ -450,7 +448,7 @@ subversion__get_peg_revision() {
debug-print "${FUNCNAME}: repo_uri = ${repo_uri}"
# repo_uri has peg revision?
- if [[ ${repo_uri} = *@* ]]; then
+ if [[ ${repo_uri} == *@* ]]; then
peg_rev="${repo_uri##*@}"
debug-print "${FUNCNAME}: peg_rev = ${peg_rev}"
else
@@ -460,4 +458,6 @@ subversion__get_peg_revision() {
echo "${peg_rev}"
}
+fi
+
EXPORT_FUNCTIONS src_unpack pkg_preinst
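
A minimal live-ebuild sketch for the updated eclass; the repository URI is a placeholder:

    EAPI=8
    inherit subversion

    # the exported src_unpack checks this out; ESVN_REVISION may pin a revision
    ESVN_REPO_URI="https://svn.example.org/myproject/trunk"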
diff --git a/eclass/sword-module.eclass b/eclass/sword-module.eclass
deleted file mode 100644
index 565110d2dd0b..000000000000
--- a/eclass/sword-module.eclass
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright 1999-2021 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-# @ECLASS: sword-module.eclass
-# @MAINTAINER:
-# Marek Szuba <marecki@gentoo.org>
-# @SUPPORTED_EAPIS: 7 8
-# @BLURB: Simplify installation of SWORD modules
-# @DESCRIPTION:
-# This eclass provides dependencies, ebuild environment and the src_install
-# function common to all app-text/sword modules published by the SWORD Project.
-#
-# Note that as of 2021-06-16 module archives published by SWORD are still
-# not versioned and it is necessary to look at respective module pages in
-# order to see what versions the currently available files are. Once
-# a module file has been replicated to the Gentoo mirror network it will be
-# versioned and remain available even after upstream has changed their
-# version, however users not using mirrors will encounter hash conflicts
-# on updated modules. Should that happen, please notify the relevant
-# package maintainers that a new version is available.
-#
-# @EXAMPLE:
-# sword-Personal-1.0.ebuild, a typical ebuild using sword-module.eclass:
-#
-# @CODE
-# EAPI=8
-#
-# SWORD_MINIMUM_VERSION="1.5.1a"
-#
-# inherit sword-module
-#
-# DESCRIPTION="SWORD module for storing one's own commentary"
-# HOMEPAGE="https://crosswire.org/sword/modules/ModInfo.jsp?modName=Personal"
-# LICENSE="public-domain"
-# KEYWORDS="~amd64 ~ppc ~x86"
-#
-# @CODE
-
-case ${EAPI:-0} in
- 0|1|2|3|4|5|6)
- die "Unsupported EAPI=${EAPI} (too old) for ${ECLASS}"
- ;;
- 7|8)
- ;;
- *)
- die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
- ;;
-esac
-
-# @ECLASS_VARIABLE: SWORD_MINIMUM_VERSION
-# @DEFAULT_UNSET
-# @PRE_INHERIT
-# @DESCRIPTION:
-# If set to a non-null value, specifies the minimum version of app-text/sword
-# the module requires. This will be included in RDEPEND. If null or unset,
-# the dependency will be unversioned.
-# Needs to be set before the inherit line.
-
-# @ECLASS_VARIABLE: SWORD_MODULE
-# @PRE_INHERIT
-# @DESCRIPTION:
-# Case-sensitive name of the SWORD-Project module to install. If unset
-# or null, use the name produced by removing the prefix 'sword-' from PN.
-# Needs to be set before the inherit line.
-: ${SWORD_MODULE:=${PN#sword-}}
-
-EXPORT_FUNCTIONS src_install
-
-# Unless overridden at ebuild level, append version to the name of the file
-# fetched from upstream and let the Gentoo mirror network take care of
-# persisting the versioned archive.
-SRC_URI="https://crosswire.org/ftpmirror/pub/sword/packages/rawzip/${SWORD_MODULE}.zip -> ${SWORD_MODULE}-${PV}.zip"
-
-SLOT="0"
-
-# Module archives contain no top-level directory.
-S="${WORKDIR}"
-
-if [[ ${SWORD_MINIMUM_VERSION} ]]; then
- RDEPEND=">=app-text/sword-${SWORD_MINIMUM_VERSION}"
-else
- RDEPEND="app-text/sword"
-fi
-
-BDEPEND="app-arch/unzip"
-
-# @FUNCTION: sword-module_src_install
-# @DESCRIPTION:
-# Install all the module files into directories used by app-text/sword.
-sword-module_src_install() {
- insinto /usr/share/sword/modules
- doins -r modules/*
- insinto /usr/share/sword/mods.d
- doins mods.d/*
-}
diff --git a/eclass/systemd.eclass b/eclass/systemd.eclass
index 7731bede094e..03d6a82fd310 100644
--- a/eclass/systemd.eclass
+++ b/eclass/systemd.eclass
@@ -1,4 +1,4 @@
-# Copyright 2011-2021 Gentoo Authors
+# Copyright 2011-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: systemd.eclass
@@ -53,20 +53,21 @@ _systemd_get_dir() {
if $(tc-getPKG_CONFIG) --exists systemd; then
d=$($(tc-getPKG_CONFIG) --variable="${variable}" systemd) || die
- d=${d#${EPREFIX}}
else
- d=${fallback}
+ d="${EPREFIX}${fallback}"
fi
echo "${d}"
}
-# @FUNCTION: _systemd_get_systemunitdir
+# @FUNCTION: _systemd_unprefix
+# @USAGE: <function-name>
# @INTERNAL
# @DESCRIPTION:
-# Get unprefixed unitdir.
-_systemd_get_systemunitdir() {
- _systemd_get_dir systemdsystemunitdir /lib/systemd/system
+# Calls the specified function and removes ${EPREFIX} from the result.
+_systemd_unprefix() {
+ local d=$("${@}")
+ echo "${d#"${EPREFIX}"}"
}
# @FUNCTION: systemd_get_systemunitdir
@@ -77,7 +78,7 @@ _systemd_get_systemunitdir() {
systemd_get_systemunitdir() {
debug-print-function ${FUNCNAME} "${@}"
- echo "${EPREFIX}$(_systemd_get_systemunitdir)"
+ _systemd_get_dir systemdsystemunitdir /lib/systemd/system
}
# @FUNCTION: systemd_get_unitdir
@@ -89,14 +90,6 @@ systemd_get_unitdir() {
systemd_get_systemunitdir
}
-# @FUNCTION: _systemd_get_userunitdir
-# @INTERNAL
-# @DESCRIPTION:
-# Get unprefixed userunitdir.
-_systemd_get_userunitdir() {
- _systemd_get_dir systemduserunitdir /usr/lib/systemd/user
-}
-
# @FUNCTION: systemd_get_userunitdir
# @DESCRIPTION:
# Output the path for the systemd user unit directory (not including
@@ -105,15 +98,7 @@ _systemd_get_userunitdir() {
systemd_get_userunitdir() {
debug-print-function ${FUNCNAME} "${@}"
- echo "${EPREFIX}$(_systemd_get_userunitdir)"
-}
-
-# @FUNCTION: _systemd_get_utildir
-# @INTERNAL
-# @DESCRIPTION:
-# Get unprefixed utildir.
-_systemd_get_utildir() {
- _systemd_get_dir systemdutildir /lib/systemd
+ _systemd_get_dir systemduserunitdir /usr/lib/systemd/user
}
# @FUNCTION: systemd_get_utildir
@@ -124,15 +109,7 @@ _systemd_get_utildir() {
systemd_get_utildir() {
debug-print-function ${FUNCNAME} "${@}"
- echo "${EPREFIX}$(_systemd_get_utildir)"
-}
-
-# @FUNCTION: _systemd_get_systemgeneratordir
-# @INTERNAL
-# @DESCRIPTION:
-# Get unprefixed systemgeneratordir.
-_systemd_get_systemgeneratordir() {
- _systemd_get_dir systemdsystemgeneratordir /lib/systemd/system-generators
+ _systemd_get_dir systemdutildir /lib/systemd
}
# @FUNCTION: systemd_get_systemgeneratordir
@@ -142,15 +119,7 @@ _systemd_get_systemgeneratordir() {
systemd_get_systemgeneratordir() {
debug-print-function ${FUNCNAME} "${@}"
- echo "${EPREFIX}$(_systemd_get_systemgeneratordir)"
-}
-
-# @FUNCTION: _systemd_get_systempresetdir
-# @INTERNAL
-# @DESCRIPTION:
-# Get unprefixed systempresetdir.
-_systemd_get_systempresetdir() {
- _systemd_get_dir systemdsystempresetdir /lib/systemd/system-preset
+ _systemd_get_dir systemdsystemgeneratordir /lib/systemd/system-generators
}
# @FUNCTION: systemd_get_systempresetdir
@@ -160,7 +129,15 @@ _systemd_get_systempresetdir() {
systemd_get_systempresetdir() {
debug-print-function ${FUNCNAME} "${@}"
- echo "${EPREFIX}$(_systemd_get_systempresetdir)"
+ _systemd_get_dir systemdsystempresetdir /lib/systemd/system-preset
+}
+
+# @FUNCTION: systemd_get_sleepdir
+# @DESCRIPTION:
+# Output the path for the system sleep directory.
+systemd_get_sleepdir() {
+ debug-print-function ${FUNCNAME} "${@}"
+ _systemd_get_dir systemdsleepdir /lib/systemd/system-sleep
}
# @FUNCTION: systemd_dounit
@@ -172,7 +149,7 @@ systemd_dounit() {
(
insopts -m 0644
- insinto "$(_systemd_get_systemunitdir)"
+ insinto "$(_systemd_unprefix systemd_get_systemunitdir)"
doins "${@}"
)
}
@@ -186,7 +163,7 @@ systemd_newunit() {
(
insopts -m 0644
- insinto "$(_systemd_get_systemunitdir)"
+ insinto "$(_systemd_unprefix systemd_get_systemunitdir)"
newins "${@}"
)
}
@@ -200,7 +177,7 @@ systemd_douserunit() {
(
insopts -m 0644
- insinto "$(_systemd_get_userunitdir)"
+ insinto "$(_systemd_unprefix systemd_get_userunitdir)"
doins "${@}"
)
}
@@ -215,7 +192,7 @@ systemd_newuserunit() {
(
insopts -m 0644
- insinto "$(_systemd_get_userunitdir)"
+ insinto "$(_systemd_unprefix systemd_get_userunitdir)"
newins "${@}"
)
}
@@ -250,6 +227,48 @@ systemd_install_serviced() {
)
}
+# @FUNCTION: systemd_install_dropin
+# @USAGE: [--user] <unit> <conf-file>
+# @DESCRIPTION:
+# Install <conf-file> as the dropin file <unit>.d/00gentoo.conf,
+# overriding the settings of <unit>.
+# Defaults to system unit dropins, unless --user is provided,
+# which causes the dropin to be installed for user units.
+# The required argument <conf-file> may be '-', in which case the
+# file is read from stdin and <unit> must also be specified.
+# @EXAMPLE:
+# systemd_install_dropin foo.service "${FILESDIR}/foo.service.conf"
+# systemd_install_dropin foo.service - <<-EOF
+# [Service]
+# RestartSec=120
+# EOF
+systemd_install_dropin() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local basedir
+ if [[ $# -ge 1 ]] && [[ $1 == "--user" ]]; then
+ basedir=$(_systemd_unprefix systemd_get_userunitdir)
+ shift 1
+ else
+ basedir=$(_systemd_unprefix systemd_get_systemunitdir)
+ fi
+
+ local unit=${1}
+ local src=${2}
+
+ [[ ${unit} ]] || die "No unit specified"
+ [[ ${src} ]] || die "No conf file specified"
+
+ # avoid potentially common mistake
+ [[ ${unit} == *.d ]] && die "Unit ${unit} must not have .d suffix"
+
+ (
+ insopts -m 0644
+ insinto "${basedir}/${unit}".d
+ newins "${src}" 00gentoo.conf
+ )
+}
+
# @FUNCTION: systemd_enable_service
# @USAGE: <target> <service>
# @DESCRIPTION:
@@ -262,7 +281,7 @@ systemd_enable_service() {
local target=${1}
local service=${2}
- local ud=$(_systemd_get_systemunitdir)
+ local ud=$(_systemd_unprefix systemd_get_systemunitdir)
local destname=${service##*/}
dodir "${ud}"/"${target}".wants && \
@@ -306,7 +325,7 @@ systemd_enable_ntpunit() {
(
insopts -m 0644
- insinto "$(_systemd_get_utildir)"/ntp-units.d
+ insinto "$(_systemd_unprefix systemd_get_utildir)"/ntp-units.d
doins "${T}"/${ntpunit_name}.list
)
local ret=${?}
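
A short sketch combining the existing unit helpers with the new drop-in helper; the unit and drop-in file names are illustrative:

    src_install() {
        systemd_dounit "${FILESDIR}/foo.service"
        # override selected settings via foo.service.d/00gentoo.conf
        systemd_install_dropin foo.service "${FILESDIR}/foo.service.conf"
        # the new systemd_get_sleepdir returns the prefixed system-sleep hook dir
        einfo "sleep hooks: $(systemd_get_sleepdir)"
    }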
diff --git a/eclass/tests/Makefile b/eclass/tests/Makefile
new file mode 100644
index 000000000000..ee4a454912c3
--- /dev/null
+++ b/eclass/tests/Makefile
@@ -0,0 +1,27 @@
+SH_FILES := $(wildcard *.sh)
+TEST_FILES := $(filter-out tests-common.sh, $(SH_FILES))
+TEST_OK_FILES := $(patsubst %.sh, .%.sh.ok, $(TEST_FILES))
+
+# We cache a successful test result if the testfile itself did not
+# change (%.sh) and the contents of the eclass/ directory did not
+# change (.eclasssum).
+.%.sh.ok: %.sh .eclasssum
+ ./$<
+ touch $@
+
+.PHONY: test
+test: $(TEST_OK_FILES)
+
+.PHONY: force
+.ONESHELL:
+.eclasssum: SHELL = /bin/bash
+.eclasssum: force
+ set -euo pipefail
+ find .. -maxdepth 1 -type f -name "*.eclass" \
+ -exec stat --format="%n %y" \{} \+ |\
+ sort |\
+ cksum - > $@.cur
+ trap "rm -f $@.cur" EXIT
+ if ! cmp --silent $@.cur $@; then
+ mv $@.cur $@
+ fi
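
The intended invocation, assuming the usual repository checkout layout (path illustrative):

    # run all eclass self-tests; results are cached in .<name>.sh.ok and
    # invalidated whenever any eclass or the test itself changes
    cd eclass/tests && make test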
diff --git a/eclass/tests/cargo-bench.sh b/eclass/tests/cargo-bench.sh
new file mode 100755
index 000000000000..d30b04569905
--- /dev/null
+++ b/eclass/tests/cargo-bench.sh
@@ -0,0 +1,114 @@
+#!/bin/bash
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+source tests-common.sh || exit
+
+export LC_ALL=C
+
+ITERATIONS=1000
+RUNS=3
+
+doit() {
+ for (( i = 0; i < ITERATIONS; i++ )); do
+ _cargo_set_crate_uris "${CRATES}"
+ SRC_URI="
+ ${CARGO_CRATE_URIS}
+ "
+ done
+}
+
+timeit() {
+ local real=()
+ local user=()
+ local x vr avg
+
+ for (( x = 0; x < RUNS; x++ )); do
+ while read tt tv; do
+ case ${tt} in
+ real) real+=( ${tv} );;
+ user) user+=( ${tv} );;
+ esac
+ done < <( ( time -p doit ) 2>&1 )
+ done
+
+ [[ ${#real[@]} == ${RUNS} ]] || die "Did not get ${RUNS} real times"
+ [[ ${#user[@]} == ${RUNS} ]] || die "Did not get ${RUNS} user times"
+
+ local xr avg
+ for x in real user; do
+ xr="${x}[*]"
+ avg=$(dc -S 3 -e "${ITERATIONS} ${RUNS} * ${!xr} + + / p")
+
+ printf '%s %4.0f it/s\n' "${x}" "${avg}"
+ done
+}
+
+# taken from cryptography-41.0.1
+CRATES="
+ Inflector@0.11.4
+ aliasable@0.1.3
+ asn1@0.15.2
+ asn1_derive@0.15.2
+ autocfg@1.1.0
+ base64@0.13.1
+ bitflags@1.3.2
+ cc@1.0.79
+ cfg-if@1.0.0
+ foreign-types@0.3.2
+ foreign-types-shared@0.1.1
+ indoc@1.0.9
+ libc@0.2.144
+ lock_api@0.4.9
+ memoffset@0.8.0
+ once_cell@1.17.2
+ openssl@0.10.54
+ openssl-macros@0.1.1
+ openssl-sys@0.9.88
+ ouroboros@0.15.6
+ ouroboros_macro@0.15.6
+ parking_lot@0.12.1
+ parking_lot_core@0.9.7
+ pem@1.1.1
+ pkg-config@0.3.27
+ proc-macro-error@1.0.4
+ proc-macro-error-attr@1.0.4
+ proc-macro2@1.0.59
+ pyo3@0.18.3
+ pyo3-build-config@0.18.3
+ pyo3-ffi@0.18.3
+ pyo3-macros@0.18.3
+ pyo3-macros-backend@0.18.3
+ quote@1.0.28
+ redox_syscall@0.2.16
+ scopeguard@1.1.0
+ smallvec@1.10.0
+ syn@1.0.109
+ syn@2.0.18
+ target-lexicon@0.12.7
+ unicode-ident@1.0.9
+ unindent@0.1.11
+ vcpkg@0.2.15
+ version_check@0.9.4
+ windows-sys@0.45.0
+ windows-targets@0.42.2
+ windows_aarch64_gnullvm@0.42.2
+ windows_aarch64_msvc@0.42.2
+ windows_i686_gnu@0.42.2
+ windows_i686_msvc@0.42.2
+ windows_x86_64_gnu@0.42.2
+ windows_x86_64_gnullvm@0.42.2
+ windows_x86_64_msvc@0.42.2
+"
+
+inherit cargo
+
+einfo "CRATES with '@' separator"
+timeit
+
+einfo "CRATES with '-' separator"
+CRATES=${CRATES//@/-}
+timeit
+
+texit
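
For context, a hedged sketch of the ebuild-side pattern this benchmark exercises; the crate list is illustrative:

    CRATES="
        foo@1.0.0
        bar@2.1.3
    "

    inherit cargo

    # cargo.eclass expands the CRATES list into fetchable distfile URIs
    SRC_URI="${CARGO_CRATE_URIS}"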
diff --git a/eclass/tests/dist-kernel-utils.sh b/eclass/tests/dist-kernel-utils.sh
new file mode 100755
index 000000000000..7f04f55ce302
--- /dev/null
+++ b/eclass/tests/dist-kernel-utils.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+# Copyright 2022 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+source tests-common.sh || exit
+
+inherit dist-kernel-utils
+# TODO: hack because tests-common doesn't implement ver_cut
+EAPI=6 inherit eapi7-ver
+
+test_PV_to_KV() {
+ local kv=${1}
+ local exp_PV=${2}
+
+ tbegin "dist-kernel_PV_to_KV ${kv} -> ${exp_PV}"
+ local val=$(dist-kernel_PV_to_KV "${kv}")
+ [[ ${val} == ${exp_PV} ]]
+ tend $?
+}
+
+test_PV_to_KV 6.0_rc1 6.0.0-rc1
+test_PV_to_KV 6.0 6.0.0
+test_PV_to_KV 6.0.1_rc1 6.0.1-rc1
+test_PV_to_KV 6.0.1 6.0.1
+
+texit
diff --git a/eclass/tests/distutils-r1.sh b/eclass/tests/distutils-r1.sh
deleted file mode 100755
index a42d4cc4641a..000000000000
--- a/eclass/tests/distutils-r1.sh
+++ /dev/null
@@ -1,142 +0,0 @@
-#!/bin/bash
-# Copyright 1999-2020 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=7
-PYTHON_COMPAT=( python3_8 )
-source tests-common.sh || exit
-
-test-phase_name_free() {
- local ph=${1}
-
- if declare -f "${ph}"; then
- die "${ph} function declared while name reserved for phase!"
- fi
- if declare -f "${ph}_all"; then
- die "${ph}_all function declared while name reserved for phase!"
- fi
-}
-
-test-distutils_enable_tests() {
- local runner=${1}
- local exp_IUSE=${2}
- local exp_RESTRICT=${3}
- local exp_BDEPEND=${4}
-
- local IUSE=${IUSE}
- local RESTRICT=${RESTRICT}
- local BDEPEND=${BDEPEND}
-
- tbegin "${runner}"
-
- distutils_enable_tests "${runner}"
-
- local ret var val
- for var in IUSE RESTRICT BDEPEND; do
- local exp_var=exp_${var}
- # (this normalizes whitespace)
- read -d $'\0' -r -a val <<<"${!var}"
- val=${val[*]}
- if [[ ${val} != "${!exp_var}" ]]; then
- eindent
- eerror "${var} expected: ${!exp_var}"
- eerror "${var} actual: ${val}"
- eoutdent
- ret=1
- tret=1
- fi
- done
-
- tend ${ret}
-}
-
-test-DISTUTILS_USE_SETUPTOOLS() {
- local DISTUTILS_USE_SETUPTOOLS=${1}
- local exp_BDEPEND=${2}
- local exp_RDEPEND=${3}
-
- tbegin "${1}"
-
- local BDEPEND=
- local RDEPEND=
- unset _DISTUTILS_R1
- inherit distutils-r1
-
- local ret var val
- for var in BDEPEND RDEPEND; do
- local exp_var=exp_${var}
- # (this normalizes whitespace)
- read -d $'\0' -r -a val <<<"${!var}"
- val=${val[*]}
- if [[ ${val} != "${!exp_var}" ]]; then
- eindent
- eerror "${var} expected: ${!exp_var}"
- eerror "${var} actual: ${val}"
- eoutdent
- ret=1
- tret=1
- fi
- done
-
- tend ${ret}
-}
-
-DISTUTILS_USE_SETUPTOOLS=no
-inherit distutils-r1
-
-tbegin "sane function names"
-
-test-phase_name_free python_prepare
-test-phase_name_free python_configure
-test-phase_name_free python_compile
-test-phase_name_free python_test
-test-phase_name_free python_install
-
-tend
-
-einfo distutils_enable_tests
-eindent
-BASE_IUSE="python_targets_python3_8"
-BASE_DEPS="python_targets_python3_8? ( >=dev-lang/python-3.8.12_p1-r1:3.8 )"
-TEST_RESTRICT="!test? ( test )"
-
-einfo "empty RDEPEND"
-eindent
-RDEPEND=""
-test-distutils_enable_tests pytest \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( >=dev-python/pytest-4.5.0[${PYTHON_USEDEP}] )"
-test-distutils_enable_tests nose \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( >=dev-python/nose-1.3.7-r4[${PYTHON_USEDEP}] )"
-test-distutils_enable_tests unittest \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( dev-python/unittest-or-fail[${PYTHON_USEDEP}] )"
-test-distutils_enable_tests setup.py \
- "${BASE_IUSE}" "" "${BASE_DEPS}"
-eoutdent
-
-einfo "non-empty RDEPEND"
-eindent
-BASE_RDEPEND="dev-python/foo[${PYTHON_USEDEP}]"
-RDEPEND=${BASE_RDEPEND}
-test-distutils_enable_tests pytest \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( ${BASE_RDEPEND} >=dev-python/pytest-4.5.0[${PYTHON_USEDEP}] )"
-test-distutils_enable_tests nose \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( ${BASE_RDEPEND} >=dev-python/nose-1.3.7-r4[${PYTHON_USEDEP}] )"
-test-distutils_enable_tests unittest \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( ${BASE_RDEPEND} dev-python/unittest-or-fail[${PYTHON_USEDEP}] )"
-test-distutils_enable_tests setup.py \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( ${BASE_RDEPEND} )"
-eoutdent
-
-eoutdent
-
-einfo DISTUTILS_USE_SETUPTOOLS
-eindent
-SETUPTOOLS_DEP=">=dev-python/setuptools-42.0.2[python_targets_python3_8(-)?]"
-test-DISTUTILS_USE_SETUPTOOLS no "${BASE_DEPS}" "${BASE_DEPS}"
-test-DISTUTILS_USE_SETUPTOOLS bdepend "${BASE_DEPS} ${SETUPTOOLS_DEP}" "${BASE_DEPS}"
-test-DISTUTILS_USE_SETUPTOOLS rdepend "${BASE_DEPS} ${SETUPTOOLS_DEP}" "${BASE_DEPS} ${SETUPTOOLS_DEP}"
-test-DISTUTILS_USE_SETUPTOOLS pyproject.toml "${BASE_DEPS} >=dev-python/pyproject2setuppy-22[python_targets_python3_8(-)?]" "${BASE_DEPS}"
-test-DISTUTILS_USE_SETUPTOOLS manual "${BASE_DEPS}" "${BASE_DEPS}"
-eoutdent
-
-texit
diff --git a/eclass/tests/distutils-r1_single.sh b/eclass/tests/distutils-r1_single.sh
deleted file mode 100755
index 0a671e2b739b..000000000000
--- a/eclass/tests/distutils-r1_single.sh
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/bin/bash
-# Copyright 1999-2020 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=7
-PYTHON_COMPAT=( python3_8 )
-source tests-common.sh || exit
-
-test-distutils_enable_tests() {
- local runner=${1}
- local exp_IUSE=${2}
- local exp_RESTRICT=${3}
- local exp_BDEPEND=${4}
-
- local IUSE=${IUSE}
- local RESTRICT=${RESTRICT}
- local BDEPEND=${BDEPEND}
-
- tbegin "${runner}"
-
- distutils_enable_tests "${runner}"
-
- local ret var
- for var in IUSE RESTRICT BDEPEND; do
- local exp_var=exp_${var}
- # (this normalizes whitespace)
- read -d $'\0' -r -a val <<<"${!var}"
- val=${val[*]}
- if [[ ${val} != "${!exp_var}" ]]; then
- eindent
- eerror "${var} expected: ${!exp_var}"
- eerror "${var} actual: ${val}"
- eoutdent
- ret=1
- tret=1
- fi
- done
-
- tend ${ret}
-}
-
-test-DISTUTILS_USE_SETUPTOOLS() {
- local DISTUTILS_USE_SETUPTOOLS=${1}
- local exp_BDEPEND=${2}
- local exp_RDEPEND=${3}
-
- tbegin "${1}"
-
- local BDEPEND=
- local RDEPEND=
- unset _DISTUTILS_R1
- inherit distutils-r1
-
- local ret var val
- for var in BDEPEND RDEPEND; do
- local exp_var=exp_${var}
- # (this normalizes whitespace)
- read -d $'\0' -r -a val <<<"${!var}"
- val=${val[*]}
- if [[ ${val} != "${!exp_var}" ]]; then
- eindent
- eerror "${var} expected: ${!exp_var}"
- eerror "${var} actual: ${val}"
- eoutdent
- ret=1
- tret=1
- fi
- done
-
- tend ${ret}
-}
-
-DISTUTILS_USE_SETUPTOOLS=no
-DISTUTILS_SINGLE_IMPL=1
-inherit distutils-r1
-
-einfo distutils_enable_tests
-eindent
-BASE_IUSE="+python_single_target_python3_8"
-BASE_DEPS="python_single_target_python3_8? ( >=dev-lang/python-3.8.12_p1-r1:3.8 )"
-TEST_RESTRICT="!test? ( test )"
-
-einfo "empty RDEPEND"
-eindent
-RDEPEND=""
-test-distutils_enable_tests pytest \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( python_single_target_python3_8? ( >=dev-python/pytest-4.5.0[python_targets_python3_8(-)] ) )"
-test-distutils_enable_tests nose \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( python_single_target_python3_8? ( >=dev-python/nose-1.3.7-r4[python_targets_python3_8(-)] ) )"
-test-distutils_enable_tests unittest \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( python_single_target_python3_8? ( dev-python/unittest-or-fail[python_targets_python3_8(-)] ) )"
-test-distutils_enable_tests setup.py \
- "${BASE_IUSE}" "" "${BASE_DEPS}"
-eoutdent
-
-einfo "non-empty RDEPEND"
-eindent
-BASE_RDEPEND="dev-python/foo[${PYTHON_SINGLE_USEDEP}]"
-RDEPEND=${BASE_RDEPEND}
-test-distutils_enable_tests pytest \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( ${BASE_RDEPEND} python_single_target_python3_8? ( >=dev-python/pytest-4.5.0[python_targets_python3_8(-)] ) )"
-test-distutils_enable_tests nose \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( ${BASE_RDEPEND} python_single_target_python3_8? ( >=dev-python/nose-1.3.7-r4[python_targets_python3_8(-)] ) )"
-test-distutils_enable_tests unittest \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( ${BASE_RDEPEND} python_single_target_python3_8? ( dev-python/unittest-or-fail[python_targets_python3_8(-)] ) )"
-test-distutils_enable_tests setup.py \
- "${BASE_IUSE} test" "${TEST_RESTRICT}" "${BASE_DEPS} test? ( ${BASE_RDEPEND} )"
-eoutdent
-
-eoutdent
-
-einfo DISTUTILS_USE_SETUPTOOLS
-eindent
-SETUPTOOLS_DEP="python_single_target_python3_8? ( >=dev-python/setuptools-42.0.2[python_targets_python3_8(-)] )"
-test-DISTUTILS_USE_SETUPTOOLS no "${BASE_DEPS}" "${BASE_DEPS}"
-test-DISTUTILS_USE_SETUPTOOLS bdepend "${BASE_DEPS} ${SETUPTOOLS_DEP}" "${BASE_DEPS}"
-test-DISTUTILS_USE_SETUPTOOLS rdepend "${BASE_DEPS} ${SETUPTOOLS_DEP}" "${BASE_DEPS} ${SETUPTOOLS_DEP}"
-test-DISTUTILS_USE_SETUPTOOLS pyproject.toml "${BASE_DEPS} python_single_target_python3_8? ( >=dev-python/pyproject2setuppy-22[python_targets_python3_8(-)] )" "${BASE_DEPS}"
-test-DISTUTILS_USE_SETUPTOOLS manual "${BASE_DEPS}" "${BASE_DEPS}"
-eoutdent
-
-texit
diff --git a/eclass/tests/eapi8-dosym.sh b/eclass/tests/eapi8-dosym.sh
index 9290026a26de..a0f8961d4d96 100755
--- a/eclass/tests/eapi8-dosym.sh
+++ b/eclass/tests/eapi8-dosym.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2020 Gentoo Authors
+# Copyright 2020-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
EAPI=7
@@ -50,8 +50,11 @@ done
teq . _dosym8_canonicalize .
teq foo _dosym8_canonicalize foo
teq foo _dosym8_canonicalize ./foo
+teq foo _dosym8_canonicalize foo/.
teq ../foo _dosym8_canonicalize ../foo
teq ../baz _dosym8_canonicalize foo/bar/../../../baz
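+# arguments that look like globs or options must be passed through verbatim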
+teq '*' _dosym8_canonicalize '*'
+teq '-e' _dosym8_canonicalize '-e'
for f in ref_dosym_r "dosym8 -r"; do
teq ../../bin/foo ${f} /bin/foo /usr/bin/foo
diff --git a/eclass/tests/llvm-r1.sh b/eclass/tests/llvm-r1.sh
new file mode 100755
index 000000000000..9958f5bba420
--- /dev/null
+++ b/eclass/tests/llvm-r1.sh
@@ -0,0 +1,151 @@
+#!/bin/bash
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+source tests-common.sh || exit
+
+EAPI=8
+
+test_globals() {
+ local compat=${1}
+ local expected_iuse=${2}
+ local expected_required_use=${3}
+ local expected_usedep=${4}
+ local x
+
+ tbegin "LLVM_COMPAT=( ${compat} )"
+
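+	# run the check in a subshell: inherit sets global IUSE, REQUIRED_USE and
+	# LLVM_USEDEP, which must not leak into the next test case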
+ (
+ local fail=0
+ local LLVM_COMPAT=( ${compat} )
+
+ inherit llvm-r1
+
+ if [[ ${IUSE%% } != ${expected_iuse} ]]; then
+ eerror " IUSE: ${IUSE%% }"
+ eerror "does not match: ${expected_iuse}"
+ fail=1
+ fi
+
+ if [[ ${REQUIRED_USE} != ${expected_required_use} ]]; then
+ eerror " REQUIRED_USE: ${REQUIRED_USE}"
+ eerror "does not match: ${expected_required_use}"
+ fail=1
+ fi
+
+ if [[ ${LLVM_USEDEP} != ${expected_usedep} ]]; then
+ eerror " LLVM_USEDEP: ${LLVM_USEDEP}"
+ eerror "does not match: ${expected_usedep}"
+ fail=1
+ fi
+
+ exit "${fail}"
+ )
+
+ tend "${?}"
+}
+
+test_gen_dep() {
+ local arg=${1}
+ local expected
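+	# the expected (multi-line) value is read from the caller's here-document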
+ read -r -d '' expected
+
+ tbegin "llvm_gen_dep ${arg}"
+ local value=$(llvm_gen_dep "${arg}")
+
+ if [[ ${value} != ${expected} ]]; then
+		eerror "llvm_gen_dep ${arg}"
+ eerror "gave:"
+ eerror " ${value}"
+ eerror "expected:"
+ eerror " ${expected}"
+ fi
+ tend ${?}
+}
+
+test_fix_clang_version() {
+ local var=${1}
+ local tool=${2}
+ local version=${3}
+ local expected=${4}
+
+ eval "${tool}() {
+ cat <<-EOF
+ clang version ${version}
+ Target: x86_64-pc-linux-gnu
+ Thread model: posix
+ InstalledDir: /usr/lib/llvm/17/bin
+ Configuration file: /etc/clang/x86_64-pc-linux-gnu-clang.cfg
+ EOF
+ }"
+
+ declare -g ${var}=${tool}
+ tbegin "llvm_fix_clang_version ${var}=${tool} for ${version}"
+ llvm_fix_clang_version "${var}"
+ if [[ ${!var} != ${expected} ]]; then
+ eerror "llvm_fix_clang_version ${var}"
+ eerror " gave: ${!var}"
+ eerror "expected: ${expected}"
+ fi
+ tend ${?}
+}
+
+test_fix_tool_path() {
+ local var=${1}
+ local tool=${2}
+ local expected_subst=${3}
+ local expected=${tool}
+
+ tbegin "llvm_fix_tool_path ${1}=${2} (from llvm? ${expected_subst})"
+
+ local matches=( "${BROOT}"/usr/lib/llvm/*/bin/"${tool}" )
+ if [[ ${expected_subst} == 1 ]]; then
+ if [[ ! -x ${matches[0]} ]]; then
+ ewarn "- skipping, test requires ${tool}"
+ return
+ fi
+
+ expected=${matches[0]}
+ local -x PATH=${matches[0]%/*}
+ else
+ local -x PATH=
+ fi
+
+ declare -g ${var}=${tool}
+ llvm_fix_tool_path "${var}"
+ if [[ ${!var} != ${expected} ]]; then
+ eerror "llvm_fix_tool_path ${var}"
+ eerror " gave: ${!var}"
+ eerror "expected: ${expected}"
+ fi
+ tend ${?}
+}
+
+test_globals '14 15 16 17 18' \
+ "+llvm_slot_17 llvm_slot_15 llvm_slot_16 llvm_slot_18" \
+ "^^ ( llvm_slot_15 llvm_slot_16 llvm_slot_17 llvm_slot_18 )" \
+ "llvm_slot_15(-)?,llvm_slot_16(-)?,llvm_slot_17(-)?,llvm_slot_18(-)?"
+test_globals '14 15 16' \
+ "+llvm_slot_16 llvm_slot_15" \
+ "^^ ( llvm_slot_15 llvm_slot_16 )" \
+ "llvm_slot_15(-)?,llvm_slot_16(-)?"
+test_globals '15 18' \
+ "+llvm_slot_15 llvm_slot_18" \
+ "^^ ( llvm_slot_15 llvm_slot_18 )" \
+ "llvm_slot_15(-)?,llvm_slot_18(-)?"
+test_globals '18' \
+ "+llvm_slot_18" \
+ "^^ ( llvm_slot_18 )" \
+ "llvm_slot_18(-)?"
+
+LLVM_COMPAT=( {14..18} )
+inherit llvm-r1
+
+test_gen_dep 'sys-devel/llvm:${LLVM_SLOT} sys-devel/clang:${LLVM_SLOT}' <<-EOF
+ llvm_slot_15? ( sys-devel/llvm:15 sys-devel/clang:15 )
+ llvm_slot_16? ( sys-devel/llvm:16 sys-devel/clang:16 )
+ llvm_slot_17? ( sys-devel/llvm:17 sys-devel/clang:17 )
+ llvm_slot_18? ( sys-devel/llvm:18 sys-devel/clang:18 )
+EOF
+
+texit
diff --git a/eclass/tests/llvm-utils.sh b/eclass/tests/llvm-utils.sh
new file mode 100755
index 000000000000..6fe3da3eda13
--- /dev/null
+++ b/eclass/tests/llvm-utils.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+source tests-common.sh || exit
+
+EAPI=8
+
+inherit llvm-utils
+
+test_fix_clang_version() {
+ local var=${1}
+ local tool=${2}
+ local version=${3}
+ local expected=${4}
+
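+	# stub the tool with a shell function that mimics 'clang --version' output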
+ eval "${tool}() {
+ cat <<-EOF
+ clang version ${version}
+ Target: x86_64-pc-linux-gnu
+ Thread model: posix
+ InstalledDir: /usr/lib/llvm/17/bin
+ Configuration file: /etc/clang/x86_64-pc-linux-gnu-clang.cfg
+ EOF
+ }"
+
+ declare -g ${var}=${tool}
+ tbegin "llvm_fix_clang_version ${var}=${tool} for ${version}"
+ llvm_fix_clang_version "${var}"
+ if [[ ${!var} != ${expected} ]]; then
+ eerror "llvm_fix_clang_version ${var}"
+ eerror " gave: ${!var}"
+ eerror "expected: ${expected}"
+ fi
+ tend ${?}
+}
+
+test_fix_tool_path() {
+ local var=${1}
+ local tool=${2}
+ local expected_subst=${3}
+ local expected=${tool}
+
+ tbegin "llvm_fix_tool_path ${1}=${2} (from llvm? ${expected_subst})"
+
+ local matches=( "${BROOT}"/usr/lib/llvm/*/bin/"${tool}" )
+ if [[ ${expected_subst} == 1 ]]; then
+ if [[ ! -x ${matches[0]} ]]; then
+ ewarn "- skipping, test requires ${tool}"
+ return
+ fi
+
+ expected=${matches[0]}
+ local -x PATH=${matches[0]%/*}
+ else
+ local -x PATH=
+ fi
+
+ declare -g ${var}=${tool}
+ llvm_fix_tool_path "${var}"
+ if [[ ${!var} != ${expected} ]]; then
+ eerror "llvm_fix_tool_path ${var}"
+ eerror " gave: ${!var}"
+ eerror "expected: ${expected}"
+ fi
+ tend ${?}
+}
+
+test_prepend_path() {
+ local slot=${1}
+ local -x PATH=${2}
+ local expected=${3}
+
+ tbegin "llvm_prepend_path ${slot} to PATH=${PATH}"
+ llvm_prepend_path "${slot}"
+ if [[ ${PATH} != ${expected} ]]; then
+		eerror "llvm_prepend_path ${slot}"
+ eerror " gave: ${PATH}"
+ eerror "expected: ${expected}"
+ fi
+ tend ${?}
+}
+
+test_fix_clang_version CC clang 19.0.0git78b4e7c5 clang-19
+test_fix_clang_version CC clang 17.0.6 clang-17
+test_fix_clang_version CXX clang++ 17.0.6 clang++-17
+test_fix_clang_version CC x86_64-pc-linux-gnu-clang 17.0.6 \
+ x86_64-pc-linux-gnu-clang-17
+test_fix_clang_version CC clang-17 n/a clang-17
+test_fix_clang_version CC gcc n/a gcc
+
+test_fix_tool_path AR llvm-ar 1
+test_fix_tool_path RANLIB llvm-ranlib 1
+test_fix_tool_path AR ar 1
+test_fix_tool_path AR ar 0
+
+ESYSROOT=
+test_prepend_path 17 /usr/bin /usr/bin:/usr/lib/llvm/17/bin
+test_prepend_path 17 /usr/lib/llvm/17/bin:/usr/bin /usr/lib/llvm/17/bin:/usr/bin
+test_prepend_path 17 /usr/bin:/usr/lib/llvm/17/bin /usr/bin:/usr/lib/llvm/17/bin
+test_prepend_path 17 /usr/lib/llvm/17/bin:/usr/bin:/usr/lib/llvm/17/bin \
+ /usr/lib/llvm/17/bin:/usr/bin
+test_prepend_path 17 /usr/lib/llvm/17/bin:/usr/lib/llvm/17/bin:/usr/bin \
+ /usr/lib/llvm/17/bin:/usr/bin
+test_prepend_path 17 /usr/bin:/usr/lib/llvm/17/bin:/usr/lib/llvm/17/bin \
+ /usr/bin:/usr/lib/llvm/17/bin
+test_prepend_path 18 /usr/lib/llvm/17/bin:/usr/bin \
+ /usr/lib/llvm/18/bin:/usr/lib/llvm/17/bin:/usr/bin
+test_prepend_path 18 /usr/bin:/usr/lib/llvm/17/bin \
+ /usr/bin:/usr/lib/llvm/18/bin:/usr/lib/llvm/17/bin
+test_prepend_path 18 /usr/lib/llvm/17/bin:/usr/lib/llvm/16/bin:/usr/bin \
+ /usr/lib/llvm/18/bin:/usr/lib/llvm/17/bin:/usr/lib/llvm/16/bin:/usr/bin
+test_prepend_path 18 /usr/bin:/usr/lib/llvm/17/bin:/usr/lib/llvm/16/bin \
+ /usr/bin:/usr/lib/llvm/18/bin:/usr/lib/llvm/17/bin:/usr/lib/llvm/16/bin
+test_prepend_path 18 /usr/lib/llvm/17/bin:/usr/bin:/usr/lib/llvm/16/bin \
+ /usr/lib/llvm/18/bin:/usr/lib/llvm/17/bin:/usr/bin:/usr/lib/llvm/16/bin
+
+texit
diff --git a/eclass/tests/llvm.sh b/eclass/tests/llvm.sh
index 93bc124a82af..e62fb4b002a2 100755
--- a/eclass/tests/llvm.sh
+++ b/eclass/tests/llvm.sh
@@ -74,13 +74,6 @@ eindent
check_prefix /sysroot/eprefix/usr/lib/llvm/11 -d
eoutdent
-ebegin "Testing check_setup_path EAPI 6 API"
-eindent
- EAPI=6 \
- LLVM_INSTALLED_SLOT=11 \
- check_prefix /usr/lib/llvm/11 -d
-eoutdent
-
BASEPATH=/usr/lib/ccache/bin:/usr/bin:/usr/sbin:/bin:/sbin
# TODO: cross support?
diff --git a/eclass/tests/multiprocessing_makeopts_jobs.sh b/eclass/tests/multiprocessing_makeopts_jobs.sh
index 37d5a7257775..56d73ef48b3c 100755
--- a/eclass/tests/multiprocessing_makeopts_jobs.sh
+++ b/eclass/tests/multiprocessing_makeopts_jobs.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
EAPI=7
@@ -9,7 +9,13 @@ inherit multiprocessing
test-makeopts_jobs() {
local exp=$1; shift
- tbegin "makeopts_jobs($1${2+; inf=${2}}) == ${exp}"
+ local targs
+ if [[ -v 1 ]]; then
+ targs="$1${2+; inf=${2}}"
+ else
+ targs="MAKEOPTS=\"${MAKEOPTS}\" GNUMAKEFLAGS=\"${GNUMAKEFLAGS}\" MAKEFLAGS=\"${MAKEFLAGS}\""
+ fi
+ tbegin "makeopts_jobs(${targs}) == ${exp}"
local indirect=$(MAKEOPTS="$*" makeopts_jobs)
local direct=$(makeopts_jobs "$@")
if [[ "${direct}" != "${indirect}" ]] ; then
@@ -50,6 +56,20 @@ for (( i = 0; i < ${#tests[@]}; i += 2 )) ; do
test-makeopts_jobs "${tests[i]}" "${tests[i+1]}"
done
+tests=(
+ 7 "" "--jobs 7" ""
+	# MAKEFLAGS overrides GNUMAKEFLAGS
+ 8 "" "--jobs 7" "--jobs 8"
+)
+
+for (( i = 0; i < ${#tests[@]}; i += 4 )) ; do
+ MAKEOPTS="${tests[i+1]}"
+ GNUMAKEFLAGS="${tests[i+2]}"
+ MAKEFLAGS="${tests[i+3]}"
+ test-makeopts_jobs "${tests[i]}"
+ unset MAKEOPTS GNUMAKEFLAGS MAKEFLAGS
+done
+
# test custom inf value
test-makeopts_jobs 645 "-j" 645
diff --git a/eclass/tests/pypi-bench.sh b/eclass/tests/pypi-bench.sh
new file mode 100755
index 000000000000..cce93527b729
--- /dev/null
+++ b/eclass/tests/pypi-bench.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+source tests-common.sh || exit
+
+export LC_ALL=C
+
+ITERATIONS=1000
+RUNS=3
+
+doit() {
+ local i
+ for (( i = 0; i < ITERATIONS; i++ )); do
+ _pypi_set_globals
+ done
+}
+
+timeit() {
+ einfo "Timing PYPI_PN=\"${PYPI_PN}\" PV=\"${PV}\" PYPI_NO_NORMALIZE=${PYPI_NO_NORMALIZE}"
+
+ local real=()
+ local user=()
+ local x vr avg
+
+ for (( x = 0; x < RUNS; x++ )); do
+ while read tt tv; do
+ case ${tt} in
+ real) real+=( ${tv} );;
+ user) user+=( ${tv} );;
+ esac
+ done < <( ( time -p doit ) 2>&1 )
+ done
+
+ [[ ${#real[@]} == ${RUNS} ]] || die "Did not get ${RUNS} real times"
+ [[ ${#user[@]} == ${RUNS} ]] || die "Did not get ${RUNS} user times"
+
+ local xr avg
+ for x in real user; do
+ xr="${x}[*]"
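+		# dc script (RPN): ITERATIONS * RUNS divided by the summed run times,
+		# i.e. average iterations per second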
+ avg=$(dc -S 3 -e "${ITERATIONS} ${RUNS} * ${!xr} + + / p")
+
+ printf '%s %4.0f it/s\n' "${x}" "${avg}"
+ done
+}
+
+PN=foo-bar
+PYPI_PN=Foo.Bar
+PV=1.2.3_beta2
+WORKDIR='<WORKDIR>'
+
+inherit pypi
+timeit
+
+PV=1.2.3
+timeit
+PYPI_NO_NORMALIZE=1 timeit
+
+PN=foobar
+PYPI_PN=FooBar
+timeit
+PYPI_NO_NORMALIZE=1 timeit
+
+PYPI_PN=foobar
+timeit
+PYPI_NO_NORMALIZE=1 timeit
+
+texit
diff --git a/eclass/tests/pypi.sh b/eclass/tests/pypi.sh
new file mode 100755
index 000000000000..471ac048b18a
--- /dev/null
+++ b/eclass/tests/pypi.sh
@@ -0,0 +1,97 @@
+#!/bin/bash
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+source tests-common.sh || exit
+
+PN=foo-bar
+PYPI_PN=Foo.Bar
+PV=1.2.3_beta2
+WORKDIR='<WORKDIR>'
+
+inherit pypi
+
+test-eq() {
+ local call=${1}
+ local exp=${2}
+
+ tbegin "${call} -> ${exp}"
+ local ret=0
+ local have=$(${call})
+ if [[ ${have} != ${exp} ]]; then
+ eindent
+ eerror "incorrect result: ${have}"
+ eoutdent
+ ret=1
+ fi
+ tend "${ret}"
+}
+
+test-eq "pypi_normalize_name foo" foo
+test-eq "pypi_normalize_name foo_bar" foo_bar
+test-eq "pypi_normalize_name foo___bar" foo_bar
+test-eq "pypi_normalize_name Flask-BabelEx" flask_babelex
+test-eq "pypi_normalize_name jaraco.context" jaraco_context
+
+test-eq "pypi_translate_version 1.2.3" 1.2.3
+test-eq "pypi_translate_version 1.2.3_p101" 1.2.3.post101
+test-eq "pypi_translate_version 1.2.3_alpha4" 1.2.3a4
+test-eq "pypi_translate_version 1.2.3_beta1" 1.2.3b1
+test-eq "pypi_translate_version 1.2.3_rc2" 1.2.3rc2
+test-eq "pypi_translate_version 1.2.3_rc2_p1" 1.2.3rc2.post1
+
+test-eq "pypi_wheel_name" foo_bar-1.2.3b2-py3-none-any.whl
+test-eq "pypi_wheel_name Flask-BabelEx" flask_babelex-1.2.3b2-py3-none-any.whl
+test-eq "pypi_wheel_name Flask-BabelEx 4" flask_babelex-4-py3-none-any.whl
+test-eq "pypi_wheel_name Flask-BabelEx 4 py2.py3" \
+ flask_babelex-4-py2.py3-none-any.whl
+test-eq "pypi_wheel_name cryptography 39.0.1 cp36 abi3-manylinux_2_28_x86_64" \
+ cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl
+
+test-eq "pypi_wheel_url" \
+ https://files.pythonhosted.org/packages/py3/F/Foo.Bar/foo_bar-1.2.3b2-py3-none-any.whl
+test-eq "pypi_wheel_url Flask-BabelEx" \
+ https://files.pythonhosted.org/packages/py3/F/Flask-BabelEx/flask_babelex-1.2.3b2-py3-none-any.whl
+test-eq "pypi_wheel_url Flask-BabelEx 4" \
+ https://files.pythonhosted.org/packages/py3/F/Flask-BabelEx/flask_babelex-4-py3-none-any.whl
+test-eq "pypi_wheel_url Flask-BabelEx 4 py2.py3" \
+ https://files.pythonhosted.org/packages/py2.py3/F/Flask-BabelEx/flask_babelex-4-py2.py3-none-any.whl
+test-eq "pypi_wheel_url cryptography 39.0.1 cp36 abi3-manylinux_2_28_x86_64" \
+ https://files.pythonhosted.org/packages/cp36/c/cryptography/cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl
+
+test-eq "pypi_wheel_url --unpack" \
+ "https://files.pythonhosted.org/packages/py3/F/Foo.Bar/foo_bar-1.2.3b2-py3-none-any.whl -> foo_bar-1.2.3b2-py3-none-any.whl.zip"
+test-eq "pypi_wheel_url --unpack Flask-BabelEx" \
+ "https://files.pythonhosted.org/packages/py3/F/Flask-BabelEx/flask_babelex-1.2.3b2-py3-none-any.whl -> flask_babelex-1.2.3b2-py3-none-any.whl.zip"
+test-eq "pypi_wheel_url --unpack Flask-BabelEx 4" \
+ "https://files.pythonhosted.org/packages/py3/F/Flask-BabelEx/flask_babelex-4-py3-none-any.whl -> flask_babelex-4-py3-none-any.whl.zip"
+test-eq "pypi_wheel_url --unpack Flask-BabelEx 4 py2.py3" \
+ "https://files.pythonhosted.org/packages/py2.py3/F/Flask-BabelEx/flask_babelex-4-py2.py3-none-any.whl -> flask_babelex-4-py2.py3-none-any.whl.zip"
+test-eq "pypi_wheel_url --unpack cryptography 39.0.1 cp36 abi3-manylinux_2_28_x86_64" \
+ "https://files.pythonhosted.org/packages/cp36/c/cryptography/cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl -> cryptography-39.0.1-cp36-abi3-manylinux_2_28_x86_64.whl.zip"
+
+test-eq "pypi_sdist_url" \
+ https://files.pythonhosted.org/packages/source/F/Foo.Bar/foo_bar-1.2.3b2.tar.gz
+test-eq "pypi_sdist_url Flask-BabelEx" \
+ https://files.pythonhosted.org/packages/source/F/Flask-BabelEx/flask_babelex-1.2.3b2.tar.gz
+test-eq "pypi_sdist_url Flask-BabelEx 4" \
+ https://files.pythonhosted.org/packages/source/F/Flask-BabelEx/flask_babelex-4.tar.gz
+test-eq "pypi_sdist_url Flask-BabelEx 4 .zip" \
+ https://files.pythonhosted.org/packages/source/F/Flask-BabelEx/flask_babelex-4.zip
+
+test-eq "pypi_sdist_url --no-normalize" \
+ https://files.pythonhosted.org/packages/source/F/Foo.Bar/Foo.Bar-1.2.3b2.tar.gz
+test-eq "pypi_sdist_url --no-normalize Flask-BabelEx" \
+ https://files.pythonhosted.org/packages/source/F/Flask-BabelEx/Flask-BabelEx-1.2.3b2.tar.gz
+test-eq "pypi_sdist_url --no-normalize Flask-BabelEx 4" \
+ https://files.pythonhosted.org/packages/source/F/Flask-BabelEx/Flask-BabelEx-4.tar.gz
+test-eq "pypi_sdist_url --no-normalize Flask-BabelEx 4 .zip" \
+ https://files.pythonhosted.org/packages/source/F/Flask-BabelEx/Flask-BabelEx-4.zip
+
+test-eq 'declare -p SRC_URI' \
+ 'declare -- SRC_URI="https://files.pythonhosted.org/packages/source/F/Foo.Bar/foo_bar-1.2.3b2.tar.gz"'
+test-eq 'declare -p S' \
+ 'declare -- S="<WORKDIR>/foo_bar-1.2.3b2"'
+
+texit
diff --git a/eclass/tests/python-utils-bench.sh b/eclass/tests/python-utils-bench.sh
new file mode 100755
index 000000000000..7f27adef5509
--- /dev/null
+++ b/eclass/tests/python-utils-bench.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+source tests-common.sh || exit
+
+export LC_ALL=C
+
+ITERATIONS=10000
+RUNS=3
+
+doit() {
+ local i
+ for (( i = 0; i < ITERATIONS; i++ )); do
+ "${@}"
+ done
+}
+
+timeit() {
+ local real=()
+ local user=()
+ local x vr avg
+
+ einfo "Timing ${*}"
+ for (( x = 0; x < RUNS; x++ )); do
+ while read tt tv; do
+ case ${tt} in
+ real) real+=( ${tv} );;
+ user) user+=( ${tv} );;
+ esac
+ done < <( ( time -p doit "${@}" ) 2>&1 )
+ done
+
+ [[ ${#real[@]} == ${RUNS} ]] || die "Did not get ${RUNS} real times"
+ [[ ${#user[@]} == ${RUNS} ]] || die "Did not get ${RUNS} user times"
+
+ local xr avg
+ for x in real user; do
+ xr="${x}[*]"
+ avg=$(dc -S 3 -e "${ITERATIONS} ${RUNS} * ${!xr} + + / p")
+
+ printf '%s %4.0f it/s\n' "${x}" "${avg}"
+ done
+}
+
+PYTHON_COMPAT=( python3_{10..12} pypy3 )
+
+inherit python-utils-r1
+
+timeit _python_set_impls
+
+texit
diff --git a/eclass/tests/python-utils-r1.sh b/eclass/tests/python-utils-r1.sh
index 6abf10cadabd..81c459765f0f 100755
--- a/eclass/tests/python-utils-r1.sh
+++ b/eclass/tests/python-utils-r1.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 1999-2020 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
EAPI=7
@@ -64,23 +64,7 @@ tmpfile=$(mktemp)
inherit python-utils-r1
-ebegin "Testing python2.7"
-eindent
-test_var EPYTHON python2_7 python2.7
-test_var PYTHON python2_7 /usr/bin/python2.7
-if [[ -x /usr/bin/python2.7 ]]; then
- test_var PYTHON_SITEDIR python2_7 "/usr/lib*/python2.7/site-packages"
- test_var PYTHON_INCLUDEDIR python2_7 /usr/include/python2.7
- test_var PYTHON_LIBPATH python2_7 "/usr/lib*/libpython2.7$(get_libname)"
- test_var PYTHON_CONFIG python2_7 /usr/bin/python2.7-config
- test_var PYTHON_CFLAGS python2_7 "*-I/usr/include/python2.7*"
- test_var PYTHON_LIBS python2_7 "*-lpython2.7*"
-fi
-test_var PYTHON_PKG_DEP python2_7 '*dev-lang/python*:2.7'
-test_var PYTHON_SCRIPTDIR python2_7 /usr/lib/python-exec/python2.7
-eoutdent
-
-for minor in 6 7 8 9 10 11; do
+for minor in {10..12}; do
ebegin "Testing python3.${minor}"
eindent
test_var EPYTHON "python3_${minor}" "python3.${minor}"
@@ -139,8 +123,8 @@ eindent
test_var EPYTHON pypy3 pypy3
test_var PYTHON pypy3 /usr/bin/pypy3
if [[ -x /usr/bin/pypy3 ]]; then
- test_var PYTHON_SITEDIR pypy3 "/usr/lib*/pypy3.?/site-packages"
- test_var PYTHON_INCLUDEDIR pypy3 "/usr/include/pypy3.?"
+ test_var PYTHON_SITEDIR pypy3 "/usr/lib*/pypy3.*/site-packages"
+ test_var PYTHON_INCLUDEDIR pypy3 "/usr/include/pypy3.*"
fi
test_var PYTHON_PKG_DEP pypy3 '*dev-python/pypy3*:0='
test_var PYTHON_SCRIPTDIR pypy3 /usr/lib/python-exec/pypy3
@@ -215,15 +199,18 @@ test_is "_python_impl_matches python3_6 python*" 0
test_is "_python_impl_matches python3_7 python*" 0
test_is "_python_impl_matches pypy3 python*" 1
set +f
-test_is "_python_impl_matches python3_8 3.8" 0
-test_is "_python_impl_matches python3_8 3.9" 1
-test_is "_python_impl_matches python3_8 3.10" 1
-test_is "_python_impl_matches python3_9 3.8" 1
-test_is "_python_impl_matches python3_9 3.9" 0
-test_is "_python_impl_matches python3_9 3.10" 1
-test_is "_python_impl_matches pypy3 3.8" 1
-test_is "_python_impl_matches pypy3 3.9" 0
-test_is "_python_impl_matches pypy3 3.10" 1
+test_is "_python_impl_matches python3_10 3.10" 0
+test_is "_python_impl_matches python3_10 3.11" 1
+test_is "_python_impl_matches python3_10 3.12" 1
+test_is "_python_impl_matches python3_11 3.10" 1
+test_is "_python_impl_matches python3_11 3.11" 0
+test_is "_python_impl_matches python3_11 3.12" 1
+test_is "_python_impl_matches python3_12 3.10" 1
+test_is "_python_impl_matches python3_12 3.11" 1
+test_is "_python_impl_matches python3_12 3.12" 0
+test_is "_python_impl_matches pypy3 3.10" 0
+test_is "_python_impl_matches pypy3 3.11" 1
+test_is "_python_impl_matches pypy3 3.12" 1
eoutdent
rm "${tmpfile}"
diff --git a/eclass/tests/scons-utils.sh b/eclass/tests/scons-utils.sh
deleted file mode 100755
index 32a0a944706e..000000000000
--- a/eclass/tests/scons-utils.sh
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/bin/bash
-# Copyright 1999-2021 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=7
-source tests-common.sh || exit
-
-inherit scons-utils
-
-test-scons_clean_makeopts() {
- tbegin "scons_clean_makeopts() for ${1}"
-
- local SCONSOPTS ret=0
- _scons_clean_makeopts ${1}
-
- if [[ ${SCONSOPTS} != ${2-${1}} ]]; then
- eerror "Self-test failed:"
- eindent
- eerror "MAKEOPTS: ${1}"
- eerror "Expected: ${2-${1}}"
- eerror "Actual: ${SCONSOPTS}"
- eoutdent
- ret=1
- fi
-
- tend ${ret}
- return ${ret}
-}
-
-# jobcount expected for non-specified state
-jc=$(( $(get_nproc) + 1 ))
-# failed test counter
-failed=0
-
-# sane MAKEOPTS
-test-scons_clean_makeopts '--jobs=14 -k'
-test-scons_clean_makeopts '--jobs=14 -k'
-test-scons_clean_makeopts '--jobs 15 -k'
-test-scons_clean_makeopts '--jobs=16 --keep-going'
-test-scons_clean_makeopts '-j17 --keep-going'
-test-scons_clean_makeopts '-j 18 --keep-going'
-
-# needing cleaning
-test-scons_clean_makeopts '--jobs -k' "--jobs=${jc} -k"
-test-scons_clean_makeopts '--jobs --keep-going' "--jobs=${jc} --keep-going"
-test-scons_clean_makeopts '-kj' "-kj ${jc}"
-
-# broken by definition (but passed as it breaks make as well)
-test-scons_clean_makeopts '-jk'
-test-scons_clean_makeopts '--jobs=randum'
-test-scons_clean_makeopts '-kjrandum'
-
-# needing stripping
-test-scons_clean_makeopts '--load-average=25 -kj16' '-kj16'
-test-scons_clean_makeopts '--load-average 25 -k -j17' '-k -j17'
-test-scons_clean_makeopts '-j2 HOME=/tmp' '-j2'
-test-scons_clean_makeopts '--jobs funnystuff -k' "--jobs=${jc} -k"
-
-# bug #388961
-test-scons_clean_makeopts '--jobs -l3' "--jobs=${jc}"
-test-scons_clean_makeopts '-j -l3' "-j ${jc}"
-
-texit
diff --git a/eclass/tests/systemd.sh b/eclass/tests/systemd.sh
new file mode 100755
index 000000000000..f870df4b7a12
--- /dev/null
+++ b/eclass/tests/systemd.sh
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+# Copyright 2022 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+source tests-common.sh || exit
+
+inherit systemd
+
+test_system_dir() {
+ local exp1="${EPREFIX}$1"
+ local exp2="${EPREFIX}/usr$1"
+ shift
+ tbegin "$@"
+ local act=$("$@")
+ [[ ${act} == ${exp1} || ${act} == ${exp2} ]]
+ tend $?
+}
+
+test_user_dir() {
+ local exp="${EPREFIX}$1"
+ shift
+ tbegin "$@"
+ local act=$("$@")
+ [[ ${act} == ${exp} ]]
+ tend $?
+}
+
+test_systemd_unprefix() {
+ local exp=$1
+ local EPREFIX=$2
+ shift 2
+ tbegin "EPREFIX=${EPREFIX} _systemd_unprefix $@"
+ [[ "$(_systemd_unprefix "$@")" == "${exp}" ]]
+ tend $?
+}
+
+test_system_dir /lib/systemd/system systemd_get_systemunitdir
+test_system_dir /lib/systemd systemd_get_utildir
+test_system_dir /lib/systemd/system-generators systemd_get_systemgeneratordir
+test_system_dir /lib/systemd/system-preset systemd_get_systempresetdir
+test_system_dir /lib/systemd/system-sleep systemd_get_sleepdir
+
+test_user_dir /usr/lib/systemd/user systemd_get_userunitdir
+
+test_systemd_unprefix /lib/systemd /prefix echo /prefix/lib/systemd
+test_systemd_unprefix /lib/systemd '' echo /lib/systemd
+test_systemd_unprefix /lib/systemd '/*' echo '/*/lib/systemd'
+
+texit
diff --git a/eclass/tests/tests-common.sh b/eclass/tests/tests-common.sh
index a677842b6ac5..45b1e20b933a 100644
--- a/eclass/tests/tests-common.sh
+++ b/eclass/tests/tests-common.sh
@@ -60,6 +60,13 @@ die() {
exit 1
}
+assert() {
+ local x pipestatus=${PIPESTATUS[*]}
+ for x in ${pipestatus} ; do
+ [[ ${x} -eq 0 ]] || die "$@"
+ done
+}
+
has_version() {
while [[ $1 == -* ]]; do
shift
diff --git a/eclass/tests/toolchain-funcs.sh b/eclass/tests/toolchain-funcs.sh
index 56379b10cded..08cfd74611aa 100755
--- a/eclass/tests/toolchain-funcs.sh
+++ b/eclass/tests/toolchain-funcs.sh
@@ -60,20 +60,22 @@ tbegin "tc-ld-is-gold (ld=bfd cc=bfd)"
LD=ld.bfd LDFLAGS=-fuse-ld=bfd tc-ld-is-gold && ret=1 || ret=0
tend ${ret}
-tbegin "tc-ld-is-gold (ld=gold cc=default)"
-LD=ld.gold tc-ld-is-gold
-ret=$?
-tend ${ret}
-
-tbegin "tc-ld-is-gold (ld=gold cc=bfd)"
-LD=ld.gold LDFLAGS=-fuse-ld=bfd tc-ld-is-gold
-ret=$?
-tend ${ret}
-
-tbegin "tc-ld-is-gold (ld=bfd cc=gold)"
-LD=ld.bfd LDFLAGS=-fuse-ld=gold tc-ld-is-gold
-ret=$?
-tend ${ret}
+if type -P ld.gold &>/dev/null; then
+ tbegin "tc-ld-is-gold (ld=gold cc=default)"
+ LD=ld.gold tc-ld-is-gold
+ ret=$?
+ tend ${ret}
+
+ tbegin "tc-ld-is-gold (ld=gold cc=bfd)"
+ LD=ld.gold LDFLAGS=-fuse-ld=bfd tc-ld-is-gold
+ ret=$?
+ tend ${ret}
+
+ tbegin "tc-ld-is-gold (ld=bfd cc=gold)"
+ LD=ld.bfd LDFLAGS=-fuse-ld=gold tc-ld-is-gold
+ ret=$?
+ tend ${ret}
+fi
#
# TEST: tc-ld-disable-gold
@@ -87,23 +89,25 @@ tc-ld-disable-gold
)
tend $?
-tbegin "tc-ld-disable-gold (ld=gold)"
-(
-export LD=ld.gold LDFLAGS=
-ewarn() { :; }
-tc-ld-disable-gold
-[[ ${LD} == "ld.bfd" || ${LDFLAGS} == *"-fuse-ld=bfd"* ]]
-)
-tend $?
+if type -P ld.gold &>/dev/null; then
+ tbegin "tc-ld-disable-gold (ld=gold)"
+ (
+ export LD=ld.gold LDFLAGS=
+ ewarn() { :; }
+ tc-ld-disable-gold
+ [[ ${LD} == "ld.bfd" || ${LDFLAGS} == *"-fuse-ld=bfd"* ]]
+ )
+ tend $?
-tbegin "tc-ld-disable-gold (cc=gold)"
-(
-export LD= LDFLAGS="-fuse-ld=gold"
-ewarn() { :; }
-tc-ld-disable-gold
-[[ ${LD} == *"/ld.bfd" || ${LDFLAGS} == "-fuse-ld=gold -fuse-ld=bfd" ]]
-)
-tend $?
+ tbegin "tc-ld-disable-gold (cc=gold)"
+ (
+ export LD= LDFLAGS="-fuse-ld=gold"
+ ewarn() { :; }
+ tc-ld-disable-gold
+ [[ ${LD} == *"/ld.bfd" || ${LDFLAGS} == "-fuse-ld=gold -fuse-ld=bfd" ]]
+ )
+ tend $?
+fi
unset CPP
@@ -198,4 +202,36 @@ for compiler in gcc clang not-really-a-compiler; do
fi
done
+if type -P gcc &>/dev/null; then
+ tbegin "tc-get-cxx-stdlib (gcc)"
+ [[ $(CXX=g++ tc-get-cxx-stdlib) == libstdc++ ]]
+ tend $?
+
+ tbegin "tc-get-c-rtlib (gcc)"
+ [[ $(CC=gcc tc-get-c-rtlib) == libgcc ]]
+ tend $?
+fi
+
+if type -P clang &>/dev/null; then
+ for stdlib in libc++ libstdc++; do
+ if clang++ -stdlib=${stdlib} -x c++ -E -P - &>/dev/null \
+ <<<'#include <ciso646>'
+ then
+ tbegin "tc-get-cxx-stdlib (clang, ${stdlib})"
+ [[ $(CXX=clang++ CXXFLAGS="-stdlib=${stdlib}" tc-get-cxx-stdlib) == ${stdlib} ]]
+ tend $?
+ fi
+ done
+
+ tbegin "tc-get-cxx-stdlib (clang, invalid)"
+ ! CXX=clang++ CXXFLAGS="-stdlib=invalid" tc-get-cxx-stdlib
+ tend $?
+
+ for rtlib in compiler-rt libgcc; do
+ tbegin "tc-get-c-rtlib (clang, ${rtlib})"
+ [[ $(CC=clang CFLAGS="--rtlib=${rtlib}" tc-get-c-rtlib) == ${rtlib} ]]
+ tend $?
+ done
+fi
+
texit
diff --git a/eclass/tests/toolchain.sh b/eclass/tests/toolchain.sh
index c012448a1478..1f21e7d842b1 100755
--- a/eclass/tests/toolchain.sh
+++ b/eclass/tests/toolchain.sh
@@ -1,16 +1,17 @@
#!/bin/bash
-# Copyright 1999-2019 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-EAPI=5
+EAPI=8
-# apply exlass globals to test version parsing
-TOOLCHAIN_GCC_PV=7.3.0
+# apply eclass globals to test version parsing
+TOOLCHAIN_GCC_PV=11.3.0
PR=r0
source tests-common.sh || exit
-inherit toolchain
+EAPI=6 inherit eapi7-ver
+EAPI=7 inherit toolchain
# Ignore actually running version of gcc and fake new version
# to force downgrade test on all conditions below.
@@ -120,12 +121,12 @@ test_tc_version_is_at_least() {
}
# want mine expect
-test_tc_version_is_at_least 8 '' 1
-test_tc_version_is_at_least 8.0 '' 1
-test_tc_version_is_at_least 7 '' 0
-test_tc_version_is_at_least 7.0 '' 0
+test_tc_version_is_at_least 12 '' 1
+test_tc_version_is_at_least 11.4 '' 1
+test_tc_version_is_at_least 10 '' 0
+test_tc_version_is_at_least 10 '' 0
test_tc_version_is_at_least ${TOOLCHAIN_GCC_PV} '' 0
-test_tc_version_is_at_least 5.0 6.0 0
+test_tc_version_is_at_least 10 11 0
test_tc_version_is_between() {
local exp msg ret=0 lo hi res
@@ -149,11 +150,11 @@ test_tc_version_is_between() {
# lo hi expect
test_tc_version_is_between 1 0 1
test_tc_version_is_between 1 2 1
-test_tc_version_is_between 7 8 0
-test_tc_version_is_between ${TOOLCHAIN_GCC_PV} 8 0
+test_tc_version_is_between 11 12 0
+test_tc_version_is_between ${TOOLCHAIN_GCC_PV} 12 0
test_tc_version_is_between ${TOOLCHAIN_GCC_PV} ${TOOLCHAIN_GCC_PV} 1
-test_tc_version_is_between 7 ${TOOLCHAIN_GCC_PV} 1
-test_tc_version_is_between 8 9 1
+test_tc_version_is_between 10 ${TOOLCHAIN_GCC_PV} 1
+test_tc_version_is_between 12 13 1
# eclass has a few critical global variables worth not breaking
test_var_assert() {
@@ -173,14 +174,14 @@ test_var_assert() {
# TODO: convert these globals to helpers to ease testing against multiple
# ${TOOLCHAIN_GCC_PV} values.
-test_var_assert GCC_PV 7.3.0
-test_var_assert GCC_PVR 7.3.0
-test_var_assert GCC_RELEASE_VER 7.3.0
-test_var_assert GCC_BRANCH_VER 7.3
-test_var_assert GCCMAJOR 7
+test_var_assert GCC_PV 11.3.0
+test_var_assert GCC_PVR 11.3.0
+test_var_assert GCC_RELEASE_VER 11.3.0
+test_var_assert GCC_BRANCH_VER 11.3
+test_var_assert GCCMAJOR 11
test_var_assert GCCMINOR 3
test_var_assert GCCMICRO 0
-test_var_assert GCC_CONFIG_VER 7.3.0
+test_var_assert GCC_CONFIG_VER 11.3.0
test_var_assert PREFIX /usr
texit
diff --git a/eclass/tests/unpacker.sh b/eclass/tests/unpacker.sh
new file mode 100755
index 000000000000..ef17e724a851
--- /dev/null
+++ b/eclass/tests/unpacker.sh
@@ -0,0 +1,430 @@
+#!/usr/bin/env bash
+# Copyright 2022 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+source tests-common.sh || exit
+
+inherit unpacker
+
+# silence the output
+unpack_banner() { :; }
+
+TESTFILE=test.in
+TESTDIR=$(mktemp -d || die)
+trap 'cd - >/dev/null && rm -r "${TESTDIR}"' EXIT
+
+# prepare some test data
+# NB: we need something "compressible", as compress(1) will return
+# an error if the file "is larger than before compression"
+cp ../unpacker.eclass "${TESTDIR}/${TESTFILE}" || die
+cd "${TESTDIR}" || die
+
+test_unpack() {
+ local archive=${1}
+ local unpacked=${2}
+ local deps=${3}
+ local packcmd=${4}
+
+ local x
+ for x in ${deps}; do
+ if ! type "${x}" &>/dev/null; then
+ ewarn "Skipping ${archive}, tool ${x} not found"
+ return
+ fi
+ done
+
+ rm -rf testdir || die
+ mkdir -p testdir || die
+
+ tbegin "unpacking ${archive}"
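+	# packcmd keeps ${archive}/${TESTFILE} unexpanded; they get their values at this eval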
+ eval "${packcmd}"
+ assert "packing ${archive} failed"
+ cd testdir || die
+
+ # create a symlink to flush out compressor issues and resemble distdir more
+ # https://bugs.gentoo.org/873352
+ ln -s "../${archive}" "${archive}" || die
+
+ local out
+ out=$(
+ _unpacker "${archive}" 2>&1
+ )
+ ret=$?
+ if [[ ${ret} -eq 0 ]]; then
+ if [[ ! -f ${unpacked} ]]; then
+ eerror "${unpacked} not found after unpacking"
+ ret=1
+ elif ! diff -u "${unpacked}" "../${TESTFILE}"; then
+ eerror "${unpacked} different than input"
+ ret=1
+ fi
+ fi
+ [[ ${ret} -ne 0 ]] && echo "${out}" >&2
+ tend ${ret}
+
+ cd .. || die
+ rm -f "${archive}" || die
+}
+
+test_compressed_file() {
+ local suffix=${1}
+ local tool=${2}
+
+ test_unpack "test${suffix}" test "${tool}" \
+ "${tool} -c \${TESTFILE} > \${archive}"
+}
+
+test_compressed_file_multistream() {
+ local suffix=${1}
+ local tool=${2}
+
+ test_unpack "test+multistream${suffix}" "test+multistream" "${tool}" \
+ "head -n 300 \${TESTFILE} | ${tool} -c > \${archive} &&
+ tail -n +301 \${TESTFILE} | ${tool} -c >> \${archive}"
+}
+
+test_compressed_file_with_junk() {
+ local suffix=${1}
+ local tool=${2}
+ local flag=${3}
+
+ test_unpack "test+junk${suffix}" "test+junk" "${tool}" \
+ "${tool} -c \${TESTFILE} > \${archive} && cat test.in >> \${archive}"
+}
+
+test_compressed_tar() {
+ local suffix=${1}
+ local tool=${2}
+
+ test_unpack "test${suffix}" test.in "tar ${tool}" \
+ "tar -c \${TESTFILE} | ${tool} -c > \${archive}"
+}
+
+test_compressed_cpio() {
+ local suffix=${1}
+ local tool=${2}
+
+ test_unpack "test${suffix}" test.in "cpio ${tool}" \
+ "cpio -o --quiet <<<\${TESTFILE} | ${tool} -c > \${archive}"
+}
+
+create_deb() {
+ local suffix=${1}
+ local tool=${2}
+ local archive=${3}
+ local infile=${4}
+
+ echo 2.0 > debian-binary || die
+ : > control || die
+ tar -cf control.tar control || die
+ tar -c "${infile}" | ${tool} > "data.tar${suffix}"
+ assert "packing data.tar${suffix} failed"
+ ar r "${archive}" debian-binary control.tar "data.tar${suffix}" \
+ 2>/dev/null || die
+ rm -f control control.tar "data.tar${suffix}" debian-binary || die
+}
+
+test_deb() {
+ local suffix=${1}
+ local tool=${2}
+ local tool_cmd
+
+ if [[ -n ${tool} ]]; then
+ tool_cmd="${tool} -c"
+ else
+ tool_cmd=cat
+ fi
+
+ test_unpack "test-${tool}_1.2.3_noarch.deb" test.in "ar tar ${tool}" \
+ "create_deb '${suffix}' '${tool_cmd}' \${archive} \${TESTFILE}"
+ # also test with the handwoven implementation used on Prefix
+ EPREFIX=/foo \
+ test_unpack "test_pfx-${tool}_1.2.3_noarch.deb" test.in "ar tar ${tool}" \
+ "create_deb '${suffix}' '${tool_cmd}' \${archive} \${TESTFILE}"
+}
+
+create_gpkg() {
+ local suffix=${1}
+ local tool=${2}
+ local archive=${3}
+ local infile=${4}
+ local gpkg_dir=${archive%.gpkg.tar}
+
+ mkdir image metadata "${gpkg_dir}" || die
+ cp "${infile}" image/ || die
+ tar -c metadata | ${tool} > "${gpkg_dir}/metadata.tar${suffix}"
+ assert "packing metadata.tar${suffix} failed"
+ : > "${gpkg_dir}/metadata.tar${suffix}.sig" || die
+ tar -c image | ${tool} > "${gpkg_dir}/image.tar${suffix}"
+ assert "packing image.tar${suffix} failed"
+ : > "${gpkg_dir}/image.tar${suffix}.sig" || die
+ : > "${gpkg_dir}"/gpkg-1 || die
+ tar -cf "${archive}" --format=ustar \
+ "${gpkg_dir}"/{gpkg-1,{metadata,image}.tar"${suffix}"} || die
+ rm -r image metadata "${gpkg_dir}" || die
+}
+
+test_gpkg() {
+ local suffix=${1}
+ local tool=${2}
+ local tool_cmd
+
+ if [[ -n ${tool} ]]; then
+ tool_cmd="${tool} -c"
+ else
+ tool_cmd=cat
+ fi
+
+ test_unpack "test-${tool}-1.2.3-1.gpkg.tar" \
+ "test-${tool}-1.2.3-1/image/test.in" "tar ${tool}" \
+ "create_gpkg '${suffix}' '${tool_cmd}' \${archive} \${TESTFILE}"
+}
+
+create_makeself() {
+ local comp_opt=${1}
+ local archive=${2}
+ local infile=${3}
+
+ mkdir test || die
+ cp "${infile}" test/ || die
+ makeself --quiet "${comp_opt}" test "${archive}" test : || die
+ rm -rf test || die
+}
+
+test_makeself() {
+ local comp_opt=${1}
+ local tool=${2}
+
+ test_unpack "makeself-${tool}.sh" test.in "makeself ${tool}" \
+ "create_makeself '${comp_opt}' \${archive} \${TESTFILE}"
+}
+
+test_reject_junk() {
+ local suffix=${1}
+ local archive=test${1}
+
+ rm -rf testdir || die
+ mkdir -p testdir || die
+
+ tbegin "rejecting junk named ${archive}"
+ cat test.in >> "${archive}" || die
+ cd testdir || die
+ (
+ # some decompressors (e.g. cpio) are very verbose about junk
+ _unpacker "../${archive}" &>/dev/null
+ )
+ [[ $? -ne 0 ]]
+ ret=$?
+ tend ${ret}
+
+ cd .. || die
+ rm -f "${archive}" || die
+}
+
+test_online() {
+ local url=${1}
+ local b2sum=${2}
+ local unpacked=${3}
+ local unp_b2sum=${4}
+
+ local filename=${url##*/}
+ local archive=${DISTDIR}/${filename}
+
+ if [[ ! -f ${archive} ]]; then
+ if [[ ${UNPACKER_TESTS_ONLINE} != 1 ]]; then
+ ewarn "Skipping ${filename} test, distfile not found"
+ return
+ fi
+
+ if ! wget -O "${archive}" "${url}"; then
+ die "Fetching ${archive} failed"
+ fi
+ fi
+
+ local real_sum=$(b2sum "${archive}" | cut -d' ' -f1)
+ if [[ ${real_sum} != ${b2sum} ]]; then
+ eerror "Incorrect b2sum on ${filename}"
+ eerror " expected: ${b2sum}"
+ eerror " found: ${real_sum}"
+ die "Incorrect b2sum on ${filename}"
+ fi
+
+ rm -rf testdir || die
+ mkdir -p testdir || die
+
+ tbegin "unpacking ${filename}"
+ cd testdir || die
+
+ ln -s "${archive}" "${filename}" || die
+
+ local out
+ out=$(
+ _unpacker "${archive}" 2>&1
+ )
+ ret=$?
+ if [[ ${ret} -eq 0 ]]; then
+ if [[ ! -f ${unpacked} ]]; then
+ eerror "${unpacked} not found after unpacking"
+ ret=1
+ else
+ real_sum=$(b2sum "${unpacked}" | cut -d' ' -f1)
+ if [[ ${real_sum} != ${unp_b2sum} ]]; then
+ eerror "Incorrect b2sum on unpacked file ${unpacked}"
+ eerror " expected: ${unp_b2sum}"
+ eerror " found: ${real_sum}"
+ ret=1
+ fi
+ fi
+ fi
+ [[ ${ret} -ne 0 ]] && echo "${out}" >&2
+ tend ${ret}
+
+ cd .. || die
+}
+
+test_compressed_file .bz2 bzip2
+test_compressed_file .Z compress
+test_compressed_file .gz gzip
+test_compressed_file .lzma lzma
+test_compressed_file .xz xz
+test_compressed_file .lz lzip
+test_compressed_file .zst zstd
+test_compressed_file .lz4 lz4
+test_compressed_file .lzo lzop
+
+test_compressed_file_multistream .bz2 bzip2
+test_compressed_file_multistream .gz gzip
+test_compressed_file_multistream .xz xz
+test_compressed_file_multistream .lz lzip
+test_compressed_file_multistream .zst zstd
+
+test_compressed_file_with_junk .bz2 bzip2
+test_compressed_file_with_junk .lz lzip
+
+test_unpack test.tar test.in tar 'tar -cf ${archive} ${TESTFILE}'
+test_compressed_tar .tar.bz2 bzip2
+test_compressed_tar .tbz bzip2
+test_compressed_tar .tbz2 bzip2
+test_compressed_tar .tar.Z compress
+test_compressed_tar .tar.gz gzip
+test_compressed_tar .tgz gzip
+test_compressed_tar .tar.lzma lzma
+test_compressed_tar .tar.xz xz
+test_compressed_tar .txz xz
+test_compressed_tar .tar.lz lzip
+test_compressed_tar .tar.zst zstd
+test_compressed_tar .tar.lz4 lz4
+test_compressed_tar .tar.lzo lzop
+
+test_unpack test.cpio test.in cpio 'cpio -o --quiet <<<${TESTFILE} > ${archive}'
+test_compressed_cpio .cpio.bz2 bzip2
+test_compressed_cpio .cpio.Z compress
+test_compressed_cpio .cpio.gz gzip
+test_compressed_cpio .cpio.lzma lzma
+test_compressed_cpio .cpio.xz xz
+test_compressed_cpio .cpio.lz lzip
+test_compressed_cpio .cpio.zst zstd
+test_compressed_cpio .cpio.lz4 lz4
+test_compressed_cpio .cpio.lzo lzop
+
+test_deb
+test_deb .gz gzip
+test_deb .xz xz
+test_deb .bz2 bzip2
+test_deb .lzma lzma
+
+test_gpkg
+test_gpkg .gz gzip
+test_gpkg .bz2 bzip2
+test_gpkg .lz4 lz4
+test_gpkg .lz lzip
+test_gpkg .lzma lzma
+test_gpkg .lzo lzop
+test_gpkg .xz xz
+test_gpkg .zst zstd
+
+test_makeself --gzip gzip
+test_makeself --zstd zstd
+test_makeself --bzip2 bzip2
+test_makeself --xz xz
+test_makeself --lzo lzop
+test_makeself --lz4 lz4
+test_makeself --compress compress
+test_makeself --base64 base64
+test_makeself --nocomp tar
+
+test_unpack test.zip test.in zip 'zip -q ${archive} ${TESTFILE}'
+# test handling non-adjusted zip with junk prepended
+test_unpack test.zip test.in zip \
+ 'zip -q testdir/tmp.zip ${TESTFILE} && cat test.in testdir/tmp.zip > ${archive}'
+test_unpack test.7z test.in 7z '7z -bso0 a ${archive} ${TESTFILE}'
+test_unpack test.lha test.in lha 'lha a -q ${archive} ${TESTFILE}'
+test_unpack test.lzh test.in lha 'lha a -q ${archive} ${TESTFILE}'
+test_unpack test.rar test.in rar 'rar -idq a ${archive} ${TESTFILE}'
+
+# TODO: .run/.sh/.bin
+
+test_reject_junk .bz2
+test_reject_junk .Z
+test_reject_junk .gz
+test_reject_junk .lzma
+test_reject_junk .xz
+test_reject_junk .lz
+test_reject_junk .zst
+test_reject_junk .tar
+test_reject_junk .cpio
+test_reject_junk .gpkg.tar
+test_reject_junk .deb
+test_reject_junk .zip
+test_reject_junk .7z
+test_reject_junk .rar
+test_reject_junk .lha
+test_reject_junk .lzh
+
+DISTDIR=$(portageq envvar DISTDIR)
+if [[ -n ${DISTDIR} ]]; then
+ einfo "Using DISTDIR: ${DISTDIR}"
+ if [[ ${UNPACKER_TESTS_ONLINE} != 1 ]]; then
+ ewarn "Online tests will be skipped if distfiles are not found already."
+ ewarn "Set UNPACKER_TESTS_ONLINE=1 to enable fetching."
+ fi
+
+ # NB: a good idea to list the last file in the archive (to avoid
+ # passing on partial unpack)
+
+ # TODO: find test cases for makeself 2.0/2.0.1, 2.1.1, 2.1.2, 2.1.3
+
+ # makeself 1.5.4, gzip
+ test_online \
+ http://updates.lokigames.com/sof/sof-1.06a-cdrom-x86.run \
+ f76f605af08a19b77548455c0101e03aca7cae69462914e47911da2fadd6d4f3b766e1069556ead0d06c757b179ae2e8105e76ea37852f17796b47b4712aec87 \
+ update.sh \
+ ba7a3f8fa79bbed8ca3a34ead957aeaa308c6e6d6aedd603098aa9867ca745983ff98c83d65572e507f2c3c4e0778ae4984f8b69d2b8279741b06064253c5788
+
+ # makeself 1.6.0-nv*, xz
+ test_online \
+ https://download.nvidia.com/XFree86/Linux-x86/390.154/NVIDIA-Linux-x86-390.154.run \
+ 083d9dd234a37ec39a703ef7e0eb6ec165c24d2fcb5e92ca987c33df643d0604319eb65ef152c861acacd5a41858ab6b82c45c2c8ff270efc62b07727666daae \
+ libEGL_nvidia.so.390.154 \
+ 6665804947e71fb583dc7d5cc3a6f4514f612503000b0a9dbd8da5c362d3c2dcb2895d8cbbf5700a6f0e24cca9b0dd9c2cf5763d6fbb037f55257ac5af7d6084
+
+ # makeself 2.3.0, gzip
+ test_online \
+ http://www.sdrplay.com/software/SDRplay_RSP_API-Linux-3.07.1.run \
+ 059d9a5fbd14c0e7ecb969cd3e5afe8e3f42896175b443bdaa9f9108302a1c9ef5ad9769e62f824465611d74f67191fff71cc6dbe297e399e5b2f6824c650112 \
+ i686/sdrplay_apiService \
+ 806393c310d7e60dca7b8afee225bcc50c0d5771bdd04c3fa575eda2e687dc5c888279a7404316438b633fb91565a49899cf634194d43981151a12c6c284a162
+
+ # makeself 2.4.0, gzip
+ test_online \
+ http://www.sdrplay.com/software/SDRplay_RSP_API-Linux-2.13.1.run \
+ 7eff1aa35190db1ead5b1d96994d24ae2301e3a765d6701756c6304a1719aa32125fedacf6a6859d89b89db5dd6956ec0e8c7e814dbd6242db5614a53e89efb3 \
+ sdrplay_license.txt \
+ 041edb26ffb75b6b59e7a3514c6f81b05b06e0efe61cc56117d24f59733a6a6b1bca73a57dd11e0774ec443740ca55e6938cf6594a032ab4f14b23f2e732a3f2
+else
+ ewarn "Unable to obtain DISTDIR from portageq, skipping online tests"
+fi
+
+texit
diff --git a/eclass/tests/verify-sig.sh b/eclass/tests/verify-sig.sh
new file mode 100755
index 000000000000..a87e2c7703d7
--- /dev/null
+++ b/eclass/tests/verify-sig.sh
@@ -0,0 +1,94 @@
+#!/bin/bash
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+source tests-common.sh || exit
+
+inherit verify-sig
+
+TMP=$(mktemp -d)
+trap 'rm -rf "${TMP}"' EXIT
+cd "${TMP}" || die
+> empty || die
+> fail || die
+echo "The quick brown fox jumps over the lazy dog." > text || die
+
+testit() {
+ local expect=${1}
+ shift
+
+ tbegin "${*@Q}"
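+	# run in a subshell so that a failing (possibly die()-ing) command only
+	# affects this test case, not the whole script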
+ ( "${@}" )
+ [[ ${?} -eq ${expect} ]]
+ tend "${?}"
+}
+
+test_verify_unsigned_checksums() {
+ local format=${1}
+
+ testit 0 verify-sig_verify_unsigned_checksums checksums.txt "${format}" empty
+ testit 0 verify-sig_verify_unsigned_checksums checksums.txt "${format}" "empty text"
+ testit 1 verify-sig_verify_unsigned_checksums checksums.txt "${format}" other
+ testit 1 verify-sig_verify_unsigned_checksums checksums.txt "${format}" "empty other"
+ testit 1 verify-sig_verify_unsigned_checksums checksums.txt "${format}" fail
+ testit 1 verify-sig_verify_unsigned_checksums checksums.txt "${format}" "empty fail"
+}
+
+einfo "Testing coreutils format."
+eindent
+
+cat > checksums.txt <<-EOF || die
+ # some junk to test junk protection
+ b47cc0f104b62d4c7c30bcd68fd8e67613e287dc4ad8c310ef10cbadea9c4380 empty junk line
+ b47cc0f104b62d4c7c30bcd68gd8e67613e287dc4ad8c310ef10cbadea9c4380 empty
+
+ # sha1sums
+ da39a3ee5e6b4b0d3255bfef95601890afd80709 empty
+ 9c04cd6372077e9b11f70ca111c9807dc7137e4b text
+ 9c04cd6372077e9b11f70ca111c9807dc7137e4b fail
+
+ # sha256sums
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 empty
+ b47cc0f104b62d4c7c30bcd68fd8e67613e287dc4ad8c310ef10cbadea9c4380 text
+ b47cc0f104b62d4c7c30bcd68fd8e67613e287dc4ad8c310ef10cbadea9c4380 fail
+
+ # sha512sums
+ cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e empty
+ 020da0f4d8a4c8bfbc98274027740061d7df52ee07091ed6595a083e0f45327bbe59424312d86f218b74ed2e25507abaf5c7a5fcf4cafcf9538b705808fd55ec text
+ 020da0f4d8a4c8bfbc98274027740061d7df52ee07091ed6595a083e0f45327bbe59424312d86f218b74ed2e25507abaf5c7a5fcf4cafcf9538b705808fd55ec fail
+
+ # duplicate checksum
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 empty
+EOF
+
+test_verify_unsigned_checksums sha256
+eoutdent
+
+einfo "Testing openssl-dgst format."
+eindent
+
+> "annoying ( filename )= yes ).txt" || die
+
+cat > checksums.txt <<-EOF || die
+ junk text that ought to be ignored
+
+ SHA1(empty)=da39a3ee5e6b4b0d3255bfef95601890afd80709
+ SHA1(text)= 9c04cd6372077e9b11f70ca111c9807dc7137e4b
+ SHA1(fail)=9c04cd6372077e9b11f70ca111c9807dc7137e4b
+
+ SHA256(empty)=e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
+ SHA256(text)= b47cc0f104b62d4c7c30bcd68fd8e67613e287dc4ad8c310ef10cbadea9c4380
+ SHA256(fail)=b47cc0f104b62d4c7c30bcd68fd8e67613e287dc4ad8c310ef10cbadea9c4380
+
+ SHA256(annoying ( filename )= yes )= e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
+
+ SHA512(empty)=cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e
+ SHA512(text)= 020da0f4d8a4c8bfbc98274027740061d7df52ee07091ed6595a083e0f45327bbe59424312d86f218b74ed2e25507abaf5c7a5fcf4cafcf9538b705808fd55ec
+ SHA512(fail)=020da0f4d8a4c8bfbc98274027740061d7df52ee07091ed6595a083e0f45327bbe59424312d86f218b74ed2e25507abaf5c7a5fcf4cafcf9538b705808fd55ec
+EOF
+
+test_verify_unsigned_checksums openssl-dgst
+eoutdent
+
+texit
diff --git a/eclass/texlive-common.eclass b/eclass/texlive-common.eclass
index 40e2c3530510..b32ea2af1121 100644
--- a/eclass/texlive-common.eclass
+++ b/eclass/texlive-common.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: texlive-common.eclass
@@ -6,7 +6,7 @@
# tex@gentoo.org
# @AUTHOR:
# Original Author: Alexis Ballier <aballier@gentoo.org>
-# @SUPPORTED_EAPIS: 7
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Provide various functions used by both texlive-core and texlive modules
# @DESCRIPTION:
# Purpose: Provide various functions used by both texlive-core and texlive
@@ -14,55 +14,84 @@
#
# Note that this eclass *must* not assume the presence of any standard tex tool
+case ${EAPI} in
+ 7)
+ inherit eapi8-dosym
+ ;;
+ 8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
if [[ -z ${_TEXLIVE_COMMON_ECLASS} ]]; then
_TEXLIVE_COMMON_ECLASS=1
-case ${EAPI:-0} in
- [0-6]) die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}" ;;
- 7) inherit eapi8-dosym ;;
- *) die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}" ;;
-esac
-
-TEXMF_PATH=/usr/share/texmf
-TEXMF_DIST_PATH=/usr/share/texmf-dist
-TEXMF_VAR_PATH=/var/lib/texmf
+# @ECLASS_VARIABLE: CTAN_MIRROR_URL
+# @USER_VARIABLE
+# @DESCRIPTION:
+# This variable can be used to set the CTAN mirror that will be used to fetch
+# CTAN artifacts. Note that this mirror is usually only used as fallback
+# in case the Gentoo mirrors do not hold the requested files.
+#
+# Only Gentoo TeX developers may want to set this.
+#
+# Example:
+# @CODE
+# CTAN_MIRROR_URL='https://ftp.fau.de/ctan/' emerge -1v app-text/texlive-core
+# @CODE
+: "${CTAN_MIRROR_URL:="https://mirrors.ctan.org"}"
# @FUNCTION: texlive-common_handle_config_files
# @DESCRIPTION:
# Has to be called in src_install after having installed the files in ${D}
-# This function will move the relevant files to /etc/texmf and symling them
+# This function will move the relevant files to /etc/texmf and symlink them
# from their original location. This is to allow easy update of texlive's
-# configuration
-
+# configuration.
+# Called by app-text/texlive-core and texlive-module.eclass.
texlive-common_handle_config_files() {
+ local texmf_path
+ # Starting with TeX Live 2023, we install in texmf-dist, where a
+ # distribution-provided TeX Live installation is supposed to be,
+ # instead of texmf.
+ if ver_test -ge 2023; then
+ texmf_path=/usr/share/texmf-dist
+ else
+ texmf_path=/usr/share/texmf
+ fi
+
# Handle config files properly
- [[ -d ${ED}${TEXMF_PATH} ]] || return
- cd "${ED}${TEXMF_PATH}" || die
+ [[ -d ${ED}${texmf_path} ]] || return
+ cd "${ED}${texmf_path}" || die
while read -r f; do
- if [[ ${f#*config} != ${f} || ${f#doc} != ${f} || ${f#source} != ${f} || ${f#tex} != ${f} ]] ; then
+ if [[ ${f#*config} != "${f}" || ${f#doc} != "${f}" || ${f#source} != "${f}" || ${f#tex} != "${f}" ]] ; then
continue
fi
- dodir /etc/texmf/$(dirname ${f}).d
- einfo "Moving (and symlinking) ${EPREFIX}${TEXMF_PATH}/${f} to ${EPREFIX}/etc/texmf/$(dirname ${f}).d"
- mv "${ED}/${TEXMF_PATH}/${f}" "${ED}/etc/texmf/$(dirname ${f}).d" || die "mv ${f} failed."
- dosym8 -r /etc/texmf/$(dirname ${f}).d/$(basename ${f}) ${TEXMF_PATH}/${f}
- done < <(find -name '*.cnf' -type f -o -name '*.cfg' -type f | sed -e "s:\./::g")
+ local rel_dir
+ rel_dir="$(dirname "${f}")"
+
+ dodir "/etc/texmf/${rel_dir}.d"
+ einfo "Moving (and symlinking) ${EPREFIX}${texmf_path}/${f} to ${EPREFIX}/etc/texmf/${rel_dir}.d"
+ mv "${ED}/${texmf_path}/${f}" "${ED}/etc/texmf/${rel_dir}.d" || die "mv ${f} failed."
+
+ local dosym=dosym
+ [[ ${EAPI} == 7 ]] && dosym=dosym8
+ ${dosym} -r "/etc/texmf/${rel_dir}.d/$(basename "${f}")" "${texmf_path}/${f}"
+ done < <(find . -name '*.cnf' -type f -o -name '*.cfg' -type f | sed -e "s:\./::g")
}
# @FUNCTION: texlive-common_is_file_present_in_texmf
# @DESCRIPTION:
# Return if a file is present in the texmf tree
# Call it from the directory containing texmf and texmf-dist
-
+# Called by app-text/texlive-core.
texlive-common_is_file_present_in_texmf() {
local mark="${T}/${1}.found"
if [[ -d texmf ]]; then
- find texmf -name ${1} -exec touch ${mark} {} + || die
+ find texmf -name "${1}" -exec touch "${mark}" {} + || die
fi
if [[ -d texmf-dist ]]; then
- find texmf-dist -name ${1} -exec touch ${mark} {} + || die
+ find texmf-dist -name "${1}" -exec touch "${mark}" {} + || die
fi
[ -f "${mark}" ]
}
@@ -78,7 +107,7 @@ texlive-common_is_file_present_in_texmf() {
# ( Arguments are switched because texlinks main function sends them switched )
# This function should not be called from an ebuild, prefer etexlinks that will
# also do the fmtutil file parsing.
-
+# Called by texlive-common.eclass and texlive-module.eclass.
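+#
+# Example:
+# @CODE
+# # Illustrative: creates /usr/bin/latex -> pdftex, unless the symlink is
+# # skipped (already present or handled by texlive-core).
+# texlive-common_do_symlinks latex pdftex
+# @CODE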
texlive-common_do_symlinks() {
while [[ ${#} != 0 ]]; do
case ${1} in
@@ -89,13 +118,13 @@ texlive-common_do_symlinks() {
einfo "Symlink ${1} -> ${2} skipped (texlive-core takes care of it)"
;;
*)
- if [[ ${1} == ${2} ]]; then
+ if [[ ${1} == "${2}" ]]; then
einfo "Symlink ${1} -> ${2} skipped"
elif [[ -e ${ED}/usr/bin/${1} || -L ${ED}/usr/bin/${1} ]]; then
einfo "Symlink ${1} skipped (file exists)"
else
einfo "Making symlink from ${1} to ${2}"
- dosym ${2} /usr/bin/${1}
+ dosym "${2}" "/usr/bin/${1}"
fi
;;
esac
@@ -115,7 +144,7 @@ texlive-common_do_symlinks() {
# the same dir as the source)
# Also, as this eclass must not depend on a tex distribution to be installed we
# cannot use texlinks from here.
-
+# Called by texlive-module.eclass.
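+#
+# Example:
+# @CODE
+# # Illustrative; "foo" is a hypothetical name. The argument is a file in
+# # fmtutil.cnf format.
+# etexlinks "${S}/texmf-dist/fmtutil/format.foo.cnf"
+# @CODE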
etexlinks() {
# Install symlinks from formats to engines
texlive-common_do_symlinks $(sed '/^[ ]*#/d; /^[ ]*$/d' "$1" | awk '{print $1, $2}')
@@ -126,13 +155,14 @@ etexlinks() {
# @DESCRIPTION:
# Symlinks a script from the texmf tree to /usr/bin. Requires permissions to be
# correctly set for the file that it will point to.
-
+# Called by app-text/epspdf and texlive-module.eclass.
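+#
+# Example:
+# @CODE
+# # Illustrative; "foo" is a hypothetical script. The path is relative to
+# # /usr/share, so this installs
+# # /usr/bin/foo -> ../share/texmf-dist/scripts/foo/foo.sh
+# dobin_texmf_scripts texmf-dist/scripts/foo/foo.sh
+# @CODE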
dobin_texmf_scripts() {
while [[ ${#} -gt 0 ]] ; do
- local trg=$(basename ${1} | sed 's,\.[^/]*$,,' | tr '[:upper:]' '[:lower:]')
+ local trg
+ trg=$(basename "${1}" | sed 's,\.[^/]*$,,' | tr '[:upper:]' '[:lower:]')
einfo "Installing ${1} as ${trg} bin wrapper"
[[ -x ${ED}/usr/share/${1} ]] || die "Trying to install a non existing or non executable symlink to /usr/bin: ${1}"
- dosym ../share/${1} /usr/bin/${trg}
+ dosym "../share/${1}" "/usr/bin/${trg}"
shift
done
}
@@ -142,11 +172,17 @@ dobin_texmf_scripts() {
# Runs texmf-update if it is available and prints a warning otherwise. This
# function helps in factorizing some code. Useful in ebuilds' pkg_postinst and
# pkg_postrm phases.
-
+# Called by app-text/dvipsk, app-text/texlive-core, dev-libs/kpathsea, and
+# texlive-module.eclass.
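+#
+# Example:
+# @CODE
+# pkg_postinst() {
+#     etexmf-update
+# }
+# @CODE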
etexmf-update() {
if has_version 'app-text/texlive-core' ; then
if [[ -z ${ROOT} && -x "${EPREFIX}"/usr/sbin/texmf-update ]] ; then
"${EPREFIX}"/usr/sbin/texmf-update
+ local res="${?}"
+ if [[ "${res}" -ne 0 ]] &&
+ { [[ ${CATEGORY} != dev-texlive ]] || ver_test -ge 2023; } then
+ die -n "texmf-update returned non-zero exit status ${res}"
+ fi
else
ewarn "Cannot run texmf-update for some reason."
ewarn "Your texmf tree might be inconsistent with your configuration"
@@ -160,12 +196,12 @@ etexmf-update() {
# Runs fmtutil-sys if it is available and prints a warning otherwise. This
# function helps in factorizing some code. Used in ebuilds' pkg_postinst to
# force a rebuild of TeX formats.
-
efmtutil-sys() {
if has_version 'app-text/texlive-core' ; then
if [[ -z ${ROOT} && -x "${EPREFIX}"/usr/bin/fmtutil-sys ]] ; then
einfo "Rebuilding formats"
- "${EPREFIX}"/usr/bin/fmtutil-sys --all &> /dev/null || die
+ "${EPREFIX}"/usr/bin/fmtutil-sys --all &> /dev/null ||
+ die -n "fmtutil-sys returned non-zero exit status ${?}"
else
ewarn "Cannot run fmtutil-sys for some reason."
ewarn "Your formats might be inconsistent with your installed ${PN} version"
@@ -174,4 +210,99 @@ efmtutil-sys() {
fi
}
+# @FUNCTION: texlive-common_append_to_src_uri
+# @DESCRIPTION:
+# Takes the name of a variable as input. The variable must contain a
+# list of texlive packages. Every texlive package in the variable is
+# transformed into a URL and appended to SRC_URI.
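+#
+# Example:
+# @CODE
+# # Illustrative; the variable name and package list are hypothetical.
+# MY_TL_PACKAGES="collection-basic hyphen-base"
+# texlive-common_append_to_src_uri MY_TL_PACKAGES
+# @CODE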
+texlive-common_append_to_src_uri() {
+ local tl_uri=( ${!1} )
+
+ # Starting from TeX Live 2009, upstream provides .tar.xz modules.
+ local tl_pkgext=tar.xz
+
+ local tl_uri_prefix="https://dev.gentoo.org/~@dev@/distfiles/texlive/tl-"
+ local tl_2023_uri_prefix="https://dev.gentoo.org/~@dev@/distfiles/texlive/"
+
+ local tl_dev
+	# If the version is less than 2023 and the package is in the
+	# dev-texlive category, we fall back to the old SRC_URI layout. With
+ # the 2023 bump, packages outside the dev-texlive category start to
+ # inherit texlive-common.eclass.
+ if ver_test -lt 2023 && [[ ${CATEGORY} == dev-texlive ]]; then
+ local texlive_lt_2023_devs=( zlogene dilfridge sam )
+ local tl_uri_suffix="-${PV}.${tl_pkgext}"
+
+ tl_uri=( "${tl_uri[@]/%/${tl_uri_suffix}}" )
+ for tl_dev in "${texlive_lt_2023_devs[@]}"; do
+ SRC_URI+=" ${tl_uri[*]/#/${tl_uri_prefix/@dev@/${tl_dev}}}"
+ done
+ else
+ local texlive_ge_2023_devs=( flow )
+ local tl_mirror="${CTAN_MIRROR_URL%/}/systems/texlive/tlnet/archive/"
+
+ tl_uri=( "${tl_uri[@]/%/.${tl_pkgext}}" )
+ SRC_URI+=" ${tl_uri[*]/#/${tl_mirror}}"
+ for tl_dev in "${texlive_ge_2023_devs[@]}"; do
+ SRC_URI+=" ${tl_uri[*]/#/${tl_2023_uri_prefix/@dev@/${tl_dev}}}"
+ done
+ fi
+}
+
+# @FUNCTION: texlive-common_update_tlpdb
+# @DESCRIPTION:
+# Update the TexLive package database at /usr/share/tlpkg/texlive.tlpdb.
+texlive-common_update_tlpdb() {
+ [[ -v TL_PV && ${TL_PV} -lt 2023 ]] && return
+
+ # If we are updating this package, then there is no need to update
+ # the tlpdb in postrm, as it will be again updated in postinst.
+	# the tlpdb in postrm, as it will be updated again in postinst.
+
+ local tlpkg="${EROOT}"/usr/share/tlpkg
+ local tlpobj="${tlpkg}"/tlpobj
+ local tlpdb="${tlpkg}"/texlive.tlpdb
+
+ ebegin "Regenerating TexLive package database"
+
+ local new_tlpdb="${T}"/texlive.tlpdb
+
+ touch "${new_tlpdb}" || die
+
+ if [[ -d "${tlpobj}" ]]; then
+ find "${tlpobj}" -maxdepth 1 -type f -name "*.tlpobj" -print0 |
+ sort -z |
+ xargs -0 --no-run-if-empty cat >> "${new_tlpdb}"
+ assert "generating tlpdb failed"
+ fi
+
+ if [[ -f ${tlpdb} ]]; then
+ cmp -s "${new_tlpdb}" "${tlpdb}"
+ local ret=$?
+ case ${ret} in
+ # content equal
+ 0)
+ # Nothing to do, return.
+ eend 0
+ return
+ ;;
+ # content differs
+ 1)
+ ;;
+ # cmp failed with an error
+ *)
+ eend ${ret} "comparing new and existing tlpdb failed (exit status: ${ret})"
+ die
+ ;;
+ esac
+ fi
+
+ mv "${new_tlpdb}" "${tlpdb}"
+ eend $? "moving tlpdb into position failed (exit status: ${?})" || die
+
+ if [[ ! -s ${tlpdb} ]]; then
+ rm "${tlpdb}" || die
+ fi
+}
+
fi
diff --git a/eclass/texlive-module.eclass b/eclass/texlive-module.eclass
index 68276714a4bc..401b75bc4d11 100644
--- a/eclass/texlive-module.eclass
+++ b/eclass/texlive-module.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: texlive-module.eclass
@@ -6,7 +6,7 @@
# tex@gentoo.org
# @AUTHOR:
# Original Author: Alexis Ballier <aballier@gentoo.org>
-# @SUPPORTED_EAPIS: 7
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Provide generic install functions so that modular texlive's texmf ebuild will only have to inherit this eclass
# @DESCRIPTION:
# Purpose: Provide generic install functions so that modular texlive's texmf ebuilds will
@@ -56,7 +56,7 @@
# @DEFAULT_UNSET
# @DESCRIPTION:
# A space separated list of links to add for BINSCRIPTS.
-# The systax is: foo:bar to create a symlink bar -> foo.
+# The syntax is: foo:bar to create a symlink bar -> foo.
# @ECLASS_VARIABLE: TL_PV
# @INTERNAL
@@ -71,63 +71,51 @@
# Information to display about the package.
# e.g. for enabling/disabling a feature
-if [[ -z ${_TEXLIVE_MODULE_ECLASS} ]]; then
-_TEXLIVE_MODULE_ECLASS=1
-
-case ${EAPI:-0} in
- [0-6]) die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}" ;;
- 7) inherit texlive-common ;;
- *) die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}" ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-HOMEPAGE="http://www.tug.org/texlive/"
+if [[ -z ${_TEXLIVE_MODULE_ECLASS} ]]; then
+_TEXLIVE_MODULE_ECLASS=1
-COMMON_DEPEND=">=app-text/texlive-core-${TL_PV:-${PV}}"
+inherit texlive-common
-IUSE="source"
+HOMEPAGE="https://www.tug.org/texlive/"
-# Starting from TeX Live 2009, upstream provides .tar.xz modules.
-PKGEXT=tar.xz
+IUSE="doc source"
-# Now where should we get these files?
-TEXLIVE_DEVS=${TEXLIVE_DEVS:- zlogene dilfridge }
+if [[ -z ${TL_PV} ]] \
+ && [[ ${EAPI} != 7 ]] \
+ && [[ ${CATEGORY} == dev-texlive ]]; then
+ TL_PV=$(ver_cut 1)
+fi
+RDEPEND=">=app-text/texlive-core-${TL_PV:-${PV}}"
# We do not need anything from SYSROOT:
# Everything is built from the texlive install in /
# Generated files are noarch
-BDEPEND="${COMMON_DEPEND}
- app-arch/xz-utils"
+BDEPEND="
+ ${RDEPEND}
+ app-arch/xz-utils
+"
-for i in ${TEXLIVE_MODULE_CONTENTS}; do
- for tldev in ${TEXLIVE_DEVS}; do
- SRC_URI="${SRC_URI} https://dev.gentoo.org/~${tldev}/distfiles/texlive/tl-${i}-${PV}.${PKGEXT}"
- done
-done
+texlive-common_append_to_src_uri TEXLIVE_MODULE_CONTENTS
# Forge doc SRC_URI
-[[ -n ${TEXLIVE_MODULE_DOC_CONTENTS} ]] && SRC_URI="${SRC_URI} doc? ("
-for i in ${TEXLIVE_MODULE_DOC_CONTENTS}; do
- for tldev in ${TEXLIVE_DEVS}; do
- SRC_URI="${SRC_URI} https://dev.gentoo.org/~${tldev}/distfiles/texlive/tl-${i}-${PV}.${PKGEXT}"
- done
-done
-[[ -n ${TEXLIVE_MODULE_DOC_CONTENTS} ]] && SRC_URI="${SRC_URI} )"
+if [[ -n ${TEXLIVE_MODULE_DOC_CONTENTS} ]]; then
+ SRC_URI+=" doc? ("
+ texlive-common_append_to_src_uri TEXLIVE_MODULE_DOC_CONTENTS
+ SRC_URI+=" )"
+fi
# Forge source SRC_URI
-if [[ -n ${TEXLIVE_MODULE_SRC_CONTENTS} ]] ; then
- SRC_URI="${SRC_URI} source? ("
- for i in ${TEXLIVE_MODULE_SRC_CONTENTS}; do
- for tldev in ${TEXLIVE_DEVS}; do
- SRC_URI="${SRC_URI} https://dev.gentoo.org/~${tldev}/distfiles/texlive/tl-${i}-${PV}.${PKGEXT}"
- done
- done
- SRC_URI="${SRC_URI} )"
+if [[ -n ${TEXLIVE_MODULE_SRC_CONTENTS} ]]; then
+ SRC_URI+=" source? ("
+ texlive-common_append_to_src_uri TEXLIVE_MODULE_SRC_CONTENTS
+ SRC_URI+=" )"
fi
-RDEPEND="${COMMON_DEPEND}"
-
-IUSE="${IUSE} doc"
-
# @ECLASS_VARIABLE: TEXLIVE_MODULE_OPTIONAL_ENGINE
# @DEFAULT_UNSET
# @DESCRIPTION:
@@ -147,19 +135,26 @@ S="${WORKDIR}"
# Only for TeX Live 2009 and later.
# After unpacking, the files that need to be relocated are moved accordingly.
-RELOC_TARGET=texmf-dist
-
texlive-module_src_unpack() {
unpack ${A}
+ local RELOC_TARGET=texmf-dist
sed -n -e 's:\s*RELOC/::p' tlpkg/tlpobj/* > "${T}/reloclist" || die
sed -e 's/\/[^/]*$//' -e "s:^:${RELOC_TARGET}/:" "${T}/reloclist" |
sort -u |
xargs mkdir -p || die
- local i
- while read i; do
- mv "${i}" "${RELOC_TARGET}/${i%/*}" || die
+ local i dir="" files=()
+ while read -r i; do
+ if [[ ${RELOC_TARGET}/${i%/*} != "${dir}" ]]; then
+ # new dir, do the previous move
+ [[ -z ${dir} ]] || mv "${files[@]}" "${dir}" || die
+ dir="${RELOC_TARGET}/${i%/*}"
+ files=()
+ fi
+ # collect files with same destination dir
+ files+=( "${i}" )
done < "${T}/reloclist"
+ mv "${files[@]}" "${dir}" || die
}
# @FUNCTION: texlive-module_add_format
@@ -266,7 +261,7 @@ texlive-module_make_language_lua_lines() {
fi
if [[ -n ${luaspecial} ]]; then
- printf "\t\tspecial = '%s',\n" "$luaspecial" >> "${dest}" || die
+ printf "\t\tspecial = '%s',\n" "${luaspecial}" >> "${dest}" || die
fi
printf "\t},\n" >> "${dest}" || die
@@ -294,7 +289,7 @@ texlive-module_src_compile() {
for i in $(<"${T}/jobs");
do
- j="$(echo $i | tr '#' ' ')"
+ j="$(echo "${i}" | tr '#' ' ')"
command=${j%% *}
parameter=${j#* }
case ${command} in
@@ -309,24 +304,24 @@ texlive-module_src_compile() {
addDvipdfmMap)
echo "f ${parameter}" >> "${S}/${PN}-config";;
AddHyphen)
- texlive-module_make_language_def_lines ${parameter}
- texlive-module_make_language_dat_lines ${parameter}
- texlive-module_make_language_lua_lines ${parameter}
+ texlive-module_make_language_def_lines "${parameter}"
+ texlive-module_make_language_dat_lines "${parameter}"
+ texlive-module_make_language_lua_lines "${parameter}"
;;
AddFormat)
- texlive-module_add_format ${parameter};;
+ texlive-module_add_format "${parameter}";;
BuildFormat)
einfo "Format ${parameter} already built.";;
BuildLanguageDat)
- einfo "Language file $parameter already generated.";;
+ einfo "Language file ${parameter} already generated.";;
*)
- die "No rule to proccess ${command}. Please file a bug."
+ die "No rule to process ${command}. Please file a bug."
esac
done
# Determine texlive-core version for fmtutil call
- fmt_call="$(has_version '>=app-text/texlive-core-2019' \
- && echo "fmtutil-user" || echo "fmtutil")"
+ fmt_call="$(has_version '>=app-text/texlive-core-2019' \
+ && echo "fmtutil-user" || echo "fmtutil")"
# Build format files
for i in texmf-dist/fmtutil/format*.cnf; do
@@ -339,7 +334,7 @@ texlive-module_src_compile() {
mkdir texmf-var/web2c || die
fi
VARTEXFONTS="${T}/fonts" TEXMFHOME="${S}/texmf:${S}/texmf-dist:${S}/texmf-var"\
- env -u TEXINPUTS $fmt_call --cnffile "${i}" --fmtdir "${S}/texmf-var/web2c" --all\
+ env -u TEXINPUTS "${fmt_call}" --cnffile "${i}" --fmtdir "${S}/texmf-var/web2c" --all\
|| die "failed to build format ${i}"
fi
done
@@ -386,7 +381,6 @@ texlive-module_src_install() {
cp -pR tlpkg "${ED}/usr/share/" || die
fi
-
if [[ -d texmf-var ]]; then
insinto /var/lib/texmf
doins -r texmf-var/.
@@ -416,14 +410,14 @@ texlive-module_src_install() {
[[ -n ${TEXLIVE_MODULE_BINSCRIPTS} ]] && dobin_texmf_scripts ${TEXLIVE_MODULE_BINSCRIPTS}
if [[ -n ${TEXLIVE_MODULE_BINLINKS} ]] ; then
+ dodir "/usr/bin"
for i in ${TEXLIVE_MODULE_BINLINKS} ; do
- [[ -f ${ED}/usr/bin/${i%:*} ]] || die "Trying to install an invalid BINLINK. This should not happen. Please file a bug."
- dosym ${i%:*} /usr/bin/${i#*:}
+ [[ -f ${ED}/usr/bin/${i%:*} ]] || die "Trying to install an invalid BINLINK ${i%:*}. This should not happen. Please file a bug."
+ dosym "${i%:*}" "/usr/bin/${i#*:}"
done
fi
texlive-common_handle_config_files
- TEXMF_PATH=${TEXMF_DIST_PATH} texlive-common_handle_config_files
}
# @FUNCTION: texlive-module_pkg_postinst
@@ -434,6 +428,7 @@ texlive-module_src_install() {
texlive-module_pkg_postinst() {
etexmf-update
+ texlive-common_update_tlpdb
[[ -n ${TL_MODULE_INFORMATION} ]] && elog "${TL_MODULE_INFORMATION}"
}
@@ -444,9 +439,10 @@ texlive-module_pkg_postinst() {
# installed texmf trees.
texlive-module_pkg_postrm() {
- etexmf-update
+ [[ -z ${REPLACED_BY_VERSION} ]] && etexmf-update
+ texlive-common_update_tlpdb
}
-EXPORT_FUNCTIONS src_unpack src_compile src_install pkg_postinst pkg_postrm
-
fi
+
+EXPORT_FUNCTIONS src_unpack src_compile src_install pkg_postinst pkg_postrm
diff --git a/eclass/toolchain-autoconf.eclass b/eclass/toolchain-autoconf.eclass
index 2c8184f894cc..330ec45dd7d1 100644
--- a/eclass/toolchain-autoconf.eclass
+++ b/eclass/toolchain-autoconf.eclass
@@ -1,29 +1,43 @@
-# Copyright 1999-2019 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: toolchain-autoconf.eclass
# @MAINTAINER:
# <base-system@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7
-# @BLURB: Common code for sys-devel/autoconf ebuilds
+# @SUPPORTED_EAPIS: 7 8
+# @BLURB: Common code for dev-build/autoconf ebuilds
# @DESCRIPTION:
# This eclass contains the common phase functions migrated from
-# sys-devel/autoconf eblits.
-
-case ${EAPI:-0} in
- [0-5])
- die "${ECLASS} is banned in EAPI ${EAPI:-0}"
- ;;
- [6-7])
- ;;
- *)
- die "Unknown EAPI ${EAPI:-0}"
- ;;
+# dev-build/autoconf eblits.
+
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_TOOLCHAIN_AUTOCONF_ECLASS} ]]; then
+_TOOLCHAIN_AUTOCONF_ECLASS=1
-EXPORT_FUNCTIONS src_prepare src_configure src_install
+# @ECLASS_VARIABLE: TC_AUTOCONF_BREAK_INFOS
+# @DESCRIPTION:
+# Enables slotting logic on the installed info pages. This includes
+# mangling the pages in order to include a version number. Empty by
+# default, and only exists for old ebuild revisions to use. Do not set
+# in new ebuilds. Set to a non-empty value to enable.
+# @DEPRECATED: none
+: "${TC_AUTOCONF_BREAK_INFOS:=}"
+
+# @ECLASS_VARIABLE: TC_AUTOCONF_INFOPATH
+# @DESCRIPTION:
+# Where to install info files if not slotting.
+TC_AUTOCONF_INFOPATH="${EPREFIX}/usr/share/${P}/info"
+
+# @ECLASS_VARIABLE: TC_AUTOCONF_ENVPREFIX
+# @DESCRIPTION:
+# Prefix number for env.d files produced by this eclass. Defaults to
+# 06. Note that the generated env.d filename format is
+# "${TC_AUTOCONF_ENVPREFIX}${PN}$((99999-(major*1000+minor)))"
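+#
+# For example, with autoconf-2.71 (major 2, minor 71) the generated file
+# would be "06autoconf97928", since 99999 - (2*1000 + 71) = 97928.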
+: "${TC_AUTOCONF_ENVPREFIX:=06}"
toolchain-autoconf_src_prepare() {
find -name Makefile.in -exec sed -i '/^pkgdatadir/s:$:-@VERSION@:' {} + || die
@@ -33,7 +47,18 @@ toolchain-autoconf_src_prepare() {
toolchain-autoconf_src_configure() {
# Disable Emacs in the build system since it is in a separate package.
export EMACS=no
- econf --program-suffix="-${PV}" || die
+
+ MY_P="${P#autoconf-}"
+
+ local myconf=(
+ --program-suffix="-${MY_P}"
+ )
+ if [[ -z "${TC_AUTOCONF_BREAK_INFOS}" && "${SLOT}" != 0 ]]; then
+ myconf+=(
+ --infodir="${TC_AUTOCONF_INFOPATH}"
+ )
+ fi
+ econf "${myconf[@]}" "${@}" || die
# econf updates config.{sub,guess} which forces the manpages
# to be regenerated which we dont want to do #146621
touch man/*.1
@@ -72,8 +97,29 @@ slot_info_pages() {
toolchain-autoconf_src_install() {
default
- slot_info_pages
+ if [[ -n "${TC_AUTOCONF_BREAK_INFOS}" ]]; then
+ slot_info_pages
+ else
+ rm -f dir || die
+
+ local major="$(ver_cut 1)"
+ local minor="$(ver_cut 2)"
+ local idx="$((99999-(major*1000+minor)))"
+ newenvd - "${TC_AUTOCONF_ENVPREFIX}${PN}${idx}" <<-EOF
+ INFOPATH="${TC_AUTOCONF_INFOPATH}"
+ EOF
+
+ pushd "${D}/${TC_AUTOCONF_INFOPATH}" >/dev/null || die
+ for f in *.info*; do
+ # Install convenience aliases for versioned Autoconf pages.
+ ln -s "$f" "${f/./-${PV}.}" || die
+ done
+ popd >/dev/null || die
+
+ docompress "${TC_AUTOCONF_INFOPATH}"
+ fi
}
-_TOOLCHAIN_AUTOCONF_ECLASS=1
fi
+
+EXPORT_FUNCTIONS src_prepare src_configure src_install
diff --git a/eclass/toolchain-funcs.eclass b/eclass/toolchain-funcs.eclass
index 54d4b0912a6e..cde84e6f34c8 100644
--- a/eclass/toolchain-funcs.eclass
+++ b/eclass/toolchain-funcs.eclass
@@ -1,10 +1,10 @@
-# Copyright 2002-2022 Gentoo Authors
+# Copyright 2002-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: toolchain-funcs.eclass
# @MAINTAINER:
# Toolchain Ninjas <toolchain@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: functions to query common info about the toolchain
# @DESCRIPTION:
# The toolchain-funcs aims to provide a complete suite of functions
@@ -13,9 +13,8 @@
# in such a way that you can rely on the function always returning
# something sane.
-case ${EAPI:-0} in
- # EAPI=0 is still used by crossdev, bug #797367
- 0|5|6|7|8) ;;
+case ${EAPI} in
+ 6|7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -339,7 +338,7 @@ tc-is-static-only() {
tc-stack-grows-down() {
# List the few that grow up.
case ${ARCH} in
- hppa|metag) return 1 ;;
+ hppa|metag) return 1 ;;
esac
# Assume all others grow down.
@@ -356,16 +355,16 @@ tc-export_build_env() {
# Some build envs will initialize vars like:
# : ${BUILD_LDFLAGS:-${LDFLAGS}}
# So make sure all variables are non-empty. #526734
- : ${BUILD_CFLAGS:=-O1 -pipe}
- : ${BUILD_CXXFLAGS:=-O1 -pipe}
- : ${BUILD_CPPFLAGS:= }
- : ${BUILD_LDFLAGS:= }
+ : "${BUILD_CFLAGS:=-O1 -pipe}"
+ : "${BUILD_CXXFLAGS:=-O1 -pipe}"
+ : "${BUILD_CPPFLAGS:= }"
+ : "${BUILD_LDFLAGS:= }"
else
# https://bugs.gentoo.org/654424
- : ${BUILD_CFLAGS:=${CFLAGS}}
- : ${BUILD_CXXFLAGS:=${CXXFLAGS}}
- : ${BUILD_CPPFLAGS:=${CPPFLAGS}}
- : ${BUILD_LDFLAGS:=${LDFLAGS}}
+ : "${BUILD_CFLAGS:=${CFLAGS}}"
+ : "${BUILD_CXXFLAGS:=${CXXFLAGS}}"
+ : "${BUILD_CPPFLAGS:=${CPPFLAGS}}"
+ : "${BUILD_LDFLAGS:=${LDFLAGS}}"
fi
export BUILD_{C,CXX,CPP,LD}FLAGS
@@ -378,7 +377,6 @@ tc-export_build_env() {
# @FUNCTION: tc-env_build
# @USAGE: <command> [command args]
-# @INTERNAL
# @DESCRIPTION:
# Setup the compile environment to the build tools and then execute the
# specified command. We use tc-getBUILD_XX here so that we work with
@@ -424,26 +422,27 @@ tc-env_build() {
# src_configure() {
# ECONF_SOURCE=${S}
# if tc-is-cross-compiler ; then
-# mkdir "${WORKDIR}"/${CBUILD}
-# pushd "${WORKDIR}"/${CBUILD} >/dev/null
+# mkdir "${WORKDIR}"/${CBUILD} || die
+# pushd "${WORKDIR}"/${CBUILD} >/dev/null || die
# econf_build --disable-some-unused-stuff
-# popd >/dev/null
+# popd >/dev/null || die
# fi
# ... normal build paths ...
# }
# src_compile() {
# if tc-is-cross-compiler ; then
-# pushd "${WORKDIR}"/${CBUILD} >/dev/null
+# pushd "${WORKDIR}"/${CBUILD} >/dev/null || die
# emake one-or-two-build-tools
-# ln/mv build-tools to normal build paths in ${S}/
-# popd >/dev/null
+# ln/mv build-tools to normal build paths in ${S}/ || die
+# popd >/dev/null || die
# fi
# ... normal build paths ...
# }
# @CODE
econf_build() {
local CBUILD=${CBUILD:-${CHOST}}
- tc-env_build econf --build=${CBUILD} --host=${CBUILD} "$@"
+ econf_env() { CHOST=${CBUILD} econf "$@"; }
+ tc-env_build econf_env "$@"
}
# @FUNCTION: tc-ld-is-gold
@@ -453,6 +452,9 @@ econf_build() {
tc-ld-is-gold() {
local out
+ # Ensure ld output is in English.
+ local -x LC_ALL=C
+
# First check the linker directly.
out=$($(tc-getLD "$@") --version 2>&1)
if [[ ${out} == *"GNU gold"* ]] ; then
@@ -464,7 +466,7 @@ tc-ld-is-gold() {
# options and not CFLAGS/CXXFLAGS.
local base="${T}/test-tc-gold"
cat <<-EOF > "${base}.c"
- int main() { return 0; }
+ int main(void) { return 0; }
EOF
out=$($(tc-getCC "$@") ${CFLAGS} ${CPPFLAGS} ${LDFLAGS} -Wl,--version "${base}.c" -o "${base}" 2>&1)
rm -f "${base}"*
@@ -483,6 +485,9 @@ tc-ld-is-gold() {
tc-ld-is-lld() {
local out
+ # Ensure ld output is in English.
+ local -x LC_ALL=C
+
# First check the linker directly.
out=$($(tc-getLD "$@") --version 2>&1)
if [[ ${out} == *"LLD"* ]] ; then
@@ -494,7 +499,7 @@ tc-ld-is-lld() {
# options and not CFLAGS/CXXFLAGS.
local base="${T}/test-tc-lld"
cat <<-EOF > "${base}.c"
- int main() { return 0; }
+ int main(void) { return 0; }
EOF
out=$($(tc-getCC "$@") ${CFLAGS} ${CPPFLAGS} ${LDFLAGS} -Wl,--version "${base}.c" -o "${base}" 2>&1)
rm -f "${base}"*
@@ -529,43 +534,15 @@ tc-ld-force-bfd() {
ewarn "Forcing usage of the BFD linker"
# Set up LD to point directly to bfd if it's available.
- # We need to extract the first word in case there are flags appended
- # to its value (like multilib). #545218
- local ld=$(tc-getLD "$@")
- local bfd_ld="${ld%% *}.bfd"
- local path_ld=$(which "${bfd_ld}" 2>/dev/null)
+ # Unset LD first so we get the default value from tc-getLD.
+ local ld=$(unset LD; tc-getLD "$@")
+ local bfd_ld="${ld}.bfd"
+ local path_ld=$(type -P "${bfd_ld}" 2>/dev/null)
[[ -e ${path_ld} ]] && export LD=${bfd_ld}
# Set up LDFLAGS to select bfd based on the gcc / clang version.
- local fallback="true"
- if tc-is-gcc; then
- local major=$(gcc-major-version "$@")
- local minor=$(gcc-minor-version "$@")
- if [[ ${major} -gt 4 ]] || [[ ${major} -eq 4 && ${minor} -ge 8 ]]; then
- # gcc-4.8+ supports -fuse-ld directly.
- export LDFLAGS="${LDFLAGS} -fuse-ld=bfd"
- fallback="false"
- fi
- elif tc-is-clang; then
- local major=$(clang-major-version "$@")
- local minor=$(clang-minor-version "$@")
- if [[ ${major} -gt 3 ]] || [[ ${major} -eq 3 && ${minor} -ge 5 ]]; then
- # clang-3.5+ supports -fuse-ld directly.
- export LDFLAGS="${LDFLAGS} -fuse-ld=bfd"
- fallback="false"
- fi
- fi
- if [[ ${fallback} == "true" ]] ; then
- # <=gcc-4.7 and <=clang-3.4 require some coercion.
- # Only works if bfd exists.
- if [[ -e ${path_ld} ]] ; then
- local d="${T}/bfd-linker"
- mkdir -p "${d}"
- ln -sf "${path_ld}" "${d}"/ld
- export LDFLAGS="${LDFLAGS} -B${d}"
- else
- die "unable to locate a BFD linker"
- fi
+ if tc-is-gcc || tc-is-clang ; then
+ export LDFLAGS="${LDFLAGS} -fuse-ld=bfd"
fi
}
@@ -578,7 +555,7 @@ _tc-has-openmp() {
local base="${T}/test-tc-openmp"
cat <<-EOF > "${base}.c"
#include <omp.h>
- int main() {
+ int main(void) {
int nthreads, tid, ret = 0;
#pragma omp parallel private(nthreads, tid)
{
@@ -594,23 +571,11 @@ _tc-has-openmp() {
return ${ret}
}
-# @FUNCTION: tc-has-openmp
-# @DEPRECATED: tc-check-openmp
-# @USAGE: [toolchain prefix]
-# @DESCRIPTION:
-# See if the toolchain supports OpenMP. This function is deprecated and will be
-# removed on 2023-01-01.
-tc-has-openmp() {
- _tc-has-openmp "$@"
-}
-
# @FUNCTION: tc-check-openmp
# @DESCRIPTION:
# Test for OpenMP support with the current compiler and error out with
# a clear error message, telling the user how to rectify the missing
-# OpenMP support that has been requested by the ebuild. Using this function
-# to test for OpenMP support should be preferred over tc-has-openmp and
-# printing a custom message, as it presents a uniform interface to the user.
+# OpenMP support that has been requested by the ebuild.
#
# You should test for any necessary OpenMP support in pkg_pretend in order to
# warn the user of required toolchain changes. You must still check for OpenMP
@@ -652,6 +617,7 @@ tc-has-tls() {
return *i ? j : *i;
}
EOF
+
local flags
case $1 in
-s) flags="-S";;
@@ -659,7 +625,8 @@ tc-has-tls() {
-l) ;;
-*) die "Usage: tc-has-tls [-c|-l] [toolchain prefix]";;
esac
- : ${flags:=-fPIC -shared -Wl,-z,defs}
+
+ : "${flags:=-fPIC -shared -Wl,-z,defs}"
[[ $1 == -* ]] && shift
$(tc-getCC "$@") ${flags} "${base}.c" -o "${base}" >&/dev/null
local ret=$?
@@ -671,7 +638,7 @@ tc-has-tls() {
# Parse information from CBUILD/CHOST/CTARGET rather than
# use external variables from the profile.
tc-ninja_magic_to_arch() {
-ninj() { [[ ${type} == "kern" ]] && echo $1 || echo $2 ; }
+ _tc_echo_kernel_alias() { [[ ${type} == "kern" ]] && echo $1 || echo $2 ; }
local type=$1
local host=$2
@@ -680,26 +647,18 @@ ninj() { [[ ${type} == "kern" ]] && echo $1 || echo $2 ; }
case ${host} in
aarch64*) echo arm64;;
alpha*) echo alpha;;
+ arc*) echo arc;;
arm*) echo arm;;
- avr*) ninj avr32 avr;;
- bfin*) ninj blackfin bfin;;
+ avr*) _tc_echo_kernel_alias avr32 avr;;
+ bfin*) _tc_echo_kernel_alias blackfin bfin;;
c6x*) echo c6x;;
cris*) echo cris;;
frv*) echo frv;;
hexagon*) echo hexagon;;
- hppa*) ninj parisc hppa;;
- i?86*)
- # Starting with linux-2.6.24, the 'x86_64' and 'i386'
- # trees have been unified into 'x86'.
- # FreeBSD still uses i386
- if [[ ${type} == "kern" && ${host} == *freebsd* ]] ; then
- echo i386
- else
- echo x86
- fi
- ;;
+ hppa*) _tc_echo_kernel_alias parisc hppa;;
+ i?86*) echo x86;;
ia64*) echo ia64;;
- loongarch*) ninj loongarch loong;;
+ loongarch*) _tc_echo_kernel_alias loongarch loong;;
m68*) echo m68k;;
metag*) echo metag;;
microblaze*) echo microblaze;;
@@ -722,16 +681,15 @@ ninj() { [[ ${type} == "kern" ]] && echo $1 || echo $2 ; }
riscv*) echo riscv;;
s390*) echo s390;;
score*) echo score;;
- sh64*) ninj sh64 sh;;
+ sh64*) _tc_echo_kernel_alias sh64 sh;;
sh*) echo sh;;
- sparc64*) ninj sparc64 sparc;;
+ sparc64*) _tc_echo_kernel_alias sparc64 sparc;;
sparc*) [[ ${PROFILE_ARCH} == "sparc64" ]] \
- && ninj sparc64 sparc \
+ && _tc_echo_kernel_alias sparc64 sparc \
|| echo sparc
;;
tile*) echo tile;;
vax*) echo vax;;
- x86_64*freebsd*) echo amd64;;
x86_64*)
# Starting with linux-2.6.24, the 'x86_64' and 'i386'
# trees have been unified into 'x86'.
@@ -746,7 +704,7 @@ ninj() { [[ ${type} == "kern" ]] && echo $1 || echo $2 ; }
# since our usage of tc-arch is largely concerned with
# normalizing inputs for testing ${CTARGET}, let's filter
# other cross targets (mingw and such) into the unknown.
- *) echo unknown;;
+ *) echo unknown;;
esac
}
# @FUNCTION: tc-arch-kernel
@@ -762,6 +720,13 @@ tc-arch() {
tc-ninja_magic_to_arch portage "$@"
}
+# @FUNCTION: tc-endian
+# @USAGE: [toolchain prefix]
+# @RETURN: 'big' or 'little' corresponding to the passed (or host) endianness
+# @DESCRIPTION:
+# Accepts 'host' as an argument which defaults to CTARGET and falls back to CHOST
+# if unspecified. Returns 'big' or 'little' depending on whether 'host' is
+# big or little endian.
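+#
+# Example:
+# @CODE
+# [[ $(tc-endian) == big ]] && ewarn "Big endian target detected"
+# @CODE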
tc-endian() {
local host=$1
[[ -z ${host} ]] && host=${CTARGET:-${CHOST}}
@@ -771,6 +736,8 @@ tc-endian() {
aarch64*be) echo big;;
aarch64) echo little;;
alpha*) echo little;;
+ arc*b*) echo big;;
+ arc*) echo little;;
arm*b*) echo big;;
arm*) echo little;;
cris*) echo little;;
@@ -789,7 +756,7 @@ tc-endian() {
sh*) echo little;;
sparc*) echo big;;
x86_64*) echo little;;
- *) echo wtf;;
+ *) echo wtf;;
esac
}
@@ -810,8 +777,8 @@ tc-get-compiler-type() {
case ${res} in
*HAVE_PATHCC*) echo pathcc;;
*HAVE_CLANG*) echo clang;;
- *HAVE_GCC*) echo gcc;;
- *) echo unknown;;
+ *HAVE_GCC*) echo gcc;;
+ *) echo unknown;;
esac
}
@@ -829,11 +796,11 @@ tc-is-clang() {
# Internal func. The first argument is the version info to expand.
# Query the preprocessor to improve compatibility across different
-# compilers rather than maintaining a --version flag matrix. #335943
+# compilers rather than maintaining a --version flag matrix, bug #335943.
_gcc_fullversion() {
local ver="$1"; shift
set -- $($(tc-getCPP "$@") -E -P - <<<"__GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__")
- eval echo "$ver"
+ eval echo "${ver}"
}
# @FUNCTION: gcc-fullversion
@@ -866,7 +833,7 @@ gcc-micro-version() {
_clang_fullversion() {
local ver="$1"; shift
set -- $($(tc-getCPP "$@") -E -P - <<<"__clang_major__ __clang_minor__ __clang_patchlevel__")
- eval echo "$ver"
+ eval echo "${ver}"
}
# @FUNCTION: clang-fullversion
@@ -991,6 +958,15 @@ gcc-specs-stack-check() {
[[ "${directive/\{!fno-stack-check:}" != "${directive}" ]]
}
+# @FUNCTION: tc-enables-cxx-assertions
+# @RETURN: Truth if the current compiler enables assertions in the C++ standard library
+# @DESCRIPTION:
+# Return truth if the current compiler enables assertions in the C++ standard
+# library. For libstdc++, this is -D_GLIBCXX_ASSERTIONS, and for libcxx/libc++,
+# this is -D_LIBCPP_ENABLE_ASSERTIONS (deprecated) or -D_LIBCPP_ENABLE_HARDENED_MODE.
+tc-enables-cxx-assertions() {
+ tc-cpp-is-true "defined(_GLIBCXX_ASSERTIONS) || defined(_LIBCPP_ENABLE_ASSERTIONS) || defined(_LIBCPP_ENABLE_HARDENED_MODE)" ${CPPFLAGS} ${CXXFLAGS}
+}
# @FUNCTION: tc-enables-pie
# @RETURN: Truth if the current compiler generates position-independent code (PIC) which can be linked into executables
@@ -998,7 +974,16 @@ gcc-specs-stack-check() {
# Return truth if the current compiler generates position-independent code (PIC)
# which can be linked into executables.
tc-enables-pie() {
- tc-cpp-is-true "defined(__PIE__)" ${CPPFLAGS} ${CFLAGS}
+ tc-cpp-is-true "defined(__PIE__)" ${CPPFLAGS} ${CFLAGS} ${CXXFLAGS}
+}
+
+# @FUNCTION: tc-enables-fortify-source
+# @RETURN: Truth if the current compiler enables FORTIFY_SOURCE at any level
+# @DESCRIPTION:
+# Return truth if the current compiler enables fortification (FORTIFY_SOURCE)
+# at any level (-D_FORTIFY_SOURCE).
+tc-enables-fortify-source() {
+ tc-cpp-is-true "defined(_FORTIFY_SOURCE)" ${CPPFLAGS} ${CFLAGS} ${CXXFLAGS}
}
# @FUNCTION: tc-enables-ssp
@@ -1010,7 +995,7 @@ tc-enables-pie() {
# -fstack-protector-strong
# -fstack-protector-all
tc-enables-ssp() {
- tc-cpp-is-true "defined(__SSP__) || defined(__SSP_STRONG__) || defined(__SSP_ALL__)" ${CPPFLAGS} ${CFLAGS}
+ tc-cpp-is-true "defined(__SSP__) || defined(__SSP_STRONG__) || defined(__SSP_ALL__)" ${CPPFLAGS} ${CFLAGS} ${CXXFLAGS}
}
# @FUNCTION: tc-enables-ssp-strong
@@ -1021,7 +1006,7 @@ tc-enables-ssp() {
# -fstack-protector-strong
# -fstack-protector-all
tc-enables-ssp-strong() {
- tc-cpp-is-true "defined(__SSP_STRONG__) || defined(__SSP_ALL__)" ${CPPFLAGS} ${CFLAGS}
+ tc-cpp-is-true "defined(__SSP_STRONG__) || defined(__SSP_ALL__)" ${CPPFLAGS} ${CFLAGS} ${CXXFLAGS}
}
# @FUNCTION: tc-enables-ssp-all
@@ -1031,7 +1016,7 @@ tc-enables-ssp-strong() {
# on level corresponding to any of the following options:
# -fstack-protector-all
tc-enables-ssp-all() {
- tc-cpp-is-true "defined(__SSP_ALL__)" ${CPPFLAGS} ${CFLAGS}
+ tc-cpp-is-true "defined(__SSP_ALL__)" ${CPPFLAGS} ${CFLAGS} ${CXXFLAGS}
}
@@ -1048,18 +1033,17 @@ gen_usr_ldscript() {
tc-is-static-only && return
- # We only care about stuffing / for the native ABI. #479448
+ # We only care about stuffing / for the native ABI, bug #479448
if [[ $(type -t multilib_is_native_abi) == "function" ]] ; then
multilib_is_native_abi || return 0
fi
- # Eventually we'd like to get rid of this func completely #417451
+ # Eventually we'd like to get rid of this func completely, bug #417451
case ${CTARGET:-${CHOST}} in
- *-darwin*) ;;
- *-android*) return 0 ;;
- *linux*|*-freebsd*|*-openbsd*|*-netbsd*)
- use prefix && return 0 ;;
- *) return 0 ;;
+ *-darwin*) ;;
+ *-android*) return 0 ;;
+ *linux*) use prefix && return 0 ;;
+ *) return 0 ;;
esac
# Just make sure it exists
@@ -1075,10 +1059,10 @@ gen_usr_ldscript() {
# is referenced ... makes multilib saner
local flags=( ${CFLAGS} ${LDFLAGS} -Wl,--verbose )
if $(tc-getLD) --version | grep -q 'GNU gold' ; then
- # If they're using gold, manually invoke the old bfd. #487696
+ # If they're using gold, manually invoke the old bfd, bug #487696
local d="${T}/bfd-linker"
mkdir -p "${d}"
- ln -sf $(which ${CHOST}-ld.bfd) "${d}"/ld
+ ln -sf $(type -P ${CHOST}-ld.bfd) "${d}"/ld
flags+=( -B"${d}" )
fi
output_format=$($(tc-getCC) "${flags[@]}" 2>&1 | sed -n 's/^OUTPUT_FORMAT("\([^"]*\)",.*/\1/p')
@@ -1168,4 +1152,103 @@ gen_usr_ldscript() {
done
}
+# @FUNCTION: tc-get-cxx-stdlib
+# @DESCRIPTION:
+# Attempt to identify the C++ standard library used by the compiler.
+# If the library is identified, the function returns 0 and prints one
+# of the following:
+#
+# - ``libc++`` for ``sys-libs/libcxx``
+# - ``libstdc++`` for ``sys-devel/gcc``'s libstdc++
+#
+# If the library is not recognized, the function returns 1.
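+#
+# Example:
+# @CODE
+# if [[ $(tc-get-cxx-stdlib) == libc++ ]] ; then
+#     einfo "Building against libc++"
+# fi
+# @CODE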
+tc-get-cxx-stdlib() {
+ local code='#include <ciso646>
+
+#if defined(_LIBCPP_VERSION)
+ HAVE_LIBCXX
+#elif defined(__GLIBCXX__)
+ HAVE_LIBSTDCPP
+#endif
+'
+ local res=$(
+ $(tc-getCXX) ${CPPFLAGS} ${CXXFLAGS} -x c++ -E -P - \
+ <<<"${code}" 2>/dev/null
+ )
+
+ case ${res} in
+ *HAVE_LIBCXX*)
+ echo libc++;;
+ *HAVE_LIBSTDCPP*)
+ echo libstdc++;;
+ *)
+ return 1;;
+ esac
+
+ return 0
+}
+
+# @FUNCTION: tc-get-c-rtlib
+# @DESCRIPTION:
+# Attempt to identify the runtime used by the C/C++ compiler.
+# If the runtime is identified, the function returns 0 and prints one
+# of the following:
+#
+# - ``compiler-rt`` for ``sys-libs/compiler-rt``
+# - ``libgcc`` for ``sys-devel/gcc``'s libgcc
+#
+# If the runtime is not recognized, the function returns 1.
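+#
+# Example:
+# @CODE
+# [[ $(tc-get-c-rtlib) == compiler-rt ]] && einfo "Runtime is compiler-rt"
+# @CODE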
+tc-get-c-rtlib() {
+ local res=$(
+ $(tc-getCC) ${CPPFLAGS} ${CFLAGS} ${LDFLAGS} \
+ -print-libgcc-file-name 2>/dev/null
+ )
+
+ case ${res} in
+ *libclang_rt*)
+ echo compiler-rt;;
+ *libgcc*)
+ echo libgcc;;
+ *)
+ return 1;;
+ esac
+
+ return 0
+}
+
+# @FUNCTION: tc-get-ptr-size
+# @RETURN: Size of a pointer in bytes for CHOST (e.g. 4 or 8).
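+#
+# Example:
+# @CODE
+# [[ $(tc-get-ptr-size) -eq 8 ]] && einfo "CHOST has 64-bit pointers"
+# @CODE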
+tc-get-ptr-size() {
+ $(tc-getCPP) -P - <<< __SIZEOF_POINTER__ ||
+ die "Could not determine CHOST pointer size"
+}
+
+# @FUNCTION: tc-get-build-ptr-size
+# @RETURN: Size of a pointer in bytes for CBUILD (e.g. 4 or 8).
+tc-get-build-ptr-size() {
+ $(tc-getBUILD_CPP) -P - <<< __SIZEOF_POINTER__ ||
+ die "Could not determine CBUILD pointer size"
+}
+
+# @FUNCTION: tc-is-lto
+# @RETURN: Shell true if we are using LTO, shell false otherwise
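+#
+# Example:
+# @CODE
+# tc-is-lto && einfo "LTO is enabled in the active CFLAGS"
+# @CODE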
+tc-is-lto() {
+ local f="${T}/test-lto.o"
+
+ case $(tc-get-compiler-type) in
+ clang)
+ $(tc-getCC) ${CFLAGS} -c -o "${f}" -x c - <<<"" || die
+ # If LTO is used, clang will output bytecode and llvm-bcanalyzer
+ # will run successfully. Otherwise, it will output plain object
+ # file and llvm-bcanalyzer will exit with error.
+ llvm-bcanalyzer "${f}" &>/dev/null && return 0
+ ;;
+ gcc)
+ $(tc-getCC) ${CFLAGS} -c -o "${f}" -x c - <<<"" || die
+ [[ $($(tc-getREADELF) -S "${f}") == *.gnu.lto* ]] && return 0
+ ;;
+ esac
+ return 1
+}
+
fi
diff --git a/eclass/toolchain.eclass b/eclass/toolchain.eclass
index 12959958c587..a5d4345e7fbf 100644
--- a/eclass/toolchain.eclass
+++ b/eclass/toolchain.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: toolchain.eclass
@@ -6,27 +6,30 @@
# Toolchain Ninjas <toolchain@gentoo.org>
# @SUPPORTED_EAPIS: 7 8
# @BLURB: Common code for sys-devel/gcc ebuilds
+# @DESCRIPTION:
+# Common code for sys-devel/gcc ebuilds (and occasionally GCC forks, like
+# GNAT for Ada). If not building GCC itself, please use toolchain-funcs.eclass
+# instead.
case ${EAPI} in
- 7) inherit eutils ;;
- 8) ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_TOOLCHAIN_ECLASS} ]]; then
+if [[ -z ${_TOOLCHAIN_ECLASS} ]]; then
_TOOLCHAIN_ECLASS=1
DESCRIPTION="The GNU Compiler Collection"
HOMEPAGE="https://gcc.gnu.org/"
-inherit flag-o-matic gnuconfig libtool multilib pax-utils toolchain-funcs prefix
+inherit edo flag-o-matic gnuconfig libtool multilib pax-utils python-any-r1 toolchain-funcs prefix
tc_is_live() {
[[ ${PV} == *9999* ]]
}
if tc_is_live ; then
- EGIT_REPO_URI="https://gcc.gnu.org/git/gcc.git"
+ EGIT_REPO_URI="https://gcc.gnu.org/git/gcc.git https://github.com/gcc-mirror/gcc"
# Naming style:
# gcc-10.1.0_pre9999 -> gcc-10-branch
# Note that the micro version is required or lots of stuff will break.
@@ -35,6 +38,8 @@ if tc_is_live ; then
EGIT_BRANCH="releases/${PN}-${PV%.?.?_pre9999}"
EGIT_BRANCH=${EGIT_BRANCH//./_}
inherit git-r3
+elif [[ -n ${TOOLCHAIN_USE_GIT_PATCHES} ]] ; then
+ inherit git-r3
fi
FEATURES=${FEATURES/multilib-strict/}
@@ -47,9 +52,9 @@ if [[ ${CTARGET} = ${CHOST} ]] ; then
export CTARGET=${CATEGORY#cross-}
fi
fi
-: ${TARGET_ABI:=${ABI}}
-: ${TARGET_MULTILIB_ABIS:=${MULTILIB_ABIS}}
-: ${TARGET_DEFAULT_ABI:=${DEFAULT_ABI}}
+: "${TARGET_ABI:=${ABI}}"
+: "${TARGET_MULTILIB_ABIS:=${MULTILIB_ABIS}}"
+: "${TARGET_DEFAULT_ABI:=${DEFAULT_ABI}}"
is_crosscompile() {
[[ ${CHOST} != ${CTARGET} ]]
@@ -79,6 +84,65 @@ tc_version_is_between() {
# Used to override GCC version. Useful for e.g. live ebuilds or snapshots.
# Defaults to ${PV}.
+# @ECLASS_VARIABLE: GCC_VALIDATE_FAILURES_VERSION
+# @DESCRIPTION:
+# Version of the test comparison script (validate_failures.py) to use.
+: "${GCC_VALIDATE_FAILURES_VERSION:=7bbfb01a32b73842f8908de028703510a0e12057}"
+
+# @ECLASS_VARIABLE: TOOLCHAIN_USE_GIT_PATCHES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Used to force fetching patches from git. Useful for non-released versions
+# of GCC where we don't want to keep creating patchset tarballs for a new
+# release series (e.g. right after 12.0 is released and we start adding
+# 13.0 snapshots, we don't want to create a new patchset for every single
+# 13.0 snapshot; setting this variable makes us grab the patches from git
+# each time instead).
+
+# @ECLASS_VARIABLE: GCC_TESTS_COMPARISON_DIR
+# @USER_VARIABLE
+# @DESCRIPTION:
+# Source of previous GCC test results and location to store new results.
+: "${GCC_TESTS_COMPARISON_DIR:=${BROOT}/var/cache/gcc/testresults/${CHOST}}"
+
+# @ECLASS_VARIABLE: GCC_TESTS_COMPARISON_SLOT
+# @USER_VARIABLE
+# @DESCRIPTION:
+# Slot to compare test results with. Defaults to current slot.
+: "${GCC_TESTS_COMPARISON_SLOT:=${SLOT}}"
+
+# @ECLASS_VARIABLE: GCC_TESTS_IGNORE_NO_BASELINE
+# @DEFAULT_UNSET
+# @USER_VARIABLE
+# @DESCRIPTION:
+# Ignore missing baseline/reference data and create new baseline.
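+#
+# Example:
+# @CODE
+# # Illustrative invocation: run the test suite and record a fresh baseline.
+# GCC_TESTS_IGNORE_NO_BASELINE=1 FEATURES=test emerge -1v sys-devel/gcc
+# @CODE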
+: "${GCC_TESTS_IGNORE_NO_BASELINE:=}"
+
+# @ECLASS_VARIABLE: GCC_TESTS_REGEN_BASELINE
+# @DEFAULT_UNSET
+# @USER_VARIABLE
+# @DESCRIPTION:
+# Ignore baseline/reference data and create new baseline.
+: "${GCC_TESTS_REGEN_BASELINE:=}"
+
+# @ECLASS_VARIABLE: GCC_TESTS_CHECK_TARGET
+# @USER_VARIABLE
+# @DESCRIPTION:
+# Defaults to 'check'. Allows choosing a different test target, e.g.
+# 'check-gcc' (https://gcc.gnu.org/install/test.html).
+: "${GCC_TESTS_CHECK_TARGET:=check}"
+
+# @ECLASS_VARIABLE: GCC_TESTS_RUNTESTFLAGS
+# @DEFAULT_UNSET
+# @USER_VARIABLE
+# @DESCRIPTION:
+# Extra options to pass to DejaGnu as RUNTESTFLAGS.
+: "${GCC_TESTS_RUNTESTFLAGS:=}"
+
+# @ECLASS_VARIABLE: TOOLCHAIN_PATCH_DEV
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Indicate the developer who hosts the patchset for an ebuild.
+
# @ECLASS_VARIABLE: GCC_PV
# @INTERNAL
# @DESCRIPTION:
@@ -121,6 +185,37 @@ GCCMINOR=$(ver_cut 2 ${GCC_PV})
# @DESCRIPTION:
# GCC micro version.
GCCMICRO=$(ver_cut 3 ${GCC_PV})
+# @ECLASS_VARIABLE: GCC_RUN_FIXINCLUDES
+# @INTERNAL
+# @DESCRIPTION:
+# Controls whether fixincludes should be used.
+GCC_RUN_FIXINCLUDES=0
+
+tc_use_major_version_only() {
+ local use_major_version_only=0
+
+ if ! tc_version_is_at_least 10 ; then
+ return 1
+ fi
+
+ if [[ ${GCCMAJOR} -eq 10 ]] && ver_test ${PV} -ge 10.4.1_p20220929 ; then
+ use_major_version_only=1
+ elif [[ ${GCCMAJOR} -eq 11 ]] && ver_test ${PV} -ge 11.3.1_p20220930 ; then
+ use_major_version_only=1
+ elif [[ ${GCCMAJOR} -eq 12 ]] && ver_test ${PV} -ge 12.2.1_p20221001 ; then
+ use_major_version_only=1
+ elif [[ ${GCCMAJOR} -eq 13 ]] && ver_test ${PV} -ge 13.0.0_pre20221002 ; then
+ use_major_version_only=1
+ elif [[ ${GCCMAJOR} -gt 13 ]] ; then
+ use_major_version_only=1
+ fi
+
+ if [[ ${use_major_version_only} -eq 1 ]] ; then
+ return 0
+ fi
+
+ return 1
+}
# @ECLASS_VARIABLE: GCC_CONFIG_VER
# @INTERNAL
@@ -129,7 +224,11 @@ GCCMICRO=$(ver_cut 3 ${GCC_PV})
# of binary and gcc-config names not directly tied to upstream
# versioning. In practice it's hard to untangle from gcc/BASE-VER
# (GCC_RELEASE_VER) value.
-GCC_CONFIG_VER=${GCC_RELEASE_VER}
+if tc_use_major_version_only ; then
+ GCC_CONFIG_VER=${GCCMAJOR}
+else
+ GCC_CONFIG_VER=${GCC_RELEASE_VER}
+fi
# Pre-release support. Versioning schema:
# 1.0.0_pre9999: live ebuild
@@ -149,7 +248,7 @@ fi
# Require minimum gcc version to simplify assumptions.
# Normally we would require gcc-6+ (based on sys-devel/gcc)
# but we still have sys-devel/gcc-apple-4.2.1_p5666.
-tc_version_is_at_least 4.2.1 || die "${ECLASS}: ${GCC_RELEASE_VER} is too old."
+tc_version_is_at_least 8 || die "${ECLASS}: ${GCC_RELEASE_VER} is too old."
PREFIX=${TOOLCHAIN_PREFIX:-${EPREFIX}/usr}
@@ -181,51 +280,47 @@ tc_has_feature() {
has "$1" "${TC_FEATURES[@]}"
}
-if [[ ${PN} != "kgcc64" && ${PN} != gcc-* ]] ; then
- IUSE+=" debug +cxx +nptl" TC_FEATURES+=( nptl )
- [[ -n ${PIE_VER} ]] && IUSE+=" nopie"
- [[ -n ${SPECS_VER} ]] && IUSE+=" nossp"
+if [[ ${PN} != kgcc64 && ${PN} != gcc-* ]] ; then
+ IUSE+=" debug +cxx"
IUSE+=" +fortran" TC_FEATURES+=( fortran )
IUSE+=" doc hardened multilib objc"
- tc_version_is_between 3 7 && IUSE+=" awt gcj" TC_FEATURES+=( gcj )
IUSE+=" pgo"
IUSE+=" objc-gc" TC_FEATURES+=( objc-gc )
IUSE+=" libssp objc++"
- IUSE+=" +openmp"
-
- tc_version_is_at_least 4.3 && IUSE+=" fixed-point"
- tc_version_is_at_least 4.7 && IUSE+=" go"
-
- # sanitizer support appeared in gcc-4.8, but <gcc-5 does not
- # support modern glibc.
- tc_version_is_at_least 5 && IUSE+=" +sanitize" TC_FEATURES+=( sanitize )
-
- # Note:
- # <gcc-4.8 supported graphite, it required forked ppl
- # versions which we dropped. Since graphite was also experimental in
- # the older versions, we don't want to bother supporting it. #448024
- # <gcc-5 supported graphite, it required cloog
- # <gcc-6.5 supported graphite, it required old incompatible isl
- tc_version_is_at_least 6.5 &&
- IUSE+=" graphite" TC_FEATURES+=( graphite )
-
- tc_version_is_between 4.9 8 && IUSE+=" cilk"
- tc_version_is_at_least 4.9 && IUSE+=" ada"
- tc_version_is_at_least 4.9 && IUSE+=" vtv"
- tc_version_is_at_least 5.0 && IUSE+=" jit"
- tc_version_is_between 5.0 9 && IUSE+=" mpx"
- tc_version_is_at_least 6.0 && IUSE+=" +pie +ssp +pch"
-
- # systemtap is a gentoo-specific switch: bug #654748
- tc_version_is_at_least 8.0 &&
- IUSE+=" systemtap" TC_FEATURES+=( systemtap )
-
- tc_version_is_at_least 9.0 && IUSE+=" d"
+
+ # Stop forcing openmp on by default in the eclass. Gradually phase it out.
+ # See bug #890999.
+ if tc_version_is_at_least 13.0.0_pre20221218 ; then
+ IUSE+=" openmp"
+ else
+ IUSE+=" +openmp"
+ fi
+
+ IUSE+=" fixed-point"
+ IUSE+=" go"
+ IUSE+=" +sanitize" TC_FEATURES+=( sanitize )
+ IUSE+=" graphite" TC_FEATURES+=( graphite )
+ IUSE+=" ada"
+ IUSE+=" vtv"
+ IUSE+=" jit"
+ IUSE+=" +pie +ssp pch"
+
+ IUSE+=" systemtap" TC_FEATURES+=( systemtap )
+
+ tc_version_is_at_least 9.0 && IUSE+=" d" TC_FEATURES+=( d )
tc_version_is_at_least 9.1 && IUSE+=" lto"
tc_version_is_at_least 10 && IUSE+=" cet"
tc_version_is_at_least 10 && IUSE+=" zstd" TC_FEATURES+=( zstd )
tc_version_is_at_least 11 && IUSE+=" valgrind" TC_FEATURES+=( valgrind )
tc_version_is_at_least 11 && IUSE+=" custom-cflags"
+ tc_version_is_at_least 12 && IUSE+=" ieee-long-double"
+ tc_version_is_at_least 12.2.1_p20221203 ${PV} && IUSE+=" default-znow"
+ tc_version_is_at_least 12.2.1_p20221203 ${PV} && IUSE+=" default-stack-clash-protection"
+ tc_version_is_at_least 13.0.0_pre20221218 ${PV} && IUSE+=" modula2"
+ # See https://gcc.gnu.org/pipermail/gcc-patches/2023-April/615944.html
+ # and https://rust-gcc.github.io/2023/04/24/gccrs-and-gcc13-release.html for why
+ # it was disabled in 13.
+ tc_version_is_at_least 14.0.0_pre20230423 ${PV} && IUSE+=" rust"
fi
if tc_version_is_at_least 10; then
@@ -238,24 +333,18 @@ fi
#---->> DEPEND <<----
-RDEPEND="sys-libs/zlib
+RDEPEND="
+ sys-libs/zlib
virtual/libiconv
nls? ( virtual/libintl )
"
GMP_MPFR_DEPS=">=dev-libs/gmp-4.3.2:0= >=dev-libs/mpfr-2.4.2:0="
-if tc_version_is_at_least 4.3 ; then
- RDEPEND+=" ${GMP_MPFR_DEPS}"
-elif tc_has_feature fortran ; then
- RDEPEND+=" fortran? ( ${GMP_MPFR_DEPS} )"
-fi
-
-tc_version_is_at_least 4.5 && RDEPEND+=" >=dev-libs/mpc-0.8.1:0="
+RDEPEND+=" ${GMP_MPFR_DEPS}"
+RDEPEND+=" >=dev-libs/mpc-0.8.1:0="
if tc_has_feature objc-gc ; then
- if tc_version_is_at_least 7 ; then
- RDEPEND+=" objc-gc? ( >=dev-libs/boehm-gc-7.4.2 )"
- fi
+ RDEPEND+=" objc-gc? ( >=dev-libs/boehm-gc-7.4.2 )"
fi
if tc_has_feature graphite ; then
@@ -263,32 +352,22 @@ if tc_has_feature graphite ; then
fi
BDEPEND="
- >=sys-devel/bison-1.875
+ app-alternatives/yacc
>=sys-devel/flex-2.5.4
nls? ( sys-devel/gettext )
test? (
+ ${PYTHON_DEPS}
>=dev-util/dejagnu-1.4.4
>=sys-devel/autogen-5.5.4
- )"
+ )
+"
DEPEND="${RDEPEND}"
-if tc_has_feature gcj ; then
- DEPEND+="
- gcj? (
- awt? (
- x11-base/xorg-proto
- x11-libs/libXt
- x11-libs/libX11
- x11-libs/libXtst
- =x11-libs/gtk+-2*
- x11-libs/pango
- virtual/pkgconfig
- )
- >=media-libs/libart_lgpl-2.1
- app-arch/zip
- app-arch/unzip
- )
- "
+if [[ ${PN} == gcc && ${PV} == *_p* ]] ; then
+ # Snapshots don't contain info pages.
+ # If they start to, adjust gcc_cv_prog_makeinfo_modern logic in toolchain_src_configure.
+ # Needed unless/until https://gcc.gnu.org/PR106899 is fixed
+ BDEPEND+=" sys-apps/texinfo"
fi
if tc_has_feature sanitize ; then
@@ -300,7 +379,7 @@ fi
if tc_has_feature systemtap ; then
# gcc needs sys/sdt.h headers on target
- DEPEND+=" systemtap? ( dev-util/systemtap )"
+ DEPEND+=" systemtap? ( dev-debug/systemtap )"
fi
if tc_has_feature zstd ; then
@@ -308,35 +387,93 @@ if tc_has_feature zstd ; then
RDEPEND+=" zstd? ( app-arch/zstd:= )"
fi
-if tc_has_feature valgrind; then
- BDEPEND+=" valgrind? ( dev-util/valgrind )"
+if tc_has_feature valgrind ; then
+ BDEPEND+=" valgrind? ( dev-debug/valgrind )"
+fi
+
+# TODO: Add a pkg_setup & pkg_pretend check for whether the active compiler
+# supports Ada.
+if tc_has_feature ada ; then
+ BDEPEND+=" ada? ( || ( sys-devel/gcc[ada] dev-lang/gnat-gpl[ada] ) )"
+fi
+
+# TODO: Add a pkg_setup & pkg_pretend check for whether the active compiler
+# supports D.
+if tc_has_feature d && tc_version_is_at_least 12.0 ; then
+ # D in 12+ is self-hosting and needs D to bootstrap.
+ # TODO: package some binary we can use, like for Ada
+ # bug #840182
+ BDEPEND+=" d? ( || ( sys-devel/gcc[d(-)] <sys-devel/gcc-12[d(-)] ) )"
+fi
+
+if tc_has_feature rust && tc_version_is_at_least 14.0.0_pre20230421 ; then
+ # This was added upstream in r14-9968-g3e1e73fc995844 as a temporary measure.
+ # See https://inbox.sourceware.org/gcc/34fec7ea-8762-4cac-a1c8-ff54e20e31ed@embecosm.com/
+ BDEPEND+=" rust? ( virtual/rust )"
fi
-PDEPEND=">=sys-devel/gcc-config-2.3"
+PDEPEND=">=sys-devel/gcc-config-2.11"
#---->> S + SRC_URI essentials <<----
+# @ECLASS_VARIABLE: TOOLCHAIN_PATCH_SUFFIX
+# @DESCRIPTION:
+# Used to override the compression used for patchsets.
+# Default is xz for EAPI 8+ and bz2 for older EAPIs.
+if [[ ${EAPI} == 8 ]] ; then
+ : "${TOOLCHAIN_PATCH_SUFFIX:=xz}"
+else
+ # Older EAPIs
+ : "${TOOLCHAIN_PATCH_SUFFIX:=bz2}"
+fi
+
# @ECLASS_VARIABLE: TOOLCHAIN_SET_S
# @DESCRIPTION:
# Used to override value of S for snapshots and such. Mainly useful
# if needing to set GCC_TARBALL_SRC_URI.
-: ${TOOLCHAIN_SET_S:=yes}
+: "${TOOLCHAIN_SET_S:=yes}"
# Set the source directory depending on whether we're using
# a live git tree, snapshot, or release tarball.
if [[ ${TOOLCHAIN_SET_S} == yes ]] ; then
- S=$(
- if tc_is_live ; then
- echo ${EGIT_CHECKOUT_DIR}
- elif [[ -n ${SNAPSHOT} ]] ; then
- echo ${WORKDIR}/gcc-${SNAPSHOT}
- else
- echo ${WORKDIR}/gcc-${GCC_RELEASE_VER}
- fi
- )
+ if tc_is_live ; then
+ S=${EGIT_CHECKOUT_DIR}
+ elif [[ -n ${SNAPSHOT} ]] ; then
+ S=${WORKDIR}/gcc-${SNAPSHOT}
+ else
+ S=${WORKDIR}/gcc-${GCC_RELEASE_VER}
+ fi
fi
gentoo_urls() {
+ # the list is sorted by likelihood of getting the patches tarball from
+ # respective devspace
+ # slyfox's distfiles are mirrored to sam's devspace
+ declare -A devspace_urls=(
+ [soap]=HTTP~soap/distfiles/URI
+ [sam]=HTTP~sam/distfiles/sys-devel/gcc/URI
+ [slyfox]=HTTP~sam/distfiles/URI
+ [xen0n]=HTTP~xen0n/distfiles/sys-devel/gcc/URI
+ [tamiko]=HTTP~tamiko/distfiles/URI
+ [zorry]=HTTP~zorry/patches/gcc/URI
+ [vapier]=HTTP~vapier/dist/URI
+ [blueness]=HTTP~blueness/dist/URI
+ )
+
+ # Newer ebuilds should set TOOLCHAIN_PATCH_DEV and we'll just
+ # return the full URL from the array.
+ if [[ -n ${TOOLCHAIN_PATCH_DEV} ]] ; then
+ local devspace_url=${devspace_urls[${TOOLCHAIN_PATCH_DEV}]}
+ if [[ -n ${devspace_url} ]] ; then
+ local devspace_url_exp=${devspace_url//HTTP/https:\/\/dev.gentoo.org\/}
+ devspace_url_exp=${devspace_url_exp//URI/$1}
+ echo ${devspace_url_exp}
+ return
+ fi
+ fi
+
+ # But we keep the old fallback list for compatibility with
+ # older ebuilds (overlays etc).
local devspace="
HTTP~soap/distfiles/URI
HTTP~sam/distfiles/URI
@@ -344,7 +481,8 @@ gentoo_urls() {
HTTP~tamiko/distfiles/URI
HTTP~zorry/patches/gcc/URI
HTTP~vapier/dist/URI
- HTTP~blueness/dist/URI"
+ HTTP~blueness/dist/URI
+ "
devspace=${devspace//HTTP/https:\/\/dev.gentoo.org\/}
echo ${devspace//URI/$1} mirror://gentoo/$1
}
@@ -373,43 +511,9 @@ gentoo_urls() {
# The resulting filename of this tarball will be:
# gcc-${PATCH_GCC_VER:-${GCC_RELEASE_VER}}-patches-${PATCH_VER}.tar.xz
#
-# PIE_VER
-# PIE_GCC_VER
-# These variables control patching in various updates for the logic
-# controlling Position Independent Executables. PIE_VER is expected
-# to be the version of this patch, and PIE_GCC_VER the gcc version of
-# the patch:
-# An example:
-# PIE_VER="8.7.6.5"
-# PIE_GCC_VER="3.4.0"
-# The resulting filename of this tarball will be:
-# gcc-${PIE_GCC_VER:-${GCC_RELEASE_VER}}-piepatches-v${PIE_VER}.tar.xz
-#
-# SPECS_VER
-# SPECS_GCC_VER
-# This is for the minispecs files included in the hardened gcc-4.x
-# The specs files for hardenedno*, vanilla and for building the "specs" file.
-# SPECS_VER is expected to be the version of this patch, SPECS_GCC_VER
-# the gcc version of the patch.
-# An example:
-# SPECS_VER="8.7.6.5"
-# SPECS_GCC_VER="3.4.0"
-# The resulting filename of this tarball will be:
-# gcc-${SPECS_GCC_VER:-${GCC_RELEASE_VER}}-specs-${SPECS_VER}.tar.xz
-#
-# CYGWINPORTS_GITREV
-# If set, this variable signals that we should apply additional patches
-# maintained by upstream Cygwin developers at github/cygwinports/gcc,
-# using the specified git commit id there. The list of patches to
-# apply is extracted from gcc.cygport, maintained there as well.
-# This is done for compilers running on Cygwin, not for cross compilers
-# with a Cygwin target.
get_gcc_src_uri() {
export PATCH_GCC_VER=${PATCH_GCC_VER:-${GCC_RELEASE_VER}}
export MUSL_GCC_VER=${MUSL_GCC_VER:-${PATCH_GCC_VER}}
- export PIE_GCC_VER=${PIE_GCC_VER:-${GCC_RELEASE_VER}}
- export HTB_GCC_VER=${HTB_GCC_VER:-${GCC_RELEASE_VER}}
- export SPECS_GCC_VER=${SPECS_GCC_VER:-${GCC_RELEASE_VER}}
# Set where to download gcc itself depending on whether we're using a
# live git tree, snapshot, or release tarball.
@@ -419,48 +523,20 @@ get_gcc_src_uri() {
# Pull gcc tarball from another location. Frequently used by gnat-gpl.
GCC_SRC_URI="${GCC_TARBALL_SRC_URI}"
elif [[ -n ${SNAPSHOT} ]] ; then
- GCC_SRC_URI="https://gcc.gnu.org/pub/gcc/snapshots/${SNAPSHOT}/gcc-${SNAPSHOT}.tar.xz"
+ GCC_SRC_URI="mirror://gcc/snapshots/${SNAPSHOT}/gcc-${SNAPSHOT}.tar.xz"
else
- if tc_version_is_between 5.5 6 || tc_version_is_between 6.4 7 || tc_version_is_at_least 7.2 ; then
- GCC_SRC_URI="mirror://gnu/gcc/gcc-${GCC_PV}/gcc-${GCC_RELEASE_VER}.tar.xz"
- else
- GCC_SRC_URI="mirror://gnu/gcc/gcc-${GCC_PV}/gcc-${GCC_RELEASE_VER}.tar.bz2"
- fi
- fi
-
- local PATCH_SUFFIX="xz"
- if ! tc_version_is_at_least 9.4.1_p20220317 || tc_version_is_between 9 9.5 \
- || tc_version_is_between 10 10.4 || tc_version_is_between 11 11.4 \
- || tc_version_is_between 12 12.0.1_pre20220424 ; then
- # These are versions before we started to use .xz
- PATCH_SUFFIX="bz2"
+ GCC_SRC_URI="
+ mirror://gcc/gcc-${GCC_PV}/gcc-${GCC_RELEASE_VER}.tar.xz
+ mirror://gnu/gcc/gcc-${GCC_PV}/gcc-${GCC_RELEASE_VER}.tar.xz
+ "
fi
[[ -n ${PATCH_VER} ]] && \
- GCC_SRC_URI+=" $(gentoo_urls gcc-${PATCH_GCC_VER}-patches-${PATCH_VER}.tar.${PATCH_SUFFIX})"
+ GCC_SRC_URI+=" $(gentoo_urls gcc-${PATCH_GCC_VER}-patches-${PATCH_VER}.tar.${TOOLCHAIN_PATCH_SUFFIX})"
[[ -n ${MUSL_VER} ]] && \
- GCC_SRC_URI+=" $(gentoo_urls gcc-${MUSL_GCC_VER}-musl-patches-${MUSL_VER}.tar.${PATCH_SUFFIX})"
-
- [[ -n ${PIE_VER} ]] && \
- PIE_CORE=${PIE_CORE:-gcc-${PIE_GCC_VER}-piepatches-v${PIE_VER}.tar.${PATCH_SUFFIX}} && \
- GCC_SRC_URI+=" $(gentoo_urls ${PIE_CORE})"
-
- # gcc minispec for the hardened gcc 4 compiler
- [[ -n ${SPECS_VER} ]] && \
- GCC_SRC_URI+=" $(gentoo_urls gcc-${SPECS_GCC_VER}-specs-${SPECS_VER}.tar.${PATCH_SUFFIX})"
-
- if tc_has_feature gcj ; then
- if tc_version_is_at_least 4.5 ; then
- GCC_SRC_URI+=" gcj? ( ftp://sourceware.org/pub/java/ecj-4.5.jar )"
- elif tc_version_is_at_least 4.3 ; then
- GCC_SRC_URI+=" gcj? ( ftp://sourceware.org/pub/java/ecj-4.3.jar )"
- fi
- fi
+ GCC_SRC_URI+=" $(gentoo_urls gcc-${MUSL_GCC_VER}-musl-patches-${MUSL_VER}.tar.${TOOLCHAIN_PATCH_SUFFIX})"
- # Cygwin patches from https://github.com/cygwinports/gcc
- [[ -n ${CYGWINPORTS_GITREV} ]] && \
- GCC_SRC_URI+=" elibc_Cygwin? ( https://github.com/cygwinports/gcc/archive/${CYGWINPORTS_GITREV}.tar.gz
- -> gcc-cygwinports-${CYGWINPORTS_GITREV}.tar.gz )"
+ GCC_SRC_URI+=" test? ( https://gitweb.gentoo.org/proj/gcc-patches.git/plain/scripts/testsuite-management/validate_failures.py?id=${GCC_VALIDATE_FAILURES_VERSION} -> ${PN}-validate-failures-${GCC_VALIDATE_FAILURES_VERSION}.py )"
echo "${GCC_SRC_URI}"
}
@@ -475,11 +551,7 @@ toolchain_pkg_pretend() {
ewarn 'Go requires a C++ compiler, disabled due to USE="-cxx"'
_tc_use_if_iuse objc++ && \
ewarn 'Obj-C++ requires a C++ compiler, disabled due to USE="-cxx"'
- _tc_use_if_iuse gcj && \
- ewarn 'GCJ requires a C++ compiler, disabled due to USE="-cxx"'
fi
-
- want_minispecs
}
#---->> pkg_setup <<----
@@ -490,16 +562,56 @@ toolchain_pkg_setup() {
# bug #265283
unset LANGUAGES
+
+ # See https://www.gnu.org/software/make/manual/html_node/Parallel-Output.html
+ # Avoid really confusing logs from subconfigure spam, makes logs far
+ # more legible.
+ MAKEOPTS="--output-sync=line ${MAKEOPTS}"
+
+ use test && python-any-r1_pkg_setup
}
#---->> src_unpack <<----
+# @FUNCTION: toolchain_fetch_git_patches
+# @INTERNAL
+# @DESCRIPTION:
+# Fetch patches from Gentoo's gcc-patches repository.
+toolchain_fetch_git_patches() {
+ local gcc_patches_repo="https://anongit.gentoo.org/git/proj/gcc-patches.git https://github.com/gentoo/gcc-patches"
+
+ # If we weren't given a patchset number, pull it from git too.
+ einfo "Fetching patchset from git as PATCH_VER is unset"
+ EGIT_REPO_URI=${gcc_patches_repo} EGIT_BRANCH="master" \
+ EGIT_CHECKOUT_DIR="${WORKDIR}"/patch.tmp \
+ git-r3_src_unpack
+
+ mkdir "${WORKDIR}"/patch || die
+ mv "${WORKDIR}"/patch.tmp/${PATCH_GCC_VER}/gentoo/* "${WORKDIR}"/patch || die
+
+ if [[ -n ${MUSL_VER} || -d "${WORKDIR}"/musl ]] && [[ ${CTARGET} == *musl* ]] ; then
+ mkdir "${WORKDIR}"/musl || die
+ mv "${WORKDIR}"/patch.tmp/${PATCH_GCC_VER}/musl/* "${WORKDIR}"/musl || die
+ fi
+}
+
toolchain_src_unpack() {
if tc_is_live ; then
git-r3_src_unpack
+
+ # Needed for gcc --version to include the upstream commit used
+ # rather than only the commit after we apply our patches.
+ # It includes both with this.
+ echo "${EGIT_VERSION}" > "${S}"/gcc/REVISION || die
+
+ if [[ -z ${PATCH_VER} ]] && ! use vanilla ; then
+ toolchain_fetch_git_patches
+ fi
+ elif [[ -z ${PATCH_VER} && -n ${TOOLCHAIN_USE_GIT_PATCHES} ]] ; then
+ toolchain_fetch_git_patches
fi
- default_src_unpack
+ default
}
#---->> src_prepare <<----
@@ -509,8 +621,6 @@ toolchain_src_prepare() {
cd "${S}" || die
do_gcc_gentoo_patches
- do_gcc_PIE_patches
- do_gcc_CYGWINPORTS_patches
if tc_is_live ; then
BRANDING_GCC_PKGVERSION="${BRANDING_GCC_PKGVERSION}, commit ${EGIT_VERSION}"
@@ -518,9 +628,13 @@ toolchain_src_prepare() {
eapply_user
- if ( tc_version_is_at_least 4.8.2 || _tc_use_if_iuse hardened ) \
- && ! use vanilla ; then
- make_gcc_hard
+ if ! use vanilla ; then
+ tc_enable_hardened_gcc
+ fi
+
+ if use test ; then
+ cp "${DISTDIR}"/${PN}-validate-failures-${GCC_VALIDATE_FAILURES_VERSION}.py "${T}"/validate_failures.py || die
+ chmod +x "${T}"/validate_failures.py || die
fi
# Make sure the pkg-config files install into multilib dirs.
@@ -530,71 +644,47 @@ toolchain_src_prepare() {
-exec sed -i '/^pkgconfigdir/s:=.*:=$(toolexeclibdir)/pkgconfig:' {} + || die
setup_multilib_osdirnames
- gcc_version_patch
local actual_version=$(< "${S}"/gcc/BASE-VER)
- if [[ "${GCC_RELEASE_VER}" != "${actual_version}" ]] ; then
+ if ! tc_is_live && [[ "${GCC_RELEASE_VER}" != "${actual_version}" ]] ; then
eerror "'${S}/gcc/BASE-VER' contains '${actual_version}', expected '${GCC_RELEASE_VER}'"
die "Please set 'TOOLCHAIN_GCC_PV' to '${actual_version}'"
fi
- # >= gcc-4.3 doesn't bundle ecj.jar, so copy it
- if tc_version_is_at_least 4.3 && _tc_use_if_iuse gcj ; then
- if tc_version_is_at_least 4.5 ; then
- einfo "Copying ecj-4.5.jar"
- cp -pPR "${DISTDIR}/ecj-4.5.jar" "${S}/ecj.jar" || die
- else
- einfo "Copying ecj-4.3.jar"
- cp -pPR "${DISTDIR}/ecj-4.3.jar" "${S}/ecj.jar" || die
- fi
- fi
-
- # Prevent libffi from being installed
- if tc_version_is_between 3.0 4.8 ; then
- sed -i -e 's/\(install.*:\) install-.*recursive/\1/' "${S}"/libffi/Makefile.in || die
- sed -i -e 's/\(install-data-am:\).*/\1/' "${S}"/libffi/include/Makefile.in || die
- fi
-
# Fixup libtool to correctly generate .la files with portage
elibtoolize --portage --shallow --no-uclibc
gnuconfig_update
- # Update configure files
- local f
- einfo "Fixing misc issues in configure files"
- for f in $(grep -l 'autoconf version 2.13' $(find "${S}" -name configure)) ; do
- ebegin " Updating ${f/${S}\/} [LANG]"
- patch "${f}" "${FILESDIR}"/gcc-configure-LANG.patch >& "${T}"/configure-patch.log \
- || eerror "Please file a bug about this"
- eend $?
- done
- # bug #215828
- sed -i 's|A-Za-z0-9|[:alnum:]|g' "${S}"/gcc/*.awk || die
-
- # Prevent new texinfo from breaking old versions (see #198182, bug #464008)
- einfo "Remove texinfo (bug #198182, bug #464008)"
- eapply "${FILESDIR}"/gcc-configure-texinfo.patch
-
- # >=gcc-4
- if [[ -x contrib/gcc_update ]] ; then
- einfo "Touching generated files"
- ./contrib/gcc_update --touch | \
- while read f ; do
- einfo " ${f%%...}"
- done
+ if ! use prefix-guest && [[ -n ${EPREFIX} ]] ; then
+ einfo "Prefixifying dynamic linkers..."
+ for f in gcc/config/*/*linux*.h ; do
+ ebegin " Updating ${f}"
+ if [[ ${f} == gcc/config/rs6000/linux*.h ]]; then
+ sed -i -r "s,(DYNAMIC_LINKER_PREFIX\s+)\"\",\1\"${EPREFIX}\",g" "${f}" || die
+ else
+ sed -i -r "/_DYNAMIC_LINKER/s,([\":])(/lib),\1${EPREFIX}\2,g" "${f}" || die
+ fi
+ eend $?
+ done
fi
+
+ einfo "Touching generated files"
+ ./contrib/gcc_update --touch | \
+ while read f ; do
+ einfo " ${f%%...}"
+ done
}
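To make the prefixification loop above concrete, a hypothetical dry run on one header (assuming EPREFIX=/gentoo; the i386 header is just an example):

    # Same substitution as the non-rs6000 branch above, shown standalone:
    sed -E '/_DYNAMIC_LINKER/s,([":])(/lib),\1/gentoo\2,g' gcc/config/i386/linux64.h \
        | grep GLIBC_DYNAMIC_LINKER
    # "/lib64/ld-linux-x86-64.so.2" becomes "/gentoo/lib64/ld-linux-x86-64.so.2"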
do_gcc_gentoo_patches() {
if ! use vanilla ; then
- if [[ -n ${PATCH_VER} ]] ; then
+ if [[ -n ${PATCH_VER} || -d "${WORKDIR}"/patch ]] ; then
einfo "Applying Gentoo patches ..."
eapply "${WORKDIR}"/patch/*.patch
BRANDING_GCC_PKGVERSION="${BRANDING_GCC_PKGVERSION} p${PATCH_VER}"
fi
- if [[ -n ${MUSL_VER} ]] && [[ ${CTARGET} == *musl* ]] ; then
+ if [[ -n ${MUSL_VER} || -d "${WORKDIR}"/musl ]] && [[ ${CTARGET} == *musl* ]] ; then
if [[ ${CATEGORY} == cross-* ]] ; then
# We don't want to apply some patches when cross-compiling.
if [[ -d "${WORKDIR}"/musl/nocross ]] ; then
@@ -614,93 +704,48 @@ do_gcc_gentoo_patches() {
fi
}
-do_gcc_PIE_patches() {
- want_pie || return 0
- use vanilla && return 0
+# configure to build with the hardened GCC specs as the default
+tc_enable_hardened_gcc() {
+ local hardened_gcc_flags=""
- einfo "Applying PIE patches ..."
- eapply "${WORKDIR}"/piepatch/*.patch
+ if _tc_use_if_iuse pie ; then
+ einfo "Updating gcc to use automatic PIE building ..."
+ fi
- BRANDING_GCC_PKGVERSION="${BRANDING_GCC_PKGVERSION}, pie-${PIE_VER}"
-}
+ if _tc_use_if_iuse ssp ; then
+ einfo "Updating gcc to use automatic SSP building ..."
+ fi
-do_gcc_CYGWINPORTS_patches() {
- [[ -n ${CYGWINPORTS_GITREV} ]] || return 0
- use elibc_Cygwin || return 0
-
- local p d="${WORKDIR}/gcc-${CYGWINPORTS_GITREV}"
- # readarray -t is available since bash-4.4 only, bug #690686
- local patches=( $(
- for p in $(
- sed -e '1,/PATCH_URI="/d;/"/,$d' < "${d}"/gcc.cygport
- ); do
- echo "${d}/${p}"
- done
- ) )
- einfo "Applying cygwin port patches ..."
- eapply -- "${patches[@]}"
-}
+ if _tc_use_if_iuse default-stack-clash-protection ; then
+ # The define DEF_GENTOO_SCP is checked in 24_all_DEF_GENTOO_SCP-fstack-clash-protection.patch
+ einfo "Updating gcc to use automatic stack clash protection ..."
+ hardened_gcc_flags+=" -DDEF_GENTOO_SCP"
+ fi
-# configure to build with the hardened GCC specs as the default
-make_gcc_hard() {
- local gcc_hard_flags=""
+ if _tc_use_if_iuse default-znow ; then
+ # The define DEF_GENTOO_ZNOW is checked in 23_all_DEF_GENTOO_ZNOW-z-now.patch
+ einfo "Updating gcc to request symbol resolution at start (-z now) ..."
+ hardened_gcc_flags+=" -DDEF_GENTOO_ZNOW"
+ fi
- # If we use gcc-6 or newer with PIE enabled to compile older gcc,
- # we need to pass -no-pie to stage1; bug #618908
- if ! tc_version_is_at_least 6.0 && [[ $(gcc-major-version) -ge 6 ]] ; then
- einfo "Disabling PIE in stage1 (only) ..."
- sed -i -e "/^STAGE1_LDFLAGS/ s/$/ -no-pie/" "${S}"/Makefile.in || die
+ if _tc_use_if_iuse cet && [[ ${CTARGET} == *x86_64*-linux-gnu* ]] ; then
+ einfo "Updating gcc to use x86-64 control flow protection by default ..."
+ hardened_gcc_flags+=" -DEXTRA_OPTIONS_CF"
fi
- # For gcc >= 6.x, we can use configuration options to turn PIE/SSP
- # on as default
- if tc_version_is_at_least 6.0 ; then
- if _tc_use_if_iuse pie ; then
- einfo "Updating gcc to use automatic PIE building ..."
- fi
- if _tc_use_if_iuse ssp ; then
- einfo "Updating gcc to use automatic SSP building ..."
- fi
- if _tc_use_if_iuse hardened ; then
- # Will add some hardened options as default, like:
- # * -fstack-clash-protection
- # * -z now
- # See gcc *_all_extra-options.patch patches.
- gcc_hard_flags+=" -DEXTRA_OPTIONS"
-
- if _tc_use_if_iuse cet && [[ ${CTARGET} == *x86_64*-linux* ]] ; then
- gcc_hard_flags+=" -DEXTRA_OPTIONS_CF"
- fi
+ if _tc_use_if_iuse hardened ; then
+ # Will add some hardened options as default, e.g. for gcc-12
+ # * -fstack-clash-protection
+ # * -z now
+ # See gcc *_all_extra-options.patch patches.
+ hardened_gcc_flags+=" -DEXTRA_OPTIONS"
+ # Default to -D_FORTIFY_SOURCE=3 instead of -D_FORTIFY_SOURCE=2
+ hardened_gcc_flags+=" -DGENTOO_FORTIFY_SOURCE_LEVEL=3"
+ # Add -D_GLIBCXX_ASSERTIONS
+ hardened_gcc_flags+=" -DDEF_GENTOO_GLIBCXX_ASSERTIONS"
- # Rebrand to make bug reports easier
- BRANDING_GCC_PKGVERSION=${BRANDING_GCC_PKGVERSION/Gentoo/Gentoo Hardened}
- fi
- else
- if _tc_use_if_iuse hardened ; then
- # Rebrand to make bug reports easier
- BRANDING_GCC_PKGVERSION=${BRANDING_GCC_PKGVERSION/Gentoo/Gentoo Hardened}
- if hardened_gcc_works ; then
- einfo "Updating gcc to use automatic PIE + SSP building ..."
- gcc_hard_flags+=" -DEFAULT_PIE_SSP"
- elif hardened_gcc_works pie ; then
- einfo "Updating gcc to use automatic PIE building ..."
- ewarn "SSP has not been enabled by default"
- gcc_hard_flags+=" -DEFAULT_PIE"
- elif hardened_gcc_works ssp ; then
- einfo "Updating gcc to use automatic SSP building ..."
- ewarn "PIE has not been enabled by default"
- gcc_hard_flags+=" -DEFAULT_SSP"
- else
- # Do nothing if hardened isn't supported, but don't die either
- ewarn "hardened is not supported for this arch in this gcc version"
- return 0
- fi
- else
- if hardened_gcc_works ssp ; then
- einfo "Updating gcc to use automatic SSP building ..."
- gcc_hard_flags+=" -DEFAULT_SSP"
- fi
- fi
+ # Rebrand to make bug reports easier
+ BRANDING_GCC_PKGVERSION=${BRANDING_GCC_PKGVERSION/Gentoo/Gentoo Hardened}
fi
# We want to be able to control the PIE patch logic via something other
@@ -708,15 +753,13 @@ make_gcc_hard() {
sed -e '/^ALL_CFLAGS/iHARD_CFLAGS = ' \
-e 's|^ALL_CFLAGS = |ALL_CFLAGS = $(HARD_CFLAGS) |' \
-i "${S}"/gcc/Makefile.in || die
- # Need to add HARD_CFLAGS to ALL_CXXFLAGS on >= 4.7
- if tc_version_is_at_least 4.7 ; then
- sed -e '/^ALL_CXXFLAGS/iHARD_CFLAGS = ' \
- -e 's|^ALL_CXXFLAGS = |ALL_CXXFLAGS = $(HARD_CFLAGS) |' \
- -i "${S}"/gcc/Makefile.in || die
- fi
+
+ sed -e '/^ALL_CXXFLAGS/iHARD_CFLAGS = ' \
+ -e 's|^ALL_CXXFLAGS = |ALL_CXXFLAGS = $(HARD_CFLAGS) |' \
+ -i "${S}"/gcc/Makefile.in || die
sed -i \
- -e "/^HARD_CFLAGS = /s|=|= ${gcc_hard_flags} |" \
+ -e "/^HARD_CFLAGS = /s|=|= ${hardened_gcc_flags} |" \
"${S}"/gcc/Makefile.in || die
}
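A hypothetical before/after of what the two sed passes above leave in gcc/Makefile.in for USE=hardened (the surrounding makefile text is abbreviated):

    # Before:
    #   ALL_CFLAGS = ... $(CFLAGS) ...
    # After (ALL_CXXFLAGS gets the same treatment):
    #   HARD_CFLAGS =  -DEXTRA_OPTIONS -DGENTOO_FORTIFY_SOURCE_LEVEL=3 -DDEF_GENTOO_GLIBCXX_ASSERTIONS
    #   ALL_CFLAGS = $(HARD_CFLAGS) ... $(CFLAGS) ...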
@@ -728,7 +771,7 @@ make_gcc_hard() {
# Most other distros use the logic (including mainline gcc):
# lib - 32bit binaries (x86)
# lib64 - 64bit binaries (x86_64)
-# Over time, Gentoo is migrating to the latter form.
+# Over time, Gentoo is migrating to the latter form (17.1 profiles).
#
# Unfortunately, due to distros picking the lib32 behavior, newer gcc
# versions will dynamically detect whether to use lib or lib32 for its
@@ -752,16 +795,10 @@ setup_multilib_osdirnames() {
config+="/t-linux64"
local sed_args=()
- if tc_version_is_at_least 4.6 ; then
- sed_args+=( -e 's:$[(]call if_multiarch[^)]*[)]::g' )
- fi
+ sed_args+=( -e 's:$[(]call if_multiarch[^)]*[)]::g' )
if [[ ${SYMLINK_LIB} == "yes" ]] ; then
einfo "Updating multilib directories to be: ${libdirs}"
- if tc_version_is_at_least 4.6.4 || tc_version_is_at_least 4.7 ; then
- sed_args+=( -e '/^MULTILIB_OSDIRNAMES.*lib32/s:[$][(]if.*):../lib32:' )
- else
- sed_args+=( -e "/^MULTILIB_OSDIRNAMES/s:=.*:= ${libdirs}:" )
- fi
+ sed_args+=( -e '/^MULTILIB_OSDIRNAMES.*lib32/s:[$][(]if.*):../lib32:' )
else
einfo "Using upstream multilib; disabling lib32 autodetection"
sed_args+=( -r -e 's:[$][(]if.*,(.*)[)]:\1:' )
@@ -769,30 +806,21 @@ setup_multilib_osdirnames() {
sed -i "${sed_args[@]}" "${S}"/gcc/config/${config} || die
}
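For reference, a rough before/after of the SYMLINK_LIB=yes branch above on x86_64 (gcc/config/i386/t-linux64; the exact upstream lines vary between releases, so treat this as a sketch):

    # Before (upstream, abridged):
    #   MULTILIB_OSDIRNAMES = m64=../lib64$(call if_multiarch,:x86_64-linux-gnu)
    #   MULTILIB_OSDIRNAMES+= m32=$(if $(wildcard ...usr/lib32),../lib32,../lib)$(call if_multiarch,:i386-linux-gnu)
    # After the seds (multiarch suffix dropped, lib32 autodetection pinned):
    #   MULTILIB_OSDIRNAMES = m64=../lib64
    #   MULTILIB_OSDIRNAMES+= m32=../lib32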
-gcc_version_patch() {
- # gcc-4.3+ has configure flags (whoo!)
- tc_version_is_at_least 4.3 && return 0
-
- local version_string=${GCC_RELEASE_VER}
-
- einfo "Patching gcc version: ${version_string} (${BRANDING_GCC_PKGVERSION})"
-
- local gcc_sed=( -e 's:gcc\.gnu\.org/bugs\.html:bugs\.gentoo\.org/:' )
- if grep -qs VERSUFFIX "${S}"/gcc/version.c ; then
- gcc_sed+=( -e "/VERSUFFIX \"\"/s:\"\":\" (${BRANDING_GCC_PKGVERSION})\":" )
- else
- version_string="${version_string} (${BRANDING_GCC_PKGVERSION})"
- gcc_sed+=( -e "/const char version_string\[\] = /s:= \".*\":= \"${version_string}\":" )
- fi
- sed -i "${gcc_sed[@]}" "${S}"/gcc/version.c || die
-}
-
#---->> src_configure <<----
toolchain_src_configure() {
+ BUILD_CONFIG_TARGETS=()
+ is-flagq '-O3' && BUILD_CONFIG_TARGETS+=( bootstrap-O3 )
+
downgrade_arch_flags
gcc_do_filter_flags
+ if ! tc_version_is_at_least 11 && [[ $(gcc-major-version) -ge 12 ]] ; then
+ # https://gcc.gnu.org/PR105695
+ # bug #849359
+ export ac_cv_std_swap_in_utility=no
+ fi
+
einfo "CFLAGS=\"${CFLAGS}\""
einfo "CXXFLAGS=\"${CXXFLAGS}\""
einfo "LDFLAGS=\"${LDFLAGS}\""
@@ -801,12 +829,6 @@ toolchain_src_configure() {
# issues with 3rd party jar implementations. bug #384291
export JAR=no
- # For hardened gcc 4.3: add the pie patchset to build the hardened specs
- # file (build.specs) to use when building gcc.
- if ! tc_version_is_at_least 4.4 && want_minispecs ; then
- setup_minispecs_gcc_build_specs
- fi
-
local confgcc=( --host=${CHOST} )
if is_crosscompile || tc-is-cross-compiler ; then
@@ -826,8 +848,19 @@ toolchain_src_configure() {
--mandir="${DATAPATH}/man"
--infodir="${DATAPATH}/info"
--with-gxx-include-dir="${STDCXX_INCDIR}"
+
+ # portage's econf() does not detect presence of --d-s-r
+ # because it greps only the top-level ./configure, but not
+ # libiberty's or gcc's configure.
+ --disable-silent-rules
)
+ if tc_version_is_at_least 10 ; then
+ confgcc+=(
+ --disable-dependency-tracking
+ )
+ fi
+
# Stick the python scripts in their own slotted directory (bug #279252)
#
# --with-python-dir=DIR
@@ -837,18 +870,14 @@ toolchain_src_configure() {
# then --with-python-dir=/lib/python2.5/site-packages should be passed.
#
# This should translate into "/share/gcc-data/${CTARGET}/${GCC_CONFIG_VER}/python"
- if tc_version_is_at_least 4.4 ; then
- confgcc+=( --with-python-dir=${DATAPATH/$PREFIX/}/python )
- fi
+ confgcc+=( --with-python-dir=${DATAPATH/$PREFIX/}/python )
### language options
local GCC_LANG="c"
is_cxx && GCC_LANG+=",c++"
is_d && GCC_LANG+=",d"
- is_gcj && GCC_LANG+=",java"
is_go && GCC_LANG+=",go"
- is_jit && GCC_LANG+=",jit"
if is_objc || is_objcxx ; then
GCC_LANG+=",objc"
use objc-gc && confgcc+=( --enable-objc-gc )
@@ -861,8 +890,9 @@ toolchain_src_configure() {
is_fortran && GCC_LANG+=",fortran"
is_f77 && GCC_LANG+=",f77"
is_f95 && GCC_LANG+=",f95"
-
is_ada && GCC_LANG+=",ada"
+ is_modula2 && GCC_LANG+=",m2"
+ is_rust && GCC_LANG+=",rust"
confgcc+=( --enable-languages=${GCC_LANG} )
@@ -883,46 +913,60 @@ toolchain_src_configure() {
confgcc+=( --disable-libunwind-exceptions )
- # Use the default ("release") checking because upstream usually neglects
- # to test "disabled" so it has a history of breaking. bug #317217
if in_iuse debug ; then
- # The "release" keyword is new to 4.0. bug #551636
- local off=$(tc_version_is_at_least 4.0 && echo release || echo no)
- confgcc+=( --enable-checking="${GCC_CHECKS_LIST:-$(usex debug yes ${off})}" )
+ # Non-released versions get extra checks; follow configure.ac's default for those
+ # unless USE=debug. Note that snapshots on stable branches don't count as "non-released"
+ # for these purposes.
+ if grep -q "experimental" gcc/DEV-PHASE ; then
+ # - USE=debug for pre-releases: yes,extra,rtl
+ # - USE=-debug for pre-releases: yes,extra (following upstream default)
+ confgcc+=( --enable-checking="${GCC_CHECKS_LIST:-$(usex debug yes,extra,rtl yes,extra)}" )
+ else
+ # - Use the default ("release") checking because upstream usually neglects
+ # to test "disabled" so it has a history of breaking. bug #317217.
+ # - The "release" keyword is new to 4.0. bug #551636.
+ # - After discussing in #gcc, we concluded that =yes,extra,rtl makes
+ # more sense when a user explicitly requests USE=debug. If rtl is too slow,
+ # we can change this to yes,extra.
+ confgcc+=( --enable-checking="${GCC_CHECKS_LIST:-$(usex debug yes,extra,rtl release)}" )
+ fi
fi
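Spelled out, the usex calls above resolve to the following configure arguments (GCC_CHECKS_LIST, if set in the environment, overrides either branch):

    # Released sources (gcc/DEV-PHASE does not say "experimental"):
    #   USE=debug  -> --enable-checking=yes,extra,rtl
    #   USE=-debug -> --enable-checking=release
    # Experimental/pre-release sources:
    #   USE=debug  -> --enable-checking=yes,extra,rtl
    #   USE=-debug -> --enable-checking=yes,extra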
# Branding
- tc_version_is_at_least 4.3 && confgcc+=(
+ confgcc+=(
--with-bugurl=https://bugs.gentoo.org/
--with-pkgversion="${BRANDING_GCC_PKGVERSION}"
)
- # If we want hardened support with the newer PIE patchset for >=gcc 4.4
- if tc_version_is_at_least 4.4 && want_minispecs && in_iuse hardened ; then
- confgcc+=( $(use_enable hardened esp) )
+ if tc_use_major_version_only ; then
+ confgcc+=( --with-gcc-major-version-only )
fi
# Allow gcc to search for clock funcs in the main C lib.
# if it can't find them, then tough cookies -- we aren't
# going to link in -lrt to all C++ apps. bug #411681
- if tc_version_is_at_least 4.4 && is_cxx ; then
+ if is_cxx ; then
confgcc+=( --enable-libstdcxx-time )
fi
+ # This only controls whether the compiler *supports* LTO, not whether
+ # it's *built using* LTO. Hence we do it without a USE flag.
+ confgcc+=( --enable-lto )
+
# Build compiler itself using LTO
if tc_version_is_at_least 9.1 && _tc_use_if_iuse lto ; then
- confgcc+=( --with-build-config=bootstrap-lto )
+ BUILD_CONFIG_TARGETS+=( bootstrap-lto )
+ fi
+
+ if tc_version_is_at_least 12 && _tc_use_if_iuse cet && [[ ${CTARGET} == x86_64-*-gnu* ]] ; then
+ BUILD_CONFIG_TARGETS+=( bootstrap-cet )
fi
# Support to disable PCH when building libstdcxx
- if tc_version_is_at_least 6.0 && ! _tc_use_if_iuse pch ; then
+ if ! _tc_use_if_iuse pch ; then
confgcc+=( --disable-libstdcxx-pch )
fi
- # The JIT support requires this.
- # But see bug #843341.
- is_jit && confgcc+=( --enable-host-shared )
-
# build-id was disabled for file collisions: bug #526144
#
# # Turn on the -Wl,--build-id flag by default for ELF targets. bug #525942
@@ -935,12 +979,6 @@ toolchain_src_configure() {
# ;;
# esac
- # Newer gcc versions like to bootstrap themselves with C++,
- # so we need to manually disable it ourselves
- if tc_version_is_between 4.7 4.8 && ! is_cxx ; then
- confgcc+=( --disable-build-with-cxx --disable-build-poststage1-with-cxx )
- fi
-
### Cross-compiler options
if is_crosscompile ; then
# Enable build warnings by default with cross-compilers when system
@@ -974,23 +1012,24 @@ toolchain_src_configure() {
*-musl*)
needed_libc=musl
;;
- *-cygwin)
- needed_libc=cygwin
- ;;
x86_64-*-mingw*|*-w64-mingw*)
needed_libc=mingw64-runtime
;;
avr)
confgcc+=( --enable-shared --disable-threads )
;;
+ nvptx*)
+ # "LTO is not supported for this target"
+ confgcc+=( --disable-lto )
+ ;;
esac
if [[ -n ${needed_libc} ]] ; then
local confgcc_no_libc=( --disable-shared )
# requires libc: bug #734820
- tc_version_is_at_least 4.6 && confgcc_no_libc+=( --disable-libquadmath )
+ confgcc_no_libc+=( --disable-libquadmath )
# requires libc
- tc_version_is_at_least 4.8 && confgcc_no_libc+=( --disable-libatomic )
+ confgcc_no_libc+=( --disable-libatomic )
if ! has_version ${CATEGORY}/${needed_libc} ; then
confgcc+=(
@@ -1017,7 +1056,13 @@ toolchain_src_configure() {
fi
fi
- confgcc+=( --disable-bootstrap )
+ confgcc+=(
+ # https://gcc.gnu.org/PR100289
+ # TODO: Find a way to disable this just for stage1 cross?
+ --disable-gcov
+
+ --disable-bootstrap
+ )
else
if tc-is-static-only ; then
confgcc+=( --disable-shared )
@@ -1032,6 +1077,21 @@ toolchain_src_configure() {
confgcc+=( --enable-threads=posix )
;;
esac
+
+ if ! use prefix-guest ; then
+ # GNU ld scripts, such as those in glibc, reference unprefixed paths
+ # as the sysroot given here is automatically prepended. For
+ # prefix-guest, we use the host's libc instead.
+ if [[ -n ${EPREFIX} ]] ; then
+ confgcc+=( --with-sysroot="${EPREFIX}" )
+ fi
+
+ # We need to build against the right headers and libraries. Again,
+ # for prefix-guest, this is the host's.
+ if [[ -n ${ESYSROOT} ]] ; then
+ confgcc+=( --with-build-sysroot="${ESYSROOT}" )
+ fi
+ fi
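A hypothetical expansion of the sysroot block above for a standalone Prefix install (made-up paths; non-cross, non-prefix-guest):

    # With EPREFIX=/gentoo and ESYSROOT=/gentoo, the block adds:
    #   --with-sysroot=/gentoo
    #   --with-build-sysroot=/gentoo
    # so unprefixed paths in GNU ld scripts (e.g. glibc's libc.so) resolve
    # under the prefix at link time.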
fi
# __cxa_atexit is "essential for fully standards-compliant handling of
@@ -1061,7 +1121,7 @@ toolchain_src_configure() {
# gcc has fixed-point arithmetic support in 4.3 for mips targets that can
# significantly increase compile time by several hours. This will allow
# users to control this feature in the event they need the support.
- tc_version_is_at_least 4.3 && in_iuse fixed-point && confgcc+=( $(use_enable fixed-point) )
+ in_iuse fixed-point && confgcc+=( $(use_enable fixed-point) )
case $(tc-is-softfloat) in
yes)
@@ -1076,14 +1136,14 @@ toolchain_src_configure() {
case ${CTARGET//_/-} in
*-hardfloat-*|*eabihf)
confgcc+=( --with-float=hard )
- ;;
+ ;;
esac
esac
local with_abi_map=()
case $(tc-arch) in
arm)
- # bug 264534, bug #414395
+ # bug #264534, bug #414395
local a arm_arch=${CTARGET%%-*}
# Remove trailing endian variations first: eb el be bl b l
for a in e{b,l} {b,l}e b l ; do
@@ -1097,19 +1157,16 @@ toolchain_src_configure() {
[[ ${arm_arch} == armv7? ]] && arm_arch=${arm_arch/7/7-}
# See if this is a valid --with-arch flag
if (srcdir=${S}/gcc target=${CTARGET} with_arch=${arm_arch};
- . "${srcdir}"/config.gcc) &>/dev/null
+ . "${srcdir}"/config.gcc) &>/dev/null
then
confgcc+=( --with-arch=${arm_arch} )
fi
- # Make default mode thumb for microcontroller classes #418209
+ # Make default mode thumb for microcontroller classes, bug #418209
[[ ${arm_arch} == *-m ]] && confgcc+=( --with-mode=thumb )
# Enable hardvfp
- if [[ $(tc-is-softfloat) == "no" ]] && \
- [[ ${CTARGET} == armv[67]* ]] && \
- tc_version_is_at_least 4.5
- then
+ if [[ $(tc-is-softfloat) == "no" ]] && [[ ${CTARGET} == armv[67]* ]] ; then
# Follow the new arm hardfp distro standard by default
confgcc+=( --with-float=hard )
case ${CTARGET} in
@@ -1117,6 +1174,15 @@ toolchain_src_configure() {
armv7*) confgcc+=( --with-fpu=vfpv3-d16 ) ;;
esac
fi
+
+ # If multilib is used, make the compiler build multilibs
+ # for A or R and M architecture profiles. Do this only
+ # when no specific arch/mode/float is specified, e.g.
+ # for target arm-none-eabi, since doing this is
+ # incompatible with --with-arch/cpu/float/fpu.
+ if is_multilib && [[ ${arm_arch} == arm ]] ; then
+ confgcc+=( --with-multilib-list=aprofile,rmprofile )
+ fi
;;
mips)
# Add --with-abi flags to set default ABI
@@ -1124,34 +1190,38 @@ toolchain_src_configure() {
;;
amd64)
- # drop the older/ABI checks once this get's merged into some
+ # drop the older/ABI checks once this gets merged into some
# version of gcc upstream
- if tc_version_is_at_least 4.8 && has x32 $(get_all_abis TARGET) ; then
+ if has x32 $(get_all_abis TARGET) ; then
confgcc+=( --with-abi=$(gcc-abi-map ${TARGET_DEFAULT_ABI}) )
fi
;;
x86)
- # Default arch for x86 is normally i386, lets give it a bump
+ # Default arch for x86 is normally i386, let's give it a bump
# since glibc will do so based on CTARGET anyways
confgcc+=( --with-arch=${CTARGET%%-*} )
;;
- hppa)
- # Enable sjlj exceptions for backward compatibility on hppa
- [[ ${GCCMAJOR} == "3" ]] && confgcc+=( --enable-sjlj-exceptions )
- ;;
ppc)
# Set up defaults based on current CFLAGS
is-flagq -mfloat-gprs=double && confgcc+=( --enable-e500-double )
[[ ${CTARGET//_/-} == *-e500v2-* ]] && confgcc+=( --enable-e500-double )
;;
ppc64)
- # On ppc64 big endian target gcc assumes elfv1 by default,
- # and elfv2 on little endian
- # but musl does not support elfv1 at all on any endian ppc64
- # see https://git.musl-libc.org/cgit/musl/tree/INSTALL
- # bug #704784
- # https://gcc.gnu.org/PR93157
+ # On ppc64, the big endian target gcc assumes elfv1 by default,
+ # and elfv2 on little endian.
+ # But musl does not support elfv1 at all on any endian ppc64.
+ # See:
+ # - https://git.musl-libc.org/cgit/musl/tree/INSTALL
+ # - bug #704784
+ # - https://gcc.gnu.org/PR93157
[[ ${CTARGET} == powerpc64-*-musl ]] && confgcc+=( --with-abi=elfv2 )
+
+ if in_iuse ieee-long-double; then
+ # musl requires 64-bit long double, not IBM double-double or IEEE quad.
+ if [[ ${CTARGET} == powerpc64le-*-gnu ]]; then
+ use ieee-long-double && confgcc+=( --with-long-double-format=ieee )
+ fi
+ fi
;;
riscv)
# Add --with-abi flags to set default ABI
@@ -1159,23 +1229,19 @@ toolchain_src_configure() {
;;
esac
- # if the target can do biarch (-m32/-m64), enable it. overhead should
+ # If the target can do biarch (-m32/-m64), enable it. overhead should
# be small, and should simplify building of 64bit kernels in a 32bit
# userland by not needing sys-devel/kgcc64. bug #349405
case $(tc-arch) in
- ppc|ppc64)
+ amd64|ppc|ppc64|sparc|x86)
confgcc+=( --enable-targets=all )
;;
- sparc)
- tc_version_is_at_least 4.4 && confgcc+=( --enable-targets=all )
- ;;
- amd64|x86)
- tc_version_is_at_least 4.3 && confgcc+=( --enable-targets=all )
+ *)
;;
esac
# On Darwin we need libdir to be set in order to get correct install names
- # for things like libobjc-gnu, libgcj and libfortran. If we enable it on
+ # for things like libobjc-gnu and libfortran. If we enable it on
# non-Darwin we screw up the behaviour this eclass relies on. We in
# particular need this over --libdir for bug #255315.
[[ ${CTARGET} == *-darwin* ]] && \
@@ -1183,26 +1249,17 @@ toolchain_src_configure() {
### library options
- if tc_version_is_between 3.0 7.0 ; then
- if is_gcj ; then
- confgcc+=( --disable-gjdoc )
- use awt && confgcc+=( --enable-java-awt=gtk )
- else
- confgcc+=( --disable-libgcj )
- fi
- fi
-
if in_iuse openmp ; then
# Make sure target has pthreads support: bug #326757, bug #335883
# There shouldn't be a chicken & egg problem here as openmp won't
- # build without a C library, and you can't build that w/out
- # already having a compiler ...
+ # build without a C library, and you can't build that w/o
+ # already having a compiler...
if ! is_crosscompile || \
$(tc-getCPP ${CTARGET}) -E - <<<"#include <pthread.h>" >& /dev/null
then
confgcc+=( $(use_enable openmp libgomp) )
else
- # Force disable as the configure script can be dumb #359855
+ # Force disable as the configure script can be dumb, bug #359855
confgcc+=( --disable-libgomp )
fi
else
@@ -1213,12 +1270,9 @@ toolchain_src_configure() {
if _tc_use_if_iuse libssp ; then
confgcc+=( --enable-libssp )
else
- if hardened_gcc_is_stable ssp; then
- export gcc_cv_libc_provides_ssp=yes
- fi
if _tc_use_if_iuse ssp; then
# On some targets USE="ssp -libssp" is an invalid
- # configuration as target libc does not provide
+ # configuration as the target libc does not provide
# stack_chk_* functions. Do not disable libssp there.
case ${CTARGET} in
mingw*|*-mingw*)
@@ -1238,15 +1292,8 @@ toolchain_src_configure() {
fi
if in_iuse cet ; then
- confgcc+=( $(use_enable cet) )
- fi
-
- if in_iuse cilk ; then
- confgcc+=( $(use_enable cilk libcilkrts) )
- fi
-
- if in_iuse mpx ; then
- confgcc+=( $(use_enable mpx libmpx) )
+ [[ ${CTARGET} == x86_64-*-gnu* ]] && confgcc+=( $(use_enable cet) )
+ [[ ${CTARGET} == aarch64-*-gnu* ]] && confgcc+=( $(use_enable cet standard-branch-protection) )
fi
if in_iuse systemtap ; then
@@ -1269,48 +1316,82 @@ toolchain_src_configure() {
confgcc+=( $(use_with zstd) )
fi
- if tc_version_is_at_least 4.6 ; then
- confgcc+=( --enable-lto )
- elif tc_version_is_at_least 4.5 ; then
- confgcc+=( --disable-lto )
- fi
-
# graphite was added in 4.4 but we only support it in 6.5+ due to external
# library issues. bug #448024, bug #701270
- if tc_version_is_at_least 6.5 && in_iuse graphite ; then
+ if in_iuse graphite ; then
confgcc+=( $(use_with graphite isl) )
use graphite && confgcc+=( --disable-isl-version-check )
- elif tc_version_is_at_least 5.0 ; then
+ else
confgcc+=( --without-isl )
- elif tc_version_is_at_least 4.8 ; then
- confgcc+=( --without-cloog )
- elif tc_version_is_at_least 4.4 ; then
- confgcc+=( --without-{cloog,ppl} )
fi
- if tc_version_is_at_least 4.8; then
- if in_iuse sanitize ; then
- # See Note [implicitly enabled flags]
- confgcc+=( $(usex sanitize '' --disable-libsanitizer) )
- else
- confgcc+=( --disable-libsanitizer )
- fi
+ if in_iuse sanitize ; then
+ # See Note [implicitly enabled flags]
+ confgcc+=( $(usex sanitize '' --disable-libsanitizer) )
+ else
+ confgcc+=( --disable-libsanitizer )
fi
- if tc_version_is_at_least 6.0 && in_iuse pie ; then
+ if in_iuse pie ; then
confgcc+=( $(use_enable pie default-pie) )
+
+ if tc_version_is_at_least 14.0.0_pre20230612 ${PV} ; then
+ confgcc+=( --enable-host-pie )
+ fi
fi
- if tc_version_is_at_least 6.0 && in_iuse ssp ; then
+ if in_iuse default-znow && tc_version_is_at_least 14.0.0_pre20230619 ${PV}; then
+ # See https://gcc.gnu.org/git/?p=gcc.git;a=commit;h=33ebb0dff9bb022f1e0709e0e73faabfc3df7931.
+ # TODO: Add to LDFLAGS_FOR_TARGET?
+ confgcc+=(
+ $(use_enable default-znow host-bind-now)
+ )
+ fi
+
+ if in_iuse ssp ; then
confgcc+=(
# This defaults to -fstack-protector-strong.
$(use_enable ssp default-ssp)
)
fi
- # Disable gcc info regeneration -- it ships with generated info pages
- # already. Our custom version/urls/etc... trigger it. bug #464008
- export gcc_cv_prog_makeinfo_modern=no
+ if tc_version_is_at_least 13.1 ; then
+ # Re-enable fixincludes for >= GCC 13 with older glibc
+ # https://gcc.gnu.org/PR107128
+ if ! is_crosscompile && use elibc_glibc && has_version "<sys-libs/glibc-2.38" ; then
+ GCC_RUN_FIXINCLUDES=1
+ fi
+
+ case ${CBUILD}-${CHOST}-${CTARGET} in
+ *i686-w64-mingw32*|*x86_64-w64-mingw32*)
+ # config/i386/t-cygming requires fixincludes (bug #925204)
+ GCC_RUN_FIXINCLUDES=1
+ ;;
+ *mips*-sde-elf*)
+ # config/mips/t-sdemtk needs fixincludes too (bug #925204)
+ # It maps to mips*-sde-elf*, but only with --without-newlib.
+ if [[ ${confgcc} != *with-newlib* ]] ; then
+ GCC_RUN_FIXINCLUDES=1
+ fi
+ ;;
+ *)
+ ;;
+ esac
+
+ if [[ ${GCC_RUN_FIXINCLUDES} == 1 ]] ; then
+ confgcc+=( --enable-fixincludes )
+ else
+ confgcc+=( --disable-fixincludes )
+ fi
+ fi
+
+ # TODO: Ignore RCs here (but TOOLCHAIN_IS_RC isn't yet an eclass var)
+ if [[ ${PV} == *_p* && -f "${S}"/gcc/doc/gcc.info ]] ; then
+ # Safeguard against https://gcc.gnu.org/PR106899 being fixed
+ # without corresponding ebuild changes.
+ eqawarn "Snapshot release with pre-generated info pages found!"
+ eqawarn "The BDEPEND in the ebuild should be updated to drop texinfo."
+ fi
# Do not let the X detection get in our way. We know things can be found
# via system paths, so no need to hardcode things that'll break multilib.
@@ -1318,7 +1399,13 @@ toolchain_src_configure() {
# killing the 32bit builds which want /usr/lib.
export ac_cv_have_x='have_x=yes ac_x_includes= ac_x_libraries='
- confgcc+=( "$@" ${EXTRA_ECONF} )
+ eval "local -a EXTRA_ECONF=(${EXTRA_ECONF})"
+ confgcc+=( "$@" "${EXTRA_ECONF[@]}" )
+
+ if ! is_crosscompile && ! tc-is-cross-compiler && [[ -n ${BUILD_CONFIG_TARGETS} ]] ; then
+ # e.g. ./configure --with-build-config='bootstrap-lto bootstrap-cet'
+ confgcc+=( --with-build-config="${BUILD_CONFIG_TARGETS[*]}" )
+ fi
# Nothing wrong with a good dose of verbosity
echo
@@ -1327,27 +1414,73 @@ toolchain_src_configure() {
einfo "LIBPATH: ${LIBPATH}"
einfo "DATAPATH: ${DATAPATH}"
einfo "STDCXX_INCDIR: ${STDCXX_INCDIR}"
- echo
einfo "Languages: ${GCC_LANG}"
echo
- einfo "Configuring GCC with: ${confgcc[@]//--/\n\t--}"
- echo
# Build in a separate build tree
- mkdir -p "${WORKDIR}"/build
- pushd "${WORKDIR}"/build > /dev/null
+ mkdir -p "${WORKDIR}"/build || die
+ pushd "${WORKDIR}"/build > /dev/null || die
# ...and now to do the actual configuration
addwrite /dev/zero
- echo "${S}"/configure "${confgcc[@]}"
+ local gcc_shell="${BROOT}"/bin/bash
# Older gcc versions did not detect bash and re-exec itself, so force the
- # use of bash. Newer ones will auto-detect, but this is not harmful.
- CONFIG_SHELL="${BROOT}/bin/bash" \
- "${BROOT}"/bin/bash "${S}"/configure "${confgcc[@]}" || die "failed to run configure"
+ # use of bash for them.
+ if tc_version_is_at_least 11.2 ; then
+ gcc_shell="${BROOT}"/bin/sh
+ fi
+
+ if is_jit ; then
+ einfo "Configuring JIT gcc"
+
+ local confgcc_jit=(
+ "${confgcc[@]}"
+
+ --enable-lto
+ --disable-analyzer
+ --disable-bootstrap
+ --disable-cet
+ --disable-default-pie
+ --disable-default-ssp
+ --disable-gcov
+ --disable-libada
+ --disable-libatomic
+ --disable-libgomp
+ --disable-libitm
+ --disable-libquadmath
+ --disable-libsanitizer
+ --disable-libssp
+ --disable-libstdcxx-pch
+ --disable-libvtv
+ --disable-nls
+ --disable-objc-gc
+ --disable-systemtap
+ --enable-host-shared
+ --enable-languages=jit
+ # Might be used for the just-built GCC. Easier to just
+ # respect USE=graphite here in case the user passes some
+ # graphite flags rather than try strip them out.
+ $(use_with graphite isl)
+ $(use_with zstd)
+ --with-system-zlib
+ )
+
+ if tc_version_is_at_least 13.1 ; then
+ confgcc_jit+=( --disable-fixincludes )
+ fi
+
+ mkdir -p "${WORKDIR}"/build-jit || die
+ pushd "${WORKDIR}"/build-jit > /dev/null || die
+
+ CONFIG_SHELL="${gcc_shell}" edo "${gcc_shell}" "${S}"/configure "${confgcc_jit[@]}"
+ popd > /dev/null || die
+ fi
+
+ CONFIG_SHELL="${gcc_shell}" edo "${gcc_shell}" "${S}"/configure "${confgcc[@]}"
# Return to whatever directory we were in before
- popd > /dev/null
+ popd > /dev/null || die
}
# Replace -m flags unsupported by the version being built with the best
@@ -1369,6 +1502,7 @@ downgrade_arch_flags() {
# "added" "arch" "replacement"
local archlist=(
+ 12.3 znver4 znver3
10 znver3 znver2
9 znver2 znver1
4.9 bdver4 bdver3
@@ -1474,10 +1608,61 @@ gcc_do_filter_flags() {
# Lock gcc at -O2; we want to be conservative here.
filter-flags '-O?'
- append-flags -O2
+
+ # We allow -O3 given it's a supported option upstream.
+ # Only add -O2 if we're not doing -O3.
+ if [[ ${BUILD_CONFIG_TARGETS[@]} == *bootstrap-O3* ]] ; then
+ append-flags '-O3'
+ else
+ append-flags '-O2'
+ fi
+ fi
+
+ declare -A l1_cache_sizes=()
+ # Workaround for inconsistent cache sizes on hybrid P/E cores
+ # See PR111768 (and bug #904426, bug #908523, and bug #915389)
+ if [[ ${CBUILD} == @(x86_64|i?86)* ]] && [[ ${CFLAGS} == *-march=native* ]] && tc-is-gcc ; then
+ local x
+ local l1_cache_size
+ # Iterate over all cores and find their L1 cache size
+ for x in $(seq 0 $(($(nproc)-1))) ; do
+ [[ -z ${x} || ${x} -gt 64 ]] && break
+ l1_cache_size=$(taskset --cpu-list ${x} $(tc-getCC) -Q --help=params -O2 -march=native \
+ | awk '{ if ($1 ~ /^.*param.*l1-cache-size/) print $2; }' || die)
+ [[ -n ${l1_cache_size} && ${l1_cache_size} =~ ^[0-9]+$ ]] || break
+ l1_cache_sizes[${l1_cache_size}]=1
+ done
+ # If any of them are different, abort. We can't just pass one value of
+ # l1-cache-size because it doesn't cancel out the -march=native one.
+ if [[ ${#l1_cache_sizes[@]} -gt 1 ]] ; then
+ eerror "Different values of l1-cache-size detected!"
+ eerror "GCC will fail to bootstrap when comparing files with these flags."
+ eerror "This CPU is likely big.little/hybrid hardware with power/efficiency cores."
+ eerror "Please install app-misc/resolve-march-native and run 'resolve-march-native'"
+ eerror "to find a safe value of CFLAGS for this CPU. Note that this may vary"
+ eerror "depending on the core it ran on. taskset can be used to fix the cores used."
+ die "Varying l1-cache-size found, aborting (bug #915389, gcc PR#111768)"
+ fi
+ fi
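An illustrative reproduction of the per-core probe above on hypothetical hybrid hardware (the core numbers and 64/48 values are invented):

    # P-core and E-core may report different l1-cache-size params for -march=native:
    taskset --cpu-list 0 gcc -Q --help=params -O2 -march=native | grep 'l1-cache-size'   # e.g. 64
    taskset --cpu-list 8 gcc -Q --help=params -O2 -march=native | grep 'l1-cache-size'   # e.g. 48
    # Any mismatch would break the bootstrap stage comparison, hence the die above.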
+
+ if ver_test -lt 13.6 ; then
+ # These aren't supported by the just-built compiler either.
+ filter-flags -fharden-compares -fharden-conditional-branches \
+ -fharden-control-flow-redundancy -fno-harden-control-flow-redundancy \
+ -fhardcfr-skip-leaf -fhardcfr-check-exceptions \
+ -fhardcfr-check-returning-calls '-fhardcfr-check-noreturn-calls=*'
+
+ # New in GCC 14.
+ filter-flags -Walloc-size
+ else
+ # Makes things painfully slow and no real benefit for the compiler.
+ append-flags $(test-flags-CC -fno-harden-control-flow-redundancy)
fi
- # Don't want to funk ourselves
+ # Please use USE=lto instead (bug #906007).
+ filter-lto
+
+ # Avoid shooting self in foot
filter-flags '-mabi*' -m31 -m32 -m64
# bug #490738
@@ -1487,39 +1672,14 @@ gcc_do_filter_flags() {
filter-flags '-fsanitize=*'
- if tc_version_is_between 6 8 ; then
- # -mstackrealign triggers crashes in exception throwing
- # at least on ada: bug #688580
- # The reason is unknown. Drop the flag for now.
- filter-flags -mstackrealign
- fi
-
case $(tc-arch) in
amd64|x86)
filter-flags '-mcpu=*'
-
- # bug #357287
- tc_version_is_between 4.4 4.5 && append-flags -mno-avx
-
- if tc_version_is_between 4.6 4.7 ; then
- # bug #411333, bug #466454
- replace-cpu-flags c3-2 pentium2 pentium3 pentium3m pentium-m i686
- fi
;;
alpha)
# bug #454426
append-ldflags -Wl,--no-relax
;;
- sparc)
- # Temporary workaround for random ICEs reproduced by multiple users
- # bug #457062
- tc_version_is_between 4.6 4.8 && MAKEOPTS+=" -j1"
- ;;
- *-macos)
- # https://gcc.gnu.org/PR25127
- tc_version_is_between 4.0 4.2 && \
- filter-flags '-mcpu=*' '-march=*' '-mtune=*'
- ;;
esac
strip-unsupported-flags
@@ -1535,24 +1695,6 @@ gcc_do_filter_flags() {
local VAR="CFLAGS_"${CTARGET//[-.]/_}
CXXFLAGS=${!VAR-${CFLAGS}}
fi
-
- export GCJFLAGS=${GCJFLAGS:-${CFLAGS}}
-}
-
-setup_minispecs_gcc_build_specs() {
- # Setup the "build.specs" file for gcc 4.3 to use when building.
- if hardened_gcc_works pie ; then
- cat "${WORKDIR}"/specs/pie.specs >> "${WORKDIR}"/build.specs
- fi
- if hardened_gcc_works ssp ; then
- for s in ssp sspall ; do
- cat "${WORKDIR}"/specs/${s}.specs >> "${WORKDIR}"/build.specs
- done
- fi
- for s in nostrict znow ; do
- cat "${WORKDIR}"/specs/${s}.specs >> "${WORKDIR}"/build.specs
- done
- export GCC_SPECS="${WORKDIR}"/build.specs
}
gcc-multilib-configure() {
@@ -1572,10 +1714,11 @@ gcc-multilib-configure() {
local l=$(gcc-abi-map ${abi})
[[ -n ${l} ]] && list+=",${l}"
done
+
if [[ -n ${list} ]] ; then
case ${CTARGET} in
- x86_64*)
- tc_version_is_at_least 4.8 && confgcc+=( --with-multilib-list=${list:1} )
+ x86_64*)
+ confgcc+=( --with-multilib-list=${list:1} )
;;
esac
fi
@@ -1606,10 +1749,10 @@ gcc-abi-map() {
#----> src_compile <----
toolchain_src_compile() {
- touch "${S}"/gcc/c-gperf.h
+ touch "${S}"/gcc/c-gperf.h || die
# Do not make manpages if we do not have perl ...
- [[ ! -x /usr/bin/perl ]] \
+ [[ ! -x "${BROOT}"/usr/bin/perl ]] \
&& find "${WORKDIR}"/build -name '*.[17]' -exec touch {} +
# To compile ada library standard files special compiler options are passed
@@ -1618,10 +1761,15 @@ toolchain_src_compile() {
unset ADAFLAGS
# Older gcc versions did not detect bash and re-exec itself, so force the
- # use of bash. Newer ones will auto-detect, but this is not harmful.
+ # use of bash for them.
# This needs to be set for compile as well, as it's used in libtool
# generation, which will break install otherwise (at least in 3.3.6): bug #664486
- CONFIG_SHELL="${EPREFIX}/bin/bash" \
+ local gcc_shell="${BROOT}"/bin/bash
+ if tc_version_is_at_least 11.2 ; then
+ gcc_shell="${BROOT}"/bin/sh
+ fi
+
+ CONFIG_SHELL="${gcc_shell}" \
gcc_do_make ${GCC_MAKE_TARGET}
}
@@ -1637,57 +1785,85 @@ gcc_do_make() {
# default target
if is_crosscompile || tc-is-cross-compiler ; then
- # 3 stage bootstrapping doesnt quite work when you cant run the
- # resulting binaries natively ^^;
+ # 3 stage bootstrapping doesn't quite work when you can't run the
+ # resulting binaries natively
GCC_MAKE_TARGET=${GCC_MAKE_TARGET-all}
else
- if _tc_use_if_iuse pgo; then
+ if [[ ${EXTRA_ECONF} == *--disable-bootstrap* ]] ; then
+ GCC_MAKE_TARGET=${GCC_MAKE_TARGET-all}
+
+ ewarn "Disabling bootstrapping. ONLY recommended for development."
+ ewarn "This is NOT a safe configuration for end users!"
+ ewarn "This compiler may not be safe or reliable for production use!"
+ elif _tc_use_if_iuse pgo; then
GCC_MAKE_TARGET=${GCC_MAKE_TARGET-profiledbootstrap}
else
GCC_MAKE_TARGET=${GCC_MAKE_TARGET-bootstrap-lean}
fi
fi
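As a usage note, the --disable-bootstrap branch above is normally reached by passing EXTRA_ECONF from the environment; a hypothetical developer invocation:

    # Skip the 3-stage bootstrap for a faster development rebuild.
    # NOT for production use, as the ewarn above stresses.
    EXTRA_ECONF="--disable-bootstrap" emerge --oneshot sys-devel/gcc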
- # Older versions of GCC could not do profiledbootstrap in parallel due to
- # collisions with profiling info.
- if [[ ${GCC_MAKE_TARGET} == "profiledbootstrap" ]]; then
- ! tc_version_is_at_least 4.6 && export MAKEOPTS="${MAKEOPTS} -j1"
- fi
-
- if [[ ${GCC_MAKE_TARGET} == "all" ]] ; then
- STAGE1_CFLAGS=${STAGE1_CFLAGS-"${CFLAGS}"}
- elif [[ $(gcc-version) == "3.4" && ${GCC_BRANCH_VER} == "3.4" ]] && gcc-specs-ssp ; then
- # See bug #79852
- STAGE1_CFLAGS=${STAGE1_CFLAGS-"-O2"}
- fi
+ local emakeargs=(
+ LDFLAGS="${LDFLAGS}"
+ LIBPATH="${LIBPATH}"
+ )
if is_crosscompile; then
# In 3.4, BOOT_CFLAGS is never used on a crosscompile...
# but I'll leave this in anyways as someone might have had
# some reason for putting it in here... --eradicator
BOOT_CFLAGS=${BOOT_CFLAGS-"-O2"}
+ emakeargs+=( BOOT_CFLAGS="${BOOT_CFLAGS}" )
else
- # we only want to use the system's CFLAGS if not building a
+ # XXX: Hack for bug #914881, clean this up when fixed and go back
+ # to just calling get_abi_LDFLAGS as before.
+ local abi_ldflags="$(get_abi_LDFLAGS ${TARGET_DEFAULT_ABI})"
+ if [[ -n ${abi_ldflags} ]] ; then
+ printf -v abi_ldflags -- "-Wl,%s " ${abi_ldflags}
+ fi
+
+ # If the host compiler is too old, let's use -O0 per the upstream
+ # default to be safe (to avoid a bootstrap comparison failure later).
+ #
+ # The last known issues are with < GCC 4.9 or so, but it's easier
+ # to keep this bound somewhat fresh just to avoid problems. Ultimately,
+ # using not-O0 is just a build-time speed improvement anyway.
+ if ! tc-is-gcc || ver_test $(gcc-fullversion) -lt 10 ; then
+ STAGE1_CFLAGS="-O0"
+ fi
+
+ # We only want to use the system's CFLAGS if not building a
# cross-compiler.
+ STAGE1_CFLAGS=${STAGE1_CFLAGS-"$(get_abi_CFLAGS ${TARGET_DEFAULT_ABI}) ${CFLAGS}"}
+ STAGE1_LDFLAGS=${STAGE1_LDFLAGS-"${abi_ldflags} ${LDFLAGS}"}
BOOT_CFLAGS=${BOOT_CFLAGS-"$(get_abi_CFLAGS ${TARGET_DEFAULT_ABI}) ${CFLAGS}"}
+ BOOT_LDFLAGS=${BOOT_LDFLAGS-"${abi_ldflags} ${LDFLAGS}"}
+ LDFLAGS_FOR_TARGET="${LDFLAGS_FOR_TARGET:-${LDFLAGS}}"
+
+ emakeargs+=(
+ STAGE1_CFLAGS="${STAGE1_CFLAGS}"
+ STAGE1_LDFLAGS="${STAGE1_LDFLAGS}"
+ BOOT_CFLAGS="${BOOT_CFLAGS}"
+ BOOT_LDFLAGS="${BOOT_LDFLAGS}"
+ LDFLAGS_FOR_TARGET="${LDFLAGS_FOR_TARGET}"
+ )
fi
- einfo "Compiling ${PN} (${GCC_MAKE_TARGET})..."
-
- pushd "${WORKDIR}"/build >/dev/null
+ if is_jit ; then
+ # TODO: docs for jit?
+ einfo "Building JIT"
+ emake -C "${WORKDIR}"/build-jit "${emakeargs[@]}"
+ fi
- emake \
- LDFLAGS="${LDFLAGS}" \
- STAGE1_CFLAGS="${STAGE1_CFLAGS}" \
- LIBPATH="${LIBPATH}" \
- BOOT_CFLAGS="${BOOT_CFLAGS}" \
- ${GCC_MAKE_TARGET}
+ einfo "Compiling ${PN} (${GCC_MAKE_TARGET})..."
+ pushd "${WORKDIR}"/build >/dev/null || die
+ emake "${emakeargs[@]}" ${GCC_MAKE_TARGET}
if is_ada; then
- # Without these links it is not getting the good compiler
- # Need to check why
+ # Without these links, the build does not pick up the just-built compiler
+ # TODO: Need to check why
ln -s gcc ../build/prev-gcc || die
ln -s ${CHOST} ../build/prev-${CHOST} || die
+
# Building standard ada library
emake -C gcc gnatlib-shared
# Building gnat tools
@@ -1696,15 +1872,11 @@ gcc_do_make() {
if ! is_crosscompile && _tc_use_if_iuse cxx && _tc_use_if_iuse doc ; then
if type -p doxygen > /dev/null ; then
- if tc_version_is_at_least 4.3 ; then
- cd "${CTARGET}"/libstdc++-v3/doc
- emake doc-man-doxygen
- else
- cd "${CTARGET}"/libstdc++-v3
- emake doxygen-man
- fi
+ cd "${CTARGET}"/libstdc++-v3/doc || die
+ emake doc-man-doxygen
+
# Clean bogus manpages. bug #113902
- find -name '*_build_*' -delete
+ find -name '*_build_*' -delete || die
# Blow away generated directory references. Newer versions of gcc
# have gotten better at this, but not perfect. This is easier than
@@ -1716,56 +1888,150 @@ gcc_do_make() {
fi
fi
- popd >/dev/null
+ popd >/dev/null || die
}
#---->> src_test <<----
toolchain_src_test() {
- cd "${WORKDIR}"/build
+ # GCC's testsuite is a special case.
+ #
+ # * Generally, people work off comparisons rather than a full set of
+ # passing tests.
+ #
+ # * The guality (sic) tests are for debug info quality and are especially
+ # unreliable.
+ #
+ # * The execute torture tests are hopefully a good way for us to smoketest
+ # and find critical regressions.
+
+ # From openSUSE's spec file: "asan needs a whole shadow address space"
+ ulimit -v unlimited
# 'asan' wants to be preloaded first, so does 'sandbox'.
- # To make asan tests work disable sandbox for all of test suite.
- # 'backtrace' tests also does not like 'libsandbox.so' presence.
- SANDBOX_ON=0 LD_PRELOAD= emake -k check
+ # To make asan tests work, we disable sandbox for all of test suite.
+ # The 'backtrace' tests also do not like the presence of 'libsandbox.so'.
+ local -x SANDBOX_ON=0
+ local -x LD_PRELOAD=
+
+ # Controls running expensive tests in e.g. the torture testsuite.
+ local -x GCC_TEST_RUN_EXPENSIVE=1
+
+ # nonfatal here as we die if the comparison below fails. Also, note that
+ # the exit code of targets other than 'check' may be unreliable.
+ nonfatal emake -C "${WORKDIR}"/build -k "${GCC_TESTS_CHECK_TARGET}" RUNTESTFLAGS="${GCC_TESTS_RUNTESTFLAGS}"
+
+ # Produce an updated failure manifest.
+ einfo "Generating a new failure manifest ${T}/${CHOST}.xfail"
+ rm -f "${T}"/${CHOST}.xfail
+ edo "${T}"/validate_failures.py \
+ --srcpath="${S}" \
+ --build_dir="${WORKDIR}"/build \
+ --manifest="${T}"/${CHOST}.xfail \
+ --produce_manifest &> /dev/null
+
+ local manifest="${GCC_TESTS_COMPARISON_DIR}/${GCC_TESTS_COMPARISON_SLOT}/${CHOST}.xfail"
+
+ if [[ -f "${manifest}" ]] ; then
+ # TODO: Distribute some baseline results in e.g. gcc-patches.git?
+ # validate_failures.py manifest files support include directives.
+ einfo "Comparing with previous cached results at ${manifest}"
+
+ nonfatal edo "${T}"/validate_failures.py \
+ --srcpath="${S}" \
+ --build_dir="${WORKDIR}"/build \
+ --manifest="${manifest}"
+ ret=$?
+
+ if [[ -n ${GCC_TESTS_REGEN_BASELINE} ]] ; then
+ eerror "GCC_TESTS_REGEN_BASELINE is set, ignoring test result and creating a new baseline..."
+ elif [[ ${ret} != 0 ]]; then
+ die "Tests failed (failures not listed in the baseline data)"
+ fi
+ else
+ nonfatal edo "${T}"/validate_failures.py \
+ --srcpath="${S}" \
+ --build_dir="${WORKDIR}"/build
+ ret=$?
+
+ # We have no reference data saved from a previous run to know if
+ # the failures are tolerable or not, so we bail out.
+ eerror "No reference test data at ${manifest}!"
+ eerror "GCC's tests require a baseline to compare with for any reasonable interpretation of results."
+
+ if [[ -n ${GCC_TESTS_IGNORE_NO_BASELINE} ]] ; then
+ eerror "GCC_TESTS_IGNORE_NO_BASELINE is set, ignoring test result and creating a new baseline..."
+ elif [[ -n ${GCC_TESTS_REGEN_BASELINE} ]] ; then
+ eerror "GCC_TESTS_REGEN_BASELINE is set, ignoring test result and creating using a new baseline..."
+ elif [[ ${ret} != 0 ]]; then
+ eerror "(Set GCC_TESTS_IGNORE_NO_BASELINE=1 to make this non-fatal and generate a baseline.)"
+ die "Tests failed (failures occurred with no reference data)"
+ fi
+ fi
}
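A hedged sketch of the test/baseline workflow this implements, using the variables referenced above (the emerge/FEATURES invocation is illustrative):

    # First run: no stored manifest yet, so permit generating one.
    GCC_TESTS_IGNORE_NO_BASELINE=1 FEATURES="test" emerge --oneshot sys-devel/gcc
    # Subsequent runs compare against the manifest installed by pkg_preinst, roughly:
    #   "${T}"/validate_failures.py --srcpath="${S}" --build_dir="${WORKDIR}"/build \
    #       --manifest="${GCC_TESTS_COMPARISON_DIR}/${GCC_TESTS_COMPARISON_SLOT}/${CHOST}.xfail"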
#---->> src_install <<----
toolchain_src_install() {
- cd "${WORKDIR}"/build
+ cd "${WORKDIR}"/build || die
# Don't allow symlinks in private gcc include dir as this can break the build
- find gcc/include*/ -type l -delete
+ find gcc/include*/ -type l -delete || die
- # Copy over the info pages. We disabled their generation earlier, but the
- # build system only expects to install out of the build dir, not the source. bug #464008
- mkdir -p gcc/doc
- local x=
- for x in "${S}"/gcc/doc/*.info* ; do
- if [[ -f ${x} ]] ; then
- cp "${x}" gcc/doc/ || die
- fi
- done
+ if [[ ${GCC_RUN_FIXINCLUDES} == 0 ]] ; then
+ # We remove the generated fixincludes, as they can cause things to break
+ # (ncurses, openssl, etc). We do not prevent them from being built, as
+ # in the following commit which we revert:
+ # https://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/eclass/toolchain.eclass?r1=1.647&r2=1.648
+ # This is because bsd userland needs fixedincludes to build gcc, while
+ # linux does not. Both can dispose of them afterwards.
+ while read x ; do
+ grep -q 'It has been auto-edited by fixincludes from' "${x}" \
+ && rm -f "${x}"
+ done < <(find gcc/include*/ -name '*.h')
+ fi
- # We remove the generated fixincludes, as they can cause things to break
- # (ncurses, openssl, etc). We do not prevent them from being built, as
- # in the following commit which we revert:
- # https://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/eclass/toolchain.eclass?r1=1.647&r2=1.648
- # This is because bsd userland needs fixedincludes to build gcc, while
- # linux does not. Both can dispose of them afterwards.
- while read x ; do
- grep -q 'It has been auto-edited by fixincludes from' "${x}" \
- && rm -f "${x}"
- done < <(find gcc/include*/ -name '*.h')
+ if is_jit ; then
+ # See https://gcc.gnu.org/onlinedocs/gcc-11.3.0/jit/internals/index.html#packaging-notes
+ # and bug #843341.
+ #
+ # Both of the non-JIT and JIT builds are configured to install to $(DESTDIR)
+ # Install the configuration with --enable-host-shared first
+ # *then* the one without, so that the faster build
+ # of "cc1" et al overwrites the slower build.
+ #
+ # Do the 'make install' from the build directory
+ pushd "${WORKDIR}"/build-jit > /dev/null || die
+ S="${WORKDIR}"/build-jit emake DESTDIR="${D}" -j1 install
+
+ # Punt some tools which are really only useful while building gcc
+ find "${ED}" -name install-tools -prune -type d -exec rm -rf "{}" \; || die
+ # This one comes with binutils
+ find "${ED}" -name libiberty.a -delete || die
+
+ # Move the libraries to the proper location
+ gcc_movelibs
+
+ popd > /dev/null || die
+ fi
# Do the 'make install' from the build directory
- S="${WORKDIR}"/build emake -j1 DESTDIR="${D}" install || die
+ #
+ # Unfortunately, we have to use -j1 for make install. Upstream
+ # doesn't really test it, and there's not much appetite for fixing bugs
+ # with it. Several reported bugs exist where the resulting image
+ # was wrong, rather than a simple compile/install failure:
+ # - bug #906155
+ # - https://gcc.gnu.org/PR42980
+ # - https://gcc.gnu.org/PR51814
+ # - https://gcc.gnu.org/PR103656
+ # - https://gcc.gnu.org/PR109898
+ S="${WORKDIR}"/build emake DESTDIR="${D}" -j1 install
# Punt some tools which are really only useful while building gcc
- find "${ED}" -name install-tools -prune -type d -exec rm -rf "{}" \;
+ find "${ED}" -name install-tools -prune -type d -exec rm -rf "{}" \; || die
# This one comes with binutils
- find "${ED}" -name libiberty.a -delete
+ find "${ED}" -name libiberty.a -delete || die
# Move the libraries to the proper location
gcc_movelibs
@@ -1781,18 +2047,11 @@ toolchain_src_install() {
create_gcc_env_entry
create_revdep_rebuild_entry
- # Setup the gcc_env_entry for hardened gcc 4 with minispecs
- want_minispecs && copy_minispecs_gcc_specs
-
- # Make sure we don't have stuff lying around that
- # can nuke multiple versions of gcc
- gcc_slot_java
-
dodir /usr/bin
- cd "${D}"${BINPATH}
+ cd "${D}"${BINPATH} || die
# Ugh: we really need to auto-detect this list.
# It's constantly out of date.
- for x in cpp gcc g++ c++ gcov g77 gcj gcjh gfortran gccgo gnat* ; do
+ for x in cpp gcc gccrs g++ c++ gcov g77 gfortran gccgo gnat* ; do
# For some reason, g77 gets made instead of ${CTARGET}-g77...
# this should take care of that
if [[ -f ${x} ]] ; then
@@ -1824,7 +2083,7 @@ toolchain_src_install() {
if ! is_crosscompile; then
# Rename the main go binaries as we don't want to clobber dev-lang/go
# when gcc-config runs. bug #567806
- if tc_version_is_at_least 5 && is_go ; then
+ if is_go ; then
for x in go gofmt; do
mv ${x} ${x}-${GCCMAJOR} || die
done
@@ -1848,9 +2107,9 @@ toolchain_src_install() {
rm -rf "${ED}"/usr/share/{man,info}
rm -rf "${D}"${DATAPATH}/{man,info}
else
- local cxx_mandir=$(find "${WORKDIR}/build/${CTARGET}/libstdc++-v3" -name man)
+ local cxx_mandir=$(find "${WORKDIR}/build/${CTARGET}/libstdc++-v3" -name man || die)
if [[ -d ${cxx_mandir} ]] ; then
- cp -r "${cxx_mandir}"/man? "${D}${DATAPATH}"/man/
+ cp -r "${cxx_mandir}"/man? "${D}${DATAPATH}"/man/ || die
fi
fi
@@ -1861,7 +2120,10 @@ toolchain_src_install() {
rm "${D}${DATAPATH}"/info/dir || die
fi
- # Prune empty dirs left behind
+ docompress "${DATAPATH}"/{info,man}
+
+ # Prune empty dirs left behind. It's fine not to die here as we may
+ # really have no empty dirs left.
find "${ED}" -depth -type d -delete 2>/dev/null
# libstdc++.la: Delete as it doesn't add anything useful: g++ itself
@@ -1879,8 +2141,6 @@ toolchain_src_install() {
# libgfortran.la: gfortran itself handles linkage correctly in the
# dynamic & static case (libgfortran.spec). bug #573302
# libgfortranbegin.la: Same as above, and it's an internal lib.
- # libmpx.la: gcc itself handles linkage correctly (libmpx.spec).
- # libmpxwrappers.la: See above.
# libitm.la: gcc itself handles linkage correctly (libitm.spec).
# libvtv.la: gcc itself handles linkage correctly.
# lib*san.la: Sanitizer linkage is handled internally by gcc, and they
@@ -1889,6 +2149,7 @@ toolchain_src_install() {
'(' \
-name libstdc++.la -o \
-name libstdc++fs.la -o \
+ -name libstdc++exp.la -o \
-name libsupc++.la -o \
-name libcc1.la -o \
-name libcc1plugin.la -o \
@@ -1897,22 +2158,20 @@ toolchain_src_install() {
-name 'libgomp-plugin-*.la' -o \
-name libgfortran.la -o \
-name libgfortranbegin.la -o \
- -name libmpx.la -o \
- -name libmpxwrappers.la -o \
-name libitm.la -o \
-name libvtv.la -o \
-name 'lib*san.la' \
- ')' -type f -delete
+ ')' -type f -delete || die
# Use gid of 0 because some stupid ports don't have
# the group 'root' set to gid 0. Send to /dev/null
# for people who are testing as non-root.
- chown -R 0:0 "${D}${LIBPATH}" 2>/dev/null
+ chown -R 0:0 "${D}${LIBPATH}" 2>/dev/null || die
# Installing gdb pretty-printers into gdb-specific location.
local py gdbdir=/usr/share/gdb/auto-load${LIBPATH}
- pushd "${D}${LIBPATH}" >/dev/null
- for py in $(find . -name '*-gdb.py') ; do
+ pushd "${D}${LIBPATH}" >/dev/null || die
+ for py in $(find . -name '*-gdb.py' || die) ; do
local multidir=${py%/*}
insinto "${gdbdir}/${multidir}"
@@ -1922,22 +2181,20 @@ toolchain_src_install() {
rm "${py}" || die
done
- popd >/dev/null
+ popd >/dev/null || die
# Don't scan .gox files for executable stacks - false positives
export QA_EXECSTACK="usr/lib*/go/*/*.gox"
export QA_WX_LOAD="usr/lib*/go/*/*.gox"
# Disable RANDMMAP so PCH works, bug #301299
- if tc_version_is_at_least 4.3 ; then
- pax-mark -r "${D}${PREFIX}/libexec/gcc/${CTARGET}/${GCC_CONFIG_VER}/cc1"
- pax-mark -r "${D}${PREFIX}/libexec/gcc/${CTARGET}/${GCC_CONFIG_VER}/cc1plus"
- fi
+ pax-mark -r "${ED}/libexec/gcc/${CTARGET}/${GCC_CONFIG_VER}/cc1"
+ pax-mark -r "${ED}/libexec/gcc/${CTARGET}/${GCC_CONFIG_VER}/cc1plus"
- # Disable MPROTECT so java works, bug #574808
- if is_gcj ; then
- pax-mark -m "${D}${PREFIX}/libexec/gcc/${CTARGET}/${GCC_CONFIG_VER}/ecj1"
- pax-mark -m "${D}${PREFIX}/${CTARGET}/gcc-bin/${GCC_CONFIG_VER}/gij"
+ if use test ; then
+ mkdir "${T}"/test-results || die
+ cd "${WORKDIR}"/build || die
+ find . -name \*.sum -exec cp --parents -v {} "${T}"/test-results \; || die
fi
}
@@ -1949,13 +2206,14 @@ gcc_movelibs() {
# move them to the compiler-specific CHOST internal dir. This is stuff
# that you want to link against when building tools rather than building
# code to run on the target.
- if tc_version_is_at_least 5 && is_crosscompile ; then
+ if is_crosscompile ; then
dodir "${HOSTLIBPATH#${EPREFIX}}"
mv "${ED}"/usr/$(get_libdir)/libcc1* "${D}${HOSTLIBPATH}" || die
fi
+
# libgccjit gets installed to /usr/lib, not /usr/$(get_libdir). Probably
# due to a bug in gcc build system.
- if is_jit ; then
+ if [[ ${PWD} == "${WORKDIR}"/build-jit ]] && is_jit ; then
dodir "${LIBPATH#${EPREFIX}}"
mv "${ED}"/usr/lib/libgccjit* "${D}${LIBPATH}" || die
fi
@@ -1983,21 +2241,13 @@ gcc_movelibs() {
removedirs="${removedirs} ${FROMDIR}"
FROMDIR=${D}${FROMDIR}
if [[ ${FROMDIR} != "${TODIR}" && -d ${FROMDIR} ]] ; then
- local files=$(find "${FROMDIR}" -maxdepth 1 ! -type d 2>/dev/null)
+ local files=$(find "${FROMDIR}" -maxdepth 1 ! -type d 2>/dev/null || die)
if [[ -n ${files} ]] ; then
mv ${files} "${TODIR}" || die
fi
fi
done
fix_libtool_libdir_paths "${LIBPATH}/${MULTIDIR}"
-
- # SLOT up libgcj.pc if it's available (and let gcc-config worry about links)
- FROMDIR="${PREFIX}/lib/${OS_MULTIDIR}"
- for x in "${D}${FROMDIR}"/pkgconfig/libgcj*.pc ; do
- [[ -f ${x} ]] || continue
- sed -i "/^libdir=/s:=.*:=${LIBPATH}/${MULTIDIR}:" "${x}" || die
- mv "${x}" "${D}${FROMDIR}"/pkgconfig/libgcj-${GCC_PV}.pc || die
- done
done
# We remove directories separately to avoid this case:
@@ -2007,6 +2257,7 @@ gcc_movelibs() {
for FROMDIR in ${removedirs} ; do
rmdir "${D}"${FROMDIR} >& /dev/null
done
+ # XXX: Intentionally no die here; this just removes empty dirs
find -depth "${ED}" -type d -exec rmdir {} + >& /dev/null
}
@@ -2016,13 +2267,13 @@ gcc_movelibs() {
fix_libtool_libdir_paths() {
local libpath="$1"
- pushd "${D}" >/dev/null
+ pushd "${D}" >/dev/null || die
- pushd "./${libpath}" >/dev/null
+ pushd "./${libpath}" >/dev/null || die
local dir="${PWD#${D%/}}"
local allarchives=$(echo *.la)
allarchives="\(${allarchives// /\\|}\)"
- popd >/dev/null
+ popd >/dev/null || die
# The libdir might not have any .la files. bug #548782
find "./${dir}" -maxdepth 1 -name '*.la' \
@@ -2034,18 +2285,18 @@ fix_libtool_libdir_paths() {
find "./${dir}/" -maxdepth 1 -name '*.la' \
-exec sed -i -e "/^dependency_libs=/s:/[^ ]*/${allarchives}:${libpath}/\1:g" {} + || die
- popd >/dev/null
+ popd >/dev/null || die
}
create_gcc_env_entry() {
dodir /etc/env.d/gcc
- local gcc_envd_base="/etc/env.d/gcc/${CTARGET}-${GCC_CONFIG_VER}"
+ local gcc_envd_base="/etc/env.d/gcc/${CTARGET}-${GCC_CONFIG_VER}"
local gcc_specs_file
local gcc_envd_file="${ED}${gcc_envd_base}"
if [[ -z $1 ]] ; then
# I'm leaving the following commented out to remind me that it
- # was an insanely -bad- idea. Stuff broke. GCC_SPECS isnt unset
+ # was an insanely -bad- idea. Stuff broke. GCC_SPECS isn't unset
# on chroot or in non-toolchain.eclass gcc ebuilds!
#gcc_specs_file="${LIBPATH}/specs"
gcc_specs_file=""
@@ -2096,71 +2347,31 @@ create_revdep_rebuild_entry() {
EOF
}
-copy_minispecs_gcc_specs() {
- # On gcc 6, we don't need minispecs
- if tc_version_is_at_least 6.0 ; then
- return 0
- fi
-
- # Setup the hardenedno* specs files and the vanilla specs file.
- if hardened_gcc_works ; then
- create_gcc_env_entry hardenednopiessp
- fi
- if hardened_gcc_works pie ; then
- create_gcc_env_entry hardenednopie
- fi
- if hardened_gcc_works ssp ; then
- create_gcc_env_entry hardenednossp
- fi
- create_gcc_env_entry vanilla
- insinto ${LIBPATH#${EPREFIX}}
- doins "${WORKDIR}"/specs/*.specs || die "failed to install specs"
- # Build system specs file which, if it exists, must be a complete set of
- # specs as it completely and unconditionally overrides the builtin specs.
- if ! tc_version_is_at_least 4.4 ; then
- $(XGCC) -dumpspecs > "${WORKDIR}"/specs/specs
- cat "${WORKDIR}"/build.specs >> "${WORKDIR}"/specs/specs
- doins "${WORKDIR}"/specs/specs || die "failed to install the specs file"
- fi
-}
-
-gcc_slot_java() {
- local x
-
- # Move Java headers to compiler-specific dir
- for x in "${D}${PREFIX}"/include/gc*.h "${D}${PREFIX}"/include/j*.h ; do
- [[ -f ${x} ]] && mv -f "${x}" "${D}${LIBPATH}"/include/
- done
- for x in gcj gnu java javax org ; do
- if [[ -d ${D}${PREFIX}/include/${x} ]] ; then
- dodir /${LIBPATH#${EPREFIX}}/include/${x}
- mv -f "${D}${PREFIX}"/include/${x}/* "${D}${LIBPATH}"/include/${x}/
- rm -rf "${D}${PREFIX}"/include/${x}
- fi
- done
-
- if [[ -d ${D}${PREFIX}/lib/security ]] || [[ -d ${D}${PREFIX}/$(get_libdir)/security ]] ; then
- dodir /${LIBPATH#${EPREFIX}}/security
- mv -f "${D}${PREFIX}"/lib*/security/* "${D}${LIBPATH}"/security
- rm -rf "${D}${PREFIX}"/lib*/security
+#---->> pkg_pre* <<----
+
+toolchain_pkg_preinst() {
+ if [[ ${MERGE_TYPE} != binary ]] && use test ; then
+ # Install the results as orphaned files so they can be compared across
+ # more versions even after this one is unmerged. Also useful for
+ # historical records and for tracking down regressions a while after
+ # they first appeared but were only recently reported.
+ einfo "Copying test results to ${GCC_TESTS_COMPARISON_DIR}/${SLOT}/${CHOST}.xfail for future comparison"
+ (
+ mkdir -p "${GCC_TESTS_COMPARISON_DIR}/${SLOT}" || die
+ cd "${T}"/test-results || die
+ # May not exist with test-fail-continue
+ if [[ -f "${T}"/${CHOST}.xfail ]] ; then
+ cp -v "${T}"/${CHOST}.xfail "${GCC_TESTS_COMPARISON_DIR}/${SLOT}" || die
+ fi
+ )
fi
-
- # Move random gcj files to compiler-specific directories
- for x in libgcj.spec logging.properties ; do
- x="${D}${PREFIX}/lib/${x}"
- [[ -f ${x} ]] && mv -f "${x}" "${D}${LIBPATH}"/
- done
-
- # Rename jar because it could clash with Kaffe's jar if this gcc is
- # primary compiler (aka doesn't have the -<version> extension)
- cd "${D}${BINPATH}"
- [[ -f jar ]] && mv -f jar gcj-jar
}
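
The copy above only stores the baseline; a later build would compare its fresh results against it. A minimal sketch of that comparison, assuming the same variables as the snippet above (the helper name is invented and not part of the eclass):

# Hypothetical helper: diff the fresh xfail list against the stored baseline.
_compare_previous_xfail() {
	local baseline="${GCC_TESTS_COMPARISON_DIR}/${SLOT}/${CHOST}.xfail"
	[[ -f ${baseline} && -f ${T}/${CHOST}.xfail ]] || return 0
	# A non-zero diff just means the expected-failure set changed; warn, don't die.
	diff -u "${baseline}" "${T}/${CHOST}.xfail" \
		|| ewarn "Expected-failure set changed since the last merged ${SLOT} build"
}
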
#---->> pkg_post* <<----
toolchain_pkg_postinst() {
do_gcc_config
+
if [[ ! ${ROOT} && -f ${EPREFIX}/usr/share/eselect/modules/compiler-shadow.eselect ]] ; then
eselect compiler-shadow update all
fi
@@ -2218,14 +2429,14 @@ do_gcc_config() {
ewarn "The currently selected specs-specific gcc config,"
ewarn "${current_specs}, doesn't exist anymore. This is usually"
ewarn "due to enabling/disabling hardened or switching to a version"
- ewarn "of gcc that doesnt create multiple specs files. The default"
+ ewarn "of gcc that doesn't create multiple specs files. The default"
ewarn "config will be used, and the previous preference forgotten."
use_specs=""
fi
target="${CTARGET}-${GCC_CONFIG_VER}${use_specs}"
else
- # The curent target is invalid. Attempt to switch to a valid one.
+ # The current target is invalid. Attempt to switch to a valid one.
# Blindly pick the latest version. bug #529608
# TODO: Should update gcc-config to accept `-l ${CTARGET}` rather than
# doing a partial grep like this.
@@ -2248,7 +2459,9 @@ should_we_gcc_config() {
local curr_branch_ver=$(ver_cut 1-2 ${curr_config_ver})
- if [[ ${curr_branch_ver} == ${GCC_BRANCH_VER} ]] ; then
+ if tc_use_major_version_only && [[ ${curr_config_ver} == ${GCCMAJOR} ]] ; then
+ return 0
+ elif ! tc_use_major_version_only && [[ ${curr_branch_ver} == ${GCC_BRANCH_VER} ]] ; then
return 0
else
# If we're installing a genuinely different compiler version,
@@ -2278,7 +2491,7 @@ should_we_gcc_config() {
#---->> support and misc functions <<----
# This is to make sure we don't accidentally try to enable support for a
-# language that doesnt exist. GCC 3.4 supports f77, while 4.0 supports f95, etc.
+# language that doesn't exist. GCC 3.4 supports f77, while 4.0 supports f95, etc.
#
# Also add a hook so special ebuilds (kgcc64) can control which languages
# exactly get enabled
@@ -2322,11 +2535,6 @@ is_fortran() {
_tc_use_if_iuse fortran
}
-is_gcj() {
- gcc-lang-supported java || return 1
- _tc_use_if_iuse cxx && _tc_use_if_iuse gcj
-}
-
is_go() {
gcc-lang-supported go || return 1
_tc_use_if_iuse cxx && _tc_use_if_iuse go
@@ -2357,6 +2565,16 @@ is_objcxx() {
_tc_use_if_iuse cxx && _tc_use_if_iuse objc++
}
+is_modula2() {
+ gcc-lang-supported m2 || return 1
+ _tc_use_if_iuse cxx && _tc_use_if_iuse modula2
+}
+
+is_rust() {
+ gcc-lang-supported rust || return 1
+ _tc_use_if_iuse rust
+}
+
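
These predicates follow the pattern of the existing is_* helpers: they check both that the front end is available for this GCC and that the user requested it. An illustrative sketch (not eclass-provided) of how such predicates typically feed the configure language list:

# Hypothetical helper showing the usual consumption pattern of is_* checks.
gather_enabled_langs() {
	local langs="c"
	is_cxx     && langs+=",c++"
	is_fortran && langs+=",fortran"
	is_modula2 && langs+=",m2"
	is_rust    && langs+=",rust"
	echo "--enable-languages=${langs}"
}
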
# Grab a variable from the build system (taken from linux-info.eclass)
get_make_var() {
local var=$1 makefile=${2:-${WORKDIR}/build/Makefile}
@@ -2366,91 +2584,17 @@ get_make_var() {
XGCC() { get_make_var GCC_FOR_TARGET ; }
-# The gentoo pie-ssp patches allow for 3 configurations:
-# 1) PIE+SSP by default
-# 2) PIE by default
-# 3) SSP by default
-hardened_gcc_works() {
- if [[ $1 == "pie" ]] ; then
- # $gcc_cv_ld_pie is unreliable as it simply take the output of
- # `ld --help | grep -- -pie`, that reports the option in all cases, also if
- # the loader doesn't actually load the resulting executables.
-
- want_pie || return 1
- _tc_use_if_iuse nopie && return 1
- hardened_gcc_is_stable pie
- return $?
- elif [[ $1 == "ssp" ]] ; then
- [[ -n ${SPECS_VER} ]] || return 1
- _tc_use_if_iuse nossp && return 1
- hardened_gcc_is_stable ssp
- return $?
- else
- # laziness ;)
- hardened_gcc_works pie || return 1
- hardened_gcc_works ssp || return 1
- return 0
- fi
-}
-
-hardened_gcc_is_stable() {
- local tocheck
- if [[ $1 == "pie" ]] ; then
- tocheck=${PIE_GLIBC_STABLE}
- elif [[ $1 == "ssp" ]] ; then
- tocheck=${SSP_STABLE}
- else
- die "hardened_gcc_stable needs to be called with pie or ssp"
- fi
-
- has $(tc-arch) ${tocheck} && return 0
- return 1
-}
-
-want_minispecs() {
- # On gcc 6, we don't need minispecs
- if tc_version_is_at_least 6.0 ; then
- return 0
- fi
- if tc_version_is_at_least 4.3.2 && _tc_use_if_iuse hardened ; then
- if ! want_pie ; then
- ewarn "PIE_VER or SPECS_VER is not defined in the GCC ebuild."
- elif use vanilla ; then
- ewarn "You will not get hardened features if you have the vanilla USE-flag."
- elif _tc_use_if_iuse nopie && _tc_use_if_iuse nossp ; then
- ewarn "You will not get hardened features if you have the nopie and nossp USE-flag."
- elif ! hardened_gcc_works ; then
- ewarn "Your $(tc-arch) arch is not supported."
- else
- return 0
- fi
- ewarn "Hope you know what you are doing. Hardened will not work."
- return 0
- fi
- return 1
-}
-
-want_pie() {
- ! _tc_use_if_iuse hardened && [[ -n ${PIE_VER} ]] \
- && _tc_use_if_iuse nopie && return 1
- [[ -n ${PIE_VER} ]] && [[ -n ${SPECS_VER} ]] && return 0
- tc_version_is_at_least 4.3.2 && return 1
- [[ -z ${PIE_VER} ]] && return 1
- _tc_use_if_iuse nopie || return 0
- return 1
-}
-
has toolchain_death_notice ${EBUILD_DEATH_HOOKS} || EBUILD_DEATH_HOOKS+=" toolchain_death_notice"
toolchain_death_notice() {
if [[ -e "${WORKDIR}"/build ]] ; then
pushd "${WORKDIR}"/build >/dev/null
(echo '' | $(tc-getCC ${CTARGET}) ${CFLAGS} -v -E - 2>&1) > gccinfo.log
[[ -e "${T}"/build.log ]] && cp "${T}"/build.log .
- tar jcf "${WORKDIR}"/gcc-build-logs.tar.bz2 \
+ tar -acf "${WORKDIR}"/gcc-build-logs.tar.xz \
gccinfo.log build.log $(find -name config.log)
rm gccinfo.log build.log
eerror
- eerror "Please include ${WORKDIR}/gcc-build-logs.tar.bz2 in your bug report."
+ eerror "Please include ${WORKDIR}/gcc-build-logs.tar.xz in your bug report."
eerror
popd >/dev/null
fi
@@ -2458,9 +2602,6 @@ toolchain_death_notice() {
fi
-EXPORT_FUNCTIONS pkg_pretend pkg_setup src_unpack src_prepare src_configure \
- src_compile src_test src_install pkg_postinst pkg_postrm
-
# Note [implicitly enabled flags]
# -------------------------------
# Usually configure-based packages handle explicit feature requests
@@ -2478,3 +2619,5 @@ EXPORT_FUNCTIONS pkg_pretend pkg_setup src_unpack src_prepare src_configure \
# Thus safer way to enable/disable the feature is to rely on implicit
# enabled-by-default state:
# econf $(usex foo '' --disable-foo)
+
+EXPORT_FUNCTIONS pkg_pretend pkg_setup src_unpack src_prepare src_configure src_compile src_test src_install pkg_preinst pkg_postinst pkg_postrm
diff --git a/eclass/tree-sitter-grammar.eclass b/eclass/tree-sitter-grammar.eclass
index 69ad467f8be7..b5e020065547 100644
--- a/eclass/tree-sitter-grammar.eclass
+++ b/eclass/tree-sitter-grammar.eclass
@@ -1,35 +1,33 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: tree-sitter-grammar.eclass
# @MAINTAINER:
# Matthew Smith <matthew@gentoo.org>
# Nick Sarnie <sarnex@gentoo.org>
+# Arthur Zamarin <arthurzam@gentoo.org>
# @AUTHOR:
# Matthew Smith <matthew@gentoo.org>
# @SUPPORTED_EAPIS: 8
# @BLURB: Common functions and variables for Tree Sitter grammars
-inherit edo
-
-if [[ -z ${_TREE_SITTER_GRAMMAR_ECLASS} ]]; then
-_TREE_SITTER_GRAMMAR_ECLASS=1
-
case ${EAPI} in
8) ;;
- *) die "EAPI=${EAPI:-0} is not supported" ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-inherit multilib toolchain-funcs
+if [[ -z ${_TREE_SITTER_GRAMMAR_ECLASS} ]]; then
+_TREE_SITTER_GRAMMAR_ECLASS=1
+
+inherit edo multilib toolchain-funcs
SRC_URI="https://github.com/tree-sitter/${PN}/archive/${TS_PV:-v${PV}}.tar.gz
-> ${P}.tar.gz"
-S="${WORKDIR}"/${PN}-${TS_PV:-${PV}}/src
-
-# Needed for tree_sitter/parser.h
-DEPEND="dev-libs/tree-sitter"
+S="${WORKDIR}"/${PN}-${TS_PV:-${PV}}
-EXPORT_FUNCTIONS src_compile src_install
+BDEPEND+=" test? ( dev-util/tree-sitter-cli )"
+IUSE+=" test"
+RESTRICT+=" !test? ( test )"
# @ECLASS_VARIABLE: TS_PV
# @PRE_INHERIT
@@ -38,6 +36,44 @@ EXPORT_FUNCTIONS src_compile src_install
# Used to override upstream tag name if tagged differently, e.g. most releases
# are v${PV} but some are tagged as rust-${PV}.
+# @ECLASS_VARIABLE: TS_BINDINGS
+# @PRE_INHERIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array of binding languages to build. Currently only "python" is supported.
+
+for _BINDING in "${TS_BINDINGS[@]}"; do
+ case ${_BINDING} in
+ python)
+ DISTUTILS_EXT=1
+ DISTUTILS_OPTIONAL=1
+ DISTUTILS_USE_PEP517=setuptools
+ PYTHON_COMPAT=( python3_{10..12} )
+ inherit distutils-r1
+
+ IUSE+=" python"
+ REQUIRED_USE+=" python? ( ${PYTHON_REQUIRED_USE} )"
+
+ DEPEND+=" python? (
+ ${PYTHON_DEPS}
+ )"
+ RDEPEND+=" python? (
+ ${PYTHON_DEPS}
+ >=dev-python/tree-sitter-0.21.0[${PYTHON_USEDEP}]
+ )"
+ BDEPEND+=" python? (
+ ${PYTHON_DEPS}
+ ${DISTUTILS_DEPS}
+ dev-python/wheel[${PYTHON_USEDEP}]
+ )"
+ ;;
+ *)
+ die "Unknown binding: ${_BINDING}"
+ ;;
+ esac
+done
+unset _BINDING
+
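
A minimal consumer sketch (category, package name, and metadata are invented): TS_BINDINGS must be set before the inherit, because the loop above extends IUSE and the dependency variables at inherit time.

# hypothetical dev-libs/tree-sitter-foo-1.2.3.ebuild
EAPI=8
TS_BINDINGS=( python )
inherit tree-sitter-grammar

DESCRIPTION="Foo grammar for the Tree Sitter parsing library"
LICENSE="MIT"
SLOT="0"
KEYWORDS="~amd64"
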
# @FUNCTION: _get_tsg_abi_ver
# @INTERNAL
# @DESCRIPTION:
@@ -47,52 +83,141 @@ _get_tsg_abi_ver() {
# This sed script finds ABI definition string in parser source file,
# substitutes all the string until the ABI number, and prints remains
# (the ABI number itself)
- sed -n 's/#define LANGUAGE_VERSION //p' "${S}"/parser.c ||
+ sed -n 's/#define LANGUAGE_VERSION //p' "${S}"/src/parser.c ||
die "Unable to extract ABI version for this grammar"
}
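
For illustration, the sed call simply strips the macro name, so a parser.c containing the definition below makes the function print the ABI number (14 is an example value):

$ grep '#define LANGUAGE_VERSION' src/parser.c
#define LANGUAGE_VERSION 14
$ sed -n 's/#define LANGUAGE_VERSION //p' src/parser.c
14
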
-# @FUNCTION: tree-sitter-grammar_src_compile
-# @DESCRIPTION:
-# Compiles the Tree Sitter parser as a shared library.
-tree-sitter-grammar_src_compile() {
+tree-sitter-grammar_src_prepare() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ default
+
+ local binding
+ for binding in "${TS_BINDINGS[@]}"; do
+ case ${binding} in
+ python)
+ use python && distutils-r1_src_prepare
+ ;;
+ esac
+ done
+}
+
+tree-sitter-grammar_src_configure() {
debug-print-function ${FUNCNAME} "${@}"
+ local binding
+ for binding in "${TS_BINDINGS[@]}"; do
+ case ${binding} in
+ python)
+ use python && distutils-r1_src_configure
+ ;;
+ esac
+ done
+}
+
+# @FUNCTION: _tree-sitter-grammar_legacy_compile
+# @INTERNAL
+# @DESCRIPTION:
+# Compiles the Tree Sitter parser as a shared library, the legacy way.
+_tree-sitter-grammar_legacy_compile() {
+ cd "${S}/src" || die
+
# Grammars always contain parser.c, and sometimes a scanner.c,
# or scanner.cc.
tc-export CC CXX
- export CFLAGS="${CFLAGS} -fPIC"
- export CXXFLAGS="${CXXFLAGS} -fPIC"
+ # We want to use the bundled parser.h, not anything lurking on the system, hence -I
+ # See https://github.com/tree-sitter/tree-sitter-bash/issues/199#issuecomment-1694416505
+ local -x CFLAGS="${CFLAGS} -fPIC -I. -Itree_sitter"
+ local -x CXXFLAGS="${CXXFLAGS} -fPIC -I. -Itree_sitter"
local objects=( parser.o )
- if [[ -f "${S}"/scanner.c || -f "${S}"/scanner.cc ]]; then
+ if [[ -f "${S}"/src/scanner.c || -f "${S}"/src/scanner.cc ]]; then
objects+=( scanner.o )
fi
emake "${objects[@]}"
local link="$(tc-getCC) ${CFLAGS}"
- if [[ -f "${S}/scanner.cc" ]]; then
+ if [[ -f "${S}/src/scanner.cc" ]]; then
link="$(tc-getCXX) ${CXXFLAGS}"
fi
local soname=lib${PN}$(get_libname $(_get_tsg_abi_ver))
+
+ local soname_args="-Wl,--soname=${soname}"
+ if [[ ${CHOST} == *darwin* ]] ; then
+ soname_args="-Wl,-install_name,${EPREFIX}/usr/$(get_libdir)/${soname}"
+ fi
+
edo ${link} ${LDFLAGS} \
-shared \
*.o \
- -Wl,--soname=${soname} \
- -o "${WORKDIR}"/${soname} || die
+ "${soname_args}" \
+ -o "${WORKDIR}"/${soname}
}
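
As a concrete picture (grammar name and ABI number are hypothetical), for PN=tree-sitter-bash with ABI 14 on a glibc host the block above boils down to roughly:

cc ${CFLAGS} -fPIC -I. -Itree_sitter -c parser.c scanner.c
cc ${CFLAGS} -fPIC -I. -Itree_sitter ${LDFLAGS} -shared parser.o scanner.o \
	-Wl,--soname=libtree-sitter-bash.so.14 \
	-o "${WORKDIR}"/libtree-sitter-bash.so.14
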
-# @FUNCTION: tree-sitter-grammar_src_install
+tree-sitter-grammar_src_compile() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ # legacy grammars don't have a pyproject.toml
+ if [[ -f "${S}/pyproject.toml" ]]; then
+ sed -e "/SONAME_MINOR :=/s/:=.*$/:= $(_get_tsg_abi_ver)/" -i "${S}/Makefile" || die
+ emake \
+ STRIP="" \
+ PREFIX="${EPREFIX}/usr" \
+ LIBDIR="${EPREFIX}/usr/$(get_libdir)"
+ else
+ _tree-sitter-grammar_legacy_compile
+ fi
+
+ local binding
+ for binding in "${TS_BINDINGS[@]}"; do
+ case ${binding} in
+ python)
+ use python && distutils-r1_src_compile
+ ;;
+ esac
+ done
+}
+
+# @FUNCTION: tree-sitter-grammar_src_test
# @DESCRIPTION:
-# Installs the Tree Sitter parser library.
+# Runs the Tree Sitter parser's test suite.
+# See: https://tree-sitter.github.io/tree-sitter/creating-parsers#command-test
+tree-sitter-grammar_src_test() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ tree-sitter test || die "Test suite failed"
+}
+
tree-sitter-grammar_src_install() {
debug-print-function ${FUNCNAME} "${@}"
- local soname=lib${PN}$(get_libname $(_get_tsg_abi_ver))
+ # legacy grammars don't have a pyproject.toml
+ if [[ -f "${S}/pyproject.toml" ]]; then
+ emake \
+ PREFIX="${EPREFIX}/usr" \
+ LIBDIR="${EPREFIX}/usr/$(get_libdir)" \
+ DESTDIR="${D}/" \
+ install
+ find "${D}" -name '*.a' -delete || die "failed to remove static libraries"
+ else
+ local soname=lib${PN}$(get_libname $(_get_tsg_abi_ver))
+
+ dolib.so "${WORKDIR}/${soname}"
+ dosym "${soname}" /usr/$(get_libdir)/lib${PN}$(get_libname)
+ fi
- dolib.so "${WORKDIR}/${soname}"
- dosym "${soname}" \
- /usr/$(get_libdir)/lib${PN}$(get_libname)
+ local binding
+ for binding in "${TS_BINDINGS[@]}"; do
+ case ${binding} in
+ python)
+ use python && distutils-r1_src_install
+ ;;
+ esac
+ done
}
+
fi
+
+EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
diff --git a/eclass/udev.eclass b/eclass/udev.eclass
index 073e5d8acbc9..ac94f98221aa 100644
--- a/eclass/udev.eclass
+++ b/eclass/udev.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2022 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: udev.eclass
@@ -26,6 +26,14 @@
# # udev_dorules contrib/99-foomatic
# # udev_newrules contrib/98-foomatic 99-foomatic
# }
+#
+# pkg_postinst() {
+# udev_reload
+# }
+#
+# pkg_postrm() {
+# udev_reload
+# }
# @CODE
case ${EAPI} in
@@ -53,7 +61,7 @@ _udev_get_udevdir() {
local -x PKG_CONFIG_FDO_SYSROOT_RULES=1
if $($(tc-getPKG_CONFIG) --exists udev); then
local udevdir="$($(tc-getPKG_CONFIG) --variable=udevdir udev)"
- echo "${udevdir#${EPREFIX%/}}"
+ echo "${udevdir#${EPREFIX}}"
else
echo /lib/udev
fi
@@ -110,7 +118,9 @@ udev_newrules() {
# @FUNCTION: udev_reload
# @DESCRIPTION:
-# Run udevadm control --reload to refresh rules and databases
+# Run "udevadm control --reload" to refresh rules and databases.
+# Should be called from pkg_postinst and pkg_postrm in packages which install
+# udev rules or hwdb data.
udev_reload() {
if [[ -n ${ROOT%/} ]]; then
return 0
diff --git a/eclass/unpacker.eclass b/eclass/unpacker.eclass
index f6e83c53bf23..2957ca02d3f4 100644
--- a/eclass/unpacker.eclass
+++ b/eclass/unpacker.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: unpacker.eclass
# @MAINTAINER:
# base-system@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: helpers for extraneous file formats and consistent behavior across EAPIs
# @DESCRIPTION:
# Some extraneous file formats are not part of PMS, or are only in certain
@@ -15,22 +15,23 @@
# - merge rpm unpacking
# - support partial unpacks?
-case ${EAPI:-0} in
- [5678]) ;;
+case ${EAPI} in
+ 6|7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_UNPACKER_ECLASS} ]]; then
_UNPACKER_ECLASS=1
-inherit toolchain-funcs
+inherit multiprocessing toolchain-funcs
# @ECLASS_VARIABLE: UNPACKER_BZ2
# @USER_VARIABLE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Utility to use to decompress bzip2 files. Will dynamically pick between
-# `pbzip2` and `bzip2`. Make sure your choice accepts the "-dc" options.
+# `lbzip2`, `pbzip2`, and `bzip2`. Make sure your choice accepts the "-dc"
+# options.
# Note: this is meant for users to set, not ebuilds.
# @ECLASS_VARIABLE: UNPACKER_LZIP
@@ -38,7 +39,7 @@ inherit toolchain-funcs
# @DEFAULT_UNSET
# @DESCRIPTION:
# Utility to use to decompress lzip files. Will dynamically pick between
-# `plzip`, `pdlzip` and `lzip`. Make sure your choice accepts the "-dc" options.
+# `xz`, `plzip`, `pdlzip`, and `lzip`. Make sure your choice accepts the "-dc" options.
# Note: this is meant for users to set, not ebuilds.
# for internal use only (unpack_pdv and unpack_makeself)
@@ -121,7 +122,7 @@ unpack_pdv() {
local tmpfile="${T}/${FUNCNAME}"
tail -c +$((${tailskip}+1)) ${src} 2>/dev/null | head -c 512 > "${tmpfile}"
- local iscompressed=$(file -b "${tmpfile}")
+ local iscompressed=$(file -S -b "${tmpfile}")
if [[ ${iscompressed:0:8} == "compress" ]] ; then
iscompressed=1
mv "${tmpfile}"{,.Z}
@@ -129,14 +130,14 @@ unpack_pdv() {
else
iscompressed=0
fi
- local istar=$(file -b "${tmpfile}")
+ local istar=$(file -S -b "${tmpfile}")
if [[ ${istar:0:9} == "POSIX tar" ]] ; then
istar=1
else
istar=0
fi
- #for some reason gzip dies with this ... dd cant provide buffer fast enough ?
+ # For some reason gzip dies with this ... dd can't provide buffer fast enough ?
#dd if=${src} ibs=${metaskip} count=1 \
# | dd ibs=${tailskip} skip=1 \
# | gzip -dc \
@@ -217,6 +218,14 @@ unpack_makeself() {
skip=$(head -n ${skip} "${src}" | wc -c)
exe="dd"
;;
+ 2.4.5)
+ # e.g.: skip="713"
+ skip=$(
+ sed -n -e '/^skip=/{s:skip="\(.*\)":\1:p;q}' "${src}"
+ )
+ skip=$(head -n "${skip}" "${src}" | wc -c)
+ exe="dd"
+ ;;
*)
eerror "I'm sorry, but I was unable to support the Makeself file."
eerror "The version I detected was '${ver}'."
@@ -230,34 +239,47 @@ unpack_makeself() {
case ${exe} in
tail) exe=( tail -n +${skip} "${src}" );;
dd) exe=( dd ibs=${skip} skip=1 if="${src}" );;
- *) die "makeself cant handle exe '${exe}'"
+ *) die "makeself can't handle exe '${exe}'"
esac
# lets grab the first few bytes of the file to figure out what kind of archive it is
- local filetype tmpfile="${T}/${FUNCNAME}"
- "${exe[@]}" 2>/dev/null | head -c 512 > "${tmpfile}"
- filetype=$(file -b "${tmpfile}") || die
+ local decomp= filetype suffix
+ filetype=$("${exe[@]}" 2>/dev/null | head -c 512 | file -S -b -) || die
case ${filetype} in
*tar\ archive*)
- "${exe[@]}" | tar --no-same-owner -xf -
+ decomp=cat
;;
bzip2*)
- "${exe[@]}" | bzip2 -dc | tar --no-same-owner -xf -
+ suffix=bz2
;;
gzip*)
- "${exe[@]}" | tar --no-same-owner -xzf -
+ suffix=gz
;;
compress*)
- "${exe[@]}" | gunzip | tar --no-same-owner -xf -
+ suffix=z
;;
XZ*)
- "${exe[@]}" | unxz | tar --no-same-owner -xf -
+ suffix=xz
+ ;;
+ Zstandard*)
+ suffix=zst
+ ;;
+ lzop*)
+ suffix=lzo
+ ;;
+ LZ4*)
+ suffix=lz4
+ ;;
+ "ASCII text"*)
+ decomp='base64 -d'
;;
*)
- eerror "Unknown filetype \"${filetype}\" ?"
- false
+ die "Unknown filetype \"${filetype}\", for makeself ${src##*/} ('${ver}' +${skip})"
;;
esac
+
+ [[ -z ${decomp} ]] && decomp=$(_unpacker_get_decompressor ".${suffix}")
+ "${exe[@]}" | ${decomp} | tar --no-same-owner -xf -
assert "failure unpacking (${filetype}) makeself ${src##*/} ('${ver}' +${skip})"
}
@@ -272,31 +294,39 @@ unpack_deb() {
unpack_banner "${deb}"
- # on AIX ar doesn't work out as their ar used a different format
- # from what GNU ar (and thus what .deb files) produce
- if [[ -n ${EPREFIX} ]] ; then
- {
- read # global header
- [[ ${REPLY} = "!<arch>" ]] || die "${deb} does not seem to be a deb archive"
- local f timestamp uid gid mode size magic
- while read f timestamp uid gid mode size magic ; do
- [[ -n ${f} && -n ${size} ]] || continue # ignore empty lines
- if [[ ${f} = "data.tar"* ]] ; then
- head -c "${size}" > "${f}"
- else
- head -c "${size}" > /dev/null # trash it
- fi
- done
- } < "${deb}"
- else
- $(tc-getBUILD_AR) x "${deb}" || die
- fi
-
- unpacker ./data.tar*
-
- # Clean things up #458658. No one seems to actually care about
- # these, so wait until someone requests to do something else ...
- rm -f debian-binary {control,data}.tar*
+ {
+ # on AIX ar doesn't work out as their ar used a different format
+ # from what GNU ar (and thus what .deb files) produce
+ if [[ -n ${EPREFIX} ]] ; then
+ {
+ read # global header
+ [[ ${REPLY} = "!<arch>" ]] || die "${deb} does not seem to be a deb archive"
+ local f timestamp uid gid mode size magic
+ while read f timestamp uid gid mode size magic ; do
+ [[ -n ${f} && -n ${size} ]] || continue # ignore empty lines
+ # GNU ar uses / as filename terminator (and .deb permits that)
+ f=${f%/}
+ if [[ ${f} = "data.tar"* ]] ; then
+ local decomp=$(_unpacker_get_decompressor "${f}")
+ head -c "${size}" | ${decomp:-cat}
+ assert "unpacking ${f} from ${deb} failed"
+ break
+ else
+ head -c "${size}" > /dev/null # trash it
+ fi
+ done
+ } < "${deb}"
+ else
+ local f=$(
+ $(tc-getBUILD_AR) t "${deb}" | grep ^data.tar
+ assert "data not found in ${deb}"
+ )
+ local decomp=$(_unpacker_get_decompressor "${f}")
+ $(tc-getBUILD_AR) p "${deb}" "${f}" | ${decomp:-cat}
+ assert "unpacking ${f} from ${deb} failed"
+ fi
+ } | tar --no-same-owner -xf -
+ assert "unpacking ${deb} failed"
}
# @FUNCTION: unpack_cpio
@@ -344,8 +374,11 @@ unpack_7z() {
local p7z=$(find_unpackable_file "$1")
unpack_banner "${p7z}"
- local output="$(7z x -y "${p7z}")"
+ # warning: putting local and command substitution in a single call
+ # discards the exit status!
+ local output
+ output="$(7z x -y "${p7z}")"
if [ $? -ne 0 ]; then
echo "${output}" >&2
die "unpacking ${p7z} failed (arch=unpack_7z)"
@@ -376,6 +409,88 @@ unpack_lha() {
lha xfq "${lha}" || die "unpacking ${lha} failed (arch=unpack_lha)"
}
+# @FUNCTION: _unpacker_get_decompressor
+# @INTERNAL
+# @USAGE: <filename>
+# @DESCRIPTION:
+# Get decompressor command for specified filename.
+_unpacker_get_decompressor() {
+ case ${1} in
+ *.bz2|*.tbz|*.tbz2)
+ local bzcmd=${PORTAGE_BZIP2_COMMAND:-$(
+ type -P lbzip2 || type -P pbzip2 || type -P bzip2
+ )}
+ local bzuncmd=${PORTAGE_BUNZIP2_COMMAND:-${bzcmd} -d}
+ : "${UNPACKER_BZ2:=${bzuncmd}}"
+ echo "${UNPACKER_BZ2} -c"
+ ;;
+ *.z|*.gz|*.tgz)
+ echo "gzip -dc" ;;
+ *.lzma|*.xz|*.txz)
+ echo "xz -T$(makeopts_jobs) -dc" ;;
+ *.lz)
+ find_lz_unpacker() {
+ local has_version_arg="-b"
+
+ [[ ${EAPI} == 6 ]] && has_version_arg="--host-root"
+ if has_version "${has_version_arg}" ">=app-arch/xz-utils-5.4.0" ; then
+ echo xz
+ return
+ fi
+
+ local x
+ for x in plzip pdlzip lzip ; do
+ type -P ${x} && break
+ done
+ }
+
+ : "${UNPACKER_LZIP:=$(find_lz_unpacker)}"
+ echo "${UNPACKER_LZIP} -dc" ;;
+ *.zst)
+ echo "zstd -dc" ;;
+ *.lz4)
+ echo "lz4 -dc" ;;
+ *.lzo)
+ echo "lzop -dc" ;;
+ esac
+}
+
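+# A short usage sketch mirroring the call sites added elsewhere in this patch
+# (the file name is invented); callers fall back to cat when nothing matches:
+#
+#   local decomp=$(_unpacker_get_decompressor "image.tar.zst")
+#   ${decomp:-cat} < image.tar.zst | tar --no-same-owner -xf -
+#   assert "unpacking image.tar.zst failed"
+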
+# @FUNCTION: unpack_gpkg
+# @USAGE: <gpkg file>
+# @DESCRIPTION:
+# Unpack the image subarchive of a GPKG package on-the-fly, preserving
+# the original directory structure (i.e. into <gpkg-dir>/image).
+unpack_gpkg() {
+ [[ $# -eq 1 ]] || die "Usage: ${FUNCNAME} <file>"
+
+ local gpkg=$(find_unpackable_file "$1")
+ unpack_banner "${gpkg}"
+
+ local l images=()
+ while read -r l; do
+ case ${l} in
+ */image.tar*.sig)
+ ;;
+ */image.tar*)
+ images+=( "${l}" )
+ ;;
+ esac
+ done < <(tar -tf "${gpkg}" || die "unable to list ${gpkg}")
+
+ if [[ ${#images[@]} -eq 0 ]]; then
+ die "No image.tar found in ${gpkg}"
+ elif [[ ${#images[@]} -gt 1 ]]; then
+ die "More than one image.tar found in ${gpkg}"
+ fi
+
+ local decomp=$(_unpacker_get_decompressor "${images[0]}")
+ local dirname=${images[0]%/*}
+ mkdir -p "${dirname}" || die
+ tar -xOf "${gpkg}" "${images[0]}" | ${decomp:-cat} |
+ tar --no-same-owner -C "${dirname}" -xf -
+ assert "Unpacking ${gpkg} failed"
+}
+
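+# Usage sketch (the package name is invented): a GPKG archive carries
+# <pkg-dir>/image.tar.<ext>, so unpacking leaves the image tree under that
+# directory in the current working directory:
+#
+#   unpack_gpkg foo-1.2.3-1.gpkg.tar
+#   ls foo-1.2.3-1/image/    # the package's installed-image contents
+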
# @FUNCTION: _unpacker
# @USAGE: <one archive to unpack>
# @INTERNAL
@@ -386,32 +501,17 @@ _unpacker() {
[[ $# -eq 1 ]] || die "Usage: ${FUNCNAME} <file>"
local a=$1
- local m=$(echo "${a}" | tr '[:upper:]' '[:lower:]')
+ local m=${a,,}
a=$(find_unpackable_file "${a}")
# first figure out the decompression method
- local comp=""
- case ${m} in
- *.bz2|*.tbz|*.tbz2)
- local bzcmd=${PORTAGE_BZIP2_COMMAND:-$(type -P pbzip2 || type -P bzip2)}
- local bzuncmd=${PORTAGE_BUNZIP2_COMMAND:-${bzcmd} -d}
- : ${UNPACKER_BZ2:=${bzuncmd}}
- comp="${UNPACKER_BZ2} -c"
- ;;
- *.z|*.gz|*.tgz)
- comp="gzip -dc" ;;
- *.lzma|*.xz|*.txz)
- comp="xz -dc" ;;
- *.lz)
- : ${UNPACKER_LZIP:=$(type -P plzip || type -P pdlzip || type -P lzip)}
- comp="${UNPACKER_LZIP} -dc" ;;
- *.zst)
- comp="zstd -dfc" ;;
- esac
+ local comp=$(_unpacker_get_decompressor "${m}")
# then figure out if there are any archiving aspects
local arch=""
case ${m} in
+ *.gpkg.tar)
+ arch="unpack_gpkg" ;;
*.tgz|*.tbz|*.tbz2|*.txz|*.tar.*|*.tar)
arch="tar --no-same-owner -xof" ;;
*.cpio.*|*.cpio)
@@ -437,13 +537,13 @@ _unpacker() {
esac
# 7z, rar and lha/lzh are handled by package manager in EAPI < 8
- if [[ ${EAPI} != [567] ]]; then
+ if [[ ${EAPI} != [67] ]]; then
case ${m} in
*.7z)
arch="unpack_7z" ;;
- *.rar|*.RAR)
+ *.rar)
arch="unpack_rar" ;;
- *.LHA|*.LHa|*.lha|*.lzh)
+ *.lha|*.lzh)
arch="unpack_lha" ;;
esac
fi
@@ -459,11 +559,11 @@ _unpacker() {
if [[ -z ${arch} ]] ; then
# Need to decompress the file into $PWD #408801
local _a=${a%.*}
- ${comp} "${a}" > "${_a##*/}"
+ ${comp} < "${a}" > "${_a##*/}"
elif [[ -z ${comp} ]] ; then
${arch} "${a}"
else
- ${comp} "${a}" | ${arch} -
+ ${comp} < "${a}" | ${arch} -
fi
assert "unpacking ${a} failed (comp=${comp} arch=${arch})"
@@ -496,7 +596,8 @@ unpacker_src_unpack() {
#
# Note: USE flags are not yet handled.
unpacker_src_uri_depends() {
- local uri deps d
+ local uri
+ local -A deps
if [[ $# -eq 0 ]] ; then
# Disable path expansion for USE conditionals. #654960
@@ -506,30 +607,41 @@ unpacker_src_uri_depends() {
fi
for uri in "$@" ; do
- case ${uri} in
+ case ${uri,,} in
*.cpio.*|*.cpio)
- d="app-arch/cpio" ;;
- *.rar|*.RAR)
- d="app-arch/unrar" ;;
+ deps[cpio]="app-alternatives/cpio" ;;
+ *.rar)
+ deps[rar]="app-arch/unrar" ;;
*.7z)
- d="app-arch/p7zip" ;;
+ deps[7z]="app-arch/p7zip" ;;
*.xz)
- d="app-arch/xz-utils" ;;
+ deps[xz]="app-arch/xz-utils" ;;
*.zip)
- d="app-arch/unzip" ;;
+ deps[zip]="app-arch/unzip" ;;
*.lz)
- d="|| ( app-arch/plzip app-arch/pdlzip app-arch/lzip )" ;;
+ deps[lz]="
+ || (
+ >=app-arch/xz-utils-5.4.0
+ app-arch/plzip
+ app-arch/pdlzip
+ app-arch/lzip
+ )
+ "
+ ;;
*.zst)
- d="app-arch/zstd" ;;
- *.LHA|*.LHa|*.lha|*.lzh)
- d="app-arch/lha" ;;
+ deps[zst]="app-arch/zstd" ;;
+ *.lha|*.lzh)
+ deps[lhah]="app-arch/lha" ;;
+ *.lz4)
+ deps[lz4]="app-arch/lz4" ;;
+ *.lzo)
+ deps[lzo]="app-arch/lzop" ;;
esac
- deps+=" ${d}"
done
- echo "${deps}"
+ echo "${deps[*]}"
}
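
Illustration only (the URIs are invented): keying the associative array on the archive type means each tool is emitted once, however many matching files appear in SRC_URI:

unpacker_src_uri_depends "https://example.org/a.tar.xz" "https://example.org/b.xz"
# prints: app-arch/xz-utils
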
-EXPORT_FUNCTIONS src_unpack
-
fi
+
+EXPORT_FUNCTIONS src_unpack
diff --git a/eclass/user-info.eclass b/eclass/user-info.eclass
index 5550e4f08eeb..1cc7b8250309 100644
--- a/eclass/user-info.eclass
+++ b/eclass/user-info.eclass
@@ -1,15 +1,15 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: user-info.eclass
# @MAINTAINER:
# base-system@gentoo.org (Linux)
# Michał Górny <mgorny@gentoo.org> (NetBSD)
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Read-only access to user and group information
case ${EAPI} in
- 6|7|8) ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -64,7 +64,7 @@ egetent() {
getent "${db}" "${key}"
else
if [[ ${key} =~ ^[[:digit:]]+$ ]]; then
- grep -E "^([^:]*:){2}${key}" "${ROOT}/etc/${db}"
+ grep -E "^([^:]*:){2}${key}:" "${ROOT}/etc/${db}"
else
grep "^${key}:" "${ROOT}/etc/${db}"
fi
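
The added trailing colon matters for numeric keys: without it the pattern also matches any UID/GID that merely starts with the requested digits. Illustration with invented passwd entries:

$ grep -E '^([^:]*:){2}10' passwd     # old pattern: matches UID 10 and 100
games:x:10:100::/usr/games:/bin/false
apache:x:100:407::/var/www:/sbin/nologin
$ grep -E '^([^:]*:){2}10:' passwd    # new pattern: exact UID 10 only
games:x:10:100::/usr/games:/bin/false
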
diff --git a/eclass/user.eclass b/eclass/user.eclass
deleted file mode 100644
index 906e84e83c69..000000000000
--- a/eclass/user.eclass
+++ /dev/null
@@ -1,684 +0,0 @@
-# Copyright 1999-2022 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-# @ECLASS: user.eclass
-# @MAINTAINER:
-# base-system@gentoo.org (Linux)
-# Michał Górny <mgorny@gentoo.org> (NetBSD)
-# @SUPPORTED_EAPIS: 6 7 8
-# @BLURB: user management in ebuilds
-# @DEPRECATED: acct-user/acct-group packages
-# @DESCRIPTION:
-# The user eclass contains a suite of functions that allow ebuilds
-# to quickly make sure users in the installed system are sane.
-
-case ${EAPI} in
- 6|7) ;;
- 8)
- if [[ ${CATEGORY} != acct-* ]]; then
- eerror "In EAPI ${EAPI}, packages must not inherit user.eclass"
- eerror "unless they are in the acct-user or acct-group category."
- eerror "Migrate your package to GLEP 81 user/group management,"
- eerror "or inherit user-info if you need only the query functions."
- die "Invalid \"inherit user\" in EAPI ${EAPI}"
- fi
- ;;
- *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
-esac
-
-if [[ -z ${_USER_ECLASS} ]]; then
-_USER_ECLASS=1
-
-inherit user-info
-
-# @FUNCTION: _user_assert_pkg_phase
-# @INTERNAL
-# @USAGE: <calling func name>
-# @DESCRIPTION:
-# Raises an alert if the phase is not suitable for user.eclass usage.
-_user_assert_pkg_phase() {
- case ${EBUILD_PHASE} in
- setup|preinst|postinst|prerm|postrm) ;;
- *)
- eerror "'$1()' called from '${EBUILD_PHASE}' phase which is not OK:"
- eerror "You may only call from pkg_{setup,{pre,post}{inst,rm}} functions."
- eerror "Package has serious QA issues. Please file a bug."
- die "Bad package! ${1} is only for use in some pkg_* functions!"
- esac
-}
-
-# @FUNCTION: user_get_nologin
-# @INTERNAL
-# @DESCRIPTION:
-# Find an appropriate 'nologin' shell for the platform, and output
-# its path.
-user_get_nologin() {
- local eshell
-
- for eshell in /sbin/nologin /usr/sbin/nologin /bin/false /usr/bin/false /dev/null ; do
- [[ -x ${ROOT}${eshell} ]] && break
- done
-
- if [[ ${eshell} == "/dev/null" ]] ; then
- ewarn "Unable to identify the shell to use, proceeding with userland default."
- case ${USERLAND} in
- GNU) eshell="/bin/false" ;;
- BSD) eshell="/sbin/nologin" ;;
- Darwin) eshell="/usr/sbin/nologin" ;;
- *) die "Unable to identify the default shell for userland ${USERLAND}"
- esac
- fi
-
- echo "${eshell}"
-}
-
-# @FUNCTION: enewuser
-# @USAGE: <user> [-F] [-M] [uid] [shell] [homedir] [groups]
-# @DESCRIPTION:
-# Same as enewgroup, you are not required to understand how to properly add
-# a user to the system. The only required parameter is the username.
-# Default uid is (pass -1 for this) next available, default shell is
-# /bin/false, default homedir is /dev/null, and there are no default groups.
-#
-# If -F is passed, enewuser will always enforce specified UID and fail if it
-# can not be assigned.
-# If -M is passed, enewuser does not create the home directory if it does not
-# exist.
-enewuser() {
- if [[ ${EUID} -ne 0 ]] ; then
- ewarn "Insufficient privileges to execute ${FUNCNAME[0]}"
- return 0
- fi
- _user_assert_pkg_phase ${FUNCNAME}
-
- local create_home=1 force_uid=
- while [[ ${1} == -* ]]; do
- case ${1} in
- -F) force_uid=1;;
- -M) create_home=;;
- *) die "${FUNCNAME}: invalid option ${1}";;
- esac
- shift
- done
-
- # get the username
- local euser=${1}; shift
- if [[ -z ${euser} ]] ; then
- eerror "No username specified!"
- die "Cannot call enewuser without a username"
- fi
-
- # lets see if the username already exists
- if [[ -n $(egetent passwd "${euser}") ]] ; then
- return 0
- fi
- elog "Adding user '${euser}' to your system ..."
-
- # options to pass to useradd
- local opts=()
-
- # handle for ROOT != /
- [[ -n ${ROOT} ]] && opts+=( --prefix "${ROOT}" )
-
- # handle uid
- local euid=${1}; shift
- if [[ -n ${euid} && ${euid} != -1 ]] ; then
- if [[ ${euid} -gt 0 ]] ; then
- if [[ -n $(egetent passwd ${euid}) ]] ; then
- [[ -n ${force_uid} ]] && die "${FUNCNAME}: UID ${euid} already taken"
- euid="next"
- fi
- else
- eerror "Userid given but is not greater than 0!"
- die "${euid} is not a valid UID"
- fi
- else
- [[ -n ${force_uid} ]] && die "${FUNCNAME}: -F with uid==-1 makes no sense"
- euid="next"
- fi
- if [[ ${euid} == "next" ]] ; then
- for ((euid = 999; euid >= 101; euid--)); do
- [[ -z $(egetent passwd ${euid}) ]] && break
- done
- [[ ${euid} -ge 101 ]] || die "${FUNCNAME}: no free UID found"
- fi
- opts+=( -u ${euid} )
- elog " - Userid: ${euid}"
-
- # handle shell
- local eshell=${1}; shift
- if [[ ! -z ${eshell} ]] && [[ ${eshell} != "-1" ]] ; then
- if [[ ! -e ${ROOT}${eshell} ]] ; then
- eerror "A shell was specified but it does not exist!"
- die "${eshell} does not exist in ${ROOT}"
- fi
- if [[ ${eshell} == */false || ${eshell} == */nologin ]] ; then
- eerror "Do not specify ${eshell} yourself, use -1"
- die "Pass '-1' as the shell parameter"
- fi
- else
- eshell=$(user_get_nologin)
- fi
- elog " - Shell: ${eshell}"
- opts+=( -s "${eshell}" )
-
- # handle homedir
- local ehome=${1}; shift
- if [[ -z ${ehome} ]] || [[ ${ehome} == "-1" ]] ; then
- ehome="/dev/null"
- fi
- elog " - Home: ${ehome}"
- opts+=( -d "${ehome}" )
-
- # handle groups
- local egroups=${1}; shift
- local g egroups_arr
- IFS="," read -r -a egroups_arr <<<"${egroups}"
- if [[ ${#egroups_arr[@]} -gt 0 ]] ; then
- local defgroup exgroups
- for g in "${egroups_arr[@]}" ; do
- if [[ -z $(egetent group "${g}") ]] ; then
- eerror "You must add group ${g} to the system first"
- die "${g} is not a valid GID"
- fi
- if [[ -z ${defgroup} ]] ; then
- defgroup=${g}
- else
- exgroups+=",${g}"
- fi
- done
- opts+=( -g "${defgroup}" )
- if [[ ! -z ${exgroups} ]] ; then
- opts+=( -G "${exgroups:1}" )
- fi
- fi
- elog " - Groups: ${egroups:-(none)}"
-
- # handle extra args
- if [[ $# -gt 0 ]] ; then
- die "extra arguments no longer supported; please file a bug"
- else
- local comment="added by portage for ${PN}"
- opts+=( -c "${comment}" )
- elog " - GECOS: ${comment}"
- fi
-
- # add the user
- case ${CHOST} in
- *-freebsd*|*-dragonfly*)
- pw useradd "${euser}" "${opts[@]}" || die
- ;;
-
- *-netbsd*)
- if [[ -n "${ROOT}" ]]; then
- ewarn "NetBSD's usermod does not support --prefix option."
- ewarn "Please use: \"useradd ${opts[@]} ${euser}\" in a chroot"
- else
- useradd "${opts[@]}" "${euser}" || die
- fi
- ;;
-
- *-openbsd*)
- if [[ -n "${ROOT}" ]]; then
- ewarn "OpenBSD's usermod does not support --prefix option."
- ewarn "Please use: \"useradd ${opts[@]} ${euser}\" in a chroot"
- else
- # all ops the same, except the -g vs -g/-G ...
- useradd -u ${euid} -s "${eshell}" \
- -d "${ehome}" -g "${egroups}" "${euser}" || die
- fi
-
- ;;
-
- *)
- useradd -M -N -r "${opts[@]}" "${euser}" || die
- ;;
- esac
-
- if [[ -n ${create_home} && ! -e ${ROOT}/${ehome} ]] ; then
- elog " - Creating ${ehome} in ${ROOT}"
- mkdir -p "${ROOT}/${ehome}"
- # Use UID if we are in another ROOT than /
- if [[ -n "${ROOT}" ]]; then
- euser=$(egetent passwd ${euser} | cut -d: -f3)
- fi
- chown "${euser}" "${ROOT}/${ehome}"
- chmod 755 "${ROOT}/${ehome}"
- fi
-}
-
-# @FUNCTION: enewgroup
-# @USAGE: <group> [gid]
-# @DESCRIPTION:
-# This function does not require you to understand how to properly add a
-# group to the system. Just give it a group name to add and enewgroup will
-# do the rest. You may specify the gid for the group or allow the group to
-# allocate the next available one.
-#
-# If -F is passed, enewgroup will always enforce specified GID and fail if it
-# can not be assigned.
-enewgroup() {
- if [[ ${EUID} -ne 0 ]] ; then
- ewarn "Insufficient privileges to execute ${FUNCNAME[0]}"
- return 0
- fi
- _user_assert_pkg_phase ${FUNCNAME}
-
- local force_gid=
- while [[ ${1} == -* ]]; do
- case ${1} in
- -F) force_gid=1;;
- *) die "${FUNCNAME}: invalid option ${1}";;
- esac
- shift
- done
-
- # get the group
- local egroup=${1}; shift
- if [[ -z ${egroup} ]] ; then
- eerror "No group specified!"
- die "Cannot call enewgroup without a group"
- fi
-
- # see if group already exists
- if [[ -n $(egetent group "${egroup}") ]] ; then
- return 0
- fi
- elog "Adding group '${egroup}' to your system ..."
-
- # handle gid
- local egid=${1}; shift
- if [[ -n ${egid} && ${egid} != -1 ]] ; then
- if [[ ${egid} -gt 0 ]] ; then
- if [[ -n $(egetent group ${egid}) ]] ; then
- [[ -n ${force_gid} ]] && die "${FUNCNAME}: GID ${egid} already taken"
- egid="next available; requested gid taken"
- fi
- else
- eerror "Groupid given but is not greater than 0!"
- die "${egid} is not a valid GID"
- fi
- else
- [[ -n ${force_gid} ]] && die "${FUNCNAME}: -F with gid==-1 makes no sense"
- egid="next available"
- fi
- elog " - Groupid: ${egid}"
-
- # handle different ROOT
- local opts
- [[ -n ${ROOT} ]] && opts=( --prefix "${ROOT}" )
-
- # handle extra
- if [[ $# -gt 0 ]] ; then
- die "extra arguments no longer supported; please file a bug"
- fi
-
- # Some targets need to find the next available GID manually
- _enewgroup_next_gid() {
- if [[ ${egid} == *[!0-9]* ]] ; then
- # Non numeric
- for ((egid = 999; egid >= 101; egid--)) ; do
- [[ -z $(egetent group ${egid}) ]] && break
- done
- [[ ${egid} -ge 101 ]] || die "${FUNCNAME}: no free GID found"
- fi
- }
-
- # add the group
- case ${CHOST} in
- *-freebsd*|*-dragonfly*)
- _enewgroup_next_gid
- pw groupadd "${opts[@]}" "${egroup}" -g ${egid} || die
- ;;
-
- *-netbsd*)
- if [[ -n "${ROOT}" ]]; then
- ewarn "NetBSD's usermod does not support --prefix <dir> option."
- ewarn "Please use: \"groupadd -g ${egid} ${opts[@]} ${egroup}\" in a chroot"
- else
- _enewgroup_next_gid
- groupadd -g ${egid} "${opts[@]}" "${egroup}" || die
- fi
- ;;
-
- *)
- if [[ ${egid} == *[!0-9]* ]] ; then
- # Non numeric; let groupadd figure out a GID for us
- #
- true # Do nothing but keep the previous comment.
- else
- opts+=( -g ${egid} )
- fi
- # We specify -r so that we get a GID in the system range from login.defs
- groupadd -r "${opts[@]}" "${egroup}" || die
- ;;
- esac
-}
-
-# @FUNCTION: esethome
-# @USAGE: <user> <homedir>
-# @DESCRIPTION:
-# Update the home directory in a platform-agnostic way.
-# Required parameters is the username and the new home directory.
-# Specify -1 if you want to set home to the enewuser default
-# of /dev/null.
-# If the new home directory does not exist, it is created.
-# Any previously existing home directory is NOT moved.
-esethome() {
- _user_assert_pkg_phase ${FUNCNAME}
-
- # get the username
- local euser=${1}; shift
- if [[ -z ${euser} ]] ; then
- eerror "No username specified!"
- die "Cannot call esethome without a username"
- fi
-
- # lets see if the username already exists
- if [[ -z $(egetent passwd "${euser}") ]] ; then
- ewarn "User does not exist, cannot set home dir -- skipping."
- return 1
- fi
-
- # Handle different ROOT
- local opts
- [[ -n ${ROOT} ]] && opts=( --prefix "${ROOT}" )
-
- # handle homedir
- local ehome=${1}; shift
- if [[ -z ${ehome} ]] ; then
- eerror "No home directory specified!"
- die "Cannot call esethome without a home directory or '-1'"
- fi
-
- if [[ ${ehome} == "-1" ]] ; then
- ehome="/dev/null"
- fi
-
- # exit with no message if home dir is up to date
- if [[ $(egethome "${euser}") == ${ehome} ]]; then
- return 0
- fi
-
- elog "Updating home for user '${euser}' ..."
- elog " - Home: ${ehome}"
-
- # ensure home directory exists, otherwise update will fail
- if [[ ! -e ${ROOT}/${ehome} ]] ; then
- elog " - Creating ${ehome} in ${ROOT}"
- mkdir -p "${ROOT}/${ehome}"
- chown "${euser}" "${ROOT}/${ehome}"
- chmod 755 "${ROOT}/${ehome}"
- fi
-
- # update the home directory
- case ${CHOST} in
- *-freebsd*|*-dragonfly*)
- pw usermod "${opts[@]}" "${euser}" -d "${ehome}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update home"
- eerror "There was an error when attempting to update the home directory for ${euser}"
- eerror "Please update it manually on your system:"
- eerror "\t pw usermod \"${euser}\" -d \"${ehome}\""
- ;;
-
- *-netbsd*)
- if [[ -n "${ROOT}" ]]; then
- ewarn "NetBSD's usermod does not support --prefix <dir> option."
- ewarn "Please use: \"usermod ${opts[@]} -d ${ehome} ${euser}\" in a chroot"
- else
- usermod "${opts[@]}" -d "${ehome}" "${euser}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update home"
- eerror "There was an error when attempting to update the home directory for ${euser}"
- eerror "Please update it manually on your system (as root):"
- eerror "\t usermod -d \"${ehome}\" \"${euser}\""
- fi
- ;;
-
- *)
- usermod "${opts[@]}" -d "${ehome}" "${euser}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update home"
- eerror "There was an error when attempting to update the home directory for ${euser}"
- eerror "Please update it manually on your system (as root):"
- eerror "\t usermod -d \"${ehome}\" \"${euser}\""
- ;;
- esac
-}
-
-# @FUNCTION: esetshell
-# @USAGE: <user> <shell>
-# @DESCRIPTION:
-# Update the shell in a platform-agnostic way.
-# Required parameters is the username and the new shell.
-# Specify -1 if you want to set shell to platform-specific nologin.
-esetshell() {
- _user_assert_pkg_phase ${FUNCNAME}
-
- # get the username
- local euser=${1}; shift
- if [[ -z ${euser} ]] ; then
- eerror "No username specified!"
- die "Cannot call esetshell without a username"
- fi
-
- # lets see if the username already exists
- if [[ -z $(egetent passwd "${euser}") ]] ; then
- ewarn "User does not exist, cannot set shell -- skipping."
- return 1
- fi
-
- # Handle different ROOT
- local opts
- [[ -n ${ROOT} ]] && opts=( --prefix "${ROOT}" )
-
- # handle shell
- local eshell=${1}; shift
- if [[ -z ${eshell} ]] ; then
- eerror "No shell specified!"
- die "Cannot call esetshell without a shell or '-1'"
- fi
-
- if [[ ${eshell} == "-1" ]] ; then
- eshell=$(user_get_nologin)
- fi
-
- # exit with no message if shell is up to date
- if [[ $(egetshell "${euser}") == ${eshell} ]]; then
- return 0
- fi
-
- elog "Updating shell for user '${euser}' ..."
- elog " - Shell: ${eshell}"
-
- # update the shell
- case ${CHOST} in
- *-freebsd*|*-dragonfly*)
- pw usermod "${opts[@]}" "${euser}" -s "${eshell}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update shell"
- eerror "There was an error when attempting to update the shell for ${euser}"
- eerror "Please update it manually on your system:"
- eerror "\t pw usermod \"${euser}\" -s \"${eshell}\""
- ;;
-
- *-netbsd*)
- if [[ -n "${ROOT}" ]]; then
- ewarn "NetBSD's usermod does not support --prefix <dir> option."
- ewarn "Please use: \"usermod ${opts[@]} -s ${eshell} ${euser}\" in a chroot"
- else
- usermod "${opts[@]}" -s "${eshell}" "${euser}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update shell"
- eerror "There was an error when attempting to update the shell for ${euser}"
- eerror "Please update it manually on your system (as root):"
- eerror "\t usermod -s \"${eshell}\" \"${euser}\""
- fi
- ;;
-
- *)
- usermod "${opts[@]}" -s "${eshell}" "${euser}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update shell"
- eerror "There was an error when attempting to update the shell for ${euser}"
- eerror "Please update it manually on your system (as root):"
- eerror "\t usermod -s \"${eshell}\" \"${euser}\""
- ;;
- esac
-}
-
-# @FUNCTION: esetcomment
-# @USAGE: <user> <comment>
-# @DESCRIPTION:
-# Update the comment field in a platform-agnostic way.
-# Required parameters is the username and the new comment.
-esetcomment() {
- _user_assert_pkg_phase ${FUNCNAME}
-
- # get the username
- local euser=${1}; shift
- if [[ -z ${euser} ]] ; then
- eerror "No username specified!"
- die "Cannot call esetcomment without a username"
- fi
-
- # lets see if the username already exists
- if [[ -z $(egetent passwd "${euser}") ]] ; then
- ewarn "User does not exist, cannot set comment -- skipping."
- return 1
- fi
-
- # Handle different ROOT
- local opts
- [[ -n ${ROOT} ]] && opts=( --prefix "${ROOT}" )
-
- # handle comment
- local ecomment=${1}; shift
- if [[ -z ${ecomment} ]] ; then
- eerror "No comment specified!"
- die "Cannot call esetcomment without a comment"
- fi
-
- # exit with no message if comment is up to date
- if [[ $(egetcomment "${euser}") == ${ecomment} ]]; then
- return 0
- fi
-
- elog "Updating comment for user '${euser}' ..."
- elog " - Comment: ${ecomment}"
-
- # update the comment
- case ${CHOST} in
- *-freebsd*|*-dragonfly*)
- pw usermod "${opts[@]}" "${euser}" -c "${ecomment}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update comment"
- eerror "There was an error when attempting to update the comment for ${euser}"
- eerror "Please update it manually on your system:"
- eerror "\t pw usermod \"${euser}\" -c \"${ecomment}\""
- ;;
-
- *-netbsd*)
- if [[ -n "${ROOT}" ]]; then
- ewarn "NetBSD's usermod does not support --prefix <dir> option."
- ewarn "Please use: \"usermod ${opts[@]} -c ${ecomment} ${euser}\" in a chroot"
- else
- usermod "${opts[@]}" -c "${ecomment}" "${euser}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update shell"
- eerror "There was an error when attempting to update the shell for ${euser}"
- eerror "Please update it manually on your system (as root):"
- eerror "\t usermod -s \"${eshell}\" \"${euser}\""
- fi
- ;;
-
- *)
- usermod "${opts[@]}" -c "${ecomment}" "${euser}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update comment"
- eerror "There was an error when attempting to update the comment for ${euser}"
- eerror "Please update it manually on your system (as root):"
- eerror "\t usermod -c \"${ecomment}\" \"${euser}\""
- ;;
- esac
-}
-
-# @FUNCTION: esetgroups
-# @USAGE: <user> <groups>
-# @DESCRIPTION:
-# Update the group field in a platform-agnostic way.
-# Required parameters is the username and the new list of groups,
-# primary group first.
-esetgroups() {
- _user_assert_pkg_phase ${FUNCNAME}
-
- [[ ${#} -eq 2 ]] || die "Usage: ${FUNCNAME} <user> <groups>"
-
- # get the username
- local euser=${1}; shift
-
- # lets see if the username already exists
- if [[ -z $(egetent passwd "${euser}") ]] ; then
- ewarn "User does not exist, cannot set group -- skipping."
- return 1
- fi
-
- # handle group
- local egroups=${1}; shift
-
- local g egroups_arr=()
- IFS="," read -r -a egroups_arr <<<"${egroups}"
- [[ ${#egroups_arr[@]} -gt 0 ]] || die "${FUNCNAME}: no groups specified"
-
- for g in "${egroups_arr[@]}" ; do
- if [[ -z $(egetent group "${g}") ]] ; then
- eerror "You must add group ${g} to the system first"
- die "${g} is not a valid GID"
- fi
- done
-
- local defgroup=${egroups_arr[0]} exgroups_arr=()
- # sort supplementary groups to make comparison possible
- readarray -t exgroups_arr < <(printf '%s\n' "${egroups_arr[@]:1}" | sort)
- local exgroups=${exgroups_arr[*]}
- exgroups=${exgroups// /,}
- egroups=${defgroup}${exgroups:+,${exgroups}}
-
- # exit with no message if group membership is up to date
- if [[ $(egetgroups "${euser}") == ${egroups} ]]; then
- return 0
- fi
-
- local opts=( -g "${defgroup}" -G "${exgroups}" )
- elog "Updating groups for user '${euser}' ..."
- elog " - Groups: ${egroups}"
-
- # Handle different ROOT
- [[ -n ${ROOT} ]] && opts+=( --prefix "${ROOT}" )
-
- # update the group
- case ${CHOST} in
- *-freebsd*|*-dragonfly*)
- pw usermod "${euser}" "${opts[@]}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update groups"
- eerror "There was an error when attempting to update the groups for ${euser}"
- eerror "Please update it manually on your system:"
- eerror "\t pw usermod \"${euser}\" ${opts[*]}"
- ;;
-
- *-netbsd*)
- if [[ -n "${ROOT}" ]]; then
- ewarn "NetBSD's usermod does not support --prefix <dir> option."
- ewarn "Please use: \"usermod ${opts[@]} ${euser}\" in a chroot"
- else
- usermod "${opts[@]}" "${euser}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update shell"
- eerror "There was an error when attempting to update the shell for ${euser}"
- eerror "Please update it manually on your system (as root):"
- eerror "\t usermod -s \"${eshell}\" \"${euser}\""
- fi
- ;;
-
- *)
- usermod "${opts[@]}" "${euser}" && return 0
- [[ $? == 8 ]] && eerror "${euser} is in use, cannot update groups"
- eerror "There was an error when attempting to update the groups for ${euser}"
- eerror "Please update it manually on your system (as root):"
- eerror "\t usermod ${opts[*]} \"${euser}\""
- ;;
- esac
-}
-
-fi
diff --git a/eclass/usr-ldscript.eclass b/eclass/usr-ldscript.eclass
index a8229ed2ac2e..e52de9e658d4 100644
--- a/eclass/usr-ldscript.eclass
+++ b/eclass/usr-ldscript.eclass
@@ -1,14 +1,14 @@
-# Copyright 2019-2022 Gentoo Authors
+# Copyright 2019-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: usr-ldscript.eclass
# @MAINTAINER:
# Toolchain Ninjas <toolchain@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Defines the gen_usr_ldscript function.
case ${EAPI} in
- 6|7|8) ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
@@ -39,6 +39,13 @@ gen_usr_ldscript() {
tc-is-static-only && return
use prefix && return
+ # The toolchain's sysroot is automatically prepended to paths in this
+ # script. We therefore need to omit EPREFIX on standalone prefix (RAP)
+ # systems. prefix-guest (non-RAP) systems don't apply a sysroot so EPREFIX
+ # is still needed in that case. This is moot because the above line makes
+ # the function a noop on prefix, but we keep this in case that changes.
+ local prefix=$(usex prefix-guest "${EPREFIX}" "")
+
# We only care about stuffing / for the native ABI. #479448
if [[ $(type -t multilib_is_native_abi) == "function" ]] ; then
multilib_is_native_abi || return 0
@@ -48,7 +55,7 @@ gen_usr_ldscript() {
case ${CTARGET:-${CHOST}} in
*-darwin*) ;;
*-android*) return 0 ;;
- *linux*|*-freebsd*|*-openbsd*|*-netbsd*)
+ *linux*)
use split-usr || return 0
;;
*) return 0 ;;
@@ -70,7 +77,7 @@ gen_usr_ldscript() {
# If they're using gold, manually invoke the old bfd. #487696
local d="${T}/bfd-linker"
mkdir -p "${d}"
- ln -sf $(which ${CHOST}-ld.bfd) "${d}"/ld
+ ln -sf $(type -P ${CHOST}-ld.bfd) "${d}"/ld
flags+=( -B"${d}" )
fi
output_format=$($(tc-getCC) "${flags[@]}" 2>&1 | sed -n 's/^OUTPUT_FORMAT("\([^"]*\)",.*/\1/p')
@@ -84,27 +91,27 @@ gen_usr_ldscript() {
# Ensure /lib/${lib} exists to avoid dangling scripts/symlinks.
# This especially is for AIX where $(get_libname) can return ".a",
# so /lib/${lib} might be moved to /usr/lib/${lib} (by accident).
- [[ -r ${ED%/}/${libdir}/${lib} ]] || continue
+ [[ -r ${ED}/${libdir}/${lib} ]] || continue
#TODO: better die here?
fi
case ${CTARGET:-${CHOST}} in
*-darwin*)
if ${auto} ; then
- tlib=$(scanmacho -qF'%S#F' "${ED%/}"/usr/${libdir}/${lib})
+ tlib=$(scanmacho -qF'%S#F' "${ED}"/usr/${libdir}/${lib})
else
- tlib=$(scanmacho -qF'%S#F' "${ED%/}"/${libdir}/${lib})
+ tlib=$(scanmacho -qF'%S#F' "${ED}"/${libdir}/${lib})
fi
[[ -z ${tlib} ]] && die "unable to read install_name from ${lib}"
tlib=${tlib##*/}
if ${auto} ; then
- mv "${ED%/}"/usr/${libdir}/${lib%${suffix}}.*${suffix#.} "${ED%/}"/${libdir}/ || die
+ mv "${ED}"/usr/${libdir}/${lib%${suffix}}.*${suffix#.} "${ED}"/${libdir}/ || die
# some install_names are funky: they encode a version
if [[ ${tlib} != ${lib%${suffix}}.*${suffix#.} ]] ; then
- mv "${ED%/}"/usr/${libdir}/${tlib%${suffix}}.*${suffix#.} "${ED%/}"/${libdir}/ || die
+ mv "${ED}"/usr/${libdir}/${tlib%${suffix}}.*${suffix#.} "${ED}"/${libdir}/ || die
fi
- rm -f "${ED%/}"/${libdir}/${lib}
+ rm -f "${ED}"/${libdir}/${lib}
fi
# Mach-O files have an id, which is like a soname, it tells how
@@ -114,34 +121,36 @@ gen_usr_ldscript() {
# libdir=/lib because that messes up libtool files.
# Make sure we don't lose the specific version, so just modify the
# existing install_name
- if [[ ! -w "${ED%/}/${libdir}/${tlib}" ]] ; then
- chmod u+w "${ED%/}/${libdir}/${tlib}" # needed to write to it
+ if [[ ! -w "${ED}/${libdir}/${tlib}" ]] ; then
+ chmod u+w "${ED}/${libdir}/${tlib}" || die # needed to write to it
local nowrite=yes
fi
install_name_tool \
-id "${EPREFIX}"/${libdir}/${tlib} \
- "${ED%/}"/${libdir}/${tlib} || die "install_name_tool failed"
- [[ -n ${nowrite} ]] && chmod u-w "${ED%/}/${libdir}/${tlib}"
+ "${ED}"/${libdir}/${tlib} || die "install_name_tool failed"
+ if [[ -n ${nowrite} ]] ; then
+ chmod u-w "${ED}/${libdir}/${tlib}" || die
+ fi
# Now as we don't use GNU binutils and our linker doesn't
# understand linker scripts, just create a symlink.
- pushd "${ED%/}/usr/${libdir}" > /dev/null
+ pushd "${ED}/usr/${libdir}" > /dev/null
ln -snf "../../${libdir}/${tlib}" "${lib}"
popd > /dev/null
;;
*)
if ${auto} ; then
- tlib=$(scanelf -qF'%S#F' "${ED%/}"/usr/${libdir}/${lib})
+ tlib=$(scanelf -qF'%S#F' "${ED}"/usr/${libdir}/${lib})
[[ -z ${tlib} ]] && die "unable to read SONAME from ${lib}"
- mv "${ED%/}"/usr/${libdir}/${lib}* "${ED%/}"/${libdir}/ || die
+ mv "${ED}"/usr/${libdir}/${lib}* "${ED}"/${libdir}/ || die
# some SONAMEs are funky: they encode a version before the .so
if [[ ${tlib} != ${lib}* ]] ; then
- mv "${ED%/}"/usr/${libdir}/${tlib}* "${ED%/}"/${libdir}/ || die
+ mv "${ED}"/usr/${libdir}/${tlib}* "${ED}"/${libdir}/ || die
fi
- rm -f "${ED%/}"/${libdir}/${lib}
+ rm -f "${ED}"/${libdir}/${lib}
else
tlib=${lib}
fi
- cat > "${ED%/}/usr/${libdir}/${lib}" <<-END_LDSCRIPT
+ cat > "${ED}/usr/${libdir}/${lib}" <<-END_LDSCRIPT
/* GNU ld script
Since Gentoo has critical dynamic libraries in /lib, and the static versions
in /usr/lib, we need to have a "fake" dynamic lib in /usr/lib, otherwise we
@@ -152,7 +161,7 @@ gen_usr_ldscript() {
See bug https://bugs.gentoo.org/4411 for more info.
*/
${output_format}
- GROUP ( ${EPREFIX}/${libdir}/${tlib} )
+ GROUP ( ${prefix}/${libdir}/${tlib} )
END_LDSCRIPT
;;
esac
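
For a concrete picture (library name, soname, and output format are invented), running gen_usr_ldscript -a foo on a split-usr glibc/amd64 system moves the real object to /lib64 and leaves a text stub at /usr/lib64/libfoo.so with roughly this content:

/* GNU ld script ... */
OUTPUT_FORMAT ( elf64-x86-64 )
GROUP ( /lib64/libfoo.so.1 )
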
diff --git a/eclass/vala.eclass b/eclass/vala.eclass
index 076ef9066067..be5fac99a767 100644
--- a/eclass/vala.eclass
+++ b/eclass/vala.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: vala.eclass
@@ -6,7 +6,7 @@
# gnome@gentoo.org
# @AUTHOR:
# Alexandre Rostovtsev <tetromino@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: Sets up the environment for using a specific version of vala.
# @DESCRIPTION:
# This eclass sets up commonly used environment variables for using a specific
@@ -15,18 +15,19 @@
# executables, pkgconfig files, etc., which Gentoo does not provide.
case ${EAPI} in
- 6|7) inherit eutils multilib ;;
- 8) ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
if [[ -z ${_VALA_ECLASS} ]] ; then
_VALA_ECLASS=1
+inherit flag-o-matic
+
# @ECLASS_VARIABLE: VALA_MIN_API_VERSION
# @DESCRIPTION:
-# Minimum vala API version (e.g. 0.50).
-VALA_MIN_API_VERSION=${VALA_MIN_API_VERSION:-0.50}
+# Minimum vala API version (e.g. 0.56).
+VALA_MIN_API_VERSION=${VALA_MIN_API_VERSION:-0.56}
# @ECLASS_VARIABLE: VALA_MAX_API_VERSION
# @DESCRIPTION:
@@ -49,11 +50,10 @@ vala_api_versions() {
local minimal_supported_minor_version minor_version
# Dependency atoms are not generated for Vala versions older than 0.${minimal_supported_minor_version}.
- minimal_supported_minor_version="46"
+ minimal_supported_minor_version="56"
for ((minor_version = ${VALA_MAX_API_VERSION#*.}; minor_version >= ${VALA_MIN_API_VERSION#*.}; minor_version = minor_version - 2)); do
- # 0.42 is EOL and removed from tree; remove special case once minimal_support_minor_version >= 44
- if ((minor_version >= minimal_supported_minor_version)) && ((minor_version != 42)); then
+ if ((minor_version >= minimal_supported_minor_version)); then
echo "0.${minor_version}"
fi
done
@@ -100,13 +100,11 @@ vala_depend() {
# VALA_MAX_API_VERSION, VALA_MIN_API_VERSION, and VALA_USE_DEPEND.
vala_best_api_version() {
local u v
- local hv_opt="-b"
- [[ ${EAPI} == 6 ]] && hv_opt=""
u=$(_vala_use_depend)
for v in $(vala_api_versions); do
- has_version ${hv_opt} "dev-lang/vala:${v}${u}" && echo "${v}" && return
+ has_version -b "dev-lang/vala:${v}${u}" && echo "${v}" && return
done
}
@@ -122,8 +120,6 @@ vala_best_api_version() {
# version is not available.
vala_setup() {
local p d valafoo version ignore_use
- local hv_opt="-b"
- [[ ${EAPI} == 6 ]] && hv_opt=""
while [[ $1 ]]; do
case $1 in
@@ -142,7 +138,7 @@ vala_setup() {
fi
if [[ ${version} ]]; then
- has_version ${hv_opt} "dev-lang/vala:${version}" || die "No installed vala:${version}"
+ has_version -b "dev-lang/vala:${version}" || die "No installed vala:${version}"
else
version=$(vala_best_api_version)
[[ ${version} ]] || die "No installed vala in $(vala_depend)"
@@ -173,14 +169,18 @@ vala_setup() {
fi
done
done
- : ${PKG_CONFIG_PATH:="${EPREFIX}/usr/$(get_libdir)/pkgconfig:${EPREFIX}/usr/share/pkgconfig"}
+ : "${PKG_CONFIG_PATH:="${EPREFIX}/usr/$(get_libdir)/pkgconfig:${EPREFIX}/usr/share/pkgconfig"}"
export PKG_CONFIG_PATH="${T}/pkgconfig:${PKG_CONFIG_PATH}"
+
+ # See bug #892708.
+ # Workaround for https://gitlab.gnome.org/GNOME/vala/-/issues/1408.
+ append-cflags $(test-flags-CC -Wno-incompatible-pointer-types)
}
# @FUNCTION: vala_src_prepare
# @DESCRIPTION:
-# For backwards compatibility in EAPIs 6 and 7. Calls vala_setup.
-if [[ ${EAPI} == [67] ]]; then
+# For backwards compatibility in EAPI 7. Calls vala_setup.
+if [[ ${EAPI} == 7 ]]; then
vala_src_prepare() { vala_setup "$@"; }
fi
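
For reference, an EAPI 8 consumer of the reworked vala.eclass could look like the sketch below; the meson inherit, the dependency placement, and the package itself are illustrative assumptions, not taken from this commit.

# Hypothetical ebuild fragment (sketch only)
EAPI=8
inherit meson vala

BDEPEND="$(vala_depend)"

src_prepare() {
	default
	# Selects the newest installed dev-lang/vala slot within
	# VALA_MIN_API_VERSION (now 0.56) .. VALA_MAX_API_VERSION, exports the
	# matching tools, prepends ${T}/pkgconfig to PKG_CONFIG_PATH and, per
	# bug #892708, appends -Wno-incompatible-pointer-types to CFLAGS.
	vala_setup
}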
diff --git a/eclass/vcs-snapshot.eclass b/eclass/vcs-snapshot.eclass
index 64bc1da040f4..1b7299b92a3b 100644
--- a/eclass/vcs-snapshot.eclass
+++ b/eclass/vcs-snapshot.eclass
@@ -43,7 +43,8 @@
# in ${WORKDIR}/${P} and ${WORKDIR}/${P}-otherstuff respectively.
case ${EAPI} in
- 6|7|8) ;;
+ 6) inherit eqawarn ;;
+ 7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
diff --git a/eclass/vdr-plugin-2.eclass b/eclass/vdr-plugin-2.eclass
index f8d5a18854ed..8f56511032c8 100644
--- a/eclass/vdr-plugin-2.eclass
+++ b/eclass/vdr-plugin-2.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: vdr-plugin-2.eclass
@@ -9,7 +9,7 @@
# Joerg Bornkessel <hd_brummy@gentoo.org>
# Christian Ruppert <idl0r@gentoo.org>
# (undisclosed contributors)
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: common vdr plugin ebuild functions
# @DESCRIPTION:
# Eclass for easing maintenance of vdr plugin ebuilds
@@ -60,29 +60,15 @@
# PO_SUBDIR="bla foo/bla"
# @CODE
-# Applying your own local/user patches:
-# This is done by using the
-# (EAPI = 5) epatch_user() function of the eutils.eclass,
-# (EAPI = 6,7) eapply_user function integrated in EAPI = 6.
-# Simply add your patches into one of these directories:
-# /etc/portage/patches/<CATEGORY>/<PF|P|PN>/
-# Quote: where the first of these three directories to exist will be the one to
-# use, ignoring any more general directories which might exist as well.
-#
-# For more details about it please take a look at the eutils.class.
-
-[[ ${EAPI} == [5] ]] && inherit multilib
-[[ ${EAPI} == [56] ]] && inherit eutils
-inherit flag-o-matic strip-linguas toolchain-funcs unpacker
-
case ${EAPI} in
- 5|6|7|8)
- ;;
- *) die "${ECLASS}: EAPI ${EAPI:-0} not supported"
- ;;
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS pkg_setup src_unpack src_prepare src_compile src_install pkg_postinst pkg_postrm pkg_config
+if [[ -z ${_VDR_PLUGIN_2_ECLASS} ]]; then
+_VDR_PLUGIN_2_ECLASS=1
+
+inherit flag-o-matic strip-linguas toolchain-funcs unpacker
# Name of the plugin stripped from all vdrplugin-, vdr- and -cvs pre- and postfixes
VDRPLUGIN="${PN/#vdrplugin-/}"
@@ -95,14 +81,9 @@ DESCRIPTION="vdr Plugin: ${VDRPLUGIN} (based on vdr-plugin-2.eclass)"
S="${WORKDIR}/${VDRPLUGIN}-${PV}"
# depend on headers for DVB-driver and vdr-scripts
-case ${EAPI} in
- 5|6) DEPEND="media-tv/gentoo-vdr-scripts
- virtual/linuxtv-dvb-headers
- virtual/pkgconfig" ;;
- *) BDEPEND="virtual/pkgconfig"
- DEPEND="media-tv/gentoo-vdr-scripts
- virtual/linuxtv-dvb-headers" ;;
-esac
+BDEPEND="virtual/pkgconfig"
+DEPEND="media-tv/gentoo-vdr-scripts
+ sys-kernel/linux-headers"
RDEPEND="media-tv/gentoo-vdr-scripts
app-eselect/eselect-vdr"
@@ -276,10 +257,9 @@ vdr_gettext_missing() {
# DIR ${S}/po or DIR ${S}/_subdir_/po
vdr_detect_po_dir() {
[[ -f po ]] && local po_dir="${S}"
- local po_subdir=( ${S}/${PO_SUBDIR} )
- local f
+ local po_subdir=( "${S}"/${PO_SUBDIR} )
- pofile_dir=( ${po_dir} ${po_subdir[*]} )
+ pofile_dir=( ${po_dir} "${po_subdir[@]}" )
}
# @FUNCTION: vdr_linguas_support
@@ -413,7 +393,7 @@ vdr-plugin-2_pkg_setup() {
VDR_RC_DIR="/usr/share/vdr/rcscript"
- # Pathes to includes
+ # Paths to includes
VDR_INCLUDE_DIR="/usr/include/vdr"
DVB_INCLUDE_DIR="/usr/include"
@@ -468,11 +448,7 @@ vdr-plugin-2_src_util() {
;;
add_local_patch)
cd "${S}" || die "Could not change to plugin-source-directory (src_util)"
- if [[ ${EAPI} != [5] ]]; then
- eapply_user
- else
- epatch_user
- fi
+ eapply_user
;;
patchmakefile)
cd "${S}" || die "Could not change to plugin-source-directory (src_util)"
@@ -515,8 +491,7 @@ vdr-plugin-2_src_prepare() {
die "vdr-plugin-2_src_prepare not called!"
fi
- [[ ${EAPI} == [5] ]] && [[ ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
- [[ ${EAPI} != [5] ]] && [[ ${PATCHES[@]} ]] && eapply "${PATCHES[@]}"
+ [[ -n ${PATCHES[@]} ]] && eapply "${PATCHES[@]}"
debug-print "$FUNCNAME: applying user patches"
@@ -605,7 +580,7 @@ vdr-plugin-2_src_install() {
local linguas
for linguas in ${LINGUAS[*]}; do
insinto "${LOCDIR}"
- cp -r --parents ${linguas}* ${D%/}/${LOCDIR} \
+ cp -r --parents ${linguas}* "${D%/}"/${LOCDIR} \
|| die "could not copy linguas files"
done
fi
@@ -627,14 +602,7 @@ vdr-plugin-2_src_install() {
vdr_create_header_checksum_file ${vdr_plugin_list}
vdr_create_plugindb_file ${vdr_plugin_list}
- if [[ ${EAPI} != [45] ]]; then
- einstalldocs
- else
- local docfile
- for docfile in README* HISTORY CHANGELOG; do
- [[ -f ${docfile} ]] && dodoc ${docfile}
- done
- fi
+ einstalldocs
# if VDR_CONFD_FILE is empty and ${FILESDIR}/confd exists take it
[[ -z ${VDR_CONFD_FILE} ]] && [[ -e ${FILESDIR}/confd ]] && VDR_CONFD_FILE=${FILESDIR}/confd
@@ -669,3 +637,7 @@ vdr-plugin-2_pkg_postrm() {
vdr-plugin-2_pkg_config() {
:
}
+
+fi
+
+EXPORT_FUNCTIONS pkg_setup src_unpack src_prepare src_compile src_install pkg_postinst pkg_postrm pkg_config
diff --git a/eclass/verify-sig.eclass b/eclass/verify-sig.eclass
index 1d7c62d4dce2..b74ed78290aa 100644
--- a/eclass/verify-sig.eclass
+++ b/eclass/verify-sig.eclass
@@ -1,4 +1,4 @@
-# Copyright 2020-2022 Gentoo Authors
+# Copyright 2020-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: verify-sig.eclass
@@ -16,7 +16,7 @@
# the developer's work.
#
# To use the eclass, start by packaging the upstream's key
-# as app-crypt/openpgp-keys-*. Then inherit the eclass, add detached
+# as sec-keys/openpgp-keys-*. Then inherit the eclass, add detached
# signatures to SRC_URI and set VERIFY_SIG_OPENPGP_KEY_PATH. The eclass
# provides verify-sig USE flag to toggle the verification.
#
@@ -35,9 +35,9 @@
# SRC_URI="https://example.org/${P}.tar.gz
# verify-sig? ( https://example.org/${P}.tar.gz.sig )"
# BDEPEND="
-# verify-sig? ( app-crypt/openpgp-keys-example )"
+# verify-sig? ( sec-keys/openpgp-keys-example )"
#
-# VERIFY_SIG_OPENPGP_KEY_PATH=${BROOT}/usr/share/openpgp-keys/example.asc
+# VERIFY_SIG_OPENPGP_KEY_PATH=/usr/share/openpgp-keys/example.asc
# @CODE
case ${EAPI} in
@@ -45,9 +45,8 @@ case ${EAPI} in
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_unpack
-
-if [[ ! ${_VERIFY_SIG_ECLASS} ]]; then
+if [[ -z ${_VERIFY_SIG_ECLASS} ]]; then
+_VERIFY_SIG_ECLASS=1
IUSE="verify-sig"
@@ -56,17 +55,22 @@ IUSE="verify-sig"
# @DESCRIPTION:
# Signature verification method to use. The allowed value are:
#
-# - openpgp -- verify PGP signatures using app-crypt/gnupg (the default)
-# - signify -- verify signatures with Ed25519 public key using app-crypt/signify
-: ${VERIFY_SIG_METHOD:=openpgp}
+# - minisig -- verify signatures with (base64) Ed25519 public key using app-crypt/minisign
+# - openpgp -- verify PGP signatures using app-crypt/gnupg (the default)
+# - signify -- verify signatures with Ed25519 public key using app-crypt/signify
+: "${VERIFY_SIG_METHOD:=openpgp}"
case ${VERIFY_SIG_METHOD} in
+ minisig)
+ BDEPEND="verify-sig? ( app-crypt/minisign )"
+ ;;
openpgp)
BDEPEND="
verify-sig? (
app-crypt/gnupg
- >=app-portage/gemato-16
- )"
+ >=app-portage/gemato-20
+ )
+ "
;;
signify)
BDEPEND="verify-sig? ( app-crypt/signify )"
@@ -83,6 +87,8 @@ esac
# when using default src_unpack. Alternatively, the key path can be
# passed directly to the verification functions.
#
+# The value of BROOT will be prepended to this path automatically.
+#
# NB: this variable is also used for non-OpenPGP signatures. The name
# contains "OPENPGP" for historical reasons.
@@ -103,7 +109,7 @@ esac
# connection.
#
# Supported for OpenPGP only.
-: ${VERIFY_SIG_OPENPGP_KEY_REFRESH:=no}
+: "${VERIFY_SIG_OPENPGP_KEY_REFRESH:=no}"
# @FUNCTION: verify-sig_verify_detached
# @USAGE: <file> <sig-file> [<key-file>]
@@ -115,10 +121,15 @@ esac
verify-sig_verify_detached() {
local file=${1}
local sig=${2}
- local key=${3:-${VERIFY_SIG_OPENPGP_KEY_PATH}}
+ local key=${3}
- [[ -n ${key} ]] ||
- die "${FUNCNAME}: no key passed and VERIFY_SIG_OPENPGP_KEY_PATH unset"
+ if [[ -z ${key} ]]; then
+ if [[ -z ${VERIFY_SIG_OPENPGP_KEY_PATH} ]]; then
+ die "${FUNCNAME}: no key passed and VERIFY_SIG_OPENPGP_KEY_PATH unset"
+ else
+ key="${BROOT}${VERIFY_SIG_OPENPGP_KEY_PATH}"
+ fi
+ fi
local extra_args=()
[[ ${VERIFY_SIG_OPENPGP_KEY_REFRESH} == yes ]] || extra_args+=( -R )
@@ -140,9 +151,17 @@ verify-sig_verify_detached() {
[[ ${file} == - ]] && filename='(stdin)'
einfo "Verifying ${filename} ..."
case ${VERIFY_SIG_METHOD} in
+ minisig)
+ minisign -V -P "$(<"${key}")" -x "${sig}" -m "${file}" ||
+ die "minisig signature verification failed"
+ ;;
openpgp)
- gemato gpg-wrap -K "${key}" "${extra_args[@]}" -- \
- gpg --verify "${sig}" "${file}" ||
+ # gpg can't handle very long TMPDIR
+ # https://bugs.gentoo.org/854492
+ local -x TMPDIR=/tmp
+ gemato openpgp-verify-detached -K "${key}" \
+ "${extra_args[@]}" --no-require-all-good \
+ "${sig}" "${file}" ||
die "PGP signature verification failed"
;;
signify)
@@ -164,10 +183,15 @@ verify-sig_verify_detached() {
verify-sig_verify_message() {
local file=${1}
local output_file=${2}
- local key=${3:-${VERIFY_SIG_OPENPGP_KEY_PATH}}
+ local key=${3}
- [[ -n ${key} ]] ||
- die "${FUNCNAME}: no key passed and VERIFY_SIG_OPENPGP_KEY_PATH unset"
+ if [[ -z ${key} ]]; then
+ if [[ -z ${VERIFY_SIG_OPENPGP_KEY_PATH} ]]; then
+ die "${FUNCNAME}: no key passed and VERIFY_SIG_OPENPGP_KEY_PATH unset"
+ else
+ key="${BROOT}${VERIFY_SIG_OPENPGP_KEY_PATH}"
+ fi
+ fi
local extra_args=()
[[ ${VERIFY_SIG_OPENPGP_KEY_REFRESH} == yes ]] || extra_args+=( -R )
@@ -189,7 +213,14 @@ verify-sig_verify_message() {
[[ ${file} == - ]] && filename='(stdin)'
einfo "Verifying ${filename} ..."
case ${VERIFY_SIG_METHOD} in
+ minisig)
+ minisign -V -P "$(<"${key}")" -x "${sig}" -o "${output_file}" -m "${file}" ||
+ die "minisig signature verification failed"
+ ;;
openpgp)
+ # gpg can't handle very long TMPDIR
+ # https://bugs.gentoo.org/854492
+ local -x TMPDIR=/tmp
gemato gpg-wrap -K "${key}" "${extra_args[@]}" -- \
gpg --verify --output="${output_file}" "${file}" ||
die "PGP signature verification failed"
@@ -202,12 +233,15 @@ verify-sig_verify_message() {
}
# @FUNCTION: verify-sig_verify_unsigned_checksums
-# @USAGE: <checksum-file> <algo> <files>
+# @USAGE: <checksum-file> <format> <files>
# @DESCRIPTION:
# Verify the checksums for all files listed in the space-separated list
-# <files> (akin to ${A}) using a <checksum-file>. <algo> specifies
-# the checksum algorithm (e.g. sha256). <checksum-file> can be "-"
-# for stdin.
+# <files> (akin to ${A}) using a <checksum-file>. <format> specifies
+# the checksum file format. <checksum-file> can be "-" for stdin.
+#
+# The following formats are supported:
+# - sha256 -- sha256sum (<hash> <filename>)
+# - openssl-dgst -- openssl dgst (<algo>(<filename>)=<hash>)
#
# The function dies if one of the files does not match checksums or
# is missing from the checksum file.
@@ -219,36 +253,52 @@ verify-sig_verify_message() {
# verify-sig_verify_signed_checksums instead.
verify-sig_verify_unsigned_checksums() {
local checksum_file=${1}
- local algo=${2}
+ local format=${2}
local files=()
read -r -d '' -a files <<<"${3}"
- local chksum_prog chksum_len
+ local chksum_prog chksum_len algo=${format}
- case ${algo} in
+ case ${format} in
sha256)
- chksum_prog=sha256sum
chksum_len=64
;;
+ openssl-dgst)
+ ;;
*)
- die "${FUNCNAME}: unknown checksum algo ${algo}"
+ die "${FUNCNAME}: unknown checksum format ${format}"
;;
esac
[[ ${checksum_file} == - ]] && checksum_file=/dev/stdin
- local checksum filename junk ret=0 count=0
- while read -r checksum filename junk; do
- if [[ ${checksum} == "-----BEGIN" ]]; then
+ local line checksum filename junk ret=0 count=0
+ local -A verified
+ while read -r line; do
+ if [[ ${line} == "-----BEGIN"* ]]; then
die "${FUNCNAME}: PGP armor found, use verify-sig_verify_signed_checksums instead"
fi
- [[ ${#checksum} -eq ${chksum_len} ]] || continue
- [[ -z ${checksum//[0-9a-f]} ]] || continue
- has "${filename}" "${files[@]}" || continue
- [[ -z ${junk} ]] || continue
+ case ${format} in
+ sha256)
+ read -r checksum filename junk <<<"${line}"
+ [[ ${#checksum} -ne ${chksum_len} ]] && continue
+ [[ -n ${checksum//[0-9a-f]} ]] && continue
+ [[ -n ${junk} ]] && continue
+ ;;
+ openssl-dgst)
+ [[ ${line} != *"("*")="* ]] && continue
+ checksum=${line##*)=}
+ algo=${line%%(*}
+ filename=${line#*(}
+ filename=${filename%)=*}
+ ;;
+ esac
+
+ if ! has "${filename}" "${files[@]}"; then
+ continue
+ fi
- "${chksum_prog}" -c --strict - <<<"${checksum} ${filename}"
- if [[ ${?} -eq 0 ]]; then
- (( count++ ))
+ if "${algo,,}sum" -c --strict - <<<"${checksum} ${filename}"; then
+ verified["${filename}"]=1
else
ret=1
fi
@@ -256,7 +306,7 @@ verify-sig_verify_unsigned_checksums() {
[[ ${ret} -eq 0 ]] ||
die "${FUNCNAME}: at least one file did not verify successfully"
- [[ ${count} -eq ${#files[@]} ]] ||
+ [[ ${#verified[@]} -eq ${#files[@]} ]] ||
die "${FUNCNAME}: checksums for some of the specified files were missing"
}
@@ -269,7 +319,7 @@ _gpg_verify_signed_checksums() {
local checksum_file=${1}
local algo=${2}
local files=${3}
- local key=${4:-${VERIFY_SIG_OPENPGP_KEY_PATH}}
+ local key=${4}
verify-sig_verify_unsigned_checksums - "${algo}" "${files}" < <(
verify-sig_verify_message "${checksum_file}" - "${key}"
@@ -292,10 +342,15 @@ verify-sig_verify_signed_checksums() {
local algo=${2}
local files=()
read -r -d '' -a files <<<"${3}"
- local key=${4:-${VERIFY_SIG_OPENPGP_KEY_PATH}}
+ local key=${4}
- [[ -n ${key} ]] ||
- die "${FUNCNAME}: no key passed and VERIFY_SIG_OPENPGP_KEY_PATH unset"
+ if [[ -z ${key} ]]; then
+ if [[ -z ${VERIFY_SIG_OPENPGP_KEY_PATH} ]]; then
+ die "${FUNCNAME}: no key passed and VERIFY_SIG_OPENPGP_KEY_PATH unset"
+ else
+ key="${BROOT}${VERIFY_SIG_OPENPGP_KEY_PATH}"
+ fi
+ fi
case ${VERIFY_SIG_METHOD} in
openpgp)
@@ -325,7 +380,7 @@ verify-sig_src_unpack() {
# find all distfiles and signatures, and combine them
for f in ${A}; do
found=
- for suffix in .asc .sig; do
+ for suffix in .asc .sig .minisig; do
if [[ ${f} == *${suffix} ]]; then
signatures+=( "${f}" )
found=sig
@@ -377,5 +432,6 @@ verify-sig_src_unpack() {
default_src_unpack
}
-_VERIFY_SIG_ECLASS=1
fi
+
+EXPORT_FUNCTIONS src_unpack
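
A hedged sketch of the new minisig method in use follows; the key package, key path, and URLs are placeholders, not part of this change. VERIFY_SIG_METHOD has to be set before the inherit so the matching BDEPEND is generated, and the default src_unpack now also recognizes .minisig files.

# Hypothetical ebuild fragment (sketch only)
EAPI=8
VERIFY_SIG_METHOD=minisig
inherit verify-sig

SRC_URI="https://example.org/${P}.tar.gz
	verify-sig? ( https://example.org/${P}.tar.gz.minisig )"
BDEPEND="verify-sig? ( sec-keys/minisign-keys-example )"

# BROOT is now prepended automatically, so the path is given without it.
VERIFY_SIG_OPENPGP_KEY_PATH=/usr/share/minisign-keys/example.pub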
diff --git a/eclass/vim-doc.eclass b/eclass/vim-doc.eclass
index ba9d00f4f5e8..119ce793071d 100644
--- a/eclass/vim-doc.eclass
+++ b/eclass/vim-doc.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2022 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: vim-doc.eclass
# @MAINTAINER:
# vim@gentoo.org
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: Eclass for vim{,-plugin}.eclass to update documentation tags.
# @DESCRIPTION:
# This eclass is used by vim.eclass and vim-plugin.eclass to update
@@ -16,26 +16,31 @@
# DEPEND in vim-plugin or by whatever version of vim is being
# installed by the eclass.
-case ${EAPI:-0} in
- [67]) ;;
+case ${EAPI} in
+ 6|7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ -z ${_VIM_DOC_ECLASS} ]] ; then
-_VIM_DOC_ECLASS=1
+if [[ ! ${_VIM_DOC_ECLASS} ]] ; then
+# @FUNCTION: update_vim_helptags
+# @USAGE:
+# @DESCRIPTION:
+# Update the documentation tags in the versioned Vim directory.
update_vim_helptags() {
- local vimfiles vim d s
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local vimfiles helpfile files vim d
# This is where vim plugins are installed
vimfiles="${EROOT}"/usr/share/vim/vimfiles
- if [[ $PN != vim-core ]]; then
+ if [[ ${PN} != vim-core ]]; then
# Find a suitable vim binary for updating tags :helptags
vim=$(type -P vim 2>/dev/null)
- [[ -z "$vim" ]] && vim=$(type -P gvim 2>/dev/null)
- [[ -z "$vim" ]] && vim=$(type -P kvim 2>/dev/null)
- if [[ -z "$vim" ]]; then
+ [[ -z "${vim}" ]] && vim=$(type -P gvim 2>/dev/null)
+ [[ -z "${vim}" ]] && vim=$(type -P kvim 2>/dev/null)
+ if [[ -z "${vim}" ]]; then
ewarn "No suitable vim binary to rebuild documentation tags"
fi
fi
@@ -43,44 +48,57 @@ update_vim_helptags() {
# Make vim not try to connect to X. See :help gui-x11-start
# in vim for how this evil trickery works.
if [[ -n "${vim}" ]] ; then
- ln -s "${vim}" "${T}/tagvim"
+ ln -s "${vim}" "${T}/tagvim" || die
vim="${T}/tagvim"
fi
# Install the documentation symlinks into the versioned vim
# directory and run :helptags
for d in "${EROOT%/}"/usr/share/vim/vim[0-9]*; do
- [[ -d "$d/doc" ]] || continue # catch a failed glob
+ [[ -d "${d}/doc" ]] || continue # catch a failed glob
- # Remove links, and possibly remove stale dirs
- find $d/doc -name \*.txt -type l | while read s; do
- [[ $(readlink "$s") = $vimfiles/* ]] && rm -f "$s"
+ # Remove links
+ readarray -d '' files < <(find "${d}"/doc -name "*.txt" -type l -print0 || die "cannot traverse ${d}/doc" )
+ for helpfile in "${files[@]}"; do
+ if [[ $(readlink -f "${helpfile}") == "${vimfiles}"/* ]]; then
+ rm "${helpfile}" || die
+ fi
done
- if [[ -f "$d/doc/tags" && $(find "$d" | wc -l | tr -d ' ') = 3 ]]; then
+
+ # Remove stale dirs, if possible
+ readarray -d '' files < <(find "${d}" -print0 || die "cannot traverse ${d}")
+ if [[ -f "${d}/doc/tags" && ${#files[@]} -eq 3 ]]; then
# /usr/share/vim/vim61
# /usr/share/vim/vim61/doc
# /usr/share/vim/vim61/doc/tags
- einfo "Removing $d"
- rm -r "$d"
+ einfo "Removing ${d}"
+ rm -r "${d}" || die
continue
fi
# Re-create / install new links
- if [[ -d $vimfiles/doc ]]; then
- ln -s $vimfiles/doc/*.txt $d/doc 2>/dev/null
+ if [[ -d "${vimfiles}"/doc ]]; then
+ for helpfile in "${vimfiles}"/doc/*.txt; do
+ if [[ ! -e "${d}/doc/$(basename "${helpfile}")" ]]; then
+ ln -s "${helpfile}" "${d}/doc" || die
+ fi
+ done
fi
# Update tags; need a vim binary for this
- if [[ -n "$vim" ]]; then
- einfo "Updating documentation tags in $d"
- DISPLAY= $vim -u NONE -U NONE -T xterm -X -n -f \
+ if [[ -n "${vim}" ]]; then
+ einfo "Updating documentation tags in ${d}"
+ DISPLAY= "${vim}" -u NONE -U NONE -T xterm -X -n -f \
'+set nobackup nomore' \
- "+helptags $d/doc" \
- '+qa!' </dev/null &>/dev/null
+ "+helptags ${d}/doc" \
+ '+qa!' </dev/null &>/dev/null || die
fi
done
- [[ -n "${vim}" && -f "${vim}" ]] && rm "${vim}"
+ if [[ -n "${vim}" && -f "${vim}" ]]; then
+ rm "${vim}" || die
+ fi
}
+_VIM_DOC_ECLASS=1
fi
diff --git a/eclass/vim-plugin.eclass b/eclass/vim-plugin.eclass
index 50e727e98f4e..ee4f1b6e0f81 100644
--- a/eclass/vim-plugin.eclass
+++ b/eclass/vim-plugin.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: vim-plugin.eclass
# @MAINTAINER:
# vim@gentoo.org
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: used for installing vim plugins
# @DESCRIPTION:
# This eclass simplifies installation of app-vim plugins into
@@ -13,14 +13,21 @@
# documentation, for which we make a special case via vim-doc.eclass.
case ${EAPI} in
- 6|7);;
- *) die "EAPI ${EAPI:-0} unsupported (too old)";;
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ -z ${_VIM_PLUGIN_ECLASS} ]]; then
+_VIM_PLUGIN_ECLASS=1
+
inherit vim-doc
-EXPORT_FUNCTIONS src_install pkg_postinst pkg_postrm
-VIM_PLUGIN_VIM_VERSION="${VIM_PLUGIN_VIM_VERSION:-7.3}"
+[[ ${EAPI} != [67] ]] && _DEFINE_VIM_PLUGIN_SRC_PREPARE=true
+
+# @ECLASS_VARIABLE: VIM_PLUGIN_VIM_VERSION
+# @DESCRIPTION:
+# Minimum Vim version the plugin supports.
+: "${VIM_PLUGIN_VIM_VERSION:=7.3}"
DEPEND="|| ( >=app-editors/vim-${VIM_PLUGIN_VIM_VERSION}
>=app-editors/gvim-${VIM_PLUGIN_VIM_VERSION} )"
@@ -31,14 +38,59 @@ if [[ ${PV} != 9999* ]] ; then
fi
SLOT="0"
+if [[ ${_DEFINE_VIM_PLUGIN_SRC_PREPARE} ]]; then
+# @FUNCTION: vim-plugin_src_prepare
+# @USAGE:
+# @DESCRIPTION:
+# Moves "after/syntax" plugins to directories to avoid file collisions with
+# other packages.
+# Note that this function is only defined and exported in EAPIs >= 8.
+vim-plugin_src_prepare() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ default_src_prepare
+
+ # return if there's nothing to do
+ [[ -d after/syntax ]] || return
+
+ pushd after/syntax >/dev/null || die
+ for file in *.vim; do
+ [[ -f "${file}" ]] || continue
+ mkdir "${file%.vim}" || die
+ mv "${file}" "${file%.vim}/${PN}.vim" || die
+ done
+ popd >/dev/null || die
+}
+fi
+
+# @ECLASS_VARIABLE: _VIM_PLUGIN_ALLOWED_DIRS
+# @INTERNAL
+# @DESCRIPTION:
+# Vanilla Vim dirs.
+# See /usr/share/vim/vim* for reference.
+_VIM_PLUGIN_ALLOWED_DIRS=(
+ after autoload colors compiler doc ftdetect ftplugin indent keymap
+ macros plugin spell syntax
+)
+
# @FUNCTION: vim-plugin_src_install
+# @USAGE: [<dir>...]
# @DESCRIPTION:
# Overrides the default src_install phase. In order, this function:
-# * fixes file permission across all files in ${S}.
+#
# * installs help and documentation files.
-# * installs all files in "${ED}"/usr/share/vim/vimfiles.
+#
+# * installs all files recognized by default Vim installation and directories
+# passed to this function as arguments in "${ED}"/usr/share/vim/vimfiles.
+#
+# Example use:
+# @CODE
+# src_install() {
+# vim-plugin_src_install syntax_checkers
+# }
+# @CODE
vim-plugin_src_install() {
- has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
+ debug-print-function ${FUNCNAME} "${@}"
# Install non-vim-help-docs
einstalldocs
@@ -46,21 +98,35 @@ vim-plugin_src_install() {
# Install remainder of plugin
insinto /usr/share/vim/vimfiles/
local d
- for d in *; do
- [[ -d "${d}" ]] || continue
- doins -r "${d}"
- done
+ case ${EAPI:-0} in
+ 6|7)
+ for d in *; do
+ [[ -d "${d}" ]] || continue
+ doins -r "${d}"
+ done ;;
+ *)
+ for d in "${_VIM_PLUGIN_ALLOWED_DIRS[@]}" "${@}"; do
+ [[ -d "${d}" ]] || continue
+ doins -r "${d}"
+ done ;;
+ esac
}
# @FUNCTION: vim-plugin_pkg_postinst
+# @USAGE:
# @DESCRIPTION:
# Overrides the pkg_postinst phase for this eclass.
# The following functions are called:
+#
# * update_vim_helptags
+#
# * update_vim_afterscripts
+#
# * display_vim_plugin_help
vim-plugin_pkg_postinst() {
- update_vim_helptags # from vim-doc
+ debug-print-function ${FUNCNAME} "${@}"
+
+ update_vim_helptags # from vim-doc
update_vim_afterscripts # see below
display_vim_plugin_help # see below
}
@@ -71,8 +137,9 @@ vim-plugin_pkg_postinst() {
# This function calls the update_vim_helptags and update_vim_afterscripts
# functions and eventually removes a bunch of empty directories.
vim-plugin_pkg_postrm() {
- has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
- update_vim_helptags # from vim-doc
+ debug-print-function ${FUNCNAME} "${@}"
+
+ update_vim_helptags # from vim-doc
update_vim_afterscripts # see below
# Remove empty dirs; this allows
@@ -82,25 +149,26 @@ vim-plugin_pkg_postrm() {
}
# @FUNCTION: update_vim_afterscripts
+# @USAGE:
# @DESCRIPTION:
# Creates scripts in /usr/share/vim/vimfiles/after/*
# comprised of the snippets in /usr/share/vim/vimfiles/after/*/*.d
update_vim_afterscripts() {
- has "${EAPI:-0}" 0 1 2 && ! use prefix && EROOT="${ROOT}"
- has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+ debug-print-function ${FUNCNAME} "${@}"
+
local d f afterdir="${EROOT}"/usr/share/vim/vimfiles/after
# Nothing to do if the dir isn't there
- [ -d "${afterdir}" ] || return 0
+ [[ -d "${afterdir}" ]] || return 0
- einfo "Updating scripts in ${EPREFIX}/usr/share/vim/vimfiles/after"
+ einfo "Updating scripts in ${afterdir}"
find "${afterdir}" -type d -name \*.vim.d | while read d; do
echo '" Generated by update_vim_afterscripts' > "${d%.d}" || die
find "${d}" -name \*.vim -type f -maxdepth 1 -print0 | sort -z | \
xargs -0 cat >> "${d%.d}" || die "update_vim_afterscripts failed"
done
- einfo "Removing dead scripts in ${EPREFIX}/usr/share/vim/vimfiles/after"
+ einfo "Removing dead scripts in ${afterdir}"
find "${afterdir}" -type f -name \*.vim | \
while read f; do
[[ "$(head -n 1 ${f})" == '" Generated by update_vim_afterscripts' ]] \
@@ -115,6 +183,7 @@ update_vim_afterscripts() {
}
# @FUNCTION: display_vim_plugin_help
+# @USAGE:
# @DESCRIPTION:
# Displays a message with the plugin's help file if one is available. Uses the
# VIM_PLUGIN_HELPFILES env var. If multiple help files are available, they
@@ -124,6 +193,8 @@ update_vim_afterscripts() {
# extra message regarding enabling filetype plugins is displayed if
# VIM_PLUGIN_MESSAGES includes the word "filetype".
display_vim_plugin_help() {
+ debug-print-function ${FUNCNAME} "${@}"
+
local h
if [[ -z ${REPLACING_VERSIONS} ]]; then
@@ -160,3 +231,10 @@ display_vim_plugin_help() {
fi
fi
}
+
+fi
+
+# src_prepare is only exported in EAPI >= 8
+[[ ${_DEFINE_VIM_PLUGIN_SRC_PREPARE} ]] && EXPORT_FUNCTIONS src_prepare
+
+EXPORT_FUNCTIONS src_install pkg_postinst pkg_postrm
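
The collision-avoidance rename performed by the new EAPI 8 vim-plugin_src_prepare can be illustrated with this standalone shell sketch (the plugin name and files are made up):

# Runs outside of any ebuild; "foo" stands in for ${PN}.
cd "$(mktemp -d)" || exit 1
PN=foo
mkdir -p after/syntax
touch after/syntax/python.vim after/syntax/sh.vim
cd after/syntax || exit 1
for file in *.vim; do
	mkdir "${file%.vim}"
	mv "${file}" "${file%.vim}/${PN}.vim"
done
# Result: after/syntax/python/foo.vim and after/syntax/sh/foo.vim, so several
# plugins can extend the same syntax without installing colliding files.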
diff --git a/eclass/vim-spell.eclass b/eclass/vim-spell.eclass
index 8acf29a5580d..855518d23e5b 100644
--- a/eclass/vim-spell.eclass
+++ b/eclass/vim-spell.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: vim-spell.eclass
@@ -63,13 +63,11 @@
# spell files. It's best to let upstream know if you've generated spell files
# for another language rather than keeping them Gentoo-specific.
-case ${EAPI:-0} in
- [67]) ;;
+case ${EAPI} in
+ 6|7) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_install pkg_postinst
-
if [[ -z ${_VIM_SPELL_ECLASS} ]] ; then
_VIM_SPELL_ECLASS=1
@@ -81,30 +79,30 @@ SLOT="0"
# This variable defines the language for the spell package being
# installed.
# The default value is "English".
-: ${VIM_SPELL_LANGUAGE:="English"}
+: "${VIM_SPELL_LANGUAGE:="English"}"
# @ECLASS_VARIABLE: VIM_SPELL_LOCALE
# @INTERNAL
# @DESCRIPTION:
# This variable defines the locale for the current ebuild.
# The default value is ${PN} stripped of the "vim-spell-" string.
-: ${VIM_SPELL_LOCALE:="${PN/vim-spell-/}"}
+: "${VIM_SPELL_LOCALE:="${PN/vim-spell-/}"}"
# @ECLASS_VARIABLE: VIM_SPELL_DIRECTORY
# @INTERNAL
# @DESCRIPTION:
# This variable defines the path to Vim spell files.
-: ${VIM_SPELL_DIRECTORY:="${EPREFIX}/usr/share/vim/vimfiles/spell/"}
+: "${VIM_SPELL_DIRECTORY:=/usr/share/vim/vimfiles/spell}"
# @ECLASS_VARIABLE: DESCRIPTION
# @DESCRIPTION:
# This variable defines the DESCRIPTION for Vim spell ebuilds.
-: ${DESCRIPTION:="vim spell files: ${VIM_SPELL_LANGUAGE} (${VIM_SPELL_LOCALE})"}
+: "${DESCRIPTION:="vim spell files: ${VIM_SPELL_LANGUAGE} (${VIM_SPELL_LOCALE})"}"
# @ECLASS_VARIABLE: HOMEPAGE
# @DESCRIPTION:
# This variable defines the HOMEPAGE for Vim spell ebuilds.
-: ${HOMEPAGE:="https://www.vim.org"}
+: "${HOMEPAGE:="https://www.vim.org"}"
# @FUNCTION: vim-spell_src_install
# @DESCRIPTION:
@@ -157,3 +155,5 @@ vim-spell_pkg_postinst() {
}
fi
+
+EXPORT_FUNCTIONS src_install pkg_postinst
diff --git a/eclass/virtualx.eclass b/eclass/virtualx.eclass
index b7a25c897b4a..6e77cd662bdd 100644
--- a/eclass/virtualx.eclass
+++ b/eclass/virtualx.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: virtualx.eclass
@@ -11,10 +11,10 @@
case ${EAPI} in
6|7|8) ;;
- *) die "${ECLASS}: EAPI ${EAPI:-0} is not supported." ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-if [[ ! ${_VIRTUALX_ECLASS} ]]; then
+if [[ -z ${_VIRTUALX_ECLASS} ]]; then
_VIRTUALX_ECLASS=1
# @ECLASS_VARIABLE: VIRTUALX_REQUIRED
@@ -22,11 +22,11 @@ _VIRTUALX_ECLASS=1
# @DESCRIPTION:
# Variable specifying the dependency on xorg-server and xhost.
# Possible special values are "always" and "manual", which specify
-# the dependency to be set unconditionaly or not at all.
+# the dependency to be set unconditionally or not at all.
# Any other value is taken as useflag desired to be in control of
# the dependency (e.g. VIRTUALX_REQUIRED="kde" will add the dependency
# into "kde? ( )" and add kde into IUSE).
-: ${VIRTUALX_REQUIRED:=test}
+: "${VIRTUALX_REQUIRED:=test}"
# @ECLASS_VARIABLE: VIRTUALX_DEPEND
# @OUTPUT_VARIABLE
@@ -53,7 +53,7 @@ case ${VIRTUALX_REQUIRED} in
*)
BDEPEND="${VIRTUALX_REQUIRED}? ( ${VIRTUALX_DEPEND} )"
IUSE="${VIRTUALX_REQUIRED}"
- [[ ${VIRTUALX_REQUIRED} == test ]] &&
+ [[ ${VIRTUALX_REQUIRED} == "test" ]] &&
RESTRICT+=" !test? ( test )"
;;
esac
@@ -107,68 +107,37 @@ virtx() {
local i=0
local retval=0
- local OLD_SANDBOX_ON="${SANDBOX_ON}"
- local XVFB XHOST XDISPLAY
- local xvfbargs="-screen 0 1280x1024x24 +extension RANDR"
- XVFB=$(type -p Xvfb) || die
- XHOST=$(type -p xhost) || die
+ local xvfbargs=( -screen 0 1280x1024x24 +extension RANDR )
debug-print "${FUNCNAME}: running Xvfb hack"
export XAUTHORITY=
- # The following is derived from Mandrake's hack to allow
- # compiling without the X display
-
- einfo "Scanning for an open DISPLAY to start Xvfb ..."
- # If we are in a chrooted environment, and there is already a
- # X server started outside of the chroot, Xvfb will fail to start
- # on the same display (most cases this is :0 ), so make sure
- # Xvfb is started, else bump the display number
- #
- # Azarah - 5 May 2002
- # GNOME GDM may have started X on DISPLAY :0 with a
- # lock file /tmp/.X1024-lock, therefore start the search at 1.
- # Else a leftover /tmp/.X1-lock will prevent finding an available display.
- XDISPLAY=$(i=1; while [[ -f /tmp/.X${i}-lock ]] ; do ((i++));done; echo ${i})
- debug-print "${FUNCNAME}: XDISPLAY=${XDISPLAY}"
-
- # We really do not want SANDBOX enabled here
- export SANDBOX_ON="0"
-
- debug-print "${FUNCNAME}: ${XVFB} :${XDISPLAY} ${xvfbargs}"
- ${XVFB} :${XDISPLAY} ${xvfbargs} &>/dev/null &
- sleep 2
-
- local start=${XDISPLAY}
- while [[ ! -f /tmp/.X${XDISPLAY}-lock ]]; do
- # Stop trying after 15 tries
- if ((XDISPLAY - start > 15)) ; then
- eerror "'${XVFB} :${XDISPLAY} ${xvfbargs}' returns:"
- echo
- ${XVFB} :${XDISPLAY} ${xvfbargs}
- echo
- eerror "If possible, correct the above error and try your emerge again."
- die "Unable to start Xvfb"
- fi
- ((XDISPLAY++))
- debug-print "${FUNCNAME}: ${XVFB} :${XDISPLAY} ${xvfbargs}"
- ${XVFB} :${XDISPLAY} ${xvfbargs} &>/dev/null &
- sleep 2
- done
-
- # Now enable SANDBOX again if needed.
- export SANDBOX_ON="${OLD_SANDBOX_ON}"
-
- einfo "Starting Xvfb on \$DISPLAY=${XDISPLAY} ..."
-
- export DISPLAY=:${XDISPLAY}
- # Do not break on error, but setup $retval, as we need
- # to kill Xvfb
+
+ einfo "Starting Xvfb ..."
+
+ debug-print "${FUNCNAME}: Xvfb -displayfd 1 ${xvfbargs[*]}"
+ local logfile=${T}/Xvfb.log
+ local pidfile=${T}/Xvfb.pid
+ # NB: bash command substitution blocks until Xvfb prints fd to stdout
+ # and then closes the fd; only then it backgrounds properly
+ export DISPLAY=:$(
+ Xvfb -displayfd 1 "${xvfbargs[@]}" 2>"${logfile}" &
+ echo "$!" > "${pidfile}"
+ )
+
+ if [[ ${DISPLAY} == : ]]; then
+ eerror "Xvfb failed to start, reprinting error log"
+ cat "${logfile}"
+ die "Xvfb failed to start"
+ fi
+
+ # Do not break on error, but setup $retval, as we need to kill Xvfb
+ einfo "Xvfb started on DISPLAY=${DISPLAY}"
debug-print "${FUNCNAME}: $@"
nonfatal "$@"
retval=$?
# Now kill Xvfb
- kill $(cat /tmp/.X${XDISPLAY}-lock)
+ kill "$(<"${pidfile}")"
# die if our command failed
[[ ${retval} -ne 0 ]] && die "Failed to run '$@'"
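
Usage of the rewritten wrapper stays the same; a minimal sketch (the emake target is an assumption):

inherit virtualx

src_test() {
	# Starts Xvfb via -displayfd, runs the command, then kills the server.
	virtx emake check
}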
diff --git a/eclass/waf-utils.eclass b/eclass/waf-utils.eclass
index d5d52b9af247..f8d4b0aa94b4 100644
--- a/eclass/waf-utils.eclass
+++ b/eclass/waf-utils.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: waf-utils.eclass
@@ -8,26 +8,29 @@
# Original Author: Gilles Dartiguelongue <eva@gentoo.org>
# Various improvements based on cmake-utils.eclass: Tomáš Chvátal <scarabeus@gentoo.org>
# Proper prefix support: Jonathan Callen <jcallen@gentoo.org>
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 7 8
# @BLURB: common ebuild functions for waf-based packages
# @DESCRIPTION:
# The waf-utils eclass contains functions that make creating ebuild for
# waf-based packages much easier.
# Its main features are support of common portage default settings.
-inherit multilib toolchain-funcs multiprocessing
-
-case ${EAPI:-0} in
- 6|7) EXPORT_FUNCTIONS src_configure src_compile src_install ;;
- *) die "EAPI=${EAPI} is not supported" ;;
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
+if [[ ! ${_WAF_UTILS_ECLASS} ]]; then
+_WAF_UTILS_ECLASS=1
+
+inherit multilib toolchain-funcs multiprocessing
+
# @ECLASS_VARIABLE: WAF_VERBOSE
# @USER_VARIABLE
# @DESCRIPTION:
# Set to OFF to disable verbose messages during compilation
# this is _not_ meant to be set in ebuilds
-: ${WAF_VERBOSE:=ON}
+: "${WAF_VERBOSE:=ON}"
# @ECLASS_VARIABLE: WAF_BINARY
# @DESCRIPTION:
@@ -41,7 +44,7 @@ waf-utils_src_configure() {
debug-print-function ${FUNCNAME} "$@"
local fail
- if [[ ! ${_PYTHON_ANY_R1} && ! ${_PYTHON_SINGLE_R1} && ! ${_PYTHON_R1} ]]; then
+ if [[ ! ${_PYTHON_ANY_R1_ECLASS} && ! ${_PYTHON_SINGLE_R1_ECLASS} && ! ${_PYTHON_R1_ECLASS} ]]; then
eerror "Using waf-utils.eclass without any python-r1 suite eclass is not supported."
eerror "Please make sure to configure and inherit appropriate -r1 eclass."
eerror "For more information and examples, please see:"
@@ -51,9 +54,9 @@ waf-utils_src_configure() {
if [[ ! ${EPYTHON} ]]; then
eerror "EPYTHON is unset while calling waf-utils. This most likely means that"
eerror "the ebuild did not call the appropriate eclass function before calling waf."
- if [[ ${_PYTHON_ANY_R1} ]]; then
+ if [[ ${_PYTHON_ANY_R1_ECLASS} ]]; then
eerror "Please ensure that python-any-r1_pkg_setup is called in pkg_setup()."
- elif [[ ${_PYTHON_SINGLE_R1} ]]; then
+ elif [[ ${_PYTHON_SINGLE_R1_ECLASS} ]]; then
eerror "Please ensure that python-single-r1_pkg_setup is called in pkg_setup()."
else # python-r1
eerror "Please ensure that python_setup is called before waf-utils_src_configure(),"
@@ -64,7 +67,7 @@ waf-utils_src_configure() {
fi
if [[ ${PYTHON_REQ_USE} != *threads* ]]; then
- eerror "Waf requires threading support in Python. To accomodate this requirement,"
+ eerror "Waf requires threading support in Python. To accommodate this requirement,"
eerror "please add 'threads(+)' to PYTHON_REQ_USE variable (above inherit line)."
eerror "For more information and examples, please see:"
eerror " https://wiki.gentoo.org/wiki/Project:Python/waf-utils_integration"
@@ -74,7 +77,7 @@ waf-utils_src_configure() {
[[ ${fail} ]] && die "Invalid use of waf-utils.eclass"
- : ${WAF_BINARY:="${S}/waf"}
+ : "${WAF_BINARY:="${S}/waf"}"
local conf_args=()
@@ -88,17 +91,23 @@ waf-utils_src_configure() {
if [[ ${waf_help} == *--libdir* ]]; then
conf_args+=( --libdir="${EPREFIX}/usr/$(get_libdir)" )
fi
+ if [[ ${waf_help} == *--mandir* ]]; then
+ conf_args+=( --mandir="${EPREFIX}"/usr/share/man )
+ fi
tc-export AR CC CPP CXX RANLIB
local CMD=(
+ PYTHONHASHSEED=1
CCFLAGS="${CFLAGS}"
LINKFLAGS="${CFLAGS} ${LDFLAGS}"
PKGCONFIG="$(tc-getPKG_CONFIG)"
"${WAF_BINARY}"
+ "--jobs=1"
"--prefix=${EPREFIX}/usr"
"${conf_args[@]}"
"${@}"
+ ${EXTRA_ECONF}
configure
)
@@ -114,6 +123,8 @@ waf-utils_src_compile() {
local _mywafconfig
[[ ${WAF_VERBOSE} == ON ]] && _mywafconfig="--verbose"
+ export PYTHONHASHSEED=1
+
local jobs="--jobs=$(makeopts_jobs)"
echo "\"${WAF_BINARY}\" build ${_mywafconfig} ${jobs} ${*}"
"${WAF_BINARY}" ${_mywafconfig} ${jobs} "${@}" || die "build failed"
@@ -125,9 +136,16 @@ waf-utils_src_compile() {
waf-utils_src_install() {
debug-print-function ${FUNCNAME} "$@"
- echo "\"${WAF_BINARY}\" --destdir=\"${D}\" ${*} install"
- "${WAF_BINARY}" --destdir="${D}" "${@}" install || die "Make install failed"
+ export PYTHONHASHSEED=1
+
+ local jobs="--jobs=$(makeopts_jobs)"
+ echo "\"${WAF_BINARY}\" ${jobs} --destdir=\"${D}\" ${*} install"
+ "${WAF_BINARY}" ${jobs} --destdir="${D}" "${@}" install || die "Make install failed"
# Manual document installation
einstalldocs
}
+
+fi
+
+EXPORT_FUNCTIONS src_configure src_compile src_install
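
A minimal consumer sketch under the new requirements (the package, the PYTHON_COMPAT range, and the configure switch are assumptions); a python-r1 suite eclass and threaded Python remain mandatory:

EAPI=8
PYTHON_COMPAT=( python3_{10..12} )
PYTHON_REQ_USE="threads(+)"
inherit python-any-r1 waf-utils

src_configure() {
	waf-utils_src_configure --enable-examples
}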
diff --git a/eclass/webapp.eclass b/eclass/webapp.eclass
index 1efe4e66b6a9..5b091c84851f 100644
--- a/eclass/webapp.eclass
+++ b/eclass/webapp.eclass
@@ -1,22 +1,20 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: webapp.eclass
# @MAINTAINER:
# web-apps@gentoo.org
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @BLURB: functions for installing applications to run under a web server
# @DESCRIPTION:
# The webapp eclass contains functions to handle web applications with
# webapp-config. Part of the implementation of GLEP #11
-case ${EAPI:-0} in
- [5678]) ;;
+case ${EAPI} in
+ 6|7|8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS pkg_postinst pkg_setup src_install pkg_prerm
-
if [[ -z ${_WEBAPP_ECLASS} ]]; then
_WEBAPP_ECLASS=1
@@ -98,21 +96,6 @@ webapp_check_installedat() {
${WEBAPP_CONFIG} --show-installed -h localhost -d "${INSTALL_DIR}" 2> /dev/null
}
-webapp_strip_appdir() {
- debug-print-function $FUNCNAME $*
- echo "${1#${MY_APPDIR}/}"
-}
-
-webapp_strip_d() {
- debug-print-function $FUNCNAME $*
- echo "${1#${D}}"
-}
-
-webapp_strip_cwd() {
- debug-print-function $FUNCNAME $*
- echo "${1/#.\///}"
-}
-
webapp_getinstalltype() {
debug-print-function $FUNCNAME $*
@@ -197,11 +180,14 @@ webapp_configfile() {
for m in "$@"; do
webapp_checkfileexists "${m}" "${D}"
- local my_file="$(webapp_strip_appdir "${m}")"
- my_file="$(webapp_strip_cwd "${my_file}")"
+ local my_file
+ # Strip appdir
+ my_file="${m#${MY_APPDIR}/}"
+ # Strip cwd
+ my_file="${my_file/#.\///}"
elog "(config) ${my_file}"
- echo "${my_file}" >> ${D}/${WA_CONFIGLIST}
+ echo "${my_file}" >> "${D}/${WA_CONFIGLIST}"
done
}
@@ -251,8 +237,11 @@ _webapp_serverowned() {
debug-print-function $FUNCNAME $*
webapp_checkfileexists "${1}" "${D}"
- local my_file="$(webapp_strip_appdir "${1}")"
- my_file="$(webapp_strip_cwd "${my_file}")"
+ local my_file
+ # Strip appdir
+ my_file="${1#${MY_APPDIR}/}"
+ # Strip cwd
+ my_file="${my_file/#.\///}"
echo "${my_file}" >> "${D}/${WA_SOLIST}"
}
@@ -266,14 +255,15 @@ _webapp_serverowned() {
webapp_serverowned() {
debug-print-function $FUNCNAME $*
- local a m
+ local m
if [[ "${1}" == "-R" ]]; then
shift
for m in "$@"; do
- find "${D}${m}" | while read a; do
- a=$(webapp_strip_d "${a}")
- _webapp_serverowned "${a}"
- done
+ pushd "${D}${MY_APPDIR}" > /dev/null || die
+ # Strip appdir
+ m="${m#${MY_APPDIR}/}"
+ find "${m}" >> "${D}/${WA_SOLIST}" || die
+ popd > /dev/null || die
done
else
for m in "$@"; do
@@ -391,7 +381,7 @@ webapp_pkg_setup() {
# webapp_src_install() within the same shell process
touch "${T}/${SETUP_CHECK_FILE}"
- # special case - some ebuilds *do* need to overwride the SLOT
+ # special case - some ebuilds *do* need to override the SLOT
if [[ "${SLOT}+" != "${PVR}+" && "${WEBAPP_MANUAL_SLOT}" != "yes" ]]; then
die "Set WEBAPP_MANUAL_SLOT=\"yes\" if you need to SLOT manually"
fi
@@ -588,3 +578,5 @@ webapp_pkg_prerm() {
}
fi
+
+EXPORT_FUNCTIONS pkg_postinst pkg_setup src_install pkg_prerm
diff --git a/eclass/wxwidgets.eclass b/eclass/wxwidgets.eclass
index afc4aae5f7d3..0a58c44bbf40 100644
--- a/eclass/wxwidgets.eclass
+++ b/eclass/wxwidgets.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: wxwidgets.eclass
@@ -36,14 +36,14 @@ _WXWIDGETS_ECLASS=1
# The SLOT of the x11-libs/wxGTK you're targeting. Needs to be defined before
# inheriting the eclass. Can be either "3.0" or "3.0-gtk3".
case ${WX_GTK_VER} in
- 3.0-gtk3) ;;
+ 3.0-gtk3 | 3.2-gtk3) ;;
3.0)
if [[ ${EAPI} != 7 ]]; then
die "${ECLASS}: GTK 2 no longer supported in EAPI ${EAPI}"
fi
;;
"") die "WX_GTK_VER not declared" ;;
- *) die "Invalid WX_GTK_VER: must be set to a valid wxGTK SLOT ('3.0' or '3.0-gtk3')" ;;
+ *) die "Invalid WX_GTK_VER: must be set to a valid wxGTK SLOT ('3.0', '3.0-gtk3', or '3.2-gtk3')" ;;
esac
readonly WX_GTK_VER
@@ -67,7 +67,7 @@ setup-wxwidgets() {
local w wxtoolkit wxconf
case ${WX_GTK_VER} in
- 3.0-gtk3) wxtoolkit=gtk3 ;;
+ 3.0-gtk3 | 3.2-gtk3) wxtoolkit=gtk3 ;;
3.0) wxtoolkit=gtk2
eqawarn "This package relies on the deprecated GTK 2 slot, which will go away soon (https://bugs.gentoo.org/618642)"
;;
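
An ebuild targeting the newly accepted slot might look like this sketch (the package and its autotools build system are assumptions):

EAPI=8
WX_GTK_VER="3.2-gtk3"
inherit wxwidgets

DEPEND="x11-libs/wxGTK:${WX_GTK_VER}"
RDEPEND="${DEPEND}"

src_configure() {
	setup-wxwidgets
	econf
}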
diff --git a/eclass/xdg-utils.eclass b/eclass/xdg-utils.eclass
index ae2b71cd2495..34535a129e33 100644
--- a/eclass/xdg-utils.eclass
+++ b/eclass/xdg-utils.eclass
@@ -1,4 +1,4 @@
-# Copyright 2004-2021 Gentoo Authors
+# Copyright 2004-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: xdg-utils.eclass
@@ -26,13 +26,13 @@ esac
# @INTERNAL
# @DESCRIPTION:
# Directory where .desktop files database is stored
-: ${DESKTOP_DATABASE_DIR="/usr/share/applications"}
+: "${DESKTOP_DATABASE_DIR="/usr/share/applications"}"
# @ECLASS_VARIABLE: MIMEINFO_DATABASE_DIR
# @INTERNAL
# @DESCRIPTION:
# Directory where .desktop files database is stored
-: ${MIMEINFO_DATABASE_DIR:="/usr/share/mime"}
+: "${MIMEINFO_DATABASE_DIR:="/usr/share/mime"}"
# @FUNCTION: xdg_environment_reset
# @DESCRIPTION:
@@ -42,9 +42,10 @@ xdg_environment_reset() {
export XDG_DATA_HOME="${HOME}/.local/share"
export XDG_CONFIG_HOME="${HOME}/.config"
export XDG_CACHE_HOME="${HOME}/.cache"
+ export XDG_STATE_HOME="${HOME}/.local/state"
export XDG_RUNTIME_DIR="${T}/run"
mkdir -p "${XDG_DATA_HOME}" "${XDG_CONFIG_HOME}" "${XDG_CACHE_HOME}" \
- "${XDG_RUNTIME_DIR}" || die
+ "${XDG_STATE_HOME}" "${XDG_RUNTIME_DIR}" || die
# This directory needs to be owned by the user, and chmod 0700
# https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
chmod 0700 "${XDG_RUNTIME_DIR}" || die
diff --git a/eclass/xdg.eclass b/eclass/xdg.eclass
index a3e75103a046..14c56047af45 100644
--- a/eclass/xdg.eclass
+++ b/eclass/xdg.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: xdg.eclass
@@ -6,56 +6,54 @@
# freedesktop-bugs@gentoo.org
# @AUTHOR:
# Original author: Gilles Dartiguelongue <eva@gentoo.org>
-# @SUPPORTED_EAPIS: 5 6 7 8
+# @SUPPORTED_EAPIS: 6 7 8
# @PROVIDES: xdg-utils
# @BLURB: Provides phases for XDG compliant packages.
# @DESCRIPTION:
# Utility eclass to update the desktop, icon and shared mime info as laid
# out in the freedesktop specs & implementations
-inherit xdg-utils
-
-_DEFINE_XDG_SRC_PREPARE=false
-case "${EAPI}" in
- 5|6|7)
- # src_prepare is only exported in EAPI < 8.
- EXPORT_FUNCTIONS src_prepare
- _DEFINE_XDG_SRC_PREPARE=true
- ;;
- 8)
- ;;
- *) die "${ECLASS}: EAPI=${EAPI} is not supported" ;;
+case ${EAPI} in
+ 6|7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS pkg_preinst pkg_postinst pkg_postrm
+
+if [[ -z ${_XDG_ECLASS} ]]; then
+_XDG_ECLASS=1
+
+inherit xdg-utils
# Avoid dependency loop as both depend on glib-2
-if [[ ${CATEGORY}/${P} != dev-libs/glib-2.* ]] ; then
-_XDG_DEPEND="
+[[ ${CATEGORY}/${P} != dev-libs/glib-2.* ]] && _XDG_DEPEND="
dev-util/desktop-file-utils
x11-misc/shared-mime-info
"
-case "${EAPI}" in
- 5|6|7)
+case ${EAPI} in
+ 6|7)
+ # src_prepare is only exported in EAPI < 8.
+ # @FUNCTION: xdg_src_prepare
+ # @DESCRIPTION:
+ # Prepare sources to work with XDG standards.
+ # Note that this function is only defined and exported in EAPIs < 8.
+ xdg_src_prepare() {
+ xdg_environment_reset
+ default
+ }
+
+ EXPORT_FUNCTIONS src_prepare
+
DEPEND="${_XDG_DEPEND}"
;;
*)
+ xdg_src_prepare() {
+ die "Called xdg_src_prepare in EAPI >= 8"
+ }
+
IDEPEND="${_XDG_DEPEND}"
;;
esac
-fi
-
-if ${_DEFINE_XDG_SRC_PREPARE}; then
-# @FUNCTION: xdg_src_prepare
-# @DESCRIPTION:
-# Prepare sources to work with XDG standards.
-# Note that this function is only defined and exported in EAPIs < 8.
-xdg_src_prepare() {
- xdg_environment_reset
-
- [[ ${EAPI} != 5 ]] && default
-}
-fi
+unset _XDG_DEPEND
# @FUNCTION: xdg_pkg_preinst
# @DESCRIPTION:
@@ -127,3 +125,6 @@ xdg_pkg_postrm() {
fi
}
+fi
+
+EXPORT_FUNCTIONS pkg_preinst pkg_postinst pkg_postrm
diff --git a/eclass/xemacs-packages.eclass b/eclass/xemacs-packages.eclass
index 2440ef835291..ca60e0c3b3b7 100644
--- a/eclass/xemacs-packages.eclass
+++ b/eclass/xemacs-packages.eclass
@@ -1,10 +1,10 @@
-# Copyright 1999-2021 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: xemacs-packages.eclass
# @MAINTAINER:
# xemacs@gentoo.org
-# @SUPPORTED_EAPIS: 6 7
+# @SUPPORTED_EAPIS: 8
# @BLURB: Eclass to support elisp packages distributed by XEmacs.
# @DESCRIPTION:
# This eclass supports ebuilds for packages distributed by XEmacs.
@@ -12,7 +12,7 @@
# @ECLASS_VARIABLE: XEMACS_PKG_CAT
# @REQUIRED
# @DESCRIPTION:
-# The package category that the package is in. Can be either standard,
+# The package category that the package is in. Can be either standard,
# mule, or contrib.
# @ECLASS_VARIABLE: XEMACS_EXPERIMENTAL
@@ -20,34 +20,28 @@
# @DEFAULT_UNSET
# @DESCRIPTION:
# If set then the package is downloaded from the experimental packages
-# repository, which is the staging area for packages upstream. Packages
+# repository, which is the staging area for packages upstream. Packages
# in the experimental repository are auto-generated from XEmacs VCS, so
# they may not be well-tested.
-case ${EAPI:-0} in
- [67]) ;;
+case ${EAPI} in
+ 8) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
-EXPORT_FUNCTIONS src_unpack src_install
-
if [[ -z ${_XEMACS_PACKAGES_ECLASS} ]] ; then
_XEMACS_PACKAGES_ECLASS=1
RDEPEND="app-editors/xemacs"
S="${WORKDIR}"
-: ${HOMEPAGE:="http://xemacs.org/"}
-: ${LICENSE:="GPL-2+"}
-
-# Backwards compatibility code, to be removed after 2017-05-03
-: ${XEMACS_PKG_CAT:=${PKG_CAT}}
-: ${XEMACS_EXPERIMENTAL:=${EXPERIMENTAL}}
+: "${HOMEPAGE:="http://xemacs.org/"}"
+: "${LICENSE:="GPL-2+"}"
if [[ -n ${XEMACS_EXPERIMENTAL} ]]; then
- : ${SRC_URI:="http://ftp.xemacs.org/pub/xemacs/beta/experimental/packages/${P}-pkg.tar.gz"}
+ : "${SRC_URI:="http://ftp.xemacs.org/pub/xemacs/beta/experimental/packages/${P}-pkg.tar.gz"}"
else
- : ${SRC_URI:="http://ftp.xemacs.org/pub/xemacs/packages/${P}-pkg.tar.gz"}
+ : "${SRC_URI:="http://ftp.xemacs.org/pub/xemacs/packages/${P}-pkg.tar.gz"}"
fi
xemacs-packages_src_unpack() { :; }
@@ -69,3 +63,5 @@ xemacs-packages_src_install() {
}
fi
+
+EXPORT_FUNCTIONS src_unpack src_install
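
With only EAPI 8 accepted, a consumer reduces to roughly the following sketch (category and keywords are illustrative):

EAPI=8
XEMACS_PKG_CAT="standard"
inherit xemacs-packages

DESCRIPTION="Example XEmacs package"
KEYWORDS="~amd64 ~x86"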
diff --git a/eclass/xorg-3.eclass b/eclass/xorg-3.eclass
index 378a7b8c4103..dde954ca77aa 100644
--- a/eclass/xorg-3.eclass
+++ b/eclass/xorg-3.eclass
@@ -1,4 +1,4 @@
-# Copyright 1999-2022 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: xorg-3.eclass
@@ -22,10 +22,18 @@
# with the other X packages, you don't need to set SRC_URI. Pretty much
# everything else should be automatic.
+case ${EAPI} in
+ 7|8) ;;
+ *) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
+esac
+
+if [[ -z ${_XORG_3_ECLASS} ]]; then
+_XORG_3_ECLASS=1
+
GIT_ECLASS=""
if [[ ${PV} == *9999* ]]; then
GIT_ECLASS="git-r3"
- : ${XORG_EAUTORECONF:="yes"}
+ : "${XORG_EAUTORECONF:="yes"}"
fi
# If we're a font package, but not the font.alias one
@@ -47,40 +55,28 @@ fi
# @DESCRIPTION:
# If set to 'yes', the multilib support for package will be enabled. Set
# before inheriting this eclass.
-: ${XORG_MULTILIB:="no"}
+: "${XORG_MULTILIB:="no"}"
# we need to inherit autotools first to get the deps
inherit autotools libtool multilib toolchain-funcs flag-o-matic \
${FONT_ECLASS} ${GIT_ECLASS}
unset FONT_ECLASS GIT_ECLASS
-if [[ ${XORG_MULTILIB} == yes ]]; then
- inherit multilib-minimal
-fi
-
-case "${EAPI:-0}" in
- [7-8]) ;;
- *) die "EAPI=${EAPI} is not supported" ;;
-esac
-
-# exports must be ALWAYS after inherit
-EXPORT_FUNCTIONS src_prepare src_configure src_unpack src_compile src_install pkg_postinst pkg_postrm
-
-IUSE=""
+[[ ${XORG_MULTILIB} == yes ]] && inherit multilib-minimal
# @ECLASS_VARIABLE: XORG_EAUTORECONF
# @PRE_INHERIT
# @DESCRIPTION:
# If set to 'yes' and configure.ac exists, eautoreconf will run. Set
# before inheriting this eclass.
-: ${XORG_EAUTORECONF:="no"}
+: "${XORG_EAUTORECONF:="no"}"
# @ECLASS_VARIABLE: XORG_BASE_INDIVIDUAL_URI
# @PRE_INHERIT
# @DESCRIPTION:
# Set up SRC_URI for individual modular releases. If set to an empty
# string, no SRC_URI will be provided by the eclass.
-: ${XORG_BASE_INDIVIDUAL_URI="https://www.x.org/releases/individual"}
+: "${XORG_BASE_INDIVIDUAL_URI="https://www.x.org/releases/individual"}"
# @ECLASS_VARIABLE: XORG_MODULE
# @PRE_INHERIT
@@ -88,7 +84,7 @@ IUSE=""
# The subdirectory to download source from. Possible settings are app,
# doc, data, util, driver, font, lib, proto, xserver. Set above the
# inherit to override the default autoconfigured module.
-: ${XORG_MODULE:="auto"}
+: "${XORG_MODULE:="auto"}"
if [[ ${XORG_MODULE} == auto ]]; then
case "${CATEGORY}/${P}" in
app-doc/*) XORG_MODULE=doc/ ;;
@@ -97,7 +93,6 @@ if [[ ${XORG_MODULE} == auto ]]; then
x11-misc/*|x11-themes/*) XORG_MODULE=util/ ;;
x11-base/*) XORG_MODULE=xserver/ ;;
x11-drivers/*) XORG_MODULE=driver/ ;;
- x11-libs/xcb-util-*) XORG_MODULE=xcb/ ;;
x11-libs/*) XORG_MODULE=lib/ ;;
*) XORG_MODULE= ;;
esac
@@ -108,7 +103,7 @@ fi
# @DESCRIPTION:
# For git checkout the git repository might differ from package name.
# This variable can be used for proper directory specification
-: ${XORG_PACKAGE_NAME:=${PN}}
+: "${XORG_PACKAGE_NAME:=${PN}}"
HOMEPAGE="https://www.x.org/wiki/ https://gitlab.freedesktop.org/xorg/${XORG_MODULE}${XORG_PACKAGE_NAME}"
@@ -117,36 +112,31 @@ HOMEPAGE="https://www.x.org/wiki/ https://gitlab.freedesktop.org/xorg/${XORG_MOD
# @DESCRIPTION:
# Most X11 projects provide tarballs as tar.bz2 or tar.xz. This eclass defaults
# to bz2.
-: ${XORG_TARBALL_SUFFIX:="bz2"}
+: "${XORG_TARBALL_SUFFIX:="bz2"}"
if [[ ${PV} == *9999* ]]; then
- : ${EGIT_REPO_URI:="https://gitlab.freedesktop.org/xorg/${XORG_MODULE}${XORG_PACKAGE_NAME}.git"}
+ : "${EGIT_REPO_URI:="https://gitlab.freedesktop.org/xorg/${XORG_MODULE}${XORG_PACKAGE_NAME}.git"}"
elif [[ -n ${XORG_BASE_INDIVIDUAL_URI} ]]; then
SRC_URI="${XORG_BASE_INDIVIDUAL_URI}/${XORG_MODULE}${P}.tar.${XORG_TARBALL_SUFFIX}"
fi
-: ${SLOT:=0}
+: "${SLOT:=0}"
# Set the license for the package. This can be overridden by setting
# LICENSE after the inherit. Nearly all FreeDesktop-hosted X packages
# are under the MIT license. (This is what Red Hat does in their rpms)
-: ${LICENSE:=MIT}
+: "${LICENSE:=MIT}"
# Set up autotools shared dependencies
# Remember that all versions here MUST be stable
-XORG_EAUTORECONF_ARCHES="x86-winnt"
EAUTORECONF_DEPEND+="
- >=sys-devel/libtool-2.2.6a
+ >=dev-build/libtool-2.2.6a
sys-devel/m4"
if [[ ${PN} != util-macros ]] ; then
EAUTORECONF_DEPEND+=" >=x11-misc/util-macros-1.18"
# Required even by xorg-server
[[ ${PN} == "font-util" ]] || EAUTORECONF_DEPEND+=" >=media-fonts/font-util-1.2.0"
fi
-for arch in ${XORG_EAUTORECONF_ARCHES}; do
- EAUTORECONF_DEPENDS+=" ${arch}? ( ${EAUTORECONF_DEPEND} )"
-done
-unset arch XORG_EAUTORECONF_ARCHES
BDEPEND+=" ${EAUTORECONF_DEPENDS}"
[[ ${XORG_EAUTORECONF} != no ]] && BDEPEND+=" ${EAUTORECONF_DEPEND}"
unset EAUTORECONF_DEPENDS
@@ -185,7 +175,7 @@ BDEPEND+=" virtual/pkgconfig"
# are required for. Default value is "no"
#
# Eg. XORG_DRI="opengl" will pull all dri dependent deps for opengl useflag
-: ${XORG_DRI:="no"}
+: "${XORG_DRI:="no"}"
DRI_COMMON_DEPEND="
x11-base/xorg-server[-minimal]
@@ -219,7 +209,7 @@ fi
# are required for. Default value is "no"
#
# Eg. XORG_DOC="manual" will pull all doc dependent deps for manual useflag
-: ${XORG_DOC:="no"}
+: "${XORG_DOC:="no"}"
DOC_DEPEND="
doc? (
@@ -282,21 +272,11 @@ xorg-3_src_unpack() {
xorg-3_reconf_source() {
debug-print-function ${FUNCNAME} "$@"
- case ${CHOST} in
- *-aix* | *-winnt*)
- # some hosts need full eautoreconf
- [[ -e "./configure.ac" || -e "./configure.in" ]] \
- && XORG_EAUTORECONF=yes
- ;;
- *)
- # elibtoolize required for BSD
- [[ ${XORG_EAUTORECONF} != no && ( -e "./configure.ac" || -e "./configure.in" ) ]] \
- && XORG_EAUTORECONF=yes
- ;;
- esac
-
- [[ ${XORG_EAUTORECONF} != no ]] && eautoreconf
- elibtoolize --patch-only
+ if [[ ${XORG_EAUTORECONF} != no ]] ; then
+ eautoreconf
+ else
+ elibtoolize --patch-only
+ fi
}
# @FUNCTION: xorg-3_src_prepare
@@ -335,11 +315,11 @@ xorg-3_font_configure() {
xorg-3_flags_setup() {
debug-print-function ${FUNCNAME} "$@"
- # Win32 require special define
- [[ ${CHOST} == *-winnt* ]] && append-cppflags -DWIN32 -D__STDC__
- # hardened ldflags
- [[ ${PN} == xorg-server || ${PN} == xf86-video-* || ${PN} == xf86-input-* ]] \
- && append-ldflags -Wl,-z,lazy
+ # Hardened flags break module autoloading et al (also fixes #778494)
+ if [[ ${PN} == xorg-server || ${PN} == xf86-video-* || ${PN} == xf86-input-* ]]; then
+ filter-flags -fno-plt
+ append-ldflags -Wl,-z,lazy
+ fi
# Quite few libraries fail on runtime without these:
if has static-libs ${IUSE//+}; then
@@ -374,7 +354,7 @@ xorg-3_src_configure() {
# Check if package supports disabling of dep tracking
# Fixes warnings like:
# WARNING: unrecognized options: --disable-dependency-tracking
- if grep -q -s "disable-depencency-tracking" ${ECONF_SOURCE:-.}/configure; then
+ if grep -q -s "disable-dependency-tracking" ${ECONF_SOURCE:-.}/configure; then
local dep_track="--disable-dependency-tracking"
fi
@@ -412,7 +392,7 @@ xorg-3_src_configure() {
}
multilib_src_compile() {
- emake "$@" || die 'emake failed'
+ emake "$@"
}
# @FUNCTION: xorg-3_src_compile
@@ -424,12 +404,12 @@ xorg-3_src_compile() {
if [[ ${XORG_MULTILIB} == yes ]]; then
multilib-minimal_src_compile "$@"
else
- emake "$@" || die 'emake failed'
+ emake "$@"
fi
}
multilib_src_install() {
- emake DESTDIR="${D}" "${install_args[@]}" "$@" install || die "emake install failed"
+ emake DESTDIR="${D}" "${install_args[@]}" "$@" install
}
# @FUNCTION: xorg-3_src_install
@@ -443,7 +423,7 @@ xorg-3_src_install() {
if [[ ${XORG_MULTILIB} == yes ]]; then
multilib-minimal_src_install "$@"
else
- emake DESTDIR="${D}" "${install_args[@]}" "$@" install || die "emake install failed"
+ emake DESTDIR="${D}" "${install_args[@]}" "$@" install
einstalldocs
fi
@@ -460,7 +440,13 @@ xorg-3_src_install() {
# Don't install libtool archives (even for modules)
find "${D}" -type f -name '*.la' -delete || die
- [[ -n ${FONT} ]] && remove_font_metadata
+ if [[ -n ${FONT} ]] ; then
+ if [[ -n ${FONT_OPENTYPE_COMPAT} ]] && in_iuse opentype-compat && use opentype-compat ; then
+ font_wrap_opentype_compat
+ fi
+
+ remove_font_metadata
+ fi
}
# @FUNCTION: xorg-3_pkg_postinst
@@ -537,3 +523,7 @@ create_fonts_dir() {
-- "${EROOT}/usr/share/fonts/${FONT_DIR}"
eend $?
}
+
+fi
+
+EXPORT_FUNCTIONS src_prepare src_configure src_unpack src_compile src_install pkg_postinst pkg_postrm
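
Finally, a library-style consumer of xorg-3 after this cleanup, as a sketch (tarball suffix, description, and keywords are assumptions):

EAPI=8
XORG_TARBALL_SUFFIX="xz"
inherit xorg-3

DESCRIPTION="Hypothetical X.Org protocol library"
KEYWORDS="~amd64 ~x86"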