From 9c417bacd51da6d8b57fa9f37425161d30d4b95b Mon Sep 17 00:00:00 2001
From: V3n3RiX
Date: Sat, 28 Nov 2020 20:40:51 +0000
Subject: gentoo resync : 28.11.2020
---
 sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild | 84 +++++++++++++++++++++++++
 1 file changed, 84 insertions(+)
 create mode 100644 sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild

(limited to 'sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild')

diff --git a/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild b/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild
new file mode 100644
index 000000000000..99aa5cd404a1
--- /dev/null
+++ b/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild
@@ -0,0 +1,84 @@
+# Copyright 1999-2020 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=7
+
+inherit java-pkg-2
+
+DESCRIPTION="Lightning-fast unified analytics engine"
+HOMEPAGE="https://spark.apache.org"
+# Upstream's "without hadoop" binary tarballs: the Scala 2.11 and Scala 2.12
+# builds are distinct distfiles, selected by mutually exclusive USE flags
+# (hence the !scala212?/!scala211? guards mirroring REQUIRED_USE below).
+SRC_URI="
+	!scala212? ( scala211? ( mirror://apache/spark/spark-${PV}/spark-${PV}-bin-without-hadoop.tgz -> ${P}-nohadoop-scala211.tgz ) )
+	!scala211? ( scala212? ( mirror://apache/spark/spark-${PV}/spark-${PV}-bin-without-hadoop-scala-2.12.tgz -> ${P}-nohadoop-scala212.tgz ) )
+"
+
+# Exactly one Scala ABI must be enabled at a time.
+REQUIRED_USE="^^ ( scala211 scala212 )"
+
+LICENSE="Apache-2.0"
+# Slotted on the Spark major version so parallel installs are possible.
+SLOT="2"
+KEYWORDS="~amd64"
+
+IUSE="+scala211 scala212"
+
+RDEPEND="
+	>=virtual/jre-1.8"
+
+DEPEND="
+	>=virtual/jdk-1.8"
+
+DOCS=( LICENSE NOTICE README.md RELEASE )
+
+# The unpacked top-level directory name differs between the two distfiles,
+# so S cannot be set globally; pick it after unpacking based on USE.
+src_unpack() {
+	unpack ${A}
+	use scala211 && S="${WORKDIR}/spark-${PV}-bin-without-hadoop"
+	use scala212 && S="${WORKDIR}/spark-${PV}-bin-without-hadoop-scala-2.12"
+}
+
+# Nothing to compile here.
+
# Prebuilt binary distribution: explicit no-op compile phase.
src_compile() { :; }

# Install the whole distribution under the slotted prefix /usr/lib/spark-2,
# patching SPARK_HOME into the wrapper scripts before exposing them in PATH.
src_install() {
	dodir usr/lib/spark-${SLOT}
	into usr/lib/spark-${SLOT}

	# Wrapper scripts that need SPARK_HOME injected so they resolve the
	# slotted install location instead of guessing from their own path.
	local SPARK_SCRIPTS=(
		bin/beeline
		bin/load-spark-env.sh
		bin/pyspark
		bin/spark-class
		bin/spark-shell
		bin/spark-sql
		bin/spark-submit
	)

	local s
	for s in "${SPARK_SCRIPTS[@]}"; do
		ebegin "Setting SPARK_HOME to /usr/lib/spark-${SLOT} in $(basename ${s}) script ..."
		# Insert the assignment as line 2, immediately below the shebang.
		sed -i -e "2iSPARK_HOME=/usr/lib/spark-${SLOT}" "${s}"
		eend $?
		dobin "${s}"
	done

	insinto usr/lib/spark-${SLOT}

	# Runtime payload copied verbatim into the slotted prefix.
	local SPARK_DIRS=( conf jars python sbin yarn )

	local d
	for d in "${SPARK_DIRS[@]}"; do
		doins -r "${d}"
	done

	einstalldocs
}

# Post-install notice: the slotted bin/sbin dirs are not in PATH by default.
pkg_postinst() {
	einfo
	einfo "Spark is now slotted. You have installed Spark ${SLOT}."
	einfo
	einfo "Make sure to add /usr/lib/spark-${SLOT}/{bin,sbin} directories"
	einfo "to your PATH in order to run Spark shell scripts:"
	einfo
	einfo "$ export PATH=\$PATH:/usr/lib/spark-${SLOT}/bin"
	einfo "$ export PATH=\$PATH:/usr/lib/spark-${SLOT}/sbin"
	einfo
}
--
cgit v1.2.3