author    V3n3RiX <venerix@redcorelinux.org>    2020-11-28 20:40:51 +0000
committer V3n3RiX <venerix@redcorelinux.org>    2020-11-28 20:40:51 +0000
commit    9c417bacd51da6d8b57fa9f37425161d30d4b95b (patch)
tree      47c9d6e4243f39a1f48afd54c969b65b00a5c649 /sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild
parent    d934827bf44b7cfcf6711964418148fa60877668 (diff)
gentoo resync : 28.11.2020
Diffstat (limited to 'sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild')
-rw-r--r--  sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild  |  84
1 file changed, 84 insertions(+), 0 deletions(-)
diff --git a/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild b/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild
new file mode 100644
index 000000000000..99aa5cd404a1
--- /dev/null
+++ b/sys-cluster/spark-bin/spark-bin-2.4.7-r1.ebuild
@@ -0,0 +1,84 @@
+# Copyright 1999-2020 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=7
+
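+# java-pkg-2 supplies the Java pkg_setup/VM-selection machinery for packages
+# that depend on a JRE or JDK.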
+inherit java-pkg-2
+
+DESCRIPTION="Lightning-fast unified analytics engine"
+HOMEPAGE="https://spark.apache.org"
+SRC_URI="
+ !scala212? ( scala211? ( mirror://apache/spark/spark-${PV}/spark-${PV}-bin-without-hadoop.tgz -> ${P}-nohadoop-scala211.tgz ) )
+ !scala211? ( scala212? ( mirror://apache/spark/spark-${PV}/spark-${PV}-bin-without-hadoop-scala-2.12.tgz -> ${P}-nohadoop-scala212.tgz ) )
+"
+
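+# ^^ means exactly one of the Scala ABI flags may be enabled; SRC_URI above
+# fetches the matching upstream "without hadoop" tarball for whichever is set.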
+REQUIRED_USE="^^ ( scala211 scala212 )"
+
+LICENSE="Apache-2.0"
+SLOT="2"
+KEYWORDS="~amd64"
+
+IUSE="+scala211 scala212"
+
+RDEPEND="
+ >=virtual/jre-1.8"
+
+DEPEND="
+ >=virtual/jdk-1.8"
+
+DOCS=( LICENSE NOTICE README.md RELEASE )
+
+src_unpack() {
+ unpack ${A}
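+	# Upstream names the unpacked top-level directory after the Scala variant,
+	# so repoint S at whichever tree the chosen flag produced.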
+ use scala211 && S="${WORKDIR}/spark-${PV}-bin-without-hadoop"
+ use scala212 && S="${WORKDIR}/spark-${PV}-bin-without-hadoop-scala-2.12"
+}
+
+# Nothing to compile here.
+src_compile() { :; }
+
+src_install() {
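+	# Everything lands under a slotted prefix so several Spark versions can
+	# be installed side by side.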
+	dodir /usr/lib/spark-${SLOT}
+	into /usr/lib/spark-${SLOT}
+
+ local SPARK_SCRIPTS=(
+ bin/beeline
+ bin/load-spark-env.sh
+ bin/pyspark
+ bin/spark-class
+ bin/spark-shell
+ bin/spark-sql
+ bin/spark-submit
+ )
+
+ local s
+ for s in "${SPARK_SCRIPTS[@]}"; do
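+		# "2i" inserts before line 2, i.e. immediately after the shebang,
+		# so each wrapper knows where the slotted install lives.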
+		ebegin "Setting SPARK_HOME to /usr/lib/spark-${SLOT} in $(basename "${s}") script ..."
+		sed -i -e "2iSPARK_HOME=/usr/lib/spark-${SLOT}" "${s}"
+		eend $? || die "failed to set SPARK_HOME in ${s}"
+ dobin "${s}"
+ done
+
+	insinto /usr/lib/spark-${SLOT}
+
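+	# Copy the runtime payload (configs, jars, Python bindings, admin scripts)
+	# into the slotted tree verbatim.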
+ local SPARK_DIRS=( conf jars python sbin yarn )
+
+ local d
+ for d in "${SPARK_DIRS[@]}"; do
+ doins -r "${d}"
+	done
+
+	# doins installs everything 0644, so restore the execute bit on the
+	# admin scripts under sbin/.
+	fperms -R +x /usr/lib/spark-${SLOT}/sbin
+
+ einstalldocs
+}
+
+pkg_postinst() {
+ einfo
+ einfo "Spark is now slotted. You have installed Spark ${SLOT}."
+ einfo
+ einfo "Make sure to add /usr/lib/spark-${SLOT}/{bin,sbin} directories"
+ einfo "to your PATH in order to run Spark shell scripts:"
+ einfo
+ einfo "$ export PATH=\$PATH:/usr/lib/spark-${SLOT}/bin"
+ einfo "$ export PATH=\$PATH:/usr/lib/spark-${SLOT}/sbin"
+ einfo
+}
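
A quick post-install smoke test (a hypothetical shell session, not part of the ebuild; the paths follow from SLOT="2" above). Note that these are the "without hadoop" tarballs, so upstream expects a Hadoop classpath to be supplied via SPARK_DIST_CLASSPATH before Spark will start:

$ export PATH="${PATH}:/usr/lib/spark-2/bin:/usr/lib/spark-2/sbin"
$ export SPARK_DIST_CLASSPATH="$(hadoop classpath)"   # requires a local Hadoop install
$ spark-submit --version                              # should report Spark 2.4.7 and the chosen Scala version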