Skip to content

Commit

Permalink
devel/spark: Revive port
Browse files Browse the repository at this point in the history
PR:	266484
  • Loading branch information
Martinfx authored and neelchauhan committed Sep 19, 2022
1 parent 4ddd2f0 commit 0d25fd7
Show file tree
Hide file tree
Showing 8 changed files with 1,388 additions and 0 deletions.
1 change: 1 addition & 0 deletions devel/Makefile
Expand Up @@ -7206,6 +7206,7 @@
SUBDIR += sope
SUBDIR += sope2
SUBDIR += sord
SUBDIR += spark
SUBDIR += sparsebitset
SUBDIR += sparsehash
SUBDIR += spatialindex
Expand Down
75 changes: 75 additions & 0 deletions devel/spark/Makefile
@@ -0,0 +1,75 @@
# Build Apache Spark from the upstream source release with Maven.  The
# native FreeBSD libsnappyjava.so (from archivers/snappy-java) is injected
# into the bundled snappy-java jar after the build -- see do-build below.
PORTNAME= spark
PORTVERSION= 3.3.0
CATEGORIES= devel java
MASTER_SITES= https://archive.apache.org/dist/${PORTNAME}/${PORTNAME}-${PORTVERSION}/
PKGNAMEPREFIX= apache-
# Upstream ships the source tarball as .tgz, not the default .tar.gz.
DISTFILES= ${PORTNAME}-${PORTVERSION}.tgz

MAINTAINER= freebsd@sysctl.cz
COMMENT= Fast big data processing engine

LICENSE= APACHE20

# bash is needed both to run the build scripts and at runtime for the
# installed bin/sbin wrappers; the snappy library is repacked in do-build.
BUILD_DEPENDS= ${LOCALBASE}/lib/libsnappyjava.so:archivers/snappy-java \
	bash:shells/bash \
	mvn:devel/maven

RUN_DEPENDS= bash:shells/bash

USES= cpe python shebangfix
CPE_VENDOR= apache
USE_JAVA= yes
# Spark's Maven build needs a large heap and code cache.
# NOTE(review): -XX:MaxPermSize is ignored (warning only) on Java 8+ --
# confirm whether it can be dropped for the JDKs this port builds with.
MAKE_ENV+= JAVA_HOME=${JAVA_HOME} \
	MAVEN_OPTS="-Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m"

SHEBANG_FILES= bin/sparkR

# Daemons run as a dedicated spark user/group; the rc scripts su(1) to it.
USERS= spark
GROUPS= spark

USE_RC_SUBR= spark_master spark_worker
PLIST_SUB+= SPARK_GROUP=spark \
	SPARK_USER=spark \
	VER=${PORTVERSION}
SUB_LIST+= SPARK_GROUP=spark \
	SPARK_USER=spark

.include <bsd.port.pre.mk>

# Map FreeBSD ARCH names onto the directory layout snappy-java expects
# inside its jar (org/xerial/snappy/native/<OS>/<arch>).
.if ${ARCH} == "amd64"
JAVA_ARCH= x86_64
.elif ${ARCH} == "i386"
JAVA_ARCH= x86
.else
JAVA_ARCH= ${ARCH}
.endif

# Build with Maven (local repo under WRKDIR so nothing touches $HOME),
# then append the native FreeBSD libsnappyjava.so into the snappy-java jar
# that the assembly bundles, so Snappy compression works at runtime.
do-build:
	${MKDIR} ${WRKDIR}/snappy/org/xerial/snappy/native/${OPSYS}/${JAVA_ARCH}
	${CP} ${LOCALBASE}/lib/libsnappyjava.so \
		${WRKDIR}/snappy/org/xerial/snappy/native/${OPSYS}/${JAVA_ARCH}
	cd ${WRKSRC} && ${SETENV} ${MAKE_ENV} \
		${LOCALBASE}/bin/mvn \
		-Dmaven.repo.local=${WRKDIR}/m2 clean package \
		-Dhadoop.version=3.3.1 -Pyarn -Phive -Phive-thriftserver -DskipTests \
		-Duser.home=${WRKDIR}
	${JAR} uvf ${WRKSRC}/assembly/target/scala*/jars/snappy-java-*.jar \
		-C ${WRKDIR}/snappy org

# Drop Windows batch files and the .orig left behind by the patch phase so
# they do not end up in the package.
post-build:
	${RM} ${WRKSRC}/bin/*.cmd ${WRKSRC}/sbin/spark-daemon.sh.orig

# Stage the assembled jars, examples, scripts, python support and config
# templates under DATADIR; create the runtime pid/log directories.
do-install:
	${MKDIR} ${STAGEDIR}${DATADIR}/lib ${STAGEDIR}${DATADIR}/examples/jars ${STAGEDIR}${DATADIR}/bin ${STAGEDIR}${DATADIR}/sbin ${STAGEDIR}${DATADIR}/conf
	${ECHO_CMD} "Spark ${PORTVERSION} built for Hadoop 3.3.1" > ${STAGEDIR}${DATADIR}/RELEASE
	(cd ${WRKSRC}/assembly/target/scala* && ${COPYTREE_SHARE} jars ${STAGEDIR}${DATADIR})
	${INSTALL_DATA} ${WRKSRC}/examples/target/spark-examples*.jar ${STAGEDIR}${DATADIR}/examples/jars
	cd ${WRKSRC}/examples && ${COPYTREE_SHARE} src ${STAGEDIR}${DATADIR}/examples
	cd ${WRKSRC}/bin && ${INSTALL_SCRIPT} * ${STAGEDIR}${DATADIR}/bin/
	cd ${WRKSRC}/sbin && ${INSTALL_SCRIPT} * ${STAGEDIR}${DATADIR}/sbin/
	cd ${WRKSRC} && ${COPYTREE_SHARE} python ${STAGEDIR}${DATADIR}/
	${INSTALL_DATA} ${WRKSRC}/conf/*.template ${STAGEDIR}${DATADIR}/conf/
	${MKDIR} ${STAGEDIR}/var/run/spark
	${MKDIR} ${STAGEDIR}/var/log/spark

.include <bsd.port.post.mk>
3 changes: 3 additions & 0 deletions devel/spark/distinfo
@@ -0,0 +1,3 @@
TIMESTAMP = 1663492153
SHA256 (spark-3.3.0.tgz) = 9b357aa165e3d78820702f0eee3fa32097839d42c9d0f5b19563fd23b796d13c
SIZE (spark-3.3.0.tgz) = 29907712
26 changes: 26 additions & 0 deletions devel/spark/files/patch-sbin-spark-daemon.sh
@@ -0,0 +1,26 @@
--- sbin/spark-daemon.sh.orig 2015-10-26 09:05:25.709519603 +0000
+++ sbin/spark-daemon.sh 2015-10-26 09:07:31.767513027 +0000
@@ -99,14 +99,6 @@
if [ "$SPARK_LOG_DIR" = "" ]; then
export SPARK_LOG_DIR="$SPARK_HOME/logs"
fi
-mkdir -p "$SPARK_LOG_DIR"
-touch "$SPARK_LOG_DIR"/.spark_test > /dev/null 2>&1
-TEST_LOG_DIR=$?
-if [ "${TEST_LOG_DIR}" = "0" ]; then
- rm -f "$SPARK_LOG_DIR"/.spark_test
-else
- chown "$SPARK_IDENT_STRING" "$SPARK_LOG_DIR"
-fi

if [ "$SPARK_PID_DIR" = "" ]; then
SPARK_PID_DIR=/tmp
@@ -125,8 +117,6 @@
mode="$1"
shift

- mkdir -p "$SPARK_PID_DIR"
-
if [ -f "$pid" ]; then
TARGET_ID="$(cat "$pid")"
if [[ $(ps -p "$TARGET_ID" -o comm=) =~ "java" ]]; then
32 changes: 32 additions & 0 deletions devel/spark/files/spark_master.in
@@ -0,0 +1,32 @@
#!/bin/sh
#
# PROVIDE: spark_master
# REQUIRE: LOGIN
# KEYWORD: shutdown
#
# rc(8) script for the Apache Spark standalone master daemon.
#
# Tunables (set in /etc/rc.conf):
#   spark_master_enable (bool):       Set to YES to enable. Default NO.
#   spark_master_ip (str):            Address the master binds to.
#                                     Default: output of hostname(1).
#   spark_master_port (int):          Master RPC port. Default 7077.
#   spark_master_webui_port (int):    Web UI port. Default 8080.

. /etc/rc.subr

name=spark_master
rcvar=spark_master_enable
load_rc_config $name

: ${spark_master_enable:=NO}
# BUGFIX: was "${spark_master_ip=:`hostname`}" -- the ':' and '=' were
# transposed, so the default value carried a stray leading colon
# (":<hostname>") and an empty-but-set variable was never defaulted.
: ${spark_master_ip:=`hostname`}
: ${spark_master_port:=7077}
: ${spark_master_webui_port:=8080}

# Environment consumed by Spark's sbin/spark-daemon.sh and start-master.sh.
export SPARK_PID_DIR=/var/run/spark
export SPARK_LOG_DIR=/var/log/spark
export SPARK_MASTER_IP=${spark_master_ip}
export SPARK_MASTER_PORT=${spark_master_port}
export SPARK_MASTER_WEBUI_PORT=${spark_master_webui_port}
export SPARK_IDENT_STRING=%%SPARK_USER%%

# pidfile path must match what spark-daemon.sh writes for the Master class.
pidfile=${SPARK_PID_DIR}/spark-${SPARK_IDENT_STRING}-org.apache.spark.deploy.master.Master-1.pid
start_cmd="/usr/bin/su -m %%SPARK_USER%% -c %%DATADIR%%/sbin/start-master.sh"
stop_cmd="/usr/bin/su -m %%SPARK_USER%% -c %%DATADIR%%/sbin/stop-master.sh"

# Spark's shell scripts expect bash/java from the ports tree on PATH.
export PATH=$PATH:%%LOCALBASE%%/bin

run_rc_command "$1"
29 changes: 29 additions & 0 deletions devel/spark/files/spark_worker.in
@@ -0,0 +1,29 @@
#!/bin/sh
#
# PROVIDE: spark_worker
# REQUIRE: LOGIN
# KEYWORD: shutdown
#
# rc(8) script for the Apache Spark standalone worker daemon.
#
# Tunables (set in /etc/rc.conf):
#   spark_worker_enable (bool):  Set to YES to enable. Default NO.
#   spark_worker_master (str):   Master URL the worker registers with.
#                                Default spark://<hostname>:7077.
#   spark_worker_dir (str):      Worker scratch directory.
#                                Default /tmp/spark/worker.

. /etc/rc.subr

name=spark_worker
rcvar=spark_worker_enable
load_rc_config $name

: ${spark_worker_enable:=NO}
: ${spark_worker_master:="spark://`hostname`:7077"}
: ${spark_worker_dir:="/tmp/spark/worker"}

# Environment consumed by Spark's sbin/spark-daemon.sh and start scripts.
export SPARK_PID_DIR=/var/run/spark
export SPARK_LOG_DIR=/var/log/spark
export SPARK_IDENT_STRING=%%SPARK_USER%%
export SPARK_WORKER_DIR=${spark_worker_dir}

# pidfile path must match what spark-daemon.sh writes for the Worker class.
pidfile=${SPARK_PID_DIR}/spark-${SPARK_IDENT_STRING}-org.apache.spark.deploy.worker.Worker-1.pid
# NOTE(review): start-slave.sh is the pre-3.1 (deprecated) name of
# start-worker.sh -- confirm it still ships in the Spark 3.3.0 tarball.
start_cmd="/usr/bin/su -m %%SPARK_USER%% -c \"%%DATADIR%%/sbin/start-slave.sh ${spark_worker_master}\""
stop_cmd='/usr/bin/su -m %%SPARK_USER%% -c "%%DATADIR%%/sbin/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker"'

# Spark's shell scripts expect bash/java from the ports tree on PATH.
export PATH=$PATH:%%LOCALBASE%%/bin

run_rc_command "$1"
3 changes: 3 additions & 0 deletions devel/spark/pkg-descr
@@ -0,0 +1,3 @@
Apache Spark is a fast, general-purpose engine for large-scale data
processing.  It provides high-level APIs in Scala, Java, Python, and R,
and supports SQL, streaming, machine learning, and graph processing on
standalone clusters or on top of Hadoop YARN.

WWW: https://spark.apache.org/

0 comments on commit 0d25fd7

Please sign in to comment.