forked from amplab/shark
/
run
executable file
·156 lines (126 loc) · 4.11 KB
/
run
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
#!/bin/bash
# This file is used to launch Shark on the master.
#
# Configuration is read from conf/shark-env.sh (SCALA_HOME, HIVE_HOME,
# SPARK_HOME, MASTER, SHARK_MASTER_MEM, ...) if that file exists.

# Scala version this build of Shark targets.
export SCALA_VERSION=2.9.2

# Figure out where the framework is installed. All path expansions are
# quoted so an install directory containing spaces does not break the
# script (the original used backticks and unquoted $0).
FWDIR="$(cd "$(dirname "$0")" && pwd)"
export SHARK_HOME="$FWDIR"

# Load environment variables from conf/shark-env.sh, if it exists.
if [ -e "$SHARK_HOME/conf/shark-env.sh" ] ; then
  . "$SHARK_HOME/conf/shark-env.sh"
fi

# If SCALA_HOME is set, sanity-check that it points at a real Scala
# installation before doing anything else.
if [[ -n "${SCALA_HOME:-}" && ! -f "${SCALA_HOME:-}/lib/scala-library.jar" ]] ; then
  echo "Cannot find $SCALA_HOME/lib/scala-library.jar."
  echo "Are you sure your SCALA_HOME is set correctly?"
  echo "SCALA_HOME = $SCALA_HOME"
  exit 1
fi
# Hive related section.
# HIVE_HOME is mandatory; its lib/ must contain the exec jar matching
# HIVE_VERSION (defaults to 0.9.0 when unset).
if [ -z "${HIVE_HOME:-}" ] ; then
  echo "No HIVE_HOME specified. Please set HIVE_HOME."
  exit 1
fi
if [ -z "${HIVE_VERSION:-}" ]; then
  HIVE_VERSION="0.9.0"
fi
if [ ! -f "$HIVE_HOME/lib/hive-exec-$HIVE_VERSION.jar" ] ; then
  echo "Cannot find $HIVE_HOME/lib/hive-exec-$HIVE_VERSION.jar."
  echo "Are you sure your HIVE_HOME is set correctly?"
  echo "HIVE_HOME = $HIVE_HOME"
  exit 1
fi
# Cluster mode (MASTER set) additionally requires SPARK_HOME.
if [ -n "$MASTER" ] ; then
  # Quoted: the original unquoted `[ -z $SPARK_HOME ]` only worked by
  # accident for empty values and broke on values containing spaces.
  if [ -z "$SPARK_HOME" ] ; then
    echo "No SPARK_HOME specified. Please set SPARK_HOME for cluster mode."
    exit 1
  fi
fi
# Check for optionally specified configuration file path.
if [ -z "${HIVE_CONF_DIR:-}" ] ; then
  HIVE_CONF_DIR="$HIVE_HOME/conf"
fi
if [ -f "${HIVE_CONF_DIR}/hive-env.sh" ]; then
  . "${HIVE_CONF_DIR}/hive-env.sh"
fi

# Append every jar found under $1 to SPARK_CLASSPATH. Uses a
# NUL-delimited find so jar paths containing whitespace survive intact
# (the original backtick/for loops word-split such paths).
append_jars_to_classpath() {
  local dir="$1"
  local jar
  while IFS= read -r -d '' jar; do
    SPARK_CLASSPATH+=":$jar"
  done < <(find "$dir" -name '*jar' -print0)
}

# Add Shark jars.
append_jars_to_classpath "$SHARK_HOME/lib"
# sbt-specific jar directories.
if [ -d "$SHARK_HOME/lib_managed/jars" ]; then
  append_jars_to_classpath "$SHARK_HOME/lib_managed/jars"
fi
if [ -d "$SHARK_HOME/lib_managed/bundles" ]; then
  append_jars_to_classpath "$SHARK_HOME/lib_managed/bundles"
fi
# Add Hive jars.
append_jars_to_classpath "$HIVE_HOME/lib"
SPARK_CLASSPATH+=":$HIVE_CONF_DIR"
if [ -d "$SHARK_HOME/target" ]; then
  # Build up classpath in development mode: prefer the packaged Shark
  # jar, otherwise fall back to the compiled classes directory.
  if [ -f "$SHARK_HOME/target/scala-$SCALA_VERSION/shark_$SCALA_VERSION-0.2.jar" ] ; then
    SPARK_CLASSPATH+=":$SHARK_HOME/target/scala-$SCALA_VERSION/shark_$SCALA_VERSION-0.2.jar"
  else
    SPARK_CLASSPATH+=":$SHARK_HOME/target/scala-$SCALA_VERSION/classes"
  fi
  SPARK_CLASSPATH+=":$SHARK_HOME/target/scala-$SCALA_VERSION/test-classes"
fi
# Use Hadoop configuration from $HADOOP_CONF_DIR or if it is not set from $HADOOP_HOME/conf.
if [ -z "${HADOOP_CONF_DIR:-}" ]; then
  if [ -z "${HADOOP_HOME:-}" ] ; then
    echo "No HADOOP_HOME specified. Shark will run in local-mode"
  else
    HADOOP_CONF_DIR="$HADOOP_HOME/conf"
  fi
fi
if [ -n "${HADOOP_CONF_DIR:-}" ]; then
  # Quoted so a conf path containing spaces stays a single entry (the
  # original unquoted += split it).
  SPARK_CLASSPATH+=":$HADOOP_CONF_DIR"
fi
# TODO(rxin): Check aux classpath and aux java opts.
#CLASSPATH=${CLASSPATH}:${AUX_CLASSPATH}
export SPARK_CLASSPATH
# Every SPARK_CLASSPATH entry above is appended with a leading ':', so
# plain concatenation yields a valid CLASSPATH. Needed for spark-shell.
CLASSPATH+="$SPARK_CLASSPATH"
export CLASSPATH
# Pass test-specific JVM options through to Spark as well.
export SPARK_JAVA_OPTS+=" $TEST_JAVA_OPTS"
if [ -z "${SHARK_MASTER_MEM:-}" ] ; then
  SHARK_MASTER_MEM="512m"
fi
# Set JAVA_OPTS to be able to load native libraries and to set heap size.
# Each addition is prefixed with a space: the original appended
# $SPARK_JAVA_OPTS with no separator, corrupting any JAVA_OPTS already
# set in the caller's environment.
JAVA_OPTS+=" $SPARK_JAVA_OPTS"
JAVA_OPTS+=" -Djava.library.path=$SPARK_LIBRARY_PATH"
JAVA_OPTS+=" -Xms$SHARK_MASTER_MEM -Xmx$SHARK_MASTER_MEM"
export JAVA_OPTS
# In case we are running Ant.
export ANT_OPTS="$JAVA_OPTS"
# Choose how to launch the JVM unless the caller already supplied RUNNER.
if [ "x$RUNNER" == "x" ] ; then
  if [ "$SHARK_LAUNCH_WITH_JAVA" == "1" ]; then
    JAVA_WRAPPER=/usr/local/bin/java_wrapper.sh
    # Launching with plain java: the Scala runtime jars must be added to
    # the classpath explicitly.
    if [ -n "$SCALA_HOME" ]; then
      CLASSPATH+=":$SCALA_HOME/lib/scala-library.jar"
      CLASSPATH+=":$SCALA_HOME/lib/scala-compiler.jar"
      CLASSPATH+=":$SCALA_HOME/lib/jline.jar"
    fi
    if [ -x "$JAVA_WRAPPER" ]; then
      RUNNER="$JAVA_WRAPPER"
    elif [ -n "$JAVA_HOME" ]; then
      RUNNER="${JAVA_HOME}/bin/java"
    else
      RUNNER=java
    fi
    # The JVM doesn't read JAVA_OPTS by default so we need to pass it in.
    EXTRA_ARGS="$JAVA_OPTS"
  else
    if [ -z "$SCALA_HOME" ]; then
      echo "Neither SHARK_LAUNCH_WITH_JAVA nor SCALA_HOME are set" >&2
      exit 1
    fi
    SCALA="${SCALA_HOME}/bin/scala"
    # No escaped quotes around $CLASSPATH here: exec below word-splits
    # $RUNNER, so the original's literal '"' characters became part of
    # the -cp argument and broke class lookup. (Classpath entries
    # containing spaces remain unsupported by this word-splitting
    # launch style.)
    RUNNER="$SCALA -cp $CLASSPATH"
    EXTRA_ARGS=""
  fi
fi
# RUNNER and EXTRA_ARGS are intentionally unquoted: both may contain
# multiple words that must be split into separate arguments.
exec $RUNNER $EXTRA_ARGS "$@"