Skip to content
Permalink
Browse files
Merge pull request #121 from ctubbsii/hash-algorithms-and-more
Support more hash algorithms and other fixes
  • Loading branch information
ctubbsii committed Oct 7, 2016
2 parents b5ae2f4 + 3674bc3 commit 2927f7136af7c6eeab5c8f7bf8caa4245800d62d
Show file tree
Hide file tree
Showing 12 changed files with 58 additions and 60 deletions.
@@ -82,7 +82,7 @@ The `setup` command will install the downloaded tarballs to the directory set by
env.sh and run your local development cluster. The command can be run in several different ways:

1. Sets up Apache Accumulo and its dependencies of Hadoop, Zookeeper. This starts all processes and
will wipe Accumulo/Hadoop if this command was run previously. This command also sets up Spark
will wipe Accumulo/Hadoop if this command was run previously. This command also sets up Spark
and starts Spark's History Server (set `START_SPARK_HIST_SERVER=false` in your env.sh to turn
off). This command is useful if you are using Uno for Accumulo development.

@@ -14,23 +14,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.

source "$FLUO_DEV"/bin/impl/util.sh

# Download a tarball (resuming a partial download if present) and verify its
# checksum against the expected value.
# Globals:   DOWNLOADS (read) - directory tarballs are saved to
# Arguments: $1 - URL prefix (mirror path) to download from
#            $2 - tarball file name
#            $3 - expected checksum of the tarball
# Outputs:   progress from wget; confirmation message on success
# Exits:     non-zero (via verify_exist_hash) if the download is missing or
#            its checksum does not match
function download_verify() {
  url_prefix=$1
  tarball=$2
  expected_hash=$3

  # -c resumes an interrupted download; -P places the file in $DOWNLOADS
  wget -c -P "$DOWNLOADS" "$url_prefix/$tarball"
  # All checksum validation (regex sanity check, hash-algorithm selection,
  # and comparison) is delegated to verify_exist_hash; doing it inline here
  # as well would duplicate that work.
  verify_exist_hash "$tarball" "$expected_hash"
  echo "$tarball exists in downloads/ and matches expected checksum ($expected_hash)"
}

@@ -84,7 +76,8 @@ fluo)
fi
cp "$fluo_built_tarball" "$DOWNLOADS"/
else
download_verify "$APACHE_MIRROR/incubator/fluo/fluo/$FLUO_VERSION" "$FLUO_TARBALL" "$FLUO_HASH"
[[ $FLUO_VERSION =~ .*-incubating ]] && APACHE_MIRROR="${APACHE_MIRROR}/incubator"
download_verify "$APACHE_MIRROR/fluo/fluo/$FLUO_VERSION" "$FLUO_TARBALL" "$FLUO_HASH"
fi
;;
metrics)
@@ -25,12 +25,12 @@ impl="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
bin="$( cd -P "$( dirname "$impl" )" && pwd )"
# Stop: Resolve Script Directory

# Determine FLUO_DEV - Use env variable set by user. If none set, calculate using bin dir
# Determine FLUO_DEV - Use env variable set by user. If none set, calculate using bin dir
FLUO_DEV="${FLUO_DEV:-$( cd -P "${bin}"/.. && pwd )}"
export FLUO_DEV
if [[ -z "$FLUO_DEV" || ! -d "$FLUO_DEV" ]]
then
echo "FLUO_DEV=$FLUO_DEV is not a valid directory. Please make sure it exists"
echo "FLUO_DEV=$FLUO_DEV is not a valid directory. Please make sure it exists"
exit 1
fi

@@ -43,10 +43,10 @@ FH=$FLUO_HOME

# Load env configuration
if [[ -f "$FLUO_DEV/conf/env.sh" ]]; then
. "$FLUO_DEV"/conf/env.sh
source "$FLUO_DEV"/conf/env.sh
else
if [[ ! "version env" =~ $1 ]]; then echo "WARNING: uno is using default configuration at $FLUO_DEV/conf/env.sh.example"; fi
. "$FLUO_DEV"/conf/env.sh.example
source "$FLUO_DEV"/conf/env.sh.example
fi

# Confirm that hadoop, accumulo, and zookeeper env variables are not set
@@ -79,11 +79,11 @@ fi

# Confirm that env variables were set correctly
if [[ -n "$FLUO_REPO" && ! -d "$FLUO_REPO" ]]; then
echo "FLUO_REPO=$FLUO_REPO is not a valid directory. Please make sure it exists"
echo "FLUO_REPO=$FLUO_REPO is not a valid directory. Please make sure it exists"
exit 1
fi
if [[ -n "$ACCUMULO_REPO" && ! -d "$ACCUMULO_REPO" ]]; then
echo "ACCUMULO_REPO=$ACCUMULO_REPO is not a valid directory. Please make sure it exists"
echo "ACCUMULO_REPO=$ACCUMULO_REPO is not a valid directory. Please make sure it exists"
exit 1
fi

@@ -120,10 +120,8 @@ fi
: "${YARN_LOG_DIR:?"YARN_LOG_DIR is not set in env.sh"}"
: "${ZOO_LOG_DIR:?"ZOO_LOG_DIR is not set in env.sh"}"

hash shasum 2>/dev/null || { echo >&2 "shasum must be installed & on PATH. Aborting."; exit 1; }
export HASH_CMD="shasum -a 256"
export HASH_REGEX="^[a-f0-9]{64}$"
hash sed 2>/dev/null || { echo >&2 "sed must be installed & on PATH. Aborting."; exit 1; }
hash shasum 2>/dev/null || { echo >&2 "shasum must be installed & on PATH. Aborting."; exit 1; }
hash sed 2>/dev/null || { echo >&2 "sed must be installed & on PATH. Aborting."; exit 1; }

if [[ "$OSTYPE" == "darwin"* ]]; then
export SED="sed -i .bak"
@@ -15,18 +15,18 @@
# limitations under the License.

if [[ -z "$1" || "$1" == "--vars" ]]; then
echo "export HADOOP_PREFIX=$HADOOP_PREFIX"
echo "export HADOOP_CONF_DIR=$HADOOP_CONF_DIR"
echo "export ZOOKEEPER_HOME=$ZOOKEEPER_HOME"
echo "export SPARK_HOME=$SPARK_HOME"
echo "export ACCUMULO_HOME=$ACCUMULO_HOME"
echo "export FLUO_HOME=$FLUO_HOME"
echo "export HADOOP_PREFIX=\"$HADOOP_PREFIX\""
echo "export HADOOP_CONF_DIR=\"$HADOOP_CONF_DIR\""
echo "export ZOOKEEPER_HOME=\"$ZOOKEEPER_HOME\""
echo "export SPARK_HOME=\"$SPARK_HOME\""
echo "export ACCUMULO_HOME=\"$ACCUMULO_HOME\""
echo "export FLUO_HOME=\"$FLUO_HOME\""
fi

if [[ -z "$1" || "$1" == "--paths" ]]; then
echo -n "export PATH=\$PATH:$FLUO_DEV/bin:$HADOOP_PREFIX/bin:$ZOOKEEPER_HOME/bin:$SPARK_HOME/bin:$ACCUMULO_HOME/bin:$FLUO_HOME/bin"
echo -n "export PATH=\"\$PATH:$FLUO_DEV/bin:$HADOOP_PREFIX/bin:$ZOOKEEPER_HOME/bin:$SPARK_HOME/bin:$ACCUMULO_HOME/bin:$FLUO_HOME/bin"
if [[ "$SETUP_METRICS" == "true" ]]; then
echo -n ":$INFLUXDB_HOME/bin:$GRAFANA_HOME/bin"
fi
echo ""
echo '"'
fi
@@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

source $FLUO_DEV/bin/impl/util.sh
source "$FLUO_DEV"/bin/impl/util.sh

if [[ -z "$ACCUMULO_REPO" ]]; then
verify_exist_hash "$ACCUMULO_TARBALL" "$ACCUMULO_HASH"
@@ -33,7 +33,7 @@ if [[ "$OSTYPE" != "darwin"* ]]; then
echo "Found ${hostname} in DNS."
else
echo "ERROR - Your machine was unable to find its own hostname in /etc/hosts or by using 'host $hostname'."
echo "This is an issue that can cause uno services (such as Hadoop) to not start up. You should"
echo "This is an issue that can cause uno services (such as Hadoop) to not start up. You should"
echo "confirm that there is an entry in /etc/hosts or that /etc/resolv.conf is correct."
exit 1
fi
@@ -117,7 +117,6 @@ fi

echo "Starting Hadoop..."
rm -rf "$DATA_DIR"/hadoop
echo $HADOOP_LOG_DIR
"$HADOOP_PREFIX"/bin/hdfs namenode -format
"$HADOOP_PREFIX"/sbin/start-dfs.sh
"$HADOOP_PREFIX"/sbin/start-yarn.sh
@@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

source $FLUO_DEV/bin/impl/util.sh
source "$FLUO_DEV"/bin/impl/util.sh

# stop if any command fails
set -e
@@ -62,7 +62,7 @@ if [[ -f "$DOWNLOADS/$FLUO_TARBALL" ]]; then
} >> "$FLUO_PROPS"
fi

$FLUO_HOME/lib/fetch.sh extra
"$FLUO_HOME"/lib/fetch.sh extra
else
echo "WARNING: Fluo tarball '$FLUO_TARBALL' was not found in $DOWNLOADS."
echo "Fluo will not be set up!"
@@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

source $FLUO_DEV/bin/impl/util.sh
source "$FLUO_DEV"/bin/impl/util.sh

"$FLUO_DEV"/bin/impl/kill.sh

@@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

source $FLUO_DEV/bin/impl/util.sh
source "$FLUO_DEV"/bin/impl/util.sh

echo "Killing InfluxDB & Grafana (if running)"
pkill -f influxdb
@@ -84,7 +84,7 @@ while [[ $retcode != 0 ]]; do
--data-binary '{"name":"fluo_metrics","type":"influxdb","url":"http://localhost:8086","access":"direct","isDefault":true,"database":"fluo_metrics","user":"fluo","password":"secret"}'
retcode=$?
if [[ $retcode != 0 ]]; then
echo "Failed to add Grafana data source. Retrying in 5 sec.."
echo "Failed to add Grafana data source. Retrying in 5 sec.."
sleep 5
fi
done
@@ -16,17 +16,24 @@

function verify_exist_hash() {
tarball=$1
expected_hash=$2
expected_hash=$(echo "${2// /}" | tr '[:upper:]' '[:lower:]')

if [[ ! $expected_hash =~ $HASH_REGEX ]]; then
echo "Expected checksum ($expected_hash) of $tarball does not match regex $HASH_REGEX"
exit 1
fi
if [[ ! -f "$DOWNLOADS/$tarball" ]]; then
echo "The tarball $tarball does not exists in downloads/"
echo "The tarball $tarball does not exist in downloads/"
exit 1
fi

local HASH_CMD
case "${#expected_hash}" in
32) HASH_CMD='md5sum' ;;
40) HASH_CMD='shasum -a 1' ;;
64) HASH_CMD='shasum -a 256' ;;
128) HASH_CMD='shasum -a 512' ;;
*)
echo "Expected checksum ($expected_hash) of $tarball is not an MD5, SHA1, SHA256, or SHA512 sum"
exit 1
;;
esac
actual_hash=$($HASH_CMD "$DOWNLOADS/$tarball" | awk '{print $1}')

if [[ "$actual_hash" != "$expected_hash" ]]; then
22 bin/uno
@@ -24,33 +24,33 @@ done
bin="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
# Stop: Resolve Script Directory

. "$bin"/impl/load-env.sh "$1"
source "$bin"/impl/load-env.sh "$1"

function check_dirs() {
if [[ ! -d "$HADOOP_PREFIX" ]]; then
echo "HADOOP_PREFIX=$HADOOP_PREFIX is not a valid directory. Please make sure it exists"
echo "HADOOP_PREFIX=$HADOOP_PREFIX is not a valid directory. Please make sure it exists"
exit 1
fi
if [[ ! -d "$ZOOKEEPER_HOME" ]]; then
echo "ZOOKEEPER_HOME=$ZOOKEEPER_HOME is not a valid directory. Please make sure it exists"
echo "ZOOKEEPER_HOME=$ZOOKEEPER_HOME is not a valid directory. Please make sure it exists"
exit 1
fi
if [[ ! -d "$ACCUMULO_HOME" ]]; then
echo "ACCUMULO_HOME=$ACCUMULO_HOME is not a valid directory. Please make sure it exists"
echo "ACCUMULO_HOME=$ACCUMULO_HOME is not a valid directory. Please make sure it exists"
exit 1
fi
}

case "$1" in
fetch)
hash mvn 2>/dev/null || { echo >&2 "Maven must be installed & on PATH. Aborting."; exit 1; }
hash wget 2>/dev/null || { echo >&2 "wget must be installed & on PATH. Aborting."; exit 1; }
hash mvn 2>/dev/null || { echo >&2 "Maven must be installed & on PATH. Aborting."; exit 1; }
hash wget 2>/dev/null || { echo >&2 "wget must be installed & on PATH. Aborting."; exit 1; }
if [[ "$2" == "all" ]]; then
"$bin"/impl/fetch.sh accumulo
"$bin"/impl/fetch.sh fluo
"$bin"/impl/fetch.sh hadoop
"$bin"/impl/fetch.sh metrics
"$bin"/impl/fetch.sh spark
"$bin"/impl/fetch.sh accumulo && \
"$bin"/impl/fetch.sh fluo && \
"$bin"/impl/fetch.sh hadoop && \
"$bin"/impl/fetch.sh metrics && \
"$bin"/impl/fetch.sh spark && \
"$bin"/impl/fetch.sh zookeeper
else
"$bin"/impl/fetch.sh "${@:2}"
@@ -8,9 +8,9 @@ export ZOOKEEPER_VERSION=3.4.8
export ACCUMULO_VERSION=1.7.2
export SPARK_VERSION=1.6.2

# SHA-256 Hashes
# Hashes (supports MD5, SHA-1, SHA-256, and SHA-512; MD5 requires md5sum)
# --------------
# Hashes below match default versions above. If you change a version above,
# Hashes below match default versions above. If you change a version above,
# you must also change the hash below.
export FLUO_HASH=d54b7b7b470b3ebb51ec08b797137f2f1c1ddea1cd7ccab449d5f94129be1635
export HADOOP_HASH=49ad740f85d27fa39e744eb9e3b1d9442ae63d62720f0aabdae7aa9a718b03f7
@@ -104,7 +104,7 @@ export ACCUMULO_PASSWORD=secret
# ---------------------
# If true, the command 'uno setup fluo' will set up metrics.
# This setting has no impact on 'uno setup metrics'.
# Metrics can only be set up on Linux. Mac OS X is not supported.
# Metrics can only be set up on Linux. Mac OS X is not supported.
export SETUP_METRICS=false
# InfluxDB configuration
export INFLUXDB_VERSION=0.9.4.2
@@ -20,4 +20,5 @@
# This file is sourced when running various Spark programs.
# Copy it as spark-env.sh and edit that to configure Spark for your site.

export SPARK_DIST_CLASSPATH=$($HADOOP_PREFIX/bin/hadoop classpath)
SPARK_DIST_CLASSPATH=$("$HADOOP_PREFIX"/bin/hadoop classpath)
export SPARK_DIST_CLASSPATH

0 comments on commit 2927f71

Please sign in to comment.