Skip to content
This repository was archived by the owner on Dec 15, 2025. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion bin/functions/assert.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@

# assert VALUE MESSAGE
# Assertion helper: if VALUE ($1) is empty or unset, print MESSAGE ($2)
# to stderr (in red, via the IRed/Color_Off color variables defined
# elsewhere in this file) and terminate the script with status 1.
# Globals (read): IRed, Color_Off
# Returns: 0 when $1 is non-empty; exits 1 otherwise.
function assert(){ # assertion help function
    if [ -z "$1" ]; then
        # 1>&2 is the portable stderr redirect; > /dev/stderr (the pre-merge
        # form) fails when /dev/stderr is absent or not writable.
        echo -e "${IRed}ASSERT! $2${Color_Off}" 1>&2
        exit 1
    fi;
}
16 changes: 8 additions & 8 deletions bin/functions/workload_functions.sh
Original file line number Diff line number Diff line change
# rmr_hdfs DIR
# "rm -r" for HDFS: recursively deletes DIR ($1) via the Hadoop fs shell,
# skipping the trash. Logs the composed command to stderr, then runs it
# through the execute_withlog helper (defined elsewhere in this file).
# Globals (read): HADOOP_EXECUTABLE, HADOOP_CONF_DIR, BCyan, Cyan, Color_Off
function rmr_hdfs(){ # rm -r for hdfs
    assert $1 "dir parameter missing"
    RMDIR_CMD="fs -rm -r -skipTrash"
    local CMD="$HADOOP_EXECUTABLE --config $HADOOP_CONF_DIR $RMDIR_CMD $1"
    # Diagnostics go to stderr (1>&2) so stdout stays clean for callers.
    echo -e "${BCyan}hdfs rm -r: ${Cyan}${CMD}${Color_Off}" 1>&2
    # CMD is intentionally expanded unquoted: it must word-split back
    # into the executable and its arguments (existing file convention).
    execute_withlog ${CMD}
}

# upload_to_hdfs LOCAL_PATH REMOTE_PATH
# Uploads LOCAL_PATH ($1) to REMOTE_PATH ($2) on HDFS: removes any previous
# copy, creates the parent directory, then puts the local file. If the
# remote path carries a leading "hdfs://host:port" scheme matching
# $HDFS_MASTER, that prefix is stripped first.
# Globals (read): HADOOP_EXECUTABLE, HADOOP_CONF_DIR, HDFS_MASTER,
#                 BCyan, Cyan, Color_Off
# NOTE(review): the diff hunk starts mid-function; an `assert $1 ...` guard
# for the local parameter presumably exists just above this view — confirm
# against the full file.
function upload_to_hdfs(){
    assert $2 "remote parameter missing"
    LOCAL_FILE_PATH=$1
    REMOTE_FILE_PATH=$2
    echo "REMOTE_FILE_PATH:$REMOTE_FILE_PATH" 1>&2
    # Case-insensitive scheme check; strip leading "HDFS://xxx:xxx" string
    # so the fs shell receives an absolute in-cluster path.
    if [[ $(echo $REMOTE_FILE_PATH | tr A-Z a-z) = hdfs://* ]]; then
        echo "HDFS_MASTER:$HDFS_MASTER" 1>&2
        # Strip exactly the length of $HDFS_MASTER from the front.
        local LEADING_HDFS_STRING_LENGTH=${#HDFS_MASTER}
        REMOTE_FILE_PATH=${REMOTE_FILE_PATH:$LEADING_HDFS_STRING_LENGTH}
        echo "stripped REMOTE_FILE_PATH:$REMOTE_FILE_PATH" 1>&2
    fi

    # Clear previous package file (best effort; failure is logged, not fatal).
    local CMD="$HADOOP_EXECUTABLE --config $HADOOP_CONF_DIR fs -rm $REMOTE_FILE_PATH"
    echo -e "${BCyan}hdfs rm : ${Cyan}${CMD}${Color_Off}" 1>&2
    execute_withlog ${CMD}

    # Prepare parent folder.
    CMD="$HADOOP_EXECUTABLE --config $HADOOP_CONF_DIR fs -mkdir $(dirname $REMOTE_FILE_PATH)"
    echo -e "${BCyan}hdfs mkdir : ${Cyan}${CMD}${Color_Off}" 1>&2
    execute_withlog ${CMD}

    # Upload.
    CMD="$HADOOP_EXECUTABLE --config $HADOOP_CONF_DIR fs -put $LOCAL_FILE_PATH $REMOTE_FILE_PATH"
    echo -e "${BCyan}hdfs put : ${Cyan}${CMD}${Color_Off}" 1>&2
    execute_withlog ${CMD}
}

# dus_hdfs DIR
# "du -s" for HDFS: reports the summarized disk usage of DIR ($1) via the
# Hadoop fs shell. Logs the composed command to stderr, then runs it
# through the execute_withlog helper (defined elsewhere in this file).
# Globals (read): HADOOP_EXECUTABLE, HADOOP_CONF_DIR, BPurple, Purple, Color_Off
function dus_hdfs(){ # du -s for hdfs
    assert $1 "dir parameter missing"
    DUS_CMD="fs -du -s"
    local CMD="$HADOOP_EXECUTABLE --config $HADOOP_CONF_DIR $DUS_CMD $1"
    # Diagnostics to stderr (1>&2), keeping stdout clean for callers.
    echo -e "${BPurple}hdfs du -s: ${Purple}${CMD}${Color_Off}" 1>&2
    # Unquoted on purpose: CMD must word-split into command + arguments.
    execute_withlog ${CMD}
}

Expand Down