-
Notifications
You must be signed in to change notification settings - Fork 15
/
s3t.sh
74 lines (71 loc) · 2.33 KB
/
s3t.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
#!/usr/bin/bash
#
# Date: December 7th, 2023
# Author: Robin T. Miller
#
# Description:
# Simple script to use dt as a data generation tool for testing S3 object storage.
# Note: Assumes AWS credentials have been created; the S3 bucket is created if it does not exist.
# This script uses the default profile, aka 's3'.
#
# Modification History:
# April 19th, 2024
# Minor updates, fix bash error redirection.
#
# December 18th, 2023 by Robin T. Miller
# When reading files, remove the min/max limit options to ensure
# the whole file is verified, otherwise only a portion is read.
#
# December 13th, 2023 by Robin T. Miller
# Add S3 bucket name to dt prefix string.
# Change the default S3 bucket name to "dt-bucket".
# If the S3 bucket does not exist, make the bucket.
#
# check_error: abort the script if the previous command failed.
# Examines $? from the caller's last command; on failure, reports the
# status and exits the script with that same status. No-op on success.
check_error()
{
    exit_status=$?
    if (( exit_status != 0 )); then
        echo "Error occurred, last exit status is ${exit_status}"
        exit $exit_status
    fi
    return
}
# Set defaults; each value can be overridden via the environment variable
# named in the expansion (DTPATH, BUCKET, DTDIR, S3DIR, FILES, LIMIT,
# PASSES, THREADS).
dtpath=${DTPATH:-~/dt}              # Path to the dt tool binary.
bucket=${BUCKET:-dt-bucket}         # S3 bucket name (created below if missing).
dtdir=${DTDIR:-dtfiles}             # Local directory for generated dt files.
# Fix: the override was previously spelled 's3DIR', inconsistent with the
# other all-uppercase variables. Prefer S3DIR, but still honor the legacy
# lowercase spelling so existing environments keep working.
s3dir=${S3DIR:-${s3DIR:-s3files}}   # Local directory for downloaded S3 files.
files=${FILES:-10}                  # Number of dt files per pass.
limit=${LIMIT:-10m}                 # Maximum dt file size.
passes=${PASSES:-3}                 # Number of test passes.
threads=${THREADS:-5}               # dt thread count.
s3uri="s3://${bucket}"
# Ensure the target S3 bucket exists, creating it when the listing fails.
echo "--> Verify Bucket ${s3uri} Exists <--"
if ! aws s3 ls "${s3uri}" >/dev/null 2>/dev/null; then
    echo "--> Making Bucket ${s3uri} <--"
    aws s3 mb "${s3uri}"
    check_error
fi
# Main test loop: for each pass, generate dt data files locally, round-trip
# them through the S3 bucket, verify the downloaded copies with dt, then
# clean the bucket. Aborts on the first failure via check_error.
# Fixes: quote all expansions (ShellCheck SC2086), use '--' so rm never
# treats a value as an option, and replace the external 'seq' with bash's
# built-in C-style for loop.
for ((pass = 1; pass <= passes; pass++))
do
    echo "--> Starting Pass ${pass} <--"
    echo "--> Removing previous test files <--"
    rm -rf -- "${dtdir}" "${s3dir}"
    echo "--> Creating dt files <--"
    "${dtpath}" of="${dtdir}/dt.data" bs=random min_limit=4k max_limit="${limit}" incr_limit=vary workload=high_validation threads="${threads}" files="${files}" dispose=keep iotpass="${pass}" disable=pstats prefix="%d@%h@${bucket}"
    check_error
    ls -lsR "${dtdir}"
    echo "--> Uploading dt files to S3 server <--"
    aws s3 cp "${dtdir}" "${s3uri}/" --recursive
    check_error
    echo "--> Downloading S3 dt files <--"
    aws s3 cp "${s3uri}/" "${s3dir}" --recursive
    check_error
    echo "--> Verifying downloaded S3 dt files <--"
    # Note: no min/max limit options here so the whole file is verified
    # (per the Dec 18th, 2023 change note in the header).
    "${dtpath}" if="${s3dir}/dt.data" bs=random workload=high_validation vflags=~inode threads="${threads}" files="${files}" iotpass="${pass}" disable=verbose prefix="%d@%h@${bucket}"
    check_error
    echo "--> Removing S3 dt files <--"
    aws s3 rm --recursive "${s3uri}"
    check_error
done