-
Notifications
You must be signed in to change notification settings - Fork 256
/
testrunner
executable file
·115 lines (94 loc) · 2.95 KB
/
testrunner
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
#!/bin/bash
set -e
set -o pipefail
set -u
#
# This systemtest tests the plugin functionality
# of the Bareos FD by using the supplied module
# bareos-fd-local-fileset.py
#
# The module will backup some files.
# This plugin is not intended for production,
# but is only a minimal example that shows
# how to use the python plugin interface.
# File attributes like users and times will not be saved.
#
TestName="$(basename "$(pwd)")"
export TestName
bucket_name=bareos-test
#shellcheck source=../environment.in
. ./environment
# Set AFTER sourcing the environment so this value is authoritative
# even if the environment file defines its own JobName.
JobName=backup-bareos-fd
#shellcheck source=../scripts/functions
. "${rscripts}"/functions
"${rscripts}"/cleanup
"${rscripts}"/setup
# Shortcut for s3cmd. Kept as a plain string on purpose: it is expanded
# unquoted below so the embedded options word-split into arguments.
S3="${S3CMD} --no-check-certificate --config ${S3CFG}"
# Fill ${BackupDirectory} with data.
setup_data
# create files to test the temporary-file and the stream-download path
prefetch_size=$(( $(grep prefetch_size etc/libcloud_config.ini | cut -d '=' -f 2) ))
# backup via temp file: object just below prefetch_size.
# /dev/urandom instead of /dev/random: never blocks waiting for entropy,
# and the data only needs to be arbitrary, not cryptographically strong.
dd if=/dev/urandom \
   of="${tmp}/data/object-size-downloads-to-temporary-file" \
   bs=$(( prefetch_size - 1 )) count=1
# backup via stream object using the plugin process itself (just above prefetch_size)
dd if=/dev/urandom \
   of="${tmp}/data/object-size-downloads-with-plugin-process" \
   bs=$(( prefetch_size + 1 )) count=1
"${rscripts}"/start_minio.sh "$MINIO_PORT" "$TestName"
# create s3 content for test
${S3} rb --recursive --force "s3://${bucket_name}" || echo "s3://${bucket_name} does not exist"
${S3} mb "s3://${bucket_name}"
# this test does not work with links and some other weird files as they would already
# have a changed name by syncing to S3 using s3cmd
find "${tmp}/data/weird-files" -type l -exec rm {} \;
find "${tmp}/data/weird-files" -links +1 -type f -exec rm {} \;
rm "${tmp}"/data/weird-files/fifo*
rm "${tmp}"/data/weird-files/newline*
rm "${tmp}"/data/weird-files/tab*
# the following file also makes problems
rm "${tmp}"/data/weird-files/filename-with-non-utf8-bytestring*
# s3cmd does not sync empty dirs
rmdir "${tmp}/data/weird-files/big-X"
rmdir "${tmp}/data/weird-files/subdir"
${S3} sync "$BackupDirectory" "s3://${bucket_name}"
start_test
# Feed bconsole a script: label a volume, run the backup job, then
# restore everything below $tmp/bareos-restores. The heredoc content is
# bconsole input and must stay exactly as the director expects it.
cat <<END_OF_DATA >"$tmp/bconcmds"
@$out /dev/null
messages
@$out $tmp/log1.out
setdebug level=100 storage=File
setdebug level=100 client=bareos-fd trace=1 timestamp=1
label volume=TestVolume001 storage=File pool=Full
run job=$JobName yes
status director
status client
status storage=File
wait
messages
@#
@# now do a restore
@#
@$out $tmp/log2.out
wait
restore client=bareos-fd fileset=PluginTest where=$tmp/bareos-restores select all done
yes
wait
messages
quit
END_OF_DATA
run_bareos "$@"
check_for_zombie_jobs storage=File
check_two_logs
# Collect the backed-up files without word-splitting on whitespace
# (the old `list=( $(find …) )` broke on paths containing spaces).
mapfile -t list < <(find "${BackupDirectory}" -type f)
# Using check_restore_only_files_diff instead of check_restore_diff
# so as not to diff the file attributes, because they are not saved
#check_restore_only_files_diff "${list[@]}"
if ! diff -r "${tmp}/data" "${tmp}/bareos-restores/${bucket_name}/data"; then
  export estat=1
fi
"${rscripts}"/stop_minio.sh "$TestName"
end_test