Skip to content

Commit

Permalink
libcloud-plugin: fix filenames with non-ascii characters
Browse files Browse the repository at this point in the history
Also modified the systemtest to include such filenames.
Additional changes to make the plugin compatible with both
Python 2 and 3, and some minor changes to successfully
run the systemtest.

To enable the plugin build and systemtest, the following is
required:

- The Python 3 version of the s3cmd tool must be installed; it may be
  necessary to use
  pip3 install s3cmd
  instead of the OS-provided s3cmd package
- Apache Libcloud must be installed, use
  pip2 install apache-libcloud
  or pip3 install apache-libcloud
- minio must be installed, use
  curl -o /usr/local/bin/minio https://dl.min.io/server/minio/release/linux-amd64/minio
  chmod 755 /usr/local/bin/minio
- Use the systemtests global path for s3cfg file
  • Loading branch information
sduehr authored and franku committed Sep 23, 2020
1 parent 85e8ff3 commit 7392cae
Show file tree
Hide file tree
Showing 6 changed files with 60 additions and 18 deletions.
43 changes: 33 additions & 10 deletions core/src/plugins/filed/python/libcloud/BareosFdPluginLibcloud.py
Expand Up @@ -23,7 +23,11 @@
import BareosFdPluginBaseclass
import bareosfd
from bareosfd import *
import ConfigParser as configparser

try:
import ConfigParser as configparser
except ImportError:
import configparser
import datetime
import dateutil.parser
from bareos_libcloud_api.bucket_explorer import TASK_TYPE
Expand All @@ -45,6 +49,21 @@
from sys import version_info
from distutils.util import strtobool

class StringCodec:
    """Adapt filename strings to what the Bareos core expects per Python version.

    Python 2 builds of the core want UTF-8 encoded byte strings on the
    backup path, whereas Python 3 builds want the encoding applied on the
    restore path instead.
    """

    @staticmethod
    def encode_for_backup(var):
        """Return *var* UTF-8 encoded on Python 2, unchanged on Python 3."""
        if version_info.major >= 3:
            return var
        return var.encode("utf-8")

    @staticmethod
    def encode_for_restore(var):
        """Return *var* unchanged on Python 2, UTF-8 encoded on Python 3."""
        if version_info.major >= 3:
            return var.encode("utf-8")
        return var


class FilenameConverter:
__pathprefix = "PYLIBCLOUD:/"
Expand Down Expand Up @@ -229,8 +248,7 @@ def start_backup_job(self):
return bRC_Error

jobmessage(
M_INFO,
"Connected, last backup: %s (ts: %s)" % (self.last_run, self.since),
M_INFO, "Connected, last backup: %s (ts: %s)" % (self.last_run, self.since),
)

try:
Expand Down Expand Up @@ -310,13 +328,13 @@ def start_backup_file(self, savepkt):
debugmessage(100, "Backup file: %s" % (filename,))

statp = bareosfd.StatPacket()
# statp.size = self.current_backup_task["size"]
# statp.mtime = self.current_backup_task["mtime"]
# statp.atime = 0
# statp.ctime = 0
# statp.size = self.current_backup_task["size"]
# statp.mtime = self.current_backup_task["mtime"]
# statp.atime = 0
# statp.ctime = 0

savepkt.statp = statp
savepkt.fname = filename
savepkt.fname = StringCodec.encode_for_backup(filename)
savepkt.type = FT_REG

if self.current_backup_task["type"] == TASK_TYPE.DOWNLOADED:
Expand Down Expand Up @@ -357,7 +375,7 @@ def create_file(self, restorepkt):
100, "create_file() entry point in Python called with %s\n" % (restorepkt)
)
FNAME = FilenameConverter.BackupToBucket(restorepkt.ofname)
dirname = os.path.dirname(FNAME)
dirname = StringCodec.encode_for_restore(os.path.dirname(FNAME))
if not os.path.exists(dirname):
jobmessage(M_INFO, "Directory %s does not exist, creating it\n" % dirname)
os.makedirs(dirname)
Expand All @@ -371,7 +389,12 @@ def plugin_io(self, IOP):
if IOP.func == IO_OPEN:
# Only used by the 'restore' path
if IOP.flags & (os.O_CREAT | os.O_WRONLY):
self.FILE = open(FilenameConverter.BackupToBucket(IOP.fname), "wb")
self.FILE = open(
StringCodec.encode_for_restore(
FilenameConverter.BackupToBucket(IOP.fname)
),
"wb",
)
return bRC_OK

elif IOP.func == IO_READ:
Expand Down
Expand Up @@ -30,4 +30,9 @@ def jobmessage(message_type, message):

def debugmessage(level, message):
    """Forward a debug message to the Bareos file daemon.

    Prefixes *message* with the plugin name and the current process id,
    then hands it to bareosfd.DebugMessage().  Some cores (Python 2
    builds) raise a UnicodeError for unicode text containing non-ASCII
    characters; in that case retry with the message explicitly encoded
    as UTF-8 bytes.
    """
    message = "BareosFdPluginLibcloud [%s]: %s\n" % (os.getpid(), message)
    try:
        bareosfd.DebugMessage(level, message)
    except UnicodeError:
        # Fallback for cores that cannot accept unicode arguments.
        bareosfd.DebugMessage(level, message.encode("utf-8"))
Expand Up @@ -25,7 +25,11 @@
from bareos_libcloud_api.queue_message import AbortMessage
from bareos_libcloud_api.queue_message import DebugMessage
from bareos_libcloud_api.queue_message import MESSAGE_TYPE
import Queue as Q

try:
import Queue as Q
except ImportError:
import queue as Q


class ProcessBase(Process):
Expand Down
Expand Up @@ -23,7 +23,7 @@
import io
from libcloud.common.types import LibcloudError
from libcloud.storage.types import ObjectDoesNotExistError
from utils import silentremove
from bareos_libcloud_api.utils import silentremove
from time import sleep
import uuid

Expand Down
4 changes: 2 additions & 2 deletions systemtests/scripts/start_minio.sh
Expand Up @@ -32,15 +32,15 @@ while pidof "${MINIO}" > /dev/null; do
done

export MINIO_DOMAIN=localhost,127.0.0.1
"${MINIO}" server --address \':$minio_port_number\' "$minio_tmp_data_dir" > "$logdir"/minio.log
"${MINIO}" server --address :${minio_port_number} "$minio_tmp_data_dir" > "$logdir"/minio.log &

if ! pidof ${MINIO} > /dev/null; then
echo "$0: could not start minio server"
exit 2
fi

tries=0
while ! s3cmd --config=etc/s3cfg-local-minio ls S3:// > /dev/null 2>&1; do
while ! s3cmd --config=${S3CFG} ls S3:// > /dev/null 2>&1; do
sleep 0.1
(( tries++ )) && [ $tries == '20' ] \
&& { echo "$0: could not start minio server"; exit 3; }
Expand Down
16 changes: 13 additions & 3 deletions systemtests/tests/py2plug-fd-libcloud/testrunner
Expand Up @@ -45,8 +45,17 @@ ${S3} rb --recursive --force s3://$bucket_name || echo "s3://$bucket_name does n
${S3} mb s3://$bucket_name


# this test does not work with links because of the restore objects
rm -r "${tmp}"/data/weird-files >/dev/null 2>&1
# this test does not work with links and some other weird files as they would already
# have a changed name by syncing to S3 using s3cmd
find ${tmp}/data/weird-files -type l -exec rm {} \;
find ${tmp}/data/weird-files -links +1 -type f -exec rm {} \;
rm ${tmp}/data/weird-files/fifo*
rm ${tmp}/data/weird-files/newline*
rm ${tmp}/data/weird-files/tab*
# s3cmd does not sync empty dirs
rmdir ${tmp}/data/weird-files/big-X
rmdir ${tmp}/data/weird-files/subdir

${S3} sync "$BackupDirectory" s3://$bucket_name

start_test
Expand All @@ -56,6 +65,7 @@ cat <<END_OF_DATA >$tmp/bconcmds
messages
@$out $tmp/log1.out
setdebug level=100 storage=File
setdebug level=100 client=bareos-fd trace=1 timestamp=1
label volume=TestVolume001 storage=File pool=Full
run job=$JobName yes
status director
Expand Down Expand Up @@ -89,6 +99,6 @@ if ! diff -r tmp/data tmp/bareos-restores/$bucket_name/data; then
export estat=1
fi

"${rscripts}"/stop_minio.sh
"${SYSTEMTESTS_DIR}"/scripts/stop_minio.sh

end_test

0 comments on commit 7392cae

Please sign in to comment.