diff --git a/core/src/plugins/filed/python/libcloud/BareosFdPluginLibcloud.py b/core/src/plugins/filed/python/libcloud/BareosFdPluginLibcloud.py
index 3974d7e109f..fd44ae2fd08 100755
--- a/core/src/plugins/filed/python/libcloud/BareosFdPluginLibcloud.py
+++ b/core/src/plugins/filed/python/libcloud/BareosFdPluginLibcloud.py
@@ -23,7 +23,11 @@
 import BareosFdPluginBaseclass
 import bareosfd
 from bareosfd import *
-import ConfigParser as configparser
+
+try:
+    import ConfigParser as configparser
+except ImportError:
+    import configparser
 import datetime
 import dateutil.parser
 from bareos_libcloud_api.bucket_explorer import TASK_TYPE
@@ -45,6 +49,21 @@
 from sys import version_info
 from distutils.util import strtobool
 
+class StringCodec:
+    @staticmethod
+    def encode_for_backup(var):
+        if version_info.major < 3:
+            return var.encode("utf-8")
+        else:
+            return var
+
+    @staticmethod
+    def encode_for_restore(var):
+        if version_info.major < 3:
+            return var
+        else:
+            return var.encode("utf-8")
+
 
 class FilenameConverter:
     __pathprefix = "PYLIBCLOUD:/"
@@ -229,8 +248,7 @@ def start_backup_job(self):
             return bRC_Error
 
         jobmessage(
-            M_INFO,
-            "Connected, last backup: %s (ts: %s)" % (self.last_run, self.since),
+            M_INFO, "Connected, last backup: %s (ts: %s)" % (self.last_run, self.since),
         )
 
         try:
@@ -310,13 +328,13 @@ def start_backup_file(self, savepkt):
         debugmessage(100, "Backup file: %s" % (filename,))
 
         statp = bareosfd.StatPacket()
-#        statp.size = self.current_backup_task["size"]
-#        statp.mtime = self.current_backup_task["mtime"]
-#        statp.atime = 0
-#        statp.ctime = 0
+        # statp.size = self.current_backup_task["size"]
+        # statp.mtime = self.current_backup_task["mtime"]
+        # statp.atime = 0
+        # statp.ctime = 0
         savepkt.statp = statp
-        savepkt.fname = filename
+        savepkt.fname = StringCodec.encode_for_backup(filename)
         savepkt.type = FT_REG
 
         if self.current_backup_task["type"] == TASK_TYPE.DOWNLOADED:
@@ -357,7 +375,7 @@ def create_file(self, restorepkt):
             100, "create_file() entry point in Python called with %s\n" % (restorepkt)
         )
         FNAME = FilenameConverter.BackupToBucket(restorepkt.ofname)
-        dirname = os.path.dirname(FNAME)
+        dirname = StringCodec.encode_for_restore(os.path.dirname(FNAME))
         if not os.path.exists(dirname):
             jobmessage(M_INFO, "Directory %s does not exist, creating it\n" % dirname)
             os.makedirs(dirname)
@@ -371,7 +389,12 @@ def plugin_io(self, IOP):
         if IOP.func == IO_OPEN:
             # Only used by the 'restore' path
             if IOP.flags & (os.O_CREAT | os.O_WRONLY):
-                self.FILE = open(FilenameConverter.BackupToBucket(IOP.fname), "wb")
+                self.FILE = open(
+                    StringCodec.encode_for_restore(
+                        FilenameConverter.BackupToBucket(IOP.fname)
+                    ),
+                    "wb",
+                )
             return bRC_OK
 
         elif IOP.func == IO_READ:
diff --git a/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/debug.py b/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/debug.py
index deb143bc774..8ffbbd04eec 100644
--- a/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/debug.py
+++ b/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/debug.py
@@ -30,4 +30,9 @@ def jobmessage(message_type, message):
 
 def debugmessage(level, message):
     message = "BareosFdPluginLibcloud [%s]: %s\n" % (os.getpid(), message)
-    bareosfd.DebugMessage(level, message)
+    #bareosfd.DebugMessage(level, message)
+    #bareosfd.DebugMessage(level, message.encode("utf-8"))
+    try:
+        bareosfd.DebugMessage(level, message)
+    except UnicodeError:
+        bareosfd.DebugMessage(level, message.encode("utf-8"))
diff --git a/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/process_base.py b/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/process_base.py
index 0abe4247099..031399914f2 100644
--- a/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/process_base.py
+++ b/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/process_base.py
@@ -25,7 +25,11 @@
 from bareos_libcloud_api.queue_message import AbortMessage
 from bareos_libcloud_api.queue_message import DebugMessage
 from bareos_libcloud_api.queue_message import MESSAGE_TYPE
-import Queue as Q
+
+try:
+    import Queue as Q
+except ImportError:
+    import queue as Q
 
 
 class ProcessBase(Process):
diff --git a/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/worker.py b/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/worker.py
index 5e886c1f7f5..692f65ab918 100644
--- a/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/worker.py
+++ b/core/src/plugins/filed/python/libcloud/bareos_libcloud_api/worker.py
@@ -23,7 +23,7 @@
 import io
 from libcloud.common.types import LibcloudError
 from libcloud.storage.types import ObjectDoesNotExistError
-from utils import silentremove
+from bareos_libcloud_api.utils import silentremove
 from time import sleep
 import uuid
 
diff --git a/systemtests/scripts/start_minio.sh b/systemtests/scripts/start_minio.sh
index e9ace16fc52..d76d9eae799 100755
--- a/systemtests/scripts/start_minio.sh
+++ b/systemtests/scripts/start_minio.sh
@@ -32,7 +32,7 @@ while pidof "${MINIO}" > /dev/null; do
 done
 
 export MINIO_DOMAIN=localhost,127.0.0.1
-"${MINIO}" server --address \':$minio_port_number\' "$minio_tmp_data_dir" > "$logdir"/minio.log
+"${MINIO}" server --address :${minio_port_number} "$minio_tmp_data_dir" > "$logdir"/minio.log &
 
 if ! pidof ${MINIO} > /dev/null; then
     echo "$0: could not start minio server"
@@ -40,7 +40,7 @@ if ! pidof ${MINIO} > /dev/null; then
 fi
 
 tries=0
-while ! s3cmd --config=etc/s3cfg-local-minio ls S3:// > /dev/null 2>&1; do
+while ! s3cmd --config=${S3CFG} ls S3:// > /dev/null 2>&1; do
     sleep 0.1
     (( tries++ )) && [ $tries == '20' ] \
         && { echo "$0: could not start minio server"; exit 3; }
diff --git a/systemtests/tests/py2plug-fd-libcloud/testrunner b/systemtests/tests/py2plug-fd-libcloud/testrunner
index 7be6d5eefec..f0f1e462ce2 100755
--- a/systemtests/tests/py2plug-fd-libcloud/testrunner
+++ b/systemtests/tests/py2plug-fd-libcloud/testrunner
@@ -45,8 +45,17 @@
 ${S3} rb --recursive --force s3://$bucket_name || echo "s3://$bucket_name does not exist"
 ${S3} mb s3://$bucket_name
 
-# this test does not work with links because of the restore objects
-rm -r "${tmp}"/data/weird-files >/dev/null 2>&1
+# this test does not work with links and some other weird files as they would already
+# have a changed name by syncing to S3 using s3cmd
+find ${tmp}/data/weird-files -type l -exec rm {} \;
+find ${tmp}/data/weird-files -links +1 -type f -exec rm {} \;
+rm ${tmp}/data/weird-files/fifo*
+rm ${tmp}/data/weird-files/newline*
+rm ${tmp}/data/weird-files/tab*
+# s3cmd does not sync empty dirs
+rmdir ${tmp}/data/weird-files/big-X
+rmdir ${tmp}/data/weird-files/subdir
+
 ${S3} sync "$BackupDirectory" s3://$bucket_name
 
 start_test
@@ -56,6 +65,7 @@ cat <<END_OF_DATA >$tmp/bconcmds
 messages
 @$out $tmp/log1.out
 setdebug level=100 storage=File
+setdebug level=100 client=bareos-fd trace=1 timestamp=1
 label volume=TestVolume001 storage=File pool=Full
 run job=$JobName yes
 status director
@@ -89,6 +99,6 @@ if ! diff -r tmp/data tmp/bareos-restores/$bucket_name/data; then
     export estat=1
 fi
 
-"${rscripts}"/stop_minio.sh
+"${SYSTEMTESTS_DIR}"/scripts/stop_minio.sh
 
 end_test
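
For context, a minimal standalone sketch (not part of the patch) of the Python 2/3 string handling that the new `StringCodec` helper encapsulates: the patch encodes backup filenames to UTF-8 bytes only under Python 2 before assigning `savepkt.fname`, and encodes restore paths only under Python 3 before they reach `os.makedirs()`/`open()`. The example filename and the `__main__` block below are illustrative only.

```python
# Illustrative sketch of the StringCodec logic from the diff above.
# Assumption: object names arrive as text (unicode) strings from the libcloud listing.
from sys import version_info


def encode_for_backup(name):
    # Python 2: the patch hands UTF-8 byte strings to the core via savepkt.fname.
    # Python 3: the str is passed through unchanged.
    if version_info.major < 3:
        return name.encode("utf-8")
    return name


def encode_for_restore(name):
    # Restore paths feed os.makedirs()/open(); the patch encodes them only on Python 3.
    if version_info.major < 3:
        return name
    return name.encode("utf-8")


if __name__ == "__main__":
    # Hypothetical object name, chosen purely for illustration.
    print(repr(encode_for_backup(u"weird-\u00e4-name")))
    print(repr(encode_for_restore(u"weird-\u00e4-name")))
```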