diff --git a/rgw/v2/lib/resource_op.py b/rgw/v2/lib/resource_op.py
index 009b701ca..4bd97616d 100644
--- a/rgw/v2/lib/resource_op.py
+++ b/rgw/v2/lib/resource_op.py
@@ -231,6 +231,7 @@ def read(self):
         self.ceph_conf = self.doc["config"].get("ceph_conf")
         self.gc_verification = self.doc["config"].get("gc_verification", False)
         self.bucket_sync_crash = self.doc["config"].get("bucket_sync_crash", False)
+        self.bucket_sync_status = self.doc["config"].get("bucket_sync_status", False)
         self.bucket_sync_run = self.doc["config"].get("bucket_sync_run", False)
         self.bucket_stats = self.doc["config"].get("bucket_stats", False)
         self.header_size = self.doc["config"].get("header_size", False)
diff --git a/rgw/v2/tests/s3_swift/multisite_configs/test_bucket_sync_status.yaml b/rgw/v2/tests/s3_swift/multisite_configs/test_bucket_sync_status.yaml
new file mode 100644
index 000000000..25d8da961
--- /dev/null
+++ b/rgw/v2/tests/s3_swift/multisite_configs/test_bucket_sync_status.yaml
@@ -0,0 +1,21 @@
+# upload type: non multipart
+# script: test_Mbuckets_with_Nobjects.py
+config:
+  user_count: 1
+  bucket_count: 1
+  objects_count: 2
+  objects_size_range:
+    min: 5M
+    max: 15M
+  bucket_sync_status: true
+  test_ops:
+    create_bucket: true
+    create_object: true
+    download_object: false
+    delete_bucket_object: false
+  sharding:
+    enable: false
+    max_shards: 0
+  compression:
+    enable: false
+    type: zlib
diff --git a/rgw/v2/tests/s3_swift/test_Mbuckets_with_Nobjects.py b/rgw/v2/tests/s3_swift/test_Mbuckets_with_Nobjects.py
index 6ec24311c..34855a778 100644
--- a/rgw/v2/tests/s3_swift/test_Mbuckets_with_Nobjects.py
+++ b/rgw/v2/tests/s3_swift/test_Mbuckets_with_Nobjects.py
@@ -332,6 +332,14 @@ def test_exec(config):
                     raise TestExecError(
                         "Command failed....Bucket is not added into reshard queue"
                     )
+            if config.bucket_sync_status:
+                # NOTE: the message must be a single (implicitly concatenated)
+                # string literal; a raw embedded newline is a syntax error.
+                out = utils.bucket_sync_status(bucket.name, retry=10, delay=15)
+                if out is False:
+                    log.info(
+                        "Bucket sync is not caught up with source. "
+                        "Try bucket sync run to update bucket sync status."
+                    )
             if config.bucket_sync_run:
                 out = utils.check_bucket_sync(bucket.name)
                 if out is False:
diff --git a/rgw/v2/utils/utils.py b/rgw/v2/utils/utils.py
index 490ae028c..75f15b28f 100644
--- a/rgw/v2/utils/utils.py
+++ b/rgw/v2/utils/utils.py
@@ -11,6 +11,7 @@ from random import randint
 
 import yaml
 
+from v2.lib.exceptions import SyncFailedError
 
 BUCKET_NAME_PREFIX = "bucky" + "-" + str(random.randrange(1, 5000))
 S3_OBJECT_NAME_PREFIX = "key"
@@ -554,6 +555,47 @@ def check_bucket_sync(name):
     return out
 
 
+def bucket_sync_status(name, retry=10, delay=15):
+    """
+    Poll 'radosgw-admin bucket sync status' until the bucket catches up
+    with the source zone.
+
+    Args:
+        name(str): bucket name
+        retry(int): number of polling attempts after the initial check
+        delay(int): seconds to sleep between attempts
+
+    Returns:
+        True once the bucket is caught up with the source zone, either
+        directly or after a 'bucket sync run' fallback.
+
+    Raises:
+        SyncFailedError: if the bucket is still behind after all retries
+            and the 'bucket sync run' fallback also reports failure.
+    """
+    log.info(
+        f"check if bucket sync is in progress, if bucket sync is in progress "
+        f"retry {retry} times with {delay}secs of sleep between each retry"
+    )
+    cmd = f"radosgw-admin bucket sync status --bucket={name}"
+    out = exec_shell_cmd(cmd)
+    if "behind shards" not in out:
+        log.info("bucket is caught up with source zone.")
+        return True
+    log.info("bucket sync is in progress")
+    for retry_count in range(retry):
+        log.info(f"sleep of {delay} secs for sync to complete, retry {retry_count + 1}")
+        time.sleep(delay)
+        # re-run the status command each iteration; without refreshing
+        # 'out' the loop can never observe sync progress
+        out = exec_shell_cmd(cmd)
+        if "behind shards" not in out:
+            log.info("bucket is caught up with source zone.")
+            return True
+    # still behind after all retries: fall back to an explicit bucket sync run
+    if check_bucket_sync(name) is False:
+        raise SyncFailedError(
+            f"Bucket sync status not caught up with source after performing "
+            f"bucket sync run with {retry} retries and sleep of {delay}secs "
+            f"between each retry"
+        )
+    return True
+
+
 def get_hostname_ip():
     try:
         hostname = socket.gethostname()