CEPH-83575917: Test archive site on/off with IOs in progress
Signed-off-by: viduship <[email protected]>
viduship committed Nov 21, 2023
1 parent 7752576 commit 28b5a7a
Showing 3 changed files with 73 additions and 14 deletions.
8 changes: 8 additions & 0 deletions rgw/v2/tests/s3cmd/multisite_configs/test_s3cmd.yaml
@@ -0,0 +1,8 @@
+# test case id: CEPH-83575917
+config:
+  haproxy: true
+  container_count: 1
+  objects_count: 5000
+  objects_size_range:
+    min: 5
+    max: 15
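
For context, a minimal sketch (an assumption, not part of this commit) of how this YAML maps onto the `config` object consumed by test_s3cmd.py below — `objects_count: 5000` is what pushes the run into the bulk-upload path guarded by `object_count > 500`:

```python
# Hypothetical loader sketch; the real framework resolves this YAML into
# the `config` object passed to test_exec().
import yaml

with open("rgw/v2/tests/s3cmd/multisite_configs/test_s3cmd.yaml") as f:
    cfg = yaml.safe_load(f)["config"]

print(cfg["haproxy"])        # True -> traffic goes through the local haproxy on port 5000
print(cfg["objects_count"])  # 5000 -> triggers the object_count > 500 branch
```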
37 changes: 37 additions & 0 deletions rgw/v2/tests/s3cmd/reusable.py
@@ -2,11 +2,14 @@
 Reusable methods for S3CMD
 """

+import datetime
 import json
 import logging
 import os
+import socket
 import subprocess
 import sys
+import time

 import boto
 import boto.s3.connection
@@ -284,3 +287,37 @@ def rate_limit_write(bucket, max_write_ops, ssl=None):
     )
     stdout, stderr = run_subprocess(cmd)
     assert "503" in str(stderr), "Rate limit slowdown not observed, failing!"
+
+
+def test_full_sync_at_archive(bucket_name):
+    """
+    Verify that data for a bucket is fully synced to the archive zone.
+    """
+    zone_name = "archive"
+    bucket_name = f"tenant/{bucket_name}"
+    cmd_bucket_stats = f"radosgw-admin bucket stats --bucket {bucket_name}"
+    log.info(f"collect bucket stats for {bucket_name} at local site")
+    local_bucket_stats = json.loads(utils.exec_shell_cmd(cmd_bucket_stats))
+    local_num_objects = local_bucket_stats["usage"]["rgw.main"]["num_objects"]
+    local_size = local_bucket_stats["usage"]["rgw.main"]["size"]
+    log.info(f"remote zone is {zone_name}")
+    remote_ip = utils.get_rgw_ip_zone(zone_name)
+    remote_site_ssh_con = utils.connect_remote(remote_ip)
+    log.info("Restart the gateways at the archive site")
+    remote_site_ssh_con.exec_command("ceph orch restart rgw.shared.arc")
+    log.info("Wait for the sync lease period of 1200 seconds")
+    time.sleep(1200)
+    log.info(f"collect bucket stats for {bucket_name} at remote site {zone_name}")
+    stdin, stdout, stderr = remote_site_ssh_con.exec_command(cmd_bucket_stats)
+    cmd_output = stdout.read().decode()
+    stats_remote = json.loads(cmd_output)
+    log.info(
+        f"bucket stats at remote site {zone_name} for {bucket_name} is {stats_remote}"
+    )
+    log.info("Verify num_objects and size are consistent across local and remote sites")
+    remote_num_objects = stats_remote["usage"]["rgw.main"]["num_objects"]
+    remote_size = stats_remote["usage"]["rgw.main"]["size"]
+    if remote_size == local_size and remote_num_objects == local_num_objects:
+        log.info(f"Data is consistent for bucket {bucket_name}")
+    else:
+        raise TestExecError(f"Data is inconsistent for {bucket_name} across sites")
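
The fixed `time.sleep(1200)` above waits out the sync lease unconditionally. A hedged alternative (a sketch reusing the names from the function above, not part of this commit) would poll the remote stats until they converge, returning as soon as sync completes:

```python
import json
import time


def wait_for_archive_sync(remote_site_ssh_con, cmd_bucket_stats, local_size,
                          local_num_objects, timeout=1200, interval=60):
    """Poll remote bucket stats until they match the local site or time out."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        _, stdout, _ = remote_site_ssh_con.exec_command(cmd_bucket_stats)
        stats = json.loads(stdout.read().decode())
        usage = stats.get("usage", {}).get("rgw.main", {})
        if (usage.get("size") == local_size
                and usage.get("num_objects") == local_num_objects):
            return True
        time.sleep(interval)
    return False
```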
42 changes: 28 additions & 14 deletions rgw/v2/tests/s3cmd/test_s3cmd.py
@@ -60,13 +60,18 @@ def test_exec(config, ssh_con):
     rgw_service = RGWService()

     ip_and_port = s3cmd_reusable.get_rgw_ip_and_port(ssh_con)
+    if config.haproxy:
+        hostname = socket.gethostname()
+        ip = socket.gethostbyname(hostname)
+        port = 5000
+        ip_and_port = f"{ip}:{port}"

     # CEPH-83575477 - Verify s3cmd get: Bug 2174863 - [cee/sd][RGW] 's3cmd get' fails with EOF error for few objects
     if config.test_ops.get("s3cmd_get_objects", False):
         log.info(f"Verify 's3cmd get' or download of objects")
         user_info = resource_op.create_users(no_of_users_to_create=config.user_count)
         s3_auth.do_auth(user_info[0], ip_and_port)
-        auth = Auth(user_info[0], ssh_con, ssl=config.ssl)
+        auth = Auth(user_info[0], ssh_con, ssl=config.ssl, haproxy=config.haproxy)
         rgw_conn = auth.do_auth()
         for bc in range(config.bucket_count):
             bucket_name = utils.gen_bucket_name_from_userid(
@@ -209,21 +214,31 @@ def test_exec(config, ssh_con):
     # Create a bucket
     s3cmd_reusable.create_bucket(bucket_name)
     log.info(f"Bucket {bucket_name} created")
+    object_count = config.objects_count

-    # Upload file to bucket
-    uploaded_file_info = s3cmd_reusable.upload_file(
-        bucket_name, test_data_path=TEST_DATA_PATH
-    )
-    uploaded_file = uploaded_file_info["name"]
-    log.info(f"Uploaded file {uploaded_file} to bucket {bucket_name}")
+    if object_count > 500:
+        s3cmd_path = "/home/cephuser/venv/bin/s3cmd"
+        utils.exec_shell_cmd(f"fallocate -l 4K obj4K")
+        for obj in range(object_count):
+            cmd = f"{s3cmd_path} put obj4K s3://{bucket_name}/object-{obj}"
+            utils.exec_shell_cmd(cmd)
+        s3cmd_reusable.test_full_sync_at_archive(bucket_name)

-    # Delete file from bucket
-    s3cmd_reusable.delete_file(bucket_name, uploaded_file)
-    log.info(f"Deleted file {uploaded_file} from bucket {bucket_name}")
+    else:
+        # Upload file to bucket
+        uploaded_file_info = s3cmd_reusable.upload_file(
+            bucket_name, test_data_path=TEST_DATA_PATH
+        )
+        uploaded_file = uploaded_file_info["name"]
+        log.info(f"Uploaded file {uploaded_file} to bucket {bucket_name}")

-    # Delete bucket
-    s3cmd_reusable.delete_bucket(bucket_name)
-    log.info(f"Bucket {bucket_name} deleted")
+        # Delete file from bucket
+        s3cmd_reusable.delete_file(bucket_name, uploaded_file)
+        log.info(f"Deleted file {uploaded_file} from bucket {bucket_name}")
+
+        # Delete bucket
+        s3cmd_reusable.delete_bucket(bucket_name)
+        log.info(f"Bucket {bucket_name} deleted")

     # check for any crashes during the execution
     crash_info = reusable.check_for_crash()
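
Back-of-envelope check for the bulk path above (assuming `fallocate -l 4K` produces 4096-byte files — an assumption about the units, not something this diff states): with `objects_count: 5000` from the YAML, both sites should converge on the totals that `test_full_sync_at_archive` compares:

```python
# Expected bucket stats once the archive zone has caught up (assumption:
# each object is exactly 4 KiB as created by `fallocate -l 4K`).
objects_count = 5000
object_size = 4 * 1024                    # 4096 bytes per object
expected_size = objects_count * object_size
print(expected_size)                      # 20480000 bytes, roughly 19.5 MiB
```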
@@ -232,7 +247,6 @@ def test_exec(config, ssh_con):


 if __name__ == "__main__":
-
     test_info = AddTestInfo("rgw test using s3cmd")

     try:
