Write file to rgw bucket from client #9680

Open · wants to merge 9 commits into base: master

64 changes: 60 additions & 4 deletions ocs_ci/ocs/resources/rgw.py
@@ -1,11 +1,17 @@
import base64
import logging

import boto3

from ocs_ci.framework import config
from ocs_ci.ocs.ocp import OCP
from ocs_ci.ocs import constants
from ocs_ci.helpers.helpers import storagecluster_independent_check


logger = logging.getLogger(name=__file__)


class RGW(object):
"""
Wrapper class for interaction with a cluster's RGW service
@@ -28,12 +34,30 @@ def __init__(self, namespace=None):
self.storageclass.get().get("parameters").get("endpoint")
)
self.region = self.storageclass.get().get("parameters").get("region")
# Todo: Implement retrieval in cases where CephObjectStoreUser is available
self.s3_endpoint = None
self.key_id = None
self.secret_key = None
self.s3_resource = None
if config.ENV_DATA["platform"].lower() in constants.ON_PREM_PLATFORMS:
self.s3_endpoint, self.key_id, self.secret_key = self.get_credentials()

self.s3_resource = boto3.resource(
"s3",
endpoint_url=self.s3_endpoint,
aws_access_key_id=self.key_id,
aws_secret_access_key=self.secret_key,
)
else:
logger.warning(
f"Platform {config.ENV_DATA['platform']} doesn't support RGW"
)

-    def get_credentials(self, secret_name=constants.NOOBAA_OBJECTSTOREUSER_SECRET):
+    def get_credentials(
+        self,
+        secret_name=constants.NOOBAA_OBJECTSTOREUSER_SECRET,
+        access_key_field="AccessKey",
+        secret_key_field="SecretKey",
+    ):
"""
Get Endpoint, Access key and Secret key from OCS secret. Endpoint is
taken from rgw exposed service. Use rgw_endpoint fixture in test to get
@@ -42,6 +66,10 @@ def get_credentials(self, secret_name=constants.NOOBAA_OBJECTSTOREUSER_SECRET):
Args:
secret_name (str): Name of secret to be used
for getting RGW credentials
            access_key_field (str): Name of the secret field in which the
                access key credential is stored
            secret_key_field (str): Name of the secret field in which the
                secret key credential is stored

Returns:
tuple: Endpoint, Access key, Secret key
@@ -70,9 +98,37 @@ def get_credentials(self, secret_name=constants.NOOBAA_OBJECTSTOREUSER_SECRET):

creds_secret_obj = secret_ocp_obj.get(secret_name)
access_key = base64.b64decode(
-            creds_secret_obj.get("data").get("AccessKey")
+            creds_secret_obj.get("data").get(access_key_field)
).decode("utf-8")
secret_key = base64.b64decode(
-            creds_secret_obj.get("data").get("SecretKey")
+            creds_secret_obj.get("data").get(secret_key_field)
).decode("utf-8")
return endpoint, access_key, secret_key

def update_s3_creds(self, access_key, secret_key):
"""
        Set the S3 credentials and rebuild the s3_resource stored in the RGW object.

Args:
access_key (str): access key credential
secret_key (str): secret key credential
"""
self.key_id = access_key
self.secret_key = secret_key
self.s3_resource = boto3.resource(
"s3",
endpoint_url=self.s3_endpoint,
aws_access_key_id=self.key_id,
aws_secret_access_key=self.secret_key,
)

    def s3_list_all_objects_in_bucket(self, bucketname):
        """
        List all objects in the given RGW bucket.

        Args:
            bucketname (str): Name of the rgw bucket

        Returns:
            list: A list of all bucket objects

        """
        return list(self.s3_resource.Bucket(bucketname).objects.all())
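
For context, a minimal usage sketch of the new credential plumbing (the bucket name and the AWS_* field names below are illustrative assumptions, not values taken from this PR):

from ocs_ci.ocs.resources.rgw import RGW

# Hypothetical usage: "my-obc-bucket" stands in for an existing OBC whose
# secret stores credentials under AWS_* field names rather than the
# default AccessKey/SecretKey.
rgw_obj = RGW()
endpoint, access_key, secret_key = rgw_obj.get_credentials(
    secret_name="my-obc-bucket",
    access_key_field="AWS_ACCESS_KEY_ID",
    secret_key_field="AWS_SECRET_ACCESS_KEY",
)

# Rebuild the boto3 resource with the retrieved credentials...
rgw_obj.update_s3_creds(access_key, secret_key)

# ...and list the bucket's contents through it.
for obj in rgw_obj.s3_list_all_objects_in_bucket("my-obc-bucket"):
    print(obj.key)
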
76 changes: 76 additions & 0 deletions tests/functional/object/rgw/test_provider_client.py
@@ -0,0 +1,76 @@
import logging
import pytest

from ocs_ci.framework import config
from ocs_ci.framework.pytest_customization.marks import (
tier1,
red_squad,
rgw,
runs_on_provider,
provider_client_ms_platform_required,
)
from ocs_ci.ocs import constants
from ocs_ci.ocs.bucket_utils import sync_object_directory

log = logging.getLogger(__name__)


@pytest.fixture
def return_to_original_context():
    """
    Make sure that original context is restored after the test.
    """
    original_cluster = config.cluster_ctx.MULTICLUSTER["multicluster_index"]
    yield
    log.info(f"Switching back to original cluster with index {original_cluster}")
    config.switch_ctx(original_cluster)

Contributor comment on this fixture:

This fixture looks useful and generic, are you sure you want to use it only for rgw tests?


@rgw
@red_squad
@tier1
@runs_on_provider
@provider_client_ms_platform_required
@pytest.mark.polarion_id("OCS-5765")
def test_write_file_to_bucket_on_client(
rgw_bucket_factory, rgw_obj, awscli_pod_client_session, return_to_original_context
):
"""
    Test object IO using the S3 SDK on an rgw bucket created on the provider and used on the client.
"""
awscli_pod, client_cluster = awscli_pod_client_session
    # Retrieve a list of all objects on the test-objects bucket and
    # download them to the pod
bucketname = rgw_bucket_factory(1, "rgw-oc")[0].name
full_object_path = f"s3://{bucketname}"
credentials = rgw_obj.get_credentials(
secret_name=bucketname,
access_key_field="AWS_ACCESS_KEY_ID",
secret_key_field="AWS_SECRET_ACCESS_KEY",
)
rgw_obj.update_s3_creds(credentials[1], credentials[2])

config.switch_ctx(client_cluster)
log.info(f"Switched to client cluster with index {client_cluster}")
downloaded_files = awscli_pod.exec_cmd_on_pod(
f"ls -A1 {constants.AWSCLI_TEST_OBJ_DIR}"
).split(" ")
# create s3_creds structure with s3_endpoint so that s3_internal_endpoint is not used
# TODO(fbalak): remove ssl=False option and provide correct certificate
s3_creds = {
"access_key_id": credentials[1],
"access_key": credentials[2],
"endpoint": credentials[0],
"ssl": False,
}
# Write all downloaded objects to the new bucket
sync_object_directory(
awscli_pod,
constants.AWSCLI_TEST_OBJ_DIR,
full_object_path,
signed_request_creds=s3_creds,
)

assert set(downloaded_files).issubset(
obj.key for obj in rgw_obj.s3_list_all_objects_in_bucket(bucketname)
)
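
To try the new test locally, an invocation along these lines should work (a hypothetical example; the cluster path and ocs-ci config files depend on the local setup):

run-ci tests/functional/object/rgw/test_provider_client.py --cluster-path /path/to/cluster-dir --ocsci-conf /path/to/provider-client-conf.yaml
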
Comment on lines +74 to +76

Contributor comment:

@fbalak We can use the verify_s3_object_integrity method from bucket_utils.py for object integrity checks.

Please refer to the link below:

def verify_s3_object_integrity(
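
A sketch of how that suggestion might fit this test, assuming verify_s3_object_integrity compares an original file with a downloaded copy via the awscli pod (check its actual signature in bucket_utils.py; the result directory below is an illustrative assumption):

from ocs_ci.ocs.bucket_utils import sync_object_directory, verify_s3_object_integrity

# Hypothetical integrity check: pull the synced objects back from the
# bucket into a separate directory on the awscli pod...
result_dir = "/aws/downloaded-objects"
sync_object_directory(
    awscli_pod, full_object_path, result_dir, signed_request_creds=s3_creds
)
# ...then compare each original file with its downloaded counterpart.
for file_name in downloaded_files:
    assert verify_s3_object_integrity(
        original_object_path=f"{constants.AWSCLI_TEST_OBJ_DIR}{file_name}",
        result_object_path=f"{result_dir}/{file_name}",
        awscli_pod=awscli_pod,
    )
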