From 2e278c879dd49375b6faa8df94144d9b62e850cc Mon Sep 17 00:00:00 2001
From: Uday Kurundwade
Date: Tue, 10 Sep 2024 11:56:53 +0530
Subject: [PATCH] Fixes process timeout issue while uploading data and adds
 timeout while deleting bucket

Signed-off-by: Uday Kurundwade
---
 ocs_ci/ocs/bucket_utils.py                    | 12 +++++++++---
 ocs_ci/ocs/resources/objectbucket.py          |  3 +++
 .../app-pods/fedora_with_linuxtar_files.yaml  |  2 +-
 tests/conftest.py                             |  3 +++
 4 files changed, 16 insertions(+), 4 deletions(-)

diff --git a/ocs_ci/ocs/bucket_utils.py b/ocs_ci/ocs/bucket_utils.py
index 578f6224e30..8a074d0ba70 100644
--- a/ocs_ci/ocs/bucket_utils.py
+++ b/ocs_ci/ocs/bucket_utils.py
@@ -49,7 +49,7 @@ def craft_s3_command(cmd, mcg_obj=None, api=False, signed_request_creds=None):
     api = "api" if api else ""
     no_ssl = (
         "--no-verify-ssl"
-        if signed_request_creds and signed_request_creds.get("ssl") is False
+        if (signed_request_creds and signed_request_creds.get("ssl")) is False
         else ""
     )
     if mcg_obj:
@@ -64,6 +64,7 @@ def craft_s3_command(cmd, mcg_obj=None, api=False, signed_request_creds=None):
             f"{region}"
             f"aws s3{api} "
             f"--endpoint={mcg_obj.s3_internal_endpoint} "
+            f"{no_ssl} "
         )
         string_wrapper = '"'
     elif signed_request_creds:
@@ -343,10 +344,15 @@ def copy_objects(
 
     """
     logger.info(f"Copying object {src_obj} to {target}")
+    no_ssl = (
+        "--no-verify-ssl"
+        if (signed_request_creds and signed_request_creds.get("ssl")) is False
+        else ""
+    )
     if recursive:
-        retrieve_cmd = f"cp {src_obj} {target} --recursive"
+        retrieve_cmd = f"cp {src_obj} {target} --recursive {no_ssl}"
     else:
-        retrieve_cmd = f"cp {src_obj} {target}"
+        retrieve_cmd = f"cp {src_obj} {target} {no_ssl}"
     if s3_obj:
         secrets = [s3_obj.access_key_id, s3_obj.access_key, s3_obj.s3_internal_endpoint]
     elif signed_request_creds:
diff --git a/ocs_ci/ocs/resources/objectbucket.py b/ocs_ci/ocs/resources/objectbucket.py
index dce30cf1ccf..82e8187552c 100644
--- a/ocs_ci/ocs/resources/objectbucket.py
+++ b/ocs_ci/ocs/resources/objectbucket.py
@@ -28,6 +28,7 @@
 from ocs_ci.ocs.utils import oc_get_all_obc_names
 from ocs_ci.utility import templating, version
 from ocs_ci.utility.utils import TimeoutSampler, mask_secrets
+from time import sleep
 
 logger = logging.getLogger(name=__file__)
 
@@ -475,6 +476,8 @@ def internal_delete(self):
                     obj_version.delete()
             else:
                 self.s3resource.Bucket(self.name).objects.all().delete()
+            if any("scale" in mark for mark in get_current_test_marks()):
+                sleep(1800)
             self.s3resource.Bucket(self.name).delete()
         except botocore.exceptions.ClientError as e:
             if e.response["Error"]["Code"] == "NoSuchBucket":
diff --git a/ocs_ci/templates/app-pods/fedora_with_linuxtar_files.yaml b/ocs_ci/templates/app-pods/fedora_with_linuxtar_files.yaml
index eb9a4460a4a..3ad3423bf53 100644
--- a/ocs_ci/templates/app-pods/fedora_with_linuxtar_files.yaml
+++ b/ocs_ci/templates/app-pods/fedora_with_linuxtar_files.yaml
@@ -6,7 +6,7 @@ metadata:
 spec:
   containers:
     - name: fedora
-      image: quay.io/ocsci/fedora-with-linuxtar-files:latest
+      image: quay.io/ocsci/fedora-with-linuxtar-files:200k-files
       # Override the default `aws` entrypoint in order to
       # allow the pod to run continuously and act as a relay
       command: ['/bin/sh']
diff --git a/tests/conftest.py b/tests/conftest.py
index a203a5a0f36..6ff0529aafa 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -7037,6 +7037,9 @@ def fedora_pod_fixture(request, scope_name):
     helpers.wait_for_resource_state(
         fedora_pod_obj, constants.STATUS_RUNNING, timeout=240
     )
+    fedora_pod_obj.exec_cmd_on_pod(
+        f"cp {constants.SERVICE_CA_CRT_AWSCLI_PATH} {constants.AWSCLI_CA_BUNDLE_PATH}"
+    )
 
     def fedora_pod_cleanup():
         fedora_pod_obj.delete()
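
Reviewer note, outside the patch: the sketch below restates the `--no-verify-ssl` handling that this change adds to both `craft_s3_command()` and `copy_objects()`. The helper name `build_no_ssl_flag` is illustrative only and does not exist in ocs-ci; it is a minimal standalone sketch of the condition used in the hunks above.

# Illustrative sketch only (not part of the patch): it mirrors the no_ssl
# condition added in craft_s3_command() and copy_objects() above.
def build_no_ssl_flag(signed_request_creds=None):
    # "--no-verify-ssl" is emitted only when signed request credentials are
    # supplied and explicitly carry ssl=False; every other case returns "".
    return (
        "--no-verify-ssl"
        if (signed_request_creds and signed_request_creds.get("ssl")) is False
        else ""
    )


assert build_no_ssl_flag() == ""
assert build_no_ssl_flag({"ssl": True}) == ""
assert build_no_ssl_flag({"ssl": False}) == "--no-verify-ssl"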