
Commit

Fixes process timeout issue while uploading data and adds timeout while deleting bucket

Signed-off-by: Uday Kurundwade <[email protected]>
udaysk23 committed Sep 30, 2024
1 parent 88de3ec commit 2e278c8
Showing 4 changed files with 16 additions and 4 deletions.
12 changes: 9 additions & 3 deletions ocs_ci/ocs/bucket_utils.py
@@ -49,7 +49,7 @@ def craft_s3_command(cmd, mcg_obj=None, api=False, signed_request_creds=None):
api = "api" if api else ""
no_ssl = (
"--no-verify-ssl"
if signed_request_creds and signed_request_creds.get("ssl") is False
if (signed_request_creds and signed_request_creds.get("ssl")) is False
else ""
)
if mcg_obj:
@@ -64,6 +64,7 @@ def craft_s3_command(cmd, mcg_obj=None, api=False, signed_request_creds=None):
f"{region}"
f"aws s3{api} "
f"--endpoint={mcg_obj.s3_internal_endpoint} "
f"{no_ssl} "
)
string_wrapper = '"'
elif signed_request_creds:
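
For reference, a minimal sketch of how the updated no_ssl expression evaluates; the helper function and the sample dictionaries below are hypothetical, used only to illustrate the conditional:

def no_ssl_flag(signed_request_creds):
    # Mirrors the conditional used above: the flag is emitted only when
    # signed request credentials are supplied and their "ssl" key is
    # explicitly False.
    return (
        "--no-verify-ssl"
        if (signed_request_creds and signed_request_creds.get("ssl")) is False
        else ""
    )

print(no_ssl_flag(None))               # "" - no signed request creds
print(no_ssl_flag({"ssl": True}))      # "" - SSL verification stays on
print(no_ssl_flag({"ssl": False}))     # "--no-verify-ssl"
print(no_ssl_flag({"region": "us"}))   # "" - "ssl" key absent, default applies

The resulting flag is then appended to the aws s3 base command via f"{no_ssl} ", so certificate verification can be skipped for those requests.
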
@@ -343,10 +344,15 @@ def copy_objects(
"""

logger.info(f"Copying object {src_obj} to {target}")
no_ssl = (
"--no-verify-ssl"
if (signed_request_creds and signed_request_creds.get("ssl")) is False
else ""
)
if recursive:
retrieve_cmd = f"cp {src_obj} {target} --recursive"
retrieve_cmd = f"cp {src_obj} {target} --recursive {no_ssl}"
else:
retrieve_cmd = f"cp {src_obj} {target}"
retrieve_cmd = f"cp {src_obj} {target} {no_ssl}"
if s3_obj:
secrets = [s3_obj.access_key_id, s3_obj.access_key, s3_obj.s3_internal_endpoint]
elif signed_request_creds:
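As a rough illustration of the new copy_objects behaviour (the bucket names and values below are made up), the crafted command now carries the flag whenever ssl is explicitly disabled:

# Hypothetical inputs, for illustration only.
src_obj = "s3://source-bucket/dir"
target = "s3://target-bucket/dir"
no_ssl = "--no-verify-ssl"  # signed_request_creds supplied with ssl=False
recursive = True

retrieve_cmd = (
    f"cp {src_obj} {target} --recursive {no_ssl}"
    if recursive
    else f"cp {src_obj} {target} {no_ssl}"
)
print(retrieve_cmd)
# cp s3://source-bucket/dir s3://target-bucket/dir --recursive --no-verify-ssl
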
3 changes: 3 additions & 0 deletions ocs_ci/ocs/resources/objectbucket.py
@@ -28,6 +28,7 @@
 from ocs_ci.ocs.utils import oc_get_all_obc_names
 from ocs_ci.utility import templating, version
 from ocs_ci.utility.utils import TimeoutSampler, mask_secrets
+from time import sleep

 logger = logging.getLogger(name=__file__)

@@ -475,6 +476,8 @@ def internal_delete(self):
                     obj_version.delete()
             else:
                 self.s3resource.Bucket(self.name).objects.all().delete()
+            if any("scale" in mark for mark in get_current_test_marks()):
+                sleep(1800)
             self.s3resource.Bucket(self.name).delete()
         except botocore.exceptions.ClientError as e:
             if e.response["Error"]["Code"] == "NoSuchBucket":
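A minimal sketch of the added delay, assuming get_current_test_marks() returns the pytest mark names of the currently running test; the helper, mark lists, and shortened wait below are hypothetical:

from time import sleep

def wait_before_bucket_delete(current_marks, wait_seconds=1800):
    # Scale-marked tests pause for wait_seconds (1800 s in the diff)
    # before the bucket itself is deleted.
    if any("scale" in mark for mark in current_marks):
        sleep(wait_seconds)

wait_before_bucket_delete(["tier1", "scale-2000-obc"], wait_seconds=1)  # 1800 in the diff
wait_before_bucket_delete(["tier1"])                                    # returns immediately
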
2 changes: 1 addition & 1 deletion ocs_ci/templates/app-pods/fedora_with_linuxtar_files.yaml
@@ -6,7 +6,7 @@ metadata:
 spec:
   containers:
     - name: fedora
-      image: quay.io/ocsci/fedora-with-linuxtar-files:latest
+      image: quay.io/ocsci/fedora-with-linuxtar-files:200k-files
       # Override the default `aws` entrypoint in order to
       # allow the pod to run continuously and act as a relay
       command: ['/bin/sh']
3 changes: 3 additions & 0 deletions tests/conftest.py
@@ -7037,6 +7037,9 @@ def fedora_pod_fixture(request, scope_name):
     helpers.wait_for_resource_state(
         fedora_pod_obj, constants.STATUS_RUNNING, timeout=240
     )
+    fedora_pod_obj.exec_cmd_on_pod(
+        f"cp {constants.SERVICE_CA_CRT_AWSCLI_PATH} {constants.AWSCLI_CA_BUNDLE_PATH}"
+    )

     def fedora_pod_cleanup():
         fedora_pod_obj.delete()
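A rough local sketch of the idea behind the new exec_cmd_on_pod call (in the fixture it runs inside the pod via exec): the AWS CLI reads the AWS_CA_BUNDLE environment variable to find a custom CA bundle, so copying the service CA certificate to that path lets in-pod aws calls verify the internal S3 endpoint. The paths and endpoint URL below are placeholders, not the real constants:

import os
import subprocess

# Placeholder values; the real ones come from ocs_ci.ocs.constants.
service_ca_crt = "/tmp/service-ca.crt"     # stands in for SERVICE_CA_CRT_AWSCLI_PATH
ca_bundle_path = "/tmp/aws-ca-bundle.crt"  # stands in for AWSCLI_CA_BUNDLE_PATH

# Copy the cert into place once the pod is Running ...
subprocess.run(["cp", service_ca_crt, ca_bundle_path], check=True)

# ... so later aws CLI calls can verify the endpoint's certificate.
subprocess.run(
    ["aws", "s3", "ls", "--endpoint-url", "https://s3.example.svc:443"],
    env={**os.environ, "AWS_CA_BUNDLE": ca_bundle_path},
    check=True,
)
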
