From 7d51ff3f3bbc9fcde6ba7ed1ad166fbb854abd9c Mon Sep 17 00:00:00 2001 From: Romain Komorn Date: Fri, 27 Sep 2024 13:24:48 +0100 Subject: [PATCH 1/4] add requests for fetching tests to client --- ddtrace/internal/ci_visibility/_api_client.py | 312 +++++++++++------- ddtrace/internal/ci_visibility/constants.py | 1 + ddtrace/internal/ci_visibility/recorder.py | 5 + .../ci_visibility/telemetry/api_request.py | 46 +++ .../telemetry/early_flake_detection.py | 24 ++ .../internal/ci_visibility/telemetry/git.py | 23 +- .../internal/ci_visibility/telemetry/itr.py | 36 -- tests/ci_visibility/api_client/_util.py | 126 +++++-- .../test_ci_visibility_api_client.py | 121 +++++-- ...visibility_api_client_setting_responses.py | 24 +- ...ility_api_client_unique_tests_responses.py | 106 ++++++ 11 files changed, 596 insertions(+), 228 deletions(-) create mode 100644 ddtrace/internal/ci_visibility/telemetry/api_request.py create mode 100644 ddtrace/internal/ci_visibility/telemetry/early_flake_detection.py create mode 100644 tests/ci_visibility/api_client/test_ci_visibility_api_client_unique_tests_responses.py diff --git a/ddtrace/internal/ci_visibility/_api_client.py b/ddtrace/internal/ci_visibility/_api_client.py index 6194744595a..6be18bd69b2 100644 --- a/ddtrace/internal/ci_visibility/_api_client.py +++ b/ddtrace/internal/ci_visibility/_api_client.py @@ -21,12 +21,18 @@ from ddtrace.internal.ci_visibility.constants import SKIPPABLE_ENDPOINT from ddtrace.internal.ci_visibility.constants import SUITE from ddtrace.internal.ci_visibility.constants import TEST +from ddtrace.internal.ci_visibility.constants import UNIQUE_TESTS_ENDPOINT from ddtrace.internal.ci_visibility.errors import CIVisibilityAuthenticationException from ddtrace.internal.ci_visibility.git_data import GitData +from ddtrace.internal.ci_visibility.telemetry.api_request import APIRequestMetricNames +from ddtrace.internal.ci_visibility.telemetry.api_request import record_api_request +from ddtrace.internal.ci_visibility.telemetry.api_request import record_api_request_error from ddtrace.internal.ci_visibility.telemetry.constants import ERROR_TYPES -from ddtrace.internal.ci_visibility.telemetry.git import record_settings -from ddtrace.internal.ci_visibility.telemetry.itr import record_itr_skippable_request -from ddtrace.internal.ci_visibility.telemetry.itr import record_itr_skippable_request_error +from ddtrace.internal.ci_visibility.telemetry.constants import GIT_TELEMETRY +from ddtrace.internal.ci_visibility.telemetry.early_flake_detection import EARLY_FLAKE_DETECTION_TELEMETRY +from ddtrace.internal.ci_visibility.telemetry.early_flake_detection import record_early_flake_detection_tests_count +from ddtrace.internal.ci_visibility.telemetry.git import record_settings_response +from ddtrace.internal.ci_visibility.telemetry.itr import SKIPPABLE_TESTS_TELEMETRY from ddtrace.internal.ci_visibility.telemetry.itr import record_skippable_count from ddtrace.internal.ci_visibility.utils import combine_url_path from ddtrace.internal.logger import get_logger @@ -56,6 +62,7 @@ _SKIPPABLE_ITEM_ID_TYPE = t.Union[InternalTestId, TestSuiteId] _CONFIGURATIONS_TYPE = t.Dict[str, t.Union[str, t.Dict[str, str]]] +_UNIQUE_TESTS_TYPE = t.Set[InternalTestId] _NETWORK_ERRORS = (TimeoutError, socket.timeout, RemoteDisconnected) @@ -80,6 +87,13 @@ class EarlyFlakeDetectionSettings: faulty_session_threshold: float = 30.0 +@dataclasses.dataclass(frozen=True) +class UniqueTestsData: + test_modules: t.Dict[str, TestModuleId] + test_suites: t.Dict[str, TestSuiteId] + test_cases: 
t.Dict[str, InternalTestId] + + @dataclasses.dataclass(frozen=True) class TestVisibilityAPISettings: __test__ = False @@ -272,11 +286,57 @@ def _do_request(self, method: str, endpoint: str, payload: str, timeout: t.Optio if conn is not None: conn.close() + def _do_request_with_telemetry( + self, + method: str, + endpoint: str, + payload: str, + metric_names: APIRequestMetricNames, + timeout: t.Optional[float] = None, + ) -> t.Any: + """Performs a request with telemetry submitted according to given names""" + sw = StopWatch() + sw.start() + error_type: t.Optional[ERROR_TYPES] = None + response_bytes: t.Optional[int] = None + try: + try: + response = self._do_request(method, endpoint, payload, timeout=timeout) + except _NETWORK_ERRORS: + error_type = ERROR_TYPES.TIMEOUT + raise + if response.status >= 400: + error_type = ERROR_TYPES.CODE_4XX if response.status < 500 else ERROR_TYPES.CODE_5XX + if response.status == 403: + raise CIVisibilityAuthenticationException() + raise ValueError("API response status code: %d", response.status) + try: + sw.stop() # Stop the timer before parsing the response + response_bytes = len(response.body) + if isinstance(response.body, bytes): + parsed = json.loads(response.body.decode()) + else: + parsed = json.loads(response.body) + return parsed + except JSONDecodeError: + error_type = ERROR_TYPES.BAD_JSON + raise + finally: + record_api_request(metric_names, sw.elapsed() * 1000, response_bytes=response_bytes, error=error_type) + def fetch_settings(self) -> TestVisibilityAPISettings: """Fetches settings from the test visibility API endpoint This raises encountered exceptions because fetch_settings may be used multiple times during a session. """ + + metric_names = APIRequestMetricNames( + GIT_TELEMETRY.SETTINGS_COUNT, + GIT_TELEMETRY.SETTINGS_MS, + GIT_TELEMETRY.SETTINGS_RESPONSE, + GIT_TELEMETRY.SETTINGS_ERRORS, + ) + payload = { "data": { "id": str(uuid4()), @@ -293,56 +353,40 @@ def fetch_settings(self) -> TestVisibilityAPISettings: } } - sw = StopWatch() - sw.start() - try: - response = self._do_request("POST", SETTING_ENDPOINT, json.dumps(payload), timeout=self._timeout) - except _NETWORK_ERRORS: - record_settings(sw.elapsed() * 1000, error=ERROR_TYPES.TIMEOUT) - raise - if response.status >= 400: - error_code = ERROR_TYPES.CODE_4XX if response.status < 500 else ERROR_TYPES.CODE_5XX - record_settings(sw.elapsed() * 1000, error=error_code) - if response.status == 403: - raise CIVisibilityAuthenticationException() - raise ValueError("API response status code: %d", response.status) - try: - if isinstance(response.body, bytes): - parsed = json.loads(response.body.decode()) - else: - parsed = json.loads(response.body) - except JSONDecodeError: - record_settings(sw.elapsed() * 1000, error=ERROR_TYPES.BAD_JSON) - raise + parsed_response = self._do_request_with_telemetry( + "POST", SETTING_ENDPOINT, json.dumps(payload), metric_names, timeout=self._timeout + ) - if "errors" in parsed: - record_settings(sw.elapsed() * 1000, error=ERROR_TYPES.UNKNOWN) + if "errors" in parsed_response: + record_api_request_error(metric_names.error, ERROR_TYPES.UNKNOWN) raise ValueError("Settings response contained an error, disabling Intelligent Test Runner") - log.debug("Parsed API response: %s", parsed) + log.debug("Parsed API response: %s", parsed_response) try: - attributes = parsed["data"]["attributes"] + attributes = parsed_response["data"]["attributes"] coverage_enabled = attributes["code_coverage"] skipping_enabled = attributes["tests_skipping"] require_git = 
attributes["require_git"] itr_enabled = attributes["itr_enabled"] flaky_test_retries_enabled = attributes["flaky_test_retries_enabled"] - early_flake_detection = EarlyFlakeDetectionSettings( - enabled=attributes["early_flake_detection"]["enabled"], - faulty_session_threshold=attributes["early_flake_detection"]["faulty_session_threshold"], - slow_test_retries_5s=attributes["early_flake_detection"]["slow_test_retries"]["5s"], - slow_test_retries_10s=attributes["early_flake_detection"]["slow_test_retries"]["10s"], - slow_test_retries_30s=attributes["early_flake_detection"]["slow_test_retries"]["30s"], - slow_test_retries_5m=attributes["early_flake_detection"]["slow_test_retries"]["5m"], - ) + + if attributes["early_flake_detection"]["enabled"]: + early_flake_detection = EarlyFlakeDetectionSettings( + enabled=attributes["early_flake_detection"]["enabled"], + faulty_session_threshold=attributes["early_flake_detection"]["faulty_session_threshold"], + slow_test_retries_5s=attributes["early_flake_detection"]["slow_test_retries"]["5s"], + slow_test_retries_10s=attributes["early_flake_detection"]["slow_test_retries"]["10s"], + slow_test_retries_30s=attributes["early_flake_detection"]["slow_test_retries"]["30s"], + slow_test_retries_5m=attributes["early_flake_detection"]["slow_test_retries"]["5m"], + ) + else: + early_flake_detection = EarlyFlakeDetectionSettings() except KeyError: - record_settings(sw.elapsed() * 1000, error=ERROR_TYPES.UNKNOWN) + record_api_request_error(metric_names.error, ERROR_TYPES.UNKNOWN) raise - record_settings(sw.elapsed() * 1000, coverage_enabled, skipping_enabled, require_git, itr_enabled) - - return TestVisibilityAPISettings( + api_settings = TestVisibilityAPISettings( coverage_enabled=coverage_enabled, skipping_enabled=skipping_enabled, require_git=require_git, @@ -351,9 +395,32 @@ def fetch_settings(self) -> TestVisibilityAPISettings: early_flake_detection=early_flake_detection, ) - def _do_skippable_request(self, timeout: float) -> t.Optional[_SkippableResponse]: + record_settings_response( + api_settings.coverage_enabled, + api_settings.skipping_enabled, + api_settings.require_git, + api_settings.itr_enabled, + api_settings.early_flake_detection.enabled, + ) + + return api_settings + + def fetch_skippable_items( + self, timeout: t.Optional[float] = None, ignore_test_parameters: bool = False + ) -> t.Optional[ITRData]: + if timeout is None: + timeout = DEFAULT_ITR_SKIPPABLE_TIMEOUT + + metric_names = APIRequestMetricNames( + SKIPPABLE_TESTS_TELEMETRY.REQUEST, + SKIPPABLE_TESTS_TELEMETRY.REQUEST_MS, + SKIPPABLE_TESTS_TELEMETRY.RESPONSE_TESTS, + SKIPPABLE_TESTS_TELEMETRY.REQUEST_ERRORS, + ) + payload = { "data": { + "id": str(uuid4()), "type": "test_params", "attributes": { "service": self._service, @@ -366,102 +433,107 @@ def _do_skippable_request(self, timeout: float) -> t.Optional[_SkippableResponse } } - error_type: t.Optional[ERROR_TYPES] = None - response_bytes: int = 0 - sw = StopWatch() - try: - try: - sw.start() - response = self._do_request("POST", SKIPPABLE_ENDPOINT, json.dumps(payload), timeout) - sw.stop() - except _NETWORK_ERRORS as e: - sw.stop() - log.warning("Error while fetching skippable tests: ", exc_info=True) - error_type = ERROR_TYPES.NETWORK if isinstance(e, RemoteDisconnected) else ERROR_TYPES.TIMEOUT - return None + skippable_response: _SkippableResponse = self._do_request_with_telemetry( + "POST", SKIPPABLE_ENDPOINT, json.dumps(payload), metric_names, timeout + ) + except Exception: # noqa: E722 + return None - if response.status >= 400: - 
error_type = ERROR_TYPES.CODE_4XX if response.status < 500 else ERROR_TYPES.CODE_5XX - log.warning("Skippable tests request responded with status %d", response.status) - return None - try: - response_bytes = len(response.body) - if isinstance(response.body, bytes): - parsed = json.loads(response.body.decode()) - else: - parsed = json.loads(response.body) - except json.JSONDecodeError: - log.warning("Skippable tests request responded with invalid JSON '%s'", response.body) - error_type = ERROR_TYPES.BAD_JSON - return None + covered_files: t.Optional[t.Dict[str, CoverageLines]] = None - return parsed + if skippable_response is None: + # We did not fetch any data, but telemetry has already been recorded, and a warning has been logged + return None - except Exception: - log.warning("Error retrieving skippable test data, no tests will be skipped", exc_info=True) - error_type = ERROR_TYPES.UNKNOWN + meta = skippable_response.get("meta") + if meta is None: + log.debug("SKippable tests response did not contain metadata field, no tests will be skipped") + record_api_request_error(metric_names.error, ERROR_TYPES.BAD_JSON) return None - finally: - record_itr_skippable_request( - sw.elapsed() * 1000, - response_bytes, - TEST if self._itr_skipping_level == ITR_SKIPPING_LEVEL.TEST else SUITE, - error=error_type, - ) + correlation_id = meta.get("correlation_id") + if correlation_id is None: + log.debug("Skippable tests response missing correlation_id") + else: + log.debug("Skippable tests response correlation_id: %s", correlation_id) - def fetch_skippable_items( - self, timeout: t.Optional[float] = None, ignore_test_parameters: bool = False - ) -> t.Optional[ITRData]: - if timeout is None: - timeout = DEFAULT_ITR_SKIPPABLE_TIMEOUT + covered_files_data = meta.get("coverage") + if covered_files_data is not None: + covered_files = _parse_covered_files(covered_files_data) - error_type: t.Optional[ERROR_TYPES] = None + items_to_skip_data = skippable_response.get("data") + if items_to_skip_data is None: + log.warning("Skippable tests request missing data, no tests will be skipped") + record_api_request_error(metric_names.error, ERROR_TYPES.BAD_JSON) + return None - skippable_response = self._do_skippable_request(timeout) + if self._itr_skipping_level == ITR_SKIPPING_LEVEL.TEST: + items_to_skip = _parse_skippable_tests(items_to_skip_data, ignore_test_parameters) + else: + items_to_skip = _parse_skippable_suites(items_to_skip_data) + return ITRData( + correlation_id=correlation_id, + covered_files=covered_files, + skippable_items=items_to_skip, + ) - covered_files: t.Optional[t.Dict[str, CoverageLines]] = None + def fetch_unique_tests(self) -> t.Optional[t.Set[InternalTestId]]: + metric_names = APIRequestMetricNames( + EARLY_FLAKE_DETECTION_TELEMETRY.REQUEST, + EARLY_FLAKE_DETECTION_TELEMETRY.REQUEST_MS, + EARLY_FLAKE_DETECTION_TELEMETRY.RESPONSE_BYTES, + EARLY_FLAKE_DETECTION_TELEMETRY.REQUEST_ERRORS, + ) - if skippable_response is None: - # We did not fetch any data, but telemetry has already been recorded, and a warning has been logged - return None + unique_test_ids: t.Set[InternalTestId] = set() + + payload = { + "data": { + "id": str(uuid4()), + "type": "ci_app_libraries_tests_request", + "attributes": { + "service": self._service, + "env": self._dd_env, + "repository_url": self._git_data.repository_url, + "configurations": self._configurations, + }, + } + } try: - meta = skippable_response.get("meta") - if meta is None: - log.debug("SKippable tests response did not contain metadata field, no tests will be 
skipped") - error_type = ERROR_TYPES.BAD_JSON - return None + parsed_response = self._do_request_with_telemetry( + "POST", UNIQUE_TESTS_ENDPOINT, json.dumps(payload), metric_names + ) + except Exception: # noqa: E722 + return None - correlation_id = meta.get("correlation_id") - if correlation_id is None: - log.debug("Skippable tests response missing correlation_id") - else: - log.debug("Skippable tests response correlation_id: %s", correlation_id) + if "errors" in parsed_response: + record_api_request_error(metric_names.error, ERROR_TYPES.UNKNOWN) + log.debug("Unique tests response contained an error") + return None - covered_files_data = meta.get("coverage") - if covered_files_data is not None: - covered_files = _parse_covered_files(covered_files_data) + try: + tests_data = parsed_response["data"]["attributes"]["tests"] + except KeyError: + record_api_request_error(metric_names.error, ERROR_TYPES.UNKNOWN) + return None - items_to_skip_data = skippable_response.get("data") - if items_to_skip_data is None: - log.warning("Skippable tests request missing data, no tests will be skipped") - error_type = ERROR_TYPES.BAD_JSON - return None + try: + for module, suites in tests_data.items(): + module_id = TestModuleId(module) + for suite, tests in suites.items(): + suite_id = TestSuiteId(module_id, suite) + for test in tests: + unique_test_ids.add(InternalTestId(suite_id, test)) + except Exception: # noqa: E722 + log.debug("Failed to parse unique tests data", exc_info=True) + record_api_request_error(metric_names.error, ERROR_TYPES.UNKNOWN) + return None - if self._itr_skipping_level == ITR_SKIPPING_LEVEL.TEST: - items_to_skip = _parse_skippable_tests(items_to_skip_data, ignore_test_parameters) - else: - items_to_skip = _parse_skippable_suites(items_to_skip_data) - return ITRData( - correlation_id=correlation_id, - covered_files=covered_files, - skippable_items=items_to_skip, - ) - finally: - if error_type is not None: - record_itr_skippable_request_error(error_type) + record_early_flake_detection_tests_count(len(unique_test_ids)) + + return unique_test_ids class AgentlessTestVisibilityAPIClient(_TestVisibilityAPIClientBase): diff --git a/ddtrace/internal/ci_visibility/constants.py b/ddtrace/internal/ci_visibility/constants.py index 6f90873f9b5..2af9dc53c39 100644 --- a/ddtrace/internal/ci_visibility/constants.py +++ b/ddtrace/internal/ci_visibility/constants.py @@ -46,6 +46,7 @@ GIT_API_BASE_PATH = "/api/v2/git" SETTING_ENDPOINT = "/api/v2/libraries/tests/services/setting" SKIPPABLE_ENDPOINT = "/api/v2/ci/tests/skippable" +UNIQUE_TESTS_ENDPOINT = "/api/v2/ci/libraries/tests" # Intelligent Test Runner constants ITR_UNSKIPPABLE_REASON = "datadog_itr_unskippable" diff --git a/ddtrace/internal/ci_visibility/recorder.py b/ddtrace/internal/ci_visibility/recorder.py index 328fe1077fe..db792ea7163 100644 --- a/ddtrace/internal/ci_visibility/recorder.py +++ b/ddtrace/internal/ci_visibility/recorder.py @@ -195,6 +195,7 @@ def __init__(self, tracer=None, config=None, service=None): self._should_upload_git_metadata = True self._itr_meta = {} # type: Dict[str, Any] self._itr_data: Optional[ITRData] = None + self._unique_tests: Set[InternalTestId] = set() self._session: Optional[TestVisibilitySession] = None @@ -270,6 +271,10 @@ def __init__(self, tracer=None, config=None, service=None): self._api_settings.itr_enabled, self._api_settings.skipping_enabled, ) + log.info( + "API-provided settings: early flake detection enabled: %s", + self._api_settings.early_flake_detection.enabled, + ) log.info("Detected 
configurations: %s", str(self._configurations)) try: diff --git a/ddtrace/internal/ci_visibility/telemetry/api_request.py b/ddtrace/internal/ci_visibility/telemetry/api_request.py new file mode 100644 index 00000000000..21c26ccb8cb --- /dev/null +++ b/ddtrace/internal/ci_visibility/telemetry/api_request.py @@ -0,0 +1,46 @@ +import dataclasses +from typing import Optional + +from ddtrace.internal.ci_visibility.telemetry.constants import CIVISIBILITY_TELEMETRY_NAMESPACE as _NAMESPACE +from ddtrace.internal.ci_visibility.telemetry.constants import ERROR_TYPES +from ddtrace.internal.logger import get_logger +from ddtrace.internal.telemetry import telemetry_writer + + +log = get_logger(__name__) + + +@dataclasses.dataclass(frozen=True) +class APIRequestMetricNames: + count: str + duration: str + response_bytes: str + error: str + + +def record_api_request( + metric_names: APIRequestMetricNames, + duration: float, + response_bytes: Optional[int] = None, + error: Optional[ERROR_TYPES] = None, +): + log.debug( + "Recording early flake detection telemetry for %s: %s, %s, %s", + metric_names.count, + duration, + response_bytes, + error, + ) + + telemetry_writer.add_count_metric(_NAMESPACE, f"{metric_names.count}", 1) + telemetry_writer.add_distribution_metric(_NAMESPACE, f"{metric_names.duration}", duration) + if response_bytes is not None: + telemetry_writer.add_distribution_metric(_NAMESPACE, f"{metric_names.response_bytes}", response_bytes) + + if error is not None: + record_api_request_error(metric_names.error, error) + + +def record_api_request_error(error_metric_name: str, error: ERROR_TYPES): + log.debug("Recording early flake detection request error telemetry: %s", error) + telemetry_writer.add_count_metric(_NAMESPACE, error_metric_name, 1, (("error_type", error),)) diff --git a/ddtrace/internal/ci_visibility/telemetry/early_flake_detection.py b/ddtrace/internal/ci_visibility/telemetry/early_flake_detection.py new file mode 100644 index 00000000000..7c9bf9dd916 --- /dev/null +++ b/ddtrace/internal/ci_visibility/telemetry/early_flake_detection.py @@ -0,0 +1,24 @@ +from enum import Enum + +from ddtrace.internal.ci_visibility.telemetry.constants import CIVISIBILITY_TELEMETRY_NAMESPACE as _NAMESPACE +from ddtrace.internal.logger import get_logger +from ddtrace.internal.telemetry import telemetry_writer + + +log = get_logger(__name__) + +EARLY_FLAKE_DETECTION_TELEMETRY_PREFIX = "early_flake_detection." 
+RESPONSE_TESTS = f"{EARLY_FLAKE_DETECTION_TELEMETRY_PREFIX}response_tests" + + +class EARLY_FLAKE_DETECTION_TELEMETRY(str, Enum): + REQUEST = "early_flake_detection.request" + REQUEST_MS = "early_flake_detection.request_ms" + REQUEST_ERRORS = "early_flake_detection.request_errors" + RESPONSE_BYTES = "early_flake_detection.response_bytes" + RESPONSE_TESTS = "early_flake_detection.response_tests" + + +def record_early_flake_detection_tests_count(early_flake_detection_count: int): + log.debug("Recording early flake detection tests count telemetry: %s", early_flake_detection_count) + telemetry_writer.add_count_metric(_NAMESPACE, RESPONSE_TESTS, early_flake_detection_count) diff --git a/ddtrace/internal/ci_visibility/telemetry/git.py b/ddtrace/internal/ci_visibility/telemetry/git.py index c656c3d4361..000ddfb2249 100644 --- a/ddtrace/internal/ci_visibility/telemetry/git.py +++ b/ddtrace/internal/ci_visibility/telemetry/git.py @@ -45,22 +45,20 @@ def record_objects_pack_data(num_files: int, num_bytes: int) -> None: telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.OBJECTS_PACK_FILES, num_files) -def record_settings( - duration: float, +def record_settings_response( coverage_enabled: Optional[bool] = False, skipping_enabled: Optional[bool] = False, require_git: Optional[bool] = False, itr_enabled: Optional[bool] = False, - error: Optional[ERROR_TYPES] = None, + early_flake_detection_enabled: Optional[bool] = False, ) -> None: log.debug( - "Recording settings telemetry: %s, %s, %s, %s, %s, %s", - duration, + "Recording settings telemetry: %s, %s, %s, %s, %s", coverage_enabled, skipping_enabled, require_git, itr_enabled, - error, + early_flake_detection_enabled, ) # Telemetry "booleans" are true if they exist, otherwise false response_tags = [] @@ -72,13 +70,8 @@ def record_settings( response_tags.append(("require_git", "1")) if itr_enabled: response_tags.append(("itrskip_enabled", "1")) + if early_flake_detection_enabled: + response_tags.append(("early_flake_detection_enabled", "1")) - telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.SETTINGS_COUNT, 1) - telemetry_writer.add_distribution_metric(_NAMESPACE, GIT_TELEMETRY.SETTINGS_MS, duration) - - telemetry_writer.add_count_metric( - _NAMESPACE, GIT_TELEMETRY.SETTINGS_RESPONSE, 1, tuple(response_tags) if response_tags else None - ) - if error is not None: - error_tags = (("error", error),) - telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.SETTINGS_ERRORS, 1, error_tags) + if response_tags: + telemetry_writer.add_count_metric(_NAMESPACE, GIT_TELEMETRY.SETTINGS_RESPONSE, 1, tuple(response_tags)) diff --git a/ddtrace/internal/ci_visibility/telemetry/itr.py b/ddtrace/internal/ci_visibility/telemetry/itr.py index cabe51fd903..210a4103734 100644 --- a/ddtrace/internal/ci_visibility/telemetry/itr.py +++ b/ddtrace/internal/ci_visibility/telemetry/itr.py @@ -1,10 +1,8 @@ from enum import Enum import functools -from typing import Optional from ddtrace.internal.ci_visibility.constants import SUITE from ddtrace.internal.ci_visibility.telemetry.constants import CIVISIBILITY_TELEMETRY_NAMESPACE as _NAMESPACE -from ddtrace.internal.ci_visibility.telemetry.constants import ERROR_TYPES from ddtrace.internal.ci_visibility.telemetry.constants import EVENT_TYPES from ddtrace.internal.logger import get_logger from ddtrace.internal.telemetry import telemetry_writer @@ -63,37 +61,3 @@ def record_skippable_count(skippable_count: int, skipping_level: str): else SKIPPABLE_TESTS_TELEMETRY.RESPONSE_TESTS ) 
telemetry_writer.add_count_metric(_NAMESPACE, skippable_count_metric, skippable_count) - - -def record_itr_skippable_request_error(error: ERROR_TYPES): - log.debug("Recording itr skippable request error telemetry") - telemetry_writer.add_count_metric(_NAMESPACE, SKIPPABLE_TESTS_TELEMETRY.REQUEST_ERRORS, 1, (("error_type", error),)) - - -def record_itr_skippable_request( - duration: float, - response_bytes: int, - skipping_level: str, - skippable_count: Optional[int] = None, - error: Optional[ERROR_TYPES] = None, -): - log.debug( - "Recording itr skippable request telemetry: %s, %s, %s, %s, %s", - duration, - response_bytes, - skippable_count, - skipping_level, - error, - ) - - telemetry_writer.add_count_metric(_NAMESPACE, SKIPPABLE_TESTS_TELEMETRY.REQUEST, 1) - telemetry_writer.add_distribution_metric(_NAMESPACE, SKIPPABLE_TESTS_TELEMETRY.REQUEST_MS, duration) - telemetry_writer.add_distribution_metric(_NAMESPACE, SKIPPABLE_TESTS_TELEMETRY.RESPONSE_BYTES, response_bytes) - - if error is not None: - record_itr_skippable_request_error(error) - # If there was an error, assume no skippable items can be counted - return - - if skippable_count is not None: - record_skippable_count(skippable_count, skipping_level) diff --git a/tests/ci_visibility/api_client/_util.py b/tests/ci_visibility/api_client/_util.py index b8f5dd32673..64c17235982 100644 --- a/tests/ci_visibility/api_client/_util.py +++ b/tests/ci_visibility/api_client/_util.py @@ -45,6 +45,7 @@ def _get_setting_api_response( require_git=False, itr_enabled=False, flaky_test_retries_enabled=False, + efd_present=False, # This controls whether a default EFD response is present (instead of only {"enabled": false} efd_detection_enabled=False, efd_5s=10, efd_10s=5, @@ -52,31 +53,58 @@ def _get_setting_api_response( efd_5m=2, efd_session_threshold=30.0, ): + body = { + "data": { + "id": "1234", + "type": "ci_app_tracers_test_service_settings", + "attributes": { + "code_coverage": code_coverage, + "early_flake_detection": { + "enabled": False, + }, + "flaky_test_retries_enabled": flaky_test_retries_enabled, + "itr_enabled": itr_enabled, + "require_git": require_git, + "tests_skipping": tests_skipping, + }, + } + } + + if efd_present or efd_detection_enabled: + body["data"]["attributes"]["early_flake_detection"].update( + { + "enabled": efd_detection_enabled, + "slow_test_retries": {"10s": efd_10s, "30s": efd_30s, "5m": efd_5m, "5s": efd_5s}, + "faulty_session_threshold": efd_session_threshold, + } + ) + + return Response(status=status_code, body=json.dumps(body)) + + +def _get_skippable_api_response(): return Response( - status=status_code, - body=json.dumps( + 200, + json.dumps( { - "data": { - "id": "1234", - "type": "ci_app_tracers_test_service_settings", - "attributes": { - "code_coverage": code_coverage, - "early_flake_detection": { - "enabled": efd_detection_enabled, - "slow_test_retries": {"10s": efd_10s, "30s": efd_30s, "5m": efd_5m, "5s": efd_5s}, - "faulty_session_threshold": efd_session_threshold, - }, - "flaky_test_retries_enabled": flaky_test_retries_enabled, - "itr_enabled": itr_enabled, - "require_git": require_git, - "tests_skipping": tests_skipping, - }, - } + "data": [], + "meta": { + "correlation_id": "1234ideclareacorrelationid", + }, } ), ) +def _get_tests_api_response(tests_body: t.Optional[t.Dict] = None): + response = {"data": {"id": "J0ucvcSApX8", "type": "ci_app_libraries_tests", "attributes": {"tests": {}}}} + + if tests_body is not None: + response["data"]["attributes"]["tests"].update(tests_body) + + return 
Response(200, json.dumps(response)) + + def _make_fqdn_internal_test_id(module_name: str, suite_name: str, test_name: str, parameters: t.Optional[str] = None): """An easy way to create a test id "from the bottom up" @@ -164,7 +192,7 @@ def _get_test_client( client_timeout, ) - def _get_expected_do_request_payload( + def _get_expected_do_request_setting_payload( self, itr_skipping_level: ITR_SKIPPING_LEVEL = ITR_SKIPPING_LEVEL.TEST, git_data: GitData = None, @@ -195,6 +223,64 @@ def _get_expected_do_request_payload( }, } + def _get_expected_do_request_skippable_payload( + self, + itr_skipping_level: ITR_SKIPPING_LEVEL = ITR_SKIPPING_LEVEL.TEST, + git_data: GitData = None, + dd_service: t.Optional[str] = None, + dd_env: t.Optional[str] = None, + ): + git_data = self.default_git_data if git_data is None else git_data + + return { + "data": { + "id": "checkoutmyuuid4", + "type": "test_params", + "attributes": { + "test_level": "test" if itr_skipping_level == ITR_SKIPPING_LEVEL.TEST else "suite", + "service": dd_service, + "env": dd_env, + "repository_url": git_data.repository_url, + "sha": git_data.commit_sha, + "configurations": { + "os.architecture": "arm64", + "os.platform": "PlatForm", + "os.version": "9.8.a.b", + "runtime.name": "RPython", + "runtime.version": "11.5.2", + }, + }, + }, + } + + def _get_expected_do_request_tests_payload( + self, + repository_url: str = None, + dd_service: t.Optional[str] = None, + dd_env: t.Optional[str] = None, + ): + if repository_url is None: + repository_url = self.default_git_data.repository_url + + return { + "data": { + "id": "checkoutmyuuid4", + "type": "ci_app_libraries_tests_request", + "attributes": { + "service": dd_service, + "env": dd_env, + "repository_url": repository_url, + "configurations": { + "os.architecture": "arm64", + "os.platform": "PlatForm", + "os.version": "9.8.a.b", + "runtime.name": "RPython", + "runtime.version": "11.5.2", + }, + }, + }, + } + @staticmethod def _get_mock_civisibility(requests_mode, suite_skipping_mode): with mock.patch.object(CIVisibility, "__init__", return_value=None): @@ -234,5 +320,5 @@ def _get_mock_civisibility(requests_mode, suite_skipping_mode): def _test_context_manager(self): with mock.patch("ddtrace.internal.ci_visibility._api_client.uuid4", return_value="checkoutmyuuid4"), mock.patch( "ddtrace.internal.ci_visibility._api_client.DEFAULT_TIMEOUT", 12.34 - ): + ), mock.patch("ddtrace.internal.ci_visibility._api_client.DEFAULT_ITR_SKIPPABLE_TIMEOUT", 43.21): yield diff --git a/tests/ci_visibility/api_client/test_ci_visibility_api_client.py b/tests/ci_visibility/api_client/test_ci_visibility_api_client.py index 0a23d786baa..8e84d277e40 100644 --- a/tests/ci_visibility/api_client/test_ci_visibility_api_client.py +++ b/tests/ci_visibility/api_client/test_ci_visibility_api_client.py @@ -1,6 +1,5 @@ from contextlib import contextmanager import json -import re from unittest import mock import pytest @@ -9,6 +8,7 @@ from ddtrace.internal.ci_visibility import CIVisibility from ddtrace.internal.ci_visibility._api_client import AgentlessTestVisibilityAPIClient from ddtrace.internal.ci_visibility._api_client import EVPProxyTestVisibilityAPIClient +from ddtrace.internal.ci_visibility._api_client import ITRData from ddtrace.internal.ci_visibility._api_client import TestVisibilityAPISettings from ddtrace.internal.ci_visibility.constants import ITR_SKIPPING_LEVEL from ddtrace.internal.ci_visibility.constants import REQUESTS_MODE @@ -19,6 +19,8 @@ from tests.ci_visibility.api_client._util import 
TestTestVisibilityAPIClientBase from tests.ci_visibility.api_client._util import _get_mock_connection from tests.ci_visibility.api_client._util import _get_setting_api_response +from tests.ci_visibility.api_client._util import _get_skippable_api_response +from tests.ci_visibility.api_client._util import _get_tests_api_response from tests.ci_visibility.test_ci_visibility import _dummy_noop_git_client from tests.ci_visibility.util import _ci_override_env @@ -64,33 +66,53 @@ class TestTestVisibilityAPIClient(TestTestVisibilityAPIClientBase): "api_key": "myfakeapikey", "agentless_url": None, "dd_site": None, - "expected_url": "https://api.datadoghq.com/api/v2/libraries/tests/services/setting", + "expected_urls": { + "setting": "https://api.datadoghq.com/api/v2/libraries/tests/services/setting", + "skippable": "https://api.datadoghq.com/api/v2/ci/tests/skippable", + "tests": "https://api.datadoghq.com/api/v2/ci/libraries/tests", + }, }, { "mode": _AGENTLESS, "api_key": "myfakeapikey", "agentless_url": None, "dd_site": "datad0g.com", - "expected_url": "https://api.datad0g.com/api/v2/libraries/tests/services/setting", + "expected_urls": { + "setting": "https://api.datad0g.com/api/v2/libraries/tests/services/setting", + "skippable": "https://api.datad0g.com/api/v2/ci/tests/skippable", + "tests": "https://api.datad0g.com/api/v2/ci/libraries/tests", + }, }, { "mode": _AGENTLESS, "api_key": "myfakeapikey", "agentless_url": "http://dd", "dd_site": None, - "expected_url": "http://dd/api/v2/libraries/tests/services/setting", + "expected_urls": { + "setting": "http://dd/api/v2/libraries/tests/services/setting", + "skippable": "http://dd/api/v2/ci/tests/skippable", + "tests": "http://dd/api/v2/ci/libraries/tests", + }, }, { "mode": _AGENTLESS, "api_key": "myfakeapikey", "agentless_url": "http://dd", "dd_site": "datad0g.com", - "expected_url": "http://dd/api/v2/libraries/tests/services/setting", + "expected_urls": { + "setting": "http://dd/api/v2/libraries/tests/services/setting", + "skippable": "http://dd/api/v2/ci/tests/skippable", + "tests": "http://dd/api/v2/ci/libraries/tests", + }, }, { "mode": _EVP_PROXY, "agent_url": "http://myagent:1234", - "expected_url": "http://myagent:1234/evp_proxy/v2/api/v2/libraries/tests/services/setting", + "expected_urls": { + "setting": "http://myagent:1234/evp_proxy/v2/api/v2/libraries/tests/services/setting", + "skippable": "http://myagent:1234/evp_proxy/v2/api/v2/ci/tests/skippable", + "tests": "http://myagent:1234/evp_proxy/v2/api/v2/ci/libraries/tests", + }, }, ] @@ -102,25 +124,24 @@ class TestTestVisibilityAPIClient(TestTestVisibilityAPIClientBase): ] # All requests to setting endpoint are the same within a call of _check_enabled_features() - expected_do_request_method = "POST" - expected_do_request_urls = { - REQUESTS_MODE.AGENTLESS_EVENTS: re.compile( - r"^https://api\.datad0g\.com/api/v2/libraries/tests/services/setting$" - ), - REQUESTS_MODE.EVP_PROXY_EVENTS: re.compile( - r"^http://notahost:1234/evp_proxy/v2/api/v2/libraries/tests/services/setting$" - ), - } expected_items = { _AGENTLESS: { - "endpoint": "/api/v2/libraries/tests/services/setting", + "endpoints": { + "setting": "/api/v2/libraries/tests/services/setting", + "skippable": "/api/v2/ci/tests/skippable", + "tests": "/api/v2/ci/libraries/tests", + }, "headers": { "dd-api-key": "myfakeapikey", "Content-Type": "application/json", }, }, _EVP_PROXY: { - "endpoint": "/evp_proxy/v2/api/v2/libraries/tests/services/setting", + "endpoints": { + "setting": 
"/evp_proxy/v2/api/v2/libraries/tests/services/setting", + "skippable": "/evp_proxy/v2/api/v2/ci/tests/skippable", + "tests": "/evp_proxy/v2/api/v2/ci/libraries/tests", + }, "headers": { "X-Datadog-EVP-Subdomain": "api", "Content-Type": "application/json", @@ -155,13 +176,14 @@ def test_civisibility_api_client_settings_do_request_connection(self, client_tim settings = client.fetch_settings() assert settings == TestVisibilityAPISettings() mock_get_connection.assert_called_once_with( - requests_mode_settings["expected_url"], client_timeout if client_timeout is not None else 12.34 + requests_mode_settings["expected_urls"]["setting"], + client_timeout if client_timeout is not None else 12.34, ) mock_connection.request.assert_called_once() call_args = mock_connection.request.call_args_list[0][0] assert call_args[0] == "POST" - assert call_args[1] == self.expected_items[requests_mode_settings["mode"]]["endpoint"] - assert json.loads(call_args[2]) == self._get_expected_do_request_payload( + assert call_args[1] == self.expected_items[requests_mode_settings["mode"]]["endpoints"]["setting"] + assert json.loads(call_args[2]) == self._get_expected_do_request_setting_payload( ITR_SKIPPING_LEVEL.TEST, dd_service="a_test_service", dd_env="a_test_env" ) assert call_args[3] == self.expected_items[requests_mode_settings["mode"]]["headers"] @@ -193,7 +215,7 @@ def test_civisibility_api_client_settings_do_request_call_optionals( assert mock_do_request.call_count == 1 call_args = mock_do_request.call_args_list[0][0] assert call_args[0] == "POST" - assert json.loads(call_args[2]) == self._get_expected_do_request_payload( + assert json.loads(call_args[2]) == self._get_expected_do_request_setting_payload( itr_skipping_level, git_data=git_data, dd_service=dd_service, dd_env=dd_env ) @@ -207,8 +229,6 @@ def test_civisibility_api_client_skippable_do_request( self, requests_mode_settings, client_timeout, request_timeout ): """Tests that the correct payload and headers are sent to the correct API URL for skippable requests""" - pass - client = self._get_test_client( requests_mode=requests_mode_settings["mode"], api_key=requests_mode_settings.get("api_key"), @@ -220,26 +240,69 @@ def test_civisibility_api_client_skippable_do_request( client_timeout=client_timeout, ) - mock_connection = _get_mock_connection(_get_setting_api_response().body) + mock_connection = _get_mock_connection(_get_skippable_api_response().body) with mock.patch( "ddtrace.internal.ci_visibility._api_client.get_connection", return_value=mock_connection ) as mock_get_connection: - settings = client.fetch_settings() - assert settings == TestVisibilityAPISettings() + skippable_items = client.fetch_skippable_items(timeout=request_timeout) + assert skippable_items == ITRData(correlation_id="1234ideclareacorrelationid") mock_get_connection.assert_called_once_with( - requests_mode_settings["expected_url"], client_timeout if client_timeout is not None else 12.34 + requests_mode_settings["expected_urls"]["skippable"], + request_timeout if request_timeout is not None else 43.21, ) mock_connection.request.assert_called_once() call_args = mock_connection.request.call_args_list[0][0] assert call_args[0] == "POST" - assert call_args[1] == self.expected_items[requests_mode_settings["mode"]]["endpoint"] - assert json.loads(call_args[2]) == self._get_expected_do_request_payload( + assert call_args[1] == self.expected_items[requests_mode_settings["mode"]]["endpoints"]["skippable"] + assert json.loads(call_args[2]) == self._get_expected_do_request_skippable_payload( 
ITR_SKIPPING_LEVEL.TEST, dd_service="a_test_service", dd_env="a_test_env" ) assert call_args[3] == self.expected_items[requests_mode_settings["mode"]]["headers"] mock_connection.close.assert_called_once() + @pytest.mark.parametrize("client_timeout", [None, 5]) + @pytest.mark.parametrize("request_timeout", [None, 10]) + @pytest.mark.parametrize( + "requests_mode_settings", + request_mode_settings_parameters, + ) + def test_civisibility_api_client_unique_tests_do_request( + self, requests_mode_settings, client_timeout, request_timeout + ): + """Tests that the correct payload and headers are sent to the correct API URL for unique tests requests""" + client = self._get_test_client( + requests_mode=requests_mode_settings["mode"], + api_key=requests_mode_settings.get("api_key"), + dd_site=requests_mode_settings.get("dd_site"), + agentless_url=requests_mode_settings.get("agentless_url"), + agent_url=requests_mode_settings.get("agent_url"), + dd_service="a_test_service", + dd_env="a_test_env", + client_timeout=client_timeout, + ) + + mock_connection = _get_mock_connection(_get_tests_api_response().body) + + with mock.patch( + "ddtrace.internal.ci_visibility._api_client.get_connection", return_value=mock_connection + ) as mock_get_connection: + unique_tests = client.fetch_unique_tests() + assert unique_tests == set() + mock_get_connection.assert_called_once_with( + requests_mode_settings["expected_urls"]["tests"], + client_timeout if client_timeout is not None else 12.34, + ) + mock_connection.request.assert_called_once() + call_args = mock_connection.request.call_args_list[0][0] + assert call_args[0] == "POST" + assert call_args[1] == self.expected_items[requests_mode_settings["mode"]]["endpoints"]["tests"] + assert json.loads(call_args[2]) == self._get_expected_do_request_tests_payload( + dd_service="a_test_service", dd_env="a_test_env" + ) + assert call_args[3] == self.expected_items[requests_mode_settings["mode"]]["headers"] + mock_connection.close.assert_called_once() + @pytest.mark.parametrize( "env_vars,expected_config", [ diff --git a/tests/ci_visibility/api_client/test_ci_visibility_api_client_setting_responses.py b/tests/ci_visibility/api_client/test_ci_visibility_api_client_setting_responses.py index fc8c96ef0a4..6cf0e8b70e3 100644 --- a/tests/ci_visibility/api_client/test_ci_visibility_api_client_setting_responses.py +++ b/tests/ci_visibility/api_client/test_ci_visibility_api_client_setting_responses.py @@ -51,15 +51,26 @@ class TestTestVisibilityAPIClientSettingResponses(TestTestVisibilityAPIClientBas [ _get_setting_api_response(tests_skipping=True, itr_enabled=True, efd_detection_enabled=True), TestVisibilityAPISettings( - skipping_enabled=True, itr_enabled=True, early_flake_detection=EarlyFlakeDetectionSettings(True) + skipping_enabled=True, + itr_enabled=True, + early_flake_detection=EarlyFlakeDetectionSettings(enabled=True), ), ], # EFD matrix-y-testing + # EFD should have default values regardless of whether it's present in the response + [ + _get_setting_api_response(tests_skipping=True, itr_enabled=True, efd_present=False), + TestVisibilityAPISettings(skipping_enabled=True, itr_enabled=True), + ], + [ + _get_setting_api_response(tests_skipping=True, itr_enabled=True, efd_present=True), + TestVisibilityAPISettings(skipping_enabled=True, itr_enabled=True), + ], [ _get_setting_api_response( code_coverage=True, flaky_test_retries_enabled=True, - efd_detection_enabled=False, + efd_detection_enabled=True, efd_5s=10, efd_10s=25, efd_30s=15, @@ -70,7 +81,7 @@ class 
TestTestVisibilityAPIClientSettingResponses(TestTestVisibilityAPIClientBas coverage_enabled=True, flaky_test_retries_enabled=True, early_flake_detection=EarlyFlakeDetectionSettings( - enabled=False, + enabled=True, slow_test_retries_5s=10, slow_test_retries_10s=25, slow_test_retries_30s=15, @@ -79,6 +90,8 @@ class TestTestVisibilityAPIClientSettingResponses(TestTestVisibilityAPIClientBas ), ), ], + # If EFD is not enabled, the defaults should be returned even if the response has values (because we + # ignore whatever other values are in the response on the assumption they will not be used) [ _get_setting_api_response( efd_detection_enabled=False, @@ -91,11 +104,6 @@ class TestTestVisibilityAPIClientSettingResponses(TestTestVisibilityAPIClientBas TestVisibilityAPISettings( early_flake_detection=EarlyFlakeDetectionSettings( enabled=False, - slow_test_retries_5s=1, - slow_test_retries_10s=2, - slow_test_retries_30s=3, - slow_test_retries_5m=4, - faulty_session_threshold=5, ), ), ], diff --git a/tests/ci_visibility/api_client/test_ci_visibility_api_client_unique_tests_responses.py b/tests/ci_visibility/api_client/test_ci_visibility_api_client_unique_tests_responses.py new file mode 100644 index 00000000000..e6887a23497 --- /dev/null +++ b/tests/ci_visibility/api_client/test_ci_visibility_api_client_unique_tests_responses.py @@ -0,0 +1,106 @@ +"""NOTE: this lives in its own file simply because some of the test variables are unwieldy""" +from http.client import RemoteDisconnected +import socket +import textwrap +from unittest import mock + +import pytest + +from ddtrace.internal.utils.http import Response +from tests.ci_visibility.api_client._util import TestTestVisibilityAPIClientBase +from tests.ci_visibility.api_client._util import _get_tests_api_response +from tests.ci_visibility.api_client._util import _make_fqdn_test_ids + + +class TestTestVisibilityAPIClientUniqueTestResponses(TestTestVisibilityAPIClientBase): + """Tests that setting responses from the API client are parsed properly""" + + @pytest.mark.parametrize( + "unique_test_response,expected_tests", + [ + # Defaults + (_get_tests_api_response({}), set()), + # Single item + ( + _get_tests_api_response({"module1": {"suite1.py": ["test1"]}}), + set(_make_fqdn_test_ids([("module1", "suite1.py", "test1")])), + ), + # Multiple unique items + ( + _get_tests_api_response({"module1": {"suite1.py": ["test1"]}}), + set(_make_fqdn_test_ids([("module1", "suite1.py", "test1")])), + ), + ( + _get_tests_api_response({"module1": {"suite1.py": ["test1"]}, "module2": {"suite2.py": ["test2"]}}), + set(_make_fqdn_test_ids([("module1", "suite1.py", "test1"), ("module2", "suite2.py", "test2")])), + ), + # Multiple items with same name + ( + _get_tests_api_response( + { + "module1": { + "suite1.py": ["test1", "test2"], + "suite2.py": ["test1"], + }, + "module2": { + "suite1.py": ["test1"], + "suite2.py": ["test1", "test2"], + }, + } + ), + set( + _make_fqdn_test_ids( + [ + ("module1", "suite1.py", "test1"), + ("module1", "suite1.py", "test2"), + ("module1", "suite2.py", "test1"), + ("module2", "suite1.py", "test1"), + ("module2", "suite2.py", "test1"), + ("module2", "suite2.py", "test2"), + ] + ) + ), + ), + ], + ) + def test_civisibility_api_client_unique_tests_parsed(self, unique_test_response, expected_tests): + """Tests that the client correctly returns unique tests from API response""" + client = self._get_test_client() + with mock.patch.object(client, "_do_request", return_value=unique_test_response): + assert client.fetch_unique_tests() == 
expected_tests + + @pytest.mark.parametrize( + "do_request_side_effect", + [ + TimeoutError, + socket.timeout, + RemoteDisconnected, + Response(403), + Response(500), + Response(200, "this is not json"), + Response(200, '{"valid_json": "invalid_structure"}'), + Response(200, '{"errors": "there was an error"}'), + Response( + 200, + textwrap.dedent( + """ + { + "data": { + "id": "J0ucvcSApX8", + "type": "ci_app_libraries_tests", + "attributes": { + "potatoes_but_not_tests": {} + } + } + } + """ + ), + ), + ], + ) + def test_civisibility_api_client_unique_tests_errors(self, do_request_side_effect): + """Tests that the client correctly handles errors in the unique test API response""" + client = self._get_test_client() + with mock.patch.object(client, "_do_request", side_effect=[do_request_side_effect]): + settings = client.fetch_unique_tests() + assert settings is None From 717261bcd5f4408168db121a4e7820f05cb25e77 Mon Sep 17 00:00:00 2001 From: Romain Komorn Date: Fri, 27 Sep 2024 13:44:59 +0100 Subject: [PATCH 2/4] fix tests to account for UUID --- tests/ci_visibility/test_ci_visibility.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/ci_visibility/test_ci_visibility.py b/tests/ci_visibility/test_ci_visibility.py index fba71ae1302..b6d95f943a4 100644 --- a/tests/ci_visibility/test_ci_visibility.py +++ b/tests/ci_visibility/test_ci_visibility.py @@ -1019,6 +1019,8 @@ def test_fetch_tests_to_skip_custom_configurations(dd_ci_visibility_agentless_ur "ddtrace.internal.ci_visibility.git_client._build_git_packfiles_with_details" ) as mock_build_packfiles, mock.patch( "ddtrace.internal.ci_visibility.recorder.ddconfig", _get_default_civisibility_ddconfig() + ), mock.patch( + "ddtrace.internal.ci_visibility._api_client.uuid4", return_value="checkoutmyuuid4" ): mock_build_packfiles.return_value.__enter__.return_value = "myprefix", _GitSubprocessDetails("", "", 10, 0) CIVisibility.enable(service="test-service") @@ -1026,6 +1028,7 @@ def test_fetch_tests_to_skip_custom_configurations(dd_ci_visibility_agentless_ur expected_data_arg = json.dumps( { "data": { + "id": "checkoutmyuuid4", "type": "test_params", "attributes": { "service": "test-service", @@ -1052,7 +1055,7 @@ def test_fetch_tests_to_skip_custom_configurations(dd_ci_visibility_agentless_ur "POST", "/api/v2/ci/tests/skippable", expected_data_arg, - 20, + timeout=20.0, ) CIVisibility.disable() From 987ae52232fbef39013f34894b9feab75aefb880 Mon Sep 17 00:00:00 2001 From: Romain Komorn Date: Fri, 27 Sep 2024 13:57:45 +0100 Subject: [PATCH 3/4] update comments before Vitor notices... 
--- .../test_ci_visibility_api_client_unique_tests_responses.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ci_visibility/api_client/test_ci_visibility_api_client_unique_tests_responses.py b/tests/ci_visibility/api_client/test_ci_visibility_api_client_unique_tests_responses.py index e6887a23497..21e449e622e 100644 --- a/tests/ci_visibility/api_client/test_ci_visibility_api_client_unique_tests_responses.py +++ b/tests/ci_visibility/api_client/test_ci_visibility_api_client_unique_tests_responses.py @@ -13,7 +13,7 @@ class TestTestVisibilityAPIClientUniqueTestResponses(TestTestVisibilityAPIClientBase): - """Tests that setting responses from the API client are parsed properly""" + """Tests that unique tests responses from the API client are parsed properly""" @pytest.mark.parametrize( "unique_test_response,expected_tests", From e5fc7506f2fc00766f4308d558f3c7f6cd9f2eb4 Mon Sep 17 00:00:00 2001 From: Romain Komorn Date: Fri, 27 Sep 2024 15:39:12 +0100 Subject: [PATCH 4/4] PR feedback fixes --- ddtrace/internal/ci_visibility/_api_client.py | 38 +++++++------------ .../ci_visibility/telemetry/api_request.py | 7 +++- 2 files changed, 19 insertions(+), 26 deletions(-) diff --git a/ddtrace/internal/ci_visibility/_api_client.py b/ddtrace/internal/ci_visibility/_api_client.py index 6be18bd69b2..debe1d41aaf 100644 --- a/ddtrace/internal/ci_visibility/_api_client.py +++ b/ddtrace/internal/ci_visibility/_api_client.py @@ -87,13 +87,6 @@ class EarlyFlakeDetectionSettings: faulty_session_threshold: float = 30.0 -@dataclasses.dataclass(frozen=True) -class UniqueTestsData: - test_modules: t.Dict[str, TestModuleId] - test_suites: t.Dict[str, TestSuiteId] - test_cases: t.Dict[str, InternalTestId] - - @dataclasses.dataclass(frozen=True) class TestVisibilityAPISettings: __test__ = False @@ -313,10 +306,7 @@ def _do_request_with_telemetry( try: sw.stop() # Stop the timer before parsing the response response_bytes = len(response.body) - if isinstance(response.body, bytes): - parsed = json.loads(response.body.decode()) - else: - parsed = json.loads(response.body) + parsed = json.loads(response.body) return parsed except JSONDecodeError: error_type = ERROR_TYPES.BAD_JSON @@ -331,10 +321,10 @@ def fetch_settings(self) -> TestVisibilityAPISettings: """ metric_names = APIRequestMetricNames( - GIT_TELEMETRY.SETTINGS_COUNT, - GIT_TELEMETRY.SETTINGS_MS, - GIT_TELEMETRY.SETTINGS_RESPONSE, - GIT_TELEMETRY.SETTINGS_ERRORS, + count=GIT_TELEMETRY.SETTINGS_COUNT, + duration=GIT_TELEMETRY.SETTINGS_MS, + response_bytes=None, + error=GIT_TELEMETRY.SETTINGS_ERRORS, ) payload = { @@ -412,10 +402,10 @@ def fetch_skippable_items( timeout = DEFAULT_ITR_SKIPPABLE_TIMEOUT metric_names = APIRequestMetricNames( - SKIPPABLE_TESTS_TELEMETRY.REQUEST, - SKIPPABLE_TESTS_TELEMETRY.REQUEST_MS, - SKIPPABLE_TESTS_TELEMETRY.RESPONSE_TESTS, - SKIPPABLE_TESTS_TELEMETRY.REQUEST_ERRORS, + count=SKIPPABLE_TESTS_TELEMETRY.REQUEST, + duration=SKIPPABLE_TESTS_TELEMETRY.REQUEST_MS, + response_bytes=SKIPPABLE_TESTS_TELEMETRY.RESPONSE_BYTES, + error=SKIPPABLE_TESTS_TELEMETRY.REQUEST_ERRORS, ) payload = { @@ -448,7 +438,7 @@ def fetch_skippable_items( meta = skippable_response.get("meta") if meta is None: - log.debug("SKippable tests response did not contain metadata field, no tests will be skipped") + log.debug("Skippable tests response did not contain metadata field, no tests will be skipped") record_api_request_error(metric_names.error, ERROR_TYPES.BAD_JSON) return None @@ -480,10 +470,10 @@ def fetch_skippable_items( def 
fetch_unique_tests(self) -> t.Optional[t.Set[InternalTestId]]: metric_names = APIRequestMetricNames( - EARLY_FLAKE_DETECTION_TELEMETRY.REQUEST, - EARLY_FLAKE_DETECTION_TELEMETRY.REQUEST_MS, - EARLY_FLAKE_DETECTION_TELEMETRY.RESPONSE_BYTES, - EARLY_FLAKE_DETECTION_TELEMETRY.REQUEST_ERRORS, + count=EARLY_FLAKE_DETECTION_TELEMETRY.REQUEST, + duration=EARLY_FLAKE_DETECTION_TELEMETRY.REQUEST_MS, + response_bytes=EARLY_FLAKE_DETECTION_TELEMETRY.RESPONSE_BYTES, + error=EARLY_FLAKE_DETECTION_TELEMETRY.REQUEST_ERRORS, ) unique_test_ids: t.Set[InternalTestId] = set() diff --git a/ddtrace/internal/ci_visibility/telemetry/api_request.py b/ddtrace/internal/ci_visibility/telemetry/api_request.py index 21c26ccb8cb..076cc0cca77 100644 --- a/ddtrace/internal/ci_visibility/telemetry/api_request.py +++ b/ddtrace/internal/ci_visibility/telemetry/api_request.py @@ -14,7 +14,7 @@ class APIRequestMetricNames: count: str duration: str - response_bytes: str + response_bytes: Optional[str] error: str @@ -35,7 +35,10 @@ def record_api_request( telemetry_writer.add_count_metric(_NAMESPACE, f"{metric_names.count}", 1) telemetry_writer.add_distribution_metric(_NAMESPACE, f"{metric_names.duration}", duration) if response_bytes is not None: - telemetry_writer.add_distribution_metric(_NAMESPACE, f"{metric_names.response_bytes}", response_bytes) + if metric_names.response_bytes is not None: + # We don't always want to record response bytes (for settings requests), so assume that no metric name + # means we don't want to record it. + telemetry_writer.add_distribution_metric(_NAMESPACE, f"{metric_names.response_bytes}", response_bytes) if error is not None: record_api_request_error(metric_names.error, error)
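
For context, a minimal standalone sketch (not part of the patches above) of how the new fetch_unique_tests flattens the {module: {suite: [test, ...]}} payload returned by /api/v2/ci/libraries/tests into a set of test identifiers. The flat tuple-based ids below are illustrative stand-ins for ddtrace's TestModuleId/TestSuiteId/InternalTestId classes, not the real types.

# Illustrative sketch only: simplified stand-ins for ddtrace's id classes.
from typing import Dict, List, Set, Tuple

# (module, suite, test) as a flat tuple instead of the nested id objects used in the patch.
FlatTestId = Tuple[str, str, str]


def flatten_unique_tests(tests_data: Dict[str, Dict[str, List[str]]]) -> Set[FlatTestId]:
    """Flatten the {module: {suite: [test, ...]}} structure into a set of test ids."""
    unique: Set[FlatTestId] = set()
    for module, suites in tests_data.items():
        for suite, tests in suites.items():
            for test in tests:
                unique.add((module, suite, test))
    return unique


# Example payload mirroring the "data.attributes.tests" field of the unique tests response:
payload = {"module1": {"suite1.py": ["test1", "test2"]}, "module2": {"suite2.py": ["test1"]}}
assert flatten_unique_tests(payload) == {
    ("module1", "suite1.py", "test1"),
    ("module1", "suite1.py", "test2"),
    ("module2", "suite2.py", "test1"),
}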