From e4e789a470b69a1556aac62a1006b6814ff0fb40 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 27 May 2025 17:05:23 +0200 Subject: [PATCH 001/100] Add core TLS context changes --- .../datadog_checks/base/utils/http.py | 115 +++++++++++++----- .../datadog_checks/base/utils/network.py | 16 --- .../datadog_checks/base/utils/tls.py | 15 ++- 3 files changed, 92 insertions(+), 54 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index d6e531bf08167..d7d51b9764daf 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -22,12 +22,13 @@ from datadog_checks.base.agent import datadog_agent from datadog_checks.base.utils import _http_utils +from datadog_checks.base.utils.tls import TlsContextWrapper from ..config import is_affirmative from ..errors import ConfigurationError from .common import ensure_bytes, ensure_unicode from .headers import get_default_headers, update_headers -from .network import CertAdapter, create_socket_connection +from .network import create_socket_connection from .time import get_timestamp # See Performance Optimizations in this package's README.md. @@ -85,12 +86,14 @@ 'skip_proxy': False, 'tls_ca_cert': None, 'tls_cert': None, + 'tls_ciphers': 'ALL', 'tls_use_host_header': False, 'tls_ignore_warning': False, 'tls_private_key': None, + 'tls_private_key_password': None, 'tls_protocols_allowed': DEFAULT_PROTOCOL_VERSIONS, + 'tls_validate_hostname': True, 'tls_verify': True, - 'tls_ciphers': 'ALL', 'timeout': DEFAULT_TIMEOUT, 'use_legacy_auth_encoding': True, 'username': None, @@ -115,6 +118,22 @@ UDS_SCHEME = 'unix' +class HTTPAdapterWrapper(requests.adapters.HTTPAdapter): + """ + HTTPS adapter that uses TlsContextWrapper to create SSL contexts. + This ensures consistent TLS configuration across all HTTPS requests. 
+ """ + + def __init__(self, tls_context_wrapper, **kwargs): + self.tls_context_wrapper = tls_context_wrapper + super(HTTPAdapterWrapper, self).__init__() + + def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): + # Use the TLS context from TlsContextWrapper + pool_kwargs['ssl_context'] = self.tls_context_wrapper.tls_context + return super(HTTPAdapterWrapper, self).init_poolmanager(connections, maxsize, block=block, **pool_kwargs) + + class ResponseWrapper(ObjectProxy): def __init__(self, response, default_chunk_size): super(ResponseWrapper, self).__init__(response) @@ -152,7 +171,7 @@ class RequestsWrapper(object): 'auth_token_handler', 'request_size', 'tls_protocols_allowed', - 'tls_ciphers_allowed', + 'tls_context_wrapper', ) def __init__(self, instance, init_config, remapper=None, logger=None, session=None): @@ -254,7 +273,8 @@ def __init__(self, instance, init_config, remapper=None, logger=None, session=No allow_redirects = is_affirmative(config['allow_redirects']) - # https://requests.readthedocs.io/en/latest/user/advanced/#ssl-cert-verification + # For TLS verification, we now rely on the TLS context wrapper + # but still need to set verify for requests compatibility verify = True if isinstance(config['tls_ca_cert'], str): verify = config['tls_ca_cert'] @@ -347,13 +367,8 @@ def __init__(self, instance, init_config, remapper=None, logger=None, session=No if config['kerberos_cache']: self.request_hooks.append(lambda: handle_kerberos_cache(config['kerberos_cache'])) - ciphers = config.get('tls_ciphers') - if ciphers: - if 'ALL' in ciphers: - updated_ciphers = "ALL" - else: - updated_ciphers = ":".join(ciphers) - self.tls_ciphers_allowed = updated_ciphers + # Create TLS context wrapper for consistent TLS configuration + self.tls_context_wrapper = TlsContextWrapper(config, remapper) def get(self, url, **options): return self._request('get', url, options) @@ -397,6 +412,11 @@ def _request(self, method, url, options): new_options['headers'] 
= new_options['headers'].copy() new_options['headers'].update(extra_headers) + if new_options['verify'] != self.tls_context_wrapper.config['tls_verify']: + # The verify option needs to be synchronized + self.tls_context_wrapper.config['tls_verify'] = new_options['verify'] + self.tls_context_wrapper.refresh_tls_context() + if is_uds_url(url): persist = True # UDS support is only enabled on the shared session. url = quote_uds_url(url) @@ -409,7 +429,8 @@ def _request(self, method, url, options): if persist: request_method = getattr(self.session, method) else: - request_method = getattr(requests, method) + # Create a new session for non-persistent requests + request_method = getattr(self.create_session(), method) if self.auth_token_handler: try: @@ -435,16 +456,18 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per certs = self.fetch_intermediate_certs(hostname, port) if not certs: raise e + self.tls_context_wrapper.config['tls_ca_cert'] = certs + self.tls_context_wrapper.refresh_tls_context() # retry the connection via session object - certadapter = CertAdapter(certs=certs) if not persist: session = requests.Session() for option, value in self.options.items(): setattr(session, option, value) + certadapter = HTTPAdapterWrapper(self.tls_context_wrapper) + session.mount(url, certadapter) else: session = self.session request_method = getattr(session, method) - session.mount(url, certadapter) response = request_method(url, **new_options) return response @@ -472,8 +495,17 @@ def fetch_intermediate_certs(self, hostname, port=443): with sock: try: context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) + # Override verify mode for intermediate cert discovery context.verify_mode = ssl.CERT_NONE - context.set_ciphers(self.tls_ciphers_allowed) + # Set the ciphers + ciphers = self.tls_context_wrapper.config.get('tls_ciphers') + if ciphers: + if 'ALL' in ciphers: + updated_ciphers = "ALL" + else: + updated_ciphers = ":".join(ciphers) + + 
context.set_ciphers(updated_ciphers) with context.wrap_socket(sock, server_hostname=hostname) as secure_sock: der_cert = secure_sock.getpeercert(binary_form=True) @@ -521,7 +553,7 @@ def load_intermediate_certs(self, der_cert, certs): # Assume HTTP for now try: - response = requests.get(uri) # SKIP_HTTP_VALIDATION + response = self.get(uri, verify=False) # SKIP_HTTP_VALIDATION except Exception as e: self.logger.error('Error fetching intermediate certificate from `%s`: %s', uri, e) continue @@ -532,23 +564,46 @@ def load_intermediate_certs(self, der_cert, certs): self.load_intermediate_certs(intermediate_cert, certs) return certs - @property - def session(self): - if self._session is None: - self._session = requests.Session() + def create_session(self): + session = requests.Session() + + # Use TlsContextHTTPSAdapter for consistent TLS configuration + https_adapter = HTTPAdapterWrapper(self.tls_context_wrapper) + + # Enables HostHeaderSSLAdapter if needed + # https://toolbelt.readthedocs.io/en/latest/adapters.html#hostheaderssladapter + if self.tls_use_host_header: + # Create a combined adapter that supports both TLS context and host headers + class TlsContextHostHeaderAdapter(HTTPAdapterWrapper, _http_utils.HostHeaderSSLAdapter): + def __init__(self, tls_context_wrapper, **kwargs): + HTTPAdapterWrapper.__init__(self, tls_context_wrapper, **kwargs) + _http_utils.HostHeaderSSLAdapter.__init__(self, **kwargs) + + def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): + # Use TLS context from wrapper + pool_kwargs['ssl_context'] = self.tls_context_wrapper.tls_context + return _http_utils.HostHeaderSSLAdapter.init_poolmanager( + self, connections, maxsize, block=block, **pool_kwargs + ) - # Enables HostHeaderSSLAdapter - # https://toolbelt.readthedocs.io/en/latest/adapters.html#hostheaderssladapter - if self.tls_use_host_header: - self._session.mount('https://', _http_utils.HostHeaderSSLAdapter()) - # Enable Unix Domain Socket (UDS) support. 
- # See: https://github.com/msabramo/requests-unixsocket - self._session.mount('{}://'.format(UDS_SCHEME), requests_unixsocket.UnixAdapter()) + https_adapter = TlsContextHostHeaderAdapter(self.tls_context_wrapper) - # Attributes can't be passed to the constructor - for option, value in self.options.items(): - setattr(self._session, option, value) + session.mount('https://', https_adapter) + # Enable Unix Domain Socket (UDS) support. + # See: https://github.com/msabramo/requests-unixsocket + session.mount('{}://'.format(UDS_SCHEME), requests_unixsocket.UnixAdapter()) + + # Attributes can't be passed to the constructor + for option, value in self.options.items(): + setattr(session, option, value) + return session + + @property + def session(self): + if self._session is None: + # Create a new session if it doesn't exist + self._session = self.create_session() return self._session def handle_auth_token(self, **request): diff --git a/datadog_checks_base/datadog_checks/base/utils/network.py b/datadog_checks_base/datadog_checks/base/utils/network.py index 4c51899f00f3e..c8f135c65a6b3 100644 --- a/datadog_checks_base/datadog_checks/base/utils/network.py +++ b/datadog_checks_base/datadog_checks/base/utils/network.py @@ -1,7 +1,4 @@ import socket -import ssl - -from requests.adapters import HTTPAdapter, PoolManager def create_socket_connection(hostname, port=443, sock_type=socket.SOCK_STREAM, timeout=10): @@ -34,16 +31,3 @@ def create_socket_connection(hostname, port=443, sock_type=socket.SOCK_STREAM, t raise socket.error('Unable to resolve host, check your DNS: {}'.format(message)) # noqa: G raise - - -class CertAdapter(HTTPAdapter): - def __init__(self, **kwargs): - self.certs = kwargs['certs'] - super(CertAdapter, self).__init__() - - def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): - context = ssl.create_default_context() - for cert in self.certs: - context.load_verify_locations(cadata=cert) - pool_kwargs['ssl_context'] = context - 
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, block=block, strict=True, **pool_kwargs) diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index 0fb43c028a112..384177b114672 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -104,18 +104,17 @@ def _create_tls_context(self): # type: () -> ssl.SSLContext # https://docs.python.org/3/library/ssl.html#ssl.SSLContext - # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS - context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) + # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT + context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_CLIENT) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname + context.check_hostname = ( + False if not self.config['tls_verify'] else self.config.get('tls_validate_hostname', True) + ) # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.verify_mode context.verify_mode = ssl.CERT_REQUIRED if self.config['tls_verify'] else ssl.CERT_NONE - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname - if context.verify_mode == ssl.CERT_REQUIRED: - context.check_hostname = self.config.get('tls_validate_hostname', True) - else: - context.check_hostname = False - ciphers = self.config.get('tls_ciphers') if ciphers: if 'ALL' in ciphers: From 09bf9ccf01460d28e92db5293d2cbafe0bf2168d Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 27 May 2025 17:06:16 +0200 Subject: [PATCH 002/100] Fix base package tests --- .../test_legacy/test_openmetrics.py | 20 +- .../base/checks/prometheus/test_prometheus.py | 80 ++++---- .../tests/base/utils/http/test_api.py | 56 ++--- .../tests/base/utils/http/test_authtoken.py | 24 +-- .../tests/base/utils/http/test_config.py | 19 +- .../tests/base/utils/http/test_headers.py | 2 +- .../tests/base/utils/http/test_http.py | 20 +- 
.../base/utils/http/test_kerberos_unit.py | 10 +- .../tests/base/utils/http/test_proxy.py | 31 +-- .../base/utils/http/test_tls_and_certs.py | 194 ++++++++++++++++-- .../datadog_checks/dev/plugin/pytest.py | 2 +- 11 files changed, 319 insertions(+), 139 deletions(-) diff --git a/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py b/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py index 1cdd641a42579..1435ee2d2f6e1 100644 --- a/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py +++ b/datadog_checks_base/tests/base/checks/openmetrics/test_legacy/test_openmetrics.py @@ -157,7 +157,7 @@ def test_poll_text_plain(mocked_prometheus_check, mocked_prometheus_scraper_conf mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): response = check.poll(mocked_prometheus_scraper_config) messages = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) messages.sort(key=lambda x: x.name) @@ -174,7 +174,7 @@ def test_poll_octet_stream(mocked_prometheus_check, mocked_prometheus_scraper_co mock_response.status_code = 200 mock_response.headers = {'Content-Type': 'application/octet-stream'} - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): response = check.poll(mocked_prometheus_scraper_config) messages = list(check.parse_metric_family(response, mocked_prometheus_scraper_config)) assert len(messages) == 40 @@ -1695,7 +1695,7 @@ def test_ignore_metrics_multiple_wildcards( mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': 
text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process(config) # Make sure metrics are ignored @@ -1803,7 +1803,7 @@ def test_metrics_with_ignore_label_values( mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process(config) # Make sure metrics are ignored @@ -1854,7 +1854,7 @@ def test_match_metrics_multiple_wildcards( mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process(config) aggregator.assert_metric('prometheus.go_memstats_mcache_inuse_bytes', count=1) @@ -2304,7 +2304,7 @@ def test_label_joins_gc(aggregator, mocked_prometheus_check, mocked_prometheus_s mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process(mocked_prometheus_scraper_config) assert 'dd-agent-1337' in mocked_prometheus_scraper_config['_label_mapping']['pod'] assert 'dd-agent-62bgh' not in mocked_prometheus_scraper_config['_label_mapping']['pod'] @@ -2464,7 +2464,7 @@ def test_label_join_state_change(aggregator, mocked_prometheus_check, mocked_pro mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), 
headers={'Content-Type': text_content_type} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process(mocked_prometheus_scraper_config) assert 15 == len(mocked_prometheus_scraper_config['_label_mapping']['pod']) assert mocked_prometheus_scraper_config['_label_mapping']['pod']['dd-agent-62bgh']['phase'] == 'Test' @@ -2582,7 +2582,7 @@ def mock_filter_get(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), @@ -2672,7 +2672,7 @@ def test_ssl_verify_not_raise_warning(caplog, mocked_openmetrics_check_factory, check = mocked_openmetrics_check_factory(instance) scraper_config = check.get_scraper_config(instance) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get', return_value=MockResponse('httpbin.org')): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): resp = check.send_request('https://httpbin.org/get', scraper_config) assert "httpbin.org" in resp.content.decode('utf-8') @@ -2696,7 +2696,7 @@ def test_send_request_with_dynamic_prometheus_url(caplog, mocked_openmetrics_che # `prometheus_url` changed just before calling `send_request` scraper_config['prometheus_url'] = 'https://www.example.com/foo/bar' - with caplog.at_level(logging.DEBUG), mock.patch('requests.get', return_value=MockResponse('httpbin.org')): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): resp = check.send_request('https://httpbin.org/get', scraper_config) assert "httpbin.org" in resp.content.decode('utf-8') diff --git a/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py b/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py index 
9003b2b96060f..479a85e38bf1a 100644 --- a/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py +++ b/datadog_checks_base/tests/base/checks/prometheus/test_prometheus.py @@ -5,6 +5,7 @@ # Licensed under Simplified BSD License (see LICENSE) import logging import os +import ssl from collections import OrderedDict import mock @@ -334,7 +335,7 @@ def test_poll_protobuf(mocked_prometheus_check, bin_data): """Tests poll using the protobuf format""" check = mocked_prometheus_check mock_response = mock.MagicMock(status_code=200, content=bin_data, headers={'Content-Type': protobuf_content_type}) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): response = check.poll("http://fake.endpoint:10055/metrics") messages = list(check.parse_metric_family(response)) assert len(messages) == 61 @@ -347,7 +348,7 @@ def test_poll_text_plain(mocked_prometheus_check, text_data): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): response = check.poll("http://fake.endpoint:10055/metrics") messages = list(check.parse_metric_family(response)) messages.sort(key=lambda x: x.name) @@ -1284,7 +1285,7 @@ def test_label_joins(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' check.label_joins = { @@ -1634,7 +1635,7 @@ def test_label_joins_gc(sorted_tags_check): mock_response = mock.MagicMock( 
status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' check.label_joins = {'kube_pod_info': {'label_to_match': 'pod', 'labels_to_get': ['node', 'pod_ip']}} @@ -1684,7 +1685,7 @@ def test_label_joins_gc(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check.process("http://fake.endpoint:10055/metrics") assert 'dd-agent-1337' in check._label_mapping['pod'] assert 'dd-agent-62bgh' not in check._label_mapping['pod'] @@ -1700,7 +1701,7 @@ def test_label_joins_missconfigured(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' check.label_joins = {'kube_pod_info': {'label_to_match': 'pod', 'labels_to_get': ['node', 'not_existing']}} @@ -1753,7 +1754,7 @@ def test_label_join_not_existing(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' check.label_joins = {'kube_pod_info': {'label_to_match': 
'not_existing', 'labels_to_get': ['node', 'pod_ip']}} @@ -1792,7 +1793,7 @@ def test_label_join_metric_not_existing(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' check.label_joins = {'not_existing': {'label_to_match': 'pod', 'labels_to_get': ['node', 'pod_ip']}} @@ -1831,7 +1832,7 @@ def test_label_join_with_hostname(sorted_tags_check): mock_response = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ) - with mock.patch('requests.get', return_value=mock_response, __name__="get"): + with mock.patch('requests.Session.get', return_value=mock_response, __name__="get"): check = sorted_tags_check check.NAMESPACE = 'ksm' check.label_joins = {'kube_pod_info': {'label_to_match': 'pod', 'labels_to_get': ['node']}} @@ -1883,7 +1884,7 @@ def mock_get(): with open(f_name, 'r') as f: text_data = f.read() mock_get = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -1966,7 +1967,7 @@ def test_ssl_verify_not_raise_warning(caplog, mocked_prometheus_check, text_data check = mocked_prometheus_check - with caplog.at_level(logging.DEBUG), mock.patch('requests.get', return_value=MockResponse('httpbin.org')): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): resp = check.poll('https://httpbin.org/get') assert 'httpbin.org' in resp.content.decode('utf-8') @@ -1982,7 +1983,7 @@ def test_ssl_verify_not_raise_warning_cert_false(caplog, mocked_prometheus_check check = 
mocked_prometheus_check check.ssl_ca_cert = False - with caplog.at_level(logging.DEBUG), mock.patch('requests.get', return_value=MockResponse('httpbin.org')): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get', return_value=MockResponse('httpbin.org')): resp = check.poll('https://httpbin.org/get') assert 'httpbin.org' in resp.content.decode('utf-8') @@ -2018,29 +2019,32 @@ def test_requests_wrapper_config(): ] ) - with mock.patch("requests.get") as get: - check.poll(instance_http['prometheus_endpoint'], instance=instance_http) - get.assert_called_with( - instance_http['prometheus_endpoint'], - stream=False, - headers=expected_headers, - auth=requests.auth.HTTPDigestAuth('data', 'dog'), - cert='/path/to/cert', - timeout=(42.0, 42.0), - proxies=None, - verify=True, - allow_redirects=True, - ) - - check.poll(instance_http['prometheus_endpoint']) - get.assert_called_with( - instance_http['prometheus_endpoint'], - stream=False, - headers=expected_headers, - auth=requests.auth.HTTPDigestAuth('data', 'dog'), - cert='/path/to/cert', - timeout=(42.0, 42.0), - proxies=None, - verify=True, - allow_redirects=True, - ) + with mock.patch("requests.Session.get") as get: + with mock.patch.object(ssl.SSLContext, 'load_cert_chain') as mock_load_cert_chain: + mock_load_cert_chain.return_value = None + + check.poll(instance_http['prometheus_endpoint'], instance=instance_http) + get.assert_called_with( + instance_http['prometheus_endpoint'], + stream=False, + headers=expected_headers, + auth=requests.auth.HTTPDigestAuth('data', 'dog'), + cert='/path/to/cert', + timeout=(42.0, 42.0), + proxies=None, + verify=True, + allow_redirects=True, + ) + + check.poll(instance_http['prometheus_endpoint']) + get.assert_called_with( + instance_http['prometheus_endpoint'], + stream=False, + headers=expected_headers, + auth=requests.auth.HTTPDigestAuth('data', 'dog'), + cert='/path/to/cert', + timeout=(42.0, 42.0), + proxies=None, + verify=True, + allow_redirects=True, + ) diff 
--git a/datadog_checks_base/tests/base/utils/http/test_api.py b/datadog_checks_base/tests/base/utils/http/test_api.py index 3440c300147d2..6986ae297d477 100644 --- a/datadog_checks_base/tests/base/utils/http/test_api.py +++ b/datadog_checks_base/tests/base/utils/http/test_api.py @@ -13,9 +13,9 @@ def test_get(): http = RequestsWrapper({}, {}) - with mock.patch('requests.get'): + with mock.patch('requests.Session.get'): http.get('https://www.google.com') - requests.get.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.get.assert_called_once_with('https://www.google.com', **http.options) def test_get_session(): @@ -31,9 +31,9 @@ def test_get_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.get'): + with mock.patch('requests.Session.get'): http.get('https://www.google.com', auth=options['auth']) - requests.get.assert_called_once_with('https://www.google.com', **options) + requests.Session.get.assert_called_once_with('https://www.google.com', **options) def test_get_session_option_override(): @@ -49,9 +49,9 @@ def test_get_session_option_override(): def test_post(): http = RequestsWrapper({}, {}) - with mock.patch('requests.post'): + with mock.patch('requests.Session.post'): http.post('https://www.google.com') - requests.post.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.post.assert_called_once_with('https://www.google.com', **http.options) def test_post_session(): @@ -67,9 +67,9 @@ def test_post_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.post'): + with mock.patch('requests.Session.post'): http.post('https://www.google.com', auth=options['auth']) - requests.post.assert_called_once_with('https://www.google.com', **options) + requests.Session.post.assert_called_once_with('https://www.google.com', **options) def test_post_session_option_override(): @@ -85,9 +85,9 @@ def 
test_post_session_option_override(): def test_head(): http = RequestsWrapper({}, {}) - with mock.patch('requests.head'): + with mock.patch('requests.Session.head'): http.head('https://www.google.com') - requests.head.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.head.assert_called_once_with('https://www.google.com', **http.options) def test_head_session(): @@ -103,9 +103,9 @@ def test_head_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.head'): + with mock.patch('requests.Session.head'): http.head('https://www.google.com', auth=options['auth']) - requests.head.assert_called_once_with('https://www.google.com', **options) + requests.Session.head.assert_called_once_with('https://www.google.com', **options) def test_head_session_option_override(): @@ -121,9 +121,9 @@ def test_head_session_option_override(): def test_put(): http = RequestsWrapper({}, {}) - with mock.patch('requests.put'): + with mock.patch('requests.Session.put'): http.put('https://www.google.com') - requests.put.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.put.assert_called_once_with('https://www.google.com', **http.options) def test_put_session(): @@ -139,9 +139,9 @@ def test_put_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.put'): + with mock.patch('requests.Session.put'): http.put('https://www.google.com', auth=options['auth']) - requests.put.assert_called_once_with('https://www.google.com', **options) + requests.Session.put.assert_called_once_with('https://www.google.com', **options) def test_put_session_option_override(): @@ -157,9 +157,9 @@ def test_put_session_option_override(): def test_patch(): http = RequestsWrapper({}, {}) - with mock.patch('requests.patch'): + with mock.patch('requests.Session.patch'): http.patch('https://www.google.com') - 
requests.patch.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.patch.assert_called_once_with('https://www.google.com', **http.options) def test_patch_session(): @@ -175,9 +175,9 @@ def test_patch_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.patch'): + with mock.patch('requests.Session.patch'): http.patch('https://www.google.com', auth=options['auth']) - requests.patch.assert_called_once_with('https://www.google.com', **options) + requests.Session.patch.assert_called_once_with('https://www.google.com', **options) def test_patch_session_option_override(): @@ -193,9 +193,9 @@ def test_patch_session_option_override(): def test_delete(): http = RequestsWrapper({}, {}) - with mock.patch('requests.delete'): + with mock.patch('requests.Session.delete'): http.delete('https://www.google.com') - requests.delete.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.delete.assert_called_once_with('https://www.google.com', **http.options) def test_delete_session(): @@ -211,9 +211,9 @@ def test_delete_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.delete'): + with mock.patch('requests.Session.delete'): http.delete('https://www.google.com', auth=options['auth']) - requests.delete.assert_called_once_with('https://www.google.com', **options) + requests.Session.delete.assert_called_once_with('https://www.google.com', **options) def test_delete_session_option_override(): @@ -229,9 +229,9 @@ def test_delete_session_option_override(): def test_options(): http = RequestsWrapper({}, {}) - with mock.patch('requests.options'): + with mock.patch('requests.Session.options'): http.options_method('https://www.google.com') - requests.options.assert_called_once_with('https://www.google.com', **http.options) + requests.Session.options.assert_called_once_with('https://www.google.com', **http.options) 
def test_options_session(): @@ -247,9 +247,9 @@ def test_options_option_override(): options = http.options.copy() options['auth'] = ('user', 'pass') - with mock.patch('requests.options'): + with mock.patch('requests.Session.options'): http.options_method('https://www.google.com', auth=options['auth']) - requests.options.assert_called_once_with('https://www.google.com', **options) + requests.Session.options.assert_called_once_with('https://www.google.com', **options) def test_options_session_option_override(): diff --git a/datadog_checks_base/tests/base/utils/http/test_authtoken.py b/datadog_checks_base/tests/base/utils/http/test_authtoken.py index 26b461a699090..36db5905501ed 100644 --- a/datadog_checks_base/tests/base/utils/http/test_authtoken.py +++ b/datadog_checks_base/tests/base/utils/http/test_authtoken.py @@ -399,7 +399,7 @@ def test_pattern_no_match(self): init_config = {} http = RequestsWrapper(instance, init_config) - with mock.patch('requests.get'): + with mock.patch('requests.Session.get'): write_file(token_file, '\nsecret\nsecret\n') with pytest.raises( @@ -424,7 +424,7 @@ def test_pattern_match(self): expected_headers = {'Authorization': 'Bearer bar'} expected_headers.update(DEFAULT_OPTIONS['headers']) - with mock.patch('requests.get') as get: + with mock.patch('requests.Session.get') as get: write_file(token_file, '\nfoobar\nfoobaz\n') http.get('https://www.google.com') @@ -464,7 +464,7 @@ def fetch_token(self, *args, **kwargs): return {'error': 'unauthorized_client'} with ( - mock.patch('requests.get'), + mock.patch('requests.Session.get'), mock.patch('oauthlib.oauth2.BackendApplicationClient'), mock.patch('requests_oauthlib.OAuth2Session', side_effect=MockOAuth2Session), ): @@ -506,7 +506,7 @@ def fetch_token(self, *args, **kwargs): return token_response with ( - mock.patch('requests.get') as get, + mock.patch('requests.Session.get') as get, mock.patch('oauthlib.oauth2.BackendApplicationClient'), mock.patch('requests_oauthlib.OAuth2Session', 
side_effect=MockOAuth2Session), mock.patch('datadog_checks.base.utils.http.get_timestamp', return_value=0), @@ -556,7 +556,7 @@ def fetch_token(self, *args, **kwargs): return {'access_token': 'foo', 'expires_in': 9000} with ( - mock.patch('requests.get') as get, + mock.patch('requests.Session.get') as get, mock.patch('oauthlib.oauth2.BackendApplicationClient'), mock.patch('requests_oauthlib.OAuth2Session', side_effect=MockOAuth2Session), ): @@ -617,7 +617,7 @@ def auth(*args, **kwargs): return MockResponse(status_code=404) http = RequestsWrapper(instance, init_config) - with mock.patch('requests.post', side_effect=login), mock.patch('requests.get', side_effect=auth): + with mock.patch('requests.post', side_effect=login), mock.patch('requests.Session.get', side_effect=auth): http.get('https://leader.mesos/service/some-service') @@ -636,7 +636,7 @@ def test_default_placeholder_same_as_value(self): expected_headers = {'X-Vault-Token': 'foobar'} expected_headers.update(DEFAULT_OPTIONS['headers']) - with mock.patch('requests.get') as get: + with mock.patch('requests.Session.get') as get: write_file(token_file, '\nfoobar\n') http.get('https://www.google.com') @@ -669,7 +669,7 @@ def test_read_before_first_request(self): expected_headers = {'Authorization': 'Bearer secret1'} expected_headers.update(DEFAULT_OPTIONS['headers']) - with mock.patch('requests.get') as get: + with mock.patch('requests.Session.get') as get: write_file(token_file, '\nsecret1\n') http.get('https://www.google.com') @@ -715,7 +715,7 @@ def test_refresh_after_connection_error(self): init_config = {} http = RequestsWrapper(instance, init_config) - with mock.patch('requests.get'): + with mock.patch('requests.Session.get'): write_file(token_file, '\nsecret1\n') http.get('https://www.google.com') @@ -730,7 +730,7 @@ def raise_error_once(*args, **kwargs): expected_headers = {'Authorization': 'Bearer secret2'} expected_headers.update(DEFAULT_OPTIONS['headers']) - with mock.patch('requests.get', 
side_effect=raise_error_once) as get: + with mock.patch('requests.Session.get', side_effect=raise_error_once) as get: write_file(token_file, '\nsecret2\n') http.get('https://www.google.com') @@ -760,7 +760,7 @@ def test_refresh_after_bad_status_code(self): init_config = {} http = RequestsWrapper(instance, init_config) - with mock.patch('requests.get'): + with mock.patch('requests.Session.get'): write_file(token_file, '\nsecret1\n') http.get('https://www.google.com') @@ -769,7 +769,7 @@ def error(): expected_headers = {'Authorization': 'Bearer secret2'} expected_headers.update(DEFAULT_OPTIONS['headers']) - with mock.patch('requests.get', return_value=mock.MagicMock(raise_for_status=error)) as get: + with mock.patch('requests.Session.get', return_value=mock.MagicMock(raise_for_status=error)) as get: write_file(token_file, '\nsecret2\n') http.get('https://www.google.com') diff --git a/datadog_checks_base/tests/base/utils/http/test_config.py b/datadog_checks_base/tests/base/utils/http/test_config.py index 5080f231d4fe1..8263c48b1a90e 100644 --- a/datadog_checks_base/tests/base/utils/http/test_config.py +++ b/datadog_checks_base/tests/base/utils/http/test_config.py @@ -1,6 +1,10 @@ # (C) Datadog, Inc. 
2022-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +import ssl + +import mock + from datadog_checks.base.utils.http import STANDARD_FIELDS, RequestsWrapper @@ -72,16 +76,23 @@ def test_config_verify(self): def test_config_ca_cert(self): instance = {'tls_ca_cert': 'ca_cert'} init_config = {} - http = RequestsWrapper(instance, init_config) - assert http.options['verify'] == 'ca_cert' + with mock.patch.object(ssl.SSLContext, 'load_verify_locations') as mock_load_verify_locations: + RequestsWrapper(instance, init_config) + + assert mock_load_verify_locations.call_count == 1 + assert mock_load_verify_locations.call_args[1]['cafile'] == 'ca_cert' def test_config_verify_and_ca_cert(self): instance = {'tls_verify': True, 'tls_ca_cert': 'ca_cert'} init_config = {} - http = RequestsWrapper(instance, init_config) - assert http.options['verify'] == 'ca_cert' + with mock.patch.object(ssl.SSLContext, 'load_verify_locations') as mock_load_verify_locations: + http = RequestsWrapper(instance, init_config) + + assert http.options['verify'] == 'ca_cert' + assert mock_load_verify_locations.call_count == 1 + assert mock_load_verify_locations.call_args[1]['cafile'] == 'ca_cert' class TestRemapper: diff --git a/datadog_checks_base/tests/base/utils/http/test_headers.py b/datadog_checks_base/tests/base/utils/http/test_headers.py index 030561e9993ec..ab42172973153 100644 --- a/datadog_checks_base/tests/base/utils/http/test_headers.py +++ b/datadog_checks_base/tests/base/utils/http/test_headers.py @@ -77,7 +77,7 @@ def test_extra_headers_on_http_method_call(): complete_headers.update({'answer': '42'}) extra_headers = {"foo": "bar"} - with mock.patch("requests.get") as get: + with mock.patch("requests.Session.get") as get: http.get("http://example.com/hello", extra_headers=extra_headers) expected_options = dict(complete_headers) diff --git a/datadog_checks_base/tests/base/utils/http/test_http.py 
b/datadog_checks_base/tests/base/utils/http/test_http.py index 0419e05bf9aa4..8b7270eb96c52 100644 --- a/datadog_checks_base/tests/base/utils/http/test_http.py +++ b/datadog_checks_base/tests/base/utils/http/test_http.py @@ -50,14 +50,12 @@ class TestTLSCiphers: ) def test_cipher_construction(self, instance, expected_ciphers): init_config = {} - http = RequestsWrapper(instance, init_config) - mock_socket = mock.MagicMock() - with ( - mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers, - mock.patch('datadog_checks.base.utils.http.create_socket_connection', return_value=mock_socket), - ): - http.fetch_intermediate_certs('https://www.google.com') + # Mock SSL context creation before RequestsWrapper initialization + with mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers: + RequestsWrapper(instance, init_config) + + # Verify that set_ciphers was called with the expected ciphers during TLS context creation mock_set_ciphers.assert_called_once_with(expected_ciphers) @@ -177,7 +175,7 @@ class TestLogger: def test_default(self, caplog): check = AgentCheck('test', {}, [{}]) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): check.http.get('https://www.google.com') expected_message = 'Sending GET request to https://www.google.com' @@ -191,7 +189,7 @@ def test_instance(self, caplog): assert check.http.logger is check.log - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): check.http.get('https://www.google.com') expected_message = 'Sending GET request to https://www.google.com' @@ -208,7 +206,7 @@ def test_init_config(self, caplog): assert check.http.logger is check.log - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): check.http.get('https://www.google.com') 
expected_message = 'Sending GET request to https://www.google.com' @@ -223,7 +221,7 @@ def test_instance_override(self, caplog): init_config = {'log_requests': True} check = AgentCheck('test', init_config, [instance]) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): check.http.get('https://www.google.com') expected_message = 'Sending GET request to https://www.google.com' diff --git a/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py b/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py index 2e6ba622bd541..513eb2213d40e 100644 --- a/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py +++ b/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py @@ -114,7 +114,7 @@ def test_config_kerberos_keytab_file(): assert os.environ.get('KRB5_CLIENT_KTNAME') is None with mock.patch( - 'requests.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')) + 'requests.Session.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')) ): response = http.get('https://www.google.com') assert response.text == '/test/file' @@ -130,7 +130,9 @@ def test_config_kerberos_cache(): assert os.environ.get('KRB5CCNAME') is None - with mock.patch('requests.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5CCNAME', ''))): + with mock.patch( + 'requests.Session.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5CCNAME', '')) + ): response = http.get('https://www.google.com') assert response.text == '/test/file' @@ -145,7 +147,7 @@ def test_config_kerberos_cache_restores_rollback(): with EnvVars({'KRB5CCNAME': 'old'}): with mock.patch( - 'requests.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5CCNAME', '')) + 'requests.Session.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5CCNAME', 
'')) ): response = http.get('https://www.google.com') assert response.text == '/test/file' @@ -163,7 +165,7 @@ def test_config_kerberos_keytab_file_rollback(): assert os.environ.get('KRB5_CLIENT_KTNAME') == 'old' with mock.patch( - 'requests.get', + 'requests.Session.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')), ): response = http.get('https://www.google.com') diff --git a/datadog_checks_base/tests/base/utils/http/test_proxy.py b/datadog_checks_base/tests/base/utils/http/test_proxy.py index ff8251b82b11a..c9b6898ce7113 100644 --- a/datadog_checks_base/tests/base/utils/http/test_proxy.py +++ b/datadog_checks_base/tests/base/utils/http/test_proxy.py @@ -198,23 +198,24 @@ def test_no_proxy_uris_coverage(): ), ], ) -@mock.patch('datadog_checks.base.utils.http.requests') -def test_proxy_passes_right_params_to_requests(requests, proxy, expected_proxy, url): +def test_proxy_passes_right_params_to_requests(proxy, expected_proxy, url): instance = {'proxy': proxy} init_config = {} http = RequestsWrapper(instance, init_config) - http.get(url) - call_args = { - 'auth': None, - 'cert': None, - 'headers': OrderedDict( - [('User-Agent', 'Datadog Agent/0.0.0'), ('Accept', '*/*'), ('Accept-Encoding', 'gzip, deflate')] - ), - 'proxies': expected_proxy, - 'timeout': (10.0, 10.0), - 'verify': True, - 'allow_redirects': True, - } - requests.get.assert_called_with(url, **call_args) + with mock.patch('requests.Session.get') as mock_get: + http.get(url) + + call_args = { + 'auth': None, + 'cert': None, + 'headers': OrderedDict( + [('User-Agent', 'Datadog Agent/0.0.0'), ('Accept', '*/*'), ('Accept-Encoding', 'gzip, deflate')] + ), + 'proxies': expected_proxy, + 'timeout': (10.0, 10.0), + 'verify': True, + 'allow_redirects': True, + } + mock_get.assert_called_with(url, **call_args) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 
3301a6a8f39c8..da5a7cd4cd936 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -2,15 +2,22 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import logging +import ssl import mock import pytest from requests.exceptions import SSLError -from datadog_checks.base.utils.http import RequestsWrapper +from datadog_checks.base.utils.http import HTTPAdapterWrapper, RequestsWrapper +from datadog_checks.base.utils.tls import TlsContextWrapper pytestmark = [pytest.mark.unit] +IANA_TO_OPENSSL_NAME = { + 'TLS_RSA_WITH_AES_128_CBC_SHA': 'AES128-SHA', + 'TLS_RSA_WITH_AES_256_CBC_SHA': 'AES256-SHA', +} + class TestCert: def test_config_default(self): @@ -23,16 +30,23 @@ def test_config_default(self): def test_config_cert(self): instance = {'tls_cert': 'cert'} init_config = {} - http = RequestsWrapper(instance, init_config) - assert http.options['cert'] == 'cert' + with mock.patch.object(ssl.SSLContext, 'load_cert_chain') as mock_load_cert_chain: + RequestsWrapper(instance, init_config) + + assert mock_load_cert_chain.call_count == 1 + assert mock_load_cert_chain.call_args[0][0] == 'cert' def test_config_cert_and_private_key(self): instance = {'tls_cert': 'cert', 'tls_private_key': 'key'} init_config = {} - http = RequestsWrapper(instance, init_config) - assert http.options['cert'] == ('cert', 'key') + with mock.patch.object(ssl.SSLContext, 'load_cert_chain') as mock_load_cert_chain: + RequestsWrapper(instance, init_config) + + assert mock_load_cert_chain.call_count == 1 + assert mock_load_cert_chain.call_args[0][0] == 'cert' + assert mock_load_cert_chain.call_args[1]["keyfile"] == 'key' class TestIgnoreTLSWarning: @@ -71,7 +85,7 @@ def test_default_no_ignore(self, caplog): init_config = {} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), 
mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -86,7 +100,7 @@ def test_default_no_ignore_http(self, caplog): init_config = {} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('http://www.google.com', verify=False) assert sum(1 for _, level, _ in caplog.record_tuples if level == logging.WARNING) == 0 @@ -96,7 +110,7 @@ def test_ignore(self, caplog): init_config = {} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -108,7 +122,7 @@ def test_default_no_ignore_session(self, caplog): init_config = {} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -123,7 +137,7 @@ def test_ignore_session(self, caplog): init_config = {} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -135,7 +149,7 @@ def test_init_ignore(self, caplog): init_config = {'tls_ignore_warning': True} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): 
+ with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -147,7 +161,7 @@ def test_default_init_no_ignore(self, caplog): init_config = {'tls_ignore_warning': False} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -162,7 +176,7 @@ def test_instance_ignore(self, caplog): init_config = {'tls_ignore_warning': False} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -174,7 +188,7 @@ def test_instance_no_ignore(self, caplog): init_config = {'tls_ignore_warning': True} http = RequestsWrapper(instance, init_config) - with caplog.at_level(logging.DEBUG), mock.patch('requests.get'): + with caplog.at_level(logging.DEBUG), mock.patch('requests.Session.get'): http.get('https://www.google.com', verify=False) expected_message = 'An unverified HTTPS request is being made to https://www.google.com' @@ -228,7 +242,157 @@ def test_fetch_intermediate_certs(self, port): with mock.patch('datadog_checks.base.utils.http.create_socket_connection') as mock_create_socket_connection: with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.handle_auth_token'): with pytest.raises(SSLError): - with mock.patch('requests.get', side_effect=SSLError): + with mock.patch('requests.Session.get', side_effect=SSLError): http.get('https://localhost:{}'.format(port)) 
mock_create_socket_connection.assert_called_with('localhost', port) + + def test_fetch_intermediate_certs_tls_ciphers(self): + """Test that fetch_intermediate_certs uses the correct ciphers.""" + instance = {'tls_verify': True, 'tls_ciphers': ['TLS_RSA_WITH_AES_128_CBC_SHA']} + init_config = {} + http = RequestsWrapper(instance, init_config) + + with mock.patch('datadog_checks.base.utils.http.create_socket_connection') as mock_create_socket_connection: + mock_socket = mock.MagicMock() + mock_create_socket_connection.return_value = mock_socket + + with mock.patch('datadog_checks.base.utils.http.ssl.SSLContext') as mock_ssl_context_class: + mock_context = mock.MagicMock() + mock_ssl_context_class.return_value = mock_context + + # Mock the wrapped socket to avoid actual SSL operations + mock_wrapped_socket = mock.MagicMock() + mock_context.wrap_socket.return_value.__enter__.return_value = mock_wrapped_socket + mock_wrapped_socket.getpeercert.return_value = b'fake_cert_data' + mock_wrapped_socket.version.return_value = 'TLSv1.2' + + # Mock the certificate loading to avoid cryptography operations + with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.load_intermediate_certs'): + http.fetch_intermediate_certs('example.com', 443) + + # Verify set_ciphers was called with the correct cipher list + mock_context.set_ciphers.assert_called_once_with('TLS_RSA_WITH_AES_128_CBC_SHA') + + +class TestTlsContext: + """Test the core TLS context functionality.""" + + def test_requests_wrapper_creates_tls_context_wrapper(self): + """Test that RequestsWrapper creates a TlsContextWrapper instance.""" + instance = {'tls_verify': True} + init_config = {} + http = RequestsWrapper(instance, init_config) + + assert hasattr(http, 'tls_context_wrapper') + assert isinstance(http.tls_context_wrapper, TlsContextWrapper) + + def test_session_uses_tls_context_adapter(self): + """Test that the session uses HTTPAdapterWrapper for consistent TLS configuration.""" + instance = {'tls_verify': 
True} + init_config = {} + http = RequestsWrapper(instance, init_config) + + session = http.session + https_adapter = session.get_adapter('https://example.com') + + assert isinstance(https_adapter, HTTPAdapterWrapper) + assert https_adapter.tls_context_wrapper is http.tls_context_wrapper + + def test_tls_ciphers_applied_consistently(self): + """Test that tls_ciphers are applied consistently through TlsContextWrapper.""" + instance = {'tls_verify': True, 'tls_ciphers': ['TLS_RSA_WITH_AES_128_CBC_SHA', 'TLS_RSA_WITH_AES_256_CBC_SHA']} + init_config = {} + http = RequestsWrapper(instance, init_config) + + # Verify the TLS context wrapper has the cipher configuration + assert http.tls_context_wrapper.config['tls_ciphers'] == [ + 'TLS_RSA_WITH_AES_128_CBC_SHA', + 'TLS_RSA_WITH_AES_256_CBC_SHA', + ] + + # Verify the session adapter uses the same TLS context + session = http.session + https_adapter = session.get_adapter('https://example.com') + assert https_adapter.tls_context_wrapper.tls_context is http.tls_context_wrapper.tls_context + # Verify that the ciphers are set correctly in the TLS context + for cipher in instance['tls_ciphers']: + # At least one entry's name field should match the OpenSSL name + assert any( + IANA_TO_OPENSSL_NAME.get(cipher) in c.get('name') + for c in https_adapter.tls_context_wrapper.tls_context.get_ciphers() + ) + + def test_host_header_compatibility(self): + """Test that host header functionality works with TLS context unification.""" + instance = { + 'tls_use_host_header': True, + 'headers': {'Host': 'custom-host.example.com'}, + 'tls_verify': True, + 'tls_ciphers': ['TLS_RSA_WITH_AES_128_CBC_SHA'], + } + init_config = {} + http = RequestsWrapper(instance, init_config) + + session = http.session + https_adapter = session.get_adapter('https://example.com') + + # Should be the combined adapter that supports both TLS context and host headers + assert hasattr(https_adapter, 'tls_context_wrapper') + assert https_adapter.tls_context_wrapper is 
http.tls_context_wrapper + assert 'TlsContextHostHeaderAdapter' in str(type(https_adapter)) + + def test_remapper_functionality_preserved(self): + """Test that config remapping functionality is preserved with TLS context unification.""" + instance = { + 'disable_ssl_validation': True, + 'custom_private_key': '/path/to/key.pem', + } + remapper = { + 'disable_ssl_validation': {'name': 'tls_verify', 'invert': True}, + 'custom_private_key': {'name': 'tls_private_key'}, + } + init_config = {} + + # Mock the TLS context creation to avoid file operations + with mock.patch.object(TlsContextWrapper, '_create_tls_context') as mock_create_context: + mock_context = mock.MagicMock() + mock_create_context.return_value = mock_context + + http = RequestsWrapper(instance, init_config, remapper=remapper) + + # Verify remapping worked - disable_ssl_validation: True should become tls_verify: False + assert http.tls_context_wrapper.config['tls_verify'] is False + assert http.tls_context_wrapper.config['tls_private_key'] == '/path/to/key.pem' + + def test_backward_compatibility_maintained(self): + """Test that all existing TLS configuration options still work.""" + instance = { + 'tls_verify': True, + 'tls_ca_cert': '/path/to/ca.pem', + 'tls_cert': '/path/to/cert.pem', + 'tls_private_key': '/path/to/key.pem', + 'tls_validate_hostname': True, + 'tls_ignore_warning': False, + 'tls_protocols_allowed': ['TLSv1.2', 'TLSv1.3'], + 'tls_ciphers': ['TLS_RSA_WITH_AES_128_CBC_SHA'], + } + init_config = {} + + # Mock the TLS context creation to avoid file operations + with mock.patch.object(TlsContextWrapper, '_create_tls_context') as mock_create_context: + mock_context = mock.MagicMock() + mock_create_context.return_value = mock_context + + # Should not raise any exceptions + http = RequestsWrapper(instance, init_config) + + # Verify all options are preserved in RequestsWrapper + assert http.options['verify'] == '/path/to/ca.pem' + assert http.options['cert'] == ('/path/to/cert.pem', 
'/path/to/key.pem') + assert http.ignore_tls_warning is False + assert http.tls_protocols_allowed == ['TLSv1.2', 'TLSv1.3'] + + # Verify TLS context wrapper has the right config + assert http.tls_context_wrapper.config['tls_verify'] is True + assert http.tls_context_wrapper.config['tls_ciphers'] == ['TLS_RSA_WITH_AES_128_CBC_SHA'] diff --git a/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py b/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py index 6e5989b2ab0e0..b71295f9d2aff 100644 --- a/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py +++ b/datadog_checks_dev/datadog_checks/dev/plugin/pytest.py @@ -292,7 +292,7 @@ def mock_http_response(mocker): from ..http import MockResponse yield lambda *args, **kwargs: mocker.patch( - kwargs.pop('method', 'requests.get'), return_value=MockResponse(*args, **kwargs) + kwargs.pop('method', 'requests.Session.get'), return_value=MockResponse(*args, **kwargs) ) From cc99a037dcdca39f5d6143bf7dca35ee974f1357 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 27 May 2025 17:28:43 +0200 Subject: [PATCH 003/100] Run formatter --- .../tests/base/utils/http/test_kerberos_unit.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py b/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py index 513eb2213d40e..fd4a51a07b0e6 100644 --- a/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py +++ b/datadog_checks_base/tests/base/utils/http/test_kerberos_unit.py @@ -114,7 +114,8 @@ def test_config_kerberos_keytab_file(): assert os.environ.get('KRB5_CLIENT_KTNAME') is None with mock.patch( - 'requests.Session.get', side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')) + 'requests.Session.get', + side_effect=lambda *args, **kwargs: MockResponse(os.environ.get('KRB5_CLIENT_KTNAME', '')), ): response = http.get('https://www.google.com') assert response.text == '/test/file' From 
e65809d1ece9d8b55de273e746c0c25f0f83af24 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 1 Jul 2025 18:15:02 +0200 Subject: [PATCH 004/100] Turn requests.X mocks into requests.Session.X --- amazon_msk/tests/conftest.py | 2 +- arangodb/tests/test_arangodb.py | 2 +- cert_manager/tests/test_cert_manager.py | 4 +-- citrix_hypervisor/tests/conftest.py | 2 +- .../tests/base/utils/http/test_authtoken.py | 2 +- envoy/tests/legacy/test_unit.py | 2 +- haproxy/tests/conftest.py | 6 ++-- haproxy/tests/legacy/test_unit.py | 2 +- .../tests/test_kube_controller_manager.py | 6 ++-- .../tests/test_sli_metrics.py | 8 ++--- kube_dns/tests/test_kube_dns.py | 6 ++-- .../tests/test_kube_metrics_server.py | 6 ++-- kube_proxy/tests/test_kube_proxy.py | 8 ++--- .../tests/test_kube_scheduler_1_14.py | 6 ++-- kube_scheduler/tests/test_sli_metrics.py | 8 ++--- kubelet/tests/test_kubelet.py | 8 ++--- powerdns_recursor/tests/test_metadata.py | 6 ++-- spark/tests/test_spark.py | 28 ++++++++-------- twistlock/tests/test_twistlock.py | 8 ++--- vault/tests/test_vault.py | 32 +++++++++---------- vllm/tests/test_unit.py | 6 ++-- voltdb/tests/test_integration.py | 4 +-- vsphere/tests/test_check.py | 2 +- 23 files changed, 82 insertions(+), 82 deletions(-) diff --git a/amazon_msk/tests/conftest.py b/amazon_msk/tests/conftest.py index 1298134d93e65..b5775c6f638cc 100644 --- a/amazon_msk/tests/conftest.py +++ b/amazon_msk/tests/conftest.py @@ -32,7 +32,7 @@ def mock_requests_get(url, *args, **kwargs): @pytest.fixture def mock_data(): - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): yield diff --git a/arangodb/tests/test_arangodb.py b/arangodb/tests/test_arangodb.py index 9786150bf5eb0..365c776ee6b71 100644 --- a/arangodb/tests/test_arangodb.py +++ b/arangodb/tests/test_arangodb.py @@ -56,7 +56,7 @@ def mock_requests_get(url, *args, **kwargs): fixture = url.rsplit('/', 1)[-1] 
return MockResponse(file_path=os.path.join(os.path.dirname(__file__), 'fixtures', tag_condition, fixture)) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(check) aggregator.assert_service_check( diff --git a/cert_manager/tests/test_cert_manager.py b/cert_manager/tests/test_cert_manager.py index b67f638659c27..ee1097015556e 100644 --- a/cert_manager/tests/test_cert_manager.py +++ b/cert_manager/tests/test_cert_manager.py @@ -15,7 +15,7 @@ @pytest.fixture() def error_metrics(): with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock(status_code=502, headers={'Content-Type': "text/plain"}), ): yield @@ -34,7 +34,7 @@ def test_check(aggregator, dd_run_check): def mock_requests_get(url, *args, **kwargs): return MockResponse(file_path=os.path.join(os.path.dirname(__file__), 'fixtures', 'cert_manager.txt')) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(check) expected_metrics = dict(CERT_METRICS) diff --git a/citrix_hypervisor/tests/conftest.py b/citrix_hypervisor/tests/conftest.py index 01062c83e2f83..9670622329d2e 100644 --- a/citrix_hypervisor/tests/conftest.py +++ b/citrix_hypervisor/tests/conftest.py @@ -50,5 +50,5 @@ def mock_requests_get(url, *args, **kwargs): @pytest.fixture def mock_responses(): - with mock.patch('requests.get', side_effect=mock_requests_get): + with mock.patch('requests.Session.get', side_effect=mock_requests_get): yield diff --git a/datadog_checks_base/tests/base/utils/http/test_authtoken.py b/datadog_checks_base/tests/base/utils/http/test_authtoken.py index 36db5905501ed..5e50d7cda1221 100644 --- a/datadog_checks_base/tests/base/utils/http/test_authtoken.py +++ b/datadog_checks_base/tests/base/utils/http/test_authtoken.py @@ -617,7 +617,7 @@ 
def auth(*args, **kwargs): return MockResponse(status_code=404) http = RequestsWrapper(instance, init_config) - with mock.patch('requests.post', side_effect=login), mock.patch('requests.Session.get', side_effect=auth): + with mock.patch('requests.Session.post', side_effect=login), mock.patch('requests.Session.get', side_effect=auth): http.get('https://leader.mesos/service/some-service') diff --git a/envoy/tests/legacy/test_unit.py b/envoy/tests/legacy/test_unit.py index 0f76041d1a618..09c3e63e77226 100644 --- a/envoy/tests/legacy/test_unit.py +++ b/envoy/tests/legacy/test_unit.py @@ -189,7 +189,7 @@ def test_metadata_with_exception( check.check_id = 'test:123' check.log = mock.MagicMock() - with mock.patch('requests.get', side_effect=exception): + with mock.patch('requests.Session.get', side_effect=exception): check._collect_metadata() datadog_agent.assert_metadata_count(0) check.log.warning.assert_called_with(*log_call_parameters) diff --git a/haproxy/tests/conftest.py b/haproxy/tests/conftest.py index aa7b63a27c696..f8d18d2db7606 100644 --- a/haproxy/tests/conftest.py +++ b/haproxy/tests/conftest.py @@ -209,7 +209,7 @@ def haproxy_mock(): filepath = os.path.join(HERE, 'fixtures', 'mock_data') with open(filepath, 'rb') as f: data = f.read() - p = mock.patch('requests.get', return_value=mock.Mock(content=data)) + p = mock.patch('requests.Session.get', return_value=mock.Mock(content=data)) yield p.start() p.stop() @@ -227,7 +227,7 @@ def haproxy_mock_evil(): filepath = os.path.join(HERE, 'fixtures', 'mock_data_evil') with open(filepath, 'rb') as f: data = f.read() - p = mock.patch('requests.get', return_value=mock.Mock(content=data)) + p = mock.patch('requests.Session.get', return_value=mock.Mock(content=data)) yield p.start() p.stop() @@ -237,7 +237,7 @@ def haproxy_mock_enterprise_version_info(): filepath = os.path.join(HERE, 'fixtures', 'enterprise_version_info.html') with open(filepath, 'rb') as f: data = f.read() - with mock.patch('requests.get', 
return_value=mock.Mock(content=data)) as p: + with mock.patch('requests.Session.get', return_value=mock.Mock(content=data)) as p: yield p diff --git a/haproxy/tests/legacy/test_unit.py b/haproxy/tests/legacy/test_unit.py index b7edf8efc7f48..f93fca547f5ce 100644 --- a/haproxy/tests/legacy/test_unit.py +++ b/haproxy/tests/legacy/test_unit.py @@ -433,7 +433,7 @@ def test_version_failure(aggregator, check, datadog_agent): filepath = os.path.join(os.path.dirname(common.HERE), 'fixtures', 'mock_data') with open(filepath, 'rb') as f: data = f.read() - with mock.patch('requests.get') as m: + with mock.patch('requests.Session.get') as m: m.side_effect = [RuntimeError("Ooops"), mock.Mock(content=data)] haproxy_check.check(config) diff --git a/kube_controller_manager/tests/test_kube_controller_manager.py b/kube_controller_manager/tests/test_kube_controller_manager.py index 7a16d6665e847..4c4ac8ceaf4e3 100644 --- a/kube_controller_manager/tests/test_kube_controller_manager.py +++ b/kube_controller_manager/tests/test_kube_controller_manager.py @@ -37,7 +37,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -146,13 +146,13 @@ def test_service_check_ok(monkeypatch): ] # successful health check - with mock.patch("requests.get", return_value=mock.MagicMock(status_code=200)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(status_code=200)): check._perform_service_check(instance) # failed health check raise_error = mock.Mock() raise_error.side_effect = requests.HTTPError('health check failed') - with mock.patch("requests.get", return_value=mock.MagicMock(raise_for_status=raise_error)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(raise_for_status=raise_error)): check._perform_service_check(instance) 
check.service_check.assert_has_calls(calls) diff --git a/kube_controller_manager/tests/test_sli_metrics.py b/kube_controller_manager/tests/test_sli_metrics.py index c3ff350c550ef..420e7255ce8e2 100644 --- a/kube_controller_manager/tests/test_sli_metrics.py +++ b/kube_controller_manager/tests/test_sli_metrics.py @@ -22,7 +22,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -205,7 +205,7 @@ def mock_request(): def test_detect_sli_endpoint(mock_metrics, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: mock_request.return_value.status_code = 200 c = KubeControllerManagerCheck(CHECK_NAME, {}, [instance]) c.check(instance) @@ -213,7 +213,7 @@ def test_detect_sli_endpoint(mock_metrics, instance): def test_detect_sli_endpoint_404(mock_metrics, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: mock_request.return_value.status_code = 404 c = KubeControllerManagerCheck(CHECK_NAME, {}, [instance]) c.check(instance) @@ -221,7 +221,7 @@ def test_detect_sli_endpoint_404(mock_metrics, instance): def test_detect_sli_endpoint_403(mock_metrics, mock_request, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: mock_request.return_value.status_code = 403 c = KubeControllerManagerCheck(CHECK_NAME, {}, [instance]) c.check(instance) diff --git a/kube_dns/tests/test_kube_dns.py b/kube_dns/tests/test_kube_dns.py index 6b92c0089388c..dcb44c2df6efd 100644 --- a/kube_dns/tests/test_kube_dns.py +++ b/kube_dns/tests/test_kube_dns.py @@ -24,7 +24,7 @@ def mock_get(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 
'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -96,13 +96,13 @@ def test_service_check_ok(self, monkeypatch): ] # successful health check - with mock.patch("requests.get", return_value=mock.MagicMock(status_code=200)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(status_code=200)): check._perform_service_check(instance) # failed health check raise_error = mock.Mock() raise_error.side_effect = requests.HTTPError('health check failed') - with mock.patch("requests.get", return_value=mock.MagicMock(raise_for_status=raise_error)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(raise_for_status=raise_error)): check._perform_service_check(instance) check.service_check.assert_has_calls(calls) diff --git a/kube_metrics_server/tests/test_kube_metrics_server.py b/kube_metrics_server/tests/test_kube_metrics_server.py index 96fea70a8ea66..4453dabbba072 100644 --- a/kube_metrics_server/tests/test_kube_metrics_server.py +++ b/kube_metrics_server/tests/test_kube_metrics_server.py @@ -28,7 +28,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -76,13 +76,13 @@ def test_service_check_ok(monkeypatch): ] # successful health check - with mock.patch("requests.get", return_value=mock.MagicMock(status_code=200)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(status_code=200)): check._perform_service_check(instance) # failed health check raise_error = mock.Mock() raise_error.side_effect = requests.HTTPError('health check failed') - with mock.patch("requests.get", return_value=mock.MagicMock(raise_for_status=raise_error)): + with mock.patch('requests.Session.get', 
return_value=mock.MagicMock(raise_for_status=raise_error)): check._perform_service_check(instance) check.service_check.assert_has_calls(calls) diff --git a/kube_proxy/tests/test_kube_proxy.py b/kube_proxy/tests/test_kube_proxy.py index 941d6dbe4da83..c31b02b722f32 100644 --- a/kube_proxy/tests/test_kube_proxy.py +++ b/kube_proxy/tests/test_kube_proxy.py @@ -25,7 +25,7 @@ def mock_iptables(): with open(f_name, 'r') as f: text_data = f.read() mock_iptables = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -40,7 +40,7 @@ def mock_userspace(): with open(f_name, 'r') as f: text_data = f.read() mock_userspace = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -119,13 +119,13 @@ def test_service_check_ok(monkeypatch): ] # successful health check - with mock.patch("requests.get", return_value=mock.MagicMock(status_code=200)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(status_code=200)): check._perform_service_check(instance) # failed health check raise_error = mock.Mock() raise_error.side_effect = requests.HTTPError('health check failed') - with mock.patch("requests.get", return_value=mock.MagicMock(raise_for_status=raise_error)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(raise_for_status=raise_error)): check._perform_service_check(instance) check.service_check.assert_has_calls(calls) diff --git a/kube_scheduler/tests/test_kube_scheduler_1_14.py b/kube_scheduler/tests/test_kube_scheduler_1_14.py index 4f6e4bfb75eaf..03b6ae525e03b 100644 --- a/kube_scheduler/tests/test_kube_scheduler_1_14.py +++ b/kube_scheduler/tests/test_kube_scheduler_1_14.py @@ -25,7 +25,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = 
f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -118,13 +118,13 @@ def test_service_check_ok(monkeypatch): ] # successful health check - with mock.patch("requests.get", return_value=mock.MagicMock(status_code=200)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(status_code=200)): check._perform_service_check(instance) # failed health check raise_error = mock.Mock() raise_error.side_effect = requests.HTTPError('health check failed') - with mock.patch("requests.get", return_value=mock.MagicMock(raise_for_status=raise_error)): + with mock.patch('requests.Session.get', return_value=mock.MagicMock(raise_for_status=raise_error)): check._perform_service_check(instance) check.service_check.assert_has_calls(calls) diff --git a/kube_scheduler/tests/test_sli_metrics.py b/kube_scheduler/tests/test_sli_metrics.py index 706050f730f43..dd98007f4e9d8 100644 --- a/kube_scheduler/tests/test_sli_metrics.py +++ b/kube_scheduler/tests/test_sli_metrics.py @@ -22,7 +22,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -97,7 +97,7 @@ def mock_request(): def test_detect_sli_endpoint(mock_metrics, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: mock_request.return_value.status_code = 200 c = KubeSchedulerCheck(CHECK_NAME, {}, [instance]) c.check(instance) @@ -105,7 +105,7 @@ def test_detect_sli_endpoint(mock_metrics, instance): def test_detect_sli_endpoint_404(mock_metrics, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: 
mock_request.return_value.status_code = 404 c = KubeSchedulerCheck(CHECK_NAME, {}, [instance]) c.check(instance) @@ -113,7 +113,7 @@ def test_detect_sli_endpoint_404(mock_metrics, instance): def test_detect_sli_endpoint_403(mock_metrics, instance): - with mock.patch('requests.get') as mock_request: + with mock.patch('requests.Session.get') as mock_request: mock_request.return_value.status_code = 403 c = KubeSchedulerCheck(CHECK_NAME, {}, [instance]) c.check(instance) diff --git a/kubelet/tests/test_kubelet.py b/kubelet/tests/test_kubelet.py index c32b7a8281368..911b03c628836 100644 --- a/kubelet/tests/test_kubelet.py +++ b/kubelet/tests/test_kubelet.py @@ -563,7 +563,7 @@ def test_kubelet_credentials_update(monkeypatch, aggregator): get = mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: mock_from_file('kubelet_metrics_1_14.txt').splitlines() ) - with mock.patch('requests.get', return_value=get): + with mock.patch('requests.Session.get', return_value=get): check.check(instance) assert check._http_handlers[instance['kubelet_metrics_endpoint']].options['verify'] is True @@ -574,7 +574,7 @@ def test_kubelet_credentials_update(monkeypatch, aggregator): ) kubelet_conn_info = {'url': 'http://127.0.0.1:10255', 'ca_cert': False} with ( - mock.patch('requests.get', return_value=get), + mock.patch('requests.Session.get', return_value=get), mock.patch('datadog_checks.kubelet.kubelet.get_connection_info', return_value=kubelet_conn_info), ): check.check(instance) @@ -1050,7 +1050,7 @@ def test_perform_kubelet_check(monkeypatch): instance_tags = ["one:1"] get = MockedResponse() - with mock.patch("requests.get", side_effect=get): + with mock.patch('requests.Session.get', side_effect=get): check._perform_kubelet_check(instance_tags) get.assert_has_calls( @@ -1094,7 +1094,7 @@ def test_report_node_metrics_kubernetes1_18(monkeypatch, aggregator): get = mock.MagicMock(status_code=404, iter_lines=lambda **kwargs: "Error Code") get.raise_for_status.side_effect =
requests.HTTPError('error') - with mock.patch('requests.get', return_value=get): + with mock.patch('requests.Session.get', return_value=get): check._report_node_metrics(['foo:bar']) aggregator.assert_all_metrics_covered() diff --git a/powerdns_recursor/tests/test_metadata.py b/powerdns_recursor/tests/test_metadata.py index a7f8511818c68..36ff99eebd981 100644 --- a/powerdns_recursor/tests/test_metadata.py +++ b/powerdns_recursor/tests/test_metadata.py @@ -24,19 +24,19 @@ def test_metadata_unit(datadog_agent): config_obj, tags = check._get_config(instance) - with mock.patch('requests.get', side_effect=requests.exceptions.Timeout()): + with mock.patch('requests.Session.get', side_effect=requests.exceptions.Timeout()): check._collect_metadata(config_obj) datadog_agent.assert_metadata_count(0) check.log.debug.assert_called_with('Error collecting PowerDNS Recursor version: %s', '') datadog_agent.reset() - with mock.patch('requests.get', return_value=MockResponse()): + with mock.patch('requests.Session.get', return_value=MockResponse()): check._collect_metadata(config_obj) datadog_agent.assert_metadata_count(0) check.log.debug.assert_called_with("Couldn't find the PowerDNS Recursor Server version header") datadog_agent.reset() - with mock.patch('requests.get', return_value=MockResponse(headers={'Server': 'wrong_stuff'})): + with mock.patch('requests.Session.get', return_value=MockResponse(headers={'Server': 'wrong_stuff'})): check._collect_metadata(config_obj) datadog_agent.assert_metadata_count(0) check.log.debug.assert_called_with( diff --git a/spark/tests/test_spark.py b/spark/tests/test_spark.py index 364cd15a3949b..f5a143e436d3d 100644 --- a/spark/tests/test_spark.py +++ b/spark/tests/test_spark.py @@ -688,7 +688,7 @@ def _assert(aggregator, values_and_tags): @pytest.mark.unit def test_yarn(aggregator, dd_run_check): - with mock.patch('requests.get', yarn_requests_get_mock): + with mock.patch('requests.Session.get', yarn_requests_get_mock): c = SparkCheck('spark', 
{}, [YARN_CONFIG]) dd_run_check(c) @@ -743,7 +743,7 @@ def test_yarn(aggregator, dd_run_check): @pytest.mark.unit def test_auth_yarn(aggregator, dd_run_check): - with mock.patch('requests.get', yarn_requests_auth_mock): + with mock.patch('requests.Session.get', yarn_requests_auth_mock): c = SparkCheck('spark', {}, [YARN_AUTH_CONFIG]) dd_run_check(c) @@ -763,7 +763,7 @@ def test_auth_yarn(aggregator, dd_run_check): @pytest.mark.unit def test_mesos(aggregator, dd_run_check): - with mock.patch('requests.get', mesos_requests_get_mock): + with mock.patch('requests.Session.get', mesos_requests_get_mock): c = SparkCheck('spark', {}, [MESOS_CONFIG]) dd_run_check(c) _assert( @@ -822,7 +822,7 @@ def test_mesos(aggregator, dd_run_check): @pytest.mark.unit def test_mesos_filter(aggregator, dd_run_check): - with mock.patch('requests.get', mesos_requests_get_mock): + with mock.patch('requests.Session.get', mesos_requests_get_mock): c = SparkCheck('spark', {}, [MESOS_FILTERED_CONFIG]) dd_run_check(c) @@ -835,7 +835,7 @@ def test_mesos_filter(aggregator, dd_run_check): @pytest.mark.unit def test_driver_unit(aggregator, dd_run_check): - with mock.patch('requests.get', driver_requests_get_mock): + with mock.patch('requests.Session.get', driver_requests_get_mock): c = SparkCheck('spark', {}, [DRIVER_CONFIG]) dd_run_check(c) @@ -895,7 +895,7 @@ def test_driver_unit(aggregator, dd_run_check): @pytest.mark.unit def test_standalone_unit(aggregator, dd_run_check): - with mock.patch('requests.get', standalone_requests_get_mock): + with mock.patch('requests.Session.get', standalone_requests_get_mock): c = SparkCheck('spark', {}, [STANDALONE_CONFIG]) dd_run_check(c) @@ -947,7 +947,7 @@ def test_standalone_unit(aggregator, dd_run_check): @pytest.mark.unit def test_standalone_stage_disabled_unit(aggregator, dd_run_check): - with mock.patch('requests.get', standalone_requests_get_mock): + with mock.patch('requests.Session.get', standalone_requests_get_mock): c = SparkCheck('spark', {}, 
[STANDALONE_CONFIG_STAGE_DISABLED]) dd_run_check(c) @@ -996,7 +996,7 @@ def test_standalone_stage_disabled_unit(aggregator, dd_run_check): @pytest.mark.unit def test_standalone_unit_with_proxy_warning_page(aggregator, dd_run_check): c = SparkCheck('spark', {}, [STANDALONE_CONFIG]) - with mock.patch('requests.get', proxy_with_warning_page_mock): + with mock.patch('requests.Session.get', proxy_with_warning_page_mock): dd_run_check(c) _assert( @@ -1048,7 +1048,7 @@ def test_standalone_unit_with_proxy_warning_page(aggregator, dd_run_check): @pytest.mark.unit def test_standalone_pre20(aggregator, dd_run_check): - with mock.patch('requests.get', standalone_requests_pre20_get_mock): + with mock.patch('requests.Session.get', standalone_requests_pre20_get_mock): c = SparkCheck('spark', {}, [STANDALONE_CONFIG_PRE_20]) dd_run_check(c) @@ -1101,7 +1101,7 @@ def test_standalone_pre20(aggregator, dd_run_check): @pytest.mark.unit def test_metadata(aggregator, datadog_agent, dd_run_check): - with mock.patch('requests.get', standalone_requests_pre20_get_mock): + with mock.patch('requests.Session.get', standalone_requests_pre20_get_mock): c = SparkCheck(CHECK_NAME, {}, [STANDALONE_CONFIG_PRE_20]) c.check_id = "test:123" dd_run_check(c) @@ -1127,7 +1127,7 @@ def test_disable_legacy_cluster_tags(aggregator, dd_run_check): instance = MESOS_FILTERED_CONFIG instance['disable_legacy_cluster_tag'] = True - with mock.patch('requests.get', mesos_requests_get_mock): + with mock.patch('requests.Session.get', mesos_requests_get_mock): c = SparkCheck('spark', {}, [instance]) dd_run_check(c) @@ -1156,7 +1156,7 @@ def test_enable_query_name_tag_for_structured_streaming( ): instance['enable_query_name_tag'] = True - with mock.patch('requests.get', requests_get_mock): + with mock.patch('requests.Session.get', requests_get_mock): c = SparkCheck('spark', {}, [instance]) dd_run_check(c) @@ -1240,7 +1240,7 @@ def test_do_not_crash_on_single_app_failure(): ids=["driver", "yarn", "mesos", "standalone", 
"standalone_pre_20"], ) def test_no_running_apps(aggregator, dd_run_check, instance, service_check, caplog): - with mock.patch('requests.get', return_value=MockResponse("{}")): + with mock.patch('requests.Session.get', return_value=MockResponse("{}")): with caplog.at_level(logging.WARNING): dd_run_check(SparkCheck('spark', {}, [instance])) @@ -1324,7 +1324,7 @@ def get_without_json(url, *args, **kwargs): else: return yarn_requests_get_mock(url, *args, **kwargs) - mocker.patch('requests.get', get_without_json) + mocker.patch('requests.Session.get', get_without_json) dd_run_check(SparkCheck('spark', {}, [YARN_CONFIG])) for m in missing_metrics: aggregator.assert_metric_has_tag(m, 'app_name:PySparkShell', count=0) diff --git a/twistlock/tests/test_twistlock.py b/twistlock/tests/test_twistlock.py index 26b778426e241..8a0ad2fef3336 100644 --- a/twistlock/tests/test_twistlock.py +++ b/twistlock/tests/test_twistlock.py @@ -47,7 +47,7 @@ def mock_get(url, *args, **kwargs): def test_check(aggregator, instance, fixture_group): check = TwistlockCheck('twistlock', {}, [instance]) - with mock.patch('requests.get', side_effect=mock_get_factory(fixture_group), autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_get_factory(fixture_group), autospec=True): check.check(instance) check.check(instance) @@ -68,7 +68,7 @@ def test_config_project(aggregator, instance, fixture_group): instance['project'] = project check = TwistlockCheck('twistlock', {}, [instance]) - with mock.patch('requests.get', side_effect=mock_get_factory(fixture_group), autospec=True) as r: + with mock.patch('requests.Session.get', side_effect=mock_get_factory(fixture_group), autospec=True) as r: check.check(instance) r.assert_called_with( @@ -99,7 +99,7 @@ def mock_get(url, *args, **kwargs): return MockResponse(file_path=os.path.join(HERE, 'fixtures', fixture_group, '{}.json'.format(path))) return MockResponse(file_path=os.path.join(HERE, 'fixtures', 'empty_images.json')) - with 
mock.patch('requests.get', side_effect=mock_get): + with mock.patch('requests.Session.get', side_effect=mock_get): check.check(instance) check.check(instance) @@ -108,7 +108,7 @@ def test_err_response(aggregator, instance): check = TwistlockCheck('twistlock', {}, [instance]) with pytest.raises(Exception, match='^Error in response'): - with mock.patch('requests.get', return_value=MockResponse('{"err": "invalid credentials"}'), autospec=True): + with mock.patch('requests.Session.get', return_value=MockResponse('{"err": "invalid credentials"}'), autospec=True): check.check(instance) diff --git a/vault/tests/test_vault.py b/vault/tests/test_vault.py index 81c37dceff840..0e8c2892f9cdb 100644 --- a/vault/tests/test_vault.py +++ b/vault/tests/test_vault.py @@ -97,7 +97,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_service_check(Vault.SERVICE_CHECK_CONNECT, status=Vault.OK, tags=global_tags, count=1) @@ -135,7 +135,7 @@ def test_service_check_500_fail(self, aggregator, dd_run_check, global_tags): instance.update(INSTANCES['main']) c = Vault(Vault.CHECK_NAME, {}, [instance]) - with mock.patch('requests.get', return_value=MockResponse(status_code=500)): + with mock.patch('requests.Session.get', return_value=MockResponse(status_code=500)): with pytest.raises( Exception, match=r'^The Vault endpoint `{}.+?` returned 500$'.format(re.escape(instance['api_url'])) ): @@ -190,7 +190,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) expected_tags = [ @@ -230,7 +230,7 @@ def mock_requests_get(url, *args, 
**kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_service_check(Vault.SERVICE_CHECK_UNSEALED, status=Vault.CRITICAL, count=1) @@ -274,7 +274,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) expected_tags = [ @@ -314,7 +314,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_service_check(Vault.SERVICE_CHECK_INITIALIZED, status=Vault.CRITICAL, count=1) @@ -348,7 +348,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) expected_tags = [ @@ -387,7 +387,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) c.log.debug.assert_called_with( "Detected vault in replication DR secondary mode, skipping Prometheus metric collection." 
@@ -426,7 +426,7 @@ def mock_requests_get(url, *args, **kwargs): metric_collection = 'OpenMetrics' if use_openmetrics else 'Prometheus' - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) c.log.debug.assert_called_with( "Detected vault in replication DR secondary mode but also detected that " @@ -470,7 +470,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) assert not c._skip_dr_metric_collection aggregator.assert_service_check(Vault.SERVICE_CHECK_CONNECT, status=Vault.OK, count=1) @@ -516,7 +516,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) assert len(aggregator.events) > 0 @@ -557,7 +557,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) assert len(aggregator.events) == 0 @@ -583,7 +583,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_metric('vault.is_leader', 1) @@ -609,7 +609,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', 
side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_metric('vault.is_leader', 0) @@ -643,7 +643,7 @@ def mock_requests_get(url, *args, **kwargs): ) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_metric('vault.is_leader', 1) @@ -663,7 +663,7 @@ def mock_requests_get(url, *args, **kwargs): return MockResponse(json_data={'errors': ["Vault is sealed"]}, status_code=503) return requests_get(url, *args, **kwargs) - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): dd_run_check(c) aggregator.assert_metric('vault.is_leader', count=0) diff --git a/vllm/tests/test_unit.py b/vllm/tests/test_unit.py index a7b4cc136cb6b..2f75885a0c032 100644 --- a/vllm/tests/test_unit.py +++ b/vllm/tests/test_unit.py @@ -23,7 +23,7 @@ def test_check_vllm(dd_run_check, aggregator, datadog_agent, instance): MockResponse(file_path=get_fixture_path("vllm_version.json")), ] - with mock.patch('requests.get', side_effect=mock_responses): + with mock.patch('requests.Session.get', side_effect=mock_responses): dd_run_check(check) for metric in METRICS_MOCK: @@ -47,7 +47,7 @@ def test_check_vllm_w_ray_prefix(dd_run_check, aggregator, datadog_agent, ray_in MockResponse(file_path=get_fixture_path("vllm_version.json")), ] - with mock.patch('requests.get', side_effect=mock_responses): + with mock.patch('requests.Session.get', side_effect=mock_responses): dd_run_check(check) for metric in METRICS_MOCK: @@ -81,7 +81,7 @@ def test_emits_critical_openemtrics_service_check_when_service_is_down( """ mock_http_response(status_code=404) check = vLLMCheck("vllm", {}, [instance]) - with 
pytest.raises(Exception, match="requests.exceptions.HTTPError"): + with pytest.raises(Exception, match='requests.Session.exceptions.HTTPError"): dd_run_check(check) aggregator.assert_all_metrics_covered() diff --git a/voltdb/tests/test_integration.py b/voltdb/tests/test_integration.py index 3aff8ab036f10..985094e4e4a59 100644 --- a/voltdb/tests/test_integration.py +++ b/voltdb/tests/test_integration.py @@ -74,7 +74,7 @@ def test_http_error(self, aggregator, instance): # type: (AggregatorStub, Instance) -> None check = VoltDBCheck('voltdb', {}, [instance]) - with mock.patch('requests.get', side_effect=requests.RequestException('Something failed')): + with mock.patch('requests.Session.get', side_effect=requests.RequestException('Something failed')): error = check.run() assert 'Something failed' in error @@ -88,7 +88,7 @@ def test_http_response_error(self, aggregator, instance): resp = requests.Response() resp.status_code = 503 - with mock.patch('requests.get', return_value=resp): + with mock.patch('requests.Session.get', return_value=resp): error = check.run() assert '503 Server Error' in error diff --git a/vsphere/tests/test_check.py b/vsphere/tests/test_check.py index 38814dafd75d0..8b5fa250bb0c9 100644 --- a/vsphere/tests/test_check.py +++ b/vsphere/tests/test_check.py @@ -282,7 +282,7 @@ def test_continue_if_tag_collection_fail(aggregator, dd_run_check, realtime_inst check = VSphereCheck('vsphere', {}, [realtime_instance]) check.log = MagicMock() - with mock.patch('requests.post', side_effect=Exception, autospec=True): + with mock.patch('requests.Session.post', side_effect=Exception, autospec=True): dd_run_check(check) aggregator.assert_metric('vsphere.cpu.usage.avg', tags=['vcenter_server:FAKE'], hostname='10.0.0.104') From 5eecd4d03a60907778d09d06226aff287ba89e9e Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 1 Jul 2025 18:18:48 +0200 Subject: [PATCH 005/100] Fix airflow unit tests --- airflow/tests/test_unit.py | 10 ++++++---- 1 file changed, 6 
insertions(+), 4 deletions(-) diff --git a/airflow/tests/test_unit.py b/airflow/tests/test_unit.py index 17427ba5414b3..7b69855fdde2a 100644 --- a/airflow/tests/test_unit.py +++ b/airflow/tests/test_unit.py @@ -31,10 +31,11 @@ def test_service_checks_healthy_exp(aggregator, json_resp, expected_healthy_stat check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value=None): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + mock_session = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=mock_session): mock_resp = mock.MagicMock(status_code=200) mock_resp.json.side_effect = [json_resp] - req.get.return_value = mock_resp + mock_session.get.return_value = mock_resp check.check(None) @@ -59,13 +60,14 @@ def test_service_checks_healthy_stable( check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + mock_session = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=mock_session): mock_resp = mock.MagicMock(status_code=200) mock_resp.json.side_effect = [ {'metadatabase': {'status': metadb_status}, 'scheduler': {'status': scheduler_status}}, {'status': 'OK'}, ] - req.get.return_value = mock_resp + mock_session.get.return_value = mock_resp check.check(None) From c8891d5731c042268551184da094714848e00f42 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 27 May 2025 18:33:28 +0200 Subject: [PATCH 006/100] Run formatter --- datadog_checks_base/tests/base/utils/http/test_authtoken.py | 4 +++- vllm/tests/test_unit.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_authtoken.py 
b/datadog_checks_base/tests/base/utils/http/test_authtoken.py index 5e50d7cda1221..fd1464bb08dcd 100644 --- a/datadog_checks_base/tests/base/utils/http/test_authtoken.py +++ b/datadog_checks_base/tests/base/utils/http/test_authtoken.py @@ -617,7 +617,9 @@ def auth(*args, **kwargs): return MockResponse(status_code=404) http = RequestsWrapper(instance, init_config) - with mock.patch('requests.Session.post', side_effect=login), mock.patch('requests.Session.get', side_effect=auth): + with mock.patch('requests.Session.post', side_effect=login), mock.patch( + 'requests.Session.get', side_effect=auth + ): http.get('https://leader.mesos/service/some-service') diff --git a/vllm/tests/test_unit.py b/vllm/tests/test_unit.py index 2f75885a0c032..f6aa9f5cca85f 100644 --- a/vllm/tests/test_unit.py +++ b/vllm/tests/test_unit.py @@ -81,7 +81,7 @@ def test_emits_critical_openemtrics_service_check_when_service_is_down( """ mock_http_response(status_code=404) check = vLLMCheck("vllm", {}, [instance]) - with pytest.raises(Exception, match='requests.Session.exceptions.HTTPError"): + with pytest.raises(Exception, match='requests.exceptions.HTTPError'): dd_run_check(check) aggregator.assert_all_metrics_covered() From 9c7a16a92dd2d0623618c8f85f00fd67266ce4f1 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 28 May 2025 12:01:44 +0200 Subject: [PATCH 007/100] Add tls_ciphers to TLS Remote fetch_intermediate_certs --- tls/datadog_checks/tls/tls_remote.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tls/datadog_checks/tls/tls_remote.py b/tls/datadog_checks/tls/tls_remote.py index b1ec3398519be..34430a7fd7fab 100644 --- a/tls/datadog_checks/tls/tls_remote.py +++ b/tls/datadog_checks/tls/tls_remote.py @@ -178,8 +178,16 @@ def fetch_intermediate_certs(self): with sock: try: - context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) + context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_CLIENT) context.verify_mode = ssl.CERT_NONE + ciphers = 
self.agent_check.tls_context_wrapper.config.get('tls_ciphers') + if ciphers: + if 'ALL' in ciphers: + updated_ciphers = "ALL" + else: + updated_ciphers = ":".join(ciphers) + + context.set_ciphers(updated_ciphers) with context.wrap_socket(sock, server_hostname=self.agent_check._server_hostname) as secure_sock: der_cert = secure_sock.getpeercert(binary_form=True) From b70def03e35f7c0fb12f4f0cfdf7967e73e35ce7 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 4 Jun 2025 16:02:57 +0200 Subject: [PATCH 008/100] Rework Adapter class --- .../datadog_checks/base/utils/http.py | 39 +++++++++++++------ .../base/utils/http/test_tls_and_certs.py | 6 +-- 2 files changed, 30 insertions(+), 15 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index d7d51b9764daf..dc099d3e30b4e 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -118,7 +118,7 @@ UDS_SCHEME = 'unix' -class HTTPAdapterWrapper(requests.adapters.HTTPAdapter): +class TlsContextAdapter(requests.adapters.HTTPAdapter): """ HTTPS adapter that uses TlsContextWrapper to create SSL contexts. This ensures consistent TLS configuration across all HTTPS requests. 
@@ -126,12 +126,32 @@ class HTTPAdapterWrapper(requests.adapters.HTTPAdapter): def __init__(self, tls_context_wrapper, **kwargs): self.tls_context_wrapper = tls_context_wrapper - super(HTTPAdapterWrapper, self).__init__() + super(TlsContextAdapter, self).__init__() def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): # Use the TLS context from TlsContextWrapper pool_kwargs['ssl_context'] = self.tls_context_wrapper.tls_context - return super(HTTPAdapterWrapper, self).init_poolmanager(connections, maxsize, block=block, **pool_kwargs) + return super(TlsContextAdapter, self).init_poolmanager(connections, maxsize, block=block, **pool_kwargs) + + def cert_verify(self, conn, url, verify, cert): + """ + This method is overridden to ensure that the SSL context + is configured on the integration side. + """ + pass + + def build_connection_pool_key_attributes(self, request, verify, cert=None): + """ + This method is overridden according to the requests library's + expectations to ensure that the custom SSL context is passed to urllib3. + """ + # See: https://github.com/psf/requests/blob/7341690e842a23cf18ded0abd9229765fa88c4e2/src/requests/adapters.py#L419-L423 + host_params, _ = super(TlsContextAdapter, self).build_connection_pool_key_attributes(request, verify, cert) + context = self.context + if verify != (self.context.verify_mode == ssl.CERT_REQUIRED): + # TODO: Handle the case where `verify` is not the same as the context's verify mode. 
+ context = ssl.create_default_context() + return host_params, {"ssl_context":context} class ResponseWrapper(ObjectProxy): @@ -412,11 +432,6 @@ def _request(self, method, url, options): new_options['headers'] = new_options['headers'].copy() new_options['headers'].update(extra_headers) - if new_options['verify'] != self.tls_context_wrapper.config['tls_verify']: - # The verify option needs to be synchronized - self.tls_context_wrapper.config['tls_verify'] = new_options['verify'] - self.tls_context_wrapper.refresh_tls_context() - if is_uds_url(url): persist = True # UDS support is only enabled on the shared session. url = quote_uds_url(url) @@ -463,7 +478,7 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per session = requests.Session() for option, value in self.options.items(): setattr(session, option, value) - certadapter = HTTPAdapterWrapper(self.tls_context_wrapper) + certadapter = TlsContextAdapter(self.tls_context_wrapper) session.mount(url, certadapter) else: session = self.session @@ -568,15 +583,15 @@ def create_session(self): session = requests.Session() # Use TlsContextHTTPSAdapter for consistent TLS configuration - https_adapter = HTTPAdapterWrapper(self.tls_context_wrapper) + https_adapter = TlsContextAdapter(self.tls_context_wrapper) # Enables HostHeaderSSLAdapter if needed # https://toolbelt.readthedocs.io/en/latest/adapters.html#hostheaderssladapter if self.tls_use_host_header: # Create a combined adapter that supports both TLS context and host headers - class TlsContextHostHeaderAdapter(HTTPAdapterWrapper, _http_utils.HostHeaderSSLAdapter): + class TlsContextHostHeaderAdapter(TlsContextAdapter, _http_utils.HostHeaderSSLAdapter): def __init__(self, tls_context_wrapper, **kwargs): - HTTPAdapterWrapper.__init__(self, tls_context_wrapper, **kwargs) + TlsContextAdapter.__init__(self, tls_context_wrapper, **kwargs) _http_utils.HostHeaderSSLAdapter.__init__(self, **kwargs) def init_poolmanager(self, connections, maxsize, 
block=False, **pool_kwargs): diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index da5a7cd4cd936..3eb84d51d9d50 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -8,7 +8,7 @@ import pytest from requests.exceptions import SSLError -from datadog_checks.base.utils.http import HTTPAdapterWrapper, RequestsWrapper +from datadog_checks.base.utils.http import TlsContextAdapter, RequestsWrapper from datadog_checks.base.utils.tls import TlsContextWrapper pytestmark = [pytest.mark.unit] @@ -288,7 +288,7 @@ def test_requests_wrapper_creates_tls_context_wrapper(self): assert isinstance(http.tls_context_wrapper, TlsContextWrapper) def test_session_uses_tls_context_adapter(self): - """Test that the session uses HTTPAdapterWrapper for consistent TLS configuration.""" + """Test that the session uses TlsContextAdapter for consistent TLS configuration.""" instance = {'tls_verify': True} init_config = {} http = RequestsWrapper(instance, init_config) @@ -296,7 +296,7 @@ def test_session_uses_tls_context_adapter(self): session = http.session https_adapter = session.get_adapter('https://example.com') - assert isinstance(https_adapter, HTTPAdapterWrapper) + assert isinstance(https_adapter, TlsContextAdapter) assert https_adapter.tls_context_wrapper is http.tls_context_wrapper def test_tls_ciphers_applied_consistently(self): From 370a5d63e139a2df7311e430c3c8fa27a2b830cf Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 4 Jun 2025 16:13:19 +0200 Subject: [PATCH 009/100] Revert TLS protocol change --- datadog_checks_base/datadog_checks/base/utils/tls.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index 384177b114672..8dec069db338d 100644 --- 
a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -105,12 +105,10 @@ def _create_tls_context(self): # https://docs.python.org/3/library/ssl.html#ssl.SSLContext # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT - context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_CLIENT) + context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname - context.check_hostname = ( - False if not self.config['tls_verify'] else self.config.get('tls_validate_hostname', True) - ) + context.check_hostname = self.config['tls_verify'] and self.config.get('tls_validate_hostname', True) # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.verify_mode context.verify_mode = ssl.CERT_REQUIRED if self.config['tls_verify'] else ssl.CERT_NONE From ebd64ed74137e5792d2812bdfeb189af0cdeb270 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 5 Jun 2025 15:18:38 +0200 Subject: [PATCH 010/100] Fix error in TlsContextAdapter --- datadog_checks_base/datadog_checks/base/utils/http.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index dc099d3e30b4e..3b193d648f6bc 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -147,7 +147,7 @@ def build_connection_pool_key_attributes(self, request, verify, cert=None): """ # See: https://github.com/psf/requests/blob/7341690e842a23cf18ded0abd9229765fa88c4e2/src/requests/adapters.py#L419-L423 host_params, _ = super(TlsContextAdapter, self).build_connection_pool_key_attributes(request, verify, cert) - context = self.context + context = self.tls_context_wrapper.tls_context if verify != (self.context.verify_mode == ssl.CERT_REQUIRED): # TODO: Handle the case where `verify` is not the same as the context's verify 
mode. context = ssl.create_default_context() From a0f840163ed1920728a1b9df9556e9acbdd2ccd6 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 12:21:59 +0200 Subject: [PATCH 011/100] Stop using TlsContextWrapper in RequestsWrapper --- .../datadog_checks/base/utils/http.py | 108 +++++++++++------- .../datadog_checks/base/utils/tls.py | 98 ++++++++-------- .../base/utils/http/test_tls_and_certs.py | 36 +++--- 3 files changed, 133 insertions(+), 109 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 3b193d648f6bc..6b2623e499ba6 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -22,7 +22,7 @@ from datadog_checks.base.agent import datadog_agent from datadog_checks.base.utils import _http_utils -from datadog_checks.base.utils.tls import TlsContextWrapper +from datadog_checks.base.utils.tls import create_tls_context from ..config import is_affirmative from ..errors import ConfigurationError @@ -120,17 +120,16 @@ class TlsContextAdapter(requests.adapters.HTTPAdapter): """ - HTTPS adapter that uses TlsContextWrapper to create SSL contexts. + HTTPS adapter that uses SSL contexts. This ensures consistent TLS configuration across all HTTPS requests. 
""" - def __init__(self, tls_context_wrapper, **kwargs): - self.tls_context_wrapper = tls_context_wrapper + def __init__(self, tls_context, **kwargs): + self.tls_context = tls_context super(TlsContextAdapter, self).__init__() def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): - # Use the TLS context from TlsContextWrapper - pool_kwargs['ssl_context'] = self.tls_context_wrapper.tls_context + pool_kwargs['ssl_context'] = self.tls_context return super(TlsContextAdapter, self).init_poolmanager(connections, maxsize, block=block, **pool_kwargs) def cert_verify(self, conn, url, verify, cert): @@ -147,11 +146,7 @@ def build_connection_pool_key_attributes(self, request, verify, cert=None): """ # See: https://github.com/psf/requests/blob/7341690e842a23cf18ded0abd9229765fa88c4e2/src/requests/adapters.py#L419-L423 host_params, _ = super(TlsContextAdapter, self).build_connection_pool_key_attributes(request, verify, cert) - context = self.tls_context_wrapper.tls_context - if verify != (self.context.verify_mode == ssl.CERT_REQUIRED): - # TODO: Handle the case where `verify` is not the same as the context's verify mode. 
- context = ssl.create_default_context() - return host_params, {"ssl_context":context} + return host_params, {"ssl_context":self.tls_context} class ResponseWrapper(ObjectProxy): @@ -191,7 +186,8 @@ class RequestsWrapper(object): 'auth_token_handler', 'request_size', 'tls_protocols_allowed', - 'tls_context_wrapper', + 'tls_config', + 'tls_context' ) def __init__(self, instance, init_config, remapper=None, logger=None, session=None): @@ -387,8 +383,9 @@ def __init__(self, instance, init_config, remapper=None, logger=None, session=No if config['kerberos_cache']: self.request_hooks.append(lambda: handle_kerberos_cache(config['kerberos_cache'])) - # Create TLS context wrapper for consistent TLS configuration - self.tls_context_wrapper = TlsContextWrapper(config, remapper) + # Create TLS context for consistent TLS configuration + self.tls_context = create_tls_context(config) + self.tls_config = {key: value for key, value in config.items() if key.startswith('tls_')} def get(self, url, **options): return self._request('get', url, options) @@ -441,11 +438,17 @@ def _request(self, method, url, options): with ExitStack() as stack: for hook in self.request_hooks: stack.enter_context(hook()) + + new_context = self._get_new_context(url, new_options) if persist: - request_method = getattr(self.session, method) + session = self.session + if new_context is not None: + # If the context has changed, we need to create a new adapter + certadapter = TlsContextAdapter(new_context) + session.mount(url, certadapter) else: - # Create a new session for non-persistent requests - request_method = getattr(self.create_session(), method) + session = self.create_session(tls_context=new_context) + request_method = getattr(session, method) if self.auth_token_handler: try: @@ -460,6 +463,35 @@ def _request(self, method, url, options): return ResponseWrapper(response, self.request_size) + def _get_new_context(self, url, new_options): + """ + Creates a new TLS context if the new request options differ 
from the default ones. + """ + # If `verify` or `cert` options differ from the defaults, a new adapter is created. + if new_options.get('verify') == self.options['verify'] and new_options.get('cert') == self.options['cert']: + return + # If the URL is not HTTPS, no need to update the TLS context. + if not url.startswith('https'): + return + + request_overrides = {} + if new_options.get('verify') != self.options['verify']: + if new_options.get('verify') is True: + request_overrides["tls_verify"] = True + elif new_options.get('verify') is False: + request_overrides["tls_verify"] = False + elif isinstance(new_options.get('verify'), str): + request_overrides["tls_verify"] = True + request_overrides["tls_ca_cert"] = new_options.get('verify') + if new_options.get('cert') != self.options['cert']: + if isinstance(new_options.get('cert'), str): + request_overrides["tls_cert"] = new_options.get('cert') + elif isinstance(new_options.get('cert'), tuple) and len(new_options.get('cert')) == 2: + request_overrides["tls_cert"] = new_options.get('cert')[0] + request_overrides["tls_private_key"] = new_options.get('cert')[1] + + return create_tls_context(self.tls_config, overrides=request_overrides) + def make_request_aia_chasing(self, request_method, method, url, new_options, persist): try: response = request_method(url, **new_options) @@ -471,17 +503,14 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per certs = self.fetch_intermediate_certs(hostname, port) if not certs: raise e - self.tls_context_wrapper.config['tls_ca_cert'] = certs - self.tls_context_wrapper.refresh_tls_context() + new_context = create_tls_context(self.tls_config, overrides={'tls_ca_cert': certs}) # retry the connection via session object if not persist: - session = requests.Session() - for option, value in self.options.items(): - setattr(session, option, value) - certadapter = TlsContextAdapter(self.tls_context_wrapper) - session.mount(url, certadapter) + session = 
self.create_session(new_context) else: session = self.session + certadapter = TlsContextAdapter(new_context) + session.mount(url, certadapter) request_method = getattr(session, method) response = request_method(url, **new_options) return response @@ -509,18 +538,7 @@ def fetch_intermediate_certs(self, hostname, port=443): with sock: try: - context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) - # Override verify mode for intermediate cert discovery - context.verify_mode = ssl.CERT_NONE - # Set the ciphers - ciphers = self.tls_context_wrapper.config.get('tls_ciphers') - if ciphers: - if 'ALL' in ciphers: - updated_ciphers = "ALL" - else: - updated_ciphers = ":".join(ciphers) - - context.set_ciphers(updated_ciphers) + context = create_tls_context(self.tls_config, overrides={'tls_verify': False}) with context.wrap_socket(sock, server_hostname=hostname) as secure_sock: der_cert = secure_sock.getpeercert(binary_form=True) @@ -579,29 +597,31 @@ def load_intermediate_certs(self, der_cert, certs): self.load_intermediate_certs(intermediate_cert, certs) return certs - def create_session(self): + def create_session(self, tls_context=None): session = requests.Session() - - # Use TlsContextHTTPSAdapter for consistent TLS configuration - https_adapter = TlsContextAdapter(self.tls_context_wrapper) + context = self.tls_context if tls_context is None else tls_context # Enables HostHeaderSSLAdapter if needed # https://toolbelt.readthedocs.io/en/latest/adapters.html#hostheaderssladapter if self.tls_use_host_header: # Create a combined adapter that supports both TLS context and host headers class TlsContextHostHeaderAdapter(TlsContextAdapter, _http_utils.HostHeaderSSLAdapter): - def __init__(self, tls_context_wrapper, **kwargs): - TlsContextAdapter.__init__(self, tls_context_wrapper, **kwargs) + def __init__(self, tls_context, **kwargs): + TlsContextAdapter.__init__(self, tls_context, **kwargs) _http_utils.HostHeaderSSLAdapter.__init__(self, **kwargs) def init_poolmanager(self, 
connections, maxsize, block=False, **pool_kwargs): # Use TLS context from wrapper - pool_kwargs['ssl_context'] = self.tls_context_wrapper.tls_context + pool_kwargs['ssl_context'] = self.tls_context return _http_utils.HostHeaderSSLAdapter.init_poolmanager( self, connections, maxsize, block=block, **pool_kwargs ) - https_adapter = TlsContextHostHeaderAdapter(self.tls_context_wrapper) + https_adapter = TlsContextHostHeaderAdapter(context) + else: + # Use TlsContextHTTPSAdapter for consistent TLS configuration + https_adapter = TlsContextAdapter(context) + session.mount('https://', https_adapter) diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index 8dec069db338d..c84d27eb3aa39 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -32,11 +32,58 @@ } +def create_tls_context(config, overrides=None): + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext + # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT + context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) + + if overrides is not None: + config = config.copy() + config.update(overrides) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname + context.check_hostname = config['tls_verify'] and config.get('tls_validate_hostname', True) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.verify_mode + context.verify_mode = ssl.CERT_REQUIRED if config['tls_verify'] else ssl.CERT_NONE + + ciphers = config.get('tls_ciphers') + if ciphers: + if 'ALL' in ciphers: + updated_ciphers = "ALL" + else: + updated_ciphers = ":".join(ciphers) + + context.set_ciphers(updated_ciphers) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs + ca_cert = config['tls_ca_cert'] + if ca_cert: + ca_cert = os.path.expanduser(ca_cert) 
+ if os.path.isdir(ca_cert): + context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None) + else: + context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None) + else: + context.load_default_certs(ssl.Purpose.SERVER_AUTH) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain + client_cert, client_key = config['tls_cert'], config['tls_private_key'] + client_key_pass = config['tls_private_key_password'] + if client_key: + client_key = os.path.expanduser(client_key) + if client_cert: + client_cert = os.path.expanduser(client_cert) + context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) + + return context + + class TlsContextWrapper(object): __slots__ = ('logger', 'config', 'tls_context') def __init__(self, instance, remapper=None, overrides=None): - # type: (InstanceType, Dict[AnyStr, Dict[AnyStr, Any]], Dict[AnyStr, Any]) -> None default_fields = dict(STANDARD_FIELDS) # Override existing config options if there exists any overrides @@ -98,53 +145,8 @@ def __init__(self, instance, remapper=None, overrides=None): del config[unique_name] self.config = config - self.tls_context = self._create_tls_context() - - def _create_tls_context(self): - # type: () -> ssl.SSLContext - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext - # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT - context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname - context.check_hostname = self.config['tls_verify'] and self.config.get('tls_validate_hostname', True) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.verify_mode - context.verify_mode = ssl.CERT_REQUIRED if self.config['tls_verify'] else ssl.CERT_NONE - - ciphers = self.config.get('tls_ciphers') - if ciphers: - if 'ALL' in ciphers: - updated_ciphers = "ALL" - else: - updated_ciphers = ":".join(ciphers) - - context.set_ciphers(updated_ciphers) - 
- # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs - ca_cert = self.config['tls_ca_cert'] - if ca_cert: - ca_cert = os.path.expanduser(ca_cert) - if os.path.isdir(ca_cert): - context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None) - else: - context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None) - else: - context.load_default_certs(ssl.Purpose.SERVER_AUTH) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain - client_cert, client_key = self.config['tls_cert'], self.config['tls_private_key'] - client_key_pass = self.config['tls_private_key_password'] - if client_key: - client_key = os.path.expanduser(client_key) - if client_cert: - client_cert = os.path.expanduser(client_cert) - context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) - - return context + self.tls_context = create_tls_context(self.config) def refresh_tls_context(self): # type: () -> None - self.tls_context = self._create_tls_context() + self.tls_context = create_tls_context(self.config) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 3eb84d51d9d50..5be0c931fe540 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -278,14 +278,14 @@ def test_fetch_intermediate_certs_tls_ciphers(self): class TestTlsContext: """Test the core TLS context functionality.""" - def test_requests_wrapper_creates_tls_context_wrapper(self): - """Test that RequestsWrapper creates a TlsContextWrapper instance.""" + def test_requests_wrapper_creates_tls_context(self): + """Test that RequestsWrapper creates an SSLContext instance.""" instance = {'tls_verify': True} init_config = {} http = RequestsWrapper(instance, init_config) - assert 
hasattr(http, 'tls_context_wrapper') - assert isinstance(http.tls_context_wrapper, TlsContextWrapper) + assert hasattr(http, 'tls_context') + assert isinstance(http.tls_context, ssl.SSLContext) def test_session_uses_tls_context_adapter(self): """Test that the session uses TlsContextAdapter for consistent TLS configuration.""" @@ -297,16 +297,16 @@ def test_session_uses_tls_context_adapter(self): https_adapter = session.get_adapter('https://example.com') assert isinstance(https_adapter, TlsContextAdapter) - assert https_adapter.tls_context_wrapper is http.tls_context_wrapper + assert https_adapter.tls_context is http.tls_context def test_tls_ciphers_applied_consistently(self): - """Test that tls_ciphers are applied consistently through TlsContextWrapper.""" + """Test that tls_ciphers are applied consistently.""" instance = {'tls_verify': True, 'tls_ciphers': ['TLS_RSA_WITH_AES_128_CBC_SHA', 'TLS_RSA_WITH_AES_256_CBC_SHA']} init_config = {} http = RequestsWrapper(instance, init_config) # Verify the TLS context wrapper has the cipher configuration - assert http.tls_context_wrapper.config['tls_ciphers'] == [ + assert http.tls_config['tls_ciphers'] == [ 'TLS_RSA_WITH_AES_128_CBC_SHA', 'TLS_RSA_WITH_AES_256_CBC_SHA', ] @@ -314,13 +314,15 @@ def test_tls_ciphers_applied_consistently(self): # Verify the session adapter uses the same TLS context session = http.session https_adapter = session.get_adapter('https://example.com') - assert https_adapter.tls_context_wrapper.tls_context is http.tls_context_wrapper.tls_context + + assert isinstance(https_adapter, TlsContextAdapter) + assert https_adapter.tls_context is http.tls_context # Verify that the ciphers are set correctly in the TLS context for cipher in instance['tls_ciphers']: # At least one entry's name field should match the OpenSSL name assert any( IANA_TO_OPENSSL_NAME.get(cipher) in c.get('name') - for c in https_adapter.tls_context_wrapper.tls_context.get_ciphers() + for c in https_adapter.tls_context.get_ciphers() ) 
def test_host_header_compatibility(self): @@ -338,8 +340,8 @@ def test_host_header_compatibility(self): https_adapter = session.get_adapter('https://example.com') # Should be the combined adapter that supports both TLS context and host headers - assert hasattr(https_adapter, 'tls_context_wrapper') - assert https_adapter.tls_context_wrapper is http.tls_context_wrapper + assert hasattr(https_adapter, 'tls_context') + assert https_adapter.tls_context is http.tls_context assert 'TlsContextHostHeaderAdapter' in str(type(https_adapter)) def test_remapper_functionality_preserved(self): @@ -355,15 +357,15 @@ def test_remapper_functionality_preserved(self): init_config = {} # Mock the TLS context creation to avoid file operations - with mock.patch.object(TlsContextWrapper, '_create_tls_context') as mock_create_context: + with mock.patch('datadog_checks.base.utils.http.create_tls_context') as mock_create_context: mock_context = mock.MagicMock() mock_create_context.return_value = mock_context http = RequestsWrapper(instance, init_config, remapper=remapper) # Verify remapping worked - disable_ssl_validation: True should become tls_verify: False - assert http.tls_context_wrapper.config['tls_verify'] is False - assert http.tls_context_wrapper.config['tls_private_key'] == '/path/to/key.pem' + assert http.tls_config['tls_verify'] is False + assert http.tls_config['tls_private_key'] == '/path/to/key.pem' def test_backward_compatibility_maintained(self): """Test that all existing TLS configuration options still work.""" @@ -380,7 +382,7 @@ def test_backward_compatibility_maintained(self): init_config = {} # Mock the TLS context creation to avoid file operations - with mock.patch.object(TlsContextWrapper, '_create_tls_context') as mock_create_context: + with mock.patch('datadog_checks.base.utils.http.create_tls_context') as mock_create_context: mock_context = mock.MagicMock() mock_create_context.return_value = mock_context @@ -394,5 +396,5 @@ def 
test_backward_compatibility_maintained(self): assert http.tls_protocols_allowed == ['TLSv1.2', 'TLSv1.3'] # Verify TLS context wrapper has the right config - assert http.tls_context_wrapper.config['tls_verify'] is True - assert http.tls_context_wrapper.config['tls_ciphers'] == ['TLS_RSA_WITH_AES_128_CBC_SHA'] + assert http.tls_config['tls_verify'] is True + assert http.tls_config['tls_ciphers'] == ['TLS_RSA_WITH_AES_128_CBC_SHA'] From c8fe5e0e8f7ea7db75fe0996048a9b9930131c17 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 12:25:18 +0200 Subject: [PATCH 012/100] Run formatter --- datadog_checks_base/datadog_checks/base/utils/http.py | 8 +++----- .../tests/base/utils/http/test_tls_and_certs.py | 6 ++---- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 6b2623e499ba6..e9dea19e07f35 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -6,7 +6,6 @@ import logging import os import re -import ssl import warnings from contextlib import ExitStack, contextmanager from copy import deepcopy @@ -141,12 +140,12 @@ def cert_verify(self, conn, url, verify, cert): def build_connection_pool_key_attributes(self, request, verify, cert=None): """ - This method is overridden according to the requests library's + This method is overridden according to the requests library's expectations to ensure that the custom SSL context is passed to urllib3. 
""" # See: https://github.com/psf/requests/blob/7341690e842a23cf18ded0abd9229765fa88c4e2/src/requests/adapters.py#L419-L423 host_params, _ = super(TlsContextAdapter, self).build_connection_pool_key_attributes(request, verify, cert) - return host_params, {"ssl_context":self.tls_context} + return host_params, {"ssl_context": self.tls_context} class ResponseWrapper(ObjectProxy): @@ -187,7 +186,7 @@ class RequestsWrapper(object): 'request_size', 'tls_protocols_allowed', 'tls_config', - 'tls_context' + 'tls_context', ) def __init__(self, instance, init_config, remapper=None, logger=None, session=None): @@ -622,7 +621,6 @@ def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): # Use TlsContextHTTPSAdapter for consistent TLS configuration https_adapter = TlsContextAdapter(context) - session.mount('https://', https_adapter) # Enable Unix Domain Socket (UDS) support. diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 5be0c931fe540..709882ec1eb6b 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -8,8 +8,7 @@ import pytest from requests.exceptions import SSLError -from datadog_checks.base.utils.http import TlsContextAdapter, RequestsWrapper -from datadog_checks.base.utils.tls import TlsContextWrapper +from datadog_checks.base.utils.http import RequestsWrapper, TlsContextAdapter pytestmark = [pytest.mark.unit] @@ -321,8 +320,7 @@ def test_tls_ciphers_applied_consistently(self): for cipher in instance['tls_ciphers']: # At least one entry's name field should match the OpenSSL name assert any( - IANA_TO_OPENSSL_NAME.get(cipher) in c.get('name') - for c in https_adapter.tls_context.get_ciphers() + IANA_TO_OPENSSL_NAME.get(cipher) in c.get('name') for c in https_adapter.tls_context.get_ciphers() ) def test_host_header_compatibility(self): From 
f07f71623164cf99ce149f8a6c79f6f9b208f7ac Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 16:32:57 +0200 Subject: [PATCH 013/100] Add test for ciphers, switch naming to SSL and fix hole in persistent session logic --- .../datadog_checks/base/utils/http.py | 108 +++++++++++++----- .../datadog_checks/base/utils/tls.py | 48 -------- .../base/utils/http/test_tls_and_certs.py | 34 ++++-- 3 files changed, 107 insertions(+), 83 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index e9dea19e07f35..c50d53adf5530 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -6,6 +6,7 @@ import logging import os import re +import ssl import warnings from contextlib import ExitStack, contextmanager from copy import deepcopy @@ -21,7 +22,6 @@ from datadog_checks.base.agent import datadog_agent from datadog_checks.base.utils import _http_utils -from datadog_checks.base.utils.tls import create_tls_context from ..config import is_affirmative from ..errors import ConfigurationError @@ -116,20 +116,70 @@ UDS_SCHEME = 'unix' +def create_ssl_context(config, overrides=None): + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext + # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT + context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) -class TlsContextAdapter(requests.adapters.HTTPAdapter): + if overrides is not None: + config = config.copy() + config.update(overrides) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname + context.check_hostname = config['tls_verify'] and config.get('tls_validate_hostname', True) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.verify_mode + context.verify_mode = ssl.CERT_REQUIRED if config['tls_verify'] else ssl.CERT_NONE + + ciphers = config.get('tls_ciphers') + if ciphers: + if 'ALL' in ciphers: + updated_ciphers = 
"ALL" + else: + updated_ciphers = ":".join(ciphers) + + context.set_ciphers(updated_ciphers) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs + ca_cert = config['tls_ca_cert'] + if ca_cert: + ca_cert = os.path.expanduser(ca_cert) + if os.path.isdir(ca_cert): + context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None) + else: + context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None) + else: + context.load_default_certs(ssl.Purpose.SERVER_AUTH) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain + client_cert, client_key = config['tls_cert'], config['tls_private_key'] + client_key_pass = config['tls_private_key_password'] + if client_key: + client_key = os.path.expanduser(client_key) + if client_cert: + client_cert = os.path.expanduser(client_cert) + context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) + + return context + + +class SSLContextAdapter(requests.adapters.HTTPAdapter): """ HTTPS adapter that uses SSL contexts. This ensures consistent TLS configuration across all HTTPS requests. """ - def __init__(self, tls_context, **kwargs): - self.tls_context = tls_context - super(TlsContextAdapter, self).__init__() + def __init__(self, ssl_context, is_custom_context=False, **kwargs): + self.ssl_context = ssl_context + # This is used to determine if the adapter was created with a custom context + # for the purpose of reverting to the default context when needed. 
+ self.is_custom_context = is_custom_context + super(SSLContextAdapter, self).__init__() def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): - pool_kwargs['ssl_context'] = self.tls_context - return super(TlsContextAdapter, self).init_poolmanager(connections, maxsize, block=block, **pool_kwargs) + pool_kwargs['ssl_context'] = self.ssl_context + return super(SSLContextAdapter, self).init_poolmanager(connections, maxsize, block=block, **pool_kwargs) def cert_verify(self, conn, url, verify, cert): """ @@ -144,8 +194,8 @@ def build_connection_pool_key_attributes(self, request, verify, cert=None): expectations to ensure that the custom SSL context is passed to urllib3. """ # See: https://github.com/psf/requests/blob/7341690e842a23cf18ded0abd9229765fa88c4e2/src/requests/adapters.py#L419-L423 - host_params, _ = super(TlsContextAdapter, self).build_connection_pool_key_attributes(request, verify, cert) - return host_params, {"ssl_context": self.tls_context} + host_params, _ = super(SSLContextAdapter, self).build_connection_pool_key_attributes(request, verify, cert) + return host_params, {"ssl_context": self.ssl_context} class ResponseWrapper(ObjectProxy): @@ -186,7 +236,7 @@ class RequestsWrapper(object): 'request_size', 'tls_protocols_allowed', 'tls_config', - 'tls_context', + 'ssl_context', ) def __init__(self, instance, init_config, remapper=None, logger=None, session=None): @@ -383,7 +433,7 @@ def __init__(self, instance, init_config, remapper=None, logger=None, session=No self.request_hooks.append(lambda: handle_kerberos_cache(config['kerberos_cache'])) # Create TLS context for consistent TLS configuration - self.tls_context = create_tls_context(config) + self.ssl_context = create_ssl_context(config) self.tls_config = {key: value for key, value in config.items() if key.startswith('tls_')} def get(self, url, **options): @@ -443,10 +493,16 @@ def _request(self, method, url, options): session = self.session if new_context is not None: # If the 
context has changed, we need to create a new adapter - certadapter = TlsContextAdapter(new_context) + certadapter = SSLContextAdapter(new_context, is_custom_context=True) session.mount(url, certadapter) + else: + current_adapter = session.get_adapter(url) + if isinstance(current_adapter, SSLContextAdapter) and current_adapter.is_custom_context: + # If we are using a custom context, we need to revert to the original context + certadapter = SSLContextAdapter(self.ssl_context) + session.mount(url, certadapter) else: - session = self.create_session(tls_context=new_context) + session = self.create_session(ssl_context=new_context) request_method = getattr(session, method) if self.auth_token_handler: @@ -489,7 +545,7 @@ def _get_new_context(self, url, new_options): request_overrides["tls_cert"] = new_options.get('cert')[0] request_overrides["tls_private_key"] = new_options.get('cert')[1] - return create_tls_context(self.tls_config, overrides=request_overrides) + return create_ssl_context(self.tls_config, overrides=request_overrides) def make_request_aia_chasing(self, request_method, method, url, new_options, persist): try: @@ -502,13 +558,13 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per certs = self.fetch_intermediate_certs(hostname, port) if not certs: raise e - new_context = create_tls_context(self.tls_config, overrides={'tls_ca_cert': certs}) + new_context = create_ssl_context(self.tls_config, overrides={'tls_ca_cert': certs}) # retry the connection via session object if not persist: session = self.create_session(new_context) else: session = self.session - certadapter = TlsContextAdapter(new_context) + certadapter = SSLContextAdapter(new_context) session.mount(url, certadapter) request_method = getattr(session, method) response = request_method(url, **new_options) @@ -537,7 +593,7 @@ def fetch_intermediate_certs(self, hostname, port=443): with sock: try: - context = create_tls_context(self.tls_config, overrides={'tls_verify': 
False}) + context = create_ssl_context(self.tls_config, overrides={'tls_verify': False}) with context.wrap_socket(sock, server_hostname=hostname) as secure_sock: der_cert = secure_sock.getpeercert(binary_form=True) @@ -596,30 +652,30 @@ def load_intermediate_certs(self, der_cert, certs): self.load_intermediate_certs(intermediate_cert, certs) return certs - def create_session(self, tls_context=None): + def create_session(self, ssl_context=None): session = requests.Session() - context = self.tls_context if tls_context is None else tls_context + context = self.ssl_context if ssl_context is None else ssl_context # Enables HostHeaderSSLAdapter if needed # https://toolbelt.readthedocs.io/en/latest/adapters.html#hostheaderssladapter if self.tls_use_host_header: # Create a combined adapter that supports both TLS context and host headers - class TlsContextHostHeaderAdapter(TlsContextAdapter, _http_utils.HostHeaderSSLAdapter): - def __init__(self, tls_context, **kwargs): - TlsContextAdapter.__init__(self, tls_context, **kwargs) + class SSLContextHostHeaderAdapter(SSLContextAdapter, _http_utils.HostHeaderSSLAdapter): + def __init__(self, ssl_context, **kwargs): + SSLContextAdapter.__init__(self, ssl_context, **kwargs) _http_utils.HostHeaderSSLAdapter.__init__(self, **kwargs) def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): # Use TLS context from wrapper - pool_kwargs['ssl_context'] = self.tls_context + pool_kwargs['ssl_context'] = self.ssl_context return _http_utils.HostHeaderSSLAdapter.init_poolmanager( self, connections, maxsize, block=block, **pool_kwargs ) - https_adapter = TlsContextHostHeaderAdapter(context) + https_adapter = SSLContextHostHeaderAdapter(context) else: - # Use TlsContextHTTPSAdapter for consistent TLS configuration - https_adapter = TlsContextAdapter(context) + # Use SSLContextHTTPSAdapter for consistent TLS configuration + https_adapter = SSLContextAdapter(context) session.mount('https://', https_adapter) diff --git 
a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index c84d27eb3aa39..69e36ac1fd5bb 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -32,54 +32,6 @@ } -def create_tls_context(config, overrides=None): - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext - # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT - context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) - - if overrides is not None: - config = config.copy() - config.update(overrides) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname - context.check_hostname = config['tls_verify'] and config.get('tls_validate_hostname', True) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.verify_mode - context.verify_mode = ssl.CERT_REQUIRED if config['tls_verify'] else ssl.CERT_NONE - - ciphers = config.get('tls_ciphers') - if ciphers: - if 'ALL' in ciphers: - updated_ciphers = "ALL" - else: - updated_ciphers = ":".join(ciphers) - - context.set_ciphers(updated_ciphers) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs - ca_cert = config['tls_ca_cert'] - if ca_cert: - ca_cert = os.path.expanduser(ca_cert) - if os.path.isdir(ca_cert): - context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None) - else: - context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None) - else: - context.load_default_certs(ssl.Purpose.SERVER_AUTH) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain - client_cert, client_key = config['tls_cert'], config['tls_private_key'] - client_key_pass = config['tls_private_key_password'] - if client_key: - client_key = os.path.expanduser(client_key) - if client_cert: - client_cert = os.path.expanduser(client_cert) - 
context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) - - return context - - class TlsContextWrapper(object): __slots__ = ('logger', 'config', 'tls_context') diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 709882ec1eb6b..07030d1121d16 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -248,7 +248,7 @@ def test_fetch_intermediate_certs(self, port): def test_fetch_intermediate_certs_tls_ciphers(self): """Test that fetch_intermediate_certs uses the correct ciphers.""" - instance = {'tls_verify': True, 'tls_ciphers': ['TLS_RSA_WITH_AES_128_CBC_SHA']} + instance = {'tls_verify': True, 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384']} init_config = {} http = RequestsWrapper(instance, init_config) @@ -264,14 +264,14 @@ def test_fetch_intermediate_certs_tls_ciphers(self): mock_wrapped_socket = mock.MagicMock() mock_context.wrap_socket.return_value.__enter__.return_value = mock_wrapped_socket mock_wrapped_socket.getpeercert.return_value = b'fake_cert_data' - mock_wrapped_socket.version.return_value = 'TLSv1.2' + mock_wrapped_socket.version.return_value = 'TLSv1.3' # Mock the certificate loading to avoid cryptography operations with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.load_intermediate_certs'): http.fetch_intermediate_certs('example.com', 443) # Verify set_ciphers was called with the correct cipher list - mock_context.set_ciphers.assert_called_once_with('TLS_RSA_WITH_AES_128_CBC_SHA') + mock_context.set_ciphers.assert_called_once_with('TLS_RSA_WITH_AES_256_GCM_SHA384') class TestTlsContext: @@ -300,14 +300,14 @@ def test_session_uses_tls_context_adapter(self): def test_tls_ciphers_applied_consistently(self): """Test that tls_ciphers are applied consistently.""" - instance = {'tls_verify': True, 'tls_ciphers': 
['TLS_RSA_WITH_AES_128_CBC_SHA', 'TLS_RSA_WITH_AES_256_CBC_SHA']} + instance = {'tls_verify': True, 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384', 'TLS_RSA_WITH_AES_128_GCM_SHA256']} init_config = {} http = RequestsWrapper(instance, init_config) # Verify the TLS context wrapper has the cipher configuration assert http.tls_config['tls_ciphers'] == [ - 'TLS_RSA_WITH_AES_128_CBC_SHA', - 'TLS_RSA_WITH_AES_256_CBC_SHA', + 'TLS_RSA_WITH_AES_256_GCM_SHA384', + 'TLS_RSA_WITH_AES_128_GCM_SHA256', ] # Verify the session adapter uses the same TLS context @@ -323,13 +323,29 @@ def test_tls_ciphers_applied_consistently(self): IANA_TO_OPENSSL_NAME.get(cipher) in c.get('name') for c in https_adapter.tls_context.get_ciphers() ) + def test_default_tls_ciphers(self): + """Test that default TLS ciphers are applied when none are specified.""" + instance = {'tls_verify': True} + init_config = {} + + # Mock the TLS context creation to avoid file operations + with mock.patch('datadog_checks.base.utils.http.create_tls_context') as mock_create_context: + mock_context = mock.MagicMock() + mock_create_context.return_value = mock_context + RequestsWrapper(instance, init_config) + + # Verify that the default ciphers are set + assert mock_context.set_ciphers.call_count == 1 + assert mock_context.set_ciphers.called_with('ALL') + + def test_host_header_compatibility(self): """Test that host header functionality works with TLS context unification.""" instance = { 'tls_use_host_header': True, 'headers': {'Host': 'custom-host.example.com'}, 'tls_verify': True, - 'tls_ciphers': ['TLS_RSA_WITH_AES_128_CBC_SHA'], + 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384'], } init_config = {} http = RequestsWrapper(instance, init_config) @@ -375,7 +391,7 @@ def test_backward_compatibility_maintained(self): 'tls_validate_hostname': True, 'tls_ignore_warning': False, 'tls_protocols_allowed': ['TLSv1.2', 'TLSv1.3'], - 'tls_ciphers': ['TLS_RSA_WITH_AES_128_CBC_SHA'], + 'tls_ciphers': 
['TLS_RSA_WITH_AES_256_GCM_SHA384'], } init_config = {} @@ -395,4 +411,4 @@ def test_backward_compatibility_maintained(self): # Verify TLS context wrapper has the right config assert http.tls_config['tls_verify'] is True - assert http.tls_config['tls_ciphers'] == ['TLS_RSA_WITH_AES_128_CBC_SHA'] + assert http.tls_config['tls_ciphers'] == ['TLS_RSA_WITH_AES_256_GCM_SHA384'] From 2d535fcd7b65dc782ade9d49dc123632cfd2a3eb Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 16:38:18 +0200 Subject: [PATCH 014/100] Run formatter --- datadog_checks_base/datadog_checks/base/utils/http.py | 1 + datadog_checks_base/datadog_checks/base/utils/tls.py | 7 +++---- .../tests/base/utils/http/test_tls_and_certs.py | 8 +++++--- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index c50d53adf5530..5eebcdf409b80 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -116,6 +116,7 @@ UDS_SCHEME = 'unix' + def create_ssl_context(config, overrides=None): # https://docs.python.org/3/library/ssl.html#ssl.SSLContext # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index 69e36ac1fd5bb..e6fc7643a18dd 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -2,12 +2,11 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import logging -import os -import ssl from copy import deepcopy from typing import TYPE_CHECKING, Any, AnyStr, Dict # noqa: F401 from ..config import is_affirmative +from .http import create_ssl_context if TYPE_CHECKING: from ..types import InstanceType # noqa: F401 @@ -97,8 +96,8 @@ def __init__(self, instance, remapper=None, 
overrides=None): del config[unique_name] self.config = config - self.tls_context = create_tls_context(self.config) + self.tls_context = create_ssl_context(self.config) def refresh_tls_context(self): # type: () -> None - self.tls_context = create_tls_context(self.config) + self.tls_context = create_ssl_context(self.config) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 07030d1121d16..cca84f906c823 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -300,7 +300,10 @@ def test_session_uses_tls_context_adapter(self): def test_tls_ciphers_applied_consistently(self): """Test that tls_ciphers are applied consistently.""" - instance = {'tls_verify': True, 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384', 'TLS_RSA_WITH_AES_128_GCM_SHA256']} + instance = { + 'tls_verify': True, + 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384', 'TLS_RSA_WITH_AES_128_GCM_SHA256'], + } init_config = {} http = RequestsWrapper(instance, init_config) @@ -333,12 +336,11 @@ def test_default_tls_ciphers(self): mock_context = mock.MagicMock() mock_create_context.return_value = mock_context RequestsWrapper(instance, init_config) - + # Verify that the default ciphers are set assert mock_context.set_ciphers.call_count == 1 assert mock_context.set_ciphers.called_with('ALL') - def test_host_header_compatibility(self): """Test that host header functionality works with TLS context unification.""" instance = { From 680ac8e48760109c0df387b46e356405d6f79722 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 16:53:13 +0200 Subject: [PATCH 015/100] Naming --- datadog_checks_base/datadog_checks/base/utils/http.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 
5eebcdf409b80..2c509094218d8 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -171,11 +171,11 @@ class SSLContextAdapter(requests.adapters.HTTPAdapter): This ensures consistent TLS configuration across all HTTPS requests. """ - def __init__(self, ssl_context, is_custom_context=False, **kwargs): + def __init__(self, ssl_context, has_custom_context=False, **kwargs): self.ssl_context = ssl_context # This is used to determine if the adapter was created with a custom context # for the purpose of reverting to the default context when needed. - self.is_custom_context = is_custom_context + self.has_custom_context = has_custom_context super(SSLContextAdapter, self).__init__() def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): @@ -494,11 +494,11 @@ def _request(self, method, url, options): session = self.session if new_context is not None: # If the context has changed, we need to create a new adapter - certadapter = SSLContextAdapter(new_context, is_custom_context=True) + certadapter = SSLContextAdapter(new_context, has_custom_context=True) session.mount(url, certadapter) else: current_adapter = session.get_adapter(url) - if isinstance(current_adapter, SSLContextAdapter) and current_adapter.is_custom_context: + if isinstance(current_adapter, SSLContextAdapter) and current_adapter.has_custom_context: # If we are using a custom context, we need to revert to the original context certadapter = SSLContextAdapter(self.ssl_context) session.mount(url, certadapter) From 5356e429cf96a553257ac86a39dab225621cd877 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 17:41:09 +0200 Subject: [PATCH 016/100] Naming and test fixes --- .../datadog_checks/base/checks/base.py | 2 +- .../base/utils/http/test_tls_and_certs.py | 60 ++++++++----------- 2 files changed, 27 insertions(+), 35 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/checks/base.py 
b/datadog_checks_base/datadog_checks/base/checks/base.py index fee2d288ad77f..1728911961350 100644 --- a/datadog_checks_base/datadog_checks/base/checks/base.py +++ b/datadog_checks_base/datadog_checks/base/checks/base.py @@ -442,7 +442,7 @@ def diagnosis(self) -> Diagnosis: def get_tls_context(self, refresh=False, overrides=None): # type: (bool, Dict[AnyStr, Any]) -> ssl.SSLContext """ - Creates and cache an SSLContext instance based on user configuration. + Creates and caches a TlsContext instance based on user configuration. Note that user configuration can be overridden by using `overrides`. This should only be applied to older integration that manually set config values. diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index cca84f906c823..c6f55c47629fb 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -8,13 +8,13 @@ import pytest from requests.exceptions import SSLError -from datadog_checks.base.utils.http import RequestsWrapper, TlsContextAdapter +from datadog_checks.base.utils.http import RequestsWrapper, SSLContextAdapter pytestmark = [pytest.mark.unit] IANA_TO_OPENSSL_NAME = { - 'TLS_RSA_WITH_AES_128_CBC_SHA': 'AES128-SHA', - 'TLS_RSA_WITH_AES_256_CBC_SHA': 'AES256-SHA', + 'TLS_RSA_WITH_AES_256_GCM_SHA384': 'AES256-GCM-SHA384', + 'TLS_RSA_WITH_AES_128_GCM_SHA256': 'AES128-GCM-SHA256', } @@ -274,20 +274,20 @@ def test_fetch_intermediate_certs_tls_ciphers(self): mock_context.set_ciphers.assert_called_once_with('TLS_RSA_WITH_AES_256_GCM_SHA384') -class TestTlsContext: - """Test the core TLS context functionality.""" +class TestSSLContext: + """Test the core SSL context functionality.""" - def test_requests_wrapper_creates_tls_context(self): + def test_requests_wrapper_creates_ssl_context(self): """Test that RequestsWrapper creates an SSLContext instance.""" instance = 
{'tls_verify': True} init_config = {} http = RequestsWrapper(instance, init_config) - assert hasattr(http, 'tls_context') - assert isinstance(http.tls_context, ssl.SSLContext) + assert hasattr(http, 'ssl_context') + assert isinstance(http.ssl_context, ssl.SSLContext) - def test_session_uses_tls_context_adapter(self): - """Test that the session uses TlsContextAdapter for consistent TLS configuration.""" + def test_session_uses_ssl_context_adapter(self): + """Test that the session uses SSLContextAdapter for consistent TLS configuration.""" instance = {'tls_verify': True} init_config = {} http = RequestsWrapper(instance, init_config) @@ -295,35 +295,29 @@ def test_session_uses_tls_context_adapter(self): session = http.session https_adapter = session.get_adapter('https://example.com') - assert isinstance(https_adapter, TlsContextAdapter) - assert https_adapter.tls_context is http.tls_context + assert isinstance(https_adapter, SSLContextAdapter) + assert https_adapter.ssl_context is http.ssl_context def test_tls_ciphers_applied_consistently(self): """Test that tls_ciphers are applied consistently.""" - instance = { - 'tls_verify': True, - 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384', 'TLS_RSA_WITH_AES_128_GCM_SHA256'], - } + instance = {'tls_verify': True, 'tls_ciphers': list(IANA_TO_OPENSSL_NAME.keys())} init_config = {} http = RequestsWrapper(instance, init_config) # Verify the TLS context wrapper has the cipher configuration - assert http.tls_config['tls_ciphers'] == [ - 'TLS_RSA_WITH_AES_256_GCM_SHA384', - 'TLS_RSA_WITH_AES_128_GCM_SHA256', - ] + assert http.tls_config['tls_ciphers'] == instance['tls_ciphers'] # Verify the session adapter uses the same TLS context session = http.session https_adapter = session.get_adapter('https://example.com') - assert isinstance(https_adapter, TlsContextAdapter) - assert https_adapter.tls_context is http.tls_context + assert isinstance(https_adapter, SSLContextAdapter) + assert https_adapter.ssl_context is http.ssl_context # 
Verify that the ciphers are set correctly in the TLS context for cipher in instance['tls_ciphers']: # At least one entry's name field should match the OpenSSL name assert any( - IANA_TO_OPENSSL_NAME.get(cipher) in c.get('name') for c in https_adapter.tls_context.get_ciphers() + IANA_TO_OPENSSL_NAME.get(cipher) in c.get('name') for c in https_adapter.ssl_context.get_ciphers() ) def test_default_tls_ciphers(self): @@ -331,15 +325,13 @@ def test_default_tls_ciphers(self): instance = {'tls_verify': True} init_config = {} - # Mock the TLS context creation to avoid file operations - with mock.patch('datadog_checks.base.utils.http.create_tls_context') as mock_create_context: - mock_context = mock.MagicMock() - mock_create_context.return_value = mock_context + # Mock the SSLContext creation + with mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers: RequestsWrapper(instance, init_config) # Verify that the default ciphers are set - assert mock_context.set_ciphers.call_count == 1 - assert mock_context.set_ciphers.called_with('ALL') + assert mock_set_ciphers.call_count == 1 + assert mock_set_ciphers.call_args[0][0] == 'ALL' def test_host_header_compatibility(self): """Test that host header functionality works with TLS context unification.""" @@ -356,9 +348,9 @@ def test_host_header_compatibility(self): https_adapter = session.get_adapter('https://example.com') # Should be the combined adapter that supports both TLS context and host headers - assert hasattr(https_adapter, 'tls_context') - assert https_adapter.tls_context is http.tls_context - assert 'TlsContextHostHeaderAdapter' in str(type(https_adapter)) + assert hasattr(https_adapter, 'ssl_context') + assert https_adapter.ssl_context is http.ssl_context + assert 'SSLContextHostHeaderAdapter' in str(type(https_adapter)) def test_remapper_functionality_preserved(self): """Test that config remapping functionality is preserved with TLS context unification.""" @@ -373,7 +365,7 @@ def 
test_remapper_functionality_preserved(self): init_config = {} # Mock the TLS context creation to avoid file operations - with mock.patch('datadog_checks.base.utils.http.create_tls_context') as mock_create_context: + with mock.patch('datadog_checks.base.utils.http.create_ssl_context') as mock_create_context: mock_context = mock.MagicMock() mock_create_context.return_value = mock_context @@ -398,7 +390,7 @@ def test_backward_compatibility_maintained(self): init_config = {} # Mock the TLS context creation to avoid file operations - with mock.patch('datadog_checks.base.utils.http.create_tls_context') as mock_create_context: + with mock.patch('datadog_checks.base.utils.http.create_ssl_context') as mock_create_context: mock_context = mock.MagicMock() mock_create_context.return_value = mock_context From 5942ed65064c73a6a50ef8dec926ce0c7f803476 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 18:02:17 +0200 Subject: [PATCH 017/100] Add unit test for debugging --- .../tests/base/utils/http/test_tls_and_certs.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index c6f55c47629fb..481ea658e0159 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -298,6 +298,11 @@ def test_session_uses_ssl_context_adapter(self): assert isinstance(https_adapter, SSLContextAdapter) assert https_adapter.ssl_context is http.ssl_context + def test_ssl_context_ciphers_can_be_modified(self): + """Test that the chosen ciphers can be set on CI runners.""" + context = ssl.create_default_context() + context.set_ciphers('TLS_RSA_WITH_AES_256_GCM_SHA384') + def test_tls_ciphers_applied_consistently(self): """Test that tls_ciphers are applied consistently.""" instance = {'tls_verify': True, 'tls_ciphers': list(IANA_TO_OPENSSL_NAME.keys())} From 
faccf4bcc1c4ab10961b9bfceb93c53cb582095b Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 18:09:44 +0200 Subject: [PATCH 018/100] Fix consul test --- consul/tests/test_unit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/consul/tests/test_unit.py b/consul/tests/test_unit.py index 6e93105a5734c..f0e57b8e3e68c 100644 --- a/consul/tests/test_unit.py +++ b/consul/tests/test_unit.py @@ -158,7 +158,7 @@ def test_get_nodes_with_service_critical(aggregator): def test_consul_request(aggregator, instance, mocker): consul_check = ConsulCheck(common.CHECK_NAME, {}, [consul_mocks.MOCK_CONFIG]) mocker.patch("datadog_checks.base.utils.serialization.json.loads") - with mock.patch("datadog_checks.consul.consul.requests.get") as mock_requests_get: + with mock.patch("datadog_checks.consul.consul.requests.Session.get") as mock_requests_get: consul_check.consul_request("foo") url = "{}/{}".format(instance["url"], "foo") aggregator.assert_service_check("consul.can_connect", ConsulCheck.OK, tags=["url:{}".format(url)], count=1) From 9e6a65e6c08eb726654fbc5bca57ea70cfaf87c7 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 18:16:31 +0200 Subject: [PATCH 019/100] Fix consul config assertion --- consul/tests/test_unit.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/consul/tests/test_unit.py b/consul/tests/test_unit.py index f0e57b8e3e68c..a0588e2a86e77 100644 --- a/consul/tests/test_unit.py +++ b/consul/tests/test_unit.py @@ -549,8 +549,10 @@ def test_config(test_case, extra_config, expected_http_kwargs, mocker): check = ConsulCheck(common.CHECK_NAME, {}, instances=[instance]) mocker.patch("datadog_checks.base.utils.serialization.json.loads") - with mock.patch('datadog_checks.base.utils.http.requests') as r: - r.get.return_value = mock.MagicMock(status_code=200) + with mock.patch('datadog_checks.base.utils.http.requests.Session') as session: + mock_session = mock.MagicMock() + session.return_value = 
mock_session + mock_session.get.return_value = mock.MagicMock(status_code=200) check.check(None) @@ -564,4 +566,4 @@ def test_config(test_case, extra_config, expected_http_kwargs, mocker): 'allow_redirects': mock.ANY, } http_wargs.update(expected_http_kwargs) - r.get.assert_called_with('/v1/status/leader', **http_wargs) + mock_session.get.assert_called_with('/v1/status/leader', **http_wargs) From 796fbbdb924dc218f0bd034833452625274ff855 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 18:17:15 +0200 Subject: [PATCH 020/100] Fail SSLContext certificate loading with WARNING instead of ERROR --- .../datadog_checks/base/utils/http.py | 38 +++++++++++++------ 1 file changed, 26 insertions(+), 12 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 2c509094218d8..60414c28ae8e1 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -144,23 +144,37 @@ def create_ssl_context(config, overrides=None): # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs ca_cert = config['tls_ca_cert'] - if ca_cert: - ca_cert = os.path.expanduser(ca_cert) - if os.path.isdir(ca_cert): - context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None) + try: + if ca_cert: + ca_cert = os.path.expanduser(ca_cert) + if os.path.isdir(ca_cert): + context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None) + else: + context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None) else: - context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None) - else: - context.load_default_certs(ssl.Purpose.SERVER_AUTH) + context.load_default_certs(ssl.Purpose.SERVER_AUTH) + except FileNotFoundError: + LOGGER.warning( + 'TLS CA certificate file not found: %s. 
' + 'Please check the `tls_ca_cert` configuration option.', + ca_cert, + ) # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain client_cert, client_key = config['tls_cert'], config['tls_private_key'] client_key_pass = config['tls_private_key_password'] - if client_key: - client_key = os.path.expanduser(client_key) - if client_cert: - client_cert = os.path.expanduser(client_cert) - context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) + try: + if client_key: + client_key = os.path.expanduser(client_key) + if client_cert: + client_cert = os.path.expanduser(client_cert) + context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) + except FileNotFoundError: + LOGGER.warning( + 'TLS client certificate file not found: %s. ' + 'Please check the `tls_cert` configuration option.', + client_cert, + ) return context From cba0c7d55df5c889fa663b7bd548c779634d0e1c Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 18:19:38 +0200 Subject: [PATCH 021/100] Lint --- datadog_checks_base/datadog_checks/base/utils/http.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 60414c28ae8e1..37fd6d8b390dc 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -155,8 +155,7 @@ def create_ssl_context(config, overrides=None): context.load_default_certs(ssl.Purpose.SERVER_AUTH) except FileNotFoundError: LOGGER.warning( - 'TLS CA certificate file not found: %s. ' - 'Please check the `tls_ca_cert` configuration option.', + 'TLS CA certificate file not found: %s. 
' 'Please check the `tls_ca_cert` configuration option.', ca_cert, ) @@ -171,8 +170,7 @@ def create_ssl_context(config, overrides=None): context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) except FileNotFoundError: LOGGER.warning( - 'TLS client certificate file not found: %s. ' - 'Please check the `tls_cert` configuration option.', + 'TLS client certificate file not found: %s. ' 'Please check the `tls_cert` configuration option.', client_cert, ) From 549578b7eea2f440932395a8f506181e8edf21db Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 18:53:24 +0200 Subject: [PATCH 022/100] Change mocks from requests.method to requests.Session.method --- cilium/tests/conftest.py | 4 ++-- couchbase/tests/test_unit.py | 4 ++-- crio/tests/test_crio.py | 2 +- datadog_checks_dev/tests/tooling/test_github.py | 2 +- datadog_cluster_agent/tests/conftest.py | 2 +- dcgm/tests/conftest.py | 4 ++-- ecs_fargate/tests/test_unit.py | 10 +++++----- ecs_fargate/tests/test_unit_v4.py | 2 +- elastic/tests/test_unit.py | 8 ++++---- external_dns/tests/conftest.py | 2 +- fluxcd/tests/conftest.py | 4 ++-- fly_io/tests/conftest.py | 2 +- hdfs_datanode/tests/conftest.py | 6 +++--- hdfs_namenode/tests/conftest.py | 4 ++-- impala/tests/conftest.py | 2 +- .../tests/test_kube_apiserver_metrics_1_25.py | 2 +- kube_apiserver_metrics/tests/test_sli_metrics.py | 2 +- kube_proxy/tests/test_kube_proxy_1_21_1.py | 4 ++-- kube_scheduler/tests/test_kube_scheduler_1_13.py | 2 +- kube_scheduler/tests/test_kube_scheduler_1_23.py | 2 +- kube_scheduler/tests/test_kube_scheduler_1_26.py | 2 +- kube_scheduler/tests/test_kube_scheduler_1_29.py | 2 +- kubevirt_api/tests/test_unit.py | 14 +++++++------- kubevirt_controller/tests/test_unit.py | 4 ++-- kubevirt_handler/tests/test_unit.py | 8 ++++---- mapreduce/tests/conftest.py | 4 ++-- nginx/tests/test_vts.py | 2 +- .../tests/test_nginx_ingress_controller.py | 2 +- nvidia_nim/tests/test_unit.py | 2 +- 
octopus_deploy/tests/conftest.py | 2 +- openstack_controller/tests/conftest.py | 4 ++-- .../tests/legacy/test_openstack.py | 2 +- .../tests/legacy/test_simple_api.py | 2 +- prometheus/tests/conftest.py | 2 +- rabbitmq/tests/test_openmetrics.py | 4 ++-- ray/tests/test_unit.py | 4 ++-- scylla/tests/conftest.py | 2 +- sonatype_nexus/tests/test_check.py | 2 +- squid/tests/test_squid.py | 2 +- squid/tests/test_unit.py | 8 ++++---- strimzi/tests/test_unit.py | 4 ++-- teamcity/tests/test_teamcity.py | 2 +- tekton/tests/test_unit.py | 4 ++-- temporal/tests/conftest.py | 2 +- torchserve/tests/inference/test_unit.py | 2 +- torchserve/tests/management/test_unit.py | 14 +++++++------- torchserve/tests/openmetrics/test_unit.py | 2 +- vsphere/tests/conftest.py | 4 ++-- yarn/tests/conftest.py | 12 ++++++------ 49 files changed, 95 insertions(+), 95 deletions(-) diff --git a/cilium/tests/conftest.py b/cilium/tests/conftest.py index c6096c3fd4d7d..caa56b731ba88 100644 --- a/cilium/tests/conftest.py +++ b/cilium/tests/conftest.py @@ -203,7 +203,7 @@ def mock_agent_data(): with open(f_name, "r") as f: text_data = f.read() with mock.patch( - "requests.get", + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={"Content-Type": "text/plain"} ), @@ -217,7 +217,7 @@ def mock_operator_data(): with open(f_name, "r") as f: text_data = f.read() with mock.patch( - "requests.get", + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={"Content-Type": "text/plain"} ), diff --git a/couchbase/tests/test_unit.py b/couchbase/tests/test_unit.py index 1078a07b7801c..4577d7582f409 100644 --- a/couchbase/tests/test_unit.py +++ b/couchbase/tests/test_unit.py @@ -126,7 +126,7 @@ def test_extract_index_tags(instance, test_input, expected_tags): def test_unit(dd_run_check, check, instance, mocker, aggregator): - mocker.patch("requests.get", 
wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check(check(instance)) @@ -142,7 +142,7 @@ def test_unit(dd_run_check, check, instance, mocker, aggregator): def test_unit_query_metrics(dd_run_check, check, instance_query, mocker, aggregator): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check(check(instance_query)) diff --git a/crio/tests/test_crio.py b/crio/tests/test_crio.py index f6b77953ac2ca..adfd92a32ce34 100644 --- a/crio/tests/test_crio.py +++ b/crio/tests/test_crio.py @@ -19,7 +19,7 @@ def mock_data(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/datadog_checks_dev/tests/tooling/test_github.py b/datadog_checks_dev/tests/tooling/test_github.py index ab96b415fcd3e..a5671bcc2c4c8 100644 --- a/datadog_checks_dev/tests/tooling/test_github.py +++ b/datadog_checks_dev/tests/tooling/test_github.py @@ -27,7 +27,7 @@ def test_get_pr_approvers(github_response, expected_approvers): mock_response.json.return_value = github_response with mock.patch( - "requests.get", + "requests.Session.get", return_value=mock_response, ): assert get_pr_approvers("integrations-core", "42", {}) == expected_approvers diff --git a/datadog_cluster_agent/tests/conftest.py b/datadog_cluster_agent/tests/conftest.py index 084b6164a174a..4ccf2c30a4710 100644 --- a/datadog_cluster_agent/tests/conftest.py +++ b/datadog_cluster_agent/tests/conftest.py @@ -26,7 +26,7 @@ def mock_metrics_endpoint(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff 
--git a/dcgm/tests/conftest.py b/dcgm/tests/conftest.py index fe3a75dd256b7..ab5c6d305432e 100644 --- a/dcgm/tests/conftest.py +++ b/dcgm/tests/conftest.py @@ -45,7 +45,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -59,7 +59,7 @@ def mock_label_remap(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/ecs_fargate/tests/test_unit.py b/ecs_fargate/tests/test_unit.py index ed46749111f04..22f9ae0c83839 100644 --- a/ecs_fargate/tests/test_unit.py +++ b/ecs_fargate/tests/test_unit.py @@ -42,7 +42,7 @@ def test_failing_check(check, aggregator, dd_run_check): Testing fargate metadata endpoint error. """ with mock.patch( - 'datadog_checks.ecs_fargate.ecs_fargate.requests.get', return_value=MockResponse('{}', status_code=500) + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', return_value=MockResponse('{}', status_code=500) ): dd_run_check(check) @@ -55,7 +55,7 @@ def test_invalid_response_check(check, aggregator, dd_run_check): Testing invalid fargate metadata payload. """ with mock.patch( - 'datadog_checks.ecs_fargate.ecs_fargate.requests.get', return_value=MockResponse('{}', status_code=200) + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', return_value=MockResponse('{}', status_code=200) ): dd_run_check(check) @@ -67,7 +67,7 @@ def test_successful_check_linux(check, aggregator, dd_run_check): """ Testing successful fargate check on Linux. 
""" - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.get', side_effect=mocked_requests_get_linux): + with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_linux): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) @@ -146,7 +146,7 @@ def test_successful_check_windows(check, aggregator, dd_run_check): """ Testing successful fargate check on Windows. """ - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.get', side_effect=mocked_requests_get_windows): + with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_windows): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) @@ -204,7 +204,7 @@ def test_successful_check_wrong_sys_delta(check, aggregator, dd_run_check): """ Testing successful fargate check. 
""" - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.get', side_effect=mocked_requests_get_sys_delta): + with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_sys_delta): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) diff --git a/ecs_fargate/tests/test_unit_v4.py b/ecs_fargate/tests/test_unit_v4.py index d3d3c78703f9c..3c06d4cee0e75 100644 --- a/ecs_fargate/tests/test_unit_v4.py +++ b/ecs_fargate/tests/test_unit_v4.py @@ -42,7 +42,7 @@ def test_successful_check_linux_v4(check, aggregator, dd_run_check): Testing successful fargate check on Linux on ECS ENDPOINT API v4. """ - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.get', side_effect=mocked_requests_get_linux_v4): + with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_linux_v4): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags_v4): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) diff --git a/elastic/tests/test_unit.py b/elastic/tests/test_unit.py index ce0ca93e21346..551133e47a4fd 100644 --- a/elastic/tests/test_unit.py +++ b/elastic/tests/test_unit.py @@ -134,7 +134,7 @@ def test_get_template_metrics(aggregator, instance, mock_http_response): def test_get_template_metrics_raise_exception(aggregator, instance): with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=MockResponse(status_code=403), ): check = ESCheck('elastic', {}, instances=[instance]) @@ -151,7 +151,7 @@ def test_get_value_from_path(): def test__get_data_throws_authentication_error(instance): with mock.patch( - 'requests.get', + 'requests.Session.get', 
return_value=MockResponse(status_code=400), ): check = ESCheck('elastic', {}, instances=[instance]) @@ -162,7 +162,7 @@ def test__get_data_throws_authentication_error(instance): def test__get_data_creates_critical_service_alert(aggregator, instance): with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=MockResponse(status_code=500), ): check = ESCheck('elastic', {}, instances=[instance]) @@ -193,7 +193,7 @@ def test__get_data_creates_critical_service_alert(aggregator, instance): ) def test_disable_legacy_sc_tags(aggregator, es_instance): with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=MockResponse(status_code=500), ): check = ESCheck('elastic', {}, instances=[es_instance]) diff --git a/external_dns/tests/conftest.py b/external_dns/tests/conftest.py index 48e4c325e9ec8..659cb475e7f29 100644 --- a/external_dns/tests/conftest.py +++ b/external_dns/tests/conftest.py @@ -19,7 +19,7 @@ def mock_external_dns(): text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split('\n'), headers={'Content-Type': 'text/plain'} ), diff --git a/fluxcd/tests/conftest.py b/fluxcd/tests/conftest.py index 2b43fc5bca29e..55a944c2d90bd 100644 --- a/fluxcd/tests/conftest.py +++ b/fluxcd/tests/conftest.py @@ -76,7 +76,7 @@ def mock_metrics_v1(): content = f.read() with mock.patch( - "requests.get", + "requests.Session.get", return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: content.split("\n"), @@ -94,7 +94,7 @@ def mock_metrics_v2(): content = f.read() with mock.patch( - "requests.get", + "requests.Session.get", return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: content.split("\n"), diff --git a/fly_io/tests/conftest.py b/fly_io/tests/conftest.py index f636ecf09527b..959a905ee23dd 100644 --- a/fly_io/tests/conftest.py +++ b/fly_io/tests/conftest.py @@ -127,5 +127,5 @@ def get(url, *args, 
**kwargs): return mock.MagicMock(json=mock_json, status_code=mock_status_code) mock_get = mock.MagicMock(side_effect=get) - monkeypatch.setattr('requests.get', mock_get) + monkeypatch.setattr('requests.Session.get', mock_get) return mock_get diff --git a/hdfs_datanode/tests/conftest.py b/hdfs_datanode/tests/conftest.py index 48b97b46c3a53..d8248b94a20a1 100644 --- a/hdfs_datanode/tests/conftest.py +++ b/hdfs_datanode/tests/conftest.py @@ -37,19 +37,19 @@ def instance(): @pytest.fixture def mocked_request(): - with patch('requests.get', new=requests_get_mock): + with patch('requests.Session.get', new=requests_get_mock): yield @pytest.fixture def mocked_metadata_request(): - with patch('requests.get', new=requests_metadata_mock): + with patch('requests.Session.get', new=requests_metadata_mock): yield @pytest.fixture def mocked_auth_request(): - with patch('requests.get', new=requests_auth_mock): + with patch('requests.Session.get', new=requests_auth_mock): yield diff --git a/hdfs_namenode/tests/conftest.py b/hdfs_namenode/tests/conftest.py index 47d43f92010d0..40191a59a3c4a 100644 --- a/hdfs_namenode/tests/conftest.py +++ b/hdfs_namenode/tests/conftest.py @@ -44,13 +44,13 @@ def check(): @pytest.fixture def mocked_request(): - with patch("requests.get", new=requests_get_mock): + with patch("requests.Session.get", new=requests_get_mock): yield @pytest.fixture def mocked_auth_request(): - with patch("requests.get", new=requests_auth_mock): + with patch("requests.Session.get", new=requests_auth_mock): yield diff --git a/impala/tests/conftest.py b/impala/tests/conftest.py index bb81744b034d7..fed60c3fed3bf 100644 --- a/impala/tests/conftest.py +++ b/impala/tests/conftest.py @@ -100,7 +100,7 @@ def mock_metrics(request): content = fixture_file.read() with mock.patch( - "requests.get", + "requests.Session.get", return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: content.split("\n"), diff --git 
a/kube_apiserver_metrics/tests/test_kube_apiserver_metrics_1_25.py b/kube_apiserver_metrics/tests/test_kube_apiserver_metrics_1_25.py index afec1846bb1d5..b1ff0388c4741 100644 --- a/kube_apiserver_metrics/tests/test_kube_apiserver_metrics_1_25.py +++ b/kube_apiserver_metrics/tests/test_kube_apiserver_metrics_1_25.py @@ -25,7 +25,7 @@ def mock_get(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), diff --git a/kube_apiserver_metrics/tests/test_sli_metrics.py b/kube_apiserver_metrics/tests/test_sli_metrics.py index d3a726bb592c4..b550165a81955 100644 --- a/kube_apiserver_metrics/tests/test_sli_metrics.py +++ b/kube_apiserver_metrics/tests/test_sli_metrics.py @@ -22,7 +22,7 @@ def mock_metrics(): with open(f_name, "r") as f: text_data = f.read() with mock.patch( - "requests.get", + "requests.Session.get", return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), diff --git a/kube_proxy/tests/test_kube_proxy_1_21_1.py b/kube_proxy/tests/test_kube_proxy_1_21_1.py index 925bf54ee78c1..51eaddfbac5b6 100644 --- a/kube_proxy/tests/test_kube_proxy_1_21_1.py +++ b/kube_proxy/tests/test_kube_proxy_1_21_1.py @@ -22,7 +22,7 @@ def mock_iptables(): with open(f_name, 'r') as f: text_data = f.read() mock_iptables = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), @@ -37,7 +37,7 @@ def mock_userspace(): with open(f_name, 'r') as f: text_data = f.read() mock_userspace = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/kube_scheduler/tests/test_kube_scheduler_1_13.py 
b/kube_scheduler/tests/test_kube_scheduler_1_13.py index ac60b73837462..e7ae1df38bf1a 100644 --- a/kube_scheduler/tests/test_kube_scheduler_1_13.py +++ b/kube_scheduler/tests/test_kube_scheduler_1_13.py @@ -23,7 +23,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/kube_scheduler/tests/test_kube_scheduler_1_23.py b/kube_scheduler/tests/test_kube_scheduler_1_23.py index 6ea03cbe2e17d..4e97823166a79 100644 --- a/kube_scheduler/tests/test_kube_scheduler_1_23.py +++ b/kube_scheduler/tests/test_kube_scheduler_1_23.py @@ -23,7 +23,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/kube_scheduler/tests/test_kube_scheduler_1_26.py b/kube_scheduler/tests/test_kube_scheduler_1_26.py index 853f7a7072bf7..0655cc65558cd 100644 --- a/kube_scheduler/tests/test_kube_scheduler_1_26.py +++ b/kube_scheduler/tests/test_kube_scheduler_1_26.py @@ -22,7 +22,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/kube_scheduler/tests/test_kube_scheduler_1_29.py b/kube_scheduler/tests/test_kube_scheduler_1_29.py index 01ec0d749e391..16fe535ce6816 100644 --- a/kube_scheduler/tests/test_kube_scheduler_1_29.py +++ b/kube_scheduler/tests/test_kube_scheduler_1_29.py @@ -22,7 +22,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 
'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/kubevirt_api/tests/test_unit.py b/kubevirt_api/tests/test_unit.py index 2324214c30057..1a4b040e86d05 100644 --- a/kubevirt_api/tests/test_unit.py +++ b/kubevirt_api/tests/test_unit.py @@ -18,7 +18,7 @@ def test_check_collects_all_metrics(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) @@ -95,7 +95,7 @@ def test_check_collects_all_metrics(dd_run_check, aggregator, instance, mocker): def test_check_sends_zero_count_for_vms(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) @@ -116,7 +116,7 @@ def test_check_sends_zero_count_for_vms(dd_run_check, aggregator, instance, mock def test_check_sends_zero_count_for_vmis(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) @@ -154,7 +154,7 @@ def test_emits_zero_can_connect_when_service_is_down(dd_run_check, aggregator, i def test_emits_one_can_connect_when_service_is_up(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) check._setup_kube_client = lambda: None @@ -171,7 +171,7 @@ def test_emits_one_can_connect_when_service_is_up(dd_run_check, aggregator, inst def test_raise_exception_when_metrics_endpoint_is_bad(dd_run_check, aggregator, instance, mocker): - 
mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [BAD_METRICS_HOSTNAME_INSTANCE]) check._setup_kube_client = lambda: None @@ -190,7 +190,7 @@ def test_raise_exception_when_metrics_endpoint_is_bad(dd_run_check, aggregator, def test_raise_exception_cannot_connect_to_kubernetes_api(dd_run_check, aggregator, instance, mocker, caplog): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtApiCheck("kubevirt_api", {}, [instance]) with pytest.raises( @@ -202,7 +202,7 @@ def test_raise_exception_cannot_connect_to_kubernetes_api(dd_run_check, aggregat def test_log_warning_healthz_endpoint_not_provided(dd_run_check, aggregator, instance, mocker, caplog): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) new_instance = deepcopy(instance) new_instance.pop("kubevirt_api_healthz_endpoint") diff --git a/kubevirt_controller/tests/test_unit.py b/kubevirt_controller/tests/test_unit.py index 50cd1fd3fe596..0a6bcb988a40c 100644 --- a/kubevirt_controller/tests/test_unit.py +++ b/kubevirt_controller/tests/test_unit.py @@ -16,7 +16,7 @@ def test_emits_can_connect_one_when_service_is_up(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtControllerCheck("kubevirt_controller", {}, [instance]) dd_run_check(check) aggregator.assert_metric( @@ -36,7 +36,7 @@ def test_emits_can_connect_zero_when_service_is_down(dd_run_check, aggregator, i def test_check_collects_all_metrics(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = 
KubeVirtControllerCheck("kubevirt_controller", {}, [instance]) diff --git a/kubevirt_handler/tests/test_unit.py b/kubevirt_handler/tests/test_unit.py index feb3121468339..01e8e42c4a9ba 100644 --- a/kubevirt_handler/tests/test_unit.py +++ b/kubevirt_handler/tests/test_unit.py @@ -17,7 +17,7 @@ def test_check_collects_metrics(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) dd_run_check(check) @@ -122,7 +122,7 @@ def test_check_collects_metrics(dd_run_check, aggregator, instance, mocker): def test_logs_warning_when_healthz_endpoint_is_missing(dd_run_check, aggregator, instance, mocker, caplog): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) del instance["kubevirt_handler_healthz_endpoint"] check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) dd_run_check(check) @@ -134,7 +134,7 @@ def test_logs_warning_when_healthz_endpoint_is_missing(dd_run_check, aggregator, def test_emits_can_connect_one_when_service_is_up(dd_run_check, aggregator, instance, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) dd_run_check(check) aggregator.assert_metric( @@ -157,7 +157,7 @@ def test_emits_can_connect_zero_when_service_is_down(dd_run_check, aggregator, i def test_version_metadata(instance, dd_run_check, datadog_agent, aggregator, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) check = KubeVirtHandlerCheck("kubevirt_handler", {}, [instance]) check.check_id = "test:123" dd_run_check(check) diff --git a/mapreduce/tests/conftest.py b/mapreduce/tests/conftest.py index 
087f9adc5b68b..ce15c2e06934d 100644 --- a/mapreduce/tests/conftest.py +++ b/mapreduce/tests/conftest.py @@ -52,13 +52,13 @@ def instance(): @pytest.fixture def mocked_request(): - with patch("requests.get", new=requests_get_mock): + with patch("requests.Session.get", new=requests_get_mock): yield @pytest.fixture def mocked_auth_request(): - with patch("requests.get", new=requests_auth_mock): + with patch("requests.Session.get", new=requests_auth_mock): yield diff --git a/nginx/tests/test_vts.py b/nginx/tests/test_vts.py index 696f8498c397a..8a06abe7b5b5e 100644 --- a/nginx/tests/test_vts.py +++ b/nginx/tests/test_vts.py @@ -49,7 +49,7 @@ def test_vts(check, instance_vts, aggregator): @pytest.mark.unit def test_vts_unit(dd_run_check, aggregator, mocked_instance_vts, check, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) c = check(mocked_instance_vts) dd_run_check(c) diff --git a/nginx_ingress_controller/tests/test_nginx_ingress_controller.py b/nginx_ingress_controller/tests/test_nginx_ingress_controller.py index 8066617d61bc5..5ac427b270c62 100644 --- a/nginx_ingress_controller/tests/test_nginx_ingress_controller.py +++ b/nginx_ingress_controller/tests/test_nginx_ingress_controller.py @@ -21,7 +21,7 @@ def mock_data(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/nvidia_nim/tests/test_unit.py b/nvidia_nim/tests/test_unit.py index 980580e11fab3..deaeffedf3ae8 100644 --- a/nvidia_nim/tests/test_unit.py +++ b/nvidia_nim/tests/test_unit.py @@ -18,7 +18,7 @@ def test_check_nvidia_nim(dd_run_check, aggregator, datadog_agent, instance): check = NvidiaNIMCheck("nvidia_nim", {}, [instance]) check.check_id = "test:123" with mock.patch( - 'requests.get', + 'requests.Session.get', 
side_effect=[ MockResponse(file_path=get_fixture_path("nim_metrics.txt")), MockResponse(file_path=get_fixture_path("nim_version.json")), diff --git a/octopus_deploy/tests/conftest.py b/octopus_deploy/tests/conftest.py index fee5a733376d9..7ec3897c84a13 100644 --- a/octopus_deploy/tests/conftest.py +++ b/octopus_deploy/tests/conftest.py @@ -214,5 +214,5 @@ def get(url, *args, **kwargs): return CopyingMock(elapsed=mock_elapsed, json=mock_json, status_code=mock_status_code) mock_get = CopyingMock(side_effect=get) - monkeypatch.setattr('requests.get', mock_get) + monkeypatch.setattr('requests.Session.get', mock_get) return mock_get diff --git a/openstack_controller/tests/conftest.py b/openstack_controller/tests/conftest.py index 2583431328406..e6b933bb66871 100644 --- a/openstack_controller/tests/conftest.py +++ b/openstack_controller/tests/conftest.py @@ -1009,7 +1009,7 @@ def get(url, *args, **kwargs): return mock.MagicMock(elapsed=mock_elapsed, json=mock_json, status_code=mock_status_code) mock_get = mock.MagicMock(side_effect=get) - monkeypatch.setattr('requests.get', mock_get) + monkeypatch.setattr('requests.Session.get', mock_get) return mock_get @@ -1041,5 +1041,5 @@ def post(url, *args, **kwargs): return MockResponse(json_data=json_data, status_code=200, headers=headers) mock_post = mock.MagicMock(side_effect=post) - monkeypatch.setattr('requests.post', mock_post) + monkeypatch.setattr('requests.Session.post', mock_post) return mock_post diff --git a/openstack_controller/tests/legacy/test_openstack.py b/openstack_controller/tests/legacy/test_openstack.py index f048d5dc7a84f..72095fc3e637f 100644 --- a/openstack_controller/tests/legacy/test_openstack.py +++ b/openstack_controller/tests/legacy/test_openstack.py @@ -41,7 +41,7 @@ def test_parse_uptime_string(aggregator): def test_api_error_log_no_password(check, instance, caplog): with caplog.at_level(logging.DEBUG): with pytest.raises(KeystoneUnreachable): - with 
mock.patch('datadog_checks.base.utils.http.requests.post') as req: + with mock.patch('datadog_checks.base.utils.http.requests.Session.post') as req: req.side_effect = HTTPError(mock.Mock(status=404), 'not found') check._api = SimpleApi(check.log, instance.get("keystone_server_url"), check.http) identity = Authenticator._get_user_identity(instance.get("user")) diff --git a/openstack_controller/tests/legacy/test_simple_api.py b/openstack_controller/tests/legacy/test_simple_api.py index 08eba8cd3556f..ad0cd7be57b2e 100644 --- a/openstack_controller/tests/legacy/test_simple_api.py +++ b/openstack_controller/tests/legacy/test_simple_api.py @@ -770,7 +770,7 @@ def test__make_request_failure(requests_wrapper): api = ApiFactory.create(log, instance, requests_wrapper) response_mock = mock.MagicMock() - with mock.patch("datadog_checks.openstack_controller.legacy.api.requests.get", return_value=response_mock): + with mock.patch("datadog_checks.openstack_controller.legacy.api.requests.Session.get", return_value=response_mock): response_mock.raise_for_status.side_effect = requests.exceptions.HTTPError response_mock.status_code = 401 with pytest.raises(AuthenticationNeeded): diff --git a/prometheus/tests/conftest.py b/prometheus/tests/conftest.py index dfffd2ff931a3..853e8f5e99efd 100644 --- a/prometheus/tests/conftest.py +++ b/prometheus/tests/conftest.py @@ -63,7 +63,7 @@ def poll_mock(): g3.labels(matched_label="foobar", node="host2", timestamp="456").set(float('inf')) poll_mock_patch = mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: ensure_unicode(generate_latest(registry)).split("\n"), diff --git a/rabbitmq/tests/test_openmetrics.py b/rabbitmq/tests/test_openmetrics.py index b10b0ba523c2a..a96b40987d0fa 100644 --- a/rabbitmq/tests/test_openmetrics.py +++ b/rabbitmq/tests/test_openmetrics.py @@ -245,7 +245,7 @@ def test_aggregated_and_unaggregated_endpoints(endpoint, metrics, aggregator, dd 
'include_aggregated_endpoint': True, } ) - mocker.patch('requests.get', wraps=mock_http_responses) + mocker.patch('requests.Session.get', wraps=mock_http_responses) dd_run_check(check) meta_metrics = {'rabbitmq.build_info', 'rabbitmq.identity_info'} @@ -289,7 +289,7 @@ def test_detailed_only_metrics(aggregator, dd_run_check, mocker): 'include_aggregated_endpoint': True, } ) - mocker.patch('requests.get', wraps=mock_http_responses) + mocker.patch('requests.Session.get', wraps=mock_http_responses) dd_run_check(check) detailed_only_metrics = ( diff --git a/ray/tests/test_unit.py b/ray/tests/test_unit.py index c11b8ad455759..f2e7725b5287f 100644 --- a/ray/tests/test_unit.py +++ b/ray/tests/test_unit.py @@ -17,7 +17,7 @@ ], ) def test_check(dd_run_check, aggregator, mocker, check, instance, metrics): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check(check(instance)) for expected_metric in metrics: @@ -33,7 +33,7 @@ def test_check(dd_run_check, aggregator, mocker, check, instance, metrics): def test_invalid_url(dd_run_check, aggregator, check, mocked_head_instance, mocker): mocked_head_instance["openmetrics_endpoint"] = "http://unknowwn" - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) with pytest.raises(Exception): dd_run_check(check(mocked_head_instance)) diff --git a/scylla/tests/conftest.py b/scylla/tests/conftest.py index b4b653494b3ae..5b28e13609d02 100644 --- a/scylla/tests/conftest.py +++ b/scylla/tests/conftest.py @@ -45,7 +45,7 @@ def mock_db_data(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), diff --git a/sonatype_nexus/tests/test_check.py b/sonatype_nexus/tests/test_check.py index 7f6a8064ac9dd..eda941ff13609 100644 --- 
a/sonatype_nexus/tests/test_check.py +++ b/sonatype_nexus/tests/test_check.py @@ -15,7 +15,7 @@ @pytest.fixture def mock_http_response(mocker): yield lambda *args, **kwargs: mocker.patch( - kwargs.pop("method", "requests.get"), return_value=MockResponse(*args, **kwargs) + kwargs.pop("method", "requests.Session.get"), return_value=MockResponse(*args, **kwargs) ) diff --git a/squid/tests/test_squid.py b/squid/tests/test_squid.py index 9732198af7bf9..55a40cbe7c835 100644 --- a/squid/tests/test_squid.py +++ b/squid/tests/test_squid.py @@ -79,7 +79,7 @@ def test_check_ok(aggregator, check, instance): ) @pytest.mark.usefixtures("dd_environment") def test_version_metadata(check, instance, datadog_agent, raw_version, version_metadata, count): - with mock.patch('datadog_checks.base.utils.http.requests.get') as g: + with mock.patch('datadog_checks.base.utils.http.requests.Session.get') as g: g.return_value.headers = {'Server': raw_version} check.check_id = 'test:123' diff --git a/squid/tests/test_unit.py b/squid/tests/test_unit.py index b6bffb0936202..332e8c9123e2d 100644 --- a/squid/tests/test_unit.py +++ b/squid/tests/test_unit.py @@ -62,7 +62,7 @@ def test_get_counters(check): due to a missing = character. 
See https://github.com/DataDog/integrations-core/pull/1643 """ - with mock.patch('datadog_checks.squid.squid.requests.get') as g: + with mock.patch('datadog_checks.squid.squid.requests.Session.get') as g: with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): g.return_value = mock.MagicMock(text="client_http.requests=42\n\n") check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) @@ -73,7 +73,7 @@ def test_get_counters(check): def test_host_without_protocol(check, instance): - with mock.patch('datadog_checks.squid.squid.requests.get') as g: + with mock.patch('datadog_checks.squid.squid.requests.Session.get') as g: with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): g.return_value = mock.MagicMock(text="client_http.requests=42\n\n") check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) @@ -83,7 +83,7 @@ def test_host_without_protocol(check, instance): def test_host_https(check, instance): instance['host'] = 'https://localhost' - with mock.patch('datadog_checks.squid.squid.requests.get') as g: + with mock.patch('datadog_checks.squid.squid.requests.Session.get') as g: with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): g.return_value = mock.MagicMock(text="client_http.requests=42\n\n") check.parse_counter = mock.MagicMock(return_value=('foo', 'bar')) @@ -103,7 +103,7 @@ def test_legacy_username_password(instance, auth_config): instance.update(auth_config) check = SquidCheck(common.CHECK_NAME, {}, {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests.get') as g: + with mock.patch('datadog_checks.base.utils.http.requests.Session.get') as g: with mock.patch('datadog_checks.squid.SquidCheck.submit_version'): check.get_counters('host', 'port', []) diff --git a/strimzi/tests/test_unit.py b/strimzi/tests/test_unit.py index 71da37cde60ab..0c9475cefab1a 100644 --- a/strimzi/tests/test_unit.py +++ b/strimzi/tests/test_unit.py @@ -56,7 +56,7 @@ def test_check_unique_operator( tag, mocker, ): - 
mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check(check(instance)) for expected_metric in metrics: @@ -76,7 +76,7 @@ def test_check_unique_operator( def test_check_all_operators(dd_run_check, aggregator, check, mocker): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check( check( { diff --git a/teamcity/tests/test_teamcity.py b/teamcity/tests/test_teamcity.py index caa815916ae1c..65c23a4a12dab 100644 --- a/teamcity/tests/test_teamcity.py +++ b/teamcity/tests/test_teamcity.py @@ -66,7 +66,7 @@ def test_config(dd_run_check, extra_config, expected_http_kwargs): instance.update(extra_config) check = TeamCityRest('teamcity', {}, [instance]) - with patch('datadog_checks.base.utils.http.requests.get') as r: + with patch('datadog_checks.base.utils.http.requests.Session.get') as r: dd_run_check(check) http_wargs = { diff --git a/tekton/tests/test_unit.py b/tekton/tests/test_unit.py index 83bd63353b527..0010a2eae466d 100644 --- a/tekton/tests/test_unit.py +++ b/tekton/tests/test_unit.py @@ -17,7 +17,7 @@ ], ) def test_check(dd_run_check, aggregator, mocker, instance, metrics, request, namespace): - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) dd_run_check(check(request.getfixturevalue(instance))) for expected_metric in metrics: @@ -33,7 +33,7 @@ def test_check(dd_run_check, aggregator, mocker, instance, metrics, request, nam def test_invalid_url(dd_run_check, aggregator, pipelines_instance, mocker): pipelines_instance["pipelines_controller_endpoint"] = "http://unknowwn" - mocker.patch("requests.get", wraps=mock_http_responses) + mocker.patch("requests.Session.get", wraps=mock_http_responses) with pytest.raises(Exception): dd_run_check(check(pipelines_instance)) diff --git a/temporal/tests/conftest.py b/temporal/tests/conftest.py 
index c31e432914f2e..2ed04281aa8c5 100644 --- a/temporal/tests/conftest.py +++ b/temporal/tests/conftest.py @@ -87,7 +87,7 @@ def mock_metrics(): with open(f_name, 'r') as f: text_data = f.read() with mock.patch( - 'requests.get', + 'requests.Session.get', return_value=mock.MagicMock( status_code=200, iter_lines=lambda **kwargs: text_data.split("\n"), headers={'Content-Type': "text/plain"} ), diff --git a/torchserve/tests/inference/test_unit.py b/torchserve/tests/inference/test_unit.py index 9cae5f90b3c8e..a965bd8b23fce 100644 --- a/torchserve/tests/inference/test_unit.py +++ b/torchserve/tests/inference/test_unit.py @@ -13,7 +13,7 @@ def test_check(dd_run_check, aggregator, check, mocked_inference_instance, mocker): - mocker.patch('requests.get', wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) dd_run_check(check(mocked_inference_instance)) aggregator.assert_service_check( diff --git a/torchserve/tests/management/test_unit.py b/torchserve/tests/management/test_unit.py index 7a2f7705ba133..15cd3b694faff 100644 --- a/torchserve/tests/management/test_unit.py +++ b/torchserve/tests/management/test_unit.py @@ -14,7 +14,7 @@ def test_check(dd_run_check, aggregator, check, mocked_management_instance, mocker): - mocker.patch('requests.get', wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) dd_run_check(check(mocked_management_instance)) for metric in METRICS: @@ -43,7 +43,7 @@ def custom_mock_http_responses(url, **_params): return mock_http_responses()(url) - mocker.patch('requests.get', wraps=custom_mock_http_responses) + mocker.patch('requests.Session.get', wraps=custom_mock_http_responses) dd_run_check(check(mocked_management_instance)) # We should not get anything for these models @@ -157,7 +157,7 @@ def custom_mock_http_responses(url, **_params): def test_check_with_discovery( dd_run_check, aggregator, check, mocked_management_instance, mocker, include, exclude, limit, 
expected_models ): - mocker.patch('requests.get', wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) mocked_management_instance["limit"] = limit mocked_management_instance["exclude"] = exclude mocked_management_instance["include"] = include @@ -308,13 +308,13 @@ def test_check_with_events( datadog_agent.reset() check_instance = check(mocked_management_instance) - mocker.patch('requests.get', wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) dd_run_check(check_instance) dd_run_check(check_instance) assert len(aggregator.events) == 0 - mocker.patch('requests.get', wraps=mock_http_responses(new_response)) + mocker.patch('requests.Session.get', wraps=mock_http_responses(new_response)) dd_run_check(check_instance) for expected_event, actual_event in zip(expected_events, aggregator.events, strict=True): @@ -338,13 +338,13 @@ def test_check_disable_events(dd_run_check, datadog_agent, aggregator, check, mo mocked_management_instance["submit_events"] = False check_instance = check(mocked_management_instance) - mocker.patch('requests.get', wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) dd_run_check(check_instance) dd_run_check(check_instance) assert len(aggregator.events) == 0 - mocker.patch('requests.get', wraps=mock_http_responses("management/events/models_all_dropped.json")) + mocker.patch('requests.Session.get', wraps=mock_http_responses("management/events/models_all_dropped.json")) dd_run_check(check_instance) assert len(aggregator.events) == 0 diff --git a/torchserve/tests/openmetrics/test_unit.py b/torchserve/tests/openmetrics/test_unit.py index 5964f02bcae23..78693ed4ab288 100644 --- a/torchserve/tests/openmetrics/test_unit.py +++ b/torchserve/tests/openmetrics/test_unit.py @@ -13,7 +13,7 @@ def test_check(dd_run_check, aggregator, check, mocked_openmetrics_instance, mocker): - mocker.patch('requests.get', 
wraps=mock_http_responses()) + mocker.patch('requests.Session.get', wraps=mock_http_responses()) dd_run_check(check(mocked_openmetrics_instance)) for name, expected_values in METRICS.items(): diff --git a/vsphere/tests/conftest.py b/vsphere/tests/conftest.py index 553e3096a1bfb..ea974ab75f0a8 100644 --- a/vsphere/tests/conftest.py +++ b/vsphere/tests/conftest.py @@ -250,8 +250,8 @@ def mock_http_api(monkeypatch): http = MockHttpV7() else: http = MockHttpV6() - monkeypatch.setattr('requests.get', MagicMock(side_effect=http.get)) - monkeypatch.setattr('requests.post', MagicMock(side_effect=http.post)) + monkeypatch.setattr('requests.Session.get', MagicMock(side_effect=http.get)) + monkeypatch.setattr('requests.Session.post', MagicMock(side_effect=http.post)) yield http diff --git a/yarn/tests/conftest.py b/yarn/tests/conftest.py index 16456c68fe4b8..44fed2240ee67 100644 --- a/yarn/tests/conftest.py +++ b/yarn/tests/conftest.py @@ -57,7 +57,7 @@ def instance(): @pytest.fixture def mocked_request(): - with patch("requests.get", new=requests_get_mock): + with patch("requests.Session.get", new=requests_get_mock): yield @@ -65,15 +65,15 @@ def mocked_request(): def mocked_auth_request(): def requests_auth_get(*args, **kwargs): # Make sure we're passing in authentication - assert 'auth' in kwargs, 'Missing "auth" argument in requests.get(...) call' + assert 'auth' in kwargs, 'Missing "auth" argument in requests.Session.get(...) call' # Make sure we've got the correct username and password - assert kwargs['auth'] == (TEST_USERNAME, TEST_PASSWORD), "Incorrect username or password in requests.get" + assert kwargs['auth'] == (TEST_USERNAME, TEST_PASSWORD), "Incorrect username or password in requests.Session.get" # Return mocked request.get(...) 
return requests_get_mock(*args, **kwargs) - with patch("requests.get", new=requests_auth_get): + with patch("requests.Session.get", new=requests_auth_get): yield @@ -85,7 +85,7 @@ def mocked_bad_cert_request(): def requests_bad_cert_get(*args, **kwargs): # Make sure we're passing in the 'verify' argument - assert 'verify' in kwargs, 'Missing "verify" argument in requests.get(...) call' + assert 'verify' in kwargs, 'Missing "verify" argument in requests.Session.get(...) call' if kwargs['verify']: raise SSLError("certificate verification failed for {}".format(args[0])) @@ -93,7 +93,7 @@ def requests_bad_cert_get(*args, **kwargs): # Return the actual response return requests_get_mock(*args, **kwargs) - with patch("requests.get", new=requests_bad_cert_get): + with patch("requests.Session.get", new=requests_bad_cert_get): yield From b96d1ec2110d2a402dd3c8bc8f497c3cc5902589 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 6 Jun 2025 18:58:55 +0200 Subject: [PATCH 023/100] Lint --- ecs_fargate/tests/test_unit.py | 12 +++++++++--- ecs_fargate/tests/test_unit_v4.py | 4 +++- yarn/tests/conftest.py | 5 ++++- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/ecs_fargate/tests/test_unit.py b/ecs_fargate/tests/test_unit.py index 22f9ae0c83839..4f49d663c211b 100644 --- a/ecs_fargate/tests/test_unit.py +++ b/ecs_fargate/tests/test_unit.py @@ -67,7 +67,9 @@ def test_successful_check_linux(check, aggregator, dd_run_check): """ Testing successful fargate check on Linux. 
""" - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_linux): + with mock.patch( + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_linux + ): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) @@ -146,7 +148,9 @@ def test_successful_check_windows(check, aggregator, dd_run_check): """ Testing successful fargate check on Windows. """ - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_windows): + with mock.patch( + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_windows + ): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) @@ -204,7 +208,9 @@ def test_successful_check_wrong_sys_delta(check, aggregator, dd_run_check): """ Testing successful fargate check. 
""" - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_sys_delta): + with mock.patch( + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_sys_delta + ): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) diff --git a/ecs_fargate/tests/test_unit_v4.py b/ecs_fargate/tests/test_unit_v4.py index 3c06d4cee0e75..e7bf70546bfb9 100644 --- a/ecs_fargate/tests/test_unit_v4.py +++ b/ecs_fargate/tests/test_unit_v4.py @@ -42,7 +42,9 @@ def test_successful_check_linux_v4(check, aggregator, dd_run_check): Testing successful fargate check on Linux on ECS ENDPOINT API v4. """ - with mock.patch('datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_linux_v4): + with mock.patch( + 'datadog_checks.ecs_fargate.ecs_fargate.requests.Session.get', side_effect=mocked_requests_get_linux_v4 + ): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.get_tags", side_effect=mocked_get_tags_v4): with mock.patch("datadog_checks.ecs_fargate.ecs_fargate.c_is_excluded", side_effect=mocked_is_excluded): dd_run_check(check) diff --git a/yarn/tests/conftest.py b/yarn/tests/conftest.py index 44fed2240ee67..60e86e1c27963 100644 --- a/yarn/tests/conftest.py +++ b/yarn/tests/conftest.py @@ -68,7 +68,10 @@ def requests_auth_get(*args, **kwargs): assert 'auth' in kwargs, 'Missing "auth" argument in requests.Session.get(...) call' # Make sure we've got the correct username and password - assert kwargs['auth'] == (TEST_USERNAME, TEST_PASSWORD), "Incorrect username or password in requests.Session.get" + assert kwargs['auth'] == ( + TEST_USERNAME, + TEST_PASSWORD, + ), "Incorrect username or password in requests.Session.get" # Return mocked request.get(...) 
return requests_get_mock(*args, **kwargs) From 15e2ea5afb2d9526613135dbe2eb9ebcb535ce94 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 12 Jun 2025 13:26:19 +0200 Subject: [PATCH 024/100] Fix couchbase tests --- couchbase/tests/test_unit.py | 33 ++++++++++++++++----------------- 1 file changed, 16 insertions(+), 17 deletions(-) diff --git a/couchbase/tests/test_unit.py b/couchbase/tests/test_unit.py index 4577d7582f409..25a437c02f842 100644 --- a/couchbase/tests/test_unit.py +++ b/couchbase/tests/test_unit.py @@ -72,26 +72,25 @@ def test__get_query_monitoring_data(instance_query): ("legacy config", {'user': 'new_foo', 'ssl_verify': False}, {'auth': ('new_foo', 'password'), 'verify': False}), ], ) -def test_config(test_case, dd_run_check, extra_config, expected_http_kwargs, instance): +def test_config(test_case, extra_config, expected_http_kwargs, instance): + """ + Test that the legacy and new auth configurations are both supported. + """ instance.update(extra_config) + check = Couchbase('couchbase', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: - r.get.return_value = mock.MagicMock(status_code=200) - - dd_run_check(check) - - http_wargs = { - 'auth': mock.ANY, - 'cert': mock.ANY, - 'headers': mock.ANY, - 'proxies': mock.ANY, - 'timeout': mock.ANY, - 'verify': mock.ANY, - 'allow_redirects': mock.ANY, - } - http_wargs.update(expected_http_kwargs) - r.get.assert_called_with('http://localhost:8091/pools/default/tasks', **http_wargs) + http_wargs = { + 'auth': mock.ANY, + 'cert': mock.ANY, + 'headers': mock.ANY, + 'proxies': mock.ANY, + 'timeout': mock.ANY, + 'verify': mock.ANY, + 'allow_redirects': mock.ANY, + } + http_wargs.update(expected_http_kwargs) + assert check.http.options == http_wargs @pytest.mark.parametrize( From 31b7728cde5a4b87b6e9689d49175b6c2501a44e Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 12 Jun 2025 15:05:17 +0200 Subject: [PATCH 025/100] Fix amazon_msk tests - mocking requests.get because the 
boto3 lib uses it --- amazon_msk/tests/conftest.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/amazon_msk/tests/conftest.py b/amazon_msk/tests/conftest.py index b5775c6f638cc..ace67250b8fa2 100644 --- a/amazon_msk/tests/conftest.py +++ b/amazon_msk/tests/conftest.py @@ -32,7 +32,8 @@ def mock_requests_get(url, *args, **kwargs): @pytest.fixture def mock_data(): - with mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): + # Mock requests.get because it is used internally within boto3 + with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): yield From 97cb44e250f4075153bd847b56ba8028bad0a185 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 12 Jun 2025 15:15:26 +0200 Subject: [PATCH 026/100] Fix apache tests by reworking mock with requests.Session --- apache/tests/conftest.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/apache/tests/conftest.py b/apache/tests/conftest.py index 6292ce634df45..63c78ecf14d26 100644 --- a/apache/tests/conftest.py +++ b/apache/tests/conftest.py @@ -49,7 +49,8 @@ def check_status_page_ready(): @pytest.fixture def mock_hide_server_version(): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): def mock_requests_get_headers(*args, **kwargs): r = requests.get(*args, **kwargs) From 182b4b4e7ebb1c42a47691d64545a0819200a865 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 12 Jun 2025 15:17:40 +0200 Subject: [PATCH 027/100] Fix arangodb tests by updating mock side effect --- arangodb/tests/test_arangodb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/arangodb/tests/test_arangodb.py b/arangodb/tests/test_arangodb.py index 365c776ee6b71..14ec1ce25b8c1 100644 --- a/arangodb/tests/test_arangodb.py +++ b/arangodb/tests/test_arangodb.py @@ -52,7 +52,7 @@ def test_invalid_endpoint(aggregator, 
instance_invalid_endpoint, dd_run_check): def test_check(instance, dd_run_check, aggregator, tag_condition, base_tags): check = ArangodbCheck('arangodb', {}, [instance]) - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): fixture = url.rsplit('/', 1)[-1] return MockResponse(file_path=os.path.join(os.path.dirname(__file__), 'fixtures', tag_condition, fixture)) From 62378f1de03c9d1cae91608cea98eab077baa249 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 12 Jun 2025 15:24:21 +0200 Subject: [PATCH 028/100] Fix couch tests by updating mock with requests.Session --- couch/tests/test_unit.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/couch/tests/test_unit.py b/couch/tests/test_unit.py index bfa3ba06dadae..7f2ad8678ae2d 100644 --- a/couch/tests/test_unit.py +++ b/couch/tests/test_unit.py @@ -30,7 +30,8 @@ def test_config(test_case, extra_config, expected_http_kwargs): instance.update(extra_config) check = CouchDb(common.CHECK_NAME, {}, instances=[instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200, content='{}') check.check(instance) From bca9b9deaaf10cb0bc6608b4980485cc2d821e2c Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 12 Jun 2025 15:35:51 +0200 Subject: [PATCH 029/100] Fix remaining occurrences of mock.patch("datadog_checks.base.utils.http.requests") --- airflow/tests/test_unit.py | 9 ++++++--- avi_vantage/tests/conftest.py | 3 ++- druid/tests/test_unit.py | 6 ++++-- ecs_fargate/tests/test_unit.py | 3 ++- envoy/tests/legacy/test_unit.py | 3 ++- etcd/tests/test_integration.py | 3 ++- gitlab_runner/tests/test_unit.py | 3 ++- marathon/tests/test_unit.py | 3 ++- mesos_master/tests/test_check.py | 3 ++- mesos_slave/tests/test_unit.py | 9 ++++++--- nginx/tests/test_unit.py | 6 ++++-- 
php_fpm/tests/test_unit.py | 12 ++++++++---- rabbitmq/tests/test_unit.py | 6 ++++-- torchserve/tests/management/test_model_discovery.py | 3 ++- 14 files changed, 48 insertions(+), 24 deletions(-) diff --git a/airflow/tests/test_unit.py b/airflow/tests/test_unit.py index 7b69855fdde2a..cbdef5dc22efd 100644 --- a/airflow/tests/test_unit.py +++ b/airflow/tests/test_unit.py @@ -82,7 +82,8 @@ def test_dag_total_tasks(aggregator, task_instance): check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): mock_resp = mock.MagicMock(status_code=200) mock_resp.json.side_effect = [ {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, @@ -100,7 +101,8 @@ def test_dag_task_ongoing_duration(aggregator, task_instance): check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): mock_resp = mock.MagicMock(status_code=200) mock_resp.json.side_effect = [ {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, @@ -144,7 +146,8 @@ def test_config_collect_ongoing_duration(collect_ongoing_duration, should_call_m check = AirflowCheck('airflow', common.FULL_CONFIG, [instance]) with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): mock_resp = 
mock.MagicMock(status_code=200) mock_resp.json.side_effect = [ {'metadatabase': {'status': 'healthy'}, 'scheduler': {'status': 'healthy'}}, diff --git a/avi_vantage/tests/conftest.py b/avi_vantage/tests/conftest.py index 68cbb5fb70668..cb6d79793b997 100644 --- a/avi_vantage/tests/conftest.py +++ b/avi_vantage/tests/conftest.py @@ -58,7 +58,8 @@ def _get_metrics(metrics_folder=NO_TENANT_METRICS_FOLDER, endpoint=None): @pytest.fixture def mock_client(): - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): def get(url: AnyStr, *_: Any, **__: Any): parsed = urlparse(url) diff --git a/druid/tests/test_unit.py b/druid/tests/test_unit.py index ef7ee18b45228..3a21281fbc8bc 100644 --- a/druid/tests/test_unit.py +++ b/druid/tests/test_unit.py @@ -22,7 +22,8 @@ def test_missing_url_config(aggregator): def test_service_check_can_connect_success(aggregator, instance): check = DruidCheck('druid', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): mock_resp = mock.MagicMock(status_code=200) mock_resp.json.return_value = {'abc': '123'} req.get.return_value = mock_resp @@ -41,7 +42,8 @@ def test_service_check_can_connect_success(aggregator, instance): def test_service_check_can_connect_failure(aggregator, instance, exception_class): check = DruidCheck('druid', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): attrs = {'raise_for_status.side_effect': exception_class} req.get.side_effect = [mock.MagicMock(status_code=500, **attrs)] diff --git a/ecs_fargate/tests/test_unit.py b/ecs_fargate/tests/test_unit.py index 4f49d663c211b..c4f967166bcec 100644 --- 
a/ecs_fargate/tests/test_unit.py +++ b/ecs_fargate/tests/test_unit.py @@ -293,7 +293,8 @@ def test_config(test_case, extra_config, expected_http_kwargs, dd_run_check): instance = extra_config check = FargateCheck('ecs_fargate', {}, instances=[instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) dd_run_check(check) diff --git a/envoy/tests/legacy/test_unit.py b/envoy/tests/legacy/test_unit.py index 09c3e63e77226..f0c6a19ad38f8 100644 --- a/envoy/tests/legacy/test_unit.py +++ b/envoy/tests/legacy/test_unit.py @@ -139,7 +139,8 @@ def test_config(extra_config, expected_http_kwargs, check, dd_run_check): instance.update(extra_config) check = check(instance) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) dd_run_check(check) diff --git a/etcd/tests/test_integration.py b/etcd/tests/test_integration.py index 734bd86b67da7..da94be9a527c3 100644 --- a/etcd/tests/test_integration.py +++ b/etcd/tests/test_integration.py @@ -78,7 +78,8 @@ def test_config(instance, test_case, extra_config, expected_http_kwargs, dd_run_ instance.update(extra_config) check = Etcd(CHECK_NAME, {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) dd_run_check(check) diff --git a/gitlab_runner/tests/test_unit.py b/gitlab_runner/tests/test_unit.py index 4d44c97dd137e..9d3fb039050aa 100644 --- a/gitlab_runner/tests/test_unit.py +++ b/gitlab_runner/tests/test_unit.py @@ -28,7 +28,8 @@ def test_timeout(test_case, timeout_config, expected_timeout): 
gitlab_runner = GitlabRunnerCheck('gitlab_runner', common.CONFIG['init_config'], instances=config['instances']) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) gitlab_runner.check(config['instances'][0]) diff --git a/marathon/tests/test_unit.py b/marathon/tests/test_unit.py index 4c39a7afe3be3..ef104e6264e90 100644 --- a/marathon/tests/test_unit.py +++ b/marathon/tests/test_unit.py @@ -107,7 +107,8 @@ def test_config(test_case, init_config, extra_config, expected_http_kwargs): instance.update(extra_config) check = Marathon('marathon', init_config, instances=[instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) check.check(instance) diff --git a/mesos_master/tests/test_check.py b/mesos_master/tests/test_check.py index 721acce7a3613..4c026dcabc9c2 100644 --- a/mesos_master/tests/test_check.py +++ b/mesos_master/tests/test_check.py @@ -152,7 +152,8 @@ def test_can_connect_service_check( ): check = MesosMaster('mesos_master', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = request_mock_side_effects try: diff --git a/mesos_slave/tests/test_unit.py b/mesos_slave/tests/test_unit.py index 0cc7b699b6d96..098e3683313cf 100644 --- a/mesos_slave/tests/test_unit.py +++ b/mesos_slave/tests/test_unit.py @@ -180,7 +180,8 @@ def test_can_connect_service_check_state( instance, aggregator, test_case_name, request_mock_effects, expected_tags, expect_exception, expected_status ): check = MesosSlave('mesos_slave', {}, [instance]) - with 
mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = request_mock_effects try: check._process_state_info('http://hello.com', instance['tasks'], 5050, instance['tags']) @@ -198,7 +199,8 @@ def test_can_connect_service_with_instance_cluster_name(instance, aggregator): expected_tags = ['url:http://hello.com/state'] + cluster_name_tag + additional_tags expected_status = AgentCheck.OK check = MesosSlave('mesos_slave', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = [mock.MagicMock(status_code=200, content='{}')] try: check._process_state_info('http://hello.com', instance['tasks'], 5050, instance['tags']) @@ -216,7 +218,8 @@ def test_can_connect_service_check_stats( instance, aggregator, test_case_name, request_mock_effects, expected_tags, expect_exception, expected_status ): check = MesosSlave('mesos_slave', {}, [instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = request_mock_effects try: check._process_stats_info('http://hello.com', instance['tags']) diff --git a/nginx/tests/test_unit.py b/nginx/tests/test_unit.py index ad06f8a25d2cc..66de9c5dfd748 100644 --- a/nginx/tests/test_unit.py +++ b/nginx/tests/test_unit.py @@ -68,7 +68,8 @@ def test_config(check, instance, test_case, extra_config, expected_http_kwargs): c = check(instance) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200, content=b'{}') c.check(instance) @@ -90,7 +91,8 @@ def 
test_config(check, instance, test_case, extra_config, expected_http_kwargs): def test_no_version(check, instance, caplog): c = check(instance) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200, content=b'{}', headers={'server': 'nginx'}) c.check(instance) diff --git a/php_fpm/tests/test_unit.py b/php_fpm/tests/test_unit.py index ec4212b8e585b..b0f46d613c077 100644 --- a/php_fpm/tests/test_unit.py +++ b/php_fpm/tests/test_unit.py @@ -38,7 +38,8 @@ def test_should_not_retry(check, instance): backoff only works when response code is 503, otherwise the error should bubble up """ - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = FooException("Generic http error here") with pytest.raises(FooException): check._process_status(instance['status_url'], [], None, False) @@ -48,7 +49,8 @@ def test_should_bail_out(check, instance): """ backoff should give up after 3 attempts """ - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): attrs = {'raise_for_status.side_effect': FooException()} r.get.side_effect = [ mock.MagicMock(status_code=503, **attrs), @@ -65,7 +67,8 @@ def test_backoff_success(check, instance, aggregator, payload): Success after 2 failed attempts """ instance['ping_url'] = None - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): attrs = {'json.return_value': payload} r.get.side_effect = [ mock.MagicMock(status_code=503), @@ -103,7 +106,8 @@ def test_config(test_case, extra_config, expected_http_kwargs, 
dd_run_check): instance.update(extra_config) check = PHPFPMCheck('php_fpm', {}, instances=[instance]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) dd_run_check(check) diff --git a/rabbitmq/tests/test_unit.py b/rabbitmq/tests/test_unit.py index 8dd5548ed6c32..4e0d95ea9f06a 100644 --- a/rabbitmq/tests/test_unit.py +++ b/rabbitmq/tests/test_unit.py @@ -25,7 +25,8 @@ def test__get_data(check): - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.side_effect = [requests.exceptions.HTTPError, ValueError] with pytest.raises(RabbitMQException) as e: check._get_data('') @@ -138,7 +139,8 @@ def test_config(check, test_case, extra_config, expected_http_kwargs): config.update(extra_config) check = RabbitMQ('rabbitmq', {}, instances=[config]) - with mock.patch('datadog_checks.base.utils.http.requests') as r: + r = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=r): r.get.return_value = mock.MagicMock(status_code=200) check.check(config) diff --git a/torchserve/tests/management/test_model_discovery.py b/torchserve/tests/management/test_model_discovery.py index de5f97c04fc23..ea17df181c441 100644 --- a/torchserve/tests/management/test_model_discovery.py +++ b/torchserve/tests/management/test_model_discovery.py @@ -109,7 +109,8 @@ def test_get_models(check, mocked_management_instance, expected_models, fixture_ mock_resp.raise_for_status.side_effect = HTTPError() if status_code != 200 else None responses.append(mock_resp) - with mock.patch('datadog_checks.base.utils.http.requests') as req: + req = mock.MagicMock() + with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): discovery = 
ModelDiscovery(check(mocked_management_instance), include=[".*"]) req.get.side_effect = responses assert [('.*', model['modelName'], model, None) for model in expected_models] == list(discovery.get_items()) From 001a3e4e22b05c02ef349e8a89b99389399a5e10 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 12 Jun 2025 17:16:33 +0200 Subject: [PATCH 030/100] Fix gitlab tests --- gitlab/tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gitlab/tests/conftest.py b/gitlab/tests/conftest.py index 9b8ce8222338c..b0d47e010b803 100644 --- a/gitlab/tests/conftest.py +++ b/gitlab/tests/conftest.py @@ -104,7 +104,7 @@ def dd_environment(): @pytest.fixture() def mock_data(): with mock.patch( - 'requests.get', + 'requests.Session.get', side_effect=mocked_requests_get, ): yield From 4855a922510f972a3af22e50252d2a6256c7d81c Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 12 Jun 2025 17:42:29 +0200 Subject: [PATCH 031/100] Fix harbor tests --- harbor/tests/conftest.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/harbor/tests/conftest.py b/harbor/tests/conftest.py index ea2a80f91a529..65fffa87bd2ca 100644 --- a/harbor/tests/conftest.py +++ b/harbor/tests/conftest.py @@ -107,7 +107,7 @@ def harbor_api(harbor_check, admin_instance, patch_requests): @pytest.fixture def patch_requests(): - with patch("requests.api.request", side_effect=mocked_requests): + with patch("requests.Session.request", side_effect=mocked_requests): yield @@ -117,32 +117,32 @@ def get_docker_compose_file(): return os.path.join(HERE, 'compose', harbor_folder, 'docker-compose.yml') -def mocked_requests(_, *args, **kwargs): +def mocked_requests(_, url, **kwargs): def match(url, *candidates_url): for c in candidates_url: if url == c.format(base_url=URL): return True return False - if match(args[0], LOGIN_URL): + if match(url, LOGIN_URL): return MockResponse() - elif match(args[0], HEALTH_URL): + elif match(url, HEALTH_URL): return 
MockResponse(json_data=HEALTH_FIXTURE) - elif match(args[0], PING_URL): + elif match(url, PING_URL): return MockResponse('Pong') - elif match(args[0], CHARTREPO_HEALTH_URL): + elif match(url, CHARTREPO_HEALTH_URL): return MockResponse(json_data=CHARTREPO_HEALTH_FIXTURE) - elif match(args[0], PROJECTS_URL): + elif match(url, PROJECTS_URL): return MockResponse(json_data=PROJECTS_FIXTURE) - elif match(args[0], REGISTRIES_URL): + elif match(url, REGISTRIES_URL): return MockResponse(json_data=REGISTRIES_FIXTURE) - elif match(args[0], REGISTRIES_PING_URL): + elif match(url, REGISTRIES_PING_URL): return MockResponse() - elif match(args[0], VOLUME_INFO_URL): + elif match(url, VOLUME_INFO_URL): if HARBOR_VERSION < VERSION_2_2: return MockResponse(json_data=VOLUME_INFO_PRE_2_2_FIXTURE) return MockResponse(json_data=VOLUME_INFO_FIXTURE) - elif match(args[0], SYSTEM_INFO_URL): + elif match(url, SYSTEM_INFO_URL): return MockResponse(json_data=SYSTEM_INFO_FIXTURE) return MockResponse(status_code=404) From 65e97699126c28a5d9dd5405156415d7a2c02d85 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 13 Jun 2025 11:54:08 +0200 Subject: [PATCH 032/100] Fix hdfs_namenode tests --- hdfs_namenode/tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hdfs_namenode/tests/conftest.py b/hdfs_namenode/tests/conftest.py index 40191a59a3c4a..f6a0da366dfab 100644 --- a/hdfs_namenode/tests/conftest.py +++ b/hdfs_namenode/tests/conftest.py @@ -54,7 +54,7 @@ def mocked_auth_request(): yield -def requests_get_mock(url, *args, **kwargs): +def requests_get_mock(session, url, *args, **kwargs): if url == NAME_SYSTEM_STATE_URL: return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'hdfs_namesystem_state.json')) elif url == NAME_SYSTEM_URL: From cc6c14a0ebe57ce4a7cfca2d996ff00d3fa19ffb Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 13 Jun 2025 14:50:45 +0200 Subject: [PATCH 033/100] Fix http_check test --- datadog_checks_base/datadog_checks/base/utils/http.py | 2 
+- http_check/tests/test_http_integration.py | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 37fd6d8b390dc..16bbd9d34979f 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -155,7 +155,7 @@ def create_ssl_context(config, overrides=None): context.load_default_certs(ssl.Purpose.SERVER_AUTH) except FileNotFoundError: LOGGER.warning( - 'TLS CA certificate file not found: %s. ' 'Please check the `tls_ca_cert` configuration option.', + 'TLS CA certificate file not found: %s. Please check the `tls_ca_cert` configuration option.', ca_cert, ) diff --git a/http_check/tests/test_http_integration.py b/http_check/tests/test_http_integration.py index 27714ba2adac6..08c5bd69984c4 100644 --- a/http_check/tests/test_http_integration.py +++ b/http_check/tests/test_http_integration.py @@ -428,7 +428,7 @@ def test_data_methods(aggregator, http_check): aggregator.reset() -def test_unexisting_ca_cert_should_throw_error(aggregator, dd_run_check): +def test_unexisting_ca_cert_should_log_warning(aggregator, dd_run_check): instance = { 'name': 'Test Web VM HTTPS SSL', 'url': 'https://foo.bar.net/', @@ -441,10 +441,11 @@ def test_unexisting_ca_cert_should_throw_error(aggregator, dd_run_check): } check = HTTPCheck('http_check', {'ca_certs': 'foo'}, [instance]) + with mock.patch('datadog_checks.base.utils.http.logging.Logger.warning') as mock_warning: - dd_run_check(check) - aggregator.assert_service_check(HTTPCheck.SC_STATUS, status=AgentCheck.CRITICAL) - assert 'invalid path: /tmp/unexisting.crt' in aggregator._service_checks[HTTPCheck.SC_STATUS][0].message + dd_run_check(check) + mock_warning.assert_called_once() + assert any(instance['tls_ca_cert'] in arg for arg in mock_warning.call_args) def test_instance_auth_token(dd_run_check): From 
d9ff6ba5f967498a098ca8b53d3872538df4fad8 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 13 Jun 2025 15:02:40 +0200 Subject: [PATCH 034/100] Fix mapreduce tests --- mapreduce/tests/conftest.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mapreduce/tests/conftest.py b/mapreduce/tests/conftest.py index ce15c2e06934d..dea2ab20b4951 100644 --- a/mapreduce/tests/conftest.py +++ b/mapreduce/tests/conftest.py @@ -66,7 +66,7 @@ def get_custom_hosts(): return [(host, '127.0.0.1') for host in MOCKED_E2E_HOSTS] -def requests_get_mock(*args, **kwargs): +def requests_get_mock(session, *args, **kwargs): url = args[0] # The parameter that creates the query params (kwargs) is an unordered dict, # so the query params can be in any order @@ -94,7 +94,7 @@ def requests_get_mock(*args, **kwargs): raise Exception("There is no mock request for {}".format(url)) -def requests_auth_mock(*args, **kwargs): +def requests_auth_mock(session, *args, **kwargs): # Make sure we're passing in authentication assert 'auth' in kwargs, "Error, missing authentication" @@ -102,4 +102,4 @@ def requests_auth_mock(*args, **kwargs): assert kwargs['auth'] == (TEST_USERNAME, TEST_PASSWORD), "Incorrect username or password" # Return mocked request.get(...) 
- return requests_get_mock(*args, **kwargs) + return requests_get_mock(session, *args, **kwargs) From ba6b51a97dc5bddc4d813c06b5b8a30eae977f27 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 13 Jun 2025 15:08:22 +0200 Subject: [PATCH 035/100] Fix proxysql test --- proxysql/tests/test_unit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/proxysql/tests/test_unit.py b/proxysql/tests/test_unit.py index fc292e22b0908..0c842bdb1f1a3 100644 --- a/proxysql/tests/test_unit.py +++ b/proxysql/tests/test_unit.py @@ -101,7 +101,7 @@ def test_config_ok(dd_run_check, instance_basic): @pytest.mark.unit def test_tls_config_ok(dd_run_check, instance_basic_tls): - with mock.patch('datadog_checks.base.utils.tls.ssl') as ssl: + with mock.patch('datadog_checks.base.utils.http.ssl') as ssl: with mock.patch('datadog_checks.proxysql.proxysql.pymysql') as pymysql: check = get_check(instance_basic_tls) From 99b3c1b177319df5a318ee92f1d0f3dae2e57069 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 13 Jun 2025 17:31:42 +0200 Subject: [PATCH 036/100] Fix spark tests --- spark/tests/test_spark.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/spark/tests/test_spark.py b/spark/tests/test_spark.py index f5a143e436d3d..666f7d8c050fa 100644 --- a/spark/tests/test_spark.py +++ b/spark/tests/test_spark.py @@ -178,7 +178,7 @@ def get_default_mock(url): raise KeyError(f"{url} does not match any response fixtures.") -def yarn_requests_get_mock(url, *args, **kwargs): +def yarn_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == YARN_APP_URL: @@ -188,7 +188,7 @@ def yarn_requests_get_mock(url, *args, **kwargs): return get_default_mock(url) -def yarn_requests_auth_mock(*args, **kwargs): +def yarn_requests_auth_mock(session, url, *args, **kwargs): # Make sure we're passing in authentication assert 'auth' in kwargs, "Error, missing authentication" @@ -196,10 +196,10 @@ def yarn_requests_auth_mock(*args, 
**kwargs): assert kwargs['auth'] == (TEST_USERNAME, TEST_PASSWORD), "Incorrect username or password" # Return mocked request.get(...) - return yarn_requests_get_mock(*args, **kwargs) + return yarn_requests_get_mock(session, url, *args, **kwargs) -def mesos_requests_get_mock(url, *args, **kwargs): +def mesos_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == MESOS_APP_URL: @@ -220,7 +220,7 @@ def mesos_requests_get_mock(url, *args, **kwargs): return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) -def driver_requests_get_mock(url, *args, **kwargs): +def driver_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == DRIVER_APP_URL: @@ -241,7 +241,7 @@ def driver_requests_get_mock(url, *args, **kwargs): return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) -def standalone_requests_get_mock(url, *args, **kwargs): +def standalone_requests_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == STANDALONE_APP_URL: @@ -264,7 +264,7 @@ def standalone_requests_get_mock(url, *args, **kwargs): return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) -def standalone_requests_pre20_get_mock(url, *args, **kwargs): +def standalone_requests_pre20_get_mock(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == STANDALONE_APP_URL: @@ -299,7 +299,7 @@ def standalone_requests_pre20_get_mock(url, *args, **kwargs): return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'metrics_json')) -def proxy_with_warning_page_mock(url, *args, **kwargs): +def proxy_with_warning_page_mock(session, url, *args, **kwargs): cookies = kwargs.get('cookies') or {} proxy_cookie = cookies.get('proxy_cookie') url_parts = list(urlparse(url)) @@ -307,7 +307,7 @@ def proxy_with_warning_page_mock(url, *args, **kwargs): if proxy_cookie and query.get('proxyapproved') == 'true': del query['proxyapproved'] url_parts[4] = urlencode(query) - return 
standalone_requests_get_mock(urlunparse(url_parts), *args[1:], **kwargs) + return standalone_requests_get_mock(session, urlunparse(url_parts), *args[1:], **kwargs) else: # Display the html warning page with the redirect link query['proxyapproved'] = 'true' @@ -1288,7 +1288,7 @@ def test_yarn_no_json_for_app_properties( In these cases we skip only the specific missing apps and metrics while collecting all others. """ - def get_without_json(url, *args, **kwargs): + def get_without_json(session, url, *args, **kwargs): arg_url = Url(url) if arg_url == property_url: return mock_response @@ -1322,7 +1322,7 @@ def get_without_json(url, *args, **kwargs): ] ) else: - return yarn_requests_get_mock(url, *args, **kwargs) + return yarn_requests_get_mock(session, url, *args, **kwargs) mocker.patch('requests.Session.get', get_without_json) dd_run_check(SparkCheck('spark', {}, [YARN_CONFIG])) From d7028de221866d55ae2dd3884af7d1f6a7d6bfe0 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 13 Jun 2025 17:44:44 +0200 Subject: [PATCH 037/100] Fix spark ssl test by fixing REMAPPER --- spark/datadog_checks/spark/spark.py | 1 + spark/tests/test_spark.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/spark/datadog_checks/spark/spark.py b/spark/datadog_checks/spark/spark.py index 8a0901c49d7ac..0d3025d2a969c 100644 --- a/spark/datadog_checks/spark/spark.py +++ b/spark/datadog_checks/spark/spark.py @@ -51,6 +51,7 @@ class SparkCheck(AgentCheck): HTTP_CONFIG_REMAPPER = { 'ssl_verify': {'name': 'tls_verify'}, 'ssl_cert': {'name': 'tls_cert'}, + 'ssl_ca_cert': {'name': 'tls_ca_cert'}, 'ssl_key': {'name': 'tls_private_key'}, } diff --git a/spark/tests/test_spark.py b/spark/tests/test_spark.py index 666f7d8c050fa..4cdab4c4201f0 100644 --- a/spark/tests/test_spark.py +++ b/spark/tests/test_spark.py @@ -404,7 +404,8 @@ def proxy_with_warning_page_mock(session, url, *args, **kwargs): 'spark_url': SSL_SERVER_URL, 'cluster_name': CLUSTER_NAME, 'spark_cluster_mode': 
'spark_standalone_mode', - 'ssl_verify': os.path.join(CERTIFICATE_DIR, 'cert.cert'), + 'ssl_verify': True, + 'ssl_ca_cert': os.path.join(CERTIFICATE_DIR, 'cert.cert'), 'executor_level_metrics': True, } From 4777e6c26e5dda71d720cf41943f9fb4adc73b45 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 13 Jun 2025 17:48:08 +0200 Subject: [PATCH 038/100] Fix twistlock tests --- twistlock/tests/test_twistlock.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/twistlock/tests/test_twistlock.py b/twistlock/tests/test_twistlock.py index 8a0ad2fef3336..c5e7457be9963 100644 --- a/twistlock/tests/test_twistlock.py +++ b/twistlock/tests/test_twistlock.py @@ -35,7 +35,7 @@ def mock_get_factory(fixture_group): - def mock_get(url, *args, **kwargs): + def mock_get(session, url, *args, **kwargs): split_url = url.split('/') path = split_url[-1] return MockResponse(file_path=os.path.join(HERE, 'fixtures', fixture_group, '{}.json'.format(path))) @@ -72,7 +72,8 @@ def test_config_project(aggregator, instance, fixture_group): check.check(instance) r.assert_called_with( - mock.ANY, + mock.ANY, # The session object is passed as the first argument + mock.ANY, # The URL is passed as the second argument params=qparams, auth=mock.ANY, cert=mock.ANY, From 8dd1d361bd85f6bd0317b3190750ab2b2a039b63 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 13 Jun 2025 18:34:34 +0200 Subject: [PATCH 039/100] Fix vault tests --- vault/tests/test_vault.py | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/vault/tests/test_vault.py b/vault/tests/test_vault.py index 0e8c2892f9cdb..12691eedfc1d7 100644 --- a/vault/tests/test_vault.py +++ b/vault/tests/test_vault.py @@ -76,7 +76,7 @@ def test_service_check_connect_ok_all_tags(self, aggregator, dd_run_check, globa # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if 
url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} @@ -169,7 +169,7 @@ def test_service_check_unsealed_ok_all_tags(self, aggregator, dd_run_check, glob # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} @@ -212,7 +212,7 @@ def test_service_check_unsealed_fail(self, aggregator, dd_run_check, use_openmet # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': return MockResponse( json_data={ @@ -253,7 +253,7 @@ def test_service_check_initialized_ok_all_tags(self, aggregator, dd_run_check, g # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} @@ -296,7 +296,7 @@ def test_service_check_initialized_fail(self, aggregator, dd_run_check, use_open # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': return MockResponse( json_data={ @@ -327,7 +327,7 @@ def test_disable_legacy_cluster_tag(self, aggregator, dd_run_check, global_tags) # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, 
url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={'ha_enabled': False, 'is_self': True, 'leader_address': '', 'leader_cluster_address': ''} @@ -368,7 +368,7 @@ def test_replication_dr_mode(self, aggregator, dd_run_check, use_openmetrics): # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': return MockResponse( json_data={ @@ -405,7 +405,7 @@ def test_replication_dr_mode_collect_secondary(self, aggregator, dd_run_check, u # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': return MockResponse( json_data={ @@ -445,7 +445,7 @@ def test_replication_dr_mode_changed(self, aggregator, dd_run_check, use_openmet # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': if getattr(mock_requests_get, 'first_health_call', True): mock_requests_get.first_health_call = False @@ -504,7 +504,7 @@ def test_event_leader_change(self, aggregator, dd_run_check, cluster, use_openme # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={ @@ -545,7 +545,7 @@ def test_leader_change_not_self(self, aggregator, dd_run_check, use_openmetrics) # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return 
MockResponse( json_data={ @@ -571,7 +571,7 @@ def test_is_leader_metric_true(self, aggregator, dd_run_check, use_openmetrics): # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={ @@ -597,7 +597,7 @@ def test_is_leader_metric_false(self, aggregator, dd_run_check, use_openmetrics) # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse( json_data={ @@ -624,7 +624,7 @@ def test_sys_health_non_standard_status_codes(self, aggregator, dd_run_check, st # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/health': return MockResponse( json_data={ @@ -658,7 +658,7 @@ def test_sys_leader_non_standard_status_codes(self, aggregator, dd_run_check, us # Keep a reference for use during mock requests_get = requests.get - def mock_requests_get(url, *args, **kwargs): + def mock_requests_get(session, url, *args, **kwargs): if url == instance['api_url'] + '/sys/leader': return MockResponse(json_data={'errors': ["Vault is sealed"]}, status_code=503) return requests_get(url, *args, **kwargs) @@ -807,9 +807,9 @@ def test_x_vault_request_header_is_set(monkeypatch, instance, dd_run_check, use_ c = Vault(Vault.CHECK_NAME, {}, [instance]) - requests_get = requests.get + requests_get = requests.Session.get mock_get = mock.Mock(side_effect=requests_get) - monkeypatch.setattr(requests, 'get', mock_get) + monkeypatch.setattr(requests.Session, 'get', mock_get) dd_run_check(c) From 442f8857438a60d4bae426d9bbf509ef6010814d Mon Sep 17 00:00:00 2001 From: David Kirov Date: 
Fri, 13 Jun 2025 18:38:15 +0200 Subject: [PATCH 040/100] Fix vertica tests --- vertica/tests/test_unit.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vertica/tests/test_unit.py b/vertica/tests/test_unit.py index 09ecd3baeff81..34068a2f6cd0f 100644 --- a/vertica/tests/test_unit.py +++ b/vertica/tests/test_unit.py @@ -19,7 +19,7 @@ def test_ssl_config_ok(aggregator, tls_instance): with mock.patch('datadog_checks.vertica.vertica.vertica') as vertica: - with mock.patch('datadog_checks.base.utils.tls.ssl') as ssl: + with mock.patch('datadog_checks.base.utils.http.ssl') as ssl: vertica.connect.return_value = mock.MagicMock() tls_context = mock.MagicMock() ssl.SSLContext.return_value = tls_context @@ -37,7 +37,7 @@ def test_ssl_config_ok(aggregator, tls_instance): def test_ssl_legacy_config_ok(aggregator, tls_instance_legacy): with mock.patch('datadog_checks.vertica.vertica.vertica') as vertica: - with mock.patch('datadog_checks.base.utils.tls.ssl') as ssl: + with mock.patch('datadog_checks.base.utils.http.ssl') as ssl: vertica.connect.return_value = mock.MagicMock() tls_context = mock.MagicMock() ssl.SSLContext.return_value = tls_context From ca2621075892a20d3d49e2225b8fccf49e2fdf2c Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 13 Jun 2025 19:19:45 +0200 Subject: [PATCH 041/100] Fix yarn tests --- yarn/tests/conftest.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/yarn/tests/conftest.py b/yarn/tests/conftest.py index 60e86e1c27963..1ca370584a43b 100644 --- a/yarn/tests/conftest.py +++ b/yarn/tests/conftest.py @@ -63,7 +63,7 @@ def mocked_request(): @pytest.fixture def mocked_auth_request(): - def requests_auth_get(*args, **kwargs): + def requests_auth_get(session, *args, **kwargs): # Make sure we're passing in authentication assert 'auth' in kwargs, 'Missing "auth" argument in requests.Session.get(...) 
call' @@ -74,7 +74,7 @@ def requests_auth_get(*args, **kwargs): ), "Incorrect username or password in requests.Session.get" # Return mocked request.get(...) - return requests_get_mock(*args, **kwargs) + return requests_get_mock(session, *args, **kwargs) with patch("requests.Session.get", new=requests_auth_get): yield @@ -83,10 +83,10 @@ def requests_auth_get(*args, **kwargs): @pytest.fixture def mocked_bad_cert_request(): """ - Mock request.get to an endpoint with a badly configured ssl cert + Mock request.Session.get to an endpoint with a badly configured ssl cert """ - def requests_bad_cert_get(*args, **kwargs): + def requests_bad_cert_get(session, *args, **kwargs): # Make sure we're passing in the 'verify' argument assert 'verify' in kwargs, 'Missing "verify" argument in requests.Session.get(...) call' @@ -94,13 +94,13 @@ def requests_bad_cert_get(*args, **kwargs): raise SSLError("certificate verification failed for {}".format(args[0])) # Return the actual response - return requests_get_mock(*args, **kwargs) + return requests_get_mock(session, *args, **kwargs) with patch("requests.Session.get", new=requests_bad_cert_get): yield -def requests_get_mock(*args, **kwargs): +def requests_get_mock(session, *args, **kwargs): if args[0] == YARN_CLUSTER_METRICS_URL: return MockResponse(file_path=os.path.join(FIXTURE_DIR, 'cluster_metrics')) elif args[0] == YARN_APPS_URL: From 291708e22517428a31c41bf15e46769dc8aa2769 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Mon, 16 Jun 2025 14:21:57 +0200 Subject: [PATCH 042/100] Fix avi_vantage test --- .../datadog_checks/avi_vantage/check.py | 2 ++ avi_vantage/tests/conftest.py | 36 +++++++++---------- 2 files changed, 20 insertions(+), 18 deletions(-) diff --git a/avi_vantage/datadog_checks/avi_vantage/check.py b/avi_vantage/datadog_checks/avi_vantage/check.py index 5552b1c7259ce..f9b4aba2c355c 100644 --- a/avi_vantage/datadog_checks/avi_vantage/check.py +++ b/avi_vantage/datadog_checks/avi_vantage/check.py @@ -96,7 +96,9 @@ def 
login(self): @AgentCheck.metadata_entrypoint def collect_avi_version(self): + self.log.warning(f"http get is: {self.http.get}") response = self.http.get(self.base_url + "/api/cluster/version") + self.log.warning(f"Response is: {response}") response.raise_for_status() version = response.json()['Version'] self.set_metadata('version', version) diff --git a/avi_vantage/tests/conftest.py b/avi_vantage/tests/conftest.py index cb6d79793b997..5ca10cb6e6e43 100644 --- a/avi_vantage/tests/conftest.py +++ b/avi_vantage/tests/conftest.py @@ -58,30 +58,30 @@ def _get_metrics(metrics_folder=NO_TENANT_METRICS_FOLDER, endpoint=None): @pytest.fixture def mock_client(): - req = mock.MagicMock() - with mock.patch('datadog_checks.base.utils.http.requests.Session', return_value=req): + def mock_get(url: AnyStr, *__: Any, **___: Any): + parsed = urlparse(url) + resource = [part for part in parsed.path.split('/') if len(part) > 0][-1] + query_params = parsed.query - def get(url: AnyStr, *_: Any, **__: Any): - parsed = urlparse(url) - resource = [part for part in parsed.path.split('/') if len(part) > 0][-1] - query_params = parsed.query + path = {} - path = {} - - path['tenant=admin'] = ADMIN_TENANT_METRICS_FOLDER - path['tenant=admin%2Ctenant_a%2Ctenant_b'] = MULTIPLE_TENANTS_METRICS_FOLDER - - if query_params: - return MockResponse( - file_path=os.path.join(HERE, 'compose', 'fixtures', path[query_params], f'{resource}_metrics') - ) + path['tenant=admin'] = ADMIN_TENANT_METRICS_FOLDER + path['tenant=admin%2Ctenant_a%2Ctenant_b'] = MULTIPLE_TENANTS_METRICS_FOLDER + if query_params: return MockResponse( - file_path=os.path.join(HERE, 'compose', 'fixtures', NO_TENANT_METRICS_FOLDER, f'{resource}_metrics') + file_path=os.path.join(HERE, 'compose', 'fixtures', path[query_params], f'{resource}_metrics') ) - req.Session = mock.MagicMock(return_value=mock.MagicMock(get=get)) - yield + return MockResponse( + file_path=os.path.join(HERE, 'compose', 'fixtures', NO_TENANT_METRICS_FOLDER, 
f'{resource}_metrics') + ) + def mock_post(url: AnyStr, *__: Any, **___: Any): + return mock.MagicMock(status_code=200, content=b'{"results": []}') + + with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.get', side_effect=mock_get): + with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.post', new=mock_post): + yield @pytest.fixture(scope='session') From 785bf0a19302e1990a6d07a510551faf5a1dabe8 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Mon, 16 Jun 2025 16:58:38 +0200 Subject: [PATCH 043/100] Fix http warning test --- http_check/tests/test_http_integration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/http_check/tests/test_http_integration.py b/http_check/tests/test_http_integration.py index 08c5bd69984c4..741d2615f3da7 100644 --- a/http_check/tests/test_http_integration.py +++ b/http_check/tests/test_http_integration.py @@ -444,7 +444,7 @@ def test_unexisting_ca_cert_should_log_warning(aggregator, dd_run_check): with mock.patch('datadog_checks.base.utils.http.logging.Logger.warning') as mock_warning: dd_run_check(check) - mock_warning.assert_called_once() + mock_warning.assert_called() assert any(instance['tls_ca_cert'] in arg for arg in mock_warning.call_args) From 78fbd1e95060e7af150757c82bdb62b7d36842f4 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Mon, 16 Jun 2025 18:02:49 +0200 Subject: [PATCH 044/100] Extract options to tls_config logic in separate function --- .../datadog_checks/base/utils/http.py | 41 ++++++++++------- .../base/utils/http/test_tls_and_certs.py | 19 ++++++++ .../tests/base/utils/test_tls.py | 44 +++++++++++++++++++ 3 files changed, 87 insertions(+), 17 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 16bbd9d34979f..3db9887edabbc 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -176,6 +176,27 @@ def 
create_ssl_context(config, overrides=None): return context +def get_tls_config_from_options(new_options): + '''Extract TLS configuration from request options.''' + tls_config = {} + verify = new_options.get('verify') + cert = new_options.get('cert') + + if verify is True: + tls_config["tls_verify"] = True + elif verify is False: + tls_config["tls_verify"] = False + elif isinstance(verify, str): + tls_config["tls_verify"] = True + tls_config["tls_ca_cert"] = verify + + if isinstance(cert, str): + tls_config["tls_cert"] = cert + elif isinstance(cert, tuple) and len(cert) == 2: + tls_config["tls_cert"] = cert[0] + tls_config["tls_private_key"] = cert[1] + return tls_config + class SSLContextAdapter(requests.adapters.HTTPAdapter): """ @@ -542,23 +563,9 @@ def _get_new_context(self, url, new_options): if not url.startswith('https'): return - request_overrides = {} - if new_options.get('verify') != self.options['verify']: - if new_options.get('verify') is True: - request_overrides["tls_verify"] = True - elif new_options.get('verify') is False: - request_overrides["tls_verify"] = False - elif isinstance(new_options.get('verify'), str): - request_overrides["tls_verify"] = True - request_overrides["tls_ca_cert"] = new_options.get('verify') - if new_options.get('cert') != self.options['cert']: - if isinstance(new_options.get('cert'), str): - request_overrides["tls_cert"] = new_options.get('cert') - elif isinstance(new_options.get('cert'), tuple) and len(new_options.get('cert')) == 2: - request_overrides["tls_cert"] = new_options.get('cert')[0] - request_overrides["tls_private_key"] = new_options.get('cert')[1] - - return create_ssl_context(self.tls_config, overrides=request_overrides) + new_tls_config = get_tls_config_from_options(new_options) + + return create_ssl_context(self.tls_config, overrides=new_tls_config) def make_request_aia_chasing(self, request_method, method, url, new_options, persist): try: diff --git 
a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 481ea658e0159..d03e14578c4c3 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -47,6 +47,25 @@ def test_config_cert_and_private_key(self): assert mock_load_cert_chain.call_args[0][0] == 'cert' assert mock_load_cert_chain.call_args[1]["keyfile"] == 'key' + @pytest.mark.parametrize( + 'options, expected_cert, expected_key', + [ + pytest.param({}, None, None, id='cert foo'), + pytest.param({'cert': 'foo'}, 'foo', None, id='cert foo'), + pytest.param({'cert': ('foo','bar')}, 'foo', 'bar', id='cert foo,bar'), + ] + ) + def test_request_cert_gets_read(self, options, expected_cert, expected_key): + '''Test that the request options are set correctly in the new context.''' + with mock.patch.object(ssl.SSLContext, 'load_cert_chain') as mock_load_cert_chain: + RequestsWrapper({}, {}).get('https://google.com', **options) + + if options.get('cert') is None: + assert mock_load_cert_chain.call_count == 0 + return + + mock_load_cert_chain.assert_called_once() + mock_load_cert_chain.assert_called_with(expected_cert, keyfile=expected_key, password=None) class TestIgnoreTLSWarning: def test_config_default(self): diff --git a/datadog_checks_base/tests/base/utils/test_tls.py b/datadog_checks_base/tests/base/utils/test_tls.py index 26979b9d25b6a..7f7fdf262aacb 100644 --- a/datadog_checks_base/tests/base/utils/test_tls.py +++ b/datadog_checks_base/tests/base/utils/test_tls.py @@ -10,6 +10,7 @@ from datadog_checks.base import AgentCheck from datadog_checks.base.utils.tls import TlsContextWrapper +from datadog_checks.base.utils.http import get_tls_config_from_options from datadog_checks.dev import TempDir @@ -357,6 +358,49 @@ def test_ciphers(self, instance, expected_ciphers): assert actual_ciphers == expected_ciphers_list + @pytest.mark.parametrize( + 
'options, expected_config', + [ + pytest.param( + {'verify': False, 'cert': 'foo'}, + {'tls_verify': False, 'tls_cert': 'foo'}, + id='verify false and cert', + ), + pytest.param( + {'verify': True, 'cert': 'foo'}, + {'tls_verify': True, 'tls_cert': 'foo'}, + id='verify true and cert', + ), + pytest.param( + {'verify': 'bar', 'cert': 'foo'}, + {'tls_verify': True, 'tls_cert': 'foo', 'tls_ca_cert': 'bar'}, + id='verify string and cert', + ), + pytest.param( + {'verify': 'bar', 'cert': ('foo','key')}, + {'tls_verify': True, 'tls_cert': 'foo', 'tls_ca_cert': 'bar', 'tls_private_key': 'key'}, + id='verify string and cert with key', + ), + pytest.param( + {'cert': 'foo'}, + {'tls_cert': 'foo'}, + id='only cert', + ), + pytest.param( + {'verify': 'foo'}, + {'tls_verify': True, 'tls_ca_cert': 'foo'}, + id='only verify', + ), + pytest.param( + {}, + {}, + id='empty', + ), + ], + ) + def test_tls_config_from_options(self, options, expected_config): + assert get_tls_config_from_options(options) == expected_config + class TestTLSContextOverrides: def test_override_context(self): From dff0f881199662129fe309d988fd410b2caae770 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Mon, 16 Jun 2025 18:14:01 +0200 Subject: [PATCH 045/100] Add test for ssl_context initialization --- .../tests/base/utils/http/test_tls_and_certs.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index d03e14578c4c3..060fe956fcb5e 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -304,6 +304,23 @@ def test_requests_wrapper_creates_ssl_context(self): assert hasattr(http, 'ssl_context') assert isinstance(http.ssl_context, ssl.SSLContext) + + @pytest.mark.parametrize( + "instance, verify_mode, check_hostname", + [ + pytest.param({'tls_verify': True}, 
ssl.CERT_REQUIRED, True, id='tls_verify true'), + pytest.param({'tls_verify': False}, ssl.CERT_NONE, False, id='tls_verify false'), + pytest.param({'tls_verify': True, 'tls_validate_hostname': False}, ssl.CERT_REQUIRED, False, id='tls_verify true and tls_validate_hostname false'), + ] + ) + def test_ssl_context_has_correct_attributes(self, instance, verify_mode, check_hostname): + """Test that RequestsWrapper creates an SSLContext instance.""" + http = RequestsWrapper(instance, {}) + + assert hasattr(http, 'ssl_context') + assert isinstance(http.ssl_context, ssl.SSLContext) + assert http.ssl_context.verify_mode == verify_mode + assert http.ssl_context.check_hostname == check_hostname def test_session_uses_ssl_context_adapter(self): """Test that the session uses SSLContextAdapter for consistent TLS configuration.""" From d67b169aae2e721fc86136b8182da33f74e298fc Mon Sep 17 00:00:00 2001 From: David Kirov Date: Mon, 16 Jun 2025 18:19:46 +0200 Subject: [PATCH 046/100] Run linter --- avi_vantage/datadog_checks/avi_vantage/check.py | 2 -- avi_vantage/tests/conftest.py | 1 + .../datadog_checks/base/utils/http.py | 1 + .../tests/base/utils/http/test_tls_and_certs.py | 16 +++++++++++----- datadog_checks_base/tests/base/utils/test_tls.py | 4 ++-- vsphere/tests/conftest.py | 12 ++++++++---- vsphere/tests/mocked_api.py | 4 ++-- 7 files changed, 25 insertions(+), 15 deletions(-) diff --git a/avi_vantage/datadog_checks/avi_vantage/check.py b/avi_vantage/datadog_checks/avi_vantage/check.py index f9b4aba2c355c..5552b1c7259ce 100644 --- a/avi_vantage/datadog_checks/avi_vantage/check.py +++ b/avi_vantage/datadog_checks/avi_vantage/check.py @@ -96,9 +96,7 @@ def login(self): @AgentCheck.metadata_entrypoint def collect_avi_version(self): - self.log.warning(f"http get is: {self.http.get}") response = self.http.get(self.base_url + "/api/cluster/version") - self.log.warning(f"Response is: {response}") response.raise_for_status() version = response.json()['Version'] 
self.set_metadata('version', version) diff --git a/avi_vantage/tests/conftest.py b/avi_vantage/tests/conftest.py index 5ca10cb6e6e43..8de057987afac 100644 --- a/avi_vantage/tests/conftest.py +++ b/avi_vantage/tests/conftest.py @@ -76,6 +76,7 @@ def mock_get(url: AnyStr, *__: Any, **___: Any): return MockResponse( file_path=os.path.join(HERE, 'compose', 'fixtures', NO_TENANT_METRICS_FOLDER, f'{resource}_metrics') ) + def mock_post(url: AnyStr, *__: Any, **___: Any): return mock.MagicMock(status_code=200, content=b'{"results": []}') diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 3db9887edabbc..0ca8cdda004cb 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -176,6 +176,7 @@ def create_ssl_context(config, overrides=None): return context + def get_tls_config_from_options(new_options): '''Extract TLS configuration from request options.''' tls_config = {} diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 060fe956fcb5e..7857d7b935697 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -52,8 +52,8 @@ def test_config_cert_and_private_key(self): [ pytest.param({}, None, None, id='cert foo'), pytest.param({'cert': 'foo'}, 'foo', None, id='cert foo'), - pytest.param({'cert': ('foo','bar')}, 'foo', 'bar', id='cert foo,bar'), - ] + pytest.param({'cert': ('foo', 'bar')}, 'foo', 'bar', id='cert foo,bar'), + ], ) def test_request_cert_gets_read(self, options, expected_cert, expected_key): '''Test that the request options are set correctly in the new context.''' @@ -67,6 +67,7 @@ def test_request_cert_gets_read(self, options, expected_cert, expected_key): mock_load_cert_chain.assert_called_once() 
mock_load_cert_chain.assert_called_with(expected_cert, keyfile=expected_key, password=None) + class TestIgnoreTLSWarning: def test_config_default(self): instance = {} @@ -304,14 +305,19 @@ def test_requests_wrapper_creates_ssl_context(self): assert hasattr(http, 'ssl_context') assert isinstance(http.ssl_context, ssl.SSLContext) - + @pytest.mark.parametrize( "instance, verify_mode, check_hostname", [ pytest.param({'tls_verify': True}, ssl.CERT_REQUIRED, True, id='tls_verify true'), pytest.param({'tls_verify': False}, ssl.CERT_NONE, False, id='tls_verify false'), - pytest.param({'tls_verify': True, 'tls_validate_hostname': False}, ssl.CERT_REQUIRED, False, id='tls_verify true and tls_validate_hostname false'), - ] + pytest.param( + {'tls_verify': True, 'tls_validate_hostname': False}, + ssl.CERT_REQUIRED, + False, + id='tls_verify true and tls_validate_hostname false', + ), + ], ) def test_ssl_context_has_correct_attributes(self, instance, verify_mode, check_hostname): """Test that RequestsWrapper creates an SSLContext instance.""" diff --git a/datadog_checks_base/tests/base/utils/test_tls.py b/datadog_checks_base/tests/base/utils/test_tls.py index 7f7fdf262aacb..355372c487522 100644 --- a/datadog_checks_base/tests/base/utils/test_tls.py +++ b/datadog_checks_base/tests/base/utils/test_tls.py @@ -9,8 +9,8 @@ from mock import MagicMock, patch # noqa: F401 from datadog_checks.base import AgentCheck -from datadog_checks.base.utils.tls import TlsContextWrapper from datadog_checks.base.utils.http import get_tls_config_from_options +from datadog_checks.base.utils.tls import TlsContextWrapper from datadog_checks.dev import TempDir @@ -377,7 +377,7 @@ def test_ciphers(self, instance, expected_ciphers): id='verify string and cert', ), pytest.param( - {'verify': 'bar', 'cert': ('foo','key')}, + {'verify': 'bar', 'cert': ('foo', 'key')}, {'tls_verify': True, 'tls_cert': 'foo', 'tls_ca_cert': 'bar', 'tls_private_key': 'key'}, id='verify string and cert with key', ), diff --git 
a/vsphere/tests/conftest.py b/vsphere/tests/conftest.py index ea974ab75f0a8..94d5075b6c85a 100644 --- a/vsphere/tests/conftest.py +++ b/vsphere/tests/conftest.py @@ -250,16 +250,20 @@ def mock_http_api(monkeypatch): http = MockHttpV7() else: http = MockHttpV6() - monkeypatch.setattr('requests.Session.get', MagicMock(side_effect=http.get)) - monkeypatch.setattr('requests.Session.post', MagicMock(side_effect=http.post)) + + def mock_get(session, *args, **kwargs): + return http.get(*args, **kwargs) + + monkeypatch.setattr('requests.Session.get', MagicMock(side_effect=mock_get)) + monkeypatch.setattr('requests.Session.post', MagicMock(side_effect=mock_get)) yield http @pytest.fixture def mock_rest_api(): if VSPHERE_VERSION.startswith('7.'): - with patch('requests.api.request', mock_http_rest_api_v7): + with patch('requests.Session.request', mock_http_rest_api_v7): yield else: - with patch('requests.api.request', mock_http_rest_api_v6): + with patch('requests.Session.request', mock_http_rest_api_v6): yield diff --git a/vsphere/tests/mocked_api.py b/vsphere/tests/mocked_api.py index a601b1525dd16..36949532afcc1 100644 --- a/vsphere/tests/mocked_api.py +++ b/vsphere/tests/mocked_api.py @@ -193,7 +193,7 @@ def json(self): return self.json_data -def mock_http_rest_api_v6(method, url, *args, **kwargs): +def mock_http_rest_api_v6(session, method, url, *args, **kwargs): if '/api/' in url: return MockResponse({}, 404) if method == 'get': @@ -248,7 +248,7 @@ def mock_http_rest_api_v6(method, url, *args, **kwargs): raise Exception("Rest api mock request not matched: method={}, url={}".format(method, url)) -def mock_http_rest_api_v7(method, url, *args, **kwargs): +def mock_http_rest_api_v7(session, method, url, *args, **kwargs): if method == 'get': if re.match(r'.*/category/.*$', url): parts = url.split('_') From cf9572caedbac4590273839dfc58a9f7cedebe5b Mon Sep 17 00:00:00 2001 From: David Kirov Date: Mon, 16 Jun 2025 18:27:13 +0200 Subject: [PATCH 047/100] Fix falco test --- 
falco/tests/test_unit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/falco/tests/test_unit.py b/falco/tests/test_unit.py index df21d53445121..c2a544b1f780a 100644 --- a/falco/tests/test_unit.py +++ b/falco/tests/test_unit.py @@ -27,7 +27,7 @@ def test_check_falco(dd_run_check, aggregator, instance): MockResponse(file_path=get_fixture_path("falco_metrics.txt")), ] - with mock.patch('requests.get', side_effect=mock_responses): + with mock.patch('requests.Session.get', side_effect=mock_responses): dd_run_check(FalcoCheck('falco', {}, [instance])) for metric in METRICS: From 7138a48fa7f8f9674d46d46cd59de2408c503547 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 17 Jun 2025 11:13:17 +0200 Subject: [PATCH 048/100] Fix `tls_verify` parsing in SSL context creation logic --- datadog_checks_base/datadog_checks/base/utils/http.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 0ca8cdda004cb..c82438157fd58 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -126,11 +126,12 @@ def create_ssl_context(config, overrides=None): config = config.copy() config.update(overrides) + LOGGER.debug('Creating SSL context with config: %s', config) # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname - context.check_hostname = config['tls_verify'] and config.get('tls_validate_hostname', True) + context.check_hostname = is_affirmative(config['tls_verify']) and config.get('tls_validate_hostname', True) # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.verify_mode - context.verify_mode = ssl.CERT_REQUIRED if config['tls_verify'] else ssl.CERT_NONE + context.verify_mode = ssl.CERT_REQUIRED if is_affirmative(config['tls_verify']) else ssl.CERT_NONE ciphers = config.get('tls_ciphers') if ciphers: @@ -468,8 +469,8 @@ def 
__init__(self, instance, init_config, remapper=None, logger=None, session=No self.request_hooks.append(lambda: handle_kerberos_cache(config['kerberos_cache'])) # Create TLS context for consistent TLS configuration - self.ssl_context = create_ssl_context(config) self.tls_config = {key: value for key, value in config.items() if key.startswith('tls_')} + self.ssl_context = create_ssl_context(self.tls_config) def get(self, url, **options): return self._request('get', url, options) @@ -557,6 +558,8 @@ def _get_new_context(self, url, new_options): """ Creates a new TLS context if the new request options differ from the default ones. """ + self.logger.debug('Default options: %s', self.options) + LOGGER.debug('New options: %s', new_options) # If `verify` or `cert` options differ from the defaults, a new adapter is created. if new_options.get('verify') == self.options['verify'] and new_options.get('cert') == self.options['cert']: return @@ -566,6 +569,8 @@ def _get_new_context(self, url, new_options): new_tls_config = get_tls_config_from_options(new_options) + self.logger.debug('Overrides for new context config: %s', new_tls_config) + return create_ssl_context(self.tls_config, overrides=new_tls_config) def make_request_aia_chasing(self, request_method, method, url, new_options, persist): From b9a4c3da4eb3b5c63f61348bc496c908a4419534 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 17 Jun 2025 12:52:57 +0200 Subject: [PATCH 049/100] Add tests with custom server --- .../tests/base/utils/http/test_http.py | 87 +++++++++++++++++++ 1 file changed, 87 insertions(+) diff --git a/datadog_checks_base/tests/base/utils/http/test_http.py b/datadog_checks_base/tests/base/utils/http/test_http.py index 8b7270eb96c52..c1ba5f86fb2fd 100644 --- a/datadog_checks_base/tests/base/utils/http/test_http.py +++ b/datadog_checks_base/tests/base/utils/http/test_http.py @@ -3,6 +3,10 @@ # Licensed under a 3-clause BSD style license (see LICENSE) import logging import ssl +import subprocess 
+import tempfile +import time +import os import mock import pytest @@ -14,6 +18,43 @@ from datadog_checks.dev.utils import ON_WINDOWS +@pytest.fixture(scope="module") +def openssl_https_server(): + # Generate cert and key + cert_file = tempfile.NamedTemporaryFile(delete=False, suffix=".crt") + key_file = tempfile.NamedTemporaryFile(delete=False, suffix=".key") + cert_file.close() + key_file.close() + subprocess.run([ + "openssl", "req", "-x509", "-newkey", "rsa:2048", + "-keyout", key_file.name, "-out", cert_file.name, + "-days", "3", "-nodes", "-subj", "/CN=localhost" + ], check=True) + + # Start OpenSSL server with a single cipher + port = 8443 + cipher = "AES256-GCM-SHA384" + server_proc = subprocess.Popen([ + "openssl", "s_server", + "-accept", str(port), + "-cert", cert_file.name, + "-key", key_file.name, + "-cipher", cipher, + "-tls1_2", + "-www" + ], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + # Wait for server to be ready + time.sleep(1) + url = f"https://localhost:{port}" + yield url, cert_file.name, cipher + + server_proc.terminate() + server_proc.wait() + os.unlink(cert_file.name) + os.unlink(key_file.name) + + class TestAttribute: def test_default(self): check = AgentCheck('test', {}, [{}]) @@ -57,7 +98,53 @@ def test_cipher_construction(self, instance, expected_ciphers): # Verify that set_ciphers was called with the expected ciphers during TLS context creation mock_set_ciphers.assert_called_once_with(expected_ciphers) + + def test_http_success_with_default_ciphers(self, openssl_https_server): + ''' + Test that the default ciphers are sufficient to connect to the server. 
+ ''' + url, cert_path, _ = openssl_https_server + instance = { + 'tls_verify': True, + 'tls_ca_cert': cert_path, + } + init_config = {} + http = RequestsWrapper(instance, init_config) + response = http.get(url) + assert response.status_code == 200 + def test_http_failure_with_wrong_cipher(self, openssl_https_server): + ''' + Test that setting a TLS1.2 cipher that is not allowed by the server raises an error. + ''' + url, cert_path, _ = openssl_https_server + # Use a TLS1.2 cipher not allowed by the server + instance = { + 'tls_verify': True, + 'tls_ciphers': ['AES128-GCM-SHA256'], + 'tls_ca_cert': cert_path, + } + init_config = {} + http = RequestsWrapper(instance, init_config) + with pytest.raises(requests.exceptions.SSLError): + http.get(url) + + def test_http_failure_with_ssl_defaults(self, openssl_https_server): + ''' + Test that the SSL default ciphers are not sufficient to connect to the server. + ''' + url, cert_path, _ = openssl_https_server + # Use a cipher not allowed by the server + instance = { + 'tls_verify': True, + 'tls_ca_cert': cert_path, + } + init_config = {} + # Mock SSL set_ciphers to disable it and use the default ciphers + with mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers: + http = RequestsWrapper(instance, init_config) + with pytest.raises(requests.exceptions.SSLError): + http.get(url) class TestRequestSize: def test_behavior_correct(self, mock_http_response): From bdab774ce66aaa1409451aafcdfef3463cd87f37 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 17 Jun 2025 13:38:52 +0200 Subject: [PATCH 050/100] Fix request method use --- .../datadog_checks/base/utils/http.py | 12 +++++------ vault/tests/test_vault.py | 21 ++++++++----------- 2 files changed, 15 insertions(+), 18 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index c82438157fd58..d3ef603362a03 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ 
b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -543,14 +543,14 @@ def _request(self, method, url, options): if self.auth_token_handler: try: - response = self.make_request_aia_chasing(request_method, method, url, new_options, persist) + response = self.make_request_aia_chasing(session, request_method, method, url, new_options, persist) response.raise_for_status() except Exception as e: self.logger.debug('Renewing auth token, as an error occurred: %s', e) self.handle_auth_token(method=method, url=url, default_options=self.options, error=str(e)) - response = self.make_request_aia_chasing(request_method, method, url, new_options, persist) + response = self.make_request_aia_chasing(session, request_method, method, url, new_options, persist) else: - response = self.make_request_aia_chasing(request_method, method, url, new_options, persist) + response = self.make_request_aia_chasing(session, request_method, method, url, new_options, persist) return ResponseWrapper(response, self.request_size) @@ -573,9 +573,9 @@ def _get_new_context(self, url, new_options): return create_ssl_context(self.tls_config, overrides=new_tls_config) - def make_request_aia_chasing(self, request_method, method, url, new_options, persist): + def make_request_aia_chasing(self, session, request_method, method, url, new_options, persist): try: - response = request_method(url, **new_options) + response = request_method(session, url, **new_options) except SSLError as e: # fetch the intermediate certs parsed_url = urlparse(url) @@ -593,7 +593,7 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per certadapter = SSLContextAdapter(new_context) session.mount(url, certadapter) request_method = getattr(session, method) - response = request_method(url, **new_options) + response = request_method(session, url, **new_options) return response def populate_options(self, options): diff --git a/vault/tests/test_vault.py b/vault/tests/test_vault.py index 
12691eedfc1d7..8fe1633508228 100644 --- a/vault/tests/test_vault.py +++ b/vault/tests/test_vault.py @@ -801,20 +801,17 @@ def test_wal_merkle_metrics(self, aggregator, instance, dd_run_check, global_tag @pytest.mark.parametrize('use_openmetrics', [False, True], indirect=True) -def test_x_vault_request_header_is_set(monkeypatch, instance, dd_run_check, use_openmetrics): +def test_x_vault_request_header_is_set(instance, dd_run_check, use_openmetrics): instance = instance() instance['use_openmetrics'] = use_openmetrics - c = Vault(Vault.CHECK_NAME, {}, [instance]) - requests_get = requests.Session.get - mock_get = mock.Mock(side_effect=requests_get) - monkeypatch.setattr(requests.Session, 'get', mock_get) - - dd_run_check(c) + with mock.patch('datadog_checks.base.utils.http.requests.Session.get', side_effect=requests_get) as mock_get: + c = Vault(Vault.CHECK_NAME, {}, [instance]) + dd_run_check(c) - assert mock_get.call_count > 0 - for call in mock_get.call_args_list: - headers = dict(call.kwargs['headers']) - assert 'X-Vault-Request' in headers - assert headers['X-Vault-Request'] == 'true' + assert mock_get.call_count > 0 + for call in mock_get.call_args_list: + headers = dict(call.kwargs['headers']) + assert 'X-Vault-Request' in headers + assert headers['X-Vault-Request'] == 'true' From 1b34f33306df3da474971617bdf6081f2ea55854 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 17 Jun 2025 15:57:49 +0200 Subject: [PATCH 051/100] Fixing vsphere tests WIP --- vsphere/datadog_checks/vsphere/api.py | 8 +++----- vsphere/datadog_checks/vsphere/vsphere.py | 1 + vsphere/tests/common.py | 2 ++ vsphere/tests/conftest.py | 5 ++++- vsphere/tests/mocked_api.py | 14 ++++++++++++-- vsphere/tests/test_check.py | 6 ++++++ 6 files changed, 28 insertions(+), 8 deletions(-) diff --git a/vsphere/datadog_checks/vsphere/api.py b/vsphere/datadog_checks/vsphere/api.py index f9ab8678172ee..1c5f30669574f 100644 --- a/vsphere/datadog_checks/vsphere/api.py +++ 
b/vsphere/datadog_checks/vsphere/api.py @@ -11,6 +11,7 @@ from pyVmomi import SoapStubAdapter, vim, vmodl from datadog_checks.base.log import CheckLoggingAdapter # noqa: F401 +from datadog_checks.base.utils.http import create_ssl_context from datadog_checks.vsphere.config import VSphereConfig # noqa: F401 from datadog_checks.vsphere.constants import ( ALL_PROPERTIES, @@ -117,14 +118,11 @@ def smart_connect(self): context = None if not self.config.ssl_verify: # Remove type ignore when this is merged https://github.com/python/typeshed/pull/3855 - context = ssl.SSLContext(ssl.PROTOCOL_TLS) # type: ignore - context.verify_mode = ssl.CERT_NONE + context = create_ssl_context({"tls_verify": False}) # type: ignore elif self.config.ssl_capath or self.config.ssl_cafile: # Remove type ignore when this is merged https://github.com/python/typeshed/pull/3855 - context = ssl.SSLContext(ssl.PROTOCOL_TLS) # type: ignore - context.verify_mode = ssl.CERT_REQUIRED # `check_hostname` must be enabled as well to verify the authenticity of a cert. 
- context.check_hostname = True + context = create_ssl_context({"tls_verify": True, 'tls_check_hostname': True}) # type: ignore if self.config.ssl_capath: context.load_verify_locations(cafile=None, capath=self.config.ssl_capath) else: diff --git a/vsphere/datadog_checks/vsphere/vsphere.py b/vsphere/datadog_checks/vsphere/vsphere.py index b9a7e7e16422b..cab75a947d3ba 100644 --- a/vsphere/datadog_checks/vsphere/vsphere.py +++ b/vsphere/datadog_checks/vsphere/vsphere.py @@ -127,6 +127,7 @@ def initiate_api_connection(self): if major_version >= 7: try: # Try to connect to REST API vSphere v7 + self.log.warning("Config: %s", self._config) self.api_rest = VSphereRestAPI(self._config, self.log, False) return except Exception: diff --git a/vsphere/tests/common.py b/vsphere/tests/common.py index 562b990e34dbb..2267b37878b22 100644 --- a/vsphere/tests/common.py +++ b/vsphere/tests/common.py @@ -1092,6 +1092,8 @@ def __init__(self): def get(self, url, *args, **kwargs): if '/api/' in url: return MockResponse({}, 404) + import logging + logging.getLogger(__name__).warning("MockHttpV6 get request: %s", url) parsed_url = urlparse(url) path_and_args = parsed_url.path + "?" 
+ parsed_url.query if parsed_url.query else parsed_url.path path_parts = path_and_args.split('/') diff --git a/vsphere/tests/conftest.py b/vsphere/tests/conftest.py index 94d5075b6c85a..eecafb23c43e1 100644 --- a/vsphere/tests/conftest.py +++ b/vsphere/tests/conftest.py @@ -254,8 +254,11 @@ def mock_http_api(monkeypatch): def mock_get(session, *args, **kwargs): return http.get(*args, **kwargs) + def mock_post(session, *args, **kwargs): + return http.post(*args, **kwargs) + monkeypatch.setattr('requests.Session.get', MagicMock(side_effect=mock_get)) - monkeypatch.setattr('requests.Session.post', MagicMock(side_effect=mock_get)) + monkeypatch.setattr('requests.Session.post', MagicMock(side_effect=mock_post)) yield http diff --git a/vsphere/tests/mocked_api.py b/vsphere/tests/mocked_api.py index 36949532afcc1..b3e2be87c3609 100644 --- a/vsphere/tests/mocked_api.py +++ b/vsphere/tests/mocked_api.py @@ -193,7 +193,12 @@ def json(self): return self.json_data -def mock_http_rest_api_v6(session, method, url, *args, **kwargs): +def mock_http_rest_api_v6(session, method, url, **kwargs): + import logging + logging.getLogger(__name__).warning( + "Mocked API request: method=%s, url=%s, kwargs=%s", + method, url, kwargs + ) if '/api/' in url: return MockResponse({}, 404) if method == 'get': @@ -248,7 +253,12 @@ def mock_http_rest_api_v6(session, method, url, *args, **kwargs): raise Exception("Rest api mock request not matched: method={}, url={}".format(method, url)) -def mock_http_rest_api_v7(session, method, url, *args, **kwargs): +def mock_http_rest_api_v7(session, method, url, **kwargs): + import logging + logging.getLogger(__name__).warning( + "Mocked API request: method=%s, url=%s, kwargs=%s", + method, url, kwargs + ) if method == 'get': if re.match(r'.*/category/.*$', url): parts = url.split('_') diff --git a/vsphere/tests/test_check.py b/vsphere/tests/test_check.py index 8b5fa250bb0c9..0b45b33d04898 100644 --- a/vsphere/tests/test_check.py +++ 
b/vsphere/tests/test_check.py @@ -260,6 +260,12 @@ def test_collect_tags(aggregator, dd_run_check, realtime_instance): ) aggregator.assert_metric('datadog.vsphere.query_tags.time', tags=['vcenter_server:FAKE']) +@pytest.mark.usefixtures('mock_rest_api') +def test_collect_tags_debug(aggregator, realtime_instance): + realtime_instance.update({'collect_tags': True, 'excluded_host_tags': ['my_cat_name_1', 'my_cat_name_2']}) + check = VSphereCheck('vsphere', {}, [{}]) + check.http.get("https://fake-vcenter.com/api/vcenter/vm", timeout=5) + @pytest.mark.usefixtures('mock_type', 'mock_threadpool', 'mock_api', 'mock_rest_api') def test_tag_prefix(aggregator, dd_run_check, realtime_instance): From 98ee50bf56608882d78d04a2ed6f43826b210559 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 17 Jun 2025 16:30:19 +0200 Subject: [PATCH 052/100] Fix vsphere tests --- vsphere/datadog_checks/vsphere/vsphere.py | 1 - vsphere/tests/common.py | 2 -- vsphere/tests/conftest.py | 8 ++++++-- vsphere/tests/mocked_api.py | 22 ++++++---------------- vsphere/tests/test_check.py | 6 ------ 5 files changed, 12 insertions(+), 27 deletions(-) diff --git a/vsphere/datadog_checks/vsphere/vsphere.py b/vsphere/datadog_checks/vsphere/vsphere.py index cab75a947d3ba..b9a7e7e16422b 100644 --- a/vsphere/datadog_checks/vsphere/vsphere.py +++ b/vsphere/datadog_checks/vsphere/vsphere.py @@ -127,7 +127,6 @@ def initiate_api_connection(self): if major_version >= 7: try: # Try to connect to REST API vSphere v7 - self.log.warning("Config: %s", self._config) self.api_rest = VSphereRestAPI(self._config, self.log, False) return except Exception: diff --git a/vsphere/tests/common.py b/vsphere/tests/common.py index 2267b37878b22..562b990e34dbb 100644 --- a/vsphere/tests/common.py +++ b/vsphere/tests/common.py @@ -1092,8 +1092,6 @@ def __init__(self): def get(self, url, *args, **kwargs): if '/api/' in url: return MockResponse({}, 404) - import logging - logging.getLogger(__name__).warning("MockHttpV6 get request: 
%s", url) parsed_url = urlparse(url) path_and_args = parsed_url.path + "?" + parsed_url.query if parsed_url.query else parsed_url.path path_parts = path_and_args.split('/') diff --git a/vsphere/tests/conftest.py b/vsphere/tests/conftest.py index eecafb23c43e1..fa3e2e8fc353f 100644 --- a/vsphere/tests/conftest.py +++ b/vsphere/tests/conftest.py @@ -251,10 +251,14 @@ def mock_http_api(monkeypatch): else: http = MockHttpV6() - def mock_get(session, *args, **kwargs): + def mock_get(*args, **kwargs): return http.get(*args, **kwargs) - def mock_post(session, *args, **kwargs): + def mock_post(*args, **kwargs): + import logging + logger = logging.getLogger(__name__) + logger.warning(args) + return http.post(*args, **kwargs) monkeypatch.setattr('requests.Session.get', MagicMock(side_effect=mock_get)) diff --git a/vsphere/tests/mocked_api.py b/vsphere/tests/mocked_api.py index b3e2be87c3609..53b64eb0b3ba7 100644 --- a/vsphere/tests/mocked_api.py +++ b/vsphere/tests/mocked_api.py @@ -193,15 +193,10 @@ def json(self): return self.json_data -def mock_http_rest_api_v6(session, method, url, **kwargs): - import logging - logging.getLogger(__name__).warning( - "Mocked API request: method=%s, url=%s, kwargs=%s", - method, url, kwargs - ) +def mock_http_rest_api_v6(self, method, url, **kwargs): if '/api/' in url: return MockResponse({}, 404) - if method == 'get': + if method.lower() == 'get': if re.match(r'.*/category/id:.*$', url): parts = url.split('_') num = parts[len(parts) - 1] @@ -232,7 +227,7 @@ def mock_http_rest_api_v6(session, method, url, **kwargs): }, 200, ) - elif method == 'post': + elif method.lower() == 'post': assert kwargs['headers']['Content-Type'] == 'application/json' if re.match(r'.*/session$', url): return MockResponse( @@ -253,13 +248,8 @@ def mock_http_rest_api_v6(session, method, url, **kwargs): raise Exception("Rest api mock request not matched: method={}, url={}".format(method, url)) -def mock_http_rest_api_v7(session, method, url, **kwargs): - import 
logging - logging.getLogger(__name__).warning( - "Mocked API request: method=%s, url=%s, kwargs=%s", - method, url, kwargs - ) - if method == 'get': +def mock_http_rest_api_v7(self, method, url, **kwargs): + if method.lower() == 'get': if re.match(r'.*/category/.*$', url): parts = url.split('_') num = parts[len(parts) - 1] @@ -287,7 +277,7 @@ def mock_http_rest_api_v7(session, method, url, **kwargs): }, 200, ) - elif method == 'post': + elif method.lower() == 'post': assert kwargs['headers']['Content-Type'] == 'application/json' if re.match(r'.*/session$', url): return MockResponse( diff --git a/vsphere/tests/test_check.py b/vsphere/tests/test_check.py index 0b45b33d04898..8b5fa250bb0c9 100644 --- a/vsphere/tests/test_check.py +++ b/vsphere/tests/test_check.py @@ -260,12 +260,6 @@ def test_collect_tags(aggregator, dd_run_check, realtime_instance): ) aggregator.assert_metric('datadog.vsphere.query_tags.time', tags=['vcenter_server:FAKE']) -@pytest.mark.usefixtures('mock_rest_api') -def test_collect_tags_debug(aggregator, realtime_instance): - realtime_instance.update({'collect_tags': True, 'excluded_host_tags': ['my_cat_name_1', 'my_cat_name_2']}) - check = VSphereCheck('vsphere', {}, [{}]) - check.http.get("https://fake-vcenter.com/api/vcenter/vm", timeout=5) - @pytest.mark.usefixtures('mock_type', 'mock_threadpool', 'mock_api', 'mock_rest_api') def test_tag_prefix(aggregator, dd_run_check, realtime_instance): From f2bd9b7c89bd820127adf8998a0a4b6780e72937 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 17 Jun 2025 17:04:15 +0200 Subject: [PATCH 053/100] Run formatter --- .../datadog_checks/base/utils/http.py | 18 +++--- .../tests/base/utils/http/test_http.py | 58 +++++++++++++------ http_check/tests/test_http_integration.py | 1 - vsphere/datadog_checks/vsphere/api.py | 1 - vsphere/tests/conftest.py | 1 + 5 files changed, 51 insertions(+), 28 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py 
b/datadog_checks_base/datadog_checks/base/utils/http.py index d3ef603362a03..3093a8e96bac2 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -144,7 +144,7 @@ def create_ssl_context(config, overrides=None): # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs - ca_cert = config['tls_ca_cert'] + ca_cert = config.get('tls_ca_cert') try: if ca_cert: ca_cert = os.path.expanduser(ca_cert) @@ -161,8 +161,8 @@ def create_ssl_context(config, overrides=None): ) # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain - client_cert, client_key = config['tls_cert'], config['tls_private_key'] - client_key_pass = config['tls_private_key_password'] + client_cert, client_key = config.get('tls_cert'), config.get('tls_private_key') + client_key_pass = config.get('tls_private_key_password') try: if client_key: client_key = os.path.expanduser(client_key) @@ -543,14 +543,14 @@ def _request(self, method, url, options): if self.auth_token_handler: try: - response = self.make_request_aia_chasing(session, request_method, method, url, new_options, persist) + response = self.make_request_aia_chasing(request_method, method, url, new_options, persist) response.raise_for_status() except Exception as e: self.logger.debug('Renewing auth token, as an error occurred: %s', e) self.handle_auth_token(method=method, url=url, default_options=self.options, error=str(e)) - response = self.make_request_aia_chasing(session, request_method, method, url, new_options, persist) + response = self.make_request_aia_chasing(request_method, method, url, new_options, persist) else: - response = self.make_request_aia_chasing(session, request_method, method, url, new_options, persist) + response = self.make_request_aia_chasing(request_method, method, url, new_options, persist) return ResponseWrapper(response, 
self.request_size) @@ -573,9 +573,9 @@ def _get_new_context(self, url, new_options): return create_ssl_context(self.tls_config, overrides=new_tls_config) - def make_request_aia_chasing(self, session, request_method, method, url, new_options, persist): + def make_request_aia_chasing(self, request_method, method, url, new_options, persist): try: - response = request_method(session, url, **new_options) + response = request_method(url, **new_options) except SSLError as e: # fetch the intermediate certs parsed_url = urlparse(url) @@ -593,7 +593,7 @@ def make_request_aia_chasing(self, session, request_method, method, url, new_opt certadapter = SSLContextAdapter(new_context) session.mount(url, certadapter) request_method = getattr(session, method) - response = request_method(session, url, **new_options) + response = request_method(url, **new_options) return response def populate_options(self, options): diff --git a/datadog_checks_base/tests/base/utils/http/test_http.py b/datadog_checks_base/tests/base/utils/http/test_http.py index c1ba5f86fb2fd..ab26b0e4d86a9 100644 --- a/datadog_checks_base/tests/base/utils/http/test_http.py +++ b/datadog_checks_base/tests/base/utils/http/test_http.py @@ -2,11 +2,11 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import logging +import os import ssl import subprocess import tempfile import time -import os import mock import pytest @@ -25,24 +25,47 @@ def openssl_https_server(): key_file = tempfile.NamedTemporaryFile(delete=False, suffix=".key") cert_file.close() key_file.close() - subprocess.run([ - "openssl", "req", "-x509", "-newkey", "rsa:2048", - "-keyout", key_file.name, "-out", cert_file.name, - "-days", "3", "-nodes", "-subj", "/CN=localhost" - ], check=True) + subprocess.run( + [ + "openssl", + "req", + "-x509", + "-newkey", + "rsa:2048", + "-keyout", + key_file.name, + "-out", + cert_file.name, + "-days", + "3", + "-nodes", + "-subj", + "/CN=localhost", + ], + check=True, + ) # Start OpenSSL 
server with a single cipher port = 8443 cipher = "AES256-GCM-SHA384" - server_proc = subprocess.Popen([ - "openssl", "s_server", - "-accept", str(port), - "-cert", cert_file.name, - "-key", key_file.name, - "-cipher", cipher, - "-tls1_2", - "-www" - ], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + server_proc = subprocess.Popen( + [ + "openssl", + "s_server", + "-accept", + str(port), + "-cert", + cert_file.name, + "-key", + key_file.name, + "-cipher", + cipher, + "-tls1_2", + "-www", + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) # Wait for server to be ready time.sleep(1) @@ -98,7 +121,7 @@ def test_cipher_construction(self, instance, expected_ciphers): # Verify that set_ciphers was called with the expected ciphers during TLS context creation mock_set_ciphers.assert_called_once_with(expected_ciphers) - + def test_http_success_with_default_ciphers(self, openssl_https_server): ''' Test that the default ciphers are sufficient to connect to the server. @@ -141,11 +164,12 @@ def test_http_failure_with_ssl_defaults(self, openssl_https_server): } init_config = {} # Mock SSL set_ciphers to disable it and use the default ciphers - with mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers: + with mock.patch.object(ssl.SSLContext, 'set_ciphers'): http = RequestsWrapper(instance, init_config) with pytest.raises(requests.exceptions.SSLError): http.get(url) + class TestRequestSize: def test_behavior_correct(self, mock_http_response): instance = {'request_size': 0.5} diff --git a/http_check/tests/test_http_integration.py b/http_check/tests/test_http_integration.py index 741d2615f3da7..354796ede0506 100644 --- a/http_check/tests/test_http_integration.py +++ b/http_check/tests/test_http_integration.py @@ -442,7 +442,6 @@ def test_unexisting_ca_cert_should_log_warning(aggregator, dd_run_check): check = HTTPCheck('http_check', {'ca_certs': 'foo'}, [instance]) with mock.patch('datadog_checks.base.utils.http.logging.Logger.warning') as mock_warning: 
- dd_run_check(check) mock_warning.assert_called() assert any(instance['tls_ca_cert'] in arg for arg in mock_warning.call_args) diff --git a/vsphere/datadog_checks/vsphere/api.py b/vsphere/datadog_checks/vsphere/api.py index 1c5f30669574f..cc87cc7e79fe3 100644 --- a/vsphere/datadog_checks/vsphere/api.py +++ b/vsphere/datadog_checks/vsphere/api.py @@ -3,7 +3,6 @@ # Licensed under Simplified BSD License (see LICENSE) import datetime as dt # noqa: F401 import functools -import ssl from typing import Any, Callable, List, TypeVar, cast # noqa: F401 import vsanapiutils diff --git a/vsphere/tests/conftest.py b/vsphere/tests/conftest.py index fa3e2e8fc353f..67c8b426fe997 100644 --- a/vsphere/tests/conftest.py +++ b/vsphere/tests/conftest.py @@ -256,6 +256,7 @@ def mock_get(*args, **kwargs): def mock_post(*args, **kwargs): import logging + logger = logging.getLogger(__name__) logger.warning(args) From 08d06effcee68366573552447a5dc544f5ccde06 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 17 Jun 2025 17:57:21 +0200 Subject: [PATCH 054/100] Fix vault tests again --- vault/tests/test_vault.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/vault/tests/test_vault.py b/vault/tests/test_vault.py index 8fe1633508228..cf90dddea9d2a 100644 --- a/vault/tests/test_vault.py +++ b/vault/tests/test_vault.py @@ -805,8 +805,10 @@ def test_x_vault_request_header_is_set(instance, dd_run_check, use_openmetrics): instance = instance() instance['use_openmetrics'] = use_openmetrics - requests_get = requests.Session.get - with mock.patch('datadog_checks.base.utils.http.requests.Session.get', side_effect=requests_get) as mock_get: + def mock_requests_get(url, *args, **kwargs): + return requests.get(url, *args, **kwargs) + + with mock.patch('datadog_checks.base.utils.http.requests.Session.get', side_effect=mock_requests_get) as mock_get: c = Vault(Vault.CHECK_NAME, {}, [instance]) dd_run_check(c) From 8b0e39b4131e5d4c0925003a65521a57c59fbe74 Mon Sep 17 00:00:00 2001 
From: David Kirov Date: Tue, 17 Jun 2025 18:01:00 +0200 Subject: [PATCH 055/100] Try fixing amazon_msk tests as they do not fail locally --- amazon_msk/tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/amazon_msk/tests/conftest.py b/amazon_msk/tests/conftest.py index ace67250b8fa2..50bf85754feb4 100644 --- a/amazon_msk/tests/conftest.py +++ b/amazon_msk/tests/conftest.py @@ -33,7 +33,7 @@ def mock_requests_get(url, *args, **kwargs): @pytest.fixture def mock_data(): # Mock requests.get because it is used internally within boto3 - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), mock.patch('requests.Session.get'): yield From 4b5dc36844c0f69f02888ec1d641824ec3b5dca7 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 17 Jun 2025 18:06:47 +0200 Subject: [PATCH 056/100] Try fixing amazon_msk tests as they do not fail locally 2 --- amazon_msk/tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/amazon_msk/tests/conftest.py b/amazon_msk/tests/conftest.py index 50bf85754feb4..bc55ef97bb644 100644 --- a/amazon_msk/tests/conftest.py +++ b/amazon_msk/tests/conftest.py @@ -33,7 +33,7 @@ def mock_requests_get(url, *args, **kwargs): @pytest.fixture def mock_data(): # Mock requests.get because it is used internally within boto3 - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), mock.patch('requests.Session.get'): + with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): yield From ecb0c43114ca6190faec4cee87cfd00d08ccde83 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 17 Jun 2025 18:12:44 +0200 Subject: [PATCH 057/100] Run formatter --- amazon_msk/tests/conftest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/amazon_msk/tests/conftest.py 
b/amazon_msk/tests/conftest.py index bc55ef97bb644..9aa01984b83c4 100644 --- a/amazon_msk/tests/conftest.py +++ b/amazon_msk/tests/conftest.py @@ -33,7 +33,9 @@ def mock_requests_get(url, *args, **kwargs): @pytest.fixture def mock_data(): # Mock requests.get because it is used internally within boto3 - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), mock.patch('requests.Session.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), mock.patch( + 'requests.Session.get', side_effect=mock_requests_get, autospec=True + ): yield From 81f76c3268e2c6cf8eb80d0a9581efc55cc54605 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 17 Jun 2025 18:22:16 +0200 Subject: [PATCH 058/100] Try fixing amazon_msk tests as they do not fail locally 3 --- amazon_msk/tests/conftest.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/amazon_msk/tests/conftest.py b/amazon_msk/tests/conftest.py index 9aa01984b83c4..ace67250b8fa2 100644 --- a/amazon_msk/tests/conftest.py +++ b/amazon_msk/tests/conftest.py @@ -33,9 +33,7 @@ def mock_requests_get(url, *args, **kwargs): @pytest.fixture def mock_data(): # Mock requests.get because it is used internally within boto3 - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), mock.patch( - 'requests.Session.get', side_effect=mock_requests_get, autospec=True - ): + with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): yield From 23520a6cb8fe500c0be97f42f2cc919fca58acd9 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 10:54:23 +0200 Subject: [PATCH 059/100] Try fixing amazon_msk tests as they do not fail locally 4 --- amazon_msk/tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/amazon_msk/tests/conftest.py b/amazon_msk/tests/conftest.py index ace67250b8fa2..50bf85754feb4 100644 --- a/amazon_msk/tests/conftest.py +++ 
b/amazon_msk/tests/conftest.py @@ -33,7 +33,7 @@ def mock_requests_get(url, *args, **kwargs): @pytest.fixture def mock_data(): # Mock requests.get because it is used internally within boto3 - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True): + with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), mock.patch('requests.Session.get'): yield From d06d37bcb96de16040fba1373f924a3a7a98b261 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 10:58:45 +0200 Subject: [PATCH 060/100] Try fixing amazon_msk tests as they do not fail locally 5 --- amazon_msk/tests/conftest.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/amazon_msk/tests/conftest.py b/amazon_msk/tests/conftest.py index 50bf85754feb4..7cae5b33d5461 100644 --- a/amazon_msk/tests/conftest.py +++ b/amazon_msk/tests/conftest.py @@ -30,10 +30,17 @@ def mock_requests_get(url, *args, **kwargs): return MockResponse(file_path=common.get_metrics_fixture_path(exporter_type)) +def mock_session_get(self, url, *args, **kwargs): + exporter_type = 'jmx' if urlparse(url).port == common.JMX_PORT else 'node' + return MockResponse(file_path=common.get_metrics_fixture_path(exporter_type)) + + @pytest.fixture def mock_data(): # Mock requests.get because it is used internally within boto3 - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), mock.patch('requests.Session.get'): + with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), mock.patch( + 'requests.Session.get', side_effect=mock_session_get + ): yield From e8b97aa3af24377cbb784e96ea40267ec179b3ac Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 11:09:32 +0200 Subject: [PATCH 061/100] Try fixing amazon_msk tests as they do not fail locally 6 --- amazon_msk/tests/conftest.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/amazon_msk/tests/conftest.py b/amazon_msk/tests/conftest.py index 
7cae5b33d5461..85f87926d3627 100644 --- a/amazon_msk/tests/conftest.py +++ b/amazon_msk/tests/conftest.py @@ -30,16 +30,11 @@ def mock_requests_get(url, *args, **kwargs): return MockResponse(file_path=common.get_metrics_fixture_path(exporter_type)) -def mock_session_get(self, url, *args, **kwargs): - exporter_type = 'jmx' if urlparse(url).port == common.JMX_PORT else 'node' - return MockResponse(file_path=common.get_metrics_fixture_path(exporter_type)) - - @pytest.fixture def mock_data(): # Mock requests.get because it is used internally within boto3 with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), mock.patch( - 'requests.Session.get', side_effect=mock_session_get + 'requests.Session.get', side_effect=mock_requests_get ): yield From 1df2a4f3a016f82263e107956b6378f4180fbff5 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 11:16:42 +0200 Subject: [PATCH 062/100] Try fixing http_check test --- http_check/tests/test_http_integration.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/http_check/tests/test_http_integration.py b/http_check/tests/test_http_integration.py index 354796ede0506..7e7396e602419 100644 --- a/http_check/tests/test_http_integration.py +++ b/http_check/tests/test_http_integration.py @@ -440,8 +440,10 @@ def test_unexisting_ca_cert_should_log_warning(aggregator, dd_run_check): 'skip_proxy': 'false', } - check = HTTPCheck('http_check', {'ca_certs': 'foo'}, [instance]) - with mock.patch('datadog_checks.base.utils.http.logging.Logger.warning') as mock_warning: + with mock.patch('datadog_checks.base.utils.http.logging.Logger.warning') as mock_warning, mock.patch( + 'requests.Session.get' + ): + check = HTTPCheck('http_check', {'ca_certs': 'foo'}, [instance]) dd_run_check(check) mock_warning.assert_called() assert any(instance['tls_ca_cert'] in arg for arg in mock_warning.call_args) From c288330da0555c0f4930538f14e04c0967e3df82 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 
Jun 2025 11:29:50 +0200 Subject: [PATCH 063/100] Fix datadog_checks_dev test --- datadog_checks_dev/tests/tooling/test_github.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datadog_checks_dev/tests/tooling/test_github.py b/datadog_checks_dev/tests/tooling/test_github.py index a5671bcc2c4c8..ab96b415fcd3e 100644 --- a/datadog_checks_dev/tests/tooling/test_github.py +++ b/datadog_checks_dev/tests/tooling/test_github.py @@ -27,7 +27,7 @@ def test_get_pr_approvers(github_response, expected_approvers): mock_response.json.return_value = github_response with mock.patch( - "requests.Session.get", + "requests.get", return_value=mock_response, ): assert get_pr_approvers("integrations-core", "42", {}) == expected_approvers From 5099503c5ee1925eb38bf5c9db860df8ba6cb3aa Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 12:01:55 +0200 Subject: [PATCH 064/100] Fix datadog_checks_base test --- datadog_checks_base/tests/base/utils/http/test_authtoken.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_authtoken.py b/datadog_checks_base/tests/base/utils/http/test_authtoken.py index fd1464bb08dcd..4a99e275f3c58 100644 --- a/datadog_checks_base/tests/base/utils/http/test_authtoken.py +++ b/datadog_checks_base/tests/base/utils/http/test_authtoken.py @@ -616,10 +616,10 @@ def auth(*args, **kwargs): return MockResponse(json_data={}) return MockResponse(status_code=404) - http = RequestsWrapper(instance, init_config) - with mock.patch('requests.Session.post', side_effect=login), mock.patch( + with mock.patch('requests.post', side_effect=login), mock.patch( 'requests.Session.get', side_effect=auth ): + http = RequestsWrapper(instance, init_config) http.get('https://leader.mesos/service/some-service') From 5d3d0dfa9dd13793a920bc52fe70296de349bda5 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 12:06:15 +0200 Subject: [PATCH 065/100] Try fixing tls_cipher test --- 
datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 7857d7b935697..70d2c79baf092 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -270,7 +270,6 @@ def test_fetch_intermediate_certs_tls_ciphers(self): """Test that fetch_intermediate_certs uses the correct ciphers.""" instance = {'tls_verify': True, 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384']} init_config = {} - http = RequestsWrapper(instance, init_config) with mock.patch('datadog_checks.base.utils.http.create_socket_connection') as mock_create_socket_connection: mock_socket = mock.MagicMock() @@ -288,6 +287,7 @@ def test_fetch_intermediate_certs_tls_ciphers(self): # Mock the certificate loading to avoid cryptography operations with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.load_intermediate_certs'): + http = RequestsWrapper(instance, init_config) http.fetch_intermediate_certs('example.com', 443) # Verify set_ciphers was called with the correct cipher list From 405350d4aff46e788911ca913c5bd1e39a4e87ea Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 12:58:14 +0200 Subject: [PATCH 066/100] Run formatter --- datadog_checks_base/tests/base/utils/http/test_authtoken.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_authtoken.py b/datadog_checks_base/tests/base/utils/http/test_authtoken.py index 4a99e275f3c58..3a872371a68f9 100644 --- a/datadog_checks_base/tests/base/utils/http/test_authtoken.py +++ b/datadog_checks_base/tests/base/utils/http/test_authtoken.py @@ -616,9 +616,7 @@ def auth(*args, **kwargs): return MockResponse(json_data={}) return MockResponse(status_code=404) - with 
mock.patch('requests.post', side_effect=login), mock.patch( - 'requests.Session.get', side_effect=auth - ): + with mock.patch('requests.post', side_effect=login), mock.patch('requests.Session.get', side_effect=auth): http = RequestsWrapper(instance, init_config) http.get('https://leader.mesos/service/some-service') From 060840f7e40d23254703f38ea5ff7ef83f3e0ea3 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 13:19:18 +0200 Subject: [PATCH 067/100] Fix datadog_checks_base test 2 --- .../tests/base/utils/http/test_tls_and_certs.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 70d2c79baf092..0b996f269a696 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -288,10 +288,11 @@ def test_fetch_intermediate_certs_tls_ciphers(self): # Mock the certificate loading to avoid cryptography operations with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.load_intermediate_certs'): http = RequestsWrapper(instance, init_config) + mock_context.set_ciphers.assert_called_once_with('TLS_RSA_WITH_AES_256_GCM_SHA384') http.fetch_intermediate_certs('example.com', 443) - - # Verify set_ciphers was called with the correct cipher list - mock_context.set_ciphers.assert_called_once_with('TLS_RSA_WITH_AES_256_GCM_SHA384') + # Assert set_ciphers called a second time after fetch_intermediate_certs + assert mock_context.set_ciphers.call_count == 2 + assert mock_context.set_ciphers.call_args_list[1][0][0] == 'TLS_RSA_WITH_AES_256_GCM_SHA384' class TestSSLContext: From 4aba58b74ef473e89fa16dde3ec472f138701b17 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 13:37:56 +0200 Subject: [PATCH 068/100] Fix datadog_checks_base test 3 --- .../datadog_checks/base/utils/http.py | 2 +- 
.../base/utils/http/test_tls_and_certs.py | 21 ++++++++----------- 2 files changed, 10 insertions(+), 13 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 3093a8e96bac2..fee8593dda01a 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -139,7 +139,7 @@ def create_ssl_context(config, overrides=None): updated_ciphers = "ALL" else: updated_ciphers = ":".join(ciphers) - + LOGGER.debug('Setting TLS ciphers to: %s', updated_ciphers) context.set_ciphers(updated_ciphers) # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 0b996f269a696..e2955f0f270e2 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -12,10 +12,7 @@ pytestmark = [pytest.mark.unit] -IANA_TO_OPENSSL_NAME = { - 'TLS_RSA_WITH_AES_256_GCM_SHA384': 'AES256-GCM-SHA384', - 'TLS_RSA_WITH_AES_128_GCM_SHA256': 'AES128-GCM-SHA256', -} +TEST_CIPHERS = ['AES256-GCM-SHA384','AES128-GCM-SHA256'] class TestCert: @@ -268,7 +265,7 @@ def test_fetch_intermediate_certs(self, port): def test_fetch_intermediate_certs_tls_ciphers(self): """Test that fetch_intermediate_certs uses the correct ciphers.""" - instance = {'tls_verify': True, 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384']} + instance = {'tls_verify': True, 'tls_ciphers': [TEST_CIPHERS[0]]} init_config = {} with mock.patch('datadog_checks.base.utils.http.create_socket_connection') as mock_create_socket_connection: @@ -288,11 +285,11 @@ def test_fetch_intermediate_certs_tls_ciphers(self): # Mock the certificate loading to avoid cryptography operations with 
mock.patch('datadog_checks.base.utils.http.RequestsWrapper.load_intermediate_certs'): http = RequestsWrapper(instance, init_config) - mock_context.set_ciphers.assert_called_once_with('TLS_RSA_WITH_AES_256_GCM_SHA384') + mock_context.set_ciphers.assert_called_once_with(instance['tls_ciphers'][0]) http.fetch_intermediate_certs('example.com', 443) # Assert set_ciphers called a second time after fetch_intermediate_certs assert mock_context.set_ciphers.call_count == 2 - assert mock_context.set_ciphers.call_args_list[1][0][0] == 'TLS_RSA_WITH_AES_256_GCM_SHA384' + assert mock_context.set_ciphers.call_args_list[1][0][0] == instance['tls_ciphers'][0] class TestSSLContext: @@ -348,7 +345,7 @@ def test_ssl_context_ciphers_can_be_modified(self): def test_tls_ciphers_applied_consistently(self): """Test that tls_ciphers are applied consistently.""" - instance = {'tls_verify': True, 'tls_ciphers': list(IANA_TO_OPENSSL_NAME.keys())} + instance = {'tls_verify': True, 'tls_ciphers': TEST_CIPHERS} init_config = {} http = RequestsWrapper(instance, init_config) @@ -365,7 +362,7 @@ def test_tls_ciphers_applied_consistently(self): for cipher in instance['tls_ciphers']: # At least one entry's name field should match the OpenSSL name assert any( - IANA_TO_OPENSSL_NAME.get(cipher) in c.get('name') for c in https_adapter.ssl_context.get_ciphers() + cipher in c.get('name') for c in https_adapter.ssl_context.get_ciphers() ) def test_default_tls_ciphers(self): @@ -387,7 +384,7 @@ def test_host_header_compatibility(self): 'tls_use_host_header': True, 'headers': {'Host': 'custom-host.example.com'}, 'tls_verify': True, - 'tls_ciphers': ['TLS_RSA_WITH_AES_256_GCM_SHA384'], + 'tls_ciphers': [TEST_CIPHERS[0]], } init_config = {} http = RequestsWrapper(instance, init_config) @@ -433,7 +430,7 @@ def test_backward_compatibility_maintained(self): 'tls_validate_hostname': True, 'tls_ignore_warning': False, 'tls_protocols_allowed': ['TLSv1.2', 'TLSv1.3'], - 'tls_ciphers': 
['TLS_RSA_WITH_AES_256_GCM_SHA384'], + 'tls_ciphers': [TEST_CIPHERS[0]], } init_config = {} @@ -453,4 +450,4 @@ def test_backward_compatibility_maintained(self): # Verify TLS context wrapper has the right config assert http.tls_config['tls_verify'] is True - assert http.tls_config['tls_ciphers'] == ['TLS_RSA_WITH_AES_256_GCM_SHA384'] + assert http.tls_config['tls_ciphers'] == instance['tls_ciphers'] From 717b6e3cba1802a8384f49ff4c10fdcc1ad077c6 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 13:38:15 +0200 Subject: [PATCH 069/100] Run formatter --- .../tests/base/utils/http/test_tls_and_certs.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index e2955f0f270e2..4dc5167021813 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -12,7 +12,7 @@ pytestmark = [pytest.mark.unit] -TEST_CIPHERS = ['AES256-GCM-SHA384','AES128-GCM-SHA256'] +TEST_CIPHERS = ['AES256-GCM-SHA384', 'AES128-GCM-SHA256'] class TestCert: @@ -361,9 +361,7 @@ def test_tls_ciphers_applied_consistently(self): # Verify that the ciphers are set correctly in the TLS context for cipher in instance['tls_ciphers']: # At least one entry's name field should match the OpenSSL name - assert any( - cipher in c.get('name') for c in https_adapter.ssl_context.get_ciphers() - ) + assert any(cipher in c.get('name') for c in https_adapter.ssl_context.get_ciphers()) def test_default_tls_ciphers(self): """Test that default TLS ciphers are applied when none are specified.""" From 141157bd465a87477ddd2a30ebc23e5ad4edfc0d Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 13:52:00 +0200 Subject: [PATCH 070/100] Fix datadog_checks_base test final --- .../tests/base/utils/http/test_tls_and_certs.py | 5 ----- 1 file changed, 5 deletions(-) diff 
--git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 4dc5167021813..75158490fbf1e 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -338,11 +338,6 @@ def test_session_uses_ssl_context_adapter(self): assert isinstance(https_adapter, SSLContextAdapter) assert https_adapter.ssl_context is http.ssl_context - def test_ssl_context_ciphers_can_be_modified(self): - """Test that the chosen ciphers can be set on CI runners.""" - context = ssl.create_default_context() - context.set_ciphers('TLS_RSA_WITH_AES_256_GCM_SHA384') - def test_tls_ciphers_applied_consistently(self): """Test that tls_ciphers are applied consistently.""" instance = {'tls_verify': True, 'tls_ciphers': TEST_CIPHERS} From 9244708dca6ef5688ef02125eec5c804ead8c952 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 14:19:09 +0200 Subject: [PATCH 071/100] Add changelog --- datadog_checks_base/changelog.d/20179.fixed | 1 + datadog_checks_dev/changelog.d/20179.fixed | 1 + spark/changelog.d/20179.fixed | 1 + tls/changelog.d/20179.fixed | 1 + vsphere/changelog.d/20179.fixed | 1 + 5 files changed, 5 insertions(+) create mode 100644 datadog_checks_base/changelog.d/20179.fixed create mode 100644 datadog_checks_dev/changelog.d/20179.fixed create mode 100644 spark/changelog.d/20179.fixed create mode 100644 tls/changelog.d/20179.fixed create mode 100644 vsphere/changelog.d/20179.fixed diff --git a/datadog_checks_base/changelog.d/20179.fixed b/datadog_checks_base/changelog.d/20179.fixed new file mode 100644 index 0000000000000..92096c8f53c1f --- /dev/null +++ b/datadog_checks_base/changelog.d/20179.fixed @@ -0,0 +1 @@ +Allow HTTPS requests to use `tls_ciphers` parameter diff --git a/datadog_checks_dev/changelog.d/20179.fixed b/datadog_checks_dev/changelog.d/20179.fixed new file mode 100644 index 
0000000000000..92096c8f53c1f --- /dev/null +++ b/datadog_checks_dev/changelog.d/20179.fixed @@ -0,0 +1 @@ +Allow HTTPS requests to use `tls_ciphers` parameter diff --git a/spark/changelog.d/20179.fixed b/spark/changelog.d/20179.fixed new file mode 100644 index 0000000000000..92096c8f53c1f --- /dev/null +++ b/spark/changelog.d/20179.fixed @@ -0,0 +1 @@ +Allow HTTPS requests to use `tls_ciphers` parameter diff --git a/tls/changelog.d/20179.fixed b/tls/changelog.d/20179.fixed new file mode 100644 index 0000000000000..92096c8f53c1f --- /dev/null +++ b/tls/changelog.d/20179.fixed @@ -0,0 +1 @@ +Allow HTTPS requests to use `tls_ciphers` parameter diff --git a/vsphere/changelog.d/20179.fixed b/vsphere/changelog.d/20179.fixed new file mode 100644 index 0000000000000..92096c8f53c1f --- /dev/null +++ b/vsphere/changelog.d/20179.fixed @@ -0,0 +1 @@ +Allow HTTPS requests to use `tls_ciphers` parameter From f5c5df6d190a19c6a49cfcfa3bc530637371b865 Mon Sep 17 00:00:00 2001 From: dkirov-dd <166512750+dkirov-dd@users.noreply.github.com> Date: Wed, 18 Jun 2025 14:23:20 +0200 Subject: [PATCH 072/100] Update base.py --- datadog_checks_base/datadog_checks/base/checks/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datadog_checks_base/datadog_checks/base/checks/base.py b/datadog_checks_base/datadog_checks/base/checks/base.py index 1728911961350..fee2d288ad77f 100644 --- a/datadog_checks_base/datadog_checks/base/checks/base.py +++ b/datadog_checks_base/datadog_checks/base/checks/base.py @@ -442,7 +442,7 @@ def diagnosis(self) -> Diagnosis: def get_tls_context(self, refresh=False, overrides=None): # type: (bool, Dict[AnyStr, Any]) -> ssl.SSLContext """ - Creates and cache an TlsContext instance based on user configuration. + Creates and cache an SSLContext instance based on user configuration. Note that user configuration can be overridden by using `overrides`. This should only be applied to older integration that manually set config values. 
From 90b183265267e1beb6c8f4abfcbda3cda2d7114d Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 18 Jun 2025 14:33:19 +0200 Subject: [PATCH 073/100] Use create_ssl_context in tls_remote --- tls/datadog_checks/tls/tls_remote.py | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/tls/datadog_checks/tls/tls_remote.py b/tls/datadog_checks/tls/tls_remote.py index 34430a7fd7fab..9a7f648f634d5 100644 --- a/tls/datadog_checks/tls/tls_remote.py +++ b/tls/datadog_checks/tls/tls_remote.py @@ -1,7 +1,6 @@ # (C) Datadog, Inc. 2021-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) -import ssl from hashlib import sha256 from struct import pack, unpack @@ -11,6 +10,7 @@ from datadog_checks.base import ConfigurationError, is_affirmative from datadog_checks.base.log import get_check_logger +from datadog_checks.base.utils.http import create_ssl_context from datadog_checks.base.utils.time import get_timestamp from .const import SERVICE_CHECK_CAN_CONNECT, SERVICE_CHECK_EXPIRATION, SERVICE_CHECK_VALIDATION @@ -178,16 +178,7 @@ def fetch_intermediate_certs(self): with sock: try: - context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_CLIENT) - context.verify_mode = ssl.CERT_NONE - ciphers = self.agent_check.tls_context_wrapper.config.get('tls_ciphers') - if ciphers: - if 'ALL' in ciphers: - updated_ciphers = "ALL" - else: - updated_ciphers = ":".join(ciphers) - - context.set_ciphers(updated_ciphers) + context = create_ssl_context(self.agent_check.tls_config, overrides={'tls_verify': False}) with context.wrap_socket(sock, server_hostname=self.agent_check._server_hostname) as secure_sock: der_cert = secure_sock.getpeercert(binary_form=True) From 97abe09d97b3bfa02491cea3e9b0a6279908bc6e Mon Sep 17 00:00:00 2001 From: dkirov-dd <166512750+dkirov-dd@users.noreply.github.com> Date: Fri, 20 Jun 2025 11:24:07 +0200 Subject: [PATCH 074/100] Apply suggestions from code review Co-authored-by: Ilia Kurenkov --- 
datadog_checks_base/datadog_checks/base/utils/http.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index fee8593dda01a..6d87332f0ab8e 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -202,8 +202,7 @@ def get_tls_config_from_options(new_options): class SSLContextAdapter(requests.adapters.HTTPAdapter): """ - HTTPS adapter that uses SSL contexts. - This ensures consistent TLS configuration across all HTTPS requests. + This adapter lets us hook into requests.Session and make it use the SSLContext that we manage. """ def __init__(self, ssl_context, has_custom_context=False, **kwargs): @@ -709,7 +708,7 @@ def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): # See: https://github.com/msabramo/requests-unixsocket session.mount('{}://'.format(UDS_SCHEME), requests_unixsocket.UnixAdapter()) - # Attributes can't be passed to the constructor + # Options cannot be passed to the requests.Session init method but can be set as attributes on an initialized Session instance. 
for option, value in self.options.items(): setattr(session, option, value) return session From ebcdf0c882b026a3c8b2527146933b2a13d3faaa Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 20 Jun 2025 15:02:04 +0200 Subject: [PATCH 075/100] Add _mount_new_ssl_adapter method --- .../datadog_checks/base/utils/http.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 6d87332f0ab8e..74ad05b84ae42 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -528,14 +528,12 @@ def _request(self, method, url, options): session = self.session if new_context is not None: # If the context has changed, we need to create a new adapter - certadapter = SSLContextAdapter(new_context, has_custom_context=True) - session.mount(url, certadapter) + self._mount_new_ssl_adapter(session, url, ssl_context=new_context, has_custom_context=True) else: current_adapter = session.get_adapter(url) if isinstance(current_adapter, SSLContextAdapter) and current_adapter.has_custom_context: # If we are using a custom context, we need to revert to the original context - certadapter = SSLContextAdapter(self.ssl_context) - session.mount(url, certadapter) + self._mount_new_ssl_adapter(session, url) else: session = self.create_session(ssl_context=new_context) request_method = getattr(session, method) @@ -589,8 +587,7 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per session = self.create_session(new_context) else: session = self.session - certadapter = SSLContextAdapter(new_context) - session.mount(url, certadapter) + self._mount_new_ssl_adapter(session, url, ssl_context=new_context) request_method = getattr(session, method) response = request_method(url, **new_options) return response @@ -732,6 +729,13 @@ def __del__(self): # no cov # before _session was 
ever defined (since __del__ executes even if __init__ fails). pass + def _mount_new_ssl_adapter(self, session, url, ssl_context=None, **kwargs): + """ + Mounts a new SSLContextAdapter to the session for the given URL. + """ + adapter = SSLContextAdapter(ssl_context or self.ssl_context, **kwargs) + session.mount(url, adapter) + @contextmanager def handle_kerberos_keytab(keytab_file): From c073243dcf55fc9f77b8b2c5d8d75c175d6ecb0f Mon Sep 17 00:00:00 2001 From: David Kirov Date: Fri, 20 Jun 2025 15:14:46 +0200 Subject: [PATCH 076/100] Implement suggestions from code review --- .../datadog_checks/base/utils/http.py | 89 ++++--------------- .../datadog_checks/base/utils/tls.py | 65 +++++++++++++- .../base/utils/http/test_tls_and_certs.py | 12 +-- 3 files changed, 85 insertions(+), 81 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 74ad05b84ae42..9fa070166f36d 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -6,12 +6,12 @@ import logging import os import re -import ssl import warnings from contextlib import ExitStack, contextmanager from copy import deepcopy from urllib.parse import quote, urlparse, urlunparse +from .tls import create_ssl_context import lazy_loader import requests from binary import KIBIBYTE @@ -117,84 +117,26 @@ UDS_SCHEME = 'unix' -def create_ssl_context(config, overrides=None): - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext - # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT - context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) - - if overrides is not None: - config = config.copy() - config.update(overrides) - - LOGGER.debug('Creating SSL context with config: %s', config) - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname - context.check_hostname = is_affirmative(config['tls_verify']) and 
config.get('tls_validate_hostname', True) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.verify_mode - context.verify_mode = ssl.CERT_REQUIRED if is_affirmative(config['tls_verify']) else ssl.CERT_NONE - - ciphers = config.get('tls_ciphers') - if ciphers: - if 'ALL' in ciphers: - updated_ciphers = "ALL" - else: - updated_ciphers = ":".join(ciphers) - LOGGER.debug('Setting TLS ciphers to: %s', updated_ciphers) - context.set_ciphers(updated_ciphers) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs - ca_cert = config.get('tls_ca_cert') - try: - if ca_cert: - ca_cert = os.path.expanduser(ca_cert) - if os.path.isdir(ca_cert): - context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None) - else: - context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None) - else: - context.load_default_certs(ssl.Purpose.SERVER_AUTH) - except FileNotFoundError: - LOGGER.warning( - 'TLS CA certificate file not found: %s. Please check the `tls_ca_cert` configuration option.', - ca_cert, - ) - - # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain - client_cert, client_key = config.get('tls_cert'), config.get('tls_private_key') - client_key_pass = config.get('tls_private_key_password') - try: - if client_key: - client_key = os.path.expanduser(client_key) - if client_cert: - client_cert = os.path.expanduser(client_cert) - context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) - except FileNotFoundError: - LOGGER.warning( - 'TLS client certificate file not found: %s. 
' 'Please check the `tls_cert` configuration option.', - client_cert, - ) - - return context - - def get_tls_config_from_options(new_options): '''Extract TLS configuration from request options.''' tls_config = {} verify = new_options.get('verify') cert = new_options.get('cert') - if verify is True: - tls_config["tls_verify"] = True - elif verify is False: - tls_config["tls_verify"] = False - elif isinstance(verify, str): + if isinstance(verify, str): tls_config["tls_verify"] = True tls_config["tls_ca_cert"] = verify + elif isinstance(verify, bool): + tls_config["tls_verify"] = verify + else: + LOGGER.warning( + 'Unexpected type for `verify` option. Expected bool or str, got %s.', + type(verify).__name__, + ) if isinstance(cert, str): tls_config["tls_cert"] = cert - elif isinstance(cert, tuple) and len(cert) == 2: + elif isinstance(cert, tuple) or isinstance(cert, list) and len(cert) == 2: tls_config["tls_cert"] = cert[0] tls_config["tls_private_key"] = cert[1] return tls_config @@ -535,7 +477,7 @@ def _request(self, method, url, options): # If we are using a custom context, we need to revert to the original context self._mount_new_ssl_adapter(session, url) else: - session = self.create_session(ssl_context=new_context) + session = self._create_session(ssl_context=new_context) request_method = getattr(session, method) if self.auth_token_handler: @@ -582,9 +524,8 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per if not certs: raise e new_context = create_ssl_context(self.tls_config, overrides={'tls_ca_cert': certs}) - # retry the connection via session object if not persist: - session = self.create_session(new_context) + session = self._create_session(new_context) else: session = self.session self._mount_new_ssl_adapter(session, url, ssl_context=new_context) @@ -663,7 +604,7 @@ def load_intermediate_certs(self, der_cert, certs): # Assume HTTP for now try: - response = self.get(uri, verify=False) # SKIP_HTTP_VALIDATION + response = 
self.get(uri) # SKIP_HTTP_VALIDATION except Exception as e: self.logger.error('Error fetching intermediate certificate from `%s`: %s', uri, e) continue @@ -674,7 +615,7 @@ def load_intermediate_certs(self, der_cert, certs): self.load_intermediate_certs(intermediate_cert, certs) return certs - def create_session(self, ssl_context=None): + def _create_session(self, ssl_context=None): session = requests.Session() context = self.ssl_context if ssl_context is None else ssl_context @@ -714,7 +655,7 @@ def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): def session(self): if self._session is None: # Create a new session if it doesn't exist - self._session = self.create_session() + self._session = self._create_session() return self._session def handle_auth_token(self, **request): diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index e6fc7643a18dd..adcd91e3832b5 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -4,9 +4,10 @@ import logging from copy import deepcopy from typing import TYPE_CHECKING, Any, AnyStr, Dict # noqa: F401 +import ssl +import os from ..config import is_affirmative -from .http import create_ssl_context if TYPE_CHECKING: from ..types import InstanceType # noqa: F401 @@ -31,6 +32,68 @@ } +def create_ssl_context(config, overrides=None): + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext + # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT + context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) + + if overrides is not None: + config = config.copy() + config.update(overrides) + + LOGGER.debug('Creating SSL context with config: %s', config) + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname + context.check_hostname = is_affirmative(config['tls_verify']) and config.get('tls_validate_hostname', True) + + # 
https://docs.python.org/3/library/ssl.html#ssl.SSLContext.verify_mode + context.verify_mode = ssl.CERT_REQUIRED if is_affirmative(config['tls_verify']) else ssl.CERT_NONE + + ciphers = config.get('tls_ciphers', []) + if isinstance(ciphers, str): + # If ciphers is a string, assume that it is formatted correctly + configured_ciphers = "ALL" if "ALL" in ciphers else ciphers + else: + configured_ciphers = "ALL" if "ALL" in ciphers else ":".join(ciphers) + if configured_ciphers: + LOGGER.debug('Setting TLS ciphers to: %s', configured_ciphers) + context.set_ciphers(configured_ciphers) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_verify_locations + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_default_certs + ca_cert = config.get('tls_ca_cert') + try: + if ca_cert: + ca_cert = os.path.expanduser(ca_cert) + if os.path.isdir(ca_cert): + context.load_verify_locations(cafile=None, capath=ca_cert, cadata=None) + else: + context.load_verify_locations(cafile=ca_cert, capath=None, cadata=None) + else: + context.load_default_certs(ssl.Purpose.SERVER_AUTH) + except FileNotFoundError: + LOGGER.warning( + 'TLS CA certificate file not found: %s. Please check the `tls_ca_cert` configuration option.', + ca_cert, + ) + + # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain + client_cert, client_key = config.get('tls_cert'), config.get('tls_private_key') + client_key_pass = config.get('tls_private_key_password') + try: + if client_key: + client_key = os.path.expanduser(client_key) + if client_cert: + client_cert = os.path.expanduser(client_cert) + context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) + except FileNotFoundError: + LOGGER.warning( + 'TLS client certificate file not found: %s. 
' 'Please check the `tls_cert` configuration option.', + client_cert, + ) + + return context + + class TlsContextWrapper(object): __slots__ = ('logger', 'config', 'tls_context') diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 75158490fbf1e..cc3b9cecfa92a 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -265,14 +265,14 @@ def test_fetch_intermediate_certs(self, port): def test_fetch_intermediate_certs_tls_ciphers(self): """Test that fetch_intermediate_certs uses the correct ciphers.""" - instance = {'tls_verify': True, 'tls_ciphers': [TEST_CIPHERS[0]]} + instance = {'tls_verify': True, 'tls_ciphers': TEST_CIPHERS[0]} init_config = {} with mock.patch('datadog_checks.base.utils.http.create_socket_connection') as mock_create_socket_connection: mock_socket = mock.MagicMock() mock_create_socket_connection.return_value = mock_socket - with mock.patch('datadog_checks.base.utils.http.ssl.SSLContext') as mock_ssl_context_class: + with mock.patch('datadog_checks.base.utils.tls.ssl.SSLContext') as mock_ssl_context_class: mock_context = mock.MagicMock() mock_ssl_context_class.return_value = mock_context @@ -285,11 +285,11 @@ def test_fetch_intermediate_certs_tls_ciphers(self): # Mock the certificate loading to avoid cryptography operations with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.load_intermediate_certs'): http = RequestsWrapper(instance, init_config) - mock_context.set_ciphers.assert_called_once_with(instance['tls_ciphers'][0]) + mock_context.set_ciphers.assert_called_once_with(instance['tls_ciphers']) http.fetch_intermediate_certs('example.com', 443) # Assert set_ciphers called a second time after fetch_intermediate_certs assert mock_context.set_ciphers.call_count == 2 - assert mock_context.set_ciphers.call_args_list[1][0][0] == instance['tls_ciphers'][0] 
+ assert mock_context.set_ciphers.call_args_list[1][0][0] == instance['tls_ciphers'] class TestSSLContext: @@ -377,7 +377,7 @@ def test_host_header_compatibility(self): 'tls_use_host_header': True, 'headers': {'Host': 'custom-host.example.com'}, 'tls_verify': True, - 'tls_ciphers': [TEST_CIPHERS[0]], + 'tls_ciphers': TEST_CIPHERS[0], } init_config = {} http = RequestsWrapper(instance, init_config) @@ -423,7 +423,7 @@ def test_backward_compatibility_maintained(self): 'tls_validate_hostname': True, 'tls_ignore_warning': False, 'tls_protocols_allowed': ['TLSv1.2', 'TLSv1.3'], - 'tls_ciphers': [TEST_CIPHERS[0]], + 'tls_ciphers': TEST_CIPHERS[0], } init_config = {} From 15a587345655fb0b84bbd29acf40a930d6e3e706 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 24 Jun 2025 14:46:39 +0200 Subject: [PATCH 077/100] Use ChainMaps --- .../datadog_checks/base/utils/http.py | 28 ++++++------------- .../datadog_checks/base/utils/tls.py | 10 ++----- tls/datadog_checks/tls/tls_remote.py | 3 +- 3 files changed, 13 insertions(+), 28 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 9fa070166f36d..260fc0ea81d77 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -7,11 +7,11 @@ import os import re import warnings +from collections import ChainMap from contextlib import ExitStack, contextmanager from copy import deepcopy from urllib.parse import quote, urlparse, urlunparse -from .tls import create_ssl_context import lazy_loader import requests from binary import KIBIBYTE @@ -29,6 +29,7 @@ from .headers import get_default_headers, update_headers from .network import create_socket_connection from .time import get_timestamp +from .tls import create_ssl_context # See Performance Optimizations in this package's README.md. 
requests_kerberos = lazy_loader.load('requests_kerberos') @@ -445,7 +446,7 @@ def _request(self, method, url, options): if persist is None: persist = self.persist_connections - new_options = self.populate_options(options) + new_options = ChainMap(options, self.options) if url.startswith('https') and not self.ignore_tls_warning and not new_options['verify']: self.logger.debug('An unverified HTTPS request is being made to %s', url) @@ -505,12 +506,9 @@ def _get_new_context(self, url, new_options): # If the URL is not HTTPS, no need to update the TLS context. if not url.startswith('https'): return - new_tls_config = get_tls_config_from_options(new_options) - self.logger.debug('Overrides for new context config: %s', new_tls_config) - - return create_ssl_context(self.tls_config, overrides=new_tls_config) + return create_ssl_context(ChainMap(new_tls_config, self.tls_config)) def make_request_aia_chasing(self, request_method, method, url, new_options, persist): try: @@ -523,7 +521,7 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per certs = self.fetch_intermediate_certs(hostname, port) if not certs: raise e - new_context = create_ssl_context(self.tls_config, overrides={'tls_ca_cert': certs}) + new_context = create_ssl_context(ChainMap({'tls_ca_cert': certs}, self.tls_config)) if not persist: session = self._create_session(new_context) else: @@ -533,17 +531,6 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per response = request_method(url, **new_options) return response - def populate_options(self, options): - # Avoid needless dictionary update if there are no options - if not options: - return self.options - - for option, value in self.options.items(): - # Make explicitly set options take precedence - options.setdefault(option, value) - - return options - def fetch_intermediate_certs(self, hostname, port=443): # TODO: prefer stdlib implementation when available, see https://bugs.python.org/issue18617 certs 
= [] @@ -556,7 +543,7 @@ def fetch_intermediate_certs(self, hostname, port=443): with sock: try: - context = create_ssl_context(self.tls_config, overrides={'tls_verify': False}) + context = create_ssl_context(ChainMap({'tls_verify': False}, self.tls_config)) with context.wrap_socket(sock, server_hostname=hostname) as secure_sock: der_cert = secure_sock.getpeercert(binary_form=True) @@ -646,7 +633,8 @@ def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): # See: https://github.com/msabramo/requests-unixsocket session.mount('{}://'.format(UDS_SCHEME), requests_unixsocket.UnixAdapter()) - # Options cannot be passed to the requests.Session init method but can be set as attributes on an initialized Session instance. + # Options cannot be passed to the requests.Session init method + # but can be set as attributes on an initialized Session instance. for option, value in self.options.items(): setattr(session, option, value) return session diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index adcd91e3832b5..a1a560a968861 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -2,10 +2,10 @@ # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) import logging +import os +import ssl from copy import deepcopy from typing import TYPE_CHECKING, Any, AnyStr, Dict # noqa: F401 -import ssl -import os from ..config import is_affirmative @@ -32,15 +32,11 @@ } -def create_ssl_context(config, overrides=None): +def create_ssl_context(config): # https://docs.python.org/3/library/ssl.html#ssl.SSLContext # https://docs.python.org/3/library/ssl.html#ssl.PROTOCOL_TLS_CLIENT context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS) - if overrides is not None: - config = config.copy() - config.update(overrides) - LOGGER.debug('Creating SSL context with config: %s', config) # 
https://docs.python.org/3/library/ssl.html#ssl.SSLContext.check_hostname context.check_hostname = is_affirmative(config['tls_verify']) and config.get('tls_validate_hostname', True) diff --git a/tls/datadog_checks/tls/tls_remote.py b/tls/datadog_checks/tls/tls_remote.py index 9a7f648f634d5..5c5583056910a 100644 --- a/tls/datadog_checks/tls/tls_remote.py +++ b/tls/datadog_checks/tls/tls_remote.py @@ -1,6 +1,7 @@ # (C) Datadog, Inc. 2021-present # All rights reserved # Licensed under a 3-clause BSD style license (see LICENSE) +from collections import ChainMap from hashlib import sha256 from struct import pack, unpack @@ -178,7 +179,7 @@ def fetch_intermediate_certs(self): with sock: try: - context = create_ssl_context(self.agent_check.tls_config, overrides={'tls_verify': False}) + context = create_ssl_context(ChainMap({'tls_verify': False}, self.agent_check.tls_config)) with context.wrap_socket(sock, server_hostname=self.agent_check._server_hostname) as secure_sock: der_cert = secure_sock.getpeercert(binary_form=True) From bcb39a2071603ece8bd79d68d6598774e778f9b6 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 25 Jun 2025 13:37:48 +0200 Subject: [PATCH 078/100] Implement caching for HTTPS adapters --- .../datadog_checks/base/utils/http.py | 127 ++++++++---------- .../datadog_checks/base/utils/tls.py | 23 +++- 2 files changed, 81 insertions(+), 69 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 260fc0ea81d77..3c5085498e65d 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -29,7 +29,7 @@ from .headers import get_default_headers, update_headers from .network import create_socket_connection from .time import get_timestamp -from .tls import create_ssl_context +from .tls import create_ssl_context, TlsConfig, DEFAULT_PROTOCOL_VERSIONS, SUPPORTED_PROTOCOL_VERSIONS # See Performance Optimizations in 
this package's README.md. requests_kerberos = lazy_loader.load('requests_kerberos') @@ -55,10 +55,6 @@ # https://www.bittorrent.org/beps/bep_0003.html DEFAULT_CHUNK_SIZE = 16 -# https://github.com/python/cpython/blob/ef516d11c1a0f885dba0aba8cf5366502077cdd4/Lib/ssl.py#L158-L165 -DEFAULT_PROTOCOL_VERSIONS = {'SSLv3', 'TLSv1.2', 'TLSv1.3'} -SUPPORTED_PROTOCOL_VERSIONS = {'SSLv3', 'TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3'} - STANDARD_FIELDS = { 'allow_redirects': True, 'auth_token': None, @@ -84,19 +80,10 @@ 'read_timeout': None, 'request_size': DEFAULT_CHUNK_SIZE, 'skip_proxy': False, - 'tls_ca_cert': None, - 'tls_cert': None, - 'tls_ciphers': 'ALL', - 'tls_use_host_header': False, - 'tls_ignore_warning': False, - 'tls_private_key': None, - 'tls_private_key_password': None, - 'tls_protocols_allowed': DEFAULT_PROTOCOL_VERSIONS, - 'tls_validate_hostname': True, - 'tls_verify': True, 'timeout': DEFAULT_TIMEOUT, 'use_legacy_auth_encoding': True, 'username': None, + **TlsConfig().__dict__ # This will include all TLS-related fields } # For any known legacy fields that may be widespread DEFAULT_REMAPPED_FIELDS = { @@ -142,6 +129,9 @@ def get_tls_config_from_options(new_options): tls_config["tls_private_key"] = cert[1] return tls_config +def update_session_https_adapter(session, tls_config): + return session + class SSLContextAdapter(requests.adapters.HTTPAdapter): """ @@ -202,6 +192,7 @@ def __enter__(self): class RequestsWrapper(object): __slots__ = ( '_session', + '_https_adapters', 'tls_use_host_header', 'ignore_tls_warning', 'log_requests', @@ -214,7 +205,6 @@ class RequestsWrapper(object): 'request_size', 'tls_protocols_allowed', 'tls_config', - 'ssl_context', ) def __init__(self, instance, init_config, remapper=None, logger=None, session=None): @@ -412,7 +402,7 @@ def __init__(self, instance, init_config, remapper=None, logger=None, session=No # Create TLS context for consistent TLS configuration self.tls_config = {key: value for key, value in config.items() if 
key.startswith('tls_')} - self.ssl_context = create_ssl_context(self.tls_config) + self._https_adapters = {} def get(self, url, **options): return self._request('get', url, options) @@ -466,19 +456,12 @@ def _request(self, method, url, options): for hook in self.request_hooks: stack.enter_context(hook()) - new_context = self._get_new_context(url, new_options) + new_tls_config = self._get_new_tls_config(url, new_options) if persist: session = self.session - if new_context is not None: - # If the context has changed, we need to create a new adapter - self._mount_new_ssl_adapter(session, url, ssl_context=new_context, has_custom_context=True) - else: - current_adapter = session.get_adapter(url) - if isinstance(current_adapter, SSLContextAdapter) and current_adapter.has_custom_context: - # If we are using a custom context, we need to revert to the original context - self._mount_new_ssl_adapter(session, url) + self._mount_https_adapter(session, new_tls_config) else: - session = self._create_session(ssl_context=new_context) + session = self._create_session(tls_config=new_tls_config) request_method = getattr(session, method) if self.auth_token_handler: @@ -494,21 +477,20 @@ def _request(self, method, url, options): return ResponseWrapper(response, self.request_size) - def _get_new_context(self, url, new_options): + def _get_new_tls_config(self, url, new_options): """ - Creates a new TLS context if the new request options differ from the default ones. + Creates a new TLS config if the new request options differ from the default ones. """ - self.logger.debug('Default options: %s', self.options) - LOGGER.debug('New options: %s', new_options) # If `verify` or `cert` options differ from the defaults, a new adapter is created. if new_options.get('verify') == self.options['verify'] and new_options.get('cert') == self.options['cert']: return - # If the URL is not HTTPS, no need to update the TLS context. 
if not url.startswith('https'): return - new_tls_config = get_tls_config_from_options(new_options) - self.logger.debug('Overrides for new context config: %s', new_tls_config) - return create_ssl_context(ChainMap(new_tls_config, self.tls_config)) + requested_tls_config = get_tls_config_from_options(new_options) + self.logger.debug('Overrides for new context config: %s', requested_tls_config) + new_tls_config = self.tls_config.copy() + new_tls_config.update(new_tls_config) + return new_tls_config def make_request_aia_chasing(self, request_method, method, url, new_options, persist): try: @@ -521,12 +503,14 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per certs = self.fetch_intermediate_certs(hostname, port) if not certs: raise e - new_context = create_ssl_context(ChainMap({'tls_ca_cert': certs}, self.tls_config)) + new_ca_certs = {'tls_ca_cert': certs} + new_tls_config = self.tls_config.copy() + new_tls_config.update(new_ca_certs) if not persist: - session = self._create_session(new_context) + session = self._create_session(new_tls_config) else: session = self.session - self._mount_new_ssl_adapter(session, url, ssl_context=new_context) + self._mount_https_adapter(session, new_tls_config) request_method = getattr(session, method) response = request_method(url, **new_options) return response @@ -602,32 +586,11 @@ def load_intermediate_certs(self, der_cert, certs): self.load_intermediate_certs(intermediate_cert, certs) return certs - def _create_session(self, ssl_context=None): + def _create_session(self, tls_config=None): session = requests.Session() - context = self.ssl_context if ssl_context is None else ssl_context - - # Enables HostHeaderSSLAdapter if needed - # https://toolbelt.readthedocs.io/en/latest/adapters.html#hostheaderssladapter - if self.tls_use_host_header: - # Create a combined adapter that supports both TLS context and host headers - class SSLContextHostHeaderAdapter(SSLContextAdapter, 
_http_utils.HostHeaderSSLAdapter): - def __init__(self, ssl_context, **kwargs): - SSLContextAdapter.__init__(self, ssl_context, **kwargs) - _http_utils.HostHeaderSSLAdapter.__init__(self, **kwargs) - - def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): - # Use TLS context from wrapper - pool_kwargs['ssl_context'] = self.ssl_context - return _http_utils.HostHeaderSSLAdapter.init_poolmanager( - self, connections, maxsize, block=block, **pool_kwargs - ) + config = self.tls_config if tls_config is None else tls_config - https_adapter = SSLContextHostHeaderAdapter(context) - else: - # Use SSLContextHTTPSAdapter for consistent TLS configuration - https_adapter = SSLContextAdapter(context) - - session.mount('https://', https_adapter) + self._mount_https_adapter(session, config) # Enable Unix Domain Socket (UDS) support. # See: https://github.com/msabramo/requests-unixsocket @@ -658,12 +621,40 @@ def __del__(self): # no cov # before _session was ever defined (since __del__ executes even if __init__ fails). pass - def _mount_new_ssl_adapter(self, session, url, ssl_context=None, **kwargs): - """ - Mounts a new SSLContextAdapter to the session for the given URL. 
- """ - adapter = SSLContextAdapter(ssl_context or self.ssl_context, **kwargs) - session.mount(url, adapter) + def _mount_https_adapter(self, session, tls_config): + tls_config_key = TlsConfig(**tls_config) + https_adapter = self._https_adapters.get(tls_config_key) + + if https_adapter is not None: + session.mount('https://', https_adapter) + return + + context = create_ssl_context(tls_config) + # Enables HostHeaderSSLAdapter if needed + # https://toolbelt.readthedocs.io/en/latest/adapters.html#hostheaderssladapter + if self.tls_use_host_header: + # Create a combined adapter that supports both TLS context and host headers + class SSLContextHostHeaderAdapter(SSLContextAdapter, _http_utils.HostHeaderSSLAdapter): + def __init__(self, ssl_context, **kwargs): + SSLContextAdapter.__init__(self, ssl_context, **kwargs) + _http_utils.HostHeaderSSLAdapter.__init__(self, **kwargs) + + def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): + # Use TLS context from wrapper + pool_kwargs['ssl_context'] = self.ssl_context + return _http_utils.HostHeaderSSLAdapter.init_poolmanager( + self, connections, maxsize, block=block, **pool_kwargs + ) + + https_adapter = SSLContextHostHeaderAdapter(context) + else: + # Use SSLContextAdapter for consistent TLS configuration + https_adapter = SSLContextAdapter(context) + + # Cache the adapter for reuse + self._https_adapters[tls_config_key] = https_adapter + session.mount('https://', https_adapter) + @contextmanager diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index a1a560a968861..60bbf7dd9416d 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -5,7 +5,8 @@ import os import ssl from copy import deepcopy -from typing import TYPE_CHECKING, Any, AnyStr, Dict # noqa: F401 +from typing import TYPE_CHECKING, Any, AnyStr, Dict, Optional # noqa: F401 +from dataclasses
import dataclass from ..config import is_affirmative @@ -21,6 +22,10 @@ # prefix. UNIQUE_FIELD_PREFIX = '_tls_context_' +# https://github.com/python/cpython/blob/ef516d11c1a0f885dba0aba8cf5366502077cdd4/Lib/ssl.py#L158-L165 +DEFAULT_PROTOCOL_VERSIONS = ('SSLv3', 'TLSv1.2', 'TLSv1.3') +SUPPORTED_PROTOCOL_VERSIONS = ('SSLv3', 'TLSv1', 'TLSv1.1', 'TLSv1.2', 'TLSv1.3') + STANDARD_FIELDS = { 'tls_verify': True, 'tls_ca_cert': None, @@ -31,6 +36,22 @@ 'tls_ciphers': 'ALL', } +@dataclass(frozen=True, eq=True) +class TlsConfig: + """ + Class used internally to cache HTTPS adapters with specific TLS configurations. + """ + tls_ca_cert: str | None = None + tls_cert: str | None = None + tls_ciphers: str = 'ALL' + tls_use_host_header: bool = False + tls_ignore_warning: bool = False + tls_private_key: str | None = None + tls_private_key_password: str | None = None + tls_protocols_allowed: tuple[str, ...] = DEFAULT_PROTOCOL_VERSIONS + tls_validate_hostname: bool = True + tls_verify: bool = True + def create_ssl_context(config): # https://docs.python.org/3/library/ssl.html#ssl.SSLContext From 9915fc5bd131fdb705e335c58ec2a80815744c40 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 25 Jun 2025 14:22:12 +0200 Subject: [PATCH 079/100] Use pydantic for TlsConfig class --- datadog_checks_base/datadog_checks/base/utils/tls.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index 60bbf7dd9416d..e7855ba2f2a2c 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -7,6 +7,7 @@ from copy import deepcopy from typing import TYPE_CHECKING, Any, AnyStr, Dict, Optional # noqa: F401 from dataclasses import dataclass +from pydantic import BaseModel from ..config import is_affirmative @@ -36,8 +37,7 @@ 'tls_ciphers': 'ALL', } -@dataclass(frozen=True, eq=True) -class TlsConfig: +class 
TlsConfig(BaseModel): """ Class used internally to cache HTTPS adapters with specific TLS configurations. """ @@ -52,6 +52,9 @@ class TlsConfig: tls_validate_hostname: bool = True tls_verify: bool = True + class Config: + frozen = True + def create_ssl_context(config): # https://docs.python.org/3/library/ssl.html#ssl.SSLContext From 7a9059e166c90574f52bb73509ea63c6ea7d132e Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 25 Jun 2025 16:09:20 +0200 Subject: [PATCH 080/100] Minor improvements and fixes --- .../datadog_checks/base/utils/http.py | 12 ++++++------ .../datadog_checks/base/utils/tls.py | 16 +++++++++++----- 2 files changed, 17 insertions(+), 11 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 3c5085498e65d..8d2691fe090dc 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -138,11 +138,10 @@ class SSLContextAdapter(requests.adapters.HTTPAdapter): This adapter lets us hook into requests.Session and make it use the SSLContext that we manage. """ - def __init__(self, ssl_context, has_custom_context=False, **kwargs): + def __init__(self, ssl_context, **kwargs): self.ssl_context = ssl_context # This is used to determine if the adapter was created with a custom context # for the purpose of reverting to the default context when needed. - self.has_custom_context = has_custom_context super(SSLContextAdapter, self).__init__() def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): @@ -483,13 +482,13 @@ def _get_new_tls_config(self, url, new_options): """ # If `verify` or `cert` options differ from the defaults, a new adapter is created. 
if new_options.get('verify') == self.options['verify'] and new_options.get('cert') == self.options['cert']: - return + return self.tls_config if not url.startswith('https'): - return + return self.tls_config requested_tls_config = get_tls_config_from_options(new_options) self.logger.debug('Overrides for new context config: %s', requested_tls_config) new_tls_config = self.tls_config.copy() - new_tls_config.update(new_tls_config) + new_tls_config.update(requested_tls_config) return new_tls_config def make_request_aia_chasing(self, request_method, method, url, new_options, persist): @@ -503,7 +502,7 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per certs = self.fetch_intermediate_certs(hostname, port) if not certs: raise e - new_ca_certs = {'tls_ca_cert': certs} + new_ca_certs = {'tls_intermediate_ca_certs': certs} new_tls_config = self.tls_config.copy() new_tls_config.update(new_ca_certs) if not persist: @@ -622,6 +621,7 @@ def __del__(self): # no cov pass def _mount_https_adapter(self, session, tls_config): + # Reuse existing adapter if it matches the TLS config tls_config_key = TlsConfig(**tls_config) https_adapter = self._https_adapters.get(tls_config_key) diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index e7855ba2f2a2c..e38793a2673aa 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -6,7 +6,6 @@ import ssl from copy import deepcopy from typing import TYPE_CHECKING, Any, AnyStr, Dict, Optional # noqa: F401 -from dataclasses import dataclass from pydantic import BaseModel from ..config import is_affirmative @@ -37,11 +36,12 @@ 'tls_ciphers': 'ALL', } -class TlsConfig(BaseModel): +class TlsConfig(BaseModel, frozen=True): """ Class used internally to cache HTTPS adapters with specific TLS configurations. """ tls_ca_cert: str | None = None + tls_intermediate_ca_certs: tuple[str, ...] 
| None = None tls_cert: str | None = None tls_ciphers: str = 'ALL' tls_use_host_header: bool = False @@ -51,9 +51,6 @@ class TlsConfig(BaseModel): tls_protocols_allowed: tuple[str, ...] = DEFAULT_PROTOCOL_VERSIONS tls_validate_hostname: bool = True tls_verify: bool = True - - class Config: - frozen = True def create_ssl_context(config): @@ -95,6 +92,15 @@ def create_ssl_context(config): 'TLS CA certificate file not found: %s. Please check the `tls_ca_cert` configuration option.', ca_cert, ) + intermediate_ca_certs = config.get('tls_intermediate_ca_certs') + try: + if intermediate_ca_certs: + context.load_verify_locations(cadata='\n'.join(intermediate_ca_certs)) + except ssl.SSLError: + LOGGER.warning( + "TLS intermediate CA certificate(s) could not be loaded: %s. ", + intermediate_ca_certs, + ) # https://docs.python.org/3/library/ssl.html#ssl.SSLContext.load_cert_chain client_cert, client_key = config.get('tls_cert'), config.get('tls_private_key') From b31c03b3cfaa9d218cb0732e20c60f530ec99897 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 25 Jun 2025 16:09:27 +0200 Subject: [PATCH 081/100] Add tests --- .../base/utils/http/test_tls_and_certs.py | 196 ++++++------------ 1 file changed, 58 insertions(+), 138 deletions(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index cc3b9cecfa92a..4988e0d6656d2 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -8,7 +8,8 @@ import pytest from requests.exceptions import SSLError -from datadog_checks.base.utils.http import RequestsWrapper, SSLContextAdapter +from datadog_checks.base.utils.http import RequestsWrapper +from datadog_checks.base.utils.tls import TlsConfig pytestmark = [pytest.mark.unit] @@ -291,73 +292,18 @@ def test_fetch_intermediate_certs_tls_ciphers(self): assert mock_context.set_ciphers.call_count == 2 assert 
mock_context.set_ciphers.call_args_list[1][0][0] == instance['tls_ciphers'] - -class TestSSLContext: - """Test the core SSL context functionality.""" - - def test_requests_wrapper_creates_ssl_context(self): - """Test that RequestsWrapper creates an SSLContext instance.""" - instance = {'tls_verify': True} - init_config = {} - http = RequestsWrapper(instance, init_config) - - assert hasattr(http, 'ssl_context') - assert isinstance(http.ssl_context, ssl.SSLContext) - - @pytest.mark.parametrize( - "instance, verify_mode, check_hostname", - [ - pytest.param({'tls_verify': True}, ssl.CERT_REQUIRED, True, id='tls_verify true'), - pytest.param({'tls_verify': False}, ssl.CERT_NONE, False, id='tls_verify false'), - pytest.param( - {'tls_verify': True, 'tls_validate_hostname': False}, - ssl.CERT_REQUIRED, - False, - id='tls_verify true and tls_validate_hostname false', - ), - ], - ) - def test_ssl_context_has_correct_attributes(self, instance, verify_mode, check_hostname): - """Test that RequestsWrapper creates an SSLContext instance.""" - http = RequestsWrapper(instance, {}) - - assert hasattr(http, 'ssl_context') - assert isinstance(http.ssl_context, ssl.SSLContext) - assert http.ssl_context.verify_mode == verify_mode - assert http.ssl_context.check_hostname == check_hostname - - def test_session_uses_ssl_context_adapter(self): - """Test that the session uses SSLContextAdapter for consistent TLS configuration.""" - instance = {'tls_verify': True} - init_config = {} - http = RequestsWrapper(instance, init_config) - - session = http.session - https_adapter = session.get_adapter('https://example.com') - - assert isinstance(https_adapter, SSLContextAdapter) - assert https_adapter.ssl_context is http.ssl_context - - def test_tls_ciphers_applied_consistently(self): - """Test that tls_ciphers are applied consistently.""" - instance = {'tls_verify': True, 'tls_ciphers': TEST_CIPHERS} + def test_intermediate_certs_loaded(self): + """Test that intermediate certs are loaded 
correctly.""" + instance = {'tls_verify': True, 'tls_intermediate_ca_certs': ('some_cert', 'another_cert')} init_config = {} - http = RequestsWrapper(instance, init_config) - - # Verify the TLS context wrapper has the cipher configuration - assert http.tls_config['tls_ciphers'] == instance['tls_ciphers'] - - # Verify the session adapter uses the same TLS context - session = http.session - https_adapter = session.get_adapter('https://example.com') + with mock.patch('requests.Session.get'): + with mock.patch('ssl.SSLContext.load_verify_locations') as mock_load_verify_locations: + http = RequestsWrapper(instance, init_config) + http.get('https://example.com') + mock_load_verify_locations.assert_called_once_with(cadata='\n'.join(instance['tls_intermediate_ca_certs'])) - assert isinstance(https_adapter, SSLContextAdapter) - assert https_adapter.ssl_context is http.ssl_context - # Verify that the ciphers are set correctly in the TLS context - for cipher in instance['tls_ciphers']: - # At least one entry's name field should match the OpenSSL name - assert any(cipher in c.get('name') for c in https_adapter.ssl_context.get_ciphers()) +class TestSSLContext: def test_default_tls_ciphers(self): """Test that default TLS ciphers are applied when none are specified.""" instance = {'tls_verify': True} @@ -371,76 +317,50 @@ def test_default_tls_ciphers(self): assert mock_set_ciphers.call_count == 1 assert mock_set_ciphers.call_args[0][0] == 'ALL' - def test_host_header_compatibility(self): - """Test that host header functionality works with TLS context unification.""" - instance = { - 'tls_use_host_header': True, - 'headers': {'Host': 'custom-host.example.com'}, - 'tls_verify': True, - 'tls_ciphers': TEST_CIPHERS[0], - } - init_config = {} - http = RequestsWrapper(instance, init_config) - - session = http.session - https_adapter = session.get_adapter('https://example.com') - - # Should be the combined adapter that supports both TLS context and host headers - assert 
hasattr(https_adapter, 'ssl_context') - assert https_adapter.ssl_context is http.ssl_context - assert 'SSLContextHostHeaderAdapter' in str(type(https_adapter)) - - def test_remapper_functionality_preserved(self): - """Test that config remapping functionality is preserved with TLS context unification.""" - instance = { - 'disable_ssl_validation': True, - 'custom_private_key': '/path/to/key.pem', - } - remapper = { - 'disable_ssl_validation': {'name': 'tls_verify', 'invert': True}, - 'custom_private_key': {'name': 'tls_private_key'}, - } - init_config = {} - - # Mock the TLS context creation to avoid file operations - with mock.patch('datadog_checks.base.utils.http.create_ssl_context') as mock_create_context: - mock_context = mock.MagicMock() - mock_create_context.return_value = mock_context - - http = RequestsWrapper(instance, init_config, remapper=remapper) - - # Verify remapping worked - disable_ssl_validation: True should become tls_verify: False - assert http.tls_config['tls_verify'] is False - assert http.tls_config['tls_private_key'] == '/path/to/key.pem' - - def test_backward_compatibility_maintained(self): - """Test that all existing TLS configuration options still work.""" - instance = { - 'tls_verify': True, - 'tls_ca_cert': '/path/to/ca.pem', - 'tls_cert': '/path/to/cert.pem', - 'tls_private_key': '/path/to/key.pem', - 'tls_validate_hostname': True, - 'tls_ignore_warning': False, - 'tls_protocols_allowed': ['TLSv1.2', 'TLSv1.3'], - 'tls_ciphers': TEST_CIPHERS[0], - } - init_config = {} - - # Mock the TLS context creation to avoid file operations - with mock.patch('datadog_checks.base.utils.http.create_ssl_context') as mock_create_context: - mock_context = mock.MagicMock() - mock_create_context.return_value = mock_context - - # Should not raise any exceptions - http = RequestsWrapper(instance, init_config) - - # Verify all options are preserved in RequestsWrapper - assert http.options['verify'] == '/path/to/ca.pem' - assert http.options['cert'] == 
('/path/to/cert.pem', '/path/to/key.pem') - assert http.ignore_tls_warning is False - assert http.tls_protocols_allowed == ['TLSv1.2', 'TLSv1.3'] - - # Verify TLS context wrapper has the right config - assert http.tls_config['tls_verify'] is True - assert http.tls_config['tls_ciphers'] == instance['tls_ciphers'] +class TestSSLContextAdapter: + def test_adapter_caching(self): + """SSLContextAdapter should be recovered from cache when possible.""" + + with mock.patch('requests.Session.get'): + with mock.patch('datadog_checks.base.utils.http.create_ssl_context') as mock_create_ssl_context: + http = RequestsWrapper({'persist_connections': True, 'tls_verify': True}, {}) + # Verify that the adapter is created and cached + default_config_key = TlsConfig(**http.tls_config) + adapter = http.session.get_adapter('https://example.com') + assert http._https_adapters == {default_config_key: adapter} + mock_create_ssl_context.assert_called_once_with(http.tls_config) + + # Verify that the cached adapter is reused for the same TLS config + http.get('https://example.com') + + assert http._https_adapters == {default_config_key: adapter} + assert http.session.get_adapter('https://example.com') is adapter + mock_create_ssl_context.assert_called_once_with(http.tls_config) + + def test_adapter_caching_new_adapter(self): + """A new SSLContextAdapter should be created when a new TLS config is requested.""" + + with mock.patch('requests.Session.get'): + with mock.patch('datadog_checks.base.utils.http.create_ssl_context') as mock_create_ssl_context: + http = RequestsWrapper({'persist_connections': True, 'tls_verify': True}, {}) + # Verify that the adapter is created and cached for the default TLS config + default_config_key = TlsConfig(**http.tls_config) + adapter = http.session.get_adapter('https://example.com') + assert http._https_adapters == {default_config_key: adapter} + mock_create_ssl_context.assert_called_once_with(http.tls_config) + + # Verify that a new adapter is created for a 
different TLS config + http.get('https://example.com', verify=False) + + new_config = http.tls_config.copy() + new_config.update({'tls_verify': False}) + new_config_key = TlsConfig(**new_config) + new_adapter = http.session.get_adapter('https://example.com') + assert new_adapter is not adapter + mock_create_ssl_context.assert_called_with(new_config) + assert http._https_adapters == {default_config_key: adapter, new_config_key: new_adapter} + # Verify that no more adapters are created for the same configs + http.get('https://example.com', verify=False) + http.get('https://example.com', verify=True) + + assert http._https_adapters == {default_config_key: adapter, new_config_key: new_adapter} From 374ac1fd6f3381342a63ff61426dff1ff5050197 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 25 Jun 2025 16:14:28 +0200 Subject: [PATCH 082/100] Run formatter --- datadog_checks_base/datadog_checks/base/utils/http.py | 6 +++--- datadog_checks_base/datadog_checks/base/utils/tls.py | 5 ++++- .../tests/base/utils/http/test_tls_and_certs.py | 5 ++++- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 8d2691fe090dc..1fee7cee519f9 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -29,7 +29,7 @@ from .headers import get_default_headers, update_headers from .network import create_socket_connection from .time import get_timestamp -from .tls import create_ssl_context, TlsConfig, DEFAULT_PROTOCOL_VERSIONS, SUPPORTED_PROTOCOL_VERSIONS +from .tls import SUPPORTED_PROTOCOL_VERSIONS, TlsConfig, create_ssl_context # See Performance Optimizations in this package's README.md. 
requests_kerberos = lazy_loader.load('requests_kerberos') @@ -83,7 +83,7 @@ 'timeout': DEFAULT_TIMEOUT, 'use_legacy_auth_encoding': True, 'username': None, - **TlsConfig().__dict__ # This will include all TLS-related fields + **TlsConfig().__dict__, # This will include all TLS-related fields } # For any known legacy fields that may be widespread DEFAULT_REMAPPED_FIELDS = { @@ -129,6 +129,7 @@ def get_tls_config_from_options(new_options): tls_config["tls_private_key"] = cert[1] return tls_config + def update_session_https_adapter(session, tls_config): return session @@ -656,7 +657,6 @@ def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): session.mount('https://', https_adapter) - @contextmanager def handle_kerberos_keytab(keytab_file): # There are no keytab options in any wrapper libs. The env var will be diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index e38793a2673aa..0180771fa9273 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -6,6 +6,7 @@ import ssl from copy import deepcopy from typing import TYPE_CHECKING, Any, AnyStr, Dict, Optional # noqa: F401 + from pydantic import BaseModel from ..config import is_affirmative @@ -36,10 +37,12 @@ 'tls_ciphers': 'ALL', } + class TlsConfig(BaseModel, frozen=True): """ Class used internally to cache HTTPS adapters with specific TLS configurations. """ + tls_ca_cert: str | None = None tls_intermediate_ca_certs: tuple[str, ...] | None = None tls_cert: str | None = None @@ -51,7 +54,7 @@ class TlsConfig(BaseModel, frozen=True): tls_protocols_allowed: tuple[str, ...] 
= DEFAULT_PROTOCOL_VERSIONS tls_validate_hostname: bool = True tls_verify: bool = True - + def create_ssl_context(config): # https://docs.python.org/3/library/ssl.html#ssl.SSLContext diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 4988e0d6656d2..204934823d0ef 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -300,7 +300,9 @@ def test_intermediate_certs_loaded(self): with mock.patch('ssl.SSLContext.load_verify_locations') as mock_load_verify_locations: http = RequestsWrapper(instance, init_config) http.get('https://example.com') - mock_load_verify_locations.assert_called_once_with(cadata='\n'.join(instance['tls_intermediate_ca_certs'])) + mock_load_verify_locations.assert_called_once_with( + cadata='\n'.join(instance['tls_intermediate_ca_certs']) + ) class TestSSLContext: @@ -317,6 +319,7 @@ def test_default_tls_ciphers(self): assert mock_set_ciphers.call_count == 1 assert mock_set_ciphers.call_args[0][0] == 'ALL' + class TestSSLContextAdapter: def test_adapter_caching(self): """SSLContextAdapter should be recovered from cache when possible.""" From ca8febddc2a989f73d1e360f376985777dcc8a35 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 25 Jun 2025 17:59:02 +0200 Subject: [PATCH 083/100] Allow tls_ca_cert to be bool for legacy compatibility --- datadog_checks_base/datadog_checks/base/utils/tls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index 0180771fa9273..53a6c10f8325e 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -43,7 +43,7 @@ class TlsConfig(BaseModel, frozen=True): Class used internally to cache HTTPS adapters with specific TLS configurations. 
""" - tls_ca_cert: str | None = None + tls_ca_cert: str | bool | None = None tls_intermediate_ca_certs: tuple[str, ...] | None = None tls_cert: str | None = None tls_ciphers: str = 'ALL' From ccc3c1f043b752fec5814107c0c18bdf6ea93cc1 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 25 Jun 2025 18:19:27 +0200 Subject: [PATCH 084/100] Fix datadog_checks_base tests --- datadog_checks_base/datadog_checks/base/utils/tls.py | 2 +- .../tests/base/utils/http/test_config.py | 4 +++- datadog_checks_base/tests/base/utils/http/test_http.py | 4 +++- .../tests/base/utils/http/test_tls_and_certs.py | 10 +++++++--- 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index 53a6c10f8325e..baa8ab693a2f8 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -46,7 +46,7 @@ class TlsConfig(BaseModel, frozen=True): tls_ca_cert: str | bool | None = None tls_intermediate_ca_certs: tuple[str, ...] | None = None tls_cert: str | None = None - tls_ciphers: str = 'ALL' + tls_ciphers: str | tuple[str, ...] 
= 'ALL' tls_use_host_header: bool = False tls_ignore_warning: bool = False tls_private_key: str | None = None diff --git a/datadog_checks_base/tests/base/utils/http/test_config.py b/datadog_checks_base/tests/base/utils/http/test_config.py index 8263c48b1a90e..e1f8289b1566f 100644 --- a/datadog_checks_base/tests/base/utils/http/test_config.py +++ b/datadog_checks_base/tests/base/utils/http/test_config.py @@ -78,7 +78,8 @@ def test_config_ca_cert(self): init_config = {} with mock.patch.object(ssl.SSLContext, 'load_verify_locations') as mock_load_verify_locations: - RequestsWrapper(instance, init_config) + http = RequestsWrapper(instance, init_config) + http.session assert mock_load_verify_locations.call_count == 1 assert mock_load_verify_locations.call_args[1]['cafile'] == 'ca_cert' @@ -89,6 +90,7 @@ def test_config_verify_and_ca_cert(self): with mock.patch.object(ssl.SSLContext, 'load_verify_locations') as mock_load_verify_locations: http = RequestsWrapper(instance, init_config) + http.session assert http.options['verify'] == 'ca_cert' assert mock_load_verify_locations.call_count == 1 diff --git a/datadog_checks_base/tests/base/utils/http/test_http.py b/datadog_checks_base/tests/base/utils/http/test_http.py index ab26b0e4d86a9..7885d115a02e6 100644 --- a/datadog_checks_base/tests/base/utils/http/test_http.py +++ b/datadog_checks_base/tests/base/utils/http/test_http.py @@ -117,7 +117,8 @@ def test_cipher_construction(self, instance, expected_ciphers): # Mock SSL context creation before RequestsWrapper initialization with mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers: - RequestsWrapper(instance, init_config) + http = RequestsWrapper(instance, init_config) + http.session # Trigger SSL context creation # Verify that set_ciphers was called with the expected ciphers during TLS context creation mock_set_ciphers.assert_called_once_with(expected_ciphers) @@ -149,6 +150,7 @@ def test_http_failure_with_wrong_cipher(self, openssl_https_server): } 
init_config = {} http = RequestsWrapper(instance, init_config) + http.session # Trigger SSL context creation with pytest.raises(requests.exceptions.SSLError): http.get(url) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 204934823d0ef..a0b725a0511cf 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -29,7 +29,8 @@ def test_config_cert(self): init_config = {} with mock.patch.object(ssl.SSLContext, 'load_cert_chain') as mock_load_cert_chain: - RequestsWrapper(instance, init_config) + http = RequestsWrapper(instance, init_config) + http.session assert mock_load_cert_chain.call_count == 1 assert mock_load_cert_chain.call_args[0][0] == 'cert' @@ -39,7 +40,8 @@ def test_config_cert_and_private_key(self): init_config = {} with mock.patch.object(ssl.SSLContext, 'load_cert_chain') as mock_load_cert_chain: - RequestsWrapper(instance, init_config) + http = RequestsWrapper(instance, init_config) + http.session assert mock_load_cert_chain.call_count == 1 assert mock_load_cert_chain.call_args[0][0] == 'cert' @@ -286,6 +288,7 @@ def test_fetch_intermediate_certs_tls_ciphers(self): # Mock the certificate loading to avoid cryptography operations with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.load_intermediate_certs'): http = RequestsWrapper(instance, init_config) + http.session mock_context.set_ciphers.assert_called_once_with(instance['tls_ciphers']) http.fetch_intermediate_certs('example.com', 443) # Assert set_ciphers called a second time after fetch_intermediate_certs @@ -313,7 +316,8 @@ def test_default_tls_ciphers(self): # Mock the SSLContext creation with mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers: - RequestsWrapper(instance, init_config) + http = RequestsWrapper(instance, init_config) + http.session # Trigger the SSLContext creation # 
Verify that the default ciphers are set assert mock_set_ciphers.call_count == 1 From 5abfee9c4b85bd04be1517e3db9545a5bf552e8f Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 25 Jun 2025 18:23:14 +0200 Subject: [PATCH 085/100] Fix default tls_verify value in go_expvar --- go_expvar/datadog_checks/go_expvar/go_expvar.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go_expvar/datadog_checks/go_expvar/go_expvar.py b/go_expvar/datadog_checks/go_expvar/go_expvar.py index 280d4adbf7783..0f0bec02886eb 100644 --- a/go_expvar/datadog_checks/go_expvar/go_expvar.py +++ b/go_expvar/datadog_checks/go_expvar/go_expvar.py @@ -67,7 +67,7 @@ class GoExpvar(AgentCheck): HTTP_CONFIG_REMAPPER = { - 'ssl_verify': {'name': 'tls_verify', 'default': None}, + 'ssl_verify': {'name': 'tls_verify', 'default': False}, 'ssl_certfile': {'name': 'tls_cert', 'default': None}, 'ssl_keyfile': {'name': 'tls_private_key', 'default': None}, } From 22774565bfe9600eee949d3dbdd28eecb07af047 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 26 Jun 2025 10:40:54 +0200 Subject: [PATCH 086/100] Fix proxysql and vertica tests --- proxysql/tests/test_unit.py | 2 +- vertica/tests/test_unit.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/proxysql/tests/test_unit.py b/proxysql/tests/test_unit.py index 0c842bdb1f1a3..fc292e22b0908 100644 --- a/proxysql/tests/test_unit.py +++ b/proxysql/tests/test_unit.py @@ -101,7 +101,7 @@ def test_config_ok(dd_run_check, instance_basic): @pytest.mark.unit def test_tls_config_ok(dd_run_check, instance_basic_tls): - with mock.patch('datadog_checks.base.utils.http.ssl') as ssl: + with mock.patch('datadog_checks.base.utils.tls.ssl') as ssl: with mock.patch('datadog_checks.proxysql.proxysql.pymysql') as pymysql: check = get_check(instance_basic_tls) diff --git a/vertica/tests/test_unit.py b/vertica/tests/test_unit.py index 34068a2f6cd0f..09ecd3baeff81 100644 --- a/vertica/tests/test_unit.py +++ b/vertica/tests/test_unit.py @@ 
-19,7 +19,7 @@ def test_ssl_config_ok(aggregator, tls_instance): with mock.patch('datadog_checks.vertica.vertica.vertica') as vertica: - with mock.patch('datadog_checks.base.utils.http.ssl') as ssl: + with mock.patch('datadog_checks.base.utils.tls.ssl') as ssl: vertica.connect.return_value = mock.MagicMock() tls_context = mock.MagicMock() ssl.SSLContext.return_value = tls_context @@ -37,7 +37,7 @@ def test_ssl_config_ok(aggregator, tls_instance): def test_ssl_legacy_config_ok(aggregator, tls_instance_legacy): with mock.patch('datadog_checks.vertica.vertica.vertica') as vertica: - with mock.patch('datadog_checks.base.utils.http.ssl') as ssl: + with mock.patch('datadog_checks.base.utils.tls.ssl') as ssl: vertica.connect.return_value = mock.MagicMock() tls_context = mock.MagicMock() ssl.SSLContext.return_value = tls_context From 596385f9b3cc8bbebe0d8add0da94403fe693e9a Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 26 Jun 2025 10:54:04 +0200 Subject: [PATCH 087/100] Run formatter and fix linting --- datadog_checks_base/tests/base/utils/http/test_config.py | 4 ++-- datadog_checks_base/tests/base/utils/http/test_http.py | 4 ++-- .../tests/base/utils/http/test_tls_and_certs.py | 8 ++++---- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_config.py b/datadog_checks_base/tests/base/utils/http/test_config.py index e1f8289b1566f..57408b1a03aec 100644 --- a/datadog_checks_base/tests/base/utils/http/test_config.py +++ b/datadog_checks_base/tests/base/utils/http/test_config.py @@ -79,7 +79,7 @@ def test_config_ca_cert(self): with mock.patch.object(ssl.SSLContext, 'load_verify_locations') as mock_load_verify_locations: http = RequestsWrapper(instance, init_config) - http.session + http.get('https://example.com') assert mock_load_verify_locations.call_count == 1 assert mock_load_verify_locations.call_args[1]['cafile'] == 'ca_cert' @@ -90,7 +90,7 @@ def test_config_verify_and_ca_cert(self): with 
mock.patch.object(ssl.SSLContext, 'load_verify_locations') as mock_load_verify_locations: http = RequestsWrapper(instance, init_config) - http.session + http.get('https://example.com') assert http.options['verify'] == 'ca_cert' assert mock_load_verify_locations.call_count == 1 diff --git a/datadog_checks_base/tests/base/utils/http/test_http.py b/datadog_checks_base/tests/base/utils/http/test_http.py index 7885d115a02e6..9ee44d47aa6d1 100644 --- a/datadog_checks_base/tests/base/utils/http/test_http.py +++ b/datadog_checks_base/tests/base/utils/http/test_http.py @@ -118,7 +118,7 @@ def test_cipher_construction(self, instance, expected_ciphers): # Mock SSL context creation before RequestsWrapper initialization with mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers: http = RequestsWrapper(instance, init_config) - http.session # Trigger SSL context creation + http.get('https://example.com') # Verify that set_ciphers was called with the expected ciphers during TLS context creation mock_set_ciphers.assert_called_once_with(expected_ciphers) @@ -150,7 +150,7 @@ def test_http_failure_with_wrong_cipher(self, openssl_https_server): } init_config = {} http = RequestsWrapper(instance, init_config) - http.session # Trigger SSL context creation + http.get('https://example.com') with pytest.raises(requests.exceptions.SSLError): http.get(url) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index a0b725a0511cf..36f6135178c75 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -30,7 +30,7 @@ def test_config_cert(self): with mock.patch.object(ssl.SSLContext, 'load_cert_chain') as mock_load_cert_chain: http = RequestsWrapper(instance, init_config) - http.session + http.get('https://example.com') assert mock_load_cert_chain.call_count == 1 assert 
mock_load_cert_chain.call_args[0][0] == 'cert' @@ -41,7 +41,7 @@ def test_config_cert_and_private_key(self): with mock.patch.object(ssl.SSLContext, 'load_cert_chain') as mock_load_cert_chain: http = RequestsWrapper(instance, init_config) - http.session + http.get('https://example.com') assert mock_load_cert_chain.call_count == 1 assert mock_load_cert_chain.call_args[0][0] == 'cert' @@ -288,7 +288,7 @@ def test_fetch_intermediate_certs_tls_ciphers(self): # Mock the certificate loading to avoid cryptography operations with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.load_intermediate_certs'): http = RequestsWrapper(instance, init_config) - http.session + http.get('https://example.com') mock_context.set_ciphers.assert_called_once_with(instance['tls_ciphers']) http.fetch_intermediate_certs('example.com', 443) # Assert set_ciphers called a second time after fetch_intermediate_certs @@ -317,7 +317,7 @@ def test_default_tls_ciphers(self): # Mock the SSLContext creation with mock.patch.object(ssl.SSLContext, 'set_ciphers') as mock_set_ciphers: http = RequestsWrapper(instance, init_config) - http.session # Trigger the SSLContext creation + http.get('https://example.com') # Verify that the default ciphers are set assert mock_set_ciphers.call_count == 1 From 8c9d556d0352fc286e624098f35f9e30696e5930 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 26 Jun 2025 11:33:05 +0200 Subject: [PATCH 088/100] Add go_expvar changelog --- go_expvar/changelog.d/20179.fixed | 1 + 1 file changed, 1 insertion(+) create mode 100644 go_expvar/changelog.d/20179.fixed diff --git a/go_expvar/changelog.d/20179.fixed b/go_expvar/changelog.d/20179.fixed new file mode 100644 index 0000000000000..92096c8f53c1f --- /dev/null +++ b/go_expvar/changelog.d/20179.fixed @@ -0,0 +1 @@ +Allow HTTPS requests to use `tls_ciphers` parameter From 1007e0a9c1eec9ac15ef0cb2507744167761af12 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 26 Jun 2025 11:47:31 +0200 Subject: [PATCH 089/100] Fix 
base tests --- datadog_checks_base/tests/base/utils/http/test_config.py | 4 ++-- datadog_checks_base/tests/base/utils/http/test_http.py | 2 +- .../tests/base/utils/http/test_tls_and_certs.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_config.py b/datadog_checks_base/tests/base/utils/http/test_config.py index 57408b1a03aec..37f47e9570719 100644 --- a/datadog_checks_base/tests/base/utils/http/test_config.py +++ b/datadog_checks_base/tests/base/utils/http/test_config.py @@ -79,8 +79,8 @@ def test_config_ca_cert(self): with mock.patch.object(ssl.SSLContext, 'load_verify_locations') as mock_load_verify_locations: http = RequestsWrapper(instance, init_config) - http.get('https://example.com') + assert http.session.verify == 'ca_cert' # The session attribute instantiates the SSLContext assert mock_load_verify_locations.call_count == 1 assert mock_load_verify_locations.call_args[1]['cafile'] == 'ca_cert' @@ -90,8 +90,8 @@ def test_config_verify_and_ca_cert(self): with mock.patch.object(ssl.SSLContext, 'load_verify_locations') as mock_load_verify_locations: http = RequestsWrapper(instance, init_config) - http.get('https://example.com') + assert http.session.verify == 'ca_cert' # The session attribute instantiates the SSLContext assert http.options['verify'] == 'ca_cert' assert mock_load_verify_locations.call_count == 1 assert mock_load_verify_locations.call_args[1]['cafile'] == 'ca_cert' diff --git a/datadog_checks_base/tests/base/utils/http/test_http.py b/datadog_checks_base/tests/base/utils/http/test_http.py index 9ee44d47aa6d1..8b4e18ca411ea 100644 --- a/datadog_checks_base/tests/base/utils/http/test_http.py +++ b/datadog_checks_base/tests/base/utils/http/test_http.py @@ -150,7 +150,7 @@ def test_http_failure_with_wrong_cipher(self, openssl_https_server): } init_config = {} http = RequestsWrapper(instance, init_config) - http.get('https://example.com') + assert http.session.verify == cert_path # The 
session attribute instantiates the SSLContext with pytest.raises(requests.exceptions.SSLError): http.get(url) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 36f6135178c75..7fd5870dfcc98 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -288,7 +288,7 @@ def test_fetch_intermediate_certs_tls_ciphers(self): # Mock the certificate loading to avoid cryptography operations with mock.patch('datadog_checks.base.utils.http.RequestsWrapper.load_intermediate_certs'): http = RequestsWrapper(instance, init_config) - http.get('https://example.com') + assert http.session.verify is True # The session attribute instantiates the SSLContext mock_context.set_ciphers.assert_called_once_with(instance['tls_ciphers']) http.fetch_intermediate_certs('example.com', 443) # Assert set_ciphers called a second time after fetch_intermediate_certs From f01d58f0454b2863be698409063352b634f72424 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 26 Jun 2025 13:19:52 +0200 Subject: [PATCH 090/100] Fix intermediate_certs_tests for windows --- .../tests/base/utils/http/test_tls_and_certs.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 7fd5870dfcc98..493acdfddd501 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -302,8 +302,8 @@ def test_intermediate_certs_loaded(self): with mock.patch('requests.Session.get'): with mock.patch('ssl.SSLContext.load_verify_locations') as mock_load_verify_locations: http = RequestsWrapper(instance, init_config) - http.get('https://example.com') - mock_load_verify_locations.assert_called_once_with( + assert 
http.session.verify is True # The session attribute instantiates the SSLContext + mock_load_verify_locations.assert_called_with( cadata='\n'.join(instance['tls_intermediate_ca_certs']) ) From 1c5991db77e3cd3dacf86ab99368680fc097fb9b Mon Sep 17 00:00:00 2001 From: David Kirov Date: Thu, 26 Jun 2025 13:30:35 +0200 Subject: [PATCH 091/100] Fix intermediate_certs_tests for windows in a better way --- .../tests/base/utils/http/test_tls_and_certs.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py index 493acdfddd501..5c9ee218eea1f 100644 --- a/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py +++ b/datadog_checks_base/tests/base/utils/http/test_tls_and_certs.py @@ -303,9 +303,10 @@ def test_intermediate_certs_loaded(self): with mock.patch('ssl.SSLContext.load_verify_locations') as mock_load_verify_locations: http = RequestsWrapper(instance, init_config) assert http.session.verify is True # The session attribute instantiates the SSLContext - mock_load_verify_locations.assert_called_with( - cadata='\n'.join(instance['tls_intermediate_ca_certs']) - ) + assert mock_load_verify_locations.call_count >= 1 + all_calls = mock_load_verify_locations.mock_calls + # Assert that the last call contains the intermediate CA certs + assert all_calls[-1].kwargs["cadata"] == "\n".join(instance['tls_intermediate_ca_certs']) class TestSSLContext: From 7b27443ab48e8bd072b8235badd46565ec5332db Mon Sep 17 00:00:00 2001 From: Ilia Kurenkov Date: Sat, 28 Jun 2025 11:47:17 +0200 Subject: [PATCH 092/100] tweak how we create session and mount adapter --- .../datadog_checks/base/utils/http.py | 20 ++++++------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 1fee7cee519f9..c4174f555bec8 100644 --- 
a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -456,12 +456,9 @@ def _request(self, method, url, options): for hook in self.request_hooks: stack.enter_context(hook()) - new_tls_config = self._get_new_tls_config(url, new_options) - if persist: - session = self.session - self._mount_https_adapter(session, new_tls_config) - else: - session = self._create_session(tls_config=new_tls_config) + session = self.session if persist else self._create_session() + if url.startswith('https'): + self._mount_https_adapter(session, ChainMap(get_tls_config_from_options(new_options), self.tls_config)) request_method = getattr(session, method) if self.auth_token_handler: @@ -503,14 +500,9 @@ def make_request_aia_chasing(self, request_method, method, url, new_options, per certs = self.fetch_intermediate_certs(hostname, port) if not certs: raise e - new_ca_certs = {'tls_intermediate_ca_certs': certs} - new_tls_config = self.tls_config.copy() - new_tls_config.update(new_ca_certs) - if not persist: - session = self._create_session(new_tls_config) - else: - session = self.session - self._mount_https_adapter(session, new_tls_config) + session = self.session if persist else self._create_session() + if parsed_url.scheme == "https": + self._mount_https_adapter(session, ChainMap({'tls_intermediate_ca_certs': certs}, self.tls_config)) request_method = getattr(session, method) response = request_method(url, **new_options) return response From 986ea2447ddff2ebbe4424986eb5edb01c497139 Mon Sep 17 00:00:00 2001 From: Ilia Kurenkov Date: Sat, 28 Jun 2025 12:48:24 +0200 Subject: [PATCH 093/100] Remove unused func and mounting https adapters when creating session --- .../datadog_checks/base/utils/http.py | 26 +++++-------------- 1 file changed, 6 insertions(+), 20 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index c4174f555bec8..a77de963ba7b5 
100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -474,21 +474,6 @@ def _request(self, method, url, options): return ResponseWrapper(response, self.request_size) - def _get_new_tls_config(self, url, new_options): - """ - Creates a new TLS config if the new request options differ from the default ones. - """ - # If `verify` or `cert` options differ from the defaults, a new adapter is created. - if new_options.get('verify') == self.options['verify'] and new_options.get('cert') == self.options['cert']: - return self.tls_config - if not url.startswith('https'): - return self.tls_config - requested_tls_config = get_tls_config_from_options(new_options) - self.logger.debug('Overrides for new context config: %s', requested_tls_config) - new_tls_config = self.tls_config.copy() - new_tls_config.update(requested_tls_config) - return new_tls_config - def make_request_aia_chasing(self, request_method, method, url, new_options, persist): try: response = request_method(url, **new_options) @@ -578,12 +563,13 @@ def load_intermediate_certs(self, der_cert, certs): self.load_intermediate_certs(intermediate_cert, certs) return certs - def _create_session(self, tls_config=None): - session = requests.Session() - config = self.tls_config if tls_config is None else tls_config - - self._mount_https_adapter(session, config) + def _create_session(self): + """ + Initializes requests.Session and configures it with a UDS Adapter and options coming from user's config. + We leave it to callers to mount any HTTPS adapters if necessary. + """ + session = requests.Session() # Enable Unix Domain Socket (UDS) support. 
# See: https://github.com/msabramo/requests-unixsocket session.mount('{}://'.format(UDS_SCHEME), requests_unixsocket.UnixAdapter()) From b4abb442dd88c22344a170de7e46645009c4466a Mon Sep 17 00:00:00 2001 From: Ilia Kurenkov Date: Sat, 28 Jun 2025 13:16:42 +0200 Subject: [PATCH 094/100] Mount default https adapter in property --- datadog_checks_base/datadog_checks/base/utils/http.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index a77de963ba7b5..13c6de27effd0 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -583,8 +583,9 @@ def _create_session(self): @property def session(self): if self._session is None: - # Create a new session if it doesn't exist + # Create a new session if it doesn't exist and mount default HTTPS adapter. self._session = self._create_session() + self._mount_https_adapter(self._session, self.tls_config) return self._session def handle_auth_token(self, **request): From ac1f6d47bd651d789f578fc542ee898ea42996b9 Mon Sep 17 00:00:00 2001 From: dkirov-dd <166512750+dkirov-dd@users.noreply.github.com> Date: Tue, 1 Jul 2025 15:36:15 +0200 Subject: [PATCH 095/100] Apply suggestions from code review Co-authored-by: Ilia Kurenkov --- datadog_checks_base/datadog_checks/base/utils/http.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 13c6de27effd0..ab679b253b555 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -400,7 +400,6 @@ def __init__(self, instance, init_config, remapper=None, logger=None, session=No if config['kerberos_cache']: self.request_hooks.append(lambda: handle_kerberos_cache(config['kerberos_cache'])) - # Create 
TLS context for consistent TLS configuration self.tls_config = {key: value for key, value in config.items() if key.startswith('tls_')} self._https_adapters = {} @@ -603,10 +602,8 @@ def __del__(self): # no cov def _mount_https_adapter(self, session, tls_config): # Reuse existing adapter if it matches the TLS config tls_config_key = TlsConfig(**tls_config) - https_adapter = self._https_adapters.get(tls_config_key) - - if https_adapter is not None: - session.mount('https://', https_adapter) + if tls_config_key in self._https_adapters: + session.mount('https://', self._https_adapters[tls_config_key]) return context = create_ssl_context(tls_config) @@ -628,7 +625,6 @@ def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): https_adapter = SSLContextHostHeaderAdapter(context) else: - # Use SSLContextHTTPSAdapter for consistent TLS configuration https_adapter = SSLContextAdapter(context) # Cache the adapter for reuse From 146acb22015a947f6897d49124d77019716672ce Mon Sep 17 00:00:00 2001 From: dkirov-dd <166512750+dkirov-dd@users.noreply.github.com> Date: Tue, 1 Jul 2025 15:55:42 +0200 Subject: [PATCH 096/100] Update datadog_checks_base/datadog_checks/base/utils/http.py Co-authored-by: Ilia Kurenkov --- datadog_checks_base/datadog_checks/base/utils/http.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index ab679b253b555..e134ab12d0849 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -141,8 +141,6 @@ class SSLContextAdapter(requests.adapters.HTTPAdapter): def __init__(self, ssl_context, **kwargs): self.ssl_context = ssl_context - # This is used to determine if the adapter was created with a custom context - # for the purpose of reverting to the default context when needed. 
super(SSLContextAdapter, self).__init__() def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs): From 7e713a1615129acac79e98b4d69c7f70278b801c Mon Sep 17 00:00:00 2001 From: David Kirov Date: Tue, 1 Jul 2025 18:26:46 +0200 Subject: [PATCH 097/100] Add TypeError for wrong TLS option types --- .../datadog_checks/base/utils/http.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index e134ab12d0849..7cd4def429f44 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -116,17 +116,24 @@ def get_tls_config_from_options(new_options): tls_config["tls_ca_cert"] = verify elif isinstance(verify, bool): tls_config["tls_verify"] = verify - else: - LOGGER.warning( - 'Unexpected type for `verify` option. Expected bool or str, got %s.', - type(verify).__name__, + elif verify is not None: + raise TypeError( + 'Unexpected type for `verify` option. Expected bool or str, got {}.'.format(type(verify).__name__) ) if isinstance(cert, str): tls_config["tls_cert"] = cert - elif isinstance(cert, tuple) or isinstance(cert, list) and len(cert) == 2: + elif isinstance(cert, tuple) or isinstance(cert, list): + if len(cert) != 2: + raise TypeError( + 'Unexpected length for `cert` option. Expected a tuple of length 2, got {}.'.format(len(cert)) + ) tls_config["tls_cert"] = cert[0] tls_config["tls_private_key"] = cert[1] + elif cert is not None: + raise TypeError( + 'Unexpected type for `cert` option. 
Expected str or tuple, got {}.'.format(type(cert).__name__) + ) return tls_config From c9473afc15e4ad17e62fbd0057a5c0fca4c5dce7 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 2 Jul 2025 13:48:55 +0200 Subject: [PATCH 098/100] Run formatter --- amazon_msk/tests/conftest.py | 5 ++-- .../base/stubs/datadog_agent.py | 8 +++---- .../datadog_checks/base/utils/db/query.py | 4 +++- .../datadog_checks/base/utils/db/transform.py | 4 +++- .../datadog_checks/base/utils/http.py | 4 +--- .../test_transformers/test_time_elapsed.py | 4 +++- .../tests/base/utils/db/test_query_manager.py | 6 ++--- .../tests/base/utils/db/test_timed_cache.py | 6 ++--- .../dev/tooling/commands/create.py | 8 +++++-- .../meta/snmp/validators/validator.py | 4 +++- .../datadog_checks/dev/tooling/signing.py | 3 +-- .../{check_name}/config_models/defaults.py | 1 + .../{check_name}/config_models/defaults.py | 1 + .../{check_name}/config_models/defaults.py | 1 + .../datadog_checks/dev/tooling/utils.py | 12 ++-------- .../configuration/consumers/test_example.py | 24 ++++++++++++++----- http_check/tests/test_http_integration.py | 5 ++-- .../datadog_checks/twistlock/twistlock.py | 4 +++- twistlock/tests/test_twistlock.py | 4 +++- 19 files changed, 65 insertions(+), 43 deletions(-) diff --git a/amazon_msk/tests/conftest.py b/amazon_msk/tests/conftest.py index 85f87926d3627..ec03b5fda9d8f 100644 --- a/amazon_msk/tests/conftest.py +++ b/amazon_msk/tests/conftest.py @@ -33,8 +33,9 @@ def mock_requests_get(url, *args, **kwargs): @pytest.fixture def mock_data(): # Mock requests.get because it is used internally within boto3 - with mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), mock.patch( - 'requests.Session.get', side_effect=mock_requests_get + with ( + mock.patch('requests.get', side_effect=mock_requests_get, autospec=True), + mock.patch('requests.Session.get', side_effect=mock_requests_get), ): yield diff --git a/datadog_checks_base/datadog_checks/base/stubs/datadog_agent.py 
b/datadog_checks_base/datadog_checks/base/stubs/datadog_agent.py index adc56bad1010e..3e465941b07d9 100644 --- a/datadog_checks_base/datadog_checks/base/stubs/datadog_agent.py +++ b/datadog_checks_base/datadog_checks/base/stubs/datadog_agent.py @@ -67,10 +67,10 @@ def assert_external_tags(self, hostname, external_tags, match_tags_order=False): external_tags = {k: sorted(v) for (k, v) in external_tags.items()} tags = {k: sorted(v) for (k, v) in tags.items()} - assert external_tags == tags, ( - 'Expected {} external tags for hostname {}, found {}. Submitted external tags: {}'.format( - external_tags, hostname, tags, repr(self._external_tags) - ) + assert ( + external_tags == tags + ), 'Expected {} external tags for hostname {}, found {}. Submitted external tags: {}'.format( + external_tags, hostname, tags, repr(self._external_tags) ) return diff --git a/datadog_checks_base/datadog_checks/base/utils/db/query.py b/datadog_checks_base/datadog_checks/base/utils/db/query.py index ce88a4bcae618..137c835ae15f7 100644 --- a/datadog_checks_base/datadog_checks/base/utils/db/query.py +++ b/datadog_checks_base/datadog_checks/base/utils/db/query.py @@ -214,7 +214,9 @@ def compile( elif extra_type not in extra_transformers and extra_type not in submission_transformers: raise ValueError('unknown type `{}` for extra {} of {}'.format(extra_type, extra_name, query_name)) - transformer_factory = extra_transformers.get(extra_type, submission_transformers.get(extra_type)) # type: TransformerFactory + transformer_factory = extra_transformers.get( + extra_type, submission_transformers.get(extra_type) + ) # type: TransformerFactory extra_source = extra.get('source') if extra_type in submission_transformers: diff --git a/datadog_checks_base/datadog_checks/base/utils/db/transform.py b/datadog_checks_base/datadog_checks/base/utils/db/transform.py index 8d8e513085ae6..0c4d82cec27d8 100644 --- a/datadog_checks_base/datadog_checks/base/utils/db/transform.py +++ 
b/datadog_checks_base/datadog_checks/base/utils/db/transform.py @@ -85,7 +85,9 @@ def get_monotonic_gauge(transformers, column_name, **modifiers): Send the result as both a `gauge` suffixed by `.total` and a `monotonic_count` suffixed by `.count`. """ gauge = transformers['gauge'](transformers, '{}.total'.format(column_name), **modifiers) # type: Callable - monotonic_count = transformers['monotonic_count'](transformers, '{}.count'.format(column_name), **modifiers) # type: Callable + monotonic_count = transformers['monotonic_count']( + transformers, '{}.count'.format(column_name), **modifiers + ) # type: Callable def monotonic_gauge(_, value, **kwargs): # type: (List, str, Dict[str, Any]) -> None diff --git a/datadog_checks_base/datadog_checks/base/utils/http.py b/datadog_checks_base/datadog_checks/base/utils/http.py index 7cd4def429f44..da14d1fbd61d7 100644 --- a/datadog_checks_base/datadog_checks/base/utils/http.py +++ b/datadog_checks_base/datadog_checks/base/utils/http.py @@ -131,9 +131,7 @@ def get_tls_config_from_options(new_options): tls_config["tls_cert"] = cert[0] tls_config["tls_private_key"] = cert[1] elif cert is not None: - raise TypeError( - 'Unexpected type for `cert` option. Expected str or tuple, got {}.'.format(type(cert).__name__) - ) + raise TypeError('Unexpected type for `cert` option. 
Expected str or tuple, got {}.'.format(type(cert).__name__)) return tls_config diff --git a/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_transformers/test_time_elapsed.py b/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_transformers/test_time_elapsed.py index c62456f300ebc..f23c446eaa229 100644 --- a/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_transformers/test_time_elapsed.py +++ b/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_transformers/test_time_elapsed.py @@ -12,7 +12,9 @@ def test(aggregator, dd_run_check, mock_http_response): # HELP go_memstats_last_gc_time_seconds Number of seconds since 1970 of last garbage collection. # TYPE go_memstats_last_gc_time_seconds gauge go_memstats_last_gc_time_seconds{{foo="bar"}} {} - """.format(get_timestamp() - 1.2) + """.format( + get_timestamp() - 1.2 + ) ) check = get_check({'metrics': [{'go_memstats_last_gc_time_seconds': {'type': 'time_elapsed'}}]}) dd_run_check(check) diff --git a/datadog_checks_base/tests/base/utils/db/test_query_manager.py b/datadog_checks_base/tests/base/utils/db/test_query_manager.py index ffb03f5042ecb..fa19dfd778a3e 100644 --- a/datadog_checks_base/tests/base/utils/db/test_query_manager.py +++ b/datadog_checks_base/tests/base/utils/db/test_query_manager.py @@ -1111,9 +1111,9 @@ class MyCheck(AgentCheck): query_manager2 = QueryManager(check2, mock_executor(), [dummy_query]) query_manager1.compile_queries() query_manager2.compile_queries() - assert not id(query_manager1.queries[0]) == id(query_manager2.queries[0]), ( - "QueryManager does not copy the queries" - ) + assert not id(query_manager1.queries[0]) == id( + query_manager2.queries[0] + ), "QueryManager does not copy the queries" def test_query_execution_error(self, caplog, aggregator): class Result(object): diff --git a/datadog_checks_base/tests/base/utils/db/test_timed_cache.py b/datadog_checks_base/tests/base/utils/db/test_timed_cache.py index 
119784da7f6e1..30b1b2a2e61e0 100644 --- a/datadog_checks_base/tests/base/utils/db/test_timed_cache.py +++ b/datadog_checks_base/tests/base/utils/db/test_timed_cache.py @@ -16,9 +16,9 @@ def test_set_and_get_item(self): def test_get_with_default(self): cache = TimedCache(600) - assert cache.get('nonexistent', 'default') == 'default', ( - "Should return the default value when the key is not found." - ) + assert ( + cache.get('nonexistent', 'default') == 'default' + ), "Should return the default value when the key is not found." def test_item_deletion(self): cache = TimedCache(600) diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py index 37723aa48bf1b..c3895080deabc 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py @@ -234,7 +234,9 @@ def create(ctx, name, integration_type, location, non_interactive, quiet, dry_ru # Fill in all common non Marketplace fields template_fields['pricing_plan'] = '' if repo_choice in ['core', 'integrations-internal-core']: - template_fields['author_info'] = """ + template_fields[ + 'author_info' + ] = """ "author": { "support_email": "help@datadoghq.com", "name": "Datadog", @@ -244,7 +246,9 @@ def create(ctx, name, integration_type, location, non_interactive, quiet, dry_ru else: prompt_and_update_if_missing(template_fields, 'email', 'Email used for support requests') prompt_and_update_if_missing(template_fields, 'author', 'Your name') - template_fields['author_info'] = f""" + template_fields[ + 'author_info' + ] = f""" "author": {{ "support_email": "{template_fields['email']}", "name": "{template_fields['author']}", diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/snmp/validators/validator.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/snmp/validators/validator.py index fe71c79987a0b..24ea2bab4de6d 100644 --- 
a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/snmp/validators/validator.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/snmp/validators/validator.py @@ -270,7 +270,9 @@ def validate(self, profile, path): if not all_metric_tags_are_valid: self.fail( "metric_tables defined in lines {} are not valid. \ - \nmetric_tags must have 'column' or 'index' value".format(lines) + \nmetric_tags must have 'column' or 'index' value".format( + lines + ) ) if not self.result.failed: diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/signing.py b/datadog_checks_dev/datadog_checks/dev/tooling/signing.py index ae4f4d105496d..43d592e2de0e0 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/signing.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/signing.py @@ -8,6 +8,7 @@ # How long ddev will wait for GPG to finish, especially when asking dev for signature. import securesystemslib.settings + securesystemslib.settings.SUBPROCESS_TIMEOUT = 60 from securesystemslib.gpg.constants import gpg_command @@ -32,7 +33,6 @@ class NeitherTrackedNorIgnoredFileException(Exception): def __init__(self, filename): self.filename = filename - def __str__(self): return f'{self.filename} has neither been tracked nor ignored by git and in-toto!' @@ -41,7 +41,6 @@ class UntrackedButIgnoredFileException(Exception): def __init__(self, filename): self.filename = filename - def __str__(self): return f'{self.filename} has not been tracked, but it should be ignored by git and in-toto!' 
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/datadog_checks/{check_name}/config_models/defaults.py b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/datadog_checks/{check_name}/config_models/defaults.py index 54022a8362565..bb569afb751ff 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/datadog_checks/{check_name}/config_models/defaults.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check/{check_name}/datadog_checks/{check_name}/config_models/defaults.py @@ -2,6 +2,7 @@ {documentation} + def instance_empty_default_hostname(): return False diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check_only/{check_name}/datadog_checks/{check_name}/config_models/defaults.py b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check_only/{check_name}/datadog_checks/{check_name}/config_models/defaults.py index 54022a8362565..bb569afb751ff 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check_only/{check_name}/datadog_checks/{check_name}/config_models/defaults.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/check_only/{check_name}/datadog_checks/{check_name}/config_models/defaults.py @@ -2,6 +2,7 @@ {documentation} + def instance_empty_default_hostname(): return False diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/datadog_checks/{check_name}/config_models/defaults.py b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/datadog_checks/{check_name}/config_models/defaults.py index f18e05420d284..f5b39ef8aa1aa 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/datadog_checks/{check_name}/config_models/defaults.py +++ 
b/datadog_checks_dev/datadog_checks/dev/tooling/templates/integration/jmx/{check_name}/datadog_checks/{check_name}/config_models/defaults.py @@ -2,6 +2,7 @@ {documentation} + def shared_new_gc_metrics(field, value): return False diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/utils.py b/datadog_checks_dev/datadog_checks/dev/tooling/utils.py index 3b855d4dd8264..bb2e8ed624d3f 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/utils.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/utils.py @@ -170,13 +170,7 @@ def initialize_root(config, agent=False, core=False, extras=False, marketplace=F repo_choice = ( 'core' if core - else 'extras' - if extras - else 'agent' - if agent - else 'marketplace' - if marketplace - else config.get('repo', 'core') + else 'extras' if extras else 'agent' if agent else 'marketplace' if marketplace else config.get('repo', 'core') ) config['repo_choice'] = repo_choice message = None @@ -188,9 +182,7 @@ def initialize_root(config, agent=False, core=False, extras=False, marketplace=F repo = ( 'datadog-agent' if repo_choice == 'agent' - else 'marketplace' - if repo_choice == 'marketplace' - else f'integrations-{repo_choice}' + else 'marketplace' if repo_choice == 'marketplace' else f'integrations-{repo_choice}' ) message = f'`{repo}` directory `{root}` does not exist, defaulting to the current location.' 
diff --git a/datadog_checks_dev/tests/tooling/configuration/consumers/test_example.py b/datadog_checks_dev/tests/tooling/configuration/consumers/test_example.py index 00cb8779a7264..9fae1eb639c25 100644 --- a/datadog_checks_dev/tests/tooling/configuration/consumers/test_example.py +++ b/datadog_checks_dev/tests/tooling/configuration/consumers/test_example.py @@ -867,7 +867,9 @@ def test_section_description_length_limit(): description: words value: type: string - """.format('a' * DESCRIPTION_LINE_LENGTH_LIMIT) + """.format( + 'a' * DESCRIPTION_LINE_LENGTH_LIMIT + ) ) files = consumer.render() @@ -888,7 +890,9 @@ def test_option_description_length_limit(): description: {} value: type: string - """.format('a' * DESCRIPTION_LINE_LENGTH_LIMIT) + """.format( + 'a' * DESCRIPTION_LINE_LENGTH_LIMIT + ) ) files = consumer.render() @@ -910,7 +914,9 @@ def test_option_description_length_limit_with_noqa(): value: type: string example: something - """.format('a' * DESCRIPTION_LINE_LENGTH_LIMIT + ' /noqa') + """.format( + 'a' * DESCRIPTION_LINE_LENGTH_LIMIT + ' /noqa' + ) ) files = consumer.render() @@ -922,7 +928,9 @@ def test_option_description_length_limit_with_noqa(): ## {} # # foo: something - """.format('a' * DESCRIPTION_LINE_LENGTH_LIMIT) + """.format( + 'a' * DESCRIPTION_LINE_LENGTH_LIMIT + ) ) @@ -1203,7 +1211,9 @@ def test_compact_example_long_line(): type: array items: type: string - """.format(long_str) + """.format( + long_str + ) ) files = consumer.render() contents, errors = files['test.yaml.example'] @@ -1215,7 +1225,9 @@ def test_compact_example_long_line(): # # foo: # - [{0}, {0}, {0}, {0}] - """.format(long_str) + """.format( + long_str + ) ) diff --git a/http_check/tests/test_http_integration.py b/http_check/tests/test_http_integration.py index 7e7396e602419..81ca5c13fa2a9 100644 --- a/http_check/tests/test_http_integration.py +++ b/http_check/tests/test_http_integration.py @@ -440,8 +440,9 @@ def test_unexisting_ca_cert_should_log_warning(aggregator, 
dd_run_check): 'skip_proxy': 'false', } - with mock.patch('datadog_checks.base.utils.http.logging.Logger.warning') as mock_warning, mock.patch( - 'requests.Session.get' + with ( + mock.patch('datadog_checks.base.utils.http.logging.Logger.warning') as mock_warning, + mock.patch('requests.Session.get'), ): check = HTTPCheck('http_check', {'ca_certs': 'foo'}, [instance]) dd_run_check(check) diff --git a/twistlock/datadog_checks/twistlock/twistlock.py b/twistlock/datadog_checks/twistlock/twistlock.py index 0faed2670cdcd..69cb66d062afb 100644 --- a/twistlock/datadog_checks/twistlock/twistlock.py +++ b/twistlock/datadog_checks/twistlock/twistlock.py @@ -252,7 +252,9 @@ def _analyze_vulnerability(self, vuln, host=False, image=False): msg_text = """ There is a new CVE affecting your {}: {} - """.format(vuln_type, description) + """.format( + vuln_type, description + ) event = { 'timestamp': time.mktime(published_date.timetuple()), diff --git a/twistlock/tests/test_twistlock.py b/twistlock/tests/test_twistlock.py index c5e7457be9963..c6bef536c290c 100644 --- a/twistlock/tests/test_twistlock.py +++ b/twistlock/tests/test_twistlock.py @@ -109,7 +109,9 @@ def test_err_response(aggregator, instance): check = TwistlockCheck('twistlock', {}, [instance]) with pytest.raises(Exception, match='^Error in response'): - with mock.patch('requests.Session.get', return_value=MockResponse('{"err": "invalid credentials"}'), autospec=True): + with mock.patch( + 'requests.Session.get', return_value=MockResponse('{"err": "invalid credentials"}'), autospec=True + ): check.check(instance) From 3c9e8767761f92f1bd56bd83aa86e5014ada0718 Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 2 Jul 2025 13:59:55 +0200 Subject: [PATCH 099/100] Fix kubelet typo in test --- kubelet/tests/test_kubelet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kubelet/tests/test_kubelet.py b/kubelet/tests/test_kubelet.py index 911b03c628836..6ffe23ceaaeb7 100644 --- a/kubelet/tests/test_kubelet.py 
+++ b/kubelet/tests/test_kubelet.py @@ -574,7 +574,7 @@ def test_kubelet_credentials_update(monkeypatch, aggregator): ) kubelet_conn_info = {'url': 'http://127.0.0.1:10255', 'ca_cert': False} with ( - mock.patch('requests.Sesison.get', return_value=get), + mock.patch('requests.Session.get', return_value=get), mock.patch('datadog_checks.kubelet.kubelet.get_connection_info', return_value=kubelet_conn_info), ): check.check(instance) From f14f981790e2eeaa05c90dfbcb64a88d7320889d Mon Sep 17 00:00:00 2001 From: David Kirov Date: Wed, 2 Jul 2025 14:08:45 +0200 Subject: [PATCH 100/100] Run the ACTUAL formatter --- .../base/stubs/datadog_agent.py | 8 +++---- .../datadog_checks/base/utils/db/query.py | 4 +--- .../datadog_checks/base/utils/db/transform.py | 4 +--- .../datadog_checks/base/utils/tls.py | 2 +- .../test_transformers/test_time_elapsed.py | 4 +--- .../tests/base/utils/db/test_query_manager.py | 6 ++--- .../tests/base/utils/db/test_timed_cache.py | 6 ++--- .../dev/tooling/commands/create.py | 8 ++----- .../meta/snmp/validators/validator.py | 4 +--- .../datadog_checks/dev/tooling/utils.py | 12 ++++++++-- .../configuration/consumers/test_example.py | 24 +++++-------------- .../datadog_checks/twistlock/twistlock.py | 4 +--- 12 files changed, 34 insertions(+), 52 deletions(-) diff --git a/datadog_checks_base/datadog_checks/base/stubs/datadog_agent.py b/datadog_checks_base/datadog_checks/base/stubs/datadog_agent.py index 3e465941b07d9..adc56bad1010e 100644 --- a/datadog_checks_base/datadog_checks/base/stubs/datadog_agent.py +++ b/datadog_checks_base/datadog_checks/base/stubs/datadog_agent.py @@ -67,10 +67,10 @@ def assert_external_tags(self, hostname, external_tags, match_tags_order=False): external_tags = {k: sorted(v) for (k, v) in external_tags.items()} tags = {k: sorted(v) for (k, v) in tags.items()} - assert ( - external_tags == tags - ), 'Expected {} external tags for hostname {}, found {}. 
Submitted external tags: {}'.format( - external_tags, hostname, tags, repr(self._external_tags) + assert external_tags == tags, ( + 'Expected {} external tags for hostname {}, found {}. Submitted external tags: {}'.format( + external_tags, hostname, tags, repr(self._external_tags) + ) ) return diff --git a/datadog_checks_base/datadog_checks/base/utils/db/query.py b/datadog_checks_base/datadog_checks/base/utils/db/query.py index 137c835ae15f7..ce88a4bcae618 100644 --- a/datadog_checks_base/datadog_checks/base/utils/db/query.py +++ b/datadog_checks_base/datadog_checks/base/utils/db/query.py @@ -214,9 +214,7 @@ def compile( elif extra_type not in extra_transformers and extra_type not in submission_transformers: raise ValueError('unknown type `{}` for extra {} of {}'.format(extra_type, extra_name, query_name)) - transformer_factory = extra_transformers.get( - extra_type, submission_transformers.get(extra_type) - ) # type: TransformerFactory + transformer_factory = extra_transformers.get(extra_type, submission_transformers.get(extra_type)) # type: TransformerFactory extra_source = extra.get('source') if extra_type in submission_transformers: diff --git a/datadog_checks_base/datadog_checks/base/utils/db/transform.py b/datadog_checks_base/datadog_checks/base/utils/db/transform.py index 0c4d82cec27d8..8d8e513085ae6 100644 --- a/datadog_checks_base/datadog_checks/base/utils/db/transform.py +++ b/datadog_checks_base/datadog_checks/base/utils/db/transform.py @@ -85,9 +85,7 @@ def get_monotonic_gauge(transformers, column_name, **modifiers): Send the result as both a `gauge` suffixed by `.total` and a `monotonic_count` suffixed by `.count`. 
""" gauge = transformers['gauge'](transformers, '{}.total'.format(column_name), **modifiers) # type: Callable - monotonic_count = transformers['monotonic_count']( - transformers, '{}.count'.format(column_name), **modifiers - ) # type: Callable + monotonic_count = transformers['monotonic_count'](transformers, '{}.count'.format(column_name), **modifiers) # type: Callable def monotonic_gauge(_, value, **kwargs): # type: (List, str, Dict[str, Any]) -> None diff --git a/datadog_checks_base/datadog_checks/base/utils/tls.py b/datadog_checks_base/datadog_checks/base/utils/tls.py index baa8ab693a2f8..ac713eb623e80 100644 --- a/datadog_checks_base/datadog_checks/base/utils/tls.py +++ b/datadog_checks_base/datadog_checks/base/utils/tls.py @@ -116,7 +116,7 @@ def create_ssl_context(config): context.load_cert_chain(client_cert, keyfile=client_key, password=client_key_pass) except FileNotFoundError: LOGGER.warning( - 'TLS client certificate file not found: %s. ' 'Please check the `tls_cert` configuration option.', + 'TLS client certificate file not found: %s. Please check the `tls_cert` configuration option.', client_cert, ) diff --git a/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_transformers/test_time_elapsed.py b/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_transformers/test_time_elapsed.py index f23c446eaa229..c62456f300ebc 100644 --- a/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_transformers/test_time_elapsed.py +++ b/datadog_checks_base/tests/base/checks/openmetrics/test_v2/test_transformers/test_time_elapsed.py @@ -12,9 +12,7 @@ def test(aggregator, dd_run_check, mock_http_response): # HELP go_memstats_last_gc_time_seconds Number of seconds since 1970 of last garbage collection. 
# TYPE go_memstats_last_gc_time_seconds gauge go_memstats_last_gc_time_seconds{{foo="bar"}} {} - """.format( - get_timestamp() - 1.2 - ) + """.format(get_timestamp() - 1.2) ) check = get_check({'metrics': [{'go_memstats_last_gc_time_seconds': {'type': 'time_elapsed'}}]}) dd_run_check(check) diff --git a/datadog_checks_base/tests/base/utils/db/test_query_manager.py b/datadog_checks_base/tests/base/utils/db/test_query_manager.py index fa19dfd778a3e..ffb03f5042ecb 100644 --- a/datadog_checks_base/tests/base/utils/db/test_query_manager.py +++ b/datadog_checks_base/tests/base/utils/db/test_query_manager.py @@ -1111,9 +1111,9 @@ class MyCheck(AgentCheck): query_manager2 = QueryManager(check2, mock_executor(), [dummy_query]) query_manager1.compile_queries() query_manager2.compile_queries() - assert not id(query_manager1.queries[0]) == id( - query_manager2.queries[0] - ), "QueryManager does not copy the queries" + assert not id(query_manager1.queries[0]) == id(query_manager2.queries[0]), ( + "QueryManager does not copy the queries" + ) def test_query_execution_error(self, caplog, aggregator): class Result(object): diff --git a/datadog_checks_base/tests/base/utils/db/test_timed_cache.py b/datadog_checks_base/tests/base/utils/db/test_timed_cache.py index 30b1b2a2e61e0..119784da7f6e1 100644 --- a/datadog_checks_base/tests/base/utils/db/test_timed_cache.py +++ b/datadog_checks_base/tests/base/utils/db/test_timed_cache.py @@ -16,9 +16,9 @@ def test_set_and_get_item(self): def test_get_with_default(self): cache = TimedCache(600) - assert ( - cache.get('nonexistent', 'default') == 'default' - ), "Should return the default value when the key is not found." + assert cache.get('nonexistent', 'default') == 'default', ( + "Should return the default value when the key is not found." 
+ ) def test_item_deletion(self): cache = TimedCache(600) diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py index c3895080deabc..37723aa48bf1b 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/create.py @@ -234,9 +234,7 @@ def create(ctx, name, integration_type, location, non_interactive, quiet, dry_ru # Fill in all common non Marketplace fields template_fields['pricing_plan'] = '' if repo_choice in ['core', 'integrations-internal-core']: - template_fields[ - 'author_info' - ] = """ + template_fields['author_info'] = """ "author": { "support_email": "help@datadoghq.com", "name": "Datadog", @@ -246,9 +244,7 @@ def create(ctx, name, integration_type, location, non_interactive, quiet, dry_ru else: prompt_and_update_if_missing(template_fields, 'email', 'Email used for support requests') prompt_and_update_if_missing(template_fields, 'author', 'Your name') - template_fields[ - 'author_info' - ] = f""" + template_fields['author_info'] = f""" "author": {{ "support_email": "{template_fields['email']}", "name": "{template_fields['author']}", diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/snmp/validators/validator.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/snmp/validators/validator.py index 24ea2bab4de6d..fe71c79987a0b 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/snmp/validators/validator.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/snmp/validators/validator.py @@ -270,9 +270,7 @@ def validate(self, profile, path): if not all_metric_tags_are_valid: self.fail( "metric_tables defined in lines {} are not valid. 
\ - \nmetric_tags must have 'column' or 'index' value".format( - lines - ) + \nmetric_tags must have 'column' or 'index' value".format(lines) ) if not self.result.failed: diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/utils.py b/datadog_checks_dev/datadog_checks/dev/tooling/utils.py index bb2e8ed624d3f..3b855d4dd8264 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/utils.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/utils.py @@ -170,7 +170,13 @@ def initialize_root(config, agent=False, core=False, extras=False, marketplace=F repo_choice = ( 'core' if core - else 'extras' if extras else 'agent' if agent else 'marketplace' if marketplace else config.get('repo', 'core') + else 'extras' + if extras + else 'agent' + if agent + else 'marketplace' + if marketplace + else config.get('repo', 'core') ) config['repo_choice'] = repo_choice message = None @@ -182,7 +188,9 @@ def initialize_root(config, agent=False, core=False, extras=False, marketplace=F repo = ( 'datadog-agent' if repo_choice == 'agent' - else 'marketplace' if repo_choice == 'marketplace' else f'integrations-{repo_choice}' + else 'marketplace' + if repo_choice == 'marketplace' + else f'integrations-{repo_choice}' ) message = f'`{repo}` directory `{root}` does not exist, defaulting to the current location.' 
diff --git a/datadog_checks_dev/tests/tooling/configuration/consumers/test_example.py b/datadog_checks_dev/tests/tooling/configuration/consumers/test_example.py index 9fae1eb639c25..00cb8779a7264 100644 --- a/datadog_checks_dev/tests/tooling/configuration/consumers/test_example.py +++ b/datadog_checks_dev/tests/tooling/configuration/consumers/test_example.py @@ -867,9 +867,7 @@ def test_section_description_length_limit(): description: words value: type: string - """.format( - 'a' * DESCRIPTION_LINE_LENGTH_LIMIT - ) + """.format('a' * DESCRIPTION_LINE_LENGTH_LIMIT) ) files = consumer.render() @@ -890,9 +888,7 @@ def test_option_description_length_limit(): description: {} value: type: string - """.format( - 'a' * DESCRIPTION_LINE_LENGTH_LIMIT - ) + """.format('a' * DESCRIPTION_LINE_LENGTH_LIMIT) ) files = consumer.render() @@ -914,9 +910,7 @@ def test_option_description_length_limit_with_noqa(): value: type: string example: something - """.format( - 'a' * DESCRIPTION_LINE_LENGTH_LIMIT + ' /noqa' - ) + """.format('a' * DESCRIPTION_LINE_LENGTH_LIMIT + ' /noqa') ) files = consumer.render() @@ -928,9 +922,7 @@ def test_option_description_length_limit_with_noqa(): ## {} # # foo: something - """.format( - 'a' * DESCRIPTION_LINE_LENGTH_LIMIT - ) + """.format('a' * DESCRIPTION_LINE_LENGTH_LIMIT) ) @@ -1211,9 +1203,7 @@ def test_compact_example_long_line(): type: array items: type: string - """.format( - long_str - ) + """.format(long_str) ) files = consumer.render() contents, errors = files['test.yaml.example'] @@ -1225,9 +1215,7 @@ def test_compact_example_long_line(): # # foo: # - [{0}, {0}, {0}, {0}] - """.format( - long_str - ) + """.format(long_str) ) diff --git a/twistlock/datadog_checks/twistlock/twistlock.py b/twistlock/datadog_checks/twistlock/twistlock.py index 69cb66d062afb..0faed2670cdcd 100644 --- a/twistlock/datadog_checks/twistlock/twistlock.py +++ b/twistlock/datadog_checks/twistlock/twistlock.py @@ -252,9 +252,7 @@ def _analyze_vulnerability(self, vuln, 
host=False, image=False): msg_text = """ There is a new CVE affecting your {}: {} - """.format( - vuln_type, description - ) + """.format(vuln_type, description) event = { 'timestamp': time.mktime(published_date.timetuple()),