Skip to content

Commit 77cc690

Browse files
committed
Format all files in integrations-core with Ruff as formatter
1 parent f354bbc commit 77cc690

File tree

267 files changed

+2062
-2146
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the searchbox below for content that may be hidden.

267 files changed

+2062
-2146
lines changed

aerospike/datadog_checks/aerospike/check.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77

88

99
class AerospikeCheckV2(OpenMetricsBaseCheckV2):
10-
1110
__NAMESPACE__ = 'aerospike'
1211

1312
DEFAULT_METRIC_LIMIT = 0

airflow/tests/compose/dags/tutorial.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,6 @@
3939
catchup=False,
4040
tags=["example"],
4141
) as dag:
42-
4342
# t1, t2 and t3 are examples of tasks created by instantiating operators
4443
t1 = BashOperator(
4544
task_id="print_date",

airflow/tests/conftest.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -43,9 +43,12 @@ def dd_environment(instance):
4343
build=True,
4444
conditions=[CheckEndpoints(URL + "/api/v1/health", attempts=120)],
4545
):
46-
yield instance, {
47-
'docker_volumes': ['{}/datadog.yaml:/etc/datadog-agent/datadog.yaml'.format(temp_dir)],
48-
}
46+
yield (
47+
instance,
48+
{
49+
'docker_volumes': ['{}/datadog.yaml:/etc/datadog-agent/datadog.yaml'.format(temp_dir)],
50+
},
51+
)
4952

5053

5154
@pytest.fixture(scope='session')

airflow/tests/test_unit.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,6 @@ def test_service_checks_healthy_exp(aggregator, json_resp, expected_healthy_stat
3131
check = AirflowCheck('airflow', common.FULL_CONFIG, [instance])
3232

3333
with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value=None):
34-
3534
with mock.patch('datadog_checks.base.utils.http.requests') as req:
3635
mock_resp = mock.MagicMock(status_code=200)
3736
mock_resp.json.side_effect = [json_resp]
@@ -60,7 +59,6 @@ def test_service_checks_healthy_stable(
6059
check = AirflowCheck('airflow', common.FULL_CONFIG, [instance])
6160

6261
with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'):
63-
6462
with mock.patch('datadog_checks.base.utils.http.requests') as req:
6563
mock_resp = mock.MagicMock(status_code=200)
6664
mock_resp.json.side_effect = [
@@ -100,7 +98,6 @@ def test_dag_task_ongoing_duration(aggregator, task_instance):
10098
check = AirflowCheck('airflow', common.FULL_CONFIG, [instance])
10199

102100
with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'):
103-
104101
with mock.patch('datadog_checks.base.utils.http.requests') as req:
105102
mock_resp = mock.MagicMock(status_code=200)
106103
mock_resp.json.side_effect = [

apache/tests/test_apache.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ def test_no_metrics_failure(aggregator, check):
3838
check.check(NO_METRIC_CONFIG)
3939

4040
assert str(excinfo.value) == (
41-
"No metrics were fetched for this instance. Make sure that http://localhost:18180 " "is the proper url."
41+
"No metrics were fetched for this instance. Make sure that http://localhost:18180 is the proper url."
4242
)
4343

4444
sc_tags = ['apache_host:localhost', 'port:18180']

arangodb/datadog_checks/arangodb/check.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@ class ArangodbCheck(OpenMetricsBaseCheckV2, ConfigMixin):
2121
SERVER_TAGS = {'mode': SERVER_MODE_ENDPOINT, 'id': SERVER_ID_ENDPOINT}
2222

2323
def __init__(self, name, init_config, instances):
24-
2524
super(ArangodbCheck, self).__init__(name, init_config, instances)
2625
self.openmetrics_endpoint = self.instance.get('openmetrics_endpoint')
2726
parsed_endpoint = urlparse(self.openmetrics_endpoint)

argo_workflows/tests/test_unit.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,6 @@ def instance():
9191
],
9292
)
9393
def test_check_with_fixtures(dd_run_check, aggregator, instance, mock_http_response, fixture_file, description):
94-
9594
mock_http_response(file_path=fixture_file)
9695
check = ArgoWorkflowsCheck('argo_workflows', {}, [instance])
9796
dd_run_check(check)
@@ -100,7 +99,6 @@ def test_check_with_fixtures(dd_run_check, aggregator, instance, mock_http_respo
10099
aggregator.assert_metric(f'argo_workflows.{m_name}', metric_type=m_type)
101100

102101
if fixture_file == 'tests/fixtures/metricsv3-6+.txt':
103-
104102
for m_name, m_type in V3_6_METRICS:
105103
aggregator.assert_metric(f'argo_workflows.{m_name}', metric_type=m_type)
106104

aspdotnet/datadog_checks/aspdotnet/aspdotnet.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,6 @@
1111

1212
class AspdotnetCheck(PDHBaseCheck):
1313
def __new__(cls, name, init_config, instances):
14-
1514
if not is_affirmative(instances[0].get('use_legacy_check_version', False)):
1615
return AspdotnetCheckV2(name, init_config, instances)
1716
else:

aws_neuron/datadog_checks/aws_neuron/check.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,6 @@ class AwsNeuronCheck(OpenMetricsBaseCheckV2):
1111
DEFAULT_METRIC_LIMIT = 0
1212

1313
def __init__(self, name, init_config, instances=None):
14-
1514
super(AwsNeuronCheck, self).__init__(
1615
name,
1716
init_config,

azure_iot_edge/tests/common.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -346,8 +346,7 @@
346346

347347
E2E_METRICS = (
348348
# All metrics...
349-
{name for name, _ in MODULE_METRICS}
350-
.union(name for name, _, _ in AGENT_METRICS)
349+
{name for name, _ in MODULE_METRICS}.union(name for name, _, _ in AGENT_METRICS)
351350
.union(name for name, _ in HUB_METRICS)
352351
# ... Except a few that don't get emitted by default.
353352
.difference(

btrfs/datadog_checks/btrfs/btrfs.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -109,7 +109,6 @@ def get_usage(self, mountpoint):
109109
results = []
110110

111111
with FileDescriptor(mountpoint) as fd:
112-
113112
# Get the struct size needed
114113
# https://github.com/spotify/linux/blob/master/fs/btrfs/ioctl.h#L46-L50
115114
ret = sized_array(TWO_LONGS_STRUCT.size)
@@ -135,7 +134,6 @@ def get_unallocated_space(self, mountpoint):
135134
unallocated_bytes = 0
136135

137136
with FileDescriptor(mountpoint) as fd:
138-
139137
# Retrieve the fs info to get the number of devices and max device id
140138
fs_info = sized_array(BTRFS_FS_INFO_STRUCT.size)
141139
fcntl.ioctl(fd, BTRFS_IOC_FS_INFO, fs_info)

btrfs/tests/test_btrfs.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@
1313

1414

1515
def mock_get_usage():
16-
1716
return [
1817
(1, 9672065024, 9093722112),
1918
(34, 33554432, 16384),
@@ -25,7 +24,6 @@ def mock_get_usage():
2524
# Just return a single device so the psutil portion of the check doesn't fail
2625
# The real data to check against is in mock_get_usage.
2726
def get_mock_devices():
28-
2927
device_tuple = collections.namedtuple('device_tuple', 'device mountpoint fstype opts')
3028

3129
return [device_tuple(device='/dev/disk1', mountpoint='/', fstype='btrfs', opts='local,multilabel')]

cacti/datadog_checks/cacti/cacti.py

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -208,9 +208,7 @@ def _in_whitelist(rrd):
208208
AND dl.snmp_index = hsc.snmp_index
209209
WHERE dt.data_source_path IS NOT NULL
210210
AND dt.data_source_path != ''
211-
AND ({} OR hsc.field_name is NULL) """.format(
212-
and_parameters
213-
)
211+
AND ({} OR hsc.field_name is NULL) """.format(and_parameters)
214212

215213
c.execute(rrd_query)
216214
res = []

calico/datadog_checks/calico/check.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,6 @@ class CalicoCheck(OpenMetricsBaseCheckV2):
1111
DEFAULT_METRIC_LIMIT = 0
1212

1313
def __init__(self, name, init_config, instances=None):
14-
1514
super(CalicoCheck, self).__init__(
1615
name,
1716
init_config,

calico/tests/conftest.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -41,12 +41,14 @@ def setup_calico():
4141

4242
@pytest.fixture(scope='session')
4343
def dd_environment():
44-
45-
with kind_run(
46-
conditions=[setup_calico], kind_config=path.join(HERE, 'kind', 'kind-calico.yaml'), sleep=10
47-
) as kubeconfig, port_forward(kubeconfig, 'kube-system', 9091, 'service', 'felix-metrics-svc') as (
48-
calico_host,
49-
calico_port,
44+
with (
45+
kind_run(
46+
conditions=[setup_calico], kind_config=path.join(HERE, 'kind', 'kind-calico.yaml'), sleep=10
47+
) as kubeconfig,
48+
port_forward(kubeconfig, 'kube-system', 9091, 'service', 'felix-metrics-svc') as (
49+
calico_host,
50+
calico_port,
51+
),
5052
):
5153
endpoint = 'http://{}:{}/metrics'.format(calico_host, calico_port)
5254

calico/tests/test_calico.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,6 @@
1313

1414
@pytest.mark.unit
1515
def test_check(aggregator, dd_run_check, mock_http_response):
16-
1716
mock_http_response(file_path=get_fixture_path('calico.txt'))
1817
check = CalicoCheck('calico', {}, [common.MOCK_CALICO_INSTANCE])
1918
dd_run_check(check)

cassandra_nodetool/datadog_checks/cassandra_nodetool/cassandra_nodetool.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,6 @@
2929

3030

3131
class CassandraNodetoolCheck(AgentCheck):
32-
3332
datacenter_name_re = re.compile('^Datacenter: (.*)')
3433
# 1.
3534
# -- Address Load Tokens Owns Host ID Rack
@@ -88,7 +87,6 @@ def check(self, _):
8887
percent_total_by_dc = defaultdict(float)
8988
# Send the stats per node and compute the stats per datacenter
9089
for node in nodes:
91-
9290
node_tags = [
9391
'node_address:%s' % node['address'],
9492
'node_id:%s' % node['id'],

ceph/tests/test_unit.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,6 @@ def test_luminous_osd_full_metrics(_, aggregator, dd_run_check):
118118

119119
@mock.patch("datadog_checks.ceph.Ceph._collect_raw", return_value=mock_data("raw.json"))
120120
def test_tagged_metrics(_, aggregator, dd_run_check):
121-
122121
ceph_check = Ceph(CHECK_NAME, {}, [copy.deepcopy(BASIC_CONFIG)])
123122
dd_run_check(ceph_check)
124123

@@ -137,7 +136,6 @@ def test_tagged_metrics(_, aggregator, dd_run_check):
137136

138137
@mock.patch("datadog_checks.ceph.Ceph._collect_raw", return_value=mock_data("raw2.json"))
139138
def test_osd_perf_with_osdstats(_, aggregator, dd_run_check):
140-
141139
ceph_check = Ceph(CHECK_NAME, {}, [copy.deepcopy(BASIC_CONFIG)])
142140
dd_run_check(ceph_check)
143141

@@ -150,7 +148,6 @@ def test_osd_perf_with_osdstats(_, aggregator, dd_run_check):
150148

151149
@mock.patch("datadog_checks.ceph.Ceph._collect_raw", return_value=mock_data("ceph_10.2.2.json"))
152150
def test_osd_status_metrics(_, aggregator, dd_run_check):
153-
154151
ceph_check = Ceph(CHECK_NAME, {}, [copy.deepcopy(BASIC_CONFIG)])
155152
dd_run_check(ceph_check)
156153

cert_manager/tests/conftest.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020

2121

2222
def setup_cert_manager():
23-
2423
# Deploy Cert Manager
2524
run_command(
2625
[

cisco_aci/datadog_checks/cisco_aci/api.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,6 @@ def make_request(self, path):
8585

8686

8787
class Api:
88-
8988
wrapper_factory = SessionWrapper
9089

9190
def __init__(

cisco_aci/datadog_checks/cisco_aci/cisco.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020

2121

2222
class CiscoACICheck(AgentCheck):
23-
2423
HTTP_CONFIG_REMAPPER = {'ssl_verify': {'name': 'tls_verify'}, 'pwd': {'name': 'password'}}
2524
HA_SUPPORTED = True
2625

cisco_aci/tests/test_fabric.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -198,11 +198,11 @@ def assert_fabric_port_ingr_metrics(aggregator):
198198
aggregator.assert_metric(name=metric_name, value=90202911073.0, tags=interface_tags_201_eth2, hostname=hn201)
199199

200200
metric_name = 'cisco_aci.fabric.port.ingr_bytes.unicast'
201-
aggregator.assert_metric(name=metric_name, value=50443812.0, tags=interface_tags_101_eth1, hostname=hn101),
202-
aggregator.assert_metric(name=metric_name, value=70147142.0, tags=interface_tags_101_eth2, hostname=hn101),
203-
aggregator.assert_metric(name=metric_name, value=32704715.0, tags=interface_tags_102_eth1, hostname=hn102),
204-
aggregator.assert_metric(name=metric_name, value=23770059.0, tags=interface_tags_102_eth2, hostname=hn102),
205-
aggregator.assert_metric(name=metric_name, value=105702610.0, tags=interface_tags_201_eth1, hostname=hn201),
201+
aggregator.assert_metric(name=metric_name, value=50443812.0, tags=interface_tags_101_eth1, hostname=hn101)
202+
aggregator.assert_metric(name=metric_name, value=70147142.0, tags=interface_tags_101_eth2, hostname=hn101)
203+
aggregator.assert_metric(name=metric_name, value=32704715.0, tags=interface_tags_102_eth1, hostname=hn102)
204+
aggregator.assert_metric(name=metric_name, value=23770059.0, tags=interface_tags_102_eth2, hostname=hn102)
205+
aggregator.assert_metric(name=metric_name, value=105702610.0, tags=interface_tags_201_eth1, hostname=hn201)
206206
aggregator.assert_metric(name=metric_name, value=29485355.0, tags=interface_tags_201_eth2, hostname=hn201)
207207

208208
metric_name = 'cisco_aci.fabric.port.ingr_bytes.unicast.cum'

citrix_hypervisor/tests/test_lab.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -50,8 +50,7 @@ def test_lab(aggregator, dd_run_check):
5050
"""
5151
if not is_affirmative(os.environ.get('TEST_CITRIX_RUN_LAB')):
5252
pytest.skip(
53-
"Skipped! Set TEST_CITRIX_RUN_LAB to run this test. "
54-
"TEST_CITRIX_USER and TEST_CITRIX_PASS must also be set."
53+
"Skipped! Set TEST_CITRIX_RUN_LAB to run this test. TEST_CITRIX_USER and TEST_CITRIX_PASS must also be set."
5554
)
5655

5756
username = os.environ['TEST_CITRIX_USER']

cloud_foundry_api/tests/test_cloud_foundry_api.py

Lines changed: 20 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -97,9 +97,10 @@ def test_check(_, __, ___, aggregator, instance, dd_events, dd_run_check):
9797
@mock.patch.object(CloudFoundryApiCheck, "get_spaces", return_value={"space_id": "space_name"})
9898
def test_get_events(_, __, ___, instance, dd_events):
9999
scroll_events_mock = mock.MagicMock(return_value=dd_events)
100-
with mock.patch.object(CloudFoundryApiCheck, "scroll_events", scroll_events_mock), mock.patch.object(
101-
CloudFoundryApiCheck, "get_oauth_token"
102-
) as get_oauth_token_mock:
100+
with (
101+
mock.patch.object(CloudFoundryApiCheck, "scroll_events", scroll_events_mock),
102+
mock.patch.object(CloudFoundryApiCheck, "get_oauth_token") as get_oauth_token_mock,
103+
):
103104
check_v2 = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
104105
check_v2._api_version = "v2"
105106
check_v3 = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
@@ -511,7 +512,6 @@ def test_build_dd_event(_, __, ___, instance):
511512
@mock.patch("datadog_checks.cloud_foundry_api.cloud_foundry_api.get_next_url", side_effect=["next", ""])
512513
@mock.patch.object(CloudFoundryApiCheck, "http")
513514
def test_scroll_api_pages(http_mock, get_next_url_mock, __, ___, ____, aggregator, instance):
514-
515515
check = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
516516

517517
# When exhausting all pages
@@ -613,17 +613,19 @@ def test_get_orgs(_, __, instance, orgs_v2_p1, orgs_v2_p2, orgs_v3_p1, orgs_v3_p
613613
"321c58b0-777b-472f-812e-c08c53817074": "org_3",
614614
"0ba4c8cb-9e71-4d6e-b6ff-74e301ed6467": "org_4",
615615
}
616-
with mock.patch.object(
617-
CloudFoundryApiCheck, "scroll_api_pages", return_value=[orgs_v2_p1, orgs_v2_p2]
618-
), mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"):
616+
with (
617+
mock.patch.object(CloudFoundryApiCheck, "scroll_api_pages", return_value=[orgs_v2_p1, orgs_v2_p2]),
618+
mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"),
619+
):
619620
check = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
620621
check._api_version = "v2"
621622

622623
assert check.get_orgs() == expected_orgs
623624

624-
with mock.patch.object(
625-
CloudFoundryApiCheck, "scroll_api_pages", return_value=[orgs_v3_p1, orgs_v3_p2]
626-
), mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"):
625+
with (
626+
mock.patch.object(CloudFoundryApiCheck, "scroll_api_pages", return_value=[orgs_v3_p1, orgs_v3_p2]),
627+
mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"),
628+
):
627629
check = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
628630
check._api_version = "v3"
629631

@@ -639,17 +641,19 @@ def test_get_spaces(_, __, instance, spaces_v2_p1, spaces_v2_p2, spaces_v3_p1, s
639641
"d5d005a4-0320-4daa-ac0a-81f8dcd00fe0": "space_3",
640642
"8c7e64bb-0bf8-4a7a-92e1-2fe06e7ec793": "space_4",
641643
}
642-
with mock.patch.object(
643-
CloudFoundryApiCheck, "scroll_api_pages", return_value=[spaces_v2_p1, spaces_v2_p2]
644-
), mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"):
644+
with (
645+
mock.patch.object(CloudFoundryApiCheck, "scroll_api_pages", return_value=[spaces_v2_p1, spaces_v2_p2]),
646+
mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"),
647+
):
645648
check = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
646649
check._api_version = "v2"
647650

648651
assert check.get_spaces() == expected_spaces
649652

650-
with mock.patch.object(
651-
CloudFoundryApiCheck, "scroll_api_pages", return_value=[spaces_v3_p1, spaces_v3_p2]
652-
), mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"):
653+
with (
654+
mock.patch.object(CloudFoundryApiCheck, "scroll_api_pages", return_value=[spaces_v3_p1, spaces_v3_p2]),
655+
mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"),
656+
):
653657
check = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
654658
check._api_version = "v3"
655659

cloudera/datadog_checks/cloudera/api/api_v7.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -52,9 +52,10 @@ def _collect_clusters(self):
5252
# Use len(discovered_clusters) * 3 workers since
5353
# for each cluster, we are executing 3 tasks in parallel.
5454
if len(discovered_clusters) > 0:
55-
with ThreadPoolExecutor(max_workers=len(discovered_clusters) * 3) as executor, raising_submitter(
56-
executor
57-
) as submit:
55+
with (
56+
ThreadPoolExecutor(max_workers=len(discovered_clusters) * 3) as executor,
57+
raising_submitter(executor) as submit,
58+
):
5859
for pattern, cluster_name, item, cluster_config in discovered_clusters:
5960
self._log.debug(
6061
"Discovered cluster: [pattern:%s, cluster_name:%s, config:%s]",
@@ -136,9 +137,10 @@ def _collect_hosts(self, cluster_name, config):
136137
# Use len(discovered_hosts) * 4 workers since
137138
# for each host, we are executing 4 tasks in parallel.
138139
if len(discovered_hosts) > 0:
139-
with ThreadPoolExecutor(max_workers=len(discovered_hosts) * 4) as executor, raising_submitter(
140-
executor
141-
) as submit:
140+
with (
141+
ThreadPoolExecutor(max_workers=len(discovered_hosts) * 4) as executor,
142+
raising_submitter(executor) as submit,
143+
):
142144
for pattern, key, item, config in discovered_hosts:
143145
self._log.debug(
144146
"discovered host: [pattern:%s, key:%s, item:%s, config:%s]", pattern, key, item, config

0 commit comments

Comments
 (0)