
Commit 6124dbe

Merge pull request #5151 from yt-project/pre-commit-ci-update-config
[pre-commit.ci] pre-commit autoupdate
2 parents: dbaf77c + 5db86e0 · commit 6124dbe

12 files changed (+15, -15 lines)

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
@@ -36,7 +36,7 @@ repos:
       additional_dependencies: [black==24.3.0]

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.9
+    rev: v0.11.4
     hooks:
       - id: ruff-format
       - id: ruff

yt/data_objects/level_sets/clump_handling.py

Lines changed: 1 addition & 1 deletion
@@ -289,7 +289,7 @@ def save_as_dataset(self, filename=None, fields=None):
             else:
                 clump_info[ci] = np.array(clump_info[ci])

-        ftypes = {ci: "clump" for ci in clump_info}
+        ftypes = dict.fromkeys(clump_info, "clump")

         # collect data fields
         if fields is not None:
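Note on this change, which repeats through the rest of the diff: a comprehension that maps every key to one constant value is rewritten as dict.fromkeys(iterable, value), the form suggested by the updated ruff release (likely its flake8-comprehensions C420 check). A minimal sketch of the equivalence, using illustrative stand-in data rather than yt's real clump_info:

    # Stand-in for the clump_info dict built above; the real values are arrays.
    clump_info = {"cell_mass": [1.0, 2.0], "total_cells": [8, 4]}

    # Iterating a dict yields its keys, so both forms build the same mapping.
    assert dict.fromkeys(clump_info, "clump") == {ci: "clump" for ci in clump_info}

    # Caveat: the value is stored once, not copied, so a mutable value would be
    # shared by every key. Harmless here, where the values are short strings.
    shared = dict.fromkeys(("a", "b"), [])
    shared["a"].append(1)
    assert shared["b"] == [1]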

yt/data_objects/tests/test_profiles.py

Lines changed: 2 additions & 2 deletions
@@ -608,7 +608,7 @@ def test_unequal_data_shape_profile(self):
             ("gas", "mass"): mass,
         }
         fake_ds_med = {"current_time": yt.YTQuantity(10, "Myr")}
-        field_types = {field: "gas" for field in my_data.keys()}
+        field_types = dict.fromkeys(my_data.keys(), "gas")
         yt.save_as_dataset(fake_ds_med, "mydata.h5", my_data, field_types=field_types)

         ds = yt.load("mydata.h5")
@@ -633,7 +633,7 @@ def test_unequal_bin_field_profile(self):
             ("gas", "mass"): mass,
         }
         fake_ds_med = {"current_time": yt.YTQuantity(10, "Myr")}
-        field_types = {field: "gas" for field in my_data.keys()}
+        field_types = dict.fromkeys(my_data.keys(), "gas")
         yt.save_as_dataset(fake_ds_med, "mydata.h5", my_data, field_types=field_types)

         ds = yt.load("mydata.h5")

yt/fields/tests/test_fields.py

Lines changed: 1 addition & 1 deletion
@@ -145,7 +145,7 @@ def get_base_ds(nprocs):
     ds.parameters["EOSType"] = 1.0
     ds.parameters["EOSSoundSpeed"] = 1.0
     ds.conversion_factors["Time"] = 1.0
-    ds.conversion_factors.update({f: 1.0 for f in fields})
+    ds.conversion_factors.update(dict.fromkeys(fields, 1.0))
     ds.gamma = 5.0 / 3.0
     ds.current_redshift = 0.0001
     ds.cosmological_simulation = 1

yt/frontends/halo_catalog/tests/test_outputs.py

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@
 def fake_halo_catalog(data):
     filename = "catalog.0.h5"

-    ftypes = {field: "." for field in data}
+    ftypes = dict.fromkeys(data, ".")
     extra_attrs = {"data_type": "halo_catalog", "num_halos": data["particle_mass"].size}

     ds = {

yt/geometry/coordinates/coordinate_handler.py

Lines changed: 2 additions & 2 deletions
@@ -228,11 +228,11 @@ def convert_from_spherical(self, coord):

     @cached_property
     def data_projection(self):
-        return {ax: None for ax in self.axis_order}
+        return dict.fromkeys(self.axis_order)

     @cached_property
     def data_transform(self):
-        return {ax: None for ax in self.axis_order}
+        return dict.fromkeys(self.axis_order)

     @cached_property
     def axis_name(self):
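Note: with no second argument, dict.fromkeys fills every key with None, which is exactly what the replaced comprehensions produced. A one-line check with illustrative axis names:

    axis_order = ("x", "y", "z")
    assert dict.fromkeys(axis_order) == {ax: None for ax in axis_order}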

yt/loaders.py

Lines changed: 2 additions & 2 deletions
@@ -341,7 +341,7 @@ def load_uniform_grid(
             field = ("io", key)
             mylog.debug("Reassigning '%s' to '%s'", key, field)
         else:
-            key = cast(FieldKey, key)
+            key = cast("FieldKey", key)
             field = key
         sfh._additional_fields += (field,)
         pdata[field] = data.pop(key)
@@ -1912,7 +1912,7 @@ def _reader(grid, field_name):
     psize = get_psize(np.array(shape), nchunks)
     left_edges, right_edges, shapes, _, _ = decompose_array(shape, psize, bbox)
     for le, re, s in zip(left_edges, right_edges, shapes, strict=True):
-        data = {_: reader for _ in fields}
+        data = dict.fromkeys(fields, reader)
         data.update({"left_edge": le, "right_edge": re, "dimensions": s, "level": 0})
         grid_data.append(data)
     return load_amr_grids(grid_data, shape, bbox=bbox, **dataset_arguments)
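Two notes on this file. In the first hunk, the quoted form cast("FieldKey", key) passes only a string literal, so FieldKey does not have to be importable at runtime and can live behind an if TYPE_CHECKING: block; this matches the direction of ruff's flake8-type-checking fixes. A minimal sketch, with a hypothetical import path rather than yt's actual one:

    from typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:
        # Hypothetical location; imported only while type checking.
        from yt._typing import FieldKey

    def as_field_key(key):
        # cast() simply returns its second argument; with the quoted type the
        # name FieldKey is never looked up at runtime, so this always runs.
        return cast("FieldKey", key)

In the second hunk, dict.fromkeys(fields, reader) stores the same reader callable under every field name, just as the comprehension did; sharing one function object is the intended behavior here.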

yt/utilities/io_handler.py

Lines changed: 1 addition & 1 deletion
@@ -113,7 +113,7 @@ def _read_fluid_selection(
                 nodal_fields.append(field)
             else:
                 rv[field] = np.empty(size, dtype="=f8")
-        ind = {field: 0 for field in fields}
+        ind = dict.fromkeys(fields, 0)
         for field, obj, data in self.io_iter(chunks, fields):
             if data is None:
                 continue

yt/utilities/lib/octree_raytracing.py

Lines changed: 1 addition & 1 deletion
@@ -50,7 +50,7 @@ def vertex_centered_data(self, field):
             np.zeros((2, 2, 2, data_source.ires.size), dtype="float64"), units
         )
         binary_3D_index_iter = product(*[range(2)] * 3)
-        ind = {(i, j, k): 0 for i, j, k in binary_3D_index_iter}
+        ind = dict.fromkeys(binary_3D_index_iter, 0)
         for chunk in chunks:
             with data_source._chunked_read(chunk):
                 gz = data_source._current_chunk.objs[0]
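Note: here the iterable is an itertools.product iterator rather than a dict; dict.fromkeys consumes it once and keeps each (i, j, k) tuple as a key, matching what the unpack-and-rebuild comprehension produced. A quick sanity check:

    from itertools import product

    corners_fromkeys = dict.fromkeys(product(*[range(2)] * 3), 0)
    corners_comprehension = {(i, j, k): 0 for i, j, k in product(*[range(2)] * 3)}
    assert corners_fromkeys == corners_comprehension  # 8 corner indices, all zero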

yt/visualization/fixed_resolution.py

Lines changed: 1 addition & 1 deletion
@@ -536,7 +536,7 @@ def save_as_dataset(self, filename=None, fields=None):
         else:
             data.update(self.data)

-        ftypes = {field: "grid" for field in data}
+        ftypes = dict.fromkeys(data, "grid")
         extra_attrs = {
             arg: getattr(self.data_source, arg, None)
             for arg in self.data_source._con_args + self.data_source._tds_attrs

0 commit comments
