Skip to content

Commit 3c91b5b

Browse files
author
Emma Ai
committed
update typing to 3.10
1 parent b51a8a3 commit 3c91b5b

35 files changed

+369
-380
lines changed

odc/stats/_algebra.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ def median_by_ind(xr_da, dim, dtype="float32", name="median_by_ind"):
3232
meta=np.array((), dtype=dtype),
3333
drop_axis=0,
3434
)
35-
coords = dict((dim, xr_da.coords[dim]) for dim in xr_da.dims[1:])
35+
coords = {dim: xr_da.coords[dim] for dim in xr_da.dims[1:]}
3636

3737
return xr.DataArray(
3838
res, dims=xr_da.dims[1:], coords=coords, attrs=xr_da.attrs.copy()
@@ -44,5 +44,5 @@ def median_ds(xr_ds, dim, dtype="float32", name="median_ds"):
4444
for var, data in xr_ds.data_vars.items():
4545
res[var] = median_by_ind(data, dim, dtype, name)
4646
# pylint: disable=undefined-loop-variable
47-
coords = dict((dim, xr_ds.coords[dim]) for dim in data.dims[1:])
47+
coords = {dim: xr_ds.coords[dim] for dim in data.dims[1:]}
4848
return xr.Dataset(res, coords=coords, attrs=xr_ds.attrs.copy())

odc/stats/_cli_common.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22

33
import logging
44
import sys
5-
from typing import List, Tuple
65
import click
76

87

@@ -12,7 +11,7 @@
1211
from urllib.parse import urlparse
1312

1413

15-
TileIdx_txy = Tuple[str, int, int] # pylint: disable=invalid-name
14+
TileIdx_txy = tuple[str, int, int] # pylint: disable=invalid-name
1615

1716

1817
def parse_task(s: str) -> TileIdx_txy:
@@ -27,8 +26,8 @@ def parse_task(s: str) -> TileIdx_txy:
2726

2827

2928
def parse_all_tasks(
30-
inputs: List[str], all_possible_tasks: List[TileIdx_txy]
31-
) -> List[TileIdx_txy]:
29+
inputs: list[str], all_possible_tasks: list[TileIdx_txy]
30+
) -> list[TileIdx_txy]:
3231
"""
3332
Select a subset of all possible tasks given user input on cli.
3433
@@ -43,7 +42,7 @@ def parse_all_tasks(
4342
x+10/y-3/2019--P1Y
4443
"""
4544

46-
out: List[TileIdx_txy] = []
45+
out: list[TileIdx_txy] = []
4746
full_set = set(all_possible_tasks)
4847

4948
for s in inputs:
@@ -68,7 +67,7 @@ def parse_all_tasks(
6867
return out
6968

7069

71-
def parse_resolution(s: str, separator: str = ",") -> Tuple[float, float]:
70+
def parse_resolution(s: str, separator: str = ",") -> tuple[float, float]:
7271
parts = [float(v) for v in split_and_check(s, separator, (1, 2))]
7372

7473
if len(parts) == 1:

odc/stats/_cli_publish_tasks.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
import json
22
import sys
3-
from typing import List, Optional
43

54
import click
65
import fsspec
@@ -28,15 +27,15 @@ def get_geometry(geojson_file: str) -> Geometry:
2827
)
2928

3029

31-
def filter_tasks(tasks: List[TileIdx_txy], geometry: Geometry, grid_name: str):
30+
def filter_tasks(tasks: list[TileIdx_txy], geometry: Geometry, grid_name: str):
3231
for task in tasks:
3332
task_geometry = GRIDS[grid_name].tile_geobox((task[1], task[2])).extent
3433
if task_geometry.intersects(geometry):
3534
yield task
3635

3736

3837
def publish_tasks(
39-
db: str, task_filter: str, geojson_filter: Optional[str], dryrun: bool, queue: str
38+
db: str, task_filter: str, geojson_filter: str | None, dryrun: bool, queue: str
4039
):
4140
reader = TaskReader(db)
4241
if len(task_filter) == 0:
@@ -67,7 +66,7 @@ def publish_tasks(
6766

6867
# We assume the db files are always S3 URIs. If they are not, there is no need to use an SQS queue to process them.
6968
messages = (
70-
dict(Id=str(idx), MessageBody=json.dumps(render_sqs(tidx, db)))
69+
{"Id": str(idx), "MessageBody": json.dumps(render_sqs(tidx, db))}
7170
for idx, tidx in enumerate(tasks)
7271
)
7372

odc/stats/_gjson.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import math
22
from copy import deepcopy
33
import toolz
4-
from typing import Tuple, Dict, Any
4+
from typing import Any
55
from datetime import timedelta
66

77
from odc.geo.gridspec import GridSpec
@@ -11,7 +11,7 @@
1111
from .model import TileIdx_xy, TileIdx_txy
1212

1313

14-
def gs_bounds(gs: GridSpec, tiles: Tuple[Tuple[int, int], Tuple[int, int]]) -> Geometry:
14+
def gs_bounds(gs: GridSpec, tiles: tuple[tuple[int, int], tuple[int, int]]) -> Geometry:
1515
"""
1616
Compute Polygon for a selection of tiles.
1717
@@ -36,8 +36,8 @@ def timedelta_to_hours(td: timedelta) -> float:
3636

3737

3838
def compute_grid_info(
39-
cells: Dict[TileIdx_xy, Any], resolution: float = math.inf, title_width: int = 0
40-
) -> Dict[TileIdx_xy, Any]:
39+
cells: dict[TileIdx_xy, Any], resolution: float = math.inf, title_width: int = 0
40+
) -> dict[TileIdx_xy, Any]:
4141
"""
4242
Compute geojson feature for every cell in ``cells``.
4343
Where ``cells`` is produced by ``bin_dataset_stream``
@@ -75,8 +75,8 @@ def compute_grid_info(
7575

7676

7777
def gjson_from_tasks(
78-
tasks: Dict[TileIdx_txy, Any], grid_info: Dict[TileIdx_xy, Any]
79-
) -> Dict[str, Dict[str, Any]]:
78+
tasks: dict[TileIdx_txy, Any], grid_info: dict[TileIdx_xy, Any]
79+
) -> dict[str, dict[str, Any]]:
8080
"""
8181
Group tasks by time period and compute geojson describing every tile covered by each time period.
8282
@@ -96,14 +96,14 @@ def _get(idx):
9696
dss = tasks[idx]
9797
utc_offset = timedelta(hours=geo["properties"]["utc_offset"])
9898

99-
ndays = len(set((ds.time + utc_offset).date() for ds in dss))
99+
ndays = len({(ds.time + utc_offset).date() for ds in dss})
100100
geo["properties"]["total"] = len(dss)
101101
geo["properties"]["days"] = ndays
102102

103103
return geo
104104

105105
def process(idxs):
106-
return dict(type="FeatureCollection", features=[_get(idx) for idx in idxs])
106+
return {"type": "FeatureCollection", "features": [_get(idx) for idx in idxs]}
107107

108108
return {
109109
t: process(idxs)

odc/stats/_grouper.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
from __future__ import annotations
88

99
from datetime import timedelta
10-
from typing import TYPE_CHECKING, Any
10+
from typing import Any, TYPE_CHECKING
1111

1212
import numpy as np
1313
import pandas as pd

odc/stats/_sqs.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,14 +2,13 @@
22
Work token for SQS based job control
33
"""
44

5-
from typing import Optional
65
from datetime import timedelta, datetime
76
import toolz
87
from .model import WorkTokenInterface
98

109

1110
class SQSWorkToken(WorkTokenInterface):
12-
def __init__(self, msg, timeout: int, t0: Optional[datetime] = None):
11+
def __init__(self, msg, timeout: int, t0: datetime | None = None):
1312
super().__init__()
1413
if t0 is None:
1514
t0 = self.now()

odc/stats/_text.py

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,28 +1,28 @@
11
from pathlib import Path
2-
from typing import Union, Optional, Tuple, Dict, Any
2+
from typing import Any
33

4-
PathLike = Union[str, Path]
4+
PathLike = str | Path
55

66

77
# Copied from odc.io.text
88

99

10-
def read_int(path: PathLike, default=None, base=10) -> Optional[int]:
10+
def read_int(path: PathLike, default=None, base=10) -> int | None:
1111
"""
1212
Read single integer from a text file.
1313
1414
Useful for things like parsing content of /sys/ or /proc.
1515
"""
1616
try:
17-
with open(path, "rt", encoding="utf8") as f:
17+
with open(path, encoding="utf8") as f:
1818
return int(f.read(), base)
1919
except (FileNotFoundError, ValueError):
2020
return default
2121

2222

2323
def split_and_check(
24-
s: str, separator: str, n: Union[int, Tuple[int, ...]]
25-
) -> Tuple[str, ...]:
24+
s: str, separator: str, n: int | tuple[int, ...]
25+
) -> tuple[str, ...]:
2626
"""Turn string into tuple, checking that there are exactly as many parts as expected.
2727
:param s: String to parse
2828
:param separator: Separator character
@@ -44,7 +44,7 @@ def parse_slice(s: str) -> slice:
4444
Examples "::4", "2:5", "2::10", "3:100:5"
4545
"""
4646

47-
def parse(part: str) -> Optional[int]:
47+
def parse(part: str) -> int | None:
4848
if part == "":
4949
return None
5050
return int(part)
@@ -57,32 +57,32 @@ def parse(part: str) -> Optional[int]:
5757
return slice(*parts)
5858

5959

60-
def parse_yaml(s: str) -> Dict[str, Any]:
60+
def parse_yaml(s: str) -> dict[str, Any]:
6161
# pylint: disable=import-outside-toplevel
6262
import yaml
6363

6464
return yaml.load(s, Loader=getattr(yaml, "CSafeLoader", yaml.SafeLoader))
6565

6666

67-
def parse_yaml_file_or_inline(s: str) -> Dict[str, Any]:
67+
def parse_yaml_file_or_inline(s: str) -> dict[str, Any]:
6868
"""
6969
Accept either a path to a YAML file or inline YAML text, and return the parsed YAML document.
7070
"""
7171
try:
7272
# if file
7373
path = Path(s)
74-
with open(path, "rt", encoding="utf8") as f:
74+
with open(path, encoding="utf8") as f:
7575
txt = f.read()
7676
assert isinstance(txt, str)
77-
except (FileNotFoundError, IOError, ValueError):
77+
except (FileNotFoundError, OSError, ValueError):
7878
txt = s
7979
result = parse_yaml(txt)
8080
if isinstance(result, str):
81-
raise IOError(f"No such file: {s}")
81+
raise OSError(f"No such file: {s}")
8282
return result
8383

8484

85-
def load_yaml_remote(yaml_url: str) -> Dict[str, Any]:
85+
def load_yaml_remote(yaml_url: str) -> dict[str, Any]:
8686
"""
8787
Open a yaml file remotely and return the parsed yaml document
8888
"""
@@ -97,7 +97,7 @@ def load_yaml_remote(yaml_url: str) -> Dict[str, Any]:
9797
raise
9898

9999

100-
def parse_range2d_int(s: str) -> Tuple[Tuple[int, int], Tuple[int, int]]:
100+
def parse_range2d_int(s: str) -> tuple[tuple[int, int], tuple[int, int]]:
101101
"""Parse string like "0:3,4:5" -> ((0,3), (4,5))"""
102102
try:
103103
return tuple(

0 commit comments

Comments
 (0)