Commit 492b386

Merge pull request #374 from furqan463/update_ruff
Update Ruff Rules (UP, RSE, TRY002, FBT003)
2 parents 171ea73 + 5c2edab commit 492b386

51 files changed: +408 −424 lines

docs/examples/arrow_example.ipynb

Lines changed: 3 additions & 3 deletions

@@ -26,7 +26,7 @@
 "outputs": [],
 "source": [
 "%%capture cap --no-stderr\n",
-"from typing import Iterable\n",
+"from collections.abc import Iterable\n",
 "\n",
 "import numpy as np\n",
 "import pandas as pd\n",
@@ -405,7 +405,7 @@
 "type": "float"
 }
 ],
-"ref": "7fc39afa-a366-4a3b-baae-833f31d69d02",
+"ref": "2548a4ee-832c-4467-a49b-f8d0aa1063f6",
 "rows": [
 [
 "0",
@@ -757,7 +757,7 @@
 "type": "float"
 }
 ],
-"ref": "8823e105-4bd4-46fd-a1ff-ec1e5bc7687c",
+"ref": "9e919d45-d97c-40e3-ad30-09cdfa54cfc7",
 "rows": [
 [
 "0",

docs/examples/pandapower_example.ipynb

Lines changed: 5 additions & 5 deletions

@@ -198,7 +198,7 @@
 "source": [
 "import pandas as pd\n",
 "\n",
-"pd.set_option(\"future.no_silent_downcasting\", True) # enable behaviour of pandas 3.x\n",
+"pd.options.future.no_silent_downcasting = True # enable behaviour of pandas 3.x\n",
 "\n",
 "# The node data is stored as a numpy structured array in input_data[\"line\"]\n",
 "display(input_data[\"line\"])\n",
@@ -289,7 +289,7 @@
 "type": "float"
 }
 ],
-"ref": "eb369e38-8d56-41bd-8cd4-b4d6d31658c2",
+"ref": "bd00b4dd-df52-4856-8e90-68ddc1bd43a0",
 "rows": [
 [
 "0",
@@ -581,7 +581,7 @@
 "type": "float"
 }
 ],
-"ref": "2fdfa403-0f5b-4b9b-8a82-1ffb5d04b66f",
+"ref": "97050d7e-8f61-4286-9e9d-f49820257e46",
 "rows": [
 [
 "101",
@@ -764,7 +764,7 @@
 "type": "float"
 }
 ],
-"ref": "eb0233ad-7351-415b-9904-766914e17821",
+"ref": "d0a7d9d2-bf94-4ce4-b4b1-ac5dce7236b4",
 "rows": [
 [
 "101",
@@ -1003,7 +1003,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"2026-02-16 20:32:42 [warning ] Zero sequence parameters given in trafo shall be ignored: vkr0_percent, si0_hv_partial\n"
+"2026-02-19 05:18:47 [warning ] Zero sequence parameters given in trafo shall be ignored: vkr0_percent, si0_hv_partial\n"
 ]
 }
 ],
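
The two spellings of the pandas option are equivalent; the notebook now sets it via attribute access rather than the string-keyed setter. A small sketch, assuming pandas ≥ 2.2 (where the future.no_silent_downcasting option exists):

import pandas as pd

# String-keyed form used previously in the notebook:
# pd.set_option("future.no_silent_downcasting", True)

# Attribute-access form now used; both set the same option.
pd.options.future.no_silent_downcasting = True

s = pd.Series([1, 2, None], dtype=object)
# Without the option, fillna() would silently downcast the object
# result to int64 (pandas 2.2 emits a FutureWarning); with it enabled,
# the dtype is kept, matching pandas 3.x behaviour.
print(s.fillna(0).dtype)  # object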

pyproject.toml

Lines changed: 4 additions & 7 deletions

@@ -152,17 +152,17 @@ select = [
     # self
     #"SLF",
     # pyupgrade
-    #"UP",
+    "UP",
     # raise
-    #"RSE",
+    "RSE",
     # ruff sepcific rules
     "RUF",
     # bandit
     #"S",
     # boolean positional value in call
-    #"FBT003",
+    "FBT003",
     # raise vanilla args
-    #"TRY002",
+    "TRY002",
     # pytest-style
     #"PT",
 ]
@@ -172,9 +172,6 @@ ignore = []
 # Imports that are imported using keyword "as" and are from the same source - are combined.
 combine-as-imports = true
 
-[tool.ruff.lint.per-file-ignores]
-"tests/**/*.py" = ["TID252"] # intentional relative imports inside tests
-
 [tool.mypy]
 follow_imports = "silent"
 ignore_missing_imports = true
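
The newly enabled rule sets are: UP (pyupgrade: modern syntax such as X | None and built-in generics), RSE (RSE102: no empty parentheses when raising an exception without arguments), TRY002 (don't raise the vanilla Exception), and FBT003 (no boolean positional values in calls). A hedged sketch of the kind of code each rule flags; all names below are illustrative, not from this repository:

# UP: prefer built-in generics and "X | None" over typing.List/Optional
def first_or_none(items: list[int]) -> int | None:
    return items[0] if items else None


# RSE102: drop empty parentheses when raising without arguments
def require_positive(value: int) -> None:
    if value <= 0:
        raise ValueError  # not: raise ValueError()


# TRY002: raise a specific (possibly custom) exception, not bare Exception
class ConversionError(Exception):
    """Hypothetical project-specific error."""


def fail() -> None:
    raise ConversionError("conversion failed")  # not: raise Exception(...)


# FBT003: pass boolean flags by keyword, not positionally
def configure(*, verbose: bool = False) -> None:
    ...


configure(verbose=True)  # not: configure(True)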

src/power_grid_model_io/converters/base_converter.py

Lines changed: 21 additions & 26 deletions

@@ -7,7 +7,6 @@
 
 import logging
 from abc import ABC, abstractmethod
-from typing import Generic, Optional, Tuple, TypeVar
 
 import structlog
 from power_grid_model import DatasetType
@@ -17,16 +16,14 @@
 from power_grid_model_io.data_types import ExtraInfo
 from power_grid_model_io.utils.auto_id import AutoID
 
-T = TypeVar("T")
 
-
-class BaseConverter(Generic[T], ABC):
+class BaseConverter[T](ABC):
     """Abstract converter class"""
 
     def __init__(
         self,
-        source: Optional[BaseDataStore[T]] = None,
-        destination: Optional[BaseDataStore[T]] = None,
+        source: BaseDataStore[T] | None = None,
+        destination: BaseDataStore[T] | None = None,
         log_level: int = logging.INFO,
     ):
         """
@@ -39,9 +36,7 @@ def __init__(
         self._destination = destination
         self._auto_id = AutoID()
 
-    def load_input_data(
-        self, data: Optional[T] = None, make_extra_info: bool = True
-    ) -> Tuple[SingleDataset, ExtraInfo]:
+    def load_input_data(self, data: T | None = None, make_extra_info: bool = True) -> tuple[SingleDataset, ExtraInfo]:
         """Load input data and extra info
 
         Note: You shouldn't have to overwrite this method. Check _parse_data() instead.
@@ -62,70 +57,70 @@ def load_input_data(
             raise TypeError("Input data can not be batch data")
         return parsed_data, extra_info
 
-    def load_update_data(self, data: Optional[T] = None) -> Dataset:
+    def load_update_data(self, data: T | None = None) -> Dataset:
         """Load update data
 
         Note: You shouldn't have to overwrite this method. Check _parse_data() instead.
 
         Args:
-            data: Optional[T]: (Default value = None)
+            data: T | None: (Default value = None)
 
         Returns:
 
         """
         data = self._load_data(data)
         return self._parse_data(data=data, data_type=DatasetType.update, extra_info=None)
 
-    def load_sym_output_data(self, data: Optional[T] = None) -> Dataset:
+    def load_sym_output_data(self, data: T | None = None) -> Dataset:
         """Load symmetric output data
 
         Note: You shouldn't have to overwrite this method. Check _parse_data() instead.
 
         Args:
-            data: Optional[T]: (Default value = None)
+            data: T | None: (Default value = None)
 
         Returns:
 
         """
         data = self._load_data(data)
         return self._parse_data(data=data, data_type=DatasetType.sym_output, extra_info=None)
 
-    def load_asym_output_data(self, data: Optional[T] = None) -> Dataset:
+    def load_asym_output_data(self, data: T | None = None) -> Dataset:
         """Load asymmetric output data
 
         Note: You shouldn't have to overwrite this method. Check _parse_data() instead.
 
         Args:
-            data: Optional[T]: (Default value = None)
+            data: T | None: (Default value = None)
 
         Returns:
 
         """
         data = self._load_data(data)
         return self._parse_data(data=data, data_type=DatasetType.asym_output, extra_info=None)
 
-    def load_sc_output_data(self, data: Optional[T] = None) -> Dataset:
+    def load_sc_output_data(self, data: T | None = None) -> Dataset:
         """Load sc output data
 
         Note: You shouldn't have to overwrite this method. Check _parse_data() instead.
 
         Args:
-            data: Optional[T]: (Default value = None)
+            data: T | None: (Default value = None)
 
         Returns:
 
         """
         data = self._load_data(data)
         return self._parse_data(data=data, data_type=DatasetType.sc_output, extra_info=None)
 
-    def convert(self, data: Dataset, extra_info: Optional[ExtraInfo] = None) -> T:
+    def convert(self, data: Dataset, extra_info: ExtraInfo | None = None) -> T:
         """Convert input/update/(a)sym_output data and optionally extra info.
 
         Note: You shouldn't have to overwrite this method. Check _serialize_data() instead.
 
         Args:
             data: Dataset:
-            extra_info: Optional[ExtraInfo]: (Default value = None)
+            extra_info: ExtraInfo | None: (Default value = None)
 
         Returns:
 
@@ -135,17 +130,17 @@ def convert(self, data: Dataset, extra_info: Optional[ExtraInfo] = None) -> T:
     def save(
         self,
         data: Dataset,
-        extra_info: Optional[ExtraInfo] = None,
-        destination: Optional[BaseDataStore[T]] = None,
+        extra_info: ExtraInfo | None = None,
+        destination: BaseDataStore[T] | None = None,
     ) -> None:
         """Save input/update/(a)sym_output data and optionally extra info.
 
         Note: You shouldn't have to overwrite this method. Check _serialize_data() instead.
 
         Args:
             data: Dataset:
-            extra_info: Optional[ExtraInfo]: (Default value = None)
-            destination: Optional[BaseDataStore[T]]: (Default value = None)
+            extra_info: ExtraInfo | None: (Default value = None)
+            destination: BaseDataStore[T] | None: (Default value = None)
 
         Returns:
 
@@ -177,17 +172,17 @@ def get_log_level(self) -> int:
         """
         return self._logger.getEffectiveLevel()
 
-    def _load_data(self, data: Optional[T]) -> T:
+    def _load_data(self, data: T | None) -> T:
        if data is not None:
            return data
        if self._source is not None:
            return self._source.load()
        raise ValueError("No data supplied!")
 
     @abstractmethod # pragma: nocover
-    def _parse_data(self, data: T, data_type: DatasetType, extra_info: Optional[ExtraInfo]) -> Dataset:
+    def _parse_data(self, data: T, data_type: DatasetType, extra_info: ExtraInfo | None) -> Dataset:
         pass
 
     @abstractmethod # pragma: nocover
-    def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfo]) -> T:
+    def _serialize_data(self, data: Dataset, extra_info: ExtraInfo | None) -> T:
         pass
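
The converter changes are mechanical modernisation: `Optional[...]`/`Tuple[...]` become `X | None`/`tuple[...]`, and the explicit `TypeVar` plus `Generic[T]` base is replaced by the PEP 695 class syntax `BaseConverter[T]` (available since Python 3.12). A minimal standalone sketch of the same pattern, with hypothetical class names rather than the project's actual converter:

import logging
from abc import ABC, abstractmethod


class BaseStore[T](ABC):
    """Hypothetical data store, parametrised with PEP 695 syntax."""

    @abstractmethod
    def load(self) -> T: ...


class Converter[T](ABC):
    """Generic base class; no TypeVar/Generic[T] boilerplate needed."""

    def __init__(self, source: BaseStore[T] | None = None, log_level: int = logging.INFO):
        self._source = source
        self._log_level = log_level

    def _load_data(self, data: T | None) -> T:
        # Prefer data passed in directly, fall back to the configured source.
        if data is not None:
            return data
        if self._source is not None:
            return self._source.load()
        raise ValueError("No data supplied!")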
