Commit e31e05c

Merge pull request #831 from PowerGridModel/feature/python-optional-as-none-typehint
Feature/Use None instead of Optional
2 parents: 1624ba3 + 1dcdd42

14 files changed: +73 -80 lines changed
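Every hunk in this commit applies the same mechanical change: annotations written as Optional[X] are rewritten to the PEP 604 union form X | None, and the then-unused Optional import is dropped from typing. A minimal before/after sketch of the pattern (the function name lookup_nan_value is hypothetical and only illustrates the notation; it assumes Python 3.10+, where the | union syntax is available):

# Before: needs an extra import from typing
from typing import Optional

def lookup_nan_value(data_type: str) -> Optional[str]:
    return None

# After: built-in union syntax, no typing import required (Python 3.10+)
def lookup_nan_value(data_type: str) -> str | None:
    return None

Both spellings denote exactly the same type, so the diff is purely a notation modernization; that is why every file below only touches imports, annotations, and docstrings.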

code_generation/meta_data.py

+6 -7

@@ -5,7 +5,6 @@
 # define dataclass for meta data

 from dataclasses import dataclass
-from typing import Optional

 from dataclasses_json import DataClassJsonMixin

@@ -15,19 +14,19 @@ class Attribute(DataClassJsonMixin):
     data_type: str
     names: str | list[str]
     description: str
-    nan_value: Optional[str] = None
+    nan_value: str | None = None


 @dataclass
 class AttributeClass(DataClassJsonMixin):
     name: str
     attributes: list[Attribute]
-    full_attributes: Optional[list[Attribute]] = None
-    base: Optional[str] = None
+    full_attributes: list[Attribute] | None = None
+    base: str | None = None
     is_template: bool = False
-    full_name: Optional[str] = None
-    specification_names: Optional[list[str]] = None
-    base_attributes: Optional[dict[str, list[Attribute]]] = None
+    full_name: str | None = None
+    specification_names: list[str] | None = None
+    base_attributes: dict[str, list[Attribute]] | None = None


 @dataclass
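Because Attribute and AttributeClass derive from DataClassJsonMixin, fields that now read X | None = None keep behaving as ordinary optional fields during (de)serialization; a value left at None round-trips as JSON null. A small usage sketch under that assumption (Python 3.10+; the field values are made up for illustration):

from dataclasses import dataclass

from dataclasses_json import DataClassJsonMixin


@dataclass
class Attribute(DataClassJsonMixin):
    data_type: str
    names: str | list[str]
    description: str
    nan_value: str | None = None  # optional field in the new union notation


attr = Attribute(data_type="double", names="u_rated", description="rated line-to-line voltage")
print(attr.to_json())
# {"data_type": "double", "names": "u_rated", "description": "rated line-to-line voltage", "nan_value": null}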

src/power_grid_model/_core/error_handling.py

+5 -6

@@ -7,7 +7,6 @@
 """

 import re
-from typing import Optional

 import numpy as np

@@ -115,7 +114,7 @@ def _interpret_error(message: str, decode_error: bool = True) -> PowerGridError:
     return PowerGridError(message)


-def find_error(batch_size: int = 1, decode_error: bool = True) -> Optional[RuntimeError]:
+def find_error(batch_size: int = 1, decode_error: bool = True) -> RuntimeError | None:
     """
     Check if there is an error and return it

@@ -171,7 +170,7 @@ def assert_no_error(batch_size: int = 1, decode_error: bool = True):

 def handle_errors(
     continue_on_batch_error: bool, batch_size: int = 1, decode_error: bool = True
-) -> Optional[PowerGridBatchError]:
+) -> PowerGridBatchError | None:
     """
     Handle any errors in the way that is specified.

@@ -184,10 +183,10 @@ def handle_errors(
         error: Any errors previously encountered, unless it was a batch error and continue_on_batch_error was True.

     Returns:
-        Optional[PowerGridBatchError]: None if there were no errors, or the previously encountered
-            error if it was a batch error and continue_on_batch_error was True.
+        PowerGridBatchError | None: None if there were no errors, or the previously encountered
+            error if it was a batch error and continue_on_batch_error was True.
     """
-    error: Optional[RuntimeError] = find_error(batch_size=batch_size, decode_error=decode_error)
+    error: RuntimeError | None = find_error(batch_size=batch_size, decode_error=decode_error)
     if error is None:
         return None
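The return-type change is purely cosmetic for callers: a RuntimeError | None result is still narrowed with an is None check, exactly as the error = find_error(...) lines in the last hunk show. A generic, self-contained sketch of that narrowing (check_status is a hypothetical stand-in, not part of this module):

def check_status(code: int) -> RuntimeError | None:
    # Return an error object for a non-zero code, or None on success.
    return RuntimeError(f"operation failed with code {code}") if code != 0 else None


error: RuntimeError | None = check_status(0)
if error is None:
    print("no error")  # type checkers narrow error to None on this branch
else:
    raise error  # ...and to RuntimeError on this one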

src/power_grid_model/_core/power_grid_core.py

+2 -2

@@ -11,7 +11,7 @@
 from inspect import signature
 from itertools import chain
 from pathlib import Path
-from typing import Callable, Optional
+from typing import Callable

 from power_grid_model._core.index_integer import IdC, IdxC

@@ -206,7 +206,7 @@ class PowerGridCore:
     """

     _handle: HandlePtr
-    _instance: Optional["PowerGridCore"] = None
+    _instance: "PowerGridCore | None" = None

     # singleton of power grid core
     def __new__(cls, *args, **kwargs):
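One detail in this hunk: _instance refers to the class that is still being defined, so the whole annotation stays a quoted forward reference, and the union syntax simply moves inside the string ("PowerGridCore | None"). A self-contained sketch of the same pattern (the Registry class is hypothetical, not part of this code base):

class Registry:
    # The class name is not bound yet while the class body executes,
    # so the annotation is kept as a string (forward reference).
    _instance: "Registry | None" = None

    @classmethod
    def get(cls) -> "Registry":
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance


assert Registry.get() is Registry.get()  # singleton: same instance both times

The string is only resolved lazily (by a type checker or typing.get_type_hints), so the | expression is never evaluated against an unbound name.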

src/power_grid_model/_core/power_grid_dataset.py

+2 -2

@@ -6,7 +6,7 @@
 Power grid model raw dataset handler
 """

-from typing import Any, Mapping, Optional
+from typing import Any, Mapping

 from power_grid_model._core.buffer_handling import (
     BufferProperties,
@@ -283,7 +283,7 @@ class CConstDataset:
     _const_dataset: ConstDatasetPtr
     _buffer_views: list[CBuffer]

-    def __new__(cls, data: Dataset, dataset_type: Optional[DatasetType] = None):
+    def __new__(cls, data: Dataset, dataset_type: DatasetType | None = None):
         instance = super().__new__(cls)
         instance._const_dataset = ConstDatasetPtr()

src/power_grid_model/_core/power_grid_model.py

+11 -11

@@ -7,7 +7,7 @@
 """

 from enum import IntEnum
-from typing import Optional, Type
+from typing import Type

 import numpy as np

@@ -45,11 +45,11 @@ class PowerGridModel:
     """

     _model_ptr: ModelPtr
-    _all_component_count: Optional[dict[ComponentType, int]]
-    _batch_error: Optional[PowerGridBatchError]
+    _all_component_count: dict[ComponentType, int] | None
+    _batch_error: PowerGridBatchError | None

     @property
-    def batch_error(self) -> Optional[PowerGridBatchError]:
+    def batch_error(self) -> PowerGridBatchError | None:
         """
         Get the batch error object, if present

@@ -242,7 +242,7 @@ def _calculate_impl( # pylint: disable=too-many-positional-arguments
         self,
         calculation_type: CalculationType,
         symmetric: bool,
-        update_data: Optional[Dataset],
+        update_data: Dataset | None,
         output_component_types: ComponentAttributeMapping,
         options: Options,
         continue_on_batch_error: bool,
@@ -310,7 +310,7 @@ def _calculate_power_flow(
         error_tolerance: float = 1e-8,
         max_iterations: int = 20,
         calculation_method: CalculationMethod | str = CalculationMethod.newton_raphson,
-        update_data: Optional[Dataset] = None,
+        update_data: Dataset | None = None,
         threading: int = -1,
         output_component_types: ComponentAttributeMapping = None,
         continue_on_batch_error: bool = False,
@@ -347,7 +347,7 @@ def _calculate_state_estimation(
         error_tolerance: float = 1e-8,
         max_iterations: int = 20,
         calculation_method: CalculationMethod | str = CalculationMethod.iterative_linear,
-        update_data: Optional[Dataset] = None,
+        update_data: Dataset | None = None,
         threading: int = -1,
         output_component_types: ComponentAttributeMapping = None,
         continue_on_batch_error: bool = False,
@@ -379,7 +379,7 @@ def _calculate_short_circuit(
         self,
         *,
         calculation_method: CalculationMethod | str = CalculationMethod.iec60909,
-        update_data: Optional[Dataset] = None,
+        update_data: Dataset | None = None,
         threading: int = -1,
         output_component_types: ComponentAttributeMapping = None,
         continue_on_batch_error: bool = False,
@@ -416,7 +416,7 @@ def calculate_power_flow(
         error_tolerance: float = 1e-8,
         max_iterations: int = 20,
         calculation_method: CalculationMethod | str = CalculationMethod.newton_raphson,
-        update_data: Optional[dict[str, np.ndarray | dict[str, np.ndarray]] | Dataset] = None,
+        update_data: dict[str, np.ndarray | dict[str, np.ndarray]] | Dataset | None = None,
         threading: int = -1,
         output_component_types: ComponentAttributeMapping = None,
         continue_on_batch_error: bool = False,
@@ -514,7 +514,7 @@ def calculate_state_estimation(
         error_tolerance: float = 1e-8,
         max_iterations: int = 20,
         calculation_method: CalculationMethod | str = CalculationMethod.iterative_linear,
-        update_data: Optional[dict[str, np.ndarray | dict[str, np.ndarray]] | Dataset] = None,
+        update_data: dict[str, np.ndarray | dict[str, np.ndarray]] | Dataset | None = None,
         threading: int = -1,
         output_component_types: ComponentAttributeMapping = None,
         continue_on_batch_error: bool = False,
@@ -604,7 +604,7 @@ def calculate_short_circuit(
         self,
         *,
         calculation_method: CalculationMethod | str = CalculationMethod.iec60909,
-        update_data: Optional[dict[str, np.ndarray | dict[str, np.ndarray]] | Dataset] = None,
+        update_data: dict[str, np.ndarray | dict[str, np.ndarray]] | Dataset | None = None,
         threading: int = -1,
         output_component_types: ComponentAttributeMapping = None,
         continue_on_batch_error: bool = False,

src/power_grid_model/_utils.py

+2 -5

@@ -11,7 +11,7 @@
 """

 from copy import deepcopy
-from typing import Optional, Sequence, cast
+from typing import Sequence, cast

 import numpy as np

@@ -218,8 +218,6 @@ def _split_numpy_array_in_batches(

     Args:
         data: A 1D or 2D Numpy structured array. A 1D array is a single table / batch, a 2D array is a batch per table.
-        component: The name of the component to which the data belongs; only used for errors.
-        attribute [optional]: The name of the attribute to which the data belongs; only used for errors.

     Returns:
         A list with a single numpy structured array per batch
@@ -239,7 +237,6 @@ def split_dense_batch_data_in_batches(

     Args:
         data: A 1D or 2D Numpy structured array. A 1D array is a single table / batch, a 2D array is a batch per table.
-        component: The name of the component to which the data belongs, only used for errors.
         batch_size: size of batch

     Returns:
@@ -326,7 +323,7 @@ def convert_dataset_to_python_dataset(data: Dataset) -> PythonDataset:

     # Check if the dataset is a single dataset or batch dataset
     # It is batch dataset if it is 2D array or a indptr/data structure
-    is_batch: Optional[bool] = None
+    is_batch: bool | None = None
     for component, array in data.items():
         is_dense_batch = isinstance(array, np.ndarray) and array.ndim == 2
         is_sparse_batch = isinstance(array, dict) and "indptr" in array and "data" in array

src/power_grid_model/utils.py

+6 -6

@@ -11,7 +11,7 @@
 import tempfile
 import warnings
 from pathlib import Path
-from typing import Optional, cast as cast_type
+from typing import cast as cast_type

 import numpy as np

@@ -142,9 +142,9 @@ def json_deserialize_from_file(
 def json_serialize_to_file(
     file_path: Path,
     data: Dataset,
-    dataset_type: Optional[DatasetType] = None,
+    dataset_type: DatasetType | None = None,
     use_compact_list: bool = False,
-    indent: Optional[int] = 2,
+    indent: int | None = 2,
 ):
     """
     Export JSON data in most recent format.
@@ -189,7 +189,7 @@ def msgpack_deserialize_from_file(


 def msgpack_serialize_to_file(
-    file_path: Path, data: Dataset, dataset_type: Optional[DatasetType] = None, use_compact_list: bool = False
+    file_path: Path, data: Dataset, dataset_type: DatasetType | None = None, use_compact_list: bool = False
 ):
     """
     Export msgpack data in most recent format.
@@ -234,7 +234,7 @@ def import_json_data(json_file: Path, data_type: str, *args, **kwargs) -> Dataset:


 def export_json_data(
-    json_file: Path, data: Dataset, indent: Optional[int] = 2, compact: bool = False, use_deprecated_format: bool = True
+    json_file: Path, data: Dataset, indent: int | None = 2, compact: bool = False, use_deprecated_format: bool = True
 ):
     """
     [deprecated] Export json data in a deprecated serialization format.
@@ -268,7 +268,7 @@ def export_json_data(


 def _compatibility_deprecated_export_json_data(
-    json_file: Path, data: Dataset, indent: Optional[int] = 2, compact: bool = False
+    json_file: Path, data: Dataset, indent: int | None = 2, compact: bool = False
 ):
     serialized_data = json_serialize(data=data, use_compact_list=compact, indent=-1 if indent is None else indent)
     old_format_serialized_data = json.dumps(json.loads(serialized_data)["data"])

src/power_grid_model/validation/assertions.py

+2 -4

@@ -6,8 +6,6 @@
 Helper functions to assert valid data. They basically call validate_input_data or validate_batch_data and raise a
 ValidationException if the validation results in one or more errors.
 """
-from typing import Optional
-
 from power_grid_model.data_types import BatchDataset, SingleDataset
 from power_grid_model.enum import CalculationType
 from power_grid_model.validation.errors import ValidationError
@@ -31,7 +29,7 @@ def __str__(self):


 def assert_valid_input_data(
-    input_data: SingleDataset, calculation_type: Optional[CalculationType] = None, symmetric: bool = True
+    input_data: SingleDataset, calculation_type: CalculationType | None = None, symmetric: bool = True
 ):
     """
     Validates the entire input dataset:
@@ -60,7 +58,7 @@ def assert_valid_input_data(
 def assert_valid_batch_data(
     input_data: SingleDataset,
     update_data: BatchDataset,
-    calculation_type: Optional[CalculationType] = None,
+    calculation_type: CalculationType | None = None,
     symmetric: bool = True,
 ):
     """

src/power_grid_model/validation/errors.py

+10 -10

@@ -8,7 +8,7 @@
 import re
 from abc import ABC
 from enum import Enum
-from typing import Any, Iterable, Optional, Type
+from typing import Any, Iterable, Type

 from power_grid_model import ComponentType

@@ -32,18 +32,18 @@ class ValidationError(ABC):

     """

-    component: Optional[ComponentType | list[ComponentType]] = None
+    component: ComponentType | list[ComponentType] | None = None
     """
     The component, or components, to which the error applies.
     """

-    field: Optional[str | list[str] | list[tuple[ComponentType, str]]] = None
+    field: str | list[str] | list[tuple[ComponentType, str]] | None = None
     """
     The field, or fields, to which the error applies. A field can also be a tuple (component, field) when multiple
     components are being addressed.
     """

-    ids: Optional[list[int] | list[tuple[ComponentType, int]]] = None
+    ids: list[int] | list[tuple[ComponentType, int]] | None = None
     """
     The object identifiers to which the error applies. A field object identifier can also be a tuple (component, id)
     when multiple components are being addressed.
@@ -79,7 +79,7 @@ def _unpack(field: str | tuple[ComponentType, str]) -> str:
             return self._delimiter.join(_unpack(field) for field in self.field)
         return _unpack(self.field) if self.field else str(self.field)

-    def get_context(self, id_lookup: Optional[list[str] | dict[int, str]] = None) -> dict[str, Any]:
+    def get_context(self, id_lookup: list[str] | dict[int, str] | None = None) -> dict[str, Any]:
         """
         Returns a dictionary that supplies (human readable) information about this error. Each member variable is
         included in the dictionary. If a function {field_name}_str() exists, the value is overwritten by that function.
@@ -129,9 +129,9 @@ class SingleFieldValidationError(ValidationError):
     _message = "Field {field} is not valid for {n} {objects}."
     component: ComponentType
     field: str
-    ids: Optional[list[int]]
+    ids: list[int] | None

-    def __init__(self, component: ComponentType, field: str, ids: Optional[Iterable[int]]):
+    def __init__(self, component: ComponentType, field: str, ids: Iterable[int] | None):
         """
         Args:
             component: Component name
@@ -325,9 +325,9 @@ def __init__( # pylint: disable=too-many-arguments
         self,
         component: ComponentType,
         field: str,
-        ids: Optional[list[int]] = None,
-        ref_components: Optional[ComponentType | list[ComponentType]] = None,
-        filters: Optional[dict[str, Any]] = None,
+        ids: list[int] | None = None,
+        ref_components: ComponentType | list[ComponentType] | None = None,
+        filters: dict[str, Any] | None = None,
     ):
         # pylint: disable=too-many-positional-arguments
         super().__init__(component=component, field=field, ids=ids)
