Skip to content

Commit 470e4ab

Browse files
authored
Merge pull request #170 from alliander-opensource/refactor/extra-info
Refactor ExtraInfo type hint
2 parents: 63b0b94 + 568bcfb · commit 470e4ab

File tree

11 files changed

+88
-100
lines changed

11 files changed

+88
-100
lines changed

src/power_grid_model_io/converters/base_converter.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
from power_grid_model.data_types import Dataset, SingleDataset
1212

1313
from power_grid_model_io.data_stores.base_data_store import BaseDataStore
14-
from power_grid_model_io.data_types import ExtraInfoLookup
14+
from power_grid_model_io.data_types import ExtraInfo
1515
from power_grid_model_io.utils.auto_id import AutoID
1616

1717
T = TypeVar("T")
@@ -31,7 +31,7 @@ def __init__(self, source: Optional[BaseDataStore[T]] = None, destination: Optio
3131

3232
def load_input_data(
3333
self, data: Optional[T] = None, make_extra_info: bool = True
34-
) -> Tuple[SingleDataset, ExtraInfoLookup]:
34+
) -> Tuple[SingleDataset, ExtraInfo]:
3535
"""Load input data and extra info
3636
3737
Note: You shouldn't have to overwrite this method. Check _parse_data() instead.
@@ -44,7 +44,7 @@ def load_input_data(
4444
"""
4545

4646
data = self._load_data(data)
47-
extra_info: ExtraInfoLookup = {}
47+
extra_info: ExtraInfo = {}
4848
parsed_data = self._parse_data(data=data, data_type="input", extra_info=extra_info if make_extra_info else None)
4949
if isinstance(parsed_data, list):
5050
raise TypeError("Input data can not be batch data")
@@ -92,14 +92,14 @@ def load_asym_output_data(self, data: Optional[T] = None) -> Dataset:
9292
data = self._load_data(data)
9393
return self._parse_data(data=data, data_type="asym_output", extra_info=None)
9494

95-
def convert(self, data: Dataset, extra_info: Optional[ExtraInfoLookup] = None) -> T:
95+
def convert(self, data: Dataset, extra_info: Optional[ExtraInfo] = None) -> T:
9696
"""Convert input/update/(a)sym_output data and optionally extra info.
9797
9898
Note: You shouldn't have to overwrite this method. Check _serialize_data() instead.
9999
100100
Args:
101101
data: Dataset:
102-
extra_info: Optional[ExtraInfoLookup]: (Default value = None)
102+
extra_info: Optional[ExtraInfo]: (Default value = None)
103103
104104
Returns:
105105
@@ -109,7 +109,7 @@ def convert(self, data: Dataset, extra_info: Optional[ExtraInfoLookup] = None) -
109109
def save(
110110
self,
111111
data: Dataset,
112-
extra_info: Optional[ExtraInfoLookup] = None,
112+
extra_info: Optional[ExtraInfo] = None,
113113
destination: Optional[BaseDataStore[T]] = None,
114114
) -> None:
115115
"""Save input/update/(a)sym_output data and optionally extra info.
@@ -118,7 +118,7 @@ def save(
118118
119119
Args:
120120
data: Dataset:
121-
extra_info: Optional[ExtraInfoLookup]: (Default value = None)
121+
extra_info: Optional[ExtraInfo]: (Default value = None)
122122
destination: Optional[BaseDataStore[T]]: (Default value = None)
123123
124124
Returns:
@@ -140,9 +140,9 @@ def _load_data(self, data: Optional[T]) -> T:
140140
raise ValueError("No data supplied!")
141141

142142
@abstractmethod # pragma: nocover
143-
def _parse_data(self, data: T, data_type: str, extra_info: Optional[ExtraInfoLookup]) -> Dataset:
143+
def _parse_data(self, data: T, data_type: str, extra_info: Optional[ExtraInfo]) -> Dataset:
144144
pass
145145

146146
@abstractmethod # pragma: nocover
147-
def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfoLookup]) -> T:
147+
def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfo]) -> T:
148148
pass

src/power_grid_model_io/converters/pandapower_converter.py

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
from power_grid_model.data_types import Dataset, SingleDataset
1515

1616
from power_grid_model_io.converters.base_converter import BaseConverter
17-
from power_grid_model_io.data_types import ExtraInfoLookup
17+
from power_grid_model_io.data_types import ExtraInfo
1818
from power_grid_model_io.functions import get_winding
1919
from power_grid_model_io.utils.regex import NODE_REF_RE, TRAFO3_CONNECTION_RE, TRAFO_CONNECTION_RE
2020

@@ -48,9 +48,7 @@ def __init__(self, system_frequency: float = 50.0, trafo_loading: str = "current
4848
self.idx_lookup: Dict[Tuple[str, Optional[str]], pd.Series] = {}
4949
self.next_idx = 0
5050

51-
def _parse_data(
52-
self, data: PandaPowerData, data_type: str, extra_info: Optional[ExtraInfoLookup] = None
53-
) -> Dataset:
51+
def _parse_data(self, data: PandaPowerData, data_type: str, extra_info: Optional[ExtraInfo] = None) -> Dataset:
5452
"""
5553
Set up for conversion from PandaPower to power-grid-model
5654
@@ -85,7 +83,7 @@ def _parse_data(
8583

8684
return self.pgm_input_data
8785

88-
def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfoLookup]) -> PandaPowerData:
86+
def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfo]) -> PandaPowerData:
8987
"""
9088
Set up for conversion from power-grid-model to PandaPower
9189
@@ -137,7 +135,7 @@ def _create_input_data(self):
137135
self._create_pgm_input_generators()
138136
self._create_pgm_input_dclines()
139137

140-
def _fill_extra_info(self, extra_info: ExtraInfoLookup):
138+
def _fill_extra_info(self, extra_info: ExtraInfo):
141139
for (pp_table, name), indices in self.idx_lookup.items():
142140
for pgm_id, pp_idx in zip(indices.index, indices):
143141
if name:
@@ -153,7 +151,7 @@ def _fill_extra_info(self, extra_info: ExtraInfoLookup):
153151
else:
154152
extra_info[pgm_id][attr_name] = node_id
155153

156-
def _extra_info_to_idx_lookup(self, extra_info: ExtraInfoLookup):
154+
def _extra_info_to_idx_lookup(self, extra_info: ExtraInfo):
157155
"""
158156
Converts extra component info into idx_lookup
159157
@@ -179,7 +177,7 @@ def _extra_info_to_idx_lookup(self, extra_info: ExtraInfoLookup):
179177
self.idx[key] = pd.Series(pgm_ids, index=pp_indices)
180178
self.idx_lookup[key] = pd.Series(pp_indices, index=pgm_ids)
181179

182-
def _extra_info_to_pgm_input_data(self, extra_info: ExtraInfoLookup):
180+
def _extra_info_to_pgm_input_data(self, extra_info: ExtraInfo):
183181
"""
184182
Converts extra component info into node_lookup
185183

src/power_grid_model_io/converters/pgm_json_converter.py

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919

2020
from power_grid_model_io.converters.base_converter import BaseConverter
2121
from power_grid_model_io.data_stores.json_file_store import JsonFileStore
22-
from power_grid_model_io.data_types import ExtraInfoLookup, StructuredData
22+
from power_grid_model_io.data_types import ExtraInfo, StructuredData
2323
from power_grid_model_io.utils.dict import merge_dicts
2424

2525

@@ -44,7 +44,7 @@ def __init__(
4444
destination = JsonFileStore(file_path=Path(destination_file)) if destination_file else None
4545
super().__init__(source=source, destination=destination)
4646

47-
def _parse_data(self, data: StructuredData, data_type: str, extra_info: Optional[ExtraInfoLookup]) -> Dataset:
47+
def _parse_data(self, data: StructuredData, data_type: str, extra_info: Optional[ExtraInfo]) -> Dataset:
4848
"""This function expects Structured data, which can either be a dictionary (single dataset) or a list of
4949
dictionaries (batch dataset). The structured dataset consists of components + attributes that exist within
5050
power-grid-model, but can also contain other data. If this data should be saved for later usage an extra_info
@@ -57,7 +57,7 @@ def _parse_data(self, data: StructuredData, data_type: str, extra_info: Optional
5757
power-grid-model data) can be specified
5858
data: StructuredData:
5959
data_type: str:
60-
extra_info: Optional[ExtraInfoLookup]:
60+
extra_info: Optional[ExtraInfo]:
6161
6262
Returns:
6363
a dictionary containing the components as keys and their corresponding numpy arrays as values: a
@@ -75,7 +75,7 @@ def _parse_data(self, data: StructuredData, data_type: str, extra_info: Optional
7575
return self._parse_dataset(data=data, data_type=data_type, extra_info=extra_info)
7676

7777
def _parse_dataset(
78-
self, data: SinglePythonDataset, data_type: str, extra_info: Optional[ExtraInfoLookup]
78+
self, data: SinglePythonDataset, data_type: str, extra_info: Optional[ExtraInfo]
7979
) -> SingleDataset:
8080
"""This function parses a single Python dataset and returns a power-grid-model input or update dictionary
8181
@@ -86,7 +86,7 @@ def _parse_dataset(
8686
power-grid-model data) can be specified
8787
data: SinglePythonDataset:
8888
data_type: str:
89-
extra_info: Optional[ExtraInfoLookup]: (Default value = None)
89+
extra_info: Optional[ExtraInfo]: (Default value = None)
9090
9191
Returns:
9292
a dictionary containing the components as keys and their corresponding numpy arrays as values: a
@@ -102,7 +102,7 @@ def _parse_dataset(
102102

103103
@staticmethod
104104
def _parse_component(
105-
objects: ComponentList, component: str, data_type: str, extra_info: Optional[ExtraInfoLookup]
105+
objects: ComponentList, component: str, data_type: str, extra_info: Optional[ExtraInfo]
106106
) -> np.ndarray:
107107
"""This function generates a structured numpy array (power-grid-model native) from a structured dataset
108108
@@ -116,7 +116,7 @@ def _parse_component(
116116
objects: ComponentList:
117117
component: str:
118118
data_type: str:
119-
extra_info: Optional[ExtraInfoLookup]: (Default value = None)
119+
extra_info: Optional[ExtraInfo]: (Default value = None)
120120
121121
Returns:
122122
a numpy structured array for a power-grid-model component
@@ -144,7 +144,7 @@ def _parse_component(
144144
extra_info[obj["id"]][attribute] = value
145145
return array
146146

147-
def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfoLookup]) -> StructuredData:
147+
def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfo]) -> StructuredData:
148148
"""This function converts a power-grid-model dataset to a structured dataset. First, the function checks if the
149149
dataset is a single dataset or batch dataset. If it is a batch, the batch data is converted to a list of
150150
batches, then each batch is converted individually.
@@ -155,7 +155,7 @@ def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfoLookup])
155155
structured dataset. The keys in this dictionary should match with id's of components in the power-grid-model
156156
dataset. Note, extra info can only be supplied for single datasets
157157
data: Dataset:
158-
extra_info: Optional[ExtraInfoLookup]: (Default value = None)
158+
extra_info: Optional[ExtraInfo]: (Default value = None)
159159
160160
Returns:
161161
the function returns a structured dataset
@@ -206,7 +206,7 @@ def _is_batch(data: Dataset) -> bool:
206206
return bool(is_batch)
207207

208208
@staticmethod
209-
def _serialize_dataset(data: SingleDataset, extra_info: Optional[ExtraInfoLookup] = None) -> SinglePythonDataset:
209+
def _serialize_dataset(data: SingleDataset, extra_info: Optional[ExtraInfo] = None) -> SinglePythonDataset:
210210
"""This function converts a single power-grid-model dataset to a structured dataset
211211
212212
Args:
@@ -215,7 +215,7 @@ def _serialize_dataset(data: SingleDataset, extra_info: Optional[ExtraInfoLookup
215215
structured dataset. The keys in this dictionary should match with id's of components in the power-grid-model
216216
dataset
217217
data: SingleDataset:
218-
extra_info: Optional[ExtraInfoLookup]: (Default value = None)
218+
extra_info: Optional[ExtraInfo]: (Default value = None)
219219
220220
Returns:
221221
the function returns a structured dataset

src/power_grid_model_io/converters/tabular_converter.py

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616

1717
from power_grid_model_io.converters.base_converter import BaseConverter
1818
from power_grid_model_io.data_stores.base_data_store import BaseDataStore
19-
from power_grid_model_io.data_types import ExtraInfoLookup, TabularData
19+
from power_grid_model_io.data_types import ExtraInfo, TabularData
2020
from power_grid_model_io.mappings.multiplier_mapping import MultiplierMapping, Multipliers
2121
from power_grid_model_io.mappings.tabular_mapping import InstanceAttributes, Tables, TabularMapping
2222
from power_grid_model_io.mappings.unit_mapping import UnitMapping, Units
@@ -79,7 +79,7 @@ def set_mapping_file(self, mapping_file: Path) -> None:
7979
MultiplierMapping(cast(Multipliers, mapping["multipliers"])) if "multipliers" in mapping else None
8080
)
8181

82-
def _parse_data(self, data: TabularData, data_type: str, extra_info: Optional[ExtraInfoLookup]) -> Dataset:
82+
def _parse_data(self, data: TabularData, data_type: str, extra_info: Optional[ExtraInfo]) -> Dataset:
8383
"""This function parses tabular data and returns power-grid-model data
8484
8585
Args:
@@ -90,7 +90,7 @@ def _parse_data(self, data: TabularData, data_type: str, extra_info: Optional[Ex
9090
power-grid-model data) can be specified
9191
data: TabularData:
9292
data_type: str:
93-
extra_info: Optional[ExtraInfoLookup]:
93+
extra_info: Optional[ExtraInfo]:
9494
9595
Returns:
9696
a power-grid-model dataset, i.e. a dictionary as {component: np.ndarray}
@@ -140,7 +140,7 @@ def _convert_table_to_component(
140140
table: str,
141141
component: str,
142142
attributes: InstanceAttributes,
143-
extra_info: Optional[ExtraInfoLookup],
143+
extra_info: Optional[ExtraInfo],
144144
) -> Optional[np.ndarray]:
145145
"""
146146
This function converts a single table/sheet of TabularData to a power-grid-model input/update array. One table
@@ -160,7 +160,7 @@ def _convert_table_to_component(
160160
table: str:
161161
component: str:
162162
attributes: InstanceAttributes:
163-
extra_info: Optional[ExtraInfoLookup]:
163+
extra_info: Optional[ExtraInfo]:
164164
165165
Returns:
166166
returns a power-grid-model structured array for one component
@@ -204,7 +204,7 @@ def _convert_col_def_to_attribute(
204204
component: str,
205205
attr: str,
206206
col_def: Any,
207-
extra_info: Optional[ExtraInfoLookup],
207+
extra_info: Optional[ExtraInfo],
208208
):
209209
"""This function updates one of the attributes of pgm_data, based on the corresponding table/column in a tabular
210210
dataset
@@ -225,7 +225,7 @@ def _convert_col_def_to_attribute(
225225
component: str:
226226
attr: str:
227227
col_def: Any:
228-
extra_info: Optional[ExtraInfoLookup]:
228+
extra_info: Optional[ExtraInfo]:
229229
230230
Returns:
231231
the function updates pgm_data, it should not return something
@@ -259,7 +259,7 @@ def _handle_extra_info(
259259
table: str,
260260
col_def: Any,
261261
uuids: np.ndarray,
262-
extra_info: Optional[ExtraInfoLookup],
262+
extra_info: Optional[ExtraInfo],
263263
) -> None:
264264
"""This function can extract extra info from the tabular data and store it in the extra_info dict
265265
@@ -275,7 +275,7 @@ def _handle_extra_info(
275275
table: str:
276276
col_def: Any:
277277
uuids: np.ndarray:
278-
extra_info: Optional[ExtraInfoLookup]:
278+
extra_info: Optional[ExtraInfo]:
279279
280280
Returns:
281281
@@ -322,23 +322,23 @@ def _merge_pgm_data(data: Dict[str, List[np.ndarray]]) -> Dict[str, np.ndarray]:
322322

323323
return merged
324324

325-
def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfoLookup]) -> TabularData:
325+
def _serialize_data(self, data: Dataset, extra_info: Optional[ExtraInfo]) -> TabularData:
326326
if extra_info is not None:
327327
raise NotImplementedError("Extra info can not (yet) be stored for tabular data")
328328
if isinstance(data, list):
329329
raise NotImplementedError("Batch data can not (yet) be stored for tabular data")
330330
return TabularData(**data)
331331

332332
def _parse_col_def(
333-
self, data: TabularData, table: str, col_def: Any, extra_info: Optional[ExtraInfoLookup]
333+
self, data: TabularData, table: str, col_def: Any, extra_info: Optional[ExtraInfo]
334334
) -> pd.DataFrame:
335335
"""Interpret the column definition and extract/convert/create the data as a pandas DataFrame.
336336
337337
Args:
338338
data: TabularData:
339339
table: str:
340340
col_def: Any:
341-
extra_info: Optional[ExtraInfoLookup]:
341+
extra_info: Optional[ExtraInfo]:
342342
343343
Returns:
344344
@@ -436,7 +436,7 @@ def _parse_reference(
436436
return result[[value_column]]
437437

438438
def _parse_col_def_filter(
439-
self, data: TabularData, table: str, col_def: Dict[str, Any], extra_info: Optional[ExtraInfoLookup]
439+
self, data: TabularData, table: str, col_def: Dict[str, Any], extra_info: Optional[ExtraInfo]
440440
) -> pd.DataFrame:
441441
"""
442442
Parse column filters like 'auto_id', 'reference', 'function', etc
@@ -493,7 +493,7 @@ def _parse_auto_id(
493493
ref_table: Optional[str],
494494
ref_name: Optional[str],
495495
key_col_def: Union[str, List[str], Dict[str, str]],
496-
extra_info: Optional[ExtraInfoLookup],
496+
extra_info: Optional[ExtraInfo],
497497
) -> pd.DataFrame:
498498
"""
499499
Create (or retrieve) a unique numerical id for each object (row) in `data[table]`, based on the `name`

0 commit comments

Comments (0)