diff --git a/opencosmo/collection/__init__.py b/opencosmo/collection/__init__.py index 76330a6d..3dda12b9 100644 --- a/opencosmo/collection/__init__.py +++ b/opencosmo/collection/__init__.py @@ -1,11 +1,11 @@ from .collection import Collection, ParticleCollection, SimulationCollection -from .io import open_linked, open_multi_dataset_file, read_multi_dataset_file +from .io import open_multi_dataset_file, open_simulation_files, read_multi_dataset_file __all__ = [ "Collection", - "open_linked", "open_multi_dataset_file", "read_multi_dataset_file", "ParticleCollection", "SimulationCollection", + "open_simulation_files", ] diff --git a/opencosmo/collection/collection.py b/opencosmo/collection/collection.py index 92a1f363..39c18ba1 100644 --- a/opencosmo/collection/collection.py +++ b/opencosmo/collection/collection.py @@ -26,14 +26,19 @@ class Collection(Protocol): support higher-level operations that are applied across all datasets in the collection, sometimes in a non-obvious way. - This protocl defines methods a collection must implement. Note that - the "open" and "read" methods are used in the case an entire collection + This protocol defines methods a collection must implement. Most notably they + must include __getitem__, keys, values and __items__, which allows + a collection to behave like a read-only dictionary. + + + Note that the "open" and "read" methods are used in the case an entire collection is located within a single file. Multi-file collections are handled in the collection.io module. Most complexity is hidden from the user - who simply calls "oc.read" and "oc.open" to get a collection. The io - module also does sanity checking to ensure files are structurally valid, + who simply calls "oc.read" and "oc.open" to get a collection. The io + module also does sanity checking to ensure files are structurally valid, so we do not have to do it here. 
""" + @classmethod def open( cls, file: h5py.File, datasets_to_get: Optional[Iterable[str]] = None @@ -46,10 +51,11 @@ def read( def write(self, file: h5py.File): ... - def as_dict(self) -> dict[str, oc.Dataset]: ... - + def __getitem__(self, key: str) -> oc.Dataset: ... + def keys(self) -> Iterable[str]: ... + def values(self) -> Iterable[oc.Dataset]: ... + def items(self) -> Iterable[tuple[str, oc.Dataset]]: ... def __enter__(self): ... - def __exit__(self, *exc_details): ... @@ -63,7 +69,7 @@ def write_with_common_header( # figure out if we have unique headers header.write(file) - for key, dataset in collection.as_dict().items(): + for key, dataset in collection.items(): dataset.write(file, key, with_header=False) @@ -74,7 +80,7 @@ def write_with_unique_headers(collection: Collection, file: h5py.File): """ # figure out if we have unique headers - for key, dataset in collection.as_dict().items(): + for key, dataset in collection.items(): dataset.write(file, key) diff --git a/opencosmo/collection/io.py b/opencosmo/collection/io.py index a8a357aa..9b1581a7 100644 --- a/opencosmo/collection/io.py +++ b/opencosmo/collection/io.py @@ -4,21 +4,36 @@ import h5py +import opencosmo as oc from opencosmo import dataset as ds -from opencosmo import io from opencosmo.collection import Collection, ParticleCollection, SimulationCollection -from opencosmo.collection.link import LinkedCollection, get_links, verify_links -from opencosmo.header import read_header +from opencosmo.link.collection import LinkedCollection -class FileHandle: - """ - Helper class used just for setup +def open_simulation_files(**paths: Path) -> SimulationCollection: """ + Open multiple files and return a simulation collection. The data + type of every file must be the same. + + Parameters + ---------- + paths : str or Path + The paths to the files to open. 
- def __init__(self, path: Path): - self.handle = h5py.File(path, "r") - self.header = read_header(self.handle) + Returns + ------- + SimulationCollection + + """ + datasets: dict[str, oc.Dataset] = {} + for key, path in paths.items(): + dataset = oc.open(path) + if not isinstance(dataset, oc.Dataset): + raise ValueError("All datasets must be of the same type.") + dtypes = set(dataset.header.file.data_type for dataset in datasets.values()) + if len(dtypes) != 1: + raise ValueError("All datasets must be of the same type.") + return SimulationCollection(dtypes.pop(), datasets) def open_multi_dataset_file( @@ -41,34 +56,6 @@ def read_multi_dataset_file( return CollectionType.read(file, datasets) -def open_linked(*files: Path): - """ - Open a collection of files that are linked together, such as a - properties file and a particle file. - """ - file_handles = [FileHandle(file) for file in files] - datasets = [io.open(file) for file in files] - property_file_type, linked_files = verify_links(*[fh.header for fh in file_handles]) - property_handle = next( - filter(lambda x: x.header.file.data_type == property_file_type, file_handles) - ).handle - links = get_links(property_handle) - if not links: - raise ValueError("No valid links found in files") - - output_datasets: dict[str, ds.Dataset] = {} - for dataset in datasets: - if isinstance(dataset, ds.Dataset): - output_datasets[dataset.header.file.data_type] = dataset - else: - output_datasets.update(dataset.as_dict()) - - properties_file = output_datasets.pop(property_file_type) - return LinkedCollection( - properties_file.header, properties_file, output_datasets, links - ) - - def get_collection_type(file: h5py.File) -> type[Collection]: """ Determine the type of a single file containing multiple datasets. 
Currently diff --git a/opencosmo/collection/link.py b/opencosmo/collection/link.py deleted file mode 100644 index d4e6c1a1..00000000 --- a/opencosmo/collection/link.py +++ /dev/null @@ -1,426 +0,0 @@ -""" -Some types of data contain links to other data. In particular, property files -contain links to their particle files. HaloProperty files contain links to -halo particles, which include AGN, Dark Matter, Gas, and Star particles. -GalaxyProperties contain a link to the associated star particles in the -GalaxyParticles file. A link is simply a combination of a starting index -and a length, which maps a single row in a property file to a range of rows -in a particle file. - -As such, efficient querying of linking can only be done between specific types -of data -""" - -from __future__ import annotations - -from collections import defaultdict -from typing import Iterable, Optional, TypedDict - -import numpy as np -from h5py import File, Group - -import opencosmo as oc -from opencosmo.dataset.mask import Mask -from opencosmo.header import OpenCosmoHeader, read_header - -LINK_ALIASES = { # Name maps - "star_particles": "sodbighaloparticles_star_particles", - "dm_particles": "sodbighaloparticles_dm_particles", - "agn_particles": "sodbighaloparticles_agn_particles", - "gas_particles": "sodbighaloparticles_gas_particles", - "halo_profiles": "sod_profile_idx", - "galaxy_properties": "galaxyproperties", -} - -ALLOWED_LINKS = { # Files that can serve as a link holder and - "halo_properties": ["halo_particles", "halo_profiles"], - "galaxy_properties": ["galaxy_particles"], -} - - -class LinkedCollection(dict): - """ - A collection of datasets that are linked together, allowing - for cross-matching and other operations to be performed. - - For now, these are always a combination of a properties dataset - and several particle or profile datasets. 
- """ - - def __init__( - self, - header: OpenCosmoHeader, - properties: oc.Dataset, - datasets: dict, - links: GalaxyPropertyLink | HaloPropertyLink, - *args, - **kwargs, - ): - """ - Initialize a linked collection with the provided datasets and links. - """ - - self.__header = header - self.__properties = properties - self.__datasets = datasets - self[properties.header.file.data_type] = properties - self.__linked = links - self.__idxs = self.__properties.indices - self.update(self.__datasets) - - def as_dict(self) -> dict[str, oc.Dataset]: - return self - - def __enter__(self): - return self - - def __exit__(self, *args): - for dataset in self.values(): - try: - dataset.__exit__(*args) - except AttributeError: - continue - - @property - def header(self): - return self.__header - - @classmethod - def read(cls, file: File, names: Optional[Iterable[str]] = None): - """ - Read a collection of linked datasets from an HDF5 file. - """ - header = read_header(file) - properties = oc.read(file, header.file.data_type) - links = get_links(file[header.file.data_type]) - if names is None: - names = set(file.keys()) - {header.file.data_type, "header"} - - datasets = {name: oc.read(file, name) for name in names} - output_datasets = {} - for name, ds in datasets.items(): - if name in LINK_ALIASES.values(): - key = next(k for k, v in LINK_ALIASES.items() if v == name) - output_datasets[key] = ds - else: - output_datasets[name] = ds - - return cls(header, properties, output_datasets, links) - - @classmethod - def open(cls, file: File, names: Optional[Iterable[str]] = None): - """ - Open a collection of linked datasets from an HDF5 file. 
- """ - header = read_header(file) - properties = oc.open(file, header.file.data_type) - if not isinstance(properties, oc.Dataset): - raise ValueError( - "Expected a single dataset for the properties file, but found a collection of them" - ) - links = get_links(file) - if names is None: - names = set(file.keys()) - {header.file.data_type, "header"} - - datasets = {name: oc.open(file, name) for name in names} - output_datasets = {} - for name, ds in datasets.items(): - if name in LINK_ALIASES.values(): - key = next(k for k, v in LINK_ALIASES.items() if v == name) - output_datasets[key] = ds - else: - output_datasets[name] = ds - return cls(header, properties, output_datasets, links) - - def write(self, file: File): - """ - Write the collection to an HDF5 file. - """ - self.__header.write(file) - idxs = self.__properties.indices - for key, dataset in self.items(): - alias = LINK_ALIASES.get(key, key) - if dataset is self.__properties: - continue - try: - starts = self.__linked[key]["start_index"][idxs] # type: ignore - sizes = self.__linked[key]["length"][idxs] # type: ignore - indices = np.concatenate( - [ - np.arange(start, start + size) - for start, size in zip(starts, sizes) - ] - ) - dataset.write(file, alias, _indices=indices) - except IndexError: - indices = self.__linked[key][idxs] # type: ignore - dataset.write(file, alias, _indices=indices) - - property_dataset = self.__properties.header.file.data_type - self.__properties.write(file, property_dataset, property_dataset) - write_links(file[property_dataset], self.__linked, self.__properties.indices) - - def __get_linked(self, dtype: str, index: int): - if dtype not in self.__linked: - raise ValueError(f"No links found for {dtype}") - elif index >= len(self.__properties): - raise ValueError(f"Index {index} out of range for {dtype}") - # find the index into the linked dataset at the mask index - linked_index = self.__idxs[index] - try: - start = self.__linked[dtype]["start_index"][linked_index] # type: ignore - 
size = self.__linked[dtype]["length"][linked_index] # type: ignore - except IndexError: - start = self.__linked[dtype][linked_index] # type: ignore - size = 1 - - if start == -1 or size == -1: - return None - - return self.__datasets[dtype].take_range(start, start + size) - - def objects(self, dtypes: Optional[str | list[str]] = None): - """ - Iterate over the objects in the collection, returning the properties - of the object as well as its particles and/or profiles. The specific - datatypes you want to return can be specified with the `dtypes` argument. - If `dtypes` is not provided, all linked datasets will be returned. - - The objects are returned as a tuple of the properties and a dictionary - of the linked datasets. If only one datatype is requested, the second - element of the tuple will simply be the linked dataset. For example - If we have a collection of halo properties linked to halo particles and - star particles: - - .. code-block:: python - - for properties, particles in collection.objects(): - properties # dict of properties for the given halo - particles # dict containing one halo particle dataset - # and one star particle dataset for this halo - - Parameters - ---------- - dtypes : str or list of str, optional - The data types to return. If not provided, all datasets will be returned. 
- - Returns - ------- - - tuple of (OpenCosmoHeader, dict) or (OpenCosmoHeader, oc.Dataset) - The properties of the object and the linked datasets - - """ - if dtypes is None: - dtypes = list(k for k in self.__linked.keys() if k in self.__datasets) - elif isinstance(dtypes, str): - dtypes = [dtypes] - if not all(dtype in self.__linked for dtype in dtypes): - raise ValueError("One or more of the provided data types is not linked") - - ndtypes = len(dtypes) - for i, properties in enumerate(self.__properties.rows()): - results = {dtype: self.__get_linked(dtype, i) for dtype in dtypes} - if all(result is None for result in results.values()): - continue - if ndtypes == 1: - yield properties, results[dtypes[0]] - else: - yield properties, results - - def filter(self, *masks: Mask): - """ - Filtering a linked collection always operates on the properties dataset. For - example, a collection of halos can be filtered by the standard halo properties, - such as fof_halo_mass. The filteriing works identically to - :meth:`oc.Dataset.filter`. - - Parameters - ---------- - masks : Mask - The masks to apply to the properties dataset. - - Returns - ------- - LinkedCollection - A new collection with the filtered properties dataset. - """ - new_properties = self.__properties.filter(*masks) - return LinkedCollection( - self.header, new_properties, self.__datasets, self.__linked - ) - - def take(self, n: int, at: str = "start"): - """ - Take some number of objects from the collection. This method operates - identically to :meth:`oc.Dataset.take`. - """ - new_properties = self.__properties.take(n, at) - return LinkedCollection( - self.header, new_properties, self.__datasets, self.__linked - ) - - def with_units(self, convention: str) -> LinkedCollection: - """ - Convert the units of the collection to a given convention. 
This method - operates identically to :meth:`oc.Dataset.with_units - """ - new_properties = self.__properties.with_units(convention) - new_datasets = {k: v.with_units(convention) for k, v in self.__datasets.items()} - return LinkedCollection( - self.header, new_properties, new_datasets, self.__linked - ) - - -def verify_links(*headers: OpenCosmoHeader) -> tuple[str, list[str]]: - """ - Verify that the links in the headers are valid. This means that the - link holder has a corresponding link target and that the link target - is of the correct type. It also verifies that the linked files are from - the same simulation. Returns a dictionary where the keys are the - link holder files and the values are lists of the corresponding link. - - Raises an error if the links are not valid, otherwise returns the links. - """ - - data_types = [header.file.data_type for header in headers] - if len(set(data_types)) != len(data_types): - raise ValueError("Data types in files must be unique to link correctly") - - master_files = [dt for dt in data_types if dt in ALLOWED_LINKS] - if not master_files: - raise ValueError("No valid link holder files found in headers") - - dtypes_to_headers = {header.file.data_type: header for header in headers} - - links = defaultdict(list) # {file: [link_header, ...]} - for file in master_files: - for link in ALLOWED_LINKS[file]: - try: - link_header = dtypes_to_headers[link] - # Check that the headers come from the same simulation - if link_header.simulation != dtypes_to_headers[file].simulation: - raise ValueError(f"Simulation mismatch between {file} and {link}") - links[file].append(link) - except KeyError: - continue # No link header found for this file - - # Master files also need to have the same simulation - if len(master_files) > 1: - raise ValueError("Data linking can only have one master file") - for file in master_files: - if ( - dtypes_to_headers[file].simulation - != dtypes_to_headers[master_files[0]].simulation - ): - raise ValueError( - 
f"Simulation mismatch between {file} and {master_files[0]}" - ) - output_file = master_files[0] - return output_file, links[output_file] - - -def get_links(file: File | Group) -> GalaxyPropertyLink | HaloPropertyLink: - if "data_linked" not in file.keys(): - raise ValueError(f"No links found in {file.name}") - keys = file["data_linked"].keys() - # Remove everything after the last underscore to get unique link names - unique_keys = {key.rsplit("_", 1)[0] for key in keys} - if any(k.startswith("sod") for k in unique_keys): - # we're dealing with a halo property file - return read_halo_property_links(file) - else: - # we're dealing with a galaxy property file - size = file["data_linked"]["galaxyparticles_star_particles_size"][()] - start = file["data_linked"]["galaxyparticles_star_particles_start"][()] - return {"galaxy_particles": {"start_index": start, "length": size}} - - -def read_halo_property_links(file: File | Group) -> HaloPropertyLink: - """ - Read the links from a halo property file. The links are stored in the - "data_linked" group of the file. Each link is a combination of a - starting index and a length, which maps a single row in the property - file to a range of rows in the particle file. 
- """ - - # Read the links for dark matter, AGN, gas, and star particles - return { - "dm_particles": { - "start_index": file["data_linked"][ - "sodbighaloparticles_dm_particles_start" - ][()], - "length": file["data_linked"]["sodbighaloparticles_dm_particles_size"][()], - }, - "agn_particles": { - "start_index": file["data_linked"][ - "sodbighaloparticles_agn_particles_start" - ][()], - "length": file["data_linked"]["sodbighaloparticles_agn_particles_size"][()], - }, - "gas_particles": { - "start_index": file["data_linked"][ - "sodbighaloparticles_gas_particles_start" - ][()], - "length": file["data_linked"]["sodbighaloparticles_gas_particles_size"][()], - }, - "star_particles": { - "start_index": file["data_linked"][ - "sodbighaloparticles_star_particles_start" - ][()], - "length": file["data_linked"]["sodbighaloparticles_star_particles_size"][ - () - ], - }, - "galaxy_properties": { - "start_index": file["data_linked"]["galaxyproperties_start"][()], - "length": file["data_linked"]["galaxyproperties_size"][()], - }, - "halo_profiles": file["data_linked"]["sod_profile_idx"][()], - } - - -def pack_links( - data_link: DataLink | np.ndarray, indices: np.ndarray -) -> DataLink | np.ndarray: - if isinstance(data_link, np.ndarray): - return np.arange(len(indices)) - elif isinstance(data_link, dict): - lengths = data_link["length"][indices] - new_starts = np.cumsum(lengths) - new_starts = np.insert(new_starts, 0, 0)[:-1] - return {"start_index": new_starts, "length": lengths} - - -def write_links( - file: File | Group, - links: GalaxyPropertyLink | HaloPropertyLink, - indices: np.ndarray, -): - group = file.require_group("data_linked") - for key, value in links.items(): - link_to_write = pack_links(value, indices) # type: ignore - alias = LINK_ALIASES.get(key, key) - if key == "halo_profiles": - group.create_dataset(alias, data=link_to_write) - else: - group.create_dataset(f"{alias}_start", data=link_to_write["start_index"]) - group.create_dataset(f"{alias}_size", 
data=link_to_write["length"]) - - -class DataLink(TypedDict): - start_index: np.ndarray # The starting index of the link in the particle file - length: np.ndarray # The - - -class HaloPropertyLink(TypedDict): - dm_particles: DataLink - agn_particles: DataLink - gas_particles: DataLink - star_particles: DataLink - galaxy_properties: DataLink - halo_profiles: np.ndarray - - -class GalaxyPropertyLink(TypedDict): - galaxy_particles: DataLink diff --git a/opencosmo/dataset/dataset.py b/opencosmo/dataset/dataset.py index 953af668..c3b65919 100644 --- a/opencosmo/dataset/dataset.py +++ b/opencosmo/dataset/dataset.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Generator, Optional +from typing import Generator, Iterable, Optional import h5py import numpy as np @@ -104,15 +104,10 @@ def write( if with_header: write_header(file, self.__header, dataset_name) - if _indices is not None: - _indices.sort() - if _indices[0] < 0 or _indices[-1] >= len(self): - raise ValueError("Indices out of bounds") - idxs = self.__indices[_indices] - else: - idxs = self.__indices + if _indices is None: + _indices = self.__indices - self.__handler.write(file, idxs, self.__builders.keys(), dataset_name) + self.__handler.write(file, _indices, self.__builders.keys(), dataset_name) def rows(self) -> Generator[dict[str, float | units.Quantity]]: """ @@ -127,11 +122,15 @@ def rows(self) -> Generator[dict[str, float | units.Quantity]]: """ max = len(self) chunk_ranges = [(i, min(i + 1000, max)) for i in range(0, max, 1000)] + if len(chunk_ranges) == 0: + chunk_ranges = [(0, 0)] for start, end in chunk_ranges: - chunk = self.take_range(start, end).data + chunk = self.take_range(start, end) + + chunk_data = chunk.data columns = { - k: chunk[k].quantity if chunk[k].unit else chunk[k] - for k in chunk.keys() + k: chunk_data[k].quantity if chunk_data[k].unit else chunk_data[k] + for k in chunk_data.keys() } for i in range(len(chunk)): yield {k: v[i] for k, v in columns.items()} @@ 
-165,14 +164,17 @@ def take_range(self, start: int, end: int) -> Table: if end > len(self): raise ValueError("end must be less than the length of the dataset.") - new_indicies = self.__indices[start:end] + if start < 0 or end > len(self): + raise ValueError("start and end must be within the bounds of the dataset.") + + new_indices = self.__indices[start:end] return Dataset( self.__handler, self.__header, self.__builders, self.__base_unit_transformations, - new_indicies, + new_indices, ) def filter(self, *masks: Mask) -> Dataset: @@ -212,7 +214,7 @@ def filter(self, *masks: Mask) -> Dataset: new_indices, ) - def select(self, columns: str | list[str]) -> Dataset: + def select(self, columns: str | Iterable[str]) -> Dataset: """ Select a subset of columns from the dataset. @@ -344,11 +346,22 @@ def take(self, n: int, at: str = "start") -> Dataset: """ - if at not in ["start", "end", "random"]: + if n < 0 or n > len(self): + raise ValueError( + "Invalid value for 'n', must be between 0 and the length of the dataset" + ) + if at == "start": + new_indices = self.__indices[:n] + elif at == "end": + new_indices = self.__indices[-n:] + elif at == "random": + new_indices = np.random.choice(self.__indices, n, replace=False) + new_indices.sort() + + else: raise ValueError( "Invalid value for 'at'. Must be one of 'start', 'end', or 'random'." ) - new_indices = self.__handler.take_indices(n, at, self.__indices) return Dataset( self.__handler, diff --git a/opencosmo/file.py b/opencosmo/file.py index 3e37364b..c5a954bc 100644 --- a/opencosmo/file.py +++ b/opencosmo/file.py @@ -15,6 +15,10 @@ FileReader = Callable[Concatenate[h5py.File | h5py.Group, ...], Any] FileWriter = Callable[Concatenate[h5py.File | h5py.Group, ...], None] +""" +A collection of utilities for working with hdf5 files. 
+""" + class FileExistance(Enum): MUST_EXIST = "must_exist" @@ -51,7 +55,6 @@ def wrapper(file: h5py.File | Path | str, *args, **kwargs): if not isinstance(file, h5py.File): path = resolve_path(file, FileExistance.MUST_NOT_EXIST) if MPI is not None and MPI.COMM_WORLD.Get_size() > 1: - MPI.COMM_WORLD.barrier() with h5py.File(path, "w", driver="mpio", comm=MPI.COMM_WORLD) as f: return func(f, *args, **kwargs) @@ -92,7 +95,6 @@ def wrapper(file: h5py.File | Path | str, *args, **kwargs): return wrapper -@broadcast_read def get_data_structure(group: h5py.Group) -> dict[str, Any]: units = {name: group[name].attrs.get("unit", "") for name in group.keys()} return units diff --git a/opencosmo/handler/handler.py b/opencosmo/handler/handler.py index eb47e213..47bc148b 100644 --- a/opencosmo/handler/handler.py +++ b/opencosmo/handler/handler.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import Iterable, Protocol +from typing import Iterable, Optional, Protocol import h5py import numpy as np @@ -45,7 +45,7 @@ def write( file: h5py.File, indices: np.ndarray, columns: Iterable[str], - dataset_name="data", + dataset_name: Optional[str] = None, ) -> None: ... 
def get_data( self, diff --git a/opencosmo/handler/im.py b/opencosmo/handler/im.py index e93367e8..d0c80ec0 100644 --- a/opencosmo/handler/im.py +++ b/opencosmo/handler/im.py @@ -74,7 +74,6 @@ def write( group = file else: group = file.require_group(dataset_name) - data_group = group.require_group("data") for column in columns: data_group.create_dataset(column, data=self.__data[column][indices]) diff --git a/opencosmo/handler/mpi.py b/opencosmo/handler/mpi.py index 90ddeb15..d6bef5fd 100644 --- a/opencosmo/handler/mpi.py +++ b/opencosmo/handler/mpi.py @@ -53,6 +53,7 @@ def __init__( tree: Tree, group_name: Optional[str] = None, comm=MPI.COMM_WORLD, + rank_range: Optional[Tuple[int, int]] = None, ): self.__file = file self.__group_name = group_name @@ -63,11 +64,14 @@ def __init__( self.__columns = get_data_structure(self.__group) self.__comm = comm self.__tree = tree + self.__elem_range = rank_range def elem_range(self) -> Tuple[int, int]: """ The full dataset will be split into equal parts by rank. """ + if self.__elem_range is not None: + return self.__elem_range nranks = self.__comm.Get_size() rank = self.__comm.Get_rank() n = self.__group[next(iter(self.__columns))].shape[0] @@ -113,6 +117,7 @@ def write( indices: np.ndarray, columns: Iterable[str], dataset_name: Optional[str] = None, + selected: Optional[np.ndarray] = None, ) -> None: columns = list(columns) input = verify_input( @@ -125,6 +130,8 @@ def write( columns = input["columns"] rank_range = self.elem_range() + # indices = redistribute_indices(indices, rank_range) + rank_output_length = len(indices) all_output_lengths = self.__comm.allgather(rank_output_length) @@ -146,20 +153,28 @@ def write( else: group = file.require_group(dataset_name) data_group = group.create_group("data") + for column in columns: # This step has to be done by all ranks, per documentation - data_group.create_dataset( - column, (full_output_length,), dtype=self.__group[column].dtype - ) + shape: Tuple[int, ...] 
+ if len(self.__group[column].shape) != 1: + shape = (full_output_length, self.__group[column].shape[1]) + else: + shape = (full_output_length,) + + data_group.create_dataset(column, shape, dtype=self.__group[column].dtype) if self.__columns[column] is not None: data_group[column].attrs["unit"] = self.__columns[column] self.__comm.Barrier() - for column in columns: - data = self.__group[column][rank_range[0] : rank_range[1]][()] - data = data[indices] - data_group[column][rank_start:rank_end] = data + if rank_output_length != 0: + for column in columns: + data = self.__group[column][rank_range[0] : rank_range[1]][()] + data = data[indices] + + data_group[column][rank_start:rank_end] = data + mask = np.zeros(len(self), dtype=bool) mask[indices] = True @@ -178,36 +193,40 @@ def get_data( Get data from the file in the range for this rank. """ builder_keys = list(builders.keys()) - builder_keys = verify_input(comm=self.__comm, builder_keys=builder_keys)[ - "builder_keys" - ] - if self.__group is None: raise ValueError("This file has already been closed") + + if len(indices) == 0: + columns = {key: Column() for key in builder_keys} + return Table(columns) output = {} range_ = self.elem_range() for column in builder_keys: builder = builders[column] - data = self.__group[column][range_[0] : range_[1]] - col = Column(data[indices], name=column) - output[column] = builder.build(col) - self.__comm.Barrier() - + if len(indices) > 0: + data = self.__group[column][range_[0] : range_[1]] + col = Column(data[indices], name=column) + output[column] = builder.build(col) + else: + col = Column() + output[column] = col if len(output) == 1: return next(iter(output.values())) return Table(output) + def take_range(self, start: int, end: int, indices: np.ndarray) -> np.ndarray: + if start < 0 or end > len(indices): + raise ValueError("Requested range is not within the rank's range.") + + return indices[start:end] + def take_indices(self, n: int, strategy: str, indices: np.ndarray) -> 
np.ndarray: """ - This is the tricky one. We need to update the mask based on the amount of - data in ALL the ranks. - masks are localized to each rank. For "start" and "end" it's just a matter of figuring out how many elements each rank is responsible for. For "random" we need to be more clever. """ - n = verify_input(comm=self.__comm, n=n)["n"] rank_length = len(indices) rank_lengths = self.__comm.allgather(rank_length) @@ -247,9 +266,10 @@ def take_indices(self, n: int, strategy: str, indices: np.ndarray) -> np.ndarray ] if len(rank_indicies) == 0: # This rank doesn't have enough data - raise ValueError( + warn( "This take operation will return no data for rank " f"{self.__comm.Get_rank()}" ) + return np.array([], dtype=int) return rank_indicies - rank_start_index diff --git a/opencosmo/handler/oom.py b/opencosmo/handler/oom.py index 16bc1b7b..5e9680f7 100644 --- a/opencosmo/handler/oom.py +++ b/opencosmo/handler/oom.py @@ -64,6 +64,7 @@ def write( indices: np.ndarray, columns: Iterable[str], dataset_name: Optional[str] = None, + selected: Optional[np.ndarray] = None, ) -> None: if self.__group is None: raise ValueError("This file has already been closed") @@ -72,15 +73,23 @@ def write( else: group = file.require_group(dataset_name) + if selected is not None: + selected.sort() + if selected[-1] >= len(indices): + raise ValueError("Selected indices are out of range") + idxs = indices[selected] + else: + idxs = indices + data_group = group.create_group("data") for column in columns: data = self.__group[column][()] - data = data[indices] + data = data[idxs] data_group.create_dataset(column, data=data) if self.__columns[column] is not None: data_group[column].attrs["unit"] = self.__columns[column] tree_mask = np.zeros(len(self), dtype=bool) - tree_mask[indices] = True + tree_mask[idxs] = True tree = self.__tree.apply_mask(tree_mask) tree.write(group) @@ -122,6 +131,11 @@ def get_range( return Table(output) + def take_range(self, start: int, end: int, indices: 
np.ndarray) -> np.ndarray: + if start < 0 or end > len(indices): + raise ValueError("Indices out of range") + return indices[start:end] + def take_indices(self, n: int, strategy: str, indices: np.ndarray) -> np.ndarray: if n > (length := len(indices)): raise ValueError( diff --git a/opencosmo/header.py b/opencosmo/header.py index ceef2f58..f2a841ad 100644 --- a/opencosmo/header.py +++ b/opencosmo/header.py @@ -14,6 +14,14 @@ class OpenCosmoHeader: + """ + A class to represent the header of an OpenCosmo file. The header contains + information about the simulation the data is a part of, as well as other + meatadata that are useful to the library in various contexts. Most files + will have a single unique header, but it is possible to have multiple + headers in a SimulationCollection. + """ + def __init__( self, file_pars: parameters.FileParameters, diff --git a/opencosmo/io.py b/opencosmo/io.py index abeaafbd..6f1358ae 100644 --- a/opencosmo/io.py +++ b/opencosmo/io.py @@ -24,10 +24,11 @@ def open( - file: str | Path, datasets: Optional[str | Iterable[str]] = None + file: str | Path | h5py.File, + datasets: Optional[str | Iterable[str]] = None, ) -> oc.Dataset | collection.Collection: """ - Open a dataset from a file without reading the data into memory. + Open a dataset or data collection from a file without reading the data into memory. The object returned by this function will only read data from the file when it is actually needed. This is useful if the file is very large @@ -56,9 +57,22 @@ def open( ---------- file : str or pathlib.Path The path to the file to open. + datasets : str or list[str], optional + If the file has multiple datasets, the name of the dataset(s) to open. + All other datasets will be ignored. If not provided, will open all + datasets + + Returns + ------- + dataset : oc.Dataset or oc.Collection + The dataset or collection opened from the file. 
+ """ - path = resolve_path(file, FileExistance.MUST_EXIST) - file_handle = h5py.File(path, "r") + if not isinstance(file, h5py.File) and not isinstance(file, h5py.Group): + path = resolve_path(file, FileExistance.MUST_EXIST) + file_handle = h5py.File(path, "r") + else: + file_handle = file if "data" not in file_handle: if not isinstance(datasets, str): return collection.open_multi_dataset_file(file_handle, datasets) @@ -85,8 +99,8 @@ def open( builders, base_unit_transformations = u.get_default_unit_transformations( group, header ) - mask = np.arange(len(handler)) + mask = np.arange(len(handler)) dataset = oc.Dataset(handler, header, builders, base_unit_transformations, mask) return dataset @@ -106,10 +120,15 @@ def read( ---------- file : str or pathlib.Path The path to the file to read. + datasets : str or list[str], optional + If the file has multiple datasets, the name of the dataset(s) to read. + All other datasets will be ignored. If not provided, will read all + datasets + Returns ------- - dataset : oc.Dataset - The dataset read from the file. + dataset : oc.Dataset or oc.Collection + The dataset or collection read from the file. 
""" diff --git a/opencosmo/link/__init__.py b/opencosmo/link/__init__.py new file mode 100644 index 00000000..7ebdd119 --- /dev/null +++ b/opencosmo/link/__init__.py @@ -0,0 +1,11 @@ +from .collection import LinkedCollection +from .handler import LinkHandler, OomLinkHandler +from .io import open_linked_file, open_linked_files + +__all__ = [ + "LinkedCollection", + "LinkHandler", + "OomLinkHandler", + "open_linked_files", + "open_linked_file", +] diff --git a/opencosmo/link/collection.py b/opencosmo/link/collection.py new file mode 100644 index 00000000..983ecbc4 --- /dev/null +++ b/opencosmo/link/collection.py @@ -0,0 +1,163 @@ +from __future__ import annotations + +from typing import Any, Iterable, Optional + +import numpy as np +from h5py import File + +import opencosmo as oc +from opencosmo import link as l + + +class LinkedCollection: + """ + A collection of datasets that are linked together, allowing + for cross-matching and other operations to be performed. + + For now, these are always a combination of a properties dataset + and several particle or profile datasets. + """ + + def __init__( + self, + properties: oc.Dataset, + handlers: dict[str, l.LinkHandler], + *args, + **kwargs, + ): + """ + Initialize a linked collection with the provided datasets and links. + """ + + self.__properties = properties + self.__handlers = handlers + self.__idxs = self.__properties.indices + + @classmethod + def open( + cls, file: File, datasets_to_get: Optional[Iterable[str]] = None + ) -> LinkedCollection: + return l.open_linked_file(file, datasets_to_get) + + @classmethod + def read(cls, *args, **kwargs) -> LinkedCollection: + raise NotImplementedError + + @property + def properties(self) -> oc.Dataset: + """ + Return the properties dataset. + """ + return self.__properties + + def keys(self) -> list[str]: + """ + Return the keys of the linked datasets. 
+ """ + return list(self.__handlers.keys()) + [self.__properties.header.file.data_type] + + def values(self) -> list[oc.Dataset]: + """ + Return the linked datasets. + """ + return [self.__properties] + [ + handler.get_all_data() for handler in self.__handlers.values() + ] + + def items(self) -> list[tuple[str, oc.Dataset]]: + """ + Return the linked datasets as key-value pairs. + """ + return [ + (key, handler.get_all_data()) for key, handler in self.__handlers.items() + ] + + def __getitem__(self, key: str) -> oc.Dataset: + """ + Return the linked dataset with the given key. + """ + if key == self.__properties.header.file.data_type: + return self.__properties + elif key not in self.__handlers: + raise KeyError(f"Dataset {key} not found in collection.") + return self.__handlers[key].get_all_data() + + def __enter__(self): + return self + + def __exit__(self, *args): + for dataset in self.values(): + try: + dataset.__exit__(*args) + except AttributeError: + continue + + def select(self, dataset: str, columns: str | list[str]) -> LinkedCollection: + """ + Update the linked collection to only include the columns specified + in the given dataset. 
+ """ + if dataset == self.__properties.header.file.data_type: + new_properties = self.__properties.select(columns) + return LinkedCollection( + new_properties, + self.__handlers, + ) + + elif dataset not in self.__handlers: + raise ValueError(f"Dataset {dataset} not found in collection.") + handler = self.__handlers[dataset] + new_handler = handler.select(columns) + return LinkedCollection( + self.__properties, {**self.__handlers, dataset: new_handler} + ) + + def filter(self, *masks): + """ + Apply a filter to the properties dataset and propagate it to the linked datasets + """ + if not masks: + return self + filtered = self.__properties.filter(*masks) + return LinkedCollection( + filtered, + self.__handlers, + ) + + def take(self, n: int, at: str = "start"): + new_properties = self.__properties.take(n, at) + return LinkedCollection( + new_properties, + self.__handlers, + ) + + def objects( + self, data_types: Optional[Iterable[str]] = None + ) -> Iterable[tuple[dict[str, Any], dict[str, Optional[oc.Dataset]]]]: + """ + Iterate over the properties dataset and the linked datasets. 
+ """ + if data_types is None: + handlers = self.__handlers + elif not all(dt in self.__handlers for dt in data_types): + raise ValueError("Some data types are not linked in the collection.") + else: + handlers = {dt: self.__handlers[dt] for dt in data_types} + + for i, row in enumerate(self.__properties.rows()): + index = np.array(self.__properties.indices[i]) + output = {key: handler.get_data(index) for key, handler in handlers.items()} + if not any(output.values()): + continue + yield row, output + + def write(self, file: File): + header = self.__properties.header + header.write(file) + self.__properties.write(file, header.file.data_type) + link_group = file[header.file.data_type].create_group("data_linked") + keys = list(self.__handlers.keys()) + keys.sort() + for key in keys: + handler = self.__handlers[key] + handler.write(file, link_group, key, self.__idxs) diff --git a/opencosmo/link/handler.py b/opencosmo/link/handler.py new file mode 100644 index 00000000..a2ceae7d --- /dev/null +++ b/opencosmo/link/handler.py @@ -0,0 +1,161 @@ +from __future__ import annotations + +from typing import Optional, Protocol + +import numpy as np +from h5py import File, Group + +import opencosmo as oc +from opencosmo.handler import OutOfMemoryHandler +from opencosmo.header import OpenCosmoHeader +from opencosmo.spatial import read_tree +from opencosmo.transformations import units as u + + +def build_dataset( + file: File | Group, header: OpenCosmoHeader, indices: Optional[np.ndarray] = None +) -> oc.Dataset: + tree = read_tree(file, header) + builders, base_unit_transformations = u.get_default_unit_transformations( + file, header + ) + handler = OutOfMemoryHandler(file, tree=tree) + if indices is None: + indices = np.arange(len(handler)) + return oc.Dataset(handler, header, builders, base_unit_transformations, indices) + + +class LinkHandler(Protocol): + """ + A LinkHandler is responsible for handling linked datasets. 
Links are found + in property files, and contain indexes into another dataset. For example, a + halo properties file will contain links to a halo particles file. Each halo + in the properties file will have a corresponding range of indexes that contain + the associated particles in the particles file. + + The link handler is responsible for reading data and instantiating datasets + that contain the linked data for the given object. There will be one link + handler for each linked dataset in the properties file. This potentially + means there will be multiple pointers to a single particle file, for example. + """ + + def __init__( + self, + file: File | Group, + links: Group | tuple[Group, Group], + header: OpenCosmoHeader, + *args, + **kwargs, + ): ... + def get_data(self, indices: int | np.ndarray) -> Optional[oc.Dataset]: + """ + Given an index or a set of indices, return the data from the linked dataset + that corresponds to the halo/galaxy at that index in the properties file. + Sometimes the linked dataset will not have data for that object, in which + case None should be returned. + """ + pass + + def get_all_data(self) -> oc.Dataset: + """ + Return all the data from the linked dataset. + """ + pass + + def write( + self, data_group: Group, link_group: Group, name: str, indices: int | np.ndarray + ) -> None: + """ + Write the linked data for the given indices to data_group. + This function will then update the links to be consistent with the newly + written data, and write the updated links to link_group. + """ + pass + + def select(self, columns: str | list[str]) -> LinkHandler: + """ + Return a new LinkHandler that only contains the data for the given columns. + """ + pass + + +class OomLinkHandler: + """ + Links are currently only supported out-of-memory. 
+ """ + + def __init__( + self, + file: File | Group, + link: Group | tuple[Group, Group], + header: OpenCosmoHeader, + ): + self.file = file + self.link = link + self.header = header + self.selected: Optional[set[str]] = None + + def get_all_data(self) -> oc.Dataset: + return build_dataset(self.file, self.header) + + def get_data(self, indices: int | np.ndarray) -> Optional[oc.Dataset]: + if isinstance(indices, int): + indices = np.array([indices], dtype=int) + min_idx = np.min(indices) + max_idx = np.max(indices) + + if isinstance(self.link, tuple): + start = self.link[0][min_idx : max_idx + 1][indices - min_idx] + size = self.link[1][min_idx : max_idx + 1][indices - min_idx] + valid_rows = size > 0 + start = start[valid_rows] + size = size[valid_rows] + if not start.size: + return None + indices_into_data = np.concatenate( + [np.arange(idx, idx + length) for idx, length in zip(start, size)] + ) + else: + indices_into_data = self.link[min_idx : max_idx + 1][indices - min_idx] + indices_into_data = np.array(indices_into_data[indices_into_data >= 0]) + if not indices_into_data.size: + return None + + dataset = build_dataset(self.file, self.header, indices_into_data) + if self.selected is not None: + dataset = dataset.select(self.selected) + return dataset + + def select(self, columns: str | list[str]) -> OomLinkHandler: + if self.selected is not None: + new_selected = set(columns) + if not new_selected.issubset(self.selected): + raise ValueError("Tried to select columns that are not in the dataset.") + else: + new_selected = set(columns) + + self.selected = new_selected + return self + + def write( + self, group: Group, link_group: Group, name: str, indices: int | np.ndarray + ): + if isinstance(indices, int): + indices = np.array([indices]) + # Pack the indices + if not isinstance(self.link, tuple): + new_idxs = np.full(len(indices), -1) + current_values = self.link[indices[0] : indices[-1] + 1] + current_values = current_values[indices - indices[0]] + has_data = 
current_values >= 0 + new_idxs[has_data] = np.arange(sum(has_data)) + link_group.create_dataset("sod_profile_idx", data=new_idxs, dtype=int) + else: + lengths = self.link[1][indices] + new_starts = np.insert(np.cumsum(lengths), 0, 0)[:-1] + link_group.create_dataset(f"{name}_start", data=new_starts, dtype=int) + link_group.create_dataset(f"{name}_size", data=lengths, dtype=int) + + dataset = self.get_data(indices) + if dataset is not None: + dataset.write(group, name) diff --git a/opencosmo/link/io.py b/opencosmo/link/io.py new file mode 100644 index 00000000..06daf4f9 --- /dev/null +++ b/opencosmo/link/io.py @@ -0,0 +1,205 @@ +from collections import defaultdict +from pathlib import Path +from typing import Iterable, Optional, Type + +from h5py import File, Group + +import opencosmo as oc +from opencosmo import link as l +from opencosmo.header import OpenCosmoHeader, read_header + +try: + from mpi4py import MPI + + from opencosmo.link.mpi import MpiLinkHandler +except ImportError: + MPI = None # type: ignore + +LINK_ALIASES = { # Left: Name in file, right: Name in collection + "sodbighaloparticles_star_particles": "star_particles", + "sodbighaloparticles_dm_particles": "dm_particles", + "sodbighaloparticles_gravity_particles": "dm_particles", + "sodbighaloparticles_agn_particles": "agn_particles", + "sodbighaloparticles_gas_particles": "gas_particles", + "sod_profile": "halo_profiles", + "galaxyproperties": "galaxy_properties", + "galaxyparticles_star_particles": "star_particles", +} + +ALLOWED_LINKS = { # Files that can serve as a link holder and + "halo_properties": ["halo_particles", "halo_profiles", "galaxy_properties"], + "galaxy_properties": ["galaxy_particles"], +} + + +def verify_links(*headers: OpenCosmoHeader) -> tuple[str, list[str]]: + """ + Verify that the links in the headers are valid. This means that the + link holder has a corresponding link target and that the link target + is of the correct type. 
It also verifies that the linked files are from + the same simulation. Returns the data type of the link holder + file and a list of the data types it links to. + + Raises an error if the links are not valid, otherwise returns the links. + """ + + data_types = [header.file.data_type for header in headers] + if len(set(data_types)) != len(data_types): + raise ValueError("Data types in files must be unique to link correctly") + + properties_files = [dt for dt in data_types if dt in ALLOWED_LINKS] + if not properties_files: + raise ValueError("No valid link holder files found in headers") + + dtypes_to_headers = {header.file.data_type: header for header in headers} + + links = defaultdict(list) # {file: [link_header, ...]} + for file in properties_files: + for link in ALLOWED_LINKS[file]: + try: + link_header = dtypes_to_headers[link] + # Check that the headers come from the same simulation + if link_header.simulation != dtypes_to_headers[file].simulation: + raise ValueError(f"Simulation mismatch between {file} and {link}") + links[file].append(link) + except KeyError: + continue # No link header found for this file + + has_links = [file in links for file in properties_files] + # Properties files also need to have the same simulation + if len(properties_files) > 1: + # need exactly one true (for now) + if sum(has_links) != 1: + raise NotImplementedError("Chained links are not yet supported") + for file in properties_files: + if ( + dtypes_to_headers[file].simulation + != dtypes_to_headers[properties_files[0]].simulation + ): + raise ValueError( + f"Simulation mismatch between {file} and {properties_files[0]}" + ) + properties_files = [ + file for file, has_link in zip(properties_files, has_links) if has_link + ] + property_file = properties_files[0] + return property_file, links[property_file] + + +def open_linked_files(*files: Path): + """ + Open a collection of files that are linked together, such as a + properties file and a particle file. 
+ """ + file_handles = [File(file, "r") for file in files] + headers = [read_header(file) for file in file_handles] + properties_file, linked_files = verify_links(*headers) + properties_index = next( + index + for index, header in enumerate(headers) + if header.file.data_type == properties_file + ) + properties_file = file_handles.pop(properties_index) + properties_dataset = oc.open(properties_file) + if not isinstance(properties_dataset, oc.Dataset): + raise ValueError( + "Properties file must contain a single dataset, but found more" + ) + + linked_files_by_type = { + file["header"]["file"].attrs["data_type"]: file for file in file_handles + } + if len(linked_files_by_type) != len(linked_files): + raise ValueError("Linked files must have unique data types") + return get_linked_datasets( + properties_dataset, linked_files_by_type, properties_file + ) + + +def open_linked_file( + file_handle: File, datasets_to_get: Optional[Iterable[str]] = None +) -> l.LinkedCollection: + """ + Open a single file that contains both properties and linked datasets. 
+ """ + properties_name = list( + filter(lambda name: "properties" in name, file_handle.keys()) + ) + if len(properties_name) != 1: + raise ValueError( + "A linked file must contain exactly one properties dataset, " + f"found {len(properties_name)}" + ) + properties_name = properties_name[0] + names_to_ignore = [properties_name, "header"] + list(datasets_to_get or []) + other_datasets = [ + name for name in file_handle.keys() if name not in names_to_ignore + ] + if not other_datasets: + raise ValueError("No linked datasets found in file") + linked_groups_by_type = {name: file_handle[name] for name in other_datasets} + properties_dataset = oc.open(file_handle[properties_name]) + if not isinstance(properties_dataset, oc.Dataset): + raise ValueError("Properties dataset must be a single dataset") + + return get_linked_datasets( + properties_dataset, linked_groups_by_type, file_handle[properties_name] + ) + + +def get_linked_datasets( + properties_dataset: oc.Dataset, + linked_files_by_type: dict[str, File | Group], + properties_file: File, +) -> l.LinkedCollection: + datasets = {} + for dtype, pointer in linked_files_by_type.items(): + if "data" not in pointer.keys(): + datasets.update({k: pointer[k] for k in pointer.keys() if k != "header"}) + else: + datasets.update({dtype: pointer}) + + link_handlers = get_link_handlers( + properties_file, datasets, properties_dataset.header + ) + output = {} + for key, handler in link_handlers.items(): + if key in LINK_ALIASES: + output[LINK_ALIASES[key]] = handler + else: + output[key] = handler + + return l.LinkedCollection(properties_dataset, output) + + +def get_link_handlers( + link_file: File | Group, + linked_files: dict[str, File | Group], + header: OpenCosmoHeader, +) -> dict[str, l.LinkHandler]: + if "data_linked" not in link_file.keys(): + raise KeyError("No linked datasets found in the file.") + links = link_file["data_linked"] + + handler: Type[l.LinkHandler] + if MPI is not None and MPI.COMM_WORLD.Get_size() > 1: + 
handler = MpiLinkHandler + else: + handler = l.OomLinkHandler + unique_dtypes = {key.rsplit("_", 1)[0] for key in links.keys()} + output_links = {} + for dtype in unique_dtypes: + if dtype not in linked_files and LINK_ALIASES.get(dtype) not in linked_files: + continue # Skip if the linked file is not provided + + key = LINK_ALIASES.get(dtype, dtype) + if "data" not in linked_files[key].keys(): + raise KeyError(f"No data group found in linked file for dtype '{dtype}'") + try: + start = links[f"{dtype}_start"] + size = links[f"{dtype}_size"] + output_links[key] = handler(linked_files[key], (start, size), header) + except KeyError: + index = links["sod_profile_idx"] + output_links[key] = handler(linked_files[key], index, header) + return output_links diff --git a/opencosmo/link/mpi.py b/opencosmo/link/mpi.py new file mode 100644 index 00000000..818e3511 --- /dev/null +++ b/opencosmo/link/mpi.py @@ -0,0 +1,175 @@ +from __future__ import annotations + +from typing import Optional + +import numpy as np +from h5py import File, Group +from mpi4py import MPI + +import opencosmo as oc +from opencosmo.dataset.column import ColumnBuilder +from opencosmo.handler import MPIHandler +from opencosmo.header import OpenCosmoHeader +from opencosmo.spatial import Tree, read_tree +from opencosmo.transformations import TransformationDict +from opencosmo.transformations import units as u + + +def build_dataset( + file: File | Group, + indices: np.ndarray, + header: OpenCosmoHeader, + comm: MPI.Comm, + tree: Tree, + base_transformations: TransformationDict, + builders: dict[str, ColumnBuilder], +) -> oc.Dataset: + if len(indices) > 0: + index_range = (indices.min(), indices.max() + 1) + indices = indices - index_range[0] + else: + index_range = None + + handler = MPIHandler(file, tree=tree, comm=comm, rank_range=index_range) + return oc.Dataset(handler, header, builders, base_transformations, indices) + + +def build_full_dataset( + file: File | Group, + header: OpenCosmoHeader, + comm: 
MPI.Comm, + tree: Tree, + base_transformations: TransformationDict, + builders: dict[str, ColumnBuilder], +) -> oc.Dataset: + handler = MPIHandler(file, tree=tree, comm=comm) + return oc.Dataset( + handler, header, builders, base_transformations, np.arange(len(handler)) + ) + + +class MpiLinkHandler: + def __init__( + self, + file: File | Group, + link: Group | tuple[Group, Group], + header: OpenCosmoHeader, + comm: MPI.Comm = MPI.COMM_WORLD, + ): + self.selected: Optional[set[str]] = None + self.file = file + self.link = link + self.header = header + self.comm = comm + self.tree = read_tree(file, header) + self.builders, self.base_unit_transformations = ( + u.get_default_unit_transformations(file, header) + ) + if isinstance(self.link, tuple): + n_per_rank = self.link[0].shape[0] // self.comm.Get_size() + self.offset = n_per_rank * self.comm.Get_rank() + else: + n_per_rank = self.link.shape[0] // self.comm.Get_size() + self.offset = n_per_rank * self.comm.Get_rank() + + def get_all_data(self) -> oc.Dataset: + return build_full_dataset( + self.file, + self.header, + self.comm, + self.tree, + self.base_unit_transformations, + self.builders, + ) + + def get_data(self, indices: int | np.ndarray) -> Optional[oc.Dataset]: + if isinstance(indices, int): + indices = np.array([indices], dtype=int) + + if isinstance(self.link, tuple): + start = self.link[0][indices + self.offset] + size = self.link[1][indices + self.offset] + valid_rows = size > 0 + start = start[valid_rows] + size = size[valid_rows] + if len(start) == 0: + indices_into_data = np.array([], dtype=int) + else: + indices_into_data = np.concatenate( + [np.arange(idx, idx + length) for idx, length in zip(start, size)] + ) + else: + indices_into_data = self.link[indices + self.offset] + indices_into_data = indices_into_data[indices_into_data >= 0] + if len(indices_into_data) == 0: + indices_into_data = np.array([], dtype=int) + dataset = build_dataset( + self.file, + indices_into_data, + self.header, + self.comm, 
+ self.tree, + self.base_unit_transformations, + self.builders, + ) + if self.selected is not None: + dataset = dataset.select(self.selected) + return dataset + + def select(self, columns: str | list[str]) -> MpiLinkHandler: + if self.selected is not None: + new_selected = set(columns) + if not new_selected.issubset(self.selected): + raise ValueError("Tried to select columns that are not in the dataset.") + else: + new_selected = set(columns) + + self.selected = new_selected + return self + + def write( + self, data_group: File, link_group: Group, name: str, indices: int | np.ndarray + ): + # Pack the indices + if isinstance(indices, int): + indices = np.array([indices]) + sizes = self.comm.allgather(len(indices)) + shape = (sum(sizes),) + if sum(sizes) == 0: + return + + if not isinstance(self.link, tuple): + link_group.create_dataset("sod_profile_idx", shape=shape, dtype=int) + self.comm.Barrier() + start = indices[0] + end = indices[-1] + 1 + indices_into_data = self.link[self.offset + start : self.offset + end] + indices_into_data = indices_into_data[indices - start] + nonzero = indices_into_data >= 0 + nonzero = self.comm.gather(nonzero) + + if self.comm.Get_rank() == 0: + nonzero = np.concatenate(nonzero) + sod_profile_idx = np.full(len(nonzero), -1) + sod_profile_idx[nonzero] = np.arange(sum(nonzero)) + link_group["sod_profile_idx"][:] = sod_profile_idx + else: + link_group.create_dataset(f"{name}_start", shape=shape, dtype=int) + link_group.create_dataset(f"{name}_size", shape=shape, dtype=int) + self.comm.Barrier() + rank_sizes = self.link[1][self.offset + indices] + all_rank_sizes = self.comm.gather(rank_sizes) + if self.comm.Get_rank() == 0: + if all_rank_sizes is None: + # should never happen, but mypy... 
+ raise ValueError("No data to write") + + all_sizes = np.concatenate(all_rank_sizes) + starts = np.insert(np.cumsum(all_sizes), 0, 0)[:-1] + link_group[f"{name}_start"][:] = starts + link_group[f"{name}_size"][:] = all_sizes + + dataset = self.get_data(indices) + + if dataset is not None: + dataset.write(data_group, name) diff --git a/opencosmo/spatial/__init__.py b/opencosmo/spatial/__init__.py index 66a1870b..57aac60c 100644 --- a/opencosmo/spatial/__init__.py +++ b/opencosmo/spatial/__init__.py @@ -1,3 +1,3 @@ -from .tree import read_tree +from .tree import Tree, read_tree -__all__ = ["read_tree"] +__all__ = ["read_tree", "Tree"] diff --git a/poetry.lock b/poetry.lock index 5319100f..49054f61 100644 --- a/poetry.lock +++ b/poetry.lock @@ -77,14 +77,14 @@ typing = ["pandas-stubs (>=2.0)"] [[package]] name = "astropy-iers-data" -version = "0.2025.2.24.0.34.4" +version = "0.2025.3.31.0.36.18" description = "IERS Earth Rotation and Leap Second tables for the astropy core package" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "astropy_iers_data-0.2025.2.24.0.34.4-py3-none-any.whl", hash = "sha256:be8b3b75b09b1aa1d22de9b5243854e00d19936aca6d8f64466d16197c04bb28"}, - {file = "astropy_iers_data-0.2025.2.24.0.34.4.tar.gz", hash = "sha256:fce62431ce38129d166360f59563f506fbe37b2d1df5e0038a4ad0b0277274f7"}, + {file = "astropy_iers_data-0.2025.3.31.0.36.18-py3-none-any.whl", hash = "sha256:f351647c4df0980ba5e4030b5971a37e046b5019dd173a31a82e6de8baae527e"}, + {file = "astropy_iers_data-0.2025.3.31.0.36.18.tar.gz", hash = "sha256:4457a3d0d5b123d52d32fb2e1fc698b9634a209371ecc435cdce4feee6ed9b2b"}, ] [package.extras] @@ -286,25 +286,25 @@ numpy = ">=1.19.3" [[package]] name = "hdf5plugin" -version = "5.0.0" +version = "5.1.0" description = "HDF5 Plugins for Windows, MacOS, and Linux" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "hdf5plugin-5.0.0-py3-none-macosx_10_13_universal2.whl", hash = 
"sha256:8f696fcfd8c05b574e98180580e6d28428582cb9c7dd62b17c41ce3bdd5c5994"}, - {file = "hdf5plugin-5.0.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6793420f5c0bc753e925ef47fac74e491f8aaf27bfa6c61fce5fccaf4cd8e767"}, - {file = "hdf5plugin-5.0.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4b34b4e1d71ed47fdd080fce30d9fa9b043c9263385584e8006903c0c10eae1"}, - {file = "hdf5plugin-5.0.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd5f3e9cb4448841d07dd9d9258132b7eb900b38f8c49e899efe4050834757e6"}, - {file = "hdf5plugin-5.0.0-py3-none-win_amd64.whl", hash = "sha256:9bded0f5536471ace7855bd881762de1125586af1162001c39b8e899b89c47e2"}, - {file = "hdf5plugin-5.0.0.tar.gz", hash = "sha256:3bcc5c4f523953fe020a220c7b1b307c62066e39fdbdcd904fa2268db80e9dbb"}, + {file = "hdf5plugin-5.1.0-py3-none-macosx_10_13_universal2.whl", hash = "sha256:6f88bdc3ebf1d7393557d6c70811552f76f8fdd275988a7d2c904633f1a21a1d"}, + {file = "hdf5plugin-5.1.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0151f844e5f7de0e26cc2de275a339f6936c825fee915cbd54318e22a913c00a"}, + {file = "hdf5plugin-5.1.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b613e16d376d3b37fd2d76893e356c402100bd68a02abbe960a98e8257ca8758"}, + {file = "hdf5plugin-5.1.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6da81b0b168f271b0cf995a12c28cf01b381587fed21f25fd91b2c90d5108425"}, + {file = "hdf5plugin-5.1.0-py3-none-win_amd64.whl", hash = "sha256:6be3409554bde676db0f1ab46a27e87ea73d7974f359f354a738c812618261d1"}, + {file = "hdf5plugin-5.1.0.tar.gz", hash = "sha256:cf78f1426b5868128b9ec6c498b70d6734e1dc8007a8ed1e7282954ab421b3fa"}, ] [package.dependencies] h5py = ">=3.0.0" [package.extras] -doc = ["ipython", "nbsphinx", "sphinx", "sphinx-rtd-theme"] +doc = ["ipython", "nbsphinx", "sphinx", "sphinx_rtd_theme"] test = ["blosc2 (>=2.5.1)", "blosc2-grok (>=0.2.2)"] 
[[package]] @@ -336,14 +336,14 @@ files = [ [[package]] name = "iniconfig" -version = "2.0.0" +version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["test", "test-mpi"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] [[package]] @@ -437,14 +437,14 @@ files = [ [[package]] name = "mpi-pytest" -version = "2025.2.0" +version = "2025.4.0" description = "A pytest plugin for executing tests in parallel with MPI" optional = false python-versions = ">=3.7" groups = ["test-mpi"] files = [ - {file = "mpi_pytest-2025.2.0-py3-none-any.whl", hash = "sha256:5e258f1bac13c2d14d87b244f5a63d5a5e0609cfc5e302b67ea221d1782d1846"}, - {file = "mpi_pytest-2025.2.0.tar.gz", hash = "sha256:561cfe0d044071d88fca2e6125de127fbfd44e6a0df1c23f74b65d0bae5bfed0"}, + {file = "mpi_pytest-2025.4.0-py3-none-any.whl", hash = "sha256:b06f62bcd42c8ae3cbcc71a54e3640ed74384dce6d1e1d8eef34c03eaa97997c"}, + {file = "mpi_pytest-2025.4.0.tar.gz", hash = "sha256:5efbacce4eaba3bdf2a539c9abf94079d1a85506255055ef48d9817d0f4ffa4d"}, ] [package.dependencies] @@ -541,67 +541,67 @@ files = [ [[package]] name = "numpy" -version = "2.2.3" +version = "2.2.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" groups = ["main"] files = [ - {file = "numpy-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cbc6472e01952d3d1b2772b720428f8b90e2deea8344e854df22b0618e9cce71"}, - {file = 
"numpy-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdfe0c22692a30cd830c0755746473ae66c4a8f2e7bd508b35fb3b6a0813d787"}, - {file = "numpy-2.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:e37242f5324ffd9f7ba5acf96d774f9276aa62a966c0bad8dae692deebec7716"}, - {file = "numpy-2.2.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:95172a21038c9b423e68be78fd0be6e1b97674cde269b76fe269a5dfa6fadf0b"}, - {file = "numpy-2.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b47c440210c5d1d67e1cf434124e0b5c395eee1f5806fdd89b553ed1acd0a3"}, - {file = "numpy-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0391ea3622f5c51a2e29708877d56e3d276827ac5447d7f45e9bc4ade8923c52"}, - {file = "numpy-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f6b3dfc7661f8842babd8ea07e9897fe3d9b69a1d7e5fbb743e4160f9387833b"}, - {file = "numpy-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1ad78ce7f18ce4e7df1b2ea4019b5817a2f6a8a16e34ff2775f646adce0a5027"}, - {file = "numpy-2.2.3-cp310-cp310-win32.whl", hash = "sha256:5ebeb7ef54a7be11044c33a17b2624abe4307a75893c001a4800857956b41094"}, - {file = "numpy-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:596140185c7fa113563c67c2e894eabe0daea18cf8e33851738c19f70ce86aeb"}, - {file = "numpy-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:16372619ee728ed67a2a606a614f56d3eabc5b86f8b615c79d01957062826ca8"}, - {file = "numpy-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5521a06a3148686d9269c53b09f7d399a5725c47bbb5b35747e1cb76326b714b"}, - {file = "numpy-2.2.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:7c8dde0ca2f77828815fd1aedfdf52e59071a5bae30dac3b4da2a335c672149a"}, - {file = "numpy-2.2.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:77974aba6c1bc26e3c205c2214f0d5b4305bdc719268b93e768ddb17e3fdd636"}, - {file = "numpy-2.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d42f9c36d06440e34226e8bd65ff065ca0963aeecada587b937011efa02cdc9d"}, - {file = "numpy-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2712c5179f40af9ddc8f6727f2bd910ea0eb50206daea75f58ddd9fa3f715bb"}, - {file = "numpy-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c8b0451d2ec95010d1db8ca733afc41f659f425b7f608af569711097fd6014e2"}, - {file = "numpy-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9b4a8148c57ecac25a16b0e11798cbe88edf5237b0df99973687dd866f05e1b"}, - {file = "numpy-2.2.3-cp311-cp311-win32.whl", hash = "sha256:1f45315b2dc58d8a3e7754fe4e38b6fce132dab284a92851e41b2b344f6441c5"}, - {file = "numpy-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f48ba6f6c13e5e49f3d3efb1b51c8193215c42ac82610a04624906a9270be6f"}, - {file = "numpy-2.2.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12c045f43b1d2915eca6b880a7f4a256f59d62df4f044788c8ba67709412128d"}, - {file = "numpy-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:87eed225fd415bbae787f93a457af7f5990b92a334e346f72070bf569b9c9c95"}, - {file = "numpy-2.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:712a64103d97c404e87d4d7c47fb0c7ff9acccc625ca2002848e0d53288b90ea"}, - {file = "numpy-2.2.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a5ae282abe60a2db0fd407072aff4599c279bcd6e9a2475500fc35b00a57c532"}, - {file = "numpy-2.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5266de33d4c3420973cf9ae3b98b54a2a6d53a559310e3236c4b2b06b9c07d4e"}, - {file = "numpy-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b787adbf04b0db1967798dba8da1af07e387908ed1553a0d6e74c084d1ceafe"}, - {file = "numpy-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:34c1b7e83f94f3b564b35f480f5652a47007dd91f7c839f404d03279cc8dd021"}, - {file = "numpy-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4d8335b5f1b6e2bce120d55fb17064b0262ff29b459e8493d1785c18ae2553b8"}, - 
{file = "numpy-2.2.3-cp312-cp312-win32.whl", hash = "sha256:4d9828d25fb246bedd31e04c9e75714a4087211ac348cb39c8c5f99dbb6683fe"}, - {file = "numpy-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:83807d445817326b4bcdaaaf8e8e9f1753da04341eceec705c001ff342002e5d"}, - {file = "numpy-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bfdb06b395385ea9b91bf55c1adf1b297c9fdb531552845ff1d3ea6e40d5aba"}, - {file = "numpy-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:23c9f4edbf4c065fddb10a4f6e8b6a244342d95966a48820c614891e5059bb50"}, - {file = "numpy-2.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:a0c03b6be48aaf92525cccf393265e02773be8fd9551a2f9adbe7db1fa2b60f1"}, - {file = "numpy-2.2.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:2376e317111daa0a6739e50f7ee2a6353f768489102308b0d98fcf4a04f7f3b5"}, - {file = "numpy-2.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fb62fe3d206d72fe1cfe31c4a1106ad2b136fcc1606093aeab314f02930fdf2"}, - {file = "numpy-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52659ad2534427dffcc36aac76bebdd02b67e3b7a619ac67543bc9bfe6b7cdb1"}, - {file = "numpy-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1b416af7d0ed3271cad0f0a0d0bee0911ed7eba23e66f8424d9f3dfcdcae1304"}, - {file = "numpy-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1402da8e0f435991983d0a9708b779f95a8c98c6b18a171b9f1be09005e64d9d"}, - {file = "numpy-2.2.3-cp313-cp313-win32.whl", hash = "sha256:136553f123ee2951bfcfbc264acd34a2fc2f29d7cdf610ce7daf672b6fbaa693"}, - {file = "numpy-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5b732c8beef1d7bc2d9e476dbba20aaff6167bf205ad9aa8d30913859e82884b"}, - {file = "numpy-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:435e7a933b9fda8126130b046975a968cc2d833b505475e588339e09f7672890"}, - {file = "numpy-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:7678556eeb0152cbd1522b684dcd215250885993dd00adb93679ec3c0e6e091c"}, - {file = "numpy-2.2.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2e8da03bd561504d9b20e7a12340870dfc206c64ea59b4cfee9fceb95070ee94"}, - {file = "numpy-2.2.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:c9aa4496fd0e17e3843399f533d62857cef5900facf93e735ef65aa4bbc90ef0"}, - {file = "numpy-2.2.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4ca91d61a4bf61b0f2228f24bbfa6a9facd5f8af03759fe2a655c50ae2c6610"}, - {file = "numpy-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deaa09cd492e24fd9b15296844c0ad1b3c976da7907e1c1ed3a0ad21dded6f76"}, - {file = "numpy-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:246535e2f7496b7ac85deffe932896a3577be7af8fb7eebe7146444680297e9a"}, - {file = "numpy-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:daf43a3d1ea699402c5a850e5313680ac355b4adc9770cd5cfc2940e7861f1bf"}, - {file = "numpy-2.2.3-cp313-cp313t-win32.whl", hash = "sha256:cf802eef1f0134afb81fef94020351be4fe1d6681aadf9c5e862af6602af64ef"}, - {file = "numpy-2.2.3-cp313-cp313t-win_amd64.whl", hash = "sha256:aee2512827ceb6d7f517c8b85aa5d3923afe8fc7a57d028cffcd522f1c6fd082"}, - {file = "numpy-2.2.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3c2ec8a0f51d60f1e9c0c5ab116b7fc104b165ada3f6c58abf881cb2eb16044d"}, - {file = "numpy-2.2.3-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ed2cf9ed4e8ebc3b754d398cba12f24359f018b416c380f577bbae112ca52fc9"}, - {file = "numpy-2.2.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39261798d208c3095ae4f7bc8eaeb3481ea8c6e03dc48028057d3cbdbdb8937e"}, - {file = "numpy-2.2.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:783145835458e60fa97afac25d511d00a1eca94d4a8f3ace9fe2043003c678e4"}, - {file = "numpy-2.2.3.tar.gz", hash = "sha256:dbdc15f0c81611925f382dfa97b3bd0bc2c1ce19d4fe50482cb0ddc12ba30020"}, + {file 
= "numpy-2.2.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8146f3550d627252269ac42ae660281d673eb6f8b32f113538e0cc2a9aed42b9"}, + {file = "numpy-2.2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e642d86b8f956098b564a45e6f6ce68a22c2c97a04f5acd3f221f57b8cb850ae"}, + {file = "numpy-2.2.4-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:a84eda42bd12edc36eb5b53bbcc9b406820d3353f1994b6cfe453a33ff101775"}, + {file = "numpy-2.2.4-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:4ba5054787e89c59c593a4169830ab362ac2bee8a969249dc56e5d7d20ff8df9"}, + {file = "numpy-2.2.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7716e4a9b7af82c06a2543c53ca476fa0b57e4d760481273e09da04b74ee6ee2"}, + {file = "numpy-2.2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adf8c1d66f432ce577d0197dceaac2ac00c0759f573f28516246351c58a85020"}, + {file = "numpy-2.2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:218f061d2faa73621fa23d6359442b0fc658d5b9a70801373625d958259eaca3"}, + {file = "numpy-2.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:df2f57871a96bbc1b69733cd4c51dc33bea66146b8c63cacbfed73eec0883017"}, + {file = "numpy-2.2.4-cp310-cp310-win32.whl", hash = "sha256:a0258ad1f44f138b791327961caedffbf9612bfa504ab9597157806faa95194a"}, + {file = "numpy-2.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:0d54974f9cf14acf49c60f0f7f4084b6579d24d439453d5fc5805d46a165b542"}, + {file = "numpy-2.2.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e9e0a277bb2eb5d8a7407e14688b85fd8ad628ee4e0c7930415687b6564207a4"}, + {file = "numpy-2.2.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9eeea959168ea555e556b8188da5fa7831e21d91ce031e95ce23747b7609f8a4"}, + {file = "numpy-2.2.4-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bd3ad3b0a40e713fc68f99ecfd07124195333f1e689387c180813f0e94309d6f"}, + {file = "numpy-2.2.4-cp311-cp311-macosx_14_0_x86_64.whl", hash = 
"sha256:cf28633d64294969c019c6df4ff37f5698e8326db68cc2b66576a51fad634880"}, + {file = "numpy-2.2.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fa8fa7697ad1646b5c93de1719965844e004fcad23c91228aca1cf0800044a1"}, + {file = "numpy-2.2.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4162988a360a29af158aeb4a2f4f09ffed6a969c9776f8f3bdee9b06a8ab7e5"}, + {file = "numpy-2.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:892c10d6a73e0f14935c31229e03325a7b3093fafd6ce0af704be7f894d95687"}, + {file = "numpy-2.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db1f1c22173ac1c58db249ae48aa7ead29f534b9a948bc56828337aa84a32ed6"}, + {file = "numpy-2.2.4-cp311-cp311-win32.whl", hash = "sha256:ea2bb7e2ae9e37d96835b3576a4fa4b3a97592fbea8ef7c3587078b0068b8f09"}, + {file = "numpy-2.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:f7de08cbe5551911886d1ab60de58448c6df0f67d9feb7d1fb21e9875ef95e91"}, + {file = "numpy-2.2.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a7b9084668aa0f64e64bd00d27ba5146ef1c3a8835f3bd912e7a9e01326804c4"}, + {file = "numpy-2.2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dbe512c511956b893d2dacd007d955a3f03d555ae05cfa3ff1c1ff6df8851854"}, + {file = "numpy-2.2.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:bb649f8b207ab07caebba230d851b579a3c8711a851d29efe15008e31bb4de24"}, + {file = "numpy-2.2.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:f34dc300df798742b3d06515aa2a0aee20941c13579d7a2f2e10af01ae4901ee"}, + {file = "numpy-2.2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3f7ac96b16955634e223b579a3e5798df59007ca43e8d451a0e6a50f6bfdfba"}, + {file = "numpy-2.2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f92084defa704deadd4e0a5ab1dc52d8ac9e8a8ef617f3fbb853e79b0ea3592"}, + {file = "numpy-2.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:7a4e84a6283b36632e2a5b56e121961f6542ab886bc9e12f8f9818b3c266bfbb"}, + {file = "numpy-2.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:11c43995255eb4127115956495f43e9343736edb7fcdb0d973defd9de14cd84f"}, + {file = "numpy-2.2.4-cp312-cp312-win32.whl", hash = "sha256:65ef3468b53269eb5fdb3a5c09508c032b793da03251d5f8722b1194f1790c00"}, + {file = "numpy-2.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:2aad3c17ed2ff455b8eaafe06bcdae0062a1db77cb99f4b9cbb5f4ecb13c5146"}, + {file = "numpy-2.2.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cf4e5c6a278d620dee9ddeb487dc6a860f9b199eadeecc567f777daace1e9e7"}, + {file = "numpy-2.2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1974afec0b479e50438fc3648974268f972e2d908ddb6d7fb634598cdb8260a0"}, + {file = "numpy-2.2.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:79bd5f0a02aa16808fcbc79a9a376a147cc1045f7dfe44c6e7d53fa8b8a79392"}, + {file = "numpy-2.2.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:3387dd7232804b341165cedcb90694565a6015433ee076c6754775e85d86f1fc"}, + {file = "numpy-2.2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f527d8fdb0286fd2fd97a2a96c6be17ba4232da346931d967a0630050dfd298"}, + {file = "numpy-2.2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bce43e386c16898b91e162e5baaad90c4b06f9dcbe36282490032cec98dc8ae7"}, + {file = "numpy-2.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31504f970f563d99f71a3512d0c01a645b692b12a63630d6aafa0939e52361e6"}, + {file = "numpy-2.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:81413336ef121a6ba746892fad881a83351ee3e1e4011f52e97fba79233611fd"}, + {file = "numpy-2.2.4-cp313-cp313-win32.whl", hash = "sha256:f486038e44caa08dbd97275a9a35a283a8f1d2f0ee60ac260a1790e76660833c"}, + {file = "numpy-2.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:207a2b8441cc8b6a2a78c9ddc64d00d20c303d79fba08c577752f080c4007ee3"}, + {file = 
"numpy-2.2.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8120575cb4882318c791f839a4fd66161a6fa46f3f0a5e613071aae35b5dd8f8"}, + {file = "numpy-2.2.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a761ba0fa886a7bb33c6c8f6f20213735cb19642c580a931c625ee377ee8bd39"}, + {file = "numpy-2.2.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:ac0280f1ba4a4bfff363a99a6aceed4f8e123f8a9b234c89140f5e894e452ecd"}, + {file = "numpy-2.2.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:879cf3a9a2b53a4672a168c21375166171bc3932b7e21f622201811c43cdd3b0"}, + {file = "numpy-2.2.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f05d4198c1bacc9124018109c5fba2f3201dbe7ab6e92ff100494f236209c960"}, + {file = "numpy-2.2.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2f085ce2e813a50dfd0e01fbfc0c12bbe5d2063d99f8b29da30e544fb6483b8"}, + {file = "numpy-2.2.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:92bda934a791c01d6d9d8e038363c50918ef7c40601552a58ac84c9613a665bc"}, + {file = "numpy-2.2.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ee4d528022f4c5ff67332469e10efe06a267e32f4067dc76bb7e2cddf3cd25ff"}, + {file = "numpy-2.2.4-cp313-cp313t-win32.whl", hash = "sha256:05c076d531e9998e7e694c36e8b349969c56eadd2cdcd07242958489d79a7286"}, + {file = "numpy-2.2.4-cp313-cp313t-win_amd64.whl", hash = "sha256:188dcbca89834cc2e14eb2f106c96d6d46f200fe0200310fc29089657379c58d"}, + {file = "numpy-2.2.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7051ee569db5fbac144335e0f3b9c2337e0c8d5c9fee015f259a5bd70772b7e8"}, + {file = "numpy-2.2.4-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ab2939cd5bec30a7430cbdb2287b63151b77cf9624de0532d629c9a1c59b1d5c"}, + {file = "numpy-2.2.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0f35b19894a9e08639fd60a1ec1978cb7f5f7f1eace62f38dd36be8aecdef4d"}, + {file = "numpy-2.2.4-pp310-pypy310_pp73-win_amd64.whl", hash 
= "sha256:b4adfbbc64014976d2f91084915ca4e626fbf2057fb81af209c1a6d776d23e3d"}, + {file = "numpy-2.2.4.tar.gz", hash = "sha256:9ba03692a45d3eef66559efe1d1096c4b9b75c0986b5dff5530c378fb8331d4f"}, ] [[package]] @@ -634,20 +634,21 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.10.6" +version = "2.11.1" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, - {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, + {file = "pydantic-2.11.1-py3-none-any.whl", hash = "sha256:5b6c415eee9f8123a14d859be0c84363fec6b1feb6b688d6435801230b56e0b8"}, + {file = "pydantic-2.11.1.tar.gz", hash = "sha256:442557d2910e75c991c39f4b4ab18963d57b9b55122c8b2a9cd176d8c29ce968"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.2" +pydantic-core = "2.33.0" typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -655,112 +656,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.27.2" +version = "2.33.0" description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, - {file 
= "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, - {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", 
hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, - {file 
= "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, - {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, + {file = "pydantic_core-2.33.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71dffba8fe9ddff628c68f3abd845e91b028361d43c5f8e7b3f8b91d7d85413e"}, + {file = 
"pydantic_core-2.33.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:abaeec1be6ed535a5d7ffc2e6c390083c425832b20efd621562fbb5bff6dc518"}, + {file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:759871f00e26ad3709efc773ac37b4d571de065f9dfb1778012908bcc36b3a73"}, + {file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dcfebee69cd5e1c0b76a17e17e347c84b00acebb8dd8edb22d4a03e88e82a207"}, + {file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b1262b912435a501fa04cd213720609e2cefa723a07c92017d18693e69bf00b"}, + {file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4726f1f3f42d6a25678c67da3f0b10f148f5655813c5aca54b0d1742ba821b8f"}, + {file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e790954b5093dff1e3a9a2523fddc4e79722d6f07993b4cd5547825c3cbf97b5"}, + {file = "pydantic_core-2.33.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34e7fb3abe375b5c4e64fab75733d605dda0f59827752debc99c17cb2d5f3276"}, + {file = "pydantic_core-2.33.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ecb158fb9b9091b515213bed3061eb7deb1d3b4e02327c27a0ea714ff46b0760"}, + {file = "pydantic_core-2.33.0-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:4d9149e7528af8bbd76cc055967e6e04617dcb2a2afdaa3dea899406c5521faa"}, + {file = "pydantic_core-2.33.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e81a295adccf73477220e15ff79235ca9dcbcee4be459eb9d4ce9a2763b8386c"}, + {file = "pydantic_core-2.33.0-cp310-cp310-win32.whl", hash = "sha256:f22dab23cdbce2005f26a8f0c71698457861f97fc6318c75814a50c75e87d025"}, + {file = "pydantic_core-2.33.0-cp310-cp310-win_amd64.whl", hash = "sha256:9cb2390355ba084c1ad49485d18449b4242da344dea3e0fe10babd1f0db7dcfc"}, + {file = 
"pydantic_core-2.33.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a608a75846804271cf9c83e40bbb4dab2ac614d33c6fd5b0c6187f53f5c593ef"}, + {file = "pydantic_core-2.33.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e1c69aa459f5609dec2fa0652d495353accf3eda5bdb18782bc5a2ae45c9273a"}, + {file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9ec80eb5a5f45a2211793f1c4aeddff0c3761d1c70d684965c1807e923a588b"}, + {file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e925819a98318d17251776bd3d6aa9f3ff77b965762155bdad15d1a9265c4cfd"}, + {file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bf68bb859799e9cec3d9dd8323c40c00a254aabb56fe08f907e437005932f2b"}, + {file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b2ea72dea0825949a045fa4071f6d5b3d7620d2a208335207793cf29c5a182d"}, + {file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1583539533160186ac546b49f5cde9ffc928062c96920f58bd95de32ffd7bffd"}, + {file = "pydantic_core-2.33.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:23c3e77bf8a7317612e5c26a3b084c7edeb9552d645742a54a5867635b4f2453"}, + {file = "pydantic_core-2.33.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a7a7f2a3f628d2f7ef11cb6188bcf0b9e1558151d511b974dfea10a49afe192b"}, + {file = "pydantic_core-2.33.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:f1fb026c575e16f673c61c7b86144517705865173f3d0907040ac30c4f9f5915"}, + {file = "pydantic_core-2.33.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:635702b2fed997e0ac256b2cfbdb4dd0bf7c56b5d8fba8ef03489c03b3eb40e2"}, + {file = "pydantic_core-2.33.0-cp311-cp311-win32.whl", hash = "sha256:07b4ced28fccae3f00626eaa0c4001aa9ec140a29501770a88dbbb0966019a86"}, + {file = 
"pydantic_core-2.33.0-cp311-cp311-win_amd64.whl", hash = "sha256:4927564be53239a87770a5f86bdc272b8d1fbb87ab7783ad70255b4ab01aa25b"}, + {file = "pydantic_core-2.33.0-cp311-cp311-win_arm64.whl", hash = "sha256:69297418ad644d521ea3e1aa2e14a2a422726167e9ad22b89e8f1130d68e1e9a"}, + {file = "pydantic_core-2.33.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6c32a40712e3662bebe524abe8abb757f2fa2000028d64cc5a1006016c06af43"}, + {file = "pydantic_core-2.33.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8ec86b5baa36f0a0bfb37db86c7d52652f8e8aa076ab745ef7725784183c3fdd"}, + {file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4deac83a8cc1d09e40683be0bc6d1fa4cde8df0a9bf0cda5693f9b0569ac01b6"}, + {file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:175ab598fb457a9aee63206a1993874badf3ed9a456e0654273e56f00747bbd6"}, + {file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f36afd0d56a6c42cf4e8465b6441cf546ed69d3a4ec92724cc9c8c61bd6ecf4"}, + {file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a98257451164666afafc7cbf5fb00d613e33f7e7ebb322fbcd99345695a9a61"}, + {file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecc6d02d69b54a2eb83ebcc6f29df04957f734bcf309d346b4f83354d8376862"}, + {file = "pydantic_core-2.33.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a69b7596c6603afd049ce7f3835bcf57dd3892fc7279f0ddf987bebed8caa5a"}, + {file = "pydantic_core-2.33.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ea30239c148b6ef41364c6f51d103c2988965b643d62e10b233b5efdca8c0099"}, + {file = "pydantic_core-2.33.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:abfa44cf2f7f7d7a199be6c6ec141c9024063205545aa09304349781b9a125e6"}, + {file = 
"pydantic_core-2.33.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20d4275f3c4659d92048c70797e5fdc396c6e4446caf517ba5cad2db60cd39d3"}, + {file = "pydantic_core-2.33.0-cp312-cp312-win32.whl", hash = "sha256:918f2013d7eadea1d88d1a35fd4a1e16aaf90343eb446f91cb091ce7f9b431a2"}, + {file = "pydantic_core-2.33.0-cp312-cp312-win_amd64.whl", hash = "sha256:aec79acc183865bad120b0190afac467c20b15289050648b876b07777e67ea48"}, + {file = "pydantic_core-2.33.0-cp312-cp312-win_arm64.whl", hash = "sha256:5461934e895968655225dfa8b3be79e7e927e95d4bd6c2d40edd2fa7052e71b6"}, + {file = "pydantic_core-2.33.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f00e8b59e1fc8f09d05594aa7d2b726f1b277ca6155fc84c0396db1b373c4555"}, + {file = "pydantic_core-2.33.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a73be93ecef45786d7d95b0c5e9b294faf35629d03d5b145b09b81258c7cd6d"}, + {file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff48a55be9da6930254565ff5238d71d5e9cd8c5487a191cb85df3bdb8c77365"}, + {file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a4ea04195638dcd8c53dadb545d70badba51735b1594810e9768c2c0b4a5da"}, + {file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41d698dcbe12b60661f0632b543dbb119e6ba088103b364ff65e951610cb7ce0"}, + {file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae62032ef513fe6281ef0009e30838a01057b832dc265da32c10469622613885"}, + {file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f225f3a3995dbbc26affc191d0443c6c4aa71b83358fd4c2b7d63e2f6f0336f9"}, + {file = "pydantic_core-2.33.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5bdd36b362f419c78d09630cbaebc64913f66f62bda6d42d5fbb08da8cc4f181"}, + {file = "pydantic_core-2.33.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash 
= "sha256:2a0147c0bef783fd9abc9f016d66edb6cac466dc54a17ec5f5ada08ff65caf5d"}, + {file = "pydantic_core-2.33.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:c860773a0f205926172c6644c394e02c25421dc9a456deff16f64c0e299487d3"}, + {file = "pydantic_core-2.33.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:138d31e3f90087f42aa6286fb640f3c7a8eb7bdae829418265e7e7474bd2574b"}, + {file = "pydantic_core-2.33.0-cp313-cp313-win32.whl", hash = "sha256:d20cbb9d3e95114325780f3cfe990f3ecae24de7a2d75f978783878cce2ad585"}, + {file = "pydantic_core-2.33.0-cp313-cp313-win_amd64.whl", hash = "sha256:ca1103d70306489e3d006b0f79db8ca5dd3c977f6f13b2c59ff745249431a606"}, + {file = "pydantic_core-2.33.0-cp313-cp313-win_arm64.whl", hash = "sha256:6291797cad239285275558e0a27872da735b05c75d5237bbade8736f80e4c225"}, + {file = "pydantic_core-2.33.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7b79af799630af263eca9ec87db519426d8c9b3be35016eddad1832bac812d87"}, + {file = "pydantic_core-2.33.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eabf946a4739b5237f4f56d77fa6668263bc466d06a8036c055587c130a46f7b"}, + {file = "pydantic_core-2.33.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8a1d581e8cdbb857b0e0e81df98603376c1a5c34dc5e54039dcc00f043df81e7"}, + {file = "pydantic_core-2.33.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:7c9c84749f5787781c1c45bb99f433402e484e515b40675a5d121ea14711cf61"}, + {file = "pydantic_core-2.33.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:64672fa888595a959cfeff957a654e947e65bbe1d7d82f550417cbd6898a1d6b"}, + {file = "pydantic_core-2.33.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26bc7367c0961dec292244ef2549afa396e72e28cc24706210bd44d947582c59"}, + {file = "pydantic_core-2.33.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ce72d46eb201ca43994303025bd54d8a35a3fc2a3495fac653d6eb7205ce04f4"}, + {file = 
"pydantic_core-2.33.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14229c1504287533dbf6b1fc56f752ce2b4e9694022ae7509631ce346158de11"}, + {file = "pydantic_core-2.33.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:085d8985b1c1e48ef271e98a658f562f29d89bda98bf120502283efbc87313eb"}, + {file = "pydantic_core-2.33.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31860fbda80d8f6828e84b4a4d129fd9c4535996b8249cfb8c720dc2a1a00bb8"}, + {file = "pydantic_core-2.33.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f200b2f20856b5a6c3a35f0d4e344019f805e363416e609e9b47c552d35fd5ea"}, + {file = "pydantic_core-2.33.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f72914cfd1d0176e58ddc05c7a47674ef4222c8253bf70322923e73e14a4ac3"}, + {file = "pydantic_core-2.33.0-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:91301a0980a1d4530d4ba7e6a739ca1a6b31341252cb709948e0aca0860ce0ae"}, + {file = "pydantic_core-2.33.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7419241e17c7fbe5074ba79143d5523270e04f86f1b3a0dff8df490f84c8273a"}, + {file = "pydantic_core-2.33.0-cp39-cp39-win32.whl", hash = "sha256:7a25493320203005d2a4dac76d1b7d953cb49bce6d459d9ae38e30dd9f29bc9c"}, + {file = "pydantic_core-2.33.0-cp39-cp39-win_amd64.whl", hash = "sha256:82a4eba92b7ca8af1b7d5ef5f3d9647eee94d1f74d21ca7c21e3a2b92e008358"}, + {file = "pydantic_core-2.33.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2762c568596332fdab56b07060c8ab8362c56cf2a339ee54e491cd503612c50"}, + {file = "pydantic_core-2.33.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bf637300ff35d4f59c006fff201c510b2b5e745b07125458a5389af3c0dff8c"}, + {file = "pydantic_core-2.33.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c151ce3d59ed56ebd7ce9ce5986a409a85db697d25fc232f8e81f195aa39a1"}, + {file = 
"pydantic_core-2.33.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ee65f0cc652261744fd07f2c6e6901c914aa6c5ff4dcfaf1136bc394d0dd26b"}, + {file = "pydantic_core-2.33.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:024d136ae44d233e6322027bbf356712b3940bee816e6c948ce4b90f18471b3d"}, + {file = "pydantic_core-2.33.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e37f10f6d4bc67c58fbd727108ae1d8b92b397355e68519f1e4a7babb1473442"}, + {file = "pydantic_core-2.33.0-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:502ed542e0d958bd12e7c3e9a015bce57deaf50eaa8c2e1c439b512cb9db1e3a"}, + {file = "pydantic_core-2.33.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:715c62af74c236bf386825c0fdfa08d092ab0f191eb5b4580d11c3189af9d330"}, + {file = "pydantic_core-2.33.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bccc06fa0372151f37f6b69834181aa9eb57cf8665ed36405fb45fbf6cac3bae"}, + {file = "pydantic_core-2.33.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d8dc9f63a26f7259b57f46a7aab5af86b2ad6fbe48487500bb1f4b27e051e4c"}, + {file = "pydantic_core-2.33.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:30369e54d6d0113d2aa5aee7a90d17f225c13d87902ace8fcd7bbf99b19124db"}, + {file = "pydantic_core-2.33.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3eb479354c62067afa62f53bb387827bee2f75c9c79ef25eef6ab84d4b1ae3b"}, + {file = "pydantic_core-2.33.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0310524c833d91403c960b8a3cf9f46c282eadd6afd276c8c5edc617bd705dc9"}, + {file = "pydantic_core-2.33.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eddb18a00bbb855325db27b4c2a89a4ba491cd6a0bd6d852b225172a1f54b36c"}, + {file = "pydantic_core-2.33.0-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:ade5dbcf8d9ef8f4b28e682d0b29f3008df9842bb5ac48ac2c17bc55771cc976"}, + {file = "pydantic_core-2.33.0-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:2c0afd34f928383e3fd25740f2050dbac9d077e7ba5adbaa2227f4d4f3c8da5c"}, + {file = "pydantic_core-2.33.0-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7da333f21cd9df51d5731513a6d39319892947604924ddf2e24a4612975fb936"}, + {file = "pydantic_core-2.33.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:4b6d77c75a57f041c5ee915ff0b0bb58eabb78728b69ed967bc5b780e8f701b8"}, + {file = "pydantic_core-2.33.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba95691cf25f63df53c1d342413b41bd7762d9acb425df8858d7efa616c0870e"}, + {file = "pydantic_core-2.33.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4f1ab031feb8676f6bd7c85abec86e2935850bf19b84432c64e3e239bffeb1ec"}, + {file = "pydantic_core-2.33.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c1151827eef98b83d49b6ca6065575876a02d2211f259fb1a6b7757bd24dd8"}, + {file = "pydantic_core-2.33.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a66d931ea2c1464b738ace44b7334ab32a2fd50be023d863935eb00f42be1778"}, + {file = "pydantic_core-2.33.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0bcf0bab28995d483f6c8d7db25e0d05c3efa5cebfd7f56474359e7137f39856"}, + {file = "pydantic_core-2.33.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:89670d7a0045acb52be0566df5bc8b114ac967c662c06cf5e0c606e4aadc964b"}, + {file = "pydantic_core-2.33.0-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:b716294e721d8060908dbebe32639b01bfe61b15f9f57bcc18ca9a0e00d9520b"}, + {file = "pydantic_core-2.33.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fc53e05c16697ff0c1c7c2b98e45e131d4bfb78068fffff92a82d169cbb4c7b7"}, + {file = "pydantic_core-2.33.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:68504959253303d3ae9406b634997a2123a0b0c1da86459abbd0ffc921695eac"}, + {file = "pydantic_core-2.33.0.tar.gz", hash = "sha256:40eb8af662ba409c3cbf4a8150ad32ae73514cd7cb1f1a2113af39763dd616b3"}, ] [package.dependencies] @@ -811,14 +811,14 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" -version = "8.3.4" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" groups = ["test", "test-mpi"] files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] @@ -933,30 +933,30 @@ test = ["pytest (>=8)"] [[package]] name = "ruff" -version = "0.9.8" +version = "0.9.10" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" groups = ["develop"] files = [ - {file = "ruff-0.9.8-py3-none-linux_armv6l.whl", hash = "sha256:d236f0ce0190bbc6fa9b4c4b85e916fb4c50fd087e6558af1bf5a45eb20e374d"}, - {file = "ruff-0.9.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:59fac6922b336d0c38df199761ade561563e1b7636e3a2b767b9ee5a68aa9cbf"}, - {file = "ruff-0.9.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a82082ec72bde2166ec138055307396c4d4e543fd97266dc2bfa24284cb30af6"}, - {file = "ruff-0.9.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e06635d12321605d1d11226c7d3c6b1245a0df498099868d14b4e353b3f0ac22"}, - {file = "ruff-0.9.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:65961815bb35d427e957940d13b2a1d0a67d8b245d3a7e0b5a4a2058536d3532"}, - {file = "ruff-0.9.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c18356beaef174797ad83f11debc5569e96afa73a549b2d073912565cfc4cfd1"}, - {file = "ruff-0.9.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a1dfc443bee0288ea926a4d9ecfd858bf94ddf0a03a256c63e81b2b6dccdfc7d"}, - {file = "ruff-0.9.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc86d5a85cd5ab1d5aff1650f038aa34681d0692cc2467aa9ddef37bd56ea3f9"}, - {file = "ruff-0.9.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:66662aa19535d58fe6d04e5b59a39e495b102f2f5a2a1b9698e240eb78f429ef"}, - {file = "ruff-0.9.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:733647b2fe9367e1aa049c0eba296363746f3bc0dbfd454b0bc4b7b46cdf0146"}, - {file = "ruff-0.9.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:100031be9777f67af7f61b4d4eea2a0531ed6788940aca4360f6b9aae317c53b"}, - {file = "ruff-0.9.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2f090758d58b4667d9022eee1085a854db93d800279e5a177ebda5adc1faf639"}, - {file = "ruff-0.9.8-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:f774998b9c9a062510533aba9b53085de6be6d41e13a7a0bd086af8a40e838c3"}, - {file = "ruff-0.9.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6ef7cc80626264ab8ab4d68b359ba867b8a52b0830a9643cd31289146dd40892"}, - {file = "ruff-0.9.8-py3-none-win32.whl", hash = "sha256:54b57b623a683e696a1ede99db95500763c1badafe105b6ad8d8e9d96e385ae2"}, - {file = "ruff-0.9.8-py3-none-win_amd64.whl", hash = "sha256:b0878103b2fb8af55ad701308a69ce713108ad346c3a3a143ebcd1e13829c9a7"}, - {file = "ruff-0.9.8-py3-none-win_arm64.whl", hash = "sha256:e459a4fc4150fcc60da26c59a6a4b70878c60a99df865a71cf6f958dc68c419a"}, - {file = "ruff-0.9.8.tar.gz", hash = "sha256:12d455f2be6fe98accbea2487bbb8eaec716c760bf60b45e7e13f76f913f56e9"}, + {file = "ruff-0.9.10-py3-none-linux_armv6l.whl", hash = "sha256:eb4d25532cfd9fe461acc83498361ec2e2252795b4f40b17e80692814329e42d"}, + {file = "ruff-0.9.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:188a6638dab1aa9bb6228a7302387b2c9954e455fb25d6b4470cb0641d16759d"}, + {file = "ruff-0.9.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5284dcac6b9dbc2fcb71fdfc26a217b2ca4ede6ccd57476f52a587451ebe450d"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47678f39fa2a3da62724851107f438c8229a3470f533894b5568a39b40029c0c"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99713a6e2766b7a17147b309e8c915b32b07a25c9efd12ada79f217c9c778b3e"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524ee184d92f7c7304aa568e2db20f50c32d1d0caa235d8ddf10497566ea1a12"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:df92aeac30af821f9acf819fc01b4afc3dfb829d2782884f8739fb52a8119a16"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de42e4edc296f520bb84954eb992a07a0ec5a02fecb834498415908469854a52"}, + {file = 
"ruff-0.9.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d257f95b65806104b6b1ffca0ea53f4ef98454036df65b1eda3693534813ecd1"}, + {file = "ruff-0.9.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60dec7201c0b10d6d11be00e8f2dbb6f40ef1828ee75ed739923799513db24c"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d838b60007da7a39c046fcdd317293d10b845001f38bcb55ba766c3875b01e43"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ccaf903108b899beb8e09a63ffae5869057ab649c1e9231c05ae354ebc62066c"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f9567d135265d46e59d62dc60c0bfad10e9a6822e231f5b24032dba5a55be6b5"}, + {file = "ruff-0.9.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5f202f0d93738c28a89f8ed9eaba01b7be339e5d8d642c994347eaa81c6d75b8"}, + {file = "ruff-0.9.10-py3-none-win32.whl", hash = "sha256:bfb834e87c916521ce46b1788fbb8484966e5113c02df216680102e9eb960029"}, + {file = "ruff-0.9.10-py3-none-win_amd64.whl", hash = "sha256:f2160eeef3031bf4b17df74e307d4c5fb689a6f3a26a2de3f7ef4044e3c484f1"}, + {file = "ruff-0.9.10-py3-none-win_arm64.whl", hash = "sha256:5fd804c0327a5e5ea26615550e706942f348b197d5475ff34c19733aee4b2e69"}, + {file = "ruff-0.9.10.tar.gz", hash = "sha256:9bacb735d7bada9cfb0f2c227d3658fc443d90a727b47f206fb33f52f3c0eac7"}, ] [[package]] @@ -973,14 +973,14 @@ files = [ [[package]] name = "sphinx" -version = "8.2.1" +version = "8.2.3" description = "Python documentation generator" optional = false python-versions = ">=3.11" groups = ["docs"] files = [ - {file = "sphinx-8.2.1-py3-none-any.whl", hash = "sha256:b5d2bb3cdf6207fcacde9f92085d2b97667b05b9c346eaec426ca4be8af505e9"}, - {file = "sphinx-8.2.1.tar.gz", hash = "sha256:e4b932951b9c18b039f73b72e4e63afe967d90408700ec222b981ac24647c01e"}, + {file = "sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3"}, + {file = 
"sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348"}, ] [package.dependencies] @@ -1004,7 +1004,7 @@ sphinxcontrib-serializinghtml = ">=1.1.9" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["betterproto (==2.0.0b6)", "mypy (==1.15.0)", "pypi-attestations (==0.0.21)", "pyright (==1.1.394)", "pytest (>=8.0)", "ruff (==0.9.7)", "sphinx-lint (>=0.9)", "types-Pillow (==10.2.0.20240822)", "types-Pygments (==2.19.0.20250219)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", "types-docutils (==0.21.0.20241128)", "types-requests (==2.32.0.20241016)", "types-urllib3 (==1.26.25.14)"] +lint = ["betterproto (==2.0.0b6)", "mypy (==1.15.0)", "pypi-attestations (==0.0.21)", "pyright (==1.1.395)", "pytest (>=8.0)", "ruff (==0.9.9)", "sphinx-lint (>=0.9)", "types-Pillow (==10.2.0.20240822)", "types-Pygments (==2.19.0.20250219)", "types-colorama (==0.4.15.20240311)", "types-defusedxml (==0.7.0.20240218)", "types-docutils (==0.21.0.20241128)", "types-requests (==2.32.0.20241016)", "types-urllib3 (==1.26.25.14)"] test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "pytest-xdist[psutil] (>=3.4)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] [[package]] @@ -1144,16 +1144,31 @@ test = ["pytest"] [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.13.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "develop"] files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.13.0-py3-none-any.whl", hash = "sha256:c8dd92cc0d6425a97c18fbb9d1954e5ff92c1ca881a309c45f06ebc0b79058e5"}, + {file = "typing_extensions-4.13.0.tar.gz", hash = 
"sha256:0a4ac55a5820789d87e297727d229866c9650f6521b64206413c4fbada24d95b"}, ] +[[package]] +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + [[package]] name = "urllib3" version = "2.3.0" @@ -1175,4 +1190,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.1" python-versions = "^3.11" -content-hash = "af32dd48a4b5fe6a240cd5da31043c8603016d868131189f75c3e892158e4931" +content-hash = "f548ab310b6f63a345594b8138463cd12a7a85a578c383f81de6054e942e3816" diff --git a/pyproject.toml b/pyproject.toml index a9e11be2..39f31acb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,7 +44,7 @@ mpi4py = "^4.0.3" optional = true [tool.poetry.group.test-mpi.dependencies] -mpi-pytest = "^2025.2.0" +mpi-pytest = "^2025.4.0" [build-system] requires = ["poetry-core"] diff --git a/test/parallel/test_mpi.py b/test/parallel/test_mpi.py index 3d5bcf4c..77eeefb1 100644 --- a/test/parallel/test_mpi.py +++ b/test/parallel/test_mpi.py @@ -1,3 +1,4 @@ +from collections import defaultdict from pathlib import Path import h5py @@ -7,6 +8,7 @@ from pytest_mpi.parallel_assert import parallel_assert import opencosmo as oc +from opencosmo.link import open_linked_files @pytest.fixture @@ -25,6 +27,14 @@ def malformed_header_path(input_path, tmp_path): return update_simulation_parameter(input_path, update, tmp_path, "malformed_header") +@pytest.fixture +def all_paths(data_path: Path): + files = ["haloparticles.hdf5", "haloproperties.hdf5", "sodproperties.hdf5"] + + hdf_files = [data_path / file for file in files] + return list(hdf_files) + + def 
update_simulation_parameter( base_cosmology_path: Path, parameters: dict[str, float], tmp_path: Path, name: str ): @@ -44,33 +54,27 @@ def test_mpi(input_path): with oc.open(input_path) as f: data = f.data - parallel_assert(lambda: len(data) != 0) + parallel_assert(len(data) != 0) @pytest.mark.parallel(nprocs=4) def test_take(input_path): ds = oc.open(input_path) - - comm = mpi4py.MPI.COMM_WORLD data = ds.data - - rank_length = len(data) - total_length = comm.allreduce(rank_length, op=mpi4py.MPI.SUM) - # get a random number between 0 and total_length - if comm.Get_rank() == 0: - n = np.random.randint(1000, total_length) - else: - n = 0 - n = comm.bcast(n, root=0) - + n = 1000 ds = ds.take(n, "random") data = ds.data ds.close() - lengths = comm.gather(len(data), root=0) - if comm.Get_rank() == 0: - total_length = sum(lengths) - assert sum(lengths) == total_length - parallel_assert(lambda: total_length == n, participating=comm.Get_rank() == 0) + parallel_assert(len(data) == n) + + halo_tags = data["fof_halo_tag"] + gathered_tags = mpi4py.MPI.COMM_WORLD.gather(halo_tags, root=0) + tags = set() + if mpi4py.MPI.COMM_WORLD.Get_rank() == 0: + for tag_list in gathered_tags: + tags.update(tag_list) + + assert len(tags) == 4 * n @pytest.mark.parallel(nprocs=4) @@ -79,8 +83,8 @@ def test_filters(input_path): ds = ds.filter(oc.col("sod_halo_mass") > 0) data = ds.data ds.close() - parallel_assert(lambda: len(data) != 0) - parallel_assert(lambda: all(data["sod_halo_mass"] > 0)) + parallel_assert(len(data) != 0) + parallel_assert(all(data["sod_halo_mass"] > 0)) @pytest.mark.parallel(nprocs=4) @@ -103,13 +107,13 @@ def test_filter_write(input_path, tmp_path): tree = handler._InMemoryHandler__tree starts = tree._Tree__starts sizes = tree._Tree__sizes - parallel_assert(lambda: np.all(data == written_data)) + parallel_assert(np.all(data == written_data)) for level in sizes: - parallel_assert(lambda: np.sum(sizes[level]) == len(handler)) - parallel_assert(lambda: starts[level][0] == 
0) + parallel_assert(np.sum(sizes[level]) == len(handler)) + parallel_assert(starts[level][0] == 0) if level > 0: sizes_from_starts = np.diff(np.append(starts[level], len(handler))) - parallel_assert(lambda: np.all(sizes_from_starts == sizes[level])) + parallel_assert(np.all(sizes_from_starts == sizes[level])) @pytest.mark.parallel(nprocs=4) @@ -117,7 +121,7 @@ def test_collect(input_path): with oc.open(input_path) as f: ds = f.filter(oc.col("sod_halo_mass") > 0).take(100, at="random").collect() - parallel_assert(lambda: len(ds.data) == 100) + parallel_assert(len(ds.data) == 400) @pytest.mark.parallel(nprocs=4) @@ -130,29 +134,14 @@ def test_select_collect(input_path): .collect() ) - parallel_assert(lambda: len(ds.data) == 100) - parallel_assert(lambda: set(ds.data.columns) == {"sod_halo_mass", "fof_halo_mass"}) - - -@pytest.mark.parallel(nprocs=4) -def test_take_empty_rank(input_path): - comm = mpi4py.MPI.COMM_WORLD - with oc.open(input_path) as f: - ds = f.filter(oc.col("sod_halo_mass") > 0) - data = ds.data - length = comm.allgather(len(data)) - n_to_take = length[0] + length[1] // 2 - if comm.Get_rank() in [0, 1]: - ds = ds.take(n_to_take, at="start") - else: - with pytest.raises(ValueError): - ds = ds.take(n_to_take, at="start") + parallel_assert(len(ds.data) == 400) + parallel_assert(set(ds.data.columns) == {"sod_halo_mass", "fof_halo_mass"}) @pytest.mark.parallel(nprocs=4) def test_read_particles(particle_path): with oc.open(particle_path) as f: - parallel_assert(lambda: isinstance(f, dict)) + parallel_assert(isinstance(f, dict)) @pytest.mark.parallel(nprocs=4) @@ -162,14 +151,57 @@ def test_write_particles(particle_path, tmp_path): output_path = comm.bcast(output_path, root=0) with oc.open(particle_path) as f: oc.write(output_path, f) + original_data = oc.open(particle_path) + written_data = oc.open(output_path) + indices = np.random.randint(0, len(original_data), 100) + for key in original_data.keys(): + assert np.all( + original_data[key].data[indices] == 
written_data[key].data[indices] + ) + header = original_data.header + written_header = written_data.header + models = ["file_pars", "simulation_pars", "reformat_pars", "cosmotools_pars"] + for model in models: + key = f"_OpenCosmoHeader__{model}" + parallel_assert(getattr(header, key) == getattr(written_header, key)) + + +@pytest.mark.parallel(nprocs=4) +def test_link_write(all_paths, tmp_path): + collection = open_linked_files(*all_paths) + collection = collection.filter(oc.col("sod_halo_mass") > 10**13) + length = len(collection.properties) + length = 8 if length > 8 else length + comm = mpi4py.MPI.COMM_WORLD + output_path = tmp_path / "random_linked.hdf5" + output_path = comm.bcast(output_path, root=0) + + collection = collection.take(length, at="random") + written_data = defaultdict(list) + + for i, (properties, particles) in enumerate(collection.objects()): + for key, ds in particles.items(): + written_data[properties["fof_halo_tag"]].append((key, len(ds))) + + oc.write(output_path, collection) + + read_data = defaultdict(list) + read_ds = oc.open(output_path) + for properties, particles in read_ds.objects(): + for key, ds in particles.items(): + read_data[properties["fof_halo_tag"]].append((key, len(ds))) + + all_read = comm.gather(read_data, root=0) + all_written = comm.gather(written_data, root=0) + # merge the dictionaries if comm.Get_rank() == 0: - original_data = oc.read(particle_path) - written_data = oc.read(output_path) - for key in original_data.keys(): - assert np.all(original_data[key].data == written_data[key].data) - header = original_data.header - written_header = written_data.header - models = ["file_pars", "simulation_pars", "reformat_pars", "cosmotools_pars"] - for model in models: - key = f"_OpenCosmoHeader__{model}" - assert getattr(header, key) == getattr(written_header, key) + read_data = {} + written_data = {} + for i in range(len(all_read)): + read_data.update(all_read[i]) + written_data.update(all_written[i]) + for key in read_data: + 
assert set(read_data[key]) == set(written_data[key]) + + with pytest.raises(NotImplementedError): + oc.read(output_path) diff --git a/test/test_collection.py b/test/test_collection.py index 750d8060..d36d74f4 100644 --- a/test/test_collection.py +++ b/test/test_collection.py @@ -1,9 +1,10 @@ from pathlib import Path +from typing import defaultdict import pytest import opencosmo as oc -from opencosmo.collection import open_linked +from opencosmo.link import open_linked_files @pytest.fixture @@ -12,9 +13,15 @@ def multi_path(data_path): @pytest.fixture -def all_paths(data_path: Path): +def halo_paths(data_path: Path): files = ["haloparticles.hdf5", "haloproperties.hdf5", "sodproperties.hdf5"] + hdf_files = [data_path / file for file in files] + return list(hdf_files) + +@pytest.fixture +def galaxy_paths(data_path: Path): + files = ["galaxyproperties.hdf5", "galaxyparticles.hdf5"] hdf_files = [data_path / file for file in files] return list(hdf_files) @@ -39,37 +46,115 @@ def test_multi_filter_write(multi_path, tmp_path): assert all(ds.data["sod_halo_mass"] > 0) -def test_data_linking(all_paths): - collection = open_linked(*all_paths) - collection = collection.filter(oc.col("sod_halo_mass") > 10**13.5).take( +def test_data_linking(halo_paths): + collection = open_linked_files(*halo_paths) + collection = collection.filter(oc.col("sod_halo_mass") > 10**13).take( 10, at="random" ) particle_species = filter(lambda name: "particles" in name, collection.keys()) - for properties, particles in collection.objects(list(particle_species)): + n_particles = 0 + n_profiles = 0 + for properties, particles in collection.objects(): halo_tags = set() - for particle_species in particles.values(): - halo_tags.update(particle_species.data["fof_halo_tag"]) + for name, particle_species in particles.items(): + if particle_species is None: + continue + try: + halo_tags.update(particle_species.data["fof_halo_tag"]) + n_particles += 1 + except KeyError: + bin_tags = [tag for tag in 
particle_species.data["unique_tag"][0]] + halo_tags.update(bin_tags) + n_profiles += 1 assert len(set(halo_tags)) == 1 assert halo_tags.pop() == properties["fof_halo_tag"] + assert n_particles > 0 + assert n_profiles > 0 -def test_link_write(all_paths, tmp_path): - collection = open_linked(*all_paths) +def test_data_link_selection(halo_paths): + collection = open_linked_files(*halo_paths) + collection = collection.filter(oc.col("sod_halo_mass") > 10**13).take( + 10, at="random" + ) + collection = collection.select("dm_particles", ["x", "y", "z"]) + collection = collection.select("halo_properties", ["fof_halo_tag", "sod_halo_mass"]) + found_dm_particles = False + for properties, particles in collection.objects(): + assert set(properties.keys()) == {"fof_halo_tag", "sod_halo_mass"} + if particles["dm_particles"] is not None: + dm_particles = particles["dm_particles"] + found_dm_particles = True + assert set(dm_particles.data.colnames) == {"x", "y", "z"} + assert found_dm_particles + + +def test_link_halos_to_galaxies(halo_paths, galaxy_paths): + galaxy_path = galaxy_paths[0] + collection = open_linked_files(*halo_paths, galaxy_path) + collection = collection.filter(oc.col("sod_halo_mass") > 10**14).take(10) + for properties, particles in collection.objects(): + fof_tag = properties["fof_halo_tag"] + for p in particles.values(): + try: + tags = set(p.data["fof_halo_tag"]) + assert len(tags) == 1 + assert tags.pop() == fof_tag + except KeyError: + tags = set(p.data["fof_halo_bin_tag"][0]) + assert len(tags) == 1 + assert tags.pop() == fof_tag + + +def test_galaxy_linking(galaxy_paths): + collection = open_linked_files(*galaxy_paths) + collection = collection.filter(oc.col("gal_mass") < 10**12).take(10, at="random") + for properties, particles in collection.objects(): + gal_tag = properties["gal_tag"] + star_particles = particles["star_particles"] + particle_gal_tags = set(star_particles.data["gal_tag"]) + assert len(particle_gal_tags) == 1 + assert 
particle_gal_tags.pop() == gal_tag + + +def test_link_write(halo_paths, tmp_path): + collection = open_linked_files(*halo_paths) collection = collection.filter(oc.col("sod_halo_mass") > 10**13.5).take( 10, at="random" ) + original_output = defaultdict(list) + for properties, particles in collection.objects(): + for name, particle_species in particles.items(): + if particle_species is None: + continue + original_output[properties["fof_halo_tag"]].append(name) + + read_output = defaultdict(list) oc.write(tmp_path / "linked.hdf5", collection) - written_data = oc.read(tmp_path / "linked.hdf5") + written_data = oc.open(tmp_path / "linked.hdf5") n = 0 - particle_species = filter(lambda name: "particles" in name, written_data.keys()) - for properties, particles in written_data.objects(list(particle_species)): + for properties, particles in written_data.objects(): halo_tags = set() n += 1 - for particle_type, particle_species in particles.items(): - species_tags = set(particle_species.data["fof_halo_tag"]) - halo_tags.update(species_tags) + for linked_type, linked_dataset in particles.items(): + if linked_dataset is None: + continue + read_output[properties["fof_halo_tag"]].append(linked_type) + + if "particles" not in linked_type: + bin_tags = [tag for tag in linked_dataset.data["fof_halo_bin_tag"][0]] + halo_tags.update(bin_tags) + else: + species_tags = set(linked_dataset.data["fof_halo_tag"]) + halo_tags.update(species_tags) assert len(halo_tags) == 1 assert halo_tags.pop() == properties["fof_halo_tag"] + for key in original_output.keys(): + assert set(original_output[key]) == set(read_output[key]) + + with pytest.raises(NotImplementedError): + oc.read(tmp_path / "linked.hdf5") + assert n == 10