From 4c83084bd1f4005097c27c53d748b1b028feb22b Mon Sep 17 00:00:00 2001 From: Henrik Andersson Date: Fri, 18 Oct 2024 11:16:53 +0200 Subject: [PATCH 1/3] Docstring fixes in _grid_geometry.py --- mikeio/spatial/_grid_geometry.py | 123 ++++++++++++++++++------------- 1 file changed, 70 insertions(+), 53 deletions(-) diff --git a/mikeio/spatial/_grid_geometry.py b/mikeio/spatial/_grid_geometry.py index 5189104fd..57f8cd4c0 100644 --- a/mikeio/spatial/_grid_geometry.py +++ b/mikeio/spatial/_grid_geometry.py @@ -69,8 +69,9 @@ def _print_axis_txt(name: str, x: np.ndarray, dx: float) -> str: @dataclass class Grid1D(_Geometry): - """1D grid (node-based) - axis is increasing and equidistant + """1D grid (node-based). + + Axis is increasing and equidistant Parameters ---------- @@ -103,6 +104,7 @@ class Grid1D(_Geometry): ```{python} mikeio.Grid1D(x=[0.1, 0.5, 0.9]) ``` + """ _dx: float @@ -125,7 +127,7 @@ def __init__( node_coordinates: np.ndarray | None = None, axis_name: str = "x", ): - """Create equidistant 1D spatial geometry""" + """Create equidistant 1D spatial geometry.""" super().__init__(projection=projection) self._origin = (0.0, 0.0) if origin is None else (origin[0], origin[1]) assert len(self._origin) == 2, "origin must be a tuple of length 2" @@ -154,25 +156,25 @@ def __str__(self) -> str: return f"Grid1D (n={self.nx}, dx={self.dx:.4g})" def find_index(self, x: float, **kwargs: Any) -> int: - """Find nearest point + """Find nearest point. Parameters ---------- x : float x-coordinate of point + **kwargs : Any + Not used Returns ------- int index of nearest point - **kwargs : Any - Not used See Also -------- [](`mikeio.Dataset.sel`) - """ + """ d = (self.x - x) ** 2 return int(np.argmin(d)) @@ -200,18 +202,18 @@ def interp(self, data: np.ndarray, ids: np.ndarray, weights: np.ndarray) -> Any: @property def dx(self) -> float: - """grid spacing""" + """Grid spacing.""" return self._dx @property def x(self) -> np.ndarray: - """array of node coordinates""" + """Array of node coordinates.""" x1 = self._x0 + self.dx * (self.nx - 1) return np.linspace(self._x0, x1, self.nx) @property def nx(self) -> int: - """number of grid points""" + """Number of grid points.""" return self._nx @property @@ -225,7 +227,7 @@ def orientation(self) -> float: def isel( self, idx: int | np.int64 | slice, axis: int | None = None ) -> "Grid1D" | GeometryPoint2D | GeometryPoint3D | GeometryUndefined: - """Get a subset geometry from this geometry + """Get a subset geometry from this geometry. Parameters ---------- @@ -254,8 +256,8 @@ def isel( ```{python} g.isel(1) ``` - """ + """ if not np.isscalar(idx): nc = None if self._nc is None else self._nc[idx, :] return Grid1D( @@ -279,7 +281,7 @@ def isel( class _Grid2DPlotter: - """Plot Grid2D + """Plot Grid2D. Examples -------- @@ -288,6 +290,7 @@ class _Grid2DPlotter: g = mikeio.read("../data/waves.dfs2").geometry ax = g.plot() ``` + """ def __init__(self, geometry: "Grid2D") -> None: @@ -299,7 +302,7 @@ def __call__( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Any: - """Plot bathymetry as coloured patches""" + """Plot bathymetry as coloured patches.""" ax = self._get_ax(ax, figsize) return self._plot_grid(ax, **kwargs) @@ -346,7 +349,7 @@ def outline( linewidth: float = 1.2, **kwargs: Any, ) -> Axes: - """Plot Grid2D outline + """Plot Grid2D outline. 
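Before the plotting examples, a brief sketch recapping the Grid1D lookup behaviour documented above (values are illustrative; an equidistant axis is assumed):

```{python}
import mikeio
g = mikeio.Grid1D(x0=0.0, dx=0.1, nx=10)
g.find_index(x=0.23)  # nearest node is x=0.2, i.e. index 2
```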
Examples -------- @@ -354,6 +357,7 @@ def outline( g = mikeio.read("../data/waves.dfs2").geometry g.plot.outline() ``` + """ ax = self._get_ax(ax, figsize) g = self.g @@ -398,7 +402,8 @@ def _set_aspect_and_labels(self, ax: Axes) -> None: @dataclass class Grid2D(_Geometry): - """2D grid + """2D grid. + Origin in the center of cell in lower-left corner x and y axes are increasing and equidistant """ @@ -433,7 +438,7 @@ def __init__( is_spectral: bool = False, is_vertical: bool = False, ): - """Create equidistant 2D spatial geometry + """Create equidistant 2D spatial geometry. Parameters ---------- @@ -474,6 +479,7 @@ def __init__( import mikeio mikeio.Grid2D(x0=12.0, nx=2, dx=0.25, y0=55.0, ny=3, dy=0.25, projection="LONG/LAT") ``` + """ super().__init__(projection=projection) self._shift_origin_on_write = origin is None # user-constructed @@ -519,7 +525,7 @@ def _create_in_bbox( nx: int | None = None, ny: int | None = None, ) -> None: - """create 2d grid in bounding box, specifying spacing or shape + """Create 2d grid in bounding box, specifying spacing or shape. Parameters ---------- @@ -538,6 +544,7 @@ def _create_in_bbox( ny : int, optional number of points in y-direction can be None, in which case the value will be inferred + """ left, bottom, right, top = self._parse_bbox(bbox) @@ -564,7 +571,7 @@ def _create_in_bbox( @staticmethod def _parse_bbox( - bbox: tuple[float, float, float, float] + bbox: tuple[float, float, float, float], ) -> tuple[float, float, float, float]: left = bbox[0] bottom = bbox[1] @@ -630,17 +637,17 @@ def __str__(self) -> str: @property def dx(self) -> float: - """x grid spacing""" + """X grid spacing.""" return self._dx @property def dy(self) -> float: - """y grid spacing""" + """Y grid spacing.""" return self._dy @property def x(self) -> np.ndarray: - """array of x coordinates (element center)""" + """Array of x coordinates (element center).""" if self.is_spectral and self.dx > 1: return self._logarithmic_f(self.nx, self._x0, self.dx) @@ -660,7 +667,7 @@ def x(self) -> np.ndarray: def _logarithmic_f( n: int = 25, f0: float = 0.055, freq_factor: float = 1.1 ) -> np.ndarray: - """Generate logarithmic frequency axis + """Generate logarithmic frequency axis. Parameters ---------- @@ -675,6 +682,7 @@ def _logarithmic_f( ------- np.ndarray array of logarithmic distributed discrete frequencies + """ logf0 = np.log(f0) logdf = np.log(f0 * freq_factor) - logf0 @@ -683,7 +691,7 @@ def _logarithmic_f( @property def y(self) -> np.ndarray: - """array of y coordinates (element center)""" + """Array of y coordinates (element center).""" if self.is_local_coordinates and not (self.is_spectral or self.is_vertical): y0 = self._y0 + self._dy / 2 else: @@ -695,27 +703,28 @@ def y(self) -> np.ndarray: @property def nx(self) -> int: - """number of x grid points""" + """Number of x grid points.""" return self._nx @property def ny(self) -> int: - """number of y grid points""" + """Number of y grid points.""" return self._ny @property def origin(self) -> tuple[float, float]: - """Coordinates of grid origo (in projection)""" + """Coordinates of grid origo (in projection).""" return self._origin @property def orientation(self) -> float: - """Grid orientation""" + """Grid orientation.""" return self._orientation @property def bbox(self) -> BoundingBox: - """bounding box (left, bottom, right, top) + """Bounding box (left, bottom, right, top). + Note: not the same as the cell center values (x0,y0,x1,y1)! 
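To make the note concrete, a small sketch reusing the constructor example from this class:

```{python}
g = mikeio.Grid2D(x0=12.0, nx=2, dx=0.25, y0=55.0, ny=3, dy=0.25, projection="LONG/LAT")
g.bbox  # extends dx/2 and dy/2 beyond the first/last cell centers
```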
""" if self._is_rotated: @@ -730,7 +739,7 @@ def bbox(self) -> BoundingBox: @cached_property def xy(self) -> np.ndarray: - """n-by-2 array of x- and y-coordinates""" + """N-by-2 array of x- and y-coordinates.""" xx, yy = np.meshgrid(self.x, self.y) xcol = xx.reshape(-1, 1) ycol = yy.reshape(-1, 1) @@ -738,14 +747,15 @@ def xy(self) -> np.ndarray: @property def _cart(self) -> Cartography: - """MIKE Core Cartography object""" + """MIKE Core Cartography object.""" factory = ( Cartography.CreateGeoOrigin if self.is_geo else Cartography.CreateProjOrigin ) return factory(self.projection_string, *self.origin, self.orientation) def _shift_x0y0_to_origin(self) -> None: - """Shift spatial axis to start at (0,0) adding the start to origin instead + """Shift spatial axis to start at (0,0) adding the start to origin instead. + Note: this will not change the x or y properties. """ if self._is_rotated: @@ -762,7 +772,7 @@ def _shift_x0y0_to_origin(self) -> None: self._origin = (self._origin[0] + x0, self._origin[1] + y0) def contains(self, coords: np.ndarray) -> Any: - """test if a list of points are inside grid + """Test if a list of points are inside grid. Parameters ---------- @@ -773,6 +783,7 @@ def contains(self, coords: np.ndarray) -> Any: ------- bool array True for points inside, False otherwise + """ coords = np.atleast_2d(coords) y = coords[:, 1] @@ -792,7 +803,7 @@ def find_index( coords: np.ndarray | None = None, area: tuple[float, float, float, float] | None = None, ) -> tuple[Any, Any]: - """Find nearest index (i,j) of point(s) + """Find nearest index (i,j) of point(s). Parameters ---------- @@ -809,6 +820,7 @@ def find_index( ------- array(int), array(int) i- and j-index of nearest cell + """ if x is None and y is not None and not np.isscalar(y): raise ValueError( @@ -841,7 +853,7 @@ def find_index( raise ValueError("Provide x,y or coords") def _xy_to_index(self, xy: np.ndarray) -> tuple[np.ndarray, np.ndarray]: - """Find specific points in this geometry""" + """Find specific points in this geometry.""" xy = np.atleast_2d(xy) y = xy[:, 1] x = xy[:, 0] @@ -862,7 +874,7 @@ def _xy_to_index(self, xy: np.ndarray) -> tuple[np.ndarray, np.ndarray]: def _bbox_to_index( self, bbox: tuple[float, float, float, float] | BoundingBox ) -> tuple[range, range]: - """Find subarea within this geometry""" + """Find subarea within this geometry.""" if not (len(bbox) == 4): raise ValueError( "area most be a bounding box of coordinates e.g. area=(-10.0, 10.0 20.0, 30.0)" @@ -976,7 +988,8 @@ def _to_element_table(self, index_base: int = 0) -> list[list[int]]: @staticmethod def _centers_to_nodes(x: np.ndarray) -> np.ndarray: - """Nodes will be placed mid-way between centers + """Nodes will be placed mid-way between centers. + If non-equidistant, new centers will hence not equal old centers! """ if len(x) == 1: @@ -991,12 +1004,13 @@ def _nodes_to_centers(xn: np.ndarray) -> Any: return (xn[1:] + xn[:-1]) / 2 def get_node_coordinates(self) -> np.ndarray: - """node coordinates for this grid + """Node coordinates for this grid. Returns ------- array(float) 2d array with x,y-coordinates, length=(nx+1)*(ny+1) + """ xn = self._centers_to_nodes(self.x) yn = self._centers_to_nodes(self.y) @@ -1012,7 +1026,7 @@ def to_geometryFM( north: int = 5, south: int = 3, ) -> GeometryFM2D: - """convert Grid2D to GeometryFM2D + """Convert Grid2D to GeometryFM2D. 
Parameters ---------- @@ -1026,6 +1040,7 @@ def to_geometryFM( code value for north boundary south: int, optional code value for south boundary + """ from mikeio.spatial._FM_geometry import GeometryFM2D @@ -1062,7 +1077,7 @@ def to_geometryFM( def to_mesh( self, outfilename: str | Path, z: np.ndarray | float | None = None ) -> None: - """export grid to mesh file + """Export grid to mesh file. Parameters ---------- @@ -1071,6 +1086,7 @@ def to_mesh( z : float or array(float), optional bathymetry values for each node, by default 0 if array: must have length=(nx+1)*(ny+1) + """ g = self.to_geometryFM() @@ -1087,7 +1103,8 @@ def to_mesh( @dataclass class Grid3D(_Geometry): - """3D grid + """3D grid. + Origin in the center of cell in lower-left corner x, y and z axes are increasing and equidistant """ @@ -1149,7 +1166,7 @@ def _is_rotated(self) -> Any: @property def x(self) -> np.ndarray: - """array of x-axis coordinates (element center)""" + """Array of x-axis coordinates (element center).""" x0 = self._x0 + self._dx / 2 if self.is_local_coordinates else self._x0 x1 = x0 + self.dx * (self.nx - 1) @@ -1158,17 +1175,17 @@ def x(self) -> np.ndarray: @property def dx(self) -> float: - """x-axis grid spacing""" + """X-axis grid spacing.""" return self._dx @property def nx(self) -> int: - """number of x grid points""" + """Number of x grid points.""" return self._nx @property def y(self) -> np.ndarray: - """array of y-axis coordinates (element center)""" + """Array of y-axis coordinates (element center).""" y0 = self._y0 + self._dy / 2 if self.is_local_coordinates else self._y0 y1 = y0 + self.dy * (self.ny - 1) y_local = np.linspace(y0, y1, self.ny) @@ -1176,38 +1193,38 @@ def y(self) -> np.ndarray: @property def dy(self) -> float: - """y-axis grid spacing""" + """Y-axis grid spacing.""" return self._dy @property def ny(self) -> int: - """number of y grid points""" + """Number of y grid points.""" return self._ny @property def z(self) -> np.ndarray: - """array of z-axis node coordinates""" + """Array of z-axis node coordinates.""" z1 = self._z0 + self.dz * (self.nz - 1) return np.linspace(self._z0, z1, self.nz) @property def dz(self) -> float: - """z-axis grid spacing""" + """Z-axis grid spacing.""" return self._dz @property def nz(self) -> int: - """number of z grid points""" + """Number of z grid points.""" return self._nz @property def origin(self) -> tuple[float, float]: - """Coordinates of grid origo (in projection)""" + """Coordinates of grid origo (in projection).""" return self._origin @property def orientation(self) -> float: - """Grid orientation""" + """Grid orientation.""" return self._orientation def find_index( @@ -1224,7 +1241,7 @@ def find_index( def isel( self, idx: int | np.ndarray, axis: int ) -> Grid3D | Grid2D | GeometryUndefined: - """Get a subset geometry from this geometry""" + """Get a subset geometry from this geometry.""" assert isinstance(axis, int), "axis must be an integer (or 'x', 'y' or 'z')" axis = axis + 3 if axis < 0 else axis From 9dda2ee2851a0cc92debc18a9d2f87a9b58f74ac Mon Sep 17 00:00:00 2001 From: Henrik Andersson Date: Fri, 18 Oct 2024 11:24:39 +0200 Subject: [PATCH 2/3] Docstring fixes for _FM_geometry.py --- mikeio/spatial/_FM_geometry.py | 90 ++++++++++++++++++---------------- 1 file changed, 49 insertions(+), 41 deletions(-) diff --git a/mikeio/spatial/_FM_geometry.py b/mikeio/spatial/_FM_geometry.py index 1e18bb503..564228cab 100644 --- a/mikeio/spatial/_FM_geometry.py +++ b/mikeio/spatial/_FM_geometry.py @@ -39,7 +39,7 @@ class _GeometryFMPlotter: - 
"""Plot GeometryFM + """Plot GeometryFM. Examples -------- @@ -51,6 +51,7 @@ class _GeometryFMPlotter: >>> g.plot.mesh() # mesh only >>> g.plot.outline() # domain outline only >>> g.plot.boundary_nodes() + """ def __init__(self, geometry: GeometryFM2D | GeometryFM3D) -> None: @@ -62,7 +63,7 @@ def __call__( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot bathymetry as coloured patches""" + """Plot bathymetry as coloured patches.""" ax = self._get_ax(ax, figsize) kwargs["plot_type"] = kwargs.get("plot_type") or "patch" return self._plot_FM_map(ax, **kwargs) @@ -73,7 +74,7 @@ def contour( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot bathymetry as contour lines""" + """Plot bathymetry as contour lines.""" ax = self._get_ax(ax, figsize) kwargs["plot_type"] = "contour" return self._plot_FM_map(ax, **kwargs) @@ -84,7 +85,7 @@ def contourf( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot bathymetry as filled contours""" + """Plot bathymetry as filled contours.""" ax = self._get_ax(ax, figsize) kwargs["plot_type"] = "contourf" return self._plot_FM_map(ax, **kwargs) @@ -126,8 +127,7 @@ def mesh( figsize: tuple[float, float] | None = None, ax: Axes | None = None, ) -> Axes: - """Plot mesh only""" - + """Plot mesh only.""" # TODO this must be a duplicate, delegate from matplotlib.collections import PatchCollection # type: ignore @@ -152,8 +152,7 @@ def outline( figsize: tuple[float, float] | None = None, ax: Axes | None = None, ) -> Axes: - """Plot domain outline (using the boundary_polylines property)""" - + """Plot domain outline (using the boundary_polylines property).""" ax = self._get_ax(ax=ax, figsize=figsize) ax.set_aspect(self._plot_aspect()) @@ -174,7 +173,7 @@ def boundary_nodes( figsize: tuple[float, float] | None = None, ax: Axes | None = None, ) -> Axes: - """Plot mesh boundary nodes and their code values""" + """Plot mesh boundary nodes and their code values.""" import matplotlib.pyplot as plt ax = self._get_ax(ax=ax, figsize=figsize) @@ -341,7 +340,7 @@ def is_spectral(self) -> bool: @property def n_nodes(self) -> int: - """Number of nodes""" + """Number of nodes.""" return len(self._node_ids) @property @@ -350,7 +349,7 @@ def node_ids(self) -> np.ndarray: @property def n_elements(self) -> int: - """Number of elements""" + """Number of elements.""" return len(self._element_ids) @property @@ -359,7 +358,7 @@ def element_ids(self) -> np.ndarray: @cached_property def max_nodes_per_element(self) -> int: - """The maximum number of nodes for an element""" + """The maximum number of nodes for an element.""" maxnodes = 0 for local_nodes in self.element_table: n = len(local_nodes) @@ -369,7 +368,7 @@ def max_nodes_per_element(self) -> int: @property def codes(self) -> np.ndarray: - """Node codes of all nodes (0=water, 1=land, 2...=open boundaries)""" + """Node codes of all nodes (0=water, 1=land, 2...=open boundaries).""" return self._codes @codes.setter @@ -380,7 +379,7 @@ def codes(self, v: np.ndarray) -> None: @property def boundary_codes(self) -> list[int]: - """Unique list of boundary codes""" + """Unique list of boundary codes.""" valid = list(set(self.codes)) return [code for code in valid if code > 0] @@ -440,8 +439,7 @@ def __repr__(self) -> str: @staticmethod def _point_in_polygon(xn: np.ndarray, yn: np.ndarray, xp: float, yp: float) -> bool: - """Check for each side in the polygon that the point is on the correct side""" - + """Check for each side in the polygon that the point is on the 
correct side.""" for j in range(len(xn) - 1): if (yn[j + 1] - yn[j]) * (xp - xn[j]) + (-xn[j + 1] + xn[j]) * ( yp - yn[j] @@ -462,7 +460,7 @@ def _area_is_polygon(area: Sequence[tuple[float, float]] | Sequence[float]) -> b @property def type_name(self) -> str: - """Type name, e.g. Mesh, Dfsu2D""" + """Type name, e.g. Mesh, Dfsu2D.""" return self._type.name if self._type else "Mesh" @property @@ -471,12 +469,11 @@ def ndim(self) -> int: @property def geometry2d(self) -> GeometryFM2D: - """Return self""" return self @property def is_2d(self) -> bool: - """Type is either mesh or Dfsu2D (2 horizontal dimensions)""" + """Type is either mesh or Dfsu2D (2 horizontal dimensions).""" return self._type in ( DfsuFileType.Dfsu2D, DfsuFileType.DfsuSpectral2D, @@ -485,12 +482,12 @@ def is_2d(self) -> bool: @property def is_layered(self) -> bool: - """Type is layered dfsu (3d, vertical profile or vertical column)""" + """Type is layered dfsu (3d, vertical profile or vertical column).""" return False @property def is_spectral(self) -> bool: - """Type is spectral dfsu (point, line or area spectrum)""" + """Type is spectral dfsu (point, line or area spectrum).""" return self._type in ( DfsuFileType.DfsuSpectral0D, DfsuFileType.DfsuSpectral1D, @@ -499,12 +496,12 @@ def is_spectral(self) -> bool: @property def is_tri_only(self) -> bool: - """Does the mesh consist of triangles only?""" + """Does the mesh consist of triangles only.""" return self.max_nodes_per_element == 3 or self.max_nodes_per_element == 6 @cached_property def element_coordinates(self) -> np.ndarray: - """Center coordinates of each element""" + """Center coordinates of each element.""" return self._calc_element_coordinates() @cached_property @@ -519,7 +516,7 @@ def find_nearest_elements( n_nearest: int = 1, return_distances: bool = False, ) -> Any: - """Find index of nearest elements (optionally for a list) + """Find index of nearest elements (optionally for a list). Parameters ---------- @@ -554,6 +551,7 @@ def find_nearest_elements( See Also -------- find_index : find element indicies for points or an area + """ idx, d2d = self._find_n_nearest_2d_elements(x, y, n=n_nearest) @@ -570,7 +568,7 @@ def get_2d_interpolant( p: int = 2, radius: float | None = None, ) -> tuple[Any, Any]: - """IDW interpolant for list of coordinates + """IDW interpolant for list of coordinates. Parameters ---------- @@ -590,6 +588,7 @@ def get_2d_interpolant( ------- (np.array, np.array) element ids and weights + """ xy = np.atleast_2d(xy) ids, dists = self._find_n_nearest_2d_elements(xy, n=n_nearest) @@ -618,7 +617,7 @@ def interp2d( weights: np.ndarray | None = None, shape: tuple[int, ...] | None = None, ) -> np.ndarray | list[np.ndarray]: - """interp spatially in data (2d only) + """Interpolate spatially in data (2d only). Parameters ---------- @@ -642,6 +641,7 @@ def interp2d( >>> g = dfs.get_overset_grid(shape=(50,40)) >>> elem_ids, weights = dfs.get_2d_interpolant(g.xy) >>> dsi = dfs.interp2d(ds, elem_ids, weights) + """ return interp2d(data, elem_ids, weights, shape) # type: ignore @@ -743,7 +743,7 @@ def get_overset_grid( ny: int | None = None, buffer: float = 0.0, ) -> Grid2D: - """get a 2d grid that covers the domain by specifying spacing or shape + """Get a 2d grid that covers the domain by specifying spacing or shape. 
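An illustrative sketch of the typical workflow (file path and spacing are examples only, not taken from this diff):

```{python}
dfs = mikeio.open("../data/HD2D.dfsu")
g = dfs.geometry.get_overset_grid(dx=500.0)  # Grid2D covering the mesh bounding box
```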
Parameters ---------- @@ -766,6 +766,7 @@ def get_overset_grid( ------- 2d grid + """ nc = self.node_coordinates bbox = xy_to_bbox(nc, buffer=buffer) @@ -778,6 +779,7 @@ def get_element_area(self) -> np.ndarray: ------- np.array(float) areas in m2 + """ n_elements = self.n_elements @@ -840,11 +842,11 @@ def get_element_area(self) -> np.ndarray: @cached_property def boundary_polylines(self) -> BoundaryPolylines: - """Lists of closed polylines defining domain outline""" + """Lists of closed polylines defining domain outline.""" return self._get_boundary_polylines() def contains(self, points: np.ndarray) -> np.ndarray: - """test if a list of points are contained by mesh + """Test if a list of points are contained by mesh. Parameters ---------- @@ -855,6 +857,7 @@ def contains(self, points: np.ndarray) -> np.ndarray: ------- bool array True for points inside, False otherwise + """ import matplotlib.path as mp # type: ignore @@ -880,7 +883,7 @@ def __contains__(self, pt: np.ndarray) -> bool: return self.contains(pt)[0] def _get_boundary_polylines_uncategorized(self) -> list[list[np.int64]]: - """Construct closed polylines for all boundary faces""" + """Construct closed polylines for all boundary faces.""" boundary_faces = self._get_boundary_faces() face_remains = boundary_faces.copy() polylines = [] @@ -906,9 +909,7 @@ def _get_boundary_polylines_uncategorized(self) -> list[list[np.int64]]: return polylines def _get_boundary_polylines(self) -> BoundaryPolylines: - """Get boundary polylines and categorize as inner or outer by - assessing the signed area - """ + """Get boundary polylines and categorize as inner or outer by assessing the signed area.""" polylines = self._get_boundary_polylines_uncategorized() poly_lines_int = [] @@ -934,7 +935,7 @@ def _get_boundary_polylines(self) -> BoundaryPolylines: return BoundaryPolylines(n_ext, poly_lines_ext, n_int, poly_lines_int) def _get_boundary_faces(self) -> np.ndarray: - """Construct list of faces""" + """Construct list of faces.""" element_table = self.element_table all_faces = [] @@ -957,7 +958,7 @@ def _get_boundary_faces(self) -> np.ndarray: def isel( self, idx: Sequence[int], keepdims: bool = False, **kwargs: Any ) -> "GeometryFM2D" | GeometryPoint2D: - """export a selection of elements to a new geometry + """Export a selection of elements to a new geometry. Typically not called directly, but by Dataset/DataArray's isel() or sel() methods. @@ -970,6 +971,8 @@ def isel( Should the original Geometry type be kept (keepdims=True) or should it be reduced e.g. 
to a GeometryPoint2D if possible (keepdims=False), by default False + **kwargs: Any + Not used Returns ------- @@ -979,8 +982,8 @@ def isel( See Also -------- find_index : find element indicies for points or an area - """ + """ return self.elements_to_geometry(elements=idx, keepdims=keepdims) def find_index( @@ -1036,6 +1039,7 @@ def find_index( -------- isel : get subset geometry for specific indicies find_nearest_elements : find nearest instead of containing elements + """ if (coords is not None) or (x is not None) or (y is not None): if area is not None: @@ -1065,7 +1069,7 @@ def _inside_polygon(polygon: np.ndarray, xy: np.ndarray) -> np.ndarray: def _elements_in_area( self, area: Sequence[float] | Sequence[tuple[float, float]] ) -> np.ndarray: - """Find 2d element ids of elements inside area""" + """Find 2d element ids of elements inside area.""" if self._area_is_bbox(area): x0, y0, x1, y1 = area xc = self.element_coordinates[:, 0] @@ -1113,7 +1117,7 @@ def elements_to_geometry( def _get_nodes_and_table_for_elements( self, elements: np.ndarray | list[int] ) -> tuple[Any, Any]: - """list of nodes and element table for a list of elements + """List of nodes and element table for a list of elements. Parameters ---------- @@ -1126,6 +1130,7 @@ def _get_nodes_and_table_for_elements( array of node ids (unique) list(list(int)) element table with a list of nodes for each element + """ elem_tbl = np.empty(len(elements), dtype=np.dtype("O")) @@ -1138,7 +1143,7 @@ def _get_nodes_and_table_for_elements( def get_node_centered_data( self, data: np.ndarray, extrapolate: bool = True ) -> np.ndarray: - """convert cell-centered data to node-centered by pseudo-laplacian method + """Convert cell-centered data to node-centered by pseudo-laplacian method. Parameters ---------- @@ -1151,6 +1156,7 @@ def get_node_centered_data( ------- np.array(float) node-centered data + """ geometry = self nc = geometry.node_coordinates @@ -1159,12 +1165,13 @@ def get_node_centered_data( return _get_node_centered_data(nc, elem_table, ec, data, extrapolate) def to_shapely(self) -> Any: - """Export mesh as shapely MultiPolygon + """Export mesh as shapely MultiPolygon. Returns ------- shapely.geometry.MultiPolygon polygons with mesh elements + """ from shapely.geometry import MultiPolygon, Polygon # type: ignore @@ -1182,12 +1189,13 @@ def to_shapely(self) -> Any: return mp def to_mesh(self, outfilename: str | Path) -> None: - """Export geometry to new mesh file + """Export geometry to new mesh file. Parameters ---------- outfilename : str path to file to be written + """ builder = MeshBuilder() outfilename = str(outfilename) From 5fee9328751d99528f207ff55520669afb375e89 Mon Sep 17 00:00:00 2001 From: Henrik Andersson Date: Fri, 18 Oct 2024 14:39:11 +0200 Subject: [PATCH 3/3] Docstring fixes, e.g. missing arguments, wrong argument types, etc. 
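The recurring pattern: give every docstring a one-line summary that ends
with a period, and document previously missing parameters. An
illustrative excerpt of the parameter blocks added throughout:

    **kwargs: Any
        Additional keyword arguments

Wrong argument types and defaults are corrected in the same pass.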
--- mikeio/__init__.py | 11 +- mikeio/_interpolation.py | 9 +- mikeio/_spectral.py | 12 +- mikeio/_time.py | 6 +- mikeio/dataset/_data_plot.py | 77 ++++++---- mikeio/dataset/_data_utils.py | 3 +- mikeio/dataset/_dataarray.py | 183 +++++++++++++++------- mikeio/dataset/_dataset.py | 194 ++++++++++++++++-------- mikeio/dfs/_dfs.py | 56 ++++--- mikeio/dfs/_dfs0.py | 39 ++--- mikeio/dfs/_dfs1.py | 6 +- mikeio/dfs/_dfs2.py | 25 ++- mikeio/dfs/_dfs3.py | 17 ++- mikeio/dfsu/_dfsu.py | 44 +++--- mikeio/dfsu/_layered.py | 47 +++--- mikeio/dfsu/_mesh.py | 25 +-- mikeio/dfsu/_spectral.py | 38 ++--- mikeio/eum/_eum.py | 25 +-- mikeio/exceptions.py | 12 ++ mikeio/generic.py | 53 ++++--- mikeio/pfs/__init__.py | 3 +- mikeio/pfs/_pfsdocument.py | 27 ++-- mikeio/pfs/_pfssection.py | 36 +++-- mikeio/spatial/_FM_geometry_layered.py | 53 ++++--- mikeio/spatial/_FM_geometry_spectral.py | 22 +-- mikeio/spatial/_FM_utils.py | 81 ++++++---- mikeio/spatial/_geometry.py | 6 +- mikeio/spatial/_utils.py | 7 +- mikeio/spatial/crs.py | 23 +-- mikeio/xyz.py | 4 + pyproject.toml | 6 +- 31 files changed, 698 insertions(+), 452 deletions(-) diff --git a/mikeio/__init__.py b/mikeio/__init__.py index f0db3e04e..1a00baaea 100644 --- a/mikeio/__init__.py +++ b/mikeio/__init__.py @@ -54,7 +54,7 @@ def read( keepdims: bool = False, **kwargs: Any, ) -> Dataset: - """Read all or a subset of the data from a dfs file + """Read all or a subset of the data from a dfs file. All dfs files can be subsetted with the *items* and *time* arguments. But the following file types also have the shown additional arguments: @@ -90,6 +90,8 @@ def read( fill_bad_data_value: fill value for to impute corrupt data, used in conjunction with error_bad_data=False default np.nan + **kwargs: Any + Additional keyword arguments Returns ------- @@ -124,8 +126,8 @@ def read( >>> ds = mikeio.read("MT3D_sigma_z.dfsu", layers=[-2,-1]) >>> ds = mikeio.read("HD2D.dfsu", error_bad_data=False) # replace corrupt data with np.nan >>> ds = mikeio.read("HD2D.dfsu", error_bad_data=False, fill_bad_data_value=0.0) # replace corrupt data with 0.0 - """ + """ ext = Path(filename).suffix.lower() if "dfs" not in ext: @@ -137,7 +139,7 @@ def read( def open(filename: str | Path, **kwargs: Any) -> Any: - """Open a dfs/mesh file (and read the header) + """Open a dfs/mesh file (and read the header). The typical workflow for small dfs files is to read all data with *mikeio.read* instead of using this function. For big files, however, @@ -152,6 +154,8 @@ def open(filename: str | Path, **kwargs: Any) -> Any: type : str, optional Dfs2 only. Additional information about the file, e.g. "spectral" for spectral dfs2 files. By default: None. + **kwargs: Any + Additional keyword arguments, e.g. *type="spectral"* See also -------- @@ -164,6 +168,7 @@ def open(filename: str | Path, **kwargs: Any) -> Any: >>> ds = dfs.read(items="Salinity", time="2016-01") >>> dfs = mikeio.open("pt_spectra.dfs2", type="spectral") + """ ext = Path(filename).suffix.lower()[1:] diff --git a/mikeio/_interpolation.py b/mikeio/_interpolation.py index 1c073b6e3..f9d725149 100644 --- a/mikeio/_interpolation.py +++ b/mikeio/_interpolation.py @@ -9,7 +9,7 @@ def get_idw_interpolant(distances: np.ndarray, p: float = 2) -> np.ndarray: - """IDW interpolant for 2d array of distances + """IDW interpolant for 2d array of distances. 
https://pro.arcgis.com/en/pro-app/help/analysis/geostatistical-analyst/how-inverse-distance-weighted-interpolation-works.htm @@ -24,6 +24,7 @@ def get_idw_interpolant(distances: np.ndarray, p: float = 2) -> np.ndarray: ------- np.array weights + """ is_1d = distances.ndim == 1 if is_1d: @@ -68,7 +69,7 @@ def interp2d( weights: np.ndarray | None = None, shape: tuple[int, ...] | None = None, ) -> Dataset | np.ndarray: - """interp spatially in data (2d only) + """interp spatially in data (2d only). Parameters ---------- @@ -90,6 +91,7 @@ def interp2d( -------- >>> elem_ids, weights = dfs.get_spatial_interpolant(coords) >>> dsi = interp2d(ds, elem_ids, weights) + """ from .dataset import DataArray, Dataset @@ -157,7 +159,7 @@ def _interp_itemstep( elem_ids: np.ndarray, weights: np.ndarray | None = None, ) -> np.ndarray: - """Interpolate a single item and time step + """Interpolate a single item and time step. Parameters ---------- @@ -176,6 +178,7 @@ def _interp_itemstep( Notes ----- This function is used internally by interp2d + """ if weights is None: return data[elem_ids] diff --git a/mikeio/_spectral.py b/mikeio/_spectral.py index b0a4681f1..6e4fcccbe 100644 --- a/mikeio/_spectral.py +++ b/mikeio/_spectral.py @@ -23,14 +23,16 @@ def plot_2dspectrum( figsize: tuple[float, float] = (7, 7), add_colorbar: bool = True, ) -> Axes: - """ - Plot spectrum in polar coordinates + """Plot spectrum in polar coordinates. Parameters ---------- spectrum: np.array - spectral values as 2d array with dimensions: directions, frequencies + frequencies: np.array + frequency axis + directions: np.array + direction axis plot_type: str, optional type of plot: 'contour', 'contourf', 'patch', 'shaded', by default: 'contourf' @@ -61,8 +63,8 @@ def plot_2dspectrum( Returns ------- - """ + """ import matplotlib.pyplot as plt if (frequencies is None or len(frequencies) <= 1) and ( @@ -214,7 +216,7 @@ def calc_m0_from_spectrum( def _f_to_df(f: np.ndarray) -> np.ndarray: - """Frequency bins for equidistant or logrithmic frequency axis""" + """Frequency bins for equidistant or logrithmic frequency axis.""" if np.isclose(np.diff(f).min(), np.diff(f).max()): # equidistant frequency bins return (f[1] - f[0]) * np.ones_like(f) diff --git a/mikeio/_time.py b/mikeio/_time.py index 271b1e73c..690df82ec 100644 --- a/mikeio/_time.py +++ b/mikeio/_time.py @@ -8,7 +8,7 @@ @dataclass class DateTimeSelector: - """Helper class for selecting time steps from a pandas DatetimeIndex""" + """Helper class for selecting time steps from a pandas DatetimeIndex.""" index: pd.DatetimeIndex @@ -18,7 +18,7 @@ def isel( int | Iterable[int] | str | datetime | pd.DatetimeIndex | slice | None ) = None, ) -> list[int]: - """Select time steps from a pandas DatetimeIndex + """Select time steps from a pandas DatetimeIndex. 
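A small sketch of the selector in use (DateTimeSelector is the internal helper defined here; dates are arbitrary):

```{python}
import pandas as pd
idx = pd.date_range("2018-01-01", periods=4, freq="D")
dts = DateTimeSelector(idx)
dts.isel("2018-01-03")  # expected [2]; string selection is resolved via pandas
```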
Parameters ---------- @@ -46,8 +46,8 @@ def isel( ```{python} dts.isel(-1) ``` - """ + """ indices = list(range(len(self.index))) if x is None: diff --git a/mikeio/dataset/_data_plot.py b/mikeio/dataset/_data_plot.py index 26a06b958..214ad9684 100644 --- a/mikeio/dataset/_data_plot.py +++ b/mikeio/dataset/_data_plot.py @@ -15,7 +15,7 @@ class _DataArrayPlotter: - """Context aware plotter (sensible plotting according to geometry)""" + """Context aware plotter (sensible plotting according to geometry).""" def __init__(self, da: "DataArray") -> None: self.da = da @@ -26,7 +26,7 @@ def __call__( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot DataArray according to geometry + """Plot DataArray according to geometry. Parameters ---------- @@ -36,10 +36,13 @@ def __call__( specify size of figure title: str, optional axes title + **kwargs: Any + additional arguments passed to the plotting function Returns ------- + """ fig, ax = self._get_fig_ax(ax, figsize) @@ -80,7 +83,7 @@ def hist( title: str | None = None, **kwargs: Any, ) -> Axes: - """Plot DataArray as histogram (using ax.hist) + """Plot DataArray as histogram (using ax.hist). Parameters ---------- @@ -97,6 +100,8 @@ def hist( specify size of figure title: str, optional axes title + **kwargs: Any + additional arguments passed to the plotting function See Also -------- @@ -105,6 +110,7 @@ def hist( Returns ------- + """ ax = self._get_ax(ax, figsize) if title is not None: @@ -122,7 +128,7 @@ def line( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot data as lines (timeseries if time is present)""" + """Plot data as lines (timeseries if time is present).""" fig, ax = self._get_fig_ax(ax, figsize) if self.da._has_time_axis: return self._timeseries(self.da.values, fig, ax, **kwargs) @@ -160,7 +166,7 @@ def _get_first_step_values(self) -> np.ndarray: class _DataArrayPlotterGrid1D(_DataArrayPlotter): - """Plot a DataArray with a Grid1D geometry + """Plot a DataArray with a Grid1D geometry. Examples -------- @@ -171,6 +177,7 @@ class _DataArrayPlotterGrid1D(_DataArrayPlotter): >>> da.plot.imshow() >>> da.plot.pcolormesh() >>> da.plot.hist() + """ def __call__( @@ -191,7 +198,7 @@ def line( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot as spatial lines""" + """Plot as spatial lines.""" _, ax = self._get_fig_ax(ax, figsize) return self._lines(ax, **kwargs) @@ -201,7 +208,7 @@ def timeseries( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot as timeseries""" + """Plot as timeseries.""" if self.da.n_timesteps == 1: raise ValueError("Not possible with single timestep DataArray") fig, ax = self._get_fig_ax(ax, figsize) @@ -213,7 +220,7 @@ def imshow( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot as 2d""" + """Plot as 2d.""" if not self.da._has_time_axis: raise ValueError( "Not possible without time axis. DataArray only has 1 dimension." @@ -230,7 +237,7 @@ def pcolormesh( title: str | None = None, **kwargs: Any, ) -> Axes: - """Plot multiple lines as 2d color plot""" + """Plot multiple lines as 2d color plot.""" if not self.da._has_time_axis: raise ValueError( "Not possible without time axis. DataArray only has 1 dimension." 
@@ -251,7 +258,7 @@ def pcolormesh( return ax def _lines(self, ax: Axes, title: str | None = None, **kwargs: Any) -> Axes: - """x-lines - one per timestep""" + """x-lines - one per timestep.""" if title is not None: ax.set_title(title) elif self.da.n_timesteps == 1: @@ -263,7 +270,7 @@ def _lines(self, ax: Axes, title: str | None = None, **kwargs: Any) -> Axes: class _DataArrayPlotterGrid2D(_DataArrayPlotter): - """Plot a DataArray with a Grid2D geometry + """Plot a DataArray with a Grid2D geometry. If DataArray has multiple time steps, the first step will be plotted. @@ -274,6 +281,7 @@ class _DataArrayPlotterGrid2D(_DataArrayPlotter): da = mikeio.read("../data/gebco_sound.dfs2")["Elevation"] da.plot() ``` + """ def __call__( @@ -291,7 +299,7 @@ def contour( title: str | None = None, **kwargs: Any, ) -> Axes: - """Plot data as contour lines + """Plot data as contour lines. Examples -------- @@ -299,6 +307,7 @@ def contour( da = mikeio.read("../data/gebco_sound.dfs2")["Elevation"] da.plot.contour() ``` + """ _, ax = self._get_fig_ax(ax, figsize) @@ -321,7 +330,7 @@ def contourf( label: str | None = None, **kwargs: Any, ) -> Axes: - """Plot data as filled contours + """Plot data as filled contours. Examples -------- @@ -329,6 +338,7 @@ def contourf( da = mikeio.read("../data/gebco_sound.dfs2")["Elevation"] da.plot.contourf() ``` + """ fig, ax = self._get_fig_ax(ax, figsize) @@ -352,7 +362,7 @@ def pcolormesh( label: str | None = None, **kwargs: Any, ) -> Axes: - """Plot data as coloured patches + """Plot data as coloured patches. Examples -------- @@ -360,6 +370,7 @@ def pcolormesh( da = mikeio.read("../data/gebco_sound.dfs2")["Elevation"] da.plot.pcolormesh() ``` + """ fig, ax = self._get_fig_ax(ax, figsize) @@ -409,7 +420,7 @@ def _set_aspect_and_labels(ax: Axes, geometry: Any, y: np.ndarray) -> None: class _DataArrayPlotterFM(_DataArrayPlotter): - """Plot a DataArray with a GeometryFM geometry + """Plot a DataArray with a GeometryFM geometry. If DataArray has multiple time steps, the first step will be plotted. @@ -422,6 +433,7 @@ class _DataArrayPlotterFM(_DataArrayPlotter): da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"] da.plot() ``` + """ def __call__( @@ -430,7 +442,7 @@ def __call__( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot data as coloured patches""" + """Plot data as coloured patches.""" ax = self._get_ax(ax, figsize) return self._plot_FM_map(ax, **kwargs) @@ -440,7 +452,7 @@ def patch( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot data as coloured patches + """Plot data as coloured patches. Examples -------- @@ -448,6 +460,7 @@ def patch( da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"] da.plot.patch() ``` + """ ax = self._get_ax(ax, figsize) kwargs["plot_type"] = "patch" @@ -459,7 +472,7 @@ def contour( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot data as contour lines + """Plot data as contour lines. Examples -------- @@ -467,6 +480,7 @@ def contour( da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"] da.plot.contour() ``` + """ ax = self._get_ax(ax, figsize) kwargs["plot_type"] = "contour" @@ -478,7 +492,7 @@ def contourf( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot data as filled contours + """Plot data as filled contours. 
Examples -------- @@ -486,6 +500,7 @@ def contourf( da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"] da.plot.contourf() ``` + """ ax = self._get_ax(ax, figsize) kwargs["plot_type"] = "contourf" @@ -497,7 +512,7 @@ def mesh( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot mesh only + """Plot mesh only. Examples -------- @@ -505,6 +520,7 @@ def mesh( da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"] da.plot.mesh() ``` + """ return self.da.geometry.plot.mesh(figsize=figsize, ax=ax, **kwargs) @@ -514,7 +530,7 @@ def outline( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot domain outline (using the boundary_polylines property) + """Plot domain outline (using the boundary_polylines property). Examples -------- @@ -522,6 +538,7 @@ def outline( da = mikeio.read("../data/HD2D.dfsu")["Surface elevation"] da.plot.outline() ``` + """ return self.da.geometry.plot.outline(figsize=figsize, ax=ax, **kwargs) @@ -555,7 +572,7 @@ def _plot_FM_map(self, ax: Axes, **kwargs: Any) -> Axes: class _DataArrayPlotterFMVerticalColumn(_DataArrayPlotter): - """Plot a DataArray with a GeometryFMVerticalColumn geometry + """Plot a DataArray with a GeometryFMVerticalColumn geometry. If DataArray has multiple time steps, the first step will be plotted. @@ -577,6 +594,7 @@ class _DataArrayPlotterFMVerticalColumn(_DataArrayPlotter): ```{python} da.plot.hist() ``` + """ def __call__( @@ -595,7 +613,7 @@ def line( extrapolate: bool = True, **kwargs: Any, ) -> Axes: - """Plot data as vertical lines""" + """Plot data as vertical lines.""" ax = self._get_ax(ax, figsize) return self._line(ax, extrapolate=extrapolate, **kwargs) @@ -640,7 +658,7 @@ def pcolormesh( title: str | None = None, **kwargs: Any, ) -> Axes: - """Plot data as coloured patches""" + """Plot data as coloured patches.""" fig, ax = self._get_fig_ax(ax, figsize) ze = self.da.geometry.calc_ze() pos = ax.pcolormesh( @@ -660,7 +678,7 @@ def pcolormesh( class _DataArrayPlotterFMVerticalProfile(_DataArrayPlotter): - """Plot a DataArray with a 2DV GeometryFMVerticalProfile geometry + """Plot a DataArray with a 2DV GeometryFMVerticalProfile geometry. If DataArray has multiple time steps, the first step will be plotted. @@ -674,6 +692,7 @@ class _DataArrayPlotterFMVerticalProfile(_DataArrayPlotter): ```{python} da.plot.hist() ``` + """ def __call__( @@ -834,7 +853,7 @@ def __init__(self, ds: Dataset) -> None: def __call__( self, figsize: tuple[float, float] | None = None, **kwargs: Any ) -> Axes: - """Plot multiple DataArrays as time series (only possible dfs0-type data)""" + """Plot multiple DataArrays as time series (only possible dfs0-type data).""" if self.ds.dims == ("time",): df = self.ds.to_dataframe() return df.plot(figsize=figsize, **kwargs) @@ -863,7 +882,7 @@ def scatter( figsize: tuple[float, float] | None = None, **kwargs: Any, ) -> Axes: - """Plot data from two DataArrays against each other in a scatter plot + """Plot data from two DataArrays against each other in a scatter plot. 
Parameters ---------- @@ -877,7 +896,8 @@ def scatter( specify size of figure title: str, optional axes title - **kwargs: additional kwargs will be passed to ax.scatter() + **kwargs: Any + additional kwargs will be passed to ax.scatter() Returns ------- @@ -888,6 +908,7 @@ def scatter( >>> ds = mikeio.read("oresund_sigma_z.dfsu") >>> ds.plot.scatter(x="Salinity", y="Temperature", title="S-vs-T") >>> ds.plot.scatter(x=0, y=1, figsize=(9,9), marker='*') + """ _, ax = self._get_fig_ax(ax, figsize) if "title" in kwargs: diff --git a/mikeio/dataset/_data_utils.py b/mikeio/dataset/_data_utils.py index b04e2550d..ba9551524 100644 --- a/mikeio/dataset/_data_utils.py +++ b/mikeio/dataset/_data_utils.py @@ -23,8 +23,7 @@ def _get_time_idx_list( time: pd.DatetimeIndex, steps: int | Iterable[int] | str | datetime | pd.DatetimeIndex | slice, ) -> list[int] | slice: - """Find list of idx in DatetimeIndex""" - + """Find list of idx in DatetimeIndex.""" # indexing with a slice needs to be handled differently, since slicing returns a view if isinstance(steps, slice): diff --git a/mikeio/dataset/_dataarray.py b/mikeio/dataset/_dataarray.py index 40e372d86..276947fbc 100644 --- a/mikeio/dataset/_dataarray.py +++ b/mikeio/dataset/_dataarray.py @@ -133,7 +133,7 @@ def __call__(self, tail: bool = True) -> "DataArray": class DataArray: - """DataArray with data and metadata for a single item in a dfs file + """DataArray with data and metadata for a single item in a dfs file. The DataArray has these main properties: @@ -153,6 +153,7 @@ class DataArray: item=mikeio.ItemInfo("Water level", mikeio.EUMType.Water_Level)) da ``` + """ deletevalue = 1.0e-35 @@ -320,7 +321,7 @@ def _parse_zn( return zn def _is_compatible(self, other: "DataArray", raise_error: bool = False) -> bool: - """check if other DataArray has equivalent dimensions, time and geometry""" + """check if other DataArray has equivalent dimensions, time and geometry.""" problems = [] assert isinstance(other, DataArray) if self.shape != other.shape: @@ -390,7 +391,7 @@ def _set_spectral_attributes(self, geometry: GeometryType) -> None: @property def name(self) -> str: - """Name of this DataArray (=da.item.name)""" + """Name of this DataArray (=da.item.name).""" assert isinstance(self.item.name, str) return self.item.name @@ -400,22 +401,22 @@ def name(self, value: str) -> None: @property def type(self) -> EUMType: - """EUMType""" + """EUMType.""" return self.item.type @property def unit(self) -> EUMUnit: - """EUMUnit""" + """EUMUnit.""" return self.item.unit @property def start_time(self) -> datetime: - """First time instance (as datetime)""" + """First time instance (as datetime).""" return self.time[0].to_pydatetime() @property def end_time(self) -> datetime: - """Last time instance (as datetime)""" + """Last time instance (as datetime).""" # TODO: use pd.Timestamp instead return self.time[-1].to_pydatetime() @@ -440,28 +441,28 @@ def timestep(self) -> float: @property def n_timesteps(self) -> int: - """Number of time steps""" + """Number of time steps.""" return len(self.time) @property def shape(self) -> Any: - """Tuple of array dimensions""" + """Tuple of array dimensions.""" return self.values.shape @property def ndim(self) -> int: - """Number of array dimensions""" + """Number of array dimensions.""" assert isinstance(self.values.ndim, int) return self.values.ndim @property def dtype(self) -> Any: - """Data-type of the array elements""" + """Data-type of the array elements.""" return self.values.dtype @property def values(self) -> np.ndarray: - 
"""Values as a np.ndarray (equivalent to to_numpy())""" + """Values as a np.ndarray (equivalent to to_numpy()).""" return self._values @values.setter @@ -475,7 +476,7 @@ def values(self, value: np.ndarray | float) -> None: self._values = value # type: ignore def to_numpy(self) -> np.ndarray: - """Values as a np.ndarray (equivalent to values)""" + """Values as a np.ndarray (equivalent to values).""" return self._values @property @@ -483,7 +484,7 @@ def _has_time_axis(self) -> bool: return self.dims[0][0] == "t" def dropna(self) -> "DataArray": - """Remove time steps where values are NaN""" + """Remove time steps where values are NaN.""" if not self._has_time_axis: raise ValueError("Not available if no time axis!") @@ -495,24 +496,29 @@ def dropna(self) -> "DataArray": return self.isel(idx, axis=0) def flipud(self) -> "DataArray": - """Flip upside down (on first non-time axis)""" - + """Flip upside down (on first non-time axis).""" first_non_t_axis = 1 if self._has_time_axis else 0 self.values = np.flip(self.values, axis=first_non_t_axis) return self def describe(self, percentiles=None, include=None, exclude=None) -> pd.DataFrame: # type: ignore - """Generate descriptive statistics by wrapping [](`pandas.DataFrame.describe`) + """Generate descriptive statistics by wrapping [](`pandas.DataFrame.describe`). Parameters ---------- + percentiles : list-like of numbers, optional + The percentiles to include in the output. All should fall between 0 and 1. + include : 'all', list-like of dtypes or None (default), optional + A white list of data types to include in the result. + exclude : list-like of dtypes or None (default), optional + A black list of data types to omit from the result. Returns ------- pd.DataFrame - """ + """ data = {} data[self.name] = self.to_numpy().ravel() df = pd.DataFrame(data).describe( @@ -522,17 +528,17 @@ def describe(self, percentiles=None, include=None, exclude=None) -> pd.DataFrame return df def copy(self) -> "DataArray": - """Make copy of DataArray""" + """Make copy of DataArray.""" return deepcopy(self) def squeeze(self) -> "DataArray": - """Remove axes of length 1 + """Remove axes of length 1. Returns ------- DataArray - """ + """ data = np.squeeze(self.values) dims = [d for s, d in zip(self.shape, self.dims) if s != 1] @@ -601,6 +607,7 @@ def isel( Parameters ---------- idx: int, scalar or array_like + Index, or indices, along the specified dimension(s) axis: (int, str, None), optional axis number or "time", by default 0 time : int, optional @@ -614,6 +621,8 @@ def isel( element : int, optional Bounding box of coordinates (left lower and right upper) to be selected, by default None + **kwargs: Any + Not used Returns ------- @@ -644,6 +653,7 @@ def isel( da = mikeio.read("../data/oresund_sigma_z.dfsu").Temperature da.isel(element=range(200)) ``` + """ if isinstance(self.geometry, Grid2D) and ("x" in kwargs and "y" in kwargs): idx_x = kwargs["x"] @@ -771,6 +781,8 @@ def sel( layer(s) to be selected: "top", "bottom" or layer number from bottom 0,1,2,... or from the top -1,-2,... or as list of these; only for layered dfsu, by default None + **kwargs: Any + Additional keyword arguments Returns ------- @@ -810,6 +822,7 @@ def sel( ```{python} da.sel(layers="bottom") ``` + """ if any([isinstance(v, slice) for v in kwargs.values()]): return self._sel_with_slice(kwargs) @@ -883,7 +896,7 @@ def interp( interpolant: tuple[Any, Any] | None = None, **kwargs: Any, ) -> "DataArray": - """Interpolate data in time and space + """Interpolate data in time and space. 
This method currently has limited functionality for spatial interpolation. It will be extended in the future. @@ -906,9 +919,15 @@ def interp( x-coordinate of point to be interpolated to, by default None y : float, optional y-coordinate of point to be interpolated to, by default None + z : float, optional + z-coordinate of point to be interpolated to, by default None n_nearest : int, optional When using IDW interpolation, how many nearest points should be used, by default: 3 + interpolant : tuple, optional + Precomputed interpolant, by default None + **kwargs: Any + Additional keyword arguments to be passed to the interpolation Returns ------- @@ -929,6 +948,7 @@ def interp( >>> da = mikeio.read("HD2D.dfsu").Salinity >>> da.interp(x=340000, y=6160000) + """ if z is not None: raise NotImplementedError() @@ -997,7 +1017,7 @@ def interp( def __dataarray_read_item_time_func( self, item: int, step: int ) -> tuple[np.ndarray, float]: - "Used by _extract_track" + "Used by _extract_track." # Ignore item argument data = self.isel(time=step).to_numpy() time = (self.time[step] - self.time[0]).total_seconds() # type: ignore @@ -1010,8 +1030,7 @@ def extract_track( method: Literal["nearest", "inverse_distance"] = "nearest", dtype: Any = np.float32, ) -> "Dataset": - """ - Extract data along a moving track + """Extract data along a moving track. Parameters --------- @@ -1023,12 +1042,15 @@ def extract_track( method: str, optional Spatial interpolation method ('nearest' or 'inverse_distance') default='nearest' + dtype: Any, optional + Data type of the output data, default=np.float32 Returns ------- Dataset A dataset with data dimension t The first two items will be x- and y- coordinates of track + """ from .._track import _extract_track @@ -1058,7 +1080,7 @@ def interp_time( extrapolate: bool = True, fill_value: float = np.nan, ) -> "DataArray": - """Temporal interpolation + """Temporal interpolation. Wrapper of [](`scipy.interpolate.interp1d`) @@ -1076,6 +1098,7 @@ def interp_time( Returns ------- DataArray + """ from scipy.interpolate import interp1d # type: ignore @@ -1133,8 +1156,8 @@ def interp_na(self, axis: str = "time", **kwargs: Any) -> "DataArray": ```{python} da.interp_na() ``` - """ + """ xr_da = self.to_xarray().interpolate_na(dim=axis, **kwargs) self.values = xr_da.values return self @@ -1145,7 +1168,7 @@ def interp_like( interpolant: tuple[Any, Any] | None = None, **kwargs: Any, ) -> "DataArray": - """Interpolate in space (and in time) to other geometry (and time axis) + """Interpolate in space (and in time) to other geometry (and time axis). Note: currently only supports interpolation from dfsu-2d to dfs2 or other dfsu-2d DataArrays @@ -1153,9 +1176,11 @@ def interp_like( Parameters ---------- other: Dataset, DataArray, Grid2D, GeometryFM, pd.DatetimeIndex - interpolant, optional + The target geometry (and time axis) to interpolate to + interpolant: tuple, optional Reuse pre-calculated index and weights - kwargs: additional kwargs are passed to interpolation method + **kwargs: Any + additional kwargs are passed to interpolation method Examples -------- @@ -1167,6 +1192,7 @@ def interp_like( ------- DataArray Interpolated DataArray + """ if not (isinstance(self.geometry, GeometryFM2D) and self.geometry.is_2d): raise NotImplementedError( @@ -1230,11 +1256,12 @@ def interp_like( def concat( dataarrays: Sequence["DataArray"], keep: Literal["last", "first"] = "last" ) -> "DataArray": - """Concatenate DataArrays along the time axis + """Concatenate DataArrays along the time axis. 
Parameters --------- - dataarrays: sequence of DataArrays + dataarrays: list[DataArray] + DataArrays to concatenate keep: 'first' or 'last', optional default: last @@ -1255,6 +1282,7 @@ def concat( da3 = mikeio.DataArray.concat([da1,da2]) da3 ``` + """ from mikeio import Dataset @@ -1268,12 +1296,14 @@ def concat( # ============= Aggregation methods =========== def max(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": - """Max value along an axis + """Max value along an axis. Parameters ---------- axis: (int, str, None), optional axis number or "time" or "space", by default 0 + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1283,16 +1313,19 @@ def max(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": See Also -------- nanmax : Max values with NaN values removed + """ return self.aggregate(axis=axis, func=np.max, **kwargs) def min(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": - """Min value along an axis + """Min value along an axis. Parameters ---------- axis: (int, str, None), optional axis number or "time" or "space", by default 0 + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1302,16 +1335,19 @@ def min(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": See Also -------- nanmin : Min values with NaN values removed + """ return self.aggregate(axis=axis, func=np.min, **kwargs) def mean(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": - """Mean value along an axis + """Mean value along an axis. Parameters ---------- axis: (int, str, None), optional axis number or "time" or "space", by default 0 + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1321,16 +1357,19 @@ def mean(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": See Also -------- nanmean : Mean values with NaN values removed + """ return self.aggregate(axis=axis, func=np.mean, **kwargs) def std(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": - """Standard deviation values along an axis + """Standard deviation values along an axis. Parameters ---------- axis: (int, str, None), optional axis number or "time" or "space", by default 0 + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1340,21 +1379,25 @@ def std(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": See Also -------- nanstd : Standard deviation values with NaN values removed + """ return self.aggregate(axis=axis, func=np.std, **kwargs) def ptp(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": - """Range (max - min) a.k.a Peak to Peak along an axis + """Range (max - min) a.k.a Peak to Peak along an axis. 
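A one-line sketch, following the pattern of the aggregation methods above:

```{python}
da.ptp(axis="time")  # per-element range: max minus min over all time steps
```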
Parameters ---------- axis: (int, str, None), optional axis number or "time" or "space", by default 0 + **kwargs: Any + Additional keyword arguments Returns ------- DataArray array with peak to peak values + """ return self.aggregate(axis=axis, func=np.ptp, **kwargs) @@ -1366,7 +1409,11 @@ def average( Parameters ---------- axis: (int, str, None), optional - axis number or "time" or "space", by default 0 + axis number or "time" or "space", by default + weights: np.ndarray + weights to apply to the values + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1383,6 +1430,7 @@ def average( >>> da = dfs.read(["Current speed"])[0] >>> area = dfs.get_element_area() >>> da2 = da.average(axis="space", weights=area) + """ def func(x, axis, keepdims): # type: ignore @@ -1394,12 +1442,14 @@ def func(x, axis, keepdims): # type: ignore return self.aggregate(axis=axis, func=func, **kwargs) def nanmax(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": - """Max value along an axis (NaN removed) + """Max value along an axis (NaN removed). Parameters ---------- axis: (int, str, None), optional axis number or "time" or "space", by default 0 + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1409,16 +1459,19 @@ def nanmax(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": See Also -------- nanmax : Max values with NaN values removed + """ return self.aggregate(axis=axis, func=np.nanmax, **kwargs) def nanmin(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": - """Min value along an axis (NaN removed) + """Min value along an axis (NaN removed). Parameters ---------- axis: (int, str, None), optional axis number or "time" or "space", by default 0 + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1428,16 +1481,19 @@ def nanmin(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": See Also -------- nanmin : Min values with NaN values removed + """ return self.aggregate(axis=axis, func=np.nanmin, **kwargs) def nanmean(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": - """Mean value along an axis (NaN removed) + """Mean value along an axis (NaN removed). Parameters ---------- axis: (int, str, None), optional axis number or "time" or "space", by default 0 + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1447,16 +1503,19 @@ def nanmean(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": See Also -------- mean : Mean values + """ return self.aggregate(axis=axis, func=np.nanmean, **kwargs) def nanstd(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": - """Standard deviation value along an axis (NaN removed) + """Standard deviation value along an axis (NaN removed). Parameters ---------- axis: (int, str, None), optional axis number or "time" or "space", by default 0 + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1466,13 +1525,14 @@ def nanstd(self, axis: int | str = 0, **kwargs: Any) -> "DataArray": See Also -------- std : Standard deviation + """ return self.aggregate(axis=axis, func=np.nanstd, **kwargs) def aggregate( self, axis: int | str = 0, func: Callable[..., Any] = np.nanmean, **kwargs: Any ) -> "DataArray": - """Aggregate along an axis + """Aggregate along an axis. 
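Any reducing NumPy-style function can be passed as `func`; a short sketch:

```{python}
import numpy as np
da.aggregate(axis="time", func=np.nanmedian)  # median instead of the default nanmean
```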
Parameters ---------- @@ -1480,6 +1540,8 @@ def aggregate( axis number or "time" or "space", by default 0 func: function, optional default np.nanmean + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1490,8 +1552,8 @@ def aggregate( -------- max : Max values nanmax : Max values with NaN values removed - """ + """ axis = self._parse_axis(self.shape, self.dims, axis) time = self._time_by_agg_axis(self.time, axis) @@ -1550,6 +1612,8 @@ def quantile( which must be between 0 and 1 inclusive. axis: (int, str, None), optional axis number or "time" or "space", by default 0 + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1565,6 +1629,7 @@ def quantile( See Also -------- nanquantile : quantile with NaN values ignored + """ return self._quantile(q, axis=axis, func=np.quantile, **kwargs) @@ -1588,6 +1653,8 @@ def nanquantile( which must be between 0 and 1 inclusive. axis: (int, str, None), optional axis number or "time" or "space", by default 0 + **kwargs: Any + Additional keyword arguments Returns ------- @@ -1603,6 +1670,7 @@ def nanquantile( See Also -------- quantile : Quantile with NaN values + """ return self._quantile(q, axis=axis, func=np.nanquantile, **kwargs) @@ -1696,7 +1764,7 @@ def _apply_unary_math_operation(self, func: Callable) -> "DataArray": def _apply_math_operation( self, other: "DataArray" | float, func: Callable, *, txt: str ) -> "DataArray": - """Apply a binary math operation with a scalar, an array or another DataArray""" + """Apply a binary math operation with a scalar, an array or another DataArray.""" try: other_values = other.values if hasattr(other, "values") else other data = func(self.values, other_values) @@ -1781,7 +1849,7 @@ def _boolmask_to_new_DataArray(self, bmask) -> "DataArray": # type: ignore # ============= output methods: to_xxx() =========== def _to_dataset(self) -> "Dataset": - """Create a single-item dataset""" + """Create a single-item dataset.""" from mikeio import Dataset return Dataset( @@ -1789,7 +1857,7 @@ def _to_dataset(self) -> "Dataset": ) # Single-item Dataset (All info is contained in the DataArray, no need for additional info) def to_dfs(self, filename: str | Path, **kwargs: Any) -> None: - """Write data to a new dfs file + """Write data to a new dfs file. Parameters ---------- @@ -1798,13 +1866,16 @@ def to_dfs(self, filename: str | Path, **kwargs: Any) -> None: dtype: str, np.dtype, DfsSimpleType, optional Dfs0 only: set the dfs data type of the written data to e.g. np.float64, by default: DfsSimpleType.Float (=np.float32) + **kwargs: Any + Additional keyword arguments, e.g. dtype for dfs0 + """ self._to_dataset().to_dfs(filename, **kwargs) def to_dataframe( self, *, unit_in_name: bool = False, round_time: str | bool = "ms" ) -> pd.DataFrame: - """Convert to DataFrame + """Convert to DataFrame. Parameters ---------- @@ -1816,25 +1887,24 @@ def to_dataframe( Returns ------- pd.DataFrame - """ + """ return self._to_dataset().to_dataframe( unit_in_name=unit_in_name, round_time=round_time ) def to_pandas(self) -> pd.Series: - """Convert to Pandas Series + """Convert to Pandas Series. 
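For context, `aggregate` is the generic entry point that `max`, `mean`, `quantile` and the other reductions above delegate to; any NumPy-style reducer can be passed. A sketch with an illustrative file name:

```python
import numpy as np
import mikeio

da = mikeio.read("tide1.dfs1")[0]                # hypothetical dfs1 file

da_q90 = da.quantile(q=0.9, axis="time")         # 90th percentile over time
da_rng = da.aggregate(axis="time", func=np.ptp)  # custom reducer: max - min
```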
         Returns
         -------
         pd.Series
-        """
 
+        """
         return pd.Series(data=self.to_numpy(), index=self.time, name=self.name)
 
     def to_xarray(self) -> "xarray.DataArray":
-        """Export to xarray.DataArray"""
-
+        """Export to xarray.DataArray."""
         import xarray as xr
 
         coords: MutableMapping[str, Any] = {}
@@ -1942,7 +2012,7 @@ def _parse_interp_time(
     def _time_by_agg_axis(
         time: pd.DatetimeIndex, axis: int | Sequence[int]
     ) -> pd.DatetimeIndex:
-        """New DatetimeIndex after aggregating over time axis"""
+        """New DatetimeIndex after aggregating over time axis."""
         if axis == 0 or (isinstance(axis, Sequence) and 0 in axis):
             time = pd.DatetimeIndex([time[0]])
 
@@ -1953,8 +2023,7 @@ def _get_time_idx_list(
         time: pd.DatetimeIndex,
         steps: int | Iterable[int] | str | datetime | pd.DatetimeIndex | slice,
     ) -> list[int] | slice:
-        """Find list of idx in DatetimeIndex"""
-
+        """Find list of idx in DatetimeIndex."""
         return _get_time_idx_list(time, steps)
 
     @staticmethod
@@ -1984,7 +2053,7 @@ def _set_by_boolean_mask(
 
     @staticmethod
     def _parse_time(time: Any) -> pd.DatetimeIndex:
-        """Allow anything that we can create a DatetimeIndex from"""
+        """Allow anything that we can create a DatetimeIndex from."""
         if time is None:
             time = [pd.Timestamp(2018, 1, 1)]  # TODO is this the correct epoch?
         if isinstance(time, str) or (not isinstance(time, Iterable)):
diff --git a/mikeio/dataset/_dataset.py b/mikeio/dataset/_dataset.py
index 45395e0bc..0999ab6c0 100644
--- a/mikeio/dataset/_dataset.py
+++ b/mikeio/dataset/_dataset.py
@@ -46,7 +46,7 @@
 
 
 class Dataset:
-    """Dataset containing one or more DataArrays with common geometry and time
+    """Dataset containing one or more DataArrays with common geometry and time.
 
     Most often obtained by reading a dfs file. But can
-    also be created a sequence or dictonary of DataArrays.
+    also be created from a sequence or dictionary of DataArrays.
The mikeio.Dataset
 
@@ -77,6 +77,7 @@ class Dataset:
     import mikeio
     mikeio.read("../data/europe_wind_long_lat.dfs2")
     ```
+
     """
 
     def __init__(
@@ -112,7 +113,7 @@ def __init__(
 
     @staticmethod
     def _is_DataArrays(data: Any) -> bool:
-        """Check if input is Sequence/Mapping of DataArrays"""
+        """Check if input is Sequence/Mapping of DataArrays."""
         if isinstance(data, (Dataset, DataArray)):
             return True
         if isinstance(data, Mapping):
@@ -153,7 +154,7 @@ def _create_dataarrays(
     def _init_from_DataArrays(
         self, data: Sequence[DataArray] | Mapping[str, DataArray], validate: bool = True
     ) -> MutableMapping[str, DataArray]:
-        """Initialize Dataset object with Iterable of DataArrays"""
+        """Initialize Dataset object with Iterable of DataArrays."""
         data_vars = self._DataArrays_as_mapping(data)
 
         if (len(data_vars) > 1) and validate:
@@ -228,7 +229,7 @@ def _parse_items(
     def _DataArrays_as_mapping(
         data: DataArray | Sequence[DataArray] | Mapping[str, DataArray],
     ) -> MutableMapping[str, DataArray]:
-        """Create dict of DataArrays if necessary"""
+        """Create dict of DataArrays if necessary."""
         if isinstance(data, MutableMapping):
             data_vars = Dataset._validate_item_names_and_keys(
                 data
@@ -267,7 +268,7 @@ def _unique_item_names(das: Sequence[DataArray]) -> list[str]:
 
     @staticmethod
     def _check_all_different_ids(das: Sequence[DataArray]) -> None:
-        """Are all the DataArrays different objects or are some referring to the same"""
+        """Check if all the DataArrays are different objects or if some refer to the same object."""
         ids = np.zeros(len(das), dtype=np.int64)
         ids_val = np.zeros(len(das), dtype=np.int64)
         for j, da in enumerate(das):
@@ -293,7 +294,7 @@ def _check_all_different_ids(das: Sequence[DataArray]) -> None:
 
     @staticmethod
     def _id_of_DataArrays_equal(da1: DataArray, da2: DataArray) -> None:
-        """Check if two DataArrays are actually the same object"""
+        """Check if two DataArrays are actually the same object."""
         if id(da1) == id(da2):
             raise ValueError(
                 f"Cannot add the same object ({da1.name}) twice! Create a copy first."
@@ -314,12 +315,12 @@ def _check_already_present(self, new_da: DataArray) -> None:
 
     @property
     def _dt(self) -> float:
-        """Original time step in seconds"""
+        """Original time step in seconds."""
         return self[0]._dt
 
     @property
     def time(self) -> pd.DatetimeIndex:
-        """Time axis"""
+        """Time axis."""
         return list(self)[0].time
 
     @time.setter
@@ -329,13 +330,13 @@ def time(self, new_time: pd.DatetimeIndex) -> None:
 
     @property
     def start_time(self) -> datetime:
-        """First time instance (as datetime)"""
+        """First time instance (as datetime)."""
         # TODO: use pd.Timestamp instead
         return self.time[0].to_pydatetime()  # type: ignore
 
     @property
     def end_time(self) -> datetime:
-        """Last time instance (as datetime)"""
+        """Last time instance (as datetime)."""
         # TODO: use pd.Timestamp instead
         return self.time[-1].to_pydatetime()  # type: ignore
 
@@ -357,32 +358,33 @@ def is_equidistant(self) -> bool:
         return len(self.time.to_series().diff().dropna().unique()) == 1
 
     def to_numpy(self) -> NDArray[np.floating]:
-        """Stack data to a single ndarray with shape (n_items, n_timesteps, ...)
+        """Stack data to a single ndarray with shape (n_items, n_timesteps, ...).
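The identity checks above (`_check_all_different_ids`, `_id_of_DataArrays_equal`) guard against adding the same DataArray object twice. A short sketch of the intended usage, with a hypothetical file name:

```python
import mikeio

ds = mikeio.read("HD2D.dfsu")  # any multi-item file will do

# building a new Dataset from copies: distinct objects, unique item names
ds2 = mikeio.Dataset([ds[0].copy(), ds[1].copy()])

# mikeio.Dataset([ds[0], ds[0]]) would raise ValueError:
# "Cannot add the same object ... twice! Create a copy first."
```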
Returns ------- np.ndarray + """ return np.stack([x.to_numpy() for x in self]) @property def n_timesteps(self) -> int: - """Number of time steps""" + """Number of time steps.""" return len(self.time) @property def items(self) -> list[ItemInfo]: - """ItemInfo for each of the DataArrays as a list""" + """ItemInfo for each of the DataArrays as a list.""" return [x.item for x in self] @property def n_items(self) -> int: - """Number of items/DataArrays, equivalent to len()""" + """Number of items/DataArrays, equivalent to len().""" return len(self._data_vars) @property def names(self) -> list[str]: - """Name of each of the DataArrays as a list""" + """Name of each of the DataArrays as a list.""" return [da.name for da in self] def _ipython_key_completions_(self): # type: ignore @@ -390,27 +392,27 @@ def _ipython_key_completions_(self): # type: ignore @property def ndim(self) -> int: - """Number of array dimensions of each DataArray""" + """Number of array dimensions of each DataArray.""" return self[0].ndim @property def dims(self) -> tuple[str, ...]: - """Named array dimensions of each DataArray""" + """Named array dimensions of each DataArray.""" return self[0].dims @property def shape(self) -> Any: - """Shape of each DataArray""" + """Shape of each DataArray.""" return self[0].shape @property def deletevalue(self) -> float: - """File delete value""" + """File delete value.""" return self[0].deletevalue @property def geometry(self) -> Any: - """Geometry of each DataArray""" + """Geometry of each DataArray.""" return self[0].geometry @property @@ -420,14 +422,14 @@ def _zn(self) -> np.ndarray | None: # TODO: remove this @property def n_elements(self) -> int: - """Number of spatial elements/points""" + """Number of spatial elements/points.""" n_elem = int(np.prod(self.shape)) if self.n_timesteps > 1: n_elem = int(n_elem / self.n_timesteps) return n_elem def describe(self, **kwargs: Any) -> pd.DataFrame: - """Generate descriptive statistics by wrapping :py:meth:`pandas.DataFrame.describe`""" + """Generate descriptive statistics by wrapping :py:meth:`pandas.DataFrame.describe`.""" data = {x.name: x.to_numpy().ravel() for x in self} df = pd.DataFrame(data).describe(**kwargs) @@ -435,11 +437,10 @@ def describe(self, **kwargs: Any) -> pd.DataFrame: def copy(self) -> "Dataset": """Returns a copy of this dataset.""" - return deepcopy(self) def dropna(self) -> "Dataset": - """Remove time steps where all items are NaN""" + """Remove time steps where all items are NaN.""" if not self[0]._has_time_axis: # type: ignore raise ValueError("Not available if no time axis!") @@ -458,18 +459,19 @@ def dropna(self) -> "Dataset": return self.isel(all_index, axis=0) def flipud(self) -> "Dataset": - """Flip data upside down (on first non-time axis)""" + """Flip data upside down (on first non-time axis).""" self._data_vars = { key: value.flipud() for (key, value) in self._data_vars.items() } return self def squeeze(self) -> "Dataset": - """Remove axes of length 1 + """Remove axes of length 1. Returns ------- Dataset + """ res = {name: da.squeeze() for name, da in self._data_vars.items()} @@ -478,7 +480,7 @@ def squeeze(self) -> "Dataset": def create_data_array( self, data: NDArray[np.floating], item: ItemInfo | None = None ) -> DataArray: - """Create a new DataArray with the same time and geometry as the dataset + """Create a new DataArray with the same time and geometry as the dataset. 
Examples
         --------
         >>> ds = mikeio.read("oresundHD_run1.dfsu")
         >>> values = np.zeros(ds.Temperature.shape)
         >>> da = ds.create_data_array(values)
         >>> da_name = ds.create_data_array(values,"Foo")
         >>> da_eum = ds.create_data_array(values, item=mikeio.ItemInfo("TS", mikeio.EUMType.Temperature))
+
         """
         return DataArray(
             data=data, time=self.time, geometry=self.geometry, zn=self._zn, item=item
@@ -568,7 +571,7 @@ def __set_or_insert_item(self, key, value: DataArray, insert=False) -> None:
             self._set_name_attr(key, value)
 
     def insert(self, key: int, value: DataArray) -> None:
-        """Insert DataArray in a specific position
+        """Insert DataArray in a specific position.
 
         Parameters
         ----------
@@ -577,11 +580,12 @@ def insert(self, key: int, value: DataArray) -> None:
         value : DataArray
-            DataArray to be inserted, must comform with with existing
+            DataArray to be inserted, must conform with existing
             DataArrays and must have a unique item name
+
         """
         self.__set_or_insert_item(key, value, insert=True)
 
     def remove(self, key: int | str) -> None:
-        """Remove DataArray from Dataset
+        """Remove DataArray from Dataset.
 
         Parameters
         ----------
@@ -591,11 +595,12 @@ def remove(self, key: int | str) -> None:
-        See also
+        See Also
         --------
         pop
+
         """
         self.__delitem__(key)
 
     def rename(self, mapper: Mapping[str, str], inplace: bool = False) -> "Dataset":
-        """Rename items (DataArrays) in Dataset
+        """Rename items (DataArrays) in Dataset.
 
         Parameters
         ----------
@@ -614,6 +619,7 @@ def rename(self, mapper: Mapping[str, str], inplace: bool = False) -> "Dataset":
         >>> ds = mikeio.read("tide1.dfs1")
         >>> newds = ds.rename({"Level":"Surface Elevation"})
         >>> ds.rename({"Level":"Surface Elevation"}, inplace=True)
+
         """
         if inplace:
             ds = self
@@ -748,7 +754,7 @@ def _multi_indexing_attempted(self, key: Any) -> bool:
 
     # TODO change this to return a single type
     def _key_to_str(self, key: Any) -> Any:
-        """Translate item selection key to str (or list[str])"""
+        """Translate item selection key to str (or list[str])."""
         if isinstance(key, str):
             return key
         if isinstance(key, int):
@@ -792,6 +798,7 @@ def isel(
         Parameters
         ----------
         idx: int, scalar or array_like
+            Index, or indices, along the specified dimension(s)
         axis: (int, str, None), optional
             axis number or "time", by default 0
         time : int, optional
@@ -805,6 +812,8 @@ def isel(
         element : int, optional
-            Bounding box of coordinates (left lower and right upper) to
-            be selected, by default None
+            element number(s) to be selected, by default None
+        **kwargs: Any
+            Not used
 
         Returns
         -------
@@ -821,6 +830,7 @@ def isel(
         >>> ds = mikeio.read("tests/testdata/HD2D.dfsu")
         >>> ds2 = ds.isel(time=[0,1,2])
         >>> ds3 = ds2.isel(elements=[100,200])
+
         """
         res = [da.isel(idx=idx, axis=axis, **kwargs) for da in self]
         return Dataset(data=res, validate=False)
@@ -866,6 +876,8 @@ def sel(
             layer(s) to be selected: "top", "bottom" or layer number
             from bottom 0,1,2,... or from the top -1,-2,... or as
             list of these; only for layered dfsu, by default None
+        **kwargs: Any
+            Not used
 
         Returns
         -------
@@ -887,6 +899,7 @@ def sel(
         >>> ds.sel(x=340000, y=6160000, z=-3)
         >>> ds.sel(area=(340000, 6160000, 350000, 6170000))
         >>> ds.sel(layers="bottom")
+
         """
         res = [da.sel(**kwargs) for da in self]
         return Dataset(data=res, validate=False)
@@ -901,7 +914,7 @@ def interp(
         n_nearest: int = 3,
         **kwargs: Any,
     ) -> "Dataset":
-        """Interpolate data in time and space
+        """Interpolate data in time and space.
 
         This method currently has limited functionality for spatial interpolation.
         It will be extended in the future.
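The `isel`/`sel` pair documented above selects by index and by coordinate respectively; the doctests in this hunk condense to the following sketch (coordinates and file name are placeholders):

```python
import mikeio

ds = mikeio.read("HD2D.dfsu")        # hypothetical file
ds2 = ds.isel(time=[0, 1, 2])        # by index
ds3 = ds.sel(x=340000, y=6160000)    # nearest element to a point
ds4 = ds.sel(area=(340000, 6160000, 350000, 6170000))  # bounding box
```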
@@ -924,9 +937,13 @@ def interp( x-coordinate of point to be interpolated to, by default None y : float, optional y-coordinate of point to be interpolated to, by default None + z : float, optional + z-coordinate of point to be interpolated to, by default None n_nearest : int, optional When using IDW interpolation, how many nearest points should be used, by default: 3 + **kwargs: Any + Additional keyword arguments are passed to the interpolant Returns ------- @@ -947,6 +964,7 @@ def interp( >>> ds = mikeio.read("HD2D.dfsu") >>> ds.interp(x=340000, y=6160000) + """ if z is not None: raise NotImplementedError() @@ -979,8 +997,7 @@ def interp( def __dataset_read_item_time_func( self, item: int, step: int ) -> tuple[np.ndarray, float]: - "Used by _extract_track" - + "Used by _extract_track." data = self[item].isel(time=step).to_numpy() time = (self.time[step] - self.time[0]).total_seconds() # type: ignore @@ -992,8 +1009,7 @@ def extract_track( method: Literal["nearest", "inverse_distance"] = "nearest", dtype: Any = np.float32, ) -> "Dataset": - """ - Extract data along a moving track + """Extract data along a moving track. Parameters --------- @@ -1005,12 +1021,15 @@ def extract_track( method: str, optional Spatial interpolation method ('nearest' or 'inverse_distance') default='nearest' + dtype: Any, optional + Data type of the returned data, default=np.float32 Returns ------- Dataset A dataset with data dimension t The first two items will be x- and y- coordinates of track + """ from .._track import _extract_track @@ -1046,7 +1065,7 @@ def interp_time( extrapolate: bool = True, fill_value: float = np.nan, ) -> "Dataset": - """Temporal interpolation + """Temporal interpolation. Wrapper of :py:class:`scipy.interpolate.interp1d` @@ -1092,6 +1111,7 @@ def interp_time( 2: V velocity (meter per sec) 3: Current speed (meter per sec) >>> dsi = ds.interp_time(freq='2H') + """ if freq: dt = pd.to_timedelta(freq).total_seconds() @@ -1120,7 +1140,7 @@ def interp_like( other: "Dataset" | DataArray | Grid2D | GeometryFM2D | pd.DatetimeIndex, **kwargs: Any, ) -> "Dataset": - """Interpolate in space (and in time) to other geometry (and time axis) + """Interpolate in space (and in time) to other geometry (and time axis). Note: currently only supports interpolation from dfsu-2d to dfs2 or other dfsu-2d Datasets @@ -1128,7 +1148,9 @@ def interp_like( Parameters ---------- other: Dataset, DataArray, Grid2D, GeometryFM, pd.DatetimeIndex - kwargs: additional kwargs are passed to interpolation method + Dataset, DataArray, Grid2D or GeometryFM2D to interpolate to + **kwargs: Any + additional kwargs are passed to interpolation method Examples -------- @@ -1143,6 +1165,7 @@ def interp_like( ------- Dataset Interpolated Dataset + """ if not (isinstance(self.geometry, GeometryFM2D) and self.geometry.is_2d): raise NotImplementedError( @@ -1184,11 +1207,12 @@ def interp_like( def concat( datasets: Sequence["Dataset"], keep: Literal["last", "first"] = "last" ) -> "Dataset": - """Concatenate Datasets along the time axis + """Concatenate Datasets along the time axis. 
Parameters --------- - datasets: sequence of Datasets + datasets: list[Dataset] + list of Datasets to concatenate keep: 'first' or 'last', optional which values to keep in case of overlap, by default 'last' @@ -1208,6 +1232,7 @@ def concat( >>> ds3 = Dataset.concat([ds1,ds2]) >>> ds3.n_timesteps 4 + """ ds = datasets[0].copy() for dsj in datasets[1:]: @@ -1217,16 +1242,18 @@ def concat( @staticmethod def merge(datasets: Sequence["Dataset"]) -> "Dataset": - """Merge Datasets along the item dimension + """Merge Datasets along the item dimension. Parameters --------- - datasets: sequence of Datasets + datasets: list[Dataset] + list of Datasets to merge Returns ------- Dataset merged dataset + """ ds = datasets[0].copy() for other in datasets[1:]: @@ -1324,7 +1351,7 @@ def _check_all_items_match(self, other: "Dataset") -> None: def aggregate( self, axis: int | str = 0, func: Callable = np.nanmean, **kwargs: Any ) -> "Dataset": - """Aggregate along an axis + """Aggregate along an axis. Parameters ---------- @@ -1332,11 +1359,14 @@ def aggregate( axis number or "time", "space" or "items", by default 0 func: function, optional default np.nanmean + **kwargs: Any + additional arguments passed to the function Returns ------- Dataset dataset with aggregated values + """ if axis == "items": if self.n_items <= 1: @@ -1390,6 +1420,8 @@ def quantile( which must be between 0 and 1 inclusive. axis: (int, str, None), optional axis number or "time", "space" or "items", by default 0 + **kwargs: Any + additional arguments passed to the function Returns ------- @@ -1405,6 +1437,7 @@ def quantile( See Also -------- nanquantile : quantile with NaN values ignored + """ return self._quantile(q, axis=axis, func=np.quantile, **kwargs) @@ -1422,6 +1455,8 @@ def nanquantile( which must be between 0 and 1 inclusive. axis: (int, str, None), optional axis number or "time", "space" or "items", by default 0 + **kwargs: Any + additional arguments passed to the function Examples -------- @@ -1433,6 +1468,7 @@ def nanquantile( ------- Dataset dataset with quantile values + """ return self._quantile(q, axis=axis, func=np.nanquantile, **kwargs) @@ -1476,12 +1512,14 @@ def _quantile(self, q, *, axis=0, func=np.quantile, **kwargs) -> "Dataset": # t return Dataset(data=res, validate=False) def max(self, axis: int | str = 0, **kwargs: Any) -> "Dataset": - """Max value along an axis + """Max value along an axis. Parameters ---------- axis: (int, str, None), optional axis number or "time", "space" or "items", by default 0 + **kwargs: Any + additional arguments passed to the function Returns ------- @@ -1491,16 +1529,19 @@ def max(self, axis: int | str = 0, **kwargs: Any) -> "Dataset": See Also -------- nanmax : Max values with NaN values removed + """ return self.aggregate(axis=axis, func=np.max, **kwargs) def min(self, axis: int | str = 0, **kwargs: Any) -> "Dataset": - """Min value along an axis + """Min value along an axis. Parameters ---------- axis: (int, str, None), optional axis number or "time", "space" or "items", by default 0 + **kwargs: Any + additional arguments passed to the function Returns ------- @@ -1510,16 +1551,19 @@ def min(self, axis: int | str = 0, **kwargs: Any) -> "Dataset": See Also -------- nanmin : Min values with NaN values removed + """ return self.aggregate(axis=axis, func=np.min, **kwargs) def mean(self, axis: int | str = 0, **kwargs: Any) -> "Dataset": - """Mean value along an axis + """Mean value along an axis. 
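To keep `concat` and `merge` apart: `concat` stitches Datasets with the same items along the time axis, while `merge` combines different items sharing a time axis. A sketch (file names illustrative):

```python
import mikeio

ds1 = mikeio.read("tide1.dfs1")  # hypothetical files with the same items
ds2 = mikeio.read("tide2.dfs1")

ds_long = mikeio.Dataset.concat([ds1, ds2])  # same items, longer time axis
```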
         Parameters
         ----------
         axis: (int, str, None), optional
             axis number or "time", "space" or "items", by default 0
+        **kwargs: Any
+            additional arguments passed to the function
 
         Returns
         -------
@@ -1530,16 +1574,19 @@ def mean(self, axis: int | str = 0, **kwargs: Any) -> "Dataset":
         --------
         nanmean : Mean values with NaN values removed
         average : Weighted average
+
         """
         return self.aggregate(axis=axis, func=np.mean, **kwargs)
 
     def std(self, axis: int | str = 0, **kwargs: Any) -> "Dataset":
-        """Standard deviation along an axis
+        """Standard deviation along an axis.
 
         Parameters
         ----------
         axis: (int, str, None), optional
             axis number or "time", "space" or "items", by default 0
+        **kwargs: Any
+            additional arguments passed to the function
 
         Returns
         -------
@@ -1549,12 +1596,13 @@ def std(self, axis: int | str = 0, **kwargs: Any) -> "Dataset":
         See Also
         --------
         nanstd : Standard deviation with NaN values removed
+
         """
         return self.aggregate(axis=axis, func=np.std, **kwargs)
 
     def ptp(self, axis: int | str = 0, **kwargs: Any) -> "Dataset":
-        """Range (max - min) a.k.a Peak to Peak along an axis
-        Parameters
+        """Range (max - min) a.k.a. Peak to Peak along an axis.
+
+        Parameters
         ----------
         axis: (int, str, None), optional
             axis number or "time", "space" or "items", by default 0
+        **kwargs: Any
+            additional arguments passed to the function
 
         Returns
         -------
         Dataset
             dataset with peak to peak values
+
         """
         return self.aggregate(axis=axis, func=np.ptp, **kwargs)
 
@@ -1577,6 +1626,8 @@ def average(self, *, weights, axis=0, **kwargs) -> "Dataset":  # type: ignore
             weights to average over
         axis: (int, str, None), optional
             axis number or "time", "space" or "items", by default 0
+        **kwargs: Any
+            additional arguments passed to the function
 
         Returns
         -------
@@ -1594,6 +1645,7 @@ def average(self, *, weights, axis=0, **kwargs) -> "Dataset":  # type: ignore
         >>> ds = dfs.read(["Current speed"])
         >>> area = dfs.get_element_area()
         >>> ds2 = ds.average(axis="space", weights=area)
+
         """
 
         def func(x, axis, keepdims):  # type: ignore
@@ -1605,12 +1657,14 @@ def func(x, axis, keepdims):  # type: ignore
         return self.aggregate(axis=axis, func=func, **kwargs)
 
     def nanmax(self, axis: int | str = 0, **kwargs: Any) -> "Dataset":
-        """Max value along an axis (NaN removed)
+        """Max value along an axis (NaN removed).
 
         Parameters
         ----------
         axis: (int, str, None), optional
             axis number or "time", "space" or "items", by default 0
+        **kwargs: Any
+            additional arguments passed to the function
 
         See Also
         --------
@@ -1620,46 +1674,55 @@ def nanmax(self, axis: int | str = 0, **kwargs: Any) -> "Dataset":
         -------
         Dataset
             dataset with max values
+
         """
         return self.aggregate(axis=axis, func=np.nanmax, **kwargs)
 
     def nanmin(self, axis: int | str = 0, **kwargs: Any) -> "Dataset":
-        """Min value along an axis (NaN removed)
+        """Min value along an axis (NaN removed).
 
         Parameters
         ----------
         axis: (int, str, None), optional
             axis number or "time", "space" or "items", by default 0
+        **kwargs: Any
+            additional arguments passed to the function
 
         Returns
         -------
         Dataset
             dataset with min values
+
         """
         return self.aggregate(axis=axis, func=np.nanmin, **kwargs)
 
     def nanmean(self, axis: int | str = 0, **kwargs: Any) -> "Dataset":
-        """Mean value along an axis (NaN removed)
+        """Mean value along an axis (NaN removed).
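The axis semantics documented for these Dataset reductions mirror the DataArray ones, with "items" as an extra option. A minimal sketch (file name illustrative):

```python
import mikeio

ds = mikeio.read("HD2D.dfsu")   # hypothetical file
ds_t = ds.mean(axis="time")     # one remaining time step per item
ds_s = ds.nanmax(axis="space")  # NaN-aware max over all elements
```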
Parameters ---------- axis: (int, str, None), optional axis number or "time", "space" or "items", by default 0 + **kwargs: Any + additional arguments passed to the function Returns ------- Dataset dataset with mean values + """ return self.aggregate(axis=axis, func=np.nanmean, **kwargs) def nanstd(self, axis: int | str = 0, **kwargs: Any) -> "Dataset": - """Standard deviation along an axis (NaN removed) + """Standard deviation along an axis (NaN removed). Parameters ---------- axis: (int, str, None), optional axis number or "time", "space" or "items", by default 0 + **kwargs: Any + additional arguments passed to the function Returns ------- @@ -1669,6 +1732,7 @@ def nanstd(self, axis: int | str = 0, **kwargs: Any) -> "Dataset": See Also -------- std : Standard deviation + """ return self.aggregate(axis=axis, func=np.nanstd, **kwargs) @@ -1771,8 +1835,7 @@ def _multiply_value(self, value: float) -> "Dataset": # =============================================== def to_pandas(self, **kwargs: Any) -> pd.Series | pd.DataFrame: - """Convert Dataset to a Pandas DataFrame""" - + """Convert Dataset to a Pandas DataFrame.""" if self.n_items != 1: return self.to_dataframe(**kwargs) else: @@ -1781,7 +1844,7 @@ def to_pandas(self, **kwargs: Any) -> pd.Series | pd.DataFrame: def to_dataframe( self, *, unit_in_name: bool = False, round_time: str | bool = "ms" ) -> pd.DataFrame: - """Convert Dataset to a Pandas DataFrame + """Convert Dataset to a Pandas DataFrame. Parameters ---------- @@ -1793,6 +1856,7 @@ def to_dataframe( Returns ------- pd.DataFrame + """ if self.ndim > 1: raise ValueError( @@ -1817,17 +1881,16 @@ def to_dataframe( return df def to_dfs(self, filename: str | Path, **kwargs: Any) -> None: - """Write dataset to a new dfs file + """Write dataset to a new dfs file. Parameters ---------- filename: str full path to the new dfs file - dtype: str, np.dtype, DfsSimpleType, optional - Dfs0 only: set the dfs data type of the written data - to e.g. np.float64, by default: DfsSimpleType.Float (=np.float32) - """ + **kwargs: Any + additional arguments passed to the writing function, e.g. dtype for dfs0 + """ filename = str(filename) if isinstance( @@ -1896,7 +1959,7 @@ def _to_dfsu(self, filename: str | Path) -> None: write_dfsu(filename, self) def to_xarray(self) -> "xarray.Dataset": - """Export to xarray.Dataset""" + """Export to xarray.Dataset.""" import xarray data = {da.name: da.to_xarray() for da in self} @@ -1925,7 +1988,7 @@ def from_pandas( df: pd.DataFrame, items: Mapping[str, ItemInfo] | Sequence[ItemInfo] | ItemInfo | None = None, ) -> "Dataset": - """Create a Dataset from a pandas DataFrame + """Create a Dataset from a pandas DataFrame. Parameters ---------- @@ -1958,7 +2021,6 @@ def from_pandas( ``` """ - if not isinstance(df.index, pd.DatetimeIndex): # look for datetime column for col in df.columns: @@ -1989,7 +2051,7 @@ def from_polars( items: Mapping[str, ItemInfo] | Sequence[ItemInfo] | ItemInfo | None = None, datetime_col: str | None = None, ) -> "Dataset": - """Create a Dataset from a polars DataFrame + """Create a Dataset from a polars DataFrame. 
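`from_pandas` and `to_dataframe`, both touched above, form a round trip for timeseries data. A self-contained sketch:

```python
import pandas as pd
import mikeio

df = pd.DataFrame(
    {"Water Level": [0.1, 0.2, 0.1]},
    index=pd.date_range("2018-01-01", periods=3, freq="h"),
)
ds = mikeio.Dataset.from_pandas(df)  # one item per column
df2 = ds.to_dataframe()              # and back again
```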
Parameters ---------- @@ -2029,8 +2091,8 @@ def from_polars( ) ds ``` - """ + """ import polars as pl if datetime_col is None: diff --git a/mikeio/dfs/_dfs.py b/mikeio/dfs/_dfs.py index e59c8642a..174627f44 100644 --- a/mikeio/dfs/_dfs.py +++ b/mikeio/dfs/_dfs.py @@ -179,16 +179,21 @@ def _valid_timesteps( def _item_numbers_by_name( - dfsItemInfo: DfsDynamicItemInfo, item_names: list[str], ignore_first: bool = False + dfsItemInfo: list[DfsDynamicItemInfo], + item_names: list[str], + ignore_first: bool = False, ) -> list[int]: - """Utility function to find item numbers + """Utility function to find item numbers. Parameters ---------- - dfsItemInfo : MIKE dfs ItemInfo object - + dfsItemInfo : list[DfsDynamicItemInfo] + item info from dfs file item_names : list[str] Names of items to be found + ignore_first : bool, optional + Ignore first item, by default False + Returns ------- @@ -199,6 +204,7 @@ def _item_numbers_by_name( ------ KeyError In case item is not found in the dfs file + """ first_idx = 1 if ignore_first else 0 names = [x.Name for x in dfsItemInfo[first_idx:]] @@ -217,11 +223,12 @@ def _get_item_info( item_numbers: list[int] | None = None, ignore_first: bool = False, ) -> ItemInfoList: - """Read DFS ItemInfo for specific item numbers + """Read DFS ItemInfo for specific item numbers. Parameters ---------- dfsItemInfo : list[DfsDynamicItemInfo] + Item info from dfs file item_numbers : list[int], optional Item numbers to read, by default all items are read ignore_first : bool, optional @@ -230,6 +237,7 @@ def _get_item_info( Returns ------- ItemInfoList + """ first_idx = 1 if ignore_first else 0 if item_numbers is None: @@ -349,8 +357,7 @@ def read( keepdims: bool = False, dtype: Any = np.float32, ) -> Dataset: - """ - Read data from a dfs file + """Read data from a dfs file. Parameters --------- @@ -361,12 +368,14 @@ def read( keepdims: bool, optional When reading a single time step only, should the time-dimension be kept in the returned Dataset? by default: False + dtype: data-type, optional + Define the dtype of the returned dataset (default = np.float32) Returns ------- Dataset - """ + """ self._open() item_numbers = _valid_item_numbers(self._dfs.ItemInfo, items) @@ -430,16 +439,17 @@ def _open(self) -> None: raise NotImplementedError("Should be implemented by subclass") def _get_item_info(self, item_numbers: Sequence[int]) -> list[ItemInfo]: - """Read DFS ItemInfo + """Read DFS ItemInfo. Parameters ---------- - dfs : MIKE dfs object item_numbers : list[int] + Item numbers to read Returns ------- list[Iteminfo] + """ items = [] for item in item_numbers: @@ -459,17 +469,17 @@ def geometry(self) -> Any: @property def deletevalue(self) -> float: - "File delete value" + "File delete value." return self._deletevalue @property def n_items(self) -> int: - "Number of items" + "Number of items." return len(self.items) @property def items(self) -> list[ItemInfo]: - "List of items" + "List of items." 
return self._items @property @@ -478,12 +488,12 @@ def time(self) -> pd.DatetimeIndex | None: @property def start_time(self) -> pd.Timestamp: - """File start time""" + """File start time.""" return self._start_time @property def end_time(self) -> pd.Timestamp: - """File end time""" + """File end time.""" if self._end_time is None: self._end_time = self.read(items=[0]).time[-1].to_pydatetime() @@ -491,12 +501,12 @@ def end_time(self) -> pd.Timestamp: @property def n_timesteps(self) -> int: - """Number of time steps""" + """Number of time steps.""" return self._n_timesteps @property def timestep(self) -> Any: - """Time step size in seconds""" + """Time step size in seconds.""" # this will fail if the TimeAxisType is not calendar and equidistant, but that is ok return self._dfs.FileInfo.TimeAxis.TimeStepInSeconds() @@ -506,22 +516,22 @@ def projection_string(self) -> str: @property def longitude(self) -> float: - """Origin longitude""" + """Origin longitude.""" return self._longitude @property def latitude(self) -> float: - """Origin latitude""" + """Origin latitude.""" return self._latitude @property def origin(self) -> Any: - """Origin (in own CRS)""" + """Origin (in own CRS).""" return self.geometry.origin @property def orientation(self) -> Any: - """Orientation (in own CRS)""" + """Orientation (in own CRS).""" return self.geometry.orientation @property @@ -532,7 +542,7 @@ def is_geo(self) -> bool: @property @abstractmethod def shape(self) -> tuple[int, ...]: - """Shape of the data array""" + """Shape of the data array.""" pass def _validate_no_orientation_in_geo(self) -> None: @@ -540,7 +550,7 @@ def _validate_no_orientation_in_geo(self) -> None: raise ValueError("Orientation is not supported for LONG/LAT coordinates") def _origin_and_orientation_in_CRS(self) -> tuple[Any, float]: - """Project origin and orientation to projected CRS (if not LONG/LAT)""" + """Project origin and orientation to projected CRS (if not LONG/LAT).""" if self.is_geo: origin = self._longitude, self._latitude orientation = 0.0 diff --git a/mikeio/dfs/_dfs0.py b/mikeio/dfs/_dfs0.py index 007fbc9c0..70f12b685 100644 --- a/mikeio/dfs/_dfs0.py +++ b/mikeio/dfs/_dfs0.py @@ -83,12 +83,13 @@ def _write_dfs0( class Dfs0: def __init__(self, filename: str | Path): - """Create a Dfs0 object for reading, writing + """Create a Dfs0 object for reading, writing. Parameters ---------- filename: str or Path File name including full path to the dfs0 file. + """ self._filename = str(filename) @@ -137,8 +138,7 @@ def read( time: int | str | slice | None = None, **kwargs: Any, ) -> Dataset: - """ - Read data from a dfs0 file. + """Read data from a dfs0 file. Parameters ---------- @@ -146,11 +146,14 @@ def read( Read only selected items, by number (0-based), or by name time: int, str, datetime, pd.TimeStamp, sequence, slice or pd.DatetimeIndex, optional Read only selected time steps, by default None (=all) + **kwargs: Any + Additional keyword arguments are ignored Returns ------- Dataset A Dataset with data dimensions [t] + """ path = Path(self._filename) if not path.exists(): @@ -202,9 +205,7 @@ def read( return ds def _read(self, filename: str) -> tuple[list[np.ndarray], pd.DatetimeIndex]: - """ - Read all data from a dfs0 file. 
- """ + """Read all data from a dfs0 file.""" self._dfs = DfsFileFactory.DfsGenericOpen(filename) raw_data = self._dfs.ReadDfs0DataDouble() # Bulk read the data @@ -240,8 +241,7 @@ def _to_dfs_datatype(dtype: Any = None) -> DfsSimpleType: def to_dataframe( self, unit_in_name: bool = False, round_time: str = "ms" ) -> pd.DataFrame: - """ - Read data from the dfs0 file and return a Pandas DataFrame. + """Read data from the dfs0 file and return a Pandas DataFrame. Parameters ---------- @@ -252,6 +252,7 @@ def to_dataframe( Returns ------- pd.DataFrame + """ data, time = self._read(self._filename) items = self.items @@ -277,8 +278,7 @@ def from_dataframe( unit: EUMUnit | None = None, items: Sequence[ItemInfo] | None = None, ) -> None: - """ - Create a dfs0 from a pandas Dataframe + """Create a dfs0 from a pandas Dataframe. Parameters ---------- @@ -293,22 +293,23 @@ def from_dataframe( Same unit for all items items: list[ItemInfo] Different types, units for each items + """ return dataframe_to_dfs0(df, filename, itemtype, unit, items) @property def n_items(self) -> int: - """Number of items""" + """Number of items.""" return self._n_items @property def items(self) -> list[ItemInfo]: - """List of items""" + """List of items.""" return self._items @property def start_time(self) -> datetime: - """File start time""" + """File start time.""" return self._start_time @cached_property @@ -324,12 +325,12 @@ def end_time(self) -> datetime: @property def n_timesteps(self) -> int: - """Number of time steps""" + """Number of time steps.""" return self._n_timesteps @property def timestep(self) -> float: - """Time step size in seconds""" + """Time step size in seconds.""" if self._timeaxistype == TimeAxisType.CalendarEquidistant: return self._source.FileInfo.TimeAxis.TimeStep # type: ignore else: @@ -337,7 +338,7 @@ def timestep(self) -> float: @property def time(self) -> pd.DatetimeIndex: - """File all datetimes""" + """File all datetimes.""" if self._timeaxistype == TimeAxisType.CalendarEquidistant: freq = pd.Timedelta(seconds=self.timestep) return pd.date_range( @@ -374,11 +375,12 @@ def dataframe_to_dfs0( title: str = "", dtype: Any | None = None, ) -> None: - """ - Create a dfs0 + """Create a dfs0. Parameters ---------- + self: pd.DataFrame + Dataframe with data filename: str filename to write output itemtype: EUMType, optional @@ -391,6 +393,7 @@ def dataframe_to_dfs0( Title of dfs0 file dtype : np.dtype, optional default np.float32 + """ if not isinstance(self.index, pd.DatetimeIndex): raise ValueError( diff --git a/mikeio/dfs/_dfs1.py b/mikeio/dfs/_dfs1.py index ecfac48ef..5e705b3c7 100644 --- a/mikeio/dfs/_dfs1.py +++ b/mikeio/dfs/_dfs1.py @@ -96,15 +96,15 @@ def geometry(self) -> Grid1D: @property def x0(self) -> float: - """Start point of x values (often 0)""" + """Start point of x values (often 0).""" return self._x0 @property def dx(self) -> float: - """Step size in x direction""" + """Step size in x direction.""" return self._dx @property def nx(self) -> int: - """Number of node values""" + """Number of node values.""" return self._nx diff --git a/mikeio/dfs/_dfs2.py b/mikeio/dfs/_dfs2.py index 8c0174d52..6e3572d4f 100644 --- a/mikeio/dfs/_dfs2.py +++ b/mikeio/dfs/_dfs2.py @@ -152,8 +152,7 @@ def read( keepdims: bool = False, dtype: Any = np.float32, ) -> Dataset: - """ - Read data from a dfs2 file + """Read data from a dfs2 file. 
Parameters --------- @@ -172,8 +171,8 @@ def read( Returns ------- Dataset - """ + """ self._open() item_numbers = _valid_item_numbers(self._dfs.ItemInfo, items) @@ -243,8 +242,7 @@ def read( ) def append(self, ds: Dataset, validate: bool = True) -> None: - """ - Append a Dataset to an existing dfs2 file + """Append a Dataset to an existing dfs2 file. Parameters ---------- @@ -257,6 +255,7 @@ def append(self, ds: Dataset, validate: bool = True) -> None: Notes ----- The original file is modified. + """ if validate: if self.geometry != ds.geometry: @@ -279,41 +278,41 @@ def _open(self) -> None: @property def geometry(self) -> Grid2D: - """Spatial information""" + """Spatial information.""" assert isinstance(self._geometry, Grid2D) return self._geometry @property def x0(self) -> Any: - """Start point of x values (often 0)""" + """Start point of x values (often 0).""" return self.geometry.x[0] @property def y0(self) -> Any: - """Start point of y values (often 0)""" + """Start point of y values (often 0).""" return self.geometry.y[0] @property def dx(self) -> float: - """Step size in x direction""" + """Step size in x direction.""" return self.geometry.dx @property def dy(self) -> float: - """Step size in y direction""" + """Step size in y direction.""" return self.geometry.dy @property def shape(self) -> tuple[int, ...]: - """Tuple with number of values in the t-, y-, x-direction""" + """Tuple with number of values in the t-, y-, x-direction.""" return (self._n_timesteps, self.geometry.ny, self.geometry.nx) @property def nx(self) -> int: - """Number of values in the x-direction""" + """Number of values in the x-direction.""" return self.geometry.nx @property def ny(self) -> int: - """Number of values in the y-direction""" + """Number of values in the y-direction.""" return self.geometry.ny diff --git a/mikeio/dfs/_dfs3.py b/mikeio/dfs/_dfs3.py index 90055a282..d0827e6c9 100644 --- a/mikeio/dfs/_dfs3.py +++ b/mikeio/dfs/_dfs3.py @@ -155,8 +155,7 @@ def read( keepdims: bool = False, dtype: Any = np.float32, ) -> Dataset: - """ - Read data from a dfs3 file + """Read data from a dfs3 file. Parameters --------- @@ -164,6 +163,8 @@ def read( Read only selected items, by number (0-based), or by name time: int, str, datetime, pd.TimeStamp, sequence, slice or pd.DatetimeIndex, optional Read only selected time steps, by default None (=all) + area: tuple[float, float, float, float], optional + Read only data within the specified rectangular area (x0, x1, y0, y1) keepdims: bool, optional When reading a single time step or a single layer only, should the singleton dimension be kept @@ -176,8 +177,8 @@ def read( Returns ------- Dataset - """ + """ if area is not None: raise NotImplementedError("area subsetting is not yet implemented for Dfs3") # NOTE: @@ -267,8 +268,7 @@ def read( ) def append(self, ds: Dataset, validate: bool = True) -> None: - """ - Append a Dataset to an existing dfs3 file + """Append a Dataset to an existing dfs3 file. Parameters ---------- @@ -280,6 +280,7 @@ def append(self, ds: Dataset, validate: bool = True) -> None: Notes ----- The original file is modified. 
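For orientation, the dfs2 `read` documented above is typically driven through `mikeio.open`; the time argument accepts strings and slices as listed in the docstring. A sketch with an illustrative file name:

```python
import mikeio

dfs = mikeio.open("waves.dfs2")  # hypothetical dfs2 file
ds = dfs.read(items=[0], time=slice("2004-01-01", "2004-01-03"))
print(ds.shape)                  # (t, ny, nx)
```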
+
         """
         if validate:
             if self.geometry != ds.geometry:
@@ -313,17 +314,17 @@ def geometry(self) -> Grid3D:
 
     @property
     def dx(self) -> float:
-        """Step size in x direction"""
+        """Step size in x direction."""
         return self._dx
 
     @property
     def dy(self) -> float:
-        """Step size in y direction"""
+        """Step size in y direction."""
         return self._dy
 
     @property
     def dz(self) -> float:
-        """Step size in y direction"""
+        """Step size in z direction."""
         return self._dz
 
     @property
diff --git a/mikeio/dfsu/_dfsu.py b/mikeio/dfsu/_dfsu.py
index 292399fa7..768df5726 100644
--- a/mikeio/dfsu/_dfsu.py
+++ b/mikeio/dfsu/_dfsu.py
@@ -35,7 +35,7 @@
 
 
 def write_dfsu(filename: str | Path, data: Dataset) -> None:
-    """Write a dfsu file
+    """Write a dfsu file.
 
     Parameters
     ----------
@@ -43,6 +43,7 @@ def write_dfsu(filename: str | Path, data: Dataset) -> None:
         dfsu filename
     data: Dataset
         Dataset to be written
+
     """
     filename = str(filename)
 
@@ -121,7 +122,7 @@ def write_dfsu_data(dfs: DfsuFile, ds: Dataset, is_layered: bool) -> None:
 
 
 def _validate_elements_and_geometry_sel(elements: Any, **kwargs: Any) -> None:
-    """Check that only one of elements, area, x, y is selected"""
+    """Check that only one of elements, area, x, y is selected."""
     used_kwargs = [key for key, val in kwargs.items() if val is not None]
 
     if elements is not None and len(used_kwargs) > 0:
@@ -316,37 +317,37 @@ def geometry(self) -> Any:
 
     @property
     def deletevalue(self) -> float:
-        """File delete value"""
+        """File delete value."""
        return self._deletevalue
 
     @property
     def n_items(self) -> int:
-        """Number of items"""
+        """Number of items."""
         return len(self.items)
 
     @property
     def items(self) -> list[ItemInfo]:
-        """List of items"""
+        """List of items."""
         return self._items
 
     @property
     def start_time(self) -> datetime:
-        """File start time"""
+        """File start time."""
         return self._start_time
 
     @property
     def n_timesteps(self) -> int:
-        """Number of time steps"""
+        """Number of time steps."""
         return self._n_timesteps
 
     @property
     def timestep(self) -> float:
-        """Time step size in seconds"""
+        """Time step size in seconds."""
         return self._timestep
 
     @property
     def end_time(self) -> pd.Timestamp:
-        """File end time"""
+        """File end time."""
         if self._equidistant:
             return self.time[-1]
         else:
@@ -402,8 +403,7 @@ def read(
         error_bad_data: bool = True,
         fill_bad_data_value: float = np.nan,
     ) -> Dataset:
-        """
-        Read data from a dfsu file
+        """Read data from a dfsu file.
 
         Parameters
-        ---------
+        ----------
@@ -428,13 +428,15 @@ def read(
-        fill_bad_data_value: fill value for to impute corrupt data,
+        fill_bad_data_value: fill value used to impute corrupt data,
             used in conjunction with error_bad_data=False
             default np.nan
+        dtype: Any, optional
+            Data type to read, by default np.float32
 
         Returns
         -------
         Dataset
             A Dataset with data dimensions [t,elements]
-        """
+
+        """
         if dtype not in [np.float32, np.float64]:
             raise ValueError("Invalid data type. Choose np.float32 or np.float64")
         dfs = DfsuFile.Open(self._filename)
@@ -528,15 +530,15 @@ def read(
     )
 
     def append(self, ds: Dataset, validate: bool = True) -> None:
-        """
-        Append data to an existing dfsu file
+        """Append data to an existing dfsu file.
 
         Parameters
         ----------
-        data: Dataset
+        ds: Dataset
             Dataset to be appended
         validate: bool, optional
             Validate that the items and geometry match, by default True
+
         """
         if validate:
             if ds.geometry != self.geometry:
@@ -559,7 +561,7 @@ def _parse_geometry_sel(
         x: float | None,
         y: float | None,
     ) -> np.ndarray | None:
-        """Parse geometry selection
+        """Parse geometry selection.
Parameters ---------- @@ -583,6 +585,7 @@ def _parse_geometry_sel( ------ ValueError If no elements are found in selection + """ elements = None @@ -607,7 +610,7 @@ def get_overset_grid( ny: int | None = None, buffer: float = 0.0, ) -> Grid2D: - """get a 2d grid that covers the domain by specifying spacing or shape + """get a 2d grid that covers the domain by specifying spacing or shape. Parameters ---------- @@ -630,6 +633,7 @@ def get_overset_grid( ------- 2d grid + """ nc = self.geometry.geometry2d.node_coordinates bbox = xy_to_bbox(nc, buffer=buffer) @@ -657,8 +661,7 @@ def extract_track( method: Literal["nearest", "inverse_distance"] = "nearest", dtype: Any = np.float32, ) -> Dataset: - """ - Extract track data from a dfsu file + """Extract track data from a dfsu file. Parameters --------- @@ -672,6 +675,8 @@ def extract_track( method: str, optional Spatial interpolation method ('nearest' or 'inverse_distance') default='nearest' + dtype: Any, optional + Data type to read, by default np.float32 Returns ------- @@ -693,6 +698,7 @@ def extract_track( 1: Latitude (undefined) 2: Surface elevation (meter) 3: Wind speed (meter per sec) + """ dfs = DfsuFile.Open(self._filename) diff --git a/mikeio/dfsu/_layered.py b/mikeio/dfsu/_layered.py index face729a9..b5821f136 100644 --- a/mikeio/dfsu/_layered.py +++ b/mikeio/dfsu/_layered.py @@ -84,37 +84,37 @@ def __repr__(self) -> str: @property def deletevalue(self) -> float: - """File delete value""" + """File delete value.""" return self._deletevalue @property def n_items(self) -> int: - """Number of items""" + """Number of items.""" return len(self.items) @property def items(self) -> list[ItemInfo]: - """List of items""" + """List of items.""" return self._items @property def start_time(self) -> pd.Timestamp: - """File start time""" + """File start time.""" return self._start_time @property def n_timesteps(self) -> int: - """Number of time steps""" + """Number of time steps.""" return self._n_timesteps @property def timestep(self) -> float: - """Time step size in seconds""" + """Time step size in seconds.""" return self._timestep @property def end_time(self) -> pd.Timestamp: - """File end time""" + """File end time.""" if self._equidistant: return self.time[-1] else: @@ -184,17 +184,17 @@ def _read_geometry(filename: str) -> GeometryFM3D | GeometryFMVerticalProfile: @property def n_layers(self) -> int: - """Maximum number of layers""" + """Maximum number of layers.""" return self.geometry._n_layers @property def n_sigma_layers(self) -> int: - """Number of sigma layers""" + """Number of sigma layers.""" return self.geometry.n_sigma_layers @property def n_z_layers(self) -> int: - """Maximum number of z-layers""" + """Maximum number of z-layers.""" return self.n_layers - self.n_sigma_layers def read( @@ -213,8 +213,7 @@ def read( error_bad_data: bool = True, fill_bad_data_value: float = np.nan, ) -> Dataset: - """ - Read data from a dfsu file + """Read data from a dfsu file. Parameters --------- @@ -244,11 +243,14 @@ def read( fill_bad_data_value: fill value for to impute corrupt data, used in conjunction with error_bad_data=False default np.nan + dtype: numpy.dtype, optional + Data type to read, by default np.float32 Returns ------- Dataset A Dataset with data dimensions [t,elements] + """ if dtype not in [np.float32, np.float64]: raise ValueError("Invalid data type. 
Choose np.float32 or np.float64")
@@ -381,8 +383,7 @@ def read(
     )
 
     def append(self, ds: Dataset, validate: bool = True) -> None:
-        """
-        Append data to a dfsu file
+        """Append data to a dfsu file.
 
         Parameters
-        ---------
+        ----------
@@ -390,6 +391,7 @@ def append(self, ds: Dataset, validate: bool = True) -> None:
             Dataset to append
         validate: bool, optional
             Validate that the dataset to append has the same geometry and items, by default True
+
         """
         if validate:
             if self.geometry != ds.geometry:
@@ -411,14 +413,12 @@ class Dfsu2DV(DfsuLayered):
     def plot_vertical_profile(
         self,
         values: np.ndarray | DataArray,
-        time_step: int | None = None,
         cmin: float | None = None,
         cmax: float | None = None,
         label: str = "",
         **kwargs: Any,
     ) -> Axes:
-        """
-        Plot unstructured vertical profile
+        """Plot unstructured vertical profile.
 
         Parameters
         ----------
@@ -438,17 +438,16 @@ def plot_vertical_profile(
             specify size of figure
         ax: matplotlib.axes, optional
             Adding to existing axis, instead of creating new fig
+        **kwargs: Any
+            Additional keyword arguments
 
         Returns
         -------
+        matplotlib.axes.Axes
+
         """
         if isinstance(values, DataArray):
             values = values.to_numpy()
 
-        if time_step is not None:
-            raise NotImplementedError(
-                "Deprecated functionality. Instead, read as DataArray da, then use da.plot()"
-            )
         g = self.geometry
         return _plot_vertical_profile(
@@ -467,18 +466,18 @@ def plot_vertical_profile(
 class Dfsu3D(DfsuLayered):
     @property
     def geometry2d(self) -> GeometryFM2D:
-        """The 2d geometry for a 3d object"""
+        """The 2d geometry for a 3d object."""
         return self.geometry.geometry2d
 
     def extract_surface_elevation_from_3d(self, n_nearest: int = 4) -> DataArray:
-        """
-        Extract surface elevation from a 3d dfsu file (based on zn)
-        to a new 2d dfsu file with a surface elevation item.
+        """Extract surface elevation from a 3d dfsu file (based on zn)
+        as a 2d DataArray with a surface elevation item.
 
         Parameters
-        ---------
+        ----------
         n_nearest: int, optional
             number of points for spatial interpolation (inverse_distance), default=4
+
         """
         # validate input
         assert (
diff --git a/mikeio/dfsu/_mesh.py b/mikeio/dfsu/_mesh.py
index 276658958..809c02fb8 100644
--- a/mikeio/dfsu/_mesh.py
+++ b/mikeio/dfsu/_mesh.py
@@ -20,8 +20,7 @@
 
 
 class Mesh:
-    """
-    The Mesh class is initialized with a mesh file.
+    """The Mesh class is initialized with a mesh file.
Parameters --------- @@ -39,6 +38,7 @@ class Mesh: import mikeio mikeio.Mesh("../data/odense_rough.mesh") ``` + """ def __init__(self, filename: str | Path) -> None: @@ -75,37 +75,37 @@ def __repr__(self) -> str: # TODO re-consider if all of these properties are needed, since they all are available in the geometry @property def n_elements(self) -> int: - """Number of elements""" + """Number of elements.""" return self.geometry.n_elements @property def element_coordinates(self) -> np.ndarray: - """Coordinates of element centroids""" + """Coordinates of element centroids.""" return self.geometry.element_coordinates @property def node_coordinates(self) -> np.ndarray: - """Coordinates of nodes""" + """Coordinates of nodes.""" return self.geometry.node_coordinates @property def n_nodes(self) -> int: - """Number of nodes""" + """Number of nodes.""" return self.geometry.n_nodes @property def codes(self) -> np.ndarray: - """Codes of nodes""" + """Codes of nodes.""" return self.geometry.codes @property def element_table(self) -> np.ndarray: - """Element table""" + """Element table.""" return self.geometry.element_table @property def zn(self) -> np.ndarray: - """Static bathymetry values (depth) at nodes""" + """Static bathymetry values (depth) at nodes.""" return self.geometry.node_coordinates[:, 2] @zn.setter @@ -118,14 +118,14 @@ def write( self, outfilename: str | Path, ) -> None: - """write mesh to file + """write mesh to file. Parameters ---------- outfilename : str path to file - """ + """ geometry = self.geometry assert isinstance(geometry, GeometryFM2D) # i.e. not a GeometryPoint2d @@ -133,7 +133,7 @@ def write( self.geometry.to_mesh(outfilename=outfilename) def to_shapely(self) -> MultiPolygon: - """Convert Mesh geometry to shapely MultiPolygon + """Convert Mesh geometry to shapely MultiPolygon. 
Returns ------- @@ -147,5 +147,6 @@ def to_shapely(self) -> MultiPolygon: msh = mikeio.open("../data/odense_rough.mesh") msh.to_shapely() ``` + """ return self.geometry.to_shapely() diff --git a/mikeio/dfsu/_spectral.py b/mikeio/dfsu/_spectral.py index 6026f5210..e34d80b91 100644 --- a/mikeio/dfsu/_spectral.py +++ b/mikeio/dfsu/_spectral.py @@ -72,42 +72,42 @@ def __repr__(self) -> str: def geometry( self, ) -> GeometryFMPointSpectrum | GeometryFMLineSpectrum | GeometryFMAreaSpectrum: - """Geometry""" + """Geometry.""" return self._geometry @property def deletevalue(self) -> float: - """File delete value""" + """File delete value.""" return self._deletevalue @property def n_items(self) -> int: - """Number of items""" + """Number of items.""" return len(self.items) @property def items(self) -> list[ItemInfo]: - """List of items""" + """List of items.""" return self._items @property def start_time(self) -> pd.Timestamp: - """File start time""" + """File start time.""" return self._start_time @property def n_timesteps(self) -> int: - """Number of time steps""" + """Number of time steps.""" return self._n_timesteps @property def timestep(self) -> float: - """Time step size in seconds""" + """Time step size in seconds.""" return self._timestep @property def end_time(self) -> pd.Timestamp: - """File end time""" + """File end time.""" if self._equidistant: return self.time[-1] else: @@ -184,7 +184,7 @@ def _read_geometry( @staticmethod def _get_direction_unit(filename: str) -> int: - """Determine if the directional axis is in degrees or radians""" + """Determine if the directional axis is in degrees or radians.""" source = DfsFileFactory.DfsGenericOpen(filename) try: for static_item in iter(source.ReadStaticItemNext, None): @@ -197,22 +197,22 @@ def _get_direction_unit(filename: str) -> int: @property def n_frequencies(self) -> int | None: - """Number of frequencies""" + """Number of frequencies.""" return 0 if self.frequencies is None else len(self.frequencies) @property def frequencies(self) -> np.ndarray | None: - """Frequency axis""" + """Frequency axis.""" return self.geometry._frequencies @property def n_directions(self) -> int | None: - """Number of directions""" + """Number of directions.""" return 0 if self.directions is None else len(self.directions) @property def directions(self) -> np.ndarray | None: - """Directional axis""" + """Directional axis.""" return self.geometry._directions def _get_spectral_data_shape( @@ -266,8 +266,7 @@ def read( keepdims: bool = False, dtype: Any = np.float32, ) -> Dataset: - """ - Read data from a spectral dfsu file + """Read data from a spectral dfsu file. Parameters --------- @@ -289,6 +288,8 @@ def read( Read only selected element ids (spectral area files only) nodes: list[int], optional Read only selected node ids (spectral line files only) + dtype: numpy.dtype, optional + Data type to read. Default is np.float32 Returns ------- @@ -312,6 +313,7 @@ def read( geometry: DfsuSpectral2D (40 elements, 33 nodes) items: 0: Energy density (meter pow 2 sec per deg) + """ if dtype not in [np.float32, np.float64]: raise ValueError("Invalid data type. Choose np.float32 or np.float64") @@ -391,7 +393,7 @@ def _parse_geometry_sel( x: float | None, y: float | None, ) -> np.ndarray | None: - """Parse geometry selection + """Parse geometry selection. 
Parameters ---------- @@ -415,6 +417,7 @@ def _parse_geometry_sel( ------ ValueError If no elements are found in selection + """ elements = None @@ -481,7 +484,7 @@ def _parse_elements_nodes( def calc_Hm0_from_spectrum( self, spectrum: np.ndarray | DataArray, tail: bool = True ) -> np.ndarray: - """Calculate significant wave height (Hm0) from spectrum + """Calculate significant wave height (Hm0) from spectrum. Parameters ---------- @@ -494,6 +497,7 @@ def calc_Hm0_from_spectrum( ------- np.ndarray significant wave height values + """ if isinstance(spectrum, DataArray): m0 = calc_m0_from_spectrum( diff --git a/mikeio/eum/_eum.py b/mikeio/eum/_eum.py index d40bc6816..3a16f9dfe 100644 --- a/mikeio/eum/_eum.py +++ b/mikeio/eum/_eum.py @@ -29,7 +29,7 @@ def _type_list(search: str | None = None) -> dict[eumItem, str]: - """Get a dictionary of the EUM items + """Get a dictionary of the EUM items. Notes ----- @@ -44,6 +44,7 @@ def _type_list(search: str | None = None) -> dict[eumItem, str]: ------- dict names and codes for EUM items + """ items = {} check = True @@ -70,17 +71,18 @@ def _type_list(search: str | None = None) -> dict[eumItem, str]: def _unit_list(eum_type: int) -> dict[str, eumUnit]: - """Get a dictionary of valid units + """Get a dictionary of valid units. Parameters ---------- - type_enum: int + eum_type: int EUM variable type, e.g. 100006 or EUMType.Temperature Returns ------- dict names and codes for valid units + """ items = {} n_units_for_eum_type = eumWrapper.eumGetItemUnitCount(eum_type) @@ -111,7 +113,7 @@ class TimeStepUnit(IntEnum): class EUMType(IntEnum): - """EUM type + """EUM type. Examples -------- @@ -123,6 +125,7 @@ class EUMType(IntEnum): ```{python} mikeio.EUMType.Temperature.units ``` + """ Water_Level = 100000 @@ -719,7 +722,7 @@ def __init__(self, code: int) -> None: @property def display_name(self) -> str: - """Display friendly name""" + """Display friendly name.""" name = self.name name = name.replace("_", " ") return name @@ -729,7 +732,7 @@ def __repr__(self) -> str: @property def units(self) -> list[EUMUnit]: - """List valid units for this EUM type""" + """List valid units for this EUM type.""" temp = _unit_list(self.code).values() return [EUMUnit(value) for value in temp] @@ -740,7 +743,7 @@ def search(pattern: str) -> list[EUMType]: class EUMUnit(IntEnum): - """EUM unit + """EUM unit. Examples -------- @@ -748,6 +751,7 @@ class EUMUnit(IntEnum): import mikeio mikeio.EUMUnit.degree_Kelvin ``` + """ meter = 1000 @@ -1360,7 +1364,7 @@ def __init__(self, code: int) -> None: @property def display_name(self) -> str: - """Display friendly name""" + """Display friendly name.""" name = self.name name = name.replace("_", " ") return name @@ -1395,7 +1399,7 @@ def __repr__(self) -> str: class ItemInfo: - """ItemInfo + """ItemInfo. 
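A quick sketch of the EUM API whose docstrings are touched here, using only calls shown in this patch:

```python
import mikeio

item = mikeio.ItemInfo("WL", mikeio.EUMType.Water_Level)  # name + type, default unit
print(mikeio.EUMType.Wind_speed.units)                    # valid units for a type
print(mikeio.EUMType.search("speed"))                     # discover types by name
```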
Parameters
     ----------
@@ -1418,6 +1422,7 @@ class ItemInfo:
     ```{python}
     mikeio.ItemInfo(mikeio.EUMType.Wind_speed)
     ```
+
     """

     def __init__(
@@ -1493,7 +1498,7 @@ def __repr__(self) -> str:

     @staticmethod
     def from_mikecore_dynamic_item_info(dfsItemInfo: DfsDynamicItemInfo) -> "ItemInfo":
-        """Create ItemInfo from a mikecore.DfsDynamicItemInfo object"""
+        """Create ItemInfo from a mikecore.DfsDynamicItemInfo object."""
         name = dfsItemInfo.Name
         item = dfsItemInfo.Quantity.Item
         unit = dfsItemInfo.Quantity.Unit
diff --git a/mikeio/exceptions.py b/mikeio/exceptions.py
index 4c52689f0..e38208e5f 100644
--- a/mikeio/exceptions.py
+++ b/mikeio/exceptions.py
@@ -1,8 +1,12 @@
+"""Custom exceptions for mikeio."""
+
 from __future__ import annotations
 from typing import Any


 class DataDimensionMismatch(ValueError):
+    """Raised when data matrices in the x dimension do not all match in the data list."""
+
     def __init__(self) -> None:
         self.message = (
             "Data matrices in the x dimension do not all match in the data list."
@@ -12,6 +16,8 @@ def __init__(self) -> None:


 class ItemsError(ValueError):
+    """Raised when items are not integers or strings."""
+
     def __init__(self, n_items_file: int) -> None:
         self.n_items_file = n_items_file
         super().__init__(
@@ -20,6 +26,8 @@ def __init__(self, n_items_file: int) -> None:


 class InvalidGeometry(ValueError):
+    """Raised when an invalid geometry is used."""
+
     def __init__(
         self, message: str = "Invalid operation for this type of geometry"
     ) -> None:
@@ -27,6 +35,8 @@ def __init__(


 class InvalidDataValueType(ValueError):
+    """Raised when an invalid data value type is used."""
+
     def __init__(self) -> None:
         super().__init__(
             "Invalid data type. Choose 'Instantaneous', 'Accumulated', 'StepAccumulated', "
@@ -35,6 +45,8 @@ def __init__(self) -> None:


 class OutsideModelDomainError(ValueError):
+    """Raised when point(s) are outside the model domain."""
+
     def __init__(
         self,
         *,
diff --git a/mikeio/generic.py b/mikeio/generic.py
index 7cf0e29aa..555467435 100644
--- a/mikeio/generic.py
+++ b/mikeio/generic.py
@@ -1,3 +1,5 @@
+"""Generic functions for working with all types of dfs files."""
+
 from __future__ import annotations
 import math
 import os
@@ -38,7 +40,7 @@


 class _ChunkInfo:
-    """Class for keeping track of an chunked processing
+    """Class for keeping track of chunked processing.
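The exception classes gaining docstrings here are part of mikeio's public surface. A hedged sketch of how `OutsideModelDomainError` typically surfaces, assuming a placeholder dfsu file and that the geometry's `find_index` raises it for out-of-domain points:

```python
import mikeio
from mikeio.exceptions import OutsideModelDomainError

dfs = mikeio.open("area.dfsu")  # placeholder flexible-mesh file
try:
    idx = dfs.geometry.find_index(x=1.0e9, y=1.0e9)  # far outside the mesh
except OutsideModelDomainError as err:
    print("point is outside the model domain:", err)
```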
Parameters ---------- @@ -64,6 +66,7 @@ class _ChunkInfo: Return the end index for a chunk from_dfs(dfs, item_numbers, buffer_size) Calculate chunk info based on # of elements in dfs file and selected buffer size + """ def __init__(self, n_data: int, n_chunks: int): @@ -75,15 +78,15 @@ def __repr__(self) -> str: @property def chunk_size(self) -> int: - """number of data points per chunk""" + """number of data points per chunk.""" return math.ceil(self.n_data / self.n_chunks) def stop(self, start: int) -> int: - """Return the stop index for a chunk""" + """Return the stop index for a chunk.""" return min(start + self.chunk_size, self.n_data) def chunk_end(self, start: int) -> int: - """Return the end index for a chunk""" + """Return the end index for a chunk.""" e2 = self.stop(start) return self.chunk_size - ((start + self.chunk_size) - e2) @@ -91,8 +94,7 @@ def chunk_end(self, start: int) -> int: def from_dfs( dfs: DfsFile, item_numbers: list[int], buffer_size: float ) -> "_ChunkInfo": - """Calculate chunk info based on # of elements in dfs file and selected buffer size""" - + """Calculate chunk info based on # of elements in dfs file and selected buffer size.""" n_time_steps = dfs.FileInfo.TimeAxis.NumberOfTimeSteps n_data_all: int = np.sum([dfs.ItemInfo[i].ElementCount for i in item_numbers]) mem_need = 8 * n_time_steps * n_data_all # n_items * @@ -109,7 +111,7 @@ def _clone( timestep: float | None = None, items: Sequence[int | str | DfsDynamicItemInfo] | None = None, ) -> DfsFile: - """Clone a dfs file + """Clone a dfs file. Parameters ---------- @@ -130,6 +132,7 @@ def _clone( ------- DfsFile MIKE generic dfs file object + """ source = DfsFileFactory.DfsGenericOpen(str(infilename)) fi = source.FileInfo @@ -212,7 +215,7 @@ def scale( factor: float = 1.0, items: Sequence[int | str] | None = None, ) -> None: - """Apply scaling to any dfs file + """Apply scaling to any dfs file. Parameters ---------- @@ -227,6 +230,7 @@ def scale( value to multiply to all items, default 1.0 items: list[str] or list[int], optional Process only selected items, by number (0-based) or name, by default: all + """ infilename = str(infilename) outfilename = str(outfilename) @@ -263,8 +267,7 @@ def fill_corrupt( fill_value: float = np.nan, items: Sequence[str | int] | None = None, ) -> None: - """ - Replace corrupt (unreadable) data with fill_value, default delete value. + """Replace corrupt (unreadable) data with fill_value, default delete value. Parameters ---------- @@ -277,6 +280,7 @@ def fill_corrupt( value to use where data is corrupt, default delete value items: list[str] or list[int], optional Process only selected items, by number (0-based) or name, by default: all + """ dfs_i = DfsFileFactory.DfsGenericOpen(infilename) @@ -323,7 +327,7 @@ def sum( infilename_b: str | pathlib.Path, outfilename: str | pathlib.Path, ) -> None: - """Sum two dfs files (a+b) + """Sum two dfs files (a+b). Parameters ---------- @@ -333,6 +337,7 @@ def sum( full path to the second input file outfilename: str | pathlib.Path full path to the output file + """ infilename_a = str(infilename_a) infilename_b = str(infilename_b) @@ -376,7 +381,7 @@ def diff( infilename_b: str | pathlib.Path, outfilename: str | pathlib.Path, ) -> None: - """Calculate difference between two dfs files (a-b) + """Calculate difference between two dfs files (a-b). 
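The `generic` functions above all work file-to-file; a minimal sketch with placeholder file names, using the signatures documented in this hunk:

```python
import mikeio.generic as generic

# add an offset and apply a factor to all items (see scale above)
generic.scale("model.dfsu", "model_corrected.dfsu", offset=-0.2, factor=1.05)

# item-wise difference of two otherwise identical files (a - b)
generic.diff("run_a.dfsu", "run_b.dfsu", "run_a_minus_b.dfsu")
```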
Parameters ---------- @@ -386,6 +391,7 @@ def diff( full path to the second input file outfilename: str | pathlib.Path full path to the output file + """ infilename_a = str(infilename_a) infilename_b = str(infilename_b) @@ -431,7 +437,7 @@ def concat( outfilename: str | pathlib.Path, keep: str = "last", ) -> None: - """Concatenates files along the time axis + """Concatenates files along the time axis. Overlap handling is defined by the `keep` argument, by default the last one will be used. @@ -449,6 +455,7 @@ def concat( ------ The list of input files have to be sorted, i.e. in chronological order + """ # fast path for Dfs0 suffix = pathlib.Path(infilenames[0]).suffix @@ -556,7 +563,7 @@ def extract( step: int = 1, items: Sequence[int | str] | None = None, ) -> None: - """Extract timesteps and/or items to a new dfs file + """Extract timesteps and/or items to a new dfs file. Parameters ---------- @@ -584,6 +591,7 @@ def extract( >>> extract('f_in.dfsu', 'f_out.dfsu', items=[2, 0]) >>> extract('f_in.dfsu', 'f_out.dfsu', items="Salinity") >>> extract('f_in.dfsu', 'f_out.dfsu', end='2018-2-1 00:00', items="Salinity") + """ dfs_i = DfsFileFactory.DfsGenericOpenEdit(str(infilename)) @@ -644,7 +652,7 @@ def _parse_start_end( start: int | float | str | datetime, end: int | float | str | datetime, ) -> tuple[datetime | None, int, float, int, float]: # TODO better return type - """Helper function for parsing start and end arguments""" + """Helper function for parsing start and end arguments.""" n_time_steps = time_axis.NumberOfTimeSteps file_start_datetime = time_axis.StartDateTime file_start_sec = time_axis.StartTimeOffset @@ -723,7 +731,7 @@ def _parse_start_end( def _parse_step(time_axis: TimeAxis, step: int) -> float | None: - """Helper function for parsing step argument""" + """Helper function for parsing step argument.""" if step == 1: timestep = None elif time_axis.TimeAxisType == 3: @@ -740,7 +748,7 @@ def avg_time( outfilename: str | pathlib.Path, skipna: bool = True, ) -> None: - """Create a temporally averaged dfs file + """Create a temporally averaged dfs file. Parameters ---------- @@ -750,8 +758,8 @@ def avg_time( output filename skipna : bool, optional exclude NaN/delete values when computing the result, default True - """ + """ dfs_i = DfsFileFactory.DfsGenericOpen(str(infilename)) dfs_o = _clone(infilename, outfilename) @@ -809,7 +817,7 @@ def quantile( skipna: bool = True, buffer_size: float = 1.0e9, ) -> None: - """Create temporal quantiles of all items in dfs file + """Create temporal quantiles of all items in dfs file. Parameters ---------- @@ -836,6 +844,7 @@ def quantile( >>> quantile("huge.dfsu", "Q01.dfsu", q=0.1, buffer_size=5.0e9) >>> quantile("with_nans.dfsu", "Q05.dfsu", q=0.5, skipna=False) + """ func = np.nanquantile if skipna else np.quantile @@ -918,7 +927,7 @@ def quantile( def _read_item(dfs: DfsFile, item: int, timestep: int) -> np.ndarray: - """Read item data from dfs file + """Read item data from dfs file. 
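Continuing with the time-axis utilities documented above, a sketch with placeholder file names; note that inputs to `concat` must be chronologically sorted, per its Notes section:

```python
from mikeio import generic

# chronologically sorted inputs; overlap resolved by keep="last"
generic.concat(["january.dfs0", "february.dfs0"], "both.dfs0", keep="last")

generic.avg_time("f_in.dfsu", "f_mean.dfsu", skipna=True)   # temporal mean
generic.quantile("f_in.dfsu", "f_q90.dfsu", q=0.9)          # temporal quantile
```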
Parameters ---------- @@ -933,6 +942,7 @@ def _read_item(dfs: DfsFile, item: int, timestep: int) -> np.ndarray: ------- np.ndarray item data + """ indatatime = dfs.ReadItemTimeStep(item + 1, timestepIndex=timestep) indata = indatatime.Data @@ -945,7 +955,7 @@ def _read_item(dfs: DfsFile, item: int, timestep: int) -> np.ndarray: def _get_repeated_items( items_in: list[DfsDynamicItemInfo], prefixes: list[str] ) -> list[ItemInfo]: - """Create new items by repeating the items in items_in with the prefixes + """Create new items by repeating the items in items_in with the prefixes. Parameters ---------- @@ -958,6 +968,7 @@ def _get_repeated_items( ------- list[ItemInfo] List of new items + """ item_numbers = _valid_item_numbers(items_in) items_in = _get_item_info(items_in) diff --git a/mikeio/pfs/__init__.py b/mikeio/pfs/__init__.py index 98f75939b..0faae18cf 100644 --- a/mikeio/pfs/__init__.py +++ b/mikeio/pfs/__init__.py @@ -10,7 +10,7 @@ def read_pfs( encoding: str = "cp1252", unique_keywords: bool = False, ) -> PfsDocument: - """Read a pfs file to a Pfs object for further analysis/manipulation + """Read a pfs file to a Pfs object for further analysis/manipulation. Parameters ---------- @@ -29,6 +29,7 @@ def read_pfs( ------- PfsDocument A PfsDocument object + """ return PfsDocument(filename, encoding=encoding, unique_keywords=unique_keywords) diff --git a/mikeio/pfs/_pfsdocument.py b/mikeio/pfs/_pfsdocument.py index 75159ed5a..0a5df1a6d 100644 --- a/mikeio/pfs/_pfsdocument.py +++ b/mikeio/pfs/_pfsdocument.py @@ -68,7 +68,7 @@ def map_constructor_duplicate_sections( class PfsDocument(PfsSection): - """Create a PfsDocument object for reading, writing and manipulating pfs files + """Create a PfsDocument object for reading, writing and manipulating pfs files. 
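A one-line sketch of `read_pfs` as documented above; the file name is a placeholder:

```python
import mikeio

# unique_keywords=False keeps repeated keywords as non-unique lists
pfs = mikeio.read_pfs("lake.sw", unique_keywords=False)
```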
Parameters ---------- @@ -83,6 +83,7 @@ class PfsDocument(PfsSection): If True: warnings will be issued if non-unique keywords are present and the first occurence will be used by default False + """ def __init__( @@ -109,7 +110,7 @@ def __init__( @staticmethod def from_text(text: str) -> PfsDocument: - """Create a PfsDocument from a string""" + """Create a PfsDocument from a string.""" from io import StringIO f = StringIO(text) @@ -129,7 +130,7 @@ def _to_nonunique_key_dict(keys: Any, vals: Any) -> dict[Any, Any]: return data def keys(self) -> list[str]: # type: ignore - """Return a list of the PfsDocument's keys (target names)""" + """Return a list of the PfsDocument's keys (target names).""" return [k for k, _ in self.items()] def values(self) -> list[PfsSection | PfsNonUniqueList]: # type: ignore @@ -137,11 +138,11 @@ def values(self) -> list[PfsSection | PfsNonUniqueList]: # type: ignore return [v for _, v in self.items()] def items(self) -> list[tuple[str, PfsSection | PfsNonUniqueList]]: # type: ignore - """Return a new view of the PfsDocument's items ((key, value) pairs)""" + """Return a new view of the PfsDocument's items ((key, value) pairs).""" return [(k, v) for k, v in self.__dict__.items() if k not in self._ALIAS_LIST] def to_dict(self) -> dict: - """Convert to (nested) dict (as a copy)""" + """Convert to (nested) dict (as a copy).""" d = super().to_dict() _ = d.pop("_ALIAS_LIST") return d @@ -162,13 +163,13 @@ def _unravel_items(items: Callable) -> tuple[list, list]: @property def targets(self) -> list[PfsSection]: - """List of targets (root sections)""" + """List of targets (root sections).""" _, rvals = self._unravel_items(self.items) return rvals @property def n_targets(self) -> int: - """Number of targets (root sections)""" + """Number of targets (root sections).""" return len(self.targets) @property @@ -178,13 +179,12 @@ def is_unique(self) -> bool: @property def names(self) -> list[str]: - """Names of the targets (root sections) as a list""" + """Names of the targets (root sections) as a list.""" rkeys, _ = self._unravel_items(self.items) return rkeys def copy(self) -> PfsDocument: - """Return a deep copy of the PfsDocument""" - + """Return a deep copy of the PfsDocument.""" text = repr(self) return PfsDocument.from_text(text) @@ -218,7 +218,7 @@ def _parse_non_file_input( ), names: Sequence[str] | None = None, ) -> tuple[Sequence[str], list[PfsSection]]: - """dict/PfsSection or lists of these can be parsed""" + """dict/PfsSection or lists of these can be parsed.""" if names is None: assert isinstance(input, Mapping), "input must be a mapping" names, sections = PfsDocument._unravel_items(input.items) @@ -257,7 +257,7 @@ def _is_FM_engine(self) -> bool: return "FemEngine" in self.names[0] def _add_all_FM_aliases(self) -> None: - """create MIKE FM module aliases""" + """create MIKE FM module aliases.""" self._add_FM_alias("HD", "HYDRODYNAMIC_MODULE") self._add_FM_alias("SW", "SPECTRAL_WAVE_MODULE") self._add_FM_alias("TR", "TRANSPORT_MODULE") @@ -384,7 +384,7 @@ def _parse_token(self, token: str, context: str = "") -> str: return s def write(self, filename: str) -> None: - """Write object to a pfs file + """Write object to a pfs file. 
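The `PfsDocument` methods above support a text round trip; a minimal sketch, where the Pfs snippet is illustrative only:

```python
import mikeio

text = """
[ENGINE]
   option = 1
EndSect  // ENGINE
"""
pfs = mikeio.PfsDocument.from_text(text)
print(pfs.names)             # ['ENGINE'], the target (root section) names
pfs.copy().write("engine.pfs")   # deep copy, then write back to disk
```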
Parameters
         ----------
@@ -394,6 +394,7 @@ def write(self, filename: str) -> None:
         Notes
         -----
         To return the content as a string, use repr()
+
         """
         from mikeio import __version__ as mikeio_version

diff --git a/mikeio/pfs/_pfssection.py b/mikeio/pfs/_pfssection.py
index ce717d0b8..0a7b1fc0a 100644
--- a/mikeio/pfs/_pfssection.py
+++ b/mikeio/pfs/_pfssection.py
@@ -15,7 +15,7 @@


 def _merge_dict(a: dict[str, Any], b: Mapping[str, Any]) -> dict[str, Any]:
-    """merges dict b into dict a; handling non-unique keys"""
+    """Merge dict b into dict a, handling non-unique keys."""
     for key in b:
         if key in a:
             if isinstance(a[key], dict) and isinstance(b[key], dict):
@@ -35,7 +35,7 @@ class PfsNonUniqueList(list):
 class PfsSection(SimpleNamespace, MutableMapping[str, Any]):
     @staticmethod
     def from_dataframe(df: pd.DataFrame, prefix: str) -> "PfsSection":
-        """Create a PfsSection from a DataFrame
+        """Create a PfsSection from a DataFrame.

         Parameters
         ----------
@@ -56,6 +56,7 @@ def from_dataframe(df: pd.DataFrame, prefix: str) -> "PfsSection":
         ```{python}
         mikeio.PfsSection.from_dataframe(df,"STATION_")
         ```
+
         """

         d = {f"{prefix}{idx}": row.to_dict() for idx, row in df.iterrows()}
@@ -117,7 +118,7 @@ def _parse_value(self, v: Any) -> Any:

     @staticmethod
     def _str_is_scientific_float(s: str) -> bool:
-        """True: -1.0e2, 1E-4, -0.1E+0.5; False: E12, E-4"""
+        """True: -1.0e2, 1E-4, -0.1E+0.5; False: E12, E-4."""
         if len(s) < 3:
             return False
         if (
@@ -157,7 +158,7 @@ def clear(self) -> None:
         return self.__dict__.clear()

     def keys(self) -> KeysView[str]:
-        """Return a new view of the PfsSection's keys"""
+        """Return a new view of the PfsSection's keys."""
         return self.__dict__.keys()

     def values(self) -> ValuesView[Any]:
@@ -165,12 +166,12 @@ def values(self) -> ValuesView[Any]:
         return self.__dict__.values()

     def items(self) -> ItemsView[str, Any]:
-        """Return a new view of the PfsSection's items ((key, value) pairs)"""
+        """Return a new view of the PfsSection's items ((key, value) pairs)."""
         return self.__dict__.items()

     # TODO: better name
     def update_recursive(self, key: Any, value: Any) -> None:
-        """Update recursively all matches of key with value"""
+        """Update recursively all matches of key with value."""
         for k, v in self.items():
             if isinstance(v, PfsSection):
                 self[k].update_recursive(key, value)
@@ -187,7 +188,7 @@ def search(
         case: bool = False,
     ) -> PfsSection:
         """Find recursively all keys, sections or parameters
-        matching a pattern
+        matching a pattern.
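`from_dataframe` can be sketched directly from the docstring's own STATION_ example:

```python
import pandas as pd
import mikeio

df = pd.DataFrame({"x": [10.0, 20.0], "y": [55.0, 56.0]}, index=[1, 2])

# one STATION_<idx> subsection per row, keywords taken from the columns
stations = mikeio.PfsSection.from_dataframe(df, prefix="STATION_")
```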
NOTE: logical OR between multiple conditions @@ -208,6 +209,7 @@ def search( ------- PfsSection Search result as a nested PfsSection + """ results = [] if text is not None: @@ -242,8 +244,7 @@ def _find_patterns_generator( keylist: list[str] | None = None, case: bool = False, ) -> Any: - """Look for patterns in either keys, params or sections""" - + """Look for patterns in either keys, params or sections.""" keylist = [] if keylist is None else keylist for k, v in self.items(): kk = str(k) if case else str(k).lower() @@ -263,7 +264,7 @@ def _find_patterns_generator( @staticmethod def _yield_deep_dict(keys: Sequence[str], val: Any) -> Any: - """yield a deep nested dict with keys with a single deep value val""" + """yield a deep nested dict with keys with a single deep value val.""" for j in range(len(keys) - 1, -1, -1): d = {keys[j]: val} val = d @@ -282,7 +283,7 @@ def _param_match(parampat: Any, v: Any, case: bool) -> Any: return parampat == v def find_replace(self, old_value: Any, new_value: Any) -> None: - """Update recursively all old_value with new_value""" + """Update recursively all old_value with new_value.""" for k, v in self.items(): if isinstance(v, PfsSection): self[k].find_replace(old_value, new_value) @@ -301,7 +302,7 @@ def _to_txt_lines(self) -> list[str]: def _write_with_func( self, func: Callable[[str], Any], level: int = 0, newline: str = "\n" ) -> None: - """Write pfs nested objects + """Write pfs nested objects. Parameters ---------- @@ -311,6 +312,7 @@ def _write_with_func( Level of indentation (add 3 spaces for each), by default 0 newline : str, optional newline string, by default "\n" + """ lvl_prefix = " " for k, v in self.items(): @@ -350,7 +352,7 @@ def _write_with_func( def _prepare_value_for_write( self, v: str | bool | datetime | list[str | bool | datetime] ) -> str: - """catch peculiarities of string formatted pfs data + """catch peculiarities of string formatted pfs data. Parameters ---------- @@ -360,6 +362,7 @@ def _prepare_value_for_write( Returns ------- modified value + """ # some crude checks and corrections if isinstance(v, str): @@ -390,7 +393,7 @@ def _prepare_value_for_write( return v def to_dict(self) -> dict[str, Any]: - """Convert to (nested) dict (as a copy)""" + """Convert to (nested) dict (as a copy).""" d = self.__dict__.copy() for key, value in d.items(): if isinstance(value, PfsSection): @@ -398,7 +401,7 @@ def to_dict(self) -> dict[str, Any]: return d def to_dataframe(self, prefix: str | None = None) -> pd.DataFrame: - """Output enumerated subsections to a DataFrame + """Output enumerated subsections to a DataFrame. 
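Continuing the pfs sketches, a hedged example of the recursive helpers above; the `key` keyword is assumed from the parameter list in this docstring, and the `lake.sw` access mirrors the `to_dataframe` example shown here:

```python
import mikeio

pfs = mikeio.read_pfs("lake.sw")  # placeholder SW model setup
hits = pfs.search(key="file")     # keywords whose name contains "file" (OR semantics)
pfs.find_replace(False, True)     # recursively replace one value with another

# enumerated subsections back to a table, mirroring from_dataframe
df_out = pfs.SW.OUTPUTS.to_dataframe(prefix="OUTPUT_")
```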
Parameters ---------- @@ -418,6 +421,7 @@ def to_dataframe(self, prefix: str | None = None) -> pd.DataFrame: pfs = mikeio.read_pfs("../data/pfs/lake.sw") pfs.SW.OUTPUTS.to_dataframe(prefix="OUTPUT_") ``` + """ if prefix is not None: sections = [ @@ -448,7 +452,7 @@ def to_dataframe(self, prefix: str | None = None) -> pd.DataFrame: @classmethod def _merge_PfsSections(cls, sections: Sequence[dict[str, Any]]) -> "PfsSection": - """Merge a list of PfsSections/dict""" + """Merge a list of PfsSections/dict.""" assert len(sections) > 0 a = sections[0] for b in sections[1:]: diff --git a/mikeio/spatial/_FM_geometry_layered.py b/mikeio/spatial/_FM_geometry_layered.py index f20ab7144..1cca0d344 100644 --- a/mikeio/spatial/_FM_geometry_layered.py +++ b/mikeio/spatial/_FM_geometry_layered.py @@ -175,7 +175,7 @@ def elements_to_geometry( @cached_property def element_coordinates(self) -> np.ndarray: - """Center coordinates of each element""" + """Center coordinates of each element.""" return self._calc_element_coordinates(maxnodes=8) def _get_nodes_and_table_for_elements( @@ -183,7 +183,7 @@ def _get_nodes_and_table_for_elements( elements: Sequence[int] | np.ndarray, node_layers: Layer = "all", ) -> tuple[Any, Any]: - """list of nodes and element table for a list of elements + """list of nodes and element table for a list of elements. Parameters ---------- @@ -199,6 +199,7 @@ def _get_nodes_and_table_for_elements( array of node ids (unique) list(list(int)) element table with a list of nodes for each element + """ elem_tbl = np.empty(len(elements), dtype=np.dtype("O")) if (node_layers == "all") or self.is_2d: @@ -221,12 +222,13 @@ def _get_nodes_and_table_for_elements( return nodes, elem_tbl def to_2d_geometry(self) -> GeometryFM2D: - """extract 2d geometry from 3d geometry + """extract 2d geometry from 3d geometry. Returns ------- UnstructuredGeometry 2d geometry (bottom nodes) + """ # extract information for selected elements elem_ids = self.bottom_elements @@ -267,7 +269,7 @@ def to_2d_geometry(self) -> GeometryFM2D: @cached_property def n_elements(self) -> int: - """Number of 3d elements""" + """Number of 3d elements.""" return len(self.element_table) @property @@ -288,7 +290,7 @@ def is_layered(self) -> bool: @cached_property def layer_ids(self) -> np.ndarray: - """The layer number (0=bottom, 1, 2, ...) for each 3d element""" + """The layer number (0=bottom, 1, 2, ...) for each 3d element.""" if self._layer_ids is None: res = self._get_2d_to_3d_association() self._e2_e3_table = res[0] @@ -298,22 +300,22 @@ def layer_ids(self) -> np.ndarray: @property def n_layers(self) -> int: - """Maximum number of layers""" + """Maximum number of layers.""" return self._n_layers @property def n_sigma_layers(self) -> int: - """Number of sigma layers""" + """Number of sigma layers.""" return self._n_sigma @property def n_z_layers(self) -> int: - """Maximum number of z-layers""" + """Maximum number of z-layers.""" return self.n_layers - self.n_sigma_layers @cached_property def top_elements(self) -> np.ndarray: - """List of 3d element ids of surface layer""" + """List of 3d element ids of surface layer.""" # note: if subset of elements is selected then this cannot be done! 
# fast path if no z-layers @@ -330,7 +332,7 @@ def top_elements(self) -> np.ndarray: def _elements_in_area( self, area: Sequence[tuple[float, float]] | Sequence[float] ) -> np.ndarray: - """Find element ids of elements inside area""" + """Find element ids of elements inside area.""" idx2d = self.geometry2d._elements_in_area(area) if len(idx2d) > 0: return np.hstack(self.e2_e3_table[idx2d]) @@ -339,8 +341,7 @@ def _elements_in_area( @staticmethod def _find_top_layer_elements(elementTable: np.ndarray) -> np.ndarray: - """ - Find element indices (zero based) of the elements being the upper-most element + """Find element indices (zero based) of the elements being the upper-most element in its column. Each column is identified by matching node id numbers. For 3D elements the last half of the node numbers of the bottom element must match the first half @@ -351,9 +352,8 @@ def _find_top_layer_elements(elementTable: np.ndarray) -> np.ndarray: is stored in res: For the first column it is res[0]+1. For the i'th column, it is res[i]-res[i-1]. - :returns: A list of element indices of top layer elements + :returns: A list of element indices of top layer elements. """ - topLayerElments = [] # Find top layer elements by matching the number numers of the last half of elmt i @@ -396,8 +396,7 @@ def _find_top_layer_elements(elementTable: np.ndarray) -> np.ndarray: @cached_property def n_layers_per_column(self) -> np.ndarray: - """List of number of layers for each column""" - + """List of number of layers for each column.""" top_elems = self.top_elements n = len(top_elems) tmp = top_elems.copy() @@ -408,11 +407,11 @@ def n_layers_per_column(self) -> np.ndarray: @cached_property def bottom_elements(self) -> np.ndarray: - """List of 3d element ids of bottom layer""" + """List of 3d element ids of bottom layer.""" return self.top_elements - self.n_layers_per_column + 1 def get_layer_elements(self, layers: int | Layer | Sequence[int]) -> np.ndarray: - """3d element ids for one (or more) specific layer(s) + """3d element ids for one (or more) specific layer(s). 
Parameters
         ----------
@@ -424,6 +423,7 @@ def get_layer_elements(self, layers: int | Layer | Sequence[int]) -> np.ndarray:
         -------
         np.array(int)
             element ids
+
         """
         if isinstance(layers, str):
             if layers == "top":
@@ -459,8 +459,7 @@ def get_layer_elements(self, layers: int | Layer | Sequence[int]) -> np.ndarray:

     @property
     def e2_e3_table(self) -> np.ndarray:
-        """The 2d-to-3d element connectivity table for a 3d object"""
-
+        """The 2d-to-3d element connectivity table for a 3d object."""
         # e2_e3, 2d_ids and layer_ids are all set at the same time

         if self._e2_e3_table is None:
@@ -472,7 +471,7 @@ def e2_e3_table(self) -> np.ndarray:

     @property
     def elem2d_ids(self) -> np.ndarray:
-        """The associated 2d element id for each 3d element"""
+        """The associated 2d element id for each 3d element."""
         if self._2d_ids is None:
             res = self._get_2d_to_3d_association()
             self._e2_e3_table = res[0]
@@ -523,8 +522,7 @@ def _z_idx_in_column(self, e3_col: np.ndarray, z: np.ndarray) -> np.ndarray:
     def _find_elem3d_from_elem2d(
         self, elem2d: int | np.ndarray, z: np.ndarray | float
     ) -> np.ndarray:
-        """Find 3d element ids from 2d element ids and z-values"""
-
+        """Find 3d element ids from 2d element ids and z-values."""
         # TODO: coordinate with _find_3d_from_2d_points()

         elem2d = [elem2d] if np.isscalar(elem2d) else elem2d
@@ -592,11 +590,11 @@ def _find_elem3d_from_elem2d(

     @cached_property
     def _dz(self) -> np.ndarray:
-        """Height of each 3d element (using static zn information)"""
+        """Height of each 3d element (using static zn information)."""
         return self._calc_dz()

     def _calc_dz(self) -> np.ndarray:
-        """Height of 3d elements using static or dynamic zn information"""
+        """Height of 3d elements using static or dynamic zn information."""
         element_table = self.element_table
         n_elements = len(element_table)

@@ -753,6 +751,7 @@ def get_nearest_relative_distance(self, coords: tuple[float, float]) -> float:
         -------
         float
             relative distance in meters from start of transect
+
         """
         xe = self.element_coordinates[:, 0]
         ye = self.element_coordinates[:, 1]
@@ -809,7 +808,7 @@ def _find_nearest_element_2d(self, coords: np.ndarray) -> np.ndarray:


 class GeometryFMVerticalColumn(GeometryFM3D):
-    "A 3d geometry with consisting of a single vertical column"
+    "A 3d geometry consisting of a single vertical column."
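The layer bookkeeping documented above is easiest to see on a layered file; a sketch with a placeholder sigma-z dfsu, using only properties and methods from this hunk:

```python
import mikeio

dfs = mikeio.open("sigma_z.dfsu")  # placeholder layered (3d) dfsu file
g = dfs.geometry
print(g.n_layers, g.n_sigma_layers, g.n_z_layers)

surface = g.get_layer_elements("top")     # 3d element ids in the surface layer
seabed = g.get_layer_elements("bottom")   # 3d element ids in the bottom layer
g2d = g.to_2d_geometry()                  # collapse to the horizontal mesh
```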
# TODO: add plotter

@@ -837,7 +836,7 @@ def _calc_zee(self, zn: np.ndarray | None = None) -> np.ndarray:
     def _interp_values(
         self, zn: np.ndarray, data: np.ndarray, z: np.ndarray
     ) -> np.ndarray:
-        """Interpolate to other z values, allow linear extrapolation"""
+        """Interpolate to other z values, allow linear extrapolation."""
         from scipy.interpolate import interp1d  # type: ignore

         opt = {"kind": "linear", "bounds_error": False, "fill_value": "extrapolate"}
diff --git a/mikeio/spatial/_FM_geometry_spectral.py b/mikeio/spatial/_FM_geometry_spectral.py
index 7dd4066ce..b3d774ba4 100644
--- a/mikeio/spatial/_FM_geometry_spectral.py
+++ b/mikeio/spatial/_FM_geometry_spectral.py
@@ -54,22 +54,22 @@ def ndim(self) -> int:

     @property
     def n_frequencies(self) -> int:
-        """Number of frequencies"""
+        """Number of frequencies."""
         return 0 if self.frequencies is None else len(self.frequencies)

     @property
     def frequencies(self) -> np.ndarray | None:
-        """Frequency axis"""
+        """Frequency axis."""
         return self._frequencies

     @property
     def n_directions(self) -> int:
-        """Number of directions"""
+        """Number of directions."""
         return 0 if self.directions is None else len(self.directions)

     @property
     def directions(self) -> np.ndarray | None:
-        """Directional axis"""
+        """Directional axis."""
         return self._directions


@@ -105,22 +105,22 @@ def __init__(

     @property
     def n_frequencies(self) -> int:
-        """Number of frequencies"""
+        """Number of frequencies."""
         return 0 if self.frequencies is None else len(self.frequencies)

     @property
     def frequencies(self) -> np.ndarray | None:
-        """Frequency axis"""
+        """Frequency axis."""
         return self._frequencies

     @property
     def n_directions(self) -> int:
-        """Number of directions"""
+        """Number of directions."""
         return 0 if self.directions is None else len(self.directions)

     @property
     def directions(self) -> np.ndarray | None:
-        """Directional axis"""
+        """Directional axis."""
         return self._directions


@@ -135,7 +135,7 @@ def elements_to_geometry(  # type: ignore
         self, elements: Sequence[int], keepdims: bool = False
     ) -> "GeometryFMPointSpectrum" | "GeometryFMAreaSpectrum":
-        """export a selection of elements to new flexible file geometry
+        """Export a selection of elements to new flexible file geometry.
         Parameters
         ----------
         elements : list(int)
             list of element ids
@@ -145,6 +145,7 @@ def elements_to_geometry(  # type: ignore
         -------
         GeometryFMAreaSpectrum or GeometryFMPointSpectrum
         which can be used for further extraction or saved to file
+
         """
         elements = np.atleast_1d(elements)  # type: ignore
         if len(elements) == 1:
@@ -187,7 +188,7 @@ def _nodes_to_geometry(  # type: ignore
     ) -> GeometryFMPointSpectrum | GeometryFMLineSpectrum:
-        """export a selection of nodes to new flexible file geometry
+        """Export a selection of nodes to new flexible file geometry.
         Note: takes only the elements for which all nodes are selected
         Parameters
         ----------
         nodes : list(int)
             list of node ids
         -------
         GeometryFMPointSpectrum | GeometryFMLineSpectrum
         which can be used for further extraction or saved to file
+
         """
         nodes = np.atleast_1d(nodes)
         if len(nodes) == 1:
diff --git a/mikeio/spatial/_FM_utils.py b/mikeio/spatial/_FM_utils.py
index 4d76c416e..4eefb8c72 100644
--- a/mikeio/spatial/_FM_utils.py
+++ b/mikeio/spatial/_FM_utils.py
@@ -44,16 +44,20 @@ def _plot_map(
     ax: Axes | None = None,
     add_colorbar: bool = True,
 ) -> Axes:
-    """
-    Plot unstructured data and/or mesh, mesh outline
+    """Plot unstructured data and/or mesh, mesh outline.
Parameters ---------- - node_coordinates, - element_table, - element_coordinates, + node_coordinates: np.array + node coordinates + element_table: np.array + element table + element_coordinates: np.array + element coordinates boundary_polylines: BoundaryPolylines, - projection, + boundary polylines + projection: str, optional + projection type, default: "" z: np.array or a Dataset with a single item, optional value for each element to plot, default bathymetry plot_type: str, optional @@ -100,8 +104,8 @@ def _plot_map( >>> ds.n_items 1 >>> dfs.plot(z=ds) # plot surface elevation - """ + """ import matplotlib.pyplot as plt import matplotlib @@ -236,7 +240,7 @@ def __set_colormap_levels( levels: int | Sequence[float] | np.ndarray | None, z: np.ndarray, ) -> tuple[float, float, Colormap, Normalize, ScalarMappable, np.ndarray]: - """Set colormap, levels, vmin, vmax, and cmap_norm + """Set colormap, levels, vmin, vmax, and cmap_norm. Parameters ---------- @@ -265,8 +269,8 @@ def __set_colormap_levels( colormap object levels : list of float list of levels - """ + """ import matplotlib import matplotlib.cm as cm import matplotlib.colors as mplc @@ -303,7 +307,7 @@ def __set_colormap_levels( def __set_plot_limits(ax: Axes, nc: np.ndarray) -> None: - """Set default plot limits + """Set default plot limits. Override with matplotlib ax.set_xlim, ax.set_ylim """ @@ -316,7 +320,7 @@ def __set_plot_limits(ax: Axes, nc: np.ndarray) -> None: def __plot_mesh_only(ax: Axes, nc: np.ndarray, element_table: np.ndarray) -> None: - """plot mesh only (no data) + """plot mesh only (no data). Parameters ---------- @@ -331,6 +335,7 @@ def __plot_mesh_only(ax: Axes, nc: np.ndarray, element_table: np.ndarray) -> Non ------- matplotlib.axes.Axes axes object + """ from matplotlib.collections import PatchCollection @@ -342,7 +347,7 @@ def __plot_mesh_only(ax: Axes, nc: np.ndarray, element_table: np.ndarray) -> Non def __plot_outline_only(ax: Axes, boundary_polylines: BoundaryPolylines) -> Axes: - """plot outline only (no data) + """plot outline only (no data). Parameters ---------- @@ -355,6 +360,7 @@ def __plot_outline_only(ax: Axes, boundary_polylines: BoundaryPolylines) -> Axes ------- matplotlib.axes.Axes axes object + """ __add_outline(ax, boundary_polylines) return ax @@ -371,7 +377,7 @@ def __plot_patch( vmin: float, vmax: float, ) -> PatchCollection: - """plot patch with data from z + """plot patch with data from z. Parameters ---------- @@ -398,8 +404,8 @@ def __plot_patch( ------- matplotlib.axes.Axes axes object - """ + """ patches = _to_polygons(nc, element_table) if show_mesh: @@ -431,7 +437,7 @@ def __get_tris( z: np.ndarray, n_refinements: int, ) -> tuple[Triangulation, np.ndarray]: - """get triangulation object and node-centered data + """get triangulation object and node-centered data. Parameters ---------- @@ -449,8 +455,8 @@ def __get_tris( Returns ------- matplotlib.tri.Triangulation and node-centered data - """ + """ import matplotlib.tri as tri elem_table, ec, z = __create_tri_only_element_table(nc, element_table, ec, data=z) @@ -474,7 +480,7 @@ def __add_colorbar( levels: np.ndarray, cbar_extend: str, ) -> None: - """add colorbar to axes + """add colorbar to axes. 
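`_plot_map` is private; it is normally driven through a flexible-mesh geometry's `plot` accessor. A hedged sketch with a placeholder file, assuming `plot.mesh` and `plot.outline` mirror the Grid2D plotter shown earlier in this patch:

```python
import mikeio

g = mikeio.open("area.dfsu").geometry  # placeholder 2d dfsu
g.plot()                               # bathymetry as coloured patches
g.plot.mesh()                          # mesh only
g.plot.outline()                       # domain outline only
```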
Parameters ---------- @@ -494,8 +500,8 @@ def __add_colorbar( Returns ------- None - """ + """ from mpl_toolkits.axes_grid1 import make_axes_locatable # type: ignore import matplotlib.pyplot as plt @@ -513,7 +519,7 @@ def __add_colorbar( def __set_aspect_ratio(ax: Axes, nc: np.ndarray, projection: str) -> None: - """set aspect ratio + """set aspect ratio. Parameters ---------- @@ -527,6 +533,7 @@ def __set_aspect_ratio(ax: Axes, nc: np.ndarray, projection: str) -> None: Returns ------- None + """ is_geo = projection == "LONG/LAT" if is_geo: @@ -539,7 +546,7 @@ def __set_aspect_ratio(ax: Axes, nc: np.ndarray, projection: str) -> None: def __add_non_tri_mesh( ax: Axes, nc: np.ndarray, element_table: np.ndarray, plot_type: str ) -> None: - """add non-triangular mesh to axes + """add non-triangular mesh to axes. Parameters ---------- @@ -550,10 +557,12 @@ def __add_non_tri_mesh( element_table : array of int element table plot_type : str + type of plot Returns ------- None + """ # if mesh is not tri only, we need to add it manually on top from matplotlib.collections import PatchCollection @@ -574,7 +583,7 @@ def __add_non_tri_mesh( def __add_outline(ax: Axes, boundary_polylines: BoundaryPolylines) -> None: - """add outline to axes + """add outline to axes. Parameters ---------- @@ -586,8 +595,8 @@ def __add_outline(ax: Axes, boundary_polylines: BoundaryPolylines) -> None: Returns ------- None - """ + """ lines = boundary_polylines.exteriors + boundary_polylines.interiors for line in lines: ax.plot(*line.xy.T, color="0.4", linewidth=1.2) @@ -610,12 +619,13 @@ def __is_tri_only(element_table: np.ndarray) -> bool: def _to_polygons(node_coordinates: np.ndarray, element_table: np.ndarray) -> list[Any]: - """generate matplotlib polygons from element table for plotting + """generate matplotlib polygons from element table for plotting. Returns ------- list(matplotlib.patches.Polygon) list of polygons for plotting + """ from matplotlib.patches import Polygon @@ -640,13 +650,16 @@ def _get_node_centered_data( data: np.ndarray, extrapolate: bool = True, ) -> np.ndarray: - """convert cell-centered data to node-centered by pseudo-laplacian method + """convert cell-centered data to node-centered by pseudo-laplacian method. Parameters ---------- - node_coordinates, - element_table, - element_coordinates + node_coordinates: np.array(float) + node coordinates + element_table: np.array[int] + element table + element_coordinates: np.array(float) + element coordinates data : np.array(float) cell-centered data extrapolate : bool, optional @@ -656,6 +669,7 @@ def _get_node_centered_data( ------- np.array(float) node-centered data + """ nc = node_coordinates elem_table, ec, data = __create_tri_only_element_table( @@ -703,8 +717,7 @@ def __create_tri_only_element_table( element_coordinates: np.ndarray, data: np.ndarray, ) -> tuple[np.ndarray, np.ndarray, np.ndarray]: - """Convert quad/tri mesh to pure tri-mesh""" - + """Convert quad/tri mesh to pure tri-mesh.""" if __is_tri_only(element_table): # already tri-only? just convert to 2d array return np.stack(element_table), element_coordinates, data # type: ignore @@ -763,13 +776,14 @@ def _plot_vertical_profile( add_colorbar: bool = True, **kwargs: Any, ) -> Axes: - """ - Plot unstructured vertical profile + """Plot unstructured vertical profile. 
Parameters ---------- node_coordinates: np.array - element_table: np.array[np.array] + node coordinates + element_table: np.array + element table values: np.array value for each element to plot zn: np.array, optional @@ -796,10 +810,13 @@ def _plot_vertical_profile( specify size of figure ax: matplotlib.axes, optional Adding to existing axis, instead of creating new fig + **kwargs: Any + Additional keyword arguments Returns ------- + """ import matplotlib.pyplot as plt from matplotlib.collections import PolyCollection diff --git a/mikeio/spatial/_geometry.py b/mikeio/spatial/_geometry.py index 998ea3e4d..77af91e2f 100644 --- a/mikeio/spatial/_geometry.py +++ b/mikeio/spatial/_geometry.py @@ -17,12 +17,12 @@ def __init__(self, projection: str = "LONG/LAT") -> None: @property def projection_string(self) -> str: - """The projection string""" + """The projection string.""" return self._projstr @property def projection(self) -> str: - """The projection""" + """The projection.""" return self._projstr @property @@ -82,7 +82,7 @@ def wkt(self) -> str: @property def ndim(self) -> int: - """Geometry dimension""" + """Geometry dimension.""" return 0 def to_shapely(self) -> Any: diff --git a/mikeio/spatial/_utils.py b/mikeio/spatial/_utils.py index f9f9e69dd..a030c59e1 100644 --- a/mikeio/spatial/_utils.py +++ b/mikeio/spatial/_utils.py @@ -6,7 +6,7 @@ def xy_to_bbox(xy: np.ndarray, buffer: float = 0.0) -> BoundingBox: - """return bounding box for list of coordinates""" + """Return bounding box for list of coordinates.""" left = xy[:, 0].min() - buffer bottom = xy[:, 1].min() - buffer right = xy[:, 0].max() + buffer @@ -17,7 +17,7 @@ def xy_to_bbox(xy: np.ndarray, buffer: float = 0.0) -> BoundingBox: def dist_in_meters( coords: np.ndarray, pt: tuple[float, float], is_geo: bool = False ) -> np.ndarray: - """get distance between array of coordinates and point + """Get distance between array of coordinates and point. Parameters ---------- @@ -32,6 +32,7 @@ def dist_in_meters( ------- array distances in meter + """ coords = np.atleast_2d(coords) xe = coords[:, 0] @@ -61,7 +62,7 @@ def _get_dist_geo(lon: float, lat: float, lon1: float, lat1: float) -> float: def _relative_cumulative_distance( coords: np.ndarray, reference: np.ndarray | None = None, is_geo: bool = False ) -> np.ndarray: - """Calculate the cumulative relative distance along a path""" + """Calculate the cumulative relative distance along a path.""" coords = np.atleast_2d(coords) d = np.zeros_like(coords[:, 0]) if reference is not None: diff --git a/mikeio/spatial/crs.py b/mikeio/spatial/crs.py index 478b0bf46..9c7455da1 100644 --- a/mikeio/spatial/crs.py +++ b/mikeio/spatial/crs.py @@ -1,3 +1,5 @@ +"""Coordinate Reference System (CRS) conversion.""" + import warnings from typing import TYPE_CHECKING @@ -21,6 +23,8 @@ class CRSConversionError(Exception): class CRS: + """Coordinate Reference System (CRS) class.""" + def __init__(self, projection_string: str) -> None: """Create an instance of the CRS class. 
@@ -56,28 +60,32 @@ def __repr__(self) -> str:

     @property
     def map_projection(self) -> MapProjection:
+        """Get the map projection object."""
         # https://manuals.mikepoweredbydhi.help/2021/General/Class_Library/DHI_Projections/html/T_DHI_Projections_MapProjection.htm
         return self.__cartography.Projection

     @property
     def name(self) -> str:
+        """Get the name of the projection."""
         return self.__cartography.ProjectionName

     @property
     def projection_string(self) -> str:
+        """Get the projection string."""
         return self.__cartography.ProjectionString

     @property
     def is_geographical(self) -> bool:
+        """Check if the projection is geographical."""
         return MapProjection.IsGeographical(self.projection_string)

     @property
     def is_projected(self) -> bool:
+        """Check if the projection is projected."""
         return not self.is_geographical

     def to_pyproj(self) -> "crs.CRS":
-        """
-        Convert projection to pyptoj.CRS object.
+        """Convert projection to pyproj.CRS object.

         Returns
         -------
@@ -97,8 +105,7 @@ def to_pyproj(self) -> "crs.CRS":

     @classmethod
     def from_pyproj(cls, pyproj_crs: "crs.CRS") -> "CRS":
-        """
-        Create CRS object from pyproj.CRS object.
+        """Create CRS object from pyproj.CRS object.

         Parameters
         ----------
@@ -111,12 +118,10 @@ def from_pyproj(cls, pyproj_crs: "crs.CRS") -> "CRS":
         CRS instance.

         """
-
         return cls(projection_string=pyproj_crs.to_wkt(version="WKT1_ESRI"))

     def to_epsg(self, min_confidence: int = 70) -> int:
-        """
-        Convert projection to pyptoj.CRS object.
+        """Convert projection to an EPSG code.

         Parameters
         ----------
@@ -137,7 +142,6 @@ def to_epsg(self, min_confidence: int = 70) -> int:
         Unexpected 'pyproj.to_epsg' return type.

         """
-
         epsg_code = self.to_pyproj().to_epsg(min_confidence=min_confidence)
         if epsg_code is None:
             raise CRSConversionError(
@@ -153,8 +157,7 @@ def to_epsg(self, min_confidence: int = 70) -> int:

     @classmethod
     def from_epsg(cls, epsg: int) -> "CRS":
-        """
-        Create CRS object from EPSG code.
+        """Create CRS object from EPSG code.

         Parameters
         ----------
diff --git a/mikeio/xyz.py b/mikeio/xyz.py
index 5fde1d9f7..fb7630f1e 100644
--- a/mikeio/xyz.py
+++ b/mikeio/xyz.py
@@ -1,3 +1,5 @@
+"""Read and write xyz files."""
+
 from __future__ import annotations
 from pathlib import Path

@@ -5,6 +7,7 @@


 def read_xyz(filename: str | Path) -> pd.DataFrame:
+    """Read an xyz file into a DataFrame."""
     df = pd.read_csv(filename, sep="\t", header=None)
     if df.shape[1] == 1:
         df = pd.read_csv(filename, sep=" ", header=None)
@@ -18,6 +21,7 @@


 def dataframe_to_xyz(self: pd.DataFrame, filename: str | Path) -> None:
+    """Write DataFrame to xyz file."""
     # TODO validation
     self.to_csv(filename, sep="\t", header=False, index=False)

diff --git a/pyproject.toml b/pyproject.toml
index 4a1ad4e42..befabe4a0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -75,7 +75,9 @@ notebooks= [
 "Bug Tracker" = "https://github.com/DHI/mikeio/issues"


-[tool.ruff]
+[tool.ruff.lint]
 # ignore long lines
-lint.ignore = ["E501", "E741"]
\ No newline at end of file
+ignore = ["E501", "E741"]
+
+select = ["D100", "D101", "D102", "D103", "D202", "D212", "D413", "D415", "D417"]
\ No newline at end of file
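Finally, a sketch of the `CRS` helpers documented above; the conversions require `pyproj`, and the EPSG results depend on pyproj matching the projection with enough confidence:

```python
from mikeio.spatial.crs import CRS

crs = CRS(projection_string="LONG/LAT")
print(crs.is_geographical)   # True
print(crs.to_epsg())         # 4326, if pyproj matches with enough confidence

utm33 = CRS.from_epsg(32633)  # round trip from an EPSG code
print(utm33.is_projected)     # True
```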