|
35 | 35 | from ..coding.calendar_ops import convert_calendar, interp_calendar
|
36 | 36 | from ..coding.cftimeindex import CFTimeIndex, _parse_array_of_cftime_strings
|
37 | 37 | from ..plot.dataset_plot import _Dataset_PlotMethods
|
| 38 | +from . import alignment |
| 39 | +from . import dtypes as xrdtypes |
38 | 40 | from . import (
|
39 |
| - alignment, |
40 |
| - dtypes, |
41 | 41 | duck_array_ops,
|
42 | 42 | formatting,
|
43 | 43 | formatting_html,
|
@@ -385,6 +385,18 @@ def variables(self) -> Mapping[Hashable, Variable]:
|
385 | 385 | all_variables = self._dataset.variables
|
386 | 386 | return Frozen({k: all_variables[k] for k in self})
|
387 | 387 |
|
@property
def dtypes(self) -> Frozen[Hashable, np.dtype]:
    """Mapping from data variable names to dtypes.

    Read-only view; it reflects any variables added to the parent
    dataset after creation.

    See Also
    --------
    Dataset.dtypes
    """
    # Delegate to the owning Dataset, which already excludes coordinates.
    return self._dataset.dtypes
| 399 | + |
388 | 400 | def _ipython_key_completions_(self):
|
389 | 401 | """Provide method for the key-autocompletions in IPython."""
|
390 | 402 | return [
|
@@ -677,6 +689,24 @@ def sizes(self) -> Frozen[Hashable, int]:
|
677 | 689 | """
|
678 | 690 | return self.dims
|
679 | 691 |
|
@property
def dtypes(self) -> Frozen[Hashable, np.dtype]:
    """Mapping from data variable names to dtypes.

    Read-only view; it is refreshed automatically as variables are
    added to or removed from the dataset.

    See Also
    --------
    DataArray.dtype
    """
    # Coordinates are excluded: only data variables contribute entries.
    data_var_names = (
        name for name in self._variables if name not in self._coord_names
    )
    return Frozen({name: self._variables[name].dtype for name in data_var_names})
| 709 | + |
680 | 710 | def load(self: T_Dataset, **kwargs) -> T_Dataset:
|
681 | 711 | """Manually trigger loading and/or computation of this dataset's data
|
682 | 712 | from disk or a remote source into memory and return this dataset.
|
@@ -2792,7 +2822,7 @@ def reindex_like(
|
2792 | 2822 | method: ReindexMethodOptions = None,
|
2793 | 2823 | tolerance: int | float | Iterable[int | float] | None = None,
|
2794 | 2824 | copy: bool = True,
|
2795 |
| - fill_value: Any = dtypes.NA, |
| 2825 | + fill_value: Any = xrdtypes.NA, |
2796 | 2826 | ) -> T_Dataset:
|
2797 | 2827 | """Conform this object onto the indexes of another object, filling in
|
2798 | 2828 | missing values with ``fill_value``. The default fill value is NaN.
|
@@ -2858,7 +2888,7 @@ def reindex(
|
2858 | 2888 | method: ReindexMethodOptions = None,
|
2859 | 2889 | tolerance: int | float | Iterable[int | float] | None = None,
|
2860 | 2890 | copy: bool = True,
|
2861 |
| - fill_value: Any = dtypes.NA, |
| 2891 | + fill_value: Any = xrdtypes.NA, |
2862 | 2892 | **indexers_kwargs: Any,
|
2863 | 2893 | ) -> T_Dataset:
|
2864 | 2894 | """Conform this object onto a new set of indexes, filling in
|
@@ -3074,7 +3104,7 @@ def _reindex(
|
3074 | 3104 | method: str = None,
|
3075 | 3105 | tolerance: int | float | Iterable[int | float] | None = None,
|
3076 | 3106 | copy: bool = True,
|
3077 |
| - fill_value: Any = dtypes.NA, |
| 3107 | + fill_value: Any = xrdtypes.NA, |
3078 | 3108 | sparse: bool = False,
|
3079 | 3109 | **indexers_kwargs: Any,
|
3080 | 3110 | ) -> T_Dataset:
|
@@ -4532,7 +4562,7 @@ def _unstack_full_reindex(
|
4532 | 4562 | def unstack(
|
4533 | 4563 | self: T_Dataset,
|
4534 | 4564 | dim: Hashable | Iterable[Hashable] | None = None,
|
4535 |
| - fill_value: Any = dtypes.NA, |
| 4565 | + fill_value: Any = xrdtypes.NA, |
4536 | 4566 | sparse: bool = False,
|
4537 | 4567 | ) -> T_Dataset:
|
4538 | 4568 | """
|
@@ -4677,7 +4707,7 @@ def merge(
|
4677 | 4707 | overwrite_vars: Hashable | Iterable[Hashable] = frozenset(),
|
4678 | 4708 | compat: CompatOptions = "no_conflicts",
|
4679 | 4709 | join: JoinOptions = "outer",
|
4680 |
| - fill_value: Any = dtypes.NA, |
| 4710 | + fill_value: Any = xrdtypes.NA, |
4681 | 4711 | combine_attrs: CombineAttrsOptions = "override",
|
4682 | 4712 | ) -> T_Dataset:
|
4683 | 4713 | """Merge the arrays of two datasets into a single dataset.
|
@@ -5886,7 +5916,7 @@ def _set_sparse_data_from_dataframe(
|
5886 | 5916 | # missing values and needs a fill_value. For consistency, don't
|
5887 | 5917 | # special case the rare exceptions (e.g., dtype=int without a
|
5888 | 5918 | # MultiIndex).
|
5889 |
| - dtype, fill_value = dtypes.maybe_promote(values.dtype) |
| 5919 | + dtype, fill_value = xrdtypes.maybe_promote(values.dtype) |
5890 | 5920 | values = np.asarray(values, dtype=dtype)
|
5891 | 5921 |
|
5892 | 5922 | data = COO(
|
@@ -5924,7 +5954,7 @@ def _set_numpy_data_from_dataframe(
|
5924 | 5954 | # fill in missing values:
|
5925 | 5955 | # https://stackoverflow.com/a/35049899/809705
|
5926 | 5956 | if missing_values:
|
5927 |
| - dtype, fill_value = dtypes.maybe_promote(values.dtype) |
| 5957 | + dtype, fill_value = xrdtypes.maybe_promote(values.dtype) |
5928 | 5958 | data = np.full(shape, fill_value, dtype)
|
5929 | 5959 | else:
|
5930 | 5960 | # If there are no missing values, keep the existing dtype
|
@@ -6415,7 +6445,7 @@ def diff(
|
6415 | 6445 | def shift(
|
6416 | 6446 | self: T_Dataset,
|
6417 | 6447 | shifts: Mapping[Any, int] | None = None,
|
6418 |
| - fill_value: Any = dtypes.NA, |
| 6448 | + fill_value: Any = xrdtypes.NA, |
6419 | 6449 | **shifts_kwargs: int,
|
6420 | 6450 | ) -> T_Dataset:
|
6421 | 6451 |
|
@@ -6470,7 +6500,7 @@ def shift(
|
6470 | 6500 | for name, var in self.variables.items():
|
6471 | 6501 | if name in self.data_vars:
|
6472 | 6502 | fill_value_ = (
|
6473 |
| - fill_value.get(name, dtypes.NA) |
| 6503 | + fill_value.get(name, xrdtypes.NA) |
6474 | 6504 | if isinstance(fill_value, dict)
|
6475 | 6505 | else fill_value
|
6476 | 6506 | )
|
@@ -6931,7 +6961,9 @@ def differentiate(
|
6931 | 6961 | dim = coord_var.dims[0]
|
6932 | 6962 | if _contains_datetime_like_objects(coord_var):
|
6933 | 6963 | if coord_var.dtype.kind in "mM" and datetime_unit is None:
|
6934 |
| - datetime_unit, _ = np.datetime_data(coord_var.dtype) |
| 6964 | + datetime_unit = cast( |
| 6965 | + "DatetimeUnitOptions", np.datetime_data(coord_var.dtype)[0] |
| 6966 | + ) |
6935 | 6967 | elif datetime_unit is None:
|
6936 | 6968 | datetime_unit = "s" # Default to seconds for cftime objects
|
6937 | 6969 | coord_var = coord_var._to_numeric(datetime_unit=datetime_unit)
|
@@ -7744,7 +7776,7 @@ def idxmin(
|
7744 | 7776 | self: T_Dataset,
|
7745 | 7777 | dim: Hashable | None = None,
|
7746 | 7778 | skipna: bool | None = None,
|
7747 |
| - fill_value: Any = dtypes.NA, |
| 7779 | + fill_value: Any = xrdtypes.NA, |
7748 | 7780 | keep_attrs: bool | None = None,
|
7749 | 7781 | ) -> T_Dataset:
|
7750 | 7782 | """Return the coordinate label of the minimum value along a dimension.
|
@@ -7841,7 +7873,7 @@ def idxmax(
|
7841 | 7873 | self: T_Dataset,
|
7842 | 7874 | dim: Hashable | None = None,
|
7843 | 7875 | skipna: bool | None = None,
|
7844 |
| - fill_value: Any = dtypes.NA, |
| 7876 | + fill_value: Any = xrdtypes.NA, |
7845 | 7877 | keep_attrs: bool | None = None,
|
7846 | 7878 | ) -> T_Dataset:
|
7847 | 7879 | """Return the coordinate label of the maximum value along a dimension.
|
|
0 commit comments