Skip to content

Commit 757b0ca

Browse files
committed
(fix): more typing
1 parent 3890abd commit 757b0ca

File tree

3 files changed

+26
-14
lines changed

3 files changed

+26
-14
lines changed

properties/test_pandas_roundtrip.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,12 +3,14 @@
33
"""
44

55
from functools import partial
6+
from typing import cast
67

78
import numpy as np
89
import pandas as pd
910
import pytest
1011

1112
import xarray as xr
13+
from xarray.core.dataset import Dataset
1214

1315
pytest.importorskip("hypothesis")
1416
import hypothesis.extra.numpy as npst # isort:skip
@@ -88,7 +90,7 @@ def test_roundtrip_dataarray(data, arr) -> None:
8890

8991

9092
@given(datasets_1d_vars())
91-
def test_roundtrip_dataset(dataset) -> None:
93+
def test_roundtrip_dataset(dataset: Dataset) -> None:
9294
df = dataset.to_dataframe()
9395
assert isinstance(df, pd.DataFrame)
9496
roundtripped = xr.Dataset(df)
@@ -119,7 +121,7 @@ def test_roundtrip_pandas_dataframe(df) -> None:
119121
df.columns.name = "cols"
120122
arr = xr.DataArray(df)
121123
roundtripped = arr.to_pandas()
122-
pd.testing.assert_frame_equal(df, roundtripped)
124+
pd.testing.assert_frame_equal(df, cast(pd.DataFrame, roundtripped))
123125
xr.testing.assert_identical(arr, roundtripped.to_xarray())
124126

125127

@@ -143,8 +145,8 @@ def test_roundtrip_pandas_dataframe_datetime(df) -> None:
143145
pd.arrays.IntervalArray(
144146
[pd.Interval(0, 1), pd.Interval(1, 5), pd.Interval(2, 6)]
145147
),
146-
pd.arrays.TimedeltaArray._from_sequence(pd.TimedeltaIndex(["1h", "2h", "3h"])),
147-
pd.arrays.DatetimeArray._from_sequence(
148+
pd.arrays.TimedeltaArray._from_sequence(pd.TimedeltaIndex(["1h", "2h", "3h"])), # type: ignore[attr-defined]
149+
pd.arrays.DatetimeArray._from_sequence( # type: ignore[attr-defined]
148150
pd.DatetimeIndex(["2023-01-01", "2023-01-02", "2023-01-03"], freq="D")
149151
),
150152
np.array([1, 2, 3], dtype="int64"),

xarray/core/dtypes.py

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
11
from __future__ import annotations
22

33
import functools
4-
from typing import TYPE_CHECKING
4+
from collections.abc import Iterable
5+
from typing import TYPE_CHECKING, cast
56

67
import numpy as np
78
from pandas.api.types import is_extension_array_dtype
@@ -53,7 +54,9 @@ def __eq__(self, other):
5354
)
5455

5556

56-
def maybe_promote(dtype: np.dtype | ExtensionDtype) -> tuple[np.dtype, Any]:
57+
def maybe_promote(
58+
dtype: np.dtype | ExtensionDtype,
59+
) -> tuple[np.dtype | ExtensionDtype, Any]:
5760
"""Simpler equivalent of pandas.core.common._maybe_promote
5861
5962
Parameters
@@ -70,7 +73,9 @@ def maybe_promote(dtype: np.dtype | ExtensionDtype) -> tuple[np.dtype, Any]:
7073
fill_value: Any
7174
if is_extension_array_dtype(dtype):
7275
return dtype, dtype.na_value
73-
elif HAS_STRING_DTYPE and np.issubdtype(dtype, np.dtypes.StringDType()):
76+
else:
77+
dtype = cast(np.dtype, dtype)
78+
if HAS_STRING_DTYPE and np.issubdtype(dtype, np.dtypes.StringDType()):
7479
# for now, we always promote string dtypes to object for consistency with existing behavior
7580
# TODO: refactor this once we have a better way to handle numpy vlen-string dtypes
7681
dtype_ = object
@@ -251,7 +256,7 @@ def maybe_promote_to_variable_width(
251256

252257

253258
def should_promote_to_object(
254-
arrays_and_dtypes: np.typing.ArrayLike | np.typing.DTypeLike, xp
259+
arrays_and_dtypes: Iterable[np.typing.ArrayLike | np.typing.DTypeLike], xp
255260
) -> bool:
256261
"""
257262
Test whether the given arrays_and_dtypes, when evaluated individually, match the
@@ -281,7 +286,9 @@ def should_promote_to_object(
281286

282287

283288
def result_type(
284-
*arrays_and_dtypes: np.typing.ArrayLike | np.typing.DTypeLike,
289+
*arrays_and_dtypes: list[
290+
np.typing.ArrayLike | np.typing.DTypeLike | ExtensionDtype
291+
],
285292
xp=None,
286293
) -> np.dtype:
287294
"""Like np.result_type, but with type promotion rules matching pandas.
@@ -314,7 +321,7 @@ def result_type(
314321
maybe_promote_to_variable_width,
315322
# let extension arrays handle their own str/bytes
316323
should_return_str_or_bytes=any(
317-
map(is_extension_array_dtype, arrays_and_dtypes)
324+
map(is_extension_array_dtype, arrays_and_dtypes) # type: ignore[arg-type]
318325
),
319326
),
320327
arrays_and_dtypes,

xarray/core/extension_array.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ def __extension_duck_array__issubdtype(
5858

5959
@implements("astype") # np.astype was added in 2.1.0, but we only require >=1.24
6060
def __extension_duck_array__astype(
61-
array_or_scalar: np.typing.ArrayLike,
61+
array_or_scalar: T_ExtensionArray,
6262
dtype: DTypeLikeSave,
6363
order: str = "K",
6464
casting: str = "unsafe",
@@ -68,7 +68,7 @@ def __extension_duck_array__astype(
6868
) -> ExtensionArray:
6969
if (
7070
not (
71-
is_extension_array_dtype(array_or_scalar) or is_extension_array_dtype(dtype)
71+
is_extension_array_dtype(array_or_scalar) or is_extension_array_dtype(dtype) # type: ignore[arg-type]
7272
)
7373
or casting != "unsafe"
7474
or not subok
@@ -81,7 +81,8 @@ def __extension_duck_array__astype(
8181

8282
@implements(np.asarray)
8383
def __extension_duck_array__asarray(
84-
array_or_scalar: np.typing.ArrayLike, dtype: DTypeLikeSave = None
84+
array_or_scalar: np.typing.ArrayLike | T_ExtensionArray,
85+
dtype: DTypeLikeSave | None = None,
8586
) -> ExtensionArray:
8687
if not is_extension_array_dtype(dtype):
8788
return NotImplemented
@@ -90,7 +91,9 @@ def __extension_duck_array__asarray(
9091

9192

9293
def as_extension_array(
93-
array_or_scalar: np.typing.ArrayLike, dtype: ExtensionDtype, copy: bool = False
94+
array_or_scalar: np.typing.ArrayLike | T_ExtensionArray,
95+
dtype: ExtensionDtype | DTypeLikeSave | None,
96+
copy: bool = False,
9497
) -> ExtensionArray:
9598
if is_scalar(array_or_scalar):
9699
return dtype.construct_array_type()._from_sequence( # type: ignore[attr-defined]

0 commit comments

Comments (0)