Skip to content

Commit 0c2a86e

Browse files
committed
use numpy 2
1 parent df72896 commit 0c2a86e

File tree

2 files changed

+6
-6
lines changed

2 files changed

+6
-6
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ classifiers = [
4141

4242
dependencies = [
4343
"flatbuffers",
44-
"numpy<2", # Pinned to <2 due to f142 np.unicode
44+
"numpy>2"
4545
]
4646

4747
[project.optional-dependencies]

src/streaming_data_types/logdata_f142.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -490,8 +490,8 @@ def _serialise_value(
490490
):
491491
# We can use a dictionary to map most numpy types to one of the types defined in the flatbuffer schema
492492
# but we have to handle strings separately as there are many subtypes
493-
if np.issubdtype(value.dtype, np.unicode_) or np.issubdtype(
494-
value.dtype, np.string_
493+
if np.issubdtype(value.dtype, np.str_) or np.issubdtype(
494+
value.dtype, np.bytes_
495495
):
496496
string_serialiser(builder, value, source)
497497
else:
@@ -501,7 +501,7 @@ def _serialise_value(
501501
# There are a few numpy types we don't try to handle, for example complex numbers
502502
raise NotImplementedError(
503503
f"Cannot serialise data of type {value.dtype}, must use one of "
504-
f"{list(_map_scalar_type_to_serialiser.keys()) + [np.unicode_]}"
504+
f"{list(_map_scalar_type_to_serialiser.keys()) + [np.str_]}"
505505
)
506506

507507

@@ -539,8 +539,8 @@ def _serialise_value(
539539

540540
def _decode_if_scalar_string(value: np.ndarray) -> Union[str, np.ndarray]:
541541
if value.ndim == 0 and (
542-
np.issubdtype(value.dtype, np.unicode_)
543-
or np.issubdtype(value.dtype, np.string_)
542+
np.issubdtype(value.dtype, np.str_)
543+
or np.issubdtype(value.dtype, np.bytes_)
544544
):
545545
return value.item().decode()
546546
return value

0 commit comments

Comments (0)