Skip to content

Commit

Permalink
Add support for NumPy v2.0
Browse files Browse the repository at this point in the history
  • Loading branch information
LevN0 committed Jun 22, 2024
1 parent 3550fc2 commit afdd4b9
Show file tree
Hide file tree
Showing 4 changed files with 45 additions and 14 deletions.
4 changes: 2 additions & 2 deletions pds4_tools/reader/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

from .data_types import pds_to_numpy_name

from ..utils.compat import OrderedDict
from ..utils.compat import OrderedDict, np_issubclass
from ..extern import six


Expand Down Expand Up @@ -537,7 +537,7 @@ def view(self, dtype=None, type=None, fill_value=None):
obj = super(PDS_marray, self).view(dtype=dtype, type=type, fill_value=fill_value)

# Fix bug in NumPy < v1.10, which resets fill value on ``view`` if mask is not nomask
if ((dtype is None) or ((type is None) and np.issubclass_(dtype, np.ma.MaskedArray))) and \
if ((dtype is None) or ((type is None) and np_issubclass(dtype, np.ma.MaskedArray))) and \
(fill_value is None):

obj._fill_value = self._fill_value
Expand Down
30 changes: 23 additions & 7 deletions pds4_tools/reader/data_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@

import numpy as np

from ..utils.compat import np_unicode
from ..utils.deprecation import rename_parameter
from ..utils.logging import logger_init

Expand Down Expand Up @@ -135,7 +136,7 @@ def pds_to_numpy_type(data_type=None, data=None, field_length=None, decode_strin
# Get dtype for character data (from data)
if is_character_data:
unicode_requested = decode_strings and not is_bitstring_data
dtype = 'U' if (np.issubdtype(data.dtype, np.unicode_) or unicode_requested) else 'S'
dtype = 'U' if (np.issubdtype(data.dtype, np_unicode) or unicode_requested) else 'S'

if field_length is not None:
dtype += str(field_length)
Expand Down Expand Up @@ -248,7 +249,7 @@ def pds_to_builtin_type(data_type=None, data=None, decode_strings=False, decode_

if is_character_data:
unicode_requested = decode_strings and not is_bitstring_data
_type = six.text_type if (np.issubdtype(data.dtype, np.unicode_) or unicode_requested
_type = six.text_type if (np.issubdtype(data.dtype, np_unicode) or unicode_requested
) else six.binary_type
else:
_type = type(np.asscalar(data[0]))
Expand Down Expand Up @@ -311,7 +312,7 @@ def numpy_to_pds_type(dtype, ascii_numerics=False):
"""

# For string dtypes
if np.issubdtype(dtype, np.unicode_):
if np.issubdtype(dtype, np_unicode):
data_type = 'UTF8_String'

elif np.issubdtype(dtype, np.string_):
Expand Down Expand Up @@ -460,6 +461,8 @@ def data_type_convert_table_ascii(data_type, data, mask_nulls=False, decode_stri
data[mask_array] = six.ensure_binary(str(fill_value))

# Special handling for boolean due to e.g. bool('false') = True
# and that in NumPy 2.0+ string arrays cast to bool set
# all non-empty strings as True.
if data_type == 'ASCII_Boolean':

# Replace 'true' and 'false' with 1 and 0
Expand All @@ -469,9 +472,10 @@ def data_type_convert_table_ascii(data_type, data, mask_nulls=False, decode_stri
data = data.split(b'@')

try:
data = np.asarray(data).astype(dtype, copy=False)
data = np.asarray(data).astype(np.uint8, copy=False) \
.astype(dtype, copy=False)
except TypeError:
data = np.asarray(data).astype(dtype)
data = np.asarray(data).astype(np.uint8).astype(dtype)

# Convert ASCII numerics into their proper data type
elif not np.issubdtype(dtype, np.character):
Expand Down Expand Up @@ -770,11 +774,23 @@ def apply_scaling_and_value_offset(data, scaling_factor=None, value_offset=None,
data = adjust_array_data_type(data, scaling_factor, value_offset)

# Apply scaling factor and value offset
# (workaround inability of NumPy 2.0+ to add / multiply in cases where
# value on right of operand is out-of-bounds of dtype, see PR #99)
if not no_scaling:
data *= scaling_factor

scale_dtype = None
if isinstance(scaling_factor, six.integer_types):
scale_dtype = get_min_integer_numpy_type([scaling_factor])

data *= np.array(scaling_factor, dtype=scale_dtype)

if not no_offset:
data += value_offset

offset_dtype = None
if isinstance(value_offset, six.integer_types):
offset_dtype = get_min_integer_numpy_type([value_offset])

data += np.array(value_offset, dtype=offset_dtype)

# Restore the original mask if necessary, removing any additional mask applied above for Special_Constants
if (special_constants is not None) and (mask is not None):
Expand Down
12 changes: 12 additions & 0 deletions pds4_tools/utils/compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
import inspect
from xml.etree import ElementTree as ET

import numpy as np

from ..extern import six

# OrderedDict compat (Python 2.7+ and 3.1+)
Expand All @@ -26,6 +28,16 @@
ET_Element_iter = ET_Element.iter if hasattr(ET_Element, 'iter') else ET_Element.getiterator
ET_ParseError = ET.ParseError if hasattr(ET, 'ParseError') else None

# NumPy compat (NumPy 2.0+)

# ``np.unicode_`` was removed in NumPy 2.0; ``np.str_`` is the same scalar
# type under both old and new NumPy versions.
try:
    np_unicode = np.unicode_
except AttributeError:
    np_unicode = np.str_

# ``np.issubclass_`` was removed in NumPy 2.0. Unlike the builtin
# ``issubclass``, it returned False instead of raising TypeError when an
# argument was not a class (e.g. a ``np.dtype`` instance, which callers
# such as ``PDS_marray.view`` may pass). The fallback must preserve that
# behavior rather than aliasing the builtin directly.
try:
    np_issubclass = np.issubclass_
except AttributeError:
    def np_issubclass(arg1, arg2):
        """Like builtin ``issubclass``, but returns False instead of
        raising TypeError if *arg1* is not a class (drop-in replacement
        for the removed ``np.issubclass_``)."""
        try:
            return issubclass(arg1, arg2)
        except TypeError:
            return False

# signature.bind(...).arguments compat (Python 3.3+)
def bind_arguments(func, *args, **kwargs):
Expand Down
13 changes: 8 additions & 5 deletions tox.ini
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
[tox]
envlist =
{py310}-numpy{1_21_0,latest}
{py39}-numpy{1_19_3,latest}
{py38,py37}-numpy{1_15_0,latest}
{py27,py35,py36}-numpy{1_13_0,1_8_0,latest}
{py34}-numpy{1_13_0,1_8_0,latest}-attrs_py34
{py310}-numpy{1_21_0,1_26_4,latest}
{py39}-numpy{1_19_3,1_26_4,latest}
{py37,py38}-numpy{1_15_0,latest}
{py35,py36}-numpy{1_13_0,latest}
{py34}-numpy{1_8_0,1_13_0,latest}-attrs_py34
{py27}-numpy{1_8_0,1_13_0,latest}
recreate = True

[gh-actions]
Expand All @@ -29,10 +30,12 @@ basepython =
deps =
pytest
attrs_py34: attrs==20.3.0
numpy1_8_0: numpy==1.8.0
numpy1_13_0: numpy==1.13.0
numpy1_15_0: numpy==1.15.0
numpy1_19_3: numpy==1.19.3
numpy1_21_0: numpy==1.21.0
numpy1_26_4: numpy==1.26.4
numpylatest: numpy
commands = py.test

0 comments on commit afdd4b9

Please sign in to comment.