Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[MNT] Add Python 3.11 and 3.12 support #148

Closed
wants to merge 10 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ jobs:
strategy:
matrix:
os: ['ubuntu-latest']
python-version: ['3.8', '3.9', '3.10']
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
steps:
- name: Checkout code
uses: actions/checkout@v4
Expand Down
13 changes: 9 additions & 4 deletions neuromaps/datasets/_osf.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
"""Functions for working with data/osf.json file."""

import os
from pkg_resources import resource_filename
import importlib.resources
import json

try:
Expand All @@ -27,9 +27,14 @@
INFO_KEYS = ['source', 'refs', 'comments', 'demographics']

# distribution JSON
OSFJSON = resource_filename(
'neuromaps', os.path.join('datasets', 'data', 'osf.json')
)

# temporary fix to be removed by the osf fix
if getattr(importlib.resources, 'files', None) is not None:
OSFJSON = importlib.resources.files("neuromaps") / "datasets/data/osf.json"
else:
from pkg_resources import resource_filename
OSFJSON = resource_filename('neuromaps',
os.path.join('datasets', 'data', 'osf.json'))


def parse_filename(fname, return_ext=True, verbose=False):
Expand Down
11 changes: 9 additions & 2 deletions neuromaps/datasets/tests/test__osf.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
# -*- coding: utf-8 -*-
"""For testing neuromaps.datasets._osf functionality."""

from pkg_resources import resource_filename
import os
import importlib.resources

import pytest

Expand All @@ -22,7 +23,13 @@ def test_parse_fname_list():

def test_parse_json():
"""Test parsing a JSON file."""
osf = resource_filename('neuromaps', 'datasets/data/osf.json')
# temporary fix to be removed by the osf fix
if getattr(importlib.resources, 'files', None) is not None:
osf = importlib.resources.files("neuromaps") / "datasets/data/osf.json"
else:
from pkg_resources import resource_filename
osf = resource_filename('neuromaps',
os.path.join('datasets', 'data', 'osf.json'))
out = _osf.parse_json(osf)
assert isinstance(out, list) and all(isinstance(i, dict) for i in out)

Expand Down
13 changes: 9 additions & 4 deletions neuromaps/datasets/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,7 @@

import json
import os
from pkg_resources import resource_filename

import importlib.resources
import requests

RESTRICTED = ["grh4d"]
Expand Down Expand Up @@ -70,8 +69,14 @@ def get_dataset_info(name, return_restricted=True):
dataset : dict or list-of-dict
Information on requested data
"""
fn = resource_filename('neuromaps',
os.path.join('datasets', 'data', 'osf.json'))
# temporary fix to be removed by the osf fix
if getattr(importlib.resources, 'files', None) is not None:
fn = importlib.resources.files("neuromaps") / "datasets/data/osf.json"
else:
from pkg_resources import resource_filename
fn = resource_filename('neuromaps',
os.path.join('datasets', 'data', 'osf.json'))

with open(fn) as src:
osf_resources = _osfify_urls(json.load(src), return_restricted)

Expand Down
6 changes: 3 additions & 3 deletions neuromaps/images.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ def load_nifti(img):
try:
img = nib.load(img)
except (TypeError) as err:
if not ("os.PathLike" in str(err) and "not Nifti1Image" in str(err)):
if not ("os.PathLike" in str(err) and "Nifti1Image" in str(err)):
raise err
return img

Expand Down Expand Up @@ -164,7 +164,7 @@ def load_gifti(img):
# it's not a pre-loaded GiftiImage so error out
elif (isinstance(err, TypeError)
and not (
"os.PathLike" in str(err) and "not GiftiImage" in str(err)
"os.PathLike" in str(err) and "GiftiImage" in str(err)
)
):
raise err
Expand Down Expand Up @@ -213,7 +213,7 @@ def load_data(data):
if (isinstance(err, AttributeError)
or (
"os.PathLike" in str(err)
and "not Nifti1Image" in str(err)
and "Nifti1Image" in str(err)
)
):
out = np.stack([load_nifti(img).get_fdata() for img in data],
Expand Down
2 changes: 1 addition & 1 deletion neuromaps/nulls/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""Functions for computing null models."""

__all__ = [
'naive_nonparametric', 'alexander_bloch', 'vazquez_rodriguez', 'vasa',
'alexander_bloch', 'vazquez_rodriguez', 'vasa',
'hungarian', 'baum', 'cornblath', 'burt2018', 'burt2020', 'moran'
]

Expand Down
2 changes: 1 addition & 1 deletion neuromaps/stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@ def efficient_pearsonr(a, b, ddof=1, nan_policy='propagate', return_pval=True):
if return_pval:
# taken from scipy.stats
ab = (n_obs / 2) - 1
prob = 2 * special.btdtr(ab, ab, 0.5 * (1 - np.abs(corr)))
prob = 2 * special.betainc(ab, ab, 0.5 * (1 - np.abs(corr)))

return corr, prob

Expand Down
7 changes: 7 additions & 0 deletions neuromaps/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,13 @@
import pytest


def pytest_configure(config):
    """Register the custom markers used by this test suite with pytest."""
    # Declaring the marker up front keeps ``--strict-markers`` runs happy.
    marker_spec = "workbench: mark test to run with Connectome Workbench"
    config.addinivalue_line("markers", marker_spec)


def pytest_runtest_setup(item):
"""Skip tests that require workbench if it's not installed."""
markers = set(mark.name for mark in item.iter_markers())
Expand Down
69 changes: 61 additions & 8 deletions neuromaps/tests/test_images.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,16 +30,69 @@ def test_fix_coordsys():
assert False


@pytest.mark.xfail
def test_load_nifti():
@pytest.fixture(scope="session")
def dummy_img(request, tmp_path_factory):
    """Return a valid image file."""
    # Indirectly parametrized fixture: tests pass
    # {"file_type": "nifti"|"gifti", "return_type": "str"|"path"|"object"}
    # through ``request.param`` (requires ``indirect=True`` at the call site).
    file_type = request.param["file_type"]
    return_type = request.param["return_type"]

    # Fresh generator per session; data values are random but the file is
    # structurally valid either way, which is all these tests need.
    rng = np.random.default_rng()

    # create a valid image file
    if file_type == "nifti":
        # 10x10x10 volume with an identity affine — smallest sensible NIfTI.
        data = rng.random((10, 10, 10))
        curr_img = nib.Nifti1Image(data, affine=np.eye(4))
        # session-scoped tmp dir so the file outlives individual tests
        curr_path = tmp_path_factory.mktemp("nifti") \
            / "valid_nifti_file.nii.gz"
        nib.save(curr_img, str(curr_path))
    elif file_type == "gifti":
        # float32 requested at generation time — presumably because GIFTI
        # data arrays expect 32-bit floats; TODO confirm against nibabel docs.
        data = rng.random((10, 10), dtype=np.float32)
        curr_img = nib.gifti.GiftiImage()
        gifti_data_array = nib.gifti.GiftiDataArray(data)
        curr_img.add_gifti_data_array(gifti_data_array)
        curr_path = tmp_path_factory.mktemp("gifti") \
            / "valid_gifti_file.gii"
        nib.save(curr_img, str(curr_path))
    else:
        raise ValueError(f"Invalid file type: {file_type}")

    # return the appropriate file type
    if return_type == "str":
        return str(curr_path)
    elif return_type == "path":
        # pathlib.Path object from tmp_path_factory
        return curr_path
    elif return_type == "object":
        # the in-memory nibabel image, not the on-disk file
        return curr_img
    else:
        raise ValueError(f"Invalid return type: {return_type}")


@pytest.mark.parametrize(
"dummy_img", [
pytest.param(
{"file_type": "nifti", "return_type": _},
id=_
) for _ in ["str", "path", "object"]
], indirect=True
)
def test_load_nifti(dummy_img):
"""Test loading a NIfTI image."""
assert False


@pytest.mark.xfail
def test_load_gifti():
res = images.load_nifti(dummy_img)
assert isinstance(res, nib.Nifti1Image)


@pytest.mark.parametrize(
"dummy_img", [
pytest.param(
{"file_type": "gifti", "return_type": _},
id=_
) for _ in ["str", "path", "object"]
], indirect=True
)
def test_load_gifti(dummy_img):
"""Test loading a GIFTI image."""
assert False
res = images.load_gifti(dummy_img)
assert isinstance(res, nib.gifti.GiftiImage)


@pytest.mark.xfail
Expand Down
Loading