Merge pull request #1124 from effigies/sty/normalize
STY: Normalize to blue and isort
effigies authored Jan 1, 2023
2 parents 0a6e73e + 69785cd commit 14faf88
Showing 212 changed files with 9,681 additions and 8,437 deletions.
12 changes: 12 additions & 0 deletions .git-blame-ignore-revs
@@ -0,0 +1,12 @@
# Thu Dec 29 22:53:17 2022 -0500 - effigies@gmail.com - STY: Reduce array().astype() and similar constructs
bf298113da99079c9c7b5e1690e41879828cd472
# Thu Dec 29 22:32:46 2022 -0500 - effigies@gmail.com - STY: pyupgrade --py37-plus
4481a4c2640bd4be6e9c468e550d01aae448ab99
# Fri Dec 30 11:01:19 2022 -0500 - effigies@gmail.com - STY: Run vanilla blue
6b0ddd23b1da1df7ca9ae275673f82bfa20a754c
# Thu Dec 29 21:46:13 2022 -0500 - markiewicz@stanford.edu - STY: Manual, blue-compatible touchups
263fca9bf6d4ca314a5a322b4824d6f53d0589df
# Thu Dec 29 21:32:00 2022 -0500 - effigies@gmail.com - STY: isort
0ab2856cac4d4baae7ab3e2f6d58421db55d807f
# Thu Dec 29 21:30:29 2022 -0500 - effigies@gmail.com - STY: blue
1a8dd302ff85b1136c81d492509b80e7748339f0
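
Note: git does not consult this file automatically. Assuming git 2.23 or later, running git config blame.ignoreRevsFile .git-blame-ignore-revs (or passing --ignore-revs-file .git-blame-ignore-revs to a single git blame call) makes git blame skip the formatting-only commits listed above.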
5 changes: 4 additions & 1 deletion Makefile
@@ -78,6 +78,9 @@ distclean: clean
$(WWW_DIR):
if [ ! -d $(WWW_DIR) ]; then mkdir -p $(WWW_DIR); fi

.git-blame-ignore-revs:
git log --grep "\[git-blame-ignore-rev\]" --pretty=format:"# %ad - %ae - %s%n%H" \
> .git-blame-ignore-revs

#
# Tests
@@ -288,4 +291,4 @@ rm-orig:
# Remove .orig temporary diff files generated by git
find . -name "*.orig" -print | grep -v "fsaverage" | xargs rm

.PHONY: orig-src pylint all build
.PHONY: orig-src pylint all build .git-blame-ignore-revs
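
Note: the new .git-blame-ignore-revs target regenerates the file from every commit whose message carries the [git-blame-ignore-rev] tag, using the same "# date - author - subject" plus hash format shown above; listing it under .PHONY means make always reruns the git log query rather than treating an existing file as up to date.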
84 changes: 50 additions & 34 deletions nibabel/__init__.py
@@ -9,8 +9,9 @@

import os

from .pkg_info import __version__
from .info import long_description as __doc__
from .pkg_info import __version__

__doc__ += """
Quickstart
==========
@@ -38,32 +39,39 @@

# module imports
from . import analyze as ana
from . import spm99analyze as spm99
from . import spm2analyze as spm2
from . import ecat, mriutils
from . import nifti1 as ni1
from . import ecat
from . import spm2analyze as spm2
from . import spm99analyze as spm99
from . import streamlines, viewers

# isort: split

# object imports
from .fileholders import FileHolder, FileHolderError
from .loadsave import load, save
from .arrayproxy import is_proxy
from .analyze import AnalyzeHeader, AnalyzeImage
from .spm99analyze import Spm99AnalyzeHeader, Spm99AnalyzeImage
from .spm2analyze import Spm2AnalyzeHeader, Spm2AnalyzeImage
from .nifti1 import Nifti1Header, Nifti1Image, Nifti1Pair
from .nifti2 import Nifti2Header, Nifti2Image, Nifti2Pair
from .minc1 import Minc1Image
from .minc2 import Minc2Image
from .arrayproxy import is_proxy
from .cifti2 import Cifti2Header, Cifti2Image
from .gifti import GiftiImage
from .fileholders import FileHolder, FileHolderError
from .freesurfer import MGHImage
from .funcs import (squeeze_image, concat_images, four_to_three,
as_closest_canonical)
from .orientations import (io_orientation, flip_axis, OrientationError,
apply_orientation, aff2axcodes)
from .funcs import as_closest_canonical, concat_images, four_to_three, squeeze_image
from .gifti import GiftiImage
from .imageclasses import all_image_classes
from . import mriutils
from . import streamlines
from . import viewers
from .loadsave import load, save
from .minc1 import Minc1Image
from .minc2 import Minc2Image
from .nifti1 import Nifti1Header, Nifti1Image, Nifti1Pair
from .nifti2 import Nifti2Header, Nifti2Image, Nifti2Pair
from .orientations import (
OrientationError,
aff2axcodes,
apply_orientation,
flip_axis,
io_orientation,
)
from .spm2analyze import Spm2AnalyzeHeader, Spm2AnalyzeImage
from .spm99analyze import Spm99AnalyzeHeader, Spm99AnalyzeImage

# isort: split

from .pkg_info import get_pkg_info as _get_pkg_info
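
Note on the two "# isort: split" comments above: this isort action comment closes the current section, so isort sorts the module imports, the object imports, and the trailing get_pkg_info import independently and preserves their relative order instead of merging everything into one alphabetized block.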

@@ -72,9 +80,15 @@ def get_info():
return _get_pkg_info(os.path.dirname(__file__))


def test(label=None, verbose=1, extra_argv=None,
doctests=False, coverage=False, raise_warnings=None,
timer=False):
def test(
label=None,
verbose=1,
extra_argv=None,
doctests=False,
coverage=False,
raise_warnings=None,
timer=False,
):
"""
Run tests for nibabel using pytest
@@ -107,29 +121,30 @@ def test(label=None, verbose=1, extra_argv=None,
Returns the result of running the tests as a ``pytest.ExitCode`` enum
"""
import pytest

args = []

if label is not None:
raise NotImplementedError("Labels cannot be set at present")
raise NotImplementedError('Labels cannot be set at present')

verbose = int(verbose)
if verbose > 0:
args.append("-" + "v" * verbose)
args.append('-' + 'v' * verbose)
elif verbose < 0:
args.append("-" + "q" * -verbose)
args.append('-' + 'q' * -verbose)

if extra_argv:
args.extend(extra_argv)
if doctests:
args.append("--doctest-modules")
args.append('--doctest-modules')
if coverage:
args.extend(["--cov", "nibabel"])
args.extend(['--cov', 'nibabel'])
if raise_warnings is not None:
raise NotImplementedError("Warning filters are not implemented")
raise NotImplementedError('Warning filters are not implemented')
if timer:
raise NotImplementedError("Timing is not implemented")
raise NotImplementedError('Timing is not implemented')

args.extend(["--pyargs", "nibabel"])
args.extend(['--pyargs', 'nibabel'])

return pytest.main(args=args)

@@ -157,9 +172,10 @@ def bench(label=None, verbose=1, extra_argv=None):
Returns the result of running the tests as a ``pytest.ExitCode`` enum
"""
from pkg_resources import resource_filename
config = resource_filename("nibabel", "benchmarks/pytest.benchmark.ini")

config = resource_filename('nibabel', 'benchmarks/pytest.benchmark.ini')
args = []
if extra_argv is not None:
args.extend(extra_argv)
args.extend(["-c", config])
args.extend(['-c', config])
return test(label, verbose, extra_argv=args)
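
For reference, a minimal sketch of how the reformatted test() wrapper maps its keyword arguments onto a pytest invocation; the argument values below are illustrative, not part of the commit, and running it assumes pytest is installed:

import nibabel

# Equivalent to running: pytest -vv --doctest-modules --pyargs nibabel
result = nibabel.test(verbose=2, doctests=True)
print(result)  # a pytest.ExitCode value, e.g. ExitCode.OK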
28 changes: 14 additions & 14 deletions nibabel/affines.py
@@ -1,21 +1,22 @@
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
""" Utility routines for working with points and affine transforms
"""Utility routines for working with points and affine transforms
"""
import numpy as np

from functools import reduce

import numpy as np


class AffineError(ValueError):
""" Errors in calculating or using affines """
"""Errors in calculating or using affines"""

# Inherits from ValueError to keep compatibility with ValueError previously
# raised in append_diag
pass


def apply_affine(aff, pts, inplace=False):
""" Apply affine matrix `aff` to points `pts`
"""Apply affine matrix `aff` to points `pts`
Returns result of application of `aff` to the *right* of `pts`. The
coordinate dimension of `pts` should be the last.
@@ -142,7 +143,7 @@ def to_matvec(transform):


def from_matvec(matrix, vector=None):
""" Combine a matrix and vector into an homogeneous affine
"""Combine a matrix and vector into an homogeneous affine
Combine a rotation / scaling / shearing matrix and translation vector into
a transform in homogeneous coordinates.
@@ -185,14 +186,14 @@ def from_matvec(matrix, vector=None):
nin, nout = matrix.shape
t = np.zeros((nin + 1, nout + 1), matrix.dtype)
t[0:nin, 0:nout] = matrix
t[nin, nout] = 1.
t[nin, nout] = 1.0
if vector is not None:
t[0:nin, nout] = vector
return t


def append_diag(aff, steps, starts=()):
""" Add diagonal elements `steps` and translations `starts` to affine
"""Add diagonal elements `steps` and translations `starts` to affine
Typical use is in expanding 4x4 affines to larger dimensions. Nipy is the
main consumer because it uses NxM affines, whereas we generally only use
@@ -236,8 +237,7 @@ def append_diag(aff, steps, starts=()):
raise AffineError('Steps should have same length as starts')
old_n_out, old_n_in = aff.shape[0] - 1, aff.shape[1] - 1
# make new affine
aff_plus = np.zeros((old_n_out + n_steps + 1,
old_n_in + n_steps + 1), dtype=aff.dtype)
aff_plus = np.zeros((old_n_out + n_steps + 1, old_n_in + n_steps + 1), dtype=aff.dtype)
# Get stuff from old affine
aff_plus[:old_n_out, :old_n_in] = aff[:old_n_out, :old_n_in]
aff_plus[:old_n_out, -1] = aff[:old_n_out, -1]
@@ -250,7 +250,7 @@


def dot_reduce(*args):
r""" Apply numpy dot product function from right to left on arrays
r"""Apply numpy dot product function from right to left on arrays
For passed arrays :math:`A, B, C, ... Z` returns :math:`A \dot B \dot C ...
\dot Z` where "." is the numpy array dot product.
@@ -270,7 +270,7 @@ def dot_reduce(*args):


def voxel_sizes(affine):
r""" Return voxel size for each input axis given `affine`
r"""Return voxel size for each input axis given `affine`
The `affine` is the mapping between array (voxel) coordinates and mm
(world) coordinates.
@@ -308,7 +308,7 @@ def voxel_sizes(affine):
but in general has length (N-1) where input `affine` is shape (M, N).
"""
top_left = affine[:-1, :-1]
return np.sqrt(np.sum(top_left ** 2, axis=0))
return np.sqrt(np.sum(top_left**2, axis=0))


def obliquity(affine):
@@ -340,7 +340,7 @@ def obliquity(affine):


def rescale_affine(affine, shape, zooms, new_shape=None):
""" Return a new affine matrix with updated voxel sizes (zooms)
"""Return a new affine matrix with updated voxel sizes (zooms)
This function preserves the rotations and shears of the original
affine, as well as the RAS location of the central voxel of the
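As a usage note for the nibabel.affines helpers touched above (a minimal sketch, not part of the commit; the scaling and translation values are invented for illustration):

import numpy as np

from nibabel.affines import apply_affine, from_matvec

# Hypothetical affine: 2 mm isotropic zooms plus a 10 mm translation along x
aff = from_matvec(np.diag([2.0, 2.0, 2.0]), [10.0, 0.0, 0.0])
pts = np.array([[0.0, 0.0, 0.0], [1.0, 2.0, 3.0]])
# apply_affine applies aff to the right of pts; the coordinate axis is last
print(apply_affine(aff, pts))  # [[10.  0.  0.]
                               #  [12.  4.  6.]]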
(Diffs for the remaining changed files in this commit are not shown.)
