
Commit

Merge branch 'main' into prep-1.0.0rc1
tsalo authored Nov 20, 2024
2 parents ae51def + 3d14e53 commit 1545897
Showing 10 changed files with 159 additions and 58 deletions.
2 changes: 2 additions & 0 deletions .circleci/config.yml
@@ -702,6 +702,8 @@ workflows:
- Recon_PYAFQExternalTrk
- Recon_ScalarMap
- Recon_multises_pre1_IO_test
- Recon_multises_post1_IO_test
- Recon_msmt_Multishell_HSVS
filters:
branches:
ignore:
73 changes: 54 additions & 19 deletions qsirecon/cli/recon_plot.py
@@ -101,7 +101,6 @@ def recon_plot():
parser.add_argument("--padding", type=int, default=10, help="number of slices to plot")
opts = parser.parse_args()

print(f"TMPDIR={os.getenv('TMPDIR')}")
if opts.mif:
odf_img, directions = mif2amps(opts.mif, os.getcwd())
LOGGER.info("converting %s to plot ODF/peaks", opts.mif)
@@ -126,29 +125,65 @@ def recon_plot():
else:
background_data = nb.load(opts.background_image).get_fdata()

LOGGER.info("saving peaks image to %s", opts.peaks_image)
peak_slice_series(
odf_4d,
sphere,
background_data,
opts.peaks_image,
n_cuts=opts.ncuts,
mask_image=opts.mask_file,
padding=opts.padding,
)
LOGGER.info("Starting a virtual framebuffer for FURY")

# Plot ODFs in interesting regions
if opts.odf_rois and not opts.peaks_only:
LOGGER.info("saving odfs image to %s", opts.odfs_image)
odf_roi_plot(
# Xvfb won't allow an empty $DISPLAY
if "DISPLAY" in os.environ:
del os.environ["DISPLAY"]

from xvfbwrapper import Xvfb

display = Xvfb(nolisten="tcp")
display.extra_xvfb_args += ["+iglx"]

try:
display.start()
except Exception as exc:
LOGGER.warning(
"Unable to start Xvfb!! If you are running this via Apptainer/Singularity "
"there may be an issue accessing the /tmp directory.\n\n"
"If you used the --containall flag, please also use --writable-tmpfs.\n\n"
"Otherwise, be sure that the /tmp directory is writable by Singularity/Apptainer. "
"This may require contacting a system administrator.\n\n"
f"The python error was\n{exc}"
)
sys.exit(1)

try:
peak_slice_series(
odf_4d,
sphere,
background_data,
opts.odfs_image,
opts.odf_rois,
subtract_iso=opts.subtract_iso,
mask=opts.mask_file,
opts.peaks_image,
n_cuts=opts.ncuts,
mask_image=opts.mask_file,
padding=opts.padding,
)
except Exception as exc:
LOGGER.warning(exc)
sys.exit(1)

LOGGER.info("saving peaks image to %s", opts.peaks_image)

# Plot ODFs in interesting regions
if opts.odf_rois and not opts.peaks_only:
try:
odf_roi_plot(
odf_4d,
sphere,
background_data,
opts.odfs_image,
opts.odf_rois,
subtract_iso=opts.subtract_iso,
mask=opts.mask_file,
)
except Exception as exc:
LOGGER.warning(exc)
sys.exit(1)

LOGGER.info("saved odfs image to %s", opts.odfs_image)

display.stop()
sys.exit(0)


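For reference, the virtual-framebuffer pattern this hunk adds to recon_plot can be exercised on its own roughly as follows. This is a minimal sketch, assuming xvfbwrapper is installed and an Xvfb binary is on the PATH; render_scene is a hypothetical stand-in for the FURY plotting calls, not a qsirecon function.

```python
import os
import sys

from xvfbwrapper import Xvfb

# Xvfb will not start cleanly if an unusable $DISPLAY is already set,
# so drop it before launching the virtual framebuffer.
os.environ.pop("DISPLAY", None)

display = Xvfb(nolisten="tcp")
display.extra_xvfb_args += ["+iglx"]  # allow indirect GLX so OpenGL rendering works headlessly

try:
    display.start()
except Exception as exc:
    # In containers (e.g., Apptainer/Singularity with --containall), /tmp may not be
    # writable, which prevents Xvfb from starting; --writable-tmpfs is one workaround.
    print(f"Could not start Xvfb: {exc}", file=sys.stderr)
    sys.exit(1)

try:
    render_scene()  # hypothetical placeholder for peak_slice_series()/odf_roi_plot()
finally:
    display.stop()
```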
4 changes: 3 additions & 1 deletion qsirecon/config.py
@@ -537,7 +537,9 @@ def _process_value(value):

cls._processing_list = []
for dwi_relpath, anat_relpath in cls.processing_list:
cls._processing_list.append((cls.layout.get_file(dwi_relpath), cls.layout.get_file(anat_relpath)))
cls._processing_list.append(
(cls.layout.get_file(dwi_relpath), cls.layout.get_file(anat_relpath))
)
cls.processing_list = cls._processing_list

dataset_links = {
1 change: 0 additions & 1 deletion qsirecon/interfaces/reports.py
@@ -282,7 +282,6 @@ class CLIReconPeaksReport(CommandLine):
input_spec = _ReconPeaksReportInputSpec
output_spec = _ReconPeaksReportOutputSpec
_cmd = "recon_plot"
_redirect_x = True

def _list_outputs(self):
outputs = self.output_spec().get()
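For orientation, the class touched here follows Nipype's standard CommandLine interface pattern, sketched below with illustrative trait names (the real specs are _ReconPeaksReportInputSpec and _ReconPeaksReportOutputSpec). With the Xvfb handling moved into recon_plot itself in the hunk above, the interface no longer sets `_redirect_x`, which is Nipype's flag for providing a virtual X display to the command.

```python
# Minimal Nipype CommandLine sketch mirroring the pattern above (illustrative traits only).
from nipype.interfaces.base import (
    CommandLine,
    CommandLineInputSpec,
    File,
    TraitedSpec,
)


class _PlotInputSpec(CommandLineInputSpec):
    mif = File(exists=True, argstr="--mif %s", desc="hypothetical example flag")


class _PlotOutputSpec(TraitedSpec):
    peaks_image = File(desc="rendered peaks figure")


class ReconPlotExample(CommandLine):
    input_spec = _PlotInputSpec
    output_spec = _PlotOutputSpec
    _cmd = "recon_plot"
    # No _redirect_x here: recon_plot now manages its own Xvfb display.
```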
115 changes: 85 additions & 30 deletions qsirecon/tests/test_cli.py
@@ -9,14 +9,19 @@

from qsirecon.cli import run
from qsirecon.cli.parser import parse_args
from qsirecon.cli.workflow import build_boilerplate, build_workflow
from qsirecon.cli.workflow import build_boilerplate, build_workflow, copy_boilerplate
from qsirecon.reports.core import generate_reports
from qsirecon.tests.utils import (
check_generated_files,
download_test_data,
freesurfer_license,
get_test_data_path,
)
from qsirecon.utils.bids import write_derivative_description
from qsirecon.utils.bids import (
write_atlas_dataset_description,
write_bidsignore,
write_derivative_description,
)

nipype_config.enable_debug_mode()

@@ -58,7 +63,7 @@ def test_mrtrix_singleshell_ss3t_act(data_dir, output_dir, working_dir):
"Gordon333Ext",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -95,7 +100,7 @@ def test_mrtrix_multishell_msmt_hsvs(data_dir, output_dir, working_dir):
"--report-output-level=root",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -136,7 +141,7 @@ def test_mrtrix_singleshell_ss3t_noact(data_dir, output_dir, working_dir):
"--report-output-level=subject",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -160,7 +165,7 @@ def test_multises_post1_qsiprep_reportroot(data_dir, output_dir, working_dir):
"--recon-spec=test_workflow",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -185,7 +190,7 @@ def test_multises_post1_qsiprep_reportsubject(data_dir, output_dir, working_dir)
"--recon-spec=test_workflow",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -210,7 +215,7 @@ def test_multises_post1_qsiprep_reportsession(data_dir, output_dir, working_dir)
"--recon-spec=test_workflow",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -234,7 +239,7 @@ def test_multises_pre1_qsiprep_reportroot(data_dir, output_dir, working_dir):
"--recon-spec=test_workflow",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -259,7 +264,7 @@ def test_multises_pre1_qsiprep_reportsubject(data_dir, output_dir, working_dir):
"--recon-spec=test_workflow",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -284,7 +289,7 @@ def test_multises_pre1_qsiprep_reportsession(data_dir, output_dir, working_dir):
"--recon-spec=test_workflow",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -323,7 +328,7 @@ def test_amico_noddi(data_dir, output_dir, working_dir):
"--report-output-level=session",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -362,7 +367,7 @@ def test_autotrack(data_dir, output_dir, working_dir):
"--recon-spec=dsi_studio_autotrack",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -404,7 +409,7 @@ def test_dipy_mapmri(data_dir, output_dir, working_dir):
"--output-resolution=5",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -445,7 +450,7 @@ def test_dipy_dki(data_dir, output_dir, working_dir):
"--recon-spec=dipy_dki",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -478,7 +483,7 @@ def test_scalar_mapper(data_dir, output_dir, working_dir):
"--nthreads=1",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -512,7 +517,7 @@ def test_pyafq_recon_external_trk(data_dir, output_dir, working_dir):
"--recon-spec=mrtrix_multishell_msmt_pyafq_tractometry",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -546,7 +551,7 @@ def test_pyafq_recon_full(data_dir, output_dir, working_dir):
"--recon-spec=pyafq_tractometry",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -586,7 +591,7 @@ def test_mrtrix3_recon(data_dir, output_dir, working_dir):
"4S156Parcels",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


@pytest.mark.integration
@@ -617,10 +622,10 @@ def test_tortoise_recon(data_dir, output_dir, working_dir):
"--recon-spec=TORTOISE",
]

_run_and_generate(TEST_NAME, parameters, test_main=True)
_run_and_generate(TEST_NAME, parameters, test_main=False)


def _run_and_generate(test_name, parameters, test_main=True):
def _run_and_generate(test_name, parameters, test_main=False):
from qsirecon import config

# TODO: Add --clean-workdir param to CLI
@@ -637,25 +642,75 @@ def _run_and_generate(test_name, parameters, test_main=True):

assert e.value.code == 0
else:
# XXX: This isn't working because config.execution.fs_license_file is None.
parse_args(parameters)
config_file = config.execution.work_dir / f"config-{config.execution.run_uuid}.toml"
config.loggers.cli.warning(f"Saving config file to {config_file}")
config.execution.fs_license_file = freesurfer_license(config.execution.work_dir)
config.to_filename(config_file)

retval = build_workflow(config_file, retval={})
qsirecon_wf = retval["workflow"]
qsirecon_wf.run()
write_derivative_description(config.execution.fmri_dir, config.execution.output_dir)

build_boilerplate(str(config_file), qsirecon_wf)
generate_reports(
output_level=config.execution.report_output_level,
output_dir=config.execution.output_dir,
run_uuid=config.execution.run_uuid,
qsirecon_suffix="",
config.loggers.workflow.log(
15,
"\n".join(["config:"] + ["\t\t%s" % s for s in config.dumps().splitlines()]),
)

qsirecon_wf.run(**config.nipype.get_plugin())

write_derivative_description(
config.execution.bids_dir,
config.execution.output_dir,
atlases=config.execution.atlases,
dataset_links=config.execution.dataset_links,
)

if config.execution.atlases:
write_atlas_dataset_description(config.execution.output_dir / "atlases")

# Compile list of output folders
qsirecon_suffixes = config.workflow.qsirecon_suffixes
config.loggers.cli.info(f"QSIRecon pipeline suffixes: {qsirecon_suffixes}")
failed_reports = []
for qsirecon_suffix in qsirecon_suffixes:
suffix_dir = str(
config.execution.output_dir / "derivatives" / f"qsirecon-{qsirecon_suffix}"
)

# Add other pipeline-specific suffixes to the dataset links
other_suffixes = [s for s in qsirecon_suffixes if s != qsirecon_suffix]
dataset_links = config.execution.dataset_links.copy()
dataset_links["qsirecon"] = str(config.execution.output_dir)
dataset_links.update(
{
f"qsirecon-{s}": str(
config.execution.output_dir / "derivatives" / f"qsirecon-{s}"
)
for s in other_suffixes
}
)

# Copy the boilerplate files
copy_boilerplate(config.execution.output_dir, suffix_dir)

suffix_failed_reports = generate_reports(
output_level=config.execution.report_output_level,
output_dir=suffix_dir,
run_uuid=config.execution.run_uuid,
qsirecon_suffix=qsirecon_suffix,
)
failed_reports += suffix_failed_reports
write_derivative_description(
config.execution.bids_dir,
suffix_dir,
atlases=config.execution.atlases,
dataset_links=dataset_links,
)
write_bidsignore(suffix_dir)

if failed_reports:
print(failed_reports)

output_list_file = os.path.join(get_test_data_path(), f"{test_name}_outputs.txt")
optional_outputs_list = os.path.join(get_test_data_path(), f"{test_name}_optional_outputs.txt")
if not os.path.isfile(optional_outputs_list):
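To make the per-suffix bookkeeping in _run_and_generate easier to follow, here is an illustrative sketch of the dataset_links mapping built inside the loop above, using hypothetical suffixes and an output path; the real code also copies config.execution.dataset_links before adding these entries.

```python
from pathlib import Path

output_dir = Path("/out/qsirecon")  # hypothetical output directory
qsirecon_suffixes = ["DSIStudio", "TORTOISE"]  # hypothetical pipeline suffixes

for qsirecon_suffix in qsirecon_suffixes:
    # Every other pipeline's derivatives folder becomes a named dataset link.
    other_suffixes = [s for s in qsirecon_suffixes if s != qsirecon_suffix]
    dataset_links = {"qsirecon": str(output_dir)}
    dataset_links.update(
        {
            f"qsirecon-{s}": str(output_dir / "derivatives" / f"qsirecon-{s}")
            for s in other_suffixes
        }
    )
    print(qsirecon_suffix, dataset_links)

# For "DSIStudio" this prints:
# DSIStudio {'qsirecon': '/out/qsirecon',
#            'qsirecon-TORTOISE': '/out/qsirecon/derivatives/qsirecon-TORTOISE'}
```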
(Diffs for the remaining 5 changed files are not shown.)
