From 97aef93b1d2899ca547c813f9e1662a8fae0ae7a Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 27 Mar 2024 18:46:36 -0400 Subject: [PATCH 001/142] MAINT: Update dependencies, move to ruff --- .pre-commit-config.yaml | 30 ++++++++++---------- pyproject.toml | 61 ++++++++++++++++++++++++++++++++++++----- 2 files changed, 69 insertions(+), 22 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4cd27cd7..816907f2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,18 +1,18 @@ +exclude: ".*/data/.*" repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.4.0 + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - - id: check-added-large-files -- repo: https://github.com/psf/black - rev: 22.3.0 + - id: trailing-whitespace + exclude: '.*\.svg' + - id: end-of-file-fixer + exclude: '.*\.svg' + - id: check-yaml + - id: check-json + - id: check-toml + - id: check-added-large-files + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.2.0 hooks: - - id: black - files: ^nibabies/ -- repo: https://github.com/pycqa/isort - rev: 5.10.1 - hooks: - - id: isort - files: ^nibabies/ + - id: ruff + - id: ruff-format diff --git a/pyproject.toml b/pyproject.toml index 93a56cb5..d6b38be2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,7 @@ classifiers = [ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] license = {file = "LICENSE"} requires-python = ">=3.10" @@ -23,19 +24,16 @@ dependencies = [ "nireports >= 23.2.0", "nitime", "nitransforms >= 23.0.1", - "niworkflows >= 1.9.0", -# "niworkflows @ git+https://github.com/nipreps/niworkflows.git@master", + "niworkflows @ git+https://github.com/nipreps/niworkflows.git@master", "numpy >= 1.21.0", "packaging", "pandas", "psutil >= 5.4", "pybids >= 0.15.0", "requests", - "sdcflows ~= 2.5.2", -# "sdcflows @ git+https://github.com/nipreps/sdcflows.git@master", - "smriprep ~= 0.12.1", -# "smriprep @ git+https://github.com/nipreps/smriprep.git@master", - "tedana >= 0.0.12", + "sdcflows @ git+https://github.com/nipreps/sdcflows.git@master", + "smriprep @ git+https://github.com/nipreps/smriprep.git@master", + "tedana >= 23.0.2", "templateflow >= 24.2.0", "toml", ] @@ -97,6 +95,10 @@ raw-options = { version_scheme = "nipreps-calver" } [tool.hatch.build.hooks.vcs] version-file = "nibabies/_version.py" +# +# Developer tool configurations +# + [tool.black] line-length = 99 skip-string-normalization = true @@ -121,3 +123,48 @@ doctest_optionflags = "ALLOW_UNICODE NORMALIZE_WHITESPACE ELLIPSIS" env = "PYTHONHASHSEED=0" filterwarnings = ["ignore::DeprecationWarning"] junit_family = "xunit2" + +[tool.ruff] +line-length = 99 + +[tool.ruff.lint] +extend-select = [ + "F", + "E", + "W", + "I", + "UP", + "YTT", + "S", + "BLE", + "B", + "A", + # "CPY", + "C4", + "DTZ", + "T10", + # "EM", + "EXE", + "FA", + "ISC", + "ICN", + "PT", + "Q", +] +extend-ignore = [ + "S311", # We are not using random for cryptographic purposes + "ISC001", + "S603", +] + +[tool.ruff.lint.flake8-quotes] +inline-quotes = "single" + +[tool.ruff.lint.extend-per-file-ignores] +"*/test_*.py" = ["S101"] +"fmriprep/utils/debug.py" = ["A002", "T100"] +"docs/conf.py" = ["A001"] +"docs/sphinxext/github_link.py" = ["BLE001"] + +[tool.ruff.format] +quote-style = "single" From 
89c3198c30e82355814dc54c6bbdb15598ff32cf Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 28 Mar 2024 14:12:07 -0400 Subject: [PATCH 002/142] MAINT: Update dependencies --- .readthedocs.yml | 4 +++- env.yml | 50 +++++++++++++++++++++++++++--------------------- 2 files changed, 31 insertions(+), 23 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index be478304..a85becf3 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -3,7 +3,7 @@ version: 2 build: os: ubuntu-22.04 tools: - python: "3.10" + python: "3.11" jobs: post_checkout: - git fetch --unshallow @@ -15,3 +15,5 @@ python: path: . extra_requirements: - doc + - method: pip + path: wrapper/ diff --git a/env.yml b/env.yml index 43a10cd5..5416ccca 100644 --- a/env.yml +++ b/env.yml @@ -4,33 +4,39 @@ channels: - conda-forge # Update this ~yearly; last updated April 2023 dependencies: - - python >=3.10,<3.11 + - python =3.11 # Needed for svgo and bids-validator; consider moving to deno - - nodejs=16 + - nodejs=20 # Intel Math Kernel Library for numpy - - mkl=2022.1 - - mkl-service=2.4 + - mkl=2023.2.0 + - mkl-service=2.4.0 + # git-annex for templateflow users with DataLad superdatasets + - git-annex=*=alldep* # Base scientific python stack; required by FSL, so pinned here - - numpy=1.24 - - scipy=1.10 - - matplotlib=3.7 - - pandas=2.0 - - h5py=3.8 + - numpy=1.26 + - scipy=1.11 + - matplotlib=3.8 + - pandas=2.2 + - h5py=3.10 # Dependencies compiled against numpy, best to stick with conda - - nitime=0.9 - - scikit-image=0.20 - - scikit-learn=1.2 + - nitime=0.10 + - scikit-image=0.22 + - scikit-learn=1.4 # Utilities - - graphviz=6.0 + - graphviz=9.0 - pandoc=3.1 - # Workflow dependencies: Convert3D - - convert3d=1.3.0 - - libitk=5.2 # 5.3 fails with c3d - # Workflow dependencies: FSL (versions pinned in 6.0.6.2) - - fsl-bet2=2111.0 - - fsl-flirt=2111.0 - - fsl-fast4=2111.0 - - fsl-fugue=2201.2 + # Workflow dependencies: ANTs + - ants=2.5 + # Workflow dependencies: FSL (versions pinned in 6.0.7.7) + - fsl-bet2=2111.4 + - fsl-flirt=2111.2 + - fsl-fast4=2111.3 + - fsl-fugue=2201.4 - fsl-mcflirt=2111.0 - fsl-miscmaths=2203.2 - - fsl-topup=2203.1 + - fsl-topup=2203.2 + - pip + - pip: + - -r requirements.txt +variables: + FSLOUTPUTTYPE: NIFTI_GZ From 00242781c1b1fbf7f9d62fe6ce594e692e919e8b Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 28 Mar 2024 14:13:01 -0400 Subject: [PATCH 003/142] CHORE: Add Thomas to contributors --- .maint/contributors.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.maint/contributors.json b/.maint/contributors.json index 2a7199d5..1c6fbb31 100644 --- a/.maint/contributors.json +++ b/.maint/contributors.json @@ -1,4 +1,9 @@ [ + { + "affiliation": "Department of Pediatrics, University of Minnesota, MN, USA", + "name": "Madison, Thomas", + "orcid": "0000-0003-3030-6580" + }, { "affiliation": "Montreal Neurological Institute, McGill University", "name": "Huberty, Scott", From d7f886693c8f3105df5ca989a46a35e88cd1be01 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 28 Mar 2024 14:30:03 -0400 Subject: [PATCH 004/142] RF[WIP]: Start fit-apply --- nibabies/workflows/anatomical/base.py | 453 ++++++++++++------------- nibabies/workflows/base.py | 303 +++++++++++------ nibabies/workflows/fit.py | 458 ++++++++++++++++++++++++++ 3 files changed, 889 insertions(+), 325 deletions(-) create mode 100644 nibabies/workflows/fit.py diff --git a/nibabies/workflows/anatomical/base.py b/nibabies/workflows/anatomical/base.py index 489c3a81..fa57315c 100644 --- a/nibabies/workflows/anatomical/base.py +++ 
b/nibabies/workflows/anatomical/base.py
@@ -33,33 +33,33 @@
 ANAT_OUT_FIELDS = [
-    "anat_preproc",
-    "anat_brain",
-    "anat_mask",
-    "anat_dseg",
-    "anat_tpms",
-    "anat_ref_xfms",
-    "std_preproc",
-    "std_brain",
-    "std_dseg",
-    "std_tpms",
-    "subjects_dir",
-    "subject_id",
-    "anat2std_xfm",
-    "std2anat_xfm",
-    "anat2fsnative_xfm",
-    "fsnative2anat_xfm",
-    "surfaces",
-    "morphometrics",
-    "anat_aseg",
-    "anat_mcrib",
-    "anat_aparc",
-    "anat_ribbon",
-    "template",
+    'anat_preproc',
+    'anat_brain',
+    'anat_mask',
+    'anat_dseg',
+    'anat_tpms',
+    'anat_ref_xfms',
+    'std_preproc',
+    'std_brain',
+    'std_dseg',
+    'std_tpms',
+    'subjects_dir',
+    'subject_id',
+    'anat2std_xfm',
+    'std2anat_xfm',
+    'anat2fsnative_xfm',
+    'fsnative2anat_xfm',
+    'surfaces',
+    'morphometrics',
+    'anat_aseg',
+    'anat_mcrib',
+    'anat_aparc',
+    'anat_ribbon',
+    'template',
     # registration sphere space is dependent on surface recon method
-    "sphere_reg",
-    "sphere_reg_fsLR",
-    "midthickness_fsLR",
+    'sphere_reg',
+    'sphere_reg_fsLR',
+    'midthickness_fsLR',
 ]
@@ -72,7 +72,6 @@ def init_infant_anat_wf(
     contrast: ty.Literal['T1w', 'T2w'],
     bids_root: str | Path,
     derivatives: Derivatives,
-    freesurfer: bool,
     hires: bool | None,
     longitudinal: bool,
     omp_nthreads: int,
@@ -83,7 +82,7 @@
     sloppy: bool,
     spaces: SpatialReferences,
     cifti_output: ty.Literal['91k', '170k'] | None,
-    name: str = "infant_anat_wf",
+    name: str = 'infant_anat_wf',
 ) -> LiterateWorkflow:
     """
@@ -136,7 +135,7 @@
     """
     if not t1w or not t2w:
        # Error type?
-        raise RuntimeError("Both T1w and T2w images are required to run this workflow.")
+        raise RuntimeError('Both T1w and T2w images are required to run this workflow.')

     num_t1w = len(t1w)
     num_t2w = len(t2w)
@@ -158,7 +157,7 @@
     t2w_aseg = bool(derivatives.t2w_aseg)

     # The T2 derivatives are only prioritized first if MCRIBS reconstruction is to be used.
-    if recon_method == "mcribs":
+    if recon_method == 'mcribs':
         if t2w_aseg:
             t1w_aseg = False
         if t2w_mask:
@@ -170,7 +169,7 @@
         t2w_aseg = False

     config.loggers.workflow.info(
-        "Derivatives used:\n\t<T1w mask: %s>\n\t<T1w aseg: %s>\n\t<T2w mask: %s>\n\t<T2w aseg: %s>",
+        'Derivatives used:\n\t<T1w mask: %s>\n\t<T1w aseg: %s>\n\t<T2w mask: %s>\n\t<T2w aseg: %s>',
         t1w_mask,
         t1w_aseg,
         t2w_mask,
         t2w_aseg,
@@ -184,16 +183,16 @@
     )

     wf.__desc__ = desc.format(
-        ants_ver=ANTsInfo.version() or "(version unknown)",
+        ants_ver=ANTsInfo.version() or '(version unknown)',
         skullstrip_tpl=skull_strip_template.fullname,
     )
-    wf.__postdesc__ = ""
+    wf.__postdesc__ = ''

     inputnode = pe.Node(
-        niu.IdentityInterface(fields=["t1w", "t2w", "subject_id", "subjects_dir"]),  # FLAIR / ROI?
-        name="inputnode",
+        niu.IdentityInterface(fields=['t1w', 't2w', 'subject_id', 'subjects_dir']),  # FLAIR / ROI?
+ name='inputnode', ) - outputnode = pe.Node(niu.IdentityInterface(fields=ANAT_OUT_FIELDS), name="outputnode") + outputnode = pe.Node(niu.IdentityInterface(fields=ANAT_OUT_FIELDS), name='outputnode') # Define output workflows anat_reports_wf = init_anat_reports_wf( @@ -211,39 +210,39 @@ def init_infant_anat_wf( ) t1w_template_wf = init_anat_template_wf( - contrast="T1w", + contrast='T1w', num_files=num_t1w, longitudinal=longitudinal, omp_nthreads=omp_nthreads, sloppy=sloppy, has_mask=t1w_mask, has_aseg=t1w_aseg, - name="t1w_template_wf", + name='t1w_template_wf', ) t2w_template_wf = init_anat_template_wf( - contrast="T2w", + contrast='T2w', num_files=num_t2w, longitudinal=longitudinal, omp_nthreads=omp_nthreads, sloppy=sloppy, has_mask=t2w_mask, has_aseg=t2w_aseg, - name="t2w_template_wf", + name='t2w_template_wf', ) # Clean up each anatomical template # Denoise, INU, + Clipping - t1w_preproc_wf = init_anat_preproc_wf(name="t1w_preproc_wf") - t2w_preproc_wf = init_anat_preproc_wf(name="t2w_preproc_wf") + t1w_preproc_wf = init_anat_preproc_wf(name='t1w_preproc_wf') + t2w_preproc_wf = init_anat_preproc_wf(name='t2w_preproc_wf') - if skull_strip_mode != "force": - raise NotImplementedError("Skull stripping is currently required.") + if skull_strip_mode != 'force': + raise NotImplementedError('Skull stripping is currently required.') coregistration_wf = init_coregistration_wf( omp_nthreads=omp_nthreads, sloppy=sloppy, - debug="registration" in config.execution.debug, + debug='registration' in config.execution.debug, t1w_mask=t1w_mask, probmap=not t2w_mask, ) @@ -269,78 +268,78 @@ def init_infant_anat_wf( # fmt:off wf.connect([ - (inputnode, t1w_template_wf, [("t1w", "inputnode.anat_files")]), - (inputnode, t2w_template_wf, [("t2w", "inputnode.anat_files")]), - (inputnode, anat_reports_wf, [("t1w", "inputnode.source_file")]), - (inputnode, coreg_report_wf, [("t1w", "inputnode.source_file")]), - (inputnode, anat_norm_wf, [(("t1w", fix_multi_source_name), "inputnode.orig_t1w")]), + (inputnode, t1w_template_wf, [('t1w', 'inputnode.anat_files')]), + (inputnode, t2w_template_wf, [('t2w', 'inputnode.anat_files')]), + (inputnode, anat_reports_wf, [('t1w', 'inputnode.source_file')]), + (inputnode, coreg_report_wf, [('t1w', 'inputnode.source_file')]), + (inputnode, anat_norm_wf, [(('t1w', fix_multi_source_name), 'inputnode.orig_t1w')]), (t1w_template_wf, outputnode, [ - ("outputnode.anat_realign_xfm", "anat_ref_xfms")]), - (t1w_template_wf, t1w_preproc_wf, [("outputnode.anat_ref", "inputnode.in_anat")]), + ('outputnode.anat_realign_xfm', 'anat_ref_xfms')]), + (t1w_template_wf, t1w_preproc_wf, [('outputnode.anat_ref', 'inputnode.in_anat')]), (t1w_template_wf, anat_derivatives_wf, [ - ("outputnode.anat_valid_list", "inputnode.t1w_source_files"), - ("outputnode.anat_realign_xfm", "inputnode.t1w_ref_xfms")]), + ('outputnode.anat_valid_list', 'inputnode.t1w_source_files'), + ('outputnode.anat_realign_xfm', 'inputnode.t1w_ref_xfms')]), (t1w_template_wf, anat_reports_wf, [ - ("outputnode.out_report", "inputnode.anat_conform_report")]), + ('outputnode.out_report', 'inputnode.anat_conform_report')]), - (t2w_template_wf, t2w_preproc_wf, [("outputnode.anat_ref", "inputnode.in_anat")]), + (t2w_template_wf, t2w_preproc_wf, [('outputnode.anat_ref', 'inputnode.in_anat')]), (t2w_template_wf, anat_derivatives_wf, [ - ("outputnode.anat_valid_list", "inputnode.t2w_source_files"), - ("outputnode.anat_realign_xfm", "inputnode.t2w_ref_xfms")]), + ('outputnode.anat_valid_list', 'inputnode.t2w_source_files'), + 
('outputnode.anat_realign_xfm', 'inputnode.t2w_ref_xfms')]), - (t1w_preproc_wf, coregistration_wf, [("outputnode.anat_preproc", "inputnode.in_t1w")]), - (t1w_preproc_wf, coreg_report_wf, [("outputnode.anat_preproc", "inputnode.t1w_preproc")]), + (t1w_preproc_wf, coregistration_wf, [('outputnode.anat_preproc', 'inputnode.in_t1w')]), + (t1w_preproc_wf, coreg_report_wf, [('outputnode.anat_preproc', 'inputnode.t1w_preproc')]), (coregistration_wf, coreg_report_wf, [ - ("outputnode.t1w_mask", "inputnode.in_mask"), - ("outputnode.t2w_preproc", "inputnode.t2w_preproc")]), + ('outputnode.t1w_mask', 'inputnode.in_mask'), + ('outputnode.t2w_preproc', 'inputnode.t2w_preproc')]), (coregistration_wf, anat_norm_wf, [ - ("outputnode.t1w_preproc", "inputnode.moving_image"), - ("outputnode.t1w_mask", "inputnode.moving_mask")]), - (coregistration_wf, anat_seg_wf, [("outputnode.t1w_brain", "inputnode.anat_brain")]), + ('outputnode.t1w_preproc', 'inputnode.moving_image'), + ('outputnode.t1w_mask', 'inputnode.moving_mask')]), + (coregistration_wf, anat_seg_wf, [('outputnode.t1w_brain', 'inputnode.anat_brain')]), (coregistration_wf, anat_derivatives_wf, [ - ("outputnode.t1w_mask", "inputnode.anat_mask"), - ("outputnode.t1w_preproc", "inputnode.t1w_preproc"), - ("outputnode.t2w_preproc", "inputnode.t2w_preproc"), + ('outputnode.t1w_mask', 'inputnode.anat_mask'), + ('outputnode.t1w_preproc', 'inputnode.t1w_preproc'), + ('outputnode.t2w_preproc', 'inputnode.t2w_preproc'), ]), (coregistration_wf, outputnode, [ - ("outputnode.t1w_preproc", "anat_preproc"), - ("outputnode.t1w_brain", "anat_brain"), - ("outputnode.t1w_mask", "anat_mask"), + ('outputnode.t1w_preproc', 'anat_preproc'), + ('outputnode.t1w_brain', 'anat_brain'), + ('outputnode.t1w_mask', 'anat_mask'), ]), (anat_seg_wf, outputnode, [ - ("outputnode.anat_dseg", "anat_dseg"), - ("outputnode.anat_tpms", "anat_tpms")]), + ('outputnode.anat_dseg', 'anat_dseg'), + ('outputnode.anat_tpms', 'anat_tpms')]), (anat_seg_wf, anat_derivatives_wf, [ - ("outputnode.anat_dseg", "inputnode.anat_dseg"), - ("outputnode.anat_tpms", "inputnode.anat_tpms"), + ('outputnode.anat_dseg', 'inputnode.anat_dseg'), + ('outputnode.anat_tpms', 'inputnode.anat_tpms'), ]), (anat_seg_wf, anat_norm_wf, [ - ("outputnode.anat_dseg", "inputnode.moving_segmentation"), - ("outputnode.anat_tpms", "inputnode.moving_tpms")]), + ('outputnode.anat_dseg', 'inputnode.moving_segmentation'), + ('outputnode.anat_tpms', 'inputnode.moving_tpms')]), - (anat_norm_wf, anat_reports_wf, [("poutputnode.template", "inputnode.template")]), + (anat_norm_wf, anat_reports_wf, [('poutputnode.template', 'inputnode.template')]), (anat_norm_wf, outputnode, [ - ("poutputnode.standardized", "std_preproc"), - ("poutputnode.std_mask", "std_mask"), - ("poutputnode.std_dseg", "std_dseg"), - ("poutputnode.std_tpms", "std_tpms"), - ("outputnode.template", "template"), - ("outputnode.anat2std_xfm", "anat2std_xfm"), - ("outputnode.std2anat_xfm", "std2anat_xfm")]), + ('poutputnode.standardized', 'std_preproc'), + ('poutputnode.std_mask', 'std_mask'), + ('poutputnode.std_dseg', 'std_dseg'), + ('poutputnode.std_tpms', 'std_tpms'), + ('outputnode.template', 'template'), + ('outputnode.anat2std_xfm', 'anat2std_xfm'), + ('outputnode.std2anat_xfm', 'std2anat_xfm')]), (anat_norm_wf, anat_derivatives_wf, [ - ("outputnode.template", "inputnode.template"), - ("outputnode.anat2std_xfm", "inputnode.anat2std_xfm"), - ("outputnode.std2anat_xfm", "inputnode.std2anat_xfm")]), + ('outputnode.template', 'inputnode.template'), + 
('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), + ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm')]), (outputnode, anat_reports_wf, [ - ("anat_preproc", "inputnode.anat_preproc"), - ("anat_mask", "inputnode.anat_mask"), - ("anat_dseg", "inputnode.anat_dseg"), - ("std_preproc", "inputnode.std_t1w"), - ("std_mask", "inputnode.std_mask"), + ('anat_preproc', 'inputnode.anat_preproc'), + ('anat_mask', 'inputnode.anat_mask'), + ('anat_dseg', 'inputnode.anat_dseg'), + ('std_preproc', 'inputnode.std_t1w'), + ('std_mask', 'inputnode.std_mask'), ]), ]) @@ -355,7 +354,9 @@ def init_infant_anat_wf( ) if derivatives: wf.connect([ - (coregistration_wf, coreg_deriv_wf, [('outputnode.t1w2t2w_xfm', 'inputnode.t1w2t2w_xfm')]), + (coregistration_wf, coreg_deriv_wf, [ + ('outputnode.t1w2t2w_xfm', 'inputnode.t1w2t2w_xfm'), + ]), (t1w_preproc_wf, coreg_deriv_wf, [('outputnode.anat_preproc', 'inputnode.t1w_ref')]), (t2w_preproc_wf, coreg_deriv_wf, [('outputnode.anat_preproc', 'inputnode.t2w_ref')]), ]) @@ -391,16 +392,16 @@ def init_infant_anat_wf( template_specs=skull_strip_template.spec, omp_nthreads=omp_nthreads, sloppy=sloppy, - debug="registration" in config.execution.debug, + debug='registration' in config.execution.debug, ) # fmt:off wf.connect([ (t2w_preproc_wf, brain_extraction_wf, [ - ("outputnode.anat_preproc", "inputnode.t2w_preproc")]), + ('outputnode.anat_preproc', 'inputnode.t2w_preproc')]), (brain_extraction_wf, coregistration_wf, [ - ("outputnode.t2w_preproc", "inputnode.in_t2w"), - ("outputnode.out_mask", "inputnode.in_mask"), - ("outputnode.out_probmap", "inputnode.in_probmap")]), + ('outputnode.t2w_preproc', 'inputnode.in_t2w'), + ('outputnode.out_mask', 'inputnode.in_mask'), + ('outputnode.out_probmap', 'inputnode.in_probmap')]), ]) # fmt:on @@ -414,7 +415,9 @@ def init_infant_anat_wf( # fmt:off wf.connect([ (t1w_template_wf, deriv_buffer, [('outputnode.anat_aseg', 't1w_aseg')]), - (t1w_template_wf, coreg_deriv_wf, [('outputnode.anat_aseg', 'inputnode.t1w_aseg')]), + (t1w_template_wf, coreg_deriv_wf, [ + ('outputnode.anat_aseg', 'inputnode.t1w_aseg'), + ]), (coreg_deriv_wf, deriv_buffer, [('outputnode.t2w_aseg', 't2w_aseg')]), ]) # fmt:on @@ -424,7 +427,9 @@ def init_infant_anat_wf( # fmt:off wf.connect([ (t2w_template_wf, deriv_buffer, [('outputnode.anat_aseg', 't2w_aseg')]), - (t2w_template_wf, coreg_deriv_wf, [('outputnode.anat_aseg', 'inputnode.t2w_aseg')]), + (t2w_template_wf, coreg_deriv_wf, [ + ('outputnode.anat_aseg', 'inputnode.t2w_aseg'), + ]), (coreg_deriv_wf, deriv_buffer, [('outputnode.t1w_aseg', 't1w_aseg')]), ]) # fmt:on @@ -453,7 +458,7 @@ def init_infant_anat_wf( # Denoise template T2w, since using the template / preproc resulted in intersection errors denoise_t2w = pe.Node( - DenoiseImage(dimension=3, noise_model="Rician"), name='denoise_t2w' + DenoiseImage(dimension=3, noise_model='Rician'), name='denoise_t2w' ) # t2w mask, t2w aseg surface_recon_wf = init_mcribs_surface_recon_wf( @@ -485,9 +490,9 @@ def init_infant_anat_wf( # fmt:off wf.connect([ (t2w_preproc_wf, surface_recon_wf, [ - ("outputnode.anat_preproc", "inputnode.t2w")]), + ('outputnode.anat_preproc', 'inputnode.t2w')]), (anat_seg_wf, surface_recon_wf, [ - ("outputnode.anat_aseg", "inputnode.ants_segs")]), + ('outputnode.anat_aseg', 'inputnode.ants_segs')]), ]) # fmt:on @@ -497,52 +502,52 @@ def init_infant_anat_wf( # fmt:off wf.connect([ (inputnode, surface_recon_wf, [ - ("subject_id", "inputnode.subject_id"), - ("subjects_dir", "inputnode.subjects_dir")]), + ('subject_id', 'inputnode.subject_id'), 
+ ('subjects_dir', 'inputnode.subjects_dir')]), (t1w_template_wf, surface_recon_wf, [ - ("outputnode.anat_ref", "inputnode.t1w"), + ('outputnode.anat_ref', 'inputnode.t1w'), ]), (coregistration_wf, surface_recon_wf, [ - ("outputnode.t1w_brain", "inputnode.skullstripped_t1"), - ("outputnode.t1w_preproc", "inputnode.corrected_t1"), + ('outputnode.t1w_brain', 'inputnode.skullstripped_t1'), + ('outputnode.t1w_preproc', 'inputnode.corrected_t1'), ]), (surface_recon_wf, outputnode, [ - ("outputnode.subjects_dir", "subjects_dir"), - ("outputnode.subject_id", "subject_id"), - ("outputnode.t1w2fsnative_xfm", "anat2fsnative_xfm"), - ("outputnode.fsnative2t1w_xfm", "fsnative2anat_xfm"), - ("outputnode.surfaces", "surfaces"), - ("outputnode.morphometrics", "morphometrics"), - ("outputnode.out_aparc", "anat_aparc"), - ("outputnode.out_aseg", "anat_aseg"), + ('outputnode.subjects_dir', 'subjects_dir'), + ('outputnode.subject_id', 'subject_id'), + ('outputnode.t1w2fsnative_xfm', 'anat2fsnative_xfm'), + ('outputnode.fsnative2t1w_xfm', 'fsnative2anat_xfm'), + ('outputnode.surfaces', 'surfaces'), + ('outputnode.morphometrics', 'morphometrics'), + ('outputnode.out_aparc', 'anat_aparc'), + ('outputnode.out_aseg', 'anat_aseg'), ]), (coregistration_wf, anat_ribbon_wf, [ - ("outputnode.t1w_mask", "inputnode.t1w_mask"), + ('outputnode.t1w_mask', 'inputnode.t1w_mask'), ]), (surface_recon_wf, anat_ribbon_wf, [ - ("outputnode.surfaces", "inputnode.surfaces"), + ('outputnode.surfaces', 'inputnode.surfaces'), ]), (anat_ribbon_wf, outputnode, [ - ("outputnode.anat_ribbon", "anat_ribbon") + ('outputnode.anat_ribbon', 'anat_ribbon') ]), (anat_ribbon_wf, anat_derivatives_wf, [ - ("outputnode.anat_ribbon", "inputnode.anat_ribbon"), + ('outputnode.anat_ribbon', 'inputnode.anat_ribbon'), ]), (surface_recon_wf, sphere_reg_wf, [ ('outputnode.subject_id', 'inputnode.subject_id'), ('outputnode.subjects_dir', 'inputnode.subjects_dir'), ]), (surface_recon_wf, anat_reports_wf, [ - ("outputnode.subject_id", "inputnode.subject_id"), - ("outputnode.subjects_dir", "inputnode.subjects_dir"), + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), ]), (surface_recon_wf, anat_derivatives_wf, [ - ("outputnode.out_aseg", "inputnode.anat_fs_aseg"), - ("outputnode.out_aparc", "inputnode.anat_fs_aparc"), - ("outputnode.t1w2fsnative_xfm", "inputnode.anat2fsnative_xfm"), - ("outputnode.fsnative2t1w_xfm", "inputnode.fsnative2anat_xfm"), - ("outputnode.surfaces", "inputnode.surfaces"), - ("outputnode.morphometrics", "inputnode.morphometrics"), + ('outputnode.out_aseg', 'inputnode.anat_fs_aseg'), + ('outputnode.out_aparc', 'inputnode.anat_fs_aparc'), + ('outputnode.t1w2fsnative_xfm', 'inputnode.anat2fsnative_xfm'), + ('outputnode.fsnative2t1w_xfm', 'inputnode.fsnative2anat_xfm'), + ('outputnode.surfaces', 'inputnode.surfaces'), + ('outputnode.morphometrics', 'inputnode.morphometrics'), ]), (sphere_reg_wf, outputnode, [ ('outputnode.sphere_reg', 'sphere_reg'), @@ -558,7 +563,7 @@ def init_infant_anat_wf( init_anat_fsLR_resampling_wf, ) - is_mcribs = recon_method == "mcribs" + is_mcribs = recon_method == 'mcribs' # handles morph_grayords_wf anat_fsLR_resampling_wf = init_anat_fsLR_resampling_wf(cifti_output, mcribs=is_mcribs) anat_derivatives_wf.get_node('inputnode').inputs.cifti_density = cifti_output @@ -573,10 +578,10 @@ def init_infant_anat_wf( ('outputnode.surfaces', 'inputnode.surfaces'), ('outputnode.morphometrics', 'inputnode.morphometrics')]), (anat_fsLR_resampling_wf, 
anat_derivatives_wf, [
-        ("outputnode.cifti_morph", "inputnode.cifti_morph"),
-        ("outputnode.cifti_metadata", "inputnode.cifti_metadata")]),
+        ('outputnode.cifti_morph', 'inputnode.cifti_morph'),
+        ('outputnode.cifti_metadata', 'inputnode.cifti_metadata')]),
         (anat_fsLR_resampling_wf, outputnode, [
-            ("outputnode.midthickness_fsLR", "midthickness_fsLR")])
+            ('outputnode.midthickness_fsLR', 'midthickness_fsLR')])
     ])
     # fmt:on
@@ -603,17 +608,17 @@
     sloppy: bool,
     spaces: SpatialReferences,
     cifti_output: ty.Literal['91k', '170k'] | None,
-    name: str = "infant_single_anat_wf",
+    name: str = 'infant_single_anat_wf',
 ) -> LiterateWorkflow:
     """"""
     if t1w and t2w:
         # Error type?
         raise RuntimeError(
-            "This workflow uses only T1w or T2w inputs, but both contrasts are available."
+            'This workflow uses only T1w or T2w inputs, but both contrasts are available.'
         )
     if not (t1w or t2w):
-        raise RuntimeError("This workflow requires either a T1w or T2w, but none were found.")
+        raise RuntimeError('This workflow requires either a T1w or T2w, but none were found.')

     anat_files = t1w or t2w
     num_files = len(anat_files)
@@ -628,17 +633,17 @@
         aseg = derivatives.t2w_aseg

     config.loggers.workflow.info(
-        f"Derivatives used (%s):\n\t\t<mask: %s>\n\t\t<aseg: %s>\n",
+        'Derivatives used (%s):\n\t\t<mask: %s>\n\t\t<aseg: %s>\n',
         contrast,
         bool(mask),
         bool(aseg),
     )

     inputnode = pe.Node(
-        niu.IdentityInterface(fields=["t1w", "t2w", "subject_id", "subjects_dir"]),  # FLAIR / ROI?
-        name="inputnode",
+        niu.IdentityInterface(fields=['t1w', 't2w', 'subject_id', 'subjects_dir']),  # FLAIR / ROI?
+        name='inputnode',
     )
-    outputnode = pe.Node(niu.IdentityInterface(fields=ANAT_OUT_FIELDS), name="outputnode")
+    outputnode = pe.Node(niu.IdentityInterface(fields=ANAT_OUT_FIELDS), name='outputnode')

     desc = _gen_anat_wf_desc(
         t1w=t1w or None,
@@ -646,10 +651,10 @@
         mask=bool(mask),
     )
     workflow.__desc__ = desc.format(
-        ants_ver=ANTsInfo.version() or "(version unknown)",
+        ants_ver=ANTsInfo.version() or '(version unknown)',
         skullstrip_tpl=skull_strip_template.fullname,
     )
-    workflow.__postdesc__ = ""
+    workflow.__postdesc__ = ''

     # outputs
     recon_method = config.workflow.surface_recon_method  # TODO: Make workflow parameter
@@ -677,10 +682,10 @@
         sloppy=sloppy,
         has_mask=bool(mask),
         has_aseg=bool(aseg),
-        name=f"{contrast.lower()}_template_wf",
+        name=f'{contrast.lower()}_template_wf',
     )
     # preproc
-    anat_preproc_wf = init_anat_preproc_wf(name=f"{contrast.lower()}_preproc_wf")
+    anat_preproc_wf = init_anat_preproc_wf(name=f'{contrast.lower()}_preproc_wf')
     # T2-only brain extraction
     anat_seg_wf = init_anat_segmentations_wf(
         anat_modality=contrast,
@@ -730,7 +735,7 @@
         template_specs=skull_strip_template.spec,
         omp_nthreads=omp_nthreads,
         sloppy=sloppy,
-        debug="registration" in config.execution.debug,
+        debug='registration' in config.execution.debug,
     )
     # fmt:off
     workflow.connect([
@@ -753,19 +758,21 @@

     # fmt:off
     workflow.connect([
-        (inputnode, anat_template_wf, [(contrast.lower(), "inputnode.anat_files")]),
-        (inputnode, anat_reports_wf, [(contrast.lower(), "inputnode.source_file")]),
-        (inputnode, anat_norm_wf, [((contrast.lower(), fix_multi_source_name), "inputnode.orig_t1w")]),
+        (inputnode, anat_template_wf, [(contrast.lower(), 'inputnode.anat_files')]),
+        (inputnode, anat_reports_wf, [(contrast.lower(), 'inputnode.source_file')]),
+        (inputnode, anat_norm_wf, [
+            ((contrast.lower(),
fix_multi_source_name), 'inputnode.orig_t1w'), + ]), (anat_template_wf, outputnode, [ - ("outputnode.anat_realign_xfm", "anat_ref_xfms")]), + ('outputnode.anat_realign_xfm', 'anat_ref_xfms')]), (anat_template_wf, anat_preproc_wf, [ - ("outputnode.anat_ref", "inputnode.in_anat")]), + ('outputnode.anat_ref', 'inputnode.in_anat')]), (anat_template_wf, anat_derivatives_wf, [ - ("outputnode.anat_valid_list", f"inputnode.{contrast.lower()}_source_files"), - ("outputnode.anat_realign_xfm", f"inputnode.{contrast.lower()}_ref_xfms")]), + ('outputnode.anat_valid_list', f'inputnode.{contrast.lower()}_source_files'), + ('outputnode.anat_realign_xfm', f'inputnode.{contrast.lower()}_ref_xfms')]), (anat_template_wf, anat_reports_wf, [ - ("outputnode.out_report", "inputnode.anat_conform_report")]), + ('outputnode.out_report', 'inputnode.anat_conform_report')]), (anat_preproc_wf, anat_norm_wf, [ ('outputnode.anat_preproc', 'inputnode.moving_image')]), (anat_preproc_wf, outputnode, [ @@ -778,35 +785,35 @@ def init_infant_single_anat_wf( ('anat_mask', 'anat_mask')]), (mask_buffer, anat_seg_wf, [('anat_brain', 'inputnode.anat_brain')]), (anat_seg_wf, outputnode, [ - ("outputnode.anat_dseg", "anat_dseg"), - ("outputnode.anat_tpms", "anat_tpms")]), + ('outputnode.anat_dseg', 'anat_dseg'), + ('outputnode.anat_tpms', 'anat_tpms')]), (anat_seg_wf, anat_derivatives_wf, [ - ("outputnode.anat_dseg", "inputnode.anat_dseg"), - ("outputnode.anat_tpms", "inputnode.anat_tpms")]), + ('outputnode.anat_dseg', 'inputnode.anat_dseg'), + ('outputnode.anat_tpms', 'inputnode.anat_tpms')]), (mask_buffer, anat_norm_wf, [ ('anat_mask', 'inputnode.moving_mask')]), (anat_seg_wf, anat_norm_wf, [ - ("outputnode.anat_dseg", "inputnode.moving_segmentation"), - ("outputnode.anat_tpms", "inputnode.moving_tpms")]), - (anat_norm_wf, anat_reports_wf, [("poutputnode.template", "inputnode.template")]), + ('outputnode.anat_dseg', 'inputnode.moving_segmentation'), + ('outputnode.anat_tpms', 'inputnode.moving_tpms')]), + (anat_norm_wf, anat_reports_wf, [('poutputnode.template', 'inputnode.template')]), (anat_norm_wf, outputnode, [ - ("poutputnode.standardized", "std_preproc"), - ("poutputnode.std_mask", "std_mask"), - ("poutputnode.std_dseg", "std_dseg"), - ("poutputnode.std_tpms", "std_tpms"), - ("outputnode.template", "template"), - ("outputnode.anat2std_xfm", "anat2std_xfm"), - ("outputnode.std2anat_xfm", "std2anat_xfm")]), + ('poutputnode.standardized', 'std_preproc'), + ('poutputnode.std_mask', 'std_mask'), + ('poutputnode.std_dseg', 'std_dseg'), + ('poutputnode.std_tpms', 'std_tpms'), + ('outputnode.template', 'template'), + ('outputnode.anat2std_xfm', 'anat2std_xfm'), + ('outputnode.std2anat_xfm', 'std2anat_xfm')]), (anat_norm_wf, anat_derivatives_wf, [ - ("outputnode.template", "inputnode.template"), - ("outputnode.anat2std_xfm", "inputnode.anat2std_xfm"), - ("outputnode.std2anat_xfm", "inputnode.std2anat_xfm")]), + ('outputnode.template', 'inputnode.template'), + ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), + ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm')]), (outputnode, anat_reports_wf, [ - ("anat_preproc", "inputnode.anat_preproc"), - ("anat_mask", "inputnode.anat_mask"), - ("anat_dseg", "inputnode.anat_dseg"), - ("std_preproc", "inputnode.std_t1w"), - ("std_mask", "inputnode.std_mask"), + ('anat_preproc', 'inputnode.anat_preproc'), + ('anat_mask', 'inputnode.anat_mask'), + ('anat_dseg', 'inputnode.anat_dseg'), + ('std_preproc', 'inputnode.std_t1w'), + ('std_mask', 'inputnode.std_mask'), ]), ]) # fmt:on @@ -871,52 
+878,52 @@ def init_infant_single_anat_wf( # fmt:off workflow.connect([ (inputnode, surface_recon_wf, [ - ("subject_id", "inputnode.subject_id"), - ("subjects_dir", "inputnode.subjects_dir")]), + ('subject_id', 'inputnode.subject_id'), + ('subjects_dir', 'inputnode.subjects_dir')]), (anat_template_wf, surface_recon_wf, [ - ("outputnode.anat_ref", "inputnode.t1w"), + ('outputnode.anat_ref', 'inputnode.t1w'), ]), (mask_buffer, surface_recon_wf, [ - ("anat_brain", "inputnode.skullstripped_t1")]), + ('anat_brain', 'inputnode.skullstripped_t1')]), (anat_preproc_wf, surface_recon_wf, [ - ("outputnode.anat_preproc", "inputnode.corrected_t1")]), + ('outputnode.anat_preproc', 'inputnode.corrected_t1')]), (surface_recon_wf, outputnode, [ - ("outputnode.subjects_dir", "subjects_dir"), - ("outputnode.subject_id", "subject_id"), - ("outputnode.t1w2fsnative_xfm", "anat2fsnative_xfm"), - ("outputnode.fsnative2t1w_xfm", "fsnative2anat_xfm"), - ("outputnode.surfaces", "surfaces"), - ("outputnode.morphometrics", "morphometrics"), - ("outputnode.out_aparc", "anat_aparc"), - ("outputnode.out_aseg", "anat_aseg"), + ('outputnode.subjects_dir', 'subjects_dir'), + ('outputnode.subject_id', 'subject_id'), + ('outputnode.t1w2fsnative_xfm', 'anat2fsnative_xfm'), + ('outputnode.fsnative2t1w_xfm', 'fsnative2anat_xfm'), + ('outputnode.surfaces', 'surfaces'), + ('outputnode.morphometrics', 'morphometrics'), + ('outputnode.out_aparc', 'anat_aparc'), + ('outputnode.out_aseg', 'anat_aseg'), ]), (mask_buffer, anat_ribbon_wf, [ - ("anat_mask", "inputnode.t1w_mask"), + ('anat_mask', 'inputnode.t1w_mask'), ]), (surface_recon_wf, anat_ribbon_wf, [ - ("outputnode.surfaces", "inputnode.surfaces"), + ('outputnode.surfaces', 'inputnode.surfaces'), ]), (anat_ribbon_wf, outputnode, [ - ("outputnode.anat_ribbon", "anat_ribbon") + ('outputnode.anat_ribbon', 'anat_ribbon') ]), (anat_ribbon_wf, anat_derivatives_wf, [ - ("outputnode.anat_ribbon", "inputnode.anat_ribbon"), + ('outputnode.anat_ribbon', 'inputnode.anat_ribbon'), ]), (surface_recon_wf, sphere_reg_wf, [ ('outputnode.subject_id', 'inputnode.subject_id'), ('outputnode.subjects_dir', 'inputnode.subjects_dir'), ]), (surface_recon_wf, anat_reports_wf, [ - ("outputnode.subject_id", "inputnode.subject_id"), - ("outputnode.subjects_dir", "inputnode.subjects_dir"), + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), ]), (surface_recon_wf, anat_derivatives_wf, [ - ("outputnode.out_aseg", "inputnode.anat_fs_aseg"), - ("outputnode.out_aparc", "inputnode.anat_fs_aparc"), - ("outputnode.t1w2fsnative_xfm", "inputnode.anat2fsnative_xfm"), - ("outputnode.fsnative2t1w_xfm", "inputnode.fsnative2anat_xfm"), - ("outputnode.surfaces", "inputnode.surfaces"), - ("outputnode.morphometrics", "inputnode.morphometrics"), + ('outputnode.out_aseg', 'inputnode.anat_fs_aseg'), + ('outputnode.out_aparc', 'inputnode.anat_fs_aparc'), + ('outputnode.t1w2fsnative_xfm', 'inputnode.anat2fsnative_xfm'), + ('outputnode.fsnative2t1w_xfm', 'inputnode.fsnative2anat_xfm'), + ('outputnode.surfaces', 'inputnode.surfaces'), + ('outputnode.morphometrics', 'inputnode.morphometrics'), ]), (sphere_reg_wf, outputnode, [ ('outputnode.sphere_reg', 'sphere_reg'), @@ -932,7 +939,7 @@ def init_infant_single_anat_wf( init_anat_fsLR_resampling_wf, ) - is_mcribs = recon_method == "mcribs" + is_mcribs = recon_method == 'mcribs' # handles morph_grayords_wf anat_fsLR_resampling_wf = init_anat_fsLR_resampling_wf(cifti_output, mcribs=is_mcribs) 
anat_derivatives_wf.get_node('inputnode').inputs.cifti_density = cifti_output @@ -947,10 +954,10 @@ def init_infant_single_anat_wf( ('outputnode.surfaces', 'inputnode.surfaces'), ('outputnode.morphometrics', 'inputnode.morphometrics')]), (anat_fsLR_resampling_wf, anat_derivatives_wf, [ - ("outputnode.cifti_morph", "inputnode.cifti_morph"), - ("outputnode.cifti_metadata", "inputnode.cifti_metadata")]), + ('outputnode.cifti_morph', 'inputnode.cifti_morph'), + ('outputnode.cifti_metadata', 'inputnode.cifti_metadata')]), (anat_fsLR_resampling_wf, outputnode, [ - ("outputnode.midthickness_fsLR", "midthickness_fsLR")]) + ('outputnode.midthickness_fsLR', 'midthickness_fsLR')]) ]) # fmt:on return workflow @@ -973,52 +980,52 @@ def _gen_anat_wf_desc(t1w: list | None, t2w: list | None, mask: bool) -> str: # Anatomicals found if anat is not None: desc += ( - f"A total of {len(anat)} {modality} images were found " - "within the input BIDS dataset.\n" + f'A total of {len(anat)} {modality} images were found ' + 'within the input BIDS dataset.\n' ) else: desc += ( - f"A total of {len(t1w)} T1w and {len(t2w)} T2w images " - "were found within the input BIDS dataset.\n" + f'A total of {len(t1w)} T1w and {len(t2w)} T2w images ' + 'were found within the input BIDS dataset.\n' ) # Template + Preproc workflows if t1w: if len(t1w) == 1: desc += ( - f"The T1-weighted (T1w) image was denoised " - "and corrected for intensity non-uniformity (INU)" + 'The T1-weighted (T1w) image was denoised ' + 'and corrected for intensity non-uniformity (INU)' ) else: desc += ( - "All of the T1-weighted images were corrected for intensity " - "non-uniformity (INU)" + 'All of the T1-weighted images were corrected for intensity ' + 'non-uniformity (INU)' ) desc += ( - "with `N4BiasFieldCorrection` [@n4], distributed with ANTs {ants_ver} " - "[@ants, RRID:SCR_004757]" + 'with `N4BiasFieldCorrection` [@n4], distributed with ANTs {ants_ver} ' + '[@ants, RRID:SCR_004757]' ) - desc += ".\n" if len(t1w) > 1 else ", and used as T1w-reference throughout the workflow.\n" + desc += '.\n' if len(t1w) > 1 else ', and used as T1w-reference throughout the workflow.\n' if t2w: if len(t2w) == 1: desc += ( - "The T2-weighted (T2w) image was denoised and corrected for intensity " - "non-uniformity (INU)" + 'The T2-weighted (T2w) image was denoised and corrected for intensity ' + 'non-uniformity (INU)' ) else: desc += ( - "All of the T2-weighted images were corrected for intensity " - "non-uniformity (INU)" + 'All of the T2-weighted images were corrected for intensity ' + 'non-uniformity (INU)' ) desc += ( - "with `N4BiasFieldCorrection` [@n4], distributed with ANTs {ants_ver} " - "[@ants, RRID:SCR_004757]" + 'with `N4BiasFieldCorrection` [@n4], distributed with ANTs {ants_ver} ' + '[@ants, RRID:SCR_004757]' ) - desc += ".\n" if len(t2w) > 1 else ", and used as T2w-reference throughout the workflow.\n" + desc += '.\n' if len(t2w) > 1 else ', and used as T2w-reference throughout the workflow.\n' # Precomputed derivatives if mask: - desc += "A previously computed mask was used to skull-strip the anatomical image." + desc += 'A previously computed mask was used to skull-strip the anatomical image.' 
else: desc += ( diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index b47bac04..08628d77 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -57,6 +57,7 @@ from nibabies.workflows.bold import init_func_preproc_wf if ty.TYPE_CHECKING: + from bids.layout import BIDSLayout from niworkflows.utils.spaces import SpatialReferences @@ -90,7 +91,7 @@ def init_nibabies_wf(subworkflows_list): from niworkflows.interfaces.bids import BIDSFreeSurferDir ver = Version(config.environment.version) - nibabies_wf = Workflow(name=f"nibabies_{ver.major}_{ver.minor}_wf") + nibabies_wf = Workflow(name=f'nibabies_{ver.major}_{ver.minor}_wf') nibabies_wf.base_dir = config.execution.work_dir execution_spaces = init_execution_spaces() @@ -100,7 +101,7 @@ def init_nibabies_wf(subworkflows_list): fsdir = pe.Node( BIDSFreeSurferDir( derivatives=config.execution.output_dir, - freesurfer_home=os.getenv("FREESURFER_HOME"), + freesurfer_home=os.getenv('FREESURFER_HOME'), spaces=execution_spaces.get_fs_spaces(), ), name=f"fsdir_run_{config.execution.run_uuid.replace('-', '_')}", @@ -114,13 +115,13 @@ def init_nibabies_wf(subworkflows_list): age = parse_bids_for_age_months(config.execution.bids_dir, subject_id, session_id) if config.workflow.age_months: config.loggers.cli.warning( - "`--age-months` is deprecated and will be removed in a future release." - "Please use a `sessions.tsv` or `participants.tsv` file to track participants age." + '`--age-months` is deprecated and will be removed in a future release.' + 'Please use a `sessions.tsv` or `participants.tsv` file to track participants age.' ) age = config.workflow.age_months if age is None: raise RuntimeError( - "Could not find age for sub-{subject}{session}".format( + 'Could not find age for sub-{subject}{session}'.format( subject=subject_id, session=f'_ses-{session_id}' if session_id else '' ) ) @@ -134,25 +135,25 @@ def init_nibabies_wf(subworkflows_list): spaces=output_spaces, ) - bids_level = [f"sub-{subject_id}"] + bids_level = [f'sub-{subject_id}'] if session_id: - bids_level.append(f"ses-{session_id}") + bids_level.append(f'ses-{session_id}') log_dir = ( - config.execution.nibabies_dir.joinpath(*bids_level) / "log" / config.execution.run_uuid + config.execution.nibabies_dir.joinpath(*bids_level) / 'log' / config.execution.run_uuid ) - single_subject_wf.config["execution"]["crashdump_dir"] = str(log_dir) + single_subject_wf.config['execution']['crashdump_dir'] = str(log_dir) for node in single_subject_wf._get_all_nodes(): node.config = deepcopy(single_subject_wf.config) if freesurfer: - nibabies_wf.connect(fsdir, "subjects_dir", single_subject_wf, "inputnode.subjects_dir") + nibabies_wf.connect(fsdir, 'subjects_dir', single_subject_wf, 'inputnode.subjects_dir') else: nibabies_wf.add_nodes([single_subject_wf]) # Dump a copy of the config file into the log directory log_dir.mkdir(exist_ok=True, parents=True) - config.to_filename(log_dir / "nibabies.toml") + config.to_filename(log_dir / 'nibabies.toml') return nibabies_wf @@ -209,9 +210,9 @@ def init_single_subject_wf( from .anatomical import init_infant_anat_wf, init_infant_single_anat_wf name = ( - f"single_subject_{subject_id}_{session_id}_wf" + f'single_subject_{subject_id}_{session_id}_wf' if session_id - else f"single_subject_{subject_id}_wf" + else f'single_subject_{subject_id}_wf' ) subject_data = collect_data( config.execution.layout, @@ -222,51 +223,56 @@ def init_single_subject_wf( bids_filters=config.execution.bids_filters, )[0] - if "flair" in 
config.workflow.ignore:
-        subject_data["flair"] = []
-    if "t2w" in config.workflow.ignore:
-        subject_data["t2w"] = []
+    if 'flair' in config.workflow.ignore:
+        subject_data['flair'] = []
+    if 't2w' in config.workflow.ignore:
+        subject_data['t2w'] = []

     anat_only = config.workflow.anat_only
     derivatives = Derivatives(bids_root=config.execution.layout.root)
-    contrast = "T1w" if subject_data["t1w"] else "T2w"
+    contrast = 'T1w' if subject_data['t1w'] else 'T2w'
     single_modality = not (subject_data['t1w'] and subject_data['t2w'])

     # Make sure we always go through these two checks
-    if not anat_only and not subject_data["bold"]:
+    if not anat_only and not subject_data['bold']:
         task_id = config.execution.task_id
         raise RuntimeError(
-            "No BOLD images found for participant {} and task {}. "
-            "All workflows require BOLD images.".format(
-                subject_id, task_id if task_id else "<all>"
+            'No BOLD images found for participant {} and task {}. '
+            'All workflows require BOLD images.'.format(
+                subject_id, task_id if task_id else '<all>'
             )
         )

+    # bold_runs = [
+    #     sorted(
+    #         listify(run),
+    #         key=lambda fl: config.execution.layout.get_metadata(fl).get('EchoTime', 0),
+    #     )
+    #     for run in subject_data['bold']
+    # ]
+
     if config.execution.derivatives:
         for deriv_path in config.execution.derivatives:
-            config.loggers.workflow.info("Searching for derivatives in %s", deriv_path)
+            config.loggers.workflow.info('Searching for derivatives in %s', deriv_path)
             derivatives.populate(
                 deriv_path,
                 subject_id,
                 session_id=session_id,
             )
-        config.loggers.workflow.info("Found precomputed derivatives %s", derivatives)
+        config.loggers.workflow.info('Found precomputed derivatives %s', derivatives)

     workflow = Workflow(name=name)
-    workflow.__desc__ = """
+    workflow.__desc__ = f"""
 Results included in this manuscript come from preprocessing
-performed using *NiBabies* {nibabies_ver},
+performed using *NiBabies* {config.environment.version},
 derived from fMRIPrep (@fmriprep1; @fmriprep2; RRID:SCR_016216).
-The underlying workflow engine used is *Nipype* {nipype_ver}
+The underlying workflow engine used is *Nipype* {config.environment.nipype_version}
 (@nipype1; @nipype2; RRID:SCR_002502).
-""".format(
-        nibabies_ver=config.environment.version,
-        nipype_ver=config.environment.nipype_version,
-    )
-    workflow.__postdesc__ = """
+"""
+    workflow.__postdesc__ = f"""
 Many internal operations of *NiBabies* use
-*Nilearn* {nilearn_ver} [@nilearn, RRID:SCR_001362],
+*Nilearn* {NILEARN_VERSION} [@nilearn, RRID:SCR_001362],
 mostly within the functional processing workflow.
For more details of the pipeline, see [the section corresponding to workflows in *nibabies*'s documentation]\ @@ -284,13 +290,11 @@ def init_single_subject_wf( ### References -""".format( - nilearn_ver=NILEARN_VERSION - ) +""" nibabies_dir = str(config.execution.nibabies_dir) - inputnode = pe.Node(niu.IdentityInterface(fields=["subjects_dir"]), name="inputnode") + inputnode = pe.Node(niu.IdentityInterface(fields=['subjects_dir']), name='inputnode') # TODO: Revisit T1w/T2w restrictions for BIDSDataGrabber bidssrc = pe.Node( @@ -300,12 +304,12 @@ def init_single_subject_wf( anat_derivatives=False, subject_id=subject_id, ), - name="bidssrc", + name='bidssrc', ) bids_info = pe.Node( BIDSInfo(bids_dir=config.execution.bids_dir, bids_validate=False), - name="bids_info", + name='bids_info', ) summary = pe.Node( @@ -313,58 +317,58 @@ def init_single_subject_wf( std_spaces=spaces.get_spaces(nonstandard=False), nstd_spaces=spaces.get_spaces(standard=False), ), - name="summary", + name='summary', run_without_submitting=True, ) about = pe.Node( - AboutSummary(version=config.environment.version, command=" ".join(sys.argv)), - name="about", + AboutSummary(version=config.environment.version, command=' '.join(sys.argv)), + name='about', run_without_submitting=True, ) ds_report_summary = pe.Node( DerivativesDataSink( base_directory=nibabies_dir, - desc="summary", - datatype="figures", - dismiss_entities=("echo",), + desc='summary', + datatype='figures', + dismiss_entities=('echo',), ), - name="ds_report_summary", + name='ds_report_summary', run_without_submitting=True, ) ds_report_about = pe.Node( DerivativesDataSink( base_directory=nibabies_dir, - desc="about", - datatype="figures", - dismiss_entities=("echo",), + desc='about', + datatype='figures', + dismiss_entities=('echo',), ), - name="ds_report_about", + name='ds_report_about', run_without_submitting=True, ) - wf_args = dict( - ants_affine_init=True, - age_months=age, - contrast=contrast, - t1w=subject_data["t1w"], - t2w=subject_data["t2w"], - bids_root=config.execution.bids_dir, - derivatives=derivatives, - freesurfer=config.workflow.run_reconall, - hires=config.workflow.hires, - longitudinal=config.workflow.longitudinal, - omp_nthreads=config.nipype.omp_nthreads, - output_dir=nibabies_dir, - segmentation_atlases=config.execution.segmentation_atlases_dir, - skull_strip_mode=config.workflow.skull_strip_t1w, - skull_strip_template=Reference.from_string(config.workflow.skull_strip_template)[0], - sloppy=config.execution.sloppy, - spaces=spaces, - cifti_output=config.workflow.cifti_output, - ) + wf_args = { + 'ants_affine_init': True, + 'age_months': age, + 'contrast': contrast, + 't1w': subject_data['t1w'], + 't2w': subject_data['t2w'], + 'bids_root': config.execution.bids_dir, + 'derivatives': derivatives, + 'freesurfer': config.workflow.run_reconall, + 'hires': config.workflow.hires, + 'longitudinal': config.workflow.longitudinal, + 'omp_nthreads': config.nipype.omp_nthreads, + 'output_dir': nibabies_dir, + 'segmentation_atlases': config.execution.segmentation_atlases_dir, + 'skull_strip_mode': config.workflow.skull_strip_t1w, + 'skull_strip_template': Reference.from_string(config.workflow.skull_strip_template)[0], + 'sloppy': config.execution.sloppy, + 'spaces': spaces, + 'cifti_output': config.workflow.cifti_output, + } anat_preproc_wf = ( init_infant_anat_wf(**wf_args) if not single_modality @@ -419,8 +423,8 @@ def init_single_subject_wf( # Overwrite ``out_path_base`` of smriprep's DataSinks for node in workflow.list_node_names(): - if 
node.split(".")[-1].startswith("ds_"): - workflow.get_node(node).interface.out_path_base = "" + if node.split('.')[-1].startswith('ds_'): + workflow.get_node(node).interface.out_path_base = '' if anat_only: return workflow @@ -428,9 +432,9 @@ def init_single_subject_wf( # Susceptibility distortion correction fmap_estimators = None if any((config.workflow.use_syn_sdc, config.workflow.force_syn)): - config.loggers.workflow.critical("SyN processing is not yet implemented.") + config.loggers.workflow.critical('SyN processing is not yet implemented.') - if "fieldmaps" not in config.workflow.ignore: + if 'fieldmaps' not in config.workflow.ignore: from sdcflows.utils.wrangler import find_estimators # SDC Step 1: Run basic heuristics to identify available data for fieldmap estimation @@ -445,7 +449,7 @@ def init_single_subject_wf( # Append the functional section to the existing anatomical exerpt # That way we do not need to stream down the number of bold datasets - anat_preproc_wf.__postdesc__ = getattr(anat_preproc_wf, '__postdesc__') or '' + anat_preproc_wf.__postdesc__ = anat_preproc_wf.__postdesc__ or '' func_pre_desc = f""" Functional data preprocessing @@ -460,7 +464,7 @@ def init_single_subject_wf( if func_preproc_wf is None: continue - func_preproc_wf.__desc__ = func_pre_desc + (getattr(func_preproc_wf, '__desc__') or '') + func_preproc_wf.__desc__ = func_pre_desc + (func_preproc_wf.__desc__ or '') # fmt:off workflow.connect([ (anat_preproc_wf, func_preproc_wf, [ @@ -491,13 +495,13 @@ def init_single_subject_wf( if not has_fieldmap: config.loggers.workflow.warning( - "Data for fieldmap estimation not present. Please note that these data " - "will not be corrected for susceptibility distortions." + 'Data for fieldmap estimation not present. Please note that these data ' + 'will not be corrected for susceptibility distortions.' 
)
        return workflow

    config.loggers.workflow.info(
        f'Fieldmap estimators found: {[e.method for e in fmap_estimators]}'
    )

    from sdcflows import fieldmaps as fm
@@ -505,7 +509,7 @@

     fmap_wf = init_fmap_preproc_wf(
         sloppy=bool(config.execution.sloppy),
-        debug="fieldmaps" in config.execution.debug,
+        debug='fieldmaps' in config.execution.debug,
         estimators=fmap_estimators,
         omp_nthreads=config.nipype.omp_nthreads,
         output_dir=nibabies_dir,
@@ -523,20 +527,20 @@
         # fmt: off
         workflow.connect([
             (fmap_wf, func_preproc_wf, [
-                ("outputnode.fmap", "inputnode.fmap"),
-                ("outputnode.fmap_ref", "inputnode.fmap_ref"),
-                ("outputnode.fmap_coeff", "inputnode.fmap_coeff"),
-                ("outputnode.fmap_mask", "inputnode.fmap_mask"),
-                ("outputnode.fmap_id", "inputnode.fmap_id"),
-                ("outputnode.method", "inputnode.sdc_method"),
+                ('outputnode.fmap', 'inputnode.fmap'),
+                ('outputnode.fmap_ref', 'inputnode.fmap_ref'),
+                ('outputnode.fmap_coeff', 'inputnode.fmap_coeff'),
+                ('outputnode.fmap_mask', 'inputnode.fmap_mask'),
+                ('outputnode.fmap_id', 'inputnode.fmap_id'),
+                ('outputnode.method', 'inputnode.sdc_method'),
             ]),
         ])
         # fmt: on

     # Overwrite ``out_path_base`` of sdcflows's DataSinks
     for node in fmap_wf.list_node_names():
-        if node.split(".")[-1].startswith("ds_"):
-            fmap_wf.get_node(node).interface.out_path_base = ""
+        if node.split('.')[-1].startswith('ds_'):
+            fmap_wf.get_node(node).interface.out_path_base = ''

     # Step 3: Manually connect PEPOLAR
     for estimator in fmap_estimators:
@@ -551,27 +555,28 @@
         suffices = [s.suffix for s in estimator.sources]

         if estimator.method == fm.EstimatorType.PEPOLAR:
-            if set(suffices) == {"epi"} or sorted(suffices) == ["bold", "epi"]:
-                fmap_wf_inputs = getattr(fmap_wf.inputs, f"in_{estimator.bids_id}")
+            if set(suffices) == {'epi'} or sorted(suffices) == ['bold', 'epi']:
+                fmap_wf_inputs = getattr(fmap_wf.inputs, f'in_{estimator.bids_id}')
                 fmap_wf_inputs.in_data = [str(s.path) for s in estimator.sources]
                 fmap_wf_inputs.metadata = [s.metadata for s in estimator.sources]
             else:
                 raise NotImplementedError(
-                    "Sophisticated PEPOLAR schemes (e.g., using DWI+EPI) are unsupported."
+                    'Sophisticated PEPOLAR schemes (e.g., using DWI+EPI) are unsupported.'
                 )

     return workflow


 def _prefix(subid):
-    return subid if subid.startswith("sub-") else f"sub-{subid}"
+    return subid if subid.startswith('sub-') else f'sub-{subid}'


 def init_workflow_spaces(execution_spaces: SpatialReferences, age_months: int):
     """
     Create output spaces at a per-subworkflow level.

-    This address the case where a multi-session subject is run, and requires separate template cohorts.
+    This addresses the case where a multi-session subject is run,
+    and requires separate template cohorts.
""" from niworkflows.utils.spaces import Reference @@ -580,12 +585,12 @@ def init_workflow_spaces(execution_spaces: SpatialReferences, age_months: int): spaces = deepcopy(execution_spaces) if age_months is None: - raise RuntimeError("Participant age (in months) is required.") + raise RuntimeError('Participant age (in months) is required.') if not spaces.references: # Ensure age specific template is added if nothing is present - cohort = cohort_by_months("MNIInfant", age_months) - spaces.add(("MNIInfant", {"res": "native", "cohort": cohort})) + cohort = cohort_by_months('MNIInfant', age_months) + spaces.add(('MNIInfant', {'res': 'native', 'cohort': cohort})) if not spaces.is_cached(): spaces.checkpoint() @@ -595,15 +600,15 @@ def init_workflow_spaces(execution_spaces: SpatialReferences, age_months: int): # These spaces will not be included in the final outputs. if config.workflow.use_aroma: # Make sure there's a normalization to FSL for AROMA to use. - spaces.add(Reference("MNI152NLin6Asym", {"res": "2"})) + spaces.add(Reference('MNI152NLin6Asym', {'res': '2'})) if config.workflow.cifti_output: # CIFTI grayordinates to corresponding FSL-MNI resolutions. - vol_res = "2" if config.workflow.cifti_output == "91k" else "1" - spaces.add(Reference("MNI152NLin6Asym", {"res": vol_res})) + vol_res = '2' if config.workflow.cifti_output == '91k' else '1' + spaces.add(Reference('MNI152NLin6Asym', {'res': vol_res})) # Ensure a non-native version of MNIInfant is added as a target - cohort = cohort_by_months("MNIInfant", age_months) - spaces.add(Reference("MNIInfant", {"cohort": cohort})) + cohort = cohort_by_months('MNIInfant', age_months) + spaces.add(Reference('MNIInfant', {'cohort': cohort})) return spaces @@ -614,6 +619,100 @@ def init_execution_spaces(): spaces = config.execution.output_spaces or SpatialReferences() if not isinstance(spaces, SpatialReferences): spaces = SpatialReferences( - [ref for s in spaces.split(" ") for ref in Reference.from_string(s)] + [ref for s in spaces.split(' ') for ref in Reference.from_string(s)] ) return spaces + + +def map_fieldmap_estimation( + layout: BIDSLayout, + subject_id: str, + bold_data: list[list[str]], + ignore_fieldmaps: bool, + use_syn: bool | str, + force_syn: bool, + filters: dict | None, +) -> tuple[list, dict]: + if not any((not ignore_fieldmaps, use_syn, force_syn)): + return [], {} + + from sdcflows import fieldmaps as fm + from sdcflows.utils.wrangler import find_estimators + + # In the case where fieldmaps are ignored and `--use-syn-sdc` is requested, + # SDCFlows `find_estimators` still receives a full layout (which includes the fmap modality) + # and will not calculate fmapless schemes. + # Similarly, if fieldmaps are ignored and `--force-syn` is requested, + # `fmapless` should be set to True to ensure BOLD targets are found to be corrected. + fmap_estimators = find_estimators( + layout=layout, + subject=subject_id, + fmapless=bool(use_syn) or ignore_fieldmaps and force_syn, + force_fmapless=force_syn or ignore_fieldmaps and use_syn, + bids_filters=filters, + ) + + if not fmap_estimators: + if use_syn: + message = ( + 'Fieldmap-less (SyN) estimation was requested, but PhaseEncodingDirection ' + 'information appears to be absent.' 
+ ) + config.loggers.workflow.error(message) + if use_syn == 'error': + raise ValueError(message) + return [], {} + + if ignore_fieldmaps and any(f.method == fm.EstimatorType.ANAT for f in fmap_estimators): + config.loggers.workflow.info( + 'Option "--ignore fieldmaps" was set, but either "--use-syn-sdc" ' + 'or "--force-syn" were given, so fieldmap-less estimation will be executed.' + ) + fmap_estimators = [f for f in fmap_estimators if f.method == fm.EstimatorType.ANAT] + + # Pare down estimators to those that are actually used + # If fmap_estimators == [], all loops/comprehensions terminate immediately + all_ids = {fmap.bids_id for fmap in fmap_estimators} + bold_files = (bold_series[0] for bold_series in bold_data) + + all_estimators = { + bold_file: [fmap_id for fmap_id in get_estimator(layout, bold_file) if fmap_id in all_ids] + for bold_file in bold_files + } + + for bold_file, estimator_key in all_estimators.items(): + if len(estimator_key) > 1: + config.loggers.workflow.warning( + f"Several fieldmaps <{', '.join(estimator_key)}> are " + f"'IntendedFor' <{bold_file}>, using {estimator_key[0]}" + ) + estimator_key[1:] = [] + + # Final, 1-1 map, dropping uncorrected BOLD + estimator_map = { + bold_file: estimator_key[0] + for bold_file, estimator_key in all_estimators.items() + if estimator_key + } + + fmap_estimators = [f for f in fmap_estimators if f.bids_id in estimator_map.values()] + + return fmap_estimators, estimator_map + + +def get_estimator(layout, fname): + field_source = layout.get_metadata(fname).get('B0FieldSource') + if isinstance(field_source, str): + field_source = (field_source,) + + if field_source is None: + import re + from pathlib import Path + + from sdcflows.fieldmaps import get_identifier + + # Fallback to IntendedFor + intended_rel = re.sub(r'^sub-[a-zA-Z0-9]*/', '', str(Path(fname).relative_to(layout.root))) + field_source = get_identifier(intended_rel) + + return field_source diff --git a/nibabies/workflows/fit.py b/nibabies/workflows/fit.py new file mode 100644 index 00000000..b72a6a39 --- /dev/null +++ b/nibabies/workflows/fit.py @@ -0,0 +1,458 @@ +import logging + +from nipype.interfaces import utility as niu +from nipype.pipeline import engine as pe +from niworkflows.engine.workflows import LiterateWorkflow as Workflow +from niworkflows.utils.connections import pop_file + +from nibabies import config + +LOGGER = logging.getLogger('nipype.workflow') + +def init_infant_anat_fit_wf( + age_months, + t1w, + t2w, + bids_root, + precomputed, + hires, + longitudinal, + omp_nthreads, + output_dir, + segmentation_atlases, + skull_strip_mode, + skull_strip_template, + sloppy, + spaces, + cifti_output, + name='infant_anat_fit_wf', +): + """ + Stage the anatomical preprocessing steps: + - T1w reference + - T2w reference + - Brain extraction and INU (bias field) correction + - Brain tissue segmentation + - Spatial normalization to standard spaces. 
+ - Surface reconstruction (MCRIBS / infant_recon_all / recon-all) + """ + + workflow = Workflow(name=name) + num_t1w = len(t1w) + num_t2w = len(t2w) + + if not num_t1w and not num_t2w: + raise FileNotFoundError('No anatomical scans provided!') + + if not num_t1w or not num_t2w: + modality = 'T1w' if num_t1w else 'T2w' + anatomicals = t1w or t2w + + workflow = init_infant_single_anat_fit_wf( + modality, + age_months=age_months, + anatomicals=anatomicals, + bids_root=bids_root, + precomputed=precomputed, + hires=hires, + longitudinal=longitudinal, + omp_nthreads=omp_nthreads, + output_dir=output_dir, + segmentation_atlases=segmentation_atlases, + skull_strip_mode=skull_strip_mode, + skull_strip_template=skull_strip_mode, + sloppy=sloppy, + spaces=spaces, + cifti_output=cifti_output, + ) + + return workflow + + # Organization + # ------------ + # This workflow takes the usual (inputnode -> graph -> outputnode) format + # The graph consists of (input -> compute -> datasink -> buffer) units, + # and all inputs to outputnode are buffer. + # If precomputed inputs are found, then these units are replaced with (buffer) + # At the time of writing, t1w_mask is an exception, which takes the form + # (t1w_buffer -> refined_buffer -> datasink -> outputnode) + # All outputnode components should therefore point to files in the input or + # output directories. + inputnode = pe.Node( + niu.IdentityInterface(fields=['t1w', 't2w', 'roi', 'flair', 'subjects_dir', 'subject_id']), + name='inputnode', + ) + outputnode = pe.Node( + niu.IdentityInterface( + fields=[ + # Primary derivatives + 't1w_preproc', + 't2w_preproc', + 't1w2t2w_xfm', + 't1w_mask', + 't1w_dseg', + 't1w_tpms', + 'anat2std_xfm', + 'fsnative2t1w_xfm', + # Surface and metric derivatives for fsLR resampling + 'white', + 'pial', + 'midthickness', + 'sphere', + 'thickness', + 'sulc', + 'sphere_reg', + 'sphere_reg_fsLR', + 'sphere_reg_msm', + 'anat_ribbon', + # Reverse transform; not computable from forward transform + 'std2anat_xfm', + # Metadata + 'template', + 'subjects_dir', + 'subject_id', + 't1w_valid_list', + ] + ), + name='outputnode', + ) + + # If all derivatives exist, inputnode could go unconnected, so add explicitly + workflow.add_nodes([inputnode]) + + # Stage 1 inputs (filtered) + sourcefile_buffer = pe.Node( + niu.IdentityInterface(fields=['source_files']), + name='sourcefile_buffer', + ) + + # Stage 2 - Anatomicals + t1w_buffer = pe.Node( + niu.IdentityInterface(fields=['t1w_preproc', 't1w_mask', 't1w_brain']), + name='t1w_buffer', + ) + t2w_buffer = pe.Node( + niu.IdentityInterface(fields=['t2w_preproc', 't2w_mask', 't2w_brain']) + ) + + # Stage 3 - Coregistration + t1w2t2w_buffer = pe.Node(niu.Merge(2), name='t1w2t2w_buffer') + t2w2t1w_buffer = pe.Node(niu.Merge(2), name='t2w2t1w_buffer') + + # Stage 4 - Segmentation + seg_buffer = pe.Node( + niu.IdentityInterface(fields=['anat_dseg', 'anat_tpms']), + name='seg_buffer', + ) + # Stage 5 - collated template names, forward and reverse transforms + template_buffer = pe.Node(niu.Merge(2), name='template_buffer') + anat2std_buffer = pe.Node(niu.Merge(2), name='anat2std_buffer') + std2anat_buffer = pe.Node(niu.Merge(2), name='std2anat_buffer') + + # Stage 6 results: Refined stage 2 results; may be direct copy if no refinement + refined_buffer = pe.Node( + niu.IdentityInterface(fields=['t1w_mask', 't1w_brain']), + name='refined_buffer', + ) + + # Stage 8 results: GIFTI surfaces + surfaces_buffer = pe.Node( + niu.IdentityInterface( + fields=['white', 'pial', 'midthickness', 'sphere', 
'sphere_reg', 'thickness', 'sulc'] + ), + name='surfaces_buffer', + ) + + # Stage 9 and 10 results: fsLR sphere registration + fsLR_buffer = pe.Node(niu.IdentityInterface(fields=['sphere_reg_fsLR']), name='fsLR_buffer') + msm_buffer = pe.Node(niu.IdentityInterface(fields=['sphere_reg_msm']), name='msm_buffer') + + workflow.connect([ + (seg_buffer, outputnode, [ + ('t1w_dseg', 't1w_dseg'), + ('t1w_tpms', 't1w_tpms'), + ]), + (anat2std_buffer, outputnode, [('out', 'anat2std_xfm')]), + (std2anat_buffer, outputnode, [('out', 'std2anat_xfm')]), + (template_buffer, outputnode, [('out', 'template')]), + (sourcefile_buffer, outputnode, [('source_files', 't1w_valid_list')]), + (surfaces_buffer, outputnode, [ + ('white', 'white'), + ('pial', 'pial'), + ('midthickness', 'midthickness'), + ('sphere', 'sphere'), + ('sphere_reg', 'sphere_reg'), + ('thickness', 'thickness'), + ('sulc', 'sulc'), + ]), + (fsLR_buffer, outputnode, [('sphere_reg_fsLR', 'sphere_reg_fsLR')]), + (msm_buffer, outputnode, [('sphere_reg_msm', 'sphere_reg_msm')]), + ]) # fmt:skip + + # Reporting + recon_method = config.workflow.surface_recon_method + anat_reports_wf = init_anat_reports_wf( + surface_recon=recon_method, + output_dir=output_dir, + sloppy=sloppy, + ) + + workflow.connect([ + (outputnode, anat_reports_wf, [ + ('t1w_valid_list', 'inputnode.source_file'), + ('t1w_preproc', 'inputnode.t1w_preproc'), + ('t1w_mask', 'inputnode.t1w_mask'), + ('t1w_dseg', 'inputnode.t1w_dseg'), + ('template', 'inputnode.template'), + ('anat2std_xfm', 'inputnode.anat2std_xfm'), + ('subjects_dir', 'inputnode.subjects_dir'), + ('subject_id', 'inputnode.subject_id'), + ]), + ]) # fmt:skip + + desc = ( + '\nAnatomical data preprocessing\n\n: ', + f'A total of {len(t1w)} T1w and {len(t2w)} T2w images ' + 'were found within the input BIDS dataset.' + ) + + # Stage 1: Conform & valid T1w/T2w images + t1w_validate = pe.Node(ValidateImage(), name='anat_validate', run_without_submitting=True) + t2w_validate = t1w_validate.clone('t2w_validate') + + if not precomputed.t1w_preproc: + LOGGER.info('ANAT Stage 1: Adding T1w template workflow') + desc += ( + 'The T1-weighted (T1w) image was denoised and corrected for intensity ' + 'non-uniformity (INU)' + ) + + t1w_template_wf = init_anat_template_wf( + contrast='T1w', + num_files=num_t1w, + longitudinal=longitudinal, + omp_nthreads=omp_nthreads, + sloppy=sloppy, + name='t1w_template_wf', + ) + ds_t1w_template_wf = init_ds_template_wf( + modality='T1w', + output_dir=output_dir, + num_anat=num_t1w, + name='ds_t1w_template_wf', + ) + else: + LOGGER.info('ANAT Found preprocessed T1w - skipping Stage 1') + desc += ( + ' A preprocessed T1w image was provided as a precomputed input and used as ' + 'T1w-reference through the workflow.' 
+ ) + + t1w_validate.inputs.in_file = precomputed.t1w_preproc + sourcefile_buffer.inputs.source_files = [precomputed.t1w_preproc] + + workflow.connect([ + (t1w_validate, t1w_buffer, [('out_file', 't1w_preproc')]), + (t1w_buffer, outputnode, [('t1w_preproc', 't1w_preproc')]), + ]) # fmt:skip + + if not precomputed.t2w_preproc: + LOGGER.info('ANAT Stage 1: Adding T2w template workflow') + desc += ( + 'The T1-weighted (T2w) image was denoised and corrected for intensity ' + 'non-uniformity (INU)' + ) + + t2w_template_wf = init_anat_template_wf( + contrast='T2w', + num_files=num_t1w, + longitudinal=longitudinal, + omp_nthreads=omp_nthreads, + sloppy=sloppy, + name='t2w_template_wf', + ) + ds_t2w_template_wf = init_ds_template_wf( + modality='T2w', + output_dir=output_dir, + num_anat=num_t2w, + name='ds_t2w_template_wf', + ) + else: + LOGGER.info('ANAT Found preprocessed T2w - skipping Stage 1') + desc += ( + ' A preprocessed T2w image was provided as a precomputed input and used as ' + 'T2w-reference through the workflow.' + ) + + t2w_validate.inputs.in_file = precomputed.t2w_preproc + sourcefile_buffer.inputs.source_files = [precomputed.t2w_preproc] + + workflow.connect([ + (t2w_validate, t2w_buffer, [('out_file', 't2w_preproc')]), + (t2w_buffer, outputnode, [('t2w_preproc', 't2w_preproc')]), + ]) # fmt:skip + + + # Stage 2: Use previously computed mask or calculate + # If we only have one mask (could be either T1w/T2w), + # just apply transform to get it in the other space + only_t1w_mask = precomputed.t1w_mask and not precomputed.t2w_mask + only_t2w_mask = precomputed.t2w_mask and not precomputed.t1w_mask + + if precomputed.t1w_mask or only_t2w_mask: + desc += ( + ' A pre-computed T1w brain mask was provided as input and ' + 'used throughout the workflow.' 
+ ) + # A mask is available and will be applied + apply_t1w_mask = pe.Node(ApplyMask(), name='apply_t1w_mask') + workflow.connect([ + (t1w_validate, apply_t1w_mask, [('out_file', 'in_file')]), + (refined_buffer, outputnode, [('t1w_mask', 't1w_mask')]) + ]) # fmt:skip + if precomputed.t1w_mask: + LOGGER.info('ANAT Found T1w brain mask') + + t1w_buffer.inputs.t1w_mask = precomputed.t1w_mask + # If we have a mask, always apply it + apply_t1w_mask.inputs.in_mask = precomputed.t1w_mask + elif only_t2w_mask: + LOGGER.info('ANAT No T1w brain mask but a T2w mask is available') + + transform_t2w_mask = pe.Node( + ApplyTransforms(interpolation='MultiLabel'), + name='transform_t2w_mask' + ) + workflow.connect([ + (refined_buffer, transform_t2w_mask, [('t2w_mask', 'input_image')]), + (t2w_buffer, transform_t2w_mask, [('t2w_preproc', 'reference_image')]), + (t2w2t1w_buffer, transform_t2w_mask, [('t2w2t1w_xfm', 'transforms')]), + (transform_t2w_mask, apply_t1w_mask, [('output_image', 'in_file')]), + ]) # fmt:skip + + if not precomputed.t1w_preproc: + LOGGER.info('ANAT Skipping T1w skull-strip, INU-correction only') + n4_only_wf = init_n4_only_wf( + omp_nthreads=omp_nthreads, + atropos_use_random_seed=not skull_strip_fixed_seed, + ) + workflow.connect([ + (apply_t1w_mask, n4_only_wf, [('out_file', 'inputnode.in_files')]), + (n4_only_wf, t1w_buffer, [ + (('outputnode.bias_corrected', pop_file), 't1w_preproc'), + (('outputnode.out_file', pop_file), 't1w_brain'), + ]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Applying T1w mask to precomputed T1w') + workflow.connect(apply_t1w_mask, 'out_file', t1w_buffer, 't1w_brain') + else: + # T2w will be used for brain extraction + # so just use the one from the coregistration workflow + workflow.connect([ + () + ]) + + if precomputed.t2w_mask: + LOGGER.info('ANAT Found T2w brain mask') + + t2w_buffer.inputs.t2w_mask = precomputed.t2w_mask + # If we have a mask, always apply it + apply_t2w_mask = pe.Node(ApplyMask(in_mask=precomputed.t2w_mask), name='apply_t2w_mask') + workflow.connect([ + (t2w_validate, apply_t2w_mask, [('out_file', 'in_file')]), + (refined_buffer, outputnode, [('t2w_mask', 't2w_mask')]) + ]) # fmt:skip + + if not precomputed.t2w_preproc: + LOGGER.info('ANAT Skipping skull-strip, INU-correction only') + n4_only_wf = init_n4_only_wf( + omp_nthreads=omp_nthreads, + atropos_use_random_seed=not skull_strip_fixed_seed, + ) + workflow.connect([ + (apply_t2w_mask, n4_only_wf, [('out_file', 'inputnode.in_files')]), + (n4_only_wf, t2w_buffer, [ + (('outputnode.bias_corrected', pop_file), 't2w_preproc'), + (('outputnode.out_file', pop_file), 't2w_brain'), + ]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Applying T2w mask to precomputed T2w') + workflow.connect(apply_t2w_mask, 'out_file', t2w_buffer, 't2w_brain') + elif only_t1w_mask: + workflow.connect([ + + ]) + else: + LOGGER.info('ANAT Stage 2: Preparing brain extraction workflow') + if skull_strip_mode == 'auto': + run_skull_strip = not all(_is_skull_stripped(img) for img in t1w) + else: + run_skull_strip = {'force': True, 'skip': False}[skull_strip_mode] + + # Stage 3: Coregistration + # To use the found xfm, requires both precomputed anatomicals to be found as well + if precomputed.t1w_preproc and precomputed.t2w_preproc and precomputed.t1w2t2w_xfm: + LOGGER.info('ANAT Found T1w-T2w xfm') + desc += ( + ' A T1w-T2w coregistration transform was provided as input and used throughout the workflow.' 
+ ) + + t1w2t2w_buffer.inputs.t1w2t2w_xfm = precomputed.t1w2t2w_xfm + else: + LOGGER.info('ANAT Coregistering anatomicals') + desc += ( + ' The T1w and T2w reference volumes were co-registered using ANTs.' + ) + + coregistration_wf = init_coregistration_wf( + omp_nthreads=omp_nthreads, + sloppy=sloppy, + debug='registration' in config.execution.debug, + t1w_mask=False, + probmap=not precomputed.t2w_mask, + ) + + workflow.connect([ + (t1w_buffer, coregistration_wf, [('t1w_preproc', 'inputnode.in_t1w')]), + (t2w_buffer, coregistration_wf, [ + ('t2w_preproc', 'inputnode.in_t2w'), + ('t2w_mask', 'inputnode.in_mask'), + ]), + (coregistration_wf, t1w2t2w_buffer, [('outputnode.t1w2t2w_xfm', 't1w2t2w_xfm')]), + ]) # fmt:skip + + # Stage 4: Segmentation + if precomputed.t1w_dseg: + ... + + + workflow.__desc__ = desc + return workflow + + +def init_infant_single_anat_fit_wf( + modality, + *, + age_months: int, + anatomicals: list, + bids_root: str, + precomputed, + hires, + longitudinal, + omp_nthreads, + output_dir, + segmentation_atlases, + skull_strip_mode, + skull_strip_template, + sloppy, + spaces, + cifti_output, + name='infant_single_anat_fit_wf', +): + desc = ( + '\nAnatomical data preprocessing\n\n: ', + f'A total of {len(anatomicals)} {modality} images were found ' + 'within the input BIDS dataset.\n' + ) From 4c50c8d0efd3ca818b3485edb161a3ba88da9e56 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 2 Apr 2024 17:17:39 -0400 Subject: [PATCH 005/142] RF: Clean up segmentation workflow --- nibabies/workflows/anatomical/segmentation.py | 395 ++++++++++-------- 1 file changed, 223 insertions(+), 172 deletions(-) diff --git a/nibabies/workflows/anatomical/segmentation.py b/nibabies/workflows/anatomical/segmentation.py index b7607ff0..c520282d 100644 --- a/nibabies/workflows/anatomical/segmentation.py +++ b/nibabies/workflows/anatomical/segmentation.py @@ -1,215 +1,266 @@ import sys +import typing as ty +from pathlib import Path -from nipype.interfaces import fsl +from nipype.interfaces import ants, fsl from nipype.interfaces import utility as niu from nipype.interfaces.ants.segmentation import JointFusion from nipype.pipeline import engine as pe from niworkflows.data import load as load_nwf +from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms from niworkflows.interfaces.fixes import FixHeaderRegistration as Registration -from smriprep.utils.misc import apply_lut as _apply_bids_lut +from niworkflows.utils.connections import listify +from smriprep.utils.misc import apply_lut from smriprep.workflows.anatomical import ( _aseg_to_three, _probseg_fast2bids, _split_segments, ) -from nibabies.config import DEFAULT_MEMORY_MIN_GB +from nibabies import config +LOGGER = config.loggers.workflow -def init_anat_segmentations_wf( - anat_modality="T1w", - template_dir=None, - sloppy=False, - omp_nthreads=1, - precomp_aseg=None, - name="anat_segmentations_wf", -): - """ - Create discrete and probabilistic segmentations from an anatomical image. - - There are a number of heuristics used to calculate the aseg, based on what is available. 
-    1) A precomputed aseg is provided (via `precomp_aseg`)
-    2) Two or more segmentation templates are provided (via `template_dir`)
-    3) Otherwise, fallback to FSL FAST
-    """
-    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
-    from niworkflows.utils.connections import listify
-
-    if precomp_aseg and template_dir:
-        print("Found precomputed aseg; skipping JointLabelFusion", file=sys.stderr)
-        template_dir = None
 
-    wf = Workflow(name=name)
-    inputnode = pe.Node(
-        niu.IdentityInterface(fields=["anat_brain", "anat_aseg"]),
-        name="inputnode",
-    )
+def init_segmentation_wf(
+    *,
+    sloppy: bool,
+    method: ty.Literal['fast', 'jlf'] = 'fast',
+    image_type: ty.Literal['T1w', 'T2w'] = 'T2w',
+    jlf_template_dir: Path | None = None,
+    omp_nthreads: int = 1,
+    has_aseg: bool = False,
+    name: str = 'segmentation_wf',
+):
+    workflow = Workflow(name=name)
+    inputnode = pe.Node(
+        niu.IdentityInterface(fields=['anat_brain', 'anat_aseg']), name='inputnode'
+    )
     outputnode = pe.Node(
-        niu.IdentityInterface(fields=["anat_aseg", "anat_dseg", "anat_tpms"]),
-        name="outputnode",
+        niu.IdentityInterface(fields=['anat_dseg', 'anat_tpms', 'anat_aseg']),
+        name='outputnode',
     )
-    buffernode = pe.Node(niu.IdentityInterface(fields=["final_aseg"]), name='buffernode')
-
-    wf.__desc__ = """Brain tissue segmentation of cerebrospinal fluid (CSF),
-white-matter (WM) and gray-matter (GM) was performed on """
-
-    # Coerce segmentation labels to BIDS
-    lut_anat_dseg = pe.Node(niu.Function(function=_apply_bids_lut), name="lut_anat_dseg")
+    aseg_buffer = pe.Node(niu.IdentityInterface(fields=['anat_aseg']), name='aseg_buffer')
 
-    if not any((precomp_aseg, template_dir)):
-        from nipype.interfaces.fsl.base import Info as FSLInfo
+    # Coerce the segmentation labels to the BIDS LUT
+    to_dseg = pe.Node(
+        niu.Function(function=apply_lut, output_names=['out_dseg']),
+        name='to_dseg',
+    )
 
-        wf.__desc__ += (
-            f"the brain-extracted {anat_modality} using FSL FAST "
-            f"{FSLInfo.version() or '(version unknown)'}."
+    if method == 'fast':
+        workflow.__desc__ = (
+            'Brain tissue segmentation of cerebrospinal fluid (CSF), white-matter (WM), and '
+            f'gray-matter (GM) was performed on the brain-extracted {image_type} using FSL '
+            f'FAST, distributed with FSL {fsl.Info.version() or "version unknown"}.'
         )
-
-        # Use FSL FAST for segmentations
-        anat_dseg = pe.Node(
+        fast = pe.Node(
             fsl.FAST(segments=True, no_bias=True, probability_maps=True),
-            name="anat_dseg",
+            name='anat_dseg',
             mem_gb=3,
         )
-        lut_anat_dseg.inputs.lut = (0, 3, 1, 2)  # Maps: 0 -> 0, 3 -> 1, 1 -> 2, 2 -> 3.
+
+        to_dseg.inputs.lut = (0, 3, 1, 2)  # Maps: 0 -> 0, 3 -> 1, 1 -> 2, 2 -> 3.
         fast2bids = pe.Node(
             niu.Function(function=_probseg_fast2bids),
-            name="fast2bids",
+            name='fast2bids',
             run_without_submitting=True,
         )
-
-        # fmt:off
-        wf.connect([
-            (inputnode, anat_dseg, [("anat_brain", "in_files")]),
-            (anat_dseg, lut_anat_dseg, [("partial_volume_map", "in_dseg")]),
-            (lut_anat_dseg, outputnode, [("out", "anat_dseg")]),
-            (anat_dseg, fast2bids, [("partial_volume_files", "inlist")]),
-            (fast2bids, outputnode, [("out", "anat_tpms")]),
-        ])
-        # fmt:on
-        return wf
-
-    # Joint Label Fusion
-    elif template_dir:
-        from nipype.interfaces.ants.base import Info as ANTsInfo
-
-        wf.__desc__ += (
-            f"the brain-extracted {anat_modality} using ANTs JointFusion, distributed with ANTs "
-            f"{ANTsInfo.version() or '(version unknown)'}."
-        )
-        tmpl_anats, tmpl_segs = _parse_segmentation_atlases(anat_modality, template_dir)
-
-        # register to templates
-        ants_params = "testing" if sloppy else "precise"
-        # Register to each subject space
-        norm = pe.MapNode(
-            Registration(from_file=load_nwf(f"antsBrainExtraction_{ants_params}.json")),
-            name="norm",
-            iterfield=["moving_image"],
-            n_procs=omp_nthreads,
-            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        workflow.connect([
+            (inputnode, fast, [('anat_brain', 'in_files')]),
+            (fast, to_dseg, [('partial_volume_map', 'in_dseg')]),
+            (to_dseg, outputnode, [('out_dseg', 'anat_dseg')]),
+            (fast, fast2bids, [('partial_volume_files', 'inlist')]),
+            (fast2bids, outputnode, [('out', 'anat_tpms')]),
+        ])  # fmt:skip
+        return workflow  # NOTE: no aseg will be output
+
+    # Otherwise, segment tissue based on subcortical segmentation
+    if has_aseg:
+        workflow.connect(
+            [
+                (inputnode, aseg_buffer, [('anat_aseg', 'anat_aseg')]),
+            ]
        )
-        norm.inputs.moving_image = tmpl_anats
-        norm.inputs.float = True
-
-        apply_atlas = pe.MapNode(
-            ApplyTransforms(
-                dimension=3,
-                interpolation="NearestNeighbor",
-                float=True,
-            ),
-            iterfield=["transforms", "input_image"],
-            name="apply_atlas",
-        )
-        apply_atlas.inputs.input_image = tmpl_anats
-        apply_seg = pe.MapNode(
-            ApplyTransforms(dimension=3, interpolation="MultiLabel"),  # NearestNeighbor?
-            name="apply_seg",
-            iterfield=["transforms", "input_image"],
-        )
-        apply_seg.inputs.input_image = tmpl_segs
-
-        jointfusion = pe.Node(
-            JointFusion(
-                dimension=3,
-                out_label_fusion="fusion_labels.nii.gz",
-                num_threads=omp_nthreads,
-            ),
-            name="jointfusion",
-        )
+    elif method == 'jlf':
+        if not jlf_template_dir or not Path(jlf_template_dir).exists():
+            raise RuntimeError('JLF requires a template directory.')
 
-        jf_label = pe.Node(
-            niu.Function(function=_to_dtype, output_names=["out_file"]), name="jf_label"
+        jlf_wf = init_jlf_wf(
+            jlf_template_dir=jlf_template_dir,
+            sloppy=sloppy,
+            image_type=image_type,
+            omp_nthreads=omp_nthreads,
         )
-        # fmt:off
-        wf.connect([
-            (inputnode, norm, [('anat_brain', 'fixed_image')]),
-            (norm, apply_atlas, [('forward_transforms', 'transforms')]),
-            (inputnode, apply_atlas, [('anat_brain', 'reference_image')]),
-            (norm, apply_seg, [('forward_transforms', 'transforms')]),
-            (inputnode, apply_seg, [('anat_brain', 'reference_image')]),
-            (inputnode, jointfusion, [(('anat_brain', listify), 'target_image')]),
-            (apply_atlas, jointfusion, [('output_image', 'atlas_image')]),
-            (apply_seg, jointfusion, [('output_image', 'atlas_segmentation_image')]),
-            (jointfusion, jf_label, [('out_label_fusion', 'in_file')]),
-            (jf_label, buffernode, [('out_file', 'final_aseg')]),
-        ])
-        # fmt:on
-
-    elif precomp_aseg:
-        wf.__desc__ += "a pre-computed segmentation."
-
-        wf.connect(inputnode, "anat_aseg", buffernode, "final_aseg")
-
-    # Otherwise, the final aseg will be split into three tissue classes
-    # regardless if it was precomputed or generated via JLF
-    lut_anat_dseg.inputs.lut = _aseg_to_three()
-    split_seg = pe.Node(niu.Function(function=_split_segments), name="split_seg")
-
-    # fmt: off
-    wf.connect([
-        (buffernode, outputnode, [('final_aseg', 'anat_aseg')]),
-        (buffernode, lut_anat_dseg, [('final_aseg', 'in_dseg')]),
-        (lut_anat_dseg, outputnode, [('out', 'anat_dseg')]),
-        (lut_anat_dseg, split_seg, [('out', 'in_file')]),
-        (split_seg, outputnode, [('out', 'anat_tpms')]),
-    ])
-    # fmt: on
-
-    return wf
-
-
-def _parse_segmentation_atlases(anat_modality, template_dir):
+        workflow.connect([
+            (inputnode, jlf_wf, [('anat_brain', 'inputnode.anat_brain')]),
+            (jlf_wf, aseg_buffer, [('outputnode.anat_aseg', 'anat_aseg')]),
+        ])  # fmt:skip
+
+    to_dseg.inputs.lut = _aseg_to_three()
+    split_seg = pe.Node(
+        niu.Function(function=_split_segments, output_names=['out_tpms']),
+        name='split_seg',
+    )
+
+    workflow.connect([
+        (aseg_buffer, outputnode, [('anat_aseg', 'anat_aseg')]),
+        (aseg_buffer, to_dseg, [('anat_aseg', 'in_dseg')]),
+        (to_dseg, outputnode, [('out_dseg', 'anat_dseg')]),
+        (to_dseg, split_seg, [('out_dseg', 'in_file')]),
+        (split_seg, outputnode, [('out_tpms', 'anat_tpms')]),
+    ])  # fmt:skip
+    return workflow
+
+
+def init_jlf_wf(
+    jlf_template_dir: Path,
+    sloppy: bool,
+    image_type: ty.Literal['T1w', 'T2w'] = 'T2w',
+    omp_nthreads: int = 1,
+    max_templates: int | None = None,
+    name: str = 'jlf_wf',
+):
+    workflow = Workflow(name=name)
+    inputnode = pe.Node(niu.IdentityInterface(fields=['anat_brain']), name='inputnode')
+    outputnode = pe.Node(niu.IdentityInterface(fields=['anat_aseg']), name='outputnode')
+
+    jlf_templates = _parse_jlf_templates(
+        jlf_template_dir,
+        image_type=image_type,
+        max_templates=max_templates,
+    )
+    segmentations = list(jlf_templates.keys())
+    references = list(jlf_templates.values())
+
+    workflow.__desc__ = (
+        f'The {image_type} image was registered to {len(segmentations)} templates for '
+        f'JointFusion, distributed with ANTs {ants.base.Info.version() or "version unknown"}, '
+        'for image segmentation. Brain tissue segmentation of cerebrospinal fluid (CSF), '
+        'white-matter (WM), and gray-matter (GM) were derived from the label fused image.'
+    )
+
+    precision = 'testing' if sloppy else 'precise'
+    norm_templates = pe.MapNode(
+        Registration(from_file=load_nwf(f'antsBrainExtraction_{precision}.json')),
+        name='norm_templates',
+        iterfield=['moving_image'],
+        n_procs=omp_nthreads,
+        mem_gb=config.DEFAULT_MEMORY_MIN_GB,
+    )
+    norm_templates.inputs.moving_image = references
+    norm_templates.inputs.float = True
+
+    # Warp each template anatomical into subject space
+    apply_template = pe.MapNode(
+        ApplyTransforms(
+            dimension=3,
+            interpolation='NearestNeighbor',
+            float=True,
+        ),
+        iterfield=['transforms', 'input_image'],
+        name='apply_template',
+    )
+    apply_template.inputs.input_image = references
+
+    # Warp each template segmentation, preserving discrete labels
+    apply_seg = pe.MapNode(
+        ApplyTransforms(dimension=3, interpolation='MultiLabel'),
+        name='apply_seg',
+        iterfield=['transforms', 'input_image'],
+    )
+    apply_seg.inputs.input_image = segmentations
+
+    jointfusion = pe.Node(
+        JointFusion(
+            dimension=3,
+            out_label_fusion='fusion_labels.nii.gz',
+            num_threads=omp_nthreads,
+        ),
+        name='jointfusion',
+    )
+    clean_label_file = pe.Node(
+        niu.Function(function=_to_dtype, output_names=['out_file']), name='clean_label_file'
+    )
+    workflow.connect([
+        (inputnode, norm_templates, [('anat_brain', 'fixed_image')]),
+        (norm_templates, apply_template, [('forward_transforms', 'transforms')]),
+        (inputnode, apply_template, [('anat_brain', 'reference_image')]),
+        (norm_templates, apply_seg, [('forward_transforms', 'transforms')]),
+        (inputnode, apply_seg, [('anat_brain', 'reference_image')]),
+        (inputnode, jointfusion, [(('anat_brain', listify), 'target_image')]),
+        (apply_template, jointfusion, [('output_image', 'atlas_image')]),
+        (apply_seg, jointfusion, [('output_image', 'atlas_segmentation_image')]),
+        (jointfusion, clean_label_file, [('out_label_fusion', 'in_file')]),
+        (clean_label_file, outputnode, [('out_file', 'anat_aseg')]),
+    ])  # fmt:skip
+    return workflow
+
+
+def _parse_jlf_templates(
+    templates_dir: Path | str,
+    image_type: ty.Literal['T1w', 'T2w'] = 'T2w',
+    max_templates: int | None = None,
+):
     """
     Parse segmentation templates directory for anatomical and segmentation files.
+
+    The segmentations are expected to follow the FreeSurfer LUT, and the anatomicals
+    should be masked.
+
+    This is compatible with the DCAN layout::
+
+        jlf-templates/
+        ├── Template01
+        │   ├── Segmentation.nii.gz
+        │   ├── T1w_brain.nii.gz
+        │   └── T2w_brain.nii.gz
+        ├── Template02
+        ...
+
+    And the BIDS layout::
+
+        Templates/
+        ├── dataset_description.json
+        ├── sub-01
+        │   ├── sub-01_desc-aseg_dseg.nii.gz
+        │   ├── sub-01_T1w.json
+        │   ├── sub-01_T1w.nii.gz
+        │   ├── sub-01_T2w.json
+        │   └── sub-01_T2w.nii.gz
+        ├── sub-02
+        ...
 
-    This is currently hardcoded to match DCAN lab templates.
-    Will need to rethink standardization for more general cases.
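+
+    Returns
+    -------
+    dict
+        Mapping of each segmentation to its matching anatomical reference, e.g.
+        (a sketch of the DCAN case; real values are absolute paths)::
+
+            {'Template01/Segmentation.nii.gz': 'Template01/T2w_brain.nii.gz'}
+
+        At most ``max_templates`` entries are included.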
""" - from pathlib import Path - - anats, segs = [], [] - - for f in Path(template_dir).glob("**/*.nii*"): - if "Segmentation" in f.name: - segs.append(str(f.absolute())) - elif anat_modality in f.name: - anats.append(str(f.absolute())) - - assert anats - assert segs - # there should matching files per template - assert len(anats) == len(segs) - - return sorted(anats), sorted(segs) - - -def _to_dtype(in_file, dtype="uint8"): + segmentations = {} + templates_dir = Path(templates_dir) + templates = [template.name for template in templates_dir.iterdir() if template.is_dir()] + if not max_templates: + max_templates = len(templates) + + if not templates: + raise FileNotFoundError('JLF requested but no templates found.') + + for template in templates[:max_templates]: + files = sorted((templates_dir / template).iterdir()) + seg = None + anat = None + for fl in files: + if 'Segmentation' in fl.name or '_dseg' in fl.name: + seg = str(fl) + elif image_type in fl.name: + anat = str(fl) + if seg is None or anat is None: + print( + f'No anatomical or segmentation found for JLF template: {template}', + file=sys.stderr, + ) + continue + segmentations[seg] = anat + + if len(segmentations) == 0: + raise FileNotFoundError('JLF requested but anatomicals / segmentations were not found.') + return segmentations + + +def _to_dtype(in_file, dtype='uint8'): """ Freesurfer's ``mri_convert`` complains about unsigned 32-bit integers. - Since we may plan using the JLF segmentation within ``infant_recon_all``, + Since we may use the JLF segmentation with FreeSurfer tools better to make this change now. """ from pathlib import Path From 44f02dd133b56b6680dea805a63ee6e7c9e804e5 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 2 Apr 2024 21:54:07 -0400 Subject: [PATCH 006/142] STY: Single connect formatting, add log message --- nibabies/workflows/anatomical/segmentation.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/nibabies/workflows/anatomical/segmentation.py b/nibabies/workflows/anatomical/segmentation.py index c520282d..7e01ede5 100644 --- a/nibabies/workflows/anatomical/segmentation.py +++ b/nibabies/workflows/anatomical/segmentation.py @@ -75,11 +75,8 @@ def init_segmentation_wf( # Otherwise, segment tissue based on subcortical segmentation if has_aseg: - workflow.connect( - [ - (inputnode, aseg_buffer, [('anat_aseg', 'anat_aseg')]), - ] - ) + LOGGER.info('ANAT Segmentation: Using existing segmentation') + workflow.connect(inputnode, 'anat_aseg', aseg_buffer, 'anat_aseg') elif method == 'jlf': if not jlf_template_dir or not Path(jlf_template_dir).exists(): From fbdd6793f8a79dbe9ae7766c89a55ffaa699eb46 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 2 Apr 2024 21:55:57 -0400 Subject: [PATCH 007/142] ENH[WIP]: More fitting --- nibabies/workflows/fit.py | 267 ++++++++++++++++++++++++++++---------- 1 file changed, 199 insertions(+), 68 deletions(-) diff --git a/nibabies/workflows/fit.py b/nibabies/workflows/fit.py index b72a6a39..7a5f96b9 100644 --- a/nibabies/workflows/fit.py +++ b/nibabies/workflows/fit.py @@ -3,12 +3,22 @@ from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe from niworkflows.engine.workflows import LiterateWorkflow as Workflow +from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms +from niworkflows.interfaces.header import ValidateImage +from niworkflows.interfaces.nibabel import ApplyMask from niworkflows.utils.connections import pop_file +from smriprep.workflows.anatomical import init_anat_template_wf 
+from smriprep.workflows.outputs import ( + init_ds_mask_wf, + init_ds_template_wf, +) from nibabies import config +from nibabies.workflows.anatomical.segmentation import init_segmentation_wf LOGGER = logging.getLogger('nipype.workflow') + def init_infant_anat_fit_wf( age_months, t1w, @@ -74,8 +84,8 @@ def init_infant_anat_fit_wf( # The graph consists of (input -> compute -> datasink -> buffer) units, # and all inputs to outputnode are buffer. # If precomputed inputs are found, then these units are replaced with (buffer) - # At the time of writing, t1w_mask is an exception, which takes the form - # (t1w_buffer -> refined_buffer -> datasink -> outputnode) + # At the time of writing, t1w_mask / t2w_mask are an exception, which takes the form + # (t{1,2}w_buffer -> refined_buffer -> datasink -> outputnode) # All outputnode components should therefore point to files in the input or # output directories. inputnode = pe.Node( @@ -122,18 +132,30 @@ def init_infant_anat_fit_wf( # Stage 1 inputs (filtered) sourcefile_buffer = pe.Node( - niu.IdentityInterface(fields=['source_files']), + niu.IdentityInterface(fields=['t1w_source_files', 't2w_source_files']), name='sourcefile_buffer', ) # Stage 2 - Anatomicals - t1w_buffer = pe.Node( - niu.IdentityInterface(fields=['t1w_preproc', 't1w_mask', 't1w_brain']), - name='t1w_buffer', - ) - t2w_buffer = pe.Node( - niu.IdentityInterface(fields=['t2w_preproc', 't2w_mask', 't2w_brain']) + anat_buffer = pe.Node( + niu.IdentityInterface( + fields=[ + 't1w_preproc', + 't1w_mask', + 't1w_brain', + 't2w_preproc', + 't2w_mask', + 't2w_brain', + ] + ) ) + # t1w_buffer = pe.Node( + # niu.IdentityInterface(fields=['t1w_preproc', 't1w_mask', 't1w_brain']), + # name='t1w_buffer', + # ) + # t2w_buffer = pe.Node( + # niu.IdentityInterface(fields=['t2w_preproc', 't2w_mask', 't2w_brain']) + # ) # Stage 3 - Coregistration t1w2t2w_buffer = pe.Node(niu.Merge(2), name='t1w2t2w_buffer') @@ -151,7 +173,7 @@ def init_infant_anat_fit_wf( # Stage 6 results: Refined stage 2 results; may be direct copy if no refinement refined_buffer = pe.Node( - niu.IdentityInterface(fields=['t1w_mask', 't1w_brain']), + niu.IdentityInterface(fields=['t1w_mask', 't1w_brain', 't2w_mask', 't2w_brain']), name='refined_buffer', ) @@ -175,7 +197,10 @@ def init_infant_anat_fit_wf( (anat2std_buffer, outputnode, [('out', 'anat2std_xfm')]), (std2anat_buffer, outputnode, [('out', 'std2anat_xfm')]), (template_buffer, outputnode, [('out', 'template')]), - (sourcefile_buffer, outputnode, [('source_files', 't1w_valid_list')]), + (sourcefile_buffer, outputnode, [ + ('t1w_source_files', 't1w_valid_list'), + ('t2w_source_files', 't2w_valid_list'), + ]), (surfaces_buffer, outputnode, [ ('white', 'white'), ('pial', 'pial'), @@ -213,7 +238,7 @@ def init_infant_anat_fit_wf( desc = ( '\nAnatomical data preprocessing\n\n: ', f'A total of {len(t1w)} T1w and {len(t2w)} T2w images ' - 'were found within the input BIDS dataset.' 
+ 'were found within the input BIDS dataset.', ) # Stage 1: Conform & valid T1w/T2w images @@ -236,11 +261,30 @@ def init_infant_anat_fit_wf( name='t1w_template_wf', ) ds_t1w_template_wf = init_ds_template_wf( - modality='T1w', + image_type='T1w', output_dir=output_dir, num_anat=num_t1w, name='ds_t1w_template_wf', ) + + workflow.connect([ + (inputnode, t1w_template_wf, [('t1w', 'inputnode.anat_files')]), + (t1w_template_wf, t1w_validate, [('outputnode.anat_ref', 'in_file')]), + (t1w_template_wf, sourcefile_buffer, [ + ('outputnode.anat_valid_list', 't1w_source_files'), + ]), + (t1w_template_wf, anat_reports_wf, [ + ('outputnode.out_report', 'inputnode.anat_conform_report'), + ]), + (t1w_template_wf, ds_t1w_template_wf, [ + ('outputnode.anat_realign_xfm', 'inputnode.anat_ref_xfms'), + ]), + (sourcefile_buffer, ds_t1w_template_wf, [ + ('t1w_source_files', 'inputnode.source_files'), + ]), + (anat_buffer, ds_t1w_template_wf, [('t1w_preproc', 'inputnode.anat_preproc')]), + (ds_t1w_template_wf, outputnode, [('outputnode.t1w_preproc', 't1w_preproc')]), + ]) # fmt:skip else: LOGGER.info('ANAT Found preprocessed T1w - skipping Stage 1') desc += ( @@ -252,14 +296,14 @@ def init_infant_anat_fit_wf( sourcefile_buffer.inputs.source_files = [precomputed.t1w_preproc] workflow.connect([ - (t1w_validate, t1w_buffer, [('out_file', 't1w_preproc')]), - (t1w_buffer, outputnode, [('t1w_preproc', 't1w_preproc')]), + (t1w_validate, anat_buffer, [('out_file', 't1w_preproc')]), + (anat_buffer, outputnode, [('t1w_preproc', 't1w_preproc')]), ]) # fmt:skip if not precomputed.t2w_preproc: LOGGER.info('ANAT Stage 1: Adding T2w template workflow') desc += ( - 'The T1-weighted (T2w) image was denoised and corrected for intensity ' + 'The T2-weighted (T2w) image was denoised and corrected for intensity ' 'non-uniformity (INU)' ) @@ -272,11 +316,30 @@ def init_infant_anat_fit_wf( name='t2w_template_wf', ) ds_t2w_template_wf = init_ds_template_wf( - modality='T2w', + image_type='T2w', output_dir=output_dir, num_anat=num_t2w, name='ds_t2w_template_wf', ) + + workflow.connect([ + (inputnode, t2w_template_wf, [('t2w', 'inputnode.anat_files')]), + (t2w_template_wf, t2w_validate, [('outputnode.anat_ref', 'in_file')]), + (t2w_template_wf, sourcefile_buffer, [ + ('outputnode.anat_valid_list', 't2w_source_files'), + ]), + (t2w_template_wf, anat_reports_wf, [ + ('outputnode.out_report', 'inputnode.anat_conform_report'), + ]), + (t2w_template_wf, ds_t2w_template_wf, [ + ('outputnode.anat_realign_xfm', 'inputnode.anat_ref_xfms'), + ]), + (sourcefile_buffer, ds_t2w_template_wf, [ + ('t2w_source_files', 'inputnode.source_files'), + ]), + (anat_buffer, ds_t2w_template_wf, [('t2w_preproc', 'inputnode.anat_preproc')]), + (ds_t2w_template_wf, outputnode, [('outputnode.t2w_preproc', 't2w_preproc')]), + ]) # fmt:skip else: LOGGER.info('ANAT Found preprocessed T2w - skipping Stage 1') desc += ( @@ -285,20 +348,20 @@ def init_infant_anat_fit_wf( ) t2w_validate.inputs.in_file = precomputed.t2w_preproc - sourcefile_buffer.inputs.source_files = [precomputed.t2w_preproc] + sourcefile_buffer.inputs.t2w_source_files = [precomputed.t2w_preproc] workflow.connect([ - (t2w_validate, t2w_buffer, [('out_file', 't2w_preproc')]), - (t2w_buffer, outputnode, [('t2w_preproc', 't2w_preproc')]), + (t2w_validate, anat_buffer, [('out_file', 't2w_preproc')]), + (anat_buffer, outputnode, [('t2w_preproc', 't2w_preproc')]), ]) # fmt:skip - # Stage 2: Use previously computed mask or calculate # If we only have one mask (could be either T1w/T2w), # just apply transform to 
get it in the other space only_t1w_mask = precomputed.t1w_mask and not precomputed.t2w_mask only_t2w_mask = precomputed.t2w_mask and not precomputed.t1w_mask + save_t1w_mask = True if precomputed.t1w_mask or only_t2w_mask: desc += ( ' A pre-computed T1w brain mask was provided as input and ' @@ -306,26 +369,23 @@ def init_infant_anat_fit_wf( ) # A mask is available and will be applied apply_t1w_mask = pe.Node(ApplyMask(), name='apply_t1w_mask') - workflow.connect([ - (t1w_validate, apply_t1w_mask, [('out_file', 'in_file')]), - (refined_buffer, outputnode, [('t1w_mask', 't1w_mask')]) - ]) # fmt:skip + workflow.connect(t1w_validate, 'out_file', apply_t1w_mask, 'in_file') if precomputed.t1w_mask: LOGGER.info('ANAT Found T1w brain mask') - t1w_buffer.inputs.t1w_mask = precomputed.t1w_mask - # If we have a mask, always apply it + save_t1w_mask = False + anat_buffer.inputs.t1w_mask = precomputed.t1w_mask apply_t1w_mask.inputs.in_mask = precomputed.t1w_mask + workflow.connect(refined_buffer, 't1w_mask', outputnode, 't1w_mask') elif only_t2w_mask: LOGGER.info('ANAT No T1w brain mask but a T2w mask is available') transform_t2w_mask = pe.Node( - ApplyTransforms(interpolation='MultiLabel'), - name='transform_t2w_mask' + ApplyTransforms(interpolation='MultiLabel'), name='transform_t2w_mask' ) workflow.connect([ (refined_buffer, transform_t2w_mask, [('t2w_mask', 'input_image')]), - (t2w_buffer, transform_t2w_mask, [('t2w_preproc', 'reference_image')]), + (anat_buffer, transform_t2w_mask, [('t2w_preproc', 'reference_image')]), (t2w2t1w_buffer, transform_t2w_mask, [('t2w2t1w_xfm', 'transforms')]), (transform_t2w_mask, apply_t1w_mask, [('output_image', 'in_file')]), ]) # fmt:skip @@ -338,73 +398,113 @@ def init_infant_anat_fit_wf( ) workflow.connect([ (apply_t1w_mask, n4_only_wf, [('out_file', 'inputnode.in_files')]), - (n4_only_wf, t1w_buffer, [ + (n4_only_wf, anat_buffer, [ (('outputnode.bias_corrected', pop_file), 't1w_preproc'), (('outputnode.out_file', pop_file), 't1w_brain'), ]), ]) # fmt:skip else: LOGGER.info('ANAT Applying T1w mask to precomputed T1w') - workflow.connect(apply_t1w_mask, 'out_file', t1w_buffer, 't1w_brain') + workflow.connect(apply_t1w_mask, 'out_file', anat_buffer, 't1w_brain') else: # T2w will be used for brain extraction # so just use the one from the coregistration workflow - workflow.connect([ - () - ]) - - if precomputed.t2w_mask: - LOGGER.info('ANAT Found T2w brain mask') + ... - t2w_buffer.inputs.t2w_mask = precomputed.t2w_mask - # If we have a mask, always apply it - apply_t2w_mask = pe.Node(ApplyMask(in_mask=precomputed.t2w_mask), name='apply_t2w_mask') + if save_t1w_mask: + ds_t1w_mask_wf = init_ds_mask_wf( + bids_root=bids_root, + output_dir=output_dir, + mask_type='brain', + name='ds_t1w_mask_wf', + ) workflow.connect([ - (t2w_validate, apply_t2w_mask, [('out_file', 'in_file')]), - (refined_buffer, outputnode, [('t2w_mask', 't2w_mask')]) + (sourcefile_buffer, ds_t1w_mask_wf, [('t1w_source_files', 'inputnode.source_files')]), + (refined_buffer, ds_t1w_mask_wf, [('t1w_mask', 'inputnode.mask_file')]), + (ds_t1w_mask_wf, outputnode, [('outputnode.mask_file', 't1w_mask')]), ]) # fmt:skip + save_t2w_mask = True + if precomputed.t2w_mask or only_t1w_mask: + desc += ( + ' A pre-computed T2w brain mask was provided as input and ' + 'used throughout the workflow.' 
+        )
+        # A mask is available and will be applied
+        apply_t2w_mask = pe.Node(ApplyMask(), name='apply_t2w_mask')
+        workflow.connect(t2w_validate, 'out_file', apply_t2w_mask, 'in_file')
+        if precomputed.t2w_mask:
+            LOGGER.info('ANAT Found T2w brain mask')
+
+            save_t2w_mask = False
+            anat_buffer.inputs.t2w_mask = precomputed.t2w_mask
+            apply_t2w_mask.inputs.in_mask = precomputed.t2w_mask
+            workflow.connect(refined_buffer, 't2w_mask', outputnode, 't2w_mask')
+        elif only_t1w_mask:
+            LOGGER.info('ANAT No T2w brain mask but a T1w mask is available')
+
+            transform_t2w_mask = pe.Node(
+                ApplyTransforms(interpolation='MultiLabel'), name='transform_t1w_mask'
+            )
+            workflow.connect([
+                (refined_buffer, transform_t2w_mask, [('t1w_mask', 'input_image')]),
+                (anat_buffer, transform_t2w_mask, [('t2w_preproc', 'reference_image')]),
+                (coreg_buffer, transform_t2w_mask, [('t2w2t1w_xfm', 'transforms')]),
+                (transform_t2w_mask, apply_t2w_mask, [('output_image', 'in_file')]),
+                (apply_t2w_mask, refined_buffer, [('out_file', 't2w_mask')]),
+            ])  # fmt:skip
 
         if not precomputed.t2w_preproc:
-            LOGGER.info('ANAT Skipping skull-strip, INU-correction only')
+            LOGGER.info('ANAT Skipping T2w skull-strip, INU-correction only')
             n4_only_wf = init_n4_only_wf(
                 omp_nthreads=omp_nthreads,
                 atropos_use_random_seed=not skull_strip_fixed_seed,
             )
             workflow.connect([
                 (apply_t2w_mask, n4_only_wf, [('out_file', 'inputnode.in_files')]),
-                (n4_only_wf, t2w_buffer, [
+                (n4_only_wf, anat_buffer, [
                     (('outputnode.bias_corrected', pop_file), 't2w_preproc'),
                     (('outputnode.out_file', pop_file), 't2w_brain'),
                 ]),
             ])  # fmt:skip
         else:
             LOGGER.info('ANAT Applying T2w mask to precomputed T2w')
-            workflow.connect(apply_t2w_mask, 'out_file', t2w_buffer, 't2w_brain')
-    elif only_t1w_mask:
-        workflow.connect([
+            workflow.connect(apply_t2w_mask, 'out_file', anat_buffer, 't2w_brain')
 
-        ])
     else:
         LOGGER.info('ANAT Stage 2: Preparing brain extraction workflow')
         if skull_strip_mode == 'auto':
             run_skull_strip = not all(_is_skull_stripped(img) for img in t1w)
         else:
             run_skull_strip = {'force': True, 'skip': False}[skull_strip_mode]
+        ...
 
+    if save_t2w_mask:
+        ds_t2w_mask_wf = init_ds_mask_wf(
+            bids_root=bids_root,
+            output_dir=output_dir,
+            mask_type='brain',
+            name='ds_t2w_mask_wf',
+        )
+        workflow.connect([
+            (sourcefile_buffer, ds_t2w_mask_wf, [('t2w_source_files', 'inputnode.source_files')]),
+            (refined_buffer, ds_t2w_mask_wf, [('t2w_mask', 'inputnode.mask_file')]),
+            (ds_t2w_mask_wf, outputnode, [('outputnode.mask_file', 't2w_mask')]),
+        ])  # fmt:skip
 
     # Stage 3: Coregistration
     # To use the found xfm, requires both precomputed anatomicals to be found as well
-    if precomputed.t1w_preproc and precomputed.t2w_preproc and precomputed.t1w2t2w_xfm:
-        LOGGER.info('ANAT Found T1w-T2w xfm')
-        desc += (
-            ' A T1w-T2w coregistration transform was provided as input and used throughout the workflow.'
-        )
+    if precomputed.t1w_preproc and precomputed.t2w_preproc:
+        if precomputed.t1w2t2w_xfm:
+            LOGGER.info('ANAT Found T1w-T2w xfm')
+            desc += (
+                ' A T1w-T2w coregistration transform was provided as input and used '
+                'throughout the workflow.'
+            )
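+            # Seed the buffer with the precomputed forward transform; the
+            # reverse (T2w->T1w) is seeded below only if it was also found.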
+ coreg_buffer.inputs.t1w2t2w_xfm = precomputed.t1w2t2w_xfm + if precomputed.t2w2t1w_xfm: + LOGGER.info('ANAT Found T2w-T1w xfm') + coreg_buffer.inputs.t2w2t1w_xfm = precomputed.t2w2t1w_xfm else: - LOGGER.info('ANAT Coregistering anatomicals') - desc += ( - ' The T1w and T2w reference volumes were co-registered using ANTs.' - ) + LOGGER.info('ANAT Coregistering anatomical references') + desc += ' The T1w and T2w reference volumes were co-registered using ANTs.' coregistration_wf = init_coregistration_wf( omp_nthreads=omp_nthreads, @@ -413,20 +513,51 @@ def init_infant_anat_fit_wf( t1w_mask=False, probmap=not precomputed.t2w_mask, ) - workflow.connect([ - (t1w_buffer, coregistration_wf, [('t1w_preproc', 'inputnode.in_t1w')]), - (t2w_buffer, coregistration_wf, [ + (anat_buffer, coregistration_wf, [ + ('t1w_preproc', 'inputnode.in_t1w'), ('t2w_preproc', 'inputnode.in_t2w'), ('t2w_mask', 'inputnode.in_mask'), ]), - (coregistration_wf, t1w2t2w_buffer, [('outputnode.t1w2t2w_xfm', 't1w2t2w_xfm')]), + (coregistration_wf, coreg_buffer, [ + ('outputnode.t1w2t2w_xfm', 't1w2t2w_xfm'), + ('outputnode.t2w2t1w_xfm', 't2w2t1w_xfm'), + ]), ]) # fmt:skip + # At this point, we should decide which anatomical we will be using going forward: + # This will depend on the age of the participant, as myelination should be somewhat complete + # by 9+ months + image_type = 't2w' if age_months >= 9 else 't1w' + anat_aseg = getattr(precomputed, f'{image_type}_aseg', False) + seg_method = 'jlf' if config.execution.segmentation_atlases_dir else 'fast' + # Stage 4: Segmentation - if precomputed.t1w_dseg: - ... + have_dseg = precomputed.t1w_dseg or precomputed.t2w_dseg + have_tpms = precomputed.t1w_tpms or precomputed.t2w_tpms + if not (have_dseg and have_tpms): + LOGGER.info('ANAT Stage 4: Tissue segmentation') + segmentation_wf = init_segmentation_wf( + sloppy=sloppy, + method=seg_method, + image_type=image_type.capitalize(), + omp_nthreads=omp_nthreads, + has_aseg=bool(anat_aseg), + ) + + workflow.connect([ + (anat_buffer, segmentation_wf, [(f'{image_type}_brain', 'anat_brain')]), + (segmentation_wf, seg_buffer, [ + ('outputnode.anat_dseg', 'anat_dseg'), + ('outputnode.anat_tpms', 'anat_tpms'), + ]), + ]) # fmt:skip + if anat_aseg or seg_method == 'jlf': + workflow.connect(segmentation_wf, 'outputnode.anat_aseg', seg_buffer, 'anat_aseg') + if anat_aseg: + segmentation_wf.inputs.inputnode.anat_aseg = anat_aseg + # TODO: datasink workflow.__desc__ = desc return workflow @@ -454,5 +585,5 @@ def init_infant_single_anat_fit_wf( desc = ( '\nAnatomical data preprocessing\n\n: ', f'A total of {len(anatomicals)} {modality} images were found ' - 'within the input BIDS dataset.\n' + 'within the input BIDS dataset.\n', ) From 1a147def442a41edb1849a286a0bef164190a672 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 3 Apr 2024 14:06:04 -0400 Subject: [PATCH 008/142] ENH: Add segmentation fit step --- nibabies/workflows/fit.py | 75 ++++++++++++++++++++++++++++----------- 1 file changed, 54 insertions(+), 21 deletions(-) diff --git a/nibabies/workflows/fit.py b/nibabies/workflows/fit.py index 7a5f96b9..04a0ffaa 100644 --- a/nibabies/workflows/fit.py +++ b/nibabies/workflows/fit.py @@ -9,11 +9,14 @@ from niworkflows.utils.connections import pop_file from smriprep.workflows.anatomical import init_anat_template_wf from smriprep.workflows.outputs import ( + init_ds_dseg_wf, init_ds_mask_wf, init_ds_template_wf, + init_ds_tpms_wf, ) from nibabies import config +from nibabies.workflows.anatomical.registration import init_coregistration_wf 
from nibabies.workflows.anatomical.segmentation import init_segmentation_wf LOGGER = logging.getLogger('nipype.workflow') @@ -149,17 +152,17 @@ def init_infant_anat_fit_wf( ] ) ) - # t1w_buffer = pe.Node( - # niu.IdentityInterface(fields=['t1w_preproc', 't1w_mask', 't1w_brain']), - # name='t1w_buffer', - # ) - # t2w_buffer = pe.Node( - # niu.IdentityInterface(fields=['t2w_preproc', 't2w_mask', 't2w_brain']) - # ) - - # Stage 3 - Coregistration - t1w2t2w_buffer = pe.Node(niu.Merge(2), name='t1w2t2w_buffer') - t2w2t1w_buffer = pe.Node(niu.Merge(2), name='t2w2t1w_buffer') + + # At this point, we should decide which anatomical we will be using going forward: + # This will depend on the age of the participant, as myelination should be somewhat complete + # by 9+ months + image_type = 't2w' if age_months >= 9 else 't1w' + + # Stage 3 - Coregistration transforms + coreg_buffer = pe.Node( + niu.IdentityInterface(fields=['t1w2t2w_xfm', 't2w2t1w_xfm']), + name='coreg_buffer', + ) # Stage 4 - Segmentation seg_buffer = pe.Node( @@ -386,7 +389,7 @@ def init_infant_anat_fit_wf( workflow.connect([ (refined_buffer, transform_t2w_mask, [('t2w_mask', 'input_image')]), (anat_buffer, transform_t2w_mask, [('t2w_preproc', 'reference_image')]), - (t2w2t1w_buffer, transform_t2w_mask, [('t2w2t1w_xfm', 'transforms')]), + (coreg_buffer, transform_t2w_mask, [('t2w2t1w_xfm', 'transforms')]), (transform_t2w_mask, apply_t1w_mask, [('output_image', 'in_file')]), ]) # fmt:skip @@ -449,7 +452,7 @@ def init_infant_anat_fit_wf( workflow.connect([ (refined_buffer, transform_t2w_mask, [('t1w_mask', 'input_image')]), (anat_buffer, transform_t2w_mask, [('t2w_preproc', 'reference_image')]), - (t2w2t1w_buffer, transform_t2w_mask, [('t2w2t1w_xfm', 'transforms')]), + (coreg_buffer, transform_t2w_mask, [('t2w2t1w_xfm', 'transforms')]), (transform_t2w_mask, apply_t2w_mask, [('output_image', 'in_file')]), (apply_t2w_mask, refined_buffer, [('out_file', 't2w_mask')]), ]) # fmt:skip @@ -525,17 +528,13 @@ def init_infant_anat_fit_wf( ]), ]) # fmt:skip - # At this point, we should decide which anatomical we will be using going forward: - # This will depend on the age of the participant, as myelination should be somewhat complete - # by 9+ months - image_type = 't2w' if age_months >= 9 else 't1w' + # Stage 4: Segmentation + anat_dseg = getattr(precomputed, f'{image_type}_dseg', None) + anat_tpms = getattr(precomputed, f'{image_type}_tpms', None) anat_aseg = getattr(precomputed, f'{image_type}_aseg', False) seg_method = 'jlf' if config.execution.segmentation_atlases_dir else 'fast' - # Stage 4: Segmentation - have_dseg = precomputed.t1w_dseg or precomputed.t2w_dseg - have_tpms = precomputed.t1w_tpms or precomputed.t2w_tpms - if not (have_dseg and have_tpms): + if not (anat_dseg and anat_tpms): LOGGER.info('ANAT Stage 4: Tissue segmentation') segmentation_wf = init_segmentation_wf( sloppy=sloppy, @@ -555,9 +554,43 @@ def init_infant_anat_fit_wf( if anat_aseg or seg_method == 'jlf': workflow.connect(segmentation_wf, 'outputnode.anat_aseg', seg_buffer, 'anat_aseg') if anat_aseg: + LOGGER.info('ANAT Found precomputed anatomical segmentation') segmentation_wf.inputs.inputnode.anat_aseg = anat_aseg # TODO: datasink + if not anat_dseg: + ds_dseg_wf = init_ds_dseg_wf(output_dir=output_dir) + workflow.connect([ + (sourcefile_buffer, ds_dseg_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), + (segmentation_wf, ds_dseg_wf, [ + ('outputnode.anat_dseg', 'inputnode.anat_dseg'), + ]), + (ds_dseg_wf, seg_buffer, [('outputnode.anat_dseg', 
'anat_dseg')]), + ]) # fmt:skip + + if not anat_tpms: + ds_tpms_wf = init_ds_tpms_wf(output_dir=output_dir) + workflow.connect([ + (sourcefile_buffer, ds_dseg_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), + (segmentation_wf, ds_tpms_wf, [ + ('outputnode.anat_tpms', 'inputnode.anat_tpms'), + ]), + (ds_tpms_wf, seg_buffer, [('outputnode.anat_tpms', 'anat_tpms')]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Skipping segmentation workflow') + if anat_dseg: + LOGGER.info('ANAT Found discrete segmentation') + desc += 'Precomputed discrete tissue segmentations were provided as inputs.\n' + seg_buffer.inputs.anat_dseg = anat_dseg + if anat_tpms: + LOGGER.info('ANAT Found tissue probability maps') + desc += 'Precomputed tissue probabiilty maps were provided as inputs.\n' + seg_buffer.inputs.anat_tpms = anat_tpms workflow.__desc__ = desc return workflow From 7ea329a2ebd51ff44a16373eac7fab13d9574347 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 4 Apr 2024 16:48:00 -0400 Subject: [PATCH 009/142] ENH[WIP]: Add template normalization, imports, type annots --- nibabies/workflows/fit.py | 110 ++++++++++++++++++++++++++++++-------- 1 file changed, 89 insertions(+), 21 deletions(-) diff --git a/nibabies/workflows/fit.py b/nibabies/workflows/fit.py index 04a0ffaa..279525fd 100644 --- a/nibabies/workflows/fit.py +++ b/nibabies/workflows/fit.py @@ -1,4 +1,6 @@ import logging +import typing as ty +from pathlib import Path from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe @@ -8,37 +10,44 @@ from niworkflows.interfaces.nibabel import ApplyMask from niworkflows.utils.connections import pop_file from smriprep.workflows.anatomical import init_anat_template_wf +from smriprep.workflows.fit.registration import init_register_template_wf from smriprep.workflows.outputs import ( init_ds_dseg_wf, init_ds_mask_wf, init_ds_template_wf, + init_ds_template_registration_wf, init_ds_tpms_wf, ) - from nibabies import config from nibabies.workflows.anatomical.registration import init_coregistration_wf from nibabies.workflows.anatomical.segmentation import init_segmentation_wf + +if ty.TYPE_CHECKING: + from nibabies.utils.bids import Derivatives + from niworkflows.utils.spaces import Reference, SpatialReferences + LOGGER = logging.getLogger('nipype.workflow') def init_infant_anat_fit_wf( - age_months, - t1w, - t2w, - bids_root, - precomputed, - hires, - longitudinal, - omp_nthreads, - output_dir, - segmentation_atlases, - skull_strip_mode, - skull_strip_template, - sloppy, - spaces, - cifti_output, - name='infant_anat_fit_wf', + age_months: int, + t1w: list, + t2w: list, + bids_root: Path, + precomputed: Derivatives, + hires: bool, + longitudinal: bool, + omp_nthreads: int, + output_dir: Path, + segmentation_atlases: Path | None, + skull_strip_mode: ty.Literal['auto', 'skip', 'force'], + skull_strip_template: 'Reference', + sloppy: bool, + spaces: 'SpatialReferences', + recon_method: ty.Literal['freesurfer', 'infantfs', 'mcribs'] | None, + cifti_output: ty.Literal['91k', '170k'] | None, + name: str = 'infant_anat_fit_wf', ): """ Stage the anatomical preprocessing steps: @@ -218,7 +227,6 @@ def init_infant_anat_fit_wf( ]) # fmt:skip # Reporting - recon_method = config.workflow.surface_recon_method anat_reports_wf = init_anat_reports_wf( surface_recon=recon_method, output_dir=output_dir, @@ -239,9 +247,9 @@ def init_infant_anat_fit_wf( ]) # fmt:skip desc = ( - '\nAnatomical data preprocessing\n\n: ', + '\nAnatomical data preprocessing\n\n: ' f'A total of {len(t1w)} T1w and 
{len(t2w)} T2w images ' - 'were found within the input BIDS dataset.', + 'were found within the input BIDS dataset.' ) # Stage 1: Conform & valid T1w/T2w images @@ -582,7 +590,7 @@ def init_infant_anat_fit_wf( (ds_tpms_wf, seg_buffer, [('outputnode.anat_tpms', 'anat_tpms')]), ]) # fmt:skip else: - LOGGER.info('ANAT Skipping segmentation workflow') + LOGGER.info('ANAT Stage 4: Skipping segmentation workflow') if anat_dseg: LOGGER.info('ANAT Found discrete segmentation') desc += 'Precomputed discrete tissue segmentations were provided as inputs.\n' @@ -592,7 +600,67 @@ def init_infant_anat_fit_wf( desc += 'Precomputed tissue probabiilty maps were provided as inputs.\n' seg_buffer.inputs.anat_tpms = anat_tpms + # Stage 5: Normalization + templates = [] + found_xfms = {} + for template in spaces.get_spaces(nonstandard=False, dim=(3,)): + xfms = precomputed.get('transforms', {}).get(template, {}) + if set(xfms) != {'forward', 'reverse'}: + templates.append(template) + else: + found_xfms[template] = xfms + + template_buffer.inputs.in1 = list(found_xfms) + anat2std_buffer.inputs.in1 = [xfm['forward'] for xfm in found_xfms.values()] + std2anat_buffer.inputs.in1 = [xfm['reverse'] for xfm in found_xfms.values()] + + if templates: + LOGGER.info(f'ANAT Stage 5: Preparing normalization workflow for {templates}') + register_template_wf = init_register_template_wf( + sloppy=sloppy, + omp_nthreads=omp_nthreads, + templates=templates, + ) + ds_template_registration_wf = init_ds_template_registration_wf( + output_dir=output_dir, + image_type=image_type.capitalize(), + ) + + workflow.connect([ + (inputnode, register_template_wf, [('roi', 'inputnode.lesion_mask')]), + (anat_buffer, register_template_wf, [(f'{image_type}_preproc', 'inputnode.moving_image')]), + (refined_buffer, register_template_wf, [(f'{image_type}_mask', 'inputnode.moving_mask')]), + (sourcefile_buffer, ds_template_registration_wf, [ + (f'{image_type}_source_files', 'inputnode.source_files') + ]), + (register_template_wf, ds_template_registration_wf, [ + ('outputnode.template', 'inputnode.template'), + ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), + ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm'), + ]), + (register_template_wf, template_buffer, [('outputnode.template', 'in2')]), + (ds_template_registration_wf, std2anat_buffer, [('outputnode.std2anat_xfm', 'in2')]), + (ds_template_registration_wf, anat2std_buffer, [('outputnode.anat2std_xfm', 'in2')]), + ]) # fmt:skip + if found_xfms: + LOGGER.info(f'ANAT Stage 5: Found pre-computed registrations for {found_xfms}') + + # Only refine mask if necessary + if anat_mask or recon_method == None: + workflow.connect([ + (anat_buffer, refined_buffer, [ + (f'{image_type}_mask', 'anat_mask'), + (f'{image_type}_brain', 'anat_brain'), + ]), + ]) # fmt:skip + workflow.__desc__ = desc + + if recon_method == None: + LOGGER.info('ANAT Skipping Stages 6+') + return workflow + + # Stage 6: Surface reconstruction return workflow From c1e19ed05c175805769f62f641c0dcbca8240f54 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 10 Apr 2024 11:33:51 -0400 Subject: [PATCH 010/142] ENH: Add surface recon logic, move towards single anatomical reference --- nibabies/workflows/fit.py | 236 ++++++++++++++++++++++++++++++++------ 1 file changed, 203 insertions(+), 33 deletions(-) diff --git a/nibabies/workflows/fit.py b/nibabies/workflows/fit.py index 279525fd..88df7a48 100644 --- a/nibabies/workflows/fit.py +++ b/nibabies/workflows/fit.py @@ -13,20 +13,22 @@ from smriprep.workflows.fit.registration import 
init_register_template_wf
 from smriprep.workflows.outputs import (
     init_ds_dseg_wf,
+    init_ds_fs_registration_wf,
     init_ds_mask_wf,
-    init_ds_template_wf,
     init_ds_template_registration_wf,
+    init_ds_template_wf,
     init_ds_tpms_wf,
 )
 
 from nibabies import config
 from nibabies.workflows.anatomical.registration import init_coregistration_wf
 from nibabies.workflows.anatomical.segmentation import init_segmentation_wf
-
 if ty.TYPE_CHECKING:
     from niworkflows.utils.spaces import Reference, SpatialReferences
 
     from nibabies.utils.bids import Derivatives
 
 LOGGER = logging.getLogger('nipype.workflow')
 
@@ -34,6 +36,7 @@ def init_infant_anat_fit_wf(
     age_months: int,
     t1w: list,
     t2w: list,
+    flair: list,
     bids_root: Path,
     precomputed: Derivatives,
     hires: bool,
@@ -101,7 +104,9 @@ def init_infant_anat_fit_wf(
     inputnode = pe.Node(
-        niu.IdentityInterface(fields=['t1w', 't2w', 'roi', 'flair', 'subjects_dir', 'subject_id']),
+        niu.IdentityInterface(
+            fields=['t1w', 't2w', 'roi', 'flair', 'subjects_dir', 'subject_id'],
+        ),
         name='inputnode',
     )
     outputnode = pe.Node(
@@ -142,30 +147,61 @@ def init_infant_anat_fit_wf(
     # If all derivatives exist, inputnode could go unconnected, so add explicitly
     workflow.add_nodes([inputnode])
 
+    # Duplicates will be added to the sourcefile / anat buffers as `anat_...` to facilitate
+    # usage of the preferred anatomical
+
     # Stage 1 inputs (filtered)
     sourcefile_buffer = pe.Node(
-        niu.IdentityInterface(fields=['t1w_source_files', 't2w_source_files']),
+        niu.IdentityInterface(
+            fields=['t1w_source_files', 't2w_source_files', 'anat_source_files'],
+        ),
        name='sourcefile_buffer',
    )
 
     # Stage 2 - Anatomicals
+    t1w_buffer = pe.Node(
+        niu.IdentityInterface(fields=['t1w_preproc', 't1w_mask', 't1w_brain']),
+        name='t1w_buffer',
+    )
+    t2w_buffer = pe.Node(
+        niu.IdentityInterface(fields=['t2w_preproc', 't2w_mask', 't2w_brain']),
+        name='t2w_buffer',
+    )
     anat_buffer = pe.Node(
         niu.IdentityInterface(
             fields=[
-                't1w_preproc',
-                't1w_mask',
-                't1w_brain',
-                't2w_preproc',
-                't2w_mask',
-                't2w_brain',
+                'anat_preproc',
+                'anat_mask',
+                'anat_brain',
             ]
-        )
+        ),
+        name='anat_buffer',
    )
 
-    # At this point, we should decide which anatomical we will be using going forward:
+    # At this point, we should decide which anatomical we will use as the reference space.
# This will depend on the age of the participant, as myelination should be somewhat complete # by 9+ months - image_type = 't2w' if age_months >= 9 else 't1w' + reference_anat = 't2w' if age_months <= 8 else 't1w' + image_type = reference_anat.capitalize() + + if reference_anat == 't1w': + LOGGER.info('ANAT: Using T1w as the reference anatomical') + workflow.connect([ + (t1w_buffer, anat_buffer, [ + ('t1w_preproc', 'anat_preproc'), + ('t1w_mask', 'anat_mask'), + ('t1w_brain', 'anat_brain'), + ]), + ]) # fmt:skip + elif reference_anat == 't2w': + LOGGER.info('ANAT: Using T2w as the reference anatomical') + workflow.connect([ + (t2w_buffer, anat_buffer, [ + ('t2w_preproc', 'anat_preproc'), + ('t2w_mask', 'anat_mask'), + ('t2w_brain', 'anat_brain'), + ]), + ]) # fmt:skip # Stage 3 - Coregistration transforms coreg_buffer = pe.Node( @@ -185,7 +221,7 @@ def init_infant_anat_fit_wf( # Stage 6 results: Refined stage 2 results; may be direct copy if no refinement refined_buffer = pe.Node( - niu.IdentityInterface(fields=['t1w_mask', 't1w_brain', 't2w_mask', 't2w_brain']), + niu.IdentityInterface(fields=['anat_mask', 'anat_brain']), name='refined_buffer', ) @@ -203,8 +239,8 @@ def init_infant_anat_fit_wf( workflow.connect([ (seg_buffer, outputnode, [ - ('t1w_dseg', 't1w_dseg'), - ('t1w_tpms', 't1w_tpms'), + ('anat_dseg', 'anat_dseg'), + ('anat_tpms', 'anat_tpms'), ]), (anat2std_buffer, outputnode, [('out', 'anat2std_xfm')]), (std2anat_buffer, outputnode, [('out', 'std2anat_xfm')]), @@ -235,10 +271,10 @@ def init_infant_anat_fit_wf( workflow.connect([ (outputnode, anat_reports_wf, [ - ('t1w_valid_list', 'inputnode.source_file'), - ('t1w_preproc', 'inputnode.t1w_preproc'), - ('t1w_mask', 'inputnode.t1w_mask'), - ('t1w_dseg', 'inputnode.t1w_dseg'), + ('anat_valid_list', 'inputnode.source_file'), + ('anat_preproc', 'inputnode.anat_preproc'), + ('anat_mask', 'inputnode.anat_mask'), + ('anat_dseg', 'inputnode.anat_dseg'), ('template', 'inputnode.template'), ('anat2std_xfm', 'inputnode.anat2std_xfm'), ('subjects_dir', 'inputnode.subjects_dir'), @@ -278,6 +314,12 @@ def init_infant_anat_fit_wf( name='ds_t1w_template_wf', ) + if reference_anat == 't1w': + workflow.connect( + t1w_template_wf, 'outputnode.anat_valid_list', + sourcefile_buffer, 'anat_source_files', + ) # fmt:skip + workflow.connect([ (inputnode, t1w_template_wf, [('t1w', 'inputnode.anat_files')]), (t1w_template_wf, t1w_validate, [('outputnode.anat_ref', 'in_file')]), @@ -298,13 +340,12 @@ def init_infant_anat_fit_wf( ]) # fmt:skip else: LOGGER.info('ANAT Found preprocessed T1w - skipping Stage 1') - desc += ( - ' A preprocessed T1w image was provided as a precomputed input and used as ' - 'T1w-reference through the workflow.' - ) + desc += ' A preprocessed T1w image was provided as input.' 
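        # The same short-circuit recurs for every precomputed derivative handled in
        # this function: when the Derivatives lookup already holds an output, the
        # stage that would compute it is skipped and the file is wired directly into
        # its buffer. A sketch of the pattern (names as used in this block):
        #
        #     if precomputed.t1w_preproc:  # reuse: validate and record the source
        #         t1w_validate.inputs.in_file = precomputed.t1w_preproc
        #     else:                        # compute: run the Stage 1 template workflow
        #         workflow.connect([(t1w_template_wf, t1w_validate, ...)])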
t1w_validate.inputs.in_file = precomputed.t1w_preproc - sourcefile_buffer.inputs.source_files = [precomputed.t1w_preproc] + sourcefile_buffer.inputs.t1w_source_files = [precomputed.t1w_preproc] + if reference_anat == 't1w': + sourcefile_buffer.inputs.anat_source_files = [precomputed.t1w_preproc] workflow.connect([ (t1w_validate, anat_buffer, [('out_file', 't1w_preproc')]), @@ -333,6 +374,12 @@ def init_infant_anat_fit_wf( name='ds_t2w_template_wf', ) + if reference_anat == 't2w': + workflow.connect( + t2w_template_wf, 'outputnode.anat_valid_list', + sourcefile_buffer, 'anat_source_files', + ) # fmt:skip + workflow.connect([ (inputnode, t2w_template_wf, [('t2w', 'inputnode.anat_files')]), (t2w_template_wf, t2w_validate, [('outputnode.anat_ref', 'in_file')]), @@ -353,13 +400,12 @@ def init_infant_anat_fit_wf( ]) # fmt:skip else: LOGGER.info('ANAT Found preprocessed T2w - skipping Stage 1') - desc += ( - ' A preprocessed T2w image was provided as a precomputed input and used as ' - 'T2w-reference through the workflow.' - ) + desc += ' A preprocessed T2w image was provided as input.' t2w_validate.inputs.in_file = precomputed.t2w_preproc sourcefile_buffer.inputs.t2w_source_files = [precomputed.t2w_preproc] + if precomputed.t2w_preproc: + sourcefile_buffer.inputs.anat_source_files = [precomputed.t2w_preproc] workflow.connect([ (t2w_validate, anat_buffer, [('out_file', 't2w_preproc')]), @@ -372,6 +418,19 @@ def init_infant_anat_fit_wf( only_t1w_mask = precomputed.t1w_mask and not precomputed.t2w_mask only_t2w_mask = precomputed.t2w_mask and not precomputed.t1w_mask + anat_mask = None + transform_mask = False + if reference_anat == 't1w': + anat_mask = precomputed.t1w_mask + if not anat_mask and precomputed.t2w_mask: + anat_mask = precomputed.t2w_mask + transform_mask = True + elif reference_anat == 't2w': + anat_mask = precomputed.t2w_mask + if not anat_mask and precomputed.t1w_mask: + anat_mask = precomputed.t1w_mask + transform_mask = True + save_t1w_mask = True if precomputed.t1w_mask or only_t2w_mask: desc += ( @@ -649,18 +708,129 @@ def init_infant_anat_fit_wf( if anat_mask or recon_method == None: workflow.connect([ (anat_buffer, refined_buffer, [ - (f'{image_type}_mask', 'anat_mask'), - (f'{image_type}_brain', 'anat_brain'), + ('anat_mask', 'anat_mask'), + ('anat_brain', 'anat_brain'), ]), ]) # fmt:skip workflow.__desc__ = desc - if recon_method == None: + if not recon_method: LOGGER.info('ANAT Skipping Stages 6+') return workflow # Stage 6: Surface reconstruction + + if recon_method == 'mcribs': + from nibabies.workflows.anatomical.surfaces import init_mcribs_surface_recon_wf + + LOGGER.info('ANAT Stage 6: Preparing M-CRIB-S reconstruction workflow') + surface_recon_wf = init_mcribs_surface_recon_wf( + omp_nthreads=omp_nthreads, + use_aseg=bool(anat_aseg), + use_mask=True, + mcribs_dir=str(config.execution.mcribs_dir), + ) + + workflow.connect([ + (t2w_buffer, surface_recon_wf, [ + ('t2w_preproc', 'inputnode.t2w'), + ('t2w_mask', 'inputnode.t2w_mask'), + ]), + (anat_buffer, surface_recon_wf, [ + ('anat_aseg', 'inputnode.t2w_aseg'), + ]), + ]) # fmt:skip + + else: + from smriprep.utils.misc import fs_isRunning + + fs_isrunning = pe.Node( + niu.Function(function=fs_isRunning), overwrite=True, name='fs_isrunning' + ) + fs_isrunning.inputs.logger = LOGGER + + if recon_method == 'freesurfer': + from smriprep.workflows.surfaces import init_surface_recon_wf + + LOGGER.info('ANAT Stage 6: Preparing FreeSurfer recon-all workflow') + fs_isrunning = pe.Node( + niu.Function(function=fs_isRunning), 
overwrite=True, name='fs_isrunning' + ) + fs_isrunning.inputs.logger = LOGGER + + surface_recon_wf = init_surface_recon_wf( + name='surface_recon_wf', + omp_nthreads=omp_nthreads, + hires=hires, + fs_no_resume=fs_no_resume, + precomputed=precomputed, + ) + + if t2w or flair: + t2w_or_flair = 'T2-weighted' if t2w else 'FLAIR' + if surface_recon_wf.__desc__: + surface_recon_wf.__desc__ += ( + f'A {t2w_or_flair} image was used to improve pial surface refinement.' + ) + workflow.connect([ + (inputnode, surface_recon_wf, [ + ('t2w', 'inputnode.t2w'), + ('flair', 'inputnode.flair'), + ]), + ]) # fmt:skip + + elif recon_method == 'infantfs': + from nibabies.workflows.anatomical.surfaces import init_infantfs_surface_recon_wf + + LOGGER.info('ANAT Stage 6: Preparing Infant FreeSurfer workflow') + surface_recon_wf = init_infantfs_surface_recon_wf( + age_months=age_months, + use_aseg=bool(anat_mask), + ) + + # Use the T1w image + workflow.connect([ + (inputnode, fs_isrunning, [ + ('subjects_dir', 'subjects_dir'), + ('subject_id', 'subject_id'), + ]), + (inputnode, surface_recon_wf, [ + ('subject_id', 'inputnode.subject_id'), + ]), + (fs_isrunning, surface_recon_wf, [('out', 'inputnode.subjects_dir')]), + (t1w_validate, surface_recon_wf, [('out_file', 'inputnode.t1w')]), + (t1w_buffer, surface_recon_wf, [('t1w_brain', 'inputnode.skullstripped_t1')]), + (surface_recon_wf, outputnode, [ + ('outputnode.subjects_dir', 'subjects_dir'), + ('outputnode.subject_id', 'subject_id'), + ]), + ]) # fmt:skip + + fsnative_xfms = precomputed.get('transforms', {}).get('fsnative') + if not fsnative_xfms: + ds_fs_registration_wf = init_ds_fs_registration_wf(output_dir=output_dir) + # fmt:off + workflow.connect([ + (sourcefile_buffer, ds_fs_registration_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), + (surface_recon_wf, ds_fs_registration_wf, [ + ('outputnode.fsnative2t1w_xfm', 'inputnode.fsnative2anat_xfm'), + ]), + (ds_fs_registration_wf, outputnode, [ + ('outputnode.fsnative2anat_xfm', 'fsnative2anat_xfm'), + ]), + ]) + # fmt:on + elif 'reverse' in fsnative_xfms: + LOGGER.info('ANAT Found fsnative-to-anatomical transform - skipping registration') + outputnode.inputs.fsnative2anat_xfm = fsnative_xfms['reverse'] + else: + raise RuntimeError( + 'Found an anatomical-to-fsnative transform without the reverse. Time to handle this.' 
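+            # (Convention assumed here, mirroring the Stage 5 template transforms:
+            #  precomputed['transforms']['fsnative'] maps 'forward' to the
+            #  anatomical->fsnative affine and 'reverse' to fsnative->anatomical;
+            #  only the 'reverse' entry is consumed above.)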
+ ) + return workflow From a0dbf7cf602818228c0082b6aec8cacb62174a3c Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 10 Apr 2024 17:12:35 -0400 Subject: [PATCH 011/142] ENH: Add mask refinement, rework T1w mask logic --- nibabies/workflows/fit.py | 193 ++++++++++++++++++++++++-------------- 1 file changed, 124 insertions(+), 69 deletions(-) diff --git a/nibabies/workflows/fit.py b/nibabies/workflows/fit.py index 88df7a48..1ca2d912 100644 --- a/nibabies/workflows/fit.py +++ b/nibabies/workflows/fit.py @@ -7,7 +7,7 @@ from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms from niworkflows.interfaces.header import ValidateImage -from niworkflows.interfaces.nibabel import ApplyMask +from niworkflows.interfaces.nibabel import ApplyMask, Binarize from niworkflows.utils.connections import pop_file from smriprep.workflows.anatomical import init_anat_template_wf from smriprep.workflows.fit.registration import init_register_template_wf @@ -211,7 +211,7 @@ def init_infant_anat_fit_wf( # Stage 4 - Segmentation seg_buffer = pe.Node( - niu.IdentityInterface(fields=['anat_dseg', 'anat_tpms']), + niu.IdentityInterface(fields=['anat_dseg', 'anat_tpms', 'ants_segs']), name='seg_buffer', ) # Stage 5 - collated template names, forward and reverse transforms @@ -225,6 +225,11 @@ def init_infant_anat_fit_wf( name='refined_buffer', ) + surf2anat_buffer = pe.Node( + niu.IdentityInterface(fields=['fsnative2anat_xfm', 'anat2fsnative_xfm']), + name='surf2anat_buffer', + ) + # Stage 8 results: GIFTI surfaces surfaces_buffer = pe.Node( niu.IdentityInterface( @@ -315,10 +320,9 @@ def init_infant_anat_fit_wf( ) if reference_anat == 't1w': - workflow.connect( - t1w_template_wf, 'outputnode.anat_valid_list', - sourcefile_buffer, 'anat_source_files', - ) # fmt:skip + workflow.connect([ + (t1w_template_wf, sourcefile_buffer, [('outputnode.anat_valid_list', 'anat_source_files')]), + ]) # fmt:skip workflow.connect([ (inputnode, t1w_template_wf, [('t1w', 'inputnode.anat_files')]), @@ -348,8 +352,7 @@ def init_infant_anat_fit_wf( sourcefile_buffer.inputs.anat_source_files = [precomputed.t1w_preproc] workflow.connect([ - (t1w_validate, anat_buffer, [('out_file', 't1w_preproc')]), - (anat_buffer, outputnode, [('t1w_preproc', 't1w_preproc')]), + (t1w_validate, t1w_buffer, [('out_file', 't1w_preproc')]), ]) # fmt:skip if not precomputed.t2w_preproc: @@ -408,91 +411,119 @@ def init_infant_anat_fit_wf( sourcefile_buffer.inputs.anat_source_files = [precomputed.t2w_preproc] workflow.connect([ - (t2w_validate, anat_buffer, [('out_file', 't2w_preproc')]), - (anat_buffer, outputnode, [('t2w_preproc', 't2w_preproc')]), + (t2w_validate, t2w_buffer, [('out_file', 't2w_preproc')]), ]) # fmt:skip # Stage 2: Use previously computed mask or calculate # If we only have one mask (could be either T1w/T2w), # just apply transform to get it in the other space - only_t1w_mask = precomputed.t1w_mask and not precomputed.t2w_mask - only_t2w_mask = precomputed.t2w_mask and not precomputed.t1w_mask + # only_t1w_mask = precomputed.t1w_mask and not precomputed.t2w_mask + # only_t2w_mask = precomputed.t2w_mask and not precomputed.t1w_mask + t1w_mask = precomputed.t1w_mask + t2w_mask = precomputed.t2w_mask anat_mask = None - transform_mask = False - if reference_anat == 't1w': - anat_mask = precomputed.t1w_mask - if not anat_mask and precomputed.t2w_mask: - anat_mask = precomputed.t2w_mask - transform_mask = True - elif reference_anat == 't2w': - anat_mask = 
precomputed.t2w_mask - if not anat_mask and precomputed.t1w_mask: - anat_mask = precomputed.t1w_mask - transform_mask = True + # T1w masking - define pre-emptively + apply_t1w_mask = pe.Node(ApplyMask(), name='apply_t1w_mask') - save_t1w_mask = True - if precomputed.t1w_mask or only_t2w_mask: - desc += ( - ' A pre-computed T1w brain mask was provided as input and ' - 'used throughout the workflow.' - ) - # A mask is available and will be applied - apply_t1w_mask = pe.Node(ApplyMask(), name='apply_t1w_mask') - workflow.connect(t1w_validate, 'out_file', apply_t1w_mask, 'in_file') - if precomputed.t1w_mask: - LOGGER.info('ANAT Found T1w brain mask') + if not t1w_mask: + if skull_strip_mode == 'auto': + run_t1w_skull_strip = not all(_is_skull_stripped(img) for img in t1w) + else: + run_t1w_skull_strip = {'force': True, 'skip': False}[skull_strip_mode] - save_t1w_mask = False - anat_buffer.inputs.t1w_mask = precomputed.t1w_mask - apply_t1w_mask.inputs.in_mask = precomputed.t1w_mask - workflow.connect(refined_buffer, 't1w_mask', outputnode, 't1w_mask') - elif only_t2w_mask: - LOGGER.info('ANAT No T1w brain mask but a T2w mask is available') + if not run_t1w_skull_strip: # Image is masked + desc += ( + 'The provided T1w image was previously skull-stripped; ' + 'a brain mask was derived from the input image.' + ) + if not precomputed.t1w_preproc: + LOGGER.info('ANAT Stage 2: Skipping skull-strip, INU-correction only') + + n4_only_wf = init_n4_only_wf( + omp_nthreads=omp_nthreads, + atropos_use_random_seed=not skull_strip_fixed_seed, + ) + workflow.connect([ + (t1w_validate, n4_only_wf, [('out_file', 'inputnode.in_files')]), + (n4_only_wf, t1w_buffer, [ + (('outputnode.bias_corrected', pop_file), 't1w_preproc'), + ('outputnode.out_mask', 't1w_mask'), + (('outputnode.out_file', pop_file), 't1w_brain'), + ('outputnode.out_segm', 'ants_seg'), + ]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Stage 2: Skipping skull-strip, generating mask from input') + binarize_t1w = pe.Node(Binarize(thresh_low=2), name='binarize_t1w') + workflow.connect([ + (t1w_validate, binarize_t1w, [('out_file', 'in_file')]), + (t1w_validate, t1w_buffer, [('out_file', 't1w_brain')]), + (binarize_t1w, t1w_buffer, [('out_file', 't1w_mask')]), + ]) # fmt:skip + else: + # T2w -> T1w transformation of the mask will occur if either + # 1) reusing a precomputed T2w mask + # 2) calculating with T2w template brain extraction + LOGGER.info('ANAT T2w mask will be transformed into T1w space') transform_t2w_mask = pe.Node( ApplyTransforms(interpolation='MultiLabel'), name='transform_t2w_mask' ) workflow.connect([ - (refined_buffer, transform_t2w_mask, [('t2w_mask', 'input_image')]), - (anat_buffer, transform_t2w_mask, [('t2w_preproc', 'reference_image')]), + (t2w_buffer, transform_t2w_mask, [('t2w_mask', 'input_image')]), (coreg_buffer, transform_t2w_mask, [('t2w2t1w_xfm', 'transforms')]), - (transform_t2w_mask, apply_t1w_mask, [('output_image', 'in_file')]), + (transform_t2w_mask, apply_t1w_mask, [('output_image', 'in_mask')]), + (t1w_buffer, apply_t1w_mask, [('t1w_preproc', 'in_file')]), # TODO: Unsure about this connection ]) # fmt:skip - if not precomputed.t1w_preproc: - LOGGER.info('ANAT Skipping T1w skull-strip, INU-correction only') + # Save T1w mask + ds_t1w_mask_wf = init_ds_mask_wf( + bids_root=bids_root, + output_dir=output_dir, + mask_type='brain', + name='ds_t1w_mask_wf', + ) + workflow.connect([ + (sourcefile_buffer, ds_t1w_mask_wf, [('t1w_source_files', 'inputnode.source_files')]), + ]) # fmt:skip + + if reference_anat == 
't1w': + workflow.connect([ + (refined_buffer, ds_t1w_mask_wf, [('anat_mask', 'inputnode.mask_file')]), + (ds_t1w_mask_wf, outputnode, [('outputnode.mask_file', 'anat_mask')]), + ]) # fmt:skip + else: + workflow.connect([ + (t1w_buffer, ds_t1w_mask_wf, [('t1w_mask', 'inputnode.mask_file')]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Found T1w brain mask') + if reference_anat == 't1w': + desc += ( + 'A pre-computed T1w brain mask was provided as input and used throughout the ' + 'workflow.' + ) + t1w_buffer.inputs.t1w_mask = precomputed.t1w_mask + apply_t1w_mask.inputs.in_mask = precomputed.t1w_mask + workflow.connect(t1w_validate, 'out_file', apply_t1w_mask, 'in_file') + + if not precomputed.t1w: + LOGGER.info('ANAT Skipping skull-strip, INU-correction only') n4_only_wf = init_n4_only_wf( omp_nthreads=omp_nthreads, atropos_use_random_seed=not skull_strip_fixed_seed, ) workflow.connect([ (apply_t1w_mask, n4_only_wf, [('out_file', 'inputnode.in_files')]), - (n4_only_wf, anat_buffer, [ + (n4_only_wf, t1w_buffer, [ (('outputnode.bias_corrected', pop_file), 't1w_preproc'), (('outputnode.out_file', pop_file), 't1w_brain'), ]), ]) # fmt:skip else: - LOGGER.info('ANAT Applying T1w mask to precomputed T1w') - workflow.connect(apply_t1w_mask, 'out_file', anat_buffer, 't1w_brain') - else: - # T2w will be used for brain extraction - # so just use the one from the coregistration workflow - ... - - if save_t1w_mask: - ds_t1w_mask_wf = init_ds_mask_wf( - bids_root=bids_root, - output_dir=output_dir, - mask_type='brain', - name='ds_t1w_mask_wf', - ) - workflow.connect([ - (sourcefile_buffer, ds_t1w_mask_wf, [('t1w_source_files', 'inputnode.source_files')]), - (refined_buffer, ds_t1w_mask_wf, [('t1w_mask', 'inputnode.mask_file')]), - (ds_t1w_mask_wf, outputnode, [('outputnode.mask_file', 't1w_mask')]), - ]) # fmt:skip + LOGGER.info('ANAT Skipping T1w masking') + workflow.connect(apply_t1w_mask, 'out_file', t1w_buffer, 't1w_brain') save_t2w_mask = True if precomputed.t2w_mask or only_t1w_mask: @@ -789,7 +820,7 @@ def init_infant_anat_fit_wf( use_aseg=bool(anat_mask), ) - # Use the T1w image + # Force use of the T1w image workflow.connect([ (inputnode, fs_isrunning, [ ('subjects_dir', 'subjects_dir'), @@ -810,7 +841,6 @@ def init_infant_anat_fit_wf( fsnative_xfms = precomputed.get('transforms', {}).get('fsnative') if not fsnative_xfms: ds_fs_registration_wf = init_ds_fs_registration_wf(output_dir=output_dir) - # fmt:off workflow.connect([ (sourcefile_buffer, ds_fs_registration_wf, [ ('anat_source_files', 'inputnode.source_files'), @@ -821,8 +851,7 @@ def init_infant_anat_fit_wf( (ds_fs_registration_wf, outputnode, [ ('outputnode.fsnative2anat_xfm', 'fsnative2anat_xfm'), ]), - ]) - # fmt:on + ]) # fmt:skip elif 'reverse' in fsnative_xfms: LOGGER.info('ANAT Found fsnative-to-anatomical transform - skipping registration') outputnode.inputs.fsnative2anat_xfm = fsnative_xfms['reverse'] @@ -831,6 +860,32 @@ def init_infant_anat_fit_wf( 'Found an anatomical-to-fsnative transform without the reverse. Time to handle this.' 
) + if not have_mask: + LOGGER.info('ANAT Stage 7: Preparing mask refinement workflow') + # Stage 6: Refine ANTs mask with FreeSurfer segmentation + refinement_wf = init_refinement_wf() + applyrefined = pe.Node(ApplyMask(), name='applyrefined') + + workflow.connect([ + (surface_recon_wf, refinement_wf, [ + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), + ('outputnode.subject_id', 'inputnode.subject_id'), + ]), + (surf2anat_buffer, refinement_wf, [ + ('fsnative2anat_xfm', 'inputnode.fsnative2t1w_xfm'), + ]), + (anat_buffer, refinement_wf, [ + ('anat_preproc', 'inputnode.reference_image'), + ('ants_seg', 'inputnode.ants_segs'), # TODO: Verify this is the same as dseg + ]), + (anat_buffer, applyrefined, [('anat_preproc', 'in_file')]), + (refinement_wf, applyrefined, [('outputnode.out_brainmask', 'in_mask')]), + (refinement_wf, refined_buffer, [('outputnode.out_brainmask', 'anat_mask')]), + (applyrefined, refined_buffer, [('out_file', 'anat_brain')]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Found brain mask - skipping Stage 7') + return workflow From 9138398fb867446c9c70e954449dc6d622e13793 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 11 Apr 2024 17:03:03 -0400 Subject: [PATCH 012/142] ENH: Handle surfaces/morphometrics/fsLR registration --- nibabies/workflows/fit.py | 164 +++++++++++++++++++++++++++++++++++++- 1 file changed, 160 insertions(+), 4 deletions(-) diff --git a/nibabies/workflows/fit.py b/nibabies/workflows/fit.py index 1ca2d912..60f3259a 100644 --- a/nibabies/workflows/fit.py +++ b/nibabies/workflows/fit.py @@ -9,12 +9,15 @@ from niworkflows.interfaces.header import ValidateImage from niworkflows.interfaces.nibabel import ApplyMask, Binarize from niworkflows.utils.connections import pop_file -from smriprep.workflows.anatomical import init_anat_template_wf +from smriprep.workflows.anatomical import init_anat_ribbon_wf, init_anat_template_wf +from smriprep.workflows.surfaces import init_gifti_surfaces_wf, init_gifti_morphometrics_wf from smriprep.workflows.fit.registration import init_register_template_wf from smriprep.workflows.outputs import ( init_ds_dseg_wf, init_ds_fs_registration_wf, init_ds_mask_wf, + init_ds_surface_metrics_wf, + init_ds_surfaces_wf, init_ds_template_registration_wf, init_ds_template_wf, init_ds_tpms_wf, @@ -225,9 +228,9 @@ def init_infant_anat_fit_wf( name='refined_buffer', ) - surf2anat_buffer = pe.Node( + fsnative_buffer = pe.Node( niu.IdentityInterface(fields=['fsnative2anat_xfm', 'anat2fsnative_xfm']), - name='surf2anat_buffer', + name='fsnative_buffer', ) # Stage 8 results: GIFTI surfaces @@ -871,7 +874,7 @@ def init_infant_anat_fit_wf( ('outputnode.subjects_dir', 'inputnode.subjects_dir'), ('outputnode.subject_id', 'inputnode.subject_id'), ]), - (surf2anat_buffer, refinement_wf, [ + (fsnative_buffer, refinement_wf, [ ('fsnative2anat_xfm', 'inputnode.fsnative2t1w_xfm'), ]), (anat_buffer, refinement_wf, [ @@ -886,6 +889,159 @@ def init_infant_anat_fit_wf( else: LOGGER.info('ANAT Found brain mask - skipping Stage 7') + # Stages 8-10: Surface conversion and registration + # sphere_reg is needed to generate sphere_reg_fsLR + # sphere and sulc are needed to generate sphere_reg_msm + # white, pial, midthickness and thickness are needed to resample in the cortical ribbon + # TODO: Consider paring down or splitting into a subworkflow that can be called on-demand + # A subworkflow would still need to check for precomputed outputs + needed_anat_surfs = ['white', 'pial', 'midthickness'] + needed_metrics = ['thickness', 'sulc'] + needed_spheres = 
['sphere_reg', 'sphere'] + + # Detect pre-computed surfaces + found_surfs = { + surf: sorted(precomputed[surf]) + for surf in needed_anat_surfs + needed_metrics + needed_spheres + if len(precomputed.get(surf, [])) == 2 + } + if found_surfs: + LOGGER.info(f'ANAT Stage 8: Found pre-converted surfaces for {list(found_surfs)}') + surfaces_buffer.inputs.trait_set(**found_surfs) + + # Stage 8: Surface conversion + surfs = [surf for surf in needed_anat_surfs if surf not in found_surfs] + spheres = [sphere for sphere in needed_spheres if sphere not in found_surfs] + if surfs or spheres: + LOGGER.info(f'ANAT Stage 8: Creating GIFTI surfaces for {surfs + spheres}') + if surfs: + gifti_surfaces_wf = init_gifti_surfaces_wf(surfaces=surfs) + ds_surfaces_wf = init_ds_surfaces_wf( + bids_root=str(bids_root), + output_dir=str(output_dir), + surfaces=surfs, + ) + + workflow.connect([ + (surface_recon_wf, gifti_surfaces_wf, [ + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), + ]), + (fsnative_buffer, gifti_surfaces_wf, [ + ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), + ]), + (gifti_surfaces_wf, surfaces_buffer, [ + (f'outputnode.{surf}', surf) for surf in surfs + ]), + (sourcefile_buffer, ds_surfaces_wf, [('source_files', 'inputnode.source_files')]), + (gifti_surfaces_wf, ds_surfaces_wf, [ + (f'outputnode.{surf}', f'inputnode.{surf}') for surf in surfs + ]), + ]) # fmt:skip + if spheres: + gifti_spheres_wf = init_gifti_surfaces_wf( + surfaces=spheres, to_scanner=False, name='gifti_spheres_wf' + ) + ds_spheres_wf = init_ds_surfaces_wf( + bids_root=str(bids_root), + output_dir=str(output_dir), + surfaces=spheres, + name='ds_spheres_wf', + ) + + workflow.connect([ + (surface_recon_wf, gifti_spheres_wf, [ + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), + # No transform for spheres, following HCP pipelines' lead + ]), + (gifti_spheres_wf, surfaces_buffer, [ + (f'outputnode.{sphere}', sphere) for sphere in spheres + ]), + (sourcefile_buffer, ds_spheres_wf, [('source_files', 'inputnode.source_files')]), + (gifti_spheres_wf, ds_spheres_wf, [ + (f'outputnode.{sphere}', f'inputnode.{sphere}') for sphere in spheres + ]), + ]) # fmt:skip + metrics = [metric for metric in needed_metrics if metric not in found_surfs] + if metrics: + LOGGER.info(f'ANAT Stage 8: Creating GIFTI metrics for {metrics}') + gifti_morph_wf = init_gifti_morphometrics_wf(morphometrics=metrics) + ds_morph_wf = init_ds_surface_metrics_wf( + bids_root=str(bids_root), + output_dir=str(output_dir), + metrics=metrics, + name='ds_morph_wf', + ) + + workflow.connect([ + (surface_recon_wf, gifti_morph_wf, [ + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), + ]), + (gifti_morph_wf, surfaces_buffer, [ + (f'outputnode.{metric}', metric) for metric in metrics + ]), + (sourcefile_buffer, ds_morph_wf, [('source_files', 'inputnode.source_files')]), + (gifti_morph_wf, ds_morph_wf, [ + (f'outputnode.{metric}', f'inputnode.{metric}') for metric in metrics + ]), + ]) # fmt:skip + + if 'anat_ribbon' not in precomputed: + LOGGER.info('ANAT Stage 8a: Creating cortical ribbon mask') + anat_ribbon_wf = init_anat_ribbon_wf() + ds_ribbon_mask_wf = init_ds_mask_wf( + bids_root=str(bids_root), + output_dir=str(output_dir), + mask_type='ribbon', + name='ds_ribbon_mask_wf', + ) + + workflow.connect([ + (anat_buffer, anat_ribbon_wf, [ + ('anat_preproc', 'inputnode.ref_file'), + 
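+                # (`ref_file` is assumed to fix only the output grid; the ribbon
+                #  geometry itself comes from the white/pial surfaces connected
+                #  just below)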
]), + (surfaces_buffer, anat_ribbon_wf, [ + ('white', 'inputnode.white'), + ('pial', 'inputnode.pial'), + ]), + (sourcefile_buffer, ds_ribbon_mask_wf, [('source_files', 'inputnode.source_files')]), + (anat_ribbon_wf, ds_ribbon_mask_wf, [ + ('outputnode.anat_ribbon', 'inputnode.mask_file'), + ]), + (ds_ribbon_mask_wf, outputnode, [('outputnode.mask_file', 'anat_ribbon')]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Stage 8a: Found pre-computed cortical ribbon mask') + outputnode.inputs.anat_ribbon = precomputed['anat_ribbon'] + + # Stage 9: Baseline fsLR registration + if len(precomputed.get('sphere_reg_fsLR', [])) < 2: + LOGGER.info('ANAT Stage 9: Creating fsLR registration sphere') + if recon_method == 'mcribs': + fsLR_reg_wf = init_mcribs_fsLR_reg_wf() # TODO + else: + fsLR_reg_wf = init_fsLR_reg_wf() + + ds_fsLR_reg_wf = init_ds_surfaces_wf( + bids_root=str(bids_root), + output_dir=str(output_dir), + surfaces=['sphere_reg_fsLR'], + name='ds_fsLR_reg_wf', + ) + + workflow.connect([ + (surfaces_buffer, fsLR_reg_wf, [('sphere_reg', 'inputnode.sphere_reg')]), + (sourcefile_buffer, ds_fsLR_reg_wf, [('source_files', 'inputnode.source_files')]), + (fsLR_reg_wf, ds_fsLR_reg_wf, [ + ('outputnode.sphere_reg_fsLR', 'inputnode.sphere_reg_fsLR') + ]), + (ds_fsLR_reg_wf, fsLR_buffer, [('outputnode.sphere_reg_fsLR', 'sphere_reg_fsLR')]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Stage 9: Found pre-computed fsLR registration sphere') + fsLR_buffer.inputs.sphere_reg_fsLR = sorted(precomputed['sphere_reg_fsLR']) return workflow From 96b3f8aca26c5a9d705c06e9565ebcb9dc1159f1 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Fri, 12 Apr 2024 17:32:32 -0400 Subject: [PATCH 013/142] ENH: Simplify mcribs/infantfs workflows --- nibabies/workflows/anatomical/surfaces.py | 432 +++++++++++----------- 1 file changed, 207 insertions(+), 225 deletions(-) diff --git a/nibabies/workflows/anatomical/surfaces.py b/nibabies/workflows/anatomical/surfaces.py index fff2d490..958cda61 100644 --- a/nibabies/workflows/anatomical/surfaces.py +++ b/nibabies/workflows/anatomical/surfaces.py @@ -1,6 +1,5 @@ """Anatomical surface projections""" -from typing import Optional from nipype.interfaces import freesurfer as fs from nipype.interfaces import io as nio @@ -9,31 +8,27 @@ from niworkflows.engine.workflows import LiterateWorkflow from niworkflows.interfaces.freesurfer import PatchedLTAConvert as LTAConvert from niworkflows.interfaces.freesurfer import PatchedRobustRegister as RobustRegister -from niworkflows.utils.connections import pop_file -from smriprep.workflows.surfaces import init_gifti_surface_wf +from smriprep.interfaces.freesurfer import MakeMidthickness +from smriprep.workflows.surfaces import _extract_fs_fields from nibabies.config import DEFAULT_MEMORY_MIN_GB from nibabies.data import load as load_data SURFACE_INPUTS = [ - "subjects_dir", - "subject_id", - "t1w", - "t2w", - "flair", - "skullstripped_t1", - "corrected_t1", - "ants_segs", + 't1w', + 't2w', + 'flair', + 'skullstripped_t1', + 'subjects_dir', + 'subject_id', + # Customize aseg + 'in_aseg', ] SURFACE_OUTPUTS = [ - "subjects_dir", - "subject_id", - "t1w2fsnative_xfm", - "fsnative2t1w_xfm", - "surfaces", - "morphometrics", - "out_aseg", - "out_aparc", + 'subjects_dir', + 'subject_id', + 'anat2fsnative_xfm', + 'fsnative2anat_xfm', ] @@ -42,8 +37,9 @@ def init_mcribs_surface_recon_wf( omp_nthreads: int, use_aseg: bool, use_mask: bool, - mcribs_dir: Optional[str] = None, - name: str = "mcribs_surface_recon_wf", + precomputed: dict, + mcribs_dir: str | None = 
None, + name: str = 'mcribs_surface_recon_wf', ): """ Reconstruct cortical surfaces using the M-CRIB-S pipeline. @@ -53,11 +49,11 @@ def init_mcribs_surface_recon_wf( """ from niworkflows.interfaces.nibabel import MapLabels, ReorientImage - from ...interfaces.mcribs import MCRIBReconAll + from nibabies.interfaces.mcribs import MCRIBReconAll if not use_aseg: raise NotImplementedError( - "A previously computed segmentation is required for the M-CRIB-S workflow." + 'A previously computed segmentation is required for the M-CRIB-S workflow.' ) inputnode = pe.Node( @@ -65,14 +61,14 @@ def init_mcribs_surface_recon_wf( ) outputnode = pe.Node(niu.IdentityInterface(fields=SURFACE_OUTPUTS), name='outputnode') - wf = LiterateWorkflow(name=name) - wf.__desc__ = f"""\ -Brain surfaces were reconstructed using `MCRIBReconAll` [M-CRIB-S, @mcribs], -leveraging the masked, preprocessed T2w and remapped anatomical segmentation. -""" + workflow = LiterateWorkflow(name=name) + workflow.__desc__ = ( + 'Brain surfaces were reconstructed with a modified `MCRIBReconAll` [M-CRIB-S, @mcribs]' + 'workflow, using the reference T2w and a pre-computed anatomical segmentation' + ) - # dictionary to map labels from FS to M-CRIB-S - aseg2mcrib = { + # mapping of labels from FS to M-CRIB-S + fs2mcribs = { 2: 51, 3: 21, 4: 49, @@ -108,10 +104,10 @@ def init_mcribs_surface_recon_wf( 63: 50, 253: 48, } - map_labels = pe.Node(MapLabels(mappings=aseg2mcrib), name="map_labels") + map_labels = pe.Node(MapLabels(mappings=fs2mcribs), name='map_labels') - t2w_las = pe.Node(ReorientImage(target_orientation="LAS"), name="t2w_las") - seg_las = t2w_las.clone(name="seg_las") + t2w_las = pe.Node(ReorientImage(target_orientation='LAS'), name='t2w_las') + seg_las = t2w_las.clone(name='seg_las') mcribs_recon = pe.Node( MCRIBReconAll( @@ -121,7 +117,7 @@ def init_mcribs_surface_recon_wf( fast_collision=True, nthreads=omp_nthreads, ), - name="mcribs_recon", + name='mcribs_recon', mem_gb=5, ) if mcribs_dir: @@ -132,78 +128,85 @@ def init_mcribs_surface_recon_wf( # If available, dilated mask and use in recon-neonatal-cortex from niworkflows.interfaces.morphology import BinaryDilation - mask_dil = pe.Node(BinaryDilation(radius=3), name="mask_dil") - mask_las = t2w_las.clone(name="mask_las") - # fmt:off - wf.connect([ - (inputnode, mask_dil, [("anat_mask", "in_mask")]), - (mask_dil, mask_las, [("out_mask", "in_file")]), - (mask_las, mcribs_recon, [("out_file", "mask_file")]), - ]) - # fmt:on + mask_dil = pe.Node(BinaryDilation(radius=3), name='mask_dil') + mask_las = t2w_las.clone(name='mask_las') + workflow.connect([ + (inputnode, mask_dil, [('anat_mask', 'in_mask')]), + (mask_dil, mask_las, [('out_mask', 'in_file')]), + (mask_las, mcribs_recon, [('out_file', 'mask_file')]), + ]) # fmt:skip mcribs_postrecon = pe.Node( MCRIBReconAll(autorecon_after_surf=True, nthreads=omp_nthreads), - name="mcribs_postrecon", + name='mcribs_postrecon', mem_gb=5, ) fssource = pe.Node(nio.FreeSurferSource(), name='fssource', run_without_submitting=True) - brainmask2nii = pe.Node(fs.MRIConvert(out_type="niigz"), name="brainmask2nii") - aparc2nii = pe.Node(fs.MRIConvert(out_type="niigz"), name="aparc2nii") - - fsnative2t1w_xfm = pe.Node( - RobustRegister(auto_sens=True, est_int_scale=True), - name='fsnative2t1w_xfm', + midthickness = pe.MapNode( + MakeMidthickness(thickness=True, distance=0.5, out_name='midthickness'), + iterfield='in_file', + name='midthickness', + n_procs=min(omp_nthreads, 12), ) + save_midthickness = pe.Node(nio.DataSink(parameterization=False), 
name='save_midthickness') - t1w2fsnative_xfm = pe.Node( - LTAConvert(out_lta=True, invert=True), - name="t1w2fsnative_xfm", + sync = pe.Node( + niu.Function( + function=_extract_fs_fields, + output_names=['subjects_dir', 'subject_id'], + ), + name='sync', ) - gifti_surface_wf = init_gifti_surface_wf() - # fmt:off - wf.connect([ - (inputnode, t2w_las, [("t2w", "in_file")]), - (inputnode, map_labels, [("ants_segs", "in_file")]), - (inputnode, outputnode, [("ants_segs", "out_aseg")]), # Input segs are final - (map_labels, seg_las, [("out_file", "in_file")]), + workflow.connect([ + (inputnode, t2w_las, [('t2w', 'in_file')]), + (inputnode, map_labels, [('in_aseg', 'in_file')]), + (map_labels, seg_las, [('out_file', 'in_file')]), (inputnode, mcribs_recon, [ - ("subjects_dir", "subjects_dir"), - ("subject_id", "subject_id")]), - (t2w_las, mcribs_recon, [("out_file", "t2w_file")]), - (seg_las, mcribs_recon, [("out_file", "segmentation_file")]), + ('subjects_dir', 'subjects_dir'), + ('subject_id', 'subject_id')]), + (t2w_las, mcribs_recon, [('out_file', 't2w_file')]), + (seg_las, mcribs_recon, [('out_file', 'segmentation_file')]), (inputnode, mcribs_postrecon, [ - ("subjects_dir", "subjects_dir"), - ("subject_id", "subject_id")]), - (mcribs_recon, mcribs_postrecon, [("mcribs_dir", "outdir")]), - (inputnode, fssource, [("subject_id", "subject_id")]), - (mcribs_postrecon, fssource, [("subjects_dir", "subjects_dir")]), - (mcribs_postrecon, outputnode, [("subjects_dir", "subjects_dir")]), - (inputnode, outputnode, [("subject_id", "subject_id")]), - - (inputnode, fsnative2t1w_xfm, [('skullstripped_t1', 'target_file')]), - (fssource, brainmask2nii, [('brainmask', 'in_file')]), - (fssource, aparc2nii, [(('aparc_aseg', pop_file), 'in_file')]), - (aparc2nii, outputnode, [('out_file', 'out_aparc')]), - (brainmask2nii, fsnative2t1w_xfm, [('out_file', 'source_file')]), - (fsnative2t1w_xfm, t1w2fsnative_xfm, [('out_reg_file', 'in_lta')]), - (inputnode, gifti_surface_wf, [("subject_id", "inputnode.subject_id")]), - (mcribs_postrecon, gifti_surface_wf, [("subjects_dir", "inputnode.subjects_dir")]), - (fsnative2t1w_xfm, gifti_surface_wf, [ - ('out_reg_file', 'inputnode.fsnative2t1w_xfm')]), - (fsnative2t1w_xfm, outputnode, [('out_reg_file', 'fsnative2t1w_xfm')]), - (t1w2fsnative_xfm, outputnode, [('out_lta', 't1w2fsnative_xfm')]), - (gifti_surface_wf, outputnode, [ - ('outputnode.surfaces', 'surfaces'), - ('outputnode.morphometrics', 'morphometrics')]), - ]) - # fmt:on - return wf + ('subjects_dir', 'subjects_dir'), + ('subject_id', 'subject_id')]), + (mcribs_recon, mcribs_postrecon, [('mcribs_dir', 'outdir')]), + (mcribs_postrecon, fssource, [('subjects_dir', 'subjects_dir')]), + (inputnode, fssource, [('subject_id', 'subject_id')]), + (fssource, midthickness, [ + ('white', 'in_file'), + ('graymid', 'graymid'), + ]), + (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]), + (save_midthickness, sync, [('out_file', 'filenames')]), + (sync, outputnode, [ + ('subjects_dir', 'subjects_dir'), + ('subject_id', 'subject_id'), + ]), + ]) # fmt:skip + + if 'fsnative' not in precomputed.get('transforms', {}): + fsnative2anat_xfm = pe.Node( + RobustRegister(auto_sens=True, est_int_scale=True), + name='fsnative2anat_xfm', + ) + anat2fsnative_xfm = pe.Node( + LTAConvert(out_lta=True, invert=True), + name='anat2fsnative_xfm', + ) + workflow.connect([ + (inputnode, fsnative2anat_xfm, [('t2w', 'target_file')]), + (fssource, fsnative2anat_xfm, [('T2', 'source_file')]), + (fsnative2anat_xfm, outputnode, 
[('out_reg_file', 'fsnative2anat_xfm')]), + (fsnative2anat_xfm, anat2fsnative_xfm, [('out_reg_file', 'in_lta')]), + (anat2fsnative_xfm, outputnode, [('out_lta', 'anat2fsnative_xfm')]), + ]) # fmt:skip + return workflow -def init_mcribs_sphere_reg_wf(*, name="mcribs_sphere_reg_wf"): + +def init_mcribs_sphere_reg_wf(*, name='mcribs_sphere_reg_wf'): """ Generate GIFTI registration sphere files from MCRIBS template to dHCP42 (32k). @@ -215,12 +218,12 @@ def init_mcribs_sphere_reg_wf(*, name="mcribs_sphere_reg_wf"): workflow = LiterateWorkflow(name=name) inputnode = pe.Node( - niu.IdentityInterface(["subjects_dir", "subject_id"]), - name="inputnode", + niu.IdentityInterface(['subjects_dir', 'subject_id']), + name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(["sphere_reg", "sphere_reg_fsLR"]), - name="outputnode", + niu.IdentityInterface(['sphere_reg', 'sphere_reg_fsLR']), + name='outputnode', ) get_spheres = pe.Node( @@ -233,10 +236,10 @@ def init_mcribs_sphere_reg_wf(*, name="mcribs_sphere_reg_wf"): # # See https://github.com/DCAN-Labs/DCAN-HCP/tree/9291324 sphere_gii = pe.MapNode( - fs.MRIsConvert(out_datatype="gii"), iterfield="in_file", name="sphere_gii" + fs.MRIsConvert(out_datatype='gii'), iterfield='in_file', name='sphere_gii' ) - fix_meta = pe.MapNode(FixGiftiMetadata(), iterfield="in_file", name="fix_meta") + fix_meta = pe.MapNode(FixGiftiMetadata(), iterfield='in_file', name='fix_meta') # load template files atlases = load_data.cached('atlases') @@ -245,12 +248,13 @@ def init_mcribs_sphere_reg_wf(*, name="mcribs_sphere_reg_wf"): # project to 41k dHCP atlas sphere # - sphere-in: Individual native sphere in surf directory registered to 41k atlas sphere # - sphere-to: the 41k atlas sphere, in the fsaverage directory - # - sphere-unproject-from: 41k atlas sphere registered to dHCP 42wk sphere, in the fsaverage directory + # - sphere-unproject-from: 41k atlas sphere registered to dHCP 42wk sphere, + # in the fsaverage directory # - sphere-out: lh.sphere.reg2.dHCP42.native.surf.gii project_unproject = pe.MapNode( SurfaceSphereProjectUnproject(), - iterfield=["sphere_in", "sphere_project_to", "sphere_unproject_from"], - name="project_unproject", + iterfield=['sphere_in', 'sphere_project_to', 'sphere_unproject_from'], + name='project_unproject', ) project_unproject.inputs.sphere_project_to = [ atlases / 'tpl-fsaverage_hemi-L_den-41k_desc-reg_sphere.surf.gii', @@ -279,47 +283,35 @@ def init_mcribs_sphere_reg_wf(*, name="mcribs_sphere_reg_wf"): def init_infantfs_surface_recon_wf( - *, age_months, use_aseg=False, name="infantfs_surface_recon_wf" + *, + age_months: int, + precomputed: dict, + use_aseg: bool = False, + name: str = 'infantfs_surface_recon_wf', ): from nibabies.interfaces.freesurfer import InfantReconAll - # Synchronized inputs to smriprep.workflows.surfaces.init_surface_recon_wf - wf = LiterateWorkflow(name=name) - inputnode = pe.Node(niu.IdentityInterface(fields=SURFACE_INPUTS), name="inputnode") - outputnode = pe.Node(niu.IdentityInterface(fields=SURFACE_OUTPUTS), name="outputnode") + workflow = LiterateWorkflow(name=name) + inputnode = pe.Node(niu.IdentityInterface(fields=SURFACE_INPUTS), name='inputnode') + outputnode = pe.Node(niu.IdentityInterface(fields=SURFACE_OUTPUTS), name='outputnode') - wf.__desc__ = f"""\ -Brain surfaces were reconstructed using `infant_recon_all` [FreeSurfer -{fs.Info().looseversion() or ""}, RRID:SCR_001847, @infantfs], -leveraging the masked, preprocessed T1w and anatomical segmentation. 
-""" + desc = ( + 'Brain surfaces were reconstructed using `infant_recon_all` [FreeSurfer ' + f'{fs.Info().looseversion() or ""}, RRID:SCR_001847, @infantfs], ' + 'using the reference T1w' + ) + desc += '.' if not use_aseg else ' and a pre-computed anatomical segmentation.' + workflow.__desc__ = desc - gen_recon_outdir = pe.Node(niu.Function(function=_gen_recon_dir), name="gen_recon_outdir") + gen_recon_outdir = pe.Node(niu.Function(function=_gen_recon_dir), name='gen_recon_outdir') # inject the intensity-normalized skull-stripped t1w from the brain extraction workflow - recon = pe.Node(InfantReconAll(age=age_months), name="reconall") + recon = pe.Node(InfantReconAll(age=age_months), name='reconall') fssource = pe.Node(nio.FreeSurferSource(), name='fssource', run_without_submitting=True) - - fsnative2t1w_xfm = pe.Node( - RobustRegister(auto_sens=True, est_int_scale=True), - name='fsnative2t1w_xfm', - ) - - t1w2fsnative_xfm = pe.Node( - LTAConvert(out_lta=True, invert=True), - name="t1w2fsnative_xfm", - ) - - # convert generated surfaces to GIFTIs - gifti_surface_wf = init_gifti_surface_wf() - - aparc2nii = pe.Node(fs.MRIConvert(out_type="niigz"), name="aparc2nii") - if use_aseg: - wf.connect(inputnode, "ants_segs", recon, "aseg_file") + workflow.connect(inputnode, 'in_aseg', recon, 'aseg_file') - # fmt: off - wf.connect([ + workflow.connect([ (inputnode, gen_recon_outdir, [ ('subjects_dir', 'subjects_dir'), ('subject_id', 'subject_id'), @@ -339,45 +331,35 @@ def init_infantfs_surface_recon_wf( ('subject_id', 'subject_id'), (('outdir', _parent), 'subjects_dir'), ]), - (recon, gifti_surface_wf, [ - ('subject_id', 'inputnode.subject_id'), - (('outdir', _parent), 'inputnode.subjects_dir'), - ]), - (fssource, outputnode, [ - (('aseg', _replace_mgz), 'anat_aseg'), - ]), - (inputnode, fsnative2t1w_xfm, [('skullstripped_t1', 'target_file')]), - (fssource, fsnative2t1w_xfm, [ - (('norm', _replace_mgz), 'source_file'), - ]), - (fsnative2t1w_xfm, t1w2fsnative_xfm, [('out_reg_file', 'in_lta')]), - (fssource, aparc2nii, [ - ('aparc_aseg', 'in_file'), - ]), - (aparc2nii, outputnode, [ - ('out_file', 'out_aparc'), - ]), - (fssource, outputnode, [ - (('aseg', _replace_mgz), 'out_aseg'), - ]), - (fsnative2t1w_xfm, outputnode, [ - ('out_reg_file', 'fsnative2t1w_xfm'), - ]), - (t1w2fsnative_xfm, outputnode, [ - ('out_lta', 't1w2fsnative_xfm'), - ]), - (fsnative2t1w_xfm, gifti_surface_wf, [ - ('out_reg_file', 'inputnode.fsnative2t1w_xfm')]), - (gifti_surface_wf, outputnode, [ - ('outputnode.surfaces', 'surfaces'), - ('outputnode.morphometrics', 'morphometrics'), - ]), - ]) - # fmt: on - return wf + ]) # fmt:skip + + if 'fsnative' not in precomputed.get('transforms', {}): + fsnative2anat_xfm = pe.Node( + RobustRegister(auto_sens=True, est_int_scale=True), + name='fsnative2anat_xfm', + ) + anat2fsnative_xfm = pe.Node( + LTAConvert(out_lta=True, invert=True), + name='anat2fsnative_xfm', + ) + workflow.connect([ + (inputnode, fsnative2anat_xfm, [('skullstripped_t1', 'target_file')]), + (fssource, fsnative2anat_xfm, [ + (('norm', _replace_mgz), 'source_file'), + ]), + (fsnative2anat_xfm, anat2fsnative_xfm, [('out_reg_file', 'in_lta')]), + (fsnative2anat_xfm, outputnode, [ + ('out_reg_file', 'fsnative2anat_xfm'), + ]), + (anat2fsnative_xfm, outputnode, [ + ('out_lta', 'anat2fsnative_xfm'), + ]), + ]) # fmt:skip + + return workflow -def init_anat_ribbon_wf(name="anat_ribbon_wf"): +def init_anat_ribbon_wf(name='anat_ribbon_wf'): from nipype.interfaces import fsl from nibabies.interfaces.workbench import 
CreateSignedDistanceVolume @@ -391,85 +373,85 @@ def init_anat_ribbon_wf(name="anat_ribbon_wf"): inputnode = pe.Node( niu.IdentityInterface( fields=[ - "surfaces", # anat_giftis, - "t1w_mask", + 'surfaces', # anat_giftis, + 't1w_mask', ] ), - name="inputnode", + name='inputnode', ) outputnode = pe.Node( niu.IdentityInterface( fields=[ - "anat_ribbon", + 'anat_ribbon', ] ), - name="outputnode", + name='outputnode', ) select_wm = pe.Node( niu.Select(index=[0, 1]), - name="select_wm", + name='select_wm', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True, ) select_pial = pe.Node( niu.Select(index=[2, 3]), - name="select_pial", + name='select_pial', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True, ) select_midthick = pe.Node( niu.Select(index=[6, 7]), - name="select_midthick", + name='select_midthick', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True, ) create_wm_distvol = pe.MapNode( CreateSignedDistanceVolume(), - iterfield=["surf_file"], - name="create_wm_distvol", + iterfield=['surf_file'], + name='create_wm_distvol', ) create_pial_distvol = pe.MapNode( CreateSignedDistanceVolume(), - iterfield=["surf_file"], - name="create_pial_distvol", + iterfield=['surf_file'], + name='create_pial_distvol', ) thresh_wm_distvol = pe.MapNode( - fsl.maths.MathsCommand(args="-thr 0 -bin -mul 255"), - iterfield=["in_file"], - name="thresh_wm_distvol", + fsl.maths.MathsCommand(args='-thr 0 -bin -mul 255'), + iterfield=['in_file'], + name='thresh_wm_distvol', mem_gb=DEFAULT_MEMORY_MIN_GB, ) uthresh_pial_distvol = pe.MapNode( - fsl.maths.MathsCommand(args="-uthr 0 -abs -bin -mul 255"), - iterfield=["in_file"], - name="uthresh_pial_distvol", + fsl.maths.MathsCommand(args='-uthr 0 -abs -bin -mul 255'), + iterfield=['in_file'], + name='uthresh_pial_distvol', mem_gb=DEFAULT_MEMORY_MIN_GB, ) bin_wm_distvol = pe.MapNode( - fsl.maths.UnaryMaths(operation="bin"), - iterfield=["in_file"], - name="bin_wm_distvol", + fsl.maths.UnaryMaths(operation='bin'), + iterfield=['in_file'], + name='bin_wm_distvol', mem_gb=DEFAULT_MEMORY_MIN_GB, ) bin_pial_distvol = pe.MapNode( - fsl.maths.UnaryMaths(operation="bin"), - iterfield=["in_file"], - name="bin_pial_distvol", + fsl.maths.UnaryMaths(operation='bin'), + iterfield=['in_file'], + name='bin_pial_distvol', mem_gb=DEFAULT_MEMORY_MIN_GB, ) split_wm_distvol = pe.Node( niu.Split(splits=[1, 1]), - name="split_wm_distvol", + name='split_wm_distvol', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True, ) @@ -477,62 +459,62 @@ def init_anat_ribbon_wf(name="anat_ribbon_wf"): merge_wm_distvol_no_flatten = pe.Node( niu.Merge(2), no_flatten=True, - name="merge_wm_distvol_no_flatten", + name='merge_wm_distvol_no_flatten', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True, ) make_ribbon_vol = pe.MapNode( - fsl.maths.MultiImageMaths(op_string="-mas %s -mul 255"), - iterfield=["in_file", "operand_files"], - name="make_ribbon_vol", + fsl.maths.MultiImageMaths(op_string='-mas %s -mul 255'), + iterfield=['in_file', 'operand_files'], + name='make_ribbon_vol', mem_gb=DEFAULT_MEMORY_MIN_GB, ) bin_ribbon_vol = pe.MapNode( - fsl.maths.UnaryMaths(operation="bin"), - iterfield=["in_file"], - name="bin_ribbon_vol", + fsl.maths.UnaryMaths(operation='bin'), + iterfield=['in_file'], + name='bin_ribbon_vol', mem_gb=DEFAULT_MEMORY_MIN_GB, ) split_squeeze_ribbon_vol = pe.Node( niu.Split(splits=[1, 1], squeeze=True), - name="split_squeeze_ribbon", + name='split_squeeze_ribbon', mem_gb=DEFAULT_MEMORY_MIN_GB, run_without_submitting=True, ) combine_ribbon_vol_hemis = pe.Node( 
- fsl.maths.BinaryMaths(operation="add"), - name="combine_ribbon_vol_hemis", + fsl.maths.BinaryMaths(operation='add'), + name='combine_ribbon_vol_hemis', mem_gb=DEFAULT_MEMORY_MIN_GB, ) # make HCP-style ribbon volume in T1w space workflow.connect( [ - (inputnode, select_wm, [("surfaces", "inlist")]), - (inputnode, select_pial, [("surfaces", "inlist")]), - (inputnode, select_midthick, [("surfaces", "inlist")]), - (select_wm, create_wm_distvol, [(("out", _sorted_by_basename), "surf_file")]), - (inputnode, create_wm_distvol, [("t1w_mask", "ref_file")]), - (select_pial, create_pial_distvol, [(("out", _sorted_by_basename), "surf_file")]), - (inputnode, create_pial_distvol, [("t1w_mask", "ref_file")]), - (create_wm_distvol, thresh_wm_distvol, [("out_file", "in_file")]), - (create_pial_distvol, uthresh_pial_distvol, [("out_file", "in_file")]), - (thresh_wm_distvol, bin_wm_distvol, [("out_file", "in_file")]), - (uthresh_pial_distvol, bin_pial_distvol, [("out_file", "in_file")]), - (bin_wm_distvol, split_wm_distvol, [("out_file", "inlist")]), - (split_wm_distvol, merge_wm_distvol_no_flatten, [("out1", "in1")]), - (split_wm_distvol, merge_wm_distvol_no_flatten, [("out2", "in2")]), - (bin_pial_distvol, make_ribbon_vol, [("out_file", "in_file")]), - (merge_wm_distvol_no_flatten, make_ribbon_vol, [("out", "operand_files")]), - (make_ribbon_vol, bin_ribbon_vol, [("out_file", "in_file")]), - (bin_ribbon_vol, split_squeeze_ribbon_vol, [("out_file", "inlist")]), - (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out1", "in_file")]), - (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [("out2", "operand_file")]), - (combine_ribbon_vol_hemis, outputnode, [("out_file", "anat_ribbon")]), + (inputnode, select_wm, [('surfaces', 'inlist')]), + (inputnode, select_pial, [('surfaces', 'inlist')]), + (inputnode, select_midthick, [('surfaces', 'inlist')]), + (select_wm, create_wm_distvol, [(('out', _sorted_by_basename), 'surf_file')]), + (inputnode, create_wm_distvol, [('t1w_mask', 'ref_file')]), + (select_pial, create_pial_distvol, [(('out', _sorted_by_basename), 'surf_file')]), + (inputnode, create_pial_distvol, [('t1w_mask', 'ref_file')]), + (create_wm_distvol, thresh_wm_distvol, [('out_file', 'in_file')]), + (create_pial_distvol, uthresh_pial_distvol, [('out_file', 'in_file')]), + (thresh_wm_distvol, bin_wm_distvol, [('out_file', 'in_file')]), + (uthresh_pial_distvol, bin_pial_distvol, [('out_file', 'in_file')]), + (bin_wm_distvol, split_wm_distvol, [('out_file', 'inlist')]), + (split_wm_distvol, merge_wm_distvol_no_flatten, [('out1', 'in1')]), + (split_wm_distvol, merge_wm_distvol_no_flatten, [('out2', 'in2')]), + (bin_pial_distvol, make_ribbon_vol, [('out_file', 'in_file')]), + (merge_wm_distvol_no_flatten, make_ribbon_vol, [('out', 'operand_files')]), + (make_ribbon_vol, bin_ribbon_vol, [('out_file', 'in_file')]), + (bin_ribbon_vol, split_squeeze_ribbon_vol, [('out_file', 'inlist')]), + (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [('out1', 'in_file')]), + (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [('out2', 'operand_file')]), + (combine_ribbon_vol_hemis, outputnode, [('out_file', 'anat_ribbon')]), ] ) return workflow @@ -569,6 +551,6 @@ def _get_dhcp_spheres(subject_id: str, subjects_dir: str) -> list: for hemi in 'lr': sphere = Path(subjects_dir) / subject_id / 'surf' / f'{hemi}h.sphere.reg2' if not sphere.exists(): - raise OSError("MCRIBS spherical registration not found.") + raise OSError('MCRIBS spherical registration not found.') out.append(str(sphere)) return out From 
f14e8165213dff9499d9f674bed3f85c07ce41c1 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 15 Apr 2024 10:54:08 -0400 Subject: [PATCH 014/142] ENH: Add symbolic copy of sphere.reg to make FS integration easier --- nibabies/interfaces/mcribs.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/nibabies/interfaces/mcribs.py b/nibabies/interfaces/mcribs.py index 5cba805c..856000cb 100644 --- a/nibabies/interfaces/mcribs.py +++ b/nibabies/interfaces/mcribs.py @@ -117,11 +117,11 @@ def cmdline(self): self._no_run = True if self._no_run: - return "echo MCRIBSReconAll: nothing to do" + return 'echo MCRIBSReconAll: nothing to do' return cmd def _setup_directory_structure(self, mcribs_dir: Path) -> None: - ''' + """ Create the required structure for skipping steps. The directory tree @@ -141,7 +141,7 @@ def _setup_directory_structure(self, mcribs_dir: Path) -> None: └── └── N4 └── .nii.gz - ''' + """ sid = self.inputs.subject_id mkdir_kw = {'parents': True, 'exist_ok': True} root = mcribs_dir / sid @@ -204,7 +204,8 @@ def _run_interface(self, runtime): mcribs_dir = self.inputs.outdir or Path(runtime.cwd) / 'mcribs' self._mcribs_dir = Path(mcribs_dir) if self.inputs.surfrecon: - assert self.inputs.t2w_file, "Missing T2w input" + if not self.inputs.t2w_file: + raise AttributeError('Missing T2w input') self._setup_directory_structure(self._mcribs_dir) # overwrite CWD to be in MCRIB subject's directory runtime.cwd = str(self._mcribs_dir / self.inputs.subject_id) @@ -225,6 +226,12 @@ def _list_outputs(self): dst = Path(self.inputs.subjects_dir) / self.inputs.subject_id if not dst.exists(): shutil.copytree(mcribs_fs, dst) + # Copy registration sphere to better match recon-all output + for hemi in 'lr': + orig = dst / 'surf' / f'{hemi}h.sphere.reg2' + symbolic = Path(str(orig)[:-1]) + if orig.exists() and not symbolic.exists(): + shutil.copyfile(orig, symbolic) outputs['subjects_dir'] = self.inputs.subjects_dir return outputs @@ -251,7 +258,7 @@ def _verify_surfrecon_outputs(surfrecon_dir: Path, error: bool) -> bool: for fl in fls: if not (surfrecon_dir / d / fl).exists(): if error: - raise FileNotFoundError(f"SurfReconDeformable missing: {fl}") + raise FileNotFoundError(f'SurfReconDeformable missing: {fl}') return False return True @@ -279,6 +286,6 @@ def _verify_autorecon_outputs(fs_dir: Path, error: bool) -> bool: for fl in fls: if not (fs_dir / d / fl).exists(): if error: - raise FileNotFoundError(f"FreeSurfer directory missing: {fl}") + raise FileNotFoundError(f'FreeSurfer directory missing: {fl}') return False return True From 0e2c2a2c4e68d38ebdd28262a928de868c3a127b Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 15 Apr 2024 11:01:24 -0400 Subject: [PATCH 015/142] FIX: Simplify dhcp reg workflow, remove crud --- nibabies/workflows/anatomical/surfaces.py | 257 +++------------------- 1 file changed, 35 insertions(+), 222 deletions(-) diff --git a/nibabies/workflows/anatomical/surfaces.py b/nibabies/workflows/anatomical/surfaces.py index 958cda61..023abc2c 100644 --- a/nibabies/workflows/anatomical/surfaces.py +++ b/nibabies/workflows/anatomical/surfaces.py @@ -1,6 +1,7 @@ """Anatomical surface projections""" +import templateflow.api as tf from nipype.interfaces import freesurfer as fs from nipype.interfaces import io as nio from nipype.interfaces import utility as niu @@ -11,9 +12,6 @@ from smriprep.interfaces.freesurfer import MakeMidthickness from smriprep.workflows.surfaces import _extract_fs_fields -from nibabies.config import DEFAULT_MEMORY_MIN_GB -from 
nibabies.data import load as load_data - SURFACE_INPUTS = [ 't1w', 't2w', @@ -206,44 +204,26 @@ def init_mcribs_surface_recon_wf( return workflow -def init_mcribs_sphere_reg_wf(*, name='mcribs_sphere_reg_wf'): +def init_mcribs_dhcp_wf(*, name='mcribs_dhcp_wf'): """ - Generate GIFTI registration sphere files from MCRIBS template to dHCP42 (32k). + Generate GIFTI registration files to dhcp (42-week) space. - TODO: Clarify any distinction with fsLR + Note: The dhcp template was derived from the Conte69 atlas, + and maps reasonably well to fsLR. """ - from smriprep.interfaces.surf import FixGiftiMetadata from smriprep.interfaces.workbench import SurfaceSphereProjectUnproject workflow = LiterateWorkflow(name=name) inputnode = pe.Node( - niu.IdentityInterface(['subjects_dir', 'subject_id']), + niu.IdentityInterface(['sphere_reg', 'sulc']), name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(['sphere_reg', 'sphere_reg_fsLR']), + niu.IdentityInterface(['sphere_reg_fsLR']), name='outputnode', ) - get_spheres = pe.Node( - niu.Function(function=_get_dhcp_spheres), - name='get_spheres', - run_without_submitting=True, - ) - - # Via FreeSurfer2CaretConvertAndRegisterNonlinear.sh#L270-L273 - # - # See https://github.com/DCAN-Labs/DCAN-HCP/tree/9291324 - sphere_gii = pe.MapNode( - fs.MRIsConvert(out_datatype='gii'), iterfield='in_file', name='sphere_gii' - ) - - fix_meta = pe.MapNode(FixGiftiMetadata(), iterfield='in_file', name='fix_meta') - - # load template files - atlases = load_data.cached('atlases') - # SurfaceSphereProjectUnProject # project to 41k dHCP atlas sphere # - sphere-in: Individual native sphere in surf directory registered to 41k atlas sphere @@ -257,27 +237,39 @@ def init_mcribs_sphere_reg_wf(*, name='mcribs_sphere_reg_wf'): name='project_unproject', ) project_unproject.inputs.sphere_project_to = [ - atlases / 'tpl-fsaverage_hemi-L_den-41k_desc-reg_sphere.surf.gii', - atlases / 'tpl-fsaverage_hemi-R_den-41k_desc-reg_sphere.surf.gii', + str( + tf.get( + 'fsaverage', + density='41k', + hemi=hemi, + desc=None, + suffix='sphere', + extension='.surf.gii', + ) + ) + for hemi in 'LR' ] - project_unproject.inputs.sphere_unproject_from = [ - atlases / 'tpl-dHCP_space-fsaverage_hemi-L_den-41k_desc-reg_sphere.surf.gii', - atlases / 'tpl-dHCP_space-fsaverage_hemi-R_den-41k_desc-reg_sphere.surf.gii', + + project_unproject.inputs.sphere_unproject_from = [ # TODO: Use symmetric template + str( + tf.get( + 'dhcpAsym', + space='fsaverage', + hemi=hemi, + density='41k', + desc='reg', + suffix='sphere', + extension='.surf.gii', + raise_empty=True, + ) + ) + for hemi in 'LR' ] - # fmt:off workflow.connect([ - (inputnode, get_spheres, [ - ('subjects_dir', 'subjects_dir'), - ('subject_id', 'subject_id'), - ]), - (get_spheres, sphere_gii, [(('out', _sorted_by_basename), 'in_file')]), - (sphere_gii, fix_meta, [('converted', 'in_file')]), - (fix_meta, project_unproject, [('out_file', 'sphere_in')]), - (sphere_gii, outputnode, [('converted', 'sphere_reg')]), + (inputnode, project_unproject, [('sphere_reg', 'sphere_in')]), (project_unproject, outputnode, [('sphere_out', 'sphere_reg_fsLR')]), - ]) - # fmt:on + ]) # fmt:skip return workflow @@ -359,167 +351,6 @@ def init_infantfs_surface_recon_wf( return workflow -def init_anat_ribbon_wf(name='anat_ribbon_wf'): - from nipype.interfaces import fsl - - from nibabies.interfaces.workbench import CreateSignedDistanceVolume - - # 0, 1 = wm; 2, 3 = pial; 6, 7 = mid - # note that order of lh / rh within each surf type is not guaranteed due to use - # of 
unsorted glob by FreeSurferSource prior, but we can do a sort - # to ensure consistent ordering - workflow = pe.Workflow(name=name) - - inputnode = pe.Node( - niu.IdentityInterface( - fields=[ - 'surfaces', # anat_giftis, - 't1w_mask', - ] - ), - name='inputnode', - ) - outputnode = pe.Node( - niu.IdentityInterface( - fields=[ - 'anat_ribbon', - ] - ), - name='outputnode', - ) - - select_wm = pe.Node( - niu.Select(index=[0, 1]), - name='select_wm', - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) - - select_pial = pe.Node( - niu.Select(index=[2, 3]), - name='select_pial', - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) - - select_midthick = pe.Node( - niu.Select(index=[6, 7]), - name='select_midthick', - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) - - create_wm_distvol = pe.MapNode( - CreateSignedDistanceVolume(), - iterfield=['surf_file'], - name='create_wm_distvol', - ) - - create_pial_distvol = pe.MapNode( - CreateSignedDistanceVolume(), - iterfield=['surf_file'], - name='create_pial_distvol', - ) - - thresh_wm_distvol = pe.MapNode( - fsl.maths.MathsCommand(args='-thr 0 -bin -mul 255'), - iterfield=['in_file'], - name='thresh_wm_distvol', - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - uthresh_pial_distvol = pe.MapNode( - fsl.maths.MathsCommand(args='-uthr 0 -abs -bin -mul 255'), - iterfield=['in_file'], - name='uthresh_pial_distvol', - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - bin_wm_distvol = pe.MapNode( - fsl.maths.UnaryMaths(operation='bin'), - iterfield=['in_file'], - name='bin_wm_distvol', - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - bin_pial_distvol = pe.MapNode( - fsl.maths.UnaryMaths(operation='bin'), - iterfield=['in_file'], - name='bin_pial_distvol', - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - split_wm_distvol = pe.Node( - niu.Split(splits=[1, 1]), - name='split_wm_distvol', - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) - - merge_wm_distvol_no_flatten = pe.Node( - niu.Merge(2), - no_flatten=True, - name='merge_wm_distvol_no_flatten', - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) - - make_ribbon_vol = pe.MapNode( - fsl.maths.MultiImageMaths(op_string='-mas %s -mul 255'), - iterfield=['in_file', 'operand_files'], - name='make_ribbon_vol', - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - bin_ribbon_vol = pe.MapNode( - fsl.maths.UnaryMaths(operation='bin'), - iterfield=['in_file'], - name='bin_ribbon_vol', - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - split_squeeze_ribbon_vol = pe.Node( - niu.Split(splits=[1, 1], squeeze=True), - name='split_squeeze_ribbon', - mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True, - ) - - combine_ribbon_vol_hemis = pe.Node( - fsl.maths.BinaryMaths(operation='add'), - name='combine_ribbon_vol_hemis', - mem_gb=DEFAULT_MEMORY_MIN_GB, - ) - - # make HCP-style ribbon volume in T1w space - workflow.connect( - [ - (inputnode, select_wm, [('surfaces', 'inlist')]), - (inputnode, select_pial, [('surfaces', 'inlist')]), - (inputnode, select_midthick, [('surfaces', 'inlist')]), - (select_wm, create_wm_distvol, [(('out', _sorted_by_basename), 'surf_file')]), - (inputnode, create_wm_distvol, [('t1w_mask', 'ref_file')]), - (select_pial, create_pial_distvol, [(('out', _sorted_by_basename), 'surf_file')]), - (inputnode, create_pial_distvol, [('t1w_mask', 'ref_file')]), - (create_wm_distvol, thresh_wm_distvol, [('out_file', 'in_file')]), - (create_pial_distvol, uthresh_pial_distvol, [('out_file', 'in_file')]), - (thresh_wm_distvol, bin_wm_distvol, [('out_file', 'in_file')]), - 
(uthresh_pial_distvol, bin_pial_distvol, [('out_file', 'in_file')]), - (bin_wm_distvol, split_wm_distvol, [('out_file', 'inlist')]), - (split_wm_distvol, merge_wm_distvol_no_flatten, [('out1', 'in1')]), - (split_wm_distvol, merge_wm_distvol_no_flatten, [('out2', 'in2')]), - (bin_pial_distvol, make_ribbon_vol, [('out_file', 'in_file')]), - (merge_wm_distvol_no_flatten, make_ribbon_vol, [('out', 'operand_files')]), - (make_ribbon_vol, bin_ribbon_vol, [('out_file', 'in_file')]), - (bin_ribbon_vol, split_squeeze_ribbon_vol, [('out_file', 'inlist')]), - (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [('out1', 'in_file')]), - (split_squeeze_ribbon_vol, combine_ribbon_vol_hemis, [('out2', 'operand_file')]), - (combine_ribbon_vol_hemis, outputnode, [('out_file', 'anat_ribbon')]), - ] - ) - return workflow - - def _parent(p): from pathlib import Path @@ -536,21 +367,3 @@ def _gen_recon_dir(subjects_dir, subject_id): def _replace_mgz(in_file): return in_file.replace('.mgz', '.nii.gz') - - -def _sorted_by_basename(inlist): - from os.path import basename - - return sorted(inlist, key=lambda x: str(basename(x))) - - -def _get_dhcp_spheres(subject_id: str, subjects_dir: str) -> list: - from pathlib import Path - - out = [] - for hemi in 'lr': - sphere = Path(subjects_dir) / subject_id / 'surf' / f'{hemi}h.sphere.reg2' - if not sphere.exists(): - raise OSError('MCRIBS spherical registration not found.') - out.append(str(sphere)) - return out From b93349eda8d0ef41b3ef8cfdc5b430c4165e13cc Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 15 Apr 2024 11:07:13 -0400 Subject: [PATCH 016/142] FIX: Insert new parameters, update mcribs -> fslr workflow --- nibabies/workflows/fit.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/nibabies/workflows/fit.py b/nibabies/workflows/fit.py index 60f3259a..e583a9e0 100644 --- a/nibabies/workflows/fit.py +++ b/nibabies/workflows/fit.py @@ -10,7 +10,12 @@ from niworkflows.interfaces.nibabel import ApplyMask, Binarize from niworkflows.utils.connections import pop_file from smriprep.workflows.anatomical import init_anat_ribbon_wf, init_anat_template_wf -from smriprep.workflows.surfaces import init_gifti_surfaces_wf, init_gifti_morphometrics_wf +from smriprep.workflows.surfaces import ( + init_fsLR_reg_wf, + init_gifti_surfaces_wf, + init_gifti_morphometrics_wf, + init_refinement_wf, +) from smriprep.workflows.fit.registration import init_register_template_wf from smriprep.workflows.outputs import ( init_ds_dseg_wf, @@ -26,6 +31,7 @@ from nibabies import config from nibabies.workflows.anatomical.registration import init_coregistration_wf from nibabies.workflows.anatomical.segmentation import init_segmentation_wf +from nibabies.workflows.anatomical.surfaces import init_mcribs_dhcp_wf if ty.TYPE_CHECKING: from niworkflows.utils.spaces import Reference, SpatialReferences @@ -715,7 +721,7 @@ def init_infant_anat_fit_wf( templates=templates, ) ds_template_registration_wf = init_ds_template_registration_wf( - output_dir=output_dir, + output_dir=str(output_dir), image_type=image_type.capitalize(), ) @@ -763,6 +769,7 @@ def init_infant_anat_fit_wf( omp_nthreads=omp_nthreads, use_aseg=bool(anat_aseg), use_mask=True, + precomputed=precomputed, mcribs_dir=str(config.execution.mcribs_dir), ) @@ -820,6 +827,7 @@ def init_infant_anat_fit_wf( LOGGER.info('ANAT Stage 6: Preparing Infant FreeSurfer workflow') surface_recon_wf = init_infantfs_surface_recon_wf( age_months=age_months, + precomputed=precomputed, use_aseg=bool(anat_mask), ) @@ -843,7 
+851,10 @@ def init_infant_anat_fit_wf( fsnative_xfms = precomputed.get('transforms', {}).get('fsnative') if not fsnative_xfms: - ds_fs_registration_wf = init_ds_fs_registration_wf(output_dir=output_dir) + ds_fs_registration_wf = init_ds_fs_registration_wf( + image_type=image_type, + output_dir=output_dir + ) workflow.connect([ (sourcefile_buffer, ds_fs_registration_wf, [ ('anat_source_files', 'inputnode.source_files'), @@ -1020,7 +1031,7 @@ def init_infant_anat_fit_wf( if len(precomputed.get('sphere_reg_fsLR', [])) < 2: LOGGER.info('ANAT Stage 9: Creating fsLR registration sphere') if recon_method == 'mcribs': - fsLR_reg_wf = init_mcribs_fsLR_reg_wf() # TODO + fsLR_reg_wf = init_mcribs_dhcp_wf() else: fsLR_reg_wf = init_fsLR_reg_wf() From 94529076625867cda9c93c7aa0b998a308be1960 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 15 Apr 2024 14:30:58 -0400 Subject: [PATCH 017/142] RF: Contrast -> image type --- nibabies/workflows/fit.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/nibabies/workflows/fit.py b/nibabies/workflows/fit.py index e583a9e0..5e3e192f 100644 --- a/nibabies/workflows/fit.py +++ b/nibabies/workflows/fit.py @@ -10,12 +10,6 @@ from niworkflows.interfaces.nibabel import ApplyMask, Binarize from niworkflows.utils.connections import pop_file from smriprep.workflows.anatomical import init_anat_ribbon_wf, init_anat_template_wf -from smriprep.workflows.surfaces import ( - init_fsLR_reg_wf, - init_gifti_surfaces_wf, - init_gifti_morphometrics_wf, - init_refinement_wf, -) from smriprep.workflows.fit.registration import init_register_template_wf from smriprep.workflows.outputs import ( init_ds_dseg_wf, @@ -27,6 +21,12 @@ init_ds_template_wf, init_ds_tpms_wf, ) +from smriprep.workflows.surfaces import ( + init_fsLR_reg_wf, + init_gifti_morphometrics_wf, + init_gifti_surfaces_wf, + init_refinement_wf, +) from nibabies import config from nibabies.workflows.anatomical.registration import init_coregistration_wf @@ -314,7 +314,7 @@ def init_infant_anat_fit_wf( ) t1w_template_wf = init_anat_template_wf( - contrast='T1w', + image_type='T1w', num_files=num_t1w, longitudinal=longitudinal, omp_nthreads=omp_nthreads, @@ -372,7 +372,7 @@ def init_infant_anat_fit_wf( ) t2w_template_wf = init_anat_template_wf( - contrast='T2w', + image_type='T2w', num_files=num_t1w, longitudinal=longitudinal, omp_nthreads=omp_nthreads, @@ -852,8 +852,7 @@ def init_infant_anat_fit_wf( fsnative_xfms = precomputed.get('transforms', {}).get('fsnative') if not fsnative_xfms: ds_fs_registration_wf = init_ds_fs_registration_wf( - image_type=image_type, - output_dir=output_dir + image_type=image_type, output_dir=output_dir ) workflow.connect([ (sourcefile_buffer, ds_fs_registration_wf, [ From f3e42045f654ef5bbd5c1f3b5d951818b1f170dc Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 15 Apr 2024 14:31:54 -0400 Subject: [PATCH 018/142] RF: Move fit to anatomical workflows module --- nibabies/workflows/{ => anatomical}/fit.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename nibabies/workflows/{ => anatomical}/fit.py (100%) diff --git a/nibabies/workflows/fit.py b/nibabies/workflows/anatomical/fit.py similarity index 100% rename from nibabies/workflows/fit.py rename to nibabies/workflows/anatomical/fit.py From bd851ce67ac90fee00fde55ab802dbc611c136ff Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 18 Apr 2024 10:21:28 -0400 Subject: [PATCH 019/142] FIX: T2w brainmask, move precomputed to dict --- nibabies/workflows/anatomical/fit.py | 218 +++++++++++++++------------ 1 
file changed, 124 insertions(+), 94 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index 5e3e192f..c7e1d8d2 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -9,7 +9,11 @@ from niworkflows.interfaces.header import ValidateImage from niworkflows.interfaces.nibabel import ApplyMask, Binarize from niworkflows.utils.connections import pop_file -from smriprep.workflows.anatomical import init_anat_ribbon_wf, init_anat_template_wf +from smriprep.workflows.anatomical import ( + _is_skull_stripped, + init_anat_ribbon_wf, + init_anat_template_wf, +) from smriprep.workflows.fit.registration import init_register_template_wf from smriprep.workflows.outputs import ( init_ds_dseg_wf, @@ -29,6 +33,7 @@ ) from nibabies import config +from nibabies.workflows.anatomical.brain_extraction import init_infant_brain_extraction_wf from nibabies.workflows.anatomical.registration import init_coregistration_wf from nibabies.workflows.anatomical.segmentation import init_segmentation_wf from nibabies.workflows.anatomical.surfaces import init_mcribs_dhcp_wf @@ -36,7 +41,6 @@ if ty.TYPE_CHECKING: from niworkflows.utils.spaces import Reference, SpatialReferences - from nibabies.utils.bids import Derivatives LOGGER = logging.getLogger('nipype.workflow') @@ -47,7 +51,7 @@ def init_infant_anat_fit_wf( t2w: list, flair: list, bids_root: Path, - precomputed: Derivatives, + precomputed: dict, hires: bool, longitudinal: bool, omp_nthreads: int, @@ -302,11 +306,14 @@ def init_infant_anat_fit_wf( 'were found within the input BIDS dataset.' ) + t1w_preproc = precomputed.get('t1w_preproc', None) + t2w_preproc = precomputed.get('t2w_preproc', None) + # Stage 1: Conform & valid T1w/T2w images t1w_validate = pe.Node(ValidateImage(), name='anat_validate', run_without_submitting=True) t2w_validate = t1w_validate.clone('t2w_validate') - if not precomputed.t1w_preproc: + if not t1w_preproc: LOGGER.info('ANAT Stage 1: Adding T1w template workflow') desc += ( 'The T1-weighted (T1w) image was denoised and corrected for intensity ' @@ -355,16 +362,16 @@ def init_infant_anat_fit_wf( LOGGER.info('ANAT Found preprocessed T1w - skipping Stage 1') desc += ' A preprocessed T1w image was provided as input.' - t1w_validate.inputs.in_file = precomputed.t1w_preproc - sourcefile_buffer.inputs.t1w_source_files = [precomputed.t1w_preproc] + t1w_validate.inputs.in_file = t1w_preproc + sourcefile_buffer.inputs.t1w_source_files = [t1w_preproc] if reference_anat == 't1w': - sourcefile_buffer.inputs.anat_source_files = [precomputed.t1w_preproc] + sourcefile_buffer.inputs.anat_source_files = [t1w_preproc] workflow.connect([ (t1w_validate, t1w_buffer, [('out_file', 't1w_preproc')]), ]) # fmt:skip - if not precomputed.t2w_preproc: + if not t2w_preproc: LOGGER.info('ANAT Stage 1: Adding T2w template workflow') desc += ( 'The T2-weighted (T2w) image was denoised and corrected for intensity ' @@ -414,26 +421,25 @@ def init_infant_anat_fit_wf( LOGGER.info('ANAT Found preprocessed T2w - skipping Stage 1') desc += ' A preprocessed T2w image was provided as input.' 
- t2w_validate.inputs.in_file = precomputed.t2w_preproc - sourcefile_buffer.inputs.t2w_source_files = [precomputed.t2w_preproc] - if precomputed.t2w_preproc: - sourcefile_buffer.inputs.anat_source_files = [precomputed.t2w_preproc] + t2w_validate.inputs.in_file = t2w_preproc + sourcefile_buffer.inputs.t2w_source_files = [t2w_preproc] + if reference_anat == 't2w': + sourcefile_buffer.inputs.anat_source_files = [t2w_preproc] workflow.connect([ (t2w_validate, t2w_buffer, [('out_file', 't2w_preproc')]), ]) # fmt:skip + # Stage 2: Use previously computed mask or calculate # If we only have one mask (could be either T1w/T2w), # just apply transform to get it in the other space - # only_t1w_mask = precomputed.t1w_mask and not precomputed.t2w_mask - # only_t2w_mask = precomputed.t2w_mask and not precomputed.t1w_mask - - t1w_mask = precomputed.t1w_mask - t2w_mask = precomputed.t2w_mask + t1w_mask = precomputed.get('t1w_mask', None) + t2w_mask = precomputed.get('t2w_mask', None) anat_mask = None # T1w masking - define pre-emptively apply_t1w_mask = pe.Node(ApplyMask(), name='apply_t1w_mask') + apply_t2w_mask = apply_t1w_mask.clone(name='apply_t2w_mask') if not t1w_mask: if skull_strip_mode == 'auto': @@ -441,13 +447,13 @@ def init_infant_anat_fit_wf( else: run_t1w_skull_strip = {'force': True, 'skip': False}[skull_strip_mode] - if not run_t1w_skull_strip: # Image is masked + if not run_t1w_skull_strip: desc += ( 'The provided T1w image was previously skull-stripped; ' 'a brain mask was derived from the input image.' ) - if not precomputed.t1w_preproc: + if not t1w_preproc: LOGGER.info('ANAT Stage 2: Skipping skull-strip, INU-correction only') n4_only_wf = init_n4_only_wf( @@ -513,19 +519,21 @@ def init_infant_anat_fit_wf( 'A pre-computed T1w brain mask was provided as input and used throughout the ' 'workflow.' ) - t1w_buffer.inputs.t1w_mask = precomputed.t1w_mask - apply_t1w_mask.inputs.in_mask = precomputed.t1w_mask + t1w_buffer.inputs.t1w_mask = t1w_mask + apply_t1w_mask.inputs.in_mask = t1w_mask workflow.connect(t1w_validate, 'out_file', apply_t1w_mask, 'in_file') - if not precomputed.t1w: + if not t1w_preproc: LOGGER.info('ANAT Skipping skull-strip, INU-correction only') - n4_only_wf = init_n4_only_wf( + t1w_n4_only_wf = init_n4_only_wf( omp_nthreads=omp_nthreads, atropos_use_random_seed=not skull_strip_fixed_seed, + bids_suffix='T1w', + name='t1w_n4_only_wf', ) workflow.connect([ - (apply_t1w_mask, n4_only_wf, [('out_file', 'inputnode.in_files')]), - (n4_only_wf, t1w_buffer, [ + (apply_t1w_mask, t1w_n4_only_wf, [('out_file', 'inputnode.in_files')]), + (t1w_n4_only_wf, t1w_buffer, [ (('outputnode.bias_corrected', pop_file), 't1w_preproc'), (('outputnode.out_file', pop_file), 't1w_brain'), ]), @@ -534,84 +542,106 @@ def init_infant_anat_fit_wf( LOGGER.info('ANAT Skipping T1w masking') workflow.connect(apply_t1w_mask, 'out_file', t1w_buffer, 't1w_brain') - save_t2w_mask = True - if precomputed.t2w_mask or only_t1w_mask: - desc += ( - ' A pre-computed T2w brain mask was provided as input and ' - 'used throughout the workflow.' 
-        )
-        # A mask is available and will be applied
-        apply_t2w_mask = pe.Node(ApplyMask(), name='apply_t2w_mask')
-        workflow.connect(t2w_validate, 'out_file', apply_t2w_mask, 'in_file')
-        if precomputed.t2w_mask:
-            LOGGER.info('ANAT Found T2w brain mask')
-
-            save_t2w_mask = False
-            anat_buffer.inputs.t2w_mask = precomputed.t2w_mask
-            apply_t1w_mask.inputs.in_mask = precomputed.t2w_mask
-            workflow.connect(refined_buffer, 't2w_mask', outputnode, 't1w_mask')
-        elif only_t2w_mask:
-            LOGGER.info('ANAT No T2w brain mask but a T1w mask is available')
+    if not t2w_mask:
+        if skull_strip_mode == 'auto':
+            run_t2w_skull_strip = not all(_is_skull_stripped(img) for img in t2w)
+        else:
+            run_t2w_skull_strip = {'force': True, 'skip': False}[skull_strip_mode]
 
-            transform_t2w_mask = pe.Node(
-                ApplyTransforms(interpolation='MultiLabel'), name='transform_t1w_mask'
+        if not run_t2w_skull_strip:
+            desc += (
+                'The T2w reference was previously skull-stripped; '
+                'a brain mask was derived from the input image.'
             )
-            workflow.connect([
-                (refined_buffer, transform_t2w_mask, [('t1w_mask', 'input_image')]),
-                (anat_buffer, transform_t2w_mask, [('t2w_preproc', 'reference_image')]),
-                (coreg_buffer, transform_t2w_mask, [('t2w2t1w_xfm', 'transforms')]),
-                (transform_t2w_mask, apply_t2w_mask, [('output_image', 'in_file')]),
-                (apply_t2w_mask, refined_buffer, [('out_file', 't2w_mask')]),
-            ])  # fmt:skip
 
-        if not precomputed.t2w_preproc:
-            LOGGER.info('ANAT Skipping T1w skull-strip, INU-correction only')
-            n4_only_wf = init_n4_only_wf(
-                omp_nthreads=omp_nthreads,
-                atropos_use_random_seed=not skull_strip_fixed_seed,
-            )
-            workflow.connect([
-                (apply_t1w_mask, n4_only_wf, [('out_file', 'inputnode.in_files')]),
-                (n4_only_wf, anat_buffer, [
-                    (('outputnode.bias_corrected', pop_file), 't2w_preproc'),
-                    (('outputnode.out_file', pop_file), 't2w_brain'),
-                ]),
-            ])  # fmt:skip
+            if not t2w_preproc:
+                LOGGER.info('ANAT Stage 2b: Skipping skull-strip, INU-correction only')
+
+                t2w_n4_only_wf = init_n4_only_wf(
+                    omp_nthreads=omp_nthreads,
+                    bids_suffix=image_type,
+                    atropos_use_random_seed=not skull_strip_fixed_seed,
+                    name='t2w_n4_only_wf',
+                )
+                workflow.connect([
+                    (t2w_validate, t2w_n4_only_wf, [('out_file', 'inputnode.in_files')]),
+                    (t2w_n4_only_wf, t2w_buffer, [
+                        (('outputnode.bias_corrected', pop_file), 't2w_preproc'),
+                        ('outputnode.out_mask', 't2w_mask'),
+                        (('outputnode.out_file', pop_file), 't2w_brain'),
+                        ('outputnode.out_segm', 'ants_seg'),
+                    ]),
+                ])  # fmt:skip
+            else:
+                LOGGER.info('ANAT Stage 2b: Skipping skull-strip, generating mask from input')
+                binarize_t2w = pe.Node(Binarize(thresh_low=2), name='binarize_t2w')
+                workflow.connect([
+                    (t2w_validate, binarize_t2w, [('out_file', 'in_file')]),
+                    (t2w_validate, t2w_buffer, [('out_file', 't2w_brain')]),
+                    (binarize_t2w, t2w_buffer, [('out_file', 't2w_mask')]),
+                ])  # fmt:skip
         else:
-            LOGGER.info('ANAT Applying T2w mask to precomputed T2w')
-            workflow.connect(apply_t1w_mask, 'out_file', anat_buffer, 't2w_brain')
+            # Check whether we can convert a previously computed T2w mask
+            # or need to run the atlas based brain extraction
+            if t1w_mask:
+                LOGGER.info('ANAT T2w mask will be transformed into T1w space')
+                transform_t1w_mask = pe.Node(
+                    ApplyTransforms(interpolation='MultiLabel'),
+                    name='transform_t1w_mask',
+                )
+
+                workflow.connect([
+                    (t1w_buffer, transform_t1w_mask, [('t1w_mask', 'input_image')]),
+                    (coreg_buffer, transform_t1w_mask, [('t1w2t2w_xfm', 'transforms')]),
+                    (transform_t1w_mask, apply_t2w_mask, [('output_image', 'in_mask')]),
+                    (t2w_buffer, apply_t2w_mask, [('t2w_preproc', 'in_file')]),  # TODO: Unsure about this connection
+                ])  # fmt:skip
+            else:
+                LOGGER.info('ANAT Brain mask will be calculated using T2w')
+                brain_extraction_wf = init_infant_brain_extraction_wf(
+                    age_months=age_months,
+                    ants_affine_init=ants_affine_init,
+                    skull_strip_template=skull_strip_template.space,
+                    template_specs=skull_strip_template.spec,
+                    omp_nthreads=omp_nthreads,
+                    sloppy=sloppy,
+                    debug='registration' in config.execution.debug,
+                )
+
+                workflow.connect([
+                    (t2w_validate, brain_extraction_wf, [
+                        ('out_file', 'inputnode.t2w_preproc'),
+                    ]),
+                    (brain_extraction_wf, t2w_buffer, [
+                        ('outputnode.out_mask', 't2w_mask'),
+                        ('outputnode.t2w_brain', 't2w_brain'),
+                    ]),
+                ])
     else:
-        LOGGER.info('ANAT Stage 2: Preparing brain extraction workflow')
-        if skull_strip_mode == 'auto':
-            run_skull_strip = not all(_is_skull_stripped(img) for img in t1w)
-        else:
-            run_skull_strip = {'force': True, 'skip': False}[skull_strip_mode]
-        ...
+        LOGGER.info('ANAT Found T2w brain mask')
 
-    if save_t2w_mask:
-        ds_t2w_mask_wf = init_ds_mask_wf(
-            bids_root=bids_root,
-            output_dir=output_dir,
-            mask_type='brain',
-            name='ds_t2w_mask_wf',
-        )
-        workflow.connect([
-            (sourcefile_buffer, ds_t2w_mask_wf, [('t2w_source_files', 'inputnode.source_files')]),
-            (refined_buffer, ds_t2w_mask_wf, [('t2w_mask', 'inputnode.mask_file')]),
-            (ds_t2w_mask_wf, outputnode, [('outputnode.mask_file', 't2w_mask')]),
-        ])  # fmt:skip
+        if reference_anat == 't2w':
+            desc += (
+                'A pre-computed T2w brain mask was provided as input and used throughout the '
+                'workflow.'
+            )
+        t2w_buffer.inputs.t2w_mask = t2w_mask
+        apply_t2w_mask.inputs.in_mask = t2w_mask
+        workflow.connect(t2w_validate, 'out_file', apply_t2w_mask, 'in_file')
 
     # Stage 3: Coregistration
+    t1w2t2w_xfm = precomputed.get('t1w2t2w_xfm')
+    t2w2t1w_xfm = precomputed.get('t2w2t1w_xfm')
+
     # To use the found xfm, requires both precomputed anatomicals to be found as well
-    if precomputed.t1w_preproc and precomputed.t2w_preproc:
-        if precomputed.t1w2t2w_xfm:
+    if t1w_preproc and t2w_preproc:
+        if t1w2t2w_xfm:
             LOGGER.info('ANAT Found T1w-T2w xfm')
             desc += ' A T1w-T2w coregistration transform was provided as input and used throughout the workflow.'
-            coreg_buffer.inputs.t1w2t2w_xfm = precomputed.t1w2t2w_xfm
-        if precomputed.t2w2t1w_xfm:
+            coreg_buffer.inputs.t1w2t2w_xfm = t1w2t2w_xfm
+        if t2w2t1w_xfm:
             LOGGER.info('ANAT Found T2w-T1w xfm')
-            coreg_buffer.inputs.t2w2t1w_xfm = precomputed.t2w2t1w_xfm
+            coreg_buffer.inputs.t2w2t1w_xfm = t2w2t1w_xfm
     else:
         LOGGER.info('ANAT Coregistering anatomical references')
         desc += ' The T1w and T2w reference volumes were co-registered using ANTs.'
@@ -621,7 +651,7 @@ def init_infant_anat_fit_wf( sloppy=sloppy, debug='registration' in config.execution.debug, t1w_mask=False, - probmap=not precomputed.t2w_mask, + probmap=not t2w_mask, ) workflow.connect([ (anat_buffer, coregistration_wf, [ @@ -666,7 +696,7 @@ def init_infant_anat_fit_wf( # TODO: datasink if not anat_dseg: - ds_dseg_wf = init_ds_dseg_wf(output_dir=output_dir) + ds_dseg_wf = init_ds_dseg_wf(output_dir=str(output_dir)) workflow.connect([ (sourcefile_buffer, ds_dseg_wf, [ ('anat_source_files', 'inputnode.source_files'), @@ -678,7 +708,7 @@ def init_infant_anat_fit_wf( ]) # fmt:skip if not anat_tpms: - ds_tpms_wf = init_ds_tpms_wf(output_dir=output_dir) + ds_tpms_wf = init_ds_tpms_wf(output_dir=str(output_dir)) workflow.connect([ (sourcefile_buffer, ds_dseg_wf, [ ('anat_source_files', 'inputnode.source_files'), @@ -852,7 +882,7 @@ def init_infant_anat_fit_wf( fsnative_xfms = precomputed.get('transforms', {}).get('fsnative') if not fsnative_xfms: ds_fs_registration_wf = init_ds_fs_registration_wf( - image_type=image_type, output_dir=output_dir + image_type=image_type, output_dir=str(output_dir) ) workflow.connect([ (sourcefile_buffer, ds_fs_registration_wf, [ From 502cca2889f1906546d769123a1159ee23175134 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 18 Apr 2024 17:48:35 -0400 Subject: [PATCH 020/142] ENH: Add top-level anat workflow --- nibabies/workflows/anatomical/fit.py | 353 +++++++++++++++++++++++---- 1 file changed, 308 insertions(+), 45 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index c7e1d8d2..58290653 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -16,20 +16,28 @@ ) from smriprep.workflows.fit.registration import init_register_template_wf from smriprep.workflows.outputs import ( + init_anat_second_derivatives_wf, + init_ds_anat_volumes_wf, init_ds_dseg_wf, init_ds_fs_registration_wf, + init_ds_grayord_metrics_wf, init_ds_mask_wf, init_ds_surface_metrics_wf, init_ds_surfaces_wf, init_ds_template_registration_wf, init_ds_template_wf, init_ds_tpms_wf, + init_template_iterator_wf, ) from smriprep.workflows.surfaces import ( init_fsLR_reg_wf, init_gifti_morphometrics_wf, init_gifti_surfaces_wf, + init_hcp_morphometrics_wf, + init_morph_grayords_wf, init_refinement_wf, + init_resample_midthickness_wf, + init_surface_derivatives_wf, ) from nibabies import config @@ -50,12 +58,12 @@ def init_infant_anat_fit_wf( t1w: list, t2w: list, flair: list, - bids_root: Path, + bids_root: str, precomputed: dict, hires: bool, longitudinal: bool, omp_nthreads: int, - output_dir: Path, + output_dir: str, segmentation_atlases: Path | None, skull_strip_mode: ty.Literal['auto', 'skip', 'force'], skull_strip_template: 'Reference', @@ -63,6 +71,7 @@ def init_infant_anat_fit_wf( spaces: 'SpatialReferences', recon_method: ty.Literal['freesurfer', 'infantfs', 'mcribs'] | None, cifti_output: ty.Literal['91k', '170k'] | None, + msm_sulc: bool = False, name: str = 'infant_anat_fit_wf', ): """ @@ -430,13 +439,12 @@ def init_infant_anat_fit_wf( (t2w_validate, t2w_buffer, [('out_file', 't2w_preproc')]), ]) # fmt:skip - # Stage 2: Use previously computed mask or calculate # If we only have one mask (could be either T1w/T2w), # just apply transform to get it in the other space t1w_mask = precomputed.get('t1w_mask', None) t2w_mask = precomputed.get('t2w_mask', None) - anat_mask = None + anat_mask = t1w_mask or t2w_mask # T1w masking - define pre-emptively apply_t1w_mask = pe.Node(ApplyMask(), 
name='apply_t1w_mask') apply_t2w_mask = apply_t1w_mask.clone(name='apply_t2w_mask') @@ -586,7 +594,8 @@ def init_infant_anat_fit_wf( if t1w_mask: LOGGER.info('ANAT T2w mask will be transformed into T1w space') transform_t1w_mask = pe.Node( - ApplyTransforms(interpolation='MultiLabel'), name='transform_t1w_mask', + ApplyTransforms(interpolation='MultiLabel'), + name='transform_t1w_mask', ) workflow.connect([ @@ -607,15 +616,25 @@ def init_infant_anat_fit_wf( debug='registration' in config.execution.debug, ) - workflow.connect([ - (t2w_validate, brain_extraction_wf, [ - ('out_file', 'inputnode.t2w_preproc'), - ]), - (brain_extraction_wf, t2w_buffer, [ - ('outputnode.out_mask', 't2w_mask'), - ('outputnode.t2w_brain', 't2w_brain'), - ]), - ]) + workflow.connect( + [ + ( + t2w_validate, + brain_extraction_wf, + [ + ('out_file', 'inputnode.t2w_preproc'), + ], + ), + ( + brain_extraction_wf, + t2w_buffer, + [ + ('outputnode.out_mask', 't2w_mask'), + ('outputnode.t2w_brain', 't2w_brain'), + ], + ), + ] + ) else: LOGGER.info('ANAT Found T2w brain mask') @@ -790,7 +809,6 @@ def init_infant_anat_fit_wf( return workflow # Stage 6: Surface reconstruction - if recon_method == 'mcribs': from nibabies.workflows.anatomical.surfaces import init_mcribs_surface_recon_wf @@ -834,7 +852,7 @@ def init_infant_anat_fit_wf( name='surface_recon_wf', omp_nthreads=omp_nthreads, hires=hires, - fs_no_resume=fs_no_resume, + fs_no_resume=False, precomputed=precomputed, ) @@ -882,17 +900,33 @@ def init_infant_anat_fit_wf( fsnative_xfms = precomputed.get('transforms', {}).get('fsnative') if not fsnative_xfms: ds_fs_registration_wf = init_ds_fs_registration_wf( - image_type=image_type, output_dir=str(output_dir) + image_type=image_type, output_dir=output_dir ) + + if recon_method == 'freesurfer': + workflow.connect([ + (surface_recon_wf, fsnative_buffer, [ + ('outputnode.fsnative2t1w_xfm', 'fsnative2anat_xfm'), + ('outputnode.t1w2fsnative_xfm', 'anat2fsnative_xfm'), + ]), + ]) # fmt:skip + else: + workflow.connect([ + (surface_recon_wf, fsnative_buffer, [ + ('outputnode.fsnative2anat_xfm', 'fsnative2anat_xfm'), + ('outputnode.anat2fsnative_xfm', 'anat2fsnative_xfm'), + ]), + ]) # fmt:skip + workflow.connect([ (sourcefile_buffer, ds_fs_registration_wf, [ ('anat_source_files', 'inputnode.source_files'), ]), - (surface_recon_wf, ds_fs_registration_wf, [ - ('outputnode.fsnative2t1w_xfm', 'inputnode.fsnative2anat_xfm'), + (fsnative_buffer, ds_fs_registration_wf, [ + ('fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), ]), - (ds_fs_registration_wf, outputnode, [ - ('outputnode.fsnative2anat_xfm', 'fsnative2anat_xfm'), + (fsnative_buffer, outputnode, [ + ('fsnative2anat_xfm', 'fsnative2anat_xfm'), ]), ]) # fmt:skip elif 'reverse' in fsnative_xfms: @@ -903,7 +937,7 @@ def init_infant_anat_fit_wf( 'Found an anatomical-to-fsnative transform without the reverse. Time to handle this.' 
) - if not have_mask: + if not anat_mask: LOGGER.info('ANAT Stage 7: Preparing mask refinement workflow') # Stage 6: Refine ANTs mask with FreeSurfer segmentation refinement_wf = init_refinement_wf() @@ -957,8 +991,8 @@ def init_infant_anat_fit_wf( if surfs: gifti_surfaces_wf = init_gifti_surfaces_wf(surfaces=surfs) ds_surfaces_wf = init_ds_surfaces_wf( - bids_root=str(bids_root), - output_dir=str(output_dir), + bids_root=bids_root, + output_dir=output_dir, surfaces=surfs, ) @@ -983,8 +1017,8 @@ def init_infant_anat_fit_wf( surfaces=spheres, to_scanner=False, name='gifti_spheres_wf' ) ds_spheres_wf = init_ds_surfaces_wf( - bids_root=str(bids_root), - output_dir=str(output_dir), + bids_root=bids_root, + output_dir=output_dir, surfaces=spheres, name='ds_spheres_wf', ) @@ -1008,8 +1042,8 @@ def init_infant_anat_fit_wf( LOGGER.info(f'ANAT Stage 8: Creating GIFTI metrics for {metrics}') gifti_morph_wf = init_gifti_morphometrics_wf(morphometrics=metrics) ds_morph_wf = init_ds_surface_metrics_wf( - bids_root=str(bids_root), - output_dir=str(output_dir), + bids_root=bids_root, + output_dir=output_dir, metrics=metrics, name='ds_morph_wf', ) @@ -1032,8 +1066,8 @@ def init_infant_anat_fit_wf( LOGGER.info('ANAT Stage 8a: Creating cortical ribbon mask') anat_ribbon_wf = init_anat_ribbon_wf() ds_ribbon_mask_wf = init_ds_mask_wf( - bids_root=str(bids_root), - output_dir=str(output_dir), + bids_root=bids_root, + output_dir=output_dir, mask_type='ribbon', name='ds_ribbon_mask_wf', ) @@ -1065,8 +1099,8 @@ def init_infant_anat_fit_wf( fsLR_reg_wf = init_fsLR_reg_wf() ds_fsLR_reg_wf = init_ds_surfaces_wf( - bids_root=str(bids_root), - output_dir=str(output_dir), + bids_root=bids_root, + output_dir=output_dir, surfaces=['sphere_reg_fsLR'], name='ds_fsLR_reg_wf', ) @@ -1090,22 +1124,251 @@ def init_infant_single_anat_fit_wf( *, age_months: int, anatomicals: list, - bids_root: str, - precomputed, - hires, - longitudinal, - omp_nthreads, - output_dir, - segmentation_atlases, - skull_strip_mode, - skull_strip_template, - sloppy, - spaces, - cifti_output, - name='infant_single_anat_fit_wf', + bids_root: str | Path, + precomputed: dict, + hires: bool, + longitudinal: bool, + omp_nthreads: bool, + output_dir: str | Path, + segmentation_atlases: str | Path | None, + skull_strip_mode: ty.Literal['force', 'skip', 'auto'], + skull_strip_template: 'Reference', + sloppy: bool, + spaces: 'SpatialReferences', + cifti_output: ty.Literal['91k', '170k'], + name: str = 'infant_single_anat_fit_wf', ): desc = ( '\nAnatomical data preprocessing\n\n: ', f'A total of {len(anatomicals)} {modality} images were found ' 'within the input BIDS dataset.\n', ) + + +def init_anat_preproc_wf( + *, + age_months: int, + ants_affine_init: bool, + t1w: list, + t2w: list, + bids_root: str, + precomputed: dict, + hires: bool | None, + longitudinal: bool, + omp_nthreads: int, + output_dir: str, + segmentation_atlases: str | Path | None, + skull_strip_mode: str, + skull_strip_template: Reference, + sloppy: bool, + spaces: SpatialReferences, + cifti_output: ty.Literal['91k', '170k', False], + name: str = 'infant_anat_wf', +) -> pe.Workflow: + workflow = pe.Workflow(name=name) + + inputnode = pe.Node( + niu.IdentityInterface(fields=['t1w', 't2w', 'roi', 'flair', 'subjects_dir', 'subject_id']), + name='inputnode', + ) + outputnode = pe.Node( + niu.IdentityInterface( + fields=[ + 'template', + 'subjects_dir', + 'subject_id', + 't1w_preproc', + 't1w_mask', + 't1w_dseg', + 't1w_tpms', + 'anat2std_xfm', + 'std2anat_xfm', + 'fsnative2t1w_xfm', + 't1w_aparc', 
+ 't1w_aseg', + 'sphere_reg', + 'sphere_reg_fsLR', + ] + ), + name='outputnode', + ) + msm_sulc = False # Not enabled for now + anat_fit_wf = init_infant_anat_fit_wf( + age_months=age_months, + bids_root=bids_root, + output_dir=output_dir, + hires=hires, + longitudinal=longitudinal, + msm_sulc=msm_sulc, + skull_strip_mode=skull_strip_mode, + skull_strip_template=skull_strip_template, + spaces=spaces, + t1w=t1w, + t2w=t2w, + flair=flair, + precomputed=precomputed, + sloppy=sloppy, + segmentation_atlases=segmentation_atlases, + cifti_output=cifti_output, + recon_method=recon_method, + omp_nthreads=omp_nthreads, + ) + + template_iterator_wf = init_template_iterator_wf(spaces=spaces, sloppy=sloppy) + ds_std_volumes_wf = init_ds_anat_volumes_wf( + bids_root=bids_root, + output_dir=output_dir, + name='ds_std_volumes_wf', + ) + + workflow.connect([ + (inputnode, anat_fit_wf, [ + ('t1w', 'inputnode.t1w'), + ('t2w', 'inputnode.t2w'), + ('roi', 'inputnode.roi'), + ('flair', 'inputnode.flair'), + ('subjects_dir', 'inputnode.subjects_dir'), + ('subject_id', 'inputnode.subject_id'), + ]), + (anat_fit_wf, outputnode, [ + ('outputnode.template', 'template'), + ('outputnode.subjects_dir', 'subjects_dir'), + ('outputnode.subject_id', 'subject_id'), + ('outputnode.anat_preproc', 'anat_preproc'), + ('outputnode.anat_mask', 'anat_mask'), + ('outputnode.anat_dseg', 'anat_dseg'), + ('outputnode.anat_tpms', 'anat_tpms'), + ('outputnode.anat2std_xfm', 'anat2std_xfm'), + ('outputnode.std2anat_xfm', 'std2anat_xfm'), + ('outputnode.fsnative2anat_xfm', 'fsnative2anat_xfm'), + ('outputnode.sphere_reg', 'sphere_reg'), + (f"outputnode.sphere_reg_{'msm' if msm_sulc else 'fsLR'}", 'sphere_reg_fsLR'), + ('outputnode.anat_ribbon', 'anat_ribbon'), + ]), + (anat_fit_wf, template_iterator_wf, [ + ('outputnode.template', 'inputnode.template'), + ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), + ]), + (anat_fit_wf, ds_std_volumes_wf, [ + ('outputnode.anat_valid_list', 'inputnode.source_files'), + ('outputnode.anat_preproc', 'inputnode.anat_preproc'), + ('outputnode.anat_mask', 'inputnode.anat_mask'), + ('outputnode.anat_dseg', 'inputnode.anat_dseg'), + ('outputnode.anat_tpms', 'inputnode.anat_tpms'), + ]), + (template_iterator_wf, ds_std_volumes_wf, [ + ('outputnode.std_t1w', 'inputnode.ref_file'), + ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), + ('outputnode.space', 'inputnode.space'), + ('outputnode.cohort', 'inputnode.cohort'), + ('outputnode.resolution', 'inputnode.resolution'), + ]), + ]) # fmt:skip + + if recon_method is not None: + anat_second_derivatives_wf = init_anat_second_derivatives_wf( + bids_root=bids_root, + output_dir=output_dir, + cifti_output=cifti_output, + ) + surface_derivatives_wf = init_surface_derivatives_wf() + ds_surfaces_wf = init_ds_surfaces_wf( + bids_root=bids_root, output_dir=output_dir, surfaces=['inflated'] + ) + ds_curv_wf = init_ds_surface_metrics_wf( + bids_root=bids_root, output_dir=output_dir, metrics=['curv'], name='ds_curv_wf' + ) + + workflow.connect([ + (anat_fit_wf, surface_derivatives_wf, [ + ('outputnode.t1w_preproc', 'inputnode.reference'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2t1w_xfm'), + ]), + (anat_fit_wf, ds_surfaces_wf, [ + ('outputnode.anat_valid_list', 'inputnode.source_files'), + ]), + (surface_derivatives_wf, ds_surfaces_wf, [ + ('outputnode.inflated', 'inputnode.inflated'), + ]), + (anat_fit_wf, ds_curv_wf, [ + 
('outputnode.anat_valid_list', 'inputnode.source_files'), + ]), + (surface_derivatives_wf, ds_curv_wf, [ + ('outputnode.curv', 'inputnode.curv'), + ]), + (anat_fit_wf, anat_second_derivatives_wf, [ + ('outputnode.anat_valid_list', 'inputnode.source_files'), + ]), + (surface_derivatives_wf, anat_second_derivatives_wf, [ + ('outputnode.out_aseg', 'inputnode.t1w_fs_aseg'), + ('outputnode.out_aparc', 'inputnode.t1w_fs_aparc'), + ]), + (surface_derivatives_wf, outputnode, [ + ('outputnode.out_aseg', 'anat_aseg'), + ('outputnode.out_aparc', 'anat_aparc'), + ]), + ]) # fmt:skip + + if cifti_output: + hcp_morphometrics_wf = init_hcp_morphometrics_wf(omp_nthreads=omp_nthreads) + resample_midthickness_wf = init_resample_midthickness_wf(grayord_density=cifti_output) + morph_grayords_wf = init_morph_grayords_wf( + grayord_density=cifti_output, omp_nthreads=omp_nthreads + ) + + ds_grayord_metrics_wf = init_ds_grayord_metrics_wf( + bids_root=bids_root, + output_dir=output_dir, + metrics=['curv', 'thickness', 'sulc'], + cifti_output=cifti_output, + ) + + workflow.connect([ + (anat_fit_wf, hcp_morphometrics_wf, [ + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.sulc', 'inputnode.sulc'), + ('outputnode.thickness', 'inputnode.thickness'), + ('outputnode.midthickness', 'inputnode.midthickness'), + ]), + (surface_derivatives_wf, hcp_morphometrics_wf, [ + ('outputnode.curv', 'inputnode.curv'), + ]), + (anat_fit_wf, resample_midthickness_wf, [ + ('outputnode.midthickness', 'inputnode.midthickness'), + ( + f"outputnode.sphere_reg_{'msm' if msm_sulc else 'fsLR'}", + 'inputnode.sphere_reg_fsLR', + ), + ]), + (anat_fit_wf, morph_grayords_wf, [ + ('outputnode.midthickness', 'inputnode.midthickness'), + ( + f"outputnode.sphere_reg_{'msm' if msm_sulc else 'fsLR'}", + 'inputnode.sphere_reg_fsLR', + ), + ]), + (hcp_morphometrics_wf, morph_grayords_wf, [ + ('outputnode.curv', 'inputnode.curv'), + ('outputnode.sulc', 'inputnode.sulc'), + ('outputnode.thickness', 'inputnode.thickness'), + ('outputnode.roi', 'inputnode.roi'), + ]), + (resample_midthickness_wf, morph_grayords_wf, [ + ('outputnode.midthickness_fsLR', 'inputnode.midthickness_fsLR'), + ]), + (anat_fit_wf, ds_grayord_metrics_wf, [ + ('outputnode.anat_valid_list', 'inputnode.source_files'), + ]), + (morph_grayords_wf, ds_grayord_metrics_wf, [ + ('outputnode.curv_fsLR', 'inputnode.curv'), + ('outputnode.curv_metadata', 'inputnode.curv_metadata'), + ('outputnode.thickness_fsLR', 'inputnode.thickness'), + ('outputnode.thickness_metadata', 'inputnode.thickness_metadata'), + ('outputnode.sulc_fsLR', 'inputnode.sulc'), + ('outputnode.sulc_metadata', 'inputnode.sulc_metadata'), + ]), + ]) # fmt:skip + + return workflow From 57f6053d0f314afa42b2ff27545060b59336a942 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 18 Apr 2024 17:49:23 -0400 Subject: [PATCH 021/142] RF: Drop derivatives class for dict approach --- nibabies/utils/derivatives.py | 77 +++++++++++++++++++++++++++++++++++ 1 file changed, 77 insertions(+) create mode 100644 nibabies/utils/derivatives.py diff --git a/nibabies/utils/derivatives.py b/nibabies/utils/derivatives.py new file mode 100644 index 00000000..fa3514d5 --- /dev/null +++ b/nibabies/utils/derivatives.py @@ -0,0 +1,77 @@ +import json +from pathlib import Path + +from niworkflows.data import load as nwf_load + +from nibabies.data import load + + +def collect_anatomical_derivatives( + derivatives_dir: Path | str, + subject_id: str, + std_spaces: list, + spec: dict | None = None, + patterns: list | None = None, +): + """ + 
Collect outputs from across processing stages. + + Potential files: + - T1w preproc + - T2w preproc + - T1w mask + - T2w mask + + + """ + from bids.layout import BIDSLayout + + if spec is None or patterns is None: + _spec, _patterns = tuple(json.loads(load('io_spec_anat.json').read_text()).values()) + + if spec is None: + spec = _spec + if patterns is None: + patterns = _patterns + + deriv_config = nwf_load('nipreps.json') + layout = BIDSLayout(derivatives_dir, config=deriv_config, validate=False) + derivs_cache = {} + + for key, qry in spec['baseline'].items(): + qry['subject'] = subject_id + item = layout.get(return_type='filename', **qry) + if not item: + continue + + derivs_cache[key] = item[0] if len(item) == 1 else item + + for key, qry in spec['coreg'].items(): # T1w->T2w, T2w->T1w + qry['subject'] = subject_id + item = layout.get(return_type='filename', **qry) + if not item: + continue + derivs_cache[key] = item[0] if len(item) == 1 else item + + transforms = derivs_cache.setdefault('transforms', {}) + for _space in std_spaces: + space = _space.replace(':cohort-', '+') + for key, qry in spec['transforms'].items(): + qry = qry.copy() + qry['subject'] = subject_id + qry['from'] = qry['from'] or space + qry['to'] = qry['to'] or space + item = layout.get(return_type='filename', **qry) + if not item: + continue + transforms.setdefault(_space, {})[key] = item[0] if len(item) == 1 else item + + for key, qry in spec['surfaces'].items(): + qry['subject'] = subject_id + item = layout.get(return_type='filename', **qry) + if not item or len(item) != 2: + continue + + derivs_cache[key] = sorted(item) + + return derivs_cache From b6ca0ab74188f3fbb701aafc9c87f9153c1d335f Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 18 Apr 2024 17:49:58 -0400 Subject: [PATCH 022/142] ADD: Expected anatomical files --- nibabies/data/io_spec_anat.json | 212 ++++++++++++++++++++++++++++++++ 1 file changed, 212 insertions(+) create mode 100644 nibabies/data/io_spec_anat.json diff --git a/nibabies/data/io_spec_anat.json b/nibabies/data/io_spec_anat.json new file mode 100644 index 00000000..8bd83345 --- /dev/null +++ b/nibabies/data/io_spec_anat.json @@ -0,0 +1,212 @@ +{ + "queries": { + "baseline": { + "t1w_preproc": { + "datatype": "anat", + "space": null, + "desc": "preproc", + "suffix": "T1w", + "extension": [ + ".nii.gz", + ".nii" + ] + }, + "t2w_preproc": { + "datatype": "anat", + "space": null, + "desc": "preproc", + "suffix": "T2w", + "extension": [ + ".nii.gz", + ".nii" + ] + }, + "t1w_mask": { + "datatype": "anat", + "space": null, + "desc": "brain", + "suffix": "mask", + "extension": [ + ".nii.gz", + ".nii" + ] + }, + "t2w_mask": { + "datatype": "anat", + "space": null, + "desc": "brain", + "suffix": "mask", + "extension": [ + ".nii.gz", + ".nii" + ] + }, + "dseg": { + "datatype": "anat", + "space": null, + "desc": null, + "suffix": "dseg", + "extension": [ + ".nii.gz", + ".nii" + ] + }, + "tpms": { + "datatype": "anat", + "space": null, + "label": [ + "GM", + "WM", + "CSF" + ], + "suffix": "probseg", + "extension": [ + ".nii.gz", + ".nii" + ] + } + }, + "coreg": { + "t1w2t2w_xfm": { + "datatype": "anat", + "extension": [ + ".h5", + ".txt" + ], + "from": "T1w", + "to": "T2w", + "suffix": "xfm", + "mode": "image" + }, + "t2w2t1w_xfm": { + "datatype": "anat", + "extension": [ + ".h5", + ".txt" + ], + "from": "T2w", + "to": "T1w", + "suffix": "xfm", + "mode": "image" + } + }, + "transforms": { + "forward": { + "datatype": "anat", + "extension": [ + ".h5", + ".txt" + ], + "from": "anat", + "to": null, + 
"suffix": "xfm", + "mode": "image" + }, + "reverse": { + "datatype": "anat", + "extension": [ + ".h5", + ".txt" + ], + "from": null, + "to": "anat", + "suffix": "xfm", + "mode": "image" + } + }, + "surfaces": { + "white": { + "datatype": "anat", + "hemi": ["L", "R"], + "space": null, + "suffix": "white", + "extension": ".surf.gii" + }, + "pial": { + "datatype": "anat", + "hemi": ["L", "R"], + "space": null, + "suffix": "pial", + "extension": ".surf.gii" + }, + "midthickness": { + "datatype": "anat", + "hemi": ["L", "R"], + "space": null, + "suffix": "midthickness", + "extension": ".surf.gii" + }, + "sphere": { + "datatype": "anat", + "hemi": ["L", "R"], + "space": null, + "desc": null, + "suffix": "sphere", + "extension": ".surf.gii" + }, + "thickness": { + "datatype": "anat", + "hemi": ["L", "R"], + "space": null, + "suffix": "thickness", + "extension": ".shape.gii" + }, + "sulc": { + "datatype": "anat", + "hemi": ["L", "R"], + "space": null, + "suffix": "sulc", + "extension": ".shape.gii" + }, + "curv": { + "datatype": "anat", + "hemi": ["L", "R"], + "space": null, + "suffix": "curv", + "extension": ".shape.gii" + }, + "sphere_reg": { + "datatype": "anat", + "hemi": ["L", "R"], + "space": null, + "desc": "reg", + "suffix": "sphere", + "extension": ".surf.gii" + }, + "sphere_reg_fsLR": { + "datatype": "anat", + "hemi": ["L", "R"], + "space": "fsLR", + "desc": "reg", + "suffix": "sphere", + "extension": ".surf.gii" + }, + "sphere_reg_msm": { + "datatype": "anat", + "hemi": ["L", "R"], + "space": "fsLR", + "desc": "msmsulc", + "suffix": "sphere", + "extension": ".surf.gii" + } + }, + "masks": { + "anat_ribbon": { + "datatype": "anat", + "desc": "ribbon", + "suffix": "mask", + "extension": [ + ".nii.gz", + ".nii" + ] + } + } + }, + "patterns": [ + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}][_desc-{desc}]_{suffix}.{extension|nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}]_from-{from}_to-{to}_mode-{mode|image}_{suffix|xfm}.{extension}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}]_hemi-{hemi}[_space-{space}][_desc-{desc}]_{suffix}.{extension}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}]_desc-{desc}_{suffix|mask}.{extension|nii.gz}", + "sub-{subject}[/ses-{session}]/{datatype|anat}/sub-{subject}[_ses-{session}][_acq-{acquisition}][_ce-{ceagent}][_rec-{reconstruction}][_space-{space}]_label-{label}[_desc-{desc}]_{suffix|probseg}.{extension|nii.gz}" + ] + } \ No newline at end of file From cdbfbd2a94549861a0c11685f8ce4c1d47b5c3b4 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Fri, 19 Apr 2024 10:50:07 -0400 Subject: [PATCH 023/142] RF: Make midthickness creation standalone workflow --- nibabies/workflows/anatomical/surfaces.py | 108 ++++++++++++++-------- 1 file changed, 72 insertions(+), 36 deletions(-) diff --git a/nibabies/workflows/anatomical/surfaces.py b/nibabies/workflows/anatomical/surfaces.py index 023abc2c..c152d78c 100644 --- a/nibabies/workflows/anatomical/surfaces.py +++ b/nibabies/workflows/anatomical/surfaces.py @@ -7,8 +7,13 @@ from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe from niworkflows.engine.workflows import LiterateWorkflow -from 
niworkflows.interfaces.freesurfer import PatchedLTAConvert as LTAConvert
-from niworkflows.interfaces.freesurfer import PatchedRobustRegister as RobustRegister
+from niworkflows.interfaces.freesurfer import (
+    PatchedLTAConvert as LTAConvert,
+)
+from niworkflows.interfaces.freesurfer import (
+    PatchedRobustRegister as RobustRegister,
+)
+from niworkflows.interfaces.patches import FreeSurferSource
 from smriprep.interfaces.freesurfer import MakeMidthickness
 from smriprep.workflows.surfaces import _extract_fs_fields
 
@@ -21,6 +26,7 @@
     'subject_id',
     # Customize aseg
     'in_aseg',
+    'in_mask',
 ]
 SURFACE_OUTPUTS = [
     'subjects_dir',
@@ -54,9 +60,7 @@ def init_mcribs_surface_recon_wf(
             'A previously computed segmentation is required for the M-CRIB-S workflow.'
         )
 
-    inputnode = pe.Node(
-        niu.IdentityInterface(fields=SURFACE_INPUTS + ['anat_mask']), name='inputnode'
-    )
+    inputnode = pe.Node(niu.IdentityInterface(fields=SURFACE_INPUTS), name='inputnode')
     outputnode = pe.Node(niu.IdentityInterface(fields=SURFACE_OUTPUTS), name='outputnode')
 
     workflow = LiterateWorkflow(name=name)
@@ -129,7 +133,7 @@
         mask_dil = pe.Node(BinaryDilation(radius=3), name='mask_dil')
         mask_las = t2w_las.clone(name='mask_las')
         workflow.connect([
-            (inputnode, mask_dil, [('anat_mask', 'in_mask')]),
+            (inputnode, mask_dil, [('in_mask', 'in_mask')]),
             (mask_dil, mask_las, [('out_mask', 'in_file')]),
             (mask_las, mcribs_recon, [('out_file', 'mask_file')]),
         ])  # fmt:skip
@@ -140,22 +144,8 @@
         mem_gb=5,
     )
 
-    fssource = pe.Node(nio.FreeSurferSource(), name='fssource', run_without_submitting=True)
-    midthickness = pe.MapNode(
-        MakeMidthickness(thickness=True, distance=0.5, out_name='midthickness'),
-        iterfield='in_file',
-        name='midthickness',
-        n_procs=min(omp_nthreads, 12),
-    )
-    save_midthickness = pe.Node(nio.DataSink(parameterization=False), name='save_midthickness')
-
-    sync = pe.Node(
-        niu.Function(
-            function=_extract_fs_fields,
-            output_names=['subjects_dir', 'subject_id'],
-        ),
-        name='sync',
-    )
+    fssource = pe.Node(FreeSurferSource(), name='fssource', run_without_submitting=True)
+    midthickness_wf = init_midthickness_wf(omp_nthreads=omp_nthreads)
 
     workflow.connect([
         (inputnode, t2w_las, [('t2w', 'in_file')]),
@@ -171,16 +161,14 @@
                                ('subject_id', 'subject_id')]),
         (mcribs_recon, mcribs_postrecon, [('mcribs_dir', 'outdir')]),
         (mcribs_postrecon, fssource, [('subjects_dir', 'subjects_dir')]),
-        (inputnode, fssource, [('subject_id', 'subject_id')]),
-        (fssource, midthickness, [
-            ('white', 'in_file'),
-            ('graymid', 'graymid'),
+        (inputnode, fssource, [('subject_id', 'subject_id')]),
+        (fssource, midthickness_wf, [
+            ('white', 'inputnode.white'),
+            ('graymid', 'inputnode.graymid'),
         ]),
-        (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]),
-        (save_midthickness, sync, [('out_file', 'filenames')]),
-        (sync, outputnode, [
-            ('subjects_dir', 'subjects_dir'),
-            ('subject_id', 'subject_id'),
+        (midthickness_wf, outputnode, [
+            ('outputnode.subjects_dir', 'subjects_dir'),
+            ('outputnode.subject_id', 'subject_id'),
         ]),
     ])  # fmt:skip
 
@@ -278,6 +266,7 @@ def init_infantfs_surface_recon_wf(
     *,
     age_months: int,
     precomputed: dict,
+    omp_nthreads: int,
     use_aseg: bool = False,
     name: str = 'infantfs_surface_recon_wf',
 ):
@@ -299,10 +288,12 @@
     # inject the intensity-normalized skull-stripped t1w from the brain extraction workflow
     recon = pe.Node(InfantReconAll(age=age_months), name='reconall')
-    fssource = pe.Node(nio.FreeSurferSource(), name='fssource', run_without_submitting=True)
 
     if use_aseg:
         workflow.connect(inputnode, 'in_aseg', recon, 'aseg_file')
 
+    fssource = pe.Node(FreeSurferSource(), name='fssource', run_without_submitting=True)
+    midthickness_wf = init_midthickness_wf(omp_nthreads=omp_nthreads)
+
     workflow.connect([
         (inputnode, gen_recon_outdir, [
             ('subjects_dir', 'subjects_dir'),
@@ -315,14 +306,18 @@
         (gen_recon_outdir, recon, [
            ('out', 'outdir'),
         ]),
-        (recon, outputnode, [
-            ('subject_id', 'subject_id'),
-            (('outdir', _parent), 'subjects_dir'),
-        ]),
         (recon, fssource, [
             ('subject_id', 'subject_id'),
             (('outdir', _parent), 'subjects_dir'),
         ]),
+        (fssource, midthickness_wf, [
+            ('white', 'inputnode.white'),
+            ('graymid', 'inputnode.graymid'),
+        ]),
+        (midthickness_wf, outputnode, [
+            ('outputnode.subjects_dir', 'subjects_dir'),
+            ('outputnode.subject_id', 'subject_id'),
+        ])
     ])  # fmt:skip
 
     if 'fsnative' not in precomputed.get('transforms', {}):
@@ -351,6 +346,47 @@
     return workflow
 
 
+def init_midthickness_wf(*, omp_nthreads: int, name: str = 'make_midthickness_wf') -> pe.Workflow:
+    """
+    Standalone workflow to create and save cortical midthickness, derived from
+    the generated white / graymid surfaces.
+    """
+
+    workflow = pe.Workflow(name=name)
+    inputnode = pe.Node(niu.IdentityInterface(fields=['white', 'graymid']), name='inputnode')
+    outputnode = pe.Node(niu.IdentityInterface(fields=['subject_id', 'subjects_dir']), name='outputnode')
+
+    midthickness = pe.MapNode(
+        MakeMidthickness(thickness=True, distance=0.5, out_name='midthickness'),
+        iterfield='in_file',
+        name='midthickness',
+        n_procs=min(omp_nthreads, 12),
+    )
+    save_midthickness = pe.Node(nio.DataSink(parameterization=False), name='save_midthickness')
+
+    sync = pe.Node(
+        niu.Function(
+            function=_extract_fs_fields,
+            output_names=['subjects_dir', 'subject_id'],
+        ),
+        name='sync',
+    )
+
+    workflow.connect([
+        (inputnode, midthickness, [
+            ('white', 'in_file'),
+            ('graymid', 'graymid'),
+        ]),
+        (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]),
+        (save_midthickness, sync, [('out_file', 'filenames')]),
+        (sync, outputnode, [
+            ('subjects_dir', 'subjects_dir'),
+            ('subject_id', 'subject_id'),
+        ]),
+    ])  # fmt:skip
+    return workflow
+
+
 def _parent(p):
     from pathlib import Path
 
From c8b8f246df2e4a8dabd1abc39443ec87b604 Mon Sep 17 00:00:00 2001
From: mathiasg
Date: Fri, 19 Apr 2024 14:56:05 -0400
Subject: [PATCH 024/142] FIX: Anat IO, brain extraction inputs

---
 nibabies/workflows/anatomical/fit.py | 103 +++++++++++++++--------
 1 file changed, 72 insertions(+), 31 deletions(-)

diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py
index 58290653..7c43f7fe 100644
--- a/nibabies/workflows/anatomical/fit.py
+++ b/nibabies/workflows/anatomical/fit.py
@@ -4,6 +4,7 @@
 from nipype.interfaces import utility as niu
 from nipype.pipeline import engine as pe
+from niworkflows.anat.ants import init_n4_only_wf
 from niworkflows.engine.workflows import LiterateWorkflow as Workflow
 from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
 from niworkflows.interfaces.header import ValidateImage
@@ -42,6 +43,7 @@
 from nibabies import config
 from nibabies.workflows.anatomical.brain_extraction import init_infant_brain_extraction_wf
+from nibabies.workflows.anatomical.outputs import init_anat_reports_wf
 from nibabies.workflows.anatomical.registration import init_coregistration_wf
 from nibabies.workflows.anatomical.segmentation import init_segmentation_wf
 from nibabies.workflows.anatomical.surfaces import init_mcribs_dhcp_wf
@@ -135,14 +137,21 @@ def init_infant_anat_fit_wf(
         niu.IdentityInterface(
             fields=[
                 # Primary derivatives
+                # T1w
                 't1w_preproc',
-                't2w_preproc',
-                't1w2t2w_xfm',
                 't1w_mask',
-                't1w_dseg',
-                't1w_tpms',
+                't1w_valid_list',
+                # T2w
+                't2w_preproc',
+                't2w_mask',
+                't2w_valid_list',
+                # Anat specific
+                'anat_dseg',
+                'anat_tpms',
                 'anat2std_xfm',
-                'fsnative2t1w_xfm',
+                'fsnative2anat_xfm',
+                't1w2t2w_xfm',
+                't2w2t1w_xfm',
                 # Surface and metric derivatives for fsLR resampling
                 'white',
                 'pial',
@@ -160,7 +169,7 @@
                 'template',
                 'subjects_dir',
                 'subject_id',
-                't1w_valid_list',
+                'anat_valid_list',
             ]
         ),
         name='outputnode',
@@ -334,7 +343,6 @@
         num_files=num_t1w,
         longitudinal=longitudinal,
         omp_nthreads=omp_nthreads,
-        sloppy=sloppy,
         name='t1w_template_wf',
     )
     ds_t1w_template_wf = init_ds_template_wf(
@@ -392,7 +400,6 @@
         num_files=num_t1w,
         longitudinal=longitudinal,
        omp_nthreads=omp_nthreads,
-        sloppy=sloppy,
         name='t2w_template_wf',
     )
     ds_t2w_template_wf = init_ds_template_wf(
@@ -607,34 +614,24 @@
             else:
                 LOGGER.info('ANAT Brain mask will be calculated using T2w')
                 brain_extraction_wf = init_infant_brain_extraction_wf(
+                    omp_nthreads=omp_nthreads,
+                    sloppy=sloppy,
                     age_months=age_months,
-                    ants_affine_init=ants_affine_init,
+                    ants_affine_init=True,
                     skull_strip_template=skull_strip_template.space,
                     template_specs=skull_strip_template.spec,
-                    omp_nthreads=omp_nthreads,
-                    sloppy=sloppy,
                     debug='registration' in config.execution.debug,
                 )
 
-                workflow.connect(
-                    [
-                        (
-                            t2w_validate,
-                            brain_extraction_wf,
-                            [
-                                ('out_file', 'inputnode.t2w_preproc'),
-                            ],
-                        ),
-                        (
-                            brain_extraction_wf,
-                            t2w_buffer,
-                            [
-                                ('outputnode.out_mask', 't2w_mask'),
-                                ('outputnode.t2w_brain', 't2w_brain'),
-                            ],
-                        ),
-                    ]
-                )
+                workflow.connect([
+                    (t2w_validate, brain_extraction_wf, [
+                        ('out_file', 'inputnode.t2w_preproc'),
+                    ]),
+                    (brain_extraction_wf, t2w_buffer, [
+                        ('outputnode.out_mask', 't2w_mask'),
+                        ('outputnode.t2w_brain', 't2w_brain'),
+                    ]),
+                ])  # fmt:skip
     else:
         LOGGER.info('ANAT Found T2w brain mask')
@@ -876,7 +873,8 @@
         surface_recon_wf = init_infantfs_surface_recon_wf(
             age_months=age_months,
             precomputed=precomputed,
-            use_aseg=bool(anat_mask),
+            omp_nthreads=omp_nthreads,
+            use_aseg=bool(anat_aseg),
         )
 
         # Force use of the T1w image
@@ -897,6 +895,9 @@
             ]),
         ])  # fmt:skip
 
+        if anat_aseg:
+            workflow.connect(anat_buffer, 'anat_aseg', surface_recon_wf, 'inputnode.in_aseg')
+
     fsnative_xfms = precomputed.get('transforms', {}).get('fsnative')
     if not fsnative_xfms:
         ds_fs_registration_wf = init_ds_fs_registration_wf(
@@ -1144,6 +1145,46 @@
         'within the input BIDS dataset.\n',
     )
 
+    inputnode = pe.Node(
+        niu.IdentityInterface(
+            fields=['anat', 'roi', 'flair', 'subjects_dir', 'subject_id'],
+        ),
+        name='inputnode',
+    )
+
+    outputnode = pe.Node(
+        niu.IdentityInterface(
+            fields=[
+                # Primary derivatives
+                'anat_preproc',
+                'anat_mask',
+                'anat_dseg',
+                'anat_tpms',
+                'anat2std_xfm',
+                'fsnative2anat_xfm',
+                # Surface and metric derivatives for fsLR resampling
+                'white',
+                'pial',
+                'midthickness',
+                'sphere',
+                'thickness',
+                'sulc',
+                'sphere_reg',
+                'sphere_reg_fsLR',
+                'sphere_reg_msm',
+                'anat_ribbon',
+                # Reverse transform; not computable from forward transform
+                
'std2anat_xfm', + # Metadata + 'template', + 'subjects_dir', + 'subject_id', + 'anat_valid_list', + ] + ), + name='outputnode', + ) + def init_anat_preproc_wf( *, From 1eaf43e6dfb21382c3426d8e4054173aa80bd795 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Fri, 19 Apr 2024 14:59:04 -0400 Subject: [PATCH 025/142] RF: Brain extraction parameters, typing --- .../workflows/anatomical/brain_extraction.py | 177 +++++++++--------- 1 file changed, 89 insertions(+), 88 deletions(-) diff --git a/nibabies/workflows/anatomical/brain_extraction.py b/nibabies/workflows/anatomical/brain_extraction.py index d8cb8602..993380d9 100644 --- a/nibabies/workflows/anatomical/brain_extraction.py +++ b/nibabies/workflows/anatomical/brain_extraction.py @@ -6,16 +6,17 @@ def init_infant_brain_extraction_wf( - age_months=None, - ants_affine_init=True, - bspline_fitting_distance=200, - sloppy=False, - skull_strip_template="UNCInfant", - template_specs=None, - mem_gb=3.0, - debug=False, - name="infant_brain_extraction_wf", - omp_nthreads=None, + *, + omp_nthreads: int, + sloppy: bool, + age_months: int | None = None, + ants_affine_init: bool = True, + bspline_fitting_distance: int = 200, + skull_strip_template: str = 'UNCInfant', + template_specs: dict | None = None, + mem_gb: float = 3.0, + debug: bool = False, + name: str = 'infant_brain_extraction_wf', ): """ Build an atlas-based brain extraction pipeline for infant T2w MRI data. @@ -90,71 +91,71 @@ def init_infant_brain_extraction_wf( # handle template specifics template_specs = template_specs or {} - if skull_strip_template == "MNIInfant": - template_specs["resolution"] = 2 if sloppy else 1 + if skull_strip_template == 'MNIInfant': + template_specs['resolution'] = 2 if sloppy else 1 - if not template_specs.get("cohort"): + if not template_specs.get('cohort'): if age_months is None: - raise KeyError(f"Age or cohort for {skull_strip_template} must be provided!") - template_specs["cohort"] = cohort_by_months(skull_strip_template, age_months) + raise KeyError(f'Age or cohort for {skull_strip_template} must be provided!') + template_specs['cohort'] = cohort_by_months(skull_strip_template, age_months) template_files = fetch_templates(skull_strip_template, template_specs) # main workflow workflow = pe.Workflow(name) - inputnode = pe.Node(niu.IdentityInterface(fields=["t2w_preproc"]), name="inputnode") + inputnode = pe.Node(niu.IdentityInterface(fields=['t2w_preproc']), name='inputnode') outputnode = pe.Node( - niu.IdentityInterface(fields=["t2w_preproc", "t2w_brain", "out_mask", "out_probmap"]), - name="outputnode", + niu.IdentityInterface(fields=['t2w_preproc', 't2w_brain', 'out_mask', 'out_probmap']), + name='outputnode', ) # Ensure template comes with a range of intensities ANTs will like - clip_tmpl = pe.Node(IntensityClip(p_max=99), name="clip_tmpl") + clip_tmpl = pe.Node(IntensityClip(p_max=99), name='clip_tmpl') clip_tmpl.inputs.in_file = _pop(template_files['anat']) # Generate laplacian registration targets - lap_tmpl = pe.Node(ImageMath(operation="Laplacian", op2="0.4 1"), name="lap_tmpl") - lap_t2w = pe.Node(ImageMath(operation="Laplacian", op2="0.4 1"), name="lap_t2w") - norm_lap_tmpl = pe.Node(niu.Function(function=_norm_lap), name="norm_lap_tmpl") - norm_lap_t2w = pe.Node(niu.Function(function=_norm_lap), name="norm_lap_t2w") + lap_tmpl = pe.Node(ImageMath(operation='Laplacian', op2='0.4 1'), name='lap_tmpl') + lap_t2w = pe.Node(ImageMath(operation='Laplacian', op2='0.4 1'), name='lap_t2w') + norm_lap_tmpl = pe.Node(niu.Function(function=_norm_lap), 
name='norm_lap_tmpl') + norm_lap_t2w = pe.Node(niu.Function(function=_norm_lap), name='norm_lap_t2w') # Merge image nodes - mrg_tmpl = pe.Node(niu.Merge(2), name="mrg_tmpl", run_without_submitting=True) - mrg_t2w = pe.Node(niu.Merge(2), name="mrg_t2w", run_without_submitting=True) - bin_regmask = pe.Node(Binarize(thresh_low=0.20), name="bin_regmask") + mrg_tmpl = pe.Node(niu.Merge(2), name='mrg_tmpl', run_without_submitting=True) + mrg_t2w = pe.Node(niu.Merge(2), name='mrg_t2w', run_without_submitting=True) + bin_regmask = pe.Node(Binarize(thresh_low=0.20), name='bin_regmask') bin_regmask.inputs.in_file = str(template_files['mask']) - refine_mask = pe.Node(BinaryDilation(radius=3, iterations=2), name="refine_mask") + refine_mask = pe.Node(BinaryDilation(radius=3, iterations=2), name='refine_mask') - fixed_masks = pe.Node(niu.Merge(4), name="fixed_masks", run_without_submitting=True) - fixed_masks.inputs.in1 = "NULL" - fixed_masks.inputs.in2 = "NULL" + fixed_masks = pe.Node(niu.Merge(4), name='fixed_masks', run_without_submitting=True) + fixed_masks.inputs.in1 = 'NULL' + fixed_masks.inputs.in2 = 'NULL' fixed_masks.inputs.in3 = ( - "NULL" if not template_files['regmask'] else _pop(template_files['regmask']) + 'NULL' if not template_files['regmask'] else _pop(template_files['regmask']) ) # Set up initial spatial normalization - ants_params = "testing" if sloppy else "precise" + ants_params = 'testing' if sloppy else 'precise' norm = pe.Node( - Registration(from_file=load_data(f"antsBrainExtraction_{ants_params}.json")), - name="norm", + Registration(from_file=load_data(f'antsBrainExtraction_{ants_params}.json')), + name='norm', n_procs=omp_nthreads, mem_gb=mem_gb, ) norm.inputs.float = sloppy if debug: - norm.inputs.args = "--write-interval-volumes 5" + norm.inputs.args = '--write-interval-volumes 5' map_mask_t2w = pe.Node( - ApplyTransforms(interpolation="Gaussian", float=True), - name="map_mask_t2w", + ApplyTransforms(interpolation='Gaussian', float=True), + name='map_mask_t2w', mem_gb=1, ) # map template brainmask to t2w space map_mask_t2w.inputs.input_image = str(template_files['mask']) - thr_t2w_mask = pe.Node(Binarize(thresh_low=0.80), name="thr_t2w_mask") + thr_t2w_mask = pe.Node(Binarize(thresh_low=0.80), name='thr_t2w_mask') # Refine INU correction final_n4 = pe.Node( @@ -169,66 +170,66 @@ def init_infant_brain_extraction_wf( shrink_factor=4, ), n_procs=omp_nthreads, - name="final_n4", + name='final_n4', ) - final_clip = pe.Node(IntensityClip(p_min=5.0, p_max=99.5), name="final_clip") - apply_mask = pe.Node(ApplyMask(), name="apply_mask") + final_clip = pe.Node(IntensityClip(p_min=5.0, p_max=99.5), name='final_clip') + apply_mask = pe.Node(ApplyMask(), name='apply_mask') # fmt:off workflow.connect([ - (inputnode, final_n4, [("t2w_preproc", "input_image")]), + (inputnode, final_n4, [('t2w_preproc', 'input_image')]), # 1. 
Massage T2w - (inputnode, mrg_t2w, [("t2w_preproc", "in1")]), - (inputnode, lap_t2w, [("t2w_preproc", "op1")]), - (inputnode, map_mask_t2w, [("t2w_preproc", "reference_image")]), - (bin_regmask, refine_mask, [("out_file", "in_file")]), - (refine_mask, fixed_masks, [("out_file", "in4")]), - (lap_t2w, norm_lap_t2w, [("output_image", "in_file")]), - (norm_lap_t2w, mrg_t2w, [("out", "in2")]), + (inputnode, mrg_t2w, [('t2w_preproc', 'in1')]), + (inputnode, lap_t2w, [('t2w_preproc', 'op1')]), + (inputnode, map_mask_t2w, [('t2w_preproc', 'reference_image')]), + (bin_regmask, refine_mask, [('out_file', 'in_file')]), + (refine_mask, fixed_masks, [('out_file', 'in4')]), + (lap_t2w, norm_lap_t2w, [('output_image', 'in_file')]), + (norm_lap_t2w, mrg_t2w, [('out', 'in2')]), # 2. Prepare template - (clip_tmpl, lap_tmpl, [("out_file", "op1")]), - (lap_tmpl, norm_lap_tmpl, [("output_image", "in_file")]), - (clip_tmpl, mrg_tmpl, [("out_file", "in1")]), - (norm_lap_tmpl, mrg_tmpl, [("out", "in2")]), + (clip_tmpl, lap_tmpl, [('out_file', 'op1')]), + (lap_tmpl, norm_lap_tmpl, [('output_image', 'in_file')]), + (clip_tmpl, mrg_tmpl, [('out_file', 'in1')]), + (norm_lap_tmpl, mrg_tmpl, [('out', 'in2')]), # 3. Set normalization node inputs - (mrg_tmpl, norm, [("out", "fixed_image")]), - (mrg_t2w, norm, [("out", "moving_image")]), - (fixed_masks, norm, [("out", "fixed_image_masks")]), + (mrg_tmpl, norm, [('out', 'fixed_image')]), + (mrg_t2w, norm, [('out', 'moving_image')]), + (fixed_masks, norm, [('out', 'fixed_image_masks')]), # 4. Map template brainmask into T2w space (norm, map_mask_t2w, [ - ("reverse_transforms", "transforms"), - ("reverse_invert_flags", "invert_transform_flags") + ('reverse_transforms', 'transforms'), + ('reverse_invert_flags', 'invert_transform_flags') ]), - (map_mask_t2w, thr_t2w_mask, [("output_image", "in_file")]), - (thr_t2w_mask, apply_mask, [("out_mask", "in_mask")]), - (final_n4, apply_mask, [("output_image", "in_file")]), + (map_mask_t2w, thr_t2w_mask, [('output_image', 'in_file')]), + (thr_t2w_mask, apply_mask, [('out_mask', 'in_mask')]), + (final_n4, apply_mask, [('output_image', 'in_file')]), # 5. Refine T2w INU correction with brain mask - (map_mask_t2w, final_n4, [("output_image", "weight_image")]), - (final_n4, final_clip, [("output_image", "in_file")]), + (map_mask_t2w, final_n4, [('output_image', 'weight_image')]), + (final_n4, final_clip, [('output_image', 'in_file')]), # 6. 
Outputs - (final_clip, outputnode, [("out_file", "t2w_preproc")]), - (map_mask_t2w, outputnode, [("output_image", "out_probmap")]), - (thr_t2w_mask, outputnode, [("out_mask", "out_mask")]), - (apply_mask, outputnode, [("out_file", "t2w_brain")]), + (final_clip, outputnode, [('out_file', 't2w_preproc')]), + (map_mask_t2w, outputnode, [('output_image', 'out_probmap')]), + (thr_t2w_mask, outputnode, [('out_mask', 'out_mask')]), + (apply_mask, outputnode, [('out_file', 't2w_brain')]), ]) # fmt:on if ants_affine_init: from nipype.interfaces.ants.utils import AI - ants_kwargs = dict( - metric=("Mattes", 32, "Regular", 0.2), - transform=("Affine", 0.1), - search_factor=(20, 0.12), - principal_axes=False, - convergence=(10, 1e-6, 10), - search_grid=(40, (0, 40, 40)), - verbose=True, - ) + ants_kwargs = { + 'metric': ('Mattes', 32, 'Regular', 0.2), + 'transform': ('Affine', 0.1), + 'search_factor': (20, 0.12), + 'principal_axes': False, + 'convergence': (10, 1e-6, 10), + 'search_grid': (40, (0, 40, 40)), + 'verbose': True, + } init_aff = pe.Node( AI(**ants_kwargs), - name="init_aff", + name='init_aff', n_procs=omp_nthreads, ) if template_files['regmask']: @@ -236,9 +237,9 @@ def init_infant_brain_extraction_wf( # fmt:off workflow.connect([ - (clip_tmpl, init_aff, [("out_file", "fixed_image")]), - (inputnode, init_aff, [("t2w_preproc", "moving_image")]), - (init_aff, norm, [("output_transform", "initial_moving_transform")]), + (clip_tmpl, init_aff, [('out_file', 'fixed_image')]), + (inputnode, init_aff, [('t2w_preproc', 'moving_image')]), + (init_aff, norm, [('output_transform', 'initial_moving_transform')]), ]) # fmt:on @@ -246,7 +247,7 @@ def init_infant_brain_extraction_wf( def _pop(in_files): - if isinstance(in_files, (list, tuple)): + if isinstance(in_files, list | tuple): return in_files[0] return in_files @@ -268,11 +269,11 @@ def _norm_lap(in_file): data = np.clip(data, a_min=-1.0, a_max=1.0) out_file = fname_presuffix( - Path(in_file).name, suffix="_norm", newpath=str(Path.cwd().absolute()) + Path(in_file).name, suffix='_norm', newpath=str(Path.cwd().absolute()) ) hdr = img.header.copy() - hdr.set_data_dtype("float32") - img.__class__(data.astype("float32"), img.affine, hdr).to_filename(out_file) + hdr.set_data_dtype('float32') + img.__class__(data.astype('float32'), img.affine, hdr).to_filename(out_file) return out_file @@ -281,13 +282,13 @@ def fetch_templates(template: str, specs: dict) -> dict: template_files = {} # Anatomical reference - template_files['anat'] = get(template, suffix="T1w", desc=None, raise_empty=True, **specs) + template_files['anat'] = get(template, suffix='T1w', desc=None, raise_empty=True, **specs) # Anatomical mask, prefer probseg if available - template_files['mask'] = get(template, label="brain", suffix="probseg", **specs) or get( - template, desc="brain", suffix="mask", **specs + template_files['mask'] = get(template, label='brain', suffix='probseg', **specs) or get( + template, desc='brain', suffix='mask', **specs ) # More dilated mask to facilitate registration template_files['regmask'] = get( - template, label="BrainCerebellumExtraction", suffix="mask", **specs + template, label='BrainCerebellumExtraction', suffix='mask', **specs ) return template_files From 3168621db57f33fc6ba526407acf588a2038989f Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 22 Apr 2024 23:56:50 -0400 Subject: [PATCH 026/142] RF: Branch fsLR resampling if using mcribs recon --- nibabies/workflows/anatomical/fit.py | 155 ++++++++++++++++----------- 1 file changed, 95 insertions(+), 60 
deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index 7c43f7fe..3a1f0c56 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -17,10 +17,10 @@ ) from smriprep.workflows.fit.registration import init_register_template_wf from smriprep.workflows.outputs import ( - init_anat_second_derivatives_wf, init_ds_anat_volumes_wf, init_ds_dseg_wf, init_ds_fs_registration_wf, + init_ds_fs_segs_wf, init_ds_grayord_metrics_wf, init_ds_mask_wf, init_ds_surface_metrics_wf, @@ -46,7 +46,10 @@ from nibabies.workflows.anatomical.outputs import init_anat_reports_wf from nibabies.workflows.anatomical.registration import init_coregistration_wf from nibabies.workflows.anatomical.segmentation import init_segmentation_wf -from nibabies.workflows.anatomical.surfaces import init_mcribs_dhcp_wf +from nibabies.workflows.anatomical.surfaces import ( + init_mcribs_dhcp_wf, + init_resample_midthickness_dhcp_wf, +) if ty.TYPE_CHECKING: from niworkflows.utils.spaces import Reference, SpatialReferences @@ -62,17 +65,17 @@ def init_infant_anat_fit_wf( flair: list, bids_root: str, precomputed: dict, - hires: bool, longitudinal: bool, omp_nthreads: int, output_dir: str, - segmentation_atlases: Path | None, + segmentation_atlases: str | Path | None, skull_strip_mode: ty.Literal['auto', 'skip', 'force'], skull_strip_template: 'Reference', + skull_strip_fixed_seed: bool, sloppy: bool, spaces: 'SpatialReferences', recon_method: ty.Literal['freesurfer', 'infantfs', 'mcribs'] | None, - cifti_output: ty.Literal['91k', '170k'] | None, + cifti_output: ty.Literal['91k', '170k', False], msm_sulc: bool = False, name: str = 'infant_anat_fit_wf', ): @@ -94,22 +97,22 @@ def init_infant_anat_fit_wf( raise FileNotFoundError('No anatomical scans provided!') if not num_t1w or not num_t2w: - modality = 'T1w' if num_t1w else 'T2w' + image_type = 'T1w' if num_t1w else 'T2w' anatomicals = t1w or t2w workflow = init_infant_single_anat_fit_wf( - modality, + image_type, age_months=age_months, anatomicals=anatomicals, bids_root=bids_root, precomputed=precomputed, - hires=hires, longitudinal=longitudinal, omp_nthreads=omp_nthreads, output_dir=output_dir, segmentation_atlases=segmentation_atlases, skull_strip_mode=skull_strip_mode, - skull_strip_template=skull_strip_mode, + skull_strip_template=skull_strip_template, + skull_strip_fixed_seed=skull_strip_fixed_seed, sloppy=sloppy, spaces=spaces, cifti_output=cifti_output, @@ -848,7 +851,7 @@ def init_infant_anat_fit_wf( surface_recon_wf = init_surface_recon_wf( name='surface_recon_wf', omp_nthreads=omp_nthreads, - hires=hires, + hires=True, fs_no_resume=False, precomputed=precomputed, ) @@ -896,7 +899,7 @@ def init_infant_anat_fit_wf( ]) # fmt:skip if anat_aseg: - workflow.conect(anat_buffer, 'anat_aseg', surface_recon_wf,'inputnode.in_aseg') + workflow.conect(anat_buffer, 'anat_aseg', surface_recon_wf, 'inputnode.in_aseg') fsnative_xfms = precomputed.get('transforms', {}).get('fsnative') if not fsnative_xfms: @@ -1092,59 +1095,75 @@ def init_infant_anat_fit_wf( outputnode.inputs.anat_ribbon = precomputed['anat_ribbon'] # Stage 9: Baseline fsLR registration - if len(precomputed.get('sphere_reg_fsLR', [])) < 2: - LOGGER.info('ANAT Stage 9: Creating fsLR registration sphere') - if recon_method == 'mcribs': + if recon_method == 'mcribs': + if len(precomputed.get('sphere_reg_dhcpAsym', [])) < 2: + LOGGER.info('ANAT Stage 9: Creating dhcp-fsLR registration sphere') fsLR_reg_wf = init_mcribs_dhcp_wf() + + ds_fsLR_reg_wf 
= init_ds_surfaces_wf( + bids_root=bids_root, + output_dir=output_dir, + surfaces=['sphere_reg_dhcpAsym'], + name='ds_fsLR_reg_wf', + ) + + workflow.connect([ + (surfaces_buffer, fsLR_reg_wf, [('sphere_reg', 'inputnode.sphere_reg')]), + (sourcefile_buffer, ds_fsLR_reg_wf, [('source_files', 'inputnode.source_files')]), + (fsLR_reg_wf, ds_fsLR_reg_wf, [ + ('outputnode.sphere_reg_fsLR', 'inputnode.sphere_reg_fsLR') + ]), + (ds_fsLR_reg_wf, fsLR_buffer, [('outputnode.sphere_reg_fsLR', 'sphere_reg_fsLR')]), + ]) # fmt:skip else: + LOGGER.info('ANAT Stage 9: Found pre-computed dhcp-fsLR registration sphere') + fsLR_buffer.inputs.sphere_reg_fsLR = sorted(precomputed['sphere_reg_dhcpAsym']) + + else: + if len(precomputed.get('sphere_reg_fsLR', [])) < 2: + LOGGER.info('ANAT Stage 9: Creating fsLR registration sphere') fsLR_reg_wf = init_fsLR_reg_wf() - ds_fsLR_reg_wf = init_ds_surfaces_wf( - bids_root=bids_root, - output_dir=output_dir, - surfaces=['sphere_reg_fsLR'], - name='ds_fsLR_reg_wf', - ) + ds_fsLR_reg_wf = init_ds_surfaces_wf( + bids_root=bids_root, + output_dir=output_dir, + surfaces=['sphere_reg_fsLR'], + name='ds_fsLR_reg_wf', + ) - workflow.connect([ - (surfaces_buffer, fsLR_reg_wf, [('sphere_reg', 'inputnode.sphere_reg')]), - (sourcefile_buffer, ds_fsLR_reg_wf, [('source_files', 'inputnode.source_files')]), - (fsLR_reg_wf, ds_fsLR_reg_wf, [ - ('outputnode.sphere_reg_fsLR', 'inputnode.sphere_reg_fsLR') - ]), - (ds_fsLR_reg_wf, fsLR_buffer, [('outputnode.sphere_reg_fsLR', 'sphere_reg_fsLR')]), - ]) # fmt:skip - else: - LOGGER.info('ANAT Stage 9: Found pre-computed fsLR registration sphere') - fsLR_buffer.inputs.sphere_reg_fsLR = sorted(precomputed['sphere_reg_fsLR']) + workflow.connect([ + (surfaces_buffer, fsLR_reg_wf, [('sphere_reg', 'inputnode.sphere_reg')]), + (sourcefile_buffer, ds_fsLR_reg_wf, [('source_files', 'inputnode.source_files')]), + (fsLR_reg_wf, ds_fsLR_reg_wf, [ + ('outputnode.sphere_reg_fsLR', 'inputnode.sphere_reg_fsLR') + ]), + (ds_fsLR_reg_wf, fsLR_buffer, [('outputnode.sphere_reg_fsLR', 'sphere_reg_fsLR')]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Stage 9: Found pre-computed fsLR registration sphere') + fsLR_buffer.inputs.sphere_reg_fsLR = sorted(precomputed['sphere_reg_fsLR']) return workflow def init_infant_single_anat_fit_wf( - modality, + image_type: ty.Literal['T1w', 'T2w'], *, age_months: int, anatomicals: list, bids_root: str | Path, precomputed: dict, - hires: bool, longitudinal: bool, - omp_nthreads: bool, + omp_nthreads: int, output_dir: str | Path, segmentation_atlases: str | Path | None, skull_strip_mode: ty.Literal['force', 'skip', 'auto'], skull_strip_template: 'Reference', + skull_strip_fixed_seed: bool, sloppy: bool, spaces: 'SpatialReferences', - cifti_output: ty.Literal['91k', '170k'], + cifti_output: ty.Literal['91k', '170k', False], name: str = 'infant_single_anat_fit_wf', ): - desc = ( - '\nAnatomical data preprocessing\n\n: ', - f'A total of {len(anatomicals)} {modality} images were found ' - 'within the input BIDS dataset.\n', - ) - inputnode = pe.Node( niu.IdentityInterface( fields=['anat', 'roi', 'flair', 'subjects_dir', 'subject_id'], @@ -1185,25 +1204,35 @@ def init_infant_single_anat_fit_wf( name='outputnode', ) + workflow = Workflow(name=f'infant_single_{image_type}_fit_wf') + workflow.add_nodes([inputnode]) + + desc = ( + '\nAnatomical data preprocessing\n\n: ' + f'A total of {len(anatomicals)} {image_type} images were found ' + 'within the input BIDS dataset.\n' + ) + def init_anat_preproc_wf( *, age_months: int, - 
ants_affine_init: bool, t1w: list, t2w: list, + flair: list, bids_root: str, precomputed: dict, - hires: bool | None, longitudinal: bool, omp_nthreads: int, output_dir: str, segmentation_atlases: str | Path | None, - skull_strip_mode: str, + skull_strip_mode: ty.Literal['auto', 'skip', 'force'], + recon_method: ty.Literal['freesurfer', 'infantfs', 'mcribs', None], skull_strip_template: Reference, sloppy: bool, spaces: SpatialReferences, cifti_output: ty.Literal['91k', '170k', False], + skull_strip_fixed_seed: bool = False, name: str = 'infant_anat_wf', ) -> pe.Workflow: workflow = pe.Workflow(name=name) @@ -1218,15 +1247,15 @@ def init_anat_preproc_wf( 'template', 'subjects_dir', 'subject_id', - 't1w_preproc', - 't1w_mask', - 't1w_dseg', - 't1w_tpms', + 'anat_preproc', + 'anat_mask', + 'anat_dseg', + 'anat_tpms', 'anat2std_xfm', 'std2anat_xfm', - 'fsnative2t1w_xfm', - 't1w_aparc', - 't1w_aseg', + 'fsnative2anat_xfm', + 'anat_aparc', + 'anat_aseg', 'sphere_reg', 'sphere_reg_fsLR', ] @@ -1238,11 +1267,11 @@ def init_anat_preproc_wf( age_months=age_months, bids_root=bids_root, output_dir=output_dir, - hires=hires, longitudinal=longitudinal, msm_sulc=msm_sulc, skull_strip_mode=skull_strip_mode, skull_strip_template=skull_strip_template, + skull_strip_fixed_seed=skull_strip_fixed_seed, spaces=spaces, t1w=t1w, t2w=t2w, @@ -1307,10 +1336,9 @@ def init_anat_preproc_wf( ]) # fmt:skip if recon_method is not None: - anat_second_derivatives_wf = init_anat_second_derivatives_wf( + ds_fs_segs_wf = init_ds_fs_segs_wf( bids_root=bids_root, output_dir=output_dir, - cifti_output=cifti_output, ) surface_derivatives_wf = init_surface_derivatives_wf() ds_surfaces_wf = init_ds_surfaces_wf( @@ -1325,7 +1353,7 @@ def init_anat_preproc_wf( ('outputnode.t1w_preproc', 'inputnode.reference'), ('outputnode.subjects_dir', 'inputnode.subjects_dir'), ('outputnode.subject_id', 'inputnode.subject_id'), - ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2t1w_xfm'), + ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), ]), (anat_fit_wf, ds_surfaces_wf, [ ('outputnode.anat_valid_list', 'inputnode.source_files'), @@ -1339,12 +1367,12 @@ def init_anat_preproc_wf( (surface_derivatives_wf, ds_curv_wf, [ ('outputnode.curv', 'inputnode.curv'), ]), - (anat_fit_wf, anat_second_derivatives_wf, [ + (anat_fit_wf, ds_fs_segs_wf, [ ('outputnode.anat_valid_list', 'inputnode.source_files'), ]), - (surface_derivatives_wf, anat_second_derivatives_wf, [ - ('outputnode.out_aseg', 'inputnode.t1w_fs_aseg'), - ('outputnode.out_aparc', 'inputnode.t1w_fs_aparc'), + (surface_derivatives_wf, ds_fs_segs_wf, [ + ('outputnode.out_aseg', 'inputnode.anat_fs_aseg'), + ('outputnode.out_aparc', 'inputnode.anat_fs_aparc'), ]), (surface_derivatives_wf, outputnode, [ ('outputnode.out_aseg', 'anat_aseg'), @@ -1354,7 +1382,14 @@ def init_anat_preproc_wf( if cifti_output: hcp_morphometrics_wf = init_hcp_morphometrics_wf(omp_nthreads=omp_nthreads) - resample_midthickness_wf = init_resample_midthickness_wf(grayord_density=cifti_output) + if recon_method == 'mcribs': + resample_midthickness_wf = init_resample_midthickness_dhcp_wf( + grayord_density=cifti_output + ) + else: + resample_midthickness_wf = init_resample_midthickness_wf( + grayord_density=cifti_output + ) morph_grayords_wf = init_morph_grayords_wf( grayord_density=cifti_output, omp_nthreads=omp_nthreads ) From 9b455afb33b40723e5042d3ce2a7257d34d165d4 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Mon, 22 Apr 2024 23:57:23 -0400 Subject: [PATCH 027/142] ADD: Midthickness resampling workflow 
for MCRIBS --- nibabies/workflows/anatomical/surfaces.py | 78 +++++++++++++++++++++++ 1 file changed, 78 insertions(+) diff --git a/nibabies/workflows/anatomical/surfaces.py b/nibabies/workflows/anatomical/surfaces.py index c152d78c..b4f46014 100644 --- a/nibabies/workflows/anatomical/surfaces.py +++ b/nibabies/workflows/anatomical/surfaces.py @@ -1,5 +1,6 @@ """Anatomical surface projections""" +import typing as ty import templateflow.api as tf from nipype.interfaces import freesurfer as fs @@ -15,6 +16,7 @@ ) from niworkflows.interfaces.patches import FreeSurferSource from smriprep.interfaces.freesurfer import MakeMidthickness +from smriprep.interfaces.workbench import SurfaceResample from smriprep.workflows.surfaces import _extract_fs_fields SURFACE_INPUTS = [ @@ -387,6 +389,82 @@ def init_midthickness_wf(*, omp_nthreads: int, name: str = 'make_midthickness_wf return workflow +def init_resample_midthickness_dhcp_wf( + grayord_density: ty.Literal['91k', '170k'], + name: str = 'resample_midthickness_wf', +): + """ + Resample subject midthickness surface to specified density. + + Workflow Graph + .. workflow:: + :graph2use: colored + :simple_form: yes + + from nibabies.workflows.anatomical.surfaces import init_resample_midthickness_wf + wf = init_resample_midthickness_wf(grayord_density="91k") + + Parameters + ---------- + grayord_density : :obj:`str` + Either `91k` or `170k`, representing the total of vertices or *grayordinates*. + name : :obj:`str` + Unique name for the subworkflow (default: ``"resample_midthickness_wf"``) + + Inputs + ------ + midthickness + GIFTI surface mesh corresponding to the midthickness surface + sphere_reg_fsLR + GIFTI surface mesh corresponding to the subject's fsLR registration sphere + + Outputs + ------- + midthickness + GIFTI surface mesh corresponding to the midthickness surface, resampled to fsLR + """ + workflow = LiterateWorkflow(name=name) + + fslr_density = '32k' if grayord_density == '91k' else '59k' + + inputnode = pe.Node( + niu.IdentityInterface(fields=['midthickness', 'sphere_reg_fsLR']), + name='inputnode', + ) + + outputnode = pe.Node(niu.IdentityInterface(fields=['midthickness_fsLR']), name='outputnode') + + resampler = pe.MapNode( + SurfaceResample(method='BARYCENTRIC'), + iterfield=['surface_in', 'current_sphere', 'new_sphere'], + name='resampler', + ) + resampler.inputs.new_sphere = [ + str( + tf.get( + template='dhcpAsym', + cohort='42', + density=fslr_density, + suffix='sphere', + hemi=hemi, + space=None, + extension='.surf.gii', + ) + ) + for hemi in ['L', 'R'] + ] + + workflow.connect([ + (inputnode, resampler, [ + ('midthickness', 'surface_in'), + ('sphere_reg_fsLR', 'current_sphere'), + ]), + (resampler, outputnode, [('surface_out', 'midthickness_fsLR')]), + ]) # fmt:skip + + return workflow + + def _parent(p): from pathlib import Path From 5ac10edbe3d52e1c58f8c866d9a91ea6aaeb45f1 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Tue, 23 Apr 2024 00:09:02 -0400 Subject: [PATCH 028/142] RF: Determine reference anatomical prior to fit workflow --- nibabies/workflows/anatomical/fit.py | 63 +++++++++++++--------------- 1 file changed, 29 insertions(+), 34 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index 3a1f0c56..c36e0add 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -75,6 +75,7 @@ def init_infant_anat_fit_wf( sloppy: bool, spaces: 'SpatialReferences', recon_method: ty.Literal['freesurfer', 'infantfs', 'mcribs'] | None, + 
reference_anat: ty.Literal['T1w', 'T2w'], cifti_output: ty.Literal['91k', '170k', False], msm_sulc: bool = False, name: str = 'infant_anat_fit_wf', @@ -97,13 +98,10 @@ def init_infant_anat_fit_wf( raise FileNotFoundError('No anatomical scans provided!') if not num_t1w or not num_t2w: - image_type = 'T1w' if num_t1w else 'T2w' - anatomicals = t1w or t2w - workflow = init_infant_single_anat_fit_wf( - image_type, + reference_anat='T1w' if num_t1w else 'T2w', age_months=age_months, - anatomicals=anatomicals, + anatomicals=t1w or t2w, bids_root=bids_root, precomputed=precomputed, longitudinal=longitudinal, @@ -120,6 +118,8 @@ def init_infant_anat_fit_wf( return workflow + anat = reference_anat.lower() + # Organization # ------------ # This workflow takes the usual (inputnode -> graph -> outputnode) format @@ -212,13 +212,7 @@ def init_infant_anat_fit_wf( name='anat_buffer', ) - # At this point, we should decide which anatomical we will use as the reference space. - # This will depend on the age of the participant, as myelination should be somewhat complete - # by 9+ months - reference_anat = 't2w' if age_months <= 8 else 't1w' - image_type = reference_anat.capitalize() - - if reference_anat == 't1w': + if reference_anat == 'T1w': LOGGER.info('ANAT: Using T1w as the reference anatomical') workflow.connect([ (t1w_buffer, anat_buffer, [ @@ -227,7 +221,7 @@ def init_infant_anat_fit_wf( ('t1w_brain', 'anat_brain'), ]), ]) # fmt:skip - elif reference_anat == 't2w': + elif reference_anat == 'T2w': LOGGER.info('ANAT: Using T2w as the reference anatomical') workflow.connect([ (t2w_buffer, anat_buffer, [ @@ -355,7 +349,7 @@ def init_infant_anat_fit_wf( name='ds_t1w_template_wf', ) - if reference_anat == 't1w': + if reference_anat == 'T1w': workflow.connect([ (t1w_template_wf, sourcefile_buffer, [('outputnode.anat_valid_list', 'anat_source_files')]), ]) # fmt:skip @@ -384,7 +378,7 @@ def init_infant_anat_fit_wf( t1w_validate.inputs.in_file = t1w_preproc sourcefile_buffer.inputs.t1w_source_files = [t1w_preproc] - if reference_anat == 't1w': + if reference_anat == 'T1w': sourcefile_buffer.inputs.anat_source_files = [t1w_preproc] workflow.connect([ @@ -412,7 +406,7 @@ def init_infant_anat_fit_wf( name='ds_t2w_template_wf', ) - if reference_anat == 't2w': + if reference_anat == 'T2w': workflow.connect( t2w_template_wf, 'outputnode.anat_valid_list', sourcefile_buffer, 'anat_source_files', @@ -442,7 +436,7 @@ def init_infant_anat_fit_wf( t2w_validate.inputs.in_file = t2w_preproc sourcefile_buffer.inputs.t2w_source_files = [t2w_preproc] - if reference_anat == 't2w': + if reference_anat == 'T2w': sourcefile_buffer.inputs.anat_source_files = [t2w_preproc] workflow.connect([ @@ -521,7 +515,7 @@ def init_infant_anat_fit_wf( (sourcefile_buffer, ds_t1w_mask_wf, [('t1w_source_files', 'inputnode.source_files')]), ]) # fmt:skip - if reference_anat == 't1w': + if reference_anat == 'T1w': workflow.connect([ (refined_buffer, ds_t1w_mask_wf, [('anat_mask', 'inputnode.mask_file')]), (ds_t1w_mask_wf, outputnode, [('outputnode.mask_file', 'anat_mask')]), @@ -532,7 +526,7 @@ def init_infant_anat_fit_wf( ]) # fmt:skip else: LOGGER.info('ANAT Found T1w brain mask') - if reference_anat == 't1w': + if reference_anat == 'T1w': desc += ( 'A pre-computed T1w brain mask was provided as input and used throughout the ' 'workflow.' 
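
Review note: with this change the fit workflow no longer decides the reference
anatomical itself -- callers resolve it and pass `reference_anat` in. A minimal
sketch of that upstream decision, assuming the age-based heuristic used earlier
in this series (the helper name is illustrative, not part of the patch):

    import typing as ty

    def resolve_reference_anat(
        age_months: int,
        t1w: list,
        t2w: list,
    ) -> ty.Literal['T1w', 'T2w']:
        """Pick the anatomical reference before building the fit workflow."""
        if not t2w:  # single-modality dataset: no choice to make
            return 'T1w'
        if not t1w:
            return 'T2w'
        # Myelination is largely complete by 9+ months, so prefer T2w before then
        return 'T2w' if age_months <= 8 else 'T1w'
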
@@ -577,7 +571,7 @@ def init_infant_anat_fit_wf( t2w_n4_only_wf = init_n4_only_wf( omp_nthreads=omp_nthreads, - bids_suffix=image_type, + bids_suffix=reference_anat, atropos_use_random_seed=not skull_strip_fixed_seed, name='t2w_n4_only_wf', ) @@ -639,7 +633,7 @@ def init_infant_anat_fit_wf( else: LOGGER.info('ANAT Found T2w brain mask') - if reference_anat == 't2w': + if reference_anat == 'T2w': desc += ( 'A pre-computed T1w brain mask was provided as input and used throughout the ' 'workflow.' @@ -685,9 +679,9 @@ def init_infant_anat_fit_wf( ]) # fmt:skip # Stage 4: Segmentation - anat_dseg = getattr(precomputed, f'{image_type}_dseg', None) - anat_tpms = getattr(precomputed, f'{image_type}_tpms', None) - anat_aseg = getattr(precomputed, f'{image_type}_aseg', False) + anat_dseg = getattr(precomputed, f'{anat}_dseg', None) + anat_tpms = getattr(precomputed, f'{anat}_tpms', None) + anat_aseg = getattr(precomputed, f'{anat}_aseg', False) seg_method = 'jlf' if config.execution.segmentation_atlases_dir else 'fast' if not (anat_dseg and anat_tpms): @@ -695,13 +689,13 @@ def init_infant_anat_fit_wf( segmentation_wf = init_segmentation_wf( sloppy=sloppy, method=seg_method, - image_type=image_type.capitalize(), + image_type=reference_anat, omp_nthreads=omp_nthreads, has_aseg=bool(anat_aseg), ) workflow.connect([ - (anat_buffer, segmentation_wf, [(f'{image_type}_brain', 'anat_brain')]), + (anat_buffer, segmentation_wf, [(f'{anat}_brain', 'anat_brain')]), (segmentation_wf, seg_buffer, [ ('outputnode.anat_dseg', 'anat_dseg'), ('outputnode.anat_tpms', 'anat_tpms'), @@ -771,15 +765,15 @@ def init_infant_anat_fit_wf( ) ds_template_registration_wf = init_ds_template_registration_wf( output_dir=str(output_dir), - image_type=image_type.capitalize(), + image_type=reference_anat, ) workflow.connect([ (inputnode, register_template_wf, [('roi', 'inputnode.lesion_mask')]), - (anat_buffer, register_template_wf, [(f'{image_type}_preproc', 'inputnode.moving_image')]), - (refined_buffer, register_template_wf, [(f'{image_type}_mask', 'inputnode.moving_mask')]), + (anat_buffer, register_template_wf, [(f'{anat}_preproc', 'inputnode.moving_image')]), + (refined_buffer, register_template_wf, [(f'{anat}_mask', 'inputnode.moving_mask')]), (sourcefile_buffer, ds_template_registration_wf, [ - (f'{image_type}_source_files', 'inputnode.source_files') + (f'{anat}_source_files', 'inputnode.source_files') ]), (register_template_wf, ds_template_registration_wf, [ ('outputnode.template', 'inputnode.template'), @@ -904,7 +898,7 @@ def init_infant_anat_fit_wf( fsnative_xfms = precomputed.get('transforms', {}).get('fsnative') if not fsnative_xfms: ds_fs_registration_wf = init_ds_fs_registration_wf( - image_type=image_type, output_dir=output_dir + image_type=reference_anat, output_dir=output_dir ) if recon_method == 'freesurfer': @@ -1146,7 +1140,7 @@ def init_infant_anat_fit_wf( def init_infant_single_anat_fit_wf( - image_type: ty.Literal['T1w', 'T2w'], + reference_anat: ty.Literal['T1w', 'T2w'], *, age_months: int, anatomicals: list, @@ -1204,12 +1198,13 @@ def init_infant_single_anat_fit_wf( name='outputnode', ) - workflow = Workflow(name=f'infant_single_{image_type}_fit_wf') + anat = reference_anat.lower() + workflow = Workflow(name=f'infant_single_{anat}_fit_wf') workflow.add_nodes([inputnode]) desc = ( '\nAnatomical data preprocessing\n\n: ' - f'A total of {len(anatomicals)} {image_type} images were found ' + f'A total of {len(anatomicals)} {anat} images were found ' 'within the input BIDS dataset.\n' ) From 
8f56476311dbac07205a358c401dc99a3587f7ce Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 24 Apr 2024 16:29:39 -0400 Subject: [PATCH 029/142] ENH: Add fingerprint file to quickly determine MCRIB runs --- nibabies/interfaces/mcribs.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/nibabies/interfaces/mcribs.py b/nibabies/interfaces/mcribs.py index 856000cb..34a67275 100644 --- a/nibabies/interfaces/mcribs.py +++ b/nibabies/interfaces/mcribs.py @@ -219,13 +219,18 @@ def _list_outputs(self): surfrecon_dir = self._mcribs_dir / sid / 'SurfReconDeformable' / sid self._verify_surfrecon_outputs(surfrecon_dir, error=True) + mcribs_fs = self._mcribs_dir / sid / 'freesurfer' / sid + if self.inputs.autorecon_after_surf: + self._verify_autorecon_outputs(mcribs_fs, error=True) + outputs['mcribs_dir'] = str(self._mcribs_dir) if self.inputs.autorecon_after_surf and self.inputs.subjects_dir: - mcribs_fs = self._mcribs_dir / sid / 'freesurfer' / sid - self._verify_autorecon_outputs(mcribs_fs, error=True) dst = Path(self.inputs.subjects_dir) / self.inputs.subject_id if not dst.exists(): shutil.copytree(mcribs_fs, dst) + # Create a file to denote this SUBJECTS_DIR was derived from MCRIBS + logfile = self._mcribs_dir / sid / 'logs' / f'{sid}.log' + shutil.copyfile(logfile, (dst / 'scripts' / 'mcribs.log')) # Copy registration sphere to better match recon-all output for hemi in 'lr': orig = dst / 'surf' / f'{hemi}h.sphere.reg2' From 161e66a5fe61b662399b269fa09ffb1cb84cc807 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 24 Apr 2024 16:43:39 -0400 Subject: [PATCH 030/142] ENH: Beef up anatomical summary --- nibabies/interfaces/reports.py | 256 +++++++++++++++++++-------------- 1 file changed, 150 insertions(+), 106 deletions(-) diff --git a/nibabies/interfaces/reports.py b/nibabies/interfaces/reports.py index 2a2b3b6c..597ef0f2 100644 --- a/nibabies/interfaces/reports.py +++ b/nibabies/interfaces/reports.py @@ -7,6 +7,7 @@ import re import time from collections import Counter +from pathlib import Path from nipype.interfaces.base import ( BaseInterfaceInputSpec, @@ -20,19 +21,21 @@ traits, ) from niworkflows.interfaces.reportlets import base as nrb -from smriprep.interfaces.freesurfer import ReconAll -LOGGER = logging.getLogger("nipype.interface") +LOGGER = logging.getLogger('nipype.interface') SUBJECT_TEMPLATE = """\ \t
<ul class="elem-desc">
 \t\t<li>Subject ID: {subject_id}</li>
-\t\t<li>Structural images: {n_t1s:d} T1-weighted {t2w}</li>
+\t\t<li>Session ID: {session_id}</li>
+\t\t<li>Structural images: {num_t1w} T1-weighted, {num_t2w} T2-weighted</li>
+\t\t<li>Anatomical reference space: {anat_ref}</li>
 \t\t<li>Functional series: {n_bold:d}</li>
 {tasks}
 \t\t<li>Standard output spaces: {std_spaces}</li>
 \t\t<li>Non-standard output spaces: {nstd_spaces}</li>
-\t\t<li>FreeSurfer reconstruction: {freesurfer_status}</li>
+\t\t<li>Surface reconstruction method: {recon_method}</li>
+\t\t<li>Surface reconstruction status: {recon_status}</li>
 \t</ul>
""" @@ -67,7 +70,7 @@ # TODO: Move to niworkflows class SummaryOutputSpec(TraitedSpec): - out_report = File(exists=True, desc="HTML segment containing summary") + out_report = File(exists=True, desc='HTML segment containing summary') class SummaryInterface(SimpleInterface): @@ -75,10 +78,10 @@ class SummaryInterface(SimpleInterface): def _run_interface(self, runtime): segment = self._generate_segment() - fname = os.path.join(runtime.cwd, "report.html") - with open(fname, "w") as fobj: + fname = os.path.join(runtime.cwd, 'report.html') + with open(fname, 'w') as fobj: fobj.write(segment) - self._results["out_report"] = fname + self._results['out_report'] = fname return runtime def _generate_segment(self): @@ -86,22 +89,31 @@ def _generate_segment(self): class SubjectSummaryInputSpec(BaseInterfaceInputSpec): - t1w = InputMultiObject(File(exists=True), desc="T1w structural images") - t2w = InputMultiObject(File(exists=True), desc="T2w structural images") - subjects_dir = Directory(desc="Infant FreeSurfer subjects directory") - subject_id = Str(desc="Subject ID") + t1w = InputMultiObject(File(exists=True), desc='T1w structural images') + t2w = InputMultiObject(File(exists=True), desc='T2w structural images') + subjects_dir = Directory(desc='Infant FreeSurfer subjects directory') + subject_id = Str(mandatory=True, desc='Subject ID') + session_id = Str(desc='Session ID') + anatomical_reference = traits.Enum('T1w', 'T2w', mandatory=True) bold = InputMultiObject( traits.Either(File(exists=True), traits.List(File(exists=True))), - desc="BOLD functional series", + desc='BOLD functional series', + ) + std_spaces = traits.List(Str, desc='list of standard spaces') + nstd_spaces = traits.List(Str, desc='list of non-standard spaces') + recon_method = traits.Enum( + None, + 'freesurfer', + 'infantfs', + 'mcribs', + desc='surface reconstruction method', ) - std_spaces = traits.List(Str, desc="list of standard spaces") - nstd_spaces = traits.List(Str, desc="list of non-standard spaces") class SubjectSummaryOutputSpec(SummaryOutputSpec): # This exists to ensure that the summary is run prior to the first ReconAll # call, allowing a determination whether there is a pre-existing directory - subject_id = Str(desc="Infant FreeSurfer subject ID") + subject_id = Str(desc='Surface reconstruction subject ID') class SubjectSummary(SummaryInterface): @@ -109,109 +121,141 @@ class SubjectSummary(SummaryInterface): output_spec = SubjectSummaryOutputSpec def _run_interface(self, runtime): - if isdefined(self.inputs.subject_id): - self._results["subject_id"] = self.inputs.subject_id - return super(SubjectSummary, self)._run_interface(runtime) + if self.inputs.subject_id: + self._recon_id = f'sub-{self.inputs.subject_id}' + if self.inputs.session_id: + self._recon_id += f'_ses-{self.inputs.session_id}' + self._results['subject_id'] = self._recon_id + return super()._run_interface(runtime) def _generate_segment(self): BIDS_NAME = re.compile( - r"^(.*\/)?" - "(?Psub-[a-zA-Z0-9]+)" - "(_(?Pses-[a-zA-Z0-9]+))?" - "(_(?Ptask-[a-zA-Z0-9]+))?" - "(_(?Pacq-[a-zA-Z0-9]+))?" - "(_(?Prec-[a-zA-Z0-9]+))?" - "(_(?Prun-[a-zA-Z0-9]+))?" + r'^(.*\/)?' + '(?Psub-[a-zA-Z0-9]+)' + '(_(?Pses-[a-zA-Z0-9]+))?' + '(_(?Ptask-[a-zA-Z0-9]+))?' + '(_(?Pacq-[a-zA-Z0-9]+))?' + '(_(?Prec-[a-zA-Z0-9]+))?' + '(_(?Prun-[a-zA-Z0-9]+))?' 
) - if not isdefined(self.inputs.subjects_dir): - freesurfer_status = "Not run" + recon_method = self.inputs.recon_method + + statuses = {'no': 'Not run', 'todo': 'Run by NiBabies', 'done': 'Pre-existing directory'} + if not self.inputs.subjects_dir: + recon_status = statuses['no'] else: - recon = ReconAll( - subjects_dir=self.inputs.subjects_dir, - subject_id="sub-" + self.inputs.subject_id, - T1_files=self.inputs.t1w, - flags="-noskullstrip", - ) - if recon.cmdline.startswith("echo"): - freesurfer_status = "Pre-existing directory" - else: - freesurfer_status = "Run by NiBabies" + if recon_method == 'freesurfer': + from smriprep.interfaces.freesurfer import ReconAll + + recon = ReconAll( + subjects_dir=self.inputs.subjects_dir, + subject_id=self._recon_id, + T1_files=self.inputs.t1w, + flags='-noskullstrip', + ) + recon_status = ( + statuses['done'] if recon.cmdline.startswith('echo') else statuses['todo'] + ) + + elif recon_method == 'infantfs': + from nibabies.interfaces.freesurfer import InfantReconAll + + recon = InfantReconAll( + subjects_dir=self.inputs.subjects_dir, + subject_id=self._recon_id, + t1_file=self.inputs.t1w, + ) + recon_status = ( + statuses['done'] if recon.cmdline.startswith('echo') else statuses['todo'] + ) + + elif recon_method == 'mcribs': + # Use fingerprint logfile to identify "MCRIBS" runs vs FreeSurfer + fingerprint = ( + Path(self.inputs.subjects_dir) / self._recon_id / 'scripts' / 'mcribs.log' + ) + recon_status = statuses['done'] if fingerprint.exists() else statuses['todo'] - t2w_seg = "" + num_t1w, num_t2w = 0, 0 + if self.inputs.t1w: + num_t1w = len(self.inputs.t1w) if self.inputs.t2w: - t2w_seg = "(+ {:d} T2-weighted)".format(len(self.inputs.t2w)) + num_t2w = len(self.inputs.t2w) # Add list of tasks with number of runs bold_series = self.inputs.bold if isdefined(self.inputs.bold) else [] bold_series = [s[0] if isinstance(s, list) else s for s in bold_series] counts = Counter( - BIDS_NAME.search(series).groupdict()["task_id"][5:] for series in bold_series + BIDS_NAME.search(series).groupdict()['task_id'][5:] for series in bold_series ) - tasks = "" + tasks = '' if counts: header = '\t\t
<ul class="elem-desc">'
-            footer = "\t\t</ul>"
+            footer = '\t\t</ul>'
             lines = [
-                "\t\t\t<li>Task: {task_id} ({n_runs:d} run{s})</li>".format(
-                    task_id=task_id, n_runs=n_runs, s="" if n_runs == 1 else "s"
+                '\t\t\t<li>Task: {task_id} ({n_runs:d} run{s})</li>
  • '.format( + task_id=task_id, n_runs=n_runs, s='' if n_runs == 1 else 's' ) for task_id, n_runs in sorted(counts.items()) ] - tasks = "\n".join([header] + lines + [footer]) + tasks = '\n'.join([header] + lines + [footer]) return SUBJECT_TEMPLATE.format( subject_id=self.inputs.subject_id, - n_t1s=len(self.inputs.t1w), - t2w=t2w_seg, + session_id=self.inputs.session_id, + num_t1w=num_t1w, + num_t2w=num_t2w, + anat_ref=self.inputs.anatomical_reference, n_bold=len(bold_series), tasks=tasks, - std_spaces=", ".join(self.inputs.std_spaces), - nstd_spaces=", ".join(self.inputs.nstd_spaces), - freesurfer_status=freesurfer_status, + std_spaces=', '.join(self.inputs.std_spaces), + nstd_spaces=', '.join(self.inputs.nstd_spaces), + recon_method=recon_method, + recon_status=recon_status, ) class FunctionalSummaryInputSpec(BaseInterfaceInputSpec): slice_timing = traits.Enum( - False, True, "TooShort", usedefault=True, desc="Slice timing correction used" + False, True, 'TooShort', usedefault=True, desc='Slice timing correction used' ) distortion_correction = traits.Str( - desc="Susceptibility distortion correction method", mandatory=True + desc='Susceptibility distortion correction method', mandatory=True ) pe_direction = traits.Enum( None, - "i", - "i-", - "j", - "j-", - "k", - "k-", + 'i', + 'i-', + 'j', + 'j-', + 'k', + 'k-', mandatory=True, - desc="Phase-encoding direction detected", + desc='Phase-encoding direction detected', ) registration = traits.Enum( - "FSL", "FreeSurfer", mandatory=True, desc="Functional/anatomical registration method" + 'FSL', 'FreeSurfer', mandatory=True, desc='Functional/anatomical registration method' ) - fallback = traits.Bool(desc="Boundary-based registration rejected") + fallback = traits.Bool(desc='Boundary-based registration rejected') registration_dof = traits.Enum( - 6, 9, 12, desc="Registration degrees of freedom", mandatory=True + 6, 9, 12, desc='Registration degrees of freedom', mandatory=True ) registration_init = traits.Enum( - "register", - "header", + 'register', + 'header', mandatory=True, desc='Whether to initialize registration with the "header"' ' or by centering the volumes ("register")', ) - confounds_file = File(exists=True, desc="Confounds file") - tr = traits.Float(desc="Repetition time", mandatory=True) - dummy_scans = traits.Either(traits.Int(), None, desc="number of dummy scans specified by user") - algo_dummy_scans = traits.Int(desc="number of dummy scans determined by algorithm") - echo_idx = traits.List([], usedefault=True, desc="BIDS echo identifiers") - orientation = traits.Str(mandatory=True, desc="Orientation of the voxel axes") + confounds_file = File(exists=True, desc='Confounds file') + tr = traits.Float(desc='Repetition time', mandatory=True) + dummy_scans = traits.Either(traits.Int(), None, desc='number of dummy scans specified by user') + algo_dummy_scans = traits.Int(desc='number of dummy scans determined by algorithm') + echo_idx = traits.List([], usedefault=True, desc='BIDS echo identifiers') + orientation = traits.Str(mandatory=True, desc='Orientation of the voxel axes') class FunctionalSummary(SummaryInterface): @@ -219,20 +263,20 @@ class FunctionalSummary(SummaryInterface): def _generate_segment(self): dof = self.inputs.registration_dof - stc = {True: "Applied", False: "Not applied", "TooShort": "Skipped (too few volumes)"}[ + stc = {True: 'Applied', False: 'Not applied', 'TooShort': 'Skipped (too few volumes)'}[ self.inputs.slice_timing ] # #TODO: Add a note about registration_init below? 
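        # Note on registration_init (responding to the TODO above): per the input
        # spec, 'register' means coregistration was initialized by centering the
        # volumes, while 'header' means the scanner affines were trusted as-is.
        # It is not yet surfaced in the rendered summary.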
reg = { - "FSL": [ - "FSL flirt with boundary-based registration" - " (BBR) metric - %d dof" % dof, - "FSL flirt rigid registration - 6 dof", + 'FSL': [ + 'FSL flirt with boundary-based registration' + ' (BBR) metric - %d dof' % dof, + 'FSL flirt rigid registration - 6 dof', ], - "FreeSurfer": [ - "FreeSurfer bbregister " - "(boundary-based registration, BBR) - %d dof" % dof, - "FreeSurfer mri_coreg - %d dof" % dof, + 'FreeSurfer': [ + 'FreeSurfer bbregister ' + '(boundary-based registration, BBR) - %d dof' % dof, + 'FreeSurfer mri_coreg - %d dof' % dof, ], }[self.inputs.registration][self.inputs.fallback] @@ -240,39 +284,39 @@ def _generate_segment(self): if isdefined(self.inputs.confounds_file): with open(self.inputs.confounds_file) as cfh: - conflist = cfh.readline().strip("\n").strip() + conflist = cfh.readline().strip('\n').strip() - dummy_scan_tmp = "{n_dum}" + dummy_scan_tmp = '{n_dum}' if self.inputs.dummy_scans == self.inputs.algo_dummy_scans: - dummy_scan_msg = " ".join( - [dummy_scan_tmp, "(Confirmed: {n_alg} automatically detected)"] + dummy_scan_msg = ' '.join( + [dummy_scan_tmp, '(Confirmed: {n_alg} automatically detected)'] ).format(n_dum=self.inputs.dummy_scans, n_alg=self.inputs.algo_dummy_scans) # the number of dummy scans was specified by the user and # it is not equal to the number detected by the algorithm elif self.inputs.dummy_scans is not None: - dummy_scan_msg = " ".join( - [dummy_scan_tmp, "(Warning: {n_alg} automatically detected)"] + dummy_scan_msg = ' '.join( + [dummy_scan_tmp, '(Warning: {n_alg} automatically detected)'] ).format(n_dum=self.inputs.dummy_scans, n_alg=self.inputs.algo_dummy_scans) # the number of dummy scans was not specified by the user else: dummy_scan_msg = dummy_scan_tmp.format(n_dum=self.inputs.algo_dummy_scans) - multiecho = "Single-echo EPI sequence." + multiecho = 'Single-echo EPI sequence.' n_echos = len(self.inputs.echo_idx) if n_echos == 1: multiecho = ( - f"Multi-echo EPI sequence: only echo {self.inputs.echo_idx[0]} processed " - "in single-echo mode." + f'Multi-echo EPI sequence: only echo {self.inputs.echo_idx[0]} processed ' + 'in single-echo mode.' ) if n_echos > 2: - multiecho = f"Multi-echo EPI sequence: {n_echos} echoes." + multiecho = f'Multi-echo EPI sequence: {n_echos} echoes.' 
return FUNCTIONAL_TEMPLATE.format( pedir=pedir, stc=stc, sdc=self.inputs.distortion_correction, registration=reg, - confounds=re.sub(r"[\t ]+", ", ", conflist), + confounds=re.sub(r'[\t ]+', ', ', conflist), tr=self.inputs.tr, dummy_scan_desc=dummy_scan_msg, multiecho=multiecho, @@ -281,8 +325,8 @@ def _generate_segment(self): class AboutSummaryInputSpec(BaseInterfaceInputSpec): - version = Str(desc="NiBabies version") - command = Str(desc="NiBabies command") + version = Str(desc='NiBabies version') + command = Str(desc='NiBabies command') # Date not included - update timestamp only if version or command changes @@ -293,18 +337,18 @@ def _generate_segment(self): return ABOUT_TEMPLATE.format( version=self.inputs.version, command=self.inputs.command, - date=time.strftime("%Y-%m-%d %H:%M:%S %z"), + date=time.strftime('%Y-%m-%d %H:%M:%S %z'), ) class LabeledHistogramInputSpec(nrb._SVGReportCapableInputSpec): - in_file = traits.File(exists=True, mandatory=True, desc="Image containing values to plot") + in_file = traits.File(exists=True, mandatory=True, desc='Image containing values to plot') label_file = traits.File( exists=True, - desc="Mask or label image where non-zero values will be used to extract data from in_file", + desc='Mask or label image where non-zero values will be used to extract data from in_file', ) - mapping = traits.Dict(desc="Map integer label values onto names of voxels") - xlabel = traits.Str("voxels", usedefault=True, desc="Description of values plotted") + mapping = traits.Dict(desc='Map integer label values onto names of voxels') + xlabel = traits.Str('voxels', usedefault=True, desc='Description of values plotted') class LabeledHistogram(nrb.ReportingInterface): @@ -324,7 +368,7 @@ def _generate_report(self): if self.inputs.label_file: label_img = nb.load(self.inputs.label_file) if label_img.shape != img.shape[:3] or not np.allclose(label_img.affine, img.affine): - label_img = resample_to_img(label_img, img, interpolation="nearest") + label_img = resample_to_img(label_img, img, interpolation='nearest') labels = np.uint16(label_img.dataobj) else: labels = np.uint8(data > 0) @@ -343,19 +387,19 @@ def _generate_report(self): def get_world_pedir(ornt, pe_direction): """Return world direction of phase encoding""" # TODO: Move to niworkflows - axes = (("Right", "Left"), ("Anterior", "Posterior"), ("Superior", "Inferior")) - ax_idcs = {"i": 0, "j": 1, "k": 2} + axes = (('Right', 'Left'), ('Anterior', 'Posterior'), ('Superior', 'Inferior')) + ax_idcs = {'i': 0, 'j': 1, 'k': 2} if pe_direction is not None: axcode = ornt[ax_idcs[pe_direction[0]]] - inv = pe_direction[1:] == "-" + inv = pe_direction[1:] == '-' for ax in axes: for flip in (ax, ax[::-1]): if flip[not inv].startswith(axcode): - return "-".join(flip) + return '-'.join(flip) LOGGER.warning( - "Cannot determine world direction of phase encoding. " - f"Orientation: {ornt}; PE dir: {pe_direction}" + 'Cannot determine world direction of phase encoding. 
' + f'Orientation: {ornt}; PE dir: {pe_direction}' ) - return "Could not be determined - assuming Anterior-Posterior" + return 'Could not be determined - assuming Anterior-Posterior' From 034620bec6f3e0337b7b97b6a7d1196d5f5c41f3 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Wed, 24 Apr 2024 23:15:53 -0400 Subject: [PATCH 031/142] ENH: Add new parser options --- nibabies/cli/parser.py | 751 +++++++++++++++++++++-------------------- 1 file changed, 385 insertions(+), 366 deletions(-) diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py index 419d5448..993fdc94 100644 --- a/nibabies/cli/parser.py +++ b/nibabies/cli/parser.py @@ -26,25 +26,25 @@ def _build_parser(): def _path_exists(path, parser): """Ensure a given path exists.""" if path is None: - raise parser.error("No value provided!") + raise parser.error('No value provided!') path = Path(path).absolute() if not path.exists(): - raise parser.error(f"Path does not exist: <{path}>.") + raise parser.error(f'Path does not exist: <{path}>.') return path def _dir_not_empty(path, parser): path = _path_exists(path, parser) if not path.is_dir(): - raise parser.error(f"Path is not a directory <{path}>.") - for f in path.iterdir(): + raise parser.error(f'Path is not a directory <{path}>.') + for _ in path.iterdir(): return path - raise parser.error(f"Directory found with no contents <{path}>.") + raise parser.error(f'Directory found with no contents <{path}>.') def _is_file(path, parser): """Ensure a given path exists and it is a file.""" path = _path_exists(path, parser) if not path.is_file(): - raise parser.error(f"Path should point to a file (or symlink of file): <{path}>.") + raise parser.error(f'Path should point to a file (or symlink of file): <{path}>.') return path def _min_one(value, parser): @@ -55,22 +55,22 @@ def _min_one(value, parser): return value def _to_gb(value): - scale = {"G": 1, "T": 10**3, "M": 1e-3, "K": 1e-6, "B": 1e-9} - digits = "".join([c for c in value if c.isdigit()]) - units = value[len(digits) :] or "M" + scale = {'G': 1, 'T': 10**3, 'M': 1e-3, 'K': 1e-6, 'B': 1e-9} + digits = ''.join([c for c in value if c.isdigit()]) + units = value[len(digits) :] or 'M' return int(digits) * scale[units[0]] def _drop_sub(value): - return value[4:] if value.startswith("sub-") else value + return value[4:] if value.startswith('sub-') else value def _drop_ses(value): - return value[4:] if value.startswith("ses-") else value + return value[4:] if value.startswith('ses-') else value def _filter_pybids_none_any(dct): import bids return { - k: bids.layout.Query.NONE if v is None else (bids.layout.Query.ANY if v == "*" else v) + k: bids.layout.Query.NONE if v is None else (bids.layout.Query.ANY if v == '*' else v) for k, v in dct.items() } @@ -81,26 +81,25 @@ def _bids_filter(value): return loads(Path(value).read_text(), object_hook=_filter_pybids_none_any) def _slice_time_ref(value, parser): - if value == "start": + if value == 'start': value = 0 - elif value == "middle": + elif value == 'middle': value = 0.5 try: value = float(value) - except ValueError: + except ValueError as e: raise parser.error( - "Slice time reference must be number, 'start', or 'middle'. " f"Received {value}." - ) + "Slice time reference must be number, 'start', or 'middle'. " f'Received {value}.' + ) from e if not 0 <= value <= 1: - raise parser.error(f"Slice time reference must be in range 0-1. Received {value}.") + raise parser.error(f'Slice time reference must be in range 0-1. 
Received {value}.') return value - verstr = f"NiBabies v{config.environment.version}" + verstr = f'NiBabies v{config.environment.version}' currentv = Version(config.environment.version) - is_release = not any((currentv.is_devrelease, currentv.is_prerelease, currentv.is_postrelease)) parser = ArgumentParser( - description="NiBabies: Preprocessing workflows for infants v{config.environment.version}", + description='NiBabies: Preprocessing workflows for infants v{config.environment.version}', formatter_class=ArgumentDefaultsHelpFormatter, ) PathExists = partial(_path_exists, parser=parser) @@ -116,180 +115,199 @@ def _slice_time_ref(value, parser): # required, positional arguments # IMPORTANT: they must go directly with the parser object parser.add_argument( - "bids_dir", - action="store", + 'bids_dir', + action='store', type=PathExists, - help="the root folder of a BIDS valid dataset (sub-XXXXX folders should " - "be found at the top level in this folder).", + help='the root folder of a BIDS valid dataset (sub-XXXXX folders should ' + 'be found at the top level in this folder).', ) parser.add_argument( - "output_dir", - action="store", + 'output_dir', + action='store', type=Path, - help="the output path for the outcomes of preprocessing and visual " "reports", + help='the output path for the outcomes of preprocessing and visual ' 'reports', ) parser.add_argument( - "analysis_level", - choices=["participant"], + 'analysis_level', + choices=['participant'], help='processing stage to be run, only "participant" in the case of ' - "NiBabies (see BIDS-Apps specification).", + 'NiBabies (see BIDS-Apps specification).', ) # optional arguments - parser.add_argument("--version", action="version", version=verstr) + parser.add_argument('--version', action='version', version=verstr) - g_bids = parser.add_argument_group("Options for filtering BIDS queries") + g_bids = parser.add_argument_group('Options for filtering BIDS queries') g_bids.add_argument( - "--skip_bids_validation", - "--skip-bids-validation", - action="store_true", + '--skip_bids_validation', + '--skip-bids-validation', + action='store_true', default=False, - help="assume the input dataset is BIDS compliant and skip the validation", + help='assume the input dataset is BIDS compliant and skip the validation', ) g_bids.add_argument( - "--participant-label", - "--participant_label", - action="store", - nargs="+", + '--participant-label', + '--participant_label', + action='store', + nargs='+', type=_drop_sub, - help="a space delimited list of participant identifiers or a single " - "identifier (the sub- prefix can be removed)", + help='a space delimited list of participant identifiers or a single ' + 'identifier (the sub- prefix can be removed)', ) g_bids.add_argument( - "-s", - "--session-id", - action="store", - nargs="+", + '-s', + '--session-id', + action='store', + nargs='+', type=_drop_ses, - help="a space delimited list of session identifiers or a single identifier", + help='a space delimited list of session identifiers or a single identifier', ) # Re-enable when option is actually implemented # g_bids.add_argument('-r', '--run-id', action='store', default='single_run', # help='select a specific run to be processed') g_bids.add_argument( - "-t", "--task-id", action="store", help="select a specific task to be processed" + '-t', '--task-id', action='store', help='select a specific task to be processed' ) g_bids.add_argument( - "--echo-idx", - action="store", + '--echo-idx', + action='store', type=int, - help="select a specific echo to be 
processed in a multiecho series", + help='select a specific echo to be processed in a multiecho series', ) g_bids.add_argument( - "--bids-filter-file", - dest="bids_filters", - action="store", + '--bids-filter-file', + dest='bids_filters', + action='store', type=_bids_filter, - metavar="FILE", - help="a JSON file describing custom BIDS input filters using PyBIDS. " - "For further details, please check out " - "https://fmriprep.readthedocs.io/en/latest/faq.html#" - "how-do-I-select-only-certain-files-to-be-input-to-fMRIPrep", + metavar='FILE', + help='a JSON file describing custom BIDS input filters using PyBIDS. ' + 'For further details, please check out ' + 'https://fmriprep.readthedocs.io/en/latest/faq.html#' + 'how-do-I-select-only-certain-files-to-be-input-to-fMRIPrep', ) g_bids.add_argument( - "--anat-derivatives", - action="store", - metavar="PATH", + '--anat-derivatives', + action='store', + metavar='PATH', type=PathExists, - help="Reuse the anatomical derivatives from another NiBabies run or calculated " - "with an alternative processing tool (NOT RECOMMENDED).", + help='Reuse the anatomical derivatives from another NiBabies run or calculated ' + 'with an alternative processing tool (NOT RECOMMENDED).', ) g_bids.add_argument( - "--bids-database-dir", - metavar="PATH", + '--bids-database-dir', + metavar='PATH', type=PathExists, - help="Path to an existing PyBIDS database folder, for faster indexing " - "(especially useful for large datasets).", + help='Path to an existing PyBIDS database folder, for faster indexing ' + '(especially useful for large datasets).', ) - g_perfm = parser.add_argument_group("Options to handle performance") + g_perfm = parser.add_argument_group('Options to handle performance') g_perfm.add_argument( - "--nprocs", - "--nthreads", - "--n_cpus", - "--n-cpus", - dest="nprocs", - action="store", + '--nprocs', + '--nthreads', + '--n_cpus', + '--n-cpus', + dest='nprocs', + action='store', type=PositiveInt, - help="maximum number of threads across all processes", + help='maximum number of threads across all processes', ) g_perfm.add_argument( - "--omp-nthreads", - action="store", + '--omp-nthreads', + action='store', type=PositiveInt, - help="maximum number of threads per-process", + help='maximum number of threads per-process', ) g_perfm.add_argument( - "--mem", - "--mem_mb", - "--mem-mb", - dest="memory_gb", - action="store", + '--mem', + '--mem_mb', + '--mem-mb', + dest='memory_gb', + action='store', type=_to_gb, - help="upper bound memory limit for NiBabies processes", + help='upper bound memory limit for NiBabies processes', ) g_perfm.add_argument( - "--low-mem", - action="store_true", - help="attempt to reduce memory usage (will increase disk usage " "in working directory)", + '--low-mem', + action='store_true', + help='attempt to reduce memory usage (will increase disk usage ' 'in working directory)', ) g_perfm.add_argument( - "--use-plugin", - "--nipype-plugin-file", - action="store", - metavar="FILE", + '--use-plugin', + '--nipype-plugin-file', + action='store', + metavar='FILE', type=IsFile, - help="nipype plugin configuration file", + help='nipype plugin configuration file', ) - g_perfm.add_argument("--anat-only", action="store_true", help="run anatomical workflows only") g_perfm.add_argument( - "--boilerplate_only", - action="store_true", + '--md-only-boilerplate', + action='store_true', default=False, - help="generate boilerplate only", + help='skip generation of HTML and LaTeX formatted citation with pandoc', ) g_perfm.add_argument( - 
"--md-only-boilerplate", - action="store_true", + '--error-on-aroma-warnings', + action='store_true', + dest='aroma_err_on_warn', default=False, - help="skip generation of HTML and LaTeX formatted citation with pandoc", + help='Raise an error if ICA_AROMA does not produce sensible output ' + '(e.g., if all the components are classified as signal or noise)', ) g_perfm.add_argument( - "--error-on-aroma-warnings", - action="store_true", - dest="aroma_err_on_warn", + '-v', + '--verbose', + dest='verbose_count', + action='count', + default=0, + help='increases log verbosity for each occurence, debug level is -vvv', + ) + + g_subset = parser.add_argument_group('Options for performing only a subset of the workflow') + g_subset.add_argument('--anat-only', action='store_true', help='Run anatomical workflows only') + g_subset.add_argument( + '--level', + action='store', + default='full', + choices=['minimal', 'resampling', 'full'], + help="Processing level; may be 'minimal' (nothing that can be recomputed), " + "'resampling' (recomputable targets that aid in resampling) " + "or 'full' (all target outputs).", + ) + g_subset.add_argument( + '--boilerplate-only', + '--boilerplate_only', + action='store_true', default=False, - help="Raise an error if ICA_AROMA does not produce sensible output " - "(e.g., if all the components are classified as signal or noise)", + help='Generate boilerplate only', ) - g_perfm.add_argument( - "-v", - "--verbose", - dest="verbose_count", - action="count", - default=0, - help="increases log verbosity for each occurence, debug level is -vvv", + g_subset.add_argument( + '--reports-only', + action='store_true', + default=False, + help="Only generate reports, don't run workflows. This will only rerun report " + 'aggregation, not reportlet generation for specific nodes.', ) - g_conf = parser.add_argument_group("Workflow configuration") + g_conf = parser.add_argument_group('Workflow configuration') g_conf.add_argument( - "--ignore", + '--ignore', required=False, - action="store", - nargs="+", + action='store', + nargs='+', default=[], - choices=["fieldmaps", "slicetiming", "sbref", "t2w", "flair"], - help="ignore selected aspects of the input dataset to disable corresponding " - "parts of the workflow (a space delimited list)", + choices=['fieldmaps', 'slicetiming', 'sbref', 't1w', 't2w', 'flair', 'fmap-jacobian'], + help='ignore selected aspects of the input dataset to disable corresponding ' + 'parts of the workflow (a space delimited list)', ) g_conf.add_argument( - "--longitudinal", - action="store_true", - help="treat dataset as longitudinal - may increase runtime", + '--longitudinal', + action='store_true', + help='treat dataset as longitudinal - may increase runtime', ) g_conf.add_argument( - "--output-spaces", - nargs="*", + '--output-spaces', + nargs='*', action=OutputReferencesAction, help="""\ Standard and non-standard spaces to resample anatomical and functional images to. \ @@ -304,8 +322,8 @@ def _slice_time_ref(value, parser): https://fmriprep.readthedocs.io/en/latest/spaces.html""", ) g_conf.add_argument( - "--me-output-echos", - action="store_true", + '--me-output-echos', + action='store_true', default=False, help="""\ Output individual echo time series with slice, motion and susceptibility correction. 
\ @@ -313,311 +331,304 @@ def _slice_time_ref(value, parser): ) g_conf.add_argument( - "--bold2t1w-init", - action="store", - default="register", - choices=["register", "header"], + '--bold2t1w-init', + action='store', + default='register', + choices=['register', 'header'], help='Either "register" (the default) to initialize volumes at center or "header"' - " to use the header information when coregistering BOLD to T1w images.", + ' to use the header information when coregistering BOLD to T1w images.', ) g_conf.add_argument( - "--bold2t1w-dof", - action="store", + '--bold2t1w-dof', + action='store', default=6, choices=[6, 9, 12], type=int, - help="Degrees of freedom when registering BOLD to T1w images. " - "6 degrees (rotation and translation) are used by default.", + help='Degrees of freedom when registering BOLD to T1w images. ' + '6 degrees (rotation and translation) are used by default.', ) g_conf.add_argument( - "--force-bbr", - action="store_true", - dest="use_bbr", + '--force-bbr', + action='store_true', + dest='use_bbr', default=None, - help="Always use boundary-based registration (no goodness-of-fit checks)", + help='Always use boundary-based registration (no goodness-of-fit checks)', ) g_conf.add_argument( - "--force-no-bbr", - action="store_false", - dest="use_bbr", + '--force-no-bbr', + action='store_false', + dest='use_bbr', default=None, - help="Do not use boundary-based registration (no goodness-of-fit checks)", + help='Do not use boundary-based registration (no goodness-of-fit checks)', ) g_conf.add_argument( - "--medial-surface-nan", + '--medial-surface-nan', required=False, - action="store_true", + action='store_true', default=False, - help="Replace medial wall values with NaNs on functional GIFTI files. Only " - "performed for GIFTI files mapped to a freesurfer subject (fsaverage or fsnative).", + help='Replace medial wall values with NaNs on functional GIFTI files. Only ' + 'performed for GIFTI files mapped to a freesurfer subject (fsaverage or fsnative).', ) g_conf.add_argument( - "--project-goodvoxels", + '--project-goodvoxels', required=False, - action="store_true", + action='store_true', default=False, - help="Exclude voxels whose timeseries have locally high coefficient of variation " - "from surface resampling. Only performed for GIFTI files mapped to a freesurfer subject " - "(fsaverage or fsnative).", + help='Exclude voxels whose timeseries have locally high coefficient of variation ' + 'from surface resampling. Only performed for GIFTI files mapped to a freesurfer subject ' + '(fsaverage or fsnative).', ) g_conf.add_argument( - "--slice-time-ref", + '--slice-time-ref', required=False, - action="store", + action='store', default=None, type=SliceTimeRef, - help="The time of the reference slice to correct BOLD values to, as a fraction " - "acquisition time. 0 indicates the start, 0.5 the midpoint, and 1 the end " - "of acquisition. The alias `start` corresponds to 0, and `middle` to 0.5. " - "The default value is 0.5.", + help='The time of the reference slice to correct BOLD values to, as a fraction ' + 'acquisition time. 0 indicates the start, 0.5 the midpoint, and 1 the end ' + 'of acquisition. The alias `start` corresponds to 0, and `middle` to 0.5. 
' + 'The default value is 0.5.', ) g_conf.add_argument( - "--dummy-scans", + '--dummy-scans', required=False, - action="store", + action='store', default=None, type=int, - help="Number of non steady state volumes.", + help='Number of non steady state volumes.', ) g_conf.add_argument( - "--random-seed", - dest="_random_seed", - action="store", + '--random-seed', + dest='_random_seed', + action='store', type=int, default=None, - help="Initialize the random seed for the workflow", + help='Initialize the random seed for the workflow', ) # ICA_AROMA options - g_aroma = parser.add_argument_group("Specific options for running ICA_AROMA") + g_aroma = parser.add_argument_group('Specific options for running ICA_AROMA') g_aroma.add_argument( - "--use-aroma", - action="store_true", + '--use-aroma', + action='store_true', default=False, - help="add ICA_AROMA to your preprocessing stream", + help='add ICA_AROMA to your preprocessing stream', ) g_aroma.add_argument( - "--aroma-melodic-dimensionality", - dest="aroma_melodic_dim", - action="store", + '--aroma-melodic-dimensionality', + dest='aroma_melodic_dim', + action='store', default=-200, type=int, - help="Exact or maximum number of MELODIC components to estimate " - "(positive = exact, negative = maximum)", + help='Exact or maximum number of MELODIC components to estimate ' + '(positive = exact, negative = maximum)', ) # Confounds options - g_confounds = parser.add_argument_group("Specific options for estimating confounds") + g_confounds = parser.add_argument_group('Specific options for estimating confounds') g_confounds.add_argument( - "--return-all-components", - dest="regressors_all_comps", + '--return-all-components', + dest='regressors_all_comps', required=False, - action="store_true", + action='store_true', default=False, - help="Include all components estimated in CompCor decomposition in the confounds " - "file instead of only the components sufficient to explain 50 percent of " - "BOLD variance in each CompCor mask", + help='Include all components estimated in CompCor decomposition in the confounds ' + 'file instead of only the components sufficient to explain 50 percent of ' + 'BOLD variance in each CompCor mask', ) g_confounds.add_argument( - "--fd-spike-threshold", - dest="regressors_fd_th", + '--fd-spike-threshold', + dest='regressors_fd_th', required=False, - action="store", + action='store', default=0.5, type=float, - help="Threshold for flagging a frame as an outlier on the basis of framewise " - "displacement", + help='Threshold for flagging a frame as an outlier on the basis of framewise ' + 'displacement', ) g_confounds.add_argument( - "--dvars-spike-threshold", - dest="regressors_dvars_th", + '--dvars-spike-threshold', + dest='regressors_dvars_th', required=False, - action="store", + action='store', default=1.5, type=float, - help="Threshold for flagging a frame as an outlier on the basis of standardised " "DVARS", + help='Threshold for flagging a frame as an outlier on the basis of standardised ' 'DVARS', ) # ANTs options - g_ants = parser.add_argument_group("Specific options for ANTs registrations") + g_ants = parser.add_argument_group('Specific options for ANTs registrations') g_ants.add_argument( - "--skull-strip-template", - default="UNCInfant:cohort-1", + '--skull-strip-template', + default='UNCInfant:cohort-1', type=Reference.from_string, - help="select a template for skull-stripping with antsBrainExtraction", + help='select a template for skull-stripping with antsBrainExtraction', ) g_ants.add_argument( - 
"--skull-strip-fixed-seed", - action="store_true", - help="do not use a random seed for skull-stripping - will ensure " - "run-to-run replicability when used with --omp-nthreads 1 and " - "matching --random-seed ", + '--skull-strip-fixed-seed', + action='store_true', + help='do not use a random seed for skull-stripping - will ensure ' + 'run-to-run replicability when used with --omp-nthreads 1 and ' + 'matching --random-seed ', ) g_ants.add_argument( - "--skull-strip-t1w", - action="store", - choices=("auto", "skip", "force"), - default="force", - help="determiner for T1-weighted skull stripping ('force' ensures skull " + '--skull-strip-anat', + action='store', + choices=('auto', 'skip', 'force'), + default='force', + help="determiner for anatomical skull stripping ('force' ensures skull " "stripping, 'skip' ignores skull stripping, and 'auto' applies brain extraction " - "based on the outcome of a heuristic to check whether the brain is already masked).", + 'based on the outcome of a heuristic to check whether the brain is already masked).', ) # Fieldmap options - g_fmap = parser.add_argument_group("Specific options for handling fieldmaps") + g_fmap = parser.add_argument_group('Specific options for handling fieldmaps') g_fmap.add_argument( - "--fmap-bspline", - action="store_true", + '--fmap-bspline', + action='store_true', default=False, - help="fit a B-Spline field using least-squares (experimental)", + help='fit a B-Spline field using least-squares (experimental)', ) g_fmap.add_argument( - "--fmap-no-demean", - action="store_false", + '--fmap-no-demean', + action='store_false', default=True, - help="do not remove median (within mask) from fieldmap", + help='do not remove median (within mask) from fieldmap', ) # SyN-unwarp options - g_syn = parser.add_argument_group("Specific options for SyN distortion correction") + g_syn = parser.add_argument_group('Specific options for SyN distortion correction') g_syn.add_argument( - "--use-syn-sdc", - action="store_true", + '--use-syn-sdc', + action='store_true', default=False, - help="EXPERIMENTAL: Use fieldmap-free distortion correction", + help='EXPERIMENTAL: Use fieldmap-free distortion correction', ) g_syn.add_argument( - "--force-syn", - action="store_true", + '--force-syn', + action='store_true', default=False, - help="EXPERIMENTAL/TEMPORARY: Use SyN correction in addition to " - "fieldmap correction, if available", + help='EXPERIMENTAL/TEMPORARY: Use SyN correction in addition to ' + 'fieldmap correction, if available', ) # FreeSurfer options - g_fs = parser.add_argument_group("Specific options for FreeSurfer preprocessing") + g_fs = parser.add_argument_group('Specific options for FreeSurfer preprocessing') g_fs.add_argument( - "--fs-license-file", - metavar="FILE", + '--fs-license-file', + metavar='FILE', type=IsFile, - help="Path to FreeSurfer license key file. Get it (for free) by registering" - " at https://surfer.nmr.mgh.harvard.edu/registration.html", + help='Path to FreeSurfer license key file. Get it (for free) by registering' + ' at https://surfer.nmr.mgh.harvard.edu/registration.html', ) g_fs.add_argument( - "--fs-subjects-dir", - metavar="PATH", + '--fs-subjects-dir', + metavar='PATH', type=Path, - help="Path to existing FreeSurfer subjects directory to reuse. " - "(default: OUTPUT_DIR/freesurfer)", + help='Path to existing FreeSurfer subjects directory to reuse. 
'
+        '(default: OUTPUT_DIR/freesurfer)',
     )
 
     # Surface generation xor
-    g_surfs = parser.add_argument_group("Surface preprocessing options")
+    g_surfs = parser.add_argument_group('Surface preprocessing options')
     g_surfs.add_argument(
-        "--no-submm-recon",
-        action="store_false",
-        dest="hires",
-        help="disable sub-millimeter (hires) reconstruction",
+        '--no-submm-recon',
+        action='store_false',
+        dest='hires',
+        help='disable sub-millimeter (hires) reconstruction',
     )
     g_surfs_xor = g_surfs.add_mutually_exclusive_group()
     g_surfs_xor.add_argument(
-        "--cifti-output",
-        nargs="?",
-        const="91k",
+        '--cifti-output',
+        nargs='?',
+        const='91k',
         default=False,
-        choices=("91k", "170k"),
+        choices=('91k', '170k'),
         type=str,
-        help="output preprocessed BOLD as a CIFTI dense timeseries. "
-        "Optionally, the number of grayordinate can be specified "
-        "(default is 91k, which equates to 2mm resolution)",
+        help='output preprocessed BOLD as a CIFTI dense timeseries. '
+        'Optionally, the number of grayordinates can be specified '
+        '(default is 91k, which equates to 2mm resolution)',
    )
     g_surfs_xor.add_argument(
-        "--fs-no-reconall",
-        action="store_false",
-        dest="run_reconall",
-        help="disable FreeSurfer surface preprocessing.",
+        '--fs-no-reconall',
+        action='store_false',
+        dest='run_reconall',
+        help='disable FreeSurfer surface preprocessing.',
     )
 
-    g_other = parser.add_argument_group("Other options")
+    g_other = parser.add_argument_group('Other options')
     g_other.add_argument(
-        "--output-layout",
-        action="store",
-        default="bids",
-        choices=("bids", "legacy"),
-        help="Organization of outputs. bids (default) places NiBabies derivatives "
-        "directly in the output directory, and defaults to placing FreeSurfer "
-        "derivatives in /sourcedata/freesurfer. legacy creates derivative "
-        "datasets as subdirectories of outputs.",
+        '--output-layout',
+        action='store',
+        default='bids',
+        choices=('bids', 'legacy'),
+        help='Organization of outputs. bids (default) places NiBabies derivatives '
+        'directly in the output directory, and defaults to placing FreeSurfer '
+        'derivatives in /sourcedata/freesurfer. legacy creates derivative '
+        'datasets as subdirectories of outputs.',
     )
     g_other.add_argument(
-        "-w",
-        "--work-dir",
-        action="store",
+        '-w',
+        '--work-dir',
+        action='store',
         type=Path,
-        default=Path("work").absolute(),
-        help="path where intermediate results should be stored",
+        default=Path('work').absolute(),
+        help='path where intermediate results should be stored',
     )
     g_other.add_argument(
-        "--clean-workdir",
-        action="store_true",
+        '--clean-workdir',
+        action='store_true',
         default=False,
-        help="Clears working directory of contents. Use of this flag is not"
-        "recommended when running concurrent processes of NiBabies.",
+        help='Clears working directory of contents. Use of this flag is not '
+        'recommended when running concurrent processes of NiBabies.',
     )
     g_other.add_argument(
-        "--resource-monitor",
-        action="store_true",
+        '--resource-monitor',
+        action='store_true',
         default=False,
         help="enable Nipype's resource monitoring to keep track of memory and CPU usage",
     )
     g_other.add_argument(
-        "--reports-only",
-        action="store_true",
-        default=False,
-        help="only generate reports, don't run workflows. This will only rerun report "
-        "aggregation, not reportlet generation for specific nodes.",
+        '--config-file',
+        action='store',
+        metavar='FILE',
+        help='Use pre-generated configuration file. 
Values in file will be overridden ' + 'by command-line arguments.', ) g_other.add_argument( - "--config-file", - action="store", - metavar="FILE", - help="Use pre-generated configuration file. Values in file will be overridden " - "by command-line arguments.", - ) - g_other.add_argument( - "--write-graph", - action="store_true", + '--write-graph', + action='store_true', default=False, - help="Write workflow graph.", + help='Write workflow graph.', ) g_other.add_argument( - "--stop-on-first-crash", - action="store_true", + '--stop-on-first-crash', + action='store_true', default=False, - help="Force stopping on first crash, even if a work directory" " was specified.", + help='Force stopping on first crash, even if a work directory' ' was specified.', ) g_other.add_argument( - "--notrack", - action="store_true", + '--notrack', + action='store_true', default=False, - help="Opt-out of sending tracking information of this run to " - "the NiBabies developers. This information helps to " - "improve NiBabies and provides an indicator of real " - "world usage crucial for obtaining funding.", + help='Opt-out of sending tracking information of this run to ' + 'the NiBabies developers. This information helps to ' + 'improve NiBabies and provides an indicator of real ' + 'world usage crucial for obtaining funding.', ) g_other.add_argument( - "--debug", - action="store", - nargs="+", - choices=config.DEBUG_MODES + ("all",), + '--debug', + action='store', + nargs='+', + choices=config.DEBUG_MODES + ('all',), help="Debug mode(s) to enable. 'all' is alias for all available modes.", ) g_other.add_argument( - "--sloppy", - action="store_true", + '--sloppy', + action='store_true', default=False, - help="Use low-quality tools for speed - TESTING ONLY", + help='Use low-quality tools for speed - TESTING ONLY', ) latest = check_latest() @@ -632,61 +643,69 @@ def _slice_time_ref(value, parser): _blist = is_flagged() if _blist[0]: - _reason = _blist[1] or "unknown" + _reason = _blist[1] or 'unknown' print( - """\ -WARNING: Version %s of NiBabies (current) has been FLAGGED -(reason: %s). + f"""\ +WARNING: Version {config.environment.version} of NiBabies (current) has been FLAGGED +(reason: {_reason}). 
That means some severe flaw was found in it and we strongly
-discourage its usage.""",
+discourage its usage.""",
             file=sys.stderr,
         )
 
     # Add new options
-    g_baby = parser.add_argument_group("NiBabies specific options")
+    g_baby = parser.add_argument_group('NiBabies specific options')
     g_baby.add_argument(
-        "--age-months",
-        dest="age_months",
+        '--age-months',
+        dest='age_months',
         type=int,
-        help="Age in months",
+        help='Age in months',
     )
     g_baby.add_argument(
-        "--segmentation-atlases-dir",
+        '--segmentation-atlases-dir',
         type=DirNotEmpty,
-        help="Directory containing precalculated segmentations to use for JointLabelFusion.",
+        help='Directory containing precalculated segmentations to use for JointLabelFusion.',
     )
     g_baby.add_argument(
-        "--fd-radius",
+        '--fd-radius',
         type=float,
         default=45,
-        help="Head radius in mm for framewise displacement calculation.",
+        help='Head radius in mm for framewise displacement calculation.',
     )
     g_baby.add_argument(
-        "-d",
-        "--derivatives",
+        '-d',
+        '--derivatives',
         type=DirNotEmpty,
-        nargs="+",
-        help="One or more directory containing pre-computed derivatives.",
+        nargs='+',
+        help='One or more directories containing pre-computed derivatives.',
     )
     g_baby.add_argument(
-        "--deriv-filter-file",
-        dest="derivatives_filters",
+        '--deriv-filter-file',
+        dest='derivatives_filters',
         type=_bids_filter,
-        metavar="FILE",
-        help="A JSON file for customizing the derivatives queries.",
+        metavar='FILE',
+        help='A JSON file for customizing the derivatives queries.',
     )
     g_baby.add_argument(
-        "--force-reconall",
+        '--force-reconall',
         default=False,
-        action="store_true",
-        help="Force traditional FreeSurfer surface reconstruction.",
+        action='store_true',
+        help='Force traditional FreeSurfer surface reconstruction.',
     )
     g_baby.add_argument(
-        "--surface-recon-method",
-        choices=("infantfs", "freesurfer", "mcribs"),
-        default="infantfs",
-        help="Method to use for surface reconstruction",
+        '--surface-recon-method',
+        choices=('infantfs', 'freesurfer', 'mcribs', 'auto'),
+        default='auto',
+        help='Method to use for surface reconstruction',
+    )
+    g_baby.add_argument(
+        '--reference-anat',
+        '--reference-anatomical',
+        choices=('T1w', 'T2w'),
+        default=None,
+        help='Override which anatomical to use as the structural reference. '
+        'Generally, this is determined based on availability, age, and surface '
+        'reconstruction method.',
    )
 
     return parser
@@ -701,15 +720,15 @@ def parse_args(args=None, namespace=None):
     # Deprecations
     if opts.force_reconall:
         config.loggers.cli.warning(
-            "--force-reconall is deprecated and will be removed in a future release."
-            "To run traditional `recon-all`, use `--surface-recon-method freesurfer` instead."
+            '--force-reconall is deprecated and will be removed in a future release. '
+            'To run traditional `recon-all`, use `--surface-recon-method freesurfer` instead.' 
) - opts.surface_recon_method = "freesurfer" + opts.surface_recon_method = 'freesurfer' if opts.config_file: - skip = {} if opts.reports_only else {"execution": ("run_uuid",)} + skip = {} if opts.reports_only else {'execution': ('run_uuid',)} config.load(opts.config_file, skip=skip) - config.loggers.cli.info(f"Loaded previous configuration file {opts.config_file}") + config.loggers.cli.info(f'Loaded previous configuration file {opts.config_file}') config.execution.log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG)) config.from_dict(vars(opts)) @@ -722,13 +741,13 @@ def parse_args(args=None, namespace=None): import yaml with open(opts.use_plugin) as f: - plugin_settings = yaml.load(f, Loader=yaml.FullLoader) - _plugin = plugin_settings.get("plugin") + plugin_settings = yaml.safe_load(f) + _plugin = plugin_settings.get('plugin') if _plugin: config.nipype.plugin = _plugin - config.nipype.plugin_args = plugin_settings.get("plugin_args", {}) + config.nipype.plugin_args = plugin_settings.get('plugin_args', {}) config.nipype.nprocs = opts.nprocs or config.nipype.plugin_args.get( - "n_procs", config.nipype.nprocs + 'n_procs', config.nipype.nprocs ) # Resource management options @@ -736,17 +755,17 @@ def parse_args(args=None, namespace=None): # This may need to be revisited if people try to use batch plugins if 1 < config.nipype.nprocs < config.nipype.omp_nthreads: build_log.warning( - f"Per-process threads (--omp-nthreads={config.nipype.omp_nthreads}) exceed " - f"total threads (--nthreads/--n_cpus={config.nipype.nprocs})" + f'Per-process threads (--omp-nthreads={config.nipype.omp_nthreads}) exceed ' + f'total threads (--nthreads/--n_cpus={config.nipype.nprocs})' ) # Inform the user about the risk of using brain-extracted images - if config.workflow.skull_strip_t1w == "auto": + if config.workflow.skull_strip_anat == 'auto': build_log.warning( """\ Option ``--skull-strip-t1w`` was set to 'auto'. A heuristic will be \ -applied to determine whether the input T1w image(s) have already been skull-stripped. -If that were the case, brain extraction and INU correction will be skipped for those T1w \ +applied to determine whether the input anatomical image(s) have already been skull-stripped. +If that were the case, brain extraction and INU correction will be skipped for those anatomical \ inputs. Please, BEWARE OF THE RISKS TO THE CONSISTENCY of results when using varying \ processing workflows across participants. 
To determine whether a participant has been run \ through the shortcut pipeline (meaning, brain extraction was skipped), please check the \ @@ -762,20 +781,20 @@ def parse_args(args=None, namespace=None): output_layout = config.execution.output_layout if config.execution.fs_subjects_dir is None: - if output_layout == "bids": - config.execution.fs_subjects_dir = output_dir / "sourcedata" / "freesurfer" - elif output_layout == "legacy": - config.execution.fs_subjects_dir = output_dir / "freesurfer" + if output_layout == 'bids': + config.execution.fs_subjects_dir = output_dir / 'sourcedata' / 'freesurfer' + elif output_layout == 'legacy': + config.execution.fs_subjects_dir = output_dir / 'freesurfer' if config.execution.nibabies_dir is None: - if output_layout == "bids": + if output_layout == 'bids': config.execution.nibabies_dir = output_dir - elif output_layout == "legacy": - config.execution.nibabies_dir = output_dir / "nibabies" - if config.workflow.surface_recon_method == "mcribs": - if output_layout == "bids": - config.execution.mcribs_dir = output_dir / "sourcedata" / "mcribs" - elif output_layout == "legacy": - config.execution.mcribs_dir = output_dir / "mcribs" + elif output_layout == 'legacy': + config.execution.nibabies_dir = output_dir / 'nibabies' + if config.workflow.surface_recon_method == 'mcribs': + if output_layout == 'bids': + config.execution.mcribs_dir = output_dir / 'sourcedata' / 'mcribs' + elif output_layout == 'legacy': + config.execution.mcribs_dir = output_dir / 'mcribs' # Ensure the directory is created config.execution.mcribs_dir.mkdir(exist_ok=True, parents=True) @@ -789,24 +808,24 @@ def parse_args(args=None, namespace=None): if opts.clean_workdir and work_dir.exists(): from niworkflows.utils.misc import clean_directory - build_log.info(f"Clearing previous NiBabies working directory: {work_dir}") + build_log.info(f'Clearing previous NiBabies working directory: {work_dir}') if not clean_directory(work_dir): - build_log.warning(f"Could not clear all contents of working directory: {work_dir}") + build_log.warning(f'Could not clear all contents of working directory: {work_dir}') # Ensure input and output folders are not the same if output_dir == bids_dir: parser.error( - "The selected output folder is the same as the input BIDS folder. " - "Please modify the output path (suggestion: %s)." + 'The selected output folder is the same as the input BIDS folder. ' + 'Please modify the output path (suggestion: %s).' % bids_dir - / "derivatives" - / ("nibabies-%s" % version.split("+")[0]) + / 'derivatives' + / ('nibabies-%s' % version.split('+')[0]) ) if bids_dir in work_dir.parents: parser.error( - "The selected working directory is a subdirectory of the input BIDS folder. " - "Please modify the output path." + 'The selected working directory is a subdirectory of the input BIDS folder. ' + 'Please modify the output path.' ) # Validate inputs @@ -814,20 +833,20 @@ def parse_args(args=None, namespace=None): from ..utils.bids import validate_input_dir build_log.info( - "Making sure the input data is BIDS compliant (warnings can be ignored in most " - "cases)." + 'Making sure the input data is BIDS compliant (warnings can be ignored in most ' + 'cases).' 
) validate_input_dir(config.environment.exec_env, opts.bids_dir, opts.participant_label) # Setup directories - config.execution.log_dir = config.execution.nibabies_dir / "logs" + config.execution.log_dir = config.execution.nibabies_dir / 'logs' # Check and create output and working directories config.execution.log_dir.mkdir(exist_ok=True, parents=True) work_dir.mkdir(exist_ok=True, parents=True) # Force initialization of the BIDSLayout config.execution.init() - all_subjects = config.execution.layout.get_subjects(scope="raw") + all_subjects = config.execution.layout.get_subjects(scope='raw') if config.execution.participant_label is None: config.execution.participant_label = all_subjects @@ -849,14 +868,14 @@ def parse_args(args=None, namespace=None): config.workflow.skull_strip_template = config.workflow.skull_strip_template[0] # finally, write config to file - config_file = config.execution.work_dir / config.execution.run_uuid / "config.toml" + config_file = config.execution.work_dir / config.execution.run_uuid / 'config.toml' config_file.parent.mkdir(exist_ok=True, parents=True) config.to_filename(config_file) def compute_subworkflows( *, - layout: 'BIDSLayout', + layout: BIDSLayout, participant_ids: list, session_ids: list | None = None, ) -> list: From 5cb53e00191764be50f08be0b967fdfde517d8e1 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 25 Apr 2024 01:39:42 -0400 Subject: [PATCH 032/142] ENH: Reflect new options within configuration --- nibabies/config.py | 232 +++++++++++++++++++++++---------------------- 1 file changed, 118 insertions(+), 114 deletions(-) diff --git a/nibabies/config.py b/nibabies/config.py index 25251c72..5922db8a 100644 --- a/nibabies/config.py +++ b/nibabies/config.py @@ -80,30 +80,30 @@ # <= 3.7 from importlib_metadata import version as get_version -__version__ = get_version("nibabies") +__version__ = get_version('nibabies') _pre_exec_env = dict(os.environ) # Disable NiPype etelemetry always -_disable_et = bool(os.getenv("NO_ET") is not None or os.getenv("NIPYPE_NO_ET") is not None) -os.environ["NIPYPE_NO_ET"] = "1" -os.environ["NO_ET"] = "1" +_disable_et = bool(os.getenv('NO_ET') is not None or os.getenv('NIPYPE_NO_ET') is not None) +os.environ['NIPYPE_NO_ET'] = '1' +os.environ['NO_ET'] = '1' -_yes_flags = ("1", "on", "true", "y", "yes") +_yes_flags = ('1', 'on', 'true', 'y', 'yes') # Only show warnings if requested -if os.getenv("NIBABIES_SHOW_WARNINGS", "0").lower() in _yes_flags: +if os.getenv('NIBABIES_SHOW_WARNINGS', '0').lower() in _yes_flags: import logging else: from ._warnings import logging - if not hasattr(sys, "_is_pytest_session"): + if not hasattr(sys, '_is_pytest_session'): sys._is_pytest_session = False # Trick to avoid sklearn's FutureWarnings - if "+" not in __version__ or not __version__.endswith(".dirty"): + if '+' not in __version__ or not __version__.endswith('.dirty'): # Disable all warnings in main and children processes only on production versions - os.environ["PYTHONWARNINGS"] = "ignore" + os.environ['PYTHONWARNINGS'] = 'ignore' -logging.addLevelName(25, "IMPORTANT") # Add a new level between INFO and WARNING -logging.addLevelName(15, "VERBOSE") # Add a new level between INFO and DEBUG +logging.addLevelName(25, 'IMPORTANT') # Add a new level between INFO and WARNING +logging.addLevelName(15, 'VERBOSE') # Add a new level between INFO and DEBUG DEFAULT_MEMORY_MIN_GB = 0.01 @@ -117,28 +117,28 @@ from requests import get as _get_url with suppress((ConnectionError, ReadTimeout)): - _get_url("https://rig.mit.edu/et/projects/nipy/nipype", 
timeout=0.05) + _get_url('https://rig.mit.edu/et/projects/nipy/nipype', timeout=0.05) # Execution environment _exec_env = os.name _docker_ver = None # special variable set in the container -if os.getenv("IS_DOCKER_8395080871"): - _exec_env = "singularity" - _cgroup = Path("/proc/1/cgroup") - if _cgroup.exists() and "docker" in _cgroup.read_text(): - _docker_ver = os.getenv("DOCKER_VERSION_8395080871") - _exec_env = "docker" +if os.getenv('IS_DOCKER_8395080871'): + _exec_env = 'singularity' + _cgroup = Path('/proc/1/cgroup') + if _cgroup.exists() and 'docker' in _cgroup.read_text(): + _docker_ver = os.getenv('DOCKER_VERSION_8395080871') + _exec_env = 'docker' del _cgroup -_fs_license = os.getenv("FS_LICENSE") -if not _fs_license and os.getenv("FREESURFER_HOME"): - _fs_home = os.getenv("FREESURFER_HOME") - if _fs_home and (Path(_fs_home) / "license.txt").is_file(): - _fs_license = str(Path(_fs_home) / "license.txt") +_fs_license = os.getenv('FS_LICENSE') +if not _fs_license and os.getenv('FREESURFER_HOME'): + _fs_home = os.getenv('FREESURFER_HOME') + if _fs_home and (Path(_fs_home) / 'license.txt').is_file(): + _fs_license = str(Path(_fs_home) / 'license.txt') del _fs_home -_templateflow_home = Path(os.getenv("TEMPLATEFLOW_HOME", Path.home() / ".cache" / "templateflow")) +_templateflow_home = Path(os.getenv('TEMPLATEFLOW_HOME', Path.home() / '.cache' / 'templateflow')) try: from psutil import virtual_memory @@ -147,34 +147,34 @@ except Exception: _free_mem_at_start = None -_oc_limit = "n/a" -_oc_policy = "n/a" +_oc_limit = 'n/a' +_oc_policy = 'n/a' try: # Memory policy may have a large effect on types of errors experienced - _proc_oc_path = Path("/proc/sys/vm/overcommit_memory") + _proc_oc_path = Path('/proc/sys/vm/overcommit_memory') if _proc_oc_path.exists(): - _oc_policy = {"0": "heuristic", "1": "always", "2": "never"}.get( - _proc_oc_path.read_text().strip(), "unknown" + _oc_policy = {'0': 'heuristic', '1': 'always', '2': 'never'}.get( + _proc_oc_path.read_text().strip(), 'unknown' ) - if _oc_policy != "never": - _proc_oc_kbytes = Path("/proc/sys/vm/overcommit_kbytes") + if _oc_policy != 'never': + _proc_oc_kbytes = Path('/proc/sys/vm/overcommit_kbytes') if _proc_oc_kbytes.exists(): _oc_limit = _proc_oc_kbytes.read_text().strip() - if _oc_limit in ("0", "n/a") and Path("/proc/sys/vm/overcommit_ratio").exists(): - _oc_limit = "{}%".format(Path("/proc/sys/vm/overcommit_ratio").read_text().strip()) + if _oc_limit in ('0', 'n/a') and Path('/proc/sys/vm/overcommit_ratio').exists(): + _oc_limit = '{}%'.format(Path('/proc/sys/vm/overcommit_ratio').read_text().strip()) except Exception: pass _memory_gb = None try: - if "linux" in sys.platform: - with open("/proc/meminfo", "r") as f_in: + if 'linux' in sys.platform: + with open('/proc/meminfo') as f_in: _meminfo_lines = f_in.readlines() - _mem_total_line = [line for line in _meminfo_lines if "MemTotal" in line][0] + _mem_total_line = [line for line in _meminfo_lines if 'MemTotal' in line][0] _mem_total = float(_mem_total_line.split()[1]) _memory_gb = _mem_total / (1024.0**2) - elif "darwin" in sys.platform: - _mem_str = os.popen("sysctl hw.memsize").read().strip().split(" ")[-1] + elif 'darwin' in sys.platform: + _mem_str = os.popen('sysctl hw.memsize').read().strip().split(' ')[-1] _memory_gb = float(_mem_str) / (1024.0**3) except Exception: pass @@ -186,15 +186,15 @@ _available_cpus = len(psutil.Process().cpu_affinity()) except (AttributeError, ImportError, NotImplementedError): - if hasattr(os, "sched_getaffinity"): + if hasattr(os, 
'sched_getaffinity'): _available_cpus = len(os.sched_getaffinity(0)) # Reduce numpy's vms by limiting OMP_NUM_THREADS -_default_omp_threads = int(os.getenv("OMP_NUM_THREADS", _available_cpus)) +_default_omp_threads = int(os.getenv('OMP_NUM_THREADS', _available_cpus)) # Debug modes are names that influence the exposure of internal details to # the user, either through additional derivatives or increased verbosity -DEBUG_MODES = ("compcor", "registration", "fieldmaps") +DEBUG_MODES = ('compcor', 'registration', 'fieldmaps') class _Config: @@ -204,7 +204,7 @@ class _Config: def __init__(self): """Avert instantiation.""" - raise RuntimeError("Configuration type is not instantiable.") + raise RuntimeError('Configuration type is not instantiable.') @classmethod def load(cls, settings, init=True, ignore=None): @@ -221,7 +221,7 @@ def load(cls, settings, init=True, ignore=None): elif hasattr(cls, k): setattr(cls, k, v) - if init and hasattr(cls, "init"): + if init and hasattr(cls, 'init'): cls.init() @classmethod @@ -231,7 +231,7 @@ def get(cls): out = {} for k, v in cls.__dict__.items(): - if k.startswith("_") or v is None: + if k.startswith('_') or v is None: continue if callable(getattr(cls, k)): continue @@ -241,7 +241,7 @@ def get(cls): else: v = str(v) if isinstance(v, SpatialReferences): - v = " ".join([str(s) for s in v.references]) or None + v = ' '.join([str(s) for s in v.references]) or None if isinstance(v, Reference): v = str(v) or None out[k] = v @@ -274,9 +274,9 @@ class environment(_Config): """Linux's kernel virtual memory overcommit policy.""" overcommit_limit = _oc_limit """Linux's kernel virtual memory overcommit limits.""" - nipype_version = get_version("nipype") + nipype_version = get_version('nipype') """Nipype's current version.""" - templateflow_version = get_version("templateflow") + templateflow_version = get_version('templateflow') """The TemplateFlow client version installed.""" version = __version__ """*NiBabies*'s version.""" @@ -287,7 +287,7 @@ class environment(_Config): class nipype(_Config): """Nipype settings.""" - crashfile_format = "txt" + crashfile_format = 'txt' """The file format for crashfiles, either text or pickle.""" get_linked_libs = False """Run NiPype's tool to enlist linked libraries for every interface.""" @@ -297,11 +297,11 @@ class nipype(_Config): """Number of processes (compute tasks) that can be run in parallel (multiprocessing only).""" omp_nthreads = _default_omp_threads """Number of CPUs a single process can access for multithreaded execution.""" - plugin = "MultiProc" + plugin = 'MultiProc' """NiPype's execution plugin.""" plugin_args = { - "maxtasksperchild": 1, - "raise_insufficient": False, + 'maxtasksperchild': 1, + 'raise_insufficient': False, } """Settings for NiPype's execution plugin.""" resource_monitor = False @@ -313,13 +313,13 @@ class nipype(_Config): def get_plugin(cls): """Format a dictionary for Nipype consumption.""" out = { - "plugin": cls.plugin, - "plugin_args": cls.plugin_args, + 'plugin': cls.plugin, + 'plugin_args': cls.plugin_args, } - if cls.plugin in ("MultiProc", "LegacyMultiProc"): - out["plugin_args"]["n_procs"] = int(cls.nprocs) + if cls.plugin in ('MultiProc', 'LegacyMultiProc'): + out['plugin_args']['n_procs'] = int(cls.nprocs) if cls.memory_gb: - out["plugin_args"]["memory_gb"] = float(cls.memory_gb) + out['plugin_args']['memory_gb'] = float(cls.memory_gb) return out @classmethod @@ -331,10 +331,10 @@ def init(cls): if cls.resource_monitor: ncfg.update_config( { - "monitoring": { - "enabled": 
cls.resource_monitor, - "sample_frequency": "0.5", - "summary_append": True, + 'monitoring': { + 'enabled': cls.resource_monitor, + 'sample_frequency': '0.5', + 'summary_append': True, } } ) @@ -343,12 +343,12 @@ def init(cls): # Nipype config (logs and execution) ncfg.update_config( { - "execution": { - "crashdump_dir": str(execution.log_dir), - "crashfile_format": cls.crashfile_format, - "get_linked_libs": cls.get_linked_libs, - "stop_on_first_crash": cls.stop_on_first_crash, - "check_version": False, # disable future telemetry + 'execution': { + 'crashdump_dir': str(execution.log_dir), + 'crashfile_format': cls.crashfile_format, + 'get_linked_libs': cls.get_linked_libs, + 'stop_on_first_crash': cls.stop_on_first_crash, + 'check_version': False, # disable future telemetry } } ) @@ -411,6 +411,8 @@ class execution(_Config): output_spaces = None """List of (non)standard spaces designated (with the ``--output-spaces`` flag of the command line) as spatial references for outputs.""" + reference_anat = None + """Force usage of this anatomical scan as the structural reference.""" reports_only = False """Only build the reports, based on the reportlets found in a cached working directory.""" run_uuid = f"{strftime('%Y%m%d-%H%M%S')}_{uuid4()}" @@ -427,7 +429,7 @@ class execution(_Config): """The root folder of the TemplateFlow client.""" unique_labels = None """Combinations of subject + session identifiers to be preprocessed.""" - work_dir = Path("work").absolute() + work_dir = Path('work').absolute() """Path to a working directory where intermediate results will be available.""" write_graph = False """Write out the computational graph corresponding to the planned preprocessing.""" @@ -435,20 +437,20 @@ class execution(_Config): _layout = None _paths = ( - "anat_derivatives", - "bids_dir", - "bids_database_dir", - "derivatives", - "fs_license_file", - "fs_subjects_dir", - "layout", - "log_dir", - "mcribs_dir", - "nibabies_dir", - "output_dir", - "segmentation_atlases_dir", - "templateflow_home", - "work_dir", + 'anat_derivatives', + 'bids_dir', + 'bids_database_dir', + 'derivatives', + 'fs_license_file', + 'fs_subjects_dir', + 'layout', + 'log_dir', + 'mcribs_dir', + 'nibabies_dir', + 'output_dir', + 'segmentation_atlases_dir', + 'templateflow_home', + 'work_dir', ) @classmethod @@ -461,27 +463,27 @@ def init(cls): ] if cls.fs_license_file and Path(cls.fs_license_file).is_file(): - os.environ["FS_LICENSE"] = str(cls.fs_license_file) + os.environ['FS_LICENSE'] = str(cls.fs_license_file) if cls._layout is None: import re from bids.layout import BIDSLayout, BIDSLayoutIndexer - _db_path = cls.bids_database_dir or (cls.work_dir / cls.run_uuid / "bids_db") + _db_path = cls.bids_database_dir or (cls.work_dir / cls.run_uuid / 'bids_db') _db_path.mkdir(exist_ok=True, parents=True) # Recommended after PyBIDS 12.1 _indexer = BIDSLayoutIndexer( validate=False, ignore=( - "code", - "stimuli", - "sourcedata", - "models", - re.compile(r"^\."), + 'code', + 'stimuli', + 'sourcedata', + 'models', + re.compile(r'^\.'), re.compile( - r"sub-[a-zA-Z0-9]+(/ses-[a-zA-Z0-9]+)?/(beh|dwi|eeg|ieeg|meg|perf)" + r'sub-[a-zA-Z0-9]+(/ses-[a-zA-Z0-9]+)?/(beh|dwi|eeg|ieeg|meg|perf)' ), ), ) @@ -500,7 +502,7 @@ def _unserialize_bids_queries(queries): for acq, filters in queries.items(): queries[acq] = { - k: getattr(Query, v[7:-4]) if not isinstance(v, Query) and "Query" in v else v + k: getattr(Query, v[7:-4]) if not isinstance(v, Query) and 'Query' in v else v for k, v in filters.items() } return queries @@ -510,7 +512,7 @@ def 
_unserialize_bids_queries(queries): if cls.derivatives_filters: cls.derivatives_filters = _unserialize_bids_queries(cls.derivatives_filters) - if "all" in cls.debug: + if 'all' in cls.debug: cls.debug = list(DEBUG_MODES) @@ -528,7 +530,7 @@ class workflow(_Config): age_months = None """Age (in months)""" - analysis_level = "participant" + analysis_level = 'participant' """Level of analysis.""" anat_only = False """Execute the anatomical preprocessing only.""" @@ -539,7 +541,7 @@ class workflow(_Config): (positive = exact, negative = maximum).""" bold2t1w_dof = None """Degrees of freedom of the BOLD-to-T1w registration steps.""" - bold2t1w_init = "register" + bold2t1w_init = 'register' """Whether to use standard coregistration ('register') or to initialize coregistration from the BOLD image-header ('header').""" cifti_output = None @@ -558,6 +560,8 @@ class workflow(_Config): """Run FreeSurfer ``recon-all`` with the ``-hires`` flag.""" ignore = None """Ignore particular steps for *nibabies*.""" + level = None + """Level of preprocessing to complete. One of ['minimal', 'resampling', 'full'].""" longitudinal = False """Run FreeSurfer ``recon-all`` with the ``-logitudinal`` flag.""" medial_surface_nan = None @@ -574,10 +578,10 @@ class workflow(_Config): """Run FreeSurfer's surface reconstruction.""" skull_strip_fixed_seed = False """Fix a seed for skull-stripping.""" - skull_strip_template = "UNCInfant:cohort-1" + skull_strip_template = 'UNCInfant:cohort-1' """Change default brain extraction template.""" - skull_strip_t1w = "force" - """Skip brain extraction of the T1w image (default is ``force``, meaning that + skull_strip_anat = 'force' + """Skip brain extraction of the anatomical images (default is ``force``, meaning that *nibabies* will run brain extraction of the T1w).""" slice_time_ref = 0.5 """The time of the reference slice to correct BOLD values to, as a fraction @@ -587,7 +591,7 @@ class workflow(_Config): spaces = None """Keeps the :py:class:`~niworkflows.utils.spaces.SpatialReferences` instance keeping standard and nonstandard spaces.""" - surface_recon_method = "infantfs" + surface_recon_method = 'infantfs' """Method to use for surface reconstruction.""" use_aroma = None """Run ICA-:abbr:`AROMA (automatic removal of motion artifacts)`.""" @@ -601,18 +605,18 @@ class workflow(_Config): class loggers: """Keep loggers easily accessible (see :py:func:`init`).""" - _fmt = "%(asctime)s,%(msecs)d %(name)-2s " "%(levelname)-2s:\n\t %(message)s" - _datefmt = "%y%m%d-%H:%M:%S" + _fmt = '%(asctime)s,%(msecs)d %(name)-2s ' '%(levelname)-2s:\n\t %(message)s' + _datefmt = '%y%m%d-%H:%M:%S' default = logging.getLogger() """The root logger.""" - cli = logging.getLogger("cli") + cli = logging.getLogger('cli') """Command-line interface logging.""" - workflow = logging.getLogger("nipype.workflow") + workflow = logging.getLogger('nipype.workflow') """NiPype's workflow logger.""" - interface = logging.getLogger("nipype.interface") + interface = logging.getLogger('nipype.interface') """NiPype's interface logger.""" - utils = logging.getLogger("nipype.utils") + utils = logging.getLogger('nipype.utils') """NiPype's utils logger.""" @classmethod @@ -636,7 +640,7 @@ def init(cls): cls.workflow.setLevel(execution.log_level) cls.utils.setLevel(execution.log_level) ncfg.update_config( - {"logging": {"log_directory": str(execution.log_dir), "log_to_file": True}} + {'logging': {'log_directory': str(execution.log_dir), 'log_to_file': True}} ) @@ -666,7 +670,7 @@ def init(cls): def _set_ants_seed(): """Fix 
random seed for antsRegistration, antsAI, antsMotionCorr""" val = random.randint(1, 65536) - os.environ["ANTS_RANDOM_SEED"] = str(val) + os.environ['ANTS_RANDOM_SEED'] = str(val) return val @@ -696,7 +700,7 @@ def load(filename, skip=None): filename = Path(filename) settings = loads(filename.read_text()) for sectionname, configs in settings.items(): - if sectionname != "environment": + if sectionname != 'environment': section = getattr(sys.modules[__name__], sectionname) ignore = skip.get(sectionname) section.load(configs, ignore=ignore) @@ -705,17 +709,17 @@ def load(filename, skip=None): def get(flat=False): """Get config as a dict.""" settings = { - "environment": environment.get(), - "execution": execution.get(), - "workflow": workflow.get(), - "nipype": nipype.get(), - "seeds": seeds.get(), + 'environment': environment.get(), + 'execution': execution.get(), + 'workflow': workflow.get(), + 'nipype': nipype.get(), + 'seeds': seeds.get(), } if not flat: return settings return { - ".".join((section, k)): v + '.'.join((section, k)): v for section, configs in settings.items() for k, v in configs.items() } @@ -737,8 +741,8 @@ def to_filename(filename): def _process_initializer(cwd, omp_nthreads): """Initialize the environment of the child process.""" os.chdir(cwd) - os.environ["NIPYPE_NO_ET"] = "1" - os.environ["OMP_NUM_THREADS"] = f"{omp_nthreads}" + os.environ['NIPYPE_NO_ET'] = '1' + os.environ['OMP_NUM_THREADS'] = f'{omp_nthreads}' def restore_env(): From d67ad10553dba01adee8b6321e9cf0a2f856c758 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 25 Apr 2024 01:40:16 -0400 Subject: [PATCH 033/142] FIX: Add session label when fetching derivatives --- nibabies/utils/derivatives.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/nibabies/utils/derivatives.py b/nibabies/utils/derivatives.py index fa3514d5..4d5723ed 100644 --- a/nibabies/utils/derivatives.py +++ b/nibabies/utils/derivatives.py @@ -10,6 +10,7 @@ def collect_anatomical_derivatives( derivatives_dir: Path | str, subject_id: str, std_spaces: list, + session_id: str | None, spec: dict | None = None, patterns: list | None = None, ): @@ -38,8 +39,14 @@ def collect_anatomical_derivatives( layout = BIDSLayout(derivatives_dir, config=deriv_config, validate=False) derivs_cache = {} + base_qry = { + 'subject': subject_id, + } + if session_id is not None: + base_qry['session'] = session_id + for key, qry in spec['baseline'].items(): - qry['subject'] = subject_id + qry.update(base_qry) item = layout.get(return_type='filename', **qry) if not item: continue @@ -47,7 +54,7 @@ def collect_anatomical_derivatives( derivs_cache[key] = item[0] if len(item) == 1 else item for key, qry in spec['coreg'].items(): # T1w->T2w, T2w->T1w - qry['subject'] = subject_id + qry.update(base_qry) item = layout.get(return_type='filename', **qry) if not item: continue @@ -58,7 +65,7 @@ def collect_anatomical_derivatives( space = _space.replace(':cohort-', '+') for key, qry in spec['transforms'].items(): qry = qry.copy() - qry['subject'] = subject_id + qry.update(base_qry) qry['from'] = qry['from'] or space qry['to'] = qry['to'] or space item = layout.get(return_type='filename', **qry) @@ -67,7 +74,7 @@ def collect_anatomical_derivatives( transforms.setdefault(_space, {})[key] = item[0] if len(item) == 1 else item for key, qry in spec['surfaces'].items(): - qry['subject'] = subject_id + qry.update(base_qry) item = layout.get(return_type='filename', **qry) if not item or len(item) != 2: continue From 
224f6a172d23ee4481b897a90224d26620c68add Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 25 Apr 2024 02:19:33 -0400 Subject: [PATCH 034/142] FIX: Proper typing, parameters --- nibabies/workflows/anatomical/fit.py | 31 +++++++++++++++++++--------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index c36e0add..95741096 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -79,7 +79,7 @@ def init_infant_anat_fit_wf( cifti_output: ty.Literal['91k', '170k', False], msm_sulc: bool = False, name: str = 'infant_anat_fit_wf', -): +) -> Workflow: """ Stage the anatomical preprocessing steps: - T1w reference @@ -115,7 +115,6 @@ def init_infant_anat_fit_wf( spaces=spaces, cifti_output=cifti_output, ) - return workflow anat = reference_anat.lower() @@ -351,7 +350,9 @@ def init_infant_anat_fit_wf( if reference_anat == 'T1w': workflow.connect([ - (t1w_template_wf, sourcefile_buffer, [('outputnode.anat_valid_list', 'anat_source_files')]), + (t1w_template_wf, sourcefile_buffer, [ + ('outputnode.anat_valid_list', 'anat_source_files'), + ]), ]) # fmt:skip workflow.connect([ @@ -501,7 +502,8 @@ def init_infant_anat_fit_wf( (t2w_buffer, transform_t2w_mask, [('t2w_mask', 'input_image')]), (coreg_buffer, transform_t2w_mask, [('t2w2t1w_xfm', 'transforms')]), (transform_t2w_mask, apply_t1w_mask, [('output_image', 'in_mask')]), - (t1w_buffer, apply_t1w_mask, [('t1w_preproc', 'in_file')]), # TODO: Unsure about this connection + (t1w_buffer, apply_t1w_mask, [('t1w_preproc', 'in_file')]), + # TODO: Unsure about this connection^ ]) # fmt:skip # Save T1w mask @@ -606,7 +608,8 @@ def init_infant_anat_fit_wf( (t1w_buffer, transform_t1w_mask, [('t1w_mask', 'input_image')]), (coreg_buffer, transform_t1w_mask, [('t1w2t2w_xfm', 'transforms')]), (transform_t1w_mask, apply_t2w_mask, [('output_image', 'in_mask')]), - (t2w_buffer, apply_t1w_mask, [('t2w_preproc', 'in_file')]), # TODO: Unsure about this connection + (t2w_buffer, apply_t1w_mask, [('t2w_preproc', 'in_file')]), + # TODO: Unsure about this connection^ ]) # fmt:skip else: LOGGER.info('ANAT Brain mask will be calculated using T2w') @@ -650,7 +653,10 @@ def init_infant_anat_fit_wf( if t1w_preproc and t2w_preproc: if t1w2t2w_xfm: LOGGER.info('ANAT Found T1w-T2w xfm') - desc += ' A T1w-T2w coregistration transform was provided as input and used throughout the workflow.' + desc += ( + ' A T1w-T2w coregistration transform was provided as input and used throughout ' + 'the workflow.' 
+ ) coreg_buffer.inputs.t1w2t2w_xfm = t1w2t2w_xfm if t2w2t1w_xfm: LOGGER.info('ANAT Found T2w-T1w xfm') @@ -788,7 +794,7 @@ def init_infant_anat_fit_wf( LOGGER.info(f'ANAT Stage 5: Found pre-computed registrations for {found_xfms}') # Only refine mask if necessary - if anat_mask or recon_method == None: + if anat_mask or recon_method is None: workflow.connect([ (anat_buffer, refined_buffer, [ ('anat_mask', 'anat_mask'), @@ -1157,7 +1163,7 @@ def init_infant_single_anat_fit_wf( spaces: 'SpatialReferences', cifti_output: ty.Literal['91k', '170k', False], name: str = 'infant_single_anat_fit_wf', -): +) -> Workflow: inputnode = pe.Node( niu.IdentityInterface( fields=['anat', 'roi', 'flair', 'subjects_dir', 'subject_id'], @@ -1200,17 +1206,20 @@ def init_infant_single_anat_fit_wf( anat = reference_anat.lower() workflow = Workflow(name=f'infant_single_{anat}_fit_wf') - workflow.add_nodes([inputnode]) + workflow.add_nodes([inputnode, outputnode]) desc = ( '\nAnatomical data preprocessing\n\n: ' f'A total of {len(anatomicals)} {anat} images were found ' 'within the input BIDS dataset.\n' ) + workflow.__desc__ = desc + return workflow -def init_anat_preproc_wf( +def init_infant_anat_full_wf( *, + reference_anat: ty.Literal['T1w', 'T2w'], age_months: int, t1w: list, t2w: list, @@ -1230,6 +1239,7 @@ def init_anat_preproc_wf( skull_strip_fixed_seed: bool = False, name: str = 'infant_anat_wf', ) -> pe.Workflow: + """The full version of the fit workflow.""" workflow = pe.Workflow(name=name) inputnode = pe.Node( @@ -1259,6 +1269,7 @@ def init_anat_preproc_wf( ) msm_sulc = False # Not enabled for now anat_fit_wf = init_infant_anat_fit_wf( + reference_anat=reference_anat, age_months=age_months, bids_root=bids_root, output_dir=output_dir, From cbc4cb30637f0ab4ce4396c4b5290c6f5a424c3d Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 25 Apr 2024 02:23:54 -0400 Subject: [PATCH 035/142] ENH: Add anatomical processing --- nibabies/workflows/base.py | 678 +++++++++++++++++++++++-------------- 1 file changed, 417 insertions(+), 261 deletions(-) diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index 08628d77..d069f0ef 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -54,7 +54,7 @@ from nibabies.interfaces import DerivativesDataSink from nibabies.interfaces.reports import AboutSummary, SubjectSummary from nibabies.utils.bids import parse_bids_for_age_months -from nibabies.workflows.bold import init_func_preproc_wf +from nibabies.workflows.anatomical.fit import init_infant_anat_fit_wf, init_infant_anat_full_wf if ty.TYPE_CHECKING: from bids.layout import BIDSLayout @@ -127,21 +127,20 @@ def init_nibabies_wf(subworkflows_list): ) output_spaces = init_workflow_spaces(execution_spaces, age) - # skull strip template cohort - single_subject_wf = init_single_subject_wf( - subject_id, - session_id=session_id, - age=age, - spaces=output_spaces, - ) - bids_level = [f'sub-{subject_id}'] - if session_id: + if session_id is not None: bids_level.append(f'ses-{session_id}') log_dir = ( config.execution.nibabies_dir.joinpath(*bids_level) / 'log' / config.execution.run_uuid ) + # skull strip template cohort + single_subject_wf = init_single_subject_wf( + subject_id=subject_id, + age=age, + session_id=session_id, + spaces=output_spaces, + ) single_subject_wf.config['execution']['crashdump_dir'] = str(log_dir) for node in single_subject_wf._get_all_nodes(): @@ -159,9 +158,10 @@ def init_nibabies_wf(subworkflows_list): def init_single_subject_wf( + *, subject_id: str, + age: int, 
session_id: str | None = None, - age: int | None = None, spaces: SpatialReferences | None = None, ): """ @@ -205,15 +205,41 @@ def init_single_subject_wf( from niworkflows.utils.bids import collect_data from niworkflows.utils.spaces import Reference - from ..utils.bids import Derivatives from ..utils.misc import fix_multi_source_name - from .anatomical import init_infant_anat_wf, init_infant_single_anat_wf - name = ( - f'single_subject_{subject_id}_{session_id}_wf' - if session_id - else f'single_subject_{subject_id}_wf' - ) + subject_session_id = _subject_session_id(subject_id, session_id) + workflow = Workflow(name=f'single_subject_{subject_session_id}_wf') + workflow.__desc__ = f""" +Results included in this manuscript come from preprocessing +performed using *NiBabies* {config.environment.version}, +derived from fMRIPrep (@fmriprep1; @fmriprep2; RRID:SCR_016216). +The underlying workflow engine used is *Nipype* {config.environment.nipype_version} +(@nipype1; @nipype2; RRID:SCR_002502). + +""" + workflow.__postdesc__ = f""" + +Many internal operations of *NiBabies* use +*Nilearn* {NILEARN_VERSION} [@nilearn, RRID:SCR_001362], +mostly within the functional processing workflow. +For more details of the pipeline, see [the section corresponding +to workflows in *nibabies*'s documentation]\ +(https://nibabies.readthedocs.io/en/latest/workflows.html \ +"NiBabies's documentation"). + + +### Copyright Waiver + +The above boilerplate text was automatically generated by NiBabies +with the express intention that users should copy and paste this +text into their manuscripts *unchanged*. +It is released under the [CC0]\ +(https://creativecommons.org/publicdomain/zero/1.0/) license. + +### References + +""" + subject_data = collect_data( config.execution.layout, subject_id, @@ -225,13 +251,12 @@ def init_single_subject_wf( if 'flair' in config.workflow.ignore: subject_data['flair'] = [] + if 't1w' in config.workflow.ignore: + subject_data['t1w'] = [] if 't2w' in config.workflow.ignore: subject_data['t2w'] = [] anat_only = config.workflow.anat_only - derivatives = Derivatives(bids_root=config.execution.layout.root) - contrast = 'T1w' if subject_data['t1w'] else 'T2w' - single_modality = not (subject_data['t1w'] and subject_data['t2w']) # Make sure we always go through these two checks if not anat_only and not subject_data['bold']: task_id = config.execution.task_id @@ -250,58 +275,68 @@ def init_single_subject_wf( # for run in subject_data['bold'] # ] - if config.execution.derivatives: - for deriv_path in config.execution.derivatives: - config.loggers.workflow.info('Searching for derivatives in %s', deriv_path) - derivatives.populate( - deriv_path, - subject_id, - session_id=session_id, - ) - config.loggers.workflow.info('Found precomputed derivatives %s', derivatives) - - workflow = Workflow(name=name) - workflow.__desc__ = f""" -Results included in this manuscript come from preprocessing -performed using *NiBabies* {config.environment.version}, -derived from fMRIPrep (@fmriprep1; @fmriprep2; RRID:SCR_016216). -The underlying workflow engine used is *Nipype* {config.environment.nipype_version} -(@nipype1; @nipype2; RRID:SCR_002502). - -""" - workflow.__postdesc__ = f""" - -Many internal operations of *NiBabies* use -*Nilearn* {NILEARN_VERSION} [@nilearn, RRID:SCR_001362], -mostly within the functional processing workflow. 
-For more details of the pipeline, see [the section corresponding -to workflows in *nibabies*'s documentation]\ -(https://nibabies.readthedocs.io/en/latest/workflows.html \ -"NiBabies's documentation"). - + # if subject_data['roi']: + # warnings.warn( + # f"Lesion mask {subject_data['roi']} found. " + # "Future versions of fMRIPrep will use alternative conventions. " + # "Please refer to the documentation before upgrading.", + # FutureWarning, + # stacklevel=1, + # ) -### Copyright Waiver + msm_sulc = False -The above boilerplate text was automatically generated by NiBabies -with the express intention that users should copy and paste this -text into their manuscripts *unchanged*. -It is released under the [CC0]\ -(https://creativecommons.org/publicdomain/zero/1.0/) license. - -### References + anatomical_cache = {} + if config.execution.derivatives: + from nibabies.utils.derivatives import collect_anatomical_derivatives + + std_spaces = spaces.get_spaces(nonstandard=False, dim=(3,)) + std_spaces.append('fsnative') + for deriv_dir in config.execution.derivatives.values(): + anatomical_cache.update( + collect_anatomical_derivatives( + derivatives_dir=deriv_dir, + subject_id=subject_id, + session_id=session_id, + std_spaces=std_spaces, + ) + ) -""" + # Determine some session level options here, as we should have + # all the required information + if config.workflow.surface_recon_method == 'auto': + if age <= 8: + recon_method = 'mcribs' + elif age <= 24: + recon_method = 'infantfs' + else: + recon_method = 'freesurfer' + + preferred_anat = config.execution.reference_anat + t1w = subject_data['t1w'] + t2w = subject_data['t2w'] + if not t1w and t2w: + reference_anat = 'T1w' if t1w else 'T2w' + if preferred_anat and reference_anat != preferred_anat: + raise AttributeError( + f'Requested to use {preferred_anat} as anatomical reference but none available' + ) + else: + if not (reference_anat := preferred_anat): + reference_anat = 'T2w' if age <= 8 else 'T1w' + anat = reference_anat.lower() # To be used for workflow connections - nibabies_dir = str(config.execution.nibabies_dir) + bids_root = str(config.execution.bids_dir) + output_dir = str(config.execution.nibabies_dir) + omp_nthreads = config.nipype.omp_nthreads inputnode = pe.Node(niu.IdentityInterface(fields=['subjects_dir']), name='inputnode') - # TODO: Revisit T1w/T2w restrictions for BIDSDataGrabber bidssrc = pe.Node( BIDSDataGrabber( subject_data=subject_data, - anat_only=anat_only, - anat_derivatives=False, + anat_only=config.workflow.anat_only, + anat_derivatives=anatomical_cache or None, subject_id=subject_id, ), name='bidssrc', @@ -314,6 +349,8 @@ def init_single_subject_wf( summary = pe.Node( SubjectSummary( + anatomical_reference=reference_anat, + recon_method=recon_method, std_spaces=spaces.get_spaces(nonstandard=False), nstd_spaces=spaces.get_spaces(standard=False), ), @@ -329,7 +366,7 @@ def init_single_subject_wf( ds_report_summary = pe.Node( DerivativesDataSink( - base_directory=nibabies_dir, + base_directory=output_dir, desc='summary', datatype='figures', dismiss_entities=('echo',), @@ -340,7 +377,7 @@ def init_single_subject_wf( ds_report_about = pe.Node( DerivativesDataSink( - base_directory=nibabies_dir, + base_directory=output_dir, desc='about', datatype='figures', dismiss_entities=('echo',), @@ -350,227 +387,346 @@ def init_single_subject_wf( ) wf_args = { - 'ants_affine_init': True, 'age_months': age, - 'contrast': contrast, - 't1w': subject_data['t1w'], - 't2w': subject_data['t2w'], - 'bids_root': 
config.execution.bids_dir, - 'derivatives': derivatives, - 'freesurfer': config.workflow.run_reconall, - 'hires': config.workflow.hires, + 't1w': t1w, + 't2w': t2w, + 'flair': subject_data['flair'], + 'bids_root': bids_root, 'longitudinal': config.workflow.longitudinal, - 'omp_nthreads': config.nipype.omp_nthreads, - 'output_dir': nibabies_dir, + 'msm_sulc': msm_sulc, + 'omp_nthreads': omp_nthreads, + 'output_dir': config.execution.nibabies_dir, + 'precomputed': anatomical_cache, 'segmentation_atlases': config.execution.segmentation_atlases_dir, - 'skull_strip_mode': config.workflow.skull_strip_t1w, + 'skull_strip_fixed_seed': config.workflow.skull_strip_fixed_seed, + 'skull_strip_mode': config.workflow.skull_strip_anat, 'skull_strip_template': Reference.from_string(config.workflow.skull_strip_template)[0], + 'recon_method': recon_method, + 'reference_anat': reference_anat, 'sloppy': config.execution.sloppy, 'spaces': spaces, 'cifti_output': config.workflow.cifti_output, } - anat_preproc_wf = ( - init_infant_anat_wf(**wf_args) - if not single_modality - else init_infant_single_anat_wf(**wf_args) + + anat_wf = ( + init_infant_anat_full_wf(**wf_args) + if config.workflow.level == 'full' + else init_infant_anat_fit_wf(**wf_args) ) - # fmt: off + # Ensure surface reconstruction is run at the per-session level + anat_wf.inputs.inputnode.subject_id = subject_session_id + + # allow to run with anat-fast-track on fMRI-only dataset + if ( + 't1w_preproc' in anatomical_cache or 't2w_preproc' in anatomical_cache + ) and not subject_data['t1w']: + workflow.connect([ + (bidssrc, bids_info, [(('bold', fix_multi_source_name), 'in_file')]), + (anat_wf, summary, [('outputnode.anat_preproc', anat)]), + (anat_wf, ds_report_summary, [('outputnode.anat_preproc', 'source_file')]), + (anat_wf, ds_report_about, [('outputnode.anat_preproc', 'source_file')]), + ]) # fmt:skip + else: + workflow.connect([ + (bidssrc, bids_info, [(('t1w', fix_multi_source_name), 'in_file')]), + (bidssrc, summary, [('t1w', 't1w')]), + (bidssrc, ds_report_summary, [(('t1w', fix_multi_source_name), 'source_file')]), + (bidssrc, ds_report_about, [(('t1w', fix_multi_source_name), 'source_file')]), + ]) # fmt:skip + workflow.connect([ - (inputnode, anat_preproc_wf, [ - ('subjects_dir', 'inputnode.subjects_dir'), - ]), - (inputnode, summary, [ - ('subjects_dir', 'subjects_dir'), - ]), - (bidssrc, summary, [ - ('bold', 'bold'), - ]), - (bids_info, summary, [ - ('subject', 'subject_id'), - ]), - (bids_info, anat_preproc_wf, [ - (('subject', _prefix), 'inputnode.subject_id'), - ]), - (bidssrc, anat_preproc_wf, [ + (inputnode, anat_wf, [('subjects_dir', 'inputnode.subjects_dir')]), + (bidssrc, anat_wf, [ ('t1w', 'inputnode.t1w'), ('t2w', 'inputnode.t2w'), + ('roi', 'inputnode.roi'), + ('flair', 'inputnode.flair'), ]), - (summary, ds_report_summary, [ - ('out_report', 'in_file'), - ]), - (about, ds_report_about, [ - ('out_report', 'in_file'), - ]), - ]) - - workflow.connect([ - (bidssrc, bids_info, [ - ((contrast.lower(), fix_multi_source_name), 'in_file'), - ]), - (bidssrc, summary, [ - ('t1w', 't1w'), - ('t2w', 't2w'), - ]), - (bidssrc, ds_report_summary, [ - ((contrast.lower(), fix_multi_source_name), 'source_file'), - ]), - (bidssrc, ds_report_about, [ - ((contrast.lower(), fix_multi_source_name), 'source_file'), - ]), - ]) - # fmt: on - - # Overwrite ``out_path_base`` of smriprep's DataSinks - for node in workflow.list_node_names(): - if node.split('.')[-1].startswith('ds_'): - workflow.get_node(node).interface.out_path_base = '' + # 
Reporting connections + (inputnode, summary, [('subjects_dir', 'subjects_dir')]), + (bidssrc, summary, [('t2w', 't2w'), ('bold', 'bold')]), + (bids_info, summary, [('subject', 'subject_id')]), + (summary, ds_report_summary, [('out_report', 'in_file')]), + (about, ds_report_about, [('out_report', 'in_file')]), + ]) # fmt:skip + + # template_iterator_wf = None + # select_MNI2009c_xfm = None + if config.workflow.level == 'full': + # Much of the logic here is extracted into a separate, fuller anatomical workflow + # TODO: + # - Grab template_iterator_wf workflow + # - Grab select_MNI2009c_xfm node + pass + + # if 'MNI152NLin2009cAsym' in spaces.get_spaces(): + # select_MNI2009c_xfm = pe.Node( + # KeySelect(fields=['std2anat_xfm'], key='MNI152NLin2009cAsym'), + # name='select_MNI2009c_xfm', + # run_without_submitting=True, + # ) + # workflow.connect([ + # (anat_fit_wf, select_MNI2009c_xfm, [ + # ('outputnode.std2anat_xfm', 'std2anat_xfm'), + # ('outputnode.template', 'keys'), + # ]), + # ]) # fmt:skip + + # Thread MNI152NLin6Asym standard outputs to CIFTI subworkflow, skipping + # the iterator, which targets only output spaces. + # This can lead to duplication in the working directory if people actually + # want MNI152NLin6Asym outputs, but we'll live with it. + # if config.workflow.cifti_output: + # from smriprep.interfaces.templateflow import TemplateFlowSelect + + # ref = Reference( + # 'MNI152NLin6Asym', + # {'res': 2 if config.workflow.cifti_output == '91k' else 1}, + # ) + + # select_MNI6_xfm = pe.Node( + # KeySelect(fields=['anat2std_xfm'], key=ref.fullname), + # name='select_MNI6', + # run_without_submitting=True, + # ) + # select_MNI6_tpl = pe.Node( + # TemplateFlowSelect(template=ref.fullname, resolution=ref.spec['res']), + # name='select_MNI6_tpl', + # ) + # workflow.connect([ + # (anat_fit_wf, select_MNI6_xfm, [ + # ('outputnode.anat2std_xfm', 'anat2std_xfm'), + # ('outputnode.template', 'keys'), + # ]), + # ]) # fmt:skip + + if config.workflow.anat_only: + return clean_datasinks(workflow) + + # TODO: FMAP, BOLD PROCESSING + return workflow - if anat_only: - return workflow + # # fmt: off + # workflow.connect([ + # (inputnode, anat_preproc_wf, [ + # ('subjects_dir', 'inputnode.subjects_dir'), + # ]), + # (inputnode, summary, [ + # ('subjects_dir', 'subjects_dir'), + # ]), + # (bidssrc, summary, [ + # ('bold', 'bold'), + # ]), + # (bids_info, summary, [ + # ('subject', 'subject_id'), + # ]), + # (bids_info, anat_preproc_wf, [ + # (('subject', _prefix), 'inputnode.subject_id'), + # ]), + # (bidssrc, anat_preproc_wf, [ + # ('t1w', 'inputnode.t1w'), + # ('t2w', 'inputnode.t2w'), + # ]), + # (summary, ds_report_summary, [ + # ('out_report', 'in_file'), + # ]), + # (about, ds_report_about, [ + # ('out_report', 'in_file'), + # ]), + # ]) + + # workflow.connect([ + # (bidssrc, bids_info, [ + # ((contrast.lower(), fix_multi_source_name), 'in_file'), + # ]), + # (bidssrc, summary, [ + # ('t1w', 't1w'), + # ('t2w', 't2w'), + # ]), + # (bidssrc, ds_report_summary, [ + # ((contrast.lower(), fix_multi_source_name), 'source_file'), + # ]), + # (bidssrc, ds_report_about, [ + # ((contrast.lower(), fix_multi_source_name), 'source_file'), + # ]), + # ]) + # # fmt: on + + # # Overwrite ``out_path_base`` of smriprep's DataSinks + # for node in workflow.list_node_names(): + # if node.split('.')[-1].startswith('ds_'): + # workflow.get_node(node).interface.out_path_base = '' + + # if anat_only: + # return workflow # Susceptibility distortion correction - fmap_estimators = None - if 
any((config.workflow.use_syn_sdc, config.workflow.force_syn)): - config.loggers.workflow.critical('SyN processing is not yet implemented.') - - if 'fieldmaps' not in config.workflow.ignore: - from sdcflows.utils.wrangler import find_estimators - - # SDC Step 1: Run basic heuristics to identify available data for fieldmap estimation - # For now, no fmapless - fmap_estimators = find_estimators( - layout=config.execution.layout, - subject=subject_id, - sessions=[session_id], - fmapless=False, # config.workflow.use_syn, - force_fmapless=False, # config.workflow.force_syn, - ) - - # Append the functional section to the existing anatomical exerpt - # That way we do not need to stream down the number of bold datasets - anat_preproc_wf.__postdesc__ = anat_preproc_wf.__postdesc__ or '' - func_pre_desc = f""" - -Functional data preprocessing -: For each of the {len(subject_data['bold'])} BOLD runs found per subject (across all -tasks and sessions), the following preprocessing was performed.""" - func_preproc_wfs = [] - has_fieldmap = bool(fmap_estimators) - for bold_file in subject_data['bold']: - func_preproc_wf = init_func_preproc_wf(bold_file, spaces, has_fieldmap=has_fieldmap) - if func_preproc_wf is None: - continue - - func_preproc_wf.__desc__ = func_pre_desc + (func_preproc_wf.__desc__ or '') - # fmt:off - workflow.connect([ - (anat_preproc_wf, func_preproc_wf, [ - ('outputnode.anat_preproc', 'inputnode.anat_preproc'), - ('outputnode.anat_mask', 'inputnode.anat_mask'), - ('outputnode.anat_brain', 'inputnode.anat_brain'), - ('outputnode.anat_dseg', 'inputnode.anat_dseg'), - ('outputnode.anat_aseg', 'inputnode.anat_aseg'), - ('outputnode.anat_aparc', 'inputnode.anat_aparc'), - ('outputnode.anat_tpms', 'inputnode.anat_tpms'), - ('outputnode.template', 'inputnode.template'), - ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), - ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm'), - # Undefined if --fs-no-reconall, but this is safe - ('outputnode.subjects_dir', 'inputnode.subjects_dir'), - ('outputnode.subject_id', 'inputnode.subject_id'), - ('outputnode.anat2fsnative_xfm', 'inputnode.t1w2fsnative_xfm'), - ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2t1w_xfm'), - ('outputnode.surfaces', 'inputnode.surfaces'), - ('outputnode.morphometrics', 'inputnode.morphometrics'), - ('outputnode.anat_ribbon', 'inputnode.anat_ribbon'), - ('outputnode.sphere_reg_fsLR', 'inputnode.sphere_reg_fsLR'), - ('outputnode.midthickness_fsLR', 'inputnode.midthickness_fsLR'), - ]), - ]) - # fmt:on - func_preproc_wfs.append(func_preproc_wf) - - if not has_fieldmap: - config.loggers.workflow.warning( - 'Data for fieldmap estimation not present. Please note that these data ' - 'will not be corrected for susceptibility distortions.' 
- ) - return workflow - - config.loggers.workflow.info( - f'Fieldmap estimators found: {[e.method for e in fmap_estimators]}' - ) - - from sdcflows import fieldmaps as fm - from sdcflows.workflows.base import init_fmap_preproc_wf - - fmap_wf = init_fmap_preproc_wf( - sloppy=bool(config.execution.sloppy), - debug='fieldmaps' in config.execution.debug, - estimators=fmap_estimators, - omp_nthreads=config.nipype.omp_nthreads, - output_dir=nibabies_dir, - subject=subject_id, - ) - fmap_wf.__desc__ = f""" - -Preprocessing of B0 inhomogeneity mappings +# fmap_estimators = None +# if any((config.workflow.use_syn_sdc, config.workflow.force_syn)): +# config.loggers.workflow.critical('SyN processing is not yet implemented.') + +# if 'fieldmaps' not in config.workflow.ignore: +# from sdcflows.utils.wrangler import find_estimators + +# # SDC Step 1: Run basic heuristics to identify available data for fieldmap estimation +# # For now, no fmapless +# fmap_estimators = find_estimators( +# layout=config.execution.layout, +# subject=subject_id, +# sessions=[session_id], +# fmapless=False, # config.workflow.use_syn, +# force_fmapless=False, # config.workflow.force_syn, +# ) + +# # Append the functional section to the existing anatomical exerpt +# # That way we do not need to stream down the number of bold datasets +# anat_preproc_wf.__postdesc__ = anat_preproc_wf.__postdesc__ or '' +# func_pre_desc = f""" + +# Functional data preprocessing + +# : For each of the {len(subject_data['bold'])} BOLD runs found per subject (across all +# tasks and sessions), the following preprocessing was performed.""" + +# func_preproc_wfs = [] +# has_fieldmap = bool(fmap_estimators) +# for bold_file in subject_data['bold']: +# func_preproc_wf = init_func_preproc_wf(bold_file, spaces, has_fieldmap=has_fieldmap) +# if func_preproc_wf is None: +# continue + +# func_preproc_wf.__desc__ = func_pre_desc + (func_preproc_wf.__desc__ or '') +# # fmt:off +# workflow.connect([ +# (anat_preproc_wf, func_preproc_wf, [ +# ('outputnode.anat_preproc', 'inputnode.anat_preproc'), +# ('outputnode.anat_mask', 'inputnode.anat_mask'), +# ('outputnode.anat_brain', 'inputnode.anat_brain'), +# ('outputnode.anat_dseg', 'inputnode.anat_dseg'), +# ('outputnode.anat_aseg', 'inputnode.anat_aseg'), +# ('outputnode.anat_aparc', 'inputnode.anat_aparc'), +# ('outputnode.anat_tpms', 'inputnode.anat_tpms'), +# ('outputnode.template', 'inputnode.template'), +# ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), +# ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm'), +# # Undefined if --fs-no-reconall, but this is safe +# ('outputnode.subjects_dir', 'inputnode.subjects_dir'), +# ('outputnode.subject_id', 'inputnode.subject_id'), +# ('outputnode.anat2fsnative_xfm', 'inputnode.t1w2fsnative_xfm'), +# ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2t1w_xfm'), +# ('outputnode.surfaces', 'inputnode.surfaces'), +# ('outputnode.morphometrics', 'inputnode.morphometrics'), +# ('outputnode.anat_ribbon', 'inputnode.anat_ribbon'), +# ('outputnode.sphere_reg_fsLR', 'inputnode.sphere_reg_fsLR'), +# ('outputnode.midthickness_fsLR', 'inputnode.midthickness_fsLR'), +# ]), +# ]) +# # fmt:on +# func_preproc_wfs.append(func_preproc_wf) + +# if not has_fieldmap: +# config.loggers.workflow.warning( +# 'Data for fieldmap estimation not present. Please note that these data ' +# 'will not be corrected for susceptibility distortions.' 
+# ) +# return workflow + +# config.loggers.workflow.info( +# f'Fieldmap estimators found: {[e.method for e in fmap_estimators]}' +# ) + +# from sdcflows import fieldmaps as fm +# from sdcflows.workflows.base import init_fmap_preproc_wf + +# fmap_wf = init_fmap_preproc_wf( +# sloppy=bool(config.execution.sloppy), +# debug='fieldmaps' in config.execution.debug, +# estimators=fmap_estimators, +# omp_nthreads=config.nipype.omp_nthreads, +# output_dir=nibabies_dir, +# subject=subject_id, +# ) +# fmap_wf.__desc__ = f""" + +# Preprocessing of B0 inhomogeneity mappings + +# : A total of {len(fmap_estimators)} fieldmaps were found available within the input +# BIDS structure for this particular subject. +# """ + +# for func_preproc_wf in func_preproc_wfs: +# # fmt: off +# workflow.connect([ +# (fmap_wf, func_preproc_wf, [ +# ('outputnode.fmap', 'inputnode.fmap'), +# ('outputnode.fmap_ref', 'inputnode.fmap_ref'), +# ('outputnode.fmap_coeff', 'inputnode.fmap_coeff'), +# ('outputnode.fmap_mask', 'inputnode.fmap_mask'), +# ('outputnode.fmap_id', 'inputnode.fmap_id'), +# ('outputnode.method', 'inputnode.sdc_method'), +# ]), +# ]) +# # fmt: on + +# # Overwrite ``out_path_base`` of sdcflows's DataSinks +# for node in fmap_wf.list_node_names(): +# if node.split('.')[-1].startswith('ds_'): +# fmap_wf.get_node(node).interface.out_path_base = '' + +# # Step 3: Manually connect PEPOLAR +# for estimator in fmap_estimators: +# config.loggers.workflow.info( +# f"""\ +# Setting-up fieldmap "{estimator.bids_id}" ({estimator.method}) with \ +# <{', '.join(s.path.name for s in estimator.sources)}>""" +# ) +# if estimator.method in (fm.EstimatorType.MAPPED, fm.EstimatorType.PHASEDIFF): +# continue + +# suffices = [s.suffix for s in estimator.sources] + +# if estimator.method == fm.EstimatorType.PEPOLAR: +# if set(suffices) == {'epi'} or sorted(suffices) == ['bold', 'epi']: +# fmap_wf_inputs = getattr(fmap_wf.inputs, f'in_{estimator.bids_id}') +# fmap_wf_inputs.in_data = [str(s.path) for s in estimator.sources] +# fmap_wf_inputs.metadata = [s.metadata for s in estimator.sources] +# else: +# raise NotImplementedError( +# 'Sophisticated PEPOLAR schemes (e.g., using DWI+EPI) are unsupported.' +# ) + +# return workflow + + +def _subject_session_id(subject_id: str, session_id: str | None) -> str: + """ + Combine a subject ID with a session ID (if available). + + >>> _subject_session_id('01', None) + 'sub-01' + >>> _subject_session_id('sub-01', '01') + 'sub-01_ses-01' + >>> _subject_session_id('01', 'ses-03') + 'sub-01_ses-03' + """ + entities = [] + entities.append(f'sub-{subject_id}' if not subject_id.startswith('sub-') else subject_id) + if session_id is not None: + entities.append(f'ses-{session_id}' if not session_id.startswith('ses-') else session_id) + return '_'.join(entities) -: A total of {len(fmap_estimators)} fieldmaps were found available within the input -BIDS structure for this particular subject. 
-""" - for func_preproc_wf in func_preproc_wfs: - # fmt: off - workflow.connect([ - (fmap_wf, func_preproc_wf, [ - ('outputnode.fmap', 'inputnode.fmap'), - ('outputnode.fmap_ref', 'inputnode.fmap_ref'), - ('outputnode.fmap_coeff', 'inputnode.fmap_coeff'), - ('outputnode.fmap_mask', 'inputnode.fmap_mask'), - ('outputnode.fmap_id', 'inputnode.fmap_id'), - ('outputnode.method', 'inputnode.sdc_method'), - ]), - ]) - # fmt: on - - # Overwrite ``out_path_base`` of sdcflows's DataSinks - for node in fmap_wf.list_node_names(): +def clean_datasinks(workflow: pe.Workflow) -> pe.Workflow: + # Overwrite ``out_path_base`` of smriprep's DataSinks + for node in workflow.list_node_names(): if node.split('.')[-1].startswith('ds_'): - fmap_wf.get_node(node).interface.out_path_base = '' - - # Step 3: Manually connect PEPOLAR - for estimator in fmap_estimators: - config.loggers.workflow.info( - f"""\ -Setting-up fieldmap "{estimator.bids_id}" ({estimator.method}) with \ -<{', '.join(s.path.name for s in estimator.sources)}>""" - ) - if estimator.method in (fm.EstimatorType.MAPPED, fm.EstimatorType.PHASEDIFF): - continue - - suffices = [s.suffix for s in estimator.sources] - - if estimator.method == fm.EstimatorType.PEPOLAR: - if set(suffices) == {'epi'} or sorted(suffices) == ['bold', 'epi']: - fmap_wf_inputs = getattr(fmap_wf.inputs, f'in_{estimator.bids_id}') - fmap_wf_inputs.in_data = [str(s.path) for s in estimator.sources] - fmap_wf_inputs.metadata = [s.metadata for s in estimator.sources] - else: - raise NotImplementedError( - 'Sophisticated PEPOLAR schemes (e.g., using DWI+EPI) are unsupported.' - ) - + workflow.get_node(node).interface.out_path_base = '' return workflow -def _prefix(subid): - return subid if subid.startswith('sub-') else f'sub-{subid}' - - def init_workflow_spaces(execution_spaces: SpatialReferences, age_months: int): """ Create output spaces at a per-subworkflow level. 
From a3d44b9c0b1c075249570534b42020c4ead96ae3 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 25 Apr 2024 02:30:05 -0400 Subject: [PATCH 036/142] PIN: smriprep working branch --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d6b38be2..2857855d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ dependencies = [ "pybids >= 0.15.0", "requests", "sdcflows @ git+https://github.com/nipreps/sdcflows.git@master", - "smriprep @ git+https://github.com/nipreps/smriprep.git@master", + "smriprep @ git+https://github.com/nipreps/smriprep.git@enh/nibabies-fit-apply", "tedana >= 23.0.2", "templateflow >= 24.2.0", "toml", From fb7255c15f407adbead04b6f7f0dd3db17021363 Mon Sep 17 00:00:00 2001 From: mathiasg Date: Thu, 25 Apr 2024 09:48:35 -0400 Subject: [PATCH 037/142] CHORE: Update build requirements --- pyproject.toml | 5 ++ requirements.txt | 125 +++++++++++++++++++++++++++++++++++++++++------ 2 files changed, 116 insertions(+), 14 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2857855d..4b63dcbb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,6 +46,11 @@ Documentation = "https://nibabies.readthedocs.io/en/latest/" "Docker Images" = "https://hub.docker.com/r/nipreps/nibabies" [project.optional-dependencies] +container = [ + "nibabies[telemetry]", + "datalad", + "datalad-osf", +] dev = [ "black ~= 22.3.0", "isort ~= 5.10.1", diff --git a/requirements.txt b/requirements.txt index 04159acc..75a443fc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,20 +2,31 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --extra=telemetry --resolver=backtracking +# pip-compile --extra=container --strip-extras # +annexremote==1.6.5 + # via + # datalad + # datalad-next + # datalad-osf astor==0.8.1 # via formulaic attrs==23.1.0 # via # niworkflows # sdcflows +backports-tarfile==1.1.1 + # via jaraco-context bids-validator==1.11.0 # via pybids bokeh==2.2.3 # via tedana +boto==2.49.0 + # via datalad certifi==2023.7.22 # via requests +chardet==5.2.0 + # via datalad charset-normalizer==3.1.0 # via requests ci-info==0.3.0 @@ -32,10 +43,23 @@ cycler==0.11.0 # via matplotlib cython==0.29.35 # via nitime +datalad==1.0.2 + # via + # datalad-next + # datalad-osf + # nibabies (pyproject.toml) +datalad-next==1.3.0 + # via datalad-osf +datalad-osf==0.3.0 + # via nibabies (pyproject.toml) +distro==1.9.0 + # via datalad docopt==0.6.2 # via num2words etelemetry==0.3.0 # via nipype +fasteners==0.19 + # via datalad filelock==3.12.2 # via nipype fonttools==4.40.0 @@ -46,27 +70,51 @@ greenlet==2.0.2 # via sqlalchemy h5py==3.9.0 # via nitransforms +humanize==4.9.0 + # via + # datalad + # datalad-next idna==3.4 # via requests imageio==2.31.1 # via scikit-image +importlib-metadata==7.1.0 + # via keyring importlib-resources==6.1.1 - # via niworkflows + # via + # nireports + # niworkflows + # templateflow indexed-gzip==1.7.1 # via smriprep interface-meta==1.3.0 # via formulaic +iso8601==2.1.0 + # via datalad isodate==0.6.1 # via rdflib +jaraco-classes==3.4.0 + # via + # keyring + # keyrings-alt +jaraco-context==5.3.0 + # via + # keyring + # keyrings-alt +jaraco-functools==4.0.1 + # via keyring jinja2==3.0.1 # via # bokeh # niworkflows - # tedana joblib==1.3.1 # via # nilearn # scikit-learn +keyring==25.1.0 + # via datalad +keyrings-alt==5.0.1 + # via datalad kiwisolver==1.4.4 # via matplotlib lazy-loader==0.2 @@ -75,6 +123,7 @@ lockfile==0.12.2 # via smriprep 
looseversion==1.2.0 # via + # datalad # nipype # niworkflows # smriprep @@ -89,13 +138,23 @@ markupsafe==2.1.3 # via jinja2 matplotlib==3.7.1 # via + # nireports # nitime # niworkflows # seaborn # smriprep # tedana migas==0.4.0 - # via nibabies (pyproject.toml) + # via + # nibabies (pyproject.toml) + # sdcflows +more-itertools==10.2.0 + # via + # datalad-next + # jaraco-classes + # jaraco-functools +msgpack==1.0.8 + # via datalad networkx==3.1 # via # nipype @@ -108,6 +167,7 @@ nibabel==5.1.0 # nibabies (pyproject.toml) # nilearn # nipype + # nireports # nitime # nitransforms # niworkflows @@ -118,14 +178,18 @@ nibabel==5.1.0 nilearn==0.10.1 # via # mapca + # nireports # niworkflows # tedana nipype==1.8.6 # via # nibabies (pyproject.toml) + # nireports # niworkflows # sdcflows # smriprep +nireports==23.2.0 + # via nibabies (pyproject.toml) nitime==0.10.1 # via nibabies (pyproject.toml) nitransforms==23.0.1 @@ -133,7 +197,7 @@ nitransforms==23.0.1 # nibabies (pyproject.toml) # niworkflows # sdcflows -niworkflows==1.9.0 +niworkflows @ git+https://github.com/nipreps/niworkflows.git@master # via # nibabies (pyproject.toml) # sdcflows @@ -153,6 +217,7 @@ numpy==1.25.0 # nibabies (pyproject.toml) # nilearn # nipype + # nireports # nitime # nitransforms # niworkflows @@ -167,9 +232,12 @@ numpy==1.25.0 # smriprep # tedana # tifffile +osfclient==0.0.5 + # via datalad-osf packaging==23.1 # via # bokeh + # datalad # matplotlib # nibabel # nibabies (pyproject.toml) @@ -183,23 +251,29 @@ pandas==2.0.3 # formulaic # nibabies (pyproject.toml) # nilearn + # nireports # niworkflows # pybids # seaborn # tedana +patool==2.2.0 + # via datalad pillow==10.0.1 # via # bokeh # imageio # matplotlib # scikit-image +platformdirs==4.2.1 + # via datalad prov==2.0.0 # via nipype psutil==5.9.5 # via nibabies (pyproject.toml) -pybids==0.16.1 +pybids==0.16.4 # via # nibabies (pyproject.toml) + # nireports # niworkflows # sdcflows # smriprep @@ -218,6 +292,8 @@ python-dateutil==2.8.2 # nipype # pandas # prov +python-gitlab==4.4.0 + # via datalad pytz==2023.3 # via pandas pywavelets==1.4.1 @@ -225,6 +301,7 @@ pywavelets==1.4.1 pyyaml==6.0 # via # bokeh + # nireports # niworkflows # smriprep rdflib==6.3.2 @@ -233,10 +310,16 @@ rdflib==6.3.2 # prov requests==2.31.0 # via + # datalad # etelemetry # nibabies (pyproject.toml) # nilearn + # osfclient + # python-gitlab + # requests-toolbelt # templateflow +requests-toolbelt==1.0.0 + # via python-gitlab scikit-image==0.21.0 # via # niworkflows @@ -260,27 +343,33 @@ scipy==1.11.1 # scikit-learn # sdcflows # tedana -sdcflows==2.5.2 +sdcflows @ git+https://github.com/nipreps/sdcflows.git@master # via nibabies (pyproject.toml) seaborn==0.12.2 - # via niworkflows + # via + # nireports + # niworkflows simplejson==3.19.1 # via nipype six==1.16.0 # via # isodate + # osfclient # python-dateutil -smriprep==0.12.2 +smriprep @ git+https://github.com/nipreps/smriprep.git@enh/nibabies-fit-apply # via nibabies (pyproject.toml) sqlalchemy==2.0.17 # via pybids svgutils==0.3.4 - # via niworkflows -tedana==0.0.13 + # via + # nireports + # niworkflows +tedana==23.0.2 # via nibabies (pyproject.toml) -templateflow==23.0.0 +templateflow==24.2.0 # via # nibabies (pyproject.toml) + # nireports # niworkflows # sdcflows # smriprep @@ -291,11 +380,16 @@ threadpoolctl==3.1.0 tifffile==2023.4.12 # via scikit-image toml==0.10.2 - # via nibabies (pyproject.toml) + # via + # nibabies (pyproject.toml) + # sdcflows tornado==6.3.3 # via bokeh tqdm==4.65.0 - # via templateflow + # via + # datalad + # osfclient + # 
templateflow
traits=6.3.2
 # via
 # nipype
 # niworkflows
transforms3d==0.4.1
 # via nibabies (pyproject.toml)
typing-extensions==4.7.0
 # via
 # bokeh
 # datalad
 # formulaic
 # sqlalchemy
tzdata==2023.3
 # via pandas
urllib3==2.0.7
 # via requests
wrapt==1.15.0
 # via formulaic
+zipp==3.18.1
+ # via importlib-metadata
From 6fc85093956cd5a96405747631830acde29dc96f Mon Sep 17 00:00:00 2001
From: mathiasg
Date: Thu, 25 Apr 2024 10:09:44 -0400
Subject: [PATCH 038/142] DOCKER: Update micromamba step

---
 Dockerfile | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index fe818678..30725883 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -92,17 +92,17 @@ RUN mkdir /opt/workbench && \
 FROM downloader as micromamba
 WORKDIR /
 # Bump the date to current to force update micromamba
-RUN echo "2023.06.29"
-RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
+RUN echo "2024.04.25" && curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba
 ENV MAMBA_ROOT_PREFIX="/opt/conda"
 COPY env.yml /tmp/env.yml
+COPY requirements.txt /tmp/requirements.txt
+WORKDIR /tmp
 RUN micromamba create -y -f /tmp/env.yml && \
     micromamba clean -y -a
+
 ENV PATH="/opt/conda/envs/nibabies/bin:$PATH"
-RUN /opt/conda/envs/nibabies/bin/npm install -g svgo@^2.8 bids-validator@1.11.0 && \
+RUN npm install -g svgo@^3.2.0 bids-validator@^1.14.0 && \
    rm -r ~/.npm
-COPY requirements.txt /tmp/requirements.txt
-RUN /opt/conda/envs/nibabies/bin/pip install --no-cache-dir -r /tmp/requirements.txt

 # Main container
 FROM ${BASE_IMAGE} as nibabies
From 23a0d244619eb0222cb2846e65735187568b3c6d Mon Sep 17 00:00:00 2001
From: mathiasg
Date: Thu, 25 Apr 2024 10:27:46 -0400
Subject: [PATCH 039/142] FIX: Traits version

---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 75a443fc..d5f6140a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -390,7 +390,7 @@ tqdm==4.65.0
 # datalad
 # osfclient
 # templateflow
-traits=6.3.2
+traits==6.3.2
 # via
 # nipype
 # niworkflows
From a94f62766ac37e54c130f0c7763cb832d03f4b71 Mon Sep 17 00:00:00 2001
From: Mathias Goncalves
Date: Thu, 25 Apr 2024 13:20:56 -0400
Subject: [PATCH 040/142] FIX: Determine which surface recon method to use, by default

---
 nibabies/config.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/nibabies/config.py b/nibabies/config.py
index 5922db8a..4f4a6734 100644
--- a/nibabies/config.py
+++ b/nibabies/config.py
@@ -591,7 +591,7 @@ class workflow(_Config):
 spaces = None
 """Keeps the :py:class:`~niworkflows.utils.spaces.SpatialReferences`
 instance keeping standard and nonstandard spaces."""
- surface_recon_method = 'infantfs'
+ surface_recon_method = 'auto'
 """Method to use for surface reconstruction."""
 use_aroma = None
 """Run ICA-:abbr:`AROMA (automatic removal of motion artifacts)`."""
From f2e685eead825e9d048c320447157a8bc50b932d Mon Sep 17 00:00:00 2001
From: Mathias Goncalves
Date: Thu, 25 Apr 2024 13:21:47 -0400
Subject: [PATCH 041/142] PIN: niworkflows branch

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 4b63dcbb..30f91a95 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -24,7 +24,7 @@ dependencies = [
 "nireports >= 23.2.0",
 "nitime",
 "nitransforms >= 23.0.1",
- "niworkflows @ git+https://github.com/nipreps/niworkflows.git@master",
+ "niworkflows @ git+https://github.com/nipreps/niworkflows.git@enh/fs-source",
 "numpy >= 
1.21.0", "packaging", "pandas", From 8e0becf422cc5e4c2c3235e4ddb467aaa3978bec Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Thu, 25 Apr 2024 13:22:17 -0400 Subject: [PATCH 042/142] FIX: Remove eager imports --- nibabies/workflows/anatomical/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/nibabies/workflows/anatomical/__init__.py b/nibabies/workflows/anatomical/__init__.py index 021d133d..e69de29b 100644 --- a/nibabies/workflows/anatomical/__init__.py +++ b/nibabies/workflows/anatomical/__init__.py @@ -1 +0,0 @@ -from .base import init_infant_anat_wf, init_infant_single_anat_wf From d5066aa9b2e25f2bbfe26ad6e5c36317f4010918 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Thu, 25 Apr 2024 13:23:52 -0400 Subject: [PATCH 043/142] FIX: Connections, missed parameters --- nibabies/workflows/anatomical/fit.py | 44 +++++++++++++++------------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index 95741096..d722894c 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -296,6 +296,7 @@ def init_infant_anat_fit_wf( # Reporting anat_reports_wf = init_anat_reports_wf( + spaces=spaces, surface_recon=recon_method, output_dir=output_dir, sloppy=sloppy, @@ -353,6 +354,9 @@ def init_infant_anat_fit_wf( (t1w_template_wf, sourcefile_buffer, [ ('outputnode.anat_valid_list', 'anat_source_files'), ]), + (t1w_template_wf, anat_reports_wf, [ + ('outputnode.out_report', 'inputnode.anat_conform_report'), + ]), ]) # fmt:skip workflow.connect([ @@ -361,9 +365,9 @@ def init_infant_anat_fit_wf( (t1w_template_wf, sourcefile_buffer, [ ('outputnode.anat_valid_list', 't1w_source_files'), ]), - (t1w_template_wf, anat_reports_wf, [ - ('outputnode.out_report', 'inputnode.anat_conform_report'), - ]), + # (t1w_template_wf, anat_reports_wf, [ + # ('outputnode.out_report', 'inputnode.anat_conform_report'), + # ]), (t1w_template_wf, ds_t1w_template_wf, [ ('outputnode.anat_realign_xfm', 'inputnode.anat_ref_xfms'), ]), @@ -371,7 +375,7 @@ def init_infant_anat_fit_wf( ('t1w_source_files', 'inputnode.source_files'), ]), (anat_buffer, ds_t1w_template_wf, [('t1w_preproc', 'inputnode.anat_preproc')]), - (ds_t1w_template_wf, outputnode, [('outputnode.t1w_preproc', 't1w_preproc')]), + (ds_t1w_template_wf, outputnode, [('outputnode.anat_preproc', 't1w_preproc')]), ]) # fmt:skip else: LOGGER.info('ANAT Found preprocessed T1w - skipping Stage 1') @@ -408,10 +412,14 @@ def init_infant_anat_fit_wf( ) if reference_anat == 'T2w': - workflow.connect( - t2w_template_wf, 'outputnode.anat_valid_list', - sourcefile_buffer, 'anat_source_files', - ) # fmt:skip + workflow.connect([ + (t2w_template_wf, sourcefile_buffer, [ + ('outputnode.anat_valid_list', 'anat_source_files'), + ]), + (t2w_template_wf, anat_reports_wf, [ + ('outputnode.out_report', 'inputnode.anat_conform_report'), + ]), + ]) # fmt:skip workflow.connect([ (inputnode, t2w_template_wf, [('t2w', 'inputnode.anat_files')]), @@ -419,9 +427,6 @@ def init_infant_anat_fit_wf( (t2w_template_wf, sourcefile_buffer, [ ('outputnode.anat_valid_list', 't2w_source_files'), ]), - (t2w_template_wf, anat_reports_wf, [ - ('outputnode.out_report', 'inputnode.anat_conform_report'), - ]), (t2w_template_wf, ds_t2w_template_wf, [ ('outputnode.anat_realign_xfm', 'inputnode.anat_ref_xfms'), ]), @@ -429,7 +434,7 @@ def init_infant_anat_fit_wf( ('t2w_source_files', 'inputnode.source_files'), ]), (anat_buffer, ds_t2w_template_wf, [('t2w_preproc', 
'inputnode.anat_preproc')]), - (ds_t2w_template_wf, outputnode, [('outputnode.t2w_preproc', 't2w_preproc')]), + (ds_t2w_template_wf, outputnode, [('outputnode.anat_preproc', 't2w_preproc')]), ]) # fmt:skip else: LOGGER.info('ANAT Found preprocessed T2w - skipping Stage 1') @@ -701,7 +706,7 @@ def init_infant_anat_fit_wf( ) workflow.connect([ - (anat_buffer, segmentation_wf, [(f'{anat}_brain', 'anat_brain')]), + (anat_buffer, segmentation_wf, [(f'{anat}_brain', 'inputnode.anat_brain')]), (segmentation_wf, seg_buffer, [ ('outputnode.anat_dseg', 'anat_dseg'), ('outputnode.anat_tpms', 'anat_tpms'), @@ -723,19 +728,17 @@ def init_infant_anat_fit_wf( (segmentation_wf, ds_dseg_wf, [ ('outputnode.anat_dseg', 'inputnode.anat_dseg'), ]), - (ds_dseg_wf, seg_buffer, [('outputnode.anat_dseg', 'anat_dseg')]), ]) # fmt:skip if not anat_tpms: ds_tpms_wf = init_ds_tpms_wf(output_dir=str(output_dir)) workflow.connect([ - (sourcefile_buffer, ds_dseg_wf, [ + (sourcefile_buffer, ds_tpms_wf, [ ('anat_source_files', 'inputnode.source_files'), ]), (segmentation_wf, ds_tpms_wf, [ ('outputnode.anat_tpms', 'inputnode.anat_tpms'), ]), - (ds_tpms_wf, seg_buffer, [('outputnode.anat_tpms', 'anat_tpms')]), ]) # fmt:skip else: LOGGER.info('ANAT Stage 4: Skipping segmentation workflow') @@ -1227,14 +1230,15 @@ def init_infant_anat_full_wf( bids_root: str, precomputed: dict, longitudinal: bool, + msm_sulc: bool, omp_nthreads: int, output_dir: str, segmentation_atlases: str | Path | None, skull_strip_mode: ty.Literal['auto', 'skip', 'force'], recon_method: ty.Literal['freesurfer', 'infantfs', 'mcribs', None], - skull_strip_template: Reference, + skull_strip_template: 'Reference', sloppy: bool, - spaces: SpatialReferences, + spaces: 'SpatialReferences', cifti_output: ty.Literal['91k', '170k', False], skull_strip_fixed_seed: bool = False, name: str = 'infant_anat_wf', @@ -1267,14 +1271,14 @@ def init_infant_anat_full_wf( ), name='outputnode', ) - msm_sulc = False # Not enabled for now + anat_fit_wf = init_infant_anat_fit_wf( reference_anat=reference_anat, age_months=age_months, bids_root=bids_root, output_dir=output_dir, longitudinal=longitudinal, - msm_sulc=msm_sulc, + msm_sulc=False, # TODO: Enable skull_strip_mode=skull_strip_mode, skull_strip_template=skull_strip_template, skull_strip_fixed_seed=skull_strip_fixed_seed, From 665309a3832975c1a2d5f943d25aa017ace3396b Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Thu, 25 Apr 2024 13:25:05 -0400 Subject: [PATCH 044/142] FIX: Use template iterator in anat reports --- nibabies/workflows/anatomical/outputs.py | 480 ++++++++++++----------- 1 file changed, 251 insertions(+), 229 deletions(-) diff --git a/nibabies/workflows/anatomical/outputs.py b/nibabies/workflows/anatomical/outputs.py index 4f86346d..56d1a4dc 100644 --- a/nibabies/workflows/anatomical/outputs.py +++ b/nibabies/workflows/anatomical/outputs.py @@ -9,16 +9,18 @@ from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe from niworkflows.engine.workflows import LiterateWorkflow as Workflow +from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms +from smriprep.workflows.outputs import init_template_iterator_wf -from ...interfaces import DerivativesDataSink +from nibabies.interfaces import DerivativesDataSink if ty.TYPE_CHECKING: from niworkflows.utils.spaces import SpatialReferences -BIDS_TISSUE_ORDER = ("GM", "WM", "CSF") +BIDS_TISSUE_ORDER = ('GM', 'WM', 'CSF') -def init_coreg_report_wf(*, output_dir, name="coreg_report_wf"): +def 
init_coreg_report_wf(*, output_dir, name='coreg_report_wf'): """ Generate and store a report in the right location. @@ -48,34 +50,34 @@ def init_coreg_report_wf(*, output_dir, name="coreg_report_wf"): workflow = Workflow(name=name) inputfields = [ - "source_file", - "t1w_preproc", - "t2w_preproc", - "in_mask", + 'source_file', + 't1w_preproc', + 't2w_preproc', + 'in_mask', ] - inputnode = pe.Node(niu.IdentityInterface(fields=inputfields), name="inputnode") + inputnode = pe.Node(niu.IdentityInterface(fields=inputfields), name='inputnode') # Generate reportlets showing spatial normalization norm_rpt = pe.Node( - SimpleBeforeAfter(before_label="T2w", after_label="T1w"), - name="norm_rpt", + SimpleBeforeAfter(before_label='T2w', after_label='T1w'), + name='norm_rpt', mem_gb=0.1, ) ds_t1w_t2w_report = pe.Node( DerivativesDataSink( - base_directory=output_dir, space="T2w", suffix="T1w", datatype="figures" + base_directory=output_dir, space='T2w', suffix='T1w', datatype='figures' ), - name="ds_t1w_t2w_report", + name='ds_t1w_t2w_report', run_without_submitting=True, ) # fmt: off workflow.connect([ - (inputnode, norm_rpt, [("t2w_preproc", "before"), - ("t1w_preproc", "after"), - ("in_mask", "wm_seg")]), - (inputnode, ds_t1w_t2w_report, [("source_file", "source_file")]), - (norm_rpt, ds_t1w_t2w_report, [("out_report", "in_file")]), + (inputnode, norm_rpt, [('t2w_preproc', 'before'), + ('t1w_preproc', 'after'), + ('in_mask', 'wm_seg')]), + (inputnode, ds_t1w_t2w_report, [('source_file', 'source_file')]), + (norm_rpt, ds_t1w_t2w_report, [('out_report', 'in_file')]), ]) # fmt: on @@ -84,10 +86,11 @@ def init_coreg_report_wf(*, output_dir, name="coreg_report_wf"): def init_anat_reports_wf( *, + spaces: SpatialReferences, surface_recon: ty.Literal['freesurfer', 'infantfs', 'mcribs'] | None, output_dir: str, sloppy: bool, - name="anat_reports_wf", + name='anat_reports_wf', ) -> Workflow: """ Patched workflow for reports to allow no resolution for templates @@ -128,54 +131,47 @@ def init_anat_reports_wf( from niworkflows.interfaces.reportlets.registration import ( SimpleBeforeAfterRPT as SimpleBeforeAfter, ) - from smriprep.interfaces.templateflow import TemplateFlowSelect from smriprep.workflows.outputs import ( - _drop_cohort, _empty_report, - _fmt, - _pick_cohort, _rpt_masks, ) workflow = Workflow(name=name) inputfields = [ - "source_file", - "anat_conform_report", - "anat_preproc", - "anat_dseg", - "anat_mask", - "template", - "std_t1w", - "std_mask", - "subject_id", - "subjects_dir", - "surfaces", - "morphometrics", + 'source_file', + 'anat_preproc', + 'anat_dseg', + 'anat_mask', + 'template', + 'anat2std_xfm', + # Optional + 'subject_id', + 'subjects_dir', + 'anat_conform_report', ] - inputnode = pe.Node(niu.IdentityInterface(fields=inputfields), name="inputnode") + inputnode = pe.Node(niu.IdentityInterface(fields=inputfields), name='inputnode') - seg_rpt = pe.Node(ROIsPlot(colors=["b", "magenta"], levels=[1.5, 2.5]), name="seg_rpt") + seg_rpt = pe.Node(ROIsPlot(colors=['b', 'magenta'], levels=[1.5, 2.5]), name='seg_rpt') anat_conform_check = pe.Node( niu.Function(function=_empty_report), - name="anat_conform_check", + name='anat_conform_check', run_without_submitting=True, ) ds_anat_conform_report = pe.Node( - DerivativesDataSink(base_directory=output_dir, desc="conform", datatype="figures"), - name="ds_anat_conform_report", + DerivativesDataSink(base_directory=output_dir, desc='conform', datatype='figures'), + name='ds_anat_conform_report', run_without_submitting=True, ) ds_anat_dseg_mask_report = 
pe.Node( - DerivativesDataSink(base_directory=output_dir, suffix="dseg", datatype="figures"), - name="ds_anat_dseg_mask_report", + DerivativesDataSink(base_directory=output_dir, suffix='dseg', datatype='figures'), + name='ds_anat_dseg_mask_report', run_without_submitting=True, ) - # fmt: off workflow.connect([ (inputnode, anat_conform_check, [('anat_conform_report', 'in_file')]), (anat_conform_check, ds_anat_conform_report, [('out', 'in_file')]), @@ -185,69 +181,95 @@ def init_anat_reports_wf( ('anat_mask', 'in_mask'), ('anat_dseg', 'in_rois')]), (seg_rpt, ds_anat_dseg_mask_report, [('out_report', 'in_file')]), - ]) - # fmt: on + ]) # fmt:skip - # Generate reportlets showing spatial normalization - tf_select = pe.Node( - TemplateFlowSelect(resolution=1), - name="tf_select", - run_without_submitting=True, - ) + if spaces._cached is not None and spaces.cached.references: + template_iterator_wf = init_template_iterator_wf(spaces=spaces, sloppy=sloppy) + t1w_std = pe.Node( + ApplyTransforms( + dimension=3, + default_value=0, + float=True, + interpolation='LanczosWindowedSinc', + ), + name='t1w_std', + ) + mask_std = pe.Node( + ApplyTransforms( + dimension=3, + default_value=0, + float=True, + interpolation='MultiLabel', + ), + name='mask_std', + ) - norm_msk = pe.Node( - niu.Function( - function=_rpt_masks, - output_names=["before", "after"], - input_names=["mask_file", "before", "after", "after_mask"], - ), - name="norm_msk", - ) - norm_rpt = pe.Node(SimpleBeforeAfter(), name="norm_rpt", mem_gb=0.1) - norm_rpt.inputs.after_label = "Participant" # after + norm_msk = pe.Node( + niu.Function( + function=_rpt_masks, + output_names=['before', 'after'], + input_names=['mask_file', 'before', 'after', 'after_mask'], + ), + name='norm_msk', + ) + norm_rpt = pe.Node(SimpleBeforeAfter(), name='norm_rpt', mem_gb=0.1) + norm_rpt.inputs.after_label = 'Participant' # after - ds_std_t1w_report = pe.Node( - DerivativesDataSink(base_directory=output_dir, suffix="T1w", datatype="figures"), - name="ds_std_t1w_report", - run_without_submitting=True, - ) + ds_std_t1w_report = pe.Node( + DerivativesDataSink(base_directory=output_dir, suffix='T1w', datatype='figures'), + name='ds_std_t1w_report', + run_without_submitting=True, + ) - # fmt: off - workflow.connect([ - (inputnode, tf_select, [(('template', _drop_cohort), 'template'), - (('template', _pick_cohort), 'cohort')]), - (inputnode, norm_rpt, [('template', 'before_label')]), - (inputnode, norm_msk, [('std_t1w', 'after'), - ('std_mask', 'after_mask')]), - (tf_select, norm_msk, [('t1w_file', 'before'), - ('brain_mask', 'mask_file')]), - (norm_msk, norm_rpt, [('before', 'before'), - ('after', 'after')]), - (inputnode, ds_std_t1w_report, [ - (('template', _fmt), 'space'), - ('source_file', 'source_file')]), - (norm_rpt, ds_std_t1w_report, [('out_report', 'in_file')]), - ]) - # fmt: on + workflow.connect([ + (inputnode, template_iterator_wf, [ + ('template', 'inputnode.template'), + ('anat2std_xfm', 'inputnode.anat2std_xfm'), + ]), + (inputnode, t1w_std, [('anat_preproc', 'input_image')]), + (inputnode, mask_std, [('anat_mask', 'input_image')]), + (template_iterator_wf, t1w_std, [ + ('outputnode.anat2std_xfm', 'transforms'), + ('outputnode.std_t1w', 'reference_image'), + ]), + (template_iterator_wf, mask_std, [ + ('outputnode.anat2std_xfm', 'transforms'), + ('outputnode.std_t1w', 'reference_image'), + ]), + (template_iterator_wf, norm_rpt, [('outputnode.space', 'before_label')]), + (t1w_std, norm_msk, [('output_image', 'after')]), + (mask_std, norm_msk, 
[('output_image', 'after_mask')]), + (template_iterator_wf, norm_msk, [ + ('outputnode.std_t1w', 'before'), + ('outputnode.std_mask', 'mask_file'), + ]), + (norm_msk, norm_rpt, [ + ('before', 'before'), + ('after', 'after'), + ]), + (inputnode, ds_std_t1w_report, [('source_file', 'source_file')]), + (template_iterator_wf, ds_std_t1w_report, [('outputnode.space', 'space')]), + (norm_rpt, ds_std_t1w_report, [('out_report', 'in_file')]), + ]) # fmt:skip if not surface_recon: return workflow from smriprep.interfaces.reports import FSSurfaceReport - recon_report = pe.Node(FSSurfaceReport(), name="recon_report") + recon_report = pe.Node(FSSurfaceReport(), name='recon_report') recon_report.interface._always_run = True - if surface_recon == "freesurfer": - recon_desc = "reconall" - elif surface_recon == "infantfs": - recon_desc = "infantfs" - elif surface_recon == "mcribs": - recon_desc = "mcribs" + if surface_recon == 'freesurfer': + recon_desc = 'reconall' + elif surface_recon == 'infantfs': + recon_desc = 'infantfs' + elif surface_recon == 'mcribs': + recon_desc = 'mcribs' ds_recon_report = pe.Node( - DerivativesDataSink(base_directory=output_dir, desc=recon_desc, datatype="figures"), - name="ds_recon_report", + DerivativesDataSink(base_directory=output_dir, desc=recon_desc, datatype='figures'), + name='ds_recon_report', run_without_submitting=True, ) workflow.connect([ @@ -269,8 +291,8 @@ def init_anat_derivatives_wf( num_t1w: int | None, num_t2w: int | None, surface_recon: ty.Literal['freesurfer', 'infantfs', 'mcribs'] | None, - tpm_labels: ty.Tuple[str, str, str] = BIDS_TISSUE_ORDER, - name: str = "anat_derivatives_wf", + tpm_labels: tuple[str, str, str] = BIDS_TISSUE_ORDER, + name: str = 'anat_derivatives_wf', ): """ Set up a battery of datasinks to store derivatives in the right location. 
@@ -356,50 +378,50 @@ def init_anat_derivatives_wf( inputnode = pe.Node( niu.IdentityInterface( fields=[ - "template", + 'template', # T1w - "t1w_source_files", - "t1w_ref_xfms", - "t1w_preproc", + 't1w_source_files', + 't1w_ref_xfms', + 't1w_preproc', # T2w - "t2w_source_files", - "t2w_ref_xfms", - "t2w_preproc", + 't2w_source_files', + 't2w_ref_xfms', + 't2w_preproc', # Can be in either T1w/T2w space - "anat_mask", - "anat_dseg", - "anat_tpms", - "anat2std_xfm", - "std2anat_xfm", + 'anat_mask', + 'anat_dseg', + 'anat_tpms', + 'anat2std_xfm', + 'std2anat_xfm', # FS - "anat2fsnative_xfm", - "fsnative2anat_xfm", - "anat_fs_aseg", - "anat_fs_aparc", - "anat_ribbon", - "surfaces", - "morphometrics", + 'anat2fsnative_xfm', + 'fsnative2anat_xfm', + 'anat_fs_aseg', + 'anat_fs_aparc', + 'anat_ribbon', + 'surfaces', + 'morphometrics', # CIFTI - "cifti_metadata", - "cifti_density", - "cifti_morph", - "sphere_reg", - "sphere_reg_fsLR", + 'cifti_metadata', + 'cifti_density', + 'cifti_morph', + 'sphere_reg', + 'sphere_reg_fsLR', ] ), - name="inputnode", + name='inputnode', ) # The preferred space to use for to/from entities - source_files = "t1w_source_files" if num_t1w else "t2w_source_files" - space = "T1w" if num_t1w else "T2w" + source_files = 't1w_source_files' if num_t1w else 't2w_source_files' + space = 'T1w' if num_t1w else 'T2w' if num_t1w: - raw_sources = pe.Node(niu.Function(function=_bids_relative), name="t1w_raw_sources") + raw_sources = pe.Node(niu.Function(function=_bids_relative), name='t1w_raw_sources') raw_sources.inputs.bids_root = bids_root ds_t1w_preproc = pe.Node( - DerivativesDataSink(base_directory=output_dir, desc="preproc", compress=True), - name="ds_t1w_preproc", + DerivativesDataSink(base_directory=output_dir, desc='preproc', compress=True), + name='ds_t1w_preproc', run_without_submitting=True, ) ds_t1w_preproc.inputs.SkullStripped = False @@ -410,14 +432,14 @@ def init_anat_derivatives_wf( ds_t1w_ref_xfms = pe.MapNode( DerivativesDataSink( base_directory=output_dir, - to="T1w", - mode="image", - suffix="xfm", - extension="txt", - **{"from": "orig"}, + to='T1w', + mode='image', + suffix='xfm', + extension='txt', + **{'from': 'orig'}, ), - iterfield=["source_file", "in_file"], - name="ds_t1w_ref_xfms", + iterfield=['source_file', 'in_file'], + name='ds_t1w_ref_xfms', run_without_submitting=True, ) # fmt:off @@ -429,18 +451,18 @@ def init_anat_derivatives_wf( if num_t2w: if not num_t1w: - raw_sources = pe.Node(niu.Function(function=_bids_relative), name="t2w_raw_sources") + raw_sources = pe.Node(niu.Function(function=_bids_relative), name='t2w_raw_sources') raw_sources.inputs.bids_root = bids_root ds_t2w_preproc = pe.Node( - DerivativesDataSink(base_directory=output_dir, desc="preproc", compress=True), - name="ds_t2w_preproc", + DerivativesDataSink(base_directory=output_dir, desc='preproc', compress=True), + name='ds_t2w_preproc', run_without_submitting=True, ) ds_t2w_preproc.inputs.SkullStripped = False if num_t1w: - ds_t2w_preproc.inputs.space = "T1w" + ds_t2w_preproc.inputs.space = 'T1w' if num_t2w > 1: # Please note the dictionary unpacking to provide the from argument. 
@@ -448,14 +470,14 @@ def init_anat_derivatives_wf( ds_t2w_ref_xfms = pe.MapNode( DerivativesDataSink( base_directory=output_dir, - to="T1w", - mode="image", - suffix="xfm", - extension="txt", - **{"from": "orig"}, + to='T1w', + mode='image', + suffix='xfm', + extension='txt', + **{'from': 'orig'}, ), - iterfield=["source_file", "in_file"], - name="ds_t2w_ref_xfms", + iterfield=['source_file', 'in_file'], + name='ds_t2w_ref_xfms', run_without_submitting=True, ) # fmt:off @@ -466,21 +488,21 @@ def init_anat_derivatives_wf( # fmt:on ds_anat_mask = pe.Node( - DerivativesDataSink(base_directory=output_dir, desc="brain", suffix="mask", compress=True), - name="ds_anat_mask", + DerivativesDataSink(base_directory=output_dir, desc='brain', suffix='mask', compress=True), + name='ds_anat_mask', run_without_submitting=True, ) - ds_anat_mask.inputs.Type = "Brain" + ds_anat_mask.inputs.Type = 'Brain' ds_anat_dseg = pe.Node( - DerivativesDataSink(base_directory=output_dir, suffix="dseg", compress=True), - name="ds_anat_dseg", + DerivativesDataSink(base_directory=output_dir, suffix='dseg', compress=True), + name='ds_anat_dseg', run_without_submitting=True, ) ds_anat_tpms = pe.Node( - DerivativesDataSink(base_directory=output_dir, suffix="probseg", compress=True), - name="ds_anat_tpms", + DerivativesDataSink(base_directory=output_dir, suffix='probseg', compress=True), + name='ds_anat_tpms', run_without_submitting=True, ) ds_anat_tpms.inputs.label = tpm_labels @@ -488,12 +510,12 @@ def init_anat_derivatives_wf( ds_anat_ribbon = pe.Node( DerivativesDataSink( base_directory=output_dir, - desc="ribbon", - suffix="mask", - extension=".nii.gz", + desc='ribbon', + suffix='mask', + extension='.nii.gz', compress=True, ), - name="ds_anat_ribbon", + name='ds_anat_ribbon', run_without_submitting=True, ) @@ -527,18 +549,18 @@ def init_anat_derivatives_wf( # Transforms if spaces.get_spaces(nonstandard=False, dim=(3,)): ds_std2anat_xfm = pe.MapNode( - DerivativesDataSink(base_directory=output_dir, to=space, mode="image", suffix="xfm"), - iterfield=("in_file", "from"), - name="ds_std2anat_xfm", + DerivativesDataSink(base_directory=output_dir, to=space, mode='image', suffix='xfm'), + iterfield=('in_file', 'from'), + name='ds_std2anat_xfm', run_without_submitting=True, ) ds_anat2std_xfm = pe.MapNode( DerivativesDataSink( - base_directory=output_dir, mode="image", suffix="xfm", **{"from": space} + base_directory=output_dir, mode='image', suffix='xfm', **{'from': space} ), - iterfield=("in_file", "to"), - name="ds_anat2std_xfm", + iterfield=('in_file', 'to'), + name='ds_anat2std_xfm', run_without_submitting=True, ) @@ -556,7 +578,7 @@ def init_anat_derivatives_wf( # fmt:on # Write derivatives in standard spaces specified by --output-spaces - if getattr(spaces, "_cached") is not None and spaces.cached.references: + if spaces._cached is not None and spaces.cached.references: from niworkflows.interfaces.fixes import ( FixHeaderApplyTransforms as ApplyTransforms, ) @@ -564,28 +586,28 @@ def init_anat_derivatives_wf( from niworkflows.interfaces.space import SpaceDataSource from smriprep.interfaces.templateflow import TemplateFlowSelect - spacesource = pe.Node(SpaceDataSource(), name="spacesource", run_without_submitting=True) + spacesource = pe.Node(SpaceDataSource(), name='spacesource', run_without_submitting=True) spacesource.iterables = ( - "in_tuple", + 'in_tuple', [(s.fullname, s.spec) for s in spaces.cached.get_standard(dim=(3,))], ) gen_tplid = pe.Node( niu.Function(function=_fmt_cohort), - name="gen_tplid", + 
name='gen_tplid', run_without_submitting=True, ) select_xfm = pe.Node( - KeySelect(fields=["anat2std_xfm"]), - name="select_xfm", + KeySelect(fields=['anat2std_xfm']), + name='select_xfm', run_without_submitting=True, ) - select_tpl = pe.Node(TemplateFlowSelect(), name="select_tpl", run_without_submitting=True) + select_tpl = pe.Node(TemplateFlowSelect(), name='select_tpl', run_without_submitting=True) - gen_ref = pe.Node(GenerateSamplingReference(), name="gen_ref", mem_gb=0.01) + gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref', mem_gb=0.01) - mask_anat = pe.Node(ApplyMask(), name="mask_anat") + mask_anat = pe.Node(ApplyMask(), name='mask_anat') # Resample T1w-space inputs anat2std_t1w = pe.Node( @@ -593,54 +615,54 @@ def init_anat_derivatives_wf( dimension=3, default_value=0, float=True, - interpolation="LanczosWindowedSinc", + interpolation='LanczosWindowedSinc', ), - name="anat2std_t1w", + name='anat2std_t1w', ) - anat2std_mask = pe.Node(ApplyTransforms(interpolation="MultiLabel"), name="anat2std_mask") - anat2std_dseg = pe.Node(ApplyTransforms(interpolation="MultiLabel"), name="anat2std_dseg") + anat2std_mask = pe.Node(ApplyTransforms(interpolation='MultiLabel'), name='anat2std_mask') + anat2std_dseg = pe.Node(ApplyTransforms(interpolation='MultiLabel'), name='anat2std_dseg') anat2std_tpms = pe.MapNode( - ApplyTransforms(dimension=3, default_value=0, float=True, interpolation="Gaussian"), - iterfield=["input_image"], - name="anat2std_tpms", + ApplyTransforms(dimension=3, default_value=0, float=True, interpolation='Gaussian'), + iterfield=['input_image'], + name='anat2std_tpms', ) ds_std_t1w = pe.Node( DerivativesDataSink( base_directory=output_dir, - desc="preproc", + desc='preproc', compress=True, ), - name="ds_std_t1w", + name='ds_std_t1w', run_without_submitting=True, ) ds_std_t1w.inputs.SkullStripped = True ds_std_mask = pe.Node( DerivativesDataSink( - base_directory=output_dir, desc="brain", suffix="mask", compress=True + base_directory=output_dir, desc='brain', suffix='mask', compress=True ), - name="ds_std_mask", + name='ds_std_mask', run_without_submitting=True, ) - ds_std_mask.inputs.Type = "Brain" + ds_std_mask.inputs.Type = 'Brain' ds_std_dseg = pe.Node( - DerivativesDataSink(base_directory=output_dir, suffix="dseg", compress=True), - name="ds_std_dseg", + DerivativesDataSink(base_directory=output_dir, suffix='dseg', compress=True), + name='ds_std_dseg', run_without_submitting=True, ) ds_std_tpms = pe.Node( - DerivativesDataSink(base_directory=output_dir, suffix="probseg", compress=True), - name="ds_std_tpms", + DerivativesDataSink(base_directory=output_dir, suffix='probseg', compress=True), + name='ds_std_tpms', run_without_submitting=True, ) set_tpl_res = pe.Node( niu.Function(function=_set_tpl_res), - name="set_tpl_res", + name='set_tpl_res', run_without_submitting=True, mem_gb=0.1, ) @@ -684,16 +706,16 @@ def init_anat_derivatives_wf( workflow.connect( # Connect apply transforms nodes [ - (gen_ref, n, [("out_file", "reference_image")]) + (gen_ref, n, [('out_file', 'reference_image')]) for n in (anat2std_t1w, anat2std_mask, anat2std_dseg, anat2std_tpms) ] + [ - (select_xfm, n, [("anat2std_xfm", "transforms")]) + (select_xfm, n, [('anat2std_xfm', 'transforms')]) for n in (anat2std_t1w, anat2std_mask, anat2std_dseg, anat2std_tpms) ] # Connect the source_file input of these datasinks + [ - (inputnode, n, [(source_files, "source_file")]) + (inputnode, n, [(source_files, 'source_file')]) for n in (ds_std_t1w, ds_std_mask, ds_std_dseg, ds_std_tpms) ] # Connect the 
space input of these datasinks @@ -701,7 +723,7 @@ def init_anat_derivatives_wf( ( spacesource, n, - [("space", "space"), ("cohort", "cohort"), ("resolution", "resolution")], + [('space', 'space'), ('cohort', 'cohort'), ('resolution', 'resolution')], ) for n in (ds_std_t1w, ds_std_mask, ds_std_dseg, ds_std_tpms) ] @@ -714,95 +736,95 @@ def init_anat_derivatives_wf( from niworkflows.interfaces.surf import Path2BIDS # FS native space transforms - lta2itk_fwd = pe.Node(ConcatenateXFMs(), name="lta2itk_fwd", run_without_submitting=True) - lta2itk_inv = pe.Node(ConcatenateXFMs(), name="lta2itk_inv", run_without_submitting=True) + lta2itk_fwd = pe.Node(ConcatenateXFMs(), name='lta2itk_fwd', run_without_submitting=True) + lta2itk_inv = pe.Node(ConcatenateXFMs(), name='lta2itk_inv', run_without_submitting=True) ds_anat_fsnative = pe.Node( DerivativesDataSink( base_directory=output_dir, - mode="image", - to="fsnative", - suffix="xfm", - extension="txt", - **{"from": space}, + mode='image', + to='fsnative', + suffix='xfm', + extension='txt', + **{'from': space}, ), - name="ds_anat_fsnative", + name='ds_anat_fsnative', run_without_submitting=True, ) ds_fsnative_anat = pe.Node( DerivativesDataSink( base_directory=output_dir, - mode="image", + mode='image', to=space, - suffix="xfm", - extension="txt", - **{"from": "fsnative"}, + suffix='xfm', + extension='txt', + **{'from': 'fsnative'}, ), - name="ds_fsnative_anat", + name='ds_fsnative_anat', run_without_submitting=True, ) # Surfaces name_surfs = pe.MapNode( - Path2BIDS(), iterfield="in_file", name="name_surfs", run_without_submitting=True + Path2BIDS(), iterfield='in_file', name='name_surfs', run_without_submitting=True ) ds_surfs = pe.MapNode( - DerivativesDataSink(base_directory=output_dir, extension=".surf.gii"), - iterfield=["in_file", "hemi", "suffix"], - name="ds_surfs", + DerivativesDataSink(base_directory=output_dir, extension='.surf.gii'), + iterfield=['in_file', 'hemi', 'suffix'], + name='ds_surfs', run_without_submitting=True, ) name_regs = pe.MapNode( - Path2BIDS(), iterfield="in_file", name="name_regs", run_without_submitting=True + Path2BIDS(), iterfield='in_file', name='name_regs', run_without_submitting=True ) ds_regs = pe.MapNode( DerivativesDataSink( base_directory=output_dir, - desc="reg", - suffix="sphere", - extension=".surf.gii", + desc='reg', + suffix='sphere', + extension='.surf.gii', ), - iterfield=["in_file", "hemi"], - name="ds_regs", + iterfield=['in_file', 'hemi'], + name='ds_regs', run_without_submitting=True, ) name_reg_fsLR = pe.MapNode( - Path2BIDS(), iterfield="in_file", name="name_reg_fsLR", run_without_submitting=True + Path2BIDS(), iterfield='in_file', name='name_reg_fsLR', run_without_submitting=True ) ds_reg_fsLR = pe.MapNode( DerivativesDataSink( base_directory=output_dir, - space="dHCP" if surface_recon == "mcribs" else "fsLR", - desc="reg", - suffix="sphere", - extension=".surf.gii", + space='dHCP' if surface_recon == 'mcribs' else 'fsLR', + desc='reg', + suffix='sphere', + extension='.surf.gii', ), - iterfield=["in_file", "hemi"], - name="ds_reg_fsLR", + iterfield=['in_file', 'hemi'], + name='ds_reg_fsLR', run_without_submitting=True, ) # Morphometrics name_morphs = pe.MapNode( Path2BIDS(), - iterfield="in_file", - name="name_morphs", + iterfield='in_file', + name='name_morphs', run_without_submitting=True, ) ds_morphs = pe.MapNode( - DerivativesDataSink(base_directory=output_dir, extension=".shape.gii"), - iterfield=["in_file", "hemi", "suffix"], - name="ds_morphs", + 
DerivativesDataSink(base_directory=output_dir, extension='.shape.gii'), + iterfield=['in_file', 'hemi', 'suffix'], + name='ds_morphs', run_without_submitting=True, ) # Parcellations ds_anat_fsaseg = pe.Node( - DerivativesDataSink(base_directory=output_dir, desc="aseg", suffix="dseg", compress=True), - name="ds_anat_fsaseg", + DerivativesDataSink(base_directory=output_dir, desc='aseg', suffix='dseg', compress=True), + name='ds_anat_fsaseg', run_without_submitting=True, ) ds_anat_fsparc = pe.Node( DerivativesDataSink( - base_directory=output_dir, desc="aparcaseg", suffix="dseg", compress=True + base_directory=output_dir, desc='aparcaseg', suffix='dseg', compress=True ), - name="ds_anat_fsparc", + name='ds_anat_fsparc', run_without_submitting=True, ) @@ -848,7 +870,7 @@ def init_anat_derivatives_wf( ), name='ds_cifti_morph', run_without_submitting=True, - iterfield=["in_file", "meta_dict", "suffix"], + iterfield=['in_file', 'meta_dict', 'suffix'], ) # fmt:off workflow.connect([ @@ -862,7 +884,7 @@ def init_anat_derivatives_wf( def _set_tpl_res(space, resolution): - if space in ("UNCInfant", "Fischer344"): + if space in ('UNCInfant', 'Fischer344'): from nipype.interfaces.base import Undefined return Undefined From fcf201f92524c9059f9a8241c61d071385646cba Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Thu, 25 Apr 2024 13:26:23 -0400 Subject: [PATCH 045/142] ENH: output both forward and reverse coreg transforms --- nibabies/workflows/anatomical/registration.py | 122 +++++++++--------- 1 file changed, 62 insertions(+), 60 deletions(-) diff --git a/nibabies/workflows/anatomical/registration.py b/nibabies/workflows/anatomical/registration.py index f2f25cbe..eebbafe6 100644 --- a/nibabies/workflows/anatomical/registration.py +++ b/nibabies/workflows/anatomical/registration.py @@ -17,7 +17,7 @@ def init_coregistration_wf( debug: bool = False, t1w_mask: bool = False, probmap: bool = True, - name: str = "coregistration_wf", + name: str = 'coregistration_wf', ): """ Set-up a T2w-to-T1w within-baby co-registration framework. 
@@ -97,37 +97,38 @@ def init_coregistration_wf( workflow = pe.Workflow(name) inputnode = pe.Node( - niu.IdentityInterface(fields=["in_t1w", "in_t2w", "in_mask", "in_probmap"]), - name="inputnode", + niu.IdentityInterface(fields=['in_t1w', 'in_t2w', 'in_mask', 'in_probmap']), + name='inputnode', ) outputnode = pe.Node( niu.IdentityInterface( fields=[ - "t1w_preproc", - "t1w_brain", - "t1w_mask", - "t1w2t2w_xfm", - "t2w_preproc", + 't1w_preproc', + 't1w_brain', + 't1w_mask', + 't1w2t2w_xfm', + 't2w2t1w_xfm', + 't2w_preproc', ] ), - name="outputnode", + name='outputnode', ) # Dilate t2w mask for easier t1->t2 registration - fixed_masks_arg = pe.Node(niu.Merge(3), name="fixed_masks_arg", run_without_submitting=True) - reg_mask = pe.Node(BinaryDilation(radius=8, iterations=3), name="reg_mask") - refine_mask = pe.Node(BinaryDilation(radius=8, iterations=1), name="refine_mask") + fixed_masks_arg = pe.Node(niu.Merge(3), name='fixed_masks_arg', run_without_submitting=True) + reg_mask = pe.Node(BinaryDilation(radius=8, iterations=3), name='reg_mask') + refine_mask = pe.Node(BinaryDilation(radius=8, iterations=1), name='refine_mask') # Set up T1w -> T2w within-subject registration coreg = pe.Node( - Registration(from_file=get_file("nibabies", "data/within_subject_t1t2.json")), - name="coreg", + Registration(from_file=get_file('nibabies', 'data/within_subject_t1t2.json')), + name='coreg', n_procs=omp_nthreads, mem_gb=mem_gb, ) coreg.inputs.float = sloppy if debug: - coreg.inputs.args = "--write-interval-volumes 5" + coreg.inputs.args = '--write-interval-volumes 5' coreg.inputs.output_inverse_warped_image = sloppy coreg.inputs.output_warped_image = sloppy @@ -143,34 +144,35 @@ def init_coregistration_wf( shrink_factor=4, ), n_procs=omp_nthreads, - name="final_n4", + name='final_n4', ) # Move the T2w into T1w space, and apply the mask to the T1w - map_t2w = pe.Node(ApplyTransforms(interpolation="BSpline"), name="map_t2w", mem_gb=1) - apply_mask = pe.Node(ApplyMask(), name="apply_mask") + map_t2w = pe.Node(ApplyTransforms(interpolation='BSpline'), name='map_t2w', mem_gb=1) + apply_mask = pe.Node(ApplyMask(), name='apply_mask') # fmt: off workflow.connect([ - (inputnode, final_n4, [("in_t1w", "input_image")]), - (inputnode, coreg, [("in_t1w", "moving_image"), - ("in_t2w", "fixed_image")]), + (inputnode, final_n4, [('in_t1w', 'input_image')]), + (inputnode, coreg, [('in_t1w', 'moving_image'), + ('in_t2w', 'fixed_image')]), (reg_mask, fixed_masks_arg, [ - ("out_file", "in1"), - ("out_file", "in2")]), - (refine_mask, fixed_masks_arg, [("out_file", "in3")]), + ('out_file', 'in1'), + ('out_file', 'in2')]), + (refine_mask, fixed_masks_arg, [('out_file', 'in3')]), (inputnode, map_t2w, [ - ("in_t1w", "reference_image"), - ("in_t2w", "input_image")]), - (fixed_masks_arg, coreg, [("out", "fixed_image_masks")]), + ('in_t1w', 'reference_image'), + ('in_t2w', 'input_image')]), + (fixed_masks_arg, coreg, [('out', 'fixed_image_masks')]), (coreg, map_t2w, [ - ("reverse_transforms", "transforms"), - ("reverse_invert_flags", "invert_transform_flags"), + ('reverse_transforms', 'transforms'), + ('reverse_invert_flags', 'invert_transform_flags'), ]), - (final_n4, apply_mask, [("output_image", "in_file")]), - (final_n4, outputnode, [("output_image", "t1w_preproc")]), - (map_t2w, outputnode, [("output_image", "t2w_preproc")]), - (apply_mask, outputnode, [("out_file", "t1w_brain")]), - (coreg, outputnode, [("forward_transforms", "t1w2t2w_xfm")]), + (final_n4, apply_mask, [('output_image', 'in_file')]), + (final_n4, outputnode, 
[('output_image', 't1w_preproc')]), + (map_t2w, outputnode, [('output_image', 't2w_preproc')]), + (apply_mask, outputnode, [('out_file', 't1w_brain')]), + (coreg, outputnode, [('forward_transforms', 't1w2t2w_xfm')]), + (coreg, outputnode, [('reverse_transforms', 't2w2t1w_xfm')]), ]) # fmt: on @@ -179,54 +181,54 @@ def init_coregistration_wf( # Generate a quick, rough mask of the T2w to be used to facilitate co-registration. from sdcflows.interfaces.brainmask import BrainExtraction - masker = pe.Node(BrainExtraction(), name="t2w_masker") + masker = pe.Node(BrainExtraction(), name='t2w_masker') # fmt:off workflow.connect([ - (inputnode, masker, [("in_t2w", "in_file")]), - (masker, reg_mask, [("out_mask", "in_file")]), - (masker, refine_mask, [("out_mask", "in_file")]), - (inputnode, apply_mask, [("in_mask", "in_mask")]), - (inputnode, outputnode, [("in_mask", "t1w_mask")]), + (inputnode, masker, [('in_t2w', 'in_file')]), + (masker, reg_mask, [('out_mask', 'in_file')]), + (masker, refine_mask, [('out_mask', 'in_file')]), + (inputnode, apply_mask, [('in_mask', 'in_mask')]), + (inputnode, outputnode, [('in_mask', 't1w_mask')]), ]) # fmt:on return workflow if probmap: # The T2w mask from the brain extraction workflow will be mapped to T1w space - map_mask = pe.Node(ApplyTransforms(interpolation="Gaussian"), name="map_mask", mem_gb=1) - thr_mask = pe.Node(Binarize(thresh_low=0.80), name="thr_mask") + map_mask = pe.Node(ApplyTransforms(interpolation='Gaussian'), name='map_mask', mem_gb=1) + thr_mask = pe.Node(Binarize(thresh_low=0.80), name='thr_mask') # fmt:off workflow.connect([ - (inputnode, reg_mask, [("in_mask", "in_file")]), - (inputnode, refine_mask, [("in_mask", "in_file")]), + (inputnode, reg_mask, [('in_mask', 'in_file')]), + (inputnode, refine_mask, [('in_mask', 'in_file')]), (inputnode, map_mask, [ - ("in_t1w", "reference_image"), - ("in_probmap", "input_image")]), + ('in_t1w', 'reference_image'), + ('in_probmap', 'input_image')]), (coreg, map_mask, [ - ("reverse_transforms", "transforms"), - ("reverse_invert_flags", "invert_transform_flags")]), - (map_mask, thr_mask, [("output_image", "in_file")]), - (map_mask, final_n4, [("output_image", "weight_image")]), - (thr_mask, outputnode, [("out_mask", "t1w_mask")]), - (thr_mask, apply_mask, [("out_mask", "in_mask")]), + ('reverse_transforms', 'transforms'), + ('reverse_invert_flags', 'invert_transform_flags')]), + (map_mask, thr_mask, [('output_image', 'in_file')]), + (map_mask, final_n4, [('output_image', 'weight_image')]), + (thr_mask, outputnode, [('out_mask', 't1w_mask')]), + (thr_mask, apply_mask, [('out_mask', 'in_mask')]), ]) # fmt:on return workflow # A precomputed T2w mask was provided map_precomp_mask = pe.Node( - ApplyTransforms(interpolation="MultiLabel"), name='map_precomp_mask' + ApplyTransforms(interpolation='MultiLabel'), name='map_precomp_mask' ) # fmt:off workflow.connect([ - (inputnode, reg_mask, [("in_mask", "in_file")]), - (inputnode, refine_mask, [("in_mask", "in_file")]), + (inputnode, reg_mask, [('in_mask', 'in_file')]), + (inputnode, refine_mask, [('in_mask', 'in_file')]), (inputnode, map_precomp_mask, [ ('in_t1w', 'reference_image'), ('in_mask', 'input_image')]), (coreg, map_precomp_mask, [ - ("reverse_transforms", "transforms"), - ("reverse_invert_flags", "invert_transform_flags")]), + ('reverse_transforms', 'transforms'), + ('reverse_invert_flags', 'invert_transform_flags')]), (map_precomp_mask, final_n4, [('output_image', 'weight_image')]), (map_precomp_mask, outputnode, [('output_image', 't1w_mask')]), 
(map_precomp_mask, apply_mask, [('output_image', 'in_mask')]), @@ -251,7 +253,7 @@ def init_coregister_derivatives_wf( ) if t1w_mask: - t1wmask2t2w = pe.Node(ApplyTransforms(interpolation="MultiLabel"), name='t1wmask2t2w') + t1wmask2t2w = pe.Node(ApplyTransforms(interpolation='MultiLabel'), name='t1wmask2t2w') # fmt:off workflow.connect([ (inputnode, t1wmask2t2w, [ @@ -263,7 +265,7 @@ def init_coregister_derivatives_wf( # fmt:on if t1w_aseg: # fmt:off - t1waseg2t2w = pe.Node(ApplyTransforms(interpolation="MultiLabel"), name='t1waseg2t2w') + t1waseg2t2w = pe.Node(ApplyTransforms(interpolation='MultiLabel'), name='t1waseg2t2w') workflow.connect([ (inputnode, t1waseg2t2w, [ ('t1w_aseg', 'input_image'), @@ -274,7 +276,7 @@ def init_coregister_derivatives_wf( # fmt:on if t2w_aseg: # fmt:off - t2waseg2t1w = pe.Node(ApplyTransforms(interpolation="MultiLabel"), name='t2waseg2t1w') + t2waseg2t1w = pe.Node(ApplyTransforms(interpolation='MultiLabel'), name='t2waseg2t1w') t2waseg2t1w.inputs.invert_transform_flags = [True, False] workflow.connect([ (inputnode, t2waseg2t1w, [ From 7b0098a70509415087c5b2108c3ac264b54578ab Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Fri, 26 Apr 2024 13:03:19 -0400 Subject: [PATCH 046/142] FIX: fstring + quotes --- nibabies/workflows/anatomical/segmentation.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/nibabies/workflows/anatomical/segmentation.py b/nibabies/workflows/anatomical/segmentation.py index 7e01ede5..ad226e09 100644 --- a/nibabies/workflows/anatomical/segmentation.py +++ b/nibabies/workflows/anatomical/segmentation.py @@ -34,7 +34,10 @@ def init_segmentation_wf( name: str = 'segmentation_wf', ): workflow = Workflow(name=name) - inputnode = pe.Node(niu.IdentityInterface(fields=['anat_brain']), name='inputnode') + inputnode = pe.Node( + niu.IdentityInterface(fields=['anat_brain', 'anat_aseg']), + name='inputnode', + ) outputnode = pe.Node( niu.IdentityInterface(fields=['anat_dseg', 'anat_tpms', 'anat_aseg']), name='outputnode', @@ -50,7 +53,7 @@ def init_segmentation_wf( workflow.__desc__ = ( 'Brain tissue segmentation of cerebrospinal fluid (CSF), white-matter (WM), and ' f'gray-matter (GM) was performed on the brain-extracted {image_type} using FSL ' - f'FAST, distributed with {fsl.Info.version() or 'version unknown'}' + f'FAST, distributed with {fsl.Info.version() or "version unknown"}' ) fast = pe.Node( fsl.FAST(segments=True, no_bias=True, probability_maps=True), @@ -129,7 +132,7 @@ def init_jlf_wf( workflow.__desc__ = ( f'The {image_type} image was registered to {len(segmentations)} templates for ' - f'JointFusion, distributed with ANTs {ants.base.Info.version() or 'version unknown'}, ' + f'JointFusion, distributed with ANTs {ants.base.Info.version() or "version unknown"}, ' 'for image segmentation. Brain tissue segmentation of cerebrospinal fluid (CSF), ' 'white-matter (WM), and gray-matter (GM) were derived from the label fused image.' 
    )

From 0fc5d0437c61b0ba8e393f3d73551bff145d16c6 Mon Sep 17 00:00:00 2001
From: Mathias Goncalves
Date: Fri, 26 Apr 2024 19:23:42 -0400
Subject: [PATCH 047/142] ENH: Add named derivatives support

---
 nibabies/cli/parser.py                   |  43 +-
 nibabies/config.py                       |  20 +-
 wrapper/src/nibabies_wrapper/__main__.py | 474 +++++++++++------------
 3 files changed, 274 insertions(+), 263 deletions(-)

diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py
index 993fdc94..c501f81c 100644
--- a/nibabies/cli/parser.py
+++ b/nibabies/cli/parser.py
@@ -14,7 +14,7 @@ def _build_parser():
     """Build parser object."""
-    from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
+    from argparse import Action, ArgumentDefaultsHelpFormatter, ArgumentParser
     from functools import partial
     from pathlib import Path

@@ -23,6 +23,23 @@ def _build_parser():

     from .version import check_latest, is_flagged

+    class DerivToDict(Action):
+        def __call__(self, parser, namespace, values, option_string=None):
+            d = {}
+            for spec in values:
+                try:
+                    name, loc = spec.split('=')
+                    loc = Path(loc)
+                except ValueError:
+                    loc = Path(spec)
+                    name = loc.name
+
+                if name in d:
+                    raise ValueError(f'Received duplicate derivative name: {name}')
+
+                d[name] = loc
+            setattr(namespace, self.dest, d)
+
     def _path_exists(path, parser):
         """Ensure a given path exists."""
         if path is None:
@@ -186,12 +203,17 @@ def _slice_time_ref(value, parser):
         'how-do-I-select-only-certain-files-to-be-input-to-fMRIPrep',
     )
     g_bids.add_argument(
-        '--anat-derivatives',
-        action='store',
-        metavar='PATH',
-        type=PathExists,
-        help='Reuse the anatomical derivatives from another NiBabies run or calculated '
-        'with an alternative processing tool (NOT RECOMMENDED).',
+        '-d',
+        '--derivatives',
+        action=DerivToDict,
+        metavar='PACKAGE=PATH',
+        type=str,
+        nargs='+',
+        help=(
+            'Search PATH(s) for pre-computed derivatives. '
+            'These may be provided as named folders '
+            '(e.g., `--derivatives smriprep=/path/to/smriprep`).'
+ ), ) g_bids.add_argument( '--bids-database-dir', @@ -672,13 +694,6 @@ def _slice_time_ref(value, parser): default=45, help='Head radius in mm for framewise displacement calculation.', ) - g_baby.add_argument( - '-d', - '--derivatives', - type=DirNotEmpty, - nargs='+', - help='One or more directory containing pre-computed derivatives.', - ) g_baby.add_argument( '--deriv-filter-file', dest='derivatives_filters', diff --git a/nibabies/config.py b/nibabies/config.py index 4f4a6734..085a5f8c 100644 --- a/nibabies/config.py +++ b/nibabies/config.py @@ -214,15 +214,20 @@ def load(cls, settings, init=True, ignore=None): if k in ignore or v is None: continue if k in cls._paths: - if isinstance(v, (list, tuple)): # Multiple paths + if isinstance(v, list | tuple): # Multiple paths setattr(cls, k, [Path(p).absolute() for p in v]) + elif isinstance(v, dict): + setattr(cls, k, {key: Path(val).absolute() for key, val in v.items()}) else: setattr(cls, k, Path(v).absolute()) elif hasattr(cls, k): setattr(cls, k, v) - if init and hasattr(cls, 'init'): - cls.init() + if init: + try: + cls.init() + except AttributeError: + pass @classmethod def get(cls): @@ -236,8 +241,10 @@ def get(cls): if callable(getattr(cls, k)): continue if k in cls._paths: - if isinstance(v, (list, tuple)): # Multiple paths + if isinstance(v, list | tuple): # Multiple paths v = [str(p) for p in v] + elif isinstance(v, dict): + v = {key: str(val) for key, val in v.items()} else: v = str(v) if isinstance(v, SpatialReferences): @@ -360,8 +367,6 @@ def init(cls): class execution(_Config): """Configure run-level settings.""" - anat_derivatives = None - """A path where anatomical derivatives are found to fast-track *sMRIPrep*.""" bids_dir = None """An existing path to the dataset, which must be BIDS-compliant.""" bids_database_dir = None @@ -376,7 +381,7 @@ class execution(_Config): """Run in sloppy mode (meaning, suboptimal parameters that minimize run-time).""" debug = [] """Debug mode(s).""" - derivatives = None + derivatives = {} """One or more paths where pre-computed derivatives are found.""" derivatives_filters = None """A dictionary of BIDS selection filters""" @@ -492,7 +497,6 @@ def init(cls): database_path=_db_path, reset_database=cls.bids_database_dir is None, indexer=_indexer, - derivatives=cls.derivatives or False, ) cls.bids_database_dir = _db_path cls.layout = cls._layout diff --git a/wrapper/src/nibabies_wrapper/__main__.py b/wrapper/src/nibabies_wrapper/__main__.py index 147a056f..0c416c7c 100755 --- a/wrapper/src/nibabies_wrapper/__main__.py +++ b/wrapper/src/nibabies_wrapper/__main__.py @@ -14,10 +14,10 @@ forget to credit all the authors of service that NiBabies uses (https://fmriprep.readthedocs.io/en/latest/citing.html). """ -import sys import os import re import subprocess +import sys try: from ._version import __version__ @@ -29,39 +29,39 @@ MISSING = """ Image '{}' is missing Would you like to download? 
[Y/n] """ -PKG_PATH = "/opt/conda/envs/nibabies/lib/python3.10/site-packages" +PKG_PATH = '/opt/conda/envs/nibabies/lib/python3.10/site-packages' TF_TEMPLATES = ( - "MNI152Lin", - "MNI152NLin2009cAsym", - "MNI152NLin6Asym", - "MNI152NLin6Sym", - "MNIInfant", - "MNIPediatricAsym", - "NKI", - "OASIS30ANTs", - "PNC", - "UNCInfant", - "fsLR", - "fsaverage", - "fsaverage5", - "fsaverage6", + 'MNI152Lin', + 'MNI152NLin2009cAsym', + 'MNI152NLin6Asym', + 'MNI152NLin6Sym', + 'MNIInfant', + 'MNIPediatricAsym', + 'NKI', + 'OASIS30ANTs', + 'PNC', + 'UNCInfant', + 'fsLR', + 'fsaverage', + 'fsaverage5', + 'fsaverage6', ) -NONSTANDARD_REFERENCES = ("anat", "T1w", "run", "func", "sbref", "fsnative") +NONSTANDARD_REFERENCES = ('anat', 'T1w', 'run', 'func', 'sbref', 'fsnative') # Monkey-patch Py2 subprocess -if not hasattr(subprocess, "DEVNULL"): +if not hasattr(subprocess, 'DEVNULL'): subprocess.DEVNULL = -3 -if not hasattr(subprocess, "run"): +if not hasattr(subprocess, 'run'): # Reimplement minimal functionality for usage in this file def _run(args, stdout=None, stderr=None): from collections import namedtuple - result = namedtuple("CompletedProcess", "stdout stderr returncode") + result = namedtuple('CompletedProcess', 'stdout stderr returncode') devnull = None if subprocess.DEVNULL in (stdout, stderr): - devnull = open(os.devnull, "r+") + devnull = open(os.devnull, 'r+') if stdout == subprocess.DEVNULL: stdout = devnull if stderr == subprocess.DEVNULL: @@ -87,7 +87,7 @@ def _run(args, stdout=None, stderr=None): # The helper class to facilate Docker / Singularity nuiances -class ContainerManager(object): +class ContainerManager: def __init__(self, service, image=None): """ Inputs @@ -97,9 +97,9 @@ def __init__(self, service, image=None): """ self.service = service self.image = image - self.command = [service, "run"] + self.command = [service, 'run'] self.mounts = [] - if service == "docker": + if service == 'docker': self.add_cmd('--rm') elif service == 'singularity': self.add_cmd('--cleanenv') @@ -121,7 +121,7 @@ def add_mount(self, src, dst, read_only=True): dst : absolute container path read_only : disable writing to bound path """ - self.mounts.append("{0}:{1}{2}".format(src, dst, ":ro" if read_only else "")) + self.mounts.append('{0}:{1}{2}'.format(src, dst, ':ro' if read_only else '')) def check_install(self): """Verify that the service is installed and the user has permission to @@ -135,7 +135,7 @@ def check_install(self): """ try: ret = subprocess.run( - [self.service, "version"], + [self.service, 'version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) @@ -145,45 +145,45 @@ def check_install(self): if e.errno == ENOENT: return -1 raise e - if ret.stderr.startswith(b"Cannot connect to the Docker daemon."): + if ret.stderr.startswith(b'Cannot connect to the Docker daemon.'): return 0 return 1 def check_image(self, image): """Check whether image is present on local system""" - if self.service == "docker": - ret = subprocess.run(["docker", "images", "-q", image], stdout=subprocess.PIPE) + if self.service == 'docker': + ret = subprocess.run(['docker', 'images', '-q', image], stdout=subprocess.PIPE) return bool(ret.stdout) - elif self.service == "singularity": + elif self.service == 'singularity': # check if the image file exists return os.path.exists(os.path.abspath(image)) raise NotImplementedError def check_memory(self, image): """Check total memory from within an image""" - if self.service == "docker": - cmd = ["docker", "run", "--rm", "--entrypoint=free", image, "-m"] - elif self.service == 
"singularity": - cmd = ["singularity", "exec", image, "free", "-m"] + if self.service == 'docker': + cmd = ['docker', 'run', '--rm', '--entrypoint=free', image, '-m'] + elif self.service == 'singularity': + cmd = ['singularity', 'exec', image, 'free', '-m'] ret = subprocess.run(cmd, stdout=subprocess.PIPE) if ret.returncode: return -1 mem = [ line.decode().split()[1] for line in ret.stdout.splitlines() - if line.startswith(b"Mem:") + if line.startswith(b'Mem:') ][0] return int(mem) def set_version(self): if self.service == 'docker': ret = subprocess.run( - ["docker", "version", "--format", "{{.Server.Version}}"], stdout=subprocess.PIPE + ['docker', 'version', '--format', '{{.Server.Version}}'], stdout=subprocess.PIPE ) elif self.service == 'singularity': - ret = subprocess.run(["singularity", "version"], stdout=subprocess.PIPE) - version = ret.stdout.decode("ascii").strip() - version_env = "{}_VERSION_8395080871".format(self.service.upper()) + ret = subprocess.run(['singularity', 'version'], stdout=subprocess.PIPE) + version = ret.stdout.decode('ascii').strip() + version_env = f'{self.service.upper()}_VERSION_8395080871' self.add_envvar((version_env, version)) def add_envvar(self, envtuple): @@ -194,21 +194,21 @@ def add_envvar(self, envtuple): envtuple : tuple in the form of ("ENV_VAR", "value") """ if self.service == 'docker': - env = "=".join(envtuple) - self.add_cmd(["-e", env]) - elif self.service == "singularity": + env = '='.join(envtuple) + self.add_cmd(['-e', env]) + elif self.service == 'singularity': # singularity will transfer over environmental variables # with the prefix: SINGULARITYENV_ envvar, value = envtuple - envvar = "SINGULARITYENV_" + envvar + envvar = 'SINGULARITYENV_' + envvar os.environ[envvar] = value def finalize_container_cmd(self): """Add bindings to final command, and finish with image""" - if self.service == "docker": - mflag = "-v" - elif self.service == "singularity": - mflag = "-B" + if self.service == 'docker': + mflag = '-v' + elif self.service == 'singularity': + mflag = '-B' for mount in self.mounts: self.add_cmd((mflag, mount)) @@ -224,29 +224,29 @@ def _get_posargs(usage): and the docker wrapper (`fmriprep-docker -h`). 
""" posargs = [] - for targ in usage.split("\n")[-3:]: + for targ in usage.split('\n')[-3:]: line = targ.lstrip() - if line.startswith("usage"): + if line.startswith('usage'): continue - if line[0].isalnum() or line[0] == "{": + if line[0].isalnum() or line[0] == '{': posargs.append(line) - elif line[0] == "[" and (line[1].isalnum() or line[1] == "{"): + elif line[0] == '[' and (line[1].isalnum() or line[1] == '{'): posargs.append(line) - return " ".join(posargs) + return ' '.join(posargs) # Matches all flags with up to one nested square bracket - opt_re = re.compile(r"(\[--?[\w-]+(?:[^\[\]]+(?:\[[^\[\]]+\])?)?\])") + opt_re = re.compile(r'(\[--?[\w-]+(?:[^\[\]]+(?:\[[^\[\]]+\])?)?\])') # Matches flag name only - flag_re = re.compile(r"\[--?([\w-]+)[ \]]") + flag_re = re.compile(r'\[--?([\w-]+)[ \]]') # Normalize to Unix-style line breaks - w_help = wrapper_help.rstrip().replace("\r", "") - t_help = target_help.rstrip().replace("\r", "") + w_help = wrapper_help.rstrip().replace('\r', '') + t_help = target_help.rstrip().replace('\r', '') - w_usage, w_details = w_help.split("\n\n", 1) - w_groups = w_details.split("\n\n") - t_usage, t_details = t_help.split("\n\n", 1) - t_groups = t_details.split("\n\n") + w_usage, w_details = w_help.split('\n\n', 1) + w_groups = w_details.split('\n\n') + t_usage, t_details = t_help.split('\n\n', 1) + t_groups = t_details.split('\n\n') w_posargs = _get_posargs(w_usage) t_posargs = _get_posargs(t_usage) @@ -263,34 +263,33 @@ def _get_posargs(usage): # Make sure we're not clobbering options we don't mean to overlap = set(w_flags).intersection(t_flags) expected_overlap = { - "anat-derivatives", - "bids-database-dir", - "bids-filter-file", - "derivatives", - "deriv-filter-file", - "fs-license-file", - "fs-subjects-dir", - "config-file", - "segmentation-atlases-dir", - "h", - "use-plugin", - "version", - "w", + 'bids-database-dir', + 'bids-filter-file', + 'derivatives', + 'deriv-filter-file', + 'fs-license-file', + 'fs-subjects-dir', + 'config-file', + 'segmentation-atlases-dir', + 'h', + 'use-plugin', + 'version', + 'w', } - assert overlap == expected_overlap, "Clobbering options: {}".format( - ", ".join(overlap - expected_overlap) + assert overlap == expected_overlap, 'Clobbering options: {}'.format( + ', '.join(overlap - expected_overlap) ) sections = [] # Construct usage - start = w_usage[: w_usage.index(" [")] - indent = " " * len(start) + start = w_usage[: w_usage.index(' [')] + indent = ' ' * len(start) new_options = sum( ( w_options[:2], - [opt for opt, flag in zip(t_options, t_flags) if flag not in overlap], + [opt for opt, flag in zip(t_options, t_flags, strict=False) if flag not in overlap], w_options[2:], ), [], @@ -299,7 +298,7 @@ def _get_posargs(usage): length = 0 opt_lines = [start] for opt in new_options: - opt = " " + opt + opt = ' ' + opt olen = len(opt) if length + olen <= opt_line_length: opt_lines[-1] += opt @@ -307,22 +306,22 @@ def _get_posargs(usage): else: opt_lines.append(indent + opt) length = olen - opt_lines.append(indent + " " + t_posargs) - sections.append("\n".join(opt_lines)) + opt_lines.append(indent + ' ' + t_posargs) + sections.append('\n'.join(opt_lines)) # Use target description and positional args sections.extend(t_groups[:2]) - for line in t_groups[2].split("\n")[1:]: - content = line.lstrip().split(",", 1)[0] + for line in t_groups[2].split('\n')[1:]: + content = line.lstrip().split(',', 1)[0] if content[1:] not in overlap: - w_groups[2] += "\n" + line + w_groups[2] += '\n' + line sections.append(w_groups[2]) # All 
remaining sections, show target then wrapper (skipping duplicates) sections.extend(t_groups[3:] + w_groups[6:]) - return "\n\n".join(sections) + return '\n\n'.join(sections) def is_in_directory(filepath, directory): @@ -338,7 +337,7 @@ class ToDict(argparse.Action): def __call__(self, parser, namespace, values, option_string=None): d = {} for kv in values: - k, v = kv.split("=") + k, v = kv.split('=') d[k] = os.path.abspath(v) setattr(namespace, self.dest, d) @@ -346,7 +345,7 @@ def _is_file(path, parser): """Ensure a given path exists and it is a file.""" path = os.path.abspath(path) if not os.path.isfile(path): - raise parser.error("Path should point to a file (or symlink of file): <%s>." % path) + raise parser.error('Path should point to a file (or symlink of file): <%s>.' % path) return path parser = argparse.ArgumentParser( @@ -358,47 +357,47 @@ def _is_file(path, parser): IsFile = partial(_is_file, parser=parser) # require users to specify container service - parser.add_argument("service", nargs="?", choices=("docker", "singularity")) + parser.add_argument('service', nargs='?', choices=('docker', 'singularity')) # Standard NiBabies arguments - parser.add_argument("bids_dir", nargs="?", type=os.path.abspath, default="") - parser.add_argument("output_dir", nargs="?", type=os.path.abspath, default="") + parser.add_argument('bids_dir', nargs='?', type=os.path.abspath, default='') + parser.add_argument('output_dir', nargs='?', type=os.path.abspath, default='') parser.add_argument( - "analysis_level", nargs="?", choices=["participant"], default="participant" + 'analysis_level', nargs='?', choices=['participant'], default='participant' ) parser.add_argument( - "-h", "--help", action="store_true", help="show this help message and exit" + '-h', '--help', action='store_true', help='show this help message and exit' ) parser.add_argument( - "--version", action="store_true", help="show program's version number and exit" + '--version', action='store_true', help="show program's version number and exit" ) # Allow alternative images (semi-developer) parser.add_argument( - "-i", - "--image", - metavar="IMG", + '-i', + '--image', + metavar='IMG', type=str, - default="nipreps/nibabies:{}".format(__version__), - help="image name", + default=f'nipreps/nibabies:{__version__}', + help='image name', ) # Options for mapping files and directories into container # Update `expected_overlap` variable in merge_help() when adding to this g_wrap = parser.add_argument_group( - "Wrapper options", - "Standard options that require mapping files into the container", + 'Wrapper options', + 'Standard options that require mapping files into the container', ) g_wrap.add_argument( - "-w", - "--work-dir", - action="store", + '-w', + '--work-dir', + action='store', type=os.path.abspath, - help="path where intermediate results should be stored", + help='path where intermediate results should be stored', ) g_wrap.add_argument( - "--output-spaces", - nargs="*", + '--output-spaces', + nargs='*', help="""\ Standard and non-standard spaces to resample anatomical and functional images to. 
\ Standard spaces may be specified by the form \ @@ -410,125 +409,119 @@ def _is_file(path, parser): Important to note, the ``res-*`` modifier does not define the resolution used for \ the spatial normalization.""" % ( - ", ".join('"%s"' % s for s in TF_TEMPLATES), - ", ".join(NONSTANDARD_REFERENCES), + ', '.join('"%s"' % s for s in TF_TEMPLATES), + ', '.join(NONSTANDARD_REFERENCES), ), ) g_wrap.add_argument( - "--fs-license-file", - metavar="PATH", + '--fs-license-file', + metavar='PATH', type=IsFile, - default=os.getenv("FS_LICENSE", None), - help="Path to FreeSurfer license key file. Get it (for free) by registering" - " at https://surfer.nmr.mgh.harvard.edu/registration.html", - ) - g_wrap.add_argument( - "--fs-subjects-dir", - metavar="PATH", - type=os.path.abspath, - help="Path to existing Infant FreeSurfer subjects directory to reuse. ", + default=os.getenv('FS_LICENSE', None), + help='Path to FreeSurfer license key file. Get it (for free) by registering' + ' at https://surfer.nmr.mgh.harvard.edu/registration.html', ) g_wrap.add_argument( - "--config-file", - metavar="PATH", + '--fs-subjects-dir', + metavar='PATH', type=os.path.abspath, - help="Use pre-generated configuration file. Values in file will be overridden " - "by command-line arguments.", + help='Path to existing Infant FreeSurfer subjects directory to reuse. ', ) g_wrap.add_argument( - "--anat-derivatives", - metavar="PATH", + '--config-file', + metavar='PATH', type=os.path.abspath, - help="Path to existing NiBabies anatomical derivatives to fasttrack " - "the anatomical workflow.", + help='Use pre-generated configuration file. Values in file will be overridden ' + 'by command-line arguments.', ) g_wrap.add_argument( - "--use-plugin", - metavar="PATH", - action="store", + '--use-plugin', + metavar='PATH', + action='store', default=None, type=os.path.abspath, - help="nipype plugin configuration file", + help='nipype plugin configuration file', ) g_wrap.add_argument( - "--bids-database-dir", - metavar="PATH", + '--bids-database-dir', + metavar='PATH', type=os.path.abspath, - help="Path to an existing PyBIDS database folder, for faster indexing " - "(especially useful for large datasets).", + help='Path to an existing PyBIDS database folder, for faster indexing ' + '(especially useful for large datasets).', ) g_wrap.add_argument( - "--segmentation-atlases-dir", - metavar="PATH", + '--segmentation-atlases-dir', + metavar='PATH', type=os.path.abspath, - help="Directory containing prelabeled segmentations to use for JointLabelFusion.", + help='Directory containing prelabeled segmentations to use for JointLabelFusion.', ) g_wrap.add_argument( - "--derivatives", - nargs="+", - metavar="PATH", - type=os.path.abspath, - help="One or more directory containing pre-computed derivatives", + '-d', + '--derivatives', + nargs='+', + metavar='PATH', + action=ToDict, + help='Search PATH(s) for pre-computed derivatives.', ) g_wrap.add_argument( - "--bids-filter-file", - metavar="PATH", + '--bids-filter-file', + metavar='PATH', type=os.path.abspath, - help="Filter file", + help='Filter file', ) g_wrap.add_argument( - "--deriv-filter-file", - metavar="PATH", + '--deriv-filter-file', + metavar='PATH', type=os.path.abspath, - help="Filter file", + help='Filter file', ) # Developer patch/shell options g_dev = parser.add_argument_group( - "Developer options", "Tools for testing and debugging nibabies" + 'Developer options', 'Tools for testing and debugging nibabies' ) g_dev.add_argument( - "--patch", - nargs="+", - metavar="PACKAGE=PATH", + 
'--patch', + nargs='+', + metavar='PACKAGE=PATH', action=ToDict, - help="local repository to use within container", + help='local repository to use within container', ) g_dev.add_argument( - "--shell", - action="store_true", - help="open shell in image instead of running nibabies", + '--shell', + action='store_true', + help='open shell in image instead of running nibabies', ) g_dev.add_argument( - "--config", - metavar="PATH", - action="store", + '--config', + metavar='PATH', + action='store', type=os.path.abspath, - help="Use custom nipype.cfg file", + help='Use custom nipype.cfg file', ) g_dev.add_argument( - "-e", - "--env", - action="append", + '-e', + '--env', + action='append', nargs=2, - metavar=("ENV_VAR", "value"), - help="Set custom environment variable within container", + metavar=('ENV_VAR', 'value'), + help='Set custom environment variable within container', ) g_dev.add_argument( - "-u", - "--user", - action="store", - help="Run container as a given user/uid. Additionally, group/gid can be" - "assigned, (i.e., --user :)", + '-u', + '--user', + action='store', + help='Run container as a given user/uid. Additionally, group/gid can be' + 'assigned, (i.e., --user :)', ) g_dev.add_argument( - "--network", - action="store", - help="Run container with a different network driver " + '--network', + action='store', + help='Run container with a different network driver ' '("none" to simulate no internet connection)', ) - g_dev.add_argument("--no-tty", action="store_true", help="Run docker without TTY flag -it") + g_dev.add_argument('--no-tty', action='store_true', help='Run docker without TTY flag -it') return parser @@ -541,7 +534,7 @@ def main(): opts, unknown_args = parser.parse_known_args() if opts.version: - print("nibabies wrapper {!s}".format(__version__)) + print(f'nibabies wrapper {__version__!s}') return # Set help if no directories set @@ -554,7 +547,7 @@ def main(): if check < 1: if check == -1: print( - "nibabies: Could not find %s command... Is it installed?" % opts.service, + 'nibabies: Could not find %s command... Is it installed?' % opts.service, ) else: print( @@ -563,150 +556,148 @@ def main(): return 1 if not container.check_image(opts.image): - resp = "Y" - if opts.service == "singularity": - print("Singularity image must already exist locally.") + resp = 'Y' + if opts.service == 'singularity': + print('Singularity image must already exist locally.') return 1 try: resp = input(MISSING.format(opts.image)) except KeyboardInterrupt: print() return 1 - if resp not in ("y", "Y", ""): + if resp not in ('y', 'Y', ''): return 0 - print("Downloading. This may take a while...") + print('Downloading. This may take a while...') # Warn on low memory allocation mem_total = container.check_memory(opts.image) if mem_total == -1: print( - "Could not detect memory capacity of Docker container.\n" - "Do you have permission to run docker?" + 'Could not detect memory capacity of Docker container.\n' + 'Do you have permission to run docker?' ) return 1 - if "--reports-only" not in unknown_args and mem_total < 8000: + if '--reports-only' not in unknown_args and mem_total < 8000: print( - "Warning: <8GB of RAM is available within your environment.\n" - "Some parts of nibabies may fail to complete." + 'Warning: <8GB of RAM is available within your environment.\n' + 'Some parts of nibabies may fail to complete.' ) - if "--mem_mb" not in unknown_args: - resp = "N" + if '--mem_mb' not in unknown_args: + resp = 'N' try: - resp = input("Continue anyway? [y/N]") + resp = input('Continue anyway? 
[y/N]') except KeyboardInterrupt: print() return 1 - if resp not in ("y", "Y", ""): + if resp not in ('y', 'Y', ''): return 0 container.set_version() - if opts.service == "docker": + if opts.service == 'docker': if not opts.no_tty: - container.add_cmd("-it") + container.add_cmd('-it') if opts.user: - container.add_cmd(("-u", opts.user)) + container.add_cmd(('-u', opts.user)) if opts.network: - container.add_cmd("--network=%s" % opts.network) + container.add_cmd('--network=%s' % opts.network) # Patch working repositories into installed package directories if opts.patch: for pkg, repo_path in opts.patch.items(): - container.add_mount(repo_path, "{}/{}".format(PKG_PATH, pkg)) + container.add_mount(repo_path, f'{PKG_PATH}/{pkg}') if opts.env: for envvar in opts.env: container.add_envvar(tuple(envvar)) if opts.fs_license_file: - container.add_mount(opts.fs_license_file, "/opt/freesurfer/license.txt") + container.add_mount(opts.fs_license_file, '/opt/freesurfer/license.txt') main_args = [] if opts.bids_dir: - container.add_mount(opts.bids_dir, "/data") - main_args.append("/data") + container.add_mount(opts.bids_dir, '/data') + main_args.append('/data') if opts.output_dir: if not os.path.exists(opts.output_dir): # create it before the container does os.makedirs(opts.output_dir) - container.add_mount(opts.output_dir, "/out", read_only=False) - main_args.append("/out") + container.add_mount(opts.output_dir, '/out', read_only=False) + main_args.append('/out') main_args.append(opts.analysis_level) if opts.fs_subjects_dir: - container.add_mount(opts.fs_subjects_dir, "/opt/subjects", read_only=False) - unknown_args.extend(["--fs-subjects-dir", "/opt/subjects"]) + container.add_mount(opts.fs_subjects_dir, '/opt/subjects', read_only=False) + unknown_args.extend(['--fs-subjects-dir', '/opt/subjects']) if opts.config_file: - container.add_mount(opts.config_file, "/tmp/config.toml", read_only=False) - unknown_args.extend(["--config-file", "/tmp/config.toml"]) + container.add_mount(opts.config_file, '/tmp/config.toml', read_only=False) + unknown_args.extend(['--config-file', '/tmp/config.toml']) - if opts.anat_derivatives: - container.add_mount(opts.anat_derivatives, "/opt/smriprep/subjects", read_only=False) - unknown_args.extend(["--anat-derivatives", "/opt/smriprep/subjects"]) if opts.segmentation_atlases_dir: - container.add_mount(opts.segmentation_atlases_dir, "/opt/segmentations") - unknown_args.extend(["--segmentation-atlases-dir", "/opt/segmentations"]) + container.add_mount(opts.segmentation_atlases_dir, '/opt/segmentations') + unknown_args.extend(['--segmentation-atlases-dir', '/opt/segmentations']) if opts.bids_filter_file: - container.add_mount(opts.bids_filter_file, "/opt/bids_filters.json") - unknown_args.extend(["--bids-filter-file", "/opt/bids_filters.json"]) + container.add_mount(opts.bids_filter_file, '/opt/bids_filters.json') + unknown_args.extend(['--bids-filter-file', '/opt/bids_filters.json']) if opts.deriv_filter_file: - container.add_mount(opts.deriv_filter_file, "/opt/derivative_filters.json") - unknown_args.extend(["--deriv-filter-file", "/opt/derivative_filters.json"]) + container.add_mount(opts.deriv_filter_file, '/opt/derivative_filters.json') + unknown_args.extend(['--deriv-filter-file', '/opt/derivative_filters.json']) + # Patch derivatives for searching if opts.derivatives: - derivative_args = ["--derivatives"] - for derivative in opts.derivatives: - derivative_target = "/opt/derivatives/%s" % os.path.basename(derivative) - container.add_mount(derivative, derivative_target, 
read_only=False) - derivative_args.append(derivative_target) - unknown_args.extend(derivative_args) + deriv_args = ['--derivatives'] + for deriv, deriv_path in opts.derivatives.items(): + deriv_target = '/deriv/%s' % deriv + container.add_mount(deriv_path, deriv_target) + deriv_args.append('='.join([deriv, deriv_target])) + unknown_args.extend(deriv_args) # Check that work_dir is not a child of bids_dir if opts.work_dir and opts.bids_dir: if is_in_directory(opts.work_dir, opts.bids_dir): print( - "The selected working directory is a subdirectory of the input BIDS folder. " - "Please modify the output path." + 'The selected working directory is a subdirectory of the input BIDS folder. ' + 'Please modify the output path.' ) return 1 if not os.path.exists(opts.work_dir): # create it before the container does os.makedirs(opts.work_dir) - container.add_mount(opts.work_dir, "/scratch", read_only=False) - unknown_args.extend(["-w", "/scratch"]) + container.add_mount(opts.work_dir, '/scratch', read_only=False) + unknown_args.extend(['-w', '/scratch']) if opts.config: - container.add_mount("opts.config", "/home/fmriprep/.nipype/nipype.cfg") + container.add_mount('opts.config', '/home/fmriprep/.nipype/nipype.cfg') if opts.use_plugin: - container.add_mount(opts.use_plugin, "/tmp/plugin.yml") - unknown_args.extend(["--use-plugin", "/tmp/plugin.yml"]) + container.add_mount(opts.use_plugin, '/tmp/plugin.yml') + unknown_args.extend(['--use-plugin', '/tmp/plugin.yml']) if opts.bids_database_dir: - container.add_mount(opts.bids_database_dir, "/tmp/bids_db", read_only=False) - unknown_args.extend(["--bids-database-dir", "/tmp/bids_db"]) + container.add_mount(opts.bids_database_dir, '/tmp/bids_db', read_only=False) + unknown_args.extend(['--bids-database-dir', '/tmp/bids_db']) if opts.output_spaces: spaces = [] for space in opts.output_spaces: - if space.split(":")[0] not in (TF_TEMPLATES + NONSTANDARD_REFERENCES): + if space.split(':')[0] not in (TF_TEMPLATES + NONSTANDARD_REFERENCES): tpl = os.path.basename(space) - if not tpl.startswith("tpl-"): - raise RuntimeError("Custom template %s requires a `tpl-` prefix" % tpl) - target = "/home/fmriprep/.cache/templateflow/" + tpl + if not tpl.startswith('tpl-'): + raise RuntimeError('Custom template %s requires a `tpl-` prefix' % tpl) + target = '/home/fmriprep/.cache/templateflow/' + tpl container.add_mount(os.path.abspath(space), target) spaces.append(tpl[4:]) else: spaces.append(space) - unknown_args.extend(["--output-spaces"] + spaces) + unknown_args.extend(['--output-spaces'] + spaces) if opts.shell: - if opts.service == "docker": - container.add_cmd("--entrypoint=bash") - elif opts.service == "singularity": + if opts.service == 'docker': + container.add_cmd('--entrypoint=bash') + elif opts.service == 'singularity': # replace default "run" command - container.command[1] = "shell" + container.command[1] = 'shell' container.image = opts.image # after this, all call to ``container.add_cmd`` @@ -730,15 +721,16 @@ def main(): container.add_cmd(main_args) container.add_cmd(unknown_args) - print("RUNNING: " + " ".join(container.command)) + print('RUNNING: ' + ' '.join(container.command)) ret = subprocess.run(container.command) if ret.returncode: - print("nibabies: Please report errors to {}".format(__bugreports__)) + print(f'nibabies: Please report errors to {__bugreports__}') return ret.returncode -if __name__ == "__main__": +if __name__ == '__main__': if '__main__.py' in sys.argv[0]: from . 
import __name__ as module + sys.argv[0] = '%s -m %s' % (sys.executable, module) sys.exit(main()) From 40eb1de47b38e7d2562aa3007743fad591b7c006 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Fri, 26 Apr 2024 19:25:04 -0400 Subject: [PATCH 048/142] FIX: Fetch other anatomical files --- nibabies/data/io_spec_anat.json | 14 ++++++++++++-- nibabies/workflows/anatomical/fit.py | 11 ++++++----- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/nibabies/data/io_spec_anat.json b/nibabies/data/io_spec_anat.json index 8bd83345..253812ef 100644 --- a/nibabies/data/io_spec_anat.json +++ b/nibabies/data/io_spec_anat.json @@ -41,7 +41,7 @@ ".nii" ] }, - "dseg": { + "anat_dseg": { "datatype": "anat", "space": null, "desc": null, @@ -51,7 +51,7 @@ ".nii" ] }, - "tpms": { + "anat_tpms": { "datatype": "anat", "space": null, "label": [ @@ -64,6 +64,16 @@ ".nii.gz", ".nii" ] + }, + "anat_aseg": { + "datatype": "anat", + "space": null, + "desc": "aseg", + "suffix": "dseg", + "extension": [ + ".nii.gz", + ".nii" + ] } }, "coreg": { diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index d722894c..f9eb0f03 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -690,9 +690,10 @@ def init_infant_anat_fit_wf( ]) # fmt:skip # Stage 4: Segmentation - anat_dseg = getattr(precomputed, f'{anat}_dseg', None) - anat_tpms = getattr(precomputed, f'{anat}_tpms', None) - anat_aseg = getattr(precomputed, f'{anat}_aseg', False) + anat_dseg = precomputed.get('anat_dseg') + anat_tpms = precomputed.get('anat_tpms') + anat_aseg = precomputed.get('anat_aseg') + seg_method = 'jlf' if config.execution.segmentation_atlases_dir else 'fast' if not (anat_dseg and anat_tpms): @@ -827,10 +828,10 @@ def init_infant_anat_fit_wf( workflow.connect([ (t2w_buffer, surface_recon_wf, [ ('t2w_preproc', 'inputnode.t2w'), - ('t2w_mask', 'inputnode.t2w_mask'), + ('t2w_mask', 'inputnode.in_mask'), ]), (anat_buffer, surface_recon_wf, [ - ('anat_aseg', 'inputnode.t2w_aseg'), + ('anat_aseg', 'inputnode.in_aseg'), ]), ]) # fmt:skip From 27e5c905380f4efdde6f6ff7dba4d61fa5cd46b8 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Fri, 26 Apr 2024 19:26:58 -0400 Subject: [PATCH 049/142] more workflow fixes --- nibabies/cli/workflow.py | 84 +++++++++++------------ nibabies/workflows/anatomical/surfaces.py | 7 +- nibabies/workflows/base.py | 3 +- 3 files changed, 49 insertions(+), 45 deletions(-) diff --git a/nibabies/cli/workflow.py b/nibabies/cli/workflow.py index d8bb2e77..111ed2ef 100644 --- a/nibabies/cli/workflow.py +++ b/nibabies/cli/workflow.py @@ -27,15 +27,15 @@ def build_workflow(config_file): nibabies_dir = config.execution.nibabies_dir version = config.environment.version - retval = {"return_code": 1, "workflow": None} + retval = {'return_code': 1, 'workflow': None} # warn if older results exist: check for dataset_description.json in output folder - msg = check_pipeline_version(version, nibabies_dir / "dataset_description.json") + msg = check_pipeline_version(version, nibabies_dir / 'dataset_description.json') if msg is not None: build_logger.warning(msg) # Please note this is the input folder's dataset_description.json - dset_desc_path = config.execution.bids_dir / "dataset_description.json" + dset_desc_path = config.execution.bids_dir / 'dataset_description.json' if dset_desc_path.exists(): from hashlib import sha256 @@ -46,10 +46,10 @@ def build_workflow(config_file): if config.execution.reports_only: build_logger.log( 25, - "Running 
--reports-only on participants %s", - ", ".join(config.execution.unique_labels), + 'Running --reports-only on participants %s', + ', '.join(config.execution.unique_labels), ) - retval["return_code"] = generate_reports( + retval['return_code'] = generate_reports( config.execution.unique_labels, nibabies_dir, config.execution.run_uuid, @@ -64,16 +64,16 @@ def build_workflow(config_file): * Run identifier: {config.execution.run_uuid}. * Output spaces: {config.execution.output_spaces or 'MNIInfant'}.""" - if config.execution.anat_derivatives: + if config.execution.derivatives: init_msg += f""" - * Anatomical derivatives: {config.execution.anat_derivatives}.""" + * Derivatives: {config.execution.derivatives}.""" if config.execution.fs_subjects_dir: init_msg += f""" * Pre-run FreeSurfer's SUBJECTS_DIR: {config.execution.fs_subjects_dir}.""" build_logger.log(25, init_msg) - retval["workflow"] = init_nibabies_wf(config.execution.unique_labels) + retval['workflow'] = init_nibabies_wf(config.execution.unique_labels) # Check for FS license after building the workflow if not check_valid_fs_license(): @@ -84,25 +84,25 @@ def build_workflow(config_file): 2) ``$FS_LICENSE`` environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. Get it \ (for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html""" ) - retval["return_code"] = 126 # 126 == Command invoked cannot execute. + retval['return_code'] = 126 # 126 == Command invoked cannot execute. return retval # Check workflow for missing commands - missing = check_deps(retval["workflow"]) + missing = check_deps(retval['workflow']) if missing: build_logger.critical( - "Cannot run nibabies. Missing dependencies:%s", - "\n\t* ".join([""] + [f"{cmd} (Interface: {iface})" for iface, cmd in missing]), + 'Cannot run nibabies. Missing dependencies:%s', + '\n\t* '.join([''] + [f'{cmd} (Interface: {iface})' for iface, cmd in missing]), ) - retval["return_code"] = 127 # 127 == command not found. + retval['return_code'] = 127 # 127 == command not found. return retval # config.to_filename(config_file) build_logger.info( - "NiBabies workflow graph with %d nodes built successfully.", - len(retval["workflow"]._get_all_nodes()), + 'NiBabies workflow graph with %d nodes built successfully.', + len(retval['workflow']._get_all_nodes()), ) - retval["return_code"] = 0 + retval['return_code'] = 0 return retval @@ -110,9 +110,9 @@ def build_boilerplate(workflow): """Write boilerplate in an isolated process.""" from .. 
import config - logs_path = config.execution.nibabies_dir / "logs" + logs_path = config.execution.nibabies_dir / 'logs' boilerplate = workflow.visit_desc() - citation_files = {ext: logs_path / f"CITATION.{ext}" for ext in ("bib", "tex", "md", "html")} + citation_files = {ext: logs_path / f'CITATION.{ext}' for ext in ('bib', 'tex', 'md', 'html')} if boilerplate: # To please git-annex users and also to guarantee consistency @@ -124,51 +124,51 @@ def build_boilerplate(workflow): except FileNotFoundError: pass - citation_files["md"].write_text(boilerplate) + citation_files['md'].write_text(boilerplate) - if not config.execution.md_only_boilerplate and citation_files["md"].exists(): + if not config.execution.md_only_boilerplate and citation_files['md'].exists(): from shutil import copyfile from subprocess import CalledProcessError, TimeoutExpired, check_call from nibabies.data import load as load_data - bib = load_data("boilerplate.bib") + bib = load_data('boilerplate.bib') # Generate HTML file resolving citations cmd = [ - "pandoc", - "-s", - "--bibliography", + 'pandoc', + '-s', + '--bibliography', bib, - "--citeproc", - "--metadata", + '--citeproc', + '--metadata', 'pagetitle="nibabies citation boilerplate"', - str(citation_files["md"]), - "-o", - str(citation_files["html"]), + str(citation_files['md']), + '-o', + str(citation_files['html']), ] - config.loggers.cli.info("Generating an HTML version of the citation boilerplate...") + config.loggers.cli.info('Generating an HTML version of the citation boilerplate...') try: check_call(cmd, timeout=10) except (FileNotFoundError, CalledProcessError, TimeoutExpired): - config.loggers.cli.warning("Could not generate CITATION.html file:\n%s", " ".join(cmd)) + config.loggers.cli.warning('Could not generate CITATION.html file:\n%s', ' '.join(cmd)) # Generate LaTex file resolving citations cmd = [ - "pandoc", - "-s", - "--bibliography", + 'pandoc', + '-s', + '--bibliography', bib, - "--natbib", - str(citation_files["md"]), - "-o", - str(citation_files["tex"]), + '--natbib', + str(citation_files['md']), + '-o', + str(citation_files['tex']), ] - config.loggers.cli.info("Generating a LaTeX version of the citation boilerplate...") + config.loggers.cli.info('Generating a LaTeX version of the citation boilerplate...') try: check_call(cmd, timeout=10) except (FileNotFoundError, CalledProcessError, TimeoutExpired): - config.loggers.cli.warning("Could not generate CITATION.tex file:\n%s", " ".join(cmd)) + config.loggers.cli.warning('Could not generate CITATION.tex file:\n%s', ' '.join(cmd)) else: - copyfile(bib, citation_files["bib"]) + copyfile(bib, citation_files['bib']) diff --git a/nibabies/workflows/anatomical/surfaces.py b/nibabies/workflows/anatomical/surfaces.py index b4f46014..bd56af8c 100644 --- a/nibabies/workflows/anatomical/surfaces.py +++ b/nibabies/workflows/anatomical/surfaces.py @@ -355,8 +355,11 @@ def init_midthickness_wf(*, omp_nthreads: int, name: str = 'make_midthickness_wf """ workflow = pe.Workflow(name=name) - inputnode = niu.IdentityInterface(fields=['white', 'graymid'], name='inputnode') - outputnode = niu.IdentityInterface(fields=['subject_id', 'subjects_dir'], name='outputnode') + inputnode = pe.Node(niu.IdentityInterface(fields=['white', 'graymid']), name='inputnode') + outputnode = pe.Node( + niu.IdentityInterface(fields=['subject_id', 'subjects_dir']), + name='outputnode', + ) midthickness = pe.MapNode( MakeMidthickness(thickness=True, distance=0.5, out_name='midthickness'), diff --git a/nibabies/workflows/base.py 
b/nibabies/workflows/base.py index d069f0ef..abc48ca3 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -284,6 +284,7 @@ def init_single_subject_wf( # stacklevel=1, # ) + recon_method = config.workflow.surface_recon_method msm_sulc = False anatomical_cache = {} @@ -304,7 +305,7 @@ def init_single_subject_wf( # Determine some session level options here, as we should have # all the required information - if config.workflow.surface_recon_method == 'auto': + if recon_method == 'auto': if age <= 8: recon_method = 'mcribs' elif age <= 24: From c74ca502473329d520ed315a3c042c379695dd7e Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Fri, 26 Apr 2024 19:27:50 -0400 Subject: [PATCH 050/142] DOCKER: Debug build --- Dockerfile | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 30725883..c534d40e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -23,7 +23,7 @@ # SOFTWARE. # Ubuntu 22.04 LTS - Jammy -ARG BASE_IMAGE=ubuntu:jammy-20230605 +ARG BASE_IMAGE=ubuntu:jammy-20240405 # NiBabies wheel FROM python:slim AS src diff --git a/requirements.txt b/requirements.txt index d5f6140a..75a443fc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -390,7 +390,7 @@ tqdm==4.65.0 # datalad # osfclient # templateflow -traits=6.3.2 +traits==6.3.2 # via # nipype # niworkflows From f7178884f309e5e3381e5e0594b0d9dfe18916b0 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Sat, 27 Apr 2024 23:06:33 -0400 Subject: [PATCH 051/142] DKR: Add step to install traits, /src/nibabies->/src --- Dockerfile | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index c534d40e..46911265 100644 --- a/Dockerfile +++ b/Dockerfile @@ -30,8 +30,8 @@ FROM python:slim AS src RUN pip install build RUN apt-get update && \ apt-get install -y --no-install-recommends git -COPY . /src/nibabies -RUN python -m build /src/nibabies +COPY . /src +RUN python -m build /src # Older Python to support legacy MCRIBS FROM python:3.6.15-slim as pyenv @@ -90,6 +90,13 @@ RUN mkdir /opt/workbench && \ # Micromamba FROM downloader as micromamba + +# Install a C compiler to build extensions when needed. +# traits<6.4 wheels are not available for Python 3.11+, but build easily. +RUN apt-get update && \ + apt-get install -y --no-install-recommends build-essential && \ + apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* + WORKDIR / # Bump the date to current to force update micromamba RUN echo "2024.04.25" && curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba @@ -257,7 +264,7 @@ RUN ${CONDA_PYTHON} -m pip install --no-cache-dir --upgrade templateflow && \ find $HOME/.cache/templateflow -type f -exec chmod go=u {} + # Install pre-built wheel -COPY --from=src /src/nibabies/dist/*.whl . +COPY --from=src /src/dist/*.whl . 
RUN ${CONDA_PYTHON} -m pip install --no-cache-dir $( ls *.whl )[telemetry,test] # Facilitate Apptainer use From dcfcfcdae4bd7e4e11265abbc6f295d5d5fa24b4 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Sat, 27 Apr 2024 23:54:11 -0400 Subject: [PATCH 052/142] MAINT: Update new --patch path --- wrapper/src/nibabies_wrapper/__main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wrapper/src/nibabies_wrapper/__main__.py b/wrapper/src/nibabies_wrapper/__main__.py index 0c416c7c..b1056e0e 100755 --- a/wrapper/src/nibabies_wrapper/__main__.py +++ b/wrapper/src/nibabies_wrapper/__main__.py @@ -29,7 +29,7 @@ MISSING = """ Image '{}' is missing Would you like to download? [Y/n] """ -PKG_PATH = '/opt/conda/envs/nibabies/lib/python3.10/site-packages' +PKG_PATH = '/opt/conda/envs/nibabies/lib/python3.11/site-packages' TF_TEMPLATES = ( 'MNI152Lin', 'MNI152NLin2009cAsym', From 34834988a4ed31f8e0232a3ccd3772d606764d42 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Sat, 27 Apr 2024 23:55:51 -0400 Subject: [PATCH 053/142] FIX: Connections --- nibabies/workflows/anatomical/fit.py | 41 ++++++++++++----------- nibabies/workflows/anatomical/surfaces.py | 4 +-- 2 files changed, 23 insertions(+), 22 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index f9eb0f03..042cad15 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -148,6 +148,8 @@ def init_infant_anat_fit_wf( 't2w_mask', 't2w_valid_list', # Anat specific + 'anat_preproc', + 'anat_mask', 'anat_dseg', 'anat_tpms', 'anat2std_xfm', @@ -783,7 +785,7 @@ def init_infant_anat_fit_wf( (anat_buffer, register_template_wf, [(f'{anat}_preproc', 'inputnode.moving_image')]), (refined_buffer, register_template_wf, [(f'{anat}_mask', 'inputnode.moving_mask')]), (sourcefile_buffer, ds_template_registration_wf, [ - (f'{anat}_source_files', 'inputnode.source_files') + ('anat_source_files', 'inputnode.source_files') ]), (register_template_wf, ds_template_registration_wf, [ ('outputnode.template', 'inputnode.template'), @@ -998,11 +1000,7 @@ def init_infant_anat_fit_wf( LOGGER.info(f'ANAT Stage 8: Creating GIFTI surfaces for {surfs + spheres}') if surfs: gifti_surfaces_wf = init_gifti_surfaces_wf(surfaces=surfs) - ds_surfaces_wf = init_ds_surfaces_wf( - bids_root=bids_root, - output_dir=output_dir, - surfaces=surfs, - ) + ds_surfaces_wf = init_ds_surfaces_wf(output_dir=output_dir, surfaces=surfs) workflow.connect([ (surface_recon_wf, gifti_surfaces_wf, [ @@ -1015,7 +1013,7 @@ def init_infant_anat_fit_wf( (gifti_surfaces_wf, surfaces_buffer, [ (f'outputnode.{surf}', surf) for surf in surfs ]), - (sourcefile_buffer, ds_surfaces_wf, [('source_files', 'inputnode.source_files')]), + (sourcefile_buffer, ds_surfaces_wf, [('anat_source_files', 'inputnode.source_files')]), (gifti_surfaces_wf, ds_surfaces_wf, [ (f'outputnode.{surf}', f'inputnode.{surf}') for surf in surfs ]), @@ -1025,7 +1023,6 @@ def init_infant_anat_fit_wf( surfaces=spheres, to_scanner=False, name='gifti_spheres_wf' ) ds_spheres_wf = init_ds_surfaces_wf( - bids_root=bids_root, output_dir=output_dir, surfaces=spheres, name='ds_spheres_wf', @@ -1040,7 +1037,7 @@ def init_infant_anat_fit_wf( (gifti_spheres_wf, surfaces_buffer, [ (f'outputnode.{sphere}', sphere) for sphere in spheres ]), - (sourcefile_buffer, ds_spheres_wf, [('source_files', 'inputnode.source_files')]), + (sourcefile_buffer, ds_spheres_wf, [('anat_source_files', 'inputnode.source_files')]), (gifti_spheres_wf, ds_spheres_wf, 
[ (f'outputnode.{sphere}', f'inputnode.{sphere}') for sphere in spheres ]), @@ -1064,7 +1061,7 @@ def init_infant_anat_fit_wf( (gifti_morph_wf, surfaces_buffer, [ (f'outputnode.{metric}', metric) for metric in metrics ]), - (sourcefile_buffer, ds_morph_wf, [('source_files', 'inputnode.source_files')]), + (sourcefile_buffer, ds_morph_wf, [('anat_source_files', 'inputnode.source_files')]), (gifti_morph_wf, ds_morph_wf, [ (f'outputnode.{metric}', f'inputnode.{metric}') for metric in metrics ]), @@ -1088,7 +1085,9 @@ def init_infant_anat_fit_wf( ('white', 'inputnode.white'), ('pial', 'inputnode.pial'), ]), - (sourcefile_buffer, ds_ribbon_mask_wf, [('source_files', 'inputnode.source_files')]), + (sourcefile_buffer, ds_ribbon_mask_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), (anat_ribbon_wf, ds_ribbon_mask_wf, [ ('outputnode.anat_ribbon', 'inputnode.mask_file'), ]), @@ -1105,7 +1104,6 @@ def init_infant_anat_fit_wf( fsLR_reg_wf = init_mcribs_dhcp_wf() ds_fsLR_reg_wf = init_ds_surfaces_wf( - bids_root=bids_root, output_dir=output_dir, surfaces=['sphere_reg_dhcpAsym'], name='ds_fsLR_reg_wf', @@ -1113,11 +1111,15 @@ def init_infant_anat_fit_wf( workflow.connect([ (surfaces_buffer, fsLR_reg_wf, [('sphere_reg', 'inputnode.sphere_reg')]), - (sourcefile_buffer, ds_fsLR_reg_wf, [('source_files', 'inputnode.source_files')]), + (sourcefile_buffer, ds_fsLR_reg_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), (fsLR_reg_wf, ds_fsLR_reg_wf, [ - ('outputnode.sphere_reg_fsLR', 'inputnode.sphere_reg_fsLR') + ('outputnode.sphere_reg_dhcpAsym', 'inputnode.sphere_reg_dhcpAsym') + ]), + (ds_fsLR_reg_wf, fsLR_buffer, [ + ('outputnode.sphere_reg_dhcpAsym', 'sphere_reg_fsLR'), ]), - (ds_fsLR_reg_wf, fsLR_buffer, [('outputnode.sphere_reg_fsLR', 'sphere_reg_fsLR')]), ]) # fmt:skip else: LOGGER.info('ANAT Stage 9: Found pre-computed dhcp-fsLR registration sphere') @@ -1129,7 +1131,6 @@ def init_infant_anat_fit_wf( fsLR_reg_wf = init_fsLR_reg_wf() ds_fsLR_reg_wf = init_ds_surfaces_wf( - bids_root=bids_root, output_dir=output_dir, surfaces=['sphere_reg_fsLR'], name='ds_fsLR_reg_wf', @@ -1137,7 +1138,9 @@ def init_infant_anat_fit_wf( workflow.connect([ (surfaces_buffer, fsLR_reg_wf, [('sphere_reg', 'inputnode.sphere_reg')]), - (sourcefile_buffer, ds_fsLR_reg_wf, [('source_files', 'inputnode.source_files')]), + (sourcefile_buffer, ds_fsLR_reg_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), (fsLR_reg_wf, ds_fsLR_reg_wf, [ ('outputnode.sphere_reg_fsLR', 'inputnode.sphere_reg_fsLR') ]), @@ -1352,9 +1355,7 @@ def init_infant_anat_full_wf( output_dir=output_dir, ) surface_derivatives_wf = init_surface_derivatives_wf() - ds_surfaces_wf = init_ds_surfaces_wf( - bids_root=bids_root, output_dir=output_dir, surfaces=['inflated'] - ) + ds_surfaces_wf = init_ds_surfaces_wf(output_dir=output_dir, surfaces=['inflated']) ds_curv_wf = init_ds_surface_metrics_wf( bids_root=bids_root, output_dir=output_dir, metrics=['curv'], name='ds_curv_wf' ) diff --git a/nibabies/workflows/anatomical/surfaces.py b/nibabies/workflows/anatomical/surfaces.py index bd56af8c..81275ae8 100644 --- a/nibabies/workflows/anatomical/surfaces.py +++ b/nibabies/workflows/anatomical/surfaces.py @@ -210,7 +210,7 @@ def init_mcribs_dhcp_wf(*, name='mcribs_dhcp_wf'): name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(['sphere_reg_fsLR']), + niu.IdentityInterface(['sphere_reg_dhcpAsym']), name='outputnode', ) @@ -258,7 +258,7 @@ def init_mcribs_dhcp_wf(*, name='mcribs_dhcp_wf'): workflow.connect([ (inputnode, 
project_unproject, [('sphere_reg', 'sphere_in')]), - (project_unproject, outputnode, [('sphere_out', 'sphere_reg_fsLR')]), + (project_unproject, outputnode, [('sphere_out', 'sphere_reg_dhcpAsym')]), ]) # fmt:skip return workflow From cbc182649832d7a5911eaee8e19fa1400a0cd5ae Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Sun, 28 Apr 2024 22:41:08 -0400 Subject: [PATCH 054/142] ENH: Add pdb debug mode --- nibabies/cli/run.py | 53 ++++++++++++++------------- nibabies/config.py | 2 +- nibabies/utils/debug.py | 79 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 109 insertions(+), 25 deletions(-) create mode 100644 nibabies/utils/debug.py diff --git a/nibabies/cli/run.py b/nibabies/cli/run.py index 9b6a24ea..bbee923e 100644 --- a/nibabies/cli/run.py +++ b/nibabies/cli/run.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- """NiBabies runner.""" from .. import config @@ -22,26 +21,32 @@ def main(): parse_args() + if 'pdb' in config.execution.debug: + from nibabies.utils.debug import setup_exceptionhook + + setup_exceptionhook() + config.nipype.plugin = 'Linear' + # collect and submit telemetry information # if `--notrack` is specified, nothing is done. - if not config.execution.notrack: + if not config.execution.notrack and not config.execution.debug: from nibabies.utils.telemetry import setup_migas setup_migas() - if "participant" in config.workflow.analysis_level: + if 'participant' in config.workflow.analysis_level: _pool = None - if config.nipype.plugin == "MultiProc": + if config.nipype.plugin == 'MultiProc': import multiprocessing as mp from concurrent.futures import ProcessPoolExecutor from contextlib import suppress # should drastically reduce VMS # see https://github.com/nipreps/mriqc/pull/984 for more details - os.environ["OMP_NUM_THREADS"] = "1" + os.environ['OMP_NUM_THREADS'] = '1' with suppress(RuntimeError): - mp.set_start_method("fork") + mp.set_start_method('fork') gc.collect() _pool = ProcessPoolExecutor( @@ -50,7 +55,7 @@ def main(): initargs=(_cwd, config.nipype.omp_nthreads), ) - config_file = config.execution.work_dir / config.execution.run_uuid / "config.toml" + config_file = config.execution.work_dir / config.execution.run_uuid / 'config.toml' config_file.parent.mkdir(exist_ok=True, parents=True) config.to_filename(config_file) @@ -69,7 +74,7 @@ def main(): sys.exit(exitcode) if config.execution.write_graph: - nibabies_wf.write_graph(graph2use="colored", format="svg", simple_form=True) + nibabies_wf.write_graph(graph2use='colored', format='svg', simple_form=True) if exitcode != 0: sys.exit(exitcode) @@ -83,9 +88,9 @@ def main(): config.loggers.workflow.log( 15, - "\n".join(["nibabies config:"] + ["\t\t%s" % s for s in config.dumps().splitlines()]), + '\n'.join(['nibabies config:'] + ['\t\t%s' % s for s in config.dumps().splitlines()]), ) - config.loggers.workflow.log(25, "nibabies started!") + config.loggers.workflow.log(25, 'nibabies started!') # Hack MultiProc's pool to reduce VMS _plugin = config.nipype.get_plugin() @@ -94,31 +99,31 @@ def main(): multiproc = MultiProcPlugin(plugin_args=config.nipype.plugin_args) multiproc.pool = _pool - _plugin = {"plugin": multiproc} + _plugin = {'plugin': multiproc} gc.collect() try: nibabies_wf.run(**_plugin) except Exception as e: - config.loggers.workflow.critical("nibabies failed: %s", e) + config.loggers.workflow.critical('nibabies failed: %s', e) raise else: - config.loggers.workflow.log(25, "nibabies finished successfully!") + config.loggers.workflow.log(25, 'nibabies finished successfully!') # 
Bother users with the boilerplate only iff the workflow went okay.
-    boiler_file = config.execution.nibabies_dir / "logs" / "CITATION.md"
+    boiler_file = config.execution.nibabies_dir / 'logs' / 'CITATION.md'
     if boiler_file.exists():
         if config.environment.exec_env in (
-            "singularity",
-            "docker",
-            "nibabies-docker",
+            'singularity',
+            'docker',
+            'nibabies-docker',
         ):
-            boiler_file = Path("<OUTPUT_PATH>") / boiler_file.relative_to(
+            boiler_file = Path('<OUTPUT_PATH>') / boiler_file.relative_to(
                 config.execution.output_dir
             )
         config.loggers.workflow.log(
             25,
-            "Works derived from this nibabies execution should include the "
-            f"boilerplate text found in {boiler_file}.",
+            'Works derived from this nibabies execution should include the '
+            f'boilerplate text found in {boiler_file}.',
         )

     if config.workflow.run_reconall:
@@ -135,8 +140,8 @@ def main():
                 extension=['.tsv'],
             )
         )
-        _copy_any(dseg_tsv, str(config.execution.nibabies_dir / "desc-aseg_dseg.tsv"))
-        _copy_any(dseg_tsv, str(config.execution.nibabies_dir / "desc-aparcaseg_dseg.tsv"))
+        _copy_any(dseg_tsv, str(config.execution.nibabies_dir / 'desc-aseg_dseg.tsv'))
+        _copy_any(dseg_tsv, str(config.execution.nibabies_dir / 'desc-aparcaseg_dseg.tsv'))
     # errno = 0
     finally:
         from ..reports.core import generate_reports
@@ -151,7 +156,7 @@ def main():
         write_bidsignore(config.execution.nibabies_dir)


-if __name__ == "__main__":
+if __name__ == '__main__':
     raise RuntimeError(
-        "Please `pip install` this and run via the commandline interfaces, `nibabies <command>`"
+        'Please `pip install` this and run via the commandline interfaces, `nibabies <command>`'
     )
diff --git a/nibabies/config.py b/nibabies/config.py
index 085a5f8c..549af78a 100644
--- a/nibabies/config.py
+++ b/nibabies/config.py
@@ -194,7 +194,7 @@

 # Debug modes are names that influence the exposure of internal details to
 # the user, either through additional derivatives or increased verbosity
-DEBUG_MODES = ('compcor', 'registration', 'fieldmaps')
+DEBUG_MODES = ('compcor', 'registration', 'fieldmaps', 'pdb')


 class _Config:
diff --git a/nibabies/utils/debug.py b/nibabies/utils/debug.py
new file mode 100644
index 00000000..a516a5db
--- /dev/null
+++ b/nibabies/utils/debug.py
@@ -0,0 +1,79 @@
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+#
+# Copyright The NiPreps Developers <nipreps@gmail.com>
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import sys + + +def is_interactive(): + """Return True if all in/outs are tty""" + # TODO: check on windows if hasattr check would work correctly and add value: + # + return sys.stdin.isatty() and sys.stdout.isatty() and sys.stderr.isatty() + + +def setup_exceptionhook(ipython=False): + """Overloads default sys.excepthook with our exceptionhook handler. + If interactive, our exceptionhook handler will invoke + pdb.post_mortem; if not interactive, then invokes default handler. + """ + + def _pdb_excepthook(type, value, tb): + import traceback + + traceback.print_exception(type, value, tb) + print() + if is_interactive(): + import pdb + + pdb.post_mortem(tb) + + if ipython: + from IPython.core import ultratb + + sys.excepthook = ultratb.FormattedTB( + mode='Verbose', + # color_scheme='Linux', + call_pdb=is_interactive(), + ) + else: + sys.excepthook = _pdb_excepthook From 567e007f06814e6b73cd5924842465d431f5a493 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Sun, 28 Apr 2024 23:27:48 -0400 Subject: [PATCH 055/142] FIX: Misfired connections --- nibabies/workflows/anatomical/fit.py | 33 +++++++++++++++-------- nibabies/workflows/anatomical/surfaces.py | 6 ++--- nibabies/workflows/base.py | 5 ++-- 3 files changed, 27 insertions(+), 17 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index 042cad15..2cf65c84 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -117,8 +117,6 @@ def init_infant_anat_fit_wf( ) return workflow - anat = reference_anat.lower() - # Organization # ------------ # This workflow takes the usual (inputnode -> graph -> outputnode) format @@ -240,7 +238,7 @@ def init_infant_anat_fit_wf( # Stage 4 - Segmentation seg_buffer = pe.Node( - niu.IdentityInterface(fields=['anat_dseg', 'anat_tpms', 'ants_segs']), + niu.IdentityInterface(fields=['anat_dseg', 'anat_tpms', 'ants_segs', 'anat_aseg']), name='seg_buffer', ) # Stage 5 - collated template names, forward and reverse transforms @@ -650,7 +648,10 @@ def init_infant_anat_fit_wf( ) t2w_buffer.inputs.t2w_mask = t2w_mask apply_t2w_mask.inputs.in_mask = t2w_mask - workflow.connect(t2w_validate, 'out_file', apply_t2w_mask, 'in_file') + workflow.connect([ + (t2w_validate, apply_t2w_mask, [('out_file', 'in_file')]), + (apply_t2w_mask, t2w_buffer, [('out_file', 't2w_brain')]), + ]) # fmt:skip # Stage 3: Coregistration t1w2t2w_xfm = precomputed.get('t1w2t2w_xfm') @@ -680,8 +681,10 @@ def init_infant_anat_fit_wf( probmap=not t2w_mask, ) workflow.connect([ - (anat_buffer, coregistration_wf, [ + (t1w_buffer, coregistration_wf, [ ('t1w_preproc', 'inputnode.in_t1w'), + ]), + (t2w_buffer, coregistration_wf, [ ('t2w_preproc', 'inputnode.in_t2w'), ('t2w_mask', 'inputnode.in_mask'), ]), @@ -709,7 +712,7 @@ def init_infant_anat_fit_wf( ) workflow.connect([ - (anat_buffer, segmentation_wf, [(f'{anat}_brain', 'inputnode.anat_brain')]), + (anat_buffer, segmentation_wf, [('anat_brain', 'inputnode.anat_brain')]), (segmentation_wf, seg_buffer, [ ('outputnode.anat_dseg', 'anat_dseg'), ('outputnode.anat_tpms', 'anat_tpms'), @@ -782,8 +785,8 
@@ def init_infant_anat_fit_wf( workflow.connect([ (inputnode, register_template_wf, [('roi', 'inputnode.lesion_mask')]), - (anat_buffer, register_template_wf, [(f'{anat}_preproc', 'inputnode.moving_image')]), - (refined_buffer, register_template_wf, [(f'{anat}_mask', 'inputnode.moving_mask')]), + (anat_buffer, register_template_wf, [('anat_preproc', 'inputnode.moving_image')]), + (refined_buffer, register_template_wf, [('anat_mask', 'inputnode.moving_mask')]), (sourcefile_buffer, ds_template_registration_wf, [ ('anat_source_files', 'inputnode.source_files') ]), @@ -828,13 +831,21 @@ def init_infant_anat_fit_wf( ) workflow.connect([ + (inputnode, surface_recon_wf, [ + ('subject_id', 'inputnode.subject_id'), + ('subjects_dir', 'inputnode.subjects_dir'), + ]), (t2w_buffer, surface_recon_wf, [ ('t2w_preproc', 'inputnode.t2w'), ('t2w_mask', 'inputnode.in_mask'), ]), - (anat_buffer, surface_recon_wf, [ + (seg_buffer, surface_recon_wf, [ ('anat_aseg', 'inputnode.in_aseg'), ]), + (surface_recon_wf, outputnode, [ + ('outputnode.subjects_dir', 'subjects_dir'), + ('outputnode.subject_id', 'subject_id'), + ]), ]) # fmt:skip else: @@ -1008,7 +1019,7 @@ def init_infant_anat_fit_wf( ('outputnode.subjects_dir', 'inputnode.subjects_dir'), ]), (fsnative_buffer, gifti_surfaces_wf, [ - ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), + ('fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), ]), (gifti_surfaces_wf, surfaces_buffer, [ (f'outputnode.{surf}', surf) for surf in surfs @@ -1245,7 +1256,7 @@ def init_infant_anat_full_wf( spaces: 'SpatialReferences', cifti_output: ty.Literal['91k', '170k', False], skull_strip_fixed_seed: bool = False, - name: str = 'infant_anat_wf', + name: str = 'infant_anat_full_wf', ) -> pe.Workflow: """The full version of the fit workflow.""" workflow = pe.Workflow(name=name) diff --git a/nibabies/workflows/anatomical/surfaces.py b/nibabies/workflows/anatomical/surfaces.py index 81275ae8..58b71457 100644 --- a/nibabies/workflows/anatomical/surfaces.py +++ b/nibabies/workflows/anatomical/surfaces.py @@ -108,7 +108,7 @@ def init_mcribs_surface_recon_wf( 63: 50, 253: 48, } - map_labels = pe.Node(MapLabels(mappings=fs2mcribs), name='map_labels') + fs_to_mcribs = pe.Node(MapLabels(mappings=fs2mcribs), name='fs_to_mcribs') t2w_las = pe.Node(ReorientImage(target_orientation='LAS'), name='t2w_las') seg_las = t2w_las.clone(name='seg_las') @@ -151,8 +151,8 @@ def init_mcribs_surface_recon_wf( workflow.connect([ (inputnode, t2w_las, [('t2w', 'in_file')]), - (inputnode, map_labels, [('in_aseg', 'in_file')]), - (map_labels, seg_las, [('out_file', 'in_file')]), + (inputnode, fs_to_mcribs, [('in_aseg', 'in_file')]), + (fs_to_mcribs, seg_las, [('out_file', 'in_file')]), (inputnode, mcribs_recon, [ ('subjects_dir', 'subjects_dir'), ('subject_id', 'subject_id')]), diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index abc48ca3..47f93293 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -208,6 +208,7 @@ def init_single_subject_wf( from ..utils.misc import fix_multi_source_name subject_session_id = _subject_session_id(subject_id, session_id) + print(f'{subject_session_id=}') workflow = Workflow(name=f'single_subject_{subject_session_id}_wf') workflow.__desc__ = f""" Results included in this manuscript come from preprocessing @@ -415,9 +416,6 @@ def init_single_subject_wf( else init_infant_anat_fit_wf(**wf_args) ) - # Ensure surface reconstruction is run at the per-session level - anat_wf.inputs.inputnode.subject_id = subject_session_id - # 
allow to run with anat-fast-track on fMRI-only dataset if ( 't1w_preproc' in anatomical_cache or 't2w_preproc' in anatomical_cache @@ -449,6 +447,7 @@ def init_single_subject_wf( (bidssrc, summary, [('t2w', 't2w'), ('bold', 'bold')]), (bids_info, summary, [('subject', 'subject_id')]), (summary, ds_report_summary, [('out_report', 'in_file')]), + (summary, anat_wf, [('subject_id', 'inputnode.subject_id')]), (about, ds_report_about, [('out_report', 'in_file')]), ]) # fmt:skip From cb13b5b78fdeea4a255963869b33176c4378f669 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Mon, 29 Apr 2024 11:43:23 -0400 Subject: [PATCH 056/142] FIX: aaand more missing/incorrect connections --- nibabies/workflows/anatomical/fit.py | 35 ++++++++++++++----- nibabies/workflows/anatomical/segmentation.py | 2 +- nibabies/workflows/base.py | 5 ++- 3 files changed, 31 insertions(+), 11 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index 2cf65c84..5ec5c82c 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -236,9 +236,14 @@ def init_infant_anat_fit_wf( name='coreg_buffer', ) + aseg_buffer = pe.Node( + niu.IdentityInterface(fields=['anat_aseg']), + name='aseg_buffer', + ) + # Stage 4 - Segmentation seg_buffer = pe.Node( - niu.IdentityInterface(fields=['anat_dseg', 'anat_tpms', 'ants_segs', 'anat_aseg']), + niu.IdentityInterface(fields=['anat_dseg', 'anat_tpms', 'ants_segs']), name='seg_buffer', ) # Stage 5 - collated template names, forward and reverse transforms @@ -270,6 +275,13 @@ def init_infant_anat_fit_wf( msm_buffer = pe.Node(niu.IdentityInterface(fields=['sphere_reg_msm']), name='msm_buffer') workflow.connect([ + (anat_buffer, outputnode, [ + ('anat_preproc', 'anat_preproc'), + ]), + (refined_buffer, outputnode, [ + ('anat_mask', 'anat_mask'), + ('anat_brain', 'anat_brain'), + ]), (seg_buffer, outputnode, [ ('anat_dseg', 'anat_dseg'), ('anat_tpms', 'anat_tpms'), @@ -280,6 +292,7 @@ def init_infant_anat_fit_wf( (sourcefile_buffer, outputnode, [ ('t1w_source_files', 't1w_valid_list'), ('t2w_source_files', 't2w_valid_list'), + ('anat_source_files', 'anat_valid_list'), # Alias for reference anat files ]), (surfaces_buffer, outputnode, [ ('white', 'white'), @@ -701,6 +714,10 @@ def init_infant_anat_fit_wf( seg_method = 'jlf' if config.execution.segmentation_atlases_dir else 'fast' + if anat_aseg: + LOGGER.info('ANAT Found precomputed anatomical segmentation') + aseg_buffer.inputs.anat_aseg = anat_aseg + if not (anat_dseg and anat_tpms): LOGGER.info('ANAT Stage 4: Tissue segmentation') segmentation_wf = init_segmentation_wf( @@ -718,13 +735,13 @@ def init_infant_anat_fit_wf( ('outputnode.anat_tpms', 'anat_tpms'), ]), ]) # fmt:skip - if anat_aseg or seg_method == 'jlf': - workflow.connect(segmentation_wf, 'outputnode.anat_aseg', seg_buffer, 'anat_aseg') - if anat_aseg: - LOGGER.info('ANAT Found precomputed anatomical segmentation') - segmentation_wf.inputs.inputnode.anat_aseg = anat_aseg - # TODO: datasink + if anat_aseg: + workflow.connect(aseg_buffer, 'anat_aseg', segmentation_wf, 'inputnode.anat_aseg') + elif seg_method == 'jlf': + workflow.connect(segmentation_wf, 'outputnode.anat_aseg', aseg_buffer, 'anat_aseg') + # TODO: datasink aseg + if not anat_dseg: ds_dseg_wf = init_ds_dseg_wf(output_dir=str(output_dir)) workflow.connect([ @@ -839,7 +856,7 @@ def init_infant_anat_fit_wf( ('t2w_preproc', 'inputnode.t2w'), ('t2w_mask', 'inputnode.in_mask'), ]), - (seg_buffer, surface_recon_wf, [ + (aseg_buffer, 
surface_recon_wf, [
             ('anat_aseg', 'inputnode.in_aseg'),
         ]),
         (surface_recon_wf, outputnode, [
@@ -916,7 +933,7 @@ def init_infant_anat_fit_wf(
         ]) # fmt:skip

     if anat_aseg:
-        workflow.conect(anat_buffer, 'anat_aseg', surface_recon_wf, 'inputnode.in_aseg')
+        workflow.connect(aseg_buffer, 'anat_aseg', surface_recon_wf, 'inputnode.in_aseg')

     fsnative_xfms = precomputed.get('transforms', {}).get('fsnative')
     if not fsnative_xfms:
diff --git a/nibabies/workflows/anatomical/segmentation.py b/nibabies/workflows/anatomical/segmentation.py
index ad226e09..5547d9b5 100644
--- a/nibabies/workflows/anatomical/segmentation.py
+++ b/nibabies/workflows/anatomical/segmentation.py
@@ -70,7 +70,7 @@ def init_segmentation_wf(
     workflow.connect([
         (inputnode, fast, [('anat_brain', 'in_files')]),
         (fast, to_dseg, [('partial_volume_map', 'in_dseg')]),
-        (to_dseg, outputnode, [('out', 'anat_dseg')]),
+        (to_dseg, outputnode, [('out_dseg', 'anat_dseg')]),
         (fast, fast2bids, [('partial_volume_files', 'inlist')]),
         (fast2bids, outputnode, [('out', 'anat_tpms')]),
     ]) # fmt:skip
diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py
index 47f93293..7091f618 100644
--- a/nibabies/workflows/base.py
+++ b/nibabies/workflows/base.py
@@ -445,7 +445,10 @@ def init_single_subject_wf(
         # Reporting connections
         (inputnode, summary, [('subjects_dir', 'subjects_dir')]),
         (bidssrc, summary, [('t2w', 't2w'), ('bold', 'bold')]),
-        (bids_info, summary, [('subject', 'subject_id')]),
+        (bids_info, summary, [
+            ('subject', 'subject_id'),
+            ('session', 'session_id'),
+        ]),
         (summary, ds_report_summary, [('out_report', 'in_file')]),
+        (summary, anat_wf, [('subject_id', 'inputnode.subject_id')]),
         (about, ds_report_about, [('out_report', 'in_file')]),

From 32d9be533ffcc30df96f2f12c35dd15598ff32cf Mon Sep 17 00:00:00 2001
From: Mathias Goncalves
Date: Mon, 29 Apr 2024 15:12:40 -0400
Subject: [PATCH 057/142] RF: Add "apply" workflow to facilitate fit caching

---
 nibabies/workflows/anatomical/fit.py | 164 +++++++++++----------------
 nibabies/workflows/base.py           | 103 +++++++++++------
 2 files changed, 133 insertions(+), 134 deletions(-)

diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py
index 5ec5c82c..0d7236b6 100644
--- a/nibabies/workflows/anatomical/fit.py
+++ b/nibabies/workflows/anatomical/fit.py
@@ -1252,36 +1252,44 @@ def init_infant_single_anat_fit_wf(
     return workflow


-def init_infant_anat_full_wf(
+def init_infant_anat_apply_wf(
     *,
-    reference_anat: ty.Literal['T1w', 'T2w'],
-    age_months: int,
-    t1w: list,
-    t2w: list,
-    flair: list,
     bids_root: str,
-    precomputed: dict,
-    longitudinal: bool,
     msm_sulc: bool,
     omp_nthreads: int,
     output_dir: str,
-    segmentation_atlases: str | Path | None,
-    skull_strip_mode: ty.Literal['auto', 'skip', 'force'],
     recon_method: ty.Literal['freesurfer', 'infantfs', 'mcribs', None],
-    skull_strip_template: 'Reference',
     sloppy: bool,
     spaces: 'SpatialReferences',
     cifti_output: ty.Literal['91k', '170k', False],
-    skull_strip_fixed_seed: bool = False,
-    name: str = 'infant_anat_full_wf',
+    name: str = 'infant_anat_apply_wf',
 ) -> pe.Workflow:
-    """The full version of the fit workflow."""
+    """Compute remaining derivatives from the results of the fit workflow."""
     workflow = pe.Workflow(name=name)
+    reg_sphere = f'sphere_reg_{"msm" if msm_sulc else "fsLR"}'

     inputnode = pe.Node(
-        niu.IdentityInterface(fields=['t1w', 't2w', 'roi', 'flair', 'subjects_dir', 'subject_id']),
+        niu.IdentityInterface(
+            fields=[
+                'anat2std_xfm',
+                'anat_valid_list',
+                'anat_preproc',
+                'anat_mask',
+                'anat_dseg',
+                'anat_tpms',
+                'subjects_dir',
+                'subject_id',
+                
'fsnative2anat_xfm', + 'sulc', + 'template', + 'thickness', + 'midthickness', + reg_sphere, + ] + ), name='inputnode', ) + outputnode = pe.Node( niu.IdentityInterface( fields=[ @@ -1293,7 +1301,6 @@ def init_infant_anat_full_wf( 'anat_dseg', 'anat_tpms', 'anat2std_xfm', - 'std2anat_xfm', 'fsnative2anat_xfm', 'anat_aparc', 'anat_aseg', @@ -1304,28 +1311,6 @@ def init_infant_anat_full_wf( name='outputnode', ) - anat_fit_wf = init_infant_anat_fit_wf( - reference_anat=reference_anat, - age_months=age_months, - bids_root=bids_root, - output_dir=output_dir, - longitudinal=longitudinal, - msm_sulc=False, # TODO: Enable - skull_strip_mode=skull_strip_mode, - skull_strip_template=skull_strip_template, - skull_strip_fixed_seed=skull_strip_fixed_seed, - spaces=spaces, - t1w=t1w, - t2w=t2w, - flair=flair, - precomputed=precomputed, - sloppy=sloppy, - segmentation_atlases=segmentation_atlases, - cifti_output=cifti_output, - recon_method=recon_method, - omp_nthreads=omp_nthreads, - ) - template_iterator_wf = init_template_iterator_wf(spaces=spaces, sloppy=sloppy) ds_std_volumes_wf = init_ds_anat_volumes_wf( bids_root=bids_root, @@ -1334,39 +1319,28 @@ def init_infant_anat_full_wf( ) workflow.connect([ - (inputnode, anat_fit_wf, [ - ('t1w', 'inputnode.t1w'), - ('t2w', 'inputnode.t2w'), - ('roi', 'inputnode.roi'), - ('flair', 'inputnode.flair'), - ('subjects_dir', 'inputnode.subjects_dir'), - ('subject_id', 'inputnode.subject_id'), - ]), - (anat_fit_wf, outputnode, [ - ('outputnode.template', 'template'), - ('outputnode.subjects_dir', 'subjects_dir'), - ('outputnode.subject_id', 'subject_id'), - ('outputnode.anat_preproc', 'anat_preproc'), - ('outputnode.anat_mask', 'anat_mask'), - ('outputnode.anat_dseg', 'anat_dseg'), - ('outputnode.anat_tpms', 'anat_tpms'), - ('outputnode.anat2std_xfm', 'anat2std_xfm'), - ('outputnode.std2anat_xfm', 'std2anat_xfm'), - ('outputnode.fsnative2anat_xfm', 'fsnative2anat_xfm'), - ('outputnode.sphere_reg', 'sphere_reg'), - (f"outputnode.sphere_reg_{'msm' if msm_sulc else 'fsLR'}", 'sphere_reg_fsLR'), - ('outputnode.anat_ribbon', 'anat_ribbon'), + (inputnode, outputnode, [ + ('template', 'template'), + ('subjects_dir', 'subjects_dir'), + ('subject_id', 'subject_id'), + ('anat_preproc', 'anat_preproc'), + ('anat_mask', 'anat_mask'), + ('anat_dseg', 'anat_dseg'), + ('anat_tpms', 'anat_tpms'), + ('anat2std_xfm', 'anat2std_xfm'), + ('fsnative2anat_xfm', 'fsnative2anat_xfm'), + (reg_sphere, 'sphere_reg_fsLR'), ]), - (anat_fit_wf, template_iterator_wf, [ - ('outputnode.template', 'inputnode.template'), - ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), + (inputnode, template_iterator_wf, [ + ('template', 'inputnode.template'), + ('anat2std_xfm', 'inputnode.anat2std_xfm'), ]), - (anat_fit_wf, ds_std_volumes_wf, [ - ('outputnode.anat_valid_list', 'inputnode.source_files'), - ('outputnode.anat_preproc', 'inputnode.anat_preproc'), - ('outputnode.anat_mask', 'inputnode.anat_mask'), - ('outputnode.anat_dseg', 'inputnode.anat_dseg'), - ('outputnode.anat_tpms', 'inputnode.anat_tpms'), + (inputnode, ds_std_volumes_wf, [ + ('anat_valid_list', 'inputnode.source_files'), + ('anat_preproc', 'inputnode.anat_preproc'), + ('anat_mask', 'inputnode.anat_mask'), + ('anat_dseg', 'inputnode.anat_dseg'), + ('anat_tpms', 'inputnode.anat_tpms'), ]), (template_iterator_wf, ds_std_volumes_wf, [ ('outputnode.std_t1w', 'inputnode.ref_file'), @@ -1389,26 +1363,26 @@ def init_infant_anat_full_wf( ) workflow.connect([ - (anat_fit_wf, surface_derivatives_wf, [ - ('outputnode.t1w_preproc', 
'inputnode.reference'), - ('outputnode.subjects_dir', 'inputnode.subjects_dir'), - ('outputnode.subject_id', 'inputnode.subject_id'), - ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), + (inputnode, surface_derivatives_wf, [ + ('anat_preproc', 'inputnode.reference'), + ('subjects_dir', 'inputnode.subjects_dir'), + ('subject_id', 'inputnode.subject_id'), + ('fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), ]), - (anat_fit_wf, ds_surfaces_wf, [ - ('outputnode.anat_valid_list', 'inputnode.source_files'), + (inputnode, ds_surfaces_wf, [ + ('anat_valid_list', 'inputnode.source_files'), ]), (surface_derivatives_wf, ds_surfaces_wf, [ ('outputnode.inflated', 'inputnode.inflated'), ]), - (anat_fit_wf, ds_curv_wf, [ - ('outputnode.anat_valid_list', 'inputnode.source_files'), + (inputnode, ds_curv_wf, [ + ('anat_valid_list', 'inputnode.source_files'), ]), (surface_derivatives_wf, ds_curv_wf, [ ('outputnode.curv', 'inputnode.curv'), ]), - (anat_fit_wf, ds_fs_segs_wf, [ - ('outputnode.anat_valid_list', 'inputnode.source_files'), + (inputnode, ds_fs_segs_wf, [ + ('anat_valid_list', 'inputnode.source_files'), ]), (surface_derivatives_wf, ds_fs_segs_wf, [ ('outputnode.out_aseg', 'inputnode.anat_fs_aseg'), @@ -1442,28 +1416,22 @@ def init_infant_anat_full_wf( ) workflow.connect([ - (anat_fit_wf, hcp_morphometrics_wf, [ - ('outputnode.subject_id', 'inputnode.subject_id'), - ('outputnode.sulc', 'inputnode.sulc'), - ('outputnode.thickness', 'inputnode.thickness'), - ('outputnode.midthickness', 'inputnode.midthickness'), + (inputnode, hcp_morphometrics_wf, [ + ('subject_id', 'inputnode.subject_id'), + ('sulc', 'inputnode.sulc'), + ('thickness', 'inputnode.thickness'), + ('midthickness', 'inputnode.midthickness'), ]), (surface_derivatives_wf, hcp_morphometrics_wf, [ ('outputnode.curv', 'inputnode.curv'), ]), - (anat_fit_wf, resample_midthickness_wf, [ - ('outputnode.midthickness', 'inputnode.midthickness'), - ( - f"outputnode.sphere_reg_{'msm' if msm_sulc else 'fsLR'}", - 'inputnode.sphere_reg_fsLR', - ), + (inputnode, resample_midthickness_wf, [ + ('midthickness', 'inputnode.midthickness'), + (reg_sphere, 'inputnode.sphere_reg_fsLR'), ]), - (anat_fit_wf, morph_grayords_wf, [ - ('outputnode.midthickness', 'inputnode.midthickness'), - ( - f"outputnode.sphere_reg_{'msm' if msm_sulc else 'fsLR'}", - 'inputnode.sphere_reg_fsLR', - ), + (inputnode, morph_grayords_wf, [ + ('midthickness', 'inputnode.midthickness'), + (reg_sphere, 'inputnode.sphere_reg_fsLR'), ]), (hcp_morphometrics_wf, morph_grayords_wf, [ ('outputnode.curv', 'inputnode.curv'), @@ -1474,8 +1442,8 @@ def init_infant_anat_full_wf( (resample_midthickness_wf, morph_grayords_wf, [ ('outputnode.midthickness_fsLR', 'inputnode.midthickness_fsLR'), ]), - (anat_fit_wf, ds_grayord_metrics_wf, [ - ('outputnode.anat_valid_list', 'inputnode.source_files'), + (inputnode, ds_grayord_metrics_wf, [ + ('anat_valid_list', 'inputnode.source_files'), ]), (morph_grayords_wf, ds_grayord_metrics_wf, [ ('outputnode.curv_fsLR', 'inputnode.curv'), diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index 7091f618..8a96af2d 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -54,7 +54,7 @@ from nibabies.interfaces import DerivativesDataSink from nibabies.interfaces.reports import AboutSummary, SubjectSummary from nibabies.utils.bids import parse_bids_for_age_months -from nibabies.workflows.anatomical.fit import init_infant_anat_fit_wf, init_infant_anat_full_wf +from nibabies.workflows.anatomical.fit import 
init_infant_anat_apply_wf, init_infant_anat_fit_wf

 if ty.TYPE_CHECKING:
     from bids.layout import BIDSLayout
@@ -208,7 +208,6 @@ def init_single_subject_wf(
     from ..utils.misc import fix_multi_source_name

     subject_session_id = _subject_session_id(subject_id, session_id)
-    print(f'{subject_session_id=}')
     workflow = Workflow(name=f'single_subject_{subject_session_id}_wf')
     workflow.__desc__ = f"""
 Results included in this manuscript come from preprocessing
@@ -388,32 +387,30 @@ def init_single_subject_wf(
         run_without_submitting=True,
     )

-    wf_args = {
-        'age_months': age,
-        't1w': t1w,
-        't2w': t2w,
-        'flair': subject_data['flair'],
-        'bids_root': bids_root,
-        'longitudinal': config.workflow.longitudinal,
-        'msm_sulc': msm_sulc,
-        'omp_nthreads': omp_nthreads,
-        'output_dir': config.execution.nibabies_dir,
-        'precomputed': anatomical_cache,
-        'segmentation_atlases': config.execution.segmentation_atlases_dir,
-        'skull_strip_fixed_seed': config.workflow.skull_strip_fixed_seed,
-        'skull_strip_mode': config.workflow.skull_strip_anat,
-        'skull_strip_template': Reference.from_string(config.workflow.skull_strip_template)[0],
-        'recon_method': recon_method,
-        'reference_anat': reference_anat,
-        'sloppy': config.execution.sloppy,
-        'spaces': spaces,
-        'cifti_output': config.workflow.cifti_output,
-    }
-
-    anat_wf = (
-        init_infant_anat_full_wf(**wf_args)
-        if config.workflow.level == 'full'
-        else init_infant_anat_fit_wf(**wf_args)
+    output_dir = config.execution.nibabies_dir
+    sloppy = config.execution.sloppy
+    cifti_output = config.workflow.cifti_output
+
+    anat_fit_wf = init_infant_anat_fit_wf(
+        age_months=age,
+        t1w=t1w,
+        t2w=t2w,
+        flair=subject_data['flair'],
+        bids_root=bids_root,
+        longitudinal=config.workflow.longitudinal,
+        msm_sulc=msm_sulc,
+        omp_nthreads=omp_nthreads,
+        output_dir=output_dir,
+        precomputed=anatomical_cache,
+        segmentation_atlases=config.execution.segmentation_atlases_dir,
+        skull_strip_fixed_seed=config.workflow.skull_strip_fixed_seed,
+        skull_strip_mode=config.workflow.skull_strip_anat,
+        skull_strip_template=Reference.from_string(config.workflow.skull_strip_template)[0],
+        recon_method=recon_method,
+        reference_anat=reference_anat,
+        sloppy=sloppy,
+        spaces=spaces,
+        cifti_output=cifti_output,
     )

     # allow to run with anat-fast-track on fMRI-only dataset
@@ -422,9 +419,9 @@ def init_single_subject_wf(
     ) and not subject_data['t1w']:
         workflow.connect([
             (bidssrc, bids_info, [(('bold', fix_multi_source_name), 'in_file')]),
-            (anat_wf, summary, [('outputnode.anat_preproc', anat)]),
-            (anat_wf, ds_report_summary, [('outputnode.anat_preproc', 'source_file')]),
-            (anat_wf, ds_report_about, [('outputnode.anat_preproc', 'source_file')]),
+            (anat_fit_wf, summary, [('outputnode.anat_preproc', anat)]),
+            (anat_fit_wf, ds_report_summary, [('outputnode.anat_preproc', 'source_file')]),
+            (anat_fit_wf, ds_report_about, [('outputnode.anat_preproc', 'source_file')]),
         ]) # fmt:skip
     else:
         workflow.connect([
@@ -435,8 +432,8 @@ def init_single_subject_wf(
         ]) # fmt:skip

     workflow.connect([
-        (inputnode, anat_wf, [('subjects_dir', 'inputnode.subjects_dir')]),
-        (bidssrc, anat_wf, [
+        (inputnode, anat_fit_wf, [('subjects_dir', 'inputnode.subjects_dir')]),
+        (bidssrc, anat_fit_wf, [
            ('t1w', 'inputnode.t1w'),
             ('t2w', 'inputnode.t2w'),
             ('roi', 'inputnode.roi'),
@@ -449,7 +447,52 @@ def init_single_subject_wf(
             ('session', 'session_id'),
         ]),
         (summary, ds_report_summary, [('out_report', 'in_file')]),
-        (summary, anat_wf, [('subject_id', 'inputnode.subject_id')]),
+        (summary, anat_fit_wf, [('subject_id', 
'inputnode.subject_id')]), (about, ds_report_about, [('out_report', 'in_file')]), ]) # fmt:skip # template_iterator_wf = None # select_MNI2009c_xfm = None if config.workflow.level == 'full': - # Much of the logic here is extracted into a separate, fuller anatomical workflow + anat_apply_wf = init_infant_anat_apply_wf( + bids_root=bids_root, + msm_sulc=msm_sulc, + omp_nthreads=omp_nthreads, + output_dir=output_dir, + recon_method=recon_method, + sloppy=sloppy, + spaces=spaces, + cifti_output=cifti_output, + ) + + reg_sphere = f'sphere_reg_{"msm" if msm_sulc else "fsLR"}' + workflow.connect( + [ + ( + anat_fit_wf, + anat_apply_wf, + [ + ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), + ('outputnode.anat_valid_list', 'inputnode.anat_valid_list'), + ('outputnode.anat_preproc', 'inputnode.anat_preproc'), + ('outputnode.anat_mask', 'inputnode.anat_mask'), + ('outputnode.anat_dseg', 'inputnode.anat_dseg'), + ('outputnode.anat_tpms', 'inputnode.anat_tpms'), + ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), + ('outputnode.midthickness', 'inputnode.midthickness'), + (f'outputnode.{reg_sphere}', f'inputnode.{reg_sphere}'), + ('outputnode.sulc', 'inputnode.sulc'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.template', 'inputnode.template'), + ('outputnode.thickness', 'inputnode.thickness'), + ], + ), + ] + ) # TODO: # - Grab template_iterator_wf workflow # - Grab select_MNI2009c_xfm node - pass # if 'MNI152NLin2009cAsym' in spaces.get_spaces(): # select_MNI2009c_xfm = pe.Node( From 5e1f0646ca10ee0c57ab90dfb80d9c4a22ce08b6 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Wed, 1 May 2024 12:34:11 -0400 Subject: [PATCH 058/142] FIX: Remove mcribs templates hack now that they are on templateflow --- nibabies/cli/parser.py | 6 ------ nibabies/utils/misc.py | 45 +++++++++++------------------------------- 2 files changed, 12 insertions(+), 39 deletions(-) diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py index c501f81c..93dc4d0e 100644 --- a/nibabies/cli/parser.py +++ b/nibabies/cli/parser.py @@ -813,12 +813,6 @@ def parse_args(args=None, namespace=None): # Ensure the directory is created config.execution.mcribs_dir.mkdir(exist_ok=True, parents=True) - # While waiting to confirm licensing for TemplateFlow distribution, - # include templates used to bring data to fsLR space - from nibabies.utils.misc import save_fsLR_mcribs - - save_fsLR_mcribs(config.execution.mcribs_dir) - # Wipe out existing work_dir if opts.clean_workdir and work_dir.exists(): from niworkflows.utils.misc import clean_directory diff --git a/nibabies/utils/misc.py b/nibabies/utils/misc.py index 98263225..4718ad72 100644 --- a/nibabies/utils/misc.py +++ b/nibabies/utils/misc.py @@ -3,12 +3,7 @@ """Miscellaneous utilities.""" from __future__ import annotations -import shutil from pathlib import Path -from typing import Union - -from nibabies import __version__ -from nibabies.data import load as load_data def fix_multi_source_name(in_files): @@ -31,11 +26,11 @@ def fix_multi_source_name(in_files): p = Path(filename_to_list(in_files)[0]) # subject_label = p.name.split("_", 1)[0].split("-")[1] try: - subj = re.search(r"(?<=^sub-)[a-zA-Z0-9]*", p.name).group() - suffix = re.search(r"(?<=_)\w+(?=\.)", p.name).group() + subj = re.search(r'(?<=^sub-)[a-zA-Z0-9]*', p.name).group() + suffix = re.search(r'(?<=_)\w+(?=\.)', p.name).group() except AttributeError: - raise AttributeError("Could not extract BIDS information") - return 
str(p.parent / f"sub-{subj}_{suffix}.nii.gz")
+        raise AttributeError('Could not extract BIDS information')
+    return str(p.parent / f'sub-{subj}_{suffix}.nii.gz')


 def check_deps(workflow):
@@ -45,7 +40,7 @@ def check_deps(workflow):
     return sorted(
         (node.interface.__class__.__name__, node.interface._cmd)
         for node in workflow._get_all_nodes()
-        if (hasattr(node.interface, "_cmd") and which(node.interface._cmd.split()[0]) is None)
+        if (hasattr(node.interface, '_cmd') and which(node.interface._cmd.split()[0]) is None)
     )


@@ -54,7 +49,7 @@ def cohort_by_months(template, months):
-    Produce a recommended cohort based on partipants age
+    Produce a recommended cohort based on participant age
     """
     cohort_key = {
-        "MNIInfant": (
+        'MNIInfant': (
             # upper bound of template | cohort
             2,  # 1
             5,  # 2
@@ -68,7 +63,7 @@ def cohort_by_months(template, months):
             44,  # 10
             60,  # 11
         ),
-        "UNCInfant": (
+        'UNCInfant': (
             8,  # 1
             12,  # 2
             24,  # 3
@@ -76,12 +71,12 @@ def cohort_by_months(template, months):
     }
     ages = cohort_key.get(template)
     if ages is None:
-        raise KeyError("Template cohort information does not exist.")
+        raise KeyError('Template cohort information does not exist.')

     for cohort, age in enumerate(ages, 1):
         if months <= age:
             return cohort
-    raise KeyError("Age exceeds all cohorts!")
+    raise KeyError('Age exceeds all cohorts!')


 def check_total_memory(recommended_gb):
@@ -115,12 +110,12 @@ def combine_meepi_source(in_files):
     from nipype.utils.filemanip import filename_to_list

     base, in_file = os.path.split(filename_to_list(in_files)[0])
-    entities = [ent for ent in in_file.split("_") if not ent.startswith("echo-")]
-    basename = "_".join(entities)
+    entities = [ent for ent in in_file.split('_') if not ent.startswith('echo-')]
+    basename = '_'.join(entities)
     return os.path.join(base, basename)


-def get_file(pkg: str, src_path: Union[str, Path]) -> str:
+def get_file(pkg: str, src_path: str | Path) -> str:
     """
     Get or extract a source file.

     Assures the file will be available until the lifetime of the current Python process. 
@@ -138,19 +133,3 @@ def get_file(pkg: str, src_path: Union[str, Path]) -> str: ref = files(pkg) / str(src_path) fl = file_manager.enter_context(as_file(ref)) return str(fl) - - -def save_fsLR_mcribs(mcribs_dir: str | Path) -> None: - template_dir = Path(mcribs_dir) / 'templates_fsLR' - template_dir.mkdir(exist_ok=True) - - atlases = load_data.cached('atlases') - - for src in atlases.glob('*sphere.surf.gii'): - if not (dst := (template_dir / src.name)).exists(): - try: - shutil.copyfile(src, dst) - except Exception: - import warnings - - warnings.warn(f"Could not save {src.name} to MCRIBS outputs") From 58a9c10398a9c814662a70d1f19bf7c3a38e142d Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Wed, 1 May 2024 12:35:51 -0400 Subject: [PATCH 059/142] FIX: Ensure midthickness is saved to subjects_dir --- nibabies/workflows/anatomical/surfaces.py | 25 ++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/nibabies/workflows/anatomical/surfaces.py b/nibabies/workflows/anatomical/surfaces.py index 58b71457..e9e417a6 100644 --- a/nibabies/workflows/anatomical/surfaces.py +++ b/nibabies/workflows/anatomical/surfaces.py @@ -147,7 +147,7 @@ def init_mcribs_surface_recon_wf( ) fssource = pe.Node(FreeSurferSource(), name='fssource', run_without_submitting=True) - midthickness_wf = init_midthickness_wf(omp_nthreads=omp_nthreads) + midthickness_wf = init_make_midthickness_wf(omp_nthreads=omp_nthreads) workflow.connect([ (inputnode, t2w_las, [('t2w', 'in_file')]), @@ -163,7 +163,9 @@ def init_mcribs_surface_recon_wf( ('subject_id', 'subject_id')]), (mcribs_recon, mcribs_postrecon, [('mcribs_dir', 'outdir')]), (mcribs_postrecon, fssource, [('subjects_dir', 'subjects_dir')]), - (inputnode, fssource, [('subject_id', 'inputnode.subject_id')]), + (mcribs_postrecon, midthickness_wf, [('subjects_dir', 'inputnode.subjects_dir')]), + (inputnode, fssource, [('subject_id', 'subject_id')]), + (inputnode, midthickness_wf, [('subject_id', 'inputnode.subject_id')]), (fssource, midthickness_wf, [ ('white', 'inputnode.white'), ('graymid', 'inputnode.graymid'), @@ -294,7 +296,7 @@ def init_infantfs_surface_recon_wf( workflow.connect(inputnode, 'in_aseg', recon, 'aseg_file') fssource = pe.Node(FreeSurferSource(), name='fssource', run_without_submitting=True) - midthickness_wf = init_midthickness_wf(omp_nthreads=omp_nthreads) + midthickness_wf = init_make_midthickness_wf(omp_nthreads=omp_nthreads) workflow.connect([ (inputnode, gen_recon_outdir, [ @@ -312,6 +314,10 @@ def init_infantfs_surface_recon_wf( ('subject_id', 'subject_id'), (('outdir', _parent), 'subjects_dir'), ]), + (recon, midthickness_wf, [ + ('subject_id', 'inputnode.subject_id'), + (('outdir', _parent), 'inputnode.subjects_dir'), + ]), (fssource, midthickness_wf, [ ('white', 'inputnode.white'), ('graymid', 'inputnode.graymid'), @@ -348,14 +354,19 @@ def init_infantfs_surface_recon_wf( return workflow -def init_midthickness_wf(*, omp_nthreads: int, name: str = 'make_midthickness_wf') -> pe.Workflow: +def init_make_midthickness_wf( + *, omp_nthreads: int, name: str = 'make_midthickness_wf' +) -> pe.Workflow: """ Standalone workflow to create and save cortical midthickness, derived from the generated white / graymid surfaces. 
""" workflow = pe.Workflow(name=name) - inputnode = pe.Node(niu.IdentityInterface(fields=['white', 'graymid']), name='inputnode') + inputnode = pe.Node( + niu.IdentityInterface(fields=['subject_id', 'subjects_dir', 'white', 'graymid']), + name='inputnode', + ) outputnode = pe.Node( niu.IdentityInterface(fields=['subject_id', 'subjects_dir']), name='outputnode', @@ -383,6 +394,10 @@ def init_midthickness_wf(*, omp_nthreads: int, name: str = 'make_midthickness_wf ('graymid', 'graymid'), ]), (midthickness, save_midthickness, [('out_file', 'surf.@graymid')]), + (inputnode, save_midthickness, [ + ('subjects_dir', 'base_directory'), + ('subject_id', 'container'), + ]), (save_midthickness, sync, [('out_file', 'filenames')]), (sync, outputnode, [ ('subjects_dir', 'subjects_dir'), From c5d6365196a56bb052833ab21add138a3c9f665a Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Wed, 1 May 2024 12:38:10 -0400 Subject: [PATCH 060/142] ENH: Clean up dual anat workflow, port logic to single anat --- nibabies/workflows/anatomical/fit.py | 311 ++++++++++++++++++++++++++- 1 file changed, 301 insertions(+), 10 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index 0d7236b6..2d9f5ca8 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -59,6 +59,7 @@ def init_infant_anat_fit_wf( + *, age_months: int, t1w: list, t2w: list, @@ -338,7 +339,9 @@ def init_infant_anat_fit_wf( t2w_preproc = precomputed.get('t2w_preproc', None) # Stage 1: Conform & valid T1w/T2w images - t1w_validate = pe.Node(ValidateImage(), name='anat_validate', run_without_submitting=True) + # Note: Since stage 1 & 2 are tightly knit together, it may be more intuitive + # to combine them in a separate workflow. 
+ t1w_validate = pe.Node(ValidateImage(), name='t1w_validate', run_without_submitting=True) t2w_validate = t1w_validate.clone('t2w_validate') if not t1w_preproc: @@ -378,9 +381,6 @@ def init_infant_anat_fit_wf( (t1w_template_wf, sourcefile_buffer, [ ('outputnode.anat_valid_list', 't1w_source_files'), ]), - # (t1w_template_wf, anat_reports_wf, [ - # ('outputnode.out_report', 'inputnode.anat_conform_report'), - # ]), (t1w_template_wf, ds_t1w_template_wf, [ ('outputnode.anat_realign_xfm', 'inputnode.anat_ref_xfms'), ]), @@ -472,6 +472,21 @@ def init_infant_anat_fit_wf( apply_t1w_mask = pe.Node(ApplyMask(), name='apply_t1w_mask') apply_t2w_mask = apply_t1w_mask.clone(name='apply_t2w_mask') + # T1w masking logic: + # + # PCM = Pre-computed mask + # PCT = Pre-computed template + # SS = Skull stripping required + # + # PCM, PCT, SS -> Apply PCM to PCT + # PCM, PCT, !SS -> Apply PCM to PCT + # PCM, !PCT, SS -> Apply PCM to template, then run INU + # PCM, !PCT, !SS -> Apply PCM to template, then run INU + # !PCM, PCT, SS -> Transform T2w brain mask with t2w2t1w_xfm + # !PCM, PCT, !SS -> Binarize PCT + # !PCM, !PCT, SS -> Transform T2w brain mask with t2w2t1w_xfm + # !PCM, !PCT, !SS -> INU correct template + if not t1w_mask: if skull_strip_mode == 'auto': run_t1w_skull_strip = not all(_is_skull_stripped(img) for img in t1w) @@ -574,6 +589,21 @@ def init_infant_anat_fit_wf( LOGGER.info('ANAT Skipping T1w masking') workflow.connect(apply_t1w_mask, 'out_file', t1w_buffer, 't1w_brain') + # T2w masking logic: + # + # PCM-T2 = Pre-computed mask + # PCM-T1 = Pre-computed mask for PCT-T1 + # PCT = Pre-computed template + # SS = Skull stripping required + # + # PCM, PCT, SS -> Apply PCM to PCT + # PCM, PCT, !SS -> Apply PCM to PCT + # PCM, !PCT, SS -> Apply PCM to template, then run INU + # PCM, !PCT, !SS -> Apply PCM to template, then run INU + # !PCM, PCT, SS -> Run brain extraction + # !PCM, PCT, !SS -> Binarize PCT + # !PCM, !PCT, SS -> Run brain extraction + # !PCM, !PCT, !SS -> INU correct template if not t2w_mask: if skull_strip_mode == 'auto': run_t2w_skull_strip = not all(_is_skull_stripped(img) for img in t2w) @@ -608,7 +638,7 @@ def init_infant_anat_fit_wf( LOGGER.info('ANAT Stage 2b: Skipping skull-strip, generating mask from input') binarize_t2w = pe.Node(Binarize(thresh_low=2), name='binarize_t2w') workflow.connect([ - (t2w_validate, binarize_t1w, [('out_file', 'in_file')]), + (t2w_validate, binarize_t2w, [('out_file', 'in_file')]), (t2w_validate, t2w_buffer, [('out_file', 't2w_brain')]), (binarize_t2w, t2w_buffer, [('out_file', 't2w_mask')]), ]) # fmt:skip @@ -616,7 +646,7 @@ def init_infant_anat_fit_wf( # Check whether we can convert a previously computed T2w mask # or need to run the atlas based brain extraction if t1w_mask: - LOGGER.info('ANAT T2w mask will be transformed into T1w space') + LOGGER.info('ANAT T1w mask will be transformed into T2w space') transform_t1w_mask = pe.Node( ApplyTransforms(interpolation='MultiLabel'), name='transform_t1w_mask', @@ -630,7 +660,7 @@ def init_infant_anat_fit_wf( # TODO: Unsure about this connection^ ]) # fmt:skip else: - LOGGER.info('ANAT Brain mask will be calculated using T2w') + LOGGER.info('ANAT Atlas-based brain mask will be calculated on the T2w') brain_extraction_wf = init_infant_brain_extraction_wf( omp_nthreads=omp_nthreads, sloppy=sloppy, @@ -1181,24 +1211,33 @@ def init_infant_anat_fit_wf( def init_infant_single_anat_fit_wf( - reference_anat: ty.Literal['T1w', 'T2w'], *, age_months: int, anatomicals: list, - bids_root: str | Path, + 
bids_root: str, precomputed: dict, longitudinal: bool, omp_nthreads: int, - output_dir: str | Path, + output_dir: str, segmentation_atlases: str | Path | None, skull_strip_mode: ty.Literal['force', 'skip', 'auto'], skull_strip_template: 'Reference', skull_strip_fixed_seed: bool, sloppy: bool, spaces: 'SpatialReferences', + recon_method: ty.Literal['freesurfer', 'infantfs', 'mcribs'] | None, + reference_anat: ty.Literal['T1w', 'T2w'], cifti_output: ty.Literal['91k', '170k', False], + msm_sulc: bool = False, name: str = 'infant_single_anat_fit_wf', ) -> Workflow: + """ + Create a fit workflow with just a single anatomical. + + Note: Treat this functionality as a workaround that enables the processing of incomplete data. + For best results, especially in periods of transitioning myelination (usually 3-8 months), + a combination of T1w and T2w images will produce more accurate results. + """ inputnode = pe.Node( niu.IdentityInterface( fields=['anat', 'roi', 'flair', 'subjects_dir', 'subject_id'], @@ -1243,11 +1282,263 @@ def init_infant_single_anat_fit_wf( workflow = Workflow(name=f'infant_single_{anat}_fit_wf') workflow.add_nodes([inputnode, outputnode]) + # Stage 1 inputs (filtered) + sourcefile_buffer = pe.Node( + niu.IdentityInterface(fields=['anat_source_files']), + name='sourcefile_buffer', + ) + + # Stage 2 + anat_buffer = pe.Node( + niu.IdentityInterface(fields=['anat_preproc', 'anat_mask', 'anat_brain']), + name='anat_buffer', + ) + + aseg_buffer = pe.Node( + niu.IdentityInterface(fields=['anat_aseg']), + name='aseg_buffer', + ) + + # Stage 3 - Segmentation + seg_buffer = pe.Node( + niu.IdentityInterface(fields=['anat_dseg', 'anat_tpms', 'ants_segs']), + name='seg_buffer', + ) + # Stage 4 - collated template names, forward and reverse transforms + template_buffer = pe.Node(niu.Merge(2), name='template_buffer') + anat2std_buffer = pe.Node(niu.Merge(2), name='anat2std_buffer') + std2anat_buffer = pe.Node(niu.Merge(2), name='std2anat_buffer') + + # Stage 5 results: Refined stage 2 results; may be direct copy if no refinement + refined_buffer = pe.Node( + niu.IdentityInterface(fields=['anat_mask', 'anat_brain']), + name='refined_buffer', + ) + + fsnative_buffer = pe.Node( + niu.IdentityInterface(fields=['fsnative2anat_xfm', 'anat2fsnative_xfm']), + name='fsnative_buffer', + ) + + # Stage 6 results: GIFTI surfaces + surfaces_buffer = pe.Node( + niu.IdentityInterface( + fields=['white', 'pial', 'midthickness', 'sphere', 'sphere_reg', 'thickness', 'sulc'] + ), + name='surfaces_buffer', + ) + + # Stage 7 and 8 results: fsLR sphere registration + fsLR_buffer = pe.Node(niu.IdentityInterface(fields=['sphere_reg_fsLR']), name='fsLR_buffer') + msm_buffer = pe.Node(niu.IdentityInterface(fields=['sphere_reg_msm']), name='msm_buffer') + + workflow.connect([ + (anat_buffer, outputnode, [ + ('anat_preproc', 'anat_preproc'), + ]), + (refined_buffer, outputnode, [ + ('anat_mask', 'anat_mask'), + ('anat_brain', 'anat_brain'), + ]), + (seg_buffer, outputnode, [ + ('anat_dseg', 'anat_dseg'), + ('anat_tpms', 'anat_tpms'), + ]), + (anat2std_buffer, outputnode, [('out', 'anat2std_xfm')]), + (std2anat_buffer, outputnode, [('out', 'std2anat_xfm')]), + (template_buffer, outputnode, [('out', 'template')]), + (sourcefile_buffer, outputnode, [('anat_source_files', 'anat_valid_list')]), + (surfaces_buffer, outputnode, [ + ('white', 'white'), + ('pial', 'pial'), + ('midthickness', 'midthickness'), + ('sphere', 'sphere'), + ('sphere_reg', 'sphere_reg'), + ('thickness', 'thickness'), + ('sulc', 'sulc'), + ]), + 
(fsLR_buffer, outputnode, [('sphere_reg_fsLR', 'sphere_reg_fsLR')]), + (msm_buffer, outputnode, [('sphere_reg_msm', 'sphere_reg_msm')]), + ]) # fmt:skip + + # Reporting + anat_reports_wf = init_anat_reports_wf( + spaces=spaces, + surface_recon=recon_method, + output_dir=output_dir, + sloppy=sloppy, + ) + + workflow.connect([ + (outputnode, anat_reports_wf, [ + ('anat_valid_list', 'inputnode.source_file'), + ('anat_preproc', 'inputnode.anat_preproc'), + ('anat_mask', 'inputnode.anat_mask'), + ('anat_dseg', 'inputnode.anat_dseg'), + ('template', 'inputnode.template'), + ('anat2std_xfm', 'inputnode.anat2std_xfm'), + ('subjects_dir', 'inputnode.subjects_dir'), + ('subject_id', 'inputnode.subject_id'), + ]), + ]) # fmt:skip + desc = ( '\nAnatomical data preprocessing\n\n: ' f'A total of {len(anatomicals)} {anat} images were found ' 'within the input BIDS dataset.\n' ) + # Lowercase to match pattern + anat = reference_anat.lower() + + # Derivatives + anat_preproc = precomputed.get(f'{anat}_preproc', None) + anat_mask = precomputed.get(f'{anat}_mask', None) + anat_tpms = precomputed.get(f'{anat}_tpms', None) + anat_dseg = precomputed.get(f'{anat}_dseg', None) + + anat_validate = pe.Node(ValidateImage(), name='anat_validate', run_without_submitting=True) + if not anat_preproc: + LOGGER.info(f'ANAT Stage 1: Adding {reference_anat} template workflow') + desc += ( + f'The {reference_anat} image was denoised and corrected for intensity ' + 'non-uniformity (INU)' + ) + + anat_template_wf = init_anat_template_wf( + image_type=reference_anat, + num_files=len(anatomicals), + longitudinal=longitudinal, + omp_nthreads=omp_nthreads, + name=f'{anat}_template_wf', + ) + ds_anat_template_wf = init_ds_template_wf( + image_type=reference_anat, + output_dir=output_dir, + num_anat=len(anatomicals), + name='ds_t1w_template_wf', + ) + + workflow.connect([ + (anat_template_wf, sourcefile_buffer, [ + ('outputnode.anat_valid_list', 'anat_source_files'), + ]), + (anat_template_wf, anat_reports_wf, [ + ('outputnode.out_report', 'inputnode.anat_conform_report'), + ]), + ]) # fmt:skip + + workflow.connect([ + (inputnode, anat_template_wf, [('t1w', 'inputnode.anat_files')]), + (anat_template_wf, anat_validate, [('outputnode.anat_ref', 'in_file')]), + (anat_template_wf, ds_anat_template_wf, [ + ('outputnode.anat_realign_xfm', 'inputnode.anat_ref_xfms'), + ]), + (sourcefile_buffer, ds_anat_template_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), + (anat_buffer, ds_anat_template_wf, [('anat_preproc', 'inputnode.anat_preproc')]), + (ds_anat_template_wf, outputnode, [('outputnode.anat_preproc', 'anat_preproc')]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Found preprocessed T1w - skipping Stage 1') + desc += ' A preprocessed T1w image was provided as input.' 
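
The branch above shows the pattern used for every reusable derivative in this fit workflow: look up the `precomputed` dictionary under the lowercased reference modality, and only build a stage when the lookup comes up empty. A minimal standalone sketch of that lookup contract follows; the helper name and file path are hypothetical, not identifiers from the codebase:

def stage1_plan(precomputed: dict, reference_anat: str) -> str:
    # Keys follow the io_spec pattern: '<modality>_preproc', '<modality>_mask', ...
    anat = reference_anat.lower()
    if precomputed.get(f'{anat}_preproc'):
        # A preprocessed reference was found: validate it and skip template construction
        return 'validate-precomputed'
    # Otherwise build the anatomical reference from all available runs
    return 'build-template'

assert stage1_plan({'t2w_preproc': 'sub-01_desc-preproc_T2w.nii.gz'}, 'T2w') == 'validate-precomputed'
assert stage1_plan({}, 'T1w') == 'build-template'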
+
+        anat_validate.inputs.in_file = anat_preproc
+        sourcefile_buffer.inputs.anat_source_files = [anat_preproc]
+
+        workflow.connect([
+            (anat_validate, anat_buffer, [('out_file', 'anat_preproc')]),
+        ])  # fmt:skip
+
+    # Stage 2: Use previously computed mask or calculate
+    # (with a single anatomical, the mask is defined and applied
+    # directly in that modality's space)
+    # Anat masking logic:
+    # PCM = Pre-computed mask
+    #       (when a pre-computed template is also given, PCM corresponds to it)
+    # PCT = Pre-computed template
+    # SS = Skull stripping required
+    #
+    # PCM, PCT, SS -> Apply PCM to PCT
+    # PCM, PCT, !SS -> Apply PCM to PCT
+    # PCM, !PCT, SS -> Apply PCM to template, then run INU
+    # PCM, !PCT, !SS -> Apply PCM to template, then run INU
+    # !PCM, PCT, SS -> Run brain extraction
+    # !PCM, PCT, !SS -> Binarize PCT
+    # !PCM, !PCT, SS -> Run brain extraction
+    # !PCM, !PCT, !SS -> INU correct template
+    apply_mask = pe.Node(ApplyMask(), name='apply_mask')
+    if not anat_mask:
+        if skull_strip_mode == 'auto':
+            run_skull_strip = not all(_is_skull_stripped(img) for img in anatomicals)
+        else:
+            run_skull_strip = {'force': True, 'skip': False}[skull_strip_mode]
+
+        if not run_skull_strip:
+            desc += (
+                f'The {reference_anat} reference was previously skull-stripped; '
+                'a brain mask was derived from the input image.'
+            )
+
+            if not anat_preproc:
+                LOGGER.info('ANAT Stage 2b: Skipping skull-strip, INU-correction only')
+
+                n4_only_wf = init_n4_only_wf(
+                    omp_nthreads=omp_nthreads,
+                    bids_suffix=reference_anat,
+                    atropos_use_random_seed=not skull_strip_fixed_seed,
+                    name='n4_only_wf',
+                )
+                workflow.connect([
+                    (anat_validate, n4_only_wf, [('out_file', 'inputnode.in_files')]),
+                    (n4_only_wf, anat_buffer, [
+                        (('outputnode.bias_corrected', pop_file), 't2w_preproc'),
+                        ('outputnode.out_mask', 't2w_mask'),
+                        (('outputnode.out_file', pop_file), 't2w_brain'),
+                        ('outputnode.out_segm', 'ants_seg'),
+                    ]),
+                ])  # fmt:skip
+            else:
+                LOGGER.info('ANAT Stage 2b: Skipping skull-strip, generating mask from input')
+                binarize = pe.Node(Binarize(thresh_low=2), name='binarize')
+                workflow.connect([
+                    (anat_validate, binarize, [('out_file', 'in_file')]),
+                    (anat_validate, anat_buffer, [('out_file', 'anat_brain')]),
+                    (binarize, anat_buffer, [('out_file', 'anat_mask')]),
+                ])  # fmt:skip
+        else:
+            LOGGER.info(f'ANAT Atlas-based brain mask will be calculated on the {reference_anat}')
+            brain_extraction_wf = init_infant_brain_extraction_wf(
+                omp_nthreads=omp_nthreads,
+                sloppy=sloppy,
+                age_months=age_months,
+                ants_affine_init=True,
+                skull_strip_template=skull_strip_template.space,
+                template_specs=skull_strip_template.spec,
+                debug='registration' in config.execution.debug,
+            )
+
+            workflow.connect([
+                (anat_validate, brain_extraction_wf, [
+                    ('out_file', 'inputnode.t2w_preproc'),
+                ]),
+                (brain_extraction_wf, anat_buffer, [
+                    ('outputnode.out_mask', 'anat_mask'),
+                    ('outputnode.t2w_brain', 'anat_brain'),
+                ]),
+            ])  # fmt:skip
+
+    else:
+        LOGGER.info(f'ANAT Found {reference_anat} brain mask')
+        desc += 'A pre-computed brain mask was provided as input and used throughout the workflow.'
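
Read as a truth table, the eight PCM/PCT/SS cases in the comment above reduce to three booleans. The following standalone sketch captures the decision they encode; the function and returned labels are hypothetical illustrations, not identifiers from the codebase:

def mask_strategy(pcm: bool, pct: bool, ss: bool) -> str:
    # pcm: pre-computed mask; pct: pre-computed template; ss: skull-stripping required
    if pcm:
        # A provided mask is always applied; without a template, INU correction follows
        return 'apply-mask' if pct else 'apply-mask-then-inu'
    if ss:
        return 'atlas-brain-extraction'
    return 'binarize-template' if pct else 'inu-only'

# !PCM, PCT, !SS -> Binarize PCT
assert mask_strategy(False, True, False) == 'binarize-template'
# !PCM, !PCT, SS -> Run brain extraction
assert mask_strategy(False, False, True) == 'atlas-brain-extraction'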
+ anat_buffer.inputs.anat_mask = anat_mask + apply_mask.inputs.in_mask = anat_mask + workflow.connect([ + (anat_validate, apply_mask, [('out_file', 'in_file')]), + (apply_mask, anat_buffer, [('out_file', 'anat_brain')]), + ]) # fmt:skip + + ## Ending workflow.__desc__ = desc return workflow From 2d94ad1e1b7e5aa898dd02e78b2c45c9fdd4f85d Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Wed, 1 May 2024 13:35:02 -0400 Subject: [PATCH 061/142] ENH: Finalize single anat workflow --- nibabies/workflows/anatomical/fit.py | 459 ++++++++++++++++++++++++++- 1 file changed, 454 insertions(+), 5 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index 2d9f5ca8..09639504 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -1391,10 +1391,11 @@ def init_infant_single_anat_fit_wf( anat = reference_anat.lower() # Derivatives - anat_preproc = precomputed.get(f'{anat}_preproc', None) - anat_mask = precomputed.get(f'{anat}_mask', None) - anat_tpms = precomputed.get(f'{anat}_tpms', None) - anat_dseg = precomputed.get(f'{anat}_dseg', None) + anat_preproc = precomputed.get(f'{anat}_preproc') + anat_mask = precomputed.get(f'{anat}_mask') + anat_tpms = precomputed.get(f'{anat}_tpms') + anat_dseg = precomputed.get(f'{anat}_dseg') + anat_aseg = precomputed.get(f'{anat}_aseg') anat_validate = pe.Node(ValidateImage(), name='anat_validate', run_without_submitting=True) if not anat_preproc: @@ -1538,8 +1539,456 @@ def init_infant_single_anat_fit_wf( (apply_mask, anat_buffer, [('out_file', 'anat_brain')]), ]) # fmt:skip - ## Ending + # Stage 3: Segmentation + seg_method = 'jlf' if config.execution.segmentation_atlases_dir else 'fast' + if anat_aseg: + LOGGER.info('ANAT Found precomputed anatomical segmentation') + aseg_buffer.inputs.anat_aseg = anat_aseg + + if not (anat_dseg and anat_tpms): + LOGGER.info('ANAT Stage 3: Tissue segmentation') + segmentation_wf = init_segmentation_wf( + sloppy=sloppy, + method=seg_method, + image_type=reference_anat, + omp_nthreads=omp_nthreads, + has_aseg=bool(anat_aseg), + ) + + workflow.connect([ + (anat_buffer, segmentation_wf, [('anat_brain', 'inputnode.anat_brain')]), + (segmentation_wf, seg_buffer, [ + ('outputnode.anat_dseg', 'anat_dseg'), + ('outputnode.anat_tpms', 'anat_tpms'), + ]), + ]) # fmt:skip + + if anat_aseg: + workflow.connect(aseg_buffer, 'anat_aseg', segmentation_wf, 'inputnode.anat_aseg') + elif seg_method == 'jlf': + workflow.connect(segmentation_wf, 'outputnode.anat_aseg', aseg_buffer, 'anat_aseg') + # TODO: datasink aseg + + if not anat_dseg: + ds_dseg_wf = init_ds_dseg_wf(output_dir=str(output_dir)) + workflow.connect([ + (sourcefile_buffer, ds_dseg_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), + (segmentation_wf, ds_dseg_wf, [ + ('outputnode.anat_dseg', 'inputnode.anat_dseg'), + ]), + ]) # fmt:skip + + if not anat_tpms: + ds_tpms_wf = init_ds_tpms_wf(output_dir=str(output_dir)) + workflow.connect([ + (sourcefile_buffer, ds_tpms_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), + (segmentation_wf, ds_tpms_wf, [ + ('outputnode.anat_tpms', 'inputnode.anat_tpms'), + ]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Stage 3: Skipping segmentation workflow') + if anat_dseg: + LOGGER.info('ANAT Found discrete segmentation') + desc += 'Precomputed discrete tissue segmentations were provided as inputs.\n' + seg_buffer.inputs.anat_dseg = anat_dseg + if anat_tpms: + LOGGER.info('ANAT Found tissue probability maps') + desc += 'Precomputed tissue 
probability maps were provided as inputs.\n'
+            seg_buffer.inputs.anat_tpms = anat_tpms
+
+    # Stage 4: Normalization
+    templates = []
+    found_xfms = {}
+    for template in spaces.get_spaces(nonstandard=False, dim=(3,)):
+        xfms = precomputed.get('transforms', {}).get(template, {})
+        if set(xfms) != {'forward', 'reverse'}:
+            templates.append(template)
+        else:
+            found_xfms[template] = xfms
+
+    template_buffer.inputs.in1 = list(found_xfms)
+    anat2std_buffer.inputs.in1 = [xfm['forward'] for xfm in found_xfms.values()]
+    std2anat_buffer.inputs.in1 = [xfm['reverse'] for xfm in found_xfms.values()]
+
+    if templates:
+        LOGGER.info(f'ANAT Stage 4: Preparing normalization workflow for {templates}')
+        register_template_wf = init_register_template_wf(
+            sloppy=sloppy,
+            omp_nthreads=omp_nthreads,
+            templates=templates,
+        )
+        ds_template_registration_wf = init_ds_template_registration_wf(
+            output_dir=str(output_dir),
+            image_type=reference_anat,
+        )
+
+        workflow.connect([
+            (inputnode, register_template_wf, [('roi', 'inputnode.lesion_mask')]),
+            (anat_buffer, register_template_wf, [('anat_preproc', 'inputnode.moving_image')]),
+            (refined_buffer, register_template_wf, [('anat_mask', 'inputnode.moving_mask')]),
+            (sourcefile_buffer, ds_template_registration_wf, [
+                ('anat_source_files', 'inputnode.source_files')
+            ]),
+            (register_template_wf, ds_template_registration_wf, [
+                ('outputnode.template', 'inputnode.template'),
+                ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'),
+                ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm'),
+            ]),
+            (register_template_wf, template_buffer, [('outputnode.template', 'in2')]),
+            (ds_template_registration_wf, std2anat_buffer, [('outputnode.std2anat_xfm', 'in2')]),
+            (ds_template_registration_wf, anat2std_buffer, [('outputnode.anat2std_xfm', 'in2')]),
+        ])  # fmt:skip
+    if found_xfms:
+        LOGGER.info(f'ANAT Stage 4: Found pre-computed registrations for {found_xfms}')
+
+    # Only refine mask if necessary
+    if anat_mask or recon_method is None:
+        workflow.connect([
+            (anat_buffer, refined_buffer, [
+                ('anat_mask', 'anat_mask'),
+                ('anat_brain', 'anat_brain'),
+            ]),
+        ])  # fmt:skip
+
     workflow.__desc__ = desc
+
+    # Stage 5: Surface reconstruction
+    if recon_method == 'mcribs':
+        if reference_anat == 'T1w':
+            LOGGER.warning('Attempting to use MCRIBS with a T1w file, good luck.')
+
+        from nibabies.workflows.anatomical.surfaces import init_mcribs_surface_recon_wf
+
+        LOGGER.info('ANAT Stage 5: Preparing M-CRIB-S reconstruction workflow')
+        surface_recon_wf = init_mcribs_surface_recon_wf(
+            omp_nthreads=omp_nthreads,
+            use_aseg=bool(anat_aseg),
+            use_mask=True,
+            precomputed=precomputed,
+            mcribs_dir=str(config.execution.mcribs_dir),
+        )
+
+        workflow.connect([
+            (inputnode, surface_recon_wf, [
+                ('subject_id', 'inputnode.subject_id'),
+                ('subjects_dir', 'inputnode.subjects_dir'),
+            ]),
+            (anat_buffer, surface_recon_wf, [
+                ('anat_preproc', 'inputnode.t2w'),
+                ('anat_mask', 'inputnode.in_mask'),
+            ]),
+            (aseg_buffer, surface_recon_wf, [
+                ('anat_aseg', 'inputnode.in_aseg'),
+            ]),
+            (surface_recon_wf, outputnode, [
+                ('outputnode.subjects_dir', 'subjects_dir'),
+                ('outputnode.subject_id', 'subject_id'),
+            ]),
+        ])  # fmt:skip
+
+    else:
+        from smriprep.utils.misc import fs_isRunning
+
+        fs_isrunning = pe.Node(
+            niu.Function(function=fs_isRunning), overwrite=True, name='fs_isrunning'
+        )
+        fs_isrunning.inputs.logger = LOGGER
+
+        if recon_method == 'freesurfer':
+            from smriprep.workflows.surfaces import init_surface_recon_wf
+
+            LOGGER.info('ANAT Stage 5: Preparing FreeSurfer recon-all workflow')
+            fs_isrunning = pe.Node(
+                niu.Function(function=fs_isRunning), overwrite=True, name='fs_isrunning'
+            )
+            fs_isrunning.inputs.logger = LOGGER
+
+            surface_recon_wf = init_surface_recon_wf(
+                name='surface_recon_wf',
+                omp_nthreads=omp_nthreads,
+                hires=True,
+                fs_no_resume=False,
+                precomputed=precomputed,
+            )
+
+        elif recon_method == 'infantfs':
+            from nibabies.workflows.anatomical.surfaces import init_infantfs_surface_recon_wf
+
+            LOGGER.info('ANAT Stage 5: Preparing Infant FreeSurfer workflow')
+            surface_recon_wf = init_infantfs_surface_recon_wf(
+                age_months=age_months,
+                precomputed=precomputed,
+                omp_nthreads=omp_nthreads,
+                use_aseg=bool(anat_aseg),
+            )
+
+        workflow.connect([
+            (inputnode, fs_isrunning, [
+                ('subjects_dir', 'subjects_dir'),
+                ('subject_id', 'subject_id'),
+            ]),
+            (inputnode, surface_recon_wf, [
+                ('subject_id', 'inputnode.subject_id'),
+            ]),
+            (fs_isrunning, surface_recon_wf, [('out', 'inputnode.subjects_dir')]),
+            (anat_validate, surface_recon_wf, [('out_file', 'inputnode.t1w')]),
+            (anat_buffer, surface_recon_wf, [('anat_brain', 'inputnode.skullstripped_t1')]),
+            (surface_recon_wf, outputnode, [
+                ('outputnode.subjects_dir', 'subjects_dir'),
+                ('outputnode.subject_id', 'subject_id'),
+            ]),
+        ])  # fmt:skip
+
+        if anat_aseg:
+            workflow.connect(aseg_buffer, 'anat_aseg', surface_recon_wf, 'inputnode.in_aseg')
+
+    fsnative_xfms = precomputed.get('transforms', {}).get('fsnative')
+    if not fsnative_xfms:
+        ds_fs_registration_wf = init_ds_fs_registration_wf(
+            image_type=reference_anat, output_dir=output_dir
+        )
+
+        if recon_method == 'freesurfer':
+            workflow.connect([
+                (surface_recon_wf, fsnative_buffer, [
+                    ('outputnode.fsnative2t1w_xfm', 'fsnative2anat_xfm'),
+                    ('outputnode.t1w2fsnative_xfm', 'anat2fsnative_xfm'),
+                ]),
+            ])  # fmt:skip
+        else:
+            workflow.connect([
+                (surface_recon_wf, fsnative_buffer, [
+                    ('outputnode.fsnative2anat_xfm', 'fsnative2anat_xfm'),
+                    ('outputnode.anat2fsnative_xfm', 'anat2fsnative_xfm'),
+                ]),
+            ])  # fmt:skip
+
+        workflow.connect([
+            (sourcefile_buffer, ds_fs_registration_wf, [
+                ('anat_source_files', 'inputnode.source_files'),
+            ]),
+            (fsnative_buffer, ds_fs_registration_wf, [
+                ('fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'),
+            ]),
+            (fsnative_buffer, outputnode, [
+                ('fsnative2anat_xfm', 'fsnative2anat_xfm'),
+            ]),
+        ])  # fmt:skip
+    elif 'reverse' in fsnative_xfms:
+        LOGGER.info('ANAT Found fsnative-to-anatomical transform - skipping registration')
+        outputnode.inputs.fsnative2anat_xfm = fsnative_xfms['reverse']
+    else:
+        raise RuntimeError(
+            'Found an anatomical-to-fsnative transform without the reverse. Time to handle this.'
+ ) + + if not anat_mask: + LOGGER.info('ANAT Stage 6: Preparing mask refinement workflow') + # Stage 6: Refine ANTs mask with FreeSurfer segmentation + refinement_wf = init_refinement_wf() + applyrefined = pe.Node(ApplyMask(), name='applyrefined') + + workflow.connect([ + (surface_recon_wf, refinement_wf, [ + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), + ('outputnode.subject_id', 'inputnode.subject_id'), + ]), + (fsnative_buffer, refinement_wf, [ + ('fsnative2anat_xfm', 'inputnode.fsnative2t1w_xfm'), + ]), + (anat_buffer, refinement_wf, [ + ('anat_preproc', 'inputnode.reference_image'), + ('ants_seg', 'inputnode.ants_segs'), # TODO: Verify this is the same as dseg + ]), + (anat_buffer, applyrefined, [('anat_preproc', 'in_file')]), + (refinement_wf, applyrefined, [('outputnode.out_brainmask', 'in_mask')]), + (refinement_wf, refined_buffer, [('outputnode.out_brainmask', 'anat_mask')]), + (applyrefined, refined_buffer, [('out_file', 'anat_brain')]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Found brain mask - skipping Stage 6') + + # Stages 7-9: Surface conversion and registration + # sphere_reg is needed to generate sphere_reg_fsLR + # sphere and sulc are needed to generate sphere_reg_msm + # white, pial, midthickness and thickness are needed to resample in the cortical ribbon + # TODO: Consider paring down or splitting into a subworkflow that can be called on-demand + # A subworkflow would still need to check for precomputed outputs + needed_anat_surfs = ['white', 'pial', 'midthickness'] + needed_metrics = ['thickness', 'sulc'] + needed_spheres = ['sphere_reg', 'sphere'] + + # Detect pre-computed surfaces + found_surfs = { + surf: sorted(precomputed[surf]) + for surf in needed_anat_surfs + needed_metrics + needed_spheres + if len(precomputed.get(surf, [])) == 2 + } + if found_surfs: + LOGGER.info(f'ANAT Stage 7: Found pre-converted surfaces for {list(found_surfs)}') + surfaces_buffer.inputs.trait_set(**found_surfs) + + # Stage 8: Surface conversion + surfs = [surf for surf in needed_anat_surfs if surf not in found_surfs] + spheres = [sphere for sphere in needed_spheres if sphere not in found_surfs] + if surfs or spheres: + LOGGER.info(f'ANAT Stage 7: Creating GIFTI surfaces for {surfs + spheres}') + if surfs: + gifti_surfaces_wf = init_gifti_surfaces_wf(surfaces=surfs) + ds_surfaces_wf = init_ds_surfaces_wf(output_dir=output_dir, surfaces=surfs) + + workflow.connect([ + (surface_recon_wf, gifti_surfaces_wf, [ + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), + ]), + (fsnative_buffer, gifti_surfaces_wf, [ + ('fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), + ]), + (gifti_surfaces_wf, surfaces_buffer, [ + (f'outputnode.{surf}', surf) for surf in surfs + ]), + (sourcefile_buffer, ds_surfaces_wf, [('anat_source_files', 'inputnode.source_files')]), + (gifti_surfaces_wf, ds_surfaces_wf, [ + (f'outputnode.{surf}', f'inputnode.{surf}') for surf in surfs + ]), + ]) # fmt:skip + if spheres: + gifti_spheres_wf = init_gifti_surfaces_wf( + surfaces=spheres, to_scanner=False, name='gifti_spheres_wf' + ) + ds_spheres_wf = init_ds_surfaces_wf( + output_dir=output_dir, + surfaces=spheres, + name='ds_spheres_wf', + ) + + workflow.connect([ + (surface_recon_wf, gifti_spheres_wf, [ + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), + # No transform for spheres, following HCP pipelines' lead + ]), + (gifti_spheres_wf, surfaces_buffer, [ + (f'outputnode.{sphere}', sphere) for 
sphere in spheres + ]), + (sourcefile_buffer, ds_spheres_wf, [('anat_source_files', 'inputnode.source_files')]), + (gifti_spheres_wf, ds_spheres_wf, [ + (f'outputnode.{sphere}', f'inputnode.{sphere}') for sphere in spheres + ]), + ]) # fmt:skip + metrics = [metric for metric in needed_metrics if metric not in found_surfs] + if metrics: + LOGGER.info(f'ANAT Stage 8: Creating GIFTI metrics for {metrics}') + gifti_morph_wf = init_gifti_morphometrics_wf(morphometrics=metrics) + ds_morph_wf = init_ds_surface_metrics_wf( + bids_root=bids_root, + output_dir=output_dir, + metrics=metrics, + name='ds_morph_wf', + ) + + workflow.connect([ + (surface_recon_wf, gifti_morph_wf, [ + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), + ]), + (gifti_morph_wf, surfaces_buffer, [ + (f'outputnode.{metric}', metric) for metric in metrics + ]), + (sourcefile_buffer, ds_morph_wf, [('anat_source_files', 'inputnode.source_files')]), + (gifti_morph_wf, ds_morph_wf, [ + (f'outputnode.{metric}', f'inputnode.{metric}') for metric in metrics + ]), + ]) # fmt:skip + + if 'anat_ribbon' not in precomputed: + LOGGER.info('ANAT Stage 8a: Creating cortical ribbon mask') + anat_ribbon_wf = init_anat_ribbon_wf() + ds_ribbon_mask_wf = init_ds_mask_wf( + bids_root=bids_root, + output_dir=output_dir, + mask_type='ribbon', + name='ds_ribbon_mask_wf', + ) + + workflow.connect([ + (anat_buffer, anat_ribbon_wf, [ + ('anat_preproc', 'inputnode.ref_file'), + ]), + (surfaces_buffer, anat_ribbon_wf, [ + ('white', 'inputnode.white'), + ('pial', 'inputnode.pial'), + ]), + (sourcefile_buffer, ds_ribbon_mask_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), + (anat_ribbon_wf, ds_ribbon_mask_wf, [ + ('outputnode.anat_ribbon', 'inputnode.mask_file'), + ]), + (ds_ribbon_mask_wf, outputnode, [('outputnode.mask_file', 'anat_ribbon')]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Stage 8a: Found pre-computed cortical ribbon mask') + outputnode.inputs.anat_ribbon = precomputed['anat_ribbon'] + + # Stage 9: Baseline fsLR registration + if recon_method == 'mcribs': + if len(precomputed.get('sphere_reg_dhcpAsym', [])) < 2: + LOGGER.info('ANAT Stage 9: Creating dhcp-fsLR registration sphere') + fsLR_reg_wf = init_mcribs_dhcp_wf() + + ds_fsLR_reg_wf = init_ds_surfaces_wf( + output_dir=output_dir, + surfaces=['sphere_reg_dhcpAsym'], + name='ds_fsLR_reg_wf', + ) + + workflow.connect([ + (surfaces_buffer, fsLR_reg_wf, [('sphere_reg', 'inputnode.sphere_reg')]), + (sourcefile_buffer, ds_fsLR_reg_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), + (fsLR_reg_wf, ds_fsLR_reg_wf, [ + ('outputnode.sphere_reg_dhcpAsym', 'inputnode.sphere_reg_dhcpAsym') + ]), + (ds_fsLR_reg_wf, fsLR_buffer, [ + ('outputnode.sphere_reg_dhcpAsym', 'sphere_reg_fsLR'), + ]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Stage 9: Found pre-computed dhcp-fsLR registration sphere') + fsLR_buffer.inputs.sphere_reg_fsLR = sorted(precomputed['sphere_reg_dhcpAsym']) + + else: + if len(precomputed.get('sphere_reg_fsLR', [])) < 2: + LOGGER.info('ANAT Stage 9: Creating fsLR registration sphere') + fsLR_reg_wf = init_fsLR_reg_wf() + + ds_fsLR_reg_wf = init_ds_surfaces_wf( + output_dir=output_dir, + surfaces=['sphere_reg_fsLR'], + name='ds_fsLR_reg_wf', + ) + + workflow.connect([ + (surfaces_buffer, fsLR_reg_wf, [('sphere_reg', 'inputnode.sphere_reg')]), + (sourcefile_buffer, ds_fsLR_reg_wf, [ + ('anat_source_files', 'inputnode.source_files'), + ]), + (fsLR_reg_wf, ds_fsLR_reg_wf, [ + ('outputnode.sphere_reg_fsLR', 
'inputnode.sphere_reg_fsLR') + ]), + (ds_fsLR_reg_wf, fsLR_buffer, [('outputnode.sphere_reg_fsLR', 'sphere_reg_fsLR')]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Stage 9: Found pre-computed fsLR registration sphere') + fsLR_buffer.inputs.sphere_reg_fsLR = sorted(precomputed['sphere_reg_fsLR']) return workflow From 1a0be9315f1e28f77b7acfdf10b02062fa0ae0a1 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Wed, 1 May 2024 21:04:47 -0400 Subject: [PATCH 062/142] RF: Specify T1w/T2w space in derivatives --- nibabies/data/io_spec_anat.json | 50 +++++++++++++++++++++++++++------ 1 file changed, 42 insertions(+), 8 deletions(-) diff --git a/nibabies/data/io_spec_anat.json b/nibabies/data/io_spec_anat.json index 253812ef..4d8ebee9 100644 --- a/nibabies/data/io_spec_anat.json +++ b/nibabies/data/io_spec_anat.json @@ -23,7 +23,7 @@ }, "t1w_mask": { "datatype": "anat", - "space": null, + "space": "T1w", "desc": "brain", "suffix": "mask", "extension": [ @@ -33,7 +33,7 @@ }, "t2w_mask": { "datatype": "anat", - "space": null, + "space": "T2w", "desc": "brain", "suffix": "mask", "extension": [ @@ -41,9 +41,9 @@ ".nii" ] }, - "anat_dseg": { + "t1w_dseg": { "datatype": "anat", - "space": null, + "space": "T1w", "desc": null, "suffix": "dseg", "extension": [ @@ -51,9 +51,19 @@ ".nii" ] }, - "anat_tpms": { + "t2w_dseg": { "datatype": "anat", - "space": null, + "space": "T2w", + "desc": null, + "suffix": "dseg", + "extension": [ + ".nii.gz", + ".nii" + ] + }, + "t1w_tpms": { + "datatype": "anat", + "space": "T1w", "label": [ "GM", "WM", @@ -65,9 +75,33 @@ ".nii" ] }, - "anat_aseg": { + "t2w_tpms": { "datatype": "anat", - "space": null, + "space": "T2w", + "label": [ + "GM", + "WM", + "CSF" + ], + "suffix": "probseg", + "extension": [ + ".nii.gz", + ".nii" + ] + }, + "t1w_aseg": { + "datatype": "anat", + "space": "T1w", + "desc": "aseg", + "suffix": "dseg", + "extension": [ + ".nii.gz", + ".nii" + ] + }, + "t2w_aseg": { + "datatype": "anat", + "space": "T2w", "desc": "aseg", "suffix": "dseg", "extension": [ From 2ec91e7f52cab12a9b9974d626320b68da53343f Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Wed, 1 May 2024 21:07:01 -0400 Subject: [PATCH 063/142] FIX: Specify space on outputs that may be in T1w/T2w space --- nibabies/workflows/anatomical/fit.py | 252 ++++++++++++++++++--------- nibabies/workflows/base.py | 43 ++--- 2 files changed, 186 insertions(+), 109 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index 09639504..dedd4c86 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -50,6 +50,7 @@ init_mcribs_dhcp_wf, init_resample_midthickness_dhcp_wf, ) +from nibabies.interfaces import DerivativesDataSink if ty.TYPE_CHECKING: from niworkflows.utils.spaces import Reference, SpatialReferences @@ -98,25 +99,7 @@ def init_infant_anat_fit_wf( if not num_t1w and not num_t2w: raise FileNotFoundError('No anatomical scans provided!') - if not num_t1w or not num_t2w: - workflow = init_infant_single_anat_fit_wf( - reference_anat='T1w' if num_t1w else 'T2w', - age_months=age_months, - anatomicals=t1w or t2w, - bids_root=bids_root, - precomputed=precomputed, - longitudinal=longitudinal, - omp_nthreads=omp_nthreads, - output_dir=output_dir, - segmentation_atlases=segmentation_atlases, - skull_strip_mode=skull_strip_mode, - skull_strip_template=skull_strip_template, - skull_strip_fixed_seed=skull_strip_fixed_seed, - sloppy=sloppy, - spaces=spaces, - cifti_output=cifti_output, - ) - return workflow + anat = 
reference_anat.lower() # Organization # ------------ @@ -465,9 +448,9 @@ def init_infant_anat_fit_wf( # Stage 2: Use previously computed mask or calculate # If we only have one mask (could be either T1w/T2w), # just apply transform to get it in the other space - t1w_mask = precomputed.get('t1w_mask', None) - t2w_mask = precomputed.get('t2w_mask', None) - anat_mask = t1w_mask or t2w_mask + t1w_mask = precomputed.get('t1w_mask') + t2w_mask = precomputed.get('t2w_mask') + anat_mask = precomputed.get(f'{anat}_mask') # T1w masking - define pre-emptively apply_t1w_mask = pe.Node(ApplyMask(), name='apply_t1w_mask') apply_t2w_mask = apply_t1w_mask.clone(name='apply_t2w_mask') @@ -502,13 +485,13 @@ def init_infant_anat_fit_wf( if not t1w_preproc: LOGGER.info('ANAT Stage 2: Skipping skull-strip, INU-correction only') - n4_only_wf = init_n4_only_wf( + t1w_n4_only_wf = init_n4_only_wf( omp_nthreads=omp_nthreads, atropos_use_random_seed=not skull_strip_fixed_seed, ) workflow.connect([ - (t1w_validate, n4_only_wf, [('out_file', 'inputnode.in_files')]), - (n4_only_wf, t1w_buffer, [ + (t1w_validate, t1w_n4_only_wf, [('out_file', 'inputnode.in_files')]), + (t1w_n4_only_wf, t1w_buffer, [ (('outputnode.bias_corrected', pop_file), 't1w_preproc'), ('outputnode.out_mask', 't1w_mask'), (('outputnode.out_file', pop_file), 't1w_brain'), @@ -531,12 +514,16 @@ def init_infant_anat_fit_wf( transform_t2w_mask = pe.Node( ApplyTransforms(interpolation='MultiLabel'), name='transform_t2w_mask' ) + # TODO: May need to differentiate presence of t1w_preproc workflow.connect([ (t2w_buffer, transform_t2w_mask, [('t2w_mask', 'input_image')]), (coreg_buffer, transform_t2w_mask, [('t2w2t1w_xfm', 'transforms')]), + (t1w_validate, transform_t2w_mask, [('out_file', 'reference_image')]), + + (transform_t2w_mask, t1w_buffer, [('output_image', 't1w_mask')]), (transform_t2w_mask, apply_t1w_mask, [('output_image', 'in_mask')]), - (t1w_buffer, apply_t1w_mask, [('t1w_preproc', 'in_file')]), - # TODO: Unsure about this connection^ + (t1w_validate, apply_t1w_mask, [('out_file', 'in_file')]), + (apply_t1w_mask, t1w_buffer, [('out_file', 't1w_brain')]), ]) # fmt:skip # Save T1w mask @@ -544,6 +531,7 @@ def init_infant_anat_fit_wf( bids_root=bids_root, output_dir=output_dir, mask_type='brain', + extra_entities={'space': 'T1w'}, name='ds_t1w_mask_wf', ) workflow.connect([ @@ -645,45 +633,66 @@ def init_infant_anat_fit_wf( else: # Check whether we can convert a previously computed T2w mask # or need to run the atlas based brain extraction - if t1w_mask: - LOGGER.info('ANAT T1w mask will be transformed into T2w space') - transform_t1w_mask = pe.Node( - ApplyTransforms(interpolation='MultiLabel'), - name='transform_t1w_mask', - ) - workflow.connect([ - (t1w_buffer, transform_t1w_mask, [('t1w_mask', 'input_image')]), - (coreg_buffer, transform_t1w_mask, [('t1w2t2w_xfm', 'transforms')]), - (transform_t1w_mask, apply_t2w_mask, [('output_image', 'in_mask')]), - (t2w_buffer, apply_t1w_mask, [('t2w_preproc', 'in_file')]), - # TODO: Unsure about this connection^ - ]) # fmt:skip - else: - LOGGER.info('ANAT Atlas-based brain mask will be calculated on the T2w') - brain_extraction_wf = init_infant_brain_extraction_wf( - omp_nthreads=omp_nthreads, - sloppy=sloppy, - age_months=age_months, - ants_affine_init=True, - skull_strip_template=skull_strip_template.space, - template_specs=skull_strip_template.spec, - debug='registration' in config.execution.debug, - ) + # if t1w_mask: + # LOGGER.info('ANAT T1w mask will be transformed into T2w space') + # 
transform_t1w_mask = pe.Node( + # ApplyTransforms(interpolation='MultiLabel'), + # name='transform_t1w_mask', + # ) + + # workflow.connect([ + # (t1w_buffer, transform_t1w_mask, [('t1w_mask', 'input_image')]), + # (coreg_buffer, transform_t1w_mask, [('t1w2t2w_xfm', 'transforms')]), + # (transform_t1w_mask, apply_t2w_mask, [('output_image', 'in_mask')]), + # (t2w_buffer, apply_t1w_mask, [('t2w_preproc', 'in_file')]), + # # TODO: Unsure about this connection^ + # ]) # fmt:skip + # else: + LOGGER.info('ANAT Atlas-based brain mask will be calculated on the T2w') + brain_extraction_wf = init_infant_brain_extraction_wf( + omp_nthreads=omp_nthreads, + sloppy=sloppy, + age_months=age_months, + ants_affine_init=True, + skull_strip_template=skull_strip_template.space, + template_specs=skull_strip_template.spec, + debug='registration' in config.execution.debug, + ) - workflow.connect([ - (t2w_validate, brain_extraction_wf, [ - ('out_file', 'inputnode.t2w_preproc'), - ]), - (brain_extraction_wf, t2w_buffer, [ - ('outputnode.out_mask', 't2w_mask'), - ('outputnode.t2w_brain', 't2w_brain'), - ]), - ]) # fmt:skip + workflow.connect([ + (t2w_validate, brain_extraction_wf, [ + ('out_file', 'inputnode.t2w_preproc'), + ]), + (brain_extraction_wf, t2w_buffer, [ + ('outputnode.out_mask', 't2w_mask'), + ('outputnode.t2w_brain', 't2w_brain'), + ]), + ]) # fmt:skip + + # Save T2w mask + ds_t2w_mask_wf = init_ds_mask_wf( + bids_root=bids_root, + output_dir=output_dir, + mask_type='brain', + extra_entities={'space': 'T2w'}, + name='ds_t2w_mask_wf', + ) + workflow.connect([ + (sourcefile_buffer, ds_t2w_mask_wf, [('t2w_source_files', 'inputnode.source_files')]), + ]) # fmt:skip + if reference_anat == 'T2w': + workflow.connect([ + (refined_buffer, ds_t2w_mask_wf, [('anat_mask', 'inputnode.mask_file')]), + (ds_t2w_mask_wf, outputnode, [('outputnode.mask_file', 'anat_mask')]), + ]) # fmt:skip + else: + workflow.connect([ + (t2w_buffer, ds_t2w_mask_wf, [('t2w_mask', 'inputnode.mask_file')]), + ]) # fmt:skip else: LOGGER.info('ANAT Found T2w brain mask') - if reference_anat == 'T2w': desc += ( 'A pre-computed T1w brain mask was provided as input and used throughout the ' @@ -693,27 +702,42 @@ def init_infant_anat_fit_wf( apply_t2w_mask.inputs.in_mask = t2w_mask workflow.connect([ (t2w_validate, apply_t2w_mask, [('out_file', 'in_file')]), - (apply_t2w_mask, t2w_buffer, [('out_file', 't2w_brain')]), ]) # fmt:skip + if not t2w_preproc: + LOGGER.info('ANAT Skipping skull-strip, INU-correction only') + t2w_n4_only_wf = init_n4_only_wf( + omp_nthreads=omp_nthreads, + atropos_use_random_seed=not skull_strip_fixed_seed, + bids_suffix='T2w', + name='t2w_n4_only_wf', + ) + workflow.connect([ + (apply_t2w_mask, t2w_n4_only_wf, [('out_file', 'inputnode.in_files')]), + (t2w_n4_only_wf, t2w_buffer, [ + (('outputnode.bias_corrected', pop_file), 't2w_preproc'), + (('outputnode.out_file', pop_file), 't2w_brain'), + ]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Skipping T2w masking') + workflow.connect(apply_t2w_mask, 'out_file', t2w_buffer, 't2w_brain') + # Stage 3: Coregistration t1w2t2w_xfm = precomputed.get('t1w2t2w_xfm') t2w2t1w_xfm = precomputed.get('t2w2t1w_xfm') # To use the found xfm, requires both precomputed anatomicals to be found as well - if t1w_preproc and t2w_preproc: - if t1w2t2w_xfm: - LOGGER.info('ANAT Found T1w-T2w xfm') - desc += ( - ' A T1w-T2w coregistration transform was provided as input and used throughout ' - 'the workflow.' 
- ) - coreg_buffer.inputs.t1w2t2w_xfm = t1w2t2w_xfm - if t2w2t1w_xfm: - LOGGER.info('ANAT Found T2w-T1w xfm') - coreg_buffer.inputs.t2w2t1w_xfm = t2w2t1w_xfm + if (t1w_preproc and t2w_preproc) and (t1w2t2w_xfm and t2w2t1w_xfm): + LOGGER.info('ANAT Found T1w<->T2w xfms') + desc += ( + ' A T1w-T2w coregistration transform was provided as input and used throughout ' + 'the workflow.' + ) + coreg_buffer.inputs.t1w2t2w_xfm = t1w2t2w_xfm + coreg_buffer.inputs.t2w2t1w_xfm = t2w2t1w_xfm else: - LOGGER.info('ANAT Coregistering anatomical references') + LOGGER.info('ANAT Coregistering anatomicals') desc += ' The T1w and T2w reference volumes were co-registered using ANTs.' coregistration_wf = init_coregistration_wf( @@ -723,14 +747,58 @@ def init_infant_anat_fit_wf( t1w_mask=False, probmap=not t2w_mask, ) + + # TODO: Currently the XFMs are transform0GenericAffine.mat, transform1Warp.nii.gz + # The coregistration should be chaged to instead save + # 'composite_transform' and 'inverse_composite_transform' + # from antsRegistration (single h5 files) + # + # ds_t1w2t2w_xfm = pe.Node( + # DerivativesDataSink( + # base_directory=output_dir, + # to='T2w', + # mode='image', + # suffix='xfm', + # dismiss_entites=('desc', 'echo'), + # **{'from': 'T1w'} + # ), + # name='ds_t1w2t2w_xfm', + # run_without_submitting=True, + # ) + + # ds_t2w2t1w_xfm = pe.Node( + # DerivativesDataSink( + # base_directory=output_dir, + # to='T1w', + # mode='image', + # suffix='xfm', + # dismiss_entites=('desc', 'echo'), + # **{'from': 'T2w'} + # ), + # name='ds_t2w2t1w_xfm', + # run_without_submitting=True, + # ) + workflow.connect([ - (t1w_buffer, coregistration_wf, [ - ('t1w_preproc', 'inputnode.in_t1w'), + (t1w_validate, coregistration_wf, [ + ('out_file', 'inputnode.in_t1w'), ]), (t2w_buffer, coregistration_wf, [ ('t2w_preproc', 'inputnode.in_t2w'), ('t2w_mask', 'inputnode.in_mask'), ]), + # (coregistration_wf, ds_t1w2t2w_xfm, [ + # ('outputnode.t1w2t2w_xfm', 'in_file'), + # ]), + # (sourcefile_buffer, ds_t1w2t2w_xfm, [ + # ('t1w_source_files', 'source_file'), + # ]), + # (coregistration_wf, ds_t2w2t1w_xfm, [ + # ('outputnode.t2w2t1w_xfm', 'in_file'), + # ]), + # (sourcefile_buffer, ds_t2w2t1w_xfm, [ + # ('t2w_source_files', 'source_file'), + # ]), (coregistration_wf, coreg_buffer, [ ('outputnode.t1w2t2w_xfm', 't1w2t2w_xfm'), ('outputnode.t2w2t1w_xfm', 't2w2t1w_xfm'), @@ -738,9 +806,9 @@ def init_infant_anat_fit_wf( ]) # fmt:skip # Stage 4: Segmentation - anat_dseg = precomputed.get('anat_dseg') - anat_tpms = precomputed.get('anat_tpms') - anat_aseg = precomputed.get('anat_aseg') + anat_dseg = precomputed.get(f'{anat}_dseg') + anat_tpms = precomputed.get(f'{anat}_tpms') + anat_aseg = precomputed.get(f'{anat}_aseg') seg_method = 'jlf' if config.execution.segmentation_atlases_dir else 'fast' @@ -773,7 +841,10 @@ def init_infant_anat_fit_wf( # TODO: datasink aseg if not anat_dseg: - ds_dseg_wf = init_ds_dseg_wf(output_dir=str(output_dir)) + ds_dseg_wf = init_ds_dseg_wf( + output_dir=str(output_dir), + extra_entities={'space': reference_anat}, + ) workflow.connect([ (sourcefile_buffer, ds_dseg_wf, [ ('anat_source_files', 'inputnode.source_files'), @@ -784,7 +855,10 @@ def init_infant_anat_fit_wf( ]) # fmt:skip if not anat_tpms: - ds_tpms_wf = init_ds_tpms_wf(output_dir=str(output_dir)) + ds_tpms_wf = init_ds_tpms_wf( + output_dir=str(output_dir), + extra_entities={'space': reference_anat}, + ) workflow.connect([ (sourcefile_buffer, ds_tpms_wf, [ ('anat_source_files', 'inputnode.source_files'), @@ -1132,6 +1206,7 @@ def 
init_infant_anat_fit_wf( bids_root=bids_root, output_dir=output_dir, mask_type='ribbon', + extra_entities={'space': reference_anat}, name='ds_ribbon_mask_wf', ) @@ -1570,7 +1645,10 @@ def init_infant_single_anat_fit_wf( # TODO: datasink aseg if not anat_dseg: - ds_dseg_wf = init_ds_dseg_wf(output_dir=str(output_dir)) + ds_dseg_wf = init_ds_dseg_wf( + output_dir=str(output_dir), + extra_entities={'space': reference_anat}, + ) workflow.connect([ (sourcefile_buffer, ds_dseg_wf, [ ('anat_source_files', 'inputnode.source_files'), @@ -1581,7 +1659,10 @@ def init_infant_single_anat_fit_wf( ]) # fmt:skip if not anat_tpms: - ds_tpms_wf = init_ds_tpms_wf(output_dir=str(output_dir)) + ds_tpms_wf = init_ds_tpms_wf( + output_dir=str(output_dir), + extra_entities={'space': reference_anat}, + ) workflow.connect([ (sourcefile_buffer, ds_tpms_wf, [ ('anat_source_files', 'inputnode.source_files'), @@ -1914,6 +1995,7 @@ def init_infant_single_anat_fit_wf( bids_root=bids_root, output_dir=output_dir, mask_type='ribbon', + extra_entities={'space': reference_anat}, name='ds_ribbon_mask_wf', ) @@ -2001,6 +2083,7 @@ def init_infant_anat_apply_wf( recon_method: ty.Literal['freesurfer', 'infantfs', 'mcribs', None], sloppy: bool, spaces: 'SpatialReferences', + reference_anat: ty.Literal['T1w', 'T2w'], cifti_output: ty.Literal['91k', '170k', False], name: str = 'infant_anat_apply_wf', ) -> pe.Workflow: @@ -2093,8 +2176,7 @@ def init_infant_anat_apply_wf( if recon_method is not None: ds_fs_segs_wf = init_ds_fs_segs_wf( - bids_root=bids_root, - output_dir=output_dir, + bids_root=bids_root, output_dir=output_dir, extra_entities={'space': reference_anat} ) surface_derivatives_wf = init_surface_derivatives_wf() ds_surfaces_wf = init_ds_surfaces_wf(output_dir=output_dir, surfaces=['inflated']) diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index 8a96af2d..7ecfc857 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -460,36 +460,31 @@ def init_single_subject_wf( omp_nthreads=omp_nthreads, output_dir=output_dir, recon_method=recon_method, + reference_anat=reference_anat, sloppy=sloppy, spaces=spaces, cifti_output=cifti_output, ) reg_sphere = f'sphere_reg_{"msm" if msm_sulc else "fsLR"}' - workflow.connect( - [ - ( - anat_fit_wf, - anat_apply_wf, - [ - ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), - ('outputnode.anat_valid_list', 'inputnode.anat_valid_list'), - ('outputnode.anat_preproc', 'inputnode.anat_preproc'), - ('outputnode.anat_mask', 'inputnode.anat_mask'), - ('outputnode.anat_dseg', 'inputnode.anat_dseg'), - ('outputnode.anat_tpms', 'inputnode.anat_tpms'), - ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), - ('outputnode.midthickness', 'inputnode.midthickness'), - (f'outputnode.{reg_sphere}', f'inputnode.{reg_sphere}'), - ('outputnode.sulc', 'inputnode.sulc'), - ('outputnode.subjects_dir', 'inputnode.subjects_dir'), - ('outputnode.subject_id', 'inputnode.subject_id'), - ('outputnode.template', 'inputnode.template'), - ('outputnode.thickness', 'inputnode.thickness'), - ], - ), - ] - ) + workflow.connect([ + (anat_fit_wf, anat_apply_wf, [ + ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), + ('outputnode.anat_valid_list', 'inputnode.anat_valid_list'), + ('outputnode.anat_preproc', 'inputnode.anat_preproc'), + ('outputnode.anat_mask', 'inputnode.anat_mask'), + ('outputnode.anat_dseg', 'inputnode.anat_dseg'), + ('outputnode.anat_tpms', 'inputnode.anat_tpms'), + ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), + 
('outputnode.midthickness', 'inputnode.midthickness'),
+            (f'outputnode.{reg_sphere}', f'inputnode.{reg_sphere}'),
+            ('outputnode.sulc', 'inputnode.sulc'),
+            ('outputnode.subjects_dir', 'inputnode.subjects_dir'),
+            ('outputnode.subject_id', 'inputnode.subject_id'),
+            ('outputnode.template', 'inputnode.template'),
+            ('outputnode.thickness', 'inputnode.thickness'),
+        ]),
+    ])  # fmt:skip
     # TODO:
     # - Grab template_iterator_wf workflow
     # - Grab select_MNI2009c_xfm node

From bdcd4523114c72669e5f427b52e8a98e6190eb24 Mon Sep 17 00:00:00 2001
From: Mathias Goncalves
Date: Wed, 1 May 2024 21:08:54 -0400
Subject: [PATCH 064/142] DKR: Cache new templates

---
 scripts/fetch_templates.py | 29 ++++++++++++++++++++++++-----
 1 file changed, 24 insertions(+), 5 deletions(-)

diff --git a/scripts/fetch_templates.py b/scripts/fetch_templates.py
index 56ad1319..646f6f27 100755
--- a/scripts/fetch_templates.py
+++ b/scripts/fetch_templates.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-"Pre-emptive caching of commonly used TemplateFlow templates"
+'Pre-emptive caching of commonly used TemplateFlow templates'
 
 import templateflow.api as tf
 
@@ -31,7 +31,7 @@ def fetch_UNCInfant():
     tpl-UNCInfant/cohort-1/tpl-UNCInfant_cohort-1_label-brain_mask.nii.gz
     tpl-UNCInfant/cohort-1/tpl-UNCInfant_cohort-1_label-BrainCerebellumExtraction_mask.nii.gz
     """
-    template = "UNCInfant"
+    template = 'UNCInfant'
 
     tf.get(template, cohort=1, desc=None, suffix='T1w')
     tf.get(template, cohort=1, label='brain', suffix='probseg')
@@ -47,11 +47,14 @@ def fetch_fsaverage():
     tpl-fsaverage/tpl-fsaverage_hemi-R_den-164k_desc-std_sphere.surf.gii
     tpl-fsaverage/tpl-fsaverage_hemi-L_den-164k_desc-vaavg_midthickness.shape.gii
     tpl-fsaverage/tpl-fsaverage_hemi-R_den-164k_desc-vaavg_midthickness.shape.gii
+    tpl-fsaverage/tpl-fsaverage_hemi-L_den-41k_sphere.surf.gii
+    tpl-fsaverage/tpl-fsaverage_hemi-R_den-41k_sphere.surf.gii
     """
     template = 'fsaverage'
 
     tf.get(template, density='164k', desc='std', suffix='sphere')
     tf.get(template, density='164k', desc='vaavg', suffix='midthickness')
+    tf.get(template, density='41k', desc=None, suffix='sphere', extension='.surf.gii')
 
 
 def fetch_fsLR():
@@ -68,8 +71,8 @@ def fetch_fsLR():
     tpl-fsLR/tpl-fsLR_space-fsaverage_hemi-R_den-32k_sphere.surf.gii
     """
     tf.get('fsLR', density='32k')
-    tf.get('fsLR', density="164k", desc="std", suffix="sphere")
-    tf.get('fsLR', density="164k", suffix="midthickness")
+    tf.get('fsLR', density='164k', desc='std', suffix='sphere')
+    tf.get('fsLR', density='164k', suffix='midthickness')
 
 
 def fetch_MNIInfant(cohort=1):
@@ -90,13 +93,29 @@ def fetch_MNIInfant(cohort=1):
     tf.get(template, cohort=cohort, desc='brain', suffix='mask')
 
 
+def fetch_dhcpAsym(cohort=42):
+    """
+    Expected templates:
+
+    tpl-dhcpAsym_cohort-42_hemi-L_den-32k_sphere.surf.gii
+    tpl-dhcpAsym_cohort-42_hemi-R_den-32k_sphere.surf.gii
+    tpl-dhcpAsym_cohort-42_space-fsaverage_hemi-L_den-41k_desc-reg_sphere.surf.gii
+    tpl-dhcpAsym_cohort-42_space-fsaverage_hemi-R_den-41k_desc-reg_sphere.surf.gii
+    """
+    template = 'dhcpAsym'
+
+    tf.get(template, cohort=cohort, density='32k', desc=None, suffix='sphere')
+    tf.get(template, cohort=cohort, space='fsaverage', density='41k', desc='reg', suffix='sphere')
+
+
 def main():
     fetch_MNI6()
     fetch_UNCInfant()
     fetch_fsaverage()
     fetch_fsLR()
     fetch_MNIInfant()
+    fetch_dhcpAsym()
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()

From 67feb16c72bb3636cfdac2a2763534ee11c1633c Mon Sep 17 00:00:00 2001
From: Mathias Goncalves
Date: Thu, 2 May 2024 00:39:08 -0400
Subject: [PATCH 065/142] FIX: Handle case of multiple T1w files

---
 nibabies/interfaces/reports.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/nibabies/interfaces/reports.py b/nibabies/interfaces/reports.py
index 597ef0f2..5d0a9178 100644
--- a/nibabies/interfaces/reports.py
+++ b/nibabies/interfaces/reports.py
@@ -159,12 +159,14 @@ def _generate_segment(self):
             )
 
         elif recon_method == 'infantfs':
+            from niworkflows.utils.connections import pop_file
+
             from nibabies.interfaces.freesurfer import InfantReconAll
 
             recon = InfantReconAll(
                 subjects_dir=self.inputs.subjects_dir,
                 subject_id=self._recon_id,
-                t1_file=self.inputs.t1w,
+                t1_file=pop_file(self.inputs.t1w),
             )
             recon_status = (
                 statuses['done'] if recon.cmdline.startswith('echo') else statuses['todo']

From 4e2b238e8e895239090647885ba8a347efe94c9a Mon Sep 17 00:00:00 2001
From: Mathias Goncalves
Date: Thu, 2 May 2024 00:45:13 -0400
Subject: [PATCH 066/142] FIX: Catch errors with basic, no derivatives, processing

---
 nibabies/workflows/anatomical/fit.py | 95 +++++++++++++++++++++-------
 1 file changed, 71 insertions(+), 24 deletions(-)

diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py
index dedd4c86..144f22f3 100644
--- a/nibabies/workflows/anatomical/fit.py
+++ b/nibabies/workflows/anatomical/fit.py
@@ -44,13 +44,13 @@
 from nibabies import config
 from nibabies.workflows.anatomical.brain_extraction import init_infant_brain_extraction_wf
 from nibabies.workflows.anatomical.outputs import init_anat_reports_wf
+from nibabies.workflows.anatomical.preproc import init_anat_preproc_wf
 from nibabies.workflows.anatomical.registration import init_coregistration_wf
 from nibabies.workflows.anatomical.segmentation import init_segmentation_wf
 from nibabies.workflows.anatomical.surfaces import (
     init_mcribs_dhcp_wf,
     init_resample_midthickness_dhcp_wf,
 )
-from nibabies.interfaces import DerivativesDataSink
 
 if ty.TYPE_CHECKING:
     from niworkflows.utils.spaces import Reference, SpatialReferences
@@ -181,7 +181,7 @@ def init_infant_anat_fit_wf(
         name='t1w_buffer',
     )
     t2w_buffer = pe.Node(
-        niu.IdentityInterface(fields=['t2w_preproc', 't2w_mask' 't2w_brain']),
+        niu.IdentityInterface(fields=['t2w_preproc', 't2w_mask', 't2w_brain', 't2w_probmap']),
         name='t2w_buffer',
     )
     anat_buffer = pe.Node(
@@ -318,9 +318,6 @@ def init_infant_anat_fit_wf(
         'were found within the input BIDS dataset.'
) - t1w_preproc = precomputed.get('t1w_preproc', None) - t2w_preproc = precomputed.get('t2w_preproc', None) + t1w_preproc = precomputed.get('t1w_preproc') + t2w_preproc = precomputed.get('t2w_preproc') # Stage 1: Conform & valid T1w/T2w images # Note: Since stage 1 & 2 are tightly knit together, it may be more intuitive @@ -370,7 +370,7 @@ def init_infant_anat_fit_wf( (sourcefile_buffer, ds_t1w_template_wf, [ ('t1w_source_files', 'inputnode.source_files'), ]), - (anat_buffer, ds_t1w_template_wf, [('t1w_preproc', 'inputnode.anat_preproc')]), + (t1w_buffer, ds_t1w_template_wf, [('t1w_preproc', 'inputnode.anat_preproc')]), (ds_t1w_template_wf, outputnode, [('outputnode.anat_preproc', 't1w_preproc')]), ]) # fmt:skip else: @@ -429,7 +429,7 @@ def init_infant_anat_fit_wf( (sourcefile_buffer, ds_t2w_template_wf, [ ('t2w_source_files', 'inputnode.source_files'), ]), - (anat_buffer, ds_t2w_template_wf, [('t2w_preproc', 'inputnode.anat_preproc')]), + (t2w_buffer, ds_t2w_template_wf, [('t2w_preproc', 'inputnode.anat_preproc')]), (ds_t2w_template_wf, outputnode, [('outputnode.anat_preproc', 't2w_preproc')]), ]) # fmt:skip else: @@ -451,6 +451,7 @@ def init_infant_anat_fit_wf( t1w_mask = precomputed.get('t1w_mask') t2w_mask = precomputed.get('t2w_mask') anat_mask = precomputed.get(f'{anat}_mask') + refine_mask = False # T1w masking - define pre-emptively apply_t1w_mask = pe.Node(ApplyMask(), name='apply_t1w_mask') apply_t2w_mask = apply_t1w_mask.clone(name='apply_t2w_mask') @@ -495,9 +496,16 @@ def init_infant_anat_fit_wf( (('outputnode.bias_corrected', pop_file), 't1w_preproc'), ('outputnode.out_mask', 't1w_mask'), (('outputnode.out_file', pop_file), 't1w_brain'), - ('outputnode.out_segm', 'ants_seg'), ]), ]) # fmt:skip + + if reference_anat == 'T1w': + refine_mask = True + workflow.connect([ + (t1w_n4_only_wf, seg_buffer, [ + ('outputnode.out_segm', 'ants_segs'), + ]), + ]) # fmt:skip else: LOGGER.info('ANAT Stage 2: Skipping skull-strip, generating mask from input') binarize_t1w = pe.Node(Binarize(thresh_low=2), name='binarize_t1w') @@ -526,6 +534,17 @@ def init_infant_anat_fit_wf( (apply_t1w_mask, t1w_buffer, [('out_file', 't1w_brain')]), ]) # fmt:skip + if not t1w_preproc: + t1w_preproc_wf = init_anat_preproc_wf(name='t1w_preproc_wf') + workflow.connect([ + (t1w_validate, t1w_preproc_wf, [ + ('out_file', 'inputnode.in_anat'), + ]), + (t1w_preproc_wf, t1w_buffer, [ + ('outputnode.anat_preproc', 't1w_preproc'), + ]), + ]) # fmt:skip + # Save T1w mask ds_t1w_mask_wf = init_ds_mask_wf( bids_root=bids_root, @@ -541,7 +560,6 @@ def init_infant_anat_fit_wf( if reference_anat == 'T1w': workflow.connect([ (refined_buffer, ds_t1w_mask_wf, [('anat_mask', 'inputnode.mask_file')]), - (ds_t1w_mask_wf, outputnode, [('outputnode.mask_file', 'anat_mask')]), ]) # fmt:skip else: workflow.connect([ @@ -619,9 +637,15 @@ def init_infant_anat_fit_wf( (('outputnode.bias_corrected', pop_file), 't2w_preproc'), ('outputnode.out_mask', 't2w_mask'), (('outputnode.out_file', pop_file), 't2w_brain'), - ('outputnode.out_segm', 'ants_seg'), ]), ]) # fmt:skip + if reference_anat == 'T2w': + refine_mask = True + workflow.connect([ + (t2w_n4_only_wf, seg_buffer, [ + ('outputnode.out_segm', 'ants_segs'), + ]), + ]) # fmt:skip else: LOGGER.info('ANAT Stage 2b: Skipping skull-strip, generating mask from input') binarize_t2w = pe.Node(Binarize(thresh_low=2), name='binarize_t2w') @@ -667,6 +691,8 @@ def init_infant_anat_fit_wf( (brain_extraction_wf, t2w_buffer, [ ('outputnode.out_mask', 't2w_mask'), ('outputnode.t2w_brain', 't2w_brain'), 
+ ('outputnode.t2w_preproc', 't2w_preproc'), + ('outputnode.out_probmap', 't2w_probmap') ]), ]) # fmt:skip @@ -685,7 +711,6 @@ def init_infant_anat_fit_wf( if reference_anat == 'T2w': workflow.connect([ (refined_buffer, ds_t2w_mask_wf, [('anat_mask', 'inputnode.mask_file')]), - (ds_t2w_mask_wf, outputnode, [('outputnode.mask_file', 'anat_mask')]), ]) # fmt:skip else: workflow.connect([ @@ -740,12 +765,13 @@ def init_infant_anat_fit_wf( LOGGER.info('ANAT Coregistering anatomicals') desc += ' The T1w and T2w reference volumes were co-registered using ANTs.' + probmap = not t2w_preproc and not t2w_mask coregistration_wf = init_coregistration_wf( omp_nthreads=omp_nthreads, sloppy=sloppy, debug='registration' in config.execution.debug, t1w_mask=False, - probmap=not t2w_mask, + probmap=probmap, ) # TODO: Currently the XFMs are transform0GenericAffine.mat, transform1Warp.nii.gz @@ -805,6 +831,13 @@ def init_infant_anat_fit_wf( ]), ]) # fmt:skip + if probmap: + workflow.connect([ + (t2w_buffer, coregistration_wf, [ + ('t2w_probmap', 'inputnode.in_probmap'), + ]) + ]) # fmt:skip + # Stage 4: Segmentation anat_dseg = precomputed.get(f'{anat}_dseg') anat_tpms = precomputed.get(f'{anat}_tpms') @@ -924,7 +957,7 @@ def init_infant_anat_fit_wf( LOGGER.info(f'ANAT Stage 5: Found pre-computed registrations for {found_xfms}') # Only refine mask if necessary - if anat_mask or recon_method is None: + if anat_mask or recon_method is None or not refine_mask: workflow.connect([ (anat_buffer, refined_buffer, [ ('anat_mask', 'anat_mask'), @@ -1079,7 +1112,7 @@ def init_infant_anat_fit_wf( 'Found an anatomical-to-fsnative transform without the reverse. Time to handle this.' ) - if not anat_mask: + if not anat_mask and refine_mask: LOGGER.info('ANAT Stage 7: Preparing mask refinement workflow') # Stage 6: Refine ANTs mask with FreeSurfer segmentation refinement_wf = init_refinement_wf() @@ -1091,17 +1124,21 @@ def init_infant_anat_fit_wf( ('outputnode.subject_id', 'inputnode.subject_id'), ]), (fsnative_buffer, refinement_wf, [ - ('fsnative2anat_xfm', 'inputnode.fsnative2t1w_xfm'), + ('fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), ]), (anat_buffer, refinement_wf, [ ('anat_preproc', 'inputnode.reference_image'), - ('ants_seg', 'inputnode.ants_segs'), # TODO: Verify this is the same as dseg + ]), + (seg_buffer, refinement_wf, [ + ('ants_segs', 'inputnode.ants_segs'), # TODO: Verify this is the same as dseg ]), (anat_buffer, applyrefined, [('anat_preproc', 'in_file')]), (refinement_wf, applyrefined, [('outputnode.out_brainmask', 'in_mask')]), (refinement_wf, refined_buffer, [('outputnode.out_brainmask', 'anat_mask')]), (applyrefined, refined_buffer, [('out_file', 'anat_brain')]), ]) # fmt:skip + elif not refine_mask: + LOGGER.info('ANAT Skipping mask refinement workflow') else: LOGGER.info('ANAT Found brain mask - skipping Stage 7') @@ -1288,7 +1325,9 @@ def init_infant_anat_fit_wf( def init_infant_single_anat_fit_wf( *, age_months: int, - anatomicals: list, + t1w: list, + t2w: list, + flair: list, bids_root: str, precomputed: dict, longitudinal: bool, @@ -1313,6 +1352,8 @@ def init_infant_single_anat_fit_wf( For best results, especially in periods of transitioning myelination (usually 3-8 months), a combination of T1w and T2w images will produce more accurate results. 
""" + anatomicals = t1w or t2w + inputnode = pe.Node( niu.IdentityInterface( fields=['anat', 'roi', 'flair', 'subjects_dir', 'subject_id'], @@ -1513,7 +1554,6 @@ def init_infant_single_anat_fit_wf( ('anat_source_files', 'inputnode.source_files'), ]), (anat_buffer, ds_anat_template_wf, [('anat_preproc', 'inputnode.anat_preproc')]), - (ds_anat_template_wf, outputnode, [('outputnode.anat_preproc', 'anat_preproc')]), ]) # fmt:skip else: LOGGER.info('ANAT Found preprocessed T1w - skipping Stage 1') @@ -1543,6 +1583,7 @@ def init_infant_single_anat_fit_wf( # !PCM, PCT, !SS -> Binarize PCT # !PCM, !PCT, SS -> Run brain extraction # !PCM, !PCT, !SS -> INU correct template + refine_mask = False apply_mask = pe.Node(ApplyMask(), name='apply_mask') if not anat_mask: if skull_strip_mode == 'auto': @@ -1565,13 +1606,17 @@ def init_infant_single_anat_fit_wf( atropos_use_random_seed=not skull_strip_fixed_seed, name='n4_only_wf', ) + + refine_mask = True workflow.connect([ (anat_validate, n4_only_wf, [('out_file', 'inputnode.in_files')]), (n4_only_wf, anat_buffer, [ - (('outputnode.bias_corrected', pop_file), 't2w_preproc'), - ('outputnode.out_mask', 't2w_mask'), - (('outputnode.out_file', pop_file), 't2w_brain'), - ('outputnode.out_segm', 'ants_seg'), + (('outputnode.bias_corrected', pop_file), 'anat_preproc'), + ('outputnode.out_mask', 'anat_mask'), + (('outputnode.out_file', pop_file), 'anat_brain'), + ]), + (n4_only_wf, seg_buffer, [ + ('outputnode.out_segm', 'ants_segs'), ]), ]) # fmt:skip else: @@ -1868,7 +1913,7 @@ def init_infant_single_anat_fit_wf( 'Found an anatomical-to-fsnative transform without the reverse. Time to handle this.' ) - if not anat_mask: + if not anat_mask and refine_mask: LOGGER.info('ANAT Stage 6: Preparing mask refinement workflow') # Stage 6: Refine ANTs mask with FreeSurfer segmentation refinement_wf = init_refinement_wf() @@ -1880,11 +1925,13 @@ def init_infant_single_anat_fit_wf( ('outputnode.subject_id', 'inputnode.subject_id'), ]), (fsnative_buffer, refinement_wf, [ - ('fsnative2anat_xfm', 'inputnode.fsnative2t1w_xfm'), + ('fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), ]), (anat_buffer, refinement_wf, [ ('anat_preproc', 'inputnode.reference_image'), - ('ants_seg', 'inputnode.ants_segs'), # TODO: Verify this is the same as dseg + ]), + (seg_buffer, refinement_wf, [ + ('ants_segs', 'inputnode.ants_segs'), ]), (anat_buffer, applyrefined, [('anat_preproc', 'in_file')]), (refinement_wf, applyrefined, [('outputnode.out_brainmask', 'in_mask')]), From 8d4017633f1b06424e464a60950e3ef123335b67 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Thu, 2 May 2024 00:46:21 -0400 Subject: [PATCH 067/142] FIX: Add entrypoint for single anat workflow, prioritize recon_method for anat ref --- nibabies/workflows/base.py | 59 +++++++++++++++++++++++--------------- 1 file changed, 36 insertions(+), 23 deletions(-) diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index 7ecfc857..4dc68039 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -54,7 +54,11 @@ from nibabies.interfaces import DerivativesDataSink from nibabies.interfaces.reports import AboutSummary, SubjectSummary from nibabies.utils.bids import parse_bids_for_age_months -from nibabies.workflows.anatomical.fit import init_infant_anat_apply_wf, init_infant_anat_fit_wf +from nibabies.workflows.anatomical.fit import ( + init_infant_anat_apply_wf, + init_infant_anat_fit_wf, + init_infant_single_anat_fit_wf, +) if ty.TYPE_CHECKING: from bids.layout import BIDSLayout @@ -316,7 +320,9 @@ 
def init_single_subject_wf( preferred_anat = config.execution.reference_anat t1w = subject_data['t1w'] t2w = subject_data['t2w'] + single_anat = False if not t1w and t2w: + single_anat = True reference_anat = 'T1w' if t1w else 'T2w' if preferred_anat and reference_anat != preferred_anat: raise AttributeError( @@ -324,7 +330,11 @@ def init_single_subject_wf( ) else: if not (reference_anat := preferred_anat): - reference_anat = 'T2w' if age <= 8 else 'T1w' + if recon_method is None: + reference_anat = 'T2w' if age <= 8 else 'T1w' + else: + reference_anat = 'T2w' if recon_method == 'mcribs' else 'T1w' + anat = reference_anat.lower() # To be used for workflow connections bids_root = str(config.execution.bids_dir) @@ -391,27 +401,30 @@ def init_single_subject_wf( sloppy = config.execution.sloppy cifti_output = config.workflow.cifti_output - anat_fit_wf = init_infant_anat_fit_wf( - age_months=age, - t1w=t1w, - t2w=t2w, - flair=subject_data['flair'], - bids_root=bids_root, - longitudinal=config.workflow.longitudinal, - msm_sulc=msm_sulc, - omp_nthreads=omp_nthreads, - output_dir=output_dir, - precomputed=anatomical_cache, - segmentation_atlases=config.execution.segmentation_atlases_dir, - skull_strip_fixed_seed=config.workflow.skull_strip_fixed_seed, - skull_strip_mode=config.workflow.skull_strip_anat, - skull_strip_template=Reference.from_string(config.workflow.skull_strip_template), - recon_method=recon_method, - reference_anat=reference_anat, - sloppy=sloppy, - spaces=spaces, - cifti_output=cifti_output, - ) + wf_args = { + 'age_months': age, + 't1w': t1w, + 't2w': t2w, + 'flair': subject_data['flair'], + 'bids_root': bids_root, + 'longitudinal': config.workflow.longitudinal, + 'msm_sulc': msm_sulc, + 'omp_nthreads': omp_nthreads, + 'output_dir': output_dir, + 'precomputed': anatomical_cache, + 'segmentation_atlases': config.execution.segmentation_atlases_dir, + 'skull_strip_fixed_seed': config.workflow.skull_strip_fixed_seed, + 'skull_strip_mode': config.workflow.skull_strip_anat, + 'skull_strip_template': Reference.from_string(config.workflow.skull_strip_template)[0], + 'recon_method': recon_method, + 'reference_anat': reference_anat, + 'sloppy': sloppy, + 'spaces': spaces, + 'cifti_output': cifti_output, + } + + fit_wf = init_infant_single_anat_fit_wf if single_anat else init_infant_anat_fit_wf + anat_fit_wf = fit_wf(**wf_args) # allow to run with anat-fast-track on fMRI-only dataset if ( From c10d842d0caa4d61c1b20992650ba9ba9ce0471a Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Thu, 2 May 2024 00:47:25 -0400 Subject: [PATCH 068/142] FIX: Remove superfluous denoise --- nibabies/workflows/anatomical/preproc.py | 34 ++++++++++++------------ 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/nibabies/workflows/anatomical/preproc.py b/nibabies/workflows/anatomical/preproc.py index 1af9d311..4e0f9daf 100644 --- a/nibabies/workflows/anatomical/preproc.py +++ b/nibabies/workflows/anatomical/preproc.py @@ -6,7 +6,7 @@ def init_anat_preproc_wf( *, bspline_fitting_distance: int = 200, - name: str = "anat_preproc_wf", + name: str = 'anat_preproc_wf', ) -> LiterateWorkflow: """Polish up raw anatomical data. 
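
The hunk below drops the Rician `DenoiseImage` node, so the polishing chain becomes validate -> clip -> N4 -> clip. As a reference point, here is a self-contained sketch wiring the same interfaces into a throwaway workflow; it is a rough standalone equivalent for experimentation, not the module itself:

from nipype.interfaces.ants import N4BiasFieldCorrection
from nipype.pipeline import engine as pe
from niworkflows.interfaces.header import ValidateImage
from niworkflows.interfaces.nibabel import IntensityClip

wf = pe.Workflow(name='mini_anat_polish')
# Same interfaces the workflow keeps after this change
validate = pe.Node(ValidateImage(), name='validate')
clip = pe.Node(IntensityClip(p_min=10.0, p_max=99.5), name='clip')
n4 = pe.Node(
    N4BiasFieldCorrection(dimension=3, bspline_fitting_distance=200),
    name='n4',
)
final_clip = pe.Node(IntensityClip(p_min=5.0, p_max=99.5), name='final_clip')
wf.connect([
    (validate, clip, [('out_file', 'in_file')]),
    (clip, n4, [('out_file', 'input_image')]),
    (n4, final_clip, [('output_image', 'in_file')]),
])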
@@ -28,24 +28,24 @@ def init_anat_preproc_wf( anat_preproc: :obj:`str` Preprocessed anatomical image (Denoising/INU/Clipping) """ - from nipype.interfaces.ants import DenoiseImage, N4BiasFieldCorrection + from nipype.interfaces.ants import N4BiasFieldCorrection from niworkflows.interfaces.header import ValidateImage from niworkflows.interfaces.nibabel import IntensityClip wf = LiterateWorkflow(name=name) inputnode = pe.Node( - niu.IdentityInterface(fields=["in_anat"]), - name="inputnode", + niu.IdentityInterface(fields=['in_anat']), + name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(fields=["anat_preproc"]), - name="outputnode", + niu.IdentityInterface(fields=['anat_preproc']), + name='outputnode', ) # validate image - validate = pe.Node(ValidateImage(), name="anat_validate", run_without_submitting=True) - clip = pe.Node(IntensityClip(p_min=10.0, p_max=99.5), name="clip") - denoise = pe.Node(DenoiseImage(dimension=3, noise_model="Rician"), name="denoise") + validate = pe.Node(ValidateImage(), name='anat_validate', run_without_submitting=True) + clip = pe.Node(IntensityClip(p_min=10.0, p_max=99.5), name='clip') + # denoise = pe.Node(DenoiseImage(dimension=3, noise_model="Rician"), name="denoise") n4_correct = pe.Node( N4BiasFieldCorrection( dimension=3, @@ -57,18 +57,18 @@ def init_anat_preproc_wf( rescale_intensities=True, shrink_factor=4, ), - name="n4_correct", + name='n4_correct', ) - final_clip = pe.Node(IntensityClip(p_min=5.0, p_max=99.5), name="final_clip") + final_clip = pe.Node(IntensityClip(p_min=5.0, p_max=99.5), name='final_clip') # fmt:off wf.connect([ - (inputnode, validate, [("in_anat", "in_file")]), - (validate, clip, [("out_file", "in_file")]), - (clip, denoise, [("out_file", "input_image")]), - (denoise, n4_correct, [("output_image", "input_image")]), - (n4_correct, final_clip, [("output_image", "in_file")]), - (final_clip, outputnode, [("out_file", "anat_preproc")]), + (inputnode, validate, [('in_anat', 'in_file')]), + (validate, clip, [('out_file', 'in_file')]), + (clip, n4_correct, [('out_file', 'input_image')]), + # (denoise, n4_correct, [("output_image", "input_image")]), + (n4_correct, final_clip, [('output_image', 'in_file')]), + (final_clip, outputnode, [('out_file', 'anat_preproc')]), ]) # fmt:on return wf From 1c4d3c8a9ff866ad6b1258da9b0ee3896b2291f6 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Mon, 6 May 2024 14:35:12 -0400 Subject: [PATCH 069/142] FIX: Single anat problems (T2-only) --- nibabies/workflows/anatomical/fit.py | 4 ++-- nibabies/workflows/base.py | 13 +++++++------ 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/nibabies/workflows/anatomical/fit.py b/nibabies/workflows/anatomical/fit.py index 144f22f3..46b5e1a4 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -1356,7 +1356,7 @@ def init_infant_single_anat_fit_wf( inputnode = pe.Node( niu.IdentityInterface( - fields=['anat', 'roi', 'flair', 'subjects_dir', 'subject_id'], + fields=['t1w', 't2w', 'anat', 'roi', 'flair', 'subjects_dir', 'subject_id'], ), name='inputnode', ) @@ -1545,7 +1545,7 @@ def init_infant_single_anat_fit_wf( ]) # fmt:skip workflow.connect([ - (inputnode, anat_template_wf, [('t1w', 'inputnode.anat_files')]), + (inputnode, anat_template_wf, [('anat', 'inputnode.anat_files')]), (anat_template_wf, anat_validate, [('outputnode.anat_ref', 'in_file')]), (anat_template_wf, ds_anat_template_wf, [ ('outputnode.anat_realign_xfm', 'inputnode.anat_ref_xfms'), diff --git a/nibabies/workflows/base.py 
b/nibabies/workflows/base.py index 4dc68039..67a5fe0d 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -427,9 +427,7 @@ def init_single_subject_wf( anat_fit_wf = fit_wf(**wf_args) # allow to run with anat-fast-track on fMRI-only dataset - if ( - 't1w_preproc' in anatomical_cache or 't2w_preproc' in anatomical_cache - ) and not subject_data['t1w']: + if f'{anat}_preproc' in anatomical_cache and not subject_data[anat]: workflow.connect([ (bidssrc, bids_info, [(('bold', fix_multi_source_name), 'in_file')]), (anat_fit_wf, summary, [('outputnode.anat_preproc', anat)]), @@ -438,12 +436,15 @@ def init_single_subject_wf( ]) # fmt:skip else: workflow.connect([ - (bidssrc, bids_info, [(('t1w', fix_multi_source_name), 'in_file')]), + (bidssrc, bids_info, [((anat, fix_multi_source_name), 'in_file')]), (bidssrc, summary, [('t1w', 't1w')]), - (bidssrc, ds_report_summary, [(('t1w', fix_multi_source_name), 'source_file')]), - (bidssrc, ds_report_about, [(('t1w', fix_multi_source_name), 'source_file')]), + (bidssrc, ds_report_summary, [((anat, fix_multi_source_name), 'source_file')]), + (bidssrc, ds_report_about, [((anat, fix_multi_source_name), 'source_file')]), ]) # fmt:skip + if single_anat: + workflow.connect(bidssrc, anat, anat_fit_wf, 'inputnode.anat') + workflow.connect([ (inputnode, anat_fit_wf, [('subjects_dir', 'inputnode.subjects_dir')]), (bidssrc, anat_fit_wf, [ From 2b0482008a5589bf7112c1440995bf7d45ede595 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Mon, 6 May 2024 22:44:41 -0400 Subject: [PATCH 070/142] FIX: Port bold2anat feature into BOLD workflow --- nibabies/cli/parser.py | 49 ++- nibabies/config.py | 6 +- nibabies/workflows/bold/registration.py | 534 ++++++++++++------------ 3 files changed, 303 insertions(+), 286 deletions(-) diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py index 93dc4d0e..a059eb66 100644 --- a/nibabies/cli/parser.py +++ b/nibabies/cli/parser.py @@ -23,6 +23,24 @@ def _build_parser(): from .version import check_latest, is_flagged + deprecations = { + # parser attribute name: (replacement flag, version slated to be removed in) + 'bold2t1w_init': ('--bold2anat-init', '24.2.0'), + 'bold2t1w_dof': ('--bold2anat-dof', '24.2.0'), + } + + class DeprecatedAction(Action): + def __call__(self, parser, namespace, values, option_string=None): + new_opt, rem_vers = deprecations.get(self.dest, (None, None)) + msg = ( + f"{self.option_strings} has been deprecated and will be removed in " + f"{rem_vers or 'a later version'}." + ) + if new_opt: + msg += f' Please use `{new_opt}` instead.' + print(msg, file=sys.stderr) + delattr(namespace, self.dest) + class DerivToDict(Action): def __call__(self, parser, namespace, values, option_string=None): d = {} @@ -354,19 +372,32 @@ def _slice_time_ref(value, parser): g_conf.add_argument( '--bold2t1w-init', - action='store', - default='register', + action=DeprecatedAction, choices=['register', 'header'], - help='Either "register" (the default) to initialize volumes at center or "header"' - ' to use the header information when coregistering BOLD to T1w images.', + help='Deprecated - use `--bold2anat-init` instead.', ) g_conf.add_argument( '--bold2t1w-dof', + action=DeprecatedAction, + choices=[6, 9, 12], + type=int, + help='Deprecated - use `--bold2anat-dof` instead.', + ) + g_conf.add_argument( + '--bold2anat-init', + choices=['auto', 't1w', 't2w', 'header'], + default='auto', + help='Method of initial BOLD to anatomical coregistration. 
If `auto`, a T2w image is used ' + 'if available, otherwise the T1w image. `t1w` forces use of the T1w, `t2w` forces use of ' + 'the T2w, and `header` uses the BOLD header information without an initial registration.', + ) + g_conf.add_argument( + '--bold2anat-dof', action='store', default=6, choices=[6, 9, 12], type=int, - help='Degrees of freedom when registering BOLD to T1w images. ' + help='Degrees of freedom when registering BOLD to anatomical images. ' '6 degrees (rotation and translation) are used by default.', ) g_conf.add_argument( @@ -732,14 +763,6 @@ def parse_args(args=None, namespace=None): parser = _build_parser() opts = parser.parse_args(args, namespace) - # Deprecations - if opts.force_reconall: - config.loggers.cli.warning( - '--force-reconall is deprecated and will be removed in a future release.' - 'To run traditional `recon-all`, use `--surface-recon-method freesurfer` instead.' - ) - opts.surface_recon_method = 'freesurfer' - if opts.config_file: skip = {} if opts.reports_only else {'execution': ('run_uuid',)} config.load(opts.config_file, skip=skip) diff --git a/nibabies/config.py b/nibabies/config.py index 549af78a..922d525a 100644 --- a/nibabies/config.py +++ b/nibabies/config.py @@ -543,9 +543,9 @@ class workflow(_Config): aroma_melodic_dim = None """Number of ICA components to be estimated by MELODIC (positive = exact, negative = maximum).""" - bold2t1w_dof = None - """Degrees of freedom of the BOLD-to-T1w registration steps.""" - bold2t1w_init = 'register' + bold2anat_dof = None + """Degrees of freedom of the BOLD-to-anatomical registration steps.""" + bold2anat_init = 'auto' """Whether to use standard coregistration ('register') or to initialize coregistration from the BOLD image-header ('header').""" cifti_output = None diff --git a/nibabies/workflows/bold/registration.py b/nibabies/workflows/bold/registration.py index 8e0b6702..47127388 100644 --- a/nibabies/workflows/bold/registration.py +++ b/nibabies/workflows/bold/registration.py @@ -12,27 +12,30 @@ """ import logging import os +import typing as ty from nipype.interfaces import c3, fsl from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe -from nibabies.config import DEFAULT_MEMORY_MIN_GB -from nibabies.data import load as load_data +from nibabies import config, data -LOGGER = logging.getLogger("nipype.workflow") +DEFAULT_MEMORY_MIN_GB = config.DEFAULT_MEMORY_MIN_GB +LOGGER = logging.getLogger('nipype.workflow') + +AffineDOF = ty.Literal[6, 9, 12] +RegistrationInit = ty.Literal['t1w', 't2w', 'header'] def init_bold_reg_wf( - freesurfer, - use_bbr, - bold2t1w_dof, - bold2t1w_init, - mem_gb, - omp_nthreads, - name="bold_reg_wf", - sloppy=False, - write_report=True, + freesurfer: bool, + use_bbr: bool, + bold2anat_dof: AffineDOF, + bold2anat_init: RegistrationInit, + mem_gb: float, + omp_nthreads: int, + name: str = 'bold_reg_wf', + sloppy: bool = False, ): """ Build a workflow to run same-subject, BOLD-to-T1w image-registration. 
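The new `AffineDOF` and `RegistrationInit` aliases above double as runtime guards for user-supplied options. A small sketch of the pattern (the helper name is illustrative, not part of the module):

    import typing as ty

    AffineDOF = ty.Literal[6, 9, 12]
    RegistrationInit = ty.Literal['t1w', 't2w', 'header']

    def _check_bold2anat_init(value: str) -> str:
        # Same ty.get_args() check the workflow factories perform below
        if value not in ty.get_args(RegistrationInit):
            raise ValueError(f'Unknown BOLD-to-anatomical initialization option: {value}')
        return value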
@@ -56,7 +59,7 @@ def init_bold_reg_wf( omp_nthreads=1, use_bbr=True, bold2t1w_dof=9, - bold2t1w_init='register') + bold2t1w_init='auto') Parameters ---------- @@ -95,7 +98,7 @@ def init_bold_reg_wf( FreeSurfer SUBJECTS_DIR subject_id FreeSurfer subject ID - fsnative2t1w_xfm + fsnative2anat_xfm LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w Outputs @@ -115,40 +118,40 @@ def init_bold_reg_wf( """ from niworkflows.engine.workflows import LiterateWorkflow as Workflow - from ...interfaces import DerivativesDataSink workflow = Workflow(name=name) inputnode = pe.Node( niu.IdentityInterface( fields=[ - "ref_bold_brain", - "t1w_brain", - "t1w_dseg", - "subjects_dir", - "subject_id", - "fsnative2t1w_xfm", + 'ref_bold_brain', + 'anat_preproc', + 'anat_mask', + 'anat_dseg', + 'subjects_dir', + 'subject_id', + 'fsnative2anat_xfm', ] ), - name="inputnode", + name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(fields=["itk_bold_to_t1", "itk_t1_to_bold", "fallback"]), - name="outputnode", + niu.IdentityInterface(fields=['itk_bold_to_t1', 'itk_t1_to_bold', 'fallback']), + name='outputnode', ) if freesurfer: bbr_wf = init_bbreg_wf( use_bbr=use_bbr, - bold2t1w_dof=bold2t1w_dof, - bold2t1w_init=bold2t1w_init, + bold2anat_dof=bold2anat_dof, + bold2anat_init=bold2anat_init, omp_nthreads=omp_nthreads, ) else: bbr_wf = init_fsl_bbr_wf( use_bbr=use_bbr, - bold2t1w_dof=bold2t1w_dof, - bold2t1w_init=bold2t1w_init, + bold2anat_dof=bold2anat_dof, + bold2anat_init=bold2anat_init, sloppy=sloppy, ) @@ -156,43 +159,25 @@ def init_bold_reg_wf( workflow.connect([ (inputnode, bbr_wf, [ ('ref_bold_brain', 'inputnode.in_file'), - ('fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm'), + ('fsnative2anat_xfm', 'inputnode.fsnative2anat_xfm'), ('subjects_dir', 'inputnode.subjects_dir'), ('subject_id', 'inputnode.subject_id'), + ('t1w_preproc', 'inputnode.t1w_preproc'), + ('t1w_mask', 'inputnode.t1w_mask'), ('t1w_dseg', 'inputnode.t1w_dseg'), - ('t1w_brain', 'inputnode.t1w_brain')]), - (bbr_wf, outputnode, [('outputnode.itk_bold_to_t1', 'itk_bold_to_t1'), - ('outputnode.itk_t1_to_bold', 'itk_t1_to_bold'), - ('outputnode.fallback', 'fallback')]), - ]) - # fmt: on - - if write_report: - ds_report_reg = pe.Node( - DerivativesDataSink(datatype="figures", dismiss_entities=("echo",)), - name="ds_report_reg", - run_without_submitting=True, - mem_gb=mem_gb, - ) - - def _bold_reg_suffix(fallback, freesurfer): - if fallback: - return "coreg" if freesurfer else "flirtnobbr" - return "bbregister" if freesurfer else "flirtbbr" - - # fmt: off - workflow.connect([ - (bbr_wf, ds_report_reg, [ - ('outputnode.out_report', 'in_file'), - (('outputnode.fallback', _bold_reg_suffix, freesurfer), 'desc')]), - ]) - # fmt: on + ]), + (bbr_wf, outputnode, [ + ('outputnode.itk_bold_to_t1', 'itk_bold_to_t1'), + ('outputnode.itk_t1_to_bold', 'itk_t1_to_bold'), + ('outputnode.fallback', 'fallback'), + ]), + ]) # fmt:skip return workflow def init_bold_t1_trans_wf( - freesurfer, mem_gb, omp_nthreads, use_compression=True, name="bold_t1_trans_wf" + freesurfer, mem_gb, omp_nthreads, use_compression=True, name='bold_t1_trans_wf' ): """ Co-register the reference BOLD image to T1w-space. 
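A usage sketch for the refactored factory with the renamed `bold2anat_*` options (values here are illustrative):

    from nibabies.workflows.bold.registration import init_bold_reg_wf

    bold_reg_wf = init_bold_reg_wf(
        freesurfer=True,       # route through bbregister rather than FSL FLIRT
        use_bbr=True,
        bold2anat_dof=6,
        bold2anat_init='t2w',  # seed coregistration with the aligned T2w
        mem_gb=1.0,
        omp_nthreads=1,
    )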
@@ -283,35 +268,35 @@ def init_bold_t1_trans_wf( inputnode = pe.Node( niu.IdentityInterface( fields=[ - "name_source", - "ref_bold_brain", - "ref_bold_mask", - "t1w_brain", - "t1w_mask", - "t1w_aseg", - "t1w_aparc", - "bold_split", - "fieldwarp", - "hmc_xforms", - "itk_bold_to_t1", + 'name_source', + 'ref_bold_brain', + 'ref_bold_mask', + 't1w_brain', + 't1w_mask', + 't1w_aseg', + 't1w_aparc', + 'bold_split', + 'fieldwarp', + 'hmc_xforms', + 'itk_bold_to_t1', ] ), - name="inputnode", + name='inputnode', ) outputnode = pe.Node( niu.IdentityInterface( - fields=["bold_t1", "bold_t1_ref", "bold_mask_t1", "bold_aseg_t1", "bold_aparc_t1"] + fields=['bold_t1', 'bold_t1_ref', 'bold_mask_t1', 'bold_aseg_t1', 'bold_aparc_t1'] ), - name="outputnode", + name='outputnode', ) gen_ref = pe.Node( - GenerateSamplingReference(), name="gen_ref", mem_gb=0.3 + GenerateSamplingReference(), name='gen_ref', mem_gb=0.3 ) # 256x256x256 * 64 / 8 ~ 150MB mask_t1w_tfm = pe.Node( - ApplyTransforms(interpolation="MultiLabel"), name="mask_t1w_tfm", mem_gb=0.1 + ApplyTransforms(interpolation='MultiLabel'), name='mask_t1w_tfm', mem_gb=0.1 ) # fmt: off @@ -329,13 +314,13 @@ def init_bold_t1_trans_wf( if freesurfer: # Resample aseg and aparc in T1w space (no transforms needed) aseg_t1w_tfm = pe.Node( - ApplyTransforms(interpolation="MultiLabel", transforms="identity"), - name="aseg_t1w_tfm", + ApplyTransforms(interpolation='MultiLabel', transforms='identity'), + name='aseg_t1w_tfm', mem_gb=0.1, ) aparc_t1w_tfm = pe.Node( - ApplyTransforms(interpolation="MultiLabel", transforms="identity"), - name="aparc_t1w_tfm", + ApplyTransforms(interpolation='MultiLabel', transforms='identity'), + name='aparc_t1w_tfm', mem_gb=0.1, ) @@ -351,14 +336,14 @@ def init_bold_t1_trans_wf( # fmt: on bold_to_t1w_transform = pe.Node( - MultiApplyTransforms(interpolation="LanczosWindowedSinc", float=True, copy_dtype=True), - name="bold_to_t1w_transform", + MultiApplyTransforms(interpolation='LanczosWindowedSinc', float=True, copy_dtype=True), + name='bold_to_t1w_transform', mem_gb=mem_gb * 3 * omp_nthreads, n_procs=omp_nthreads, ) # merge 3D volumes into 4D timeseries - merge = pe.Node(Merge(compress=use_compression), name="merge", mem_gb=mem_gb) + merge = pe.Node(Merge(compress=use_compression), name='merge', mem_gb=mem_gb) # Generate a reference on the target T1w space gen_final_ref = init_bold_reference_wf(omp_nthreads, pre_mask=True) @@ -366,7 +351,7 @@ def init_bold_t1_trans_wf( # Merge transforms placing the head motion correction last merge_xforms = pe.Node( niu.Merge(3), - name="merge_xforms", + name='merge_xforms', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB, ) @@ -392,12 +377,18 @@ def init_bold_t1_trans_wf( return workflow -def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name="bbreg_wf"): +def init_bbreg_wf( + use_bbr: bool, + bold2anat_dof: AffineDOF, + bold2anat_init: RegistrationInit, + omp_nthreads: int, + name: str = 'bbreg_wf', +): """ Build a workflow to run FreeSurfer's ``bbregister``. This workflow uses FreeSurfer's ``bbregister`` to register a BOLD image to - a T1-weighted structural image. + a T2-weighted or T1-weighted structural image. It is a counterpart to :py:func:`~fmriprep.workflows.bold.registration.init_fsl_bbr_wf`, which performs the same task using FSL's FLIRT with a BBR cost function. 
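When `use_bbr` is left unset (`None`), the boundary-based result is kept only if it stays close to the initializing `mri_coreg` affine; otherwise the workflow falls back and reports it through the `fallback` output. A simplified, self-contained stand-in for that comparison (the in-tree `compare_xforms` helper differs in detail; the 15 mm threshold and 50 mm radius here are illustrative):

    import numpy as np

    def xforms_diverge(aff_bbr: np.ndarray, aff_init: np.ndarray, threshold: float = 15.0) -> bool:
        """True when two 4x4 affines move head-surface points more than `threshold` mm apart."""
        radius = 50.0  # approximate head radius, in mm
        pts = np.hstack([radius * np.vstack([np.eye(3), -np.eye(3)]), np.ones((6, 1))])
        delta = (pts @ aff_bbr.T)[:, :3] - (pts @ aff_init.T)[:, :3]
        return bool(np.linalg.norm(delta, axis=1).max() > threshold)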
@@ -416,9 +407,9 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name="bbre :graph2use: orig :simple_form: yes - from fmriprep.workflows.bold.registration import init_bbreg_wf - wf = init_bbreg_wf(use_bbr=True, bold2t1w_dof=9, - bold2t1w_init='register', omp_nthreads=1) + from nibabies.workflows.bold.registration import init_bbreg_wf + wf = init_bbreg_wf(use_bbr=True, bold2anat_dof=9, + bold2anat_init='t1w', omp_nthreads=1) Parameters @@ -426,11 +417,12 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name="bbre use_bbr : :obj:`bool` or None Enable/disable boundary-based registration refinement. If ``None``, test BBR result for distortion before accepting. - bold2t1w_dof : 6, 9 or 12 - Degrees-of-freedom for BOLD-T1w registration - bold2t1w_init : str, 'header' or 'register' + bold2anat_dof : 6, 9 or 12 + Degrees-of-freedom for BOLD-anatomical registration + bold2anat_init : str, 't1w', 't2w' or 'header' If ``'header'``, use header information for initialization of BOLD and T1 images. - If ``'register'``, align volumes by their centers. + If ``'t1w'``, align BOLD to T1w by their centers. + If ``'t2w'``, align BOLD to T1w using the T2w as an intermediate. name : :obj:`str`, optional Workflow name (default: bbreg_wf) @@ -438,13 +430,15 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name="bbre ------ in_file Reference BOLD image to be registered - fsnative2t1w_xfm + fsnative2anat_xfm FSL-style affine matrix translating from FreeSurfer T1.mgz to T1w subjects_dir FreeSurfer SUBJECTS_DIR subject_id FreeSurfer subject ID (must have folder in SUBJECTS_DIR) - t1w_brain + t1w_preproc + Unused (see :py:func:`~fmriprep.workflows.bold.registration.init_fsl_bbr_wf`) + t1w_mask Unused (see :py:func:`~fmriprep.workflows.bold.registration.init_fsl_bbr_wf`) t1w_dseg Unused (see :py:func:`~fmriprep.workflows.bold.registration.init_fsl_bbr_wf`) @@ -455,178 +449,165 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name="bbre Affine transform from ``ref_bold_brain`` to T1 space (ITK format) itk_t1_to_bold Affine transform from T1 space to BOLD space (ITK format) - out_report - Reportlet for assessing registration quality fallback Boolean indicating whether BBR was rejected (mri_coreg registration returned) """ + from fmriprep.interfaces.patches import FreeSurferSource, MRICoreg + from nipype.interfaces.freesurfer import BBRegister from niworkflows.engine.workflows import LiterateWorkflow as Workflow - - # See https://github.com/nipreps/fmriprep/issues/768 - from niworkflows.interfaces.freesurfer import PatchedBBRegisterRPT as BBRegisterRPT - from niworkflows.interfaces.freesurfer import PatchedLTAConvert as LTAConvert - from niworkflows.interfaces.freesurfer import PatchedMRICoregRPT as MRICoregRPT from niworkflows.interfaces.nitransforms import ConcatenateXFMs workflow = Workflow(name=name) workflow.__desc__ = """\ -The BOLD reference was then co-registered to the T1w reference using +The BOLD reference was then co-registered to the anatomical reference using `bbregister` (FreeSurfer) which implements boundary-based registration [@bbr]. Co-registration was configured with {dof} degrees of freedom{reason}. 
""".format( - dof={6: "six", 9: "nine", 12: "twelve"}[bold2t1w_dof], + dof={6: 'six', 9: 'nine', 12: 'twelve'}[bold2anat_dof], reason=( - "" - if bold2t1w_dof == 6 - else "to account for distortions remaining in the BOLD reference" + '' + if bold2anat_dof == 6 + else 'to account for distortions remaining in the BOLD reference' ), ) + use_t2w = bold2anat_init == 't2w' + if use_t2w: + workflow.__desc__ += ' The aligned T2w image was used for initial co-registration.' + inputnode = pe.Node( niu.IdentityInterface( [ - "in_file", - "fsnative2t1w_xfm", - "subjects_dir", - "subject_id", # BBRegister - "t1w_dseg", - "t1w_brain", + 'in_file', + 'fsnative2anat_xfm', # BBRegister + 'subjects_dir', + 'subject_id', + 't1w_preproc', # FLIRT BBR + 't1w_mask', + 't1w_dseg', ] - ), # FLIRT BBR - name="inputnode", + ), + name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(["itk_bold_to_t1", "itk_t1_to_bold", "out_report", "fallback"]), - name="outputnode", + niu.IdentityInterface(['itk_bold_to_t1', 'itk_t1_to_bold', 'fallback']), + name='outputnode', ) - if bold2t1w_init not in ("register", "header"): - raise ValueError(f"Unknown BOLD-T1w initialization option: {bold2t1w_init}") + if bold2anat_init not in ty.get_args(RegistrationInit): + raise ValueError(f'Unknown BOLD-to-anatomical initialization option: {bold2anat_init}') # For now make BBR unconditional - in the future, we can fall back to identity, # but adding the flexibility without testing seems a bit dangerous - if bold2t1w_init == "header": + if bold2anat_init == 'header': if use_bbr is False: - raise ValueError("Cannot disable BBR and use header registration") + raise ValueError('Cannot disable BBR and use header registration') if use_bbr is None: - LOGGER.warning("Initializing BBR with header; affine fallback disabled") + LOGGER.warning('Initializing BBR with header; affine fallback disabled') use_bbr = True - # Define both nodes, but only connect conditionally + fssource = pe.Node(FreeSurferSource(), name='fssource') + mri_coreg = pe.Node( - MRICoregRPT( - dof=bold2t1w_dof, sep=[4], ftol=0.0001, linmintol=0.01, generate_report=not use_bbr - ), - name="mri_coreg", + MRICoreg(dof=bold2anat_dof, sep=[4], ftol=0.0001, linmintol=0.01), + name='mri_coreg', n_procs=omp_nthreads, mem_gb=5, ) + if use_t2w: + mri_coreg.inputs.reference_mask = False bbregister = pe.Node( - BBRegisterRPT( - dof=bold2t1w_dof, - contrast_type="t2", - registered_file=True, + BBRegister( + dof=bold2anat_dof, + contrast_type='t2', out_lta_file=True, - generate_report=True, ), - name="bbregister", + name='bbregister', mem_gb=12, ) - if bold2t1w_init == "header": - bbregister.inputs.init = "header" + if bold2anat_init == 'header': + bbregister.inputs.init = 'header' - transforms = pe.Node(niu.Merge(2), run_without_submitting=True, name="transforms") - lta_ras2ras = pe.MapNode( - LTAConvert(out_lta=True), iterfield=["in_lta"], name="lta_ras2ras", mem_gb=2 - ) + transforms = pe.Node(niu.Merge(2), run_without_submitting=True, name='transforms') # In cases where Merge(2) only has `in1` or `in2` defined # output list will just contain a single element select_transform = pe.Node( - niu.Select(index=0), run_without_submitting=True, name="select_transform" + niu.Select(index=0), run_without_submitting=True, name='select_transform' ) + merge_ltas = pe.Node(niu.Merge(2), name='merge_ltas', run_without_submitting=True) + concat_xfm = pe.Node(ConcatenateXFMs(inverse=True), name='concat_xfm') - merge_ltas = pe.Node(niu.Merge(2), name="merge_ltas", run_without_submitting=True) - 
concat_xfm = pe.Node(ConcatenateXFMs(inverse=True), name="concat_xfm") - - # fmt: off workflow.connect([ - (inputnode, merge_ltas, [('fsnative2t1w_xfm', 'in2')]), + (inputnode, merge_ltas, [('fsnative2anat_xfm', 'in2')]), # Wire up the co-registration alternatives - (transforms, lta_ras2ras, [('out', 'in_lta')]), - (lta_ras2ras, select_transform, [('out_lta', 'inlist')]), + (transforms, select_transform, [('out', 'inlist')]), (select_transform, merge_ltas, [('out', 'in1')]), (merge_ltas, concat_xfm, [('out', 'in_xfms')]), (concat_xfm, outputnode, [('out_xfm', 'itk_bold_to_t1')]), (concat_xfm, outputnode, [('out_inv', 'itk_t1_to_bold')]), - ]) + ]) # fmt:skip # Do not initialize with header, use mri_coreg - if bold2t1w_init == "register": + if bold2anat_init != 'header': workflow.connect([ (inputnode, mri_coreg, [('subjects_dir', 'subjects_dir'), ('subject_id', 'subject_id'), ('in_file', 'source_file')]), (mri_coreg, transforms, [('out_lta_file', 'in2')]), - ]) + ]) # fmt:skip + + if use_t2w: + workflow.connect([ + (inputnode, fssource, [('subjects_dir', 'subjects_dir'), + ('subject_id', 'subject_id')]), + (fssource, mri_coreg, [('T2', 'reference_file')]), + ]) # fmt:skip # Short-circuit workflow building, use initial registration if use_bbr is False: - workflow.connect([ - (mri_coreg, outputnode, [('out_report', 'out_report')]), - ]), outputnode.inputs.fallback = True return workflow + # Otherwise bbregister will also be used + workflow.connect(mri_coreg, 'out_lta_file', bbregister, 'init_reg_file') + # Use bbregister workflow.connect([ (inputnode, bbregister, [('subjects_dir', 'subjects_dir'), ('subject_id', 'subject_id'), ('in_file', 'source_file')]), (bbregister, transforms, [('out_lta_file', 'in1')]), - ]) - - if bold2t1w_init == 'register': - workflow.connect(mri_coreg, 'out_lta_file', bbregister, 'init_reg_file') + ]) # fmt:skip # Short-circuit workflow building, use boundary-based registration if use_bbr is True: - workflow.connect([ - (bbregister, outputnode, [('out_report', 'out_report')]), - ]) outputnode.inputs.fallback = False return workflow - # fmt: on - # Only reach this point if bold2t1w_init is "register" and use_bbr is None - reports = pe.Node(niu.Merge(2), run_without_submitting=True, name="reports") + # Only reach this point if bold2anat_init is "t1w" or "t2w" and use_bbr is None + compare_transforms = pe.Node(niu.Function(function=compare_xforms), name='compare_transforms') - compare_transforms = pe.Node(niu.Function(function=compare_xforms), name="compare_transforms") - select_report = pe.Node(niu.Select(), run_without_submitting=True, name="select_report") - - # fmt: off workflow.connect([ - # Normalize LTA transforms to RAS2RAS (inputs are VOX2VOX) and compare - (lta_ras2ras, compare_transforms, [('out_lta', 'lta_list')]), + (transforms, compare_transforms, [('out', 'lta_list')]), (compare_transforms, outputnode, [('out', 'fallback')]), - # Select output transform (compare_transforms, select_transform, [('out', 'index')]), - # Select output report - (bbregister, reports, [('out_report', 'in1')]), - (mri_coreg, reports, [('out_report', 'in2')]), - (reports, select_report, [('out', 'inlist')]), - (compare_transforms, select_report, [('out', 'index')]), - (select_report, outputnode, [('out', 'out_report')]), - ]) - # fmt: on + ]) # fmt:skip return workflow -def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, sloppy=False, name="fsl_bbr_wf"): +def init_fsl_bbr_wf( + use_bbr: bool, + bold2anat_dof: AffineDOF, + bold2anat_init: RegistrationInit, + omp_nthreads: int, 
+ sloppy: bool = False, + name: str = 'fsl_bbr_wf', +): """ Build a workflow to run FSL's ``flirt``. @@ -650,7 +631,9 @@ def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, sloppy=False, name="fs :simple_form: yes from fmriprep.workflows.bold.registration import init_fsl_bbr_wf - wf = init_fsl_bbr_wf(use_bbr=True, bold2t1w_dof=9, bold2t1w_init='register') + wf = init_fsl_bbr_wf( + use_bbr=True, bold2anat_dof=9, bold2anat_init='t1w', omp_nthreads=1 + ) Parameters @@ -658,11 +641,12 @@ def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, sloppy=False, name="fs use_bbr : :obj:`bool` or None Enable/disable boundary-based registration refinement. If ``None``, test BBR result for distortion before accepting. - bold2t1w_dof : 6, 9 or 12 - Degrees-of-freedom for BOLD-T1w registration - bold2t1w_init : str, 'header' or 'register' + bold2anat_dof : 6, 9 or 12 + Degrees-of-freedom for BOLD-anatomical registration + bold2anat_init : str, 't1w', 't2w' or 'header' If ``'header'``, use header information for initialization of BOLD and T1 images. - If ``'register'``, align volumes by their centers. + If ``'t1w'``, align BOLD to T1w by their centers. + If ``'t2w'``, align BOLD to T1w using the T2w as an intermediate. name : :obj:`str`, optional Workflow name (default: fsl_bbr_wf) @@ -670,10 +654,12 @@ def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, sloppy=False, name="fs ------ in_file Reference BOLD image to be registered - t1w_brain - Skull-stripped T1-weighted structural image + t1w_preproc + T1-weighted structural image + t1w_mask + Brain mask of structural image t1w_dseg - FAST segmentation of ``t1w_brain`` + FAST segmentation of masked ``t1w_preproc`` fsnative2t1w_xfm Unused (see :py:func:`~fmriprep.workflows.bold.registration.init_bbreg_wf`) subjects_dir @@ -687,176 +673,189 @@ def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, sloppy=False, name="fs Affine transform from ``ref_bold_brain`` to T1w space (ITK format) itk_t1_to_bold Affine transform from T1 space to BOLD space (ITK format) - out_report - Reportlet for assessing registration quality fallback Boolean indicating whether BBR was rejected (rigid FLIRT registration returned) """ + from nipype.interfaces.freesurfer import MRICoreg from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.freesurfer import PatchedLTAConvert as LTAConvert - from niworkflows.interfaces.reportlets.registration import FLIRTRPT + from niworkflows.interfaces.nibabel import ApplyMask from niworkflows.utils.images import dseg_label as _dseg_label workflow = Workflow(name=name) workflow.__desc__ = """\ The BOLD reference was then co-registered to the T1w reference using -`flirt` [FSL {fsl_ver}, @flirt] with the boundary-based registration [@bbr] -cost-function. -Co-registration was configured with nine degrees of freedom to account -for distortions remaining in the BOLD reference. +`mri_coreg` (FreeSurfer) followed by `flirt` [FSL {fsl_ver}, @flirt] +with the boundary-based registration [@bbr] cost-function. +Co-registration was configured with {dof} degrees of freedom{reason}. 
""".format( - fsl_ver=FLIRTRPT().version or "" + fsl_ver=fsl.FLIRT().version or '', + dof={6: 'six', 9: 'nine', 12: 'twelve'}[bold2anat_dof], + reason=( + '' + if bold2anat_dof == 6 + else 'to account for distortions remaining in the BOLD reference' + ), ) inputnode = pe.Node( niu.IdentityInterface( [ - "in_file", - "fsnative2t1w_xfm", - "subjects_dir", - "subject_id", # BBRegister - "t1w_dseg", - "t1w_brain", + 'in_file', + 'fsnative2t1w_xfm', # BBRegister + 'subjects_dir', + 'subject_id', + 't1w_preproc', # FLIRT BBR + 't1w_mask', + 't1w_dseg', ] - ), # FLIRT BBR - name="inputnode", + ), + name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(["itk_bold_to_t1", "itk_t1_to_bold", "out_report", "fallback"]), - name="outputnode", + niu.IdentityInterface(['itk_bold_to_t1', 'itk_t1_to_bold', 'fallback']), + name='outputnode', ) - wm_mask = pe.Node(niu.Function(function=_dseg_label), name="wm_mask") + wm_mask = pe.Node(niu.Function(function=_dseg_label), name='wm_mask') wm_mask.inputs.label = 2 # BIDS default is WM=2 - flt_bbr_init = pe.Node( - FLIRTRPT(dof=6, generate_report=not use_bbr, uses_qform=True), name="flt_bbr_init" - ) - if bold2t1w_init not in ("register", "header"): - raise ValueError(f"Unknown BOLD-T1w initialization option: {bold2t1w_init}") + if bold2anat_init not in ty.get_args(RegistrationInit): + raise ValueError(f'Unknown BOLD-T1w initialization option: {bold2anat_init}') + + if bold2anat_init == 'header': + raise NotImplementedError('Header-based registration initialization not supported for FSL') + if bold2anat_init == 't2w': + LOGGER.warning( + 'T2w intermediate for FSL is not implemented, registering with T1w instead.' + ) + + # Mask T1w_preproc with T1w_mask to make T1w_brain + mask_t1w_brain = pe.Node(ApplyMask(), name='mask_t1w_brain') + + mri_coreg = pe.Node( + MRICoreg(dof=bold2anat_dof, sep=[4], ftol=0.0001, linmintol=0.01), + name='mri_coreg', + n_procs=omp_nthreads, + mem_gb=5, + ) - if bold2t1w_init == "header": - raise NotImplementedError("Header-based registration initialization not supported for FSL") + lta_to_fsl = pe.Node(LTAConvert(out_fsl=True), name='lta_to_fsl', mem_gb=DEFAULT_MEMORY_MIN_GB) invt_bbr = pe.Node( - fsl.ConvertXFM(invert_xfm=True), name="invt_bbr", mem_gb=DEFAULT_MEMORY_MIN_GB + fsl.ConvertXFM(invert_xfm=True), name='invt_bbr', mem_gb=DEFAULT_MEMORY_MIN_GB ) # BOLD to T1 transform matrix is from fsl, using c3 tools to convert to # something ANTs will like. 
fsl2itk_fwd = pe.Node( c3.C3dAffineTool(fsl2ras=True, itk_transform=True), - name="fsl2itk_fwd", + name='fsl2itk_fwd', mem_gb=DEFAULT_MEMORY_MIN_GB, ) fsl2itk_inv = pe.Node( c3.C3dAffineTool(fsl2ras=True, itk_transform=True), - name="fsl2itk_inv", + name='fsl2itk_inv', mem_gb=DEFAULT_MEMORY_MIN_GB, ) - - # fmt: off + # fmt:off workflow.connect([ - (inputnode, flt_bbr_init, [('in_file', 'in_file'), - ('t1w_brain', 'reference')]), - (inputnode, fsl2itk_fwd, [('t1w_brain', 'reference_file'), - ('in_file', 'source_file')]), - (inputnode, fsl2itk_inv, [('in_file', 'reference_file'), - ('t1w_brain', 'source_file')]), + (inputnode, mask_t1w_brain, [('t1w_preproc', 'in_file'), + ('t1w_mask', 'in_mask')]), + (inputnode, mri_coreg, [('in_file', 'source_file')]), + (inputnode, fsl2itk_fwd, [('in_file', 'source_file')]), + (inputnode, fsl2itk_inv, [('in_file', 'reference_file')]), + (mask_t1w_brain, mri_coreg, [('out_file', 'reference_file')]), + (mask_t1w_brain, fsl2itk_fwd, [('out_file', 'reference_file')]), + (mask_t1w_brain, fsl2itk_inv, [('out_file', 'source_file')]), + (mri_coreg, lta_to_fsl, [('out_lta_file', 'in_lta')]), (invt_bbr, fsl2itk_inv, [('out_file', 'transform_file')]), (fsl2itk_fwd, outputnode, [('itk_transform', 'itk_bold_to_t1')]), (fsl2itk_inv, outputnode, [('itk_transform', 'itk_t1_to_bold')]), ]) - # fmt: on + # fmt:on # Short-circuit workflow building, use rigid registration if use_bbr is False: - # fmt: off + # fmt:off workflow.connect([ - (flt_bbr_init, invt_bbr, [('out_matrix_file', 'in_file')]), - (flt_bbr_init, fsl2itk_fwd, [('out_matrix_file', 'transform_file')]), - (flt_bbr_init, outputnode, [('out_report', 'out_report')]), + (lta_to_fsl, invt_bbr, [('out_fsl', 'in_file')]), + (lta_to_fsl, fsl2itk_fwd, [('out_fsl', 'transform_file')]), ]) - # fmt: on + # fmt:on outputnode.inputs.fallback = True return workflow flt_bbr = pe.Node( - FLIRTRPT(cost_func="bbr", dof=bold2t1w_dof, generate_report=True), name="flt_bbr" + fsl.FLIRT(cost_func='bbr', dof=bold2anat_dof, args='-basescale 1'), + name='flt_bbr', ) - FSLDIR = os.getenv("FSLDIR") - if FSLDIR: - flt_bbr.inputs.schedule = os.path.join(FSLDIR, "etc/flirtsch/bbr.sch") + FSLDIR = os.getenv('FSLDIR') + if FSLDIR and os.path.exists(schedule := os.path.join(FSLDIR, 'etc/flirtsch/bbr.sch')): + flt_bbr.inputs.schedule = schedule else: # Should mostly be hit while building docs - LOGGER.warning("FSLDIR unset - using packaged BBR schedule") - flt_bbr.inputs.schedule = load_data("flirtsch/bbr.sch") - - # fmt: off + LOGGER.warning('FSLDIR unset - using packaged BBR schedule') + flt_bbr.inputs.schedule = data.load('flirtsch/bbr.sch') + # fmt:off workflow.connect([ (inputnode, wm_mask, [('t1w_dseg', 'in_seg')]), (inputnode, flt_bbr, [('in_file', 'in_file')]), - (flt_bbr_init, flt_bbr, [('out_matrix_file', 'in_matrix_file')]), + (lta_to_fsl, flt_bbr, [('out_fsl', 'in_matrix_file')]), ]) - # fmt: on - + # fmt:on if sloppy is True: downsample = pe.Node( niu.Function( - function=_conditional_downsampling, output_names=["out_file", "out_mask"] + function=_conditional_downsampling, output_names=['out_file', 'out_mask'] ), - name="downsample", + name='downsample', ) - - # fmt: off + # fmt:off workflow.connect([ - (inputnode, downsample, [("t1w_brain", "in_file")]), - (wm_mask, downsample, [("out", "in_mask")]), + (mask_t1w_brain, downsample, [('out_file', 'in_file')]), + (wm_mask, downsample, [('out', 'in_mask')]), (downsample, flt_bbr, [('out_file', 'reference'), ('out_mask', 'wm_seg')]), ]) - # fmt: on + # fmt:on else: - # fmt: off + # 
fmt:off workflow.connect([ - (inputnode, flt_bbr, [('t1w_brain', 'reference')]), + (mask_t1w_brain, flt_bbr, [('out_file', 'reference')]), (wm_mask, flt_bbr, [('out', 'wm_seg')]), ]) - # fmt: on + # fmt:on # Short-circuit workflow building, use boundary-based registration if use_bbr is True: - # fmt: off + # fmt:off workflow.connect([ (flt_bbr, invt_bbr, [('out_matrix_file', 'in_file')]), (flt_bbr, fsl2itk_fwd, [('out_matrix_file', 'transform_file')]), - (flt_bbr, outputnode, [('out_report', 'out_report')]), ]) - # fmt: on + # fmt:on outputnode.inputs.fallback = False return workflow - transforms = pe.Node(niu.Merge(2), run_without_submitting=True, name="transforms") - reports = pe.Node(niu.Merge(2), run_without_submitting=True, name="reports") - - compare_transforms = pe.Node(niu.Function(function=compare_xforms), name="compare_transforms") + transforms = pe.Node(niu.Merge(2), run_without_submitting=True, name='transforms') - select_transform = pe.Node(niu.Select(), run_without_submitting=True, name="select_transform") - select_report = pe.Node(niu.Select(), run_without_submitting=True, name="select_report") + compare_transforms = pe.Node(niu.Function(function=compare_xforms), name='compare_transforms') - fsl_to_lta = pe.MapNode(LTAConvert(out_lta=True), iterfield=["in_fsl"], name="fsl_to_lta") + select_transform = pe.Node(niu.Select(), run_without_submitting=True, name='select_transform') - # fmt: off + fsl_to_lta = pe.MapNode(LTAConvert(out_lta=True), iterfield=['in_fsl'], name='fsl_to_lta') + # fmt:off workflow.connect([ (flt_bbr, transforms, [('out_matrix_file', 'in1')]), - (flt_bbr_init, transforms, [('out_matrix_file', 'in2')]), + (lta_to_fsl, transforms, [('out_fsl', 'in2')]), # Convert FSL transforms to LTA (RAS2RAS) transforms and compare - (inputnode, fsl_to_lta, [('in_file', 'source_file'), - ('t1w_brain', 'target_file')]), + (inputnode, fsl_to_lta, [('in_file', 'source_file')]), + (mask_t1w_brain, fsl_to_lta, [('out_file', 'target_file')]), (transforms, fsl_to_lta, [('out', 'in_fsl')]), (fsl_to_lta, compare_transforms, [('out_lta', 'lta_list')]), (compare_transforms, outputnode, [('out', 'fallback')]), @@ -865,13 +864,8 @@ def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, sloppy=False, name="fs (compare_transforms, select_transform, [('out', 'index')]), (select_transform, invt_bbr, [('out', 'in_file')]), (select_transform, fsl2itk_fwd, [('out', 'transform_file')]), - (flt_bbr, reports, [('out_report', 'in1')]), - (flt_bbr_init, reports, [('out_report', 'in2')]), - (reports, select_report, [('out', 'inlist')]), - (compare_transforms, select_report, [('out', 'index')]), - (select_report, outputnode, [('out', 'out_report')]), ]) - # fmt: on + # fmt:on return workflow @@ -934,8 +928,8 @@ def _conditional_downsampling(in_file, in_mask, zoom_th=4.0): if not np.any(zooms < zoom_th): return in_file, in_mask - out_file = Path("desc-resampled_input.nii.gz").absolute() - out_mask = Path("desc-resampled_mask.nii.gz").absolute() + out_file = Path('desc-resampled_input.nii.gz').absolute() + out_mask = Path('desc-resampled_mask.nii.gz').absolute() shape = np.array(img.shape[:3]) scaling = zoom_th / zooms From 0c2fce330f19ecfeab573a5bb710f8d2e58a8ed9 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Fri, 10 May 2024 17:12:24 -0400 Subject: [PATCH 071/142] FIX: Handle some derivatives but not preproc --- nibabies/workflows/anatomical/fit.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/nibabies/workflows/anatomical/fit.py 
b/nibabies/workflows/anatomical/fit.py index 46b5e1a4..c359d85a 100644 --- a/nibabies/workflows/anatomical/fit.py +++ b/nibabies/workflows/anatomical/fit.py @@ -1656,9 +1656,27 @@ def init_infant_single_anat_fit_wf( apply_mask.inputs.in_mask = anat_mask workflow.connect([ (anat_validate, apply_mask, [('out_file', 'in_file')]), - (apply_mask, anat_buffer, [('out_file', 'anat_brain')]), ]) # fmt:skip + if not anat_preproc: + LOGGER.info('ANAT Skipping skull-strip, INU-correction only') + n4_only_wf = init_n4_only_wf( + omp_nthreads=omp_nthreads, + atropos_use_random_seed=not skull_strip_fixed_seed, + bids_suffix=reference_anat, + name='n4_only_wf', + ) + workflow.connect([ + (apply_mask, n4_only_wf, [('out_file', 'inputnode.in_files')]), + (n4_only_wf, anat_buffer, [ + (('outputnode.bias_corrected', pop_file), 'anat_preproc'), + (('outputnode.out_file', pop_file), 'anat_brain'), + ]), + ]) # fmt:skip + else: + LOGGER.info('ANAT Skipping T2w masking') + workflow.connect(apply_mask, 'out_file', anat_buffer, 'anat_brain') + # Stage 3: Segmentation seg_method = 'jlf' if config.execution.segmentation_atlases_dir else 'fast' if anat_aseg: From 69fa82414b90fe3b867856c26ee176eefe77c259 Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Wed, 8 May 2024 23:29:21 -0400 Subject: [PATCH 072/142] WIP: BOLD registration fit/apply --- nibabies/workflows/base.py | 525 +++++++++++++----------- nibabies/workflows/bold/registration.py | 15 +- 2 files changed, 283 insertions(+), 257 deletions(-) diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py index 67a5fe0d..05cb542b 100644 --- a/nibabies/workflows/base.py +++ b/nibabies/workflows/base.py @@ -12,7 +12,7 @@ # This change is to treat sessions as a "first-class" identifier, to better handle the # potential rapid changing of brain morphometry. # -# Copyright 2023 The NiPreps Developers +# Copyright The NiPreps Developers # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -44,10 +44,13 @@ import os import sys import typing as ty +import warnings from copy import deepcopy from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe +from niworkflows.interfaces.utility import KeySelect +from niworkflows.utils.connections import listify from packaging.version import Version from nibabies import config @@ -271,22 +274,22 @@ def init_single_subject_wf( ) ) - # bold_runs = [ - # sorted( - # listify(run), - # key=lambda fl: config.execution.layout.get_metadata(fl).get('EchoTime', 0), - # ) - # for run in subject_data['bold'] - # ] - - # if subject_data['roi']: - # warnings.warn( - # f"Lesion mask {subject_data['roi']} found. " - # "Future versions of fMRIPrep will use alternative conventions. " - # "Please refer to the documentation before upgrading.", - # FutureWarning, - # stacklevel=1, - # ) + bold_runs = [ + sorted( + listify(run), + key=lambda fl: config.execution.layout.get_metadata(fl).get('EchoTime', 0), + ) + for run in subject_data['bold'] + ] + + if subject_data['roi']: + warnings.warn( + f"Lesion mask {subject_data['roi']} found. " + "Future versions of NiBabies will use alternative conventions. 
" + "Please refer to the documentation before upgrading.", + FutureWarning, + stacklevel=1, + ) recon_method = config.workflow.surface_recon_method msm_sulc = False @@ -500,248 +503,272 @@ def init_single_subject_wf( ]), ]) # fmt:skip # TODO: - # - Grab template_iterator_wf workflow - # - Grab select_MNI2009c_xfm node - - # if 'MNI152NLin2009cAsym' in spaces.get_spaces(): - # select_MNI2009c_xfm = pe.Node( - # KeySelect(fields=['std2anat_xfm'], key='MNI152NLin2009cAsym'), - # name='select_MNI2009c_xfm', - # run_without_submitting=True, - # ) - # workflow.connect([ - # (anat_fit_wf, select_MNI2009c_xfm, [ - # ('outputnode.std2anat_xfm', 'std2anat_xfm'), - # ('outputnode.template', 'keys'), - # ]), - # ]) # fmt:skip + if 'MNI152NLin2009cAsym' in spaces.get_spaces(): + select_MNI2009c_xfm = pe.Node( + KeySelect(fields=['std2anat_xfm'], key='MNI152NLin2009cAsym'), + name='select_MNI2009c_xfm', + run_without_submitting=True, + ) + workflow.connect([ + (anat_fit_wf, select_MNI2009c_xfm, [ + ('outputnode.std2anat_xfm', 'std2anat_xfm'), + ('outputnode.template', 'keys'), + ]), + ]) # fmt:skip # Thread MNI152NLin6Asym standard outputs to CIFTI subworkflow, skipping # the iterator, which targets only output spaces. # This can lead to duplication in the working directory if people actually # want MNI152NLin6Asym outputs, but we'll live with it. - # if config.workflow.cifti_output: - # from smriprep.interfaces.templateflow import TemplateFlowSelect - - # ref = Reference( - # 'MNI152NLin6Asym', - # {'res': 2 if config.workflow.cifti_output == '91k' else 1}, - # ) - - # select_MNI6_xfm = pe.Node( - # KeySelect(fields=['anat2std_xfm'], key=ref.fullname), - # name='select_MNI6', - # run_without_submitting=True, - # ) - # select_MNI6_tpl = pe.Node( - # TemplateFlowSelect(template=ref.fullname, resolution=ref.spec['res']), - # name='select_MNI6_tpl', - # ) - # workflow.connect([ - # (anat_fit_wf, select_MNI6_xfm, [ - # ('outputnode.anat2std_xfm', 'anat2std_xfm'), - # ('outputnode.template', 'keys'), - # ]), - # ]) # fmt:skip + if config.workflow.cifti_output: + from smriprep.interfaces.templateflow import TemplateFlowSelect + + ref = Reference( + 'MNI152NLin6Asym', + {'res': 2 if config.workflow.cifti_output == '91k' else 1}, + ) + + select_MNI6_xfm = pe.Node( + KeySelect(fields=['anat2std_xfm'], key=ref.fullname), + name='select_MNI6', + run_without_submitting=True, + ) + select_MNI6_tpl = pe.Node( + TemplateFlowSelect(template=ref.fullname, resolution=ref.spec['res']), + name='select_MNI6_tpl', + ) + workflow.connect([ + (anat_fit_wf, select_MNI6_xfm, [ + ('outputnode.anat2std_xfm', 'anat2std_xfm'), + ('outputnode.template', 'keys'), + ]), + ]) # fmt:skip if config.workflow.anat_only: return clean_datasinks(workflow) - # TODO: FMAP, BOLD PROCESSING - return workflow + fmap_estimators, estimator_map = map_fieldmap_estimation( + layout=config.execution.layout, + subject_id=subject_id, + bold_data=bold_runs, + ignore_fieldmaps='fieldmaps' in config.workflow.ignore, + use_syn=config.workflow.use_syn_sdc, + force_syn=config.workflow.force_syn, + filters=config.execution.get().get('bids_filters', {}).get('fmap'), + ) - # # fmt: off - # workflow.connect([ - # (inputnode, anat_preproc_wf, [ - # ('subjects_dir', 'inputnode.subjects_dir'), - # ]), - # (inputnode, summary, [ - # ('subjects_dir', 'subjects_dir'), - # ]), - # (bidssrc, summary, [ - # ('bold', 'bold'), - # ]), - # (bids_info, summary, [ - # ('subject', 'subject_id'), - # ]), - # (bids_info, anat_preproc_wf, [ - # (('subject', _prefix), 
'inputnode.subject_id'), - # ]), - # (bidssrc, anat_preproc_wf, [ - # ('t1w', 'inputnode.t1w'), - # ('t2w', 'inputnode.t2w'), - # ]), - # (summary, ds_report_summary, [ - # ('out_report', 'in_file'), - # ]), - # (about, ds_report_about, [ - # ('out_report', 'in_file'), - # ]), - # ]) - - # workflow.connect([ - # (bidssrc, bids_info, [ - # ((contrast.lower(), fix_multi_source_name), 'in_file'), - # ]), - # (bidssrc, summary, [ - # ('t1w', 't1w'), - # ('t2w', 't2w'), - # ]), - # (bidssrc, ds_report_summary, [ - # ((contrast.lower(), fix_multi_source_name), 'source_file'), - # ]), - # (bidssrc, ds_report_about, [ - # ((contrast.lower(), fix_multi_source_name), 'source_file'), - # ]), - # ]) - # # fmt: on - - # # Overwrite ``out_path_base`` of smriprep's DataSinks - # for node in workflow.list_node_names(): - # if node.split('.')[-1].startswith('ds_'): - # workflow.get_node(node).interface.out_path_base = '' - - # if anat_only: - # return workflow - - # Susceptibility distortion correction - - -# fmap_estimators = None -# if any((config.workflow.use_syn_sdc, config.workflow.force_syn)): -# config.loggers.workflow.critical('SyN processing is not yet implemented.') - -# if 'fieldmaps' not in config.workflow.ignore: -# from sdcflows.utils.wrangler import find_estimators - -# # SDC Step 1: Run basic heuristics to identify available data for fieldmap estimation -# # For now, no fmapless -# fmap_estimators = find_estimators( -# layout=config.execution.layout, -# subject=subject_id, -# sessions=[session_id], -# fmapless=False, # config.workflow.use_syn, -# force_fmapless=False, # config.workflow.force_syn, -# ) - -# # Append the functional section to the existing anatomical exerpt -# # That way we do not need to stream down the number of bold datasets -# anat_preproc_wf.__postdesc__ = anat_preproc_wf.__postdesc__ or '' -# func_pre_desc = f""" - -# Functional data preprocessing - -# : For each of the {len(subject_data['bold'])} BOLD runs found per subject (across all -# tasks and sessions), the following preprocessing was performed.""" - -# func_preproc_wfs = [] -# has_fieldmap = bool(fmap_estimators) -# for bold_file in subject_data['bold']: -# func_preproc_wf = init_func_preproc_wf(bold_file, spaces, has_fieldmap=has_fieldmap) -# if func_preproc_wf is None: -# continue - -# func_preproc_wf.__desc__ = func_pre_desc + (func_preproc_wf.__desc__ or '') -# # fmt:off -# workflow.connect([ -# (anat_preproc_wf, func_preproc_wf, [ -# ('outputnode.anat_preproc', 'inputnode.anat_preproc'), -# ('outputnode.anat_mask', 'inputnode.anat_mask'), -# ('outputnode.anat_brain', 'inputnode.anat_brain'), -# ('outputnode.anat_dseg', 'inputnode.anat_dseg'), -# ('outputnode.anat_aseg', 'inputnode.anat_aseg'), -# ('outputnode.anat_aparc', 'inputnode.anat_aparc'), -# ('outputnode.anat_tpms', 'inputnode.anat_tpms'), -# ('outputnode.template', 'inputnode.template'), -# ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), -# ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm'), -# # Undefined if --fs-no-reconall, but this is safe -# ('outputnode.subjects_dir', 'inputnode.subjects_dir'), -# ('outputnode.subject_id', 'inputnode.subject_id'), -# ('outputnode.anat2fsnative_xfm', 'inputnode.t1w2fsnative_xfm'), -# ('outputnode.fsnative2anat_xfm', 'inputnode.fsnative2t1w_xfm'), -# ('outputnode.surfaces', 'inputnode.surfaces'), -# ('outputnode.morphometrics', 'inputnode.morphometrics'), -# ('outputnode.anat_ribbon', 'inputnode.anat_ribbon'), -# ('outputnode.sphere_reg_fsLR', 'inputnode.sphere_reg_fsLR'), -# 
('outputnode.midthickness_fsLR', 'inputnode.midthickness_fsLR'), -# ]), -# ]) -# # fmt:on -# func_preproc_wfs.append(func_preproc_wf) - -# if not has_fieldmap: -# config.loggers.workflow.warning( -# 'Data for fieldmap estimation not present. Please note that these data ' -# 'will not be corrected for susceptibility distortions.' -# ) -# return workflow - -# config.loggers.workflow.info( -# f'Fieldmap estimators found: {[e.method for e in fmap_estimators]}' -# ) - -# from sdcflows import fieldmaps as fm -# from sdcflows.workflows.base import init_fmap_preproc_wf - -# fmap_wf = init_fmap_preproc_wf( -# sloppy=bool(config.execution.sloppy), -# debug='fieldmaps' in config.execution.debug, -# estimators=fmap_estimators, -# omp_nthreads=config.nipype.omp_nthreads, -# output_dir=nibabies_dir, -# subject=subject_id, -# ) -# fmap_wf.__desc__ = f""" - -# Preprocessing of B0 inhomogeneity mappings - -# : A total of {len(fmap_estimators)} fieldmaps were found available within the input -# BIDS structure for this particular subject. -# """ - -# for func_preproc_wf in func_preproc_wfs: -# # fmt: off -# workflow.connect([ -# (fmap_wf, func_preproc_wf, [ -# ('outputnode.fmap', 'inputnode.fmap'), -# ('outputnode.fmap_ref', 'inputnode.fmap_ref'), -# ('outputnode.fmap_coeff', 'inputnode.fmap_coeff'), -# ('outputnode.fmap_mask', 'inputnode.fmap_mask'), -# ('outputnode.fmap_id', 'inputnode.fmap_id'), -# ('outputnode.method', 'inputnode.sdc_method'), -# ]), -# ]) -# # fmt: on - -# # Overwrite ``out_path_base`` of sdcflows's DataSinks -# for node in fmap_wf.list_node_names(): -# if node.split('.')[-1].startswith('ds_'): -# fmap_wf.get_node(node).interface.out_path_base = '' - -# # Step 3: Manually connect PEPOLAR -# for estimator in fmap_estimators: -# config.loggers.workflow.info( -# f"""\ -# Setting-up fieldmap "{estimator.bids_id}" ({estimator.method}) with \ -# <{', '.join(s.path.name for s in estimator.sources)}>""" -# ) -# if estimator.method in (fm.EstimatorType.MAPPED, fm.EstimatorType.PHASEDIFF): -# continue - -# suffices = [s.suffix for s in estimator.sources] - -# if estimator.method == fm.EstimatorType.PEPOLAR: -# if set(suffices) == {'epi'} or sorted(suffices) == ['bold', 'epi']: -# fmap_wf_inputs = getattr(fmap_wf.inputs, f'in_{estimator.bids_id}') -# fmap_wf_inputs.in_data = [str(s.path) for s in estimator.sources] -# fmap_wf_inputs.metadata = [s.metadata for s in estimator.sources] -# else: -# raise NotImplementedError( -# 'Sophisticated PEPOLAR schemes (e.g., using DWI+EPI) are unsupported.' -# ) - -# return workflow + if fmap_estimators: + config.loggers.workflow.info( + 'B0 field inhomogeneity map will be estimated with the following ' + f'{len(fmap_estimators)} estimator(s): ' + f'{[e.method for e in fmap_estimators]}.' + ) + + from sdcflows import fieldmaps as fm + from sdcflows.workflows.base import init_fmap_preproc_wf + + fmap_wf = init_fmap_preproc_wf( + debug='fieldmaps' in config.execution.debug, + estimators=fmap_estimators, + omp_nthreads=omp_nthreads, + output_dir=output_dir, + subject=subject_id, + ) + fmap_wf.__desc__ = f""" + +Preprocessing of B0 inhomogeneity mappings + +: A total of {len(fmap_estimators)} fieldmaps were found available within the input +BIDS structure for this particular subject. 
+""" + + # Overwrite ``out_path_base`` of sdcflows's DataSinks + for node in fmap_wf.list_node_names(): + if node.split('.')[-1].startswith('ds_'): + fmap_wf.get_node(node).interface.out_path_base = '' + + fmap_select_std = pe.Node( + KeySelect(fields=['std2anat_xfm'], key='MNI152NLin2009cAsym'), + name='fmap_select_std', + run_without_submitting=True, + ) + if any(estimator.method == fm.EstimatorType.ANAT for estimator in fmap_estimators): + workflow.connect([ + (anat_fit_wf, fmap_select_std, [ + ('outputnode.std2anat_xfm', 'std2anat_xfm'), + ('outputnode.template', 'keys')]), + ]) # fmt:skip + + for estimator in fmap_estimators: + config.loggers.workflow.info( + f"""\ +Setting-up fieldmap "{estimator.bids_id}" ({estimator.method}) with \ +<{', '.join(s.path.name for s in estimator.sources)}>""" + ) + + # Mapped and phasediff can be connected internally by SDCFlows + if estimator.method in (fm.EstimatorType.MAPPED, fm.EstimatorType.PHASEDIFF): + continue + + suffices = [s.suffix for s in estimator.sources] + + if estimator.method == fm.EstimatorType.PEPOLAR: + if len(suffices) == 2 and all(suf in ('epi', 'bold', 'sbref') for suf in suffices): + wf_inputs = getattr(fmap_wf.inputs, f'in_{estimator.bids_id}') + wf_inputs.in_data = [str(s.path) for s in estimator.sources] + wf_inputs.metadata = [s.metadata for s in estimator.sources] + else: + raise NotImplementedError('Sophisticated PEPOLAR schemes are unsupported.') + + elif estimator.method == fm.EstimatorType.ANAT: + from sdcflows.workflows.fit.syn import init_syn_preprocessing_wf + + sources = [str(s.path) for s in estimator.sources if s.suffix in ('bold', 'sbref')] + source_meta = [ + s.metadata for s in estimator.sources if s.suffix in ('bold', 'sbref') + ] + syn_preprocessing_wf = init_syn_preprocessing_wf( + omp_nthreads=omp_nthreads, + debug=config.execution.sloppy, + auto_bold_nss=True, + t1w_inversion=False, + name=f'syn_preprocessing_{estimator.bids_id}', + ) + syn_preprocessing_wf.inputs.inputnode.in_epis = sources + syn_preprocessing_wf.inputs.inputnode.in_meta = source_meta + + workflow.connect([ + (anat_fit_wf, syn_preprocessing_wf, [ + ('outputnode.anat_preproc', 'inputnode.in_anat'), + ('outputnode.anat_mask', 'inputnode.mask_anat'), + ]), + (fmap_select_std, syn_preprocessing_wf, [ + ('std2anat_xfm', 'inputnode.std2anat_xfm'), + ]), + (syn_preprocessing_wf, fmap_wf, [ + ('outputnode.epi_ref', f'in_{estimator.bids_id}.epi_ref'), + ('outputnode.epi_mask', f'in_{estimator.bids_id}.epi_mask'), + ('outputnode.anat_ref', f'in_{estimator.bids_id}.anat_ref'), + ('outputnode.anat_mask', f'in_{estimator.bids_id}.anat_mask'), + ('outputnode.sd_prior', f'in_{estimator.bids_id}.sd_prior'), + ]), + ]) # fmt:skip + + # Append the functional section to the existing anatomical excerpt + # That way we do not need to stream down the number of bold datasets + func_pre_desc = f""" +Functional data preprocessing + +: For each of the {len(bold_runs)} BOLD runs found per subject (across all +tasks and sessions), the following preprocessing was performed. 
+""" + + # Before initializing BOLD workflow, select/verify anatomical target for coregistration + if config.workflow.bold2anat_init in ('auto', 't2w'): + has_t2w = subject_data['t2w'] or 't2w_preproc' in anatomical_cache + if config.workflow.bold2anat_init == 't2w' and not has_t2w: + raise OSError( + 'A T2w image is expected for BOLD-to-anatomical coregistration and was not found' + ) + config.workflow.bold2anat_init = 't2w' if has_t2w else 't1w' + + for bold_series in bold_runs: + bold_file = bold_series[0] + fieldmap_id = estimator_map.get(bold_file) + + functional_cache = {} + if config.execution.derivatives: + from fmriprep.utils.bids import collect_derivatives, extract_entities + + entities = extract_entities(bold_series) + + for deriv_dir in config.execution.derivatives.values(): + functional_cache.update( + collect_derivatives( + derivatives_dir=deriv_dir, + entities=entities, + fieldmap_id=fieldmap_id, + ) + ) + + bold_wf = init_bold_wf( + bold_series=bold_series, + precomputed=functional_cache, + fieldmap_id=fieldmap_id, + ) + if bold_wf is None: + continue + + bold_wf.__desc__ = func_pre_desc + (bold_wf.__desc__ or '') + + workflow.connect([ + (anat_fit_wf, bold_wf, [ + ('outputnode.t1w_preproc', 'inputnode.t1w_preproc'), + ('outputnode.t1w_mask', 'inputnode.t1w_mask'), + ('outputnode.t1w_dseg', 'inputnode.t1w_dseg'), + ('outputnode.t1w_tpms', 'inputnode.t1w_tpms'), + ('outputnode.subjects_dir', 'inputnode.subjects_dir'), + ('outputnode.subject_id', 'inputnode.subject_id'), + ('outputnode.fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm'), + ('outputnode.white', 'inputnode.white'), + ('outputnode.pial', 'inputnode.pial'), + ('outputnode.midthickness', 'inputnode.midthickness'), + ('outputnode.anat_ribbon', 'inputnode.anat_ribbon'), + ( + f'outputnode.sphere_reg_{"msm" if msm_sulc else "fsLR"}', + 'inputnode.sphere_reg_fsLR', + ), + ]), + ]) # fmt:skip + if fieldmap_id: + workflow.connect([ + (fmap_wf, bold_wf, [ + ('outputnode.fmap', 'inputnode.fmap'), + ('outputnode.fmap_ref', 'inputnode.fmap_ref'), + ('outputnode.fmap_coeff', 'inputnode.fmap_coeff'), + ('outputnode.fmap_mask', 'inputnode.fmap_mask'), + ('outputnode.fmap_id', 'inputnode.fmap_id'), + ('outputnode.method', 'inputnode.sdc_method'), + ]), + ]) # fmt:skip + + if config.workflow.level == 'full': + if template_iterator_wf is not None: + workflow.connect([ + (template_iterator_wf, bold_wf, [ + ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'), + ('outputnode.space', 'inputnode.std_space'), + ('outputnode.resolution', 'inputnode.std_resolution'), + ('outputnode.cohort', 'inputnode.std_cohort'), + ('outputnode.std_t1w', 'inputnode.std_t1w'), + ('outputnode.std_mask', 'inputnode.std_mask'), + ]), + ]) # fmt:skip + + if select_MNI2009c_xfm is not None: + workflow.connect([ + (select_MNI2009c_xfm, bold_wf, [ + ('std2anat_xfm', 'inputnode.mni2009c2anat_xfm'), + ]), + ]) # fmt:skip + + # Thread MNI152NLin6Asym standard outputs to CIFTI subworkflow, skipping + # the iterator, which targets only output spaces. + # This can lead to duplication in the working directory if people actually + # want MNI152NLin6Asym outputs, but we'll live with it. 
+ if config.workflow.cifti_output: + workflow.connect([ + (select_MNI6_xfm, bold_wf, [('anat2std_xfm', 'inputnode.anat2mni6_xfm')]), + (select_MNI6_tpl, bold_wf, [('brain_mask', 'inputnode.mni6_mask')]), + (anat_apply_wf, bold_wf, [ + ('outputnode.roi', 'inputnode.cortex_mask'), + ]), + (anat_apply_wf, bold_wf, [ + ('outputnode.midthickness_fsLR', 'inputnode.midthickness_fsLR'), + ]), + ]) # fmt:skip + + return clean_datasinks(workflow) def _subject_session_id(subject_id: str, session_id: str | None) -> str: diff --git a/nibabies/workflows/bold/registration.py b/nibabies/workflows/bold/registration.py index 47127388..d9daca8b 100644 --- a/nibabies/workflows/bold/registration.py +++ b/nibabies/workflows/bold/registration.py @@ -99,14 +99,14 @@ def init_bold_reg_wf( subject_id FreeSurfer subject ID fsnative2anat_xfm - LTA-style affine matrix translating from FreeSurfer-conformed subject space to T1w + LTA-style affine matrix translating from FreeSurfer-conformed subject space to anatomical Outputs ------- - itk_bold_to_t1 - Affine transform from ``ref_bold_brain`` to T1 space (ITK format) - itk_t1_to_bold - Affine transform from T1 space to BOLD space (ITK format) + itk_bold_to_anat + Affine transform from ``ref_bold_brain`` to anatomical space (ITK format) + itk_anat_to_bold + Affine transform from anatomical space to BOLD space (ITK format) fallback Boolean indicating whether BBR was rejected (mri_coreg registration returned) @@ -118,7 +118,6 @@ def init_bold_reg_wf( """ from niworkflows.engine.workflows import LiterateWorkflow as Workflow - workflow = Workflow(name=name) inputnode = pe.Node( niu.IdentityInterface( @@ -660,7 +659,7 @@ def init_fsl_bbr_wf( Brain mask of structural image t1w_dseg FAST segmentation of masked ``t1w_preproc`` - fsnative2t1w_xfm + fsnative2anat_xfm Unused (see :py:func:`~fmriprep.workflows.bold.registration.init_bbreg_wf`) subjects_dir Unused (see :py:func:`~fmriprep.workflows.bold.registration.init_bbreg_wf`) @@ -703,7 +702,7 @@ def init_fsl_bbr_wf( niu.IdentityInterface( [ 'in_file', - 'fsnative2t1w_xfm', # BBRegister + 'fsnative2anat_xfm', # BBRegister 'subjects_dir', 'subject_id', 't1w_preproc', # FLIRT BBR From 4d8af2fc1751de4e824af1d939007f637b286aff Mon Sep 17 00:00:00 2001 From: Mathias Goncalves Date: Wed, 8 May 2024 23:29:37 -0400 Subject: [PATCH 073/142] DOCS: Start outlining outputs --- docs/outputs.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/docs/outputs.md b/docs/outputs.md index bdf7a6c1..1a756786 100644 --- a/docs/outputs.md +++ b/docs/outputs.md @@ -1,3 +1,13 @@ # Outputs -Refer to [*fMRIPrep* outputs](https://fmriprep.org/en/latest/outputs.html) +NiBabies outputs conform to the BIDS Derivatives specification (see BIDS Derivatives, along with the upcoming BEP 011 and BEP 012). NiBabies generates three broad classes of outcomes: + + +# Processing level +As of version 24.0.0, NiBabies supports three levels of derivatives: + +--level minimal: This processing mode aims to produce the smallest working directory and output dataset possible, while enabling all further processing results to be deterministically generated. Most components of the visual reports can be generated at this level, so the quality of preprocessing can be assessed. Because no resampling is done, confounds and carpetplots will be missing from the reports. 
+
+--level resampling: This processing mode aims to produce additional derivatives that enable third-party resampling, resampling BOLD series in the working directory as needed, but these are not saved to the output directory. The --me-output-echos flag will be enabled at this level, in which case the individual echos will be saved to the working directory after slice-timing correction, head-motion correction, and susceptibility distortion correction.
+
+--level full: This processing mode aims to produce all derivatives that have previously been a part of the NiBabies output dataset. This is the default processing level.

From e8153c73367880b29812cc6170096d8924e256ca Mon Sep 17 00:00:00 2001
From: mathiasg
Date: Wed, 15 May 2024 15:53:15 -0400
Subject: [PATCH 074/142] DOCS: Flesh out outputs from fmriprep

---
 docs/outputs.md | 771 +++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 765 insertions(+), 6 deletions(-)

diff --git a/docs/outputs.md b/docs/outputs.md
index 1a756786..6c35605e 100644
--- a/docs/outputs.md
+++ b/docs/outputs.md
@@ -1,13 +1,772 @@
-# Outputs
+.. include:: links.rst
+
+.. _outputs:
+
+---------------------
+Outputs of *NiBabies*
+---------------------
+*NiBabies* outputs conform to the :abbr:`BIDS (brain imaging data structure)`
+Derivatives specification (see `BIDS Derivatives`_, along with the
+upcoming `BEP 011`_ and `BEP 012`_).
+*NiBabies* generates three broad classes of outcomes:
+
+1. **Visual QA (quality assessment) reports**:
+   one :abbr:`HTML (hypertext markup language)` report per subject, which
+   allows the user a thorough visual assessment of the quality of processing
+   and ensures the transparency of *NiBabies* operations.
+
+2. **Derivatives (preprocessed data)**: the input fMRI data ready for
+   analysis, i.e., after the various preparation procedures
+   have been applied.
+   For example, :abbr:`INU (intensity non-uniformity)`-corrected versions
+   of the T1-weighted image (per subject), the brain mask,
+   or :abbr:`BOLD (blood-oxygen level dependent)`
+   images after head-motion correction and slice-timing correction, aligned
+   into the same subject's T1w space or into some standard space.
+
+3. **Confounds**: this is a special family of derivatives that can be utilized
+   to inform subsequent denoising steps.
+
+   .. warning::
+      These modules are still in alpha and require additional testing.
+
+   .. important::
+      In order to remain agnostic to any possible subsequent analysis,
+      *NiBabies* does not perform any denoising (e.g., spatial smoothing) itself.
+      There are exceptions to this principle (described in its corresponding
+      section below):
+
+      - CompCor regressors, which are calculated after temporal high-pass filtering.
+
+Layout
+------
+Assuming NiBabies is invoked with::
+
+    nibabies <input_dir>/ <output_dir>/ participant [OPTIONS]
+
+The outputs will be a `BIDS Derivatives`_ dataset of the form::
+
+    <output_dir>/
+      logs/
+      sub-<label>/
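+
+For example, assuming a BIDS dataset at the hypothetical location
+``/data/bids`` and an output directory ``/data/derivatives``, a run at the
+minimal processing level might be invoked as::
+
+    nibabies /data/bids /data/derivatives participant --level minimal
+
+If the same working directory is reused across invocations (via ``-w``), a
+subsequent run with ``--level full`` can reuse the intermediate results
+already computed, rather than regenerating them.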