From 0050663eec6260f099c25893a92dbf39db263a18 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Mon, 7 Oct 2019 11:45:40 -0400
Subject: [PATCH 1/3] RF: Redirect nipype.workflows to niflow.nipype1.workflows

---
 nipype/interfaces/fsl/dti.py                  |    2 +-
 nipype/interfaces/fsl/epi.py                  |    2 +-
 nipype/interfaces/mrtrix/convert.py           |   23 +-
 nipype/workflows/__init__.py                  |   22 +
 nipype/workflows/data/__init__.py             |   17 -
 nipype/workflows/data/ecc.sch                 |   67 -
 nipype/workflows/data/hmc.sch                 |   64 -
 nipype/workflows/dmri/__init__.py             |    4 -
 nipype/workflows/dmri/camino/__init__.py      |    5 -
 .../dmri/camino/connectivity_mapping.py       |  534 -------
 nipype/workflows/dmri/camino/diffusion.py     |  245 ----
 .../dmri/camino/group_connectivity.py         |  115 --
 .../workflows/dmri/connectivity/__init__.py   |    9 -
 .../dmri/connectivity/group_connectivity.py   |  631 --------
 nipype/workflows/dmri/connectivity/nx.py      |  178 ---
 nipype/workflows/dmri/dipy/__init__.py        |    7 -
 nipype/workflows/dmri/dipy/denoise.py         |  121 --
 nipype/workflows/dmri/dtitk/__init__.py       |    8 -
 .../dmri/dtitk/tensor_registration.py         |  144 --
 nipype/workflows/dmri/fsl/__init__.py         |   15 -
 nipype/workflows/dmri/fsl/artifacts.py        | 1061 --------------
 nipype/workflows/dmri/fsl/dti.py              |  276 ----
 nipype/workflows/dmri/fsl/epi.py              |  885 -----------
 nipype/workflows/dmri/fsl/tbss.py             |  590 --------
 nipype/workflows/dmri/fsl/tests/__init__.py   |    3 -
 nipype/workflows/dmri/fsl/tests/test_dti.py   |   85 --
 nipype/workflows/dmri/fsl/tests/test_epi.py   |   46 -
 nipype/workflows/dmri/fsl/tests/test_tbss.py  |  211 ---
 nipype/workflows/dmri/fsl/utils.py            |  847 -----------
 nipype/workflows/dmri/mrtrix/__init__.py      |    5 -
 .../dmri/mrtrix/connectivity_mapping.py       |  639 --------
 nipype/workflows/dmri/mrtrix/diffusion.py     |  186 ---
 .../dmri/mrtrix/group_connectivity.py         |  139 --
 nipype/workflows/fmri/__init__.py             |    4 -
 nipype/workflows/fmri/fsl/__init__.py         |    8 -
 nipype/workflows/fmri/fsl/estimate.py         |  298 ----
 nipype/workflows/fmri/fsl/preprocess.py       | 1293 -----------------
 nipype/workflows/fmri/fsl/tests/__init__.py   |    2 -
 .../fmri/fsl/tests/test_preprocess.py         |   25 -
 nipype/workflows/fmri/spm/__init__.py         |    3 -
 nipype/workflows/fmri/spm/estimate.py         |    3 -
 nipype/workflows/fmri/spm/preprocess.py       |  332 -----
 nipype/workflows/fmri/spm/tests/__init__.py   |    2 -
 nipype/workflows/graph/__init__.py            |    3 -
 nipype/workflows/misc/__init__.py             |    1 -
 nipype/workflows/misc/utils.py                |   91 --
 nipype/workflows/rsfmri/__init__.py           |    5 -
 nipype/workflows/rsfmri/fsl/__init__.py       |    2 -
 nipype/workflows/rsfmri/fsl/resting.py        |  162 ---
 nipype/workflows/rsfmri/fsl/tests/__init__.py |    0
 .../rsfmri/fsl/tests/test_resting.py          |  107 --
 nipype/workflows/smri/__init__.py             |    7 -
 .../workflows/smri/ants/ANTSBuildTemplate.py  |  388 -----
 nipype/workflows/smri/ants/__init__.py        |    3 -
 .../ants/antsRegistrationBuildTemplate.py     |  535 -------
 nipype/workflows/smri/freesurfer/__init__.py  |    5 -
 .../workflows/smri/freesurfer/autorecon1.py   |  512 -------
 .../workflows/smri/freesurfer/autorecon2.py   |  720 ---------
 .../workflows/smri/freesurfer/autorecon3.py   |  959 ------------
 nipype/workflows/smri/freesurfer/ba_maps.py   |  172 ---
 nipype/workflows/smri/freesurfer/bem.py       |   81 --
 nipype/workflows/smri/freesurfer/recon.py     |  604 --------
 nipype/workflows/smri/freesurfer/utils.py     |  498 -------
 nipype/workflows/smri/niftyreg/__init__.py    |    5 -
 nipype/workflows/smri/niftyreg/groupwise.py   |  384 -----
 nipype/workflows/warp/__init__.py             |    1 -
 66 files changed, 46 insertions(+), 14355 deletions(-)
 delete mode 100644 nipype/workflows/data/__init__.py
 delete mode 100644 nipype/workflows/data/ecc.sch
 delete mode 100644 nipype/workflows/data/hmc.sch
 delete mode 100644 nipype/workflows/dmri/__init__.py
 delete mode 100644 nipype/workflows/dmri/camino/__init__.py
 delete mode 100644 nipype/workflows/dmri/camino/connectivity_mapping.py
 delete mode 100644 nipype/workflows/dmri/camino/diffusion.py
 delete mode 100644 nipype/workflows/dmri/camino/group_connectivity.py
 delete mode 100644 nipype/workflows/dmri/connectivity/__init__.py
 delete mode 100644 nipype/workflows/dmri/connectivity/group_connectivity.py
 delete mode 100644 nipype/workflows/dmri/connectivity/nx.py
 delete mode 100644 nipype/workflows/dmri/dipy/__init__.py
 delete mode 100644 nipype/workflows/dmri/dipy/denoise.py
 delete mode 100644 nipype/workflows/dmri/dtitk/__init__.py
 delete mode 100644 nipype/workflows/dmri/dtitk/tensor_registration.py
 delete mode 100644 nipype/workflows/dmri/fsl/__init__.py
 delete mode 100644 nipype/workflows/dmri/fsl/artifacts.py
 delete mode 100644 nipype/workflows/dmri/fsl/dti.py
 delete mode 100644 nipype/workflows/dmri/fsl/epi.py
 delete mode 100644 nipype/workflows/dmri/fsl/tbss.py
 delete mode 100644 nipype/workflows/dmri/fsl/tests/__init__.py
 delete mode 100644 nipype/workflows/dmri/fsl/tests/test_dti.py
 delete mode 100644 nipype/workflows/dmri/fsl/tests/test_epi.py
 delete mode 100644 nipype/workflows/dmri/fsl/tests/test_tbss.py
 delete mode 100644 nipype/workflows/dmri/fsl/utils.py
 delete mode 100644 nipype/workflows/dmri/mrtrix/__init__.py
 delete mode 100644 nipype/workflows/dmri/mrtrix/connectivity_mapping.py
 delete mode 100644 nipype/workflows/dmri/mrtrix/diffusion.py
 delete mode 100644 nipype/workflows/dmri/mrtrix/group_connectivity.py
 delete mode 100644 nipype/workflows/fmri/__init__.py
 delete mode 100644 nipype/workflows/fmri/fsl/__init__.py
 delete mode 100644 nipype/workflows/fmri/fsl/estimate.py
 delete mode 100644 nipype/workflows/fmri/fsl/preprocess.py
 delete mode 100644 nipype/workflows/fmri/fsl/tests/__init__.py
 delete mode 100644 nipype/workflows/fmri/fsl/tests/test_preprocess.py
 delete mode 100644 nipype/workflows/fmri/spm/__init__.py
 delete mode 100644 nipype/workflows/fmri/spm/estimate.py
 delete mode 100644 nipype/workflows/fmri/spm/preprocess.py
 delete mode 100644 nipype/workflows/fmri/spm/tests/__init__.py
 delete mode 100644 nipype/workflows/graph/__init__.py
 delete mode 100644 nipype/workflows/misc/__init__.py
 delete mode 100644 nipype/workflows/misc/utils.py
 delete mode 100644 nipype/workflows/rsfmri/__init__.py
 delete mode 100644 nipype/workflows/rsfmri/fsl/__init__.py
 delete mode 100644 nipype/workflows/rsfmri/fsl/resting.py
 delete mode 100644 nipype/workflows/rsfmri/fsl/tests/__init__.py
 delete mode 100644 nipype/workflows/rsfmri/fsl/tests/test_resting.py
 delete mode 100644 nipype/workflows/smri/__init__.py
 delete mode 100644 nipype/workflows/smri/ants/ANTSBuildTemplate.py
 delete mode 100644 nipype/workflows/smri/ants/__init__.py
 delete mode 100644 nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py
 delete mode 100644 nipype/workflows/smri/freesurfer/__init__.py
 delete mode 100644 nipype/workflows/smri/freesurfer/autorecon1.py
 delete mode 100644 nipype/workflows/smri/freesurfer/autorecon2.py
 delete mode 100644 nipype/workflows/smri/freesurfer/autorecon3.py
 delete mode 100644 nipype/workflows/smri/freesurfer/ba_maps.py
 delete mode 100644 nipype/workflows/smri/freesurfer/bem.py
 delete mode 100644 nipype/workflows/smri/freesurfer/recon.py
 delete mode 100644 nipype/workflows/smri/freesurfer/utils.py
 delete mode 100644 nipype/workflows/smri/niftyreg/__init__.py
 delete mode 100644 nipype/workflows/smri/niftyreg/groupwise.py
 delete mode 100644 nipype/workflows/warp/__init__.py

diff --git a/nipype/interfaces/fsl/dti.py b/nipype/interfaces/fsl/dti.py
index c842ff05cf..75cdfeebed 100644
--- a/nipype/interfaces/fsl/dti.py
+++ b/nipype/interfaces/fsl/dti.py
@@ -424,7 +424,7 @@ class BEDPOSTX5(FSLXCommand):
     .. note:: Consider using
-      :func:`nipype.workflows.fsl.dmri.create_bedpostx_pipeline` instead.
+      :func:`niflow.nipype1.workflows.fsl.dmri.create_bedpostx_pipeline` instead.

     Example
diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py
index f94bda1147..a0fdf843da 100644
--- a/nipype/interfaces/fsl/epi.py
+++ b/nipype/interfaces/fsl/epi.py
@@ -1100,7 +1100,7 @@ class EPIDeWarp(FSLCommand):
     `_.

     .. warning:: deprecated in FSL, please use
-      :func:`nipype.workflows.dmri.preprocess.epi.sdc_fmb` instead.
+      :func:`niflow.nipype1.workflows.dmri.preprocess.epi.sdc_fmb` instead.

     Examples
     --------
diff --git a/nipype/interfaces/mrtrix/convert.py b/nipype/interfaces/mrtrix/convert.py
index a3a280c895..4c1b6e4ef5 100644
--- a/nipype/interfaces/mrtrix/convert.py
+++ b/nipype/interfaces/mrtrix/convert.py
@@ -15,13 +15,34 @@
 from ... import logging
 from ...utils.filemanip import split_filename
-from ...workflows.misc.utils import get_data_dims, get_vox_dims
 from ..base import TraitedSpec, File, isdefined
 from ..dipy.base import DipyBaseInterface, HAVE_DIPY as have_dipy

 iflogger = logging.getLogger('nipype.interface')


+def get_vox_dims(volume):
+    import nibabel as nb
+    from nipype.utils import NUMPY_MMAP
+    if isinstance(volume, list):
+        volume = volume[0]
+    nii = nb.load(volume, mmap=NUMPY_MMAP)
+    hdr = nii.header
+    voxdims = hdr.get_zooms()
+    return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])]
+
+
+def get_data_dims(volume):
+    import nibabel as nb
+    from nipype.utils import NUMPY_MMAP
+    if isinstance(volume, list):
+        volume = volume[0]
+    nii = nb.load(volume, mmap=NUMPY_MMAP)
+    hdr = nii.header
+    datadims = hdr.get_data_shape()
+    return [int(datadims[0]), int(datadims[1]), int(datadims[2])]
+
+
 def transform_to_affine(streams, header, affine):
     from dipy.tracking.utils import move_streamlines
     rotation, scale = np.linalg.qr(affine)
diff --git a/nipype/workflows/__init__.py b/nipype/workflows/__init__.py
index 99fb243f19..85df461615 100644
--- a/nipype/workflows/__init__.py
+++ b/nipype/workflows/__init__.py
@@ -1,3 +1,25 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
+
+_msg = ["Nipype 1 workflows have been moved to the niflow-nipype1-workflows package."]
+try:
+    from niflow.nipype1.workflows import data, dmri, fmri, misc, rsfmri, smri
+except ImportError:
+    _msg.append("pip install niflow-nipype1-workflows to continue using them.")
+else:
+    import sys
+    # Hack to make `from nipype.workflows.X import Y` work
+    sys.modules['nipype.workflows.data'] = data
+    sys.modules['nipype.workflows.dmri'] = dmri
+    sys.modules['nipype.workflows.fmri'] = fmri
+    sys.modules['nipype.workflows.misc'] = misc
+    sys.modules['nipype.workflows.rsfmri'] = rsfmri
+    sys.modules['nipype.workflows.smri'] = smri
+    _msg.append("nipype.workflows.* provides a reference for backwards compatibility. 
" + "Please use niflow.nipype1.workflows.* to avoid this warning.") + del sys + +import warnings +warnings.warn(' '.join(_msg)) +del warnings, _msg diff --git a/nipype/workflows/data/__init__.py b/nipype/workflows/data/__init__.py deleted file mode 100644 index 85fcd2dee0..0000000000 --- a/nipype/workflows/data/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -import os.path as op - - -def get_flirt_schedule(name): - if name == 'ecc': - return op.abspath(op.join(op.dirname(__file__), 'ecc.sch')) - elif name == 'hmc': - return op.abspath(op.join(op.dirname(__file__), 'hmc.sch')) - else: - raise RuntimeError('Requested file does not exist.') diff --git a/nipype/workflows/data/ecc.sch b/nipype/workflows/data/ecc.sch deleted file mode 100644 index b9e8d8c3c3..0000000000 --- a/nipype/workflows/data/ecc.sch +++ /dev/null @@ -1,67 +0,0 @@ -# 4mm scale -setscale 4 -setoption smoothing 6 -setoption paramsubset 1 0 0 0 0 0 0 1 1 1 1 1 1 -clear U -clear UA -clear UB -clear US -clear UP -# try the identity transform as a starting point at this resolution -clear UQ -setrow UQ 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 -optimise 7 UQ 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 4 -sort U -copy U UA -# select best 4 optimised solutions and try perturbations of these -clear U -copy UA:1-4 U -optimise 7 UA:1-4 1.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 4 -optimise 7 UA:1-4 -1.0 0.0 0.0 0.0 0.0 0.0 0.0 abs 4 -optimise 7 UA:1-4 0.0 1.0 0.0 0.0 0.0 0.0 0.0 abs 4 -optimise 7 UA:1-4 0.0 -1.0 0.0 0.0 0.0 0.0 0.0 abs 4 -optimise 7 UA:1-4 0.0 0.0 1.0 0.0 0.0 0.0 0.0 abs 4 -optimise 7 UA:1-4 0.0 0.0 -1.0 0.0 0.0 0.0 0.0 abs 4 -optimise 7 UA:1-4 0.0 0.0 0.0 0.0 0.0 0.0 0.1 abs 4 -optimise 7 UA:1-4 0.0 0.0 0.0 0.0 0.0 0.0 -0.1 abs 4 -optimise 7 UA:1-4 0.0 0.0 0.0 0.0 0.0 0.0 0.2 abs 4 -optimise 7 UA:1-4 0.0 0.0 0.0 0.0 0.0 0.0 -0.2 abs 4 -sort U -copy U UB -# 2mm scale -setscale 2 -setoption smoothing 4 -setoption paramsubset 1 0 0 0 0 0 0 1 1 1 1 1 1 -clear U -clear UC -clear UD -clear UE -clear UF -# remeasure costs at this scale -measurecost 7 UB 0 0 0 0 0 0 rel -sort U -copy U UC -clear U -optimise 7 UC:1-3 0.0 0.0 0.0 0.0 0.0 0.0 0.0 abs 2 -copy U UD -sort U -copy U UF -# also try the identity transform as a starting point at this resolution -sort U -clear U UG -clear U -setrow UG 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 -optimise 7 UG 0.0 0.0 0.0 0.0 0.0 0.0 0.0 abs 2 -sort U -copy U UG -# 1mm scale -setscale 1 -setoption smoothing 2 -setoption boundguess 1 -setoption paramsubset 1 0 0 0 0 0 0 1 1 1 1 1 1 -clear U -#also try the identity transform as a starting point at this resolution -setrow UK 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 -optimise 12 UK:1-2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 abs 1 -sort U - diff --git a/nipype/workflows/data/hmc.sch b/nipype/workflows/data/hmc.sch deleted file mode 100644 index aeabcae29a..0000000000 --- a/nipype/workflows/data/hmc.sch +++ /dev/null @@ -1,64 +0,0 @@ -# 4mm scale -setscale 4 -setoption smoothing 6 -clear U -clear UA -clear UB -clear US -clear UP -# try the identity transform as a starting point at this resolution -clear UQ -setrow UQ 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 -optimise 7 UQ 0.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 4 -sort U -copy U UA -# select best 4 optimised solutions and try perturbations of these -clear U -copy UA:1-4 U -optimise 7 UA:1-4 1.0 0.0 0.0 0.0 0.0 0.0 0.0 rel 4 -optimise 
7 UA:1-4 -1.0 0.0 0.0 0.0 0.0 0.0 0.0 abs 4 -optimise 7 UA:1-4 0.0 1.0 0.0 0.0 0.0 0.0 0.0 abs 4 -optimise 7 UA:1-4 0.0 -1.0 0.0 0.0 0.0 0.0 0.0 abs 4 -optimise 7 UA:1-4 0.0 0.0 1.0 0.0 0.0 0.0 0.0 abs 4 -optimise 7 UA:1-4 0.0 0.0 -1.0 0.0 0.0 0.0 0.0 abs 4 -optimise 7 UA:1-4 0.0 0.0 0.0 0.0 0.0 0.0 0.1 abs 4 -optimise 7 UA:1-4 0.0 0.0 0.0 0.0 0.0 0.0 -0.1 abs 4 -optimise 7 UA:1-4 0.0 0.0 0.0 0.0 0.0 0.0 0.2 abs 4 -optimise 7 UA:1-4 0.0 0.0 0.0 0.0 0.0 0.0 -0.2 abs 4 -sort U -copy U UB -# 2mm scale -setscale 2 -setoption smoothing 4 -clear U -clear UC -clear UD -clear UE -clear UF -# remeasure costs at this scale -measurecost 7 UB 0 0 0 0 0 0 rel -sort U -copy U UC -clear U -optimise 7 UC:1-3 0.0 0.0 0.0 0.0 0.0 0.0 0.0 abs 2 -copy U UD -sort U -copy U UF -# also try the identity transform as a starting point at this resolution -sort U -clear U UG -clear U -setrow UG 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 -optimise 7 UG 0.0 0.0 0.0 0.0 0.0 0.0 0.0 abs 2 -sort U -copy U UG -# 1mm scale -setscale 1 -setoption smoothing 2 -setoption boundguess 1 -clear U -#also try the identity transform as a starting point at this resolution -setrow UK 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 -optimise 12 UK:1-2 0.0 0.0 0.0 0.0 0.0 0.0 0.0 abs 1 -sort U - diff --git a/nipype/workflows/dmri/__init__.py b/nipype/workflows/dmri/__init__.py deleted file mode 100644 index 628b6c2bc1..0000000000 --- a/nipype/workflows/dmri/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from . import camino, mrtrix, fsl, dipy diff --git a/nipype/workflows/dmri/camino/__init__.py b/nipype/workflows/dmri/camino/__init__.py deleted file mode 100644 index 07ba37fc52..0000000000 --- a/nipype/workflows/dmri/camino/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -from .diffusion import create_camino_dti_pipeline -from .connectivity_mapping import create_connectivity_pipeline -from .group_connectivity import create_group_connectivity_pipeline diff --git a/nipype/workflows/dmri/camino/connectivity_mapping.py b/nipype/workflows/dmri/camino/connectivity_mapping.py deleted file mode 100644 index 3283b5f4e1..0000000000 --- a/nipype/workflows/dmri/camino/connectivity_mapping.py +++ /dev/null @@ -1,534 +0,0 @@ -# -*- coding: utf-8 -*- -import inspect -import os.path as op - -from ....interfaces import io as nio # Data i/o -from ....interfaces import utility as util # utility -from ....pipeline import engine as pe # pypeline engine -from ....interfaces import camino as camino -from ....interfaces import fsl as fsl -from ....interfaces import camino2trackvis as cam2trk -from ....interfaces import freesurfer as fs # freesurfer -from ....interfaces import cmtk as cmtk -from ....algorithms import misc as misc -from ...misc.utils import (get_affine, get_data_dims, get_vox_dims, - select_aparc, select_aparc_annot) - - -def create_connectivity_pipeline(name="connectivity"): - """Creates a pipeline that does the same connectivity processing as in the - :ref:`example_dmri_connectivity` example script. Given a subject id (and completed Freesurfer reconstruction) - diffusion-weighted image, b-values, and b-vectors, the workflow will return the subject's connectome - as a Connectome File Format (CFF) file for use in Connectome Viewer (http://www.cmtk.org). 
- - Example - ------- - - >>> from nipype.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline - >>> conmapper = create_connectivity_pipeline("nipype_conmap") - >>> conmapper.inputs.inputnode.subjects_dir = '.' - >>> conmapper.inputs.inputnode.subject_id = 'subj1' - >>> conmapper.inputs.inputnode.dwi = 'data.nii.gz' - >>> conmapper.inputs.inputnode.bvecs = 'bvecs' - >>> conmapper.inputs.inputnode.bvals = 'bvals' - >>> conmapper.run() # doctest: +SKIP - - Inputs:: - - inputnode.subject_id - inputnode.subjects_dir - inputnode.dwi - inputnode.bvecs - inputnode.bvals - inputnode.resolution_network_file - - Outputs:: - - outputnode.connectome - outputnode.cmatrix - outputnode.gpickled_network - outputnode.fa - outputnode.struct - outputnode.trace - outputnode.tracts - outputnode.tensors - - """ - - inputnode_within = pe.Node( - interface=util.IdentityInterface(fields=[ - "subject_id", - "dwi", - "bvecs", - "bvals", - "subjects_dir", - "resolution_network_file", - ]), - name="inputnode_within") - - FreeSurferSource = pe.Node( - interface=nio.FreeSurferSource(), name='fssource') - - FreeSurferSourceLH = pe.Node( - interface=nio.FreeSurferSource(), name='fssourceLH') - FreeSurferSourceLH.inputs.hemi = 'lh' - - FreeSurferSourceRH = pe.Node( - interface=nio.FreeSurferSource(), name='fssourceRH') - FreeSurferSourceRH.inputs.hemi = 'rh' - """ - Since the b values and b vectors come from the FSL course, we must convert it to a scheme file - for use in Camino. - """ - - fsl2scheme = pe.Node(interface=camino.FSL2Scheme(), name="fsl2scheme") - fsl2scheme.inputs.usegradmod = True - """ - FSL's Brain Extraction tool is used to create a mask from the b0 image - """ - - b0Strip = pe.Node(interface=fsl.BET(mask=True), name='bet_b0') - """ - FSL's FLIRT function is used to coregister the b0 mask and the structural image. - A convert_xfm node is then used to obtain the inverse of the transformation matrix. - FLIRT is used once again to apply the inverse transformation to the parcellated brain image. - """ - - coregister = pe.Node(interface=fsl.FLIRT(dof=6), name='coregister') - coregister.inputs.cost = ('normmi') - - convertxfm = pe.Node(interface=fsl.ConvertXFM(), name='convertxfm') - convertxfm.inputs.invert_xfm = True - - inverse = pe.Node(interface=fsl.FLIRT(), name='inverse') - inverse.inputs.interp = ('nearestneighbour') - - inverse_AparcAseg = pe.Node( - interface=fsl.FLIRT(), name='inverse_AparcAseg') - inverse_AparcAseg.inputs.interp = ('nearestneighbour') - """ - A number of conversion operations are required to obtain NIFTI files from the FreesurferSource for each subject. 
- Nodes are used to convert the following: - * Original structural image to NIFTI - * Parcellated white matter image to NIFTI - * Parcellated whole-brain image to NIFTI - * Pial, white, inflated, and spherical surfaces for both the left and right hemispheres - are converted to GIFTI for visualization in ConnectomeViewer - * Parcellated annotation files for the left and right hemispheres are also converted to GIFTI - """ - - mri_convert_Brain = pe.Node( - interface=fs.MRIConvert(), name='mri_convert_Brain') - mri_convert_Brain.inputs.out_type = 'nii' - - mri_convert_AparcAseg = mri_convert_Brain.clone('mri_convert_AparcAseg') - - mris_convertLH = pe.Node(interface=fs.MRIsConvert(), name='mris_convertLH') - mris_convertLH.inputs.out_datatype = 'gii' - mris_convertRH = mris_convertLH.clone('mris_convertRH') - mris_convertRHwhite = mris_convertLH.clone('mris_convertRHwhite') - mris_convertLHwhite = mris_convertLH.clone('mris_convertLHwhite') - mris_convertRHinflated = mris_convertLH.clone('mris_convertRHinflated') - mris_convertLHinflated = mris_convertLH.clone('mris_convertLHinflated') - mris_convertRHsphere = mris_convertLH.clone('mris_convertRHsphere') - mris_convertLHsphere = mris_convertLH.clone('mris_convertLHsphere') - mris_convertLHlabels = mris_convertLH.clone('mris_convertLHlabels') - mris_convertRHlabels = mris_convertLH.clone('mris_convertRHlabels') - """ - In this section we create the nodes necessary for diffusion analysis. - First, the diffusion image is converted to voxel order, since this is the format in which Camino does - its processing. - """ - - image2voxel = pe.Node(interface=camino.Image2Voxel(), name="image2voxel") - """ - Second, diffusion tensors are fit to the voxel-order data. - If desired, these tensors can be converted to a Nifti tensor image using the DT2NIfTI interface. - """ - - dtifit = pe.Node(interface=camino.DTIFit(), name='dtifit') - """ - Next, a lookup table is generated from the schemefile and the - signal-to-noise ratio (SNR) of the unweighted (q=0) data. - """ - - dtlutgen = pe.Node(interface=camino.DTLUTGen(), name="dtlutgen") - dtlutgen.inputs.snr = 16.0 - dtlutgen.inputs.inversion = 1 - """ - In this tutorial we implement probabilistic tractography using the PICo algorithm. - PICo tractography requires an estimate of the fibre direction and a model of its uncertainty in each voxel; - this probabilitiy distribution map is produced using the following node. - """ - - picopdfs = pe.Node(interface=camino.PicoPDFs(), name="picopdfs") - picopdfs.inputs.inputmodel = 'dt' - """ - Finally, tractography is performed. In this tutorial, we will use only one iteration for time-saving purposes. - It is important to note that we use the TrackPICo interface here. This interface now expects the files required - for PICo tracking (i.e. the output from picopdfs). Similar interfaces exist for alternative types of tracking, - such as Bayesian tracking with Dirac priors (TrackBayesDirac). - """ - - track = pe.Node(interface=camino.TrackPICo(), name="track") - track.inputs.iterations = 1 - """ - Currently, the best program for visualizing tracts is TrackVis. For this reason, a node is included to - convert the raw tract data to .trk format. Solely for testing purposes, another node is added to perform the reverse. 
- """ - - camino2trackvis = pe.Node( - interface=cam2trk.Camino2Trackvis(), name="camino2trackvis") - camino2trackvis.inputs.min_length = 30 - camino2trackvis.inputs.voxel_order = 'LAS' - trk2camino = pe.Node( - interface=cam2trk.Trackvis2Camino(), name="trk2camino") - """ - Tracts can also be converted to VTK and OOGL formats, for use in programs such as GeomView and Paraview, - using the following two nodes. - """ - - vtkstreamlines = pe.Node( - interface=camino.VtkStreamlines(), name="vtkstreamlines") - procstreamlines = pe.Node( - interface=camino.ProcStreamlines(), name="procstreamlines") - """ - We can easily produce a variety of scalar values from our fitted tensors. The following nodes generate the - fractional anisotropy and diffusivity trace maps and their associated headers, and then merge them back - into a single .nii file. - """ - - fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(), name='fa') - trace = pe.Node(interface=camino.ComputeTensorTrace(), name='trace') - dteig = pe.Node(interface=camino.ComputeEigensystem(), name='dteig') - - analyzeheader_fa = pe.Node( - interface=camino.AnalyzeHeader(), name='analyzeheader_fa') - analyzeheader_fa.inputs.datatype = 'double' - analyzeheader_trace = pe.Node( - interface=camino.AnalyzeHeader(), name='analyzeheader_trace') - analyzeheader_trace.inputs.datatype = 'double' - - fa2nii = pe.Node(interface=misc.CreateNifti(), name='fa2nii') - trace2nii = fa2nii.clone("trace2nii") - """ - This section adds the Connectome Mapping Toolkit (CMTK) nodes. - These interfaces are fairly experimental and may not function properly. - In order to perform connectivity mapping using CMTK, the parcellated structural data is rewritten - using the indices and parcellation scheme from the connectome mapper (CMP). This process has been - written into the ROIGen interface, which will output a remapped aparc+aseg image as well as a - dictionary of label information (i.e. name, display colours) pertaining to the original and remapped regions. - These label values are input from a user-input lookup table, if specified, and otherwise the default - Freesurfer LUT (/freesurfer/FreeSurferColorLUT.txt). - """ - - roigen = pe.Node(interface=cmtk.ROIGen(), name="ROIGen") - roigen_structspace = roigen.clone("ROIGen_structspace") - """ - The CreateMatrix interface takes in the remapped aparc+aseg image as well as the label dictionary and fiber tracts - and outputs a number of different files. The most important of which is the connectivity network itself, which is stored - as a 'gpickle' and can be loaded using Python's NetworkX package (see CreateMatrix docstring). Also outputted are various - NumPy arrays containing detailed tract information, such as the start and endpoint regions, and statistics on the mean and - standard deviation for the fiber length of each connection. These matrices can be used in the ConnectomeViewer to plot the - specific tracts that connect between user-selected regions. - """ - - createnodes = pe.Node(interface=cmtk.CreateNodes(), name="CreateNodes") - creatematrix = pe.Node(interface=cmtk.CreateMatrix(), name="CreateMatrix") - creatematrix.inputs.count_region_intersections = True - """ - Here we define the endpoint of this tutorial, which is the CFFConverter node, as well as a few nodes which use - the Nipype Merge utility. These are useful for passing lists of the files we want packaged in our CFF file. 
- """ - - CFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="CFFConverter") - - giftiSurfaces = pe.Node(interface=util.Merge(8), name="GiftiSurfaces") - giftiLabels = pe.Node(interface=util.Merge(2), name="GiftiLabels") - niftiVolumes = pe.Node(interface=util.Merge(3), name="NiftiVolumes") - fiberDataArrays = pe.Node(interface=util.Merge(4), name="FiberDataArrays") - """ - Since we have now created all our nodes, we can define our workflow and start making connections. - """ - - mapping = pe.Workflow(name='mapping') - """ - First, we connect the input node to the early conversion functions. - FreeSurfer input nodes: - """ - - mapping.connect([(inputnode_within, FreeSurferSource, [("subjects_dir", - "subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSource, [("subject_id", - "subject_id")])]) - - mapping.connect([(inputnode_within, FreeSurferSourceLH, - [("subjects_dir", "subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSourceLH, [("subject_id", - "subject_id")])]) - - mapping.connect([(inputnode_within, FreeSurferSourceRH, - [("subjects_dir", "subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSourceRH, [("subject_id", - "subject_id")])]) - """ - Required conversions for processing in Camino: - """ - - mapping.connect([(inputnode_within, image2voxel, - [("dwi", "in_file")]), (inputnode_within, fsl2scheme, - [("bvecs", "bvec_file"), - ("bvals", "bval_file")]), - (image2voxel, dtifit, [['voxel_order', 'in_file']]), - (fsl2scheme, dtifit, [['scheme', 'scheme_file']])]) - """ - Nifti conversions for the subject's stripped brain image from Freesurfer: - """ - - mapping.connect([(FreeSurferSource, mri_convert_Brain, [('brain', - 'in_file')])]) - """ - Surface conversions to GIFTI (pial, white, inflated, and sphere for both hemispheres) - """ - - mapping.connect([(FreeSurferSourceLH, mris_convertLH, [('pial', - 'in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRH, [('pial', - 'in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite, [('white', - 'in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite, [('white', - 'in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated, - [('inflated', 'in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated, - [('inflated', 'in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere, - [('sphere', 'in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere, - [('sphere', 'in_file')])]) - """ - The annotation files are converted using the pial surface as a map via the MRIsConvert interface. - One of the functions defined earlier is used to select the lh.aparc.annot and rh.aparc.annot files - specifically (rather than i.e. rh.aparc.a2009s.annot) from the output list given by the FreeSurferSource. - """ - - mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, - [('pial', 'in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, - [('pial', 'in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, - [(('annot', select_aparc_annot), 'annot_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, - [(('annot', select_aparc_annot), 'annot_file')])]) - """ - This section coregisters the diffusion-weighted and parcellated white-matter / whole brain images. - At present the conmap node connection is left commented, as there have been recent changes in Camino - code that have presented some users with errors. 
- """ - - mapping.connect([(inputnode_within, b0Strip, [('dwi', 'in_file')])]) - mapping.connect([(inputnode_within, b0Strip, [('dwi', 't2_guided')]) - ]) # Added to improve damaged brain extraction - mapping.connect([(b0Strip, coregister, [('out_file', 'in_file')])]) - mapping.connect([(mri_convert_Brain, coregister, [('out_file', - 'reference')])]) - mapping.connect([(coregister, convertxfm, [('out_matrix_file', - 'in_file')])]) - mapping.connect([(b0Strip, inverse, [('out_file', 'reference')])]) - mapping.connect([(convertxfm, inverse, [('out_file', 'in_matrix_file')])]) - mapping.connect([(mri_convert_Brain, inverse, [('out_file', 'in_file')])]) - """ - The tractography pipeline consists of the following nodes. Further information about the tractography - can be found in nipype/examples/dmri_camino_dti.py. - """ - - mapping.connect([(b0Strip, track, [("mask_file", "seed_file")])]) - mapping.connect([(fsl2scheme, dtlutgen, [("scheme", "scheme_file")])]) - mapping.connect([(dtlutgen, picopdfs, [("dtLUT", "luts")])]) - mapping.connect([(dtifit, picopdfs, [("tensor_fitted", "in_file")])]) - mapping.connect([(picopdfs, track, [("pdfs", "in_file")])]) - """ - Connecting the Fractional Anisotropy and Trace nodes is simple, as they obtain their input from the - tensor fitting. This is also where our voxel- and data-grabbing functions come in. We pass these functions, - along with the original DWI image from the input node, to the header-generating nodes. This ensures that the - files will be correct and readable. - """ - - mapping.connect([(dtifit, fa, [("tensor_fitted", "in_file")])]) - mapping.connect([(fa, analyzeheader_fa, [("fa", "in_file")])]) - mapping.connect([(inputnode_within, analyzeheader_fa, - [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) - mapping.connect([(fa, fa2nii, [('fa', 'data_file')])]) - mapping.connect([(inputnode_within, fa2nii, [(('dwi', get_affine), - 'affine')])]) - mapping.connect([(analyzeheader_fa, fa2nii, [('header', 'header_file')])]) - - mapping.connect([(dtifit, trace, [("tensor_fitted", "in_file")])]) - mapping.connect([(trace, analyzeheader_trace, [("trace", "in_file")])]) - mapping.connect([(inputnode_within, analyzeheader_trace, - [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) - mapping.connect([(trace, trace2nii, [('trace', 'data_file')])]) - mapping.connect([(inputnode_within, trace2nii, [(('dwi', get_affine), - 'affine')])]) - mapping.connect([(analyzeheader_trace, trace2nii, [('header', - 'header_file')])]) - - mapping.connect([(dtifit, dteig, [("tensor_fitted", "in_file")])]) - """ - The output tracts are converted to Trackvis format (and back). Here we also use the voxel- and data-grabbing - functions defined at the beginning of the pipeline. - """ - - mapping.connect([(track, camino2trackvis, [('tracked', 'in_file')]), - (track, vtkstreamlines, [['tracked', 'in_file']]), - (camino2trackvis, trk2camino, [['trackvis', 'in_file']])]) - mapping.connect([(inputnode_within, camino2trackvis, - [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) - """ - Here the CMTK connectivity mapping nodes are connected. - The original aparc+aseg image is converted to NIFTI, then registered to - the diffusion image and delivered to the ROIGen node. The remapped parcellation, - original tracts, and label file are then given to CreateMatrix. 
- """ - - mapping.connect(inputnode_within, 'resolution_network_file', createnodes, - 'resolution_network_file') - mapping.connect(createnodes, 'node_network', creatematrix, - 'resolution_network_file') - mapping.connect([(FreeSurferSource, mri_convert_AparcAseg, - [(('aparc_aseg', select_aparc), 'in_file')])]) - - mapping.connect([(b0Strip, inverse_AparcAseg, [('out_file', - 'reference')])]) - mapping.connect([(convertxfm, inverse_AparcAseg, [('out_file', - 'in_matrix_file')])]) - mapping.connect([(mri_convert_AparcAseg, inverse_AparcAseg, - [('out_file', 'in_file')])]) - mapping.connect([(mri_convert_AparcAseg, roigen_structspace, - [('out_file', 'aparc_aseg_file')])]) - mapping.connect([(roigen_structspace, createnodes, [("roi_file", - "roi_file")])]) - - mapping.connect([(inverse_AparcAseg, roigen, [("out_file", - "aparc_aseg_file")])]) - mapping.connect([(roigen, creatematrix, [("roi_file", "roi_file")])]) - mapping.connect([(camino2trackvis, creatematrix, [("trackvis", - "tract_file")])]) - mapping.connect([(inputnode_within, creatematrix, [("subject_id", - "out_matrix_file")])]) - mapping.connect([(inputnode_within, creatematrix, - [("subject_id", "out_matrix_mat_file")])]) - """ - The merge nodes defined earlier are used here to create lists of the files which are - destined for the CFFConverter. - """ - - mapping.connect([(mris_convertLH, giftiSurfaces, [("converted", "in1")])]) - mapping.connect([(mris_convertRH, giftiSurfaces, [("converted", "in2")])]) - mapping.connect([(mris_convertLHwhite, giftiSurfaces, [("converted", - "in3")])]) - mapping.connect([(mris_convertRHwhite, giftiSurfaces, [("converted", - "in4")])]) - mapping.connect([(mris_convertLHinflated, giftiSurfaces, [("converted", - "in5")])]) - mapping.connect([(mris_convertRHinflated, giftiSurfaces, [("converted", - "in6")])]) - mapping.connect([(mris_convertLHsphere, giftiSurfaces, [("converted", - "in7")])]) - mapping.connect([(mris_convertRHsphere, giftiSurfaces, [("converted", - "in8")])]) - - mapping.connect([(mris_convertLHlabels, giftiLabels, [("converted", - "in1")])]) - mapping.connect([(mris_convertRHlabels, giftiLabels, [("converted", - "in2")])]) - - mapping.connect([(roigen, niftiVolumes, [("roi_file", "in1")])]) - mapping.connect([(inputnode_within, niftiVolumes, [("dwi", "in2")])]) - mapping.connect([(mri_convert_Brain, niftiVolumes, [("out_file", "in3")])]) - - mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file", - "in1")])]) - mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file_mm", - "in2")])]) - mapping.connect([(creatematrix, fiberDataArrays, [("fiber_length_file", - "in3")])]) - mapping.connect([(creatematrix, fiberDataArrays, [("fiber_label_file", - "in4")])]) - """ - This block actually connects the merged lists to the CFF converter. We pass the surfaces - and volumes that are to be included, as well as the tracts and the network itself. The currently - running pipeline (dmri_connectivity.py) is also scraped and included in the CFF file. This - makes it easy for the user to examine the entire processing pathway used to generate the end - product. 
- """ - - CFFConverter.inputs.script_files = op.abspath( - inspect.getfile(inspect.currentframe())) - mapping.connect([(giftiSurfaces, CFFConverter, [("out", - "gifti_surfaces")])]) - mapping.connect([(giftiLabels, CFFConverter, [("out", "gifti_labels")])]) - mapping.connect([(creatematrix, CFFConverter, [("matrix_files", - "gpickled_networks")])]) - - mapping.connect([(niftiVolumes, CFFConverter, [("out", "nifti_volumes")])]) - mapping.connect([(fiberDataArrays, CFFConverter, [("out", "data_files")])]) - mapping.connect([(camino2trackvis, CFFConverter, [("trackvis", - "tract_files")])]) - mapping.connect([(inputnode_within, CFFConverter, [("subject_id", - "title")])]) - """ - Finally, we create another higher-level workflow to connect our mapping workflow with the info and datagrabbing nodes - declared at the beginning. Our tutorial can is now extensible to any arbitrary number of subjects by simply adding - their names to the subject list and their data to the proper folders. - """ - - inputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - "subject_id", "dwi", "bvecs", "bvals", "subjects_dir", - "resolution_network_file" - ]), - name="inputnode") - - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - "fa", "struct", "trace", "tracts", "connectome", "cmatrix", - "networks", "rois", "mean_fiber_length", "fiber_length_std", - "tensors" - ]), - name="outputnode") - - connectivity = pe.Workflow(name="connectivity") - connectivity.base_output_dir = name - - connectivity.connect([ - (inputnode, mapping, - [("dwi", "inputnode_within.dwi"), ("bvals", "inputnode_within.bvals"), - ("bvecs", "inputnode_within.bvecs"), ("subject_id", - "inputnode_within.subject_id"), - ("subjects_dir", "inputnode_within.subjects_dir"), - ("resolution_network_file", - "inputnode_within.resolution_network_file")]) - ]) - - connectivity.connect( - [(mapping, outputnode, - [("camino2trackvis.trackvis", - "tracts"), ("CFFConverter.connectome_file", "connectome"), - ("CreateMatrix.matrix_mat_file", - "cmatrix"), ("CreateMatrix.mean_fiber_length_matrix_mat_file", - "mean_fiber_length"), - ("CreateMatrix.fiber_length_std_matrix_mat_file", - "fiber_length_std"), ("fa2nii.nifti_file", - "fa"), ("CreateMatrix.matrix_files", - "networks"), ("ROIGen.roi_file", - "rois"), - ("mri_convert_Brain.out_file", - "struct"), ("trace2nii.nifti_file", - "trace"), ("dtifit.tensor_fitted", "tensors")])]) - - return connectivity diff --git a/nipype/workflows/dmri/camino/diffusion.py b/nipype/workflows/dmri/camino/diffusion.py deleted file mode 100644 index 708ddb8bc4..0000000000 --- a/nipype/workflows/dmri/camino/diffusion.py +++ /dev/null @@ -1,245 +0,0 @@ -# -*- coding: utf-8 -*- -from ....interfaces import utility as util # utility -from ....pipeline import engine as pe # pypeline engine -from ....interfaces import camino as camino -from ....interfaces import fsl as fsl -from ....interfaces import camino2trackvis as cam2trk -from ....algorithms import misc as misc -from ...misc.utils import get_affine, get_data_dims, get_vox_dims - - -def create_camino_dti_pipeline(name="dtiproc"): - """Creates a pipeline that does the same diffusion processing as in the - :doc:`../../users/examples/dmri_camino_dti` example script. Given a diffusion-weighted image, - b-values, and b-vectors, the workflow will return the tractography - computed from diffusion tensors and from PICo probabilistic tractography. 
- - Example - ------- - - >>> import os - >>> nipype_camino_dti = create_camino_dti_pipeline("nipype_camino_dti") - >>> nipype_camino_dti.inputs.inputnode.dwi = os.path.abspath('dwi.nii') - >>> nipype_camino_dti.inputs.inputnode.bvecs = os.path.abspath('bvecs') - >>> nipype_camino_dti.inputs.inputnode.bvals = os.path.abspath('bvals') - >>> nipype_camino_dti.run() # doctest: +SKIP - - Inputs:: - - inputnode.dwi - inputnode.bvecs - inputnode.bvals - - Outputs:: - - outputnode.fa - outputnode.trace - outputnode.tracts_pico - outputnode.tracts_dt - outputnode.tensors - - """ - - inputnode1 = pe.Node( - interface=util.IdentityInterface(fields=["dwi", "bvecs", "bvals"]), - name="inputnode1") - """ - Setup for Diffusion Tensor Computation - -------------------------------------- - In this section we create the nodes necessary for diffusion analysis. - First, the diffusion image is converted to voxel order. - """ - - image2voxel = pe.Node(interface=camino.Image2Voxel(), name="image2voxel") - fsl2scheme = pe.Node(interface=camino.FSL2Scheme(), name="fsl2scheme") - fsl2scheme.inputs.usegradmod = True - """ - Second, diffusion tensors are fit to the voxel-order data. - """ - - dtifit = pe.Node(interface=camino.DTIFit(), name='dtifit') - """ - Next, a lookup table is generated from the schemefile and the - signal-to-noise ratio (SNR) of the unweighted (q=0) data. - """ - - dtlutgen = pe.Node(interface=camino.DTLUTGen(), name="dtlutgen") - dtlutgen.inputs.snr = 16.0 - dtlutgen.inputs.inversion = 1 - """ - In this tutorial we implement probabilistic tractography using the PICo algorithm. - PICo tractography requires an estimate of the fibre direction and a model of its - uncertainty in each voxel; this is produced using the following node. - """ - - picopdfs = pe.Node(interface=camino.PicoPDFs(), name="picopdfs") - picopdfs.inputs.inputmodel = 'dt' - """ - An FSL BET node creates a brain mask is generated from the diffusion image for seeding the PICo tractography. - """ - - bet = pe.Node(interface=fsl.BET(), name="bet") - bet.inputs.mask = True - """ - Finally, tractography is performed. - First DT streamline tractography. - """ - - trackdt = pe.Node(interface=camino.TrackDT(), name="trackdt") - """ - Now camino's Probablistic Index of connectivity algorithm. - In this tutorial, we will use only 1 iteration for time-saving purposes. - """ - - trackpico = pe.Node(interface=camino.TrackPICo(), name="trackpico") - trackpico.inputs.iterations = 1 - """ - Currently, the best program for visualizing tracts is TrackVis. For this reason, a node is included to convert the raw tract data to .trk format. Solely for testing purposes, another node is added to perform the reverse. - """ - - cam2trk_dt = pe.Node( - interface=cam2trk.Camino2Trackvis(), name="cam2trk_dt") - cam2trk_dt.inputs.min_length = 30 - cam2trk_dt.inputs.voxel_order = 'LAS' - - cam2trk_pico = pe.Node( - interface=cam2trk.Camino2Trackvis(), name="cam2trk_pico") - cam2trk_pico.inputs.min_length = 30 - cam2trk_pico.inputs.voxel_order = 'LAS' - """ - Tracts can also be converted to VTK and OOGL formats, for use in programs such as GeomView and Paraview, using the following two nodes. - """ - - # vtkstreamlines = pe.Node(interface=camino.VtkStreamlines(), name="vtkstreamlines") - # procstreamlines = pe.Node(interface=camino.ProcStreamlines(), name="procstreamlines") - # procstreamlines.inputs.outputtracts = 'oogl' - """ - We can also produce a variety of scalar values from our fitted tensors. 
The following nodes generate the fractional anisotropy and diffusivity trace maps and their associated headers. - """ - - fa = pe.Node(interface=camino.ComputeFractionalAnisotropy(), name='fa') - # md = pe.Node(interface=camino.MD(),name='md') - trace = pe.Node(interface=camino.ComputeTensorTrace(), name='trace') - dteig = pe.Node(interface=camino.ComputeEigensystem(), name='dteig') - - analyzeheader_fa = pe.Node( - interface=camino.AnalyzeHeader(), name="analyzeheader_fa") - analyzeheader_fa.inputs.datatype = "double" - analyzeheader_trace = analyzeheader_fa.clone('analyzeheader_trace') - - # analyzeheader_md = pe.Node(interface= camino.AnalyzeHeader(), name = "analyzeheader_md") - # analyzeheader_md.inputs.datatype = "double" - # analyzeheader_trace = analyzeheader_md.clone('analyzeheader_trace') - - fa2nii = pe.Node(interface=misc.CreateNifti(), name='fa2nii') - trace2nii = fa2nii.clone("trace2nii") - """ - Since we have now created all our nodes, we can now define our workflow and start making connections. - """ - - tractography = pe.Workflow(name='tractography') - - tractography.connect([(inputnode1, bet, [("dwi", "in_file")])]) - """ - File format conversion - """ - - tractography.connect([(inputnode1, image2voxel, [("dwi", "in_file")]), - (inputnode1, fsl2scheme, [("bvecs", "bvec_file"), - ("bvals", "bval_file")])]) - """ - Tensor fitting - """ - - tractography.connect([(image2voxel, dtifit, [['voxel_order', 'in_file']]), - (fsl2scheme, dtifit, [['scheme', 'scheme_file']])]) - """ - Workflow for applying DT streamline tractogpahy - """ - - tractography.connect([(bet, trackdt, [("mask_file", "seed_file")])]) - tractography.connect([(dtifit, trackdt, [("tensor_fitted", "in_file")])]) - """ - Workflow for applying PICo - """ - - tractography.connect([(bet, trackpico, [("mask_file", "seed_file")])]) - tractography.connect([(fsl2scheme, dtlutgen, [("scheme", "scheme_file")])]) - tractography.connect([(dtlutgen, picopdfs, [("dtLUT", "luts")])]) - tractography.connect([(dtifit, picopdfs, [("tensor_fitted", "in_file")])]) - tractography.connect([(picopdfs, trackpico, [("pdfs", "in_file")])]) - - # Mean diffusivity still appears broken - # tractography.connect([(dtifit, md,[("tensor_fitted","in_file")])]) - # tractography.connect([(md, analyzeheader_md,[("md","in_file")])]) - # tractography.connect([(inputnode, analyzeheader_md,[(('dwi', get_vox_dims), 'voxel_dims'), - # (('dwi', get_data_dims), 'data_dims')])]) - # This line is commented out because the ProcStreamlines node keeps throwing memory errors - # tractography.connect([(track, procstreamlines,[("tracked","in_file")])]) - """ - Connecting the Fractional Anisotropy and Trace nodes is simple, as they obtain their input from the - tensor fitting. - - This is also where our voxel- and data-grabbing functions come in. We pass these functions, along with the original DWI image from the input node, to the header-generating nodes. This ensures that the files will be correct and readable. 
- """ - - tractography.connect([(dtifit, fa, [("tensor_fitted", "in_file")])]) - tractography.connect([(fa, analyzeheader_fa, [("fa", "in_file")])]) - tractography.connect([(inputnode1, analyzeheader_fa, - [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) - tractography.connect([(fa, fa2nii, [('fa', 'data_file')])]) - tractography.connect([(inputnode1, fa2nii, [(('dwi', get_affine), - 'affine')])]) - tractography.connect([(analyzeheader_fa, fa2nii, [('header', - 'header_file')])]) - - tractography.connect([(dtifit, trace, [("tensor_fitted", "in_file")])]) - tractography.connect([(trace, analyzeheader_trace, [("trace", - "in_file")])]) - tractography.connect([(inputnode1, analyzeheader_trace, - [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) - tractography.connect([(trace, trace2nii, [('trace', 'data_file')])]) - tractography.connect([(inputnode1, trace2nii, [(('dwi', get_affine), - 'affine')])]) - tractography.connect([(analyzeheader_trace, trace2nii, [('header', - 'header_file')])]) - - tractography.connect([(dtifit, dteig, [("tensor_fitted", "in_file")])]) - - tractography.connect([(trackpico, cam2trk_pico, [('tracked', 'in_file')])]) - tractography.connect([(trackdt, cam2trk_dt, [('tracked', 'in_file')])]) - tractography.connect([(inputnode1, cam2trk_pico, - [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) - - tractography.connect([(inputnode1, cam2trk_dt, - [(('dwi', get_vox_dims), 'voxel_dims'), - (('dwi', get_data_dims), 'data_dims')])]) - - inputnode = pe.Node( - interface=util.IdentityInterface(fields=["dwi", "bvecs", "bvals"]), - name="inputnode") - - outputnode = pe.Node( - interface=util.IdentityInterface( - fields=["fa", "trace", "tracts_pico", "tracts_dt", "tensors"]), - name="outputnode") - - workflow = pe.Workflow(name=name) - workflow.base_output_dir = name - - workflow.connect([(inputnode, tractography, - [("dwi", "inputnode1.dwi"), - ("bvals", "inputnode1.bvals"), ("bvecs", - "inputnode1.bvecs")])]) - - workflow.connect([(tractography, outputnode, - [("cam2trk_dt.trackvis", "tracts_dt"), - ("cam2trk_pico.trackvis", - "tracts_pico"), ("fa2nii.nifti_file", "fa"), - ("trace2nii.nifti_file", - "trace"), ("dtifit.tensor_fitted", "tensors")])]) - - return workflow diff --git a/nipype/workflows/dmri/camino/group_connectivity.py b/nipype/workflows/dmri/camino/group_connectivity.py deleted file mode 100644 index 1307f8c4b6..0000000000 --- a/nipype/workflows/dmri/camino/group_connectivity.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -import os.path as op # system functions - -from .connectivity_mapping import create_connectivity_pipeline -from ....interfaces import io as nio # Data i/o -from ....interfaces import utility as util # utility -from ....pipeline import engine as pe # pypeline engine - - -def create_group_connectivity_pipeline(group_list, - group_id, - data_dir, - subjects_dir, - output_dir, - template_args_dict=0): - """Creates a pipeline that performs basic Camino structural connectivity processing - on groups of subjects. Given a diffusion-weighted image, and text files containing - the associated b-values and b-vectors, the workflow will return each subjects' connectomes - in a Connectome File Format (CFF) file, for use in Connectome Viewer (http://www.cmtk.org). - - Example - ------- - - >>> import nipype.interfaces.freesurfer as fs - >>> import nipype.workflows.dmri.camino.group_connectivity as groupwork - >>> subjects_dir = '.' 
- >>> data_dir = '.' - >>> output_dir = '.' - >>> fs.FSCommand.set_default_subjects_dir(subjects_dir) - >>> group_list = {} - >>> group_list['group1'] = ['subj1', 'subj2'] - >>> group_list['group2'] = ['subj3', 'subj4'] - >>> template_args = dict(dwi=[['subject_id', 'dwi']], bvecs=[['subject_id', 'bvecs']], bvals=[['subject_id', 'bvals']]) - >>> group_id = 'group1' - >>> l1pipeline = groupwork.create_group_connectivity_pipeline(group_list, group_id, data_dir, subjects_dir, output_dir, template_args) - >>> l1pipeline.run() # doctest: +SKIP - - Inputs:: - - group_list: Dictionary of subject lists, keyed by group name - group_id: String containing the group name - data_dir: Path to the data directory - subjects_dir: Path to the Freesurfer 'subjects' directory - output_dir: Path for the output files - template_args_dict: Dictionary of template arguments for the connectivity pipeline datasource - e.g. info = dict(dwi=[['subject_id', 'dwi']], - bvecs=[['subject_id','bvecs']], - bvals=[['subject_id','bvals']]) - """ - group_infosource = pe.Node( - interface=util.IdentityInterface(fields=['group_id']), - name="group_infosource") - group_infosource.inputs.group_id = group_id - subject_list = group_list[group_id] - subj_infosource = pe.Node( - interface=util.IdentityInterface(fields=['subject_id']), - name="subj_infosource") - subj_infosource.iterables = ('subject_id', subject_list) - - if template_args_dict == 0: - info = dict( - dwi=[['subject_id', 'dwi']], - bvecs=[['subject_id', 'bvecs']], - bvals=[['subject_id', 'bvals']]) - else: - info = template_args_dict - - datasource = pe.Node( - interface=nio.DataGrabber( - infields=['subject_id'], outfields=list(info.keys())), - name='datasource') - - datasource.inputs.template = "%s/%s" - datasource.inputs.base_directory = data_dir - datasource.inputs.field_template = dict(dwi='%s/%s.nii') - datasource.inputs.template_args = info - datasource.inputs.sort_filelist = True - """ - Create a connectivity mapping workflow - """ - conmapper = create_connectivity_pipeline("nipype_conmap") - conmapper.inputs.inputnode.subjects_dir = subjects_dir - conmapper.base_dir = op.abspath('conmapper') - - datasink = pe.Node(interface=nio.DataSink(), name="datasink") - datasink.inputs.base_directory = output_dir - datasink.inputs.container = group_id - - l1pipeline = pe.Workflow(name="l1pipeline_" + group_id) - l1pipeline.base_dir = output_dir - l1pipeline.base_output_dir = group_id - l1pipeline.connect([(subj_infosource, datasource, [('subject_id', - 'subject_id')])]) - l1pipeline.connect([(subj_infosource, conmapper, - [('subject_id', 'inputnode.subject_id')])]) - l1pipeline.connect([(datasource, conmapper, [ - ("dwi", "inputnode.dwi"), - ("bvals", "inputnode.bvals"), - ("bvecs", "inputnode.bvecs"), - ])]) - l1pipeline.connect([(conmapper, datasink, [ - ("outputnode.connectome", "@l1output.cff"), - ("outputnode.fa", "@l1output.fa"), - ("outputnode.tracts", "@l1output.tracts"), - ("outputnode.trace", "@l1output.trace"), - ("outputnode.cmatrix", "@l1output.cmatrix"), - ("outputnode.rois", "@l1output.rois"), - ("outputnode.struct", "@l1output.struct"), - ("outputnode.networks", "@l1output.networks"), - ("outputnode.mean_fiber_length", "@l1output.mean_fiber_length"), - ("outputnode.fiber_length_std", "@l1output.fiber_length_std"), - ])]) - l1pipeline.connect([(group_infosource, datasink, [('group_id', - '@group_id')])]) - return l1pipeline diff --git a/nipype/workflows/dmri/connectivity/__init__.py b/nipype/workflows/dmri/connectivity/__init__.py deleted file mode 100644 
index b34ca0dacb..0000000000 --- a/nipype/workflows/dmri/connectivity/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -from .nx import (create_networkx_pipeline, create_cmats_to_csv_pipeline) -from .group_connectivity import ( - create_merge_networks_by_group_workflow, - create_merge_network_results_by_group_workflow, - create_merge_group_networks_workflow, - create_merge_group_network_results_workflow, - create_average_networks_by_group_workflow) diff --git a/nipype/workflows/dmri/connectivity/group_connectivity.py b/nipype/workflows/dmri/connectivity/group_connectivity.py deleted file mode 100644 index a918104bd1..0000000000 --- a/nipype/workflows/dmri/connectivity/group_connectivity.py +++ /dev/null @@ -1,631 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open - -from future.utils import raise_from - -import os.path as op - -from ....interfaces import io as nio # Data i/o -from ....interfaces import utility as util # utility -from ....interfaces import cmtk as cmtk -from ....algorithms import misc as misc -from ....pipeline import engine as pe # pypeline engine -from ....interfaces.utility import Function -from ....utils.misc import package_check - -have_cmp = True -try: - package_check('cmp') -except Exception as e: - have_cmp = False -else: - import cmp - - -def pullnodeIDs(in_network, name_key='dn_name'): - """ This function will return the values contained, for each node in - a network, given an input key. By default it will return the node names - """ - import networkx as nx - import numpy as np - from nipype.interfaces.base import isdefined - if not isdefined(in_network): - raise ValueError - return None - try: - ntwk = nx.read_graphml(in_network) - except: - ntwk = nx.read_gpickle(in_network) - nodedata = ntwk.node - ids = [] - integer_nodelist = [] - for node in list(nodedata.keys()): - integer_nodelist.append(int(node)) - for node in np.sort(integer_nodelist): - try: - nodeid = nodedata[node][name_key] - except KeyError: - nodeid = nodedata[str(node)][name_key] - ids.append(nodeid) - return ids - - -def concatcsv(in_files): - """ This function will contatenate two "comma-separated value" - text files, but remove the first row (usually column headers) from - all but the first file. - """ - import os.path as op - from nipype.utils.filemanip import split_filename - - if not isinstance(in_files, list): - return in_files - if isinstance(in_files[0], list): - in_files = in_files[0] - first = open(in_files[0], 'r') - path, name, ext = split_filename(in_files[0]) - out_name = op.abspath('concat.csv') - out_file = open(out_name, 'w') - out_file.write(first.readline()) - first.close() - for in_file in in_files: - file_to_read = open(in_file, 'r') - file_to_read.readline() # scrap first line - for line in file_to_read: - out_file.write(line) - return out_name - - -def create_merge_networks_by_group_workflow(group_list, group_id, data_dir, - subjects_dir, output_dir): - """Creates a second-level pipeline to merge the Connectome File Format (CFF) outputs from the group-level - MRtrix structural connectivity processing pipeline into a single CFF file for each group. - - Example - ------- - - >>> import nipype.workflows.dmri.connectivity.group_connectivity as groupwork - >>> from nipype.testing import example_data - >>> subjects_dir = '.' - >>> data_dir = '.' - >>> output_dir = '.' 
- >>> group_list = {} - >>> group_list['group1'] = ['subj1', 'subj2'] - >>> group_list['group2'] = ['subj3', 'subj4'] - >>> group_id = 'group1' - >>> l2pipeline = groupwork.create_merge_networks_by_group_workflow(group_list, group_id, data_dir, subjects_dir, output_dir) - >>> l2pipeline.run() # doctest: +SKIP - - Inputs:: - - group_list: Dictionary of subject lists, keyed by group name - group_id: String containing the group name - data_dir: Path to the data directory - subjects_dir: Path to the Freesurfer 'subjects' directory - output_dir: Path for the output files - """ - group_infosource = pe.Node( - interface=util.IdentityInterface(fields=['group_id']), - name="group_infosource") - group_infosource.inputs.group_id = group_id - - l2infosource = pe.Node( - interface=util.IdentityInterface(fields=['group_id']), - name='l2infosource') - - l2source = pe.Node( - nio.DataGrabber(infields=['group_id'], outfields=['CFFfiles']), - name='l2source') - l2source.inputs.template_args = dict(CFFfiles=[['group_id']]) - l2source.inputs.template = op.join(output_dir, '%s/cff/*/connectome.cff') - l2source.inputs.base_directory = data_dir - l2source.inputs.sort_filelist = True - - l2inputnode = pe.Node( - interface=util.IdentityInterface(fields=['CFFfiles']), - name='l2inputnode') - MergeCNetworks = pe.Node( - interface=cmtk.MergeCNetworks(), name="MergeCNetworks") - - l2datasink = pe.Node(interface=nio.DataSink(), name="l2datasink") - l2datasink.inputs.base_directory = output_dir - l2datasink.inputs.container = group_id - - l2pipeline = pe.Workflow(name="l2output_" + group_id) - l2pipeline.base_dir = op.join(output_dir, 'l2output') - l2pipeline.connect([(group_infosource, l2infosource, [('group_id', - 'group_id')])]) - - l2pipeline.connect([ - (l2infosource, l2source, [('group_id', 'group_id')]), - (l2source, l2inputnode, [('CFFfiles', 'CFFfiles')]), - ]) - - l2pipeline.connect([(l2inputnode, MergeCNetworks, [('CFFfiles', - 'in_files')])]) - l2pipeline.connect([(group_infosource, MergeCNetworks, [('group_id', - 'out_file')])]) - l2pipeline.connect([(MergeCNetworks, l2datasink, [('connectome_file', - '@l2output')])]) - l2pipeline.connect([(group_infosource, l2datasink, [('group_id', - '@group_id')])]) - return l2pipeline - - -def create_merge_network_results_by_group_workflow( - group_list, group_id, data_dir, subjects_dir, output_dir): - """Creates a second-level pipeline to merge the Connectome File Format (CFF) outputs from the group-level - MRtrix structural connectivity processing pipeline into a single CFF file for each group. - - Example - ------- - - >>> import nipype.workflows.dmri.connectivity.group_connectivity as groupwork - >>> from nipype.testing import example_data - >>> subjects_dir = '.' - >>> data_dir = '.' - >>> output_dir = '.' 
- >>> group_list = {} - >>> group_list['group1'] = ['subj1', 'subj2'] - >>> group_list['group2'] = ['subj3', 'subj4'] - >>> group_id = 'group1' - >>> l2pipeline = groupwork.create_merge_network_results_by_group_workflow(group_list, group_id, data_dir, subjects_dir, output_dir) - >>> l2pipeline.run() # doctest: +SKIP - - Inputs:: - - group_list: Dictionary of subject lists, keyed by group name - group_id: String containing the group name - data_dir: Path to the data directory - subjects_dir: Path to the Freesurfer 'subjects' directory - output_dir: Path for the output files - """ - group_infosource = pe.Node( - interface=util.IdentityInterface(fields=['group_id']), - name="group_infosource") - group_infosource.inputs.group_id = group_id - - l2infosource = pe.Node( - interface=util.IdentityInterface(fields=['group_id', 'merged']), - name='l2infosource') - - l2source = pe.Node( - nio.DataGrabber( - infields=['group_id'], - outfields=[ - 'CFFfiles', 'CSVmatrices', 'CSVfibers', 'CSVnodal', 'CSVglobal' - ]), - name='l2source') - - l2source.inputs.template_args = dict( - CFFfiles=[['group_id']], - CSVmatrices=[['group_id']], - CSVnodal=[['group_id']], - CSVglobal=[['group_id']], - CSVfibers=[['group_id']]) - l2source.inputs.base_directory = data_dir - l2source.inputs.template = '%s/%s' - l2source.inputs.field_template = dict( - CFFfiles=op.join(output_dir, '%s/cff/*/connectome.cff'), - CSVmatrices=op.join(output_dir, '%s/cmatrices_csv/*/*.csv'), - CSVnodal=op.join(output_dir, '%s/nxcsv/*/*nodal*.csv'), - CSVglobal=op.join(output_dir, '%s/nxcsv/*/*global*.csv'), - CSVfibers=op.join(output_dir, '%s/fiber_csv/*/*fibers*.csv')) - l2source.inputs.sort_filelist = True - - l2inputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'CFFfiles', 'CSVfibers', 'CSVmatrices', 'CSVnodal', 'CSVglobal', - 'network_file' - ]), - name='l2inputnode') - - MergeCNetworks = pe.Node( - interface=cmtk.MergeCNetworks(), name="MergeCNetworks") - - l2datasink = pe.Node(interface=nio.DataSink(), name="l2datasink") - l2datasink.inputs.base_directory = output_dir - l2datasink.inputs.container = group_id - - l2pipeline = pe.Workflow(name="l2output_" + group_id) - l2pipeline.base_dir = op.join(output_dir, 'l2output') - l2pipeline.connect([(group_infosource, l2infosource, [('group_id', - 'group_id')])]) - - l2pipeline.connect([ - (l2infosource, l2source, [('group_id', 'group_id')]), - (l2source, l2inputnode, [('CFFfiles', 'CFFfiles')]), - (l2source, l2inputnode, [('CSVmatrices', 'CSVmatrices')]), - (l2source, l2inputnode, [('CSVnodal', 'CSVnodal')]), - (l2source, l2inputnode, [('CSVglobal', 'CSVglobal')]), - (l2source, l2inputnode, [('CSVfibers', 'CSVfibers')]), - ]) - - l2pipeline.connect([(l2inputnode, MergeCNetworks, [('CFFfiles', - 'in_files')])]) - - l2pipeline.connect([(group_infosource, MergeCNetworks, [('group_id', - 'out_file')])]) - l2pipeline.connect([(MergeCNetworks, l2datasink, [('connectome_file', - '@l2output')])]) - - AddCSVColumn_node = pe.Node( - interface=misc.AddCSVColumn(), name="AddCSVColumn_node") - AddCSVColumn_node.inputs.extra_column_heading = 'group' - AddCSVColumn_global = AddCSVColumn_node.clone(name="AddCSVColumn_global") - AddCSVColumn_matrices = AddCSVColumn_node.clone( - name="AddCSVColumn_matrices") - AddCSVColumn_fibers = AddCSVColumn_node.clone(name="AddCSVColumn_fibers") - - concat_csv_interface = Function( - input_names=["in_files"], - output_names=["out_name"], - function=concatcsv) - - concat_node_csvs = pe.Node( - interface=concat_csv_interface, name='concat_node_csvs') - 
concat_global_csvs = pe.Node( - interface=concat_csv_interface, name='concat_global_csvs') - concat_matrix_csvs = pe.Node( - interface=concat_csv_interface, name='concat_matrix_csvs') - concat_fiber_csvs = pe.Node( - interface=concat_csv_interface, name='concat_fiber_csvs') - - l2pipeline.connect([(l2inputnode, concat_node_csvs, [('CSVnodal', - 'in_files')])]) - l2pipeline.connect([(concat_node_csvs, AddCSVColumn_node, [('out_name', - 'in_file')])]) - l2pipeline.connect([(group_infosource, AddCSVColumn_node, - [('group_id', 'extra_field')])]) - l2pipeline.connect([(AddCSVColumn_node, l2datasink, - [('csv_file', '@l2output.node_csv')])]) - l2pipeline.connect([(group_infosource, l2datasink, [('group_id', - '@group_id')])]) - - l2pipeline.connect([(l2inputnode, concat_global_csvs, [('CSVglobal', - 'in_files')])]) - l2pipeline.connect([(concat_global_csvs, AddCSVColumn_global, - [('out_name', 'in_file')])]) - l2pipeline.connect([(group_infosource, AddCSVColumn_global, - [('group_id', 'extra_field')])]) - l2pipeline.connect([(AddCSVColumn_global, l2datasink, - [('csv_file', '@l2output.global_csv')])]) - - l2pipeline.connect([(l2inputnode, concat_matrix_csvs, [('CSVmatrices', - 'in_files')])]) - l2pipeline.connect([(concat_matrix_csvs, AddCSVColumn_matrices, - [('out_name', 'in_file')])]) - l2pipeline.connect([(group_infosource, AddCSVColumn_matrices, - [('group_id', 'extra_field')])]) - l2pipeline.connect([(AddCSVColumn_matrices, l2datasink, - [('csv_file', '@l2output.cmatrices_csv')])]) - - l2pipeline.connect([(l2inputnode, concat_fiber_csvs, [('CSVmatrices', - 'in_files')])]) - l2pipeline.connect([(concat_fiber_csvs, AddCSVColumn_fibers, - [('out_name', 'in_file')])]) - l2pipeline.connect([(group_infosource, AddCSVColumn_fibers, - [('group_id', 'extra_field')])]) - l2pipeline.connect([(AddCSVColumn_fibers, l2datasink, - [('csv_file', '@l2output.fibers_csv')])]) - return l2pipeline - - -def create_merge_group_networks_workflow(group_list, - data_dir, - subjects_dir, - output_dir, - title='group'): - """Creates a third-level pipeline to merge the Connectome File Format (CFF) outputs from each group - and combines them into a single CFF file for each group. - - Example - ------- - - >>> import nipype.workflows.dmri.connectivity.group_connectivity as groupwork - >>> from nipype.testing import example_data - >>> subjects_dir = '.' - >>> data_dir = '.' - >>> output_dir = '.' 
- >>> group_list = {} - >>> group_list['group1'] = ['subj1', 'subj2'] - >>> group_list['group2'] = ['subj3', 'subj4'] - >>> l3pipeline = groupwork.create_merge_group_networks_workflow(group_list, data_dir, subjects_dir, output_dir) - >>> l3pipeline.run() # doctest: +SKIP - - Inputs:: - - group_list: Dictionary of subject lists, keyed by group name - data_dir: Path to the data directory - subjects_dir: Path to the Freesurfer 'subjects' directory - output_dir: Path for the output files - title: String to use as a title for the output merged CFF file (default 'group') - """ - l3infosource = pe.Node( - interface=util.IdentityInterface(fields=['group_id']), - name='l3infosource') - l3infosource.inputs.group_id = list(group_list.keys()) - - l3source = pe.Node( - nio.DataGrabber(infields=['group_id'], outfields=['CFFfiles']), - name='l3source') - l3source.inputs.template_args = dict(CFFfiles=[['group_id', 'group_id']]) - l3source.inputs.template = op.join(output_dir, '%s/%s.cff') - l3source.inputs.sort_filelist = True - - l3inputnode = pe.Node( - interface=util.IdentityInterface(fields=['Group_CFFs']), - name='l3inputnode') - - MergeCNetworks_grp = pe.Node( - interface=cmtk.MergeCNetworks(), name="MergeCNetworks_grp") - MergeCNetworks_grp.inputs.out_file = title - - l3datasink = pe.Node(interface=nio.DataSink(), name="l3datasink") - l3datasink.inputs.base_directory = output_dir - - l3pipeline = pe.Workflow(name="l3output") - l3pipeline.base_dir = output_dir - l3pipeline.connect([ - (l3infosource, l3source, [('group_id', 'group_id')]), - (l3source, l3inputnode, [('CFFfiles', 'Group_CFFs')]), - ]) - - l3pipeline.connect([(l3inputnode, MergeCNetworks_grp, [('Group_CFFs', - 'in_files')])]) - l3pipeline.connect([(MergeCNetworks_grp, l3datasink, [('connectome_file', - '@l3output')])]) - return l3pipeline - - -def create_merge_group_network_results_workflow(group_list, - data_dir, - subjects_dir, - output_dir, - title='group'): - """Creates a third-level pipeline to merge the Connectome File Format (CFF) outputs from each group - and combines them into a single CFF file for each group. This version of the third-level pipeline also - concatenates the comma-separated value files for the NetworkX metrics and the connectivity matrices - into single files. - - Example - ------- - - >>> import nipype.workflows.dmri.connectivity.group_connectivity as groupwork - >>> from nipype.testing import example_data - >>> subjects_dir = '.' - >>> data_dir = '.' - >>> output_dir = '.' 
- >>> group_list = {} - >>> group_list['group1'] = ['subj1', 'subj2'] - >>> group_list['group2'] = ['subj3', 'subj4'] - >>> l3pipeline = groupwork.create_merge_group_network_results_workflow(group_list, data_dir, subjects_dir, output_dir) - >>> l3pipeline.run() # doctest: +SKIP - - Inputs:: - - group_list: Dictionary of subject lists, keyed by group name - data_dir: Path to the data directory - subjects_dir: Path to the Freesurfer 'subjects' directory - output_dir: Path for the output files - title: String to use as a title for the output merged CFF file (default 'group') - """ - l3infosource = pe.Node( - interface=util.IdentityInterface(fields=['group_id']), - name='l3infosource') - l3infosource.inputs.group_id = list(group_list.keys()) - - l3source = pe.Node( - nio.DataGrabber( - infields=['group_id'], - outfields=[ - 'CFFfiles', 'CSVnodemetrics', 'CSVglobalmetrics', 'CSVmatrices' - ]), - name='l3source') - l3source.inputs.template_args = dict( - CFFfiles=[['group_id']], - CSVnodemetrics=[['group_id']], - CSVglobalmetrics=[['group_id']], - CSVmatrices=[['group_id']]) - l3source.inputs.template = op.join(output_dir, '%s/%s') - l3source.inputs.sort_filelist = True - - l3source.inputs.field_template = dict( - CFFfiles=op.join(output_dir, '%s/*.cff'), - CSVnodemetrics=op.join(output_dir, '%s/node_csv/*.csv'), - CSVglobalmetrics=op.join(output_dir, '%s/global_csv/*.csv'), - CSVmatrices=op.join(output_dir, '%s/cmatrices_csv/*/*.csv')) - - l3inputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'Group_CFFs', 'Group_CSVnodemetrics', 'Group_CSVglobalmetrics', - 'Group_CSVmatrices' - ]), - name='l3inputnode') - - MergeCNetworks_grp = pe.Node( - interface=cmtk.MergeCNetworks(), name="MergeCNetworks_grp") - MergeCNetworks_grp.inputs.out_file = title - - l3datasink = pe.Node(interface=nio.DataSink(), name="l3datasink") - l3datasink.inputs.base_directory = output_dir - - l3pipeline = pe.Workflow(name="l3output") - l3pipeline.base_dir = output_dir - l3pipeline.connect([ - (l3infosource, l3source, [('group_id', 'group_id')]), - (l3source, l3inputnode, [('CFFfiles', 'Group_CFFs')]), - (l3source, l3inputnode, [('CSVnodemetrics', 'Group_CSVnodemetrics')]), - (l3source, l3inputnode, [('CSVglobalmetrics', - 'Group_CSVglobalmetrics')]), - (l3source, l3inputnode, [('CSVmatrices', 'Group_CSVmatrices')]), - ]) - - l3pipeline.connect([(l3inputnode, MergeCNetworks_grp, [('Group_CFFs', - 'in_files')])]) - l3pipeline.connect([(MergeCNetworks_grp, l3datasink, [('connectome_file', - '@l3output')])]) - - concat_csv_interface = Function( - input_names=["in_files"], - output_names=["out_name"], - function=concatcsv) - - concat_node_csvs = pe.Node( - interface=concat_csv_interface, name='concat_node_csvs') - concat_global_csvs = pe.Node( - interface=concat_csv_interface, name='concat_global_csvs') - concat_matrix_csvs = pe.Node( - interface=concat_csv_interface, name='concat_matrix_csvs') - - l3pipeline.connect([(l3inputnode, concat_node_csvs, - [('Group_CSVnodemetrics', 'in_files')])]) - l3pipeline.connect([(concat_node_csvs, l3datasink, - [('out_name', '@l3output.nodal_csv')])]) - - l3pipeline.connect([(l3inputnode, concat_global_csvs, - [('Group_CSVglobalmetrics', 'in_files')])]) - l3pipeline.connect([(concat_global_csvs, l3datasink, - [('out_name', '@l3output.global_csv')])]) - - l3pipeline.connect([(l3inputnode, concat_matrix_csvs, - [('Group_CSVmatrices', 'in_files')])]) - l3pipeline.connect([(concat_matrix_csvs, l3datasink, - [('out_name', '@l3output.csvmatrices')])]) - return l3pipeline - - -def 
create_average_networks_by_group_workflow(group_list, - data_dir, - subjects_dir, - output_dir, - title='group_average'): - """Creates a fourth-level pipeline to average the networks for two groups and merge them into a single - CFF file. This pipeline will also output the average networks in .gexf format, for visualization in other - graph viewers, such as Gephi. - - Example - ------- - - >>> import nipype.workflows.dmri.connectivity.group_connectivity as groupwork - >>> from nipype.testing import example_data - >>> subjects_dir = '.' - >>> data_dir = '.' - >>> output_dir = '.' - >>> group_list = {} - >>> group_list['group1'] = ['subj1', 'subj2'] - >>> group_list['group2'] = ['subj3', 'subj4'] - >>> l4pipeline = groupwork.create_average_networks_by_group_workflow(group_list, data_dir, subjects_dir, output_dir) - >>> l4pipeline.run() # doctest: +SKIP - - Inputs:: - - group_list: Dictionary of subject lists, keyed by group name - data_dir: Path to the data directory - subjects_dir: Path to the Freesurfer 'subjects' directory - output_dir: Path for the output files - title: String to use as a title for the output merged CFF file (default 'group') - """ - l4infosource = pe.Node( - interface=util.IdentityInterface(fields=['group_id1', 'group_id2']), - name='l4infosource') - try: - l4infosource.inputs.group_id1 = list(group_list.keys())[0] - l4infosource.inputs.group_id2 = list(group_list.keys())[1] - except IndexError as e: - raise_from( - Exception( - 'The create_average_networks_by_group_workflow requires 2 groups' - ), e) - - l4info = dict( - networks=[['group_id', '']], - CMatrices=[['group_id', '']], - fibmean=[['group_id', 'mean_fiber_length']], - fibdev=[['group_id', 'fiber_length_std']]) - - l4source_grp1 = pe.Node( - nio.DataGrabber(infields=['group_id'], outfields=list(l4info.keys())), - name='l4source_grp1') - l4source_grp1.inputs.template = '%s/%s' - l4source_grp1.inputs.field_template = dict( - networks=op.join(output_dir, '%s/networks/*/*%s*intersections*.pck'), - CMatrices=op.join(output_dir, '%s/cmatrix/*/*%s*.mat'), - fibmean=op.join(output_dir, '%s/mean_fiber_length/*/*%s*.mat'), - fibdev=op.join(output_dir, '%s/fiber_length_std/*/*%s*.mat')) - l4source_grp1.inputs.base_directory = output_dir - l4source_grp1.inputs.template_args = l4info - l4source_grp1.inputs.sort_filelist = True - - l4source_grp2 = l4source_grp1.clone(name='l4source_grp2') - - l4inputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'networks_grp1', 'networks_grp2', 'CMatrices_grp1', - 'CMatrices_grp2', 'fibmean_grp1', 'fibmean_grp2', 'fibdev_grp1', - 'fibdev_grp2' - ]), - name='l4inputnode') - - average_networks_grp1 = pe.Node( - interface=cmtk.AverageNetworks(), name='average_networks_grp1') - average_networks_grp2 = average_networks_grp1.clone( - 'average_networks_grp2') - - averagecff = pe.Node(interface=cmtk.CFFConverter(), name="averagecff") - averagecff.inputs.out_file = title - - merge_gpickled_averages = pe.Node( - interface=util.Merge(2), name='merge_gpickled_averages') - merge_gexf_averages = merge_gpickled_averages.clone('merge_gexf_averages') - - l4datasink = pe.Node(interface=nio.DataSink(), name="l4datasink") - l4datasink.inputs.base_directory = output_dir - - l4pipeline = pe.Workflow(name="l4output") - l4pipeline.base_dir = output_dir - l4pipeline.connect([ - (l4infosource, l4source_grp1, [('group_id1', 'group_id')]), - (l4infosource, l4source_grp2, [('group_id2', 'group_id')]), - (l4source_grp1, l4inputnode, [('CMatrices', 'CMatrices_grp1')]), - (l4source_grp2, l4inputnode, 
[('CMatrices', 'CMatrices_grp2')]), - (l4source_grp1, l4inputnode, [('networks', 'networks_grp1')]), - (l4source_grp2, l4inputnode, [('networks', 'networks_grp2')]), - (l4source_grp1, l4inputnode, [('fibmean', 'fibmean_grp1')]), - (l4source_grp2, l4inputnode, [('fibmean', 'fibmean_grp2')]), - (l4source_grp1, l4inputnode, [('fibdev', 'fibdev_grp1')]), - (l4source_grp2, l4inputnode, [('fibdev', 'fibdev_grp2')]), - ]) - - l4pipeline.connect([(l4inputnode, average_networks_grp1, [('networks_grp1', - 'in_files')])]) - l4pipeline.connect([(l4infosource, average_networks_grp1, [('group_id1', - 'group_id')])]) - - l4pipeline.connect([(l4inputnode, average_networks_grp2, [('networks_grp2', - 'in_files')])]) - l4pipeline.connect([(l4infosource, average_networks_grp2, [('group_id2', - 'group_id')])]) - - l4pipeline.connect([(average_networks_grp1, merge_gpickled_averages, - [('gpickled_groupavg', 'in1')])]) - l4pipeline.connect([(average_networks_grp2, merge_gpickled_averages, - [('gpickled_groupavg', 'in2')])]) - - l4pipeline.connect([(average_networks_grp1, merge_gexf_averages, - [('gexf_groupavg', 'in1')])]) - l4pipeline.connect([(average_networks_grp2, merge_gexf_averages, - [('gexf_groupavg', 'in2')])]) - - l4pipeline.connect([(merge_gpickled_averages, l4datasink, - [('out', '@l4output.gpickled')])]) - l4pipeline.connect([(merge_gpickled_averages, averagecff, - [('out', 'gpickled_networks')])]) - l4pipeline.connect([(averagecff, l4datasink, [('connectome_file', - '@l4output.averagecff')])]) - - l4pipeline.connect([(merge_gexf_averages, l4datasink, - [('out', '@l4output.gexf')])]) - return l4pipeline diff --git a/nipype/workflows/dmri/connectivity/nx.py b/nipype/workflows/dmri/connectivity/nx.py deleted file mode 100644 index 95159dae8f..0000000000 --- a/nipype/workflows/dmri/connectivity/nx.py +++ /dev/null @@ -1,178 +0,0 @@ -# -*- coding: utf-8 -*- -from ....pipeline import engine as pe -from ....interfaces import utility as util -from ....interfaces import cmtk as cmtk -from ....algorithms import misc as misc -from ....algorithms.misc import remove_identical_paths -from .group_connectivity import pullnodeIDs - - -def add_global_to_filename(in_file): - from nipype.utils.filemanip import split_filename - path, name, ext = split_filename(in_file) - return name + '_global' + ext - - -def add_nodal_to_filename(in_file): - from nipype.utils.filemanip import split_filename - path, name, ext = split_filename(in_file) - return name + '_nodal' + ext - - -def create_networkx_pipeline(name="networkx", extra_column_heading="subject"): - """Creates a workflow to calculate various graph measures (via NetworkX) on - an input network. The output measures are then converted to comma-separated value - text files, and an extra column / field is also added. Typically, the user would - connect the subject name to this field. 
- - Example - ------- - - >>> from nipype.workflows.dmri.connectivity.nx import create_networkx_pipeline - >>> nx = create_networkx_pipeline("networkx", "subject_id") - >>> nx.inputs.inputnode.extra_field = 'subj1' - >>> nx.inputs.inputnode.network_file = 'subj1.pck' - >>> nx.run() # doctest: +SKIP - - Inputs:: - - inputnode.extra_field - inputnode.network_file - - Outputs:: - - outputnode.network_files - outputnode.csv_files - outputnode.matlab_files - - """ - inputnode = pe.Node( - interface=util.IdentityInterface( - fields=["extra_field", "network_file"]), - name="inputnode") - - pipeline = pe.Workflow(name=name) - - ntwkMetrics = pe.Node( - interface=cmtk.NetworkXMetrics(), name="NetworkXMetrics") - Matlab2CSV_node = pe.Node( - interface=misc.Matlab2CSV(), name="Matlab2CSV_node") - MergeCSVFiles_node = pe.Node( - interface=misc.MergeCSVFiles(), name="MergeCSVFiles_node") - MergeCSVFiles_node.inputs.extra_column_heading = extra_column_heading - - Matlab2CSV_global = Matlab2CSV_node.clone(name="Matlab2CSV_global") - MergeCSVFiles_global = MergeCSVFiles_node.clone( - name="MergeCSVFiles_global") - MergeCSVFiles_global.inputs.extra_column_heading = extra_column_heading - - mergeNetworks = pe.Node(interface=util.Merge(2), name="mergeNetworks") - mergeCSVs = mergeNetworks.clone("mergeCSVs") - - pipeline.connect([(inputnode, ntwkMetrics, [("network_file", "in_file")])]) - pipeline.connect([(ntwkMetrics, Matlab2CSV_node, [("node_measures_matlab", - "in_file")])]) - pipeline.connect([(ntwkMetrics, Matlab2CSV_global, - [("global_measures_matlab", "in_file")])]) - - pipeline.connect([(Matlab2CSV_node, MergeCSVFiles_node, [("csv_files", - "in_files")])]) - pipeline.connect([(inputnode, MergeCSVFiles_node, - [(("extra_field", add_nodal_to_filename), - "out_file")])]) - pipeline.connect([(inputnode, MergeCSVFiles_node, [("extra_field", - "extra_field")])]) - pipeline.connect([(inputnode, MergeCSVFiles_node, - [(("network_file", pullnodeIDs), "row_headings")])]) - - pipeline.connect([(Matlab2CSV_global, MergeCSVFiles_global, - [("csv_files", "in_files")])]) - pipeline.connect([(Matlab2CSV_global, MergeCSVFiles_global, - [(("csv_files", remove_identical_paths), - "column_headings")])]) - # MergeCSVFiles_global.inputs.row_heading_title = 'metric' - # MergeCSVFiles_global.inputs.column_headings = ['average'] - - pipeline.connect([(inputnode, MergeCSVFiles_global, - [(("extra_field", add_global_to_filename), - "out_file")])]) - pipeline.connect([(inputnode, MergeCSVFiles_global, [("extra_field", - "extra_field")])]) - - pipeline.connect([(inputnode, mergeNetworks, [("network_file", "in1")])]) - pipeline.connect([(ntwkMetrics, mergeNetworks, [("gpickled_network_files", - "in2")])]) - - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - "network_files", "csv_files", "matlab_files", "node_csv", - "global_csv" - ]), - name="outputnode") - - pipeline.connect([(MergeCSVFiles_node, outputnode, [("csv_file", - "node_csv")])]) - pipeline.connect([(MergeCSVFiles_global, outputnode, [("csv_file", - "global_csv")])]) - - pipeline.connect([(MergeCSVFiles_node, mergeCSVs, [("csv_file", "in1")])]) - pipeline.connect([(MergeCSVFiles_global, mergeCSVs, [("csv_file", - "in2")])]) - pipeline.connect([(mergeNetworks, outputnode, [("out", "network_files")])]) - pipeline.connect([(mergeCSVs, outputnode, [("out", "csv_files")])]) - pipeline.connect([(ntwkMetrics, outputnode, [("matlab_matrix_files", - "matlab_files")])]) - return pipeline - - -def create_cmats_to_csv_pipeline(name="cmats_to_csv", - 
extra_column_heading="subject"): - """Creates a workflow to convert the outputs from CreateMatrix into a single - comma-separated value text file. An extra column / field is also added to the - text file. Typically, the user would connect the subject name to this field. - - Example - ------- - - >>> from nipype.workflows.dmri.connectivity.nx import create_cmats_to_csv_pipeline - >>> csv = create_cmats_to_csv_pipeline("cmats_to_csv", "subject_id") - >>> csv.inputs.inputnode.extra_field = 'subj1' - >>> csv.inputs.inputnode.matlab_matrix_files = ['subj1_cmatrix.mat', 'subj1_mean_fiber_length.mat', 'subj1_median_fiber_length.mat', 'subj1_fiber_length_std.mat'] - >>> csv.run() # doctest: +SKIP - - Inputs:: - - inputnode.extra_field - inputnode.matlab_matrix_files - - Outputs:: - - outputnode.csv_file - - """ - inputnode = pe.Node( - interface=util.IdentityInterface( - fields=["extra_field", "matlab_matrix_files"]), - name="inputnode") - - pipeline = pe.Workflow(name=name) - - Matlab2CSV = pe.MapNode( - interface=misc.Matlab2CSV(), name="Matlab2CSV", iterfield=["in_file"]) - MergeCSVFiles = pe.Node( - interface=misc.MergeCSVFiles(), name="MergeCSVFiles") - MergeCSVFiles.inputs.extra_column_heading = extra_column_heading - - pipeline.connect([(inputnode, Matlab2CSV, [("matlab_matrix_files", - "in_file")])]) - pipeline.connect([(Matlab2CSV, MergeCSVFiles, [("csv_files", - "in_files")])]) - pipeline.connect([(inputnode, MergeCSVFiles, [("extra_field", - "extra_field")])]) - - outputnode = pe.Node( - interface=util.IdentityInterface(fields=["csv_file"]), - name="outputnode") - - pipeline.connect([(MergeCSVFiles, outputnode, [("csv_file", "csv_file")])]) - return pipeline diff --git a/nipype/workflows/dmri/dipy/__init__.py b/nipype/workflows/dmri/dipy/__init__.py deleted file mode 100644 index 354ba7a7e6..0000000000 --- a/nipype/workflows/dmri/dipy/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -from __future__ import absolute_import -from .denoise import nlmeans_pipeline diff --git a/nipype/workflows/dmri/dipy/denoise.py b/nipype/workflows/dmri/dipy/denoise.py deleted file mode 100644 index a45f507b3c..0000000000 --- a/nipype/workflows/dmri/dipy/denoise.py +++ /dev/null @@ -1,121 +0,0 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -from builtins import range -from ....pipeline import engine as pe -from ....interfaces import utility as niu -from ....interfaces import dipy - - -def nlmeans_pipeline(name='Denoise', - params={ - 'patch_radius': 1, - 'block_radius': 5 - }): - """ - Workflow that performs nlmeans denoising - - Example - ------- - - >>> from nipype.workflows.dmri.dipy.denoise import nlmeans_pipeline - >>> denoise = nlmeans_pipeline() - >>> denoise.inputs.inputnode.in_file = 'diffusion.nii' - >>> denoise.inputs.inputnode.in_mask = 'mask.nii' - >>> denoise.run() # doctest: +SKIP - - - """ - inputnode = pe.Node( - niu.IdentityInterface(fields=['in_file', 'in_mask']), name='inputnode') - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_file']), name='outputnode') - - nmask = pe.Node( - niu.Function( - input_names=['in_file', 'in_mask'], - output_names=['out_file'], - function=bg_mask), - name='NoiseMsk') - nlmeans = pe.Node(dipy.Denoise(**params), name='NLMeans') - - wf = pe.Workflow(name=name) - 
wf.connect([(inputnode, nmask, [ - ('in_file', 'in_file'), ('in_mask', 'in_mask') - ]), (inputnode, nlmeans, [('in_file', 'in_file'), ('in_mask', 'in_mask')]), - (nmask, nlmeans, [('out_file', 'noise_mask')]), - (nlmeans, outputnode, [('out_file', 'out_file')])]) - return wf - - -def csf_mask(in_file, in_mask, out_file=None): - """ - Artesanal mask of csf in T2w-like images - """ - import nibabel as nb - import numpy as np - from scipy.ndimage import binary_erosion, binary_opening, label - import scipy.ndimage as nd - import os.path as op - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, ext = op.splitext(op.basename(in_file)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath("%s_csfmask%s" % (fname, ext)) - - im = nb.load(in_file, mmap=NUMPY_MMAP) - hdr = im.header.copy() - hdr.set_data_dtype(np.uint8) - hdr.set_xyzt_units('mm') - imdata = im.get_data() - msk = nb.load(in_mask, mmap=NUMPY_MMAP).get_data() - msk = binary_erosion(msk, structure=np.ones((15, 15, 10))).astype(np.uint8) - thres = np.percentile(imdata[msk > 0].reshape(-1), 90.0) - imdata[imdata < thres] = 0 - imdata = imdata * msk - imdata[imdata > 0] = 1 - imdata = binary_opening( - imdata, structure=np.ones((2, 2, 2))).astype(np.uint8) - - label_im, nb_labels = label(imdata) - sizes = nd.sum(imdata, label_im, list(range(nb_labels + 1))) - mask_size = sizes != sizes.max() - remove_pixel = mask_size[label_im] - label_im[remove_pixel] = 0 - label_im[label_im > 0] = 1 - nb.Nifti1Image(label_im.astype(np.uint8), im.affine, - hdr).to_filename(out_file) - return out_file - - -def bg_mask(in_file, in_mask, out_file=None): - """ - Rough mask of background from brain masks - """ - import nibabel as nb - import numpy as np - from scipy.ndimage import binary_dilation - import scipy.ndimage as nd - import os.path as op - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, ext = op.splitext(op.basename(in_file)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath("%s_bgmask%s" % (fname, ext)) - - im = nb.load(in_file, mmap=NUMPY_MMAP) - hdr = im.header.copy() - hdr.set_data_dtype(np.uint8) - hdr.set_xyzt_units('mm') - msk = nb.load(in_mask, mmap=NUMPY_MMAP).get_data() - msk = 1 - binary_dilation(msk, structure=np.ones((20, 20, 20))) - nb.Nifti1Image(msk.astype(np.uint8), im.affine, hdr).to_filename(out_file) - return out_file diff --git a/nipype/workflows/dmri/dtitk/__init__.py b/nipype/workflows/dmri/dtitk/__init__.py deleted file mode 100644 index 02dbf25549..0000000000 --- a/nipype/workflows/dmri/dtitk/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -from __future__ import absolute_import -from .tensor_registration import (affine_tensor_pipeline, - diffeomorphic_tensor_pipeline) diff --git a/nipype/workflows/dmri/dtitk/tensor_registration.py b/nipype/workflows/dmri/dtitk/tensor_registration.py deleted file mode 100644 index faae608a44..0000000000 --- a/nipype/workflows/dmri/dtitk/tensor_registration.py +++ /dev/null @@ -1,144 +0,0 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -from ....pipeline import engine as pe -from ....interfaces import utility as niu -from ....interfaces import dtitk - - -def affine_tensor_pipeline(name='AffTen'): - - 
""" - Workflow that performs a linear registration - (Rigid followed by Affine) - - Example - ------- - - >>> from nipype.workflows.dmri.dtitk.tensor_registration import affine_tensor_pipeline - >>> affine = affine_tensor_pipeline() - >>> affine.inputs.inputnode.fixed_file = 'im1.nii' - >>> affine.inputs.inputnode.moving_file = 'im2.nii' - >>> affine.run() # doctest: +SKIP - - - """ - inputnode = pe.Node(niu.IdentityInterface( - fields=['fixed_file', 'moving_file']), - name='inputnode') - outputnode = pe.Node(niu.IdentityInterface( - fields=['out_file', 'out_file_xfm']), - name='outputnode') - - rigid_node = pe.Node(dtitk.Rigid(), name='rigid_node') - affine_node = pe.Node(dtitk.Affine(), name='affine_node') - - wf = pe.Workflow(name=name) - - wf.connect(inputnode, 'fixed_file', rigid_node, 'fixed_file') - wf.connect(inputnode, 'moving_file', rigid_node, 'moving_file') - wf.connect(rigid_node, 'out_file_xfm', affine_node, 'initialize_xfm') - wf.connect(inputnode, 'fixed_file', affine_node, 'fixed_file') - wf.connect(inputnode, 'moving_file', affine_node, 'moving_file') - wf.connect(affine_node, 'out_file', outputnode, 'out_file') - wf.connect(affine_node, 'out_file_xfm', outputnode, 'out_file_xfm') - - return wf - - -def diffeomorphic_tensor_pipeline(name='DiffeoTen', - params={'array_size': (128, 128, 64)}): - """ - Workflow that performs a diffeomorphic registration - (Rigid and Affine followed by Diffeomorphic) - Note: the requirements for a diffeomorphic registration specify that - the dimension 0 is a power of 2 so images are resliced prior to - registration. Remember to move origin and reslice prior to applying xfm to - another file! - - Example - ------- - - >>> from nipype.workflows.dmri.dtitk.tensor_registration import diffeomorphic_tensor_pipeline - >>> diffeo = diffeomorphic_tensor_pipeline() - >>> diffeo.inputs.inputnode.fixed_file = 'im1.nii' - >>> diffeo.inputs.inputnode.moving_file = 'im2.nii' - >>> diffeo.run() # doctest: +SKIP - - - """ - inputnode = pe.Node(niu.IdentityInterface( - fields=['fixed_file', 'moving_file']), - name='inputnode') - outputnode = pe.Node(niu.IdentityInterface( - fields=['out_file', 'out_file_xfm', - 'fixed_resliced', 'moving_resliced']), - name='outputnode') - origin_node_fixed = pe.Node(dtitk.TVAdjustVoxSp(origin=(0, 0, 0)), - name='origin_node_fixed') - origin_node_moving = origin_node_fixed.clone(name='origin_node_moving') - reslice_node_pow2 = pe.Node(dtitk.TVResample( - origin=(0, 0, 0), - array_size=params['array_size']), - name='reslice_node_pow2') - reslice_node_moving = pe.Node(dtitk.TVResample(), - name='reslice_node_moving') - mask_node = pe.Node(dtitk.BinThresh(lower_bound=0.01, upper_bound=100, - inside_value=1, outside_value=0), - name='mask_node') - rigid_node = pe.Node(dtitk.Rigid(), name='rigid_node') - affine_node = pe.Node(dtitk.Affine(), name='affine_node') - diffeo_node = pe.Node(dtitk.Diffeo(n_iters=6, ftol=0.002), - name='diffeo_node') - compose_xfm_node = pe.Node(dtitk.ComposeXfm(), name='compose_xfm_node') - apply_xfm_node = pe.Node(dtitk.DiffeoSymTensor3DVol(), - name='apply_xfm_node') - adjust_vs_node_to_input = pe.Node(dtitk.TVAdjustVoxSp(), - name='adjust_vs_node_to_input') - reslice_node_to_input = pe.Node(dtitk.TVResample(), - name='reslice_node_to_input') - input_fa = pe.Node(dtitk.TVtool(in_flag='fa'), name='input_fa') - - wf = pe.Workflow(name=name) - - # calculate input FA image for origin reference - wf.connect(inputnode, 'fixed_file', input_fa, 'in_file') - # Reslice input images - wf.connect(inputnode, 
'fixed_file', origin_node_fixed, 'in_file') - wf.connect(origin_node_fixed, 'out_file', reslice_node_pow2, 'in_file') - wf.connect(reslice_node_pow2, 'out_file', - reslice_node_moving, 'target_file') - wf.connect(inputnode, 'moving_file', origin_node_moving, 'in_file') - wf.connect(origin_node_moving, 'out_file', reslice_node_moving, 'in_file') - # Rigid registration - wf.connect(reslice_node_pow2, 'out_file', rigid_node, 'fixed_file') - wf.connect(reslice_node_moving, 'out_file', rigid_node, 'moving_file') - # Affine registration - wf.connect(rigid_node, 'out_file_xfm', affine_node, 'initialize_xfm') - wf.connect(reslice_node_pow2, 'out_file', affine_node, 'fixed_file') - wf.connect(reslice_node_moving, 'out_file', affine_node, 'moving_file') - # Diffeo registration - wf.connect(reslice_node_pow2, 'out_file', mask_node, 'in_file') - wf.connect(reslice_node_pow2, 'out_file', diffeo_node, 'fixed_file') - wf.connect(affine_node, 'out_file', diffeo_node, 'moving_file') - wf.connect(mask_node, 'out_file', diffeo_node, 'mask_file') - # Compose transform - wf.connect(diffeo_node, 'out_file_xfm', compose_xfm_node, 'in_df') - wf.connect(affine_node, 'out_file_xfm', compose_xfm_node, 'in_aff') - # Apply transform - wf.connect(reslice_node_moving, 'out_file', apply_xfm_node, 'in_file') - wf.connect(compose_xfm_node, 'out_file', apply_xfm_node, 'transform') - # Move origin and reslice to match original fixed input image - wf.connect(apply_xfm_node, 'out_file', adjust_vs_node_to_input, 'in_file') - wf.connect(input_fa, 'out_file', adjust_vs_node_to_input, 'target_file') - wf.connect(adjust_vs_node_to_input, 'out_file', reslice_node_to_input, 'in_file') - wf.connect(input_fa, 'out_file', reslice_node_to_input, 'target_file') - # Send to output - wf.connect(reslice_node_to_input, 'out_file', outputnode, 'out_file') - wf.connect(compose_xfm_node, 'out_file', outputnode, 'out_file_xfm') - wf.connect(reslice_node_pow2, 'out_file', outputnode, 'fixed_resliced') - wf.connect(reslice_node_moving, 'out_file', outputnode, 'moving_resliced') - - return wf diff --git a/nipype/workflows/dmri/fsl/__init__.py b/nipype/workflows/dmri/fsl/__init__.py deleted file mode 100644 index 66be352b84..0000000000 --- a/nipype/workflows/dmri/fsl/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -from .dti import create_bedpostx_pipeline, bedpostx_parallel - -from .artifacts import (all_fmb_pipeline, all_peb_pipeline, all_fsl_pipeline, - hmc_pipeline, ecc_pipeline, sdc_fmb, sdc_peb, - remove_bias) - -from .epi import (fieldmap_correction, topup_correction, - create_eddy_correct_pipeline, create_epidewarp_pipeline, - create_dmri_preprocessing) - -from .tbss import (create_tbss_1_preproc, create_tbss_2_reg, - create_tbss_3_postreg, create_tbss_4_prestats, - create_tbss_all, create_tbss_non_FA) diff --git a/nipype/workflows/dmri/fsl/artifacts.py b/nipype/workflows/dmri/fsl/artifacts.py deleted file mode 100644 index 0bdd0a5d66..0000000000 --- a/nipype/workflows/dmri/fsl/artifacts.py +++ /dev/null @@ -1,1061 +0,0 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from ....interfaces.io import JSONFileGrabber -from ....interfaces import utility as niu -from ....interfaces import ants -from ....interfaces import fsl -from ....pipeline import engine as pe -from ...data import 
get_flirt_schedule - -from .utils import ( - b0_indices, - time_avg, - apply_all_corrections, - b0_average, - hmc_split, - dwi_flirt, - eddy_rotate_bvecs, - rotate_bvecs, - insert_mat, - extract_bval, - recompose_dwi, - recompose_xfm, - siemens2rads, - rads2radsec, - demean_image, - cleanup_edge_pipeline, - add_empty_vol, - vsm2warp, - compute_readout, -) - - -def all_fmb_pipeline(name='hmc_sdc_ecc', fugue_params=dict(smooth3d=2.0)): - """ - Builds a pipeline including three artifact corrections: head-motion - correction (HMC), susceptibility-derived distortion correction (SDC), - and Eddy currents-derived distortion correction (ECC). - - The displacement fields from each kind of distortions are combined. Thus, - only one interpolation occurs between input data and result. - - .. warning:: this workflow rotates the gradients table (*b*-vectors) - [Leemans09]_. - - - Examples - -------- - - >>> from nipype.workflows.dmri.fsl.artifacts import all_fmb_pipeline - >>> allcorr = all_fmb_pipeline() - >>> allcorr.inputs.inputnode.in_file = 'epi.nii' - >>> allcorr.inputs.inputnode.in_bval = 'diffusion.bval' - >>> allcorr.inputs.inputnode.in_bvec = 'diffusion.bvec' - >>> allcorr.inputs.inputnode.bmap_mag = 'magnitude.nii' - >>> allcorr.inputs.inputnode.bmap_pha = 'phase.nii' - >>> allcorr.inputs.inputnode.epi_param = 'epi_param.txt' - >>> allcorr.run() # doctest: +SKIP - - """ - inputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'in_file', 'in_bvec', 'in_bval', 'bmap_pha', 'bmap_mag', - 'epi_param' - ]), - name='inputnode') - - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_file', 'out_mask', 'out_bvec']), - name='outputnode') - - list_b0 = pe.Node( - niu.Function( - input_names=['in_bval'], - output_names=['out_idx'], - function=b0_indices), - name='B0indices') - - avg_b0_0 = pe.Node( - niu.Function( - input_names=['in_file', 'index'], - output_names=['out_file'], - function=time_avg), - name='b0_avg_pre') - avg_b0_1 = pe.Node( - niu.Function( - input_names=['in_file', 'index'], - output_names=['out_file'], - function=time_avg), - name='b0_avg_post') - - bet_dwi0 = pe.Node( - fsl.BET(frac=0.3, mask=True, robust=True), name='bet_dwi_pre') - bet_dwi1 = pe.Node( - fsl.BET(frac=0.3, mask=True, robust=True), name='bet_dwi_post') - - hmc = hmc_pipeline() - sdc = sdc_fmb(fugue_params=fugue_params) - ecc = ecc_pipeline() - unwarp = apply_all_corrections() - - wf = pe.Workflow(name=name) - wf.connect( - [(inputnode, hmc, - [('in_file', 'inputnode.in_file'), ('in_bvec', 'inputnode.in_bvec'), - ('in_bval', 'inputnode.in_bval')]), (inputnode, list_b0, - [('in_bval', 'in_bval')]), - (inputnode, avg_b0_0, [('in_file', 'in_file')]), (list_b0, avg_b0_0, - [('out_idx', - 'index')]), - (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), (bet_dwi0, hmc, [ - ('mask_file', 'inputnode.in_mask') - ]), (hmc, sdc, [('outputnode.out_file', 'inputnode.in_file')]), - (bet_dwi0, sdc, - [('mask_file', 'inputnode.in_mask')]), (inputnode, sdc, [ - ('bmap_pha', 'inputnode.bmap_pha'), - ('bmap_mag', 'inputnode.bmap_mag'), ('epi_param', - 'inputnode.settings') - ]), (list_b0, sdc, [('out_idx', 'inputnode.in_ref')]), (hmc, ecc, [ - ('outputnode.out_xfms', 'inputnode.in_xfms') - ]), (inputnode, ecc, - [('in_file', 'inputnode.in_file'), - ('in_bval', 'inputnode.in_bval')]), (bet_dwi0, ecc, [ - ('mask_file', 'inputnode.in_mask') - ]), (ecc, avg_b0_1, [('outputnode.out_file', - 'in_file')]), (list_b0, avg_b0_1, - [('out_idx', 'index')]), - (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), (inputnode, unwarp, [ - ('in_file', 
'inputnode.in_dwi') - ]), (hmc, unwarp, - [('outputnode.out_xfms', 'inputnode.in_hmc')]), (ecc, unwarp, [ - ('outputnode.out_xfms', 'inputnode.in_ecc') - ]), (sdc, unwarp, [('outputnode.out_warp', - 'inputnode.in_sdc')]), (hmc, outputnode, [ - ('outputnode.out_bvec', 'out_bvec') - ]), (unwarp, outputnode, - [('outputnode.out_file', - 'out_file')]), (bet_dwi1, outputnode, - [('mask_file', - 'out_mask')])]) - return wf - - -def all_peb_pipeline(name='hmc_sdc_ecc', - epi_params=dict( - echospacing=0.77e-3, - acc_factor=3, - enc_dir='y-', - epi_factor=1), - altepi_params=dict( - echospacing=0.77e-3, - acc_factor=3, - enc_dir='y', - epi_factor=1)): - """ - Builds a pipeline including three artifact corrections: head-motion - correction (HMC), susceptibility-derived distortion correction (SDC), - and Eddy currents-derived distortion correction (ECC). - - .. warning:: this workflow rotates the gradients table (*b*-vectors) - [Leemans09]_. - - - Examples - -------- - - >>> from nipype.workflows.dmri.fsl.artifacts import all_peb_pipeline - >>> allcorr = all_peb_pipeline() - >>> allcorr.inputs.inputnode.in_file = 'epi.nii' - >>> allcorr.inputs.inputnode.alt_file = 'epi_rev.nii' - >>> allcorr.inputs.inputnode.in_bval = 'diffusion.bval' - >>> allcorr.inputs.inputnode.in_bvec = 'diffusion.bvec' - >>> allcorr.run() # doctest: +SKIP - - """ - inputnode = pe.Node( - niu.IdentityInterface( - fields=['in_file', 'in_bvec', 'in_bval', 'alt_file']), - name='inputnode') - - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_file', 'out_mask', 'out_bvec']), - name='outputnode') - - avg_b0_0 = pe.Node( - niu.Function( - input_names=['in_dwi', 'in_bval'], - output_names=['out_file'], - function=b0_average), - name='b0_avg_pre') - avg_b0_1 = pe.Node( - niu.Function( - input_names=['in_dwi', 'in_bval'], - output_names=['out_file'], - function=b0_average), - name='b0_avg_post') - bet_dwi0 = pe.Node( - fsl.BET(frac=0.3, mask=True, robust=True), name='bet_dwi_pre') - bet_dwi1 = pe.Node( - fsl.BET(frac=0.3, mask=True, robust=True), name='bet_dwi_post') - - hmc = hmc_pipeline() - sdc = sdc_peb(epi_params=epi_params, altepi_params=altepi_params) - ecc = ecc_pipeline() - - unwarp = apply_all_corrections() - - wf = pe.Workflow(name=name) - wf.connect( - [(inputnode, hmc, - [('in_file', 'inputnode.in_file'), ('in_bvec', 'inputnode.in_bvec'), - ('in_bval', 'inputnode.in_bval')]), (inputnode, avg_b0_0, - [('in_file', 'in_dwi'), - ('in_bval', 'in_bval')]), - (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), (bet_dwi0, hmc, [ - ('mask_file', 'inputnode.in_mask') - ]), (hmc, sdc, [('outputnode.out_file', 'inputnode.in_file')]), - (bet_dwi0, sdc, - [('mask_file', 'inputnode.in_mask')]), (inputnode, sdc, [ - ('in_bval', 'inputnode.in_bval'), ('alt_file', - 'inputnode.alt_file') - ]), (inputnode, ecc, [('in_file', 'inputnode.in_file'), - ('in_bval', 'inputnode.in_bval')]), - (bet_dwi0, ecc, [('mask_file', 'inputnode.in_mask')]), (hmc, ecc, [ - ('outputnode.out_xfms', 'inputnode.in_xfms') - ]), (ecc, avg_b0_1, [('outputnode.out_file', - 'in_dwi')]), (inputnode, avg_b0_1, - [('in_bval', 'in_bval')]), - (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), (inputnode, unwarp, [ - ('in_file', 'inputnode.in_dwi') - ]), (hmc, unwarp, - [('outputnode.out_xfms', 'inputnode.in_hmc')]), (ecc, unwarp, [ - ('outputnode.out_xfms', 'inputnode.in_ecc') - ]), (sdc, unwarp, [('outputnode.out_warp', - 'inputnode.in_sdc')]), (hmc, outputnode, [ - ('outputnode.out_bvec', 'out_bvec') - ]), (unwarp, outputnode, - [('outputnode.out_file', - 
'out_file')]), (bet_dwi1, outputnode, - [('mask_file', - 'out_mask')])]) - return wf - - -def all_fsl_pipeline(name='fsl_all_correct', - epi_params=dict( - echospacing=0.77e-3, acc_factor=3, enc_dir='y-'), - altepi_params=dict( - echospacing=0.77e-3, acc_factor=3, enc_dir='y')): - """ - Workflow that integrates FSL ``topup`` and ``eddy``. - - - .. warning:: this workflow rotates the gradients table (*b*-vectors) - [Leemans09]_. - - - .. warning:: this workflow does not perform jacobian modulation of each - *DWI* [Jones10]_. - - - Examples - -------- - - >>> from nipype.workflows.dmri.fsl.artifacts import all_fsl_pipeline - >>> allcorr = all_fsl_pipeline() - >>> allcorr.inputs.inputnode.in_file = 'epi.nii' - >>> allcorr.inputs.inputnode.alt_file = 'epi_rev.nii' - >>> allcorr.inputs.inputnode.in_bval = 'diffusion.bval' - >>> allcorr.inputs.inputnode.in_bvec = 'diffusion.bvec' - >>> allcorr.run() # doctest: +SKIP - - """ - - inputnode = pe.Node( - niu.IdentityInterface( - fields=['in_file', 'in_bvec', 'in_bval', 'alt_file']), - name='inputnode') - - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_file', 'out_mask', 'out_bvec']), - name='outputnode') - - def gen_index(in_file): - import numpy as np - import nibabel as nb - import os - from nipype.utils import NUMPY_MMAP - out_file = os.path.abspath('index.txt') - vols = nb.load(in_file, mmap=NUMPY_MMAP).get_data().shape[-1] - np.savetxt(out_file, np.ones((vols, )).T) - return out_file - - gen_idx = pe.Node( - niu.Function( - input_names=['in_file'], - output_names=['out_file'], - function=gen_index), - name='gen_index') - avg_b0_0 = pe.Node( - niu.Function( - input_names=['in_dwi', 'in_bval'], - output_names=['out_file'], - function=b0_average), - name='b0_avg_pre') - bet_dwi0 = pe.Node( - fsl.BET(frac=0.3, mask=True, robust=True), name='bet_dwi_pre') - - sdc = sdc_peb(epi_params=epi_params, altepi_params=altepi_params) - ecc = pe.Node(fsl.Eddy(method='jac'), name='fsl_eddy') - rot_bvec = pe.Node( - niu.Function( - input_names=['in_bvec', 'eddy_params'], - output_names=['out_file'], - function=eddy_rotate_bvecs), - name='Rotate_Bvec') - avg_b0_1 = pe.Node( - niu.Function( - input_names=['in_dwi', 'in_bval'], - output_names=['out_file'], - function=b0_average), - name='b0_avg_post') - bet_dwi1 = pe.Node( - fsl.BET(frac=0.3, mask=True, robust=True), name='bet_dwi_post') - - wf = pe.Workflow(name=name) - wf.connect( - [(inputnode, avg_b0_0, [('in_file', 'in_dwi'), ('in_bval', - 'in_bval')]), - (avg_b0_0, bet_dwi0, [('out_file', 'in_file')]), (bet_dwi0, sdc, [ - ('mask_file', 'inputnode.in_mask') - ]), (inputnode, sdc, [('in_file', 'inputnode.in_file'), - ('alt_file', 'inputnode.alt_file'), - ('in_bval', 'inputnode.in_bval')]), - (sdc, ecc, [('topup.out_enc_file', 'in_acqp'), - ('topup.out_fieldcoef', 'in_topup_fieldcoef'), - ('topup.out_movpar', - 'in_topup_movpar')]), (bet_dwi0, ecc, [('mask_file', - 'in_mask')]), - (inputnode, gen_idx, [('in_file', 'in_file')]), (inputnode, ecc, [ - ('in_file', 'in_file'), ('in_bval', 'in_bval'), ('in_bvec', - 'in_bvec') - ]), (gen_idx, ecc, - [('out_file', 'in_index')]), (inputnode, rot_bvec, [ - ('in_bvec', 'in_bvec') - ]), (ecc, rot_bvec, - [('out_parameter', 'eddy_params')]), (ecc, avg_b0_1, [ - ('out_corrected', 'in_dwi') - ]), (inputnode, avg_b0_1, [('in_bval', 'in_bval')]), - (avg_b0_1, bet_dwi1, [('out_file', 'in_file')]), (ecc, outputnode, [ - ('out_corrected', 'out_file') - ]), (rot_bvec, outputnode, - [('out_file', 'out_bvec')]), (bet_dwi1, outputnode, - [('mask_file', 'out_mask')])]) - 
return wf - - -def hmc_pipeline(name='motion_correct'): - """ - HMC stands for head-motion correction. - - Creates a pipeline that corrects for head motion artifacts in dMRI - sequences. - It takes a series of diffusion weighted images and rigidly co-registers - them to one reference image. Finally, the `b`-matrix is rotated accordingly - [Leemans09]_ making use of the rotation matrix obtained by FLIRT. - - Search angles have been limited to 4 degrees, based on results in - [Yendiki13]_. - - A list of rigid transformation matrices is provided, so that transforms - can be chained. - This is useful to correct for artifacts with only one interpolation process - (as previously discussed `here - `_), - and also to compute nuisance regressors as proposed by [Yendiki13]_. - - .. warning:: This workflow rotates the `b`-vectors, so please be advised - that not all the dicom converters ensure the consistency between the - resulting nifti orientation and the gradients table (e.g. dcm2nii - checks it). - - .. admonition:: References - - .. [Leemans09] Leemans A, and Jones DK, `The B-matrix must be rotated - when correcting for subject motion in DTI data - `_, - Magn Reson Med. 61(6):1336-49. 2009. doi: 10.1002/mrm.21890. - - .. [Yendiki13] Yendiki A et al., `Spurious group differences due to head - motion in a diffusion MRI study - `_. - Neuroimage. 21(88C):79-90. 2013. doi: 10.1016/j.neuroimage.2013.11.027 - - Example - ------- - - >>> from nipype.workflows.dmri.fsl.artifacts import hmc_pipeline - >>> hmc = hmc_pipeline() - >>> hmc.inputs.inputnode.in_file = 'diffusion.nii' - >>> hmc.inputs.inputnode.in_bvec = 'diffusion.bvec' - >>> hmc.inputs.inputnode.in_bval = 'diffusion.bval' - >>> hmc.inputs.inputnode.in_mask = 'mask.nii' - >>> hmc.run() # doctest: +SKIP - - Inputs:: - - inputnode.in_file - input dwi file - inputnode.in_mask - weights mask of reference image (a file with data \ -range in [0.0, 1.0], indicating the weight of each voxel when computing the \ -metric. 
- inputnode.in_bval - b-values file - inputnode.in_bvec - gradients file (b-vectors) - inputnode.ref_num (optional, default=0) index of the b0 volume that \ -should be taken as reference - - Outputs:: - - outputnode.out_file - corrected dwi file - outputnode.out_bvec - rotated gradient vectors table - outputnode.out_xfms - list of transformation matrices - - """ - params = dict( - dof=6, - bgvalue=0, - save_log=True, - no_search=True, - # cost='mutualinfo', cost_func='mutualinfo', bins=64, - schedule=get_flirt_schedule('hmc')) - - inputnode = pe.Node( - niu.IdentityInterface( - fields=['in_file', 'ref_num', 'in_bvec', 'in_bval', 'in_mask']), - name='inputnode') - split = pe.Node( - niu.Function( - output_names=['out_ref', 'out_mov', 'out_bval', 'volid'], - input_names=['in_file', 'in_bval', 'ref_num'], - function=hmc_split), - name='SplitDWI') - flirt = dwi_flirt(flirt_param=params) - insmat = pe.Node( - niu.Function( - input_names=['inlist', 'volid'], - output_names=['out'], - function=insert_mat), - name='InsertRefmat') - rot_bvec = pe.Node( - niu.Function( - function=rotate_bvecs, - input_names=['in_bvec', 'in_matrix'], - output_names=['out_file']), - name='Rotate_Bvec') - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_file', 'out_bvec', 'out_xfms']), - name='outputnode') - - wf = pe.Workflow(name=name) - wf.connect([(inputnode, split, - [('in_file', 'in_file'), ('in_bval', 'in_bval'), - ('ref_num', 'ref_num')]), (inputnode, flirt, [ - ('in_mask', 'inputnode.ref_mask') - ]), (split, flirt, [('out_ref', 'inputnode.reference'), - ('out_mov', 'inputnode.in_file'), - ('out_bval', 'inputnode.in_bval')]), - (flirt, insmat, [('outputnode.out_xfms', 'inlist')]), - (split, insmat, [('volid', 'volid')]), (inputnode, rot_bvec, [ - ('in_bvec', 'in_bvec') - ]), (insmat, rot_bvec, - [('out', 'in_matrix')]), (rot_bvec, outputnode, - [('out_file', 'out_bvec')]), - (flirt, outputnode, [('outputnode.out_file', - 'out_file')]), (insmat, outputnode, - [('out', 'out_xfms')])]) - return wf - - -def ecc_pipeline(name='eddy_correct'): - """ - ECC stands for Eddy currents correction. - - Creates a pipeline that corrects for artifacts induced by Eddy currents in - dMRI sequences. - It takes a series of diffusion weighted images and linearly co-registers - them to one reference image (the average of all b0s in the dataset). - - DWIs are also modulated by the determinant of the Jacobian as indicated by - [Jones10]_ and [Rohde04]_. - - A list of rigid transformation matrices can be provided, sourcing from a - :func:`.hmc_pipeline` workflow, to initialize registrations in a *motion - free* framework. - - A list of affine transformation matrices is available as output, so that - transforms can be chained (discussion - `here `_). - - .. admonition:: References - - .. [Jones10] Jones DK, `The signal intensity must be modulated by the - determinant of the Jacobian when correcting for eddy currents in - diffusion MRI - `_, - Proc. ISMRM 18th Annual Meeting, (2010). - - .. [Rohde04] Rohde et al., `Comprehensive Approach for Correction of - Motion and Distortion in Diffusion-Weighted MRI - `_, MRM - 51:103-114 (2004). 
- - Example - ------- - - >>> from nipype.workflows.dmri.fsl.artifacts import ecc_pipeline - >>> ecc = ecc_pipeline() - >>> ecc.inputs.inputnode.in_file = 'diffusion.nii' - >>> ecc.inputs.inputnode.in_bval = 'diffusion.bval' - >>> ecc.inputs.inputnode.in_mask = 'mask.nii' - >>> ecc.run() # doctest: +SKIP - - Inputs:: - - inputnode.in_file - input dwi file - inputnode.in_mask - weights mask of reference image (a file with data \ -range in [0.0, 1.0], indicating the weight of each voxel when computing the \ -metric). - inputnode.in_bval - b-values table - inputnode.in_xfms - list of matrices to initialize registration (from \ -head-motion correction) - - Outputs:: - - outputnode.out_file - corrected dwi file - outputnode.out_xfms - list of transformation matrices - """ - - params = dict( - dof=12, - no_search=True, - interp='spline', - bgvalue=0, - schedule=get_flirt_schedule('ecc')) - # cost='normmi', cost_func='normmi', bins=64, - - inputnode = pe.Node( - niu.IdentityInterface( - fields=['in_file', 'in_bval', 'in_mask', 'in_xfms']), - name='inputnode') - avg_b0 = pe.Node( - niu.Function( - input_names=['in_dwi', 'in_bval'], - output_names=['out_file'], - function=b0_average), - name='b0_avg') - pick_dws = pe.Node( - niu.Function( - input_names=['in_dwi', 'in_bval', 'b'], - output_names=['out_file'], - function=extract_bval), - name='ExtractDWI') - pick_dws.inputs.b = 'diff' - - flirt = dwi_flirt(flirt_param=params, excl_nodiff=True) - - mult = pe.MapNode( - fsl.BinaryMaths(operation='mul'), - name='ModulateDWIs', - iterfield=['in_file', 'operand_value']) - thres = pe.MapNode( - fsl.Threshold(thresh=0.0), - iterfield=['in_file'], - name='RemoveNegative') - - split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') - get_mat = pe.Node( - niu.Function( - input_names=['in_bval', 'in_xfms'], - output_names=['out_files'], - function=recompose_xfm), - name='GatherMatrices') - merge = pe.Node( - niu.Function( - input_names=['in_dwi', 'in_bval', 'in_corrected'], - output_names=['out_file'], - function=recompose_dwi), - name='MergeDWIs') - - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_file', 'out_xfms']), - name='outputnode') - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, avg_b0, [('in_file', 'in_dwi'), ('in_bval', 'in_bval')]), - (inputnode, pick_dws, [('in_file', 'in_dwi'), ('in_bval', 'in_bval')]), - (inputnode, merge, - [('in_file', 'in_dwi'), ('in_bval', 'in_bval')]), (inputnode, flirt, [ - ('in_mask', 'inputnode.ref_mask'), - ('in_xfms', 'inputnode.in_xfms'), ('in_bval', 'inputnode.in_bval') - ]), (inputnode, get_mat, [('in_bval', 'in_bval')]), (avg_b0, flirt, [ - ('out_file', 'inputnode.reference') - ]), (pick_dws, flirt, [('out_file', 'inputnode.in_file')]), - (flirt, get_mat, [('outputnode.out_xfms', 'in_xfms')]), (flirt, mult, [ - (('outputnode.out_xfms', _xfm_jacobian), 'operand_value') - ]), (flirt, split, - [('outputnode.out_file', 'in_file')]), (split, mult, [ - ('out_files', 'in_file') - ]), (mult, thres, [('out_file', 'in_file')]), (thres, merge, [ - ('out_file', 'in_corrected') - ]), (get_mat, outputnode, - [('out_files', 'out_xfms')]), (merge, outputnode, - [('out_file', 'out_file')]) - ]) - return wf - - -def sdc_fmb(name='fmb_correction', - interp='Linear', - fugue_params=dict(smooth3d=2.0)): - """ - SDC stands for susceptibility distortion correction. FMB stands for - fieldmap-based. - - The fieldmap based (FMB) method implements SDC by using a mapping of the - B0 field as proposed by [Jezzard95]_.
This workflow uses the implementation - of FSL (`FUGUE `_). Phase - unwrapping is performed using `PRELUDE - `_ - [Jenkinson03]_. Preparation of the fieldmap is performed reproducing the - script in FSL `fsl_prepare_fieldmap - `_. - - - - Example - ------- - - >>> from nipype.workflows.dmri.fsl.artifacts import sdc_fmb - >>> fmb = sdc_fmb() - >>> fmb.inputs.inputnode.in_file = 'diffusion.nii' - >>> fmb.inputs.inputnode.in_ref = list(range(0, 30, 6)) - >>> fmb.inputs.inputnode.in_mask = 'mask.nii' - >>> fmb.inputs.inputnode.bmap_mag = 'magnitude.nii' - >>> fmb.inputs.inputnode.bmap_pha = 'phase.nii' - >>> fmb.inputs.inputnode.settings = 'epi_param.txt' - >>> fmb.run() # doctest: +SKIP - - .. warning:: Only SIEMENS format fieldmaps are supported. - - .. admonition:: References - - .. [Jezzard95] Jezzard P, and Balaban RS, `Correction for geometric - distortion in echo planar images from B0 field variations - `_, - MRM 34(1):65-73. (1995). doi: 10.1002/mrm.1910340111. - - .. [Jenkinson03] Jenkinson M., `Fast, automated, N-dimensional - phase-unwrapping algorithm `_, - MRM 49(1):193-197, 2003, doi: 10.1002/mrm.10354. - - """ - - epi_defaults = { - 'delta_te': 2.46e-3, - 'echospacing': 0.77e-3, - 'acc_factor': 2, - 'enc_dir': u'AP' - } - - inputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'in_file', 'in_ref', 'in_mask', 'bmap_pha', 'bmap_mag', 'settings' - ]), - name='inputnode') - - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_file', 'out_vsm', 'out_warp']), - name='outputnode') - - r_params = pe.Node( - JSONFileGrabber(defaults=epi_defaults), name='SettingsGrabber') - eff_echo = pe.Node( - niu.Function( - function=_eff_t_echo, - input_names=['echospacing', 'acc_factor'], - output_names=['eff_echo']), - name='EffEcho') - - firstmag = pe.Node(fsl.ExtractROI(t_min=0, t_size=1), name='GetFirst') - n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias') - bet = pe.Node(fsl.BET(frac=0.4, mask=True), name='BrainExtraction') - dilate = pe.Node( - fsl.maths.MathsCommand(nan2zeros=True, args='-kernel sphere 5 -dilM'), - name='MskDilate') - pha2rads = pe.Node( - niu.Function( - input_names=['in_file'], - output_names=['out_file'], - function=siemens2rads), - name='PreparePhase') - prelude = pe.Node(fsl.PRELUDE(process3d=True), name='PhaseUnwrap') - rad2rsec = pe.Node( - niu.Function( - input_names=['in_file', 'delta_te'], - output_names=['out_file'], - function=rads2radsec), - name='ToRadSec') - - baseline = pe.Node( - niu.Function( - input_names=['in_file', 'index'], - output_names=['out_file'], - function=time_avg), - name='Baseline') - - fmm2b0 = pe.Node( - ants.Registration(output_warped_image=True), name="FMm_to_B0") - fmm2b0.inputs.transforms = ['Rigid'] * 2 - fmm2b0.inputs.transform_parameters = [(1.0, )] * 2 - fmm2b0.inputs.number_of_iterations = [[50], [20]] - fmm2b0.inputs.dimension = 3 - fmm2b0.inputs.metric = ['Mattes', 'Mattes'] - fmm2b0.inputs.metric_weight = [1.0] * 2 - fmm2b0.inputs.radius_or_number_of_bins = [64, 64] - fmm2b0.inputs.sampling_strategy = ['Regular', 'Random'] - fmm2b0.inputs.sampling_percentage = [None, 0.2] - fmm2b0.inputs.convergence_threshold = [1.e-5, 1.e-8] - fmm2b0.inputs.convergence_window_size = [20, 10] - fmm2b0.inputs.smoothing_sigmas = [[6.0], [2.0]] - fmm2b0.inputs.sigma_units = ['vox'] * 2 - fmm2b0.inputs.shrink_factors = [[6], [1]] # ,[1] ] - fmm2b0.inputs.use_estimate_learning_rate_once = [True] * 2 - fmm2b0.inputs.use_histogram_matching = [True] * 2 - fmm2b0.inputs.initial_moving_transform_com = 0 - 
fmm2b0.inputs.collapse_output_transforms = True - fmm2b0.inputs.winsorize_upper_quantile = 0.995 - - applyxfm = pe.Node( - ants.ApplyTransforms(dimension=3, interpolation=interp), - name='FMp_to_B0') - - pre_fugue = pe.Node(fsl.FUGUE(save_fmap=True), name='PreliminaryFugue') - demean = pe.Node( - niu.Function( - input_names=['in_file', 'in_mask'], - output_names=['out_file'], - function=demean_image), - name='DemeanFmap') - - cleanup = cleanup_edge_pipeline() - - addvol = pe.Node( - niu.Function( - input_names=['in_file'], - output_names=['out_file'], - function=add_empty_vol), - name='AddEmptyVol') - - vsm = pe.Node( - fsl.FUGUE(save_shift=True, **fugue_params), name="ComputeVSM") - - split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') - merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs') - unwarp = pe.MapNode( - fsl.FUGUE(icorr=True, forward_warping=False), - iterfield=['in_file'], - name='UnwarpDWIs') - thres = pe.MapNode( - fsl.Threshold(thresh=0.0), - iterfield=['in_file'], - name='RemoveNegative') - vsm2dfm = vsm2warp() - vsm2dfm.inputs.inputnode.scaling = 1.0 - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, r_params, - [('settings', 'in_file')]), (r_params, eff_echo, [ - ('echospacing', 'echospacing'), ('acc_factor', 'acc_factor') - ]), (inputnode, pha2rads, - [('bmap_pha', 'in_file')]), (inputnode, firstmag, - [('bmap_mag', 'in_file')]), - (inputnode, baseline, - [('in_file', 'in_file'), ('in_ref', 'index')]), (firstmag, n4, [ - ('roi_file', 'input_image') - ]), (n4, bet, [('output_image', 'in_file')]), (bet, dilate, [ - ('mask_file', 'in_file') - ]), (pha2rads, prelude, [('out_file', 'phase_file')]), (n4, prelude, [ - ('output_image', 'magnitude_file') - ]), (dilate, prelude, [('out_file', 'mask_file')]), - (r_params, rad2rsec, [('delta_te', 'delta_te')]), (prelude, rad2rsec, [ - ('unwrapped_phase_file', 'in_file') - ]), (baseline, fmm2b0, [('out_file', 'fixed_image')]), (n4, fmm2b0, [ - ('output_image', 'moving_image') - ]), (inputnode, fmm2b0, - [('in_mask', 'fixed_image_mask')]), (dilate, fmm2b0, [ - ('out_file', 'moving_image_mask') - ]), (baseline, applyxfm, [('out_file', 'reference_image')]), - (rad2rsec, applyxfm, - [('out_file', 'input_image')]), (fmm2b0, applyxfm, [ - ('forward_transforms', 'transforms'), ('forward_invert_flags', - 'invert_transform_flags') - ]), (applyxfm, pre_fugue, - [('output_image', 'fmap_in_file')]), (inputnode, pre_fugue, [ - ('in_mask', 'mask_file') - ]), (pre_fugue, demean, - [('fmap_out_file', 'in_file')]), (inputnode, demean, [ - ('in_mask', 'in_mask') - ]), (demean, cleanup, [('out_file', 'inputnode.in_file')]), - (inputnode, cleanup, - [('in_mask', 'inputnode.in_mask')]), (cleanup, addvol, [ - ('outputnode.out_file', 'in_file') - ]), (inputnode, vsm, [('in_mask', 'mask_file')]), (addvol, vsm, [ - ('out_file', 'fmap_in_file') - ]), (r_params, vsm, [('delta_te', 'asym_se_time')]), (eff_echo, vsm, [ - ('eff_echo', 'dwell_time') - ]), (inputnode, split, [('in_file', 'in_file')]), (split, unwarp, [ - ('out_files', 'in_file') - ]), (vsm, unwarp, - [('shift_out_file', 'shift_in_file')]), (r_params, unwarp, [ - (('enc_dir', _fix_enc_dir), 'unwarp_direction') - ]), (unwarp, thres, [('unwarped_file', 'in_file')]), - (thres, merge, [('out_file', 'in_files')]), (r_params, vsm2dfm, [ - (('enc_dir', _fix_enc_dir), 'inputnode.enc_dir') - ]), (merge, vsm2dfm, - [('merged_file', 'inputnode.in_ref')]), (vsm, vsm2dfm, [ - ('shift_out_file', 'inputnode.in_vsm') - ]), (merge, outputnode, - [('merged_file', 'out_file')]), (vsm, 
outputnode, [ - ('shift_out_file', 'out_vsm') - ]), (vsm2dfm, outputnode, [('outputnode.out_warp', - 'out_warp')]) - ]) - return wf - - -def sdc_peb(name='peb_correction', - epi_params=dict( - echospacing=0.77e-3, acc_factor=3, enc_dir='y-', epi_factor=1), - altepi_params=dict( - echospacing=0.77e-3, acc_factor=3, enc_dir='y', epi_factor=1)): - """ - SDC stands for susceptibility distortion correction. PEB stands for - phase-encoding-based. - - The phase-encoding-based (PEB) method implements SDC by acquiring - diffusion images with two different enconding directions [Andersson2003]_. - The most typical case is acquiring with opposed phase-gradient blips - (e.g. *A>>>P* and *P>>>A*, or equivalently, *-y* and *y*) - as in [Chiou2000]_, but it is also possible to use orthogonal - configurations [Cordes2000]_ (e.g. *A>>>P* and *L>>>R*, - or equivalently *-y* and *x*). - This workflow uses the implementation of FSL - (`TOPUP `_). - - Example - ------- - - >>> from nipype.workflows.dmri.fsl.artifacts import sdc_peb - >>> peb = sdc_peb() - >>> peb.inputs.inputnode.in_file = 'epi.nii' - >>> peb.inputs.inputnode.alt_file = 'epi_rev.nii' - >>> peb.inputs.inputnode.in_bval = 'diffusion.bval' - >>> peb.inputs.inputnode.in_mask = 'mask.nii' - >>> peb.run() # doctest: +SKIP - - .. admonition:: References - - .. [Andersson2003] Andersson JL et al., `How to correct susceptibility - distortions in spin-echo echo-planar images: application to diffusion - tensor imaging `_. - Neuroimage. 2003 Oct;20(2):870-88. doi: 10.1016/S1053-8119(03)00336-7 - - .. [Cordes2000] Cordes D et al., Geometric distortion correction in EPI - using two images with orthogonal phase-encoding directions, in Proc. - ISMRM (8), p.1712, Denver, US, 2000. - - .. [Chiou2000] Chiou JY, and Nalcioglu O, A simple method to correct - off-resonance related distortion in echo planar imaging, in Proc. - ISMRM (8), p.1712, Denver, US, 2000. 
- - """ - - inputnode = pe.Node( - niu.IdentityInterface( - fields=['in_file', 'in_bval', 'in_mask', 'alt_file', 'ref_num']), - name='inputnode') - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_file', 'out_vsm', 'out_warp']), - name='outputnode') - - b0_ref = pe.Node(fsl.ExtractROI(t_size=1), name='b0_ref') - b0_alt = pe.Node(fsl.ExtractROI(t_size=1), name='b0_alt') - b0_comb = pe.Node(niu.Merge(2), name='b0_list') - b0_merge = pe.Node(fsl.Merge(dimension='t'), name='b0_merged') - - topup = pe.Node(fsl.TOPUP(), name='topup') - topup.inputs.encoding_direction = [ - epi_params['enc_dir'], altepi_params['enc_dir'] - ] - - readout = compute_readout(epi_params) - topup.inputs.readout_times = [readout, compute_readout(altepi_params)] - - unwarp = pe.Node(fsl.ApplyTOPUP(in_index=[1], method='jac'), name='unwarp') - - # scaling = pe.Node(niu.Function(input_names=['in_file', 'enc_dir'], - # output_names=['factor'], function=_get_zoom), - # name='GetZoom') - # scaling.inputs.enc_dir = epi_params['enc_dir'] - vsm2dfm = vsm2warp() - vsm2dfm.inputs.inputnode.enc_dir = epi_params['enc_dir'] - vsm2dfm.inputs.inputnode.scaling = readout - - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, b0_ref, [('in_file', 'in_file'), (('ref_num', _checkrnum), - 't_min')]), - (inputnode, b0_alt, [('alt_file', 'in_file'), (('ref_num', _checkrnum), - 't_min')]), - (b0_ref, b0_comb, [('roi_file', 'in1')]), - (b0_alt, b0_comb, [('roi_file', 'in2')]), - (b0_comb, b0_merge, [('out', 'in_files')]), - (b0_merge, topup, [('merged_file', 'in_file')]), - (topup, unwarp, [('out_fieldcoef', 'in_topup_fieldcoef'), - ('out_movpar', 'in_topup_movpar'), - ('out_enc_file', 'encoding_file')]), - (inputnode, unwarp, [('in_file', 'in_files')]), - (unwarp, outputnode, [('out_corrected', 'out_file')]), - # (b0_ref, scaling, [('roi_file', 'in_file')]), - # (scaling, vsm2dfm, [('factor', 'inputnode.scaling')]), - (b0_ref, vsm2dfm, [('roi_file', 'inputnode.in_ref')]), - (topup, vsm2dfm, [('out_field', 'inputnode.in_vsm')]), - (topup, outputnode, [('out_field', 'out_vsm')]), - (vsm2dfm, outputnode, [('outputnode.out_warp', 'out_warp')]) - ]) - return wf - - -def remove_bias(name='bias_correct'): - """ - This workflow estimates a single multiplicative bias field from the - averaged *b0* image, as suggested in [Jeurissen2014]_. - - .. admonition:: References - - .. [Jeurissen2014] Jeurissen B. et al., `Multi-tissue constrained - spherical deconvolution for improved analysis of multi-shell diffusion - MRI data `_. - NeuroImage (2014). 
doi: 10.1016/j.neuroimage.2014.07.061 - - - Example - ------- - - >>> from nipype.workflows.dmri.fsl.artifacts import remove_bias - >>> bias = remove_bias() - >>> bias.inputs.inputnode.in_file = 'epi.nii' - >>> bias.inputs.inputnode.in_bval = 'diffusion.bval' - >>> bias.inputs.inputnode.in_mask = 'mask.nii' - >>> bias.run() # doctest: +SKIP - - """ - inputnode = pe.Node( - niu.IdentityInterface(fields=['in_file', 'in_bval', 'in_mask']), - name='inputnode') - - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_file']), name='outputnode') - - avg_b0 = pe.Node( - niu.Function( - input_names=['in_dwi', 'in_bval'], - output_names=['out_file'], - function=b0_average), - name='b0_avg') - n4 = pe.Node( - ants.N4BiasFieldCorrection( - dimension=3, save_bias=True, bspline_fitting_distance=600), - name='Bias_b0') - split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') - mult = pe.MapNode( - fsl.MultiImageMaths(op_string='-div %s'), - iterfield=['in_file'], - name='RemoveBiasOfDWIs') - thres = pe.MapNode( - fsl.Threshold(thresh=0.0), - iterfield=['in_file'], - name='RemoveNegative') - merge = pe.Node(fsl.utils.Merge(dimension='t'), name='MergeDWIs') - - wf = pe.Workflow(name=name) - wf.connect([(inputnode, avg_b0, [ - ('in_file', 'in_dwi'), ('in_bval', 'in_bval') - ]), (avg_b0, n4, [('out_file', 'input_image')]), (inputnode, n4, [ - ('in_mask', 'mask_image') - ]), (inputnode, split, [('in_file', 'in_file')]), (n4, mult, [ - ('bias_image', 'operand_files') - ]), (split, mult, [('out_files', 'in_file')]), (mult, thres, - [('out_file', 'in_file')]), - (thres, merge, [('out_file', 'in_files')]), - (merge, outputnode, [('merged_file', 'out_file')])]) - return wf - - -def _eff_t_echo(echospacing, acc_factor): - eff_echo = echospacing / (1.0 * acc_factor) - return eff_echo - - -def _fix_enc_dir(enc_dir): - enc_dir = enc_dir.lower() - if enc_dir == 'lr': - return 'x-' - if enc_dir == 'rl': - return 'x' - if enc_dir == 'ap': - return 'y-' - if enc_dir == 'pa': - return 'y' - return enc_dir - - -def _checkrnum(ref_num): - from nipype.interfaces.base import isdefined - if (ref_num is None) or not isdefined(ref_num): - return 0 - return ref_num - - -def _nonb0(in_bval): - import numpy as np - bvals = np.loadtxt(in_bval) - return np.where(bvals != 0)[0].tolist() - - -def _xfm_jacobian(in_xfm): - import numpy as np - from math import fabs - return [fabs(np.linalg.det(np.loadtxt(xfm))) for xfm in in_xfm] - - -def _get_zoom(in_file, enc_dir): - import nibabel as nb - from nipype.utils import NUMPY_MMAP - - zooms = nb.load(in_file, mmap=NUMPY_MMAP).header.get_zooms() - - if 'y' in enc_dir: - return zooms[1] - elif 'x' in enc_dir: - return zooms[0] - elif 'z' in enc_dir: - return zooms[2] - else: - raise ValueError('Wrong encoding direction string') diff --git a/nipype/workflows/dmri/fsl/dti.py b/nipype/workflows/dmri/fsl/dti.py deleted file mode 100644 index ee7e48dd5a..0000000000 --- a/nipype/workflows/dmri/fsl/dti.py +++ /dev/null @@ -1,276 +0,0 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 - -from __future__ import absolute_import - -from ....pipeline import engine as pe -from ....interfaces import utility as niu -from ....interfaces import fsl -from ....algorithms import misc - -# backwards compatibility -from .epi import create_eddy_correct_pipeline - - -def transpose(samples_over_fibres): - import numpy as np - a = np.array(samples_over_fibres) - return np.squeeze(a.T).tolist() - - -def create_bedpostx_pipeline( - name='bedpostx', - params={ - 'n_fibres': 2, - 'fudge': 1, - 'burn_in': 1000, - 
'n_jumps': 1250, - 'sample_every': 25, - 'model': 2, - 'cnlinear': True - }): - """ - Creates a pipeline that does the same as bedpostx script from FSL - - calculates diffusion model parameters (distributions not MLE) voxelwise for - the whole volume (by splitting it slicewise). - - Example - ------- - - >>> from nipype.workflows.dmri.fsl.dti import create_bedpostx_pipeline - >>> params = dict(n_fibres = 2, fudge = 1, burn_in = 1000, - ... n_jumps = 1250, sample_every = 25) - >>> bpwf = create_bedpostx_pipeline('nipype_bedpostx', params) - >>> bpwf.inputs.inputnode.dwi = 'diffusion.nii' - >>> bpwf.inputs.inputnode.mask = 'mask.nii' - >>> bpwf.inputs.inputnode.bvecs = 'bvecs' - >>> bpwf.inputs.inputnode.bvals = 'bvals' - >>> bpwf.run() # doctest: +SKIP - - Inputs:: - - inputnode.dwi - inputnode.mask - inputnode.bvecs - inputnode.bvals - - Outputs:: - - outputnode wraps all XFibres outputs - - """ - - inputnode = pe.Node( - niu.IdentityInterface(fields=['dwi', 'mask', 'bvecs', 'bvals']), - name='inputnode') - - slice_dwi = pe.Node(fsl.Split(dimension='z'), name='slice_dwi') - slice_msk = pe.Node(fsl.Split(dimension='z'), name='slice_msk') - mask_dwi = pe.MapNode( - fsl.ImageMaths(op_string='-mas'), - iterfield=['in_file', 'in_file2'], - name='mask_dwi') - - xfib_if = fsl.XFibres(**params) - xfibres = pe.MapNode(xfib_if, name='xfibres', iterfield=['dwi', 'mask']) - - make_dyads = pe.MapNode( - fsl.MakeDyadicVectors(), - name="make_dyads", - iterfield=['theta_vol', 'phi_vol']) - out_fields = [ - 'dyads', 'dyads_disp', 'thsamples', 'phsamples', 'fsamples', - 'mean_thsamples', 'mean_phsamples', 'mean_fsamples' - ] - - outputnode = pe.Node( - niu.IdentityInterface(fields=out_fields), name='outputnode') - - wf = pe.Workflow(name=name) - wf.connect( - [(inputnode, slice_dwi, [('dwi', 'in_file')]), (inputnode, slice_msk, - [('mask', 'in_file')]), - (slice_dwi, mask_dwi, - [('out_files', 'in_file')]), (slice_msk, mask_dwi, [('out_files', - 'in_file2')]), - (slice_dwi, xfibres, - [('out_files', 'dwi')]), (mask_dwi, xfibres, [('out_file', 'mask')]), - (inputnode, xfibres, [('bvecs', 'bvecs'), - ('bvals', 'bvals')]), (inputnode, make_dyads, - [('mask', 'mask')])]) - - mms = {} - for k in ['thsamples', 'phsamples', 'fsamples']: - mms[k] = merge_and_mean(k) - wf.connect([(xfibres, mms[k], [(k, 'inputnode.in_files')]), - (mms[k], outputnode, [('outputnode.merged', k), - ('outputnode.mean', - 'mean_%s' % k)])]) - - # m_mdsamples = pe.Node(fsl.Merge(dimension="z"), - # name="merge_mean_dsamples") - wf.connect([ - (mms['thsamples'], make_dyads, [('outputnode.merged', 'theta_vol')]), - (mms['phsamples'], make_dyads, [('outputnode.merged', 'phi_vol')]), - # (xfibres, m_mdsamples, [('mean_dsamples', 'in_files')]), - (make_dyads, outputnode, [('dyads', 'dyads'), ('dispersion', - 'dyads_disp')]) - ]) - return wf - - -def merge_and_mean(name='mm'): - inputnode = pe.Node( - niu.IdentityInterface(fields=['in_files']), name='inputnode') - outputnode = pe.Node( - niu.IdentityInterface(fields=['merged', 'mean']), name='outputnode') - merge = pe.MapNode( - fsl.Merge(dimension='z'), name='Merge', iterfield=['in_files']) - mean = pe.MapNode( - fsl.ImageMaths(op_string='-Tmean'), name='Mean', iterfield=['in_file']) - - wf = pe.Workflow(name=name) - wf.connect([(inputnode, merge, [(('in_files', transpose), 'in_files')]), - (merge, mean, [('merged_file', 'in_file')]), - (merge, outputnode, - [('merged_file', 'merged')]), (mean, outputnode, [('out_file', - 'mean')])]) - return wf - - -def bedpostx_parallel( - 
name='bedpostx_parallel', - compute_all_outputs=True, - params={ - 'n_fibres': 2, - 'fudge': 1, - 'burn_in': 1000, - 'n_jumps': 1250, - 'sample_every': 25, - 'model': 1, - 'cnlinear': True - }): - """ - Does the same as :func:`.create_bedpostx_pipeline` by splitting - the input dMRI in small ROIs that are better suited for parallel - processing). - - Example - ------- - - >>> from nipype.workflows.dmri.fsl.dti import bedpostx_parallel - >>> params = dict(n_fibres = 2, fudge = 1, burn_in = 1000, - ... n_jumps = 1250, sample_every = 25) - >>> bpwf = bedpostx_parallel('nipype_bedpostx_parallel', params=params) - >>> bpwf.inputs.inputnode.dwi = 'diffusion.nii' - >>> bpwf.inputs.inputnode.mask = 'mask.nii' - >>> bpwf.inputs.inputnode.bvecs = 'bvecs' - >>> bpwf.inputs.inputnode.bvals = 'bvals' - >>> bpwf.run(plugin='CondorDAGMan') # doctest: +SKIP - - Inputs:: - - inputnode.dwi - inputnode.mask - inputnode.bvecs - inputnode.bvals - - Outputs:: - - outputnode wraps all XFibres outputs - - """ - - inputnode = pe.Node( - niu.IdentityInterface(fields=['dwi', 'mask', 'bvecs', 'bvals']), - name='inputnode') - slice_dwi = pe.Node(misc.SplitROIs(roi_size=(5, 5, 1)), name='slice_dwi') - if params is not None: - xfib_if = fsl.XFibres5(**params) - else: - xfib_if = fsl.XFibres5() - xfibres = pe.MapNode(xfib_if, name='xfibres', iterfield=['dwi', 'mask']) - - mrg_dyads = pe.MapNode( - misc.MergeROIs(), name='Merge_dyads', iterfield=['in_files']) - mrg_fsamp = pe.MapNode( - misc.MergeROIs(), name='Merge_mean_fsamples', iterfield=['in_files']) - out_fields = ['dyads', 'fsamples'] - - if compute_all_outputs: - out_fields += [ - 'dyads_disp', 'thsamples', 'phsamples', 'mean_fsamples', - 'mean_thsamples', 'mean_phsamples', 'merged_fsamples', - 'merged_thsamples', 'merged_phsamples' - ] - - outputnode = pe.Node( - niu.IdentityInterface(fields=out_fields), name='outputnode') - - wf = pe.Workflow(name=name) - wf.connect( - [(inputnode, slice_dwi, [('dwi', 'in_file'), ('mask', 'in_mask')]), - (slice_dwi, xfibres, [('out_files', 'dwi'), ('out_masks', 'mask')]), - (inputnode, xfibres, - [('bvecs', 'bvecs'), ('bvals', 'bvals')]), (inputnode, mrg_dyads, [ - ('mask', 'in_reference') - ]), (xfibres, mrg_dyads, - [(('dyads', transpose), 'in_files')]), (slice_dwi, mrg_dyads, [ - ('out_index', 'in_index') - ]), (inputnode, mrg_fsamp, - [('mask', 'in_reference')]), (xfibres, mrg_fsamp, [ - (('mean_fsamples', transpose), 'in_files') - ]), (slice_dwi, mrg_fsamp, [('out_index', 'in_index')]), - (mrg_dyads, outputnode, - [('merged_file', 'dyads')]), (mrg_fsamp, outputnode, - [('merged_file', 'fsamples')])]) - - if compute_all_outputs: - make_dyads = pe.MapNode( - fsl.MakeDyadicVectors(), - name="Make_dyads", - iterfield=['theta_vol', 'phi_vol']) - - wf.connect([(inputnode, make_dyads, [('mask', 'mask')])]) - mms = {} - for k in ['thsamples', 'phsamples', 'fsamples']: - mms[k] = merge_and_mean_parallel(k) - wf.connect( - [(slice_dwi, mms[k], [('out_index', 'inputnode.in_index')]), - (inputnode, mms[k], [('mask', 'inputnode.in_reference')]), - (xfibres, mms[k], [(k, 'inputnode.in_files')]), - (mms[k], outputnode, [('outputnode.merged', 'merged_%s' % k), - ('outputnode.mean', 'mean_%s' % k)])]) - - # m_mdsamples = pe.Node(fsl.Merge(dimension="z"), - # name="merge_mean_dsamples") - wf.connect([ - (mms['thsamples'], make_dyads, [('outputnode.merged', - 'theta_vol')]), - (mms['phsamples'], make_dyads, [('outputnode.merged', 'phi_vol')]), - # (xfibres, m_mdsamples, [('mean_dsamples', 'in_files')]), - (make_dyads, outputnode, 
[('dispersion', 'dyads_disp')]) - ]) - - return wf - - -def merge_and_mean_parallel(name='mm'): - inputnode = pe.Node( - niu.IdentityInterface(fields=['in_files', 'in_reference', 'in_index']), - name='inputnode') - outputnode = pe.Node( - niu.IdentityInterface(fields=['merged', 'mean']), name='outputnode') - merge = pe.MapNode(misc.MergeROIs(), name='Merge', iterfield=['in_files']) - mean = pe.MapNode( - fsl.ImageMaths(op_string='-Tmean'), name='Mean', iterfield=['in_file']) - - wf = pe.Workflow(name=name) - wf.connect([(inputnode, merge, - [(('in_files', transpose), 'in_files'), - ('in_reference', 'in_reference'), ('in_index', 'in_index')]), - (merge, mean, [('merged_file', 'in_file')]), - (merge, outputnode, - [('merged_file', 'merged')]), (mean, outputnode, [('out_file', - 'mean')])]) - return wf diff --git a/nipype/workflows/dmri/fsl/epi.py b/nipype/workflows/dmri/fsl/epi.py deleted file mode 100644 index 3bd88a99b7..0000000000 --- a/nipype/workflows/dmri/fsl/epi.py +++ /dev/null @@ -1,885 +0,0 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open, str - -import warnings - -from ....pipeline import engine as pe -from ....interfaces import utility as niu -from ....interfaces import fsl as fsl - - -def create_dmri_preprocessing(name='dMRI_preprocessing', - use_fieldmap=True, - fieldmap_registration=False): - """ - Creates a workflow that chains the necessary pipelines to - correct for motion, eddy currents, and, if selected, susceptibility - artifacts in EPI dMRI sequences. - - .. deprecated:: 0.9.3 - Use :func:`nipype.workflows.dmri.preprocess.epi.all_fmb_pipeline` or - :func:`nipype.workflows.dmri.preprocess.epi.all_peb_pipeline` instead. - - - .. warning:: This workflow rotates the b-vectors, so please be - advised that not all the dicom converters ensure the consistency between the resulting - nifti orientation and the b matrix table (e.g. dcm2nii checks it). - - - Example - ------- - - >>> nipype_dmri_preprocess = create_dmri_preprocessing('nipype_dmri_prep') - >>> nipype_dmri_preprocess.inputs.inputnode.in_file = 'diffusion.nii' - >>> nipype_dmri_preprocess.inputs.inputnode.in_bvec = 'diffusion.bvec' - >>> nipype_dmri_preprocess.inputs.inputnode.ref_num = 0 - >>> nipype_dmri_preprocess.inputs.inputnode.fieldmap_mag = 'magnitude.nii' - >>> nipype_dmri_preprocess.inputs.inputnode.fieldmap_pha = 'phase.nii' - >>> nipype_dmri_preprocess.inputs.inputnode.te_diff = 2.46 - >>> nipype_dmri_preprocess.inputs.inputnode.epi_echospacing = 0.77 - >>> nipype_dmri_preprocess.inputs.inputnode.epi_rev_encoding = False - >>> nipype_dmri_preprocess.inputs.inputnode.pi_accel_factor = True - >>> nipype_dmri_preprocess.run() # doctest: +SKIP - - - Inputs:: - - inputnode.in_file - The diffusion data - inputnode.in_bvec - The b-matrix file, in FSL format and consistent with the in_file orientation - inputnode.ref_num - The reference volume (a b=0 volume in dMRI) - inputnode.fieldmap_mag - The magnitude of the fieldmap - inputnode.fieldmap_pha - The phase difference of the fieldmap - inputnode.te_diff - TE increment used (in msec.) on the fieldmap acquisition (generally 2.46ms for 3T scanners) - inputnode.epi_echospacing - The EPI EchoSpacing parameter (in msec.) - inputnode.epi_rev_encoding - True if reverse encoding was used (generally False) - inputnode.pi_accel_factor - Parallel imaging factor (aka GRAPPA acceleration factor) - inputnode.vsm_sigma - Sigma (in mm.) 
of the gaussian kernel used for in-slice smoothing of the deformation field (voxel shift map, vsm) - - - Outputs:: - - outputnode.dmri_corrected - outputnode.bvec_rotated - - - Optional arguments:: - - use_fieldmap - True if there are fieldmap files that should be used (default True) - fieldmap_registration - True if registration to fieldmap should be performed (default False) - - - """ - - warnings.warn( - ('This workflow is deprecated from v.1.0.0, use of available ' - 'nipype.workflows.dmri.preprocess.epi.all_*'), DeprecationWarning) - - pipeline = pe.Workflow(name=name) - - inputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'in_file', 'in_bvec', 'ref_num', 'fieldmap_mag', 'fieldmap_pha', - 'te_diff', 'epi_echospacing', 'epi_rev_encoding', - 'pi_accel_factor', 'vsm_sigma' - ]), - name='inputnode') - - outputnode = pe.Node( - niu.IdentityInterface(fields=['dmri_corrected', 'bvec_rotated']), - name='outputnode') - - motion = create_motion_correct_pipeline() - eddy = create_eddy_correct_pipeline() - - if use_fieldmap: # we have a fieldmap, so lets use it (yay!) - susceptibility = create_epidewarp_pipeline( - fieldmap_registration=fieldmap_registration) - - pipeline.connect( - [(inputnode, motion, [('in_file', 'inputnode.in_file'), - ('in_bvec', 'inputnode.in_bvec'), - ('ref_num', 'inputnode.ref_num')]), - (inputnode, eddy, - [('ref_num', 'inputnode.ref_num')]), (motion, eddy, [ - ('outputnode.motion_corrected', 'inputnode.in_file') - ]), (eddy, susceptibility, - [('outputnode.eddy_corrected', 'inputnode.in_file')]), - (inputnode, susceptibility, - [('ref_num', 'inputnode.ref_num'), ('fieldmap_mag', - 'inputnode.fieldmap_mag'), - ('fieldmap_pha', 'inputnode.fieldmap_pha'), - ('te_diff', 'inputnode.te_diff'), ('epi_echospacing', - 'inputnode.epi_echospacing'), - ('epi_rev_encoding', - 'inputnode.epi_rev_encoding'), ('pi_accel_factor', - 'inputnode.pi_accel_factor'), - ('vsm_sigma', 'inputnode.vsm_sigma')]), (motion, outputnode, [ - ('outputnode.out_bvec', 'bvec_rotated') - ]), (susceptibility, outputnode, [('outputnode.epi_corrected', - 'dmri_corrected')])]) - else: # we don't have a fieldmap, so we just carry on without it :( - pipeline.connect([(inputnode, motion, [ - ('in_file', 'inputnode.in_file'), ('in_bvec', 'inputnode.in_bvec'), - ('ref_num', 'inputnode.ref_num') - ]), (inputnode, eddy, [('ref_num', 'inputnode.ref_num')]), - (motion, eddy, [('outputnode.motion_corrected', - 'inputnode.in_file')]), - (motion, outputnode, - [('outputnode.out_bvec', - 'bvec_rotated')]), (eddy, outputnode, - [('outputnode.eddy_corrected', - 'dmri_corrected')])]) - - return pipeline - - -def create_motion_correct_pipeline(name='motion_correct'): - """Creates a pipeline that corrects for motion artifact in dMRI sequences. - It takes a series of diffusion weighted images and rigidly co-registers - them to one reference image. Finally, the b-matrix is rotated accordingly - (Leemans et al. 2009 - http://www.ncbi.nlm.nih.gov/pubmed/19319973), - making use of the rotation matrix obtained by FLIRT. - - - .. deprecated:: 0.9.3 - Use :func:`nipype.workflows.dmri.preprocess.epi.hmc_pipeline` instead. - - - .. warning:: This workflow rotates the b-vectors, so please be adviced - that not all the dicom converters ensure the consistency between the resulting - nifti orientation and the b matrix table (e.g. dcm2nii checks it). 
- - - Example - ------- - - >>> nipype_motioncorrect = create_motion_correct_pipeline('nipype_motioncorrect') - >>> nipype_motioncorrect.inputs.inputnode.in_file = 'diffusion.nii' - >>> nipype_motioncorrect.inputs.inputnode.in_bvec = 'diffusion.bvec' - >>> nipype_motioncorrect.inputs.inputnode.ref_num = 0 - >>> nipype_motioncorrect.run() # doctest: +SKIP - - Inputs:: - - inputnode.in_file - inputnode.ref_num - inputnode.in_bvec - - Outputs:: - - outputnode.motion_corrected - outputnode.out_bvec - - """ - - warnings.warn( - ('This workflow is deprecated from v.1.0.0, use ' - 'nipype.workflows.dmri.preprocess.epi.hmc_pipeline instead'), - DeprecationWarning) - - inputnode = pe.Node( - niu.IdentityInterface(fields=['in_file', 'ref_num', 'in_bvec']), - name='inputnode') - - pipeline = pe.Workflow(name=name) - - split = pe.Node(fsl.Split(dimension='t'), name='split') - pick_ref = pe.Node(niu.Select(), name='pick_ref') - coregistration = pe.MapNode( - fsl.FLIRT(no_search=True, interp='spline', padding_size=1, dof=6), - name='coregistration', - iterfield=['in_file']) - rotate_bvecs = pe.Node( - niu.Function( - input_names=['in_bvec', 'in_matrix'], - output_names=['out_file'], - function=_rotate_bvecs), - name='rotate_b_matrix') - merge = pe.Node(fsl.Merge(dimension='t'), name='merge') - outputnode = pe.Node( - niu.IdentityInterface(fields=['motion_corrected', 'out_bvec']), - name='outputnode') - - pipeline.connect( - [(inputnode, split, [('in_file', 'in_file')]), - (split, pick_ref, [('out_files', 'inlist')]), (inputnode, pick_ref, [ - ('ref_num', 'index') - ]), (split, coregistration, - [('out_files', 'in_file')]), (inputnode, rotate_bvecs, - [('in_bvec', 'in_bvec')]), - (coregistration, rotate_bvecs, - [('out_matrix_file', 'in_matrix')]), (pick_ref, coregistration, - [('out', 'reference')]), - (coregistration, merge, - [('out_file', 'in_files')]), (merge, outputnode, [ - ('merged_file', 'motion_corrected') - ]), (rotate_bvecs, outputnode, [('out_file', 'out_bvec')])]) - - return pipeline - - -def create_eddy_correct_pipeline(name='eddy_correct'): - """ - - .. deprecated:: 0.9.3 - Use :func:`nipype.workflows.dmri.preprocess.epi.ecc_pipeline` instead. - - - Creates a pipeline that replaces eddy_correct script in FSL. It takes a - series of diffusion weighted images and linearly co-registers them to one - reference image. No rotation of the B-matrix is performed, so this pipeline - should be executed after the motion correction pipeline. 
- - Example - ------- - - >>> nipype_eddycorrect = create_eddy_correct_pipeline('nipype_eddycorrect') - >>> nipype_eddycorrect.inputs.inputnode.in_file = 'diffusion.nii' - >>> nipype_eddycorrect.inputs.inputnode.ref_num = 0 - >>> nipype_eddycorrect.run() # doctest: +SKIP - - Inputs:: - - inputnode.in_file - inputnode.ref_num - - Outputs:: - - outputnode.eddy_corrected - """ - - warnings.warn( - ('This workflow is deprecated from v.1.0.0, use ' - 'nipype.workflows.dmri.preprocess.epi.ecc_pipeline instead'), - DeprecationWarning) - - inputnode = pe.Node( - niu.IdentityInterface(fields=['in_file', 'ref_num']), name='inputnode') - - pipeline = pe.Workflow(name=name) - - split = pe.Node(fsl.Split(dimension='t'), name='split') - pick_ref = pe.Node(niu.Select(), name='pick_ref') - coregistration = pe.MapNode( - fsl.FLIRT(no_search=True, padding_size=1, interp='trilinear'), - name='coregistration', - iterfield=['in_file']) - merge = pe.Node(fsl.Merge(dimension='t'), name='merge') - outputnode = pe.Node( - niu.IdentityInterface(fields=['eddy_corrected']), name='outputnode') - - pipeline.connect([(inputnode, split, [('in_file', 'in_file')]), - (split, pick_ref, - [('out_files', 'inlist')]), (inputnode, pick_ref, - [('ref_num', 'index')]), - (split, coregistration, - [('out_files', 'in_file')]), (pick_ref, coregistration, - [('out', 'reference')]), - (coregistration, merge, - [('out_file', 'in_files')]), (merge, outputnode, - [('merged_file', - 'eddy_corrected')])]) - return pipeline - - -def fieldmap_correction(name='fieldmap_correction', nocheck=False): - """ - - .. deprecated:: 0.9.3 - Use :func:`nipype.workflows.dmri.preprocess.epi.sdc_fmb` instead. - - - Fieldmap-based retrospective correction of EPI images for the susceptibility distortion - artifact (Jezzard et al., 1995). Fieldmap images are assumed to be already registered - to EPI data, and a brain mask is required. - - Replaces the former workflow, still available as create_epidewarp_pipeline(). The difference - with respect to the epidewarp pipeline is that now the workflow uses the new fsl_prepare_fieldmap - available as of FSL 5.0. - - - Example - ------- - - >>> nipype_epicorrect = fieldmap_correction('nipype_epidewarp') - >>> nipype_epicorrect.inputs.inputnode.in_file = 'diffusion.nii' - >>> nipype_epicorrect.inputs.inputnode.in_mask = 'brainmask.nii' - >>> nipype_epicorrect.inputs.inputnode.fieldmap_pha = 'phase.nii' - >>> nipype_epicorrect.inputs.inputnode.fieldmap_mag = 'magnitude.nii' - >>> nipype_epicorrect.inputs.inputnode.te_diff = 2.46 - >>> nipype_epicorrect.inputs.inputnode.epi_echospacing = 0.77 - >>> nipype_epicorrect.inputs.inputnode.encoding_direction = 'y' - >>> nipype_epicorrect.run() # doctest: +SKIP - - Inputs:: - - inputnode.in_file - The volume acquired with an EPI sequence - inputnode.in_mask - A brain mask - inputnode.fieldmap_pha - The phase difference map from the fieldmapping, registered to in_file - inputnode.fieldmap_mag - The magnitude maps (usually 4D, one magnitude per GRE scan) - from the fieldmapping, registered to in_file - inputnode.te_diff - Time difference (in msec.) between the two TEs of the fieldmapping acquisition (usually a GRE sequence). - inputnode.epi_echospacing - The effective echo spacing (aka dwell time) in msec. of the EPI sequence. If - EPI was acquired with parallel imaging, then the effective echo spacing is - eff_es = es / acc_factor.
- inputnode.encoding_direction - The phase encoding direction in EPI acquisition (default y) - inputnode.vsm_sigma - Sigma value of the gaussian smoothing filter applied to the vsm (voxel shift map) - - - Outputs:: - - outputnode.epi_corrected - outputnode.out_vsm - - """ - - warnings.warn(('This workflow is deprecated from v.1.0.0, use ' - 'nipype.workflows.dmri.preprocess.epi.sdc_fmb instead'), - DeprecationWarning) - - inputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'in_file', 'in_mask', 'fieldmap_pha', 'fieldmap_mag', 'te_diff', - 'epi_echospacing', 'vsm_sigma', 'encoding_direction' - ]), - name='inputnode') - - pipeline = pe.Workflow(name=name) - - # Keep first frame from magnitude - select_mag = pe.Node( - fsl.utils.ExtractROI(t_size=1, t_min=0), name='select_magnitude') - - # Mask magnitude (it is required by PreparedFieldMap) - mask_mag = pe.Node(fsl.maths.ApplyMask(), name='mask_magnitude') - - # Run fsl_prepare_fieldmap - fslprep = pe.Node(fsl.PrepareFieldmap(), name='prepare_fieldmap') - - if nocheck: - fslprep.inputs.nocheck = True - - # Use FUGUE to generate the voxel shift map (vsm) - vsm = pe.Node(fsl.FUGUE(save_shift=True), name='generate_vsm') - - # VSM demean is not anymore present in the epi_reg script - # vsm_mean = pe.Node(niu.Function(input_names=['in_file', 'mask_file', 'in_unwarped'], output_names=[ - # 'out_file'], function=_vsm_remove_mean), name='vsm_mean_shift') - - # fugue_epi - dwi_split = pe.Node( - niu.Function( - input_names=['in_file'], - output_names=['out_files'], - function=_split_dwi), - name='dwi_split') - - # 'fugue -i %s -u %s --loadshift=%s --mask=%s' % ( vol_name, out_vol_name, vsm_name, mask_name ) - dwi_applyxfm = pe.MapNode( - fsl.FUGUE(icorr=True, save_shift=False), - iterfield=['in_file'], - name='dwi_fugue') - # Merge back all volumes - dwi_merge = pe.Node(fsl.utils.Merge(dimension='t'), name='dwi_merge') - - outputnode = pe.Node( - niu.IdentityInterface(fields=['epi_corrected', 'out_vsm']), - name='outputnode') - - pipeline.connect( - [(inputnode, select_mag, - [('fieldmap_mag', 'in_file')]), (inputnode, fslprep, [ - ('fieldmap_pha', 'in_phase'), ('te_diff', 'delta_TE') - ]), (inputnode, mask_mag, - [('in_mask', 'mask_file')]), (select_mag, mask_mag, - [('roi_file', 'in_file')]), - (mask_mag, fslprep, [('out_file', 'in_magnitude')]), (fslprep, vsm, [ - ('out_fieldmap', 'phasemap_in_file') - ]), (inputnode, - vsm, [('fieldmap_mag', - 'in_file'), ('encoding_direction', 'unwarp_direction'), - (('te_diff', _ms2sec), 'asym_se_time'), - ('vsm_sigma', 'smooth2d'), (('epi_echospacing', _ms2sec), - 'dwell_time')]), - (mask_mag, vsm, [('out_file', 'mask_file')]), (inputnode, dwi_split, [ - ('in_file', 'in_file') - ]), (dwi_split, dwi_applyxfm, - [('out_files', 'in_file')]), (mask_mag, dwi_applyxfm, - [('out_file', 'mask_file')]), - (vsm, dwi_applyxfm, - [('shift_out_file', 'shift_in_file')]), (inputnode, dwi_applyxfm, [ - ('encoding_direction', 'unwarp_direction') - ]), (dwi_applyxfm, dwi_merge, - [('unwarped_file', 'in_files')]), (dwi_merge, outputnode, [ - ('merged_file', 'epi_corrected') - ]), (vsm, outputnode, [('shift_out_file', 'out_vsm')])]) - - return pipeline - - -def topup_correction(name='topup_correction'): - """ - - .. deprecated:: 0.9.3 - Use :func:`nipype.workflows.dmri.preprocess.epi.sdc_peb` instead. 
- - - Corrects for susceptibility distortion of EPI images when one reverse encoding dataset has - been acquired - - - Example - ------- - - >>> nipype_epicorrect = topup_correction('nipype_topup') - >>> nipype_epicorrect.inputs.inputnode.in_file_dir = 'epi.nii' - >>> nipype_epicorrect.inputs.inputnode.in_file_rev = 'epi_rev.nii' - >>> nipype_epicorrect.inputs.inputnode.encoding_direction = ['y', 'y-'] - >>> nipype_epicorrect.inputs.inputnode.ref_num = 0 - >>> nipype_epicorrect.run() # doctest: +SKIP - - - Inputs:: - - inputnode.in_file_dir - EPI volume acquired in 'forward' phase encoding - inputnode.in_file_rev - EPI volume acquired in 'reversed' phase encoding - inputnode.encoding_direction - Direction encoding of in_file_dir - inputnode.ref_num - Identifier of the reference volumes (usually B0 volume) - - - Outputs:: - - outputnode.epi_corrected - - - """ - - warnings.warn(('This workflow is deprecated from v.1.0.0, use ' - 'nipype.workflows.dmri.preprocess.epi.sdc_peb instead'), - DeprecationWarning) - - pipeline = pe.Workflow(name=name) - - inputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'in_file_dir', 'in_file_rev', 'encoding_direction', - 'readout_times', 'ref_num' - ]), - name='inputnode') - - outputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'out_fieldcoef', 'out_movpar', 'out_enc_file', 'epi_corrected' - ]), - name='outputnode') - - b0_dir = pe.Node(fsl.ExtractROI(t_size=1), name='b0_1') - b0_rev = pe.Node(fsl.ExtractROI(t_size=1), name='b0_2') - combin = pe.Node(niu.Merge(2), name='merge') - combin2 = pe.Node(niu.Merge(2), name='merge2') - merged = pe.Node(fsl.Merge(dimension='t'), name='b0_comb') - - topup = pe.Node(fsl.TOPUP(), name='topup') - applytopup = pe.Node(fsl.ApplyTOPUP(in_index=[1, 2]), name='applytopup') - - pipeline.connect( - [(inputnode, b0_dir, [('in_file_dir', 'in_file'), ('ref_num', - 't_min')]), - (inputnode, b0_rev, - [('in_file_rev', - 'in_file'), ('ref_num', 't_min')]), (inputnode, combin2, [ - ('in_file_dir', 'in1'), ('in_file_rev', 'in2') - ]), (b0_dir, combin, [('roi_file', 'in1')]), (b0_rev, combin, [ - ('roi_file', 'in2') - ]), (combin, merged, [('out', 'in_files')]), - (merged, topup, [('merged_file', 'in_file')]), (inputnode, topup, [ - ('encoding_direction', 'encoding_direction'), ('readout_times', - 'readout_times') - ]), (topup, applytopup, [('out_fieldcoef', 'in_topup_fieldcoef'), - ('out_movpar', 'in_topup_movpar'), - ('out_enc_file', 'encoding_file')]), - (combin2, applytopup, [('out', 'in_files')]), (topup, outputnode, [ - ('out_fieldcoef', 'out_fieldcoef'), ('out_movpar', 'out_movpar'), - ('out_enc_file', 'out_enc_file') - ]), (applytopup, outputnode, [('out_corrected', 'epi_corrected')])]) - - return pipeline - - -def create_epidewarp_pipeline(name='epidewarp', fieldmap_registration=False): - """ - Replaces the epidewarp.fsl script (http://www.nmr.mgh.harvard.edu/~greve/fbirn/b0/epidewarp.fsl) - for susceptibility distortion correction of dMRI & fMRI acquired with EPI sequences and the fieldmap - information (Jezzard et al., 1995) using FSL's FUGUE. The registration to the (warped) fieldmap - (strictly following the original script) is available using fieldmap_registration=True. - - - .. warning:: This workflow makes use of ``epidewarp.fsl``, an FSL script deprecated a long - time ago. The use of this workflow is not recommended; use - :func:`nipype.workflows.dmri.preprocess.epi.sdc_fmb` instead.
- - - Example - ------- - - >>> nipype_epicorrect = create_epidewarp_pipeline('nipype_epidewarp', fieldmap_registration=False) - >>> nipype_epicorrect.inputs.inputnode.in_file = 'diffusion.nii' - >>> nipype_epicorrect.inputs.inputnode.fieldmap_mag = 'magnitude.nii' - >>> nipype_epicorrect.inputs.inputnode.fieldmap_pha = 'phase.nii' - >>> nipype_epicorrect.inputs.inputnode.te_diff = 2.46 - >>> nipype_epicorrect.inputs.inputnode.epi_echospacing = 0.77 - >>> nipype_epicorrect.inputs.inputnode.epi_rev_encoding = False - >>> nipype_epicorrect.inputs.inputnode.ref_num = 0 - >>> nipype_epicorrect.inputs.inputnode.pi_accel_factor = 1.0 - >>> nipype_epicorrect.run() # doctest: +SKIP - - Inputs:: - - inputnode.in_file - The volume acquired with EPI sequence - inputnode.fieldmap_mag - The magnitude of the fieldmap - inputnode.fieldmap_pha - The phase difference of the fieldmap - inputnode.te_diff - Time difference between TE in ms. - inputnode.epi_echospacing - The echo spacing (aka dwell time) in the EPI sequence - inputnode.epi_ph_encoding_dir - The phase encoding direction in EPI acquisition (default y) - inputnode.epi_rev_encoding - True if it is acquired with reverse encoding - inputnode.pi_accel_factor - Acceleration factor used for EPI parallel imaging (GRAPPA) - inputnode.vsm_sigma - Sigma value of the gaussian smoothing filter applied to the vsm (voxel shift map) - inputnode.ref_num - The reference volume (B=0 in dMRI or a central frame in fMRI) - - - Outputs:: - - outputnode.epi_corrected - - - Optional arguments:: - - fieldmap_registration - True if registration to fieldmap should be done (default False) - - """ - - warnings.warn(('This workflow reproduces a deprecated FSL script.'), - DeprecationWarning) - - inputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'in_file', 'fieldmap_mag', 'fieldmap_pha', 'te_diff', - 'epi_echospacing', 'epi_ph_encoding_dir', 'epi_rev_encoding', - 'pi_accel_factor', 'vsm_sigma', 'ref_num', 'unwarp_direction' - ]), - name='inputnode') - - pipeline = pe.Workflow(name=name) - - # Keep first frame from magnitude - select_mag = pe.Node( - fsl.utils.ExtractROI(t_size=1, t_min=0), name='select_magnitude') - - # mask_brain - mask_mag = pe.Node(fsl.BET(mask=True), name='mask_magnitude') - mask_mag_dil = pe.Node( - niu.Function( - input_names=['in_file'], - output_names=['out_file'], - function=_dilate_mask), - name='mask_dilate') - - # Compute dwell time - dwell_time = pe.Node( - niu.Function( - input_names=['dwell_time', 'pi_factor', 'is_reverse_encoding'], - output_names=['dwell_time'], - function=_compute_dwelltime), - name='dwell_time') - - # Normalize phase diff to be [-pi, pi) - norm_pha = pe.Node( - niu.Function( - input_names=['in_file'], - output_names=['out_file'], - function=_prepare_phasediff), - name='normalize_phasediff') - # Execute FSL PRELUDE: prelude -p %s -a %s -o %s -f -v -m %s - prelude = pe.Node(fsl.PRELUDE(process3d=True), name='phase_unwrap') - fill_phase = pe.Node( - niu.Function( - input_names=['in_file'], - output_names=['out_file'], - function=_fill_phase), - name='fill_phasediff') - - # to assure that vsm is same dimension as mag. The input only affects the output dimension. - # The content of the input has no effect on the vsm. 
The de-warped mag volume is - # meaningless and will be thrown away - # fugue -i %s -u %s -p %s --dwell=%s --asym=%s --mask=%s --saveshift=%s % - # ( mag_name, magdw_name, ph_name, esp, tediff, mask_name, vsmmag_name) - vsm = pe.Node(fsl.FUGUE(save_shift=True), name='generate_vsm') - vsm_mean = pe.Node( - niu.Function( - input_names=['in_file', 'mask_file', 'in_unwarped'], - output_names=['out_file'], - function=_vsm_remove_mean), - name='vsm_mean_shift') - - # fugue_epi - dwi_split = pe.Node( - niu.Function( - input_names=['in_file'], - output_names=['out_files'], - function=_split_dwi), - name='dwi_split') - # 'fugue -i %s -u %s --loadshift=%s --mask=%s' % ( vol_name, out_vol_name, vsm_name, mask_name ) - dwi_applyxfm = pe.MapNode( - fsl.FUGUE(icorr=True, save_shift=False), - iterfield=['in_file'], - name='dwi_fugue') - # Merge back all volumes - dwi_merge = pe.Node(fsl.utils.Merge(dimension='t'), name='dwi_merge') - - outputnode = pe.Node( - niu.IdentityInterface(fields=['epi_corrected']), name='outputnode') - - pipeline.connect( - [(inputnode, dwell_time, - [('epi_echospacing', 'dwell_time'), ('pi_accel_factor', 'pi_factor'), - ('epi_rev_encoding', - 'is_reverse_encoding')]), (inputnode, select_mag, [('fieldmap_mag', - 'in_file')]), - (inputnode, norm_pha, [('fieldmap_pha', - 'in_file')]), (select_mag, mask_mag, - [('roi_file', 'in_file')]), - (mask_mag, mask_mag_dil, - [('mask_file', 'in_file')]), (select_mag, prelude, [ - ('roi_file', 'magnitude_file') - ]), (norm_pha, prelude, - [('out_file', 'phase_file')]), (mask_mag_dil, prelude, [ - ('out_file', 'mask_file') - ]), (prelude, fill_phase, - [('unwrapped_phase_file', 'in_file')]), (inputnode, vsm, [ - ('fieldmap_mag', 'in_file') - ]), (fill_phase, vsm, [('out_file', 'phasemap_in_file')]), - (inputnode, vsm, [(('te_diff', _ms2sec), 'asym_se_time'), - ('vsm_sigma', 'smooth2d')]), (dwell_time, vsm, [ - (('dwell_time', _ms2sec), 'dwell_time') - ]), (mask_mag_dil, vsm, [('out_file', - 'mask_file')]), - (mask_mag_dil, vsm_mean, - [('out_file', 'mask_file')]), (vsm, vsm_mean, [ - ('unwarped_file', 'in_unwarped'), ('shift_out_file', 'in_file') - ]), (inputnode, dwi_split, - [('in_file', 'in_file')]), (dwi_split, dwi_applyxfm, [ - ('out_files', 'in_file') - ]), (dwi_applyxfm, dwi_merge, - [('unwarped_file', 'in_files')]), (dwi_merge, outputnode, - [('merged_file', - 'epi_corrected')])]) - - if fieldmap_registration: - """ Register magfw to example epi. There are some parameters here that may need to be tweaked. Should probably strip the mag - Pre-condition: forward warp the mag in order to reg with func. What does mask do here? 
- """ - # Select reference volume from EPI (B0 in dMRI and a middle frame in - # fMRI) - select_epi = pe.Node(fsl.utils.ExtractROI(t_size=1), name='select_epi') - - # fugue -i %s -w %s --loadshift=%s --mask=%s % ( mag_name, magfw_name, - # vsmmag_name, mask_name ), log ) # Forward Map - vsm_fwd = pe.Node(fsl.FUGUE(forward_warping=True), name='vsm_fwd') - vsm_reg = pe.Node( - fsl.FLIRT( - bins=256, - cost='corratio', - dof=6, - interp='spline', - searchr_x=[-10, 10], - searchr_y=[-10, 10], - searchr_z=[-10, 10]), - name='vsm_registration') - # 'flirt -in %s -ref %s -out %s -init %s -applyxfm' % ( vsmmag_name, ref_epi, vsmmag_name, magfw_mat_out ) - vsm_applyxfm = pe.Node( - fsl.ApplyXfm(interp='spline'), name='vsm_apply_xfm') - # 'flirt -in %s -ref %s -out %s -init %s -applyxfm' % ( mask_name, ref_epi, mask_name, magfw_mat_out ) - msk_applyxfm = pe.Node( - fsl.ApplyXfm(interp='nearestneighbour'), name='msk_apply_xfm') - - pipeline.connect( - [(inputnode, select_epi, - [('in_file', 'in_file'), - ('ref_num', 't_min')]), (select_epi, vsm_reg, [('roi_file', - 'reference')]), - (vsm, vsm_fwd, [('shift_out_file', 'shift_in_file')]), - (mask_mag_dil, vsm_fwd, - [('out_file', 'mask_file')]), (inputnode, vsm_fwd, [ - ('fieldmap_mag', 'in_file') - ]), (vsm_fwd, vsm_reg, - [('warped_file', 'in_file')]), (vsm_reg, msk_applyxfm, [ - ('out_matrix_file', 'in_matrix_file') - ]), (select_epi, msk_applyxfm, [('roi_file', 'reference')]), - (mask_mag_dil, msk_applyxfm, - [('out_file', 'in_file')]), (vsm_reg, vsm_applyxfm, [ - ('out_matrix_file', 'in_matrix_file') - ]), (select_epi, vsm_applyxfm, - [('roi_file', 'reference')]), (vsm_mean, vsm_applyxfm, - [('out_file', 'in_file')]), - (msk_applyxfm, dwi_applyxfm, - [('out_file', 'mask_file')]), (vsm_applyxfm, dwi_applyxfm, - [('out_file', 'shift_in_file')])]) - else: - pipeline.connect( - [(mask_mag_dil, dwi_applyxfm, [('out_file', 'mask_file')]), - (vsm_mean, dwi_applyxfm, [('out_file', 'shift_in_file')])]) - - return pipeline - - -def _rotate_bvecs(in_bvec, in_matrix): - import os - import numpy as np - - name, fext = os.path.splitext(os.path.basename(in_bvec)) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('./%s_rotated.bvec' % name) - bvecs = np.loadtxt(in_bvec) - new_bvecs = np.zeros( - shape=bvecs.T.shape) # pre-initialise array, 3 col format - - for i, vol_matrix in enumerate(in_matrix[0::]): # start index at 0 - bvec = np.matrix(bvecs[:, i]) - rot = np.matrix(np.loadtxt(vol_matrix)[0:3, 0:3]) - new_bvecs[i] = (np.array( - rot * bvec.T).T)[0] # fill each volume with x,y,z as we go along - np.savetxt(out_file, np.array(new_bvecs).T, fmt=b'%0.15f') - return out_file - - -def _cat_logs(in_files): - import shutil - import os - - name, fext = os.path.splitext(os.path.basename(in_files[0])) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('./%s_ecclog.log' % name) - with open(out_file, 'wb') as totallog: - for i, fname in enumerate(in_files): - totallog.write('\n\npreprocessing %d\n' % i) - with open(fname) as inlog: - for line in inlog: - totallog.write(line) - return out_file - - -def _compute_dwelltime(dwell_time=0.68, - pi_factor=1.0, - is_reverse_encoding=False): - dwell_time *= (1.0 / pi_factor) - - if is_reverse_encoding: - dwell_time *= -1.0 - - return dwell_time - - -def _effective_echospacing(dwell_time, pi_factor=1.0): - dwelltime = 1.0e-3 * dwell_time * (1.0 / pi_factor) - return dwelltime - - -def _prepare_phasediff(in_file): - import nibabel as nb - import os - import numpy as 
np - from nipype.utils import NUMPY_MMAP - img = nb.load(in_file, mmap=NUMPY_MMAP) - max_diff = np.max(img.get_data().reshape(-1)) - min_diff = np.min(img.get_data().reshape(-1)) - A = (2.0 * np.pi) / (max_diff - min_diff) - B = np.pi - (A * max_diff) - diff_norm = img.get_data() * A + B - - name, fext = os.path.splitext(os.path.basename(in_file)) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('./%s_2pi.nii.gz' % name) - nb.save(nb.Nifti1Image(diff_norm, img.affine, img.header), out_file) - return out_file - - -def _dilate_mask(in_file, iterations=4): - import nibabel as nb - import scipy.ndimage as ndimage - import os - from nipype.utils import NUMPY_MMAP - img = nb.load(in_file, mmap=NUMPY_MMAP) - dilated_img = img.__class__( - ndimage.binary_dilation(img.get_data(), iterations=iterations), - img.affine, img.header) - - name, fext = os.path.splitext(os.path.basename(in_file)) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('./%s_dil.nii.gz' % name) - nb.save(dilated_img, out_file) - return out_file - - -def _fill_phase(in_file): - import nibabel as nb - import os - import numpy as np - from nipype.utils import NUMPY_MMAP - img = nb.load(in_file, mmap=NUMPY_MMAP) - dumb_img = nb.Nifti1Image(np.zeros(img.shape), img.affine, img.header) - out_nii = nb.funcs.concat_images((img, dumb_img)) - name, fext = os.path.splitext(os.path.basename(in_file)) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('./%s_fill.nii.gz' % name) - nb.save(out_nii, out_file) - return out_file - - -def _vsm_remove_mean(in_file, mask_file, in_unwarped): - import nibabel as nb - import os - import numpy as np - import numpy.ma as ma - from nipype.utils import NUMPY_MMAP - img = nb.load(in_file, mmap=NUMPY_MMAP) - msk = nb.load(mask_file, mmap=NUMPY_MMAP).get_data() - img_data = img.get_data() - img_data[msk == 0] = 0 - vsmmag_masked = ma.masked_values(img_data.reshape(-1), 0.0) - vsmmag_masked = vsmmag_masked - vsmmag_masked.mean() - masked_img = img.__class__( - vsmmag_masked.reshape(img.shape), img.affine, img.header) - name, fext = os.path.splitext(os.path.basename(in_file)) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('./%s_demeaned.nii.gz' % name) - nb.save(masked_img, out_file) - return out_file - - -def _ms2sec(val): - return val * 1e-3 - - -def _split_dwi(in_file): - import nibabel as nb - import os - from nipype.utils import NUMPY_MMAP - out_files = [] - frames = nb.funcs.four_to_three(nb.load(in_file, mmap=NUMPY_MMAP)) - name, fext = os.path.splitext(os.path.basename(in_file)) - if fext == '.gz': - name, _ = os.path.splitext(name) - for i, frame in enumerate(frames): - out_file = os.path.abspath('./%s_%03d.nii.gz' % (name, i)) - nb.save(frame, out_file) - out_files.append(out_file) - return out_files diff --git a/nipype/workflows/dmri/fsl/tbss.py b/nipype/workflows/dmri/fsl/tbss.py deleted file mode 100644 index 3aef3e734a..0000000000 --- a/nipype/workflows/dmri/fsl/tbss.py +++ /dev/null @@ -1,590 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -import os -from warnings import warn - -from ....pipeline import engine as pe -from ....interfaces import utility as util -from ....interfaces import fsl as fsl - - -def tbss1_op_string(in_files): - import nibabel as nb - from nipype.utils import NUMPY_MMAP - op_strings = [] - for infile in in_files: - img = nb.load(infile, 
mmap=NUMPY_MMAP) - dimtup = tuple(d - 2 for d in img.shape) - dimtup = dimtup[0:3] - op_str = '-min 1 -ero -roi 1 %d 1 %d 1 %d 0 1' % dimtup - op_strings.append(op_str) - return op_strings - - -def create_tbss_1_preproc(name='tbss_1_preproc'): - """Preprocess FA data for TBSS: erodes a little, zeroes the end slices, and - creates masks (for use in FLIRT & FNIRT from FSL). - A pipeline that does the same as the tbss_1_preproc script in FSL - - Example - ------- - - >>> from nipype.workflows.dmri.fsl import tbss - >>> tbss1 = tbss.create_tbss_1_preproc() - >>> tbss1.inputs.inputnode.fa_list = ['s1_FA.nii', 's2_FA.nii', 's3_FA.nii'] - - Inputs:: - - inputnode.fa_list - - Outputs:: - - outputnode.fa_list - outputnode.mask_list - outputnode.slices - - """ - - # Define the inputnode - inputnode = pe.Node( - interface=util.IdentityInterface(fields=["fa_list"]), name="inputnode") - - # Prep the FA images - prepfa = pe.MapNode( - fsl.ImageMaths(suffix="_prep"), - name="prepfa", - iterfield=['in_file', 'op_string']) - - # Slicer - slicer = pe.MapNode( - fsl.Slicer(all_axial=True, image_width=1280), - name='slicer', - iterfield=['in_file']) - - # Create a mask - getmask1 = pe.MapNode( - fsl.ImageMaths(op_string="-bin", suffix="_mask"), - name="getmask1", - iterfield=['in_file']) - getmask2 = pe.MapNode( - fsl.MultiImageMaths(op_string="-dilD -dilD -sub 1 -abs -add %s"), - name="getmask2", - iterfield=['in_file', 'operand_files']) - - # $FSLDIR/bin/fslmaths FA/${f}_FA_mask -dilD -dilD -sub 1 -abs -add FA/${f}_FA_mask FA/${f}_FA_mask -odt char - # Define the tbss1 workflow - tbss1 = pe.Workflow(name=name) - tbss1.connect([ - (inputnode, prepfa, [("fa_list", "in_file")]), - (inputnode, prepfa, [(("fa_list", tbss1_op_string), "op_string")]), - (prepfa, getmask1, [("out_file", "in_file")]), - (getmask1, getmask2, [("out_file", "in_file"), ("out_file", - "operand_files")]), - (prepfa, slicer, [('out_file', 'in_file')]), - ]) - - # Define the outputnode - outputnode = pe.Node( - interface=util.IdentityInterface( - fields=["fa_list", "mask_list", "slices"]), - name="outputnode") - tbss1.connect([(prepfa, outputnode, [("out_file", "fa_list")]), - (getmask2, outputnode, [("out_file", "mask_list")]), - (slicer, outputnode, [('out_file', 'slices')])]) - return tbss1 - - -def create_tbss_2_reg(name="tbss_2_reg"): - """TBSS nonlinear registration: - A pipeline that does the same as the 'tbss_2_reg -t' script in FSL. The '-n' option - is not supported at the moment.
- - Example - ------- - - >>> from nipype.workflows.dmri.fsl import tbss - >>> tbss2 = create_tbss_2_reg(name="tbss2") - >>> tbss2.inputs.inputnode.target = fsl.Info.standard_image("FMRIB58_FA_1mm.nii.gz") # doctest: +SKIP - >>> tbss2.inputs.inputnode.fa_list = ['s1_FA.nii', 's2_FA.nii', 's3_FA.nii'] - >>> tbss2.inputs.inputnode.mask_list = ['s1_mask.nii', 's2_mask.nii', 's3_mask.nii'] - - Inputs:: - - inputnode.fa_list - inputnode.mask_list - inputnode.target - - Outputs:: - - outputnode.field_list - - """ - - # Define the inputnode - inputnode = pe.Node( - interface=util.IdentityInterface( - fields=["fa_list", "mask_list", "target"]), - name="inputnode") - - # Flirt the FA image to the target - flirt = pe.MapNode( - interface=fsl.FLIRT(dof=12), - iterfield=['in_file', 'in_weight'], - name="flirt") - - fnirt = pe.MapNode( - interface=fsl.FNIRT(fieldcoeff_file=True), - iterfield=['in_file', 'inmask_file', 'affine_file'], - name="fnirt") - # Fnirt the FA image to the target - if fsl.no_fsl(): - warn('NO FSL found') - else: - config_file = os.path.join(os.environ["FSLDIR"], - "etc/flirtsch/FA_2_FMRIB58_1mm.cnf") - fnirt.inputs.config_file = config_file - - # Define the registration workflow - tbss2 = pe.Workflow(name=name) - - # Connect up the registration workflow - tbss2.connect([ - (inputnode, flirt, [("fa_list", "in_file"), ("target", "reference"), - ("mask_list", "in_weight")]), - (inputnode, fnirt, [("fa_list", "in_file"), - ("mask_list", "inmask_file"), ("target", - "ref_file")]), - (flirt, fnirt, [("out_matrix_file", "affine_file")]), - ]) - - # Define the outputnode - outputnode = pe.Node( - interface=util.IdentityInterface(fields=['field_list']), - name="outputnode") - - tbss2.connect([(fnirt, outputnode, [('fieldcoeff_file', 'field_list')])]) - return tbss2 - - -def create_tbss_3_postreg(name='tbss_3_postreg', estimate_skeleton=True): - """Post-registration processing: derive mean_FA and mean_FA_skeleton from - mean of all subjects in study. Target is assumed to be FMRIB58_FA_1mm. - A pipeline that does the same as 'tbss_3_postreg -S' script from FSL - Setting 'estimate_skeleton to False will use precomputed FMRIB58_FA-skeleton_1mm - skeleton (same as 'tbss_3_postreg -T'). 
- - Example - ------- - - >>> from nipype.workflows.dmri.fsl import tbss - >>> tbss3 = tbss.create_tbss_3_postreg() - >>> tbss3.inputs.inputnode.fa_list = ['s1_wrapped_FA.nii', 's2_wrapped_FA.nii', 's3_wrapped_FA.nii'] - - Inputs:: - - inputnode.field_list - inputnode.fa_list - - Outputs:: - - outputnode.groupmask - outputnode.skeleton_file - outputnode.meanfa_file - outputnode.mergefa_file - - """ - - # Create the inputnode - inputnode = pe.Node( - interface=util.IdentityInterface(fields=['field_list', 'fa_list']), - name='inputnode') - - # Apply the warpfield to the masked FA image - applywarp = pe.MapNode( - interface=fsl.ApplyWarp(), - iterfield=['in_file', 'field_file'], - name="applywarp") - if fsl.no_fsl(): - warn('NO FSL found') - else: - applywarp.inputs.ref_file = fsl.Info.standard_image( - "FMRIB58_FA_1mm.nii.gz") - - # Merge the FA files into a 4D file - mergefa = pe.Node(fsl.Merge(dimension="t"), name="mergefa") - - # Get a group mask - groupmask = pe.Node( - fsl.ImageMaths( - op_string="-max 0 -Tmin -bin", - out_data_type="char", - suffix="_mask"), - name="groupmask") - - maskgroup = pe.Node( - fsl.ImageMaths(op_string="-mas", suffix="_masked"), name="maskgroup") - - tbss3 = pe.Workflow(name=name) - tbss3.connect([ - (inputnode, applywarp, [("fa_list", "in_file"), ("field_list", - "field_file")]), - (applywarp, mergefa, [("out_file", "in_files")]), - (mergefa, groupmask, [("merged_file", "in_file")]), - (mergefa, maskgroup, [("merged_file", "in_file")]), - (groupmask, maskgroup, [("out_file", "in_file2")]), - ]) - - # Create outputnode - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'groupmask', 'skeleton_file', 'meanfa_file', 'mergefa_file' - ]), - name='outputnode') - - if estimate_skeleton: - # Take the mean over the fourth dimension - meanfa = pe.Node( - fsl.ImageMaths(op_string="-Tmean", suffix="_mean"), name="meanfa") - - # Use the mean FA volume to generate a tract skeleton - makeskeleton = pe.Node( - fsl.TractSkeleton(skeleton_file=True), name="makeskeleton") - tbss3.connect( - [(maskgroup, meanfa, [("out_file", "in_file")]), - (meanfa, makeskeleton, - [("out_file", "in_file")]), (groupmask, outputnode, - [('out_file', 'groupmask')]), - (makeskeleton, outputnode, - [('skeleton_file', 'skeleton_file')]), (meanfa, outputnode, [ - ('out_file', 'meanfa_file') - ]), (maskgroup, outputnode, [('out_file', 'mergefa_file')])]) - else: - # $FSLDIR/bin/fslmaths $FSLDIR/data/standard/FMRIB58_FA_1mm -mas mean_FA_mask mean_FA - maskstd = pe.Node( - fsl.ImageMaths(op_string="-mas", suffix="_masked"), name="maskstd") - if fsl.no_fsl(): - warn('NO FSL found') - else: - maskstd.inputs.in_file = fsl.Info.standard_image( - "FMRIB58_FA_1mm.nii.gz") - - # $FSLDIR/bin/fslmaths mean_FA -bin mean_FA_mask - binmaskstd = pe.Node( - fsl.ImageMaths(op_string="-bin"), name="binmaskstd") - - # $FSLDIR/bin/fslmaths all_FA -mas mean_FA_mask all_FA - maskgroup2 = pe.Node( - fsl.ImageMaths(op_string="-mas", suffix="_masked"), - name="maskgroup2") - - tbss3.connect([(groupmask, maskstd, [("out_file", "in_file2")]), - (maskstd, binmaskstd, [("out_file", "in_file")]), - (maskgroup, maskgroup2, [("out_file", "in_file")]), - (binmaskstd, maskgroup2, [("out_file", "in_file2")])]) - - if fsl.no_fsl(): - warn('NO FSL found') - else: - outputnode.inputs.skeleton_file = fsl.Info.standard_image( - "FMRIB58_FA-skeleton_1mm.nii.gz") - tbss3.connect([(binmaskstd, outputnode, [('out_file', 'groupmask')]), - (maskstd, outputnode, [('out_file', 'meanfa_file')]), - (maskgroup2, outputnode, 
[('out_file', - 'mergefa_file')])]) - return tbss3 - - -def tbss4_op_string(skeleton_thresh): - op_string = "-thr %.1f -bin" % skeleton_thresh - return op_string - - -def create_tbss_4_prestats(name='tbss_4_prestats'): - """Post-registration processing:Creating skeleton mask using a threshold - projecting all FA data onto skeleton. - A pipeline that does the same as tbss_4_prestats script from FSL - - Example - ------- - - >>> from nipype.workflows.dmri.fsl import tbss - >>> tbss4 = tbss.create_tbss_4_prestats(name='tbss4') - >>> tbss4.inputs.inputnode.skeleton_thresh = 0.2 - - Inputs:: - - inputnode.skeleton_thresh - inputnode.groupmask - inputnode.skeleton_file - inputnode.meanfa_file - inputnode.mergefa_file - - Outputs:: - - outputnode.all_FA_skeletonised - outputnode.mean_FA_skeleton_mask - outputnode.distance_map - outputnode.skeleton_file - - """ - # Create inputnode - inputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'groupmask', 'skeleton_file', 'meanfa_file', 'mergefa_file', - 'skeleton_thresh' - ]), - name='inputnode') - - # Mask the skeleton at the threshold - skeletonmask = pe.Node(fsl.ImageMaths(suffix="_mask"), name="skeletonmask") - - # Invert the brainmask then add in the tract skeleton - invertmask = pe.Node( - fsl.ImageMaths(suffix="_inv", op_string="-mul -1 -add 1 -add"), - name="invertmask") - - # Generate a distance map with the tract skeleton - distancemap = pe.Node(fsl.DistanceMap(), name="distancemap") - - # Project the FA values onto the skeleton - projectfa = pe.Node( - fsl.TractSkeleton( - project_data=True, skeleton_file=True, use_cingulum_mask=True), - name="projectfa") - - # Create tbss4 workflow - tbss4 = pe.Workflow(name=name) - tbss4.connect([ - (inputnode, invertmask, [("groupmask", "in_file")]), - (inputnode, skeletonmask, [("skeleton_file", "in_file"), - (('skeleton_thresh', tbss4_op_string), - 'op_string')]), - (inputnode, projectfa, [('skeleton_thresh', 'threshold'), - ("meanfa_file", "in_file"), ("mergefa_file", - "data_file")]), - (skeletonmask, invertmask, [("out_file", "in_file2")]), - (invertmask, distancemap, [("out_file", "in_file")]), - (distancemap, projectfa, [("distance_map", "distance_map")]), - ]) - - # Create the outputnode - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'projectedfa_file', 'skeleton_mask', 'distance_map', - 'skeleton_file' - ]), - name='outputnode') - - tbss4.connect( - [(projectfa, outputnode, [('projected_data', 'projectedfa_file'), - ('skeleton_file', 'skeleton_file')]), - (distancemap, outputnode, [('distance_map', 'distance_map')]), - (skeletonmask, outputnode, [('out_file', 'skeleton_mask')])]) - - return tbss4 - - -def create_tbss_all(name='tbss_all', estimate_skeleton=True): - """Create a pipeline that combines create_tbss_* pipelines - - Example - ------- - - >>> from nipype.workflows.dmri.fsl import tbss - >>> tbss_wf = tbss.create_tbss_all('tbss', estimate_skeleton=True) - >>> tbss_wf.inputs.inputnode.skeleton_thresh = 0.2 - >>> tbss_wf.inputs.inputnode.fa_list = ['s1_wrapped_FA.nii', 's2_wrapped_FA.nii', 's3_wrapped_FA.nii'] - - >>> tbss_wf = tbss.create_tbss_all('tbss', estimate_skeleton=False) - >>> tbss_wf.inputs.inputnode.skeleton_thresh = 0.2 - >>> tbss_wf.inputs.inputnode.fa_list = ['s1_wrapped_FA.nii', 's2_wrapped_FA.nii', 's3_wrapped_FA.nii'] - - - Inputs:: - - inputnode.fa_list - inputnode.skeleton_thresh - - Outputs:: - - outputnode.meanfa_file - outputnode.projectedfa_file - outputnode.skeleton_file - outputnode.skeleton_mask - - """ - - # Define the 
inputnode - inputnode = pe.Node( - interface=util.IdentityInterface( - fields=['fa_list', 'skeleton_thresh']), - name='inputnode') - - tbss1 = create_tbss_1_preproc(name='tbss1') - tbss2 = create_tbss_2_reg(name='tbss2') - if fsl.no_fsl(): - warn('NO FSL found') - else: - tbss2.inputs.inputnode.target = fsl.Info.standard_image( - "FMRIB58_FA_1mm.nii.gz") - tbss3 = create_tbss_3_postreg( - name='tbss3', estimate_skeleton=estimate_skeleton) - tbss4 = create_tbss_4_prestats(name='tbss4') - - tbss_all = pe.Workflow(name=name) - tbss_all.connect( - [(inputnode, tbss1, [('fa_list', 'inputnode.fa_list')]), - (inputnode, tbss4, - [('skeleton_thresh', 'inputnode.skeleton_thresh')]), (tbss1, tbss2, [ - ('outputnode.fa_list', 'inputnode.fa_list'), - ('outputnode.mask_list', 'inputnode.mask_list') - ]), (tbss1, tbss3, [('outputnode.fa_list', 'inputnode.fa_list')]), - (tbss2, tbss3, [('outputnode.field_list', 'inputnode.field_list')]), - (tbss3, tbss4, [('outputnode.groupmask', - 'inputnode.groupmask'), ('outputnode.skeleton_file', - 'inputnode.skeleton_file'), - ('outputnode.meanfa_file', 'inputnode.meanfa_file'), - ('outputnode.mergefa_file', - 'inputnode.mergefa_file')])]) - - # Define the outputnode - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'groupmask', 'skeleton_file3', 'meanfa_file', 'mergefa_file', - 'projectedfa_file', 'skeleton_file4', 'skeleton_mask', - 'distance_map' - ]), - name='outputnode') - outputall_node = pe.Node( - interface=util.IdentityInterface(fields=[ - 'fa_list1', 'mask_list1', 'field_list2', 'groupmask3', - 'skeleton_file3', 'meanfa_file3', 'mergefa_file3', - 'projectedfa_file4', 'skeleton_mask4', 'distance_map4' - ]), - name='outputall_node') - - tbss_all.connect([ - (tbss3, outputnode, [ - ('outputnode.meanfa_file', 'meanfa_file'), - ('outputnode.mergefa_file', 'mergefa_file'), - ('outputnode.groupmask', 'groupmask'), - ('outputnode.skeleton_file', 'skeleton_file3'), - ]), - (tbss4, outputnode, [ - ('outputnode.projectedfa_file', 'projectedfa_file'), - ('outputnode.skeleton_file', 'skeleton_file4'), - ('outputnode.skeleton_mask', 'skeleton_mask'), - ('outputnode.distance_map', 'distance_map'), - ]), - (tbss1, outputall_node, [ - ('outputnode.fa_list', 'fa_list1'), - ('outputnode.mask_list', 'mask_list1'), - ]), - (tbss2, outputall_node, [ - ('outputnode.field_list', 'field_list2'), - ]), - (tbss3, outputall_node, [ - ('outputnode.meanfa_file', 'meanfa_file3'), - ('outputnode.mergefa_file', 'mergefa_file3'), - ('outputnode.groupmask', 'groupmask3'), - ('outputnode.skeleton_file', 'skeleton_file3'), - ]), - (tbss4, outputall_node, [ - ('outputnode.projectedfa_file', 'projectedfa_file4'), - ('outputnode.skeleton_mask', 'skeleton_mask4'), - ('outputnode.distance_map', 'distance_map4'), - ]), - ]) - return tbss_all - - -def create_tbss_non_FA(name='tbss_non_FA'): - """ - A pipeline that implement tbss_non_FA in FSL - - Example - ------- - - >>> from nipype.workflows.dmri.fsl import tbss - >>> tbss_MD = tbss.create_tbss_non_FA() - >>> tbss_MD.inputs.inputnode.file_list = [] - >>> tbss_MD.inputs.inputnode.field_list = [] - >>> tbss_MD.inputs.inputnode.skeleton_thresh = 0.2 - >>> tbss_MD.inputs.inputnode.groupmask = './xxx' - >>> tbss_MD.inputs.inputnode.meanfa_file = './xxx' - >>> tbss_MD.inputs.inputnode.distance_map = [] - >>> tbss_MD.inputs.inputnode.all_FA_file = './xxx' - - Inputs:: - - inputnode.file_list - inputnode.field_list - inputnode.skeleton_thresh - inputnode.groupmask - inputnode.meanfa_file - inputnode.distance_map - inputnode.all_FA_file 
- - Outputs:: - - outputnode.projected_nonFA_file - - """ - - # Define the inputnode - inputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'file_list', 'field_list', 'skeleton_thresh', 'groupmask', - 'meanfa_file', 'distance_map', 'all_FA_file' - ]), - name='inputnode') - - # Apply the warpfield to the non FA image - applywarp = pe.MapNode( - interface=fsl.ApplyWarp(), - iterfield=['in_file', 'field_file'], - name="applywarp") - if fsl.no_fsl(): - warn('NO FSL found') - else: - applywarp.inputs.ref_file = fsl.Info.standard_image( - "FMRIB58_FA_1mm.nii.gz") - # Merge the non FA files into a 4D file - merge = pe.Node(fsl.Merge(dimension="t"), name="merge") - # merged_file="all_FA.nii.gz" - maskgroup = pe.Node( - fsl.ImageMaths(op_string="-mas", suffix="_masked"), name="maskgroup") - projectfa = pe.Node( - fsl.TractSkeleton( - project_data=True, - # projected_data = 'test.nii.gz', - use_cingulum_mask=True), - name="projectfa") - - tbss_non_FA = pe.Workflow(name=name) - tbss_non_FA.connect([ - (inputnode, applywarp, [ - ('file_list', 'in_file'), - ('field_list', 'field_file'), - ]), - (applywarp, merge, [("out_file", "in_files")]), - (merge, maskgroup, [("merged_file", "in_file")]), - (inputnode, maskgroup, [('groupmask', 'in_file2')]), - (maskgroup, projectfa, [('out_file', 'alt_data_file')]), - (inputnode, projectfa, - [('skeleton_thresh', 'threshold'), ("meanfa_file", "in_file"), - ("distance_map", "distance_map"), ("all_FA_file", 'data_file')]), - ]) - - # Define the outputnode - outputnode = pe.Node( - interface=util.IdentityInterface(fields=['projected_nonFA_file']), - name='outputnode') - tbss_non_FA.connect([ - (projectfa, outputnode, [ - ('projected_data', 'projected_nonFA_file'), - ]), - ]) - return tbss_non_FA diff --git a/nipype/workflows/dmri/fsl/tests/__init__.py b/nipype/workflows/dmri/fsl/tests/__init__.py deleted file mode 100644 index 99fb243f19..0000000000 --- a/nipype/workflows/dmri/fsl/tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/dmri/fsl/tests/test_dti.py b/nipype/workflows/dmri/fsl/tests/test_dti.py deleted file mode 100644 index 23cd8f37d8..0000000000 --- a/nipype/workflows/dmri/fsl/tests/test_dti.py +++ /dev/null @@ -1,85 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import unicode_literals, print_function, absolute_import -import os - -import pytest -import nipype.interfaces.fsl as fsl -import nipype.interfaces.utility as util -from nipype.interfaces.fsl import no_fsl, no_fsl_course_data - -import nipype.pipeline.engine as pe -import warnings -from nipype.workflows.dmri.fsl.dti import create_bedpostx_pipeline -from nipype.utils.filemanip import simplify_list - - -@pytest.mark.skipif(no_fsl(), reason="fsl is not installed") -@pytest.mark.skipif(no_fsl_course_data(), reason="fsl data not available") -def test_create_bedpostx_pipeline(tmpdir): - fsl_course_dir = os.path.abspath(os.environ['FSL_COURSE_DATA']) - - mask_file = os.path.join(fsl_course_dir, - "fdt2/subj1.bedpostX/nodif_brain_mask.nii.gz") - bvecs_file = os.path.join(fsl_course_dir, "fdt2/subj1/bvecs") - bvals_file = os.path.join(fsl_course_dir, "fdt2/subj1/bvals") - dwi_file = os.path.join(fsl_course_dir, "fdt2/subj1/data.nii.gz") - z_min = 62 - z_size = 2 - - slice_mask = pe.Node( - fsl.ExtractROI( - x_min=0, x_size=-1, y_min=0, y_size=-1, z_min=z_min, - z_size=z_size), - name="slice_mask") - slice_mask.inputs.in_file 
= mask_file - - slice_dwi = pe.Node( - fsl.ExtractROI( - x_min=0, x_size=-1, y_min=0, y_size=-1, z_min=z_min, - z_size=z_size), - name="slice_dwi") - slice_dwi.inputs.in_file = dwi_file - - nipype_bedpostx = create_bedpostx_pipeline("nipype_bedpostx") - nipype_bedpostx.inputs.inputnode.bvecs = bvecs_file - nipype_bedpostx.inputs.inputnode.bvals = bvals_file - nipype_bedpostx.inputs.xfibres.n_fibres = 1 - nipype_bedpostx.inputs.xfibres.fudge = 1 - nipype_bedpostx.inputs.xfibres.burn_in = 0 - nipype_bedpostx.inputs.xfibres.n_jumps = 1 - nipype_bedpostx.inputs.xfibres.sample_every = 1 - nipype_bedpostx.inputs.xfibres.cnlinear = True - nipype_bedpostx.inputs.xfibres.seed = 0 - nipype_bedpostx.inputs.xfibres.model = 2 - - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - original_bedpostx = pe.Node( - interface=fsl.BEDPOSTX(), name="original_bedpostx") - original_bedpostx.inputs.bvecs = bvecs_file - original_bedpostx.inputs.bvals = bvals_file - original_bedpostx.inputs.environ['FSLPARALLEL'] = "" - original_bedpostx.inputs.n_fibres = 1 - original_bedpostx.inputs.fudge = 1 - original_bedpostx.inputs.burn_in = 0 - original_bedpostx.inputs.n_jumps = 1 - original_bedpostx.inputs.sample_every = 1 - original_bedpostx.inputs.seed = 0 - original_bedpostx.inputs.model = 2 - - test_f1 = pe.Node(util.AssertEqual(), name="mean_f1_test") - - pipeline = pe.Workflow(name="test_bedpostx") - pipeline.base_dir = tmpdir.mkdir("nipype_test_bedpostx_").strpath - - pipeline.connect([ - (slice_mask, original_bedpostx, [("roi_file", "mask")]), - (slice_mask, nipype_bedpostx, [("roi_file", "inputnode.mask")]), - (slice_dwi, original_bedpostx, [("roi_file", "dwi")]), - (slice_dwi, nipype_bedpostx, [("roi_file", "inputnode.dwi")]), - (nipype_bedpostx, test_f1, [(("outputnode.mean_fsamples", - simplify_list), "volume1")]), - (original_bedpostx, test_f1, [("mean_fsamples", "volume2")]), - ]) - - pipeline.run(plugin='Linear') diff --git a/nipype/workflows/dmri/fsl/tests/test_epi.py b/nipype/workflows/dmri/fsl/tests/test_epi.py deleted file mode 100644 index 24400d0747..0000000000 --- a/nipype/workflows/dmri/fsl/tests/test_epi.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -import os - -import pytest -import nipype.workflows.fmri.fsl as fsl_wf -import nipype.interfaces.fsl as fsl -import nipype.interfaces.utility as util -from nipype.interfaces.fsl import no_fsl, no_fsl_course_data - -import nipype.pipeline.engine as pe -import warnings -from nipype.workflows.dmri.fsl.epi import create_eddy_correct_pipeline - - -@pytest.mark.skipif(no_fsl(), reason="fsl is not installed") -@pytest.mark.skipif(no_fsl_course_data(), reason="fsl data not available") -def test_create_eddy_correct_pipeline(tmpdir): - fsl_course_dir = os.path.abspath(os.environ['FSL_COURSE_DATA']) - - dwi_file = os.path.join(fsl_course_dir, "fdt1/subj1/data.nii.gz") - - trim_dwi = pe.Node(fsl.ExtractROI(t_min=0, t_size=2), name="trim_dwi") - trim_dwi.inputs.in_file = dwi_file - - nipype_eddycorrect = create_eddy_correct_pipeline("nipype_eddycorrect") - nipype_eddycorrect.inputs.inputnode.ref_num = 0 - - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - original_eddycorrect = pe.Node( - interface=fsl.EddyCorrect(), name="original_eddycorrect") - original_eddycorrect.inputs.ref_num = 0 - - test = pe.Node(util.AssertEqual(), name="eddy_corrected_dwi_test") - - pipeline = pe.Workflow(name="test_eddycorrect") - pipeline.base_dir = tmpdir.mkdir("nipype_test_eddycorrect_").strpath - - pipeline.connect([ - (trim_dwi, 
original_eddycorrect, [("roi_file", "in_file")]), - (trim_dwi, nipype_eddycorrect, [("roi_file", "inputnode.in_file")]), - (nipype_eddycorrect, test, [("outputnode.eddy_corrected", "volume1")]), - (original_eddycorrect, test, [("eddy_corrected", "volume2")]), - ]) - - pipeline.run(plugin='Linear') diff --git a/nipype/workflows/dmri/fsl/tests/test_tbss.py b/nipype/workflows/dmri/fsl/tests/test_tbss.py deleted file mode 100644 index 34b49a1f86..0000000000 --- a/nipype/workflows/dmri/fsl/tests/test_tbss.py +++ /dev/null @@ -1,211 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -import os -from nipype.interfaces.fsl.base import no_fsl, no_fsl_course_data -import nipype.pipeline.engine as pe -import nipype.interfaces.utility as util -import pytest -import tempfile -import shutil -from subprocess import call -from nipype.workflows.dmri.fsl.tbss import create_tbss_all -import nipype.interfaces.io as nio -from nipype.interfaces import fsl - - -def _tbss_test_helper(estimate_skeleton): - fsl_course_dir = os.path.abspath(os.environ['FSL_COURSE_DATA']) - fsl.FSLCommand.set_default_output_type('NIFTI_GZ') - test_dir = tempfile.mkdtemp(prefix="nipype_test_tbss_") - tbss_orig_dir = os.path.join(test_dir, "tbss_all_original") - os.mkdir(tbss_orig_dir) - old_dir = os.getcwd() - os.chdir(tbss_orig_dir) - - subjects = ['1260', '1549'] - FA_list = [ - os.path.join(fsl_course_dir, 'tbss', subject_id + '.nii.gz') - for subject_id in subjects - ] - for f in FA_list: - shutil.copy(f, os.getcwd()) - - call( - ['tbss_1_preproc'] + - [subject_id + '.nii.gz' for subject_id in subjects], - env=os.environ.update({ - 'FSLOUTPUTTYPE': 'NIFTI_GZ' - })) - tbss1_orig_dir = os.path.join(test_dir, "tbss1_original") - shutil.copytree(tbss_orig_dir, tbss1_orig_dir) - - call( - ['tbss_2_reg', '-T'], - env=os.environ.update({ - 'FSLOUTPUTTYPE': 'NIFTI_GZ' - })) - tbss2_orig_dir = os.path.join(test_dir, "tbss2_original") - shutil.copytree(tbss_orig_dir, tbss2_orig_dir) - - if estimate_skeleton: - call( - ['tbss_3_postreg', '-S'], - env=os.environ.update({ - 'FSLOUTPUTTYPE': 'NIFTI_GZ' - })) - else: - call( - ['tbss_3_postreg', '-T'], - env=os.environ.update({ - 'FSLOUTPUTTYPE': 'NIFTI_GZ' - })) - tbss3_orig_dir = os.path.join(test_dir, "tbss3_original") - shutil.copytree(tbss_orig_dir, tbss3_orig_dir) - - call( - ['tbss_4_prestats', '0.2'], - env=os.environ.update({ - 'FSLOUTPUTTYPE': 'NIFTI_GZ' - })) - tbss4_orig_dir = os.path.join(test_dir, "tbss4_original") - shutil.copytree(tbss_orig_dir, tbss4_orig_dir) - - pipeline = pe.Workflow(name="test_tbss") - pipeline.base_dir = os.path.join(test_dir, "tbss_nipype") - - tbss = create_tbss_all(estimate_skeleton=estimate_skeleton) - tbss.inputs.inputnode.fa_list = FA_list - tbss.inputs.inputnode.skeleton_thresh = 0.2 - - tbss1_original_datasource = pe.Node( - nio.DataGrabber( - outfields=['fa_list', 'mask_list'], sort_filelist=False), - name='tbss1_original_datasource') - tbss1_original_datasource.inputs.base_directory = tbss1_orig_dir - tbss1_original_datasource.inputs.template = 'FA/%s_FA%s.nii.gz' - tbss1_original_datasource.inputs.template_args = dict( - fa_list=[[subjects, '']], mask_list=[[subjects, '_mask']]) - - tbss1_test_fa = pe.MapNode( - util.AssertEqual(), - name="tbss1_fa_test", - iterfield=['volume1', 'volume2']) - tbss1_test_mask = pe.MapNode( - util.AssertEqual(), - name="tbss1_mask_test", - iterfield=['volume1', 'volume2']) - - pipeline.connect(tbss, 
'tbss1.outputnode.fa_list', tbss1_test_fa, - 'volume1') - pipeline.connect(tbss, 'tbss1.outputnode.mask_list', tbss1_test_mask, - 'volume1') - pipeline.connect(tbss1_original_datasource, 'fa_list', tbss1_test_fa, - 'volume2') - pipeline.connect(tbss1_original_datasource, 'mask_list', tbss1_test_mask, - 'volume2') - tbss2_original_datasource = pe.Node( - nio.DataGrabber(outfields=['field_list'], sort_filelist=False), - name='tbss2_original_datasource') - - tbss2_original_datasource.inputs.base_directory = tbss2_orig_dir - tbss2_original_datasource.inputs.template = 'FA/%s_FA%s.nii.gz' - tbss2_original_datasource.inputs.template_args = dict( - field_list=[[subjects, '_to_target_warp']]) - tbss2_test_field = pe.MapNode( - util.AssertEqual(), - name="tbss2_test_field", - iterfield=['volume1', 'volume2']) - - pipeline.connect(tbss, 'tbss2.outputnode.field_list', tbss2_test_field, - 'volume1') - pipeline.connect(tbss2_original_datasource, 'field_list', tbss2_test_field, - 'volume2') - - tbss3_original_datasource = pe.Node( - nio.DataGrabber( - outfields=[ - 'groupmask', 'skeleton_file', 'meanfa_file', 'mergefa_file' - ], - sort_filelist=False), - name='tbss3_original_datasource') - tbss3_original_datasource.inputs.base_directory = tbss3_orig_dir - tbss3_original_datasource.inputs.template = 'stats/%s.nii.gz' - tbss3_original_datasource.inputs.template_args = dict( - groupmask=[['mean_FA_mask']], - skeleton_file=[['mean_FA_skeleton']], - meanfa_file=[['mean_FA']], - mergefa_file=[['all_FA']]) - - tbss3_test_groupmask = pe.Node( - util.AssertEqual(), name="tbss3_test_groupmask") - tbss3_test_skeleton_file = pe.Node( - util.AssertEqual(), name="tbss3_test_skeleton_file") - tbss3_test_meanfa_file = pe.Node( - util.AssertEqual(), name="tbss3_test_meanfa_file") - tbss3_test_mergefa_file = pe.Node( - util.AssertEqual(), name="tbss3_test_mergefa_file") - - pipeline.connect(tbss, 'tbss3.outputnode.groupmask', tbss3_test_groupmask, - 'volume1') - pipeline.connect(tbss3_original_datasource, 'groupmask', - tbss3_test_groupmask, 'volume2') - pipeline.connect(tbss, 'tbss3.outputnode.skeleton_file', - tbss3_test_skeleton_file, 'volume1') - pipeline.connect(tbss3_original_datasource, 'skeleton_file', - tbss3_test_skeleton_file, 'volume2') - pipeline.connect(tbss, 'tbss3.outputnode.meanfa_file', - tbss3_test_meanfa_file, 'volume1') - pipeline.connect(tbss3_original_datasource, 'meanfa_file', - tbss3_test_meanfa_file, 'volume2') - pipeline.connect(tbss, 'tbss3.outputnode.mergefa_file', - tbss3_test_mergefa_file, 'volume1') - pipeline.connect(tbss3_original_datasource, 'mergefa_file', - tbss3_test_mergefa_file, 'volume2') - - tbss4_original_datasource = pe.Node( - nio.DataGrabber( - outfields=['all_FA_skeletonised', 'mean_FA_skeleton_mask'], - sort_filelist=False), - name='tbss4_original_datasource') - tbss4_original_datasource.inputs.base_directory = tbss4_orig_dir - tbss4_original_datasource.inputs.template = 'stats/%s.nii.gz' - tbss4_original_datasource.inputs.template_args = dict( - all_FA_skeletonised=[['all_FA_skeletonised']], - mean_FA_skeleton_mask=[['mean_FA_skeleton_mask']]) - tbss4_test_all_FA_skeletonised = pe.Node( - util.AssertEqual(), name="tbss4_test_all_FA_skeletonised") - tbss4_test_mean_FA_skeleton_mask = pe.Node( - util.AssertEqual(), name="tbss4_test_mean_FA_skeleton_mask") - - pipeline.connect(tbss, 'tbss4.outputnode.projectedfa_file', - tbss4_test_all_FA_skeletonised, 'volume1') - pipeline.connect(tbss4_original_datasource, 'all_FA_skeletonised', - tbss4_test_all_FA_skeletonised, 'volume2') 
- pipeline.connect(tbss, 'tbss4.outputnode.skeleton_mask', - tbss4_test_mean_FA_skeleton_mask, 'volume1') - pipeline.connect(tbss4_original_datasource, 'mean_FA_skeleton_mask', - tbss4_test_mean_FA_skeleton_mask, 'volume2') - - pipeline.run(plugin='Linear') - os.chdir(old_dir) - shutil.rmtree(test_dir) - - -# this test is disabled until we figure out what is wrong with TBSS in 5.0.9 - - -@pytest.mark.skipif(no_fsl(), reason="fsl is not installed") -@pytest.mark.skipif(no_fsl_course_data(), reason="fsl data not available") -def test_disabled_tbss_est_skeleton(): - _tbss_test_helper(True) - - -# this test is disabled until we figure out what is wrong with TBSS in 5.0.9 - - -@pytest.mark.skipif(no_fsl(), reason="fsl is not installed") -@pytest.mark.skipif(no_fsl_course_data(), reason="fsl data not available") -def test_disabled_tbss_est_skeleton_use_precomputed_skeleton(): - _tbss_test_helper(False) diff --git a/nipype/workflows/dmri/fsl/utils.py b/nipype/workflows/dmri/fsl/utils.py deleted file mode 100644 index bd53f5cb55..0000000000 --- a/nipype/workflows/dmri/fsl/utils.py +++ /dev/null @@ -1,847 +0,0 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import zip, next, range, str - -from ....pipeline import engine as pe -from ....interfaces import utility as niu -from ....interfaces import fsl -from ....interfaces import ants - - -def cleanup_edge_pipeline(name='Cleanup'): - """ - Perform some de-spiking filtering to clean up the edge of the fieldmap - (copied from fsl_prepare_fieldmap) - """ - inputnode = pe.Node( - niu.IdentityInterface(fields=['in_file', 'in_mask']), name='inputnode') - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_file']), name='outputnode') - - fugue = pe.Node( - fsl.FUGUE( - save_fmap=True, despike_2dfilter=True, despike_threshold=2.1), - name='Despike') - erode = pe.Node( - fsl.maths.MathsCommand(nan2zeros=True, args='-kernel 2D -ero'), - name='MskErode') - newmsk = pe.Node( - fsl.MultiImageMaths(op_string='-sub %s -thr 0.5 -bin'), name='NewMask') - applymsk = pe.Node(fsl.ApplyMask(nan2zeros=True), name='ApplyMask') - join = pe.Node(niu.Merge(2), name='Merge') - addedge = pe.Node( - fsl.MultiImageMaths(op_string='-mas %s -add %s'), name='AddEdge') - - wf = pe.Workflow(name=name) - wf.connect([(inputnode, fugue, [ - ('in_file', 'fmap_in_file'), ('in_mask', 'mask_file') - ]), (inputnode, erode, [('in_mask', 'in_file')]), (inputnode, newmsk, [ - ('in_mask', 'in_file') - ]), (erode, newmsk, [('out_file', 'operand_files')]), (fugue, applymsk, [ - ('fmap_out_file', 'in_file') - ]), (newmsk, applymsk, - [('out_file', 'mask_file')]), (erode, join, [('out_file', 'in1')]), - (applymsk, join, [('out_file', 'in2')]), (inputnode, addedge, [ - ('in_file', 'in_file') - ]), (join, addedge, [('out', 'operand_files')]), - (addedge, outputnode, [('out_file', 'out_file')])]) - return wf - - -def vsm2warp(name='Shiftmap2Warping'): - """ - Converts a voxel shift map (vsm) to a displacements field (warp). 
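
A minimal usage sketch for vsm2warp, assuming a voxel shift map and an EPI reference are on disk; the file names and the scaling value are hypothetical, and the input names follow the inputnode defined just below (the import path is the one that existed before this patch; afterwards the same code lives under niflow.nipype1.workflows):

from nipype.workflows.dmri.fsl.utils import vsm2warp

wf = vsm2warp()
wf.inputs.inputnode.in_vsm = 'vsm.nii.gz'      # hypothetical voxel shift map
wf.inputs.inputnode.in_ref = 'epi_ref.nii.gz'  # hypothetical EPI reference volume
wf.inputs.inputnode.scaling = 0.00077          # hypothetical scale factor applied to the shift map
wf.inputs.inputnode.enc_dir = 'y'              # shift direction handed to fsl.ConvertWarp
# wf.run()  # would produce outputnode.out_warp, a relative displacement field
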
- """ - inputnode = pe.Node( - niu.IdentityInterface( - fields=['in_vsm', 'in_ref', 'scaling', 'enc_dir']), - name='inputnode') - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_warp']), name='outputnode') - fixhdr = pe.Node( - niu.Function( - input_names=['in_file', 'in_file_hdr'], - output_names=['out_file'], - function=copy_hdr), - name='Fix_hdr') - vsm = pe.Node(fsl.maths.BinaryMaths(operation='mul'), name='ScaleField') - vsm2dfm = pe.Node( - fsl.ConvertWarp(relwarp=True, out_relwarp=True), name='vsm2dfm') - - wf = pe.Workflow(name=name) - wf.connect([(inputnode, fixhdr, [('in_vsm', 'in_file'), ('in_ref', - 'in_file_hdr')]), - (inputnode, vsm, - [('scaling', 'operand_value')]), (fixhdr, vsm, [('out_file', - 'in_file')]), - (vsm, vsm2dfm, - [('out_file', 'shift_in_file')]), (inputnode, vsm2dfm, [ - ('in_ref', 'reference'), ('enc_dir', 'shift_direction') - ]), (vsm2dfm, outputnode, [('out_file', 'out_warp')])]) - return wf - - -def dwi_flirt(name='DWICoregistration', excl_nodiff=False, flirt_param={}): - """ - Generates a workflow for linear registration of dwi volumes - """ - inputnode = pe.Node( - niu.IdentityInterface( - fields=['reference', 'in_file', 'ref_mask', 'in_xfms', 'in_bval']), - name='inputnode') - - initmat = pe.Node( - niu.Function( - input_names=['in_bval', 'in_xfms', 'excl_nodiff'], - output_names=['init_xfms'], - function=_checkinitxfm), - name='InitXforms') - initmat.inputs.excl_nodiff = excl_nodiff - dilate = pe.Node( - fsl.maths.MathsCommand(nan2zeros=True, args='-kernel sphere 5 -dilM'), - name='MskDilate') - split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') - n4 = pe.Node(ants.N4BiasFieldCorrection(dimension=3), name='Bias') - enhb0 = pe.Node( - niu.Function( - input_names=['in_file', 'in_mask', 'clip_limit'], - output_names=['out_file'], - function=enhance), - name='B0Equalize') - enhb0.inputs.clip_limit = 0.015 - enhdw = pe.MapNode( - niu.Function( - input_names=['in_file', 'in_mask'], - output_names=['out_file'], - function=enhance), - name='DWEqualize', - iterfield=['in_file']) - flirt = pe.MapNode( - fsl.FLIRT(**flirt_param), - name='CoRegistration', - iterfield=['in_file', 'in_matrix_file']) - apply_xfms = pe.MapNode( - fsl.ApplyXFM( - apply_xfm=True, - interp='spline', - bgvalue=0), - name='ApplyXFMs', - iterfield=['in_file', 'in_matrix_file'] - ) - thres = pe.MapNode( - fsl.Threshold(thresh=0.0), - iterfield=['in_file'], - name='RemoveNegative') - merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs') - outputnode = pe.Node( - niu.IdentityInterface(fields=['out_file', 'out_xfms']), - name='outputnode') - wf = pe.Workflow(name=name) - wf.connect([ - (inputnode, split, [('in_file', 'in_file')]), - (inputnode, dilate, [('ref_mask', 'in_file')]), - (inputnode, enhb0, [('ref_mask', 'in_mask')]), - (inputnode, initmat, [('in_xfms', 'in_xfms'), - ('in_bval', 'in_bval')]), - (inputnode, n4, [('reference', 'input_image'), - ('ref_mask', 'mask_image')]), - (dilate, flirt, [('out_file', 'ref_weight'), - ('out_file', 'in_weight')]), - (n4, enhb0, [('output_image', 'in_file')]), - (split, enhdw, [('out_files', 'in_file')]), - (split, apply_xfms, [('out_files', 'in_file')]), - (dilate, enhdw, [('out_file', 'in_mask')]), - (enhb0, flirt, [('out_file', 'reference')]), - (enhb0, apply_xfms, [('out_file', 'reference')]), - (enhdw, flirt, [('out_file', 'in_file')]), - (initmat, flirt, [('init_xfms', 'in_matrix_file')]), - (flirt, apply_xfms, [('out_matrix_file', 'in_matrix_file')]), - (apply_xfms, thres, [('out_file', 'in_file')]), - (thres, merge, 
[('out_file', 'in_files')]), - (merge, outputnode, [('merged_file', 'out_file')]), - (flirt, outputnode, [('out_matrix_file', 'out_xfms')]) - ]) - return wf - - -def apply_all_corrections(name='UnwarpArtifacts'): - """ - Combines two lists of linear transforms with the deformation field - map obtained typically after the SDC process. - Additionally, computes the corresponding bspline coefficients and - the map of determinants of the jacobian. - """ - - inputnode = pe.Node( - niu.IdentityInterface(fields=['in_sdc', 'in_hmc', 'in_ecc', 'in_dwi']), - name='inputnode') - outputnode = pe.Node( - niu.IdentityInterface( - fields=['out_file', 'out_warp', 'out_coeff', 'out_jacobian']), - name='outputnode') - warps = pe.MapNode( - fsl.ConvertWarp(relwarp=True), - iterfield=['premat', 'postmat'], - name='ConvertWarp') - - selref = pe.Node(niu.Select(index=[0]), name='Reference') - - split = pe.Node(fsl.Split(dimension='t'), name='SplitDWIs') - unwarp = pe.MapNode( - fsl.ApplyWarp(), - iterfield=['in_file', 'field_file'], - name='UnwarpDWIs') - - coeffs = pe.MapNode( - fsl.WarpUtils(out_format='spline'), - iterfield=['in_file'], - name='CoeffComp') - jacobian = pe.MapNode( - fsl.WarpUtils(write_jacobian=True), - iterfield=['in_file'], - name='JacobianComp') - jacmult = pe.MapNode( - fsl.MultiImageMaths(op_string='-mul %s'), - iterfield=['in_file', 'operand_files'], - name='ModulateDWIs') - - thres = pe.MapNode( - fsl.Threshold(thresh=0.0), - iterfield=['in_file'], - name='RemoveNegative') - merge = pe.Node(fsl.Merge(dimension='t'), name='MergeDWIs') - - wf = pe.Workflow(name=name) - wf.connect([(inputnode, warps, [ - ('in_sdc', 'warp1'), ('in_hmc', 'premat'), ('in_ecc', 'postmat'), - ('in_dwi', 'reference') - ]), (inputnode, split, [('in_dwi', 'in_file')]), (split, selref, [ - ('out_files', 'inlist') - ]), (warps, unwarp, [('out_file', 'field_file')]), (split, unwarp, [ - ('out_files', 'in_file') - ]), (selref, unwarp, [('out', 'ref_file')]), (selref, coeffs, [ - ('out', 'reference') - ]), (warps, coeffs, [('out_file', 'in_file')]), (selref, jacobian, [ - ('out', 'reference') - ]), (coeffs, jacobian, [('out_file', 'in_file')]), (unwarp, jacmult, [ - ('out_file', 'in_file') - ]), (jacobian, jacmult, [('out_jacobian', 'operand_files')]), - (jacmult, thres, [('out_file', 'in_file')]), (thres, merge, [ - ('out_file', 'in_files') - ]), (warps, outputnode, [('out_file', 'out_warp')]), - (coeffs, outputnode, - [('out_file', 'out_coeff')]), (jacobian, outputnode, [ - ('out_jacobian', 'out_jacobian') - ]), (merge, outputnode, [('merged_file', 'out_file')])]) - return wf - - -def extract_bval(in_dwi, in_bval, b=0, out_file=None): - """ - Writes an image containing only the volumes with b-value specified at - input - """ - import numpy as np - import nibabel as nb - import os.path as op - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, ext = op.splitext(op.basename(in_dwi)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath("%s_tsoi%s" % (fname, ext)) - - im = nb.load(in_dwi, mmap=NUMPY_MMAP) - dwidata = im.get_data() - bvals = np.loadtxt(in_bval) - - if b == 'diff': - selection = np.where(bvals != 0) - elif b == 'nodiff': - selection = np.where(bvals == 0) - else: - selection = np.where(bvals == b) - - extdata = np.squeeze(dwidata.take(selection, axis=3)) - hdr = im.header.copy() - hdr.set_data_shape(extdata.shape) - nb.Nifti1Image(extdata, im.affine, hdr).to_filename(out_file) - return out_file - - -def hmc_split(in_file, in_bval, ref_num=0, 
lowbval=5.0): - """ - Selects the reference and moving volumes from a dwi dataset - for the purpose of HMC. - """ - import numpy as np - import nibabel as nb - import os.path as op - from nipype.interfaces.base import isdefined - from nipype.utils import NUMPY_MMAP - - im = nb.load(in_file, mmap=NUMPY_MMAP) - data = im.get_data() - hdr = im.header.copy() - bval = np.loadtxt(in_bval) - - lowbs = np.where(bval <= lowbval)[0] - - volid = lowbs[0] - if (isdefined(ref_num) and (ref_num < len(lowbs))): - volid = ref_num - - if volid == 0: - data = data[..., 1:] - bval = bval[1:] - elif volid == (data.shape[-1] - 1): - data = data[..., :-1] - bval = bval[:-1] - else: - data = np.concatenate( - (data[..., :volid], data[..., (volid + 1):]), axis=3) - bval = np.hstack((bval[:volid], bval[(volid + 1):])) - - out_ref = op.abspath('hmc_ref.nii.gz') - out_mov = op.abspath('hmc_mov.nii.gz') - out_bval = op.abspath('bval_split.txt') - - refdata = data[..., volid] - hdr.set_data_shape(refdata.shape) - nb.Nifti1Image(refdata, im.affine, hdr).to_filename(out_ref) - - hdr.set_data_shape(data.shape) - nb.Nifti1Image(data, im.affine, hdr).to_filename(out_mov) - np.savetxt(out_bval, bval) - return [out_ref, out_mov, out_bval, volid] - - -def remove_comp(in_file, in_bval, volid=0, out_file=None): - """ - Removes the volume ``volid`` from the 4D nifti file - """ - import numpy as np - import nibabel as nb - import os.path as op - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, ext = op.splitext(op.basename(in_file)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath("%s_extract%s" % (fname, ext)) - - im = nb.load(in_file, mmap=NUMPY_MMAP) - data = im.get_data() - hdr = im.header.copy() - bval = np.loadtxt(in_bval) - - if volid == 0: - data = data[..., 1:] - bval = bval[1:] - elif volid == (data.shape[-1] - 1): - data = data[..., :-1] - bval = bval[:-1] - else: - data = np.concatenate( - (data[..., :volid], data[..., (volid + 1):]), axis=3) - bval = np.hstack((bval[:volid], bval[(volid + 1):])) - hdr.set_data_shape(data.shape) - nb.Nifti1Image(data, im.affine, hdr).to_filename(out_file) - - out_bval = op.abspath('bval_extract.txt') - np.savetxt(out_bval, bval) - return out_file, out_bval - - -def insert_mat(inlist, volid=0): - import numpy as np - import os.path as op - idfname = op.abspath('identity.mat') - out = inlist - np.savetxt(idfname, np.eye(4)) - out.insert(volid, idfname) - return out - - -def recompose_dwi(in_dwi, in_bval, in_corrected, out_file=None): - """ - Recompose back the dMRI data accordingly the b-values table after EC - correction - """ - import numpy as np - import nibabel as nb - import os.path as op - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, ext = op.splitext(op.basename(in_dwi)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath("%s_eccorrect%s" % (fname, ext)) - - im = nb.load(in_dwi, mmap=NUMPY_MMAP) - dwidata = im.get_data() - bvals = np.loadtxt(in_bval) - dwis = np.where(bvals != 0)[0].tolist() - - if len(dwis) != len(in_corrected): - raise RuntimeError(('Length of DWIs in b-values table and after' - 'correction should match')) - - for bindex, dwi in zip(dwis, in_corrected): - dwidata[..., bindex] = nb.load(dwi, mmap=NUMPY_MMAP).get_data() - - nb.Nifti1Image(dwidata, im.affine, im.header).to_filename(out_file) - return out_file - - -def recompose_xfm(in_bval, in_xfms): - """ - Insert identity transformation matrices in b0 volumes to build up a 
list - """ - import numpy as np - import os.path as op - - bvals = np.loadtxt(in_bval) - xfms = iter([np.loadtxt(xfm) for xfm in in_xfms]) - out_files = [] - - for i, b in enumerate(bvals): - if b == 0.0: - mat = np.eye(4) - else: - mat = next(xfms) - - out_name = op.abspath('eccor_%04d.mat' % i) - out_files.append(out_name) - np.savetxt(out_name, mat) - - return out_files - - -def time_avg(in_file, index=[0], out_file=None): - """ - Average the input time-series, selecting the indices given in index - - .. warning:: time steps should be already registered (corrected for - head motion artifacts). - - """ - import numpy as np - import nibabel as nb - import os.path as op - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, ext = op.splitext(op.basename(in_file)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath("%s_baseline%s" % (fname, ext)) - - index = np.atleast_1d(index).tolist() - - imgs = np.array(nb.four_to_three(nb.load(in_file, mmap=NUMPY_MMAP)))[index] - if len(index) == 1: - data = imgs[0].get_data().astype(np.float32) - else: - data = np.average( - np.array([im.get_data().astype(np.float32) for im in imgs]), - axis=0) - - hdr = imgs[0].header.copy() - hdr.set_data_shape(data.shape) - hdr.set_xyzt_units('mm') - hdr.set_data_dtype(np.float32) - nb.Nifti1Image(data, imgs[0].affine, hdr).to_filename(out_file) - return out_file - - -def b0_indices(in_bval, max_b=10.0): - """ - Extract the indices of slices in a b-values file with a low b value - """ - import numpy as np - bval = np.loadtxt(in_bval) - return np.argwhere(bval <= max_b).flatten().tolist() - - -def b0_average(in_dwi, in_bval, max_b=10.0, out_file=None): - """ - A function that averages the *b0* volumes from a DWI dataset. - As current dMRI data are being acquired with all b-values > 0.0, - the *lowb* volumes are selected by specifying the parameter max_b. - - .. warning:: *b0* should be already registered (head motion artifact should - be corrected). - - """ - import numpy as np - import nibabel as nb - import os.path as op - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, ext = op.splitext(op.basename(in_dwi)) - if ext == ".gz": - fname, ext2 = op.splitext(fname) - ext = ext2 + ext - out_file = op.abspath("%s_avg_b0%s" % (fname, ext)) - - imgs = np.array(nb.four_to_three(nb.load(in_dwi, mmap=NUMPY_MMAP))) - bval = np.loadtxt(in_bval) - index = np.argwhere(bval <= max_b).flatten().tolist() - - b0s = [im.get_data().astype(np.float32) for im in imgs[index]] - b0 = np.average(np.array(b0s), axis=0) - - hdr = imgs[0].header.copy() - hdr.set_data_shape(b0.shape) - hdr.set_xyzt_units('mm') - hdr.set_data_dtype(np.float32) - nb.Nifti1Image(b0, imgs[0].affine, hdr).to_filename(out_file) - return out_file - - -def rotate_bvecs(in_bvec, in_matrix): - """ - Rotates the input bvec file accordingly with a list of matrices. - - .. note:: the input affine matrix transforms points in the destination - image to their corresponding coordinates in the original image. - Therefore, this matrix should be inverted first, as we want to know - the target position of :math:`\\vec{r}`. 
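
The note above about inverting the transform can be made concrete with a tiny NumPy sketch (the rotation is invented for illustration); it mirrors the invrot.dot(bvec) step in the function body that follows:

import numpy as np

theta = np.pi / 2  # hypothetical 90-degree rotation about z, standing in for a FLIRT matrix
rot = np.array([[np.cos(theta), -np.sin(theta), 0.0],
                [np.sin(theta),  np.cos(theta), 0.0],
                [0.0,            0.0,           1.0]])

bvec = np.array([1.0, 0.0, 0.0])
invrot = np.linalg.inv(rot)         # invert first, as the note explains
newbvec = invrot.dot(bvec)
newbvec /= np.linalg.norm(newbvec)  # keep the vector unit length
print(newbvec)                      # approximately [0., -1., 0.]
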
- - """ - import os - import numpy as np - - name, fext = os.path.splitext(os.path.basename(in_bvec)) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('%s_rotated.bvec' % name) - bvecs = np.loadtxt(in_bvec).T - new_bvecs = [] - - if len(bvecs) != len(in_matrix): - raise RuntimeError(('Number of b-vectors (%d) and rotation ' - 'matrices (%d) should match.') % (len(bvecs), - len(in_matrix))) - - for bvec, mat in zip(bvecs, in_matrix): - if np.all(bvec == 0.0): - new_bvecs.append(bvec) - else: - invrot = np.linalg.inv(np.loadtxt(mat))[:3, :3] - newbvec = invrot.dot(bvec) - new_bvecs.append((newbvec / np.linalg.norm(newbvec))) - - np.savetxt(out_file, np.array(new_bvecs).T, fmt=b'%0.15f') - return out_file - - -def eddy_rotate_bvecs(in_bvec, eddy_params): - """ - Rotates the input bvec file accordingly with a list of parameters sourced - from ``eddy``, as explained `here - `_. - """ - import os - import numpy as np - from math import sin, cos - - name, fext = os.path.splitext(os.path.basename(in_bvec)) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('%s_rotated.bvec' % name) - bvecs = np.loadtxt(in_bvec).T - new_bvecs = [] - - params = np.loadtxt(eddy_params) - - if len(bvecs) != len(params): - raise RuntimeError(('Number of b-vectors and rotation ' - 'matrices should match.')) - - for bvec, row in zip(bvecs, params): - if np.all(bvec == 0.0): - new_bvecs.append(bvec) - else: - ax = row[3] - ay = row[4] - az = row[5] - - Rx = np.array([[1.0, 0.0, 0.0], [0.0, cos(ax), -sin(ax)], - [0.0, sin(ax), cos(ax)]]) - Ry = np.array([[cos(ay), 0.0, sin(ay)], [0.0, 1.0, 0.0], - [-sin(ay), 0.0, cos(ay)]]) - Rz = np.array([[cos(az), -sin(az), 0.0], [sin(az), - cos(az), 0.0], - [0.0, 0.0, 1.0]]) - R = Rx.dot(Ry).dot(Rz) - - invrot = np.linalg.inv(R) - newbvec = invrot.dot(bvec) - new_bvecs.append(newbvec / np.linalg.norm(newbvec)) - - np.savetxt(out_file, np.array(new_bvecs).T, fmt=b'%0.15f') - return out_file - - -def compute_readout(params): - """ - Computes readout time from epi params (see `eddy documentation - `_). - - .. warning:: ``params['echospacing']`` should be in *sec* units. 
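
A worked example of the formula implemented below, with hypothetical acquisition parameters (echo spacing given in seconds, as the warning requires):

params = {'epi_factor': 128, 'acc_factor': 2, 'echospacing': 0.00077}  # hypothetical values
readout = (1.0 / params['acc_factor']) * (params['epi_factor'] - 1) * params['echospacing']
print(round(readout, 4))  # 0.0489 (seconds)

Both corrections only apply when the corresponding factor is greater than 1; the try/except blocks in the function also let either key be absent, in which case that factor stays at 1.0.
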
- - - """ - epi_factor = 1.0 - acc_factor = 1.0 - try: - if params['epi_factor'] > 1: - epi_factor = float(params['epi_factor'] - 1) - except: - pass - try: - if params['acc_factor'] > 1: - acc_factor = 1.0 / params['acc_factor'] - except: - pass - return acc_factor * epi_factor * params['echospacing'] - - -def siemens2rads(in_file, out_file=None): - """ - Converts input phase difference map to rads - """ - import numpy as np - import nibabel as nb - import os.path as op - import math - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_rads.nii.gz' % fname) - - in_file = np.atleast_1d(in_file).tolist() - im = nb.load(in_file[0]) - data = im.get_data().astype(np.float32) - hdr = im.header.copy() - - if len(in_file) == 2: - data = nb.load(in_file[1]).get_data().astype(np.float32) - data - elif (data.ndim == 4) and (data.shape[-1] == 2): - data = np.squeeze(data[..., 1] - data[..., 0]) - hdr.set_data_shape(data.shape[:3]) - - imin = data.min() - imax = data.max() - data = (2.0 * math.pi * (data - imin) / (imax - imin)) - math.pi - hdr.set_data_dtype(np.float32) - hdr.set_xyzt_units('mm') - hdr['datatype'] = 16 - nb.Nifti1Image(data, im.affine, hdr).to_filename(out_file) - return out_file - - -def rads2radsec(in_file, delta_te, out_file=None): - """ - Converts input phase difference map to rads - """ - import numpy as np - import nibabel as nb - import os.path as op - import math - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_radsec.nii.gz' % fname) - - im = nb.load(in_file, mmap=NUMPY_MMAP) - data = im.get_data().astype(np.float32) * (1.0 / delta_te) - nb.Nifti1Image(data, im.affine, im.header).to_filename(out_file) - return out_file - - -def demean_image(in_file, in_mask=None, out_file=None): - """ - Demean image data inside mask - """ - import numpy as np - import nibabel as nb - import os.path as op - import math - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_demean.nii.gz' % fname) - - im = nb.load(in_file, mmap=NUMPY_MMAP) - data = im.get_data().astype(np.float32) - msk = np.ones_like(data) - - if in_mask is not None: - msk = nb.load(in_mask, mmap=NUMPY_MMAP).get_data().astype(np.float32) - msk[msk > 0] = 1.0 - msk[msk < 1] = 0.0 - - mean = np.median(data[msk == 1].reshape(-1)) - data[msk == 1] = data[msk == 1] - mean - nb.Nifti1Image(data, im.affine, im.header).to_filename(out_file) - return out_file - - -def add_empty_vol(in_file, out_file=None): - """ - Adds an empty vol to the phase difference image - """ - import nibabel as nb - import os.path as op - import numpy as np - import math - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_4D.nii.gz' % fname) - - im = nb.load(in_file, mmap=NUMPY_MMAP) - zim = nb.Nifti1Image(np.zeros_like(im.get_data()), im.affine, im.header) - nb.funcs.concat_images([im, zim]).to_filename(out_file) - return out_file - - -def reorient_bvecs(in_dwi, old_dwi, in_bvec): - """ - Checks reorientations of ``in_dwi`` w.r.t. ``old_dwi`` and - reorients the in_bvec table accordingly. 
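
A small self-contained sketch of the idea behind reorient_bvecs, using made-up affines that differ only by a flip of the x axis. It follows the same steps as the function body below: recover the rotation between the two grids, strip residual scaling, and apply the resulting sign/permutation pattern to each b-vector.

import numpy as np

old_aff = np.diag([2.0, 2.0, 2.0, 1.0])   # hypothetical original affine
new_aff = np.diag([-2.0, 2.0, 2.0, 1.0])  # same grid with the x axis flipped

RS = new_aff.dot(np.linalg.inv(old_aff))[:3, :3]  # rotation + scaling between the grids
S = np.ones_like(RS)
idx = np.where((np.abs(RS) != 1) & (RS != 0))     # entries that still carry scaling
S[idx] = RS[idx]
R = RS / S                                        # scaling stripped, signs/permutation kept

bvec = np.array([1.0, 1.0, 0.0]) / np.sqrt(2)
print(R.dot(bvec))  # x component changes sign: [-0.707..., 0.707..., 0.]
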
- """ - import os - import numpy as np - import nibabel as nb - from nipype.utils import NUMPY_MMAP - - name, fext = os.path.splitext(os.path.basename(in_bvec)) - if fext == '.gz': - name, _ = os.path.splitext(name) - out_file = os.path.abspath('%s_reorient.bvec' % name) - bvecs = np.loadtxt(in_bvec).T - new_bvecs = [] - - N = nb.load(in_dwi, mmap=NUMPY_MMAP).affine - O = nb.load(old_dwi, mmap=NUMPY_MMAP).affine - RS = N.dot(np.linalg.inv(O))[:3, :3] - sc_idx = np.where((np.abs(RS) != 1) & (RS != 0)) - S = np.ones_like(RS) - S[sc_idx] = RS[sc_idx] - R = RS / S - - new_bvecs = [R.dot(b) for b in bvecs] - np.savetxt(out_file, np.array(new_bvecs).T, fmt=b'%0.15f') - return out_file - - -def copy_hdr(in_file, in_file_hdr, out_file=None): - import numpy as np - import nibabel as nb - import os.path as op - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_fixhdr.nii.gz' % fname) - - imref = nb.load(in_file_hdr, mmap=NUMPY_MMAP) - hdr = imref.header.copy() - hdr.set_data_dtype(np.float32) - vsm = nb.load(in_file, mmap=NUMPY_MMAP).get_data().astype(np.float32) - hdr.set_data_shape(vsm.shape) - hdr.set_xyzt_units('mm') - nii = nb.Nifti1Image(vsm, imref.affine, hdr) - nii.to_filename(out_file) - return out_file - - -def enhance(in_file, clip_limit=0.010, in_mask=None, out_file=None): - import numpy as np - import nibabel as nb - import os.path as op - from skimage import exposure, img_as_int - from nipype.utils import NUMPY_MMAP - - if out_file is None: - fname, fext = op.splitext(op.basename(in_file)) - if fext == '.gz': - fname, _ = op.splitext(fname) - out_file = op.abspath('./%s_enh.nii.gz' % fname) - - im = nb.load(in_file, mmap=NUMPY_MMAP) - imdata = im.get_data() - imshape = im.shape - - if in_mask is not None: - msk = nb.load(in_mask, mmap=NUMPY_MMAP).get_data() - msk[msk > 0] = 1 - msk[msk < 1] = 0 - imdata = imdata * msk - - immin = imdata.min() - imdata = (imdata - immin).astype(np.uint16) - - adapted = exposure.equalize_adapthist( - imdata.reshape(imshape[0], -1), clip_limit=clip_limit) - - nb.Nifti1Image(adapted.reshape(imshape), im.affine, - im.header).to_filename(out_file) - - return out_file - - -def _checkinitxfm(in_bval, excl_nodiff, in_xfms=None): - from nipype.interfaces.base import isdefined - import numpy as np - import os.path as op - bvals = np.loadtxt(in_bval) - - gen_id = ((in_xfms is None) or (not isdefined(in_xfms)) - or (len(in_xfms) != len(bvals))) - - init_xfms = [] - if excl_nodiff: - dws = np.where(bvals != 0)[0].tolist() - else: - dws = list(range(len(bvals))) - - if gen_id: - for i in dws: - xfm_file = op.abspath('init_%04d.mat' % i) - np.savetxt(xfm_file, np.eye(4)) - init_xfms.append(xfm_file) - else: - init_xfms = [in_xfms[i] for i in dws] - - return init_xfms diff --git a/nipype/workflows/dmri/mrtrix/__init__.py b/nipype/workflows/dmri/mrtrix/__init__.py deleted file mode 100644 index 6851021111..0000000000 --- a/nipype/workflows/dmri/mrtrix/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import -from .diffusion import create_mrtrix_dti_pipeline -from .connectivity_mapping import create_connectivity_pipeline -from .group_connectivity import (create_group_connectivity_pipeline) diff --git a/nipype/workflows/dmri/mrtrix/connectivity_mapping.py b/nipype/workflows/dmri/mrtrix/connectivity_mapping.py deleted file mode 100644 index e47dcb9531..0000000000 --- 
a/nipype/workflows/dmri/mrtrix/connectivity_mapping.py +++ /dev/null @@ -1,639 +0,0 @@ -# -*- coding: utf-8 -*- -import inspect -import os.path as op # system functions - -from ....interfaces import io as nio # Data i/o -from ....interfaces import utility as util # utility -from ....pipeline import engine as pe # pypeline engine -from ....interfaces import fsl as fsl -from ....interfaces import freesurfer as fs # freesurfer -from ....interfaces import mrtrix as mrtrix -from ....interfaces import cmtk as cmtk -from ....interfaces import dipy as dipy -from ....algorithms import misc as misc -from ..fsl.epi import create_eddy_correct_pipeline -from ..connectivity.nx import create_networkx_pipeline, create_cmats_to_csv_pipeline -from ....interfaces.utility import Function -from ...misc.utils import select_aparc_annot - - -def create_connectivity_pipeline(name="connectivity", - parcellation_name='scale500'): - """Creates a pipeline that does the same connectivity processing as in the - :ref:`example_dmri_connectivity_advanced` example script. Given a subject id (and completed Freesurfer reconstruction) - diffusion-weighted image, b-values, and b-vectors, the workflow will return the subject's connectome - as a Connectome File Format (CFF) file for use in Connectome Viewer (http://www.cmtk.org). - - Example - ------- - - >>> from nipype.workflows.dmri.mrtrix.connectivity_mapping import create_connectivity_pipeline - >>> conmapper = create_connectivity_pipeline("nipype_conmap") - >>> conmapper.inputs.inputnode.subjects_dir = '.' - >>> conmapper.inputs.inputnode.subject_id = 'subj1' - >>> conmapper.inputs.inputnode.dwi = 'data.nii.gz' - >>> conmapper.inputs.inputnode.bvecs = 'bvecs' - >>> conmapper.inputs.inputnode.bvals = 'bvals' - >>> conmapper.run() # doctest: +SKIP - - Inputs:: - - inputnode.subject_id - inputnode.subjects_dir - inputnode.dwi - inputnode.bvecs - inputnode.bvals - inputnode.resolution_network_file - - Outputs:: - - outputnode.connectome - outputnode.cmatrix - outputnode.networks - outputnode.fa - outputnode.struct - outputnode.tracts - outputnode.rois - outputnode.odfs - outputnode.filtered_tractography - outputnode.tdi - outputnode.nxstatscff - outputnode.nxcsv - outputnode.cmatrices_csv - outputnode.mean_fiber_length - outputnode.median_fiber_length - outputnode.fiber_length_std - """ - - inputnode_within = pe.Node( - util.IdentityInterface(fields=[ - "subject_id", "dwi", "bvecs", "bvals", "subjects_dir", - "resolution_network_file" - ]), - name="inputnode_within") - - FreeSurferSource = pe.Node( - interface=nio.FreeSurferSource(), name='fssource') - FreeSurferSourceLH = pe.Node( - interface=nio.FreeSurferSource(), name='fssourceLH') - FreeSurferSourceLH.inputs.hemi = 'lh' - - FreeSurferSourceRH = pe.Node( - interface=nio.FreeSurferSource(), name='fssourceRH') - FreeSurferSourceRH.inputs.hemi = 'rh' - """ - Creating the workflow's nodes - ============================= - """ - """ - Conversion nodes - ---------------- - """ - """ - A number of conversion operations are required to obtain NIFTI files from the FreesurferSource for each subject. 
- Nodes are used to convert the following: - * Original structural image to NIFTI - * Pial, white, inflated, and spherical surfaces for both the left and right hemispheres are converted to GIFTI for visualization in ConnectomeViewer - * Parcellated annotation files for the left and right hemispheres are also converted to GIFTI - - """ - - mri_convert_Brain = pe.Node( - interface=fs.MRIConvert(), name='mri_convert_Brain') - mri_convert_Brain.inputs.out_type = 'nii' - mri_convert_ROI_scale500 = mri_convert_Brain.clone( - 'mri_convert_ROI_scale500') - - mris_convertLH = pe.Node(interface=fs.MRIsConvert(), name='mris_convertLH') - mris_convertLH.inputs.out_datatype = 'gii' - mris_convertRH = mris_convertLH.clone('mris_convertRH') - mris_convertRHwhite = mris_convertLH.clone('mris_convertRHwhite') - mris_convertLHwhite = mris_convertLH.clone('mris_convertLHwhite') - mris_convertRHinflated = mris_convertLH.clone('mris_convertRHinflated') - mris_convertLHinflated = mris_convertLH.clone('mris_convertLHinflated') - mris_convertRHsphere = mris_convertLH.clone('mris_convertRHsphere') - mris_convertLHsphere = mris_convertLH.clone('mris_convertLHsphere') - mris_convertLHlabels = mris_convertLH.clone('mris_convertLHlabels') - mris_convertRHlabels = mris_convertLH.clone('mris_convertRHlabels') - """ - Diffusion processing nodes - -------------------------- - - .. seealso:: - - dmri_mrtrix_dti.py - Tutorial that focuses solely on the MRtrix diffusion processing - - http://www.brain.org.au/software/mrtrix/index.html - MRtrix's online documentation - """ - """ - b-values and b-vectors stored in FSL's format are converted into a single encoding file for MRTrix. - """ - - fsl2mrtrix = pe.Node(interface=mrtrix.FSL2MRTrix(), name='fsl2mrtrix') - """ - Distortions induced by eddy currents are corrected prior to fitting the tensors. - The first image is used as a reference for which to warp the others. - """ - - eddycorrect = create_eddy_correct_pipeline(name='eddycorrect') - eddycorrect.inputs.inputnode.ref_num = 1 - """ - Tensors are fitted to each voxel in the diffusion-weighted image and from these three maps are created: - * Major eigenvector in each voxel - * Apparent diffusion coefficient - * Fractional anisotropy - """ - - dwi2tensor = pe.Node(interface=mrtrix.DWI2Tensor(), name='dwi2tensor') - tensor2vector = pe.Node( - interface=mrtrix.Tensor2Vector(), name='tensor2vector') - tensor2adc = pe.Node( - interface=mrtrix.Tensor2ApparentDiffusion(), name='tensor2adc') - tensor2fa = pe.Node( - interface=mrtrix.Tensor2FractionalAnisotropy(), name='tensor2fa') - MRconvert_fa = pe.Node(interface=mrtrix.MRConvert(), name='MRconvert_fa') - MRconvert_fa.inputs.extension = 'nii' - """ - - These nodes are used to create a rough brain mask from the b0 image. - The b0 image is extracted from the original diffusion-weighted image, - put through a simple thresholding routine, and smoothed using a 3x3 median filter. - """ - - MRconvert = pe.Node(interface=mrtrix.MRConvert(), name='MRconvert') - MRconvert.inputs.extract_at_axis = 3 - MRconvert.inputs.extract_at_coordinate = [0] - threshold_b0 = pe.Node(interface=mrtrix.Threshold(), name='threshold_b0') - median3d = pe.Node(interface=mrtrix.MedianFilter3D(), name='median3d') - """ - The brain mask is also used to help identify single-fiber voxels. - This is done by passing the brain mask through two erosion steps, - multiplying the remaining mask with the fractional anisotropy map, and - thresholding the result to obtain some highly anisotropic within-brain voxels. 
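
The single-fibre mask described above is built with MRtrix nodes in this workflow, but the idea can be restated in plain NumPy/SciPy as a sketch (the arrays are random stand-ins; the 0.7 cutoff matches the absolute_threshold_value set on threshold_FA below):

import numpy as np
from scipy import ndimage

rng = np.random.default_rng(0)
brainmask = np.zeros((32, 32, 32), dtype=bool)
brainmask[4:28, 4:28, 4:28] = True                # hypothetical brain mask
fa = rng.random((32, 32, 32), dtype=np.float32)   # hypothetical FA map

eroded = ndimage.binary_erosion(brainmask, iterations=2)  # two erosion passes
single_fiber = (fa * eroded) > 0.7                        # highly anisotropic voxels inside the eroded mask
print(int(single_fiber.sum()))
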
- """ - - erode_mask_firstpass = pe.Node( - interface=mrtrix.Erode(), name='erode_mask_firstpass') - erode_mask_secondpass = pe.Node( - interface=mrtrix.Erode(), name='erode_mask_secondpass') - MRmultiply = pe.Node(interface=mrtrix.MRMultiply(), name='MRmultiply') - MRmult_merge = pe.Node(interface=util.Merge(2), name='MRmultiply_merge') - threshold_FA = pe.Node(interface=mrtrix.Threshold(), name='threshold_FA') - threshold_FA.inputs.absolute_threshold_value = 0.7 - """ - For whole-brain tracking we also require a broad white-matter seed mask. - This is created by generating a white matter mask, given a brainmask, and - thresholding it at a reasonably high level. - """ - - bet = pe.Node(interface=fsl.BET(mask=True), name='bet_b0') - gen_WM_mask = pe.Node( - interface=mrtrix.GenerateWhiteMatterMask(), name='gen_WM_mask') - threshold_wmmask = pe.Node( - interface=mrtrix.Threshold(), name='threshold_wmmask') - threshold_wmmask.inputs.absolute_threshold_value = 0.4 - """ - The spherical deconvolution step depends on the estimate of the response function - in the highly anisotropic voxels we obtained above. - - .. warning:: - - For damaged or pathological brains one should take care to lower the maximum harmonic order of these steps. - - """ - - estimateresponse = pe.Node( - interface=mrtrix.EstimateResponseForSH(), name='estimateresponse') - estimateresponse.inputs.maximum_harmonic_order = 6 - csdeconv = pe.Node( - interface=mrtrix.ConstrainedSphericalDeconvolution(), name='csdeconv') - csdeconv.inputs.maximum_harmonic_order = 6 - """ - Finally, we track probabilistically using the orientation distribution functions obtained earlier. - The tracts are then used to generate a tract-density image, and they are also converted to TrackVis format. - """ - - probCSDstreamtrack = pe.Node( - interface=mrtrix.ProbabilisticSphericallyDeconvolutedStreamlineTrack(), - name='probCSDstreamtrack') - probCSDstreamtrack.inputs.inputmodel = 'SD_PROB' - probCSDstreamtrack.inputs.desired_number_of_tracks = 150000 - tracks2prob = pe.Node(interface=mrtrix.Tracks2Prob(), name='tracks2prob') - tracks2prob.inputs.colour = True - MRconvert_tracks2prob = MRconvert_fa.clone(name='MRconvert_tracks2prob') - tck2trk = pe.Node(interface=mrtrix.MRTrix2TrackVis(), name='tck2trk') - trk2tdi = pe.Node(interface=dipy.TrackDensityMap(), name='trk2tdi') - """ - Structural segmentation nodes - ----------------------------- - """ - """ - The following node identifies the transformation between the diffusion-weighted - image and the structural image. This transformation is then applied to the tracts - so that they are in the same space as the regions of interest. - """ - - coregister = pe.Node(interface=fsl.FLIRT(dof=6), name='coregister') - coregister.inputs.cost = ('normmi') - """ - Parcellation is performed given the aparc+aseg image from Freesurfer. - The CMTK Parcellation step subdivides these regions to return a higher-resolution parcellation scheme. - The parcellation used here is entitled "scale500" and returns 1015 regions. - """ - - parcellate = pe.Node(interface=cmtk.Parcellate(), name="Parcellate") - parcellate.inputs.parcellation_name = parcellation_name - """ - The CreateMatrix interface takes in the remapped aparc+aseg image as well as the label dictionary and fiber tracts - and outputs a number of different files. The most important of which is the connectivity network itself, which is stored - as a 'gpickle' and can be loaded using Python's NetworkX package (see CreateMatrix docstring). 
Also outputted are various - NumPy arrays containing detailed tract information, such as the start and endpoint regions, and statistics on the mean and - standard deviation for the fiber length of each connection. These matrices can be used in the ConnectomeViewer to plot the - specific tracts that connect between user-selected regions. - - Here we choose the Lausanne2008 parcellation scheme, since we are incorporating the CMTK parcellation step. - """ - - creatematrix = pe.Node(interface=cmtk.CreateMatrix(), name="CreateMatrix") - creatematrix.inputs.count_region_intersections = True - """ - Next we define the endpoint of this tutorial, which is the CFFConverter node, as well as a few nodes which use - the Nipype Merge utility. These are useful for passing lists of the files we want packaged in our CFF file. - The inspect.getfile command is used to package this script into the resulting CFF file, so that it is easy to - look back at the processing parameters that were used. - """ - - CFFConverter = pe.Node(interface=cmtk.CFFConverter(), name="CFFConverter") - CFFConverter.inputs.script_files = op.abspath( - inspect.getfile(inspect.currentframe())) - giftiSurfaces = pe.Node(interface=util.Merge(8), name="GiftiSurfaces") - giftiLabels = pe.Node(interface=util.Merge(2), name="GiftiLabels") - niftiVolumes = pe.Node(interface=util.Merge(3), name="NiftiVolumes") - fiberDataArrays = pe.Node(interface=util.Merge(4), name="FiberDataArrays") - """ - We also create a node to calculate several network metrics on our resulting file, and another CFF converter - which will be used to package these networks into a single file. - """ - - networkx = create_networkx_pipeline(name='networkx') - cmats_to_csv = create_cmats_to_csv_pipeline(name='cmats_to_csv') - nfibs_to_csv = pe.Node(interface=misc.Matlab2CSV(), name='nfibs_to_csv') - merge_nfib_csvs = pe.Node( - interface=misc.MergeCSVFiles(), name='merge_nfib_csvs') - merge_nfib_csvs.inputs.extra_column_heading = 'Subject' - merge_nfib_csvs.inputs.out_file = 'fibers.csv' - NxStatsCFFConverter = pe.Node( - interface=cmtk.CFFConverter(), name="NxStatsCFFConverter") - NxStatsCFFConverter.inputs.script_files = op.abspath( - inspect.getfile(inspect.currentframe())) - """ - Connecting the workflow - ======================= - Here we connect our processing pipeline. - """ - """ - Connecting the inputs, FreeSurfer nodes, and conversions - -------------------------------------------------------- - """ - - mapping = pe.Workflow(name='mapping') - """ - First, we connect the input node to the FreeSurfer input nodes. 
- """ - - mapping.connect([(inputnode_within, FreeSurferSource, [("subjects_dir", - "subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSource, [("subject_id", - "subject_id")])]) - - mapping.connect([(inputnode_within, FreeSurferSourceLH, - [("subjects_dir", "subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSourceLH, [("subject_id", - "subject_id")])]) - - mapping.connect([(inputnode_within, FreeSurferSourceRH, - [("subjects_dir", "subjects_dir")])]) - mapping.connect([(inputnode_within, FreeSurferSourceRH, [("subject_id", - "subject_id")])]) - - mapping.connect([(inputnode_within, parcellate, [("subjects_dir", - "subjects_dir")])]) - mapping.connect([(inputnode_within, parcellate, [("subject_id", - "subject_id")])]) - mapping.connect([(parcellate, mri_convert_ROI_scale500, [('roi_file', - 'in_file')])]) - """ - Nifti conversion for subject's stripped brain image from Freesurfer: - """ - - mapping.connect([(FreeSurferSource, mri_convert_Brain, [('brain', - 'in_file')])]) - """ - Surface conversions to GIFTI (pial, white, inflated, and sphere for both hemispheres) - """ - - mapping.connect([(FreeSurferSourceLH, mris_convertLH, [('pial', - 'in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRH, [('pial', - 'in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHwhite, [('white', - 'in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHwhite, [('white', - 'in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHinflated, - [('inflated', 'in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHinflated, - [('inflated', 'in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHsphere, - [('sphere', 'in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHsphere, - [('sphere', 'in_file')])]) - """ - The annotation files are converted using the pial surface as a map via the MRIsConvert interface. - One of the functions defined earlier is used to select the lh.aparc.annot and rh.aparc.annot files - specifically (rather than e.g. rh.aparc.a2009s.annot) from the output list given by the FreeSurferSource. - """ - - mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, - [('pial', 'in_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, - [('pial', 'in_file')])]) - mapping.connect([(FreeSurferSourceLH, mris_convertLHlabels, - [(('annot', select_aparc_annot), 'annot_file')])]) - mapping.connect([(FreeSurferSourceRH, mris_convertRHlabels, - [(('annot', select_aparc_annot), 'annot_file')])]) - """ - Diffusion Processing - -------------------- - Now we connect the tensor computations: - """ - - mapping.connect([(inputnode_within, fsl2mrtrix, [("bvecs", "bvec_file"), - ("bvals", "bval_file")])]) - mapping.connect([(inputnode_within, eddycorrect, [("dwi", - "inputnode.in_file")])]) - mapping.connect([(eddycorrect, dwi2tensor, [("outputnode.eddy_corrected", - "in_file")])]) - mapping.connect([(fsl2mrtrix, dwi2tensor, [("encoding_file", - "encoding_file")])]) - - mapping.connect([ - (dwi2tensor, tensor2vector, [['tensor', 'in_file']]), - (dwi2tensor, tensor2adc, [['tensor', 'in_file']]), - (dwi2tensor, tensor2fa, [['tensor', 'in_file']]), - ]) - mapping.connect([(tensor2fa, MRmult_merge, [("FA", "in1")])]) - mapping.connect([(tensor2fa, MRconvert_fa, [("FA", "in_file")])]) - """ - - This block creates the rough brain mask to be multiplied, mulitplies it with the - fractional anisotropy image, and thresholds it to get the single-fiber voxels. 
- """ - - mapping.connect([(eddycorrect, MRconvert, [("outputnode.eddy_corrected", - "in_file")])]) - mapping.connect([(MRconvert, threshold_b0, [("converted", "in_file")])]) - mapping.connect([(threshold_b0, median3d, [("out_file", "in_file")])]) - mapping.connect([(median3d, erode_mask_firstpass, [("out_file", - "in_file")])]) - mapping.connect([(erode_mask_firstpass, erode_mask_secondpass, - [("out_file", "in_file")])]) - mapping.connect([(erode_mask_secondpass, MRmult_merge, [("out_file", - "in2")])]) - mapping.connect([(MRmult_merge, MRmultiply, [("out", "in_files")])]) - mapping.connect([(MRmultiply, threshold_FA, [("out_file", "in_file")])]) - """ - Here the thresholded white matter mask is created for seeding the tractography. - """ - - mapping.connect([(eddycorrect, bet, [("outputnode.eddy_corrected", - "in_file")])]) - mapping.connect([(eddycorrect, gen_WM_mask, [("outputnode.eddy_corrected", - "in_file")])]) - mapping.connect([(bet, gen_WM_mask, [("mask_file", "binary_mask")])]) - mapping.connect([(fsl2mrtrix, gen_WM_mask, [("encoding_file", - "encoding_file")])]) - mapping.connect([(gen_WM_mask, threshold_wmmask, [("WMprobabilitymap", - "in_file")])]) - """ - Next we estimate the fiber response distribution. - """ - - mapping.connect([(eddycorrect, estimateresponse, - [("outputnode.eddy_corrected", "in_file")])]) - mapping.connect([(fsl2mrtrix, estimateresponse, [("encoding_file", - "encoding_file")])]) - mapping.connect([(threshold_FA, estimateresponse, [("out_file", - "mask_image")])]) - """ - Run constrained spherical deconvolution. - """ - - mapping.connect([(eddycorrect, csdeconv, [("outputnode.eddy_corrected", - "in_file")])]) - mapping.connect([(gen_WM_mask, csdeconv, [("WMprobabilitymap", - "mask_image")])]) - mapping.connect([(estimateresponse, csdeconv, [("response", - "response_file")])]) - mapping.connect([(fsl2mrtrix, csdeconv, [("encoding_file", - "encoding_file")])]) - """ - Connect the tractography and compute the tract density image. - """ - - mapping.connect([(threshold_wmmask, probCSDstreamtrack, [("out_file", - "seed_file")])]) - mapping.connect([(csdeconv, probCSDstreamtrack, - [("spherical_harmonics_image", "in_file")])]) - mapping.connect([(probCSDstreamtrack, tracks2prob, [("tracked", - "in_file")])]) - mapping.connect([(eddycorrect, tracks2prob, [("outputnode.eddy_corrected", - "template_file")])]) - mapping.connect([(tracks2prob, MRconvert_tracks2prob, [("tract_image", - "in_file")])]) - """ - Structural Processing - --------------------- - First, we coregister the diffusion image to the structural image - """ - - mapping.connect([(eddycorrect, coregister, [("outputnode.eddy_corrected", - "in_file")])]) - mapping.connect([(mri_convert_Brain, coregister, [('out_file', - 'reference')])]) - """ - The MRtrix-tracked fibers are converted to TrackVis format (with voxel and data dimensions grabbed from the DWI). - The connectivity matrix is created with the transformed .trk fibers and the parcellation file. 
- """ - - mapping.connect([(eddycorrect, tck2trk, [("outputnode.eddy_corrected", - "image_file")])]) - mapping.connect([(mri_convert_Brain, tck2trk, - [("out_file", "registration_image_file")])]) - mapping.connect([(coregister, tck2trk, [("out_matrix_file", - "matrix_file")])]) - mapping.connect([(probCSDstreamtrack, tck2trk, [("tracked", "in_file")])]) - mapping.connect([(tck2trk, creatematrix, [("out_file", "tract_file")])]) - mapping.connect([(tck2trk, trk2tdi, [("out_file", "in_file")])]) - mapping.connect(inputnode_within, 'resolution_network_file', creatematrix, - 'resolution_network_file') - mapping.connect([(inputnode_within, creatematrix, [("subject_id", - "out_matrix_file")])]) - mapping.connect([(inputnode_within, creatematrix, - [("subject_id", "out_matrix_mat_file")])]) - mapping.connect([(parcellate, creatematrix, [("roi_file", "roi_file")])]) - """ - The merge nodes defined earlier are used here to create lists of the files which are - destined for the CFFConverter. - """ - - mapping.connect([(mris_convertLH, giftiSurfaces, [("converted", "in1")])]) - mapping.connect([(mris_convertRH, giftiSurfaces, [("converted", "in2")])]) - mapping.connect([(mris_convertLHwhite, giftiSurfaces, [("converted", - "in3")])]) - mapping.connect([(mris_convertRHwhite, giftiSurfaces, [("converted", - "in4")])]) - mapping.connect([(mris_convertLHinflated, giftiSurfaces, [("converted", - "in5")])]) - mapping.connect([(mris_convertRHinflated, giftiSurfaces, [("converted", - "in6")])]) - mapping.connect([(mris_convertLHsphere, giftiSurfaces, [("converted", - "in7")])]) - mapping.connect([(mris_convertRHsphere, giftiSurfaces, [("converted", - "in8")])]) - - mapping.connect([(mris_convertLHlabels, giftiLabels, [("converted", - "in1")])]) - mapping.connect([(mris_convertRHlabels, giftiLabels, [("converted", - "in2")])]) - - mapping.connect([(parcellate, niftiVolumes, [("roi_file", "in1")])]) - mapping.connect([(eddycorrect, niftiVolumes, [("outputnode.eddy_corrected", - "in2")])]) - mapping.connect([(mri_convert_Brain, niftiVolumes, [("out_file", "in3")])]) - - mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file", - "in1")])]) - mapping.connect([(creatematrix, fiberDataArrays, [("endpoint_file_mm", - "in2")])]) - mapping.connect([(creatematrix, fiberDataArrays, [("fiber_length_file", - "in3")])]) - mapping.connect([(creatematrix, fiberDataArrays, [("fiber_label_file", - "in4")])]) - """ - This block actually connects the merged lists to the CFF converter. We pass the surfaces - and volumes that are to be included, as well as the tracts and the network itself. The currently - running pipeline (dmri_connectivity_advanced.py) is also scraped and included in the CFF file. This - makes it easy for the user to examine the entire processing pathway used to generate the end - product. - """ - - mapping.connect([(giftiSurfaces, CFFConverter, [("out", - "gifti_surfaces")])]) - mapping.connect([(giftiLabels, CFFConverter, [("out", "gifti_labels")])]) - mapping.connect([(creatematrix, CFFConverter, [("matrix_files", - "gpickled_networks")])]) - mapping.connect([(niftiVolumes, CFFConverter, [("out", "nifti_volumes")])]) - mapping.connect([(fiberDataArrays, CFFConverter, [("out", "data_files")])]) - mapping.connect([(creatematrix, CFFConverter, [("filtered_tractography", - "tract_files")])]) - mapping.connect([(inputnode_within, CFFConverter, [("subject_id", - "title")])]) - """ - The graph theoretical metrics which have been generated are placed into another CFF file. 
- """ - - mapping.connect([(inputnode_within, networkx, - [("subject_id", "inputnode.extra_field")])]) - mapping.connect([(creatematrix, networkx, [("intersection_matrix_file", - "inputnode.network_file")])]) - - mapping.connect([(networkx, NxStatsCFFConverter, - [("outputnode.network_files", "gpickled_networks")])]) - mapping.connect([(giftiSurfaces, NxStatsCFFConverter, - [("out", "gifti_surfaces")])]) - mapping.connect([(giftiLabels, NxStatsCFFConverter, [("out", - "gifti_labels")])]) - mapping.connect([(niftiVolumes, NxStatsCFFConverter, [("out", - "nifti_volumes")])]) - mapping.connect([(fiberDataArrays, NxStatsCFFConverter, [("out", - "data_files")])]) - mapping.connect([(inputnode_within, NxStatsCFFConverter, [("subject_id", - "title")])]) - - mapping.connect([(inputnode_within, cmats_to_csv, - [("subject_id", "inputnode.extra_field")])]) - mapping.connect([(creatematrix, cmats_to_csv, - [("matlab_matrix_files", - "inputnode.matlab_matrix_files")])]) - mapping.connect([(creatematrix, nfibs_to_csv, [("stats_file", - "in_file")])]) - mapping.connect([(nfibs_to_csv, merge_nfib_csvs, [("csv_files", - "in_files")])]) - mapping.connect([(inputnode_within, merge_nfib_csvs, [("subject_id", - "extra_field")])]) - """ - Create a higher-level workflow - -------------------------------------- - Finally, we create another higher-level workflow to connect our mapping workflow with the info and datagrabbing nodes - declared at the beginning. Our tutorial can is now extensible to any arbitrary number of subjects by simply adding - their names to the subject list and their data to the proper folders. - """ - - inputnode = pe.Node( - interface=util.IdentityInterface( - fields=["subject_id", "dwi", "bvecs", "bvals", "subjects_dir"]), - name="inputnode") - - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - "fa", "struct", "tracts", "tracks2prob", "connectome", - "nxstatscff", "nxmatlab", "nxcsv", "fiber_csv", "cmatrices_csv", - "nxmergedcsv", "cmatrix", "networks", "filtered_tracts", "rois", - "odfs", "tdi", "mean_fiber_length", "median_fiber_length", - "fiber_length_std" - ]), - name="outputnode") - - connectivity = pe.Workflow(name="connectivity") - connectivity.base_output_dir = name - connectivity.base_dir = name - - connectivity.connect([ - (inputnode, mapping, - [("dwi", "inputnode_within.dwi"), ("bvals", "inputnode_within.bvals"), - ("bvecs", "inputnode_within.bvecs"), ("subject_id", - "inputnode_within.subject_id"), - ("subjects_dir", "inputnode_within.subjects_dir")]) - ]) - - connectivity.connect( - [(mapping, outputnode, - [("tck2trk.out_file", - "tracts"), ("CFFConverter.connectome_file", - "connectome"), ("NxStatsCFFConverter.connectome_file", - "nxstatscff"), - ("CreateMatrix.matrix_mat_file", - "cmatrix"), ("CreateMatrix.mean_fiber_length_matrix_mat_file", - "mean_fiber_length"), - ("CreateMatrix.median_fiber_length_matrix_mat_file", - "median_fiber_length"), - ("CreateMatrix.fiber_length_std_matrix_mat_file", - "fiber_length_std"), ("CreateMatrix.matrix_files", "networks"), - ("CreateMatrix.filtered_tractographies", - "filtered_tracts"), ("merge_nfib_csvs.csv_file", "fiber_csv"), - ("mri_convert_ROI_scale500.out_file", - "rois"), ("trk2tdi.out_file", - "tdi"), ("csdeconv.spherical_harmonics_image", "odfs"), - ("mri_convert_Brain.out_file", - "struct"), ("MRconvert_fa.converted", - "fa"), ("MRconvert_tracks2prob.converted", - "tracks2prob")])]) - - connectivity.connect([(cmats_to_csv, outputnode, [("outputnode.csv_file", - "cmatrices_csv")])]) - 
connectivity.connect([(networkx, outputnode, [("outputnode.csv_files", - "nxcsv")])]) - return connectivity diff --git a/nipype/workflows/dmri/mrtrix/diffusion.py b/nipype/workflows/dmri/mrtrix/diffusion.py deleted file mode 100644 index a4305bf04e..0000000000 --- a/nipype/workflows/dmri/mrtrix/diffusion.py +++ /dev/null @@ -1,186 +0,0 @@ -# -*- coding: utf-8 -*- -from ....interfaces import utility as util # utility -from ....pipeline import engine as pe # pypeline engine -from ....interfaces import fsl as fsl -from ....interfaces import mrtrix as mrtrix - - -def create_mrtrix_dti_pipeline(name="dtiproc", - tractography_type='probabilistic'): - """Creates a pipeline that does the same diffusion processing as in the - :doc:`../../users/examples/dmri_mrtrix_dti` example script. Given a diffusion-weighted image, - b-values, and b-vectors, the workflow will return the tractography - computed from spherical deconvolution and probabilistic streamline tractography - - Example - ------- - - >>> dti = create_mrtrix_dti_pipeline("mrtrix_dti") - >>> dti.inputs.inputnode.dwi = 'data.nii' - >>> dti.inputs.inputnode.bvals = 'bvals' - >>> dti.inputs.inputnode.bvecs = 'bvecs' - >>> dti.run() # doctest: +SKIP - - Inputs:: - - inputnode.dwi - inputnode.bvecs - inputnode.bvals - - Outputs:: - - outputnode.fa - outputnode.tdi - outputnode.tracts_tck - outputnode.tracts_trk - outputnode.csdeconv - - """ - - inputnode = pe.Node( - interface=util.IdentityInterface(fields=["dwi", "bvecs", "bvals"]), - name="inputnode") - - bet = pe.Node(interface=fsl.BET(), name="bet") - bet.inputs.mask = True - - fsl2mrtrix = pe.Node(interface=mrtrix.FSL2MRTrix(), name='fsl2mrtrix') - fsl2mrtrix.inputs.invert_y = True - - dwi2tensor = pe.Node(interface=mrtrix.DWI2Tensor(), name='dwi2tensor') - - tensor2vector = pe.Node( - interface=mrtrix.Tensor2Vector(), name='tensor2vector') - tensor2adc = pe.Node( - interface=mrtrix.Tensor2ApparentDiffusion(), name='tensor2adc') - tensor2fa = pe.Node( - interface=mrtrix.Tensor2FractionalAnisotropy(), name='tensor2fa') - - erode_mask_firstpass = pe.Node( - interface=mrtrix.Erode(), name='erode_mask_firstpass') - erode_mask_secondpass = pe.Node( - interface=mrtrix.Erode(), name='erode_mask_secondpass') - - threshold_b0 = pe.Node(interface=mrtrix.Threshold(), name='threshold_b0') - - threshold_FA = pe.Node(interface=mrtrix.Threshold(), name='threshold_FA') - threshold_FA.inputs.absolute_threshold_value = 0.7 - - threshold_wmmask = pe.Node( - interface=mrtrix.Threshold(), name='threshold_wmmask') - threshold_wmmask.inputs.absolute_threshold_value = 0.4 - - MRmultiply = pe.Node(interface=mrtrix.MRMultiply(), name='MRmultiply') - MRmult_merge = pe.Node(interface=util.Merge(2), name='MRmultiply_merge') - - median3d = pe.Node(interface=mrtrix.MedianFilter3D(), name='median3D') - - MRconvert = pe.Node(interface=mrtrix.MRConvert(), name='MRconvert') - MRconvert.inputs.extract_at_axis = 3 - MRconvert.inputs.extract_at_coordinate = [0] - - csdeconv = pe.Node( - interface=mrtrix.ConstrainedSphericalDeconvolution(), name='csdeconv') - - gen_WM_mask = pe.Node( - interface=mrtrix.GenerateWhiteMatterMask(), name='gen_WM_mask') - - estimateresponse = pe.Node( - interface=mrtrix.EstimateResponseForSH(), name='estimateresponse') - - if tractography_type == 'probabilistic': - CSDstreamtrack = pe.Node( - interface=mrtrix. 
- ProbabilisticSphericallyDeconvolutedStreamlineTrack(), - name='CSDstreamtrack') - else: - CSDstreamtrack = pe.Node( - interface=mrtrix.SphericallyDeconvolutedStreamlineTrack(), - name='CSDstreamtrack') - CSDstreamtrack.inputs.desired_number_of_tracks = 15000 - - tracks2prob = pe.Node(interface=mrtrix.Tracks2Prob(), name='tracks2prob') - tracks2prob.inputs.colour = True - tck2trk = pe.Node(interface=mrtrix.MRTrix2TrackVis(), name='tck2trk') - - workflow = pe.Workflow(name=name) - workflow.base_output_dir = name - - workflow.connect([(inputnode, fsl2mrtrix, [("bvecs", "bvec_file"), - ("bvals", "bval_file")])]) - workflow.connect([(inputnode, dwi2tensor, [("dwi", "in_file")])]) - workflow.connect([(fsl2mrtrix, dwi2tensor, [("encoding_file", - "encoding_file")])]) - - workflow.connect([ - (dwi2tensor, tensor2vector, [['tensor', 'in_file']]), - (dwi2tensor, tensor2adc, [['tensor', 'in_file']]), - (dwi2tensor, tensor2fa, [['tensor', 'in_file']]), - ]) - - workflow.connect([(inputnode, MRconvert, [("dwi", "in_file")])]) - workflow.connect([(MRconvert, threshold_b0, [("converted", "in_file")])]) - workflow.connect([(threshold_b0, median3d, [("out_file", "in_file")])]) - workflow.connect([(median3d, erode_mask_firstpass, [("out_file", - "in_file")])]) - workflow.connect([(erode_mask_firstpass, erode_mask_secondpass, - [("out_file", "in_file")])]) - - workflow.connect([(tensor2fa, MRmult_merge, [("FA", "in1")])]) - workflow.connect([(erode_mask_secondpass, MRmult_merge, [("out_file", - "in2")])]) - workflow.connect([(MRmult_merge, MRmultiply, [("out", "in_files")])]) - workflow.connect([(MRmultiply, threshold_FA, [("out_file", "in_file")])]) - workflow.connect([(threshold_FA, estimateresponse, [("out_file", - "mask_image")])]) - - workflow.connect([(inputnode, bet, [("dwi", "in_file")])]) - workflow.connect([(inputnode, gen_WM_mask, [("dwi", "in_file")])]) - workflow.connect([(bet, gen_WM_mask, [("mask_file", "binary_mask")])]) - workflow.connect([(fsl2mrtrix, gen_WM_mask, [("encoding_file", - "encoding_file")])]) - - workflow.connect([(inputnode, estimateresponse, [("dwi", "in_file")])]) - workflow.connect([(fsl2mrtrix, estimateresponse, [("encoding_file", - "encoding_file")])]) - - workflow.connect([(inputnode, csdeconv, [("dwi", "in_file")])]) - workflow.connect([(gen_WM_mask, csdeconv, [("WMprobabilitymap", - "mask_image")])]) - workflow.connect([(estimateresponse, csdeconv, [("response", - "response_file")])]) - workflow.connect([(fsl2mrtrix, csdeconv, [("encoding_file", - "encoding_file")])]) - - workflow.connect([(gen_WM_mask, threshold_wmmask, [("WMprobabilitymap", - "in_file")])]) - workflow.connect([(threshold_wmmask, CSDstreamtrack, [("out_file", - "seed_file")])]) - workflow.connect([(csdeconv, CSDstreamtrack, [("spherical_harmonics_image", - "in_file")])]) - - if tractography_type == 'probabilistic': - workflow.connect([(CSDstreamtrack, tracks2prob, [("tracked", - "in_file")])]) - workflow.connect([(inputnode, tracks2prob, [("dwi", - "template_file")])]) - - workflow.connect([(CSDstreamtrack, tck2trk, [("tracked", "in_file")])]) - workflow.connect([(inputnode, tck2trk, [("dwi", "image_file")])]) - - output_fields = ["fa", "tracts_trk", "csdeconv", "tracts_tck"] - if tractography_type == 'probabilistic': - output_fields.append("tdi") - outputnode = pe.Node( - interface=util.IdentityInterface(fields=output_fields), - name="outputnode") - - workflow.connect([(CSDstreamtrack, outputnode, - [("tracked", "tracts_tck")]), (csdeconv, outputnode, [ - ("spherical_harmonics_image", "csdeconv") - 
]), (tensor2fa, outputnode, [("FA", "fa")]), - (tck2trk, outputnode, [("out_file", "tracts_trk")])]) - if tractography_type == 'probabilistic': - workflow.connect([(tracks2prob, outputnode, [("tract_image", "tdi")])]) - - return workflow diff --git a/nipype/workflows/dmri/mrtrix/group_connectivity.py b/nipype/workflows/dmri/mrtrix/group_connectivity.py deleted file mode 100644 index 10d961a18c..0000000000 --- a/nipype/workflows/dmri/mrtrix/group_connectivity.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -import os.path as op -import warnings - -from ....interfaces import io as nio # Data i/o -from ....interfaces import utility as util # utility -from ....interfaces import cmtk as cmtk -from ....algorithms import misc as misc -from ....pipeline import engine as pe # pipeline engine -from ....utils.misc import package_check -from .connectivity_mapping import create_connectivity_pipeline - -try: - package_check('cmp') -except Exception as e: - warnings.warn('cmp not installed') -else: - import cmp - - -def create_group_connectivity_pipeline(group_list, - group_id, - data_dir, - subjects_dir, - output_dir, - template_args_dict=0): - """Creates a pipeline that performs MRtrix structural connectivity processing - on groups of subjects. Given a diffusion-weighted image, and text files containing - the associated b-values and b-vectors, the workflow will return each subjects' connectomes - in a Connectome File Format (CFF) file, for use in Connectome Viewer (http://www.cmtk.org). - - Example - ------- - - >>> import nipype.interfaces.freesurfer as fs - >>> import nipype.workflows.dmri.mrtrix.group_connectivity as groupwork - >>> import cmp # doctest: +SKIP - >>> from nipype.testing import example_data - >>> subjects_dir = '.' - >>> data_dir = '.' - >>> output_dir = '.' - >>> fs.FSCommand.set_default_subjects_dir(subjects_dir) - >>> group_list = {} - >>> group_list['group1'] = ['subj1', 'subj2'] - >>> group_list['group2'] = ['subj3', 'subj4'] - >>> template_args = dict(dwi=[['subject_id', 'dwi']], bvecs=[['subject_id', 'bvecs']], bvals=[['subject_id', 'bvals']]) - >>> group_id = 'group1' - >>> l1pipeline = groupwork.create_group_connectivity_pipeline(group_list, group_id, data_dir, subjects_dir, output_dir, template_args) - >>> parcellation_name = 'scale500' - >>> l1pipeline.inputs.connectivity.mapping.Parcellate.parcellation_name = parcellation_name - >>> cmp_config = cmp.configuration.PipelineConfiguration() # doctest: +SKIP - >>> cmp_config.parcellation_scheme = "Lausanne2008" # doctest: +SKIP - >>> l1pipeline.inputs.connectivity.mapping.inputnode_within.resolution_network_file = cmp_config._get_lausanne_parcellation('Lausanne2008')[parcellation_name]['node_information_graphml'] # doctest: +SKIP - >>> l1pipeline.run() # doctest: +SKIP - - - Inputs:: - - group_list: Dictionary of subject lists, keyed by group name - group_id: String containing the group name - data_dir: Path to the data directory - subjects_dir: Path to the Freesurfer 'subjects' directory - output_dir: Path for the output files - template_args_dict: Dictionary of template arguments for the connectivity pipeline datasource - e.g. 
info = dict(dwi=[['subject_id', 'dwi']], - bvecs=[['subject_id','bvecs']], - bvals=[['subject_id','bvals']]) - """ - group_infosource = pe.Node( - interface=util.IdentityInterface(fields=['group_id']), - name="group_infosource") - group_infosource.inputs.group_id = group_id - subject_list = group_list[group_id] - subj_infosource = pe.Node( - interface=util.IdentityInterface(fields=['subject_id']), - name="subj_infosource") - subj_infosource.iterables = ('subject_id', subject_list) - - if template_args_dict == 0: - info = dict( - dwi=[['subject_id', 'dwi']], - bvecs=[['subject_id', 'bvecs']], - bvals=[['subject_id', 'bvals']]) - else: - info = template_args_dict - - datasource = pe.Node( - interface=nio.DataGrabber( - infields=['subject_id'], outfields=list(info.keys())), - name='datasource') - - datasource.inputs.template = "%s/%s" - datasource.inputs.base_directory = data_dir - datasource.inputs.field_template = dict(dwi='%s/%s.nii') - datasource.inputs.template_args = info - datasource.inputs.sort_filelist = True - """ - Create a connectivity mapping workflow - """ - conmapper = create_connectivity_pipeline("nipype_conmap") - conmapper.inputs.inputnode.subjects_dir = subjects_dir - conmapper.base_dir = op.abspath('conmapper') - - datasink = pe.Node(interface=nio.DataSink(), name="datasink") - datasink.inputs.base_directory = output_dir - datasink.inputs.container = group_id - - l1pipeline = pe.Workflow(name="l1pipeline_" + group_id) - l1pipeline.base_dir = output_dir - l1pipeline.base_output_dir = group_id - l1pipeline.connect([(subj_infosource, conmapper, - [('subject_id', 'inputnode.subject_id')])]) - l1pipeline.connect([(subj_infosource, datasource, [('subject_id', - 'subject_id')])]) - l1pipeline.connect([(datasource, conmapper, [ - ("dwi", "inputnode.dwi"), - ("bvals", "inputnode.bvals"), - ("bvecs", "inputnode.bvecs"), - ])]) - l1pipeline.connect([(conmapper, datasink, [ - ("outputnode.connectome", "@l1output.cff"), - ("outputnode.nxstatscff", "@l1output.nxstatscff"), - ("outputnode.nxmatlab", "@l1output.nxmatlab"), - ("outputnode.nxcsv", "@l1output.nxcsv"), - ("outputnode.fiber_csv", "@l1output.fiber_csv"), - ("outputnode.cmatrices_csv", "@l1output.cmatrices_csv"), - ("outputnode.fa", "@l1output.fa"), - ("outputnode.filtered_tracts", "@l1output.filtered_tracts"), - ("outputnode.cmatrix", "@l1output.cmatrix"), - ("outputnode.rois", "@l1output.rois"), - ("outputnode.odfs", "@l1output.odfs"), - ("outputnode.struct", "@l1output.struct"), - ("outputnode.networks", "@l1output.networks"), - ("outputnode.mean_fiber_length", "@l1output.mean_fiber_length"), - ("outputnode.fiber_length_std", "@l1output.fiber_length_std"), - ])]) - l1pipeline.connect([(group_infosource, datasink, [('group_id', - '@group_id')])]) - return l1pipeline diff --git a/nipype/workflows/fmri/__init__.py b/nipype/workflows/fmri/__init__.py deleted file mode 100644 index 5523a0c412..0000000000 --- a/nipype/workflows/fmri/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from . 
import fsl, spm diff --git a/nipype/workflows/fmri/fsl/__init__.py b/nipype/workflows/fmri/fsl/__init__.py deleted file mode 100644 index 9f6ca78ee8..0000000000 --- a/nipype/workflows/fmri/fsl/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# -*- coding: utf-8 -*- -from .preprocess import (create_susan_smooth, create_fsl_fs_preproc, - create_parallelfeat_preproc, create_featreg_preproc, - create_reg_workflow) -from .estimate import create_modelfit_workflow, create_fixed_effects_flow - -# backwards compatibility -from ...rsfmri.fsl.resting import create_resting_preproc diff --git a/nipype/workflows/fmri/fsl/estimate.py b/nipype/workflows/fmri/fsl/estimate.py deleted file mode 100644 index 638e422bfc..0000000000 --- a/nipype/workflows/fmri/fsl/estimate.py +++ /dev/null @@ -1,298 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -from builtins import range - -from ....interfaces import fsl as fsl # fsl -from ....interfaces import utility as util # utility -from ....pipeline import engine as pe # pypeline engine - -from .... import LooseVersion - - -def create_modelfit_workflow(name='modelfit', f_contrasts=False): - """Create an FSL individual modelfitting workflow - - Example - ------- - - >>> modelfit = create_modelfit_workflow() - >>> modelfit.base_dir = '.' - >>> info = dict() - >>> modelfit.inputs.inputspec.session_info = info - >>> modelfit.inputs.inputspec.interscan_interval = 3. - >>> modelfit.inputs.inputspec.film_threshold = 1000 - >>> modelfit.run() #doctest: +SKIP - - Inputs:: - - inputspec.session_info : info generated by modelgen.SpecifyModel - inputspec.interscan_interval : interscan interval - inputspec.contrasts : list of contrasts - inputspec.film_threshold : image threshold for FILM estimation - inputspec.model_serial_correlations - inputspec.bases - - Outputs:: - - outputspec.copes - outputspec.varcopes - outputspec.dof_file - outputspec.pfiles - outputspec.zfiles - outputspec.parameter_estimates - """ - - version = 0 - if fsl.Info.version() and \ - LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'): - version = 507 - - modelfit = pe.Workflow(name=name) - """ - Create the nodes - """ - - inputspec = pe.Node( - util.IdentityInterface(fields=[ - 'session_info', 'interscan_interval', 'contrasts', - 'film_threshold', 'functional_data', 'bases', - 'model_serial_correlations' - ]), - name='inputspec') - level1design = pe.Node(interface=fsl.Level1Design(), name="level1design") - modelgen = pe.MapNode( - interface=fsl.FEATModel(), - name='modelgen', - iterfield=['fsf_file', 'ev_files']) - if version < 507: - modelestimate = pe.MapNode( - interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5), - name='modelestimate', - iterfield=['design_file', 'in_file']) - else: - if f_contrasts: - iterfield = ['design_file', 'in_file', 'tcon_file', 'fcon_file'] - else: - iterfield = ['design_file', 'in_file', 'tcon_file'] - modelestimate = pe.MapNode( - interface=fsl.FILMGLS(smooth_autocorr=True, mask_size=5), - name='modelestimate', - iterfield=iterfield) - - if version < 507: - if f_contrasts: - iterfield = [ - 'tcon_file', 'fcon_file', 'param_estimates', 'sigmasquareds', - 'corrections', 'dof_file' - ] - else: - iterfield = [ - 'tcon_file', 'param_estimates', 'sigmasquareds', 'corrections', - 'dof_file' - ] - conestimate = pe.MapNode( - interface=fsl.ContrastMgr(), - name='conestimate', - iterfield=[ - 'tcon_file', 'fcon_file', 'param_estimates', 'sigmasquareds', - 'corrections', 'dof_file' - 
]) - - if f_contrasts: - iterfield = ['in1', 'in2'] - else: - iterfield = ['in1'] - merge_contrasts = pe.MapNode( - interface=util.Merge(2), name='merge_contrasts', iterfield=iterfield) - ztopval = pe.MapNode( - interface=fsl.ImageMaths(op_string='-ztop', suffix='_pval'), - nested=True, - name='ztop', - iterfield=['in_file']) - outputspec = pe.Node( - util.IdentityInterface(fields=[ - 'copes', 'varcopes', 'dof_file', 'pfiles', 'zfiles', - 'parameter_estimates' - ]), - name='outputspec') - """ - Setup the connections - """ - - modelfit.connect([ - (inputspec, level1design, - [('interscan_interval', 'interscan_interval'), - ('session_info', 'session_info'), ('contrasts', 'contrasts'), - ('bases', 'bases'), ('model_serial_correlations', - 'model_serial_correlations')]), - (inputspec, modelestimate, [('film_threshold', 'threshold'), - ('functional_data', 'in_file')]), - (level1design, modelgen, [('fsf_files', 'fsf_file'), ('ev_files', - 'ev_files')]), - (modelgen, modelestimate, [('design_file', 'design_file')]), - (merge_contrasts, ztopval, [('out', 'in_file')]), - (ztopval, outputspec, [('out_file', 'pfiles')]), - (merge_contrasts, outputspec, [('out', 'zfiles')]), - (modelestimate, outputspec, [('param_estimates', - 'parameter_estimates'), ('dof_file', - 'dof_file')]), - ]) - if version < 507: - modelfit.connect([ - (modelgen, conestimate, [('con_file', 'tcon_file'), - ('fcon_file', 'fcon_file')]), - (modelestimate, conestimate, - [('param_estimates', 'param_estimates'), ('sigmasquareds', - 'sigmasquareds'), - ('corrections', 'corrections'), ('dof_file', 'dof_file')]), - (conestimate, merge_contrasts, [('zstats', 'in1'), ('zfstats', - 'in2')]), - (conestimate, outputspec, [('copes', 'copes'), ('varcopes', - 'varcopes')]), - ]) - else: - modelfit.connect([ - (modelgen, modelestimate, [('con_file', 'tcon_file'), - ('fcon_file', 'fcon_file')]), - (modelestimate, merge_contrasts, [('zstats', 'in1'), ('zfstats', - 'in2')]), - (modelestimate, outputspec, [('copes', 'copes'), ('varcopes', - 'varcopes')]), - ]) - return modelfit - - -def create_overlay_workflow(name='overlay'): - """Setup overlay workflow - """ - - overlay = pe.Workflow(name='overlay') - overlaystats = pe.MapNode( - interface=fsl.Overlay(), name="overlaystats", iterfield=['stat_image']) - overlaystats.inputs.show_negative_stats = True - overlaystats.inputs.auto_thresh_bg = True - - slicestats = pe.MapNode( - interface=fsl.Slicer(), name="slicestats", iterfield=['in_file']) - slicestats.inputs.all_axial = True - slicestats.inputs.image_width = 512 - - overlay.connect(overlaystats, 'out_file', slicestats, 'in_file') - return overlay - - -def create_fixed_effects_flow(name='fixedfx'): - """Create a fixed-effects workflow - - This workflow is used to combine registered copes and varcopes across runs - for an individual subject - - Example - ------- - - >>> fixedfx = create_fixed_effects_flow() - >>> fixedfx.base_dir = '.' 
- >>> fixedfx.inputs.inputspec.copes = [['cope1run1.nii.gz', 'cope1run2.nii.gz'], ['cope2run1.nii.gz', 'cope2run2.nii.gz']] # per contrast - >>> fixedfx.inputs.inputspec.varcopes = [['varcope1run1.nii.gz', 'varcope1run2.nii.gz'], ['varcope2run1.nii.gz', 'varcope2run2.nii.gz']] # per contrast - >>> fixedfx.inputs.inputspec.dof_files = ['dofrun1', 'dofrun2'] # per run - >>> fixedfx.run() #doctest: +SKIP - - Inputs:: - - inputspec.copes : list of list of cope files (one list per contrast) - inputspec.varcopes : list of list of varcope files (one list per - contrast) - inputspec.dof_files : degrees of freedom files for each run - - Outputs:: - - outputspec.res4d : 4d residual time series - outputspec.copes : contrast parameter estimates - outputspec.varcopes : variance of contrast parameter estimates - outputspec.zstats : z statistics of contrasts - outputspec.tstats : t statistics of contrasts - """ - - fixed_fx = pe.Workflow(name=name) - - inputspec = pe.Node( - util.IdentityInterface(fields=['copes', 'varcopes', 'dof_files']), - name='inputspec') - """ - Use :class:`nipype.interfaces.fsl.Merge` to merge the copes and - varcopes for each condition - """ - - copemerge = pe.MapNode( - interface=fsl.Merge(dimension='t'), - iterfield=['in_files'], - name="copemerge") - - varcopemerge = pe.MapNode( - interface=fsl.Merge(dimension='t'), - iterfield=['in_files'], - name="varcopemerge") - """ - Use :class:`nipype.interfaces.fsl.L2Model` to generate subject and condition - specific level 2 model design files - """ - - level2model = pe.Node(interface=fsl.L2Model(), name='l2model') - """ - Use :class:`nipype.interfaces.fsl.FLAMEO` to estimate a second level model - """ - - flameo = pe.MapNode( - interface=fsl.FLAMEO(run_mode='fe'), - name="flameo", - iterfield=['cope_file', 'var_cope_file']) - - def get_dofvolumes(dof_files, cope_files): - import os - import nibabel as nb - import numpy as np - img = nb.load(cope_files[0]) - if len(img.shape) > 3: - out_data = np.zeros(img.shape) - else: - out_data = np.zeros(list(img.shape) + [1]) - for i in range(out_data.shape[-1]): - dof = np.loadtxt(dof_files[i]) - out_data[:, :, :, i] = dof - filename = os.path.join(os.getcwd(), 'dof_file.nii.gz') - newimg = nb.Nifti1Image(out_data, None, img.header) - newimg.to_filename(filename) - return filename - - gendof = pe.Node( - util.Function( - input_names=['dof_files', 'cope_files'], - output_names=['dof_volume'], - function=get_dofvolumes), - name='gendofvolume') - - outputspec = pe.Node( - util.IdentityInterface( - fields=['res4d', 'copes', 'varcopes', 'zstats', 'tstats']), - name='outputspec') - - fixed_fx.connect( - [(inputspec, copemerge, - [('copes', 'in_files')]), (inputspec, varcopemerge, [('varcopes', - 'in_files')]), - (inputspec, gendof, [('dof_files', 'dof_files')]), (copemerge, gendof, - [('merged_file', - 'cope_files')]), - (copemerge, flameo, - [('merged_file', 'cope_file')]), (varcopemerge, flameo, [ - ('merged_file', 'var_cope_file') - ]), (level2model, flameo, - [('design_mat', 'design_file'), ('design_con', 't_con_file'), - ('design_grp', 'cov_split_file')]), (gendof, flameo, - [('dof_volume', - 'dof_var_cope_file')]), - (flameo, outputspec, - [('res4d', 'res4d'), ('copes', 'copes'), ('var_copes', 'varcopes'), - ('zstats', 'zstats'), ('tstats', 'tstats')])]) - return fixed_fx diff --git a/nipype/workflows/fmri/fsl/preprocess.py b/nipype/workflows/fmri/fsl/preprocess.py deleted file mode 100644 index ac235bdba1..0000000000 --- a/nipype/workflows/fmri/fsl/preprocess.py +++ /dev/null @@ -1,1293 +0,0 @@ 
-# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import division - -import os -from ....interfaces import fsl as fsl # fsl -from ....interfaces import utility as util # utility -from ....pipeline import engine as pe # pypeline engine -from ....interfaces import freesurfer as fs # freesurfer -from ....interfaces import spm as spm -from ...smri.freesurfer.utils import create_getmask_flow -from .... import LooseVersion - - -def getthreshop(thresh): - return ['-thr %.10f -Tmin -bin' % (0.1 * val[1]) for val in thresh] - - -def pickrun(files, whichrun): - """pick file from list of files""" - - filemap = {'first': 0, 'last': -1, 'middle': len(files) // 2} - - if isinstance(files, list): - - # whichrun is given as integer - if isinstance(whichrun, int): - return files[whichrun] - # whichrun is given as string - elif isinstance(whichrun, str): - if whichrun not in filemap.keys(): - raise (KeyError, 'Sorry, whichrun must be either integer index' - 'or string in form of "first", "last" or "middle') - else: - return files[filemap[whichrun]] - else: - # in case single file name is given - return files - - -def pickfirst(files): - if isinstance(files, list): - return files[0] - else: - return files - - -def pickmiddle(files): - from nibabel import load - import numpy as np - from nipype.utils import NUMPY_MMAP - middlevol = [] - for f in files: - middlevol.append(int(np.ceil(load(f, mmap=NUMPY_MMAP).shape[3] / 2))) - return middlevol - - -def pickvol(filenames, fileidx, which): - from nibabel import load - import numpy as np - from nipype.utils import NUMPY_MMAP - if which.lower() == 'first': - idx = 0 - elif which.lower() == 'middle': - idx = int( - np.ceil(load(filenames[fileidx], mmap=NUMPY_MMAP).shape[3] / 2)) - elif which.lower() == 'last': - idx = load(filenames[fileidx]).shape[3] - 1 - else: - raise Exception('unknown value for volume selection : %s' % which) - return idx - - -def getbtthresh(medianvals): - return [0.75 * val for val in medianvals] - - -def chooseindex(fwhm): - if fwhm < 1: - return [0] - else: - return [1] - - -def getmeanscale(medianvals): - return ['-mul %.10f' % (10000. 
/ val) for val in medianvals] - - -def getusans(x): - return [[tuple([val[0], 0.75 * val[1]])] for val in x] - - -tolist = lambda x: [x] -highpass_operand = lambda x: '-bptf %.10f -1' % x - - -def create_parallelfeat_preproc(name='featpreproc', highpass=True): - """Preprocess each run with FSL independently of the others - - Parameters - ---------- - - :: - - name : name of workflow (default: featpreproc) - highpass : boolean (default: True) - - Inputs:: - - inputspec.func : functional runs (filename or list of filenames) - inputspec.fwhm : fwhm for smoothing with SUSAN - inputspec.highpass : HWHM in TRs (if created with highpass=True) - - Outputs:: - - outputspec.reference : volume to which runs are realigned - outputspec.motion_parameters : motion correction parameters - outputspec.realigned_files : motion corrected files - outputspec.motion_plots : plots of motion correction parameters - outputspec.mask : mask file used to mask the brain - outputspec.smoothed_files : smoothed functional data - outputspec.highpassed_files : highpassed functional data (if highpass=True) - outputspec.mean : mean file - - Example - ------- - - >>> preproc = create_parallelfeat_preproc() - >>> preproc.inputs.inputspec.func = ['f3.nii', 'f5.nii'] - >>> preproc.inputs.inputspec.fwhm = 5 - >>> preproc.inputs.inputspec.highpass = 128./(2*2.5) - >>> preproc.base_dir = '/tmp' - >>> preproc.run() # doctest: +SKIP - - >>> preproc = create_parallelfeat_preproc(highpass=False) - >>> preproc.inputs.inputspec.func = 'f3.nii' - >>> preproc.inputs.inputspec.fwhm = 5 - >>> preproc.base_dir = '/tmp' - >>> preproc.run() # doctest: +SKIP - """ - version = 0 - if fsl.Info.version() and \ - LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'): - version = 507 - - featpreproc = pe.Workflow(name=name) - """ - Set up a node to define all inputs required for the preprocessing workflow - - """ - - if highpass: - inputnode = pe.Node( - interface=util.IdentityInterface( - fields=['func', 'fwhm', 'highpass']), - name='inputspec') - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'reference', 'motion_parameters', 'realigned_files', - 'motion_plots', 'mask', 'smoothed_files', 'highpassed_files', - 'mean' - ]), - name='outputspec') - else: - inputnode = pe.Node( - interface=util.IdentityInterface(fields=['func', 'fwhm']), - name='inputspec') - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'reference', 'motion_parameters', 'realigned_files', - 'motion_plots', 'mask', 'smoothed_files', 'mean' - ]), - name='outputspec') - """ - Set up a node to define outputs for the preprocessing workflow - - """ - """ - Convert functional images to float representation. Since there can - be more than one functional run we use a MapNode to convert each - run. 
- """ - - img2float = pe.MapNode( - interface=fsl.ImageMaths( - out_data_type='float', op_string='', suffix='_dtype'), - iterfield=['in_file'], - name='img2float') - featpreproc.connect(inputnode, 'func', img2float, 'in_file') - """ - Extract the first volume of the first run as the reference - """ - - extract_ref = pe.MapNode( - interface=fsl.ExtractROI(t_size=1), - iterfield=['in_file', 't_min'], - name='extractref') - - featpreproc.connect(img2float, 'out_file', extract_ref, 'in_file') - featpreproc.connect(img2float, ('out_file', pickmiddle), extract_ref, - 't_min') - featpreproc.connect(extract_ref, 'roi_file', outputnode, 'reference') - """ - Realign the functional runs to the reference (1st volume of first run) - """ - - motion_correct = pe.MapNode( - interface=fsl.MCFLIRT(save_mats=True, save_plots=True), - name='realign', - iterfield=['in_file', 'ref_file']) - featpreproc.connect(img2float, 'out_file', motion_correct, 'in_file') - featpreproc.connect(extract_ref, 'roi_file', motion_correct, 'ref_file') - featpreproc.connect(motion_correct, 'par_file', outputnode, - 'motion_parameters') - featpreproc.connect(motion_correct, 'out_file', outputnode, - 'realigned_files') - """ - Plot the estimated motion parameters - """ - - plot_motion = pe.MapNode( - interface=fsl.PlotMotionParams(in_source='fsl'), - name='plot_motion', - iterfield=['in_file']) - plot_motion.iterables = ('plot_type', ['rotations', 'translations']) - featpreproc.connect(motion_correct, 'par_file', plot_motion, 'in_file') - featpreproc.connect(plot_motion, 'out_file', outputnode, 'motion_plots') - """ - Extract the mean volume of the first functional run - """ - - meanfunc = pe.MapNode( - interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), - iterfield=['in_file'], - name='meanfunc') - featpreproc.connect(motion_correct, 'out_file', meanfunc, 'in_file') - """ - Strip the skull from the mean functional to generate a mask - """ - - meanfuncmask = pe.MapNode( - interface=fsl.BET(mask=True, no_output=True, frac=0.3), - iterfield=['in_file'], - name='meanfuncmask') - featpreproc.connect(meanfunc, 'out_file', meanfuncmask, 'in_file') - """ - Mask the functional runs with the extracted mask - """ - - maskfunc = pe.MapNode( - interface=fsl.ImageMaths(suffix='_bet', op_string='-mas'), - iterfield=['in_file', 'in_file2'], - name='maskfunc') - featpreproc.connect(motion_correct, 'out_file', maskfunc, 'in_file') - featpreproc.connect(meanfuncmask, 'mask_file', maskfunc, 'in_file2') - """ - Determine the 2nd and 98th percentile intensities of each functional run - """ - - getthresh = pe.MapNode( - interface=fsl.ImageStats(op_string='-p 2 -p 98'), - iterfield=['in_file'], - name='getthreshold') - featpreproc.connect(maskfunc, 'out_file', getthresh, 'in_file') - """ - Threshold the first run of the functional data at 10% of the 98th percentile - """ - - threshold = pe.MapNode( - interface=fsl.ImageMaths(out_data_type='char', suffix='_thresh'), - iterfield=['in_file', 'op_string'], - name='threshold') - featpreproc.connect(maskfunc, 'out_file', threshold, 'in_file') - """ - Define a function to get 10% of the intensity - """ - - featpreproc.connect(getthresh, ('out_stat', getthreshop), threshold, - 'op_string') - """ - Determine the median value of the functional runs using the mask - """ - - medianval = pe.MapNode( - interface=fsl.ImageStats(op_string='-k %s -p 50'), - iterfield=['in_file', 'mask_file'], - name='medianval') - featpreproc.connect(motion_correct, 'out_file', medianval, 'in_file') - 
featpreproc.connect(threshold, 'out_file', medianval, 'mask_file') - """ - Dilate the mask - """ - - dilatemask = pe.MapNode( - interface=fsl.ImageMaths(suffix='_dil', op_string='-dilF'), - iterfield=['in_file'], - name='dilatemask') - featpreproc.connect(threshold, 'out_file', dilatemask, 'in_file') - featpreproc.connect(dilatemask, 'out_file', outputnode, 'mask') - """ - Mask the motion corrected functional runs with the dilated mask - """ - - maskfunc2 = pe.MapNode( - interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file', 'in_file2'], - name='maskfunc2') - featpreproc.connect(motion_correct, 'out_file', maskfunc2, 'in_file') - featpreproc.connect(dilatemask, 'out_file', maskfunc2, 'in_file2') - """ - Smooth each run using SUSAN with the brightness threshold set to 75% - of the median value for each run and a mask constituting the mean - functional - """ - - smooth = create_susan_smooth() - - featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm') - featpreproc.connect(maskfunc2, 'out_file', smooth, 'inputnode.in_files') - featpreproc.connect(dilatemask, 'out_file', smooth, 'inputnode.mask_file') - """ - Mask the smoothed data with the dilated mask - """ - - maskfunc3 = pe.MapNode( - interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file', 'in_file2'], - name='maskfunc3') - featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3, - 'in_file') - - featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2') - - concatnode = pe.Node(interface=util.Merge(2), name='concat') - featpreproc.connect(maskfunc2, ('out_file', tolist), concatnode, 'in1') - featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2') - """ - The following nodes select smooth or unsmoothed data depending on the - fwhm. This is because SUSAN defaults to smoothing the data with about the - voxel size of the input data if the fwhm parameter is less than 1/3 of the - voxel size. 
- """ - selectnode = pe.Node(interface=util.Select(), name='select') - - featpreproc.connect(concatnode, 'out', selectnode, 'inlist') - - featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index') - featpreproc.connect(selectnode, 'out', outputnode, 'smoothed_files') - """ - Scale the median value of the run is set to 10000 - """ - - meanscale = pe.MapNode( - interface=fsl.ImageMaths(suffix='_gms'), - iterfield=['in_file', 'op_string'], - name='meanscale') - featpreproc.connect(selectnode, 'out', meanscale, 'in_file') - """ - Define a function to get the scaling factor for intensity normalization - """ - - featpreproc.connect(medianval, ('out_stat', getmeanscale), meanscale, - 'op_string') - """ - Perform temporal highpass filtering on the data - """ - - if highpass: - highpass = pe.MapNode( - interface=fsl.ImageMaths(suffix='_tempfilt'), - iterfield=['in_file'], - name='highpass') - featpreproc.connect(inputnode, ('highpass', highpass_operand), - highpass, 'op_string') - featpreproc.connect(meanscale, 'out_file', highpass, 'in_file') - - if version < 507: - featpreproc.connect(highpass, 'out_file', outputnode, - 'highpassed_files') - else: - """ - Add back the mean removed by the highpass filter operation as of FSL 5.0.7 - """ - meanfunc4 = pe.MapNode( - interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), - iterfield=['in_file'], - name='meanfunc4') - - featpreproc.connect(meanscale, 'out_file', meanfunc4, 'in_file') - addmean = pe.MapNode( - interface=fsl.BinaryMaths(operation='add'), - iterfield=['in_file', 'operand_file'], - name='addmean') - featpreproc.connect(highpass, 'out_file', addmean, 'in_file') - featpreproc.connect(meanfunc4, 'out_file', addmean, 'operand_file') - featpreproc.connect(addmean, 'out_file', outputnode, - 'highpassed_files') - """ - Generate a mean functional image from the first run - """ - - meanfunc3 = pe.MapNode( - interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), - iterfield=['in_file'], - name='meanfunc3') - - featpreproc.connect(meanscale, 'out_file', meanfunc3, 'in_file') - featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean') - - return featpreproc - - -def create_featreg_preproc(name='featpreproc', - highpass=True, - whichvol='middle', - whichrun=0): - """Create a FEAT preprocessing workflow with registration to one volume of the first run - - Parameters - ---------- - - :: - - name : name of workflow (default: featpreproc) - highpass : boolean (default: True) - whichvol : which volume of the first run to register to ('first', 'middle', 'last', 'mean') - whichrun : which run to draw reference volume from (integer index or 'first', 'middle', 'last') - - Inputs:: - - inputspec.func : functional runs (filename or list of filenames) - inputspec.fwhm : fwhm for smoothing with SUSAN - inputspec.highpass : HWHM in TRs (if created with highpass=True) - - Outputs:: - - outputspec.reference : volume to which runs are realigned - outputspec.motion_parameters : motion correction parameters - outputspec.realigned_files : motion corrected files - outputspec.motion_plots : plots of motion correction parameters - outputspec.mask : mask file used to mask the brain - outputspec.smoothed_files : smoothed functional data - outputspec.highpassed_files : highpassed functional data (if highpass=True) - outputspec.mean : mean file - - Example - ------- - - >>> preproc = create_featreg_preproc() - >>> preproc.inputs.inputspec.func = ['f3.nii', 'f5.nii'] - >>> preproc.inputs.inputspec.fwhm = 5 - >>> preproc.inputs.inputspec.highpass 
= 128./(2*2.5) - >>> preproc.base_dir = '/tmp' - >>> preproc.run() # doctest: +SKIP - - >>> preproc = create_featreg_preproc(highpass=False, whichvol='mean') - >>> preproc.inputs.inputspec.func = 'f3.nii' - >>> preproc.inputs.inputspec.fwhm = 5 - >>> preproc.base_dir = '/tmp' - >>> preproc.run() # doctest: +SKIP - """ - - version = 0 - if fsl.Info.version() and \ - LooseVersion(fsl.Info.version()) > LooseVersion('5.0.6'): - version = 507 - - featpreproc = pe.Workflow(name=name) - """ - Set up a node to define all inputs required for the preprocessing workflow - - """ - - if highpass: - inputnode = pe.Node( - interface=util.IdentityInterface( - fields=['func', 'fwhm', 'highpass']), - name='inputspec') - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'reference', 'motion_parameters', 'realigned_files', - 'motion_plots', 'mask', 'smoothed_files', 'highpassed_files', - 'mean' - ]), - name='outputspec') - else: - inputnode = pe.Node( - interface=util.IdentityInterface(fields=['func', 'fwhm']), - name='inputspec') - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'reference', 'motion_parameters', 'realigned_files', - 'motion_plots', 'mask', 'smoothed_files', 'mean' - ]), - name='outputspec') - """ - Set up a node to define outputs for the preprocessing workflow - - """ - """ - Convert functional images to float representation. Since there can - be more than one functional run we use a MapNode to convert each - run. - """ - - img2float = pe.MapNode( - interface=fsl.ImageMaths( - out_data_type='float', op_string='', suffix='_dtype'), - iterfield=['in_file'], - name='img2float') - featpreproc.connect(inputnode, 'func', img2float, 'in_file') - """ - Extract the middle (or what whichvol points to) volume of the first run as the reference - """ - - if whichvol != 'mean': - extract_ref = pe.Node( - interface=fsl.ExtractROI(t_size=1), - iterfield=['in_file'], - name='extractref') - featpreproc.connect(img2float, ('out_file', pickrun, whichrun), - extract_ref, 'in_file') - featpreproc.connect(img2float, ('out_file', pickvol, 0, whichvol), - extract_ref, 't_min') - featpreproc.connect(extract_ref, 'roi_file', outputnode, 'reference') - """ - Realign the functional runs to the reference (`whichvol` volume of first run) - """ - - motion_correct = pe.MapNode( - interface=fsl.MCFLIRT( - save_mats=True, save_plots=True, interpolation='spline'), - name='realign', - iterfield=['in_file']) - featpreproc.connect(img2float, 'out_file', motion_correct, 'in_file') - if whichvol != 'mean': - featpreproc.connect(extract_ref, 'roi_file', motion_correct, - 'ref_file') - else: - motion_correct.inputs.mean_vol = True - featpreproc.connect(motion_correct, ('mean_img', pickrun, whichrun), - outputnode, 'reference') - - featpreproc.connect(motion_correct, 'par_file', outputnode, - 'motion_parameters') - featpreproc.connect(motion_correct, 'out_file', outputnode, - 'realigned_files') - """ - Plot the estimated motion parameters - """ - - plot_motion = pe.MapNode( - interface=fsl.PlotMotionParams(in_source='fsl'), - name='plot_motion', - iterfield=['in_file']) - plot_motion.iterables = ('plot_type', ['rotations', 'translations']) - featpreproc.connect(motion_correct, 'par_file', plot_motion, 'in_file') - featpreproc.connect(plot_motion, 'out_file', outputnode, 'motion_plots') - """ - Extract the mean volume of the first functional run - """ - - meanfunc = pe.Node( - interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), - name='meanfunc') - featpreproc.connect(motion_correct, 
('out_file', pickrun, whichrun), - meanfunc, 'in_file') - """ - Strip the skull from the mean functional to generate a mask - """ - - meanfuncmask = pe.Node( - interface=fsl.BET(mask=True, no_output=True, frac=0.3), - name='meanfuncmask') - featpreproc.connect(meanfunc, 'out_file', meanfuncmask, 'in_file') - """ - Mask the functional runs with the extracted mask - """ - - maskfunc = pe.MapNode( - interface=fsl.ImageMaths(suffix='_bet', op_string='-mas'), - iterfield=['in_file'], - name='maskfunc') - featpreproc.connect(motion_correct, 'out_file', maskfunc, 'in_file') - featpreproc.connect(meanfuncmask, 'mask_file', maskfunc, 'in_file2') - """ - Determine the 2nd and 98th percentile intensities of each functional run - """ - - getthresh = pe.MapNode( - interface=fsl.ImageStats(op_string='-p 2 -p 98'), - iterfield=['in_file'], - name='getthreshold') - featpreproc.connect(maskfunc, 'out_file', getthresh, 'in_file') - """ - Threshold the first run of the functional data at 10% of the 98th percentile - """ - - threshold = pe.MapNode( - interface=fsl.ImageMaths(out_data_type='char', suffix='_thresh'), - iterfield=['in_file', 'op_string'], - name='threshold') - featpreproc.connect(maskfunc, 'out_file', threshold, 'in_file') - """ - Define a function to get 10% of the intensity - """ - - featpreproc.connect(getthresh, ('out_stat', getthreshop), threshold, - 'op_string') - """ - Determine the median value of the functional runs using the mask - """ - - medianval = pe.MapNode( - interface=fsl.ImageStats(op_string='-k %s -p 50'), - iterfield=['in_file', 'mask_file'], - name='medianval') - featpreproc.connect(motion_correct, 'out_file', medianval, 'in_file') - featpreproc.connect(threshold, 'out_file', medianval, 'mask_file') - """ - Dilate the mask - """ - - dilatemask = pe.MapNode( - interface=fsl.ImageMaths(suffix='_dil', op_string='-dilF'), - iterfield=['in_file'], - name='dilatemask') - featpreproc.connect(threshold, 'out_file', dilatemask, 'in_file') - featpreproc.connect(dilatemask, 'out_file', outputnode, 'mask') - """ - Mask the motion corrected functional runs with the dilated mask - """ - - maskfunc2 = pe.MapNode( - interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file', 'in_file2'], - name='maskfunc2') - featpreproc.connect(motion_correct, 'out_file', maskfunc2, 'in_file') - featpreproc.connect(dilatemask, 'out_file', maskfunc2, 'in_file2') - """ - Smooth each run using SUSAN with the brightness threshold set to 75% - of the median value for each run and a mask constituting the mean - functional - """ - - smooth = create_susan_smooth() - - featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm') - featpreproc.connect(maskfunc2, 'out_file', smooth, 'inputnode.in_files') - featpreproc.connect(dilatemask, 'out_file', smooth, 'inputnode.mask_file') - """ - Mask the smoothed data with the dilated mask - """ - - maskfunc3 = pe.MapNode( - interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file', 'in_file2'], - name='maskfunc3') - featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3, - 'in_file') - - featpreproc.connect(dilatemask, 'out_file', maskfunc3, 'in_file2') - - concatnode = pe.Node(interface=util.Merge(2), name='concat') - featpreproc.connect(maskfunc2, ('out_file', tolist), concatnode, 'in1') - featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2') - """ - The following nodes select smooth or unsmoothed data depending on the - fwhm. 
This is because SUSAN defaults to smoothing the data with about the - voxel size of the input data if the fwhm parameter is less than 1/3 of the - voxel size. - """ - selectnode = pe.Node(interface=util.Select(), name='select') - - featpreproc.connect(concatnode, 'out', selectnode, 'inlist') - - featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index') - featpreproc.connect(selectnode, 'out', outputnode, 'smoothed_files') - """ - Scale the median value of the run is set to 10000 - """ - - meanscale = pe.MapNode( - interface=fsl.ImageMaths(suffix='_gms'), - iterfield=['in_file', 'op_string'], - name='meanscale') - featpreproc.connect(selectnode, 'out', meanscale, 'in_file') - """ - Define a function to get the scaling factor for intensity normalization - """ - - featpreproc.connect(medianval, ('out_stat', getmeanscale), meanscale, - 'op_string') - """ - Generate a mean functional image from the first run - """ - - meanfunc3 = pe.Node( - interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), - iterfield=['in_file'], - name='meanfunc3') - - featpreproc.connect(meanscale, ('out_file', pickrun, whichrun), meanfunc3, - 'in_file') - featpreproc.connect(meanfunc3, 'out_file', outputnode, 'mean') - """ - Perform temporal highpass filtering on the data - """ - - if highpass: - highpass = pe.MapNode( - interface=fsl.ImageMaths(suffix='_tempfilt'), - iterfield=['in_file'], - name='highpass') - featpreproc.connect(inputnode, ('highpass', highpass_operand), - highpass, 'op_string') - featpreproc.connect(meanscale, 'out_file', highpass, 'in_file') - - if version < 507: - featpreproc.connect(highpass, 'out_file', outputnode, - 'highpassed_files') - else: - """ - Add back the mean removed by the highpass filter operation as of FSL 5.0.7 - """ - meanfunc4 = pe.MapNode( - interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), - iterfield=['in_file'], - name='meanfunc4') - - featpreproc.connect(meanscale, 'out_file', meanfunc4, 'in_file') - addmean = pe.MapNode( - interface=fsl.BinaryMaths(operation='add'), - iterfield=['in_file', 'operand_file'], - name='addmean') - featpreproc.connect(highpass, 'out_file', addmean, 'in_file') - featpreproc.connect(meanfunc4, 'out_file', addmean, 'operand_file') - featpreproc.connect(addmean, 'out_file', outputnode, - 'highpassed_files') - - return featpreproc - - -def create_susan_smooth(name="susan_smooth", separate_masks=True): - """Create a SUSAN smoothing workflow - - Parameters - ---------- - - :: - - name : name of workflow (default: susan_smooth) - separate_masks : separate masks for each run - - Inputs:: - - inputnode.in_files : functional runs (filename or list of filenames) - inputnode.fwhm : fwhm for smoothing with SUSAN (float or list of floats) - inputnode.mask_file : mask used for estimating SUSAN thresholds (but not for smoothing) - - Outputs:: - - outputnode.smoothed_files : functional runs (filename or list of filenames) - - Example - ------- - - >>> smooth = create_susan_smooth() - >>> smooth.inputs.inputnode.in_files = 'f3.nii' - >>> smooth.inputs.inputnode.fwhm = 5 - >>> smooth.inputs.inputnode.mask_file = 'mask.nii' - >>> smooth.run() # doctest: +SKIP - - """ - - # replaces the functionality of a "for loop" - def cartesian_product(fwhms, in_files, usans, btthresh): - from nipype.utils.filemanip import ensure_list - # ensure all inputs are lists - in_files = ensure_list(in_files) - fwhms = [fwhms] if isinstance(fwhms, (int, float)) else fwhms - # create cartesian product lists (s_ = single element of list) - cart_in_file = 
[ - s_in_file for s_in_file in in_files for s_fwhm in fwhms - ] - cart_fwhm = [s_fwhm for s_in_file in in_files for s_fwhm in fwhms] - cart_usans = [s_usans for s_usans in usans for s_fwhm in fwhms] - cart_btthresh = [ - s_btthresh for s_btthresh in btthresh for s_fwhm in fwhms - ] - - return cart_in_file, cart_fwhm, cart_usans, cart_btthresh - - susan_smooth = pe.Workflow(name=name) - """ - Set up a node to define all inputs required for the preprocessing workflow - - """ - - inputnode = pe.Node( - interface=util.IdentityInterface( - fields=['in_files', 'fwhm', 'mask_file']), - name='inputnode') - """ - Smooth each run using SUSAN with the brightness threshold set to 75% - of the median value for each run and a mask consituting the mean - functional - """ - - multi_inputs = pe.Node( - util.Function( - function=cartesian_product, - output_names=[ - 'cart_in_file', 'cart_fwhm', 'cart_usans', 'cart_btthresh' - ]), - name='multi_inputs') - - smooth = pe.MapNode( - interface=fsl.SUSAN(), - iterfield=['in_file', 'brightness_threshold', 'usans', 'fwhm'], - name='smooth') - """ - Determine the median value of the functional runs using the mask - """ - - if separate_masks: - median = pe.MapNode( - interface=fsl.ImageStats(op_string='-k %s -p 50'), - iterfield=['in_file', 'mask_file'], - name='median') - else: - median = pe.MapNode( - interface=fsl.ImageStats(op_string='-k %s -p 50'), - iterfield=['in_file'], - name='median') - susan_smooth.connect(inputnode, 'in_files', median, 'in_file') - susan_smooth.connect(inputnode, 'mask_file', median, 'mask_file') - """ - Mask the motion corrected functional runs with the dilated mask - """ - - if separate_masks: - mask = pe.MapNode( - interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file', 'in_file2'], - name='mask') - else: - mask = pe.MapNode( - interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file'], - name='mask') - susan_smooth.connect(inputnode, 'in_files', mask, 'in_file') - susan_smooth.connect(inputnode, 'mask_file', mask, 'in_file2') - """ - Determine the mean image from each functional run - """ - - meanfunc = pe.MapNode( - interface=fsl.ImageMaths(op_string='-Tmean', suffix='_mean'), - iterfield=['in_file'], - name='meanfunc2') - susan_smooth.connect(mask, 'out_file', meanfunc, 'in_file') - """ - Merge the median values with the mean functional images into a coupled list - """ - - merge = pe.Node(interface=util.Merge(2, axis='hstack'), name='merge') - susan_smooth.connect(meanfunc, 'out_file', merge, 'in1') - susan_smooth.connect(median, 'out_stat', merge, 'in2') - """ - Define a function to get the brightness threshold for SUSAN - """ - - susan_smooth.connect([ - (inputnode, multi_inputs, [('in_files', 'in_files'), ('fwhm', - 'fwhms')]), - (median, multi_inputs, [(('out_stat', getbtthresh), 'btthresh')]), - (merge, multi_inputs, [(('out', getusans), 'usans')]), - (multi_inputs, smooth, - [('cart_in_file', 'in_file'), ('cart_fwhm', 'fwhm'), - ('cart_btthresh', 'brightness_threshold'), ('cart_usans', 'usans')]), - ]) - - outputnode = pe.Node( - interface=util.IdentityInterface(fields=['smoothed_files']), - name='outputnode') - - susan_smooth.connect(smooth, 'smoothed_file', outputnode, 'smoothed_files') - - return susan_smooth - - -def create_fsl_fs_preproc(name='preproc', highpass=True, whichvol='middle'): - """Create a FEAT preprocessing workflow together with freesurfer - - Parameters - ---------- - - :: - - name : name of workflow (default: preproc) - highpass : boolean (default: True) 
- whichvol : which volume of the first run to register to ('first', 'middle', 'mean') - - Inputs:: - - inputspec.func : functional runs (filename or list of filenames) - inputspec.fwhm : fwhm for smoothing with SUSAN - inputspec.highpass : HWHM in TRs (if created with highpass=True) - inputspec.subject_id : freesurfer subject id - inputspec.subjects_dir : freesurfer subjects dir - - Outputs:: - - outputspec.reference : volume to which runs are realigned - outputspec.motion_parameters : motion correction parameters - outputspec.realigned_files : motion corrected files - outputspec.motion_plots : plots of motion correction parameters - outputspec.mask_file : mask file used to mask the brain - outputspec.smoothed_files : smoothed functional data - outputspec.highpassed_files : highpassed functional data (if highpass=True) - outputspec.reg_file : bbregister registration files - outputspec.reg_cost : bbregister registration cost files - - Example - ------- - - >>> preproc = create_fsl_fs_preproc(whichvol='first') - >>> preproc.inputs.inputspec.highpass = 128./(2*2.5) - >>> preproc.inputs.inputspec.func = ['f3.nii', 'f5.nii'] - >>> preproc.inputs.inputspec.subjects_dir = '.' - >>> preproc.inputs.inputspec.subject_id = 's1' - >>> preproc.inputs.inputspec.fwhm = 6 - >>> preproc.run() # doctest: +SKIP - """ - - featpreproc = pe.Workflow(name=name) - """ - Set up a node to define all inputs required for the preprocessing workflow - - """ - - if highpass: - inputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'func', 'fwhm', 'subject_id', 'subjects_dir', 'highpass' - ]), - name='inputspec') - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'reference', 'motion_parameters', 'realigned_files', - 'motion_plots', 'mask_file', 'smoothed_files', - 'highpassed_files', 'reg_file', 'reg_cost' - ]), - name='outputspec') - else: - inputnode = pe.Node( - interface=util.IdentityInterface( - fields=['func', 'fwhm', 'subject_id', 'subjects_dir']), - name='inputspec') - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'reference', 'motion_parameters', 'realigned_files', - 'motion_plots', 'mask_file', 'smoothed_files', 'reg_file', - 'reg_cost' - ]), - name='outputspec') - """ - Set up a node to define outputs for the preprocessing workflow - - """ - """ - Convert functional images to float representation. Since there can - be more than one functional run we use a MapNode to convert each - run. 
- """ - - img2float = pe.MapNode( - interface=fsl.ImageMaths( - out_data_type='float', op_string='', suffix='_dtype'), - iterfield=['in_file'], - name='img2float') - featpreproc.connect(inputnode, 'func', img2float, 'in_file') - """ - Extract the first volume of the first run as the reference - """ - - if whichvol != 'mean': - extract_ref = pe.Node( - interface=fsl.ExtractROI(t_size=1), - iterfield=['in_file'], - name='extractref') - featpreproc.connect(img2float, ('out_file', pickfirst), extract_ref, - 'in_file') - featpreproc.connect(img2float, ('out_file', pickvol, 0, whichvol), - extract_ref, 't_min') - featpreproc.connect(extract_ref, 'roi_file', outputnode, 'reference') - """ - Realign the functional runs to the reference (1st volume of first run) - """ - - motion_correct = pe.MapNode( - interface=fsl.MCFLIRT( - save_mats=True, save_plots=True, interpolation='sinc'), - name='realign', - iterfield=['in_file']) - featpreproc.connect(img2float, 'out_file', motion_correct, 'in_file') - if whichvol != 'mean': - featpreproc.connect(extract_ref, 'roi_file', motion_correct, - 'ref_file') - else: - motion_correct.inputs.mean_vol = True - featpreproc.connect(motion_correct, 'mean_img', outputnode, - 'reference') - - featpreproc.connect(motion_correct, 'par_file', outputnode, - 'motion_parameters') - featpreproc.connect(motion_correct, 'out_file', outputnode, - 'realigned_files') - """ - Plot the estimated motion parameters - """ - - plot_motion = pe.MapNode( - interface=fsl.PlotMotionParams(in_source='fsl'), - name='plot_motion', - iterfield=['in_file']) - plot_motion.iterables = ('plot_type', ['rotations', 'translations']) - featpreproc.connect(motion_correct, 'par_file', plot_motion, 'in_file') - featpreproc.connect(plot_motion, 'out_file', outputnode, 'motion_plots') - """Get the mask from subject for each run - """ - - maskflow = create_getmask_flow() - featpreproc.connect([(inputnode, maskflow, - [('subject_id', 'inputspec.subject_id'), - ('subjects_dir', 'inputspec.subjects_dir')])]) - maskflow.inputs.inputspec.contrast_type = 't2' - if whichvol != 'mean': - featpreproc.connect(extract_ref, 'roi_file', maskflow, - 'inputspec.source_file') - else: - featpreproc.connect(motion_correct, ('mean_img', pickfirst), maskflow, - 'inputspec.source_file') - """ - Mask the functional runs with the extracted mask - """ - - maskfunc = pe.MapNode( - interface=fsl.ImageMaths(suffix='_bet', op_string='-mas'), - iterfield=['in_file'], - name='maskfunc') - featpreproc.connect(motion_correct, 'out_file', maskfunc, 'in_file') - featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), - maskfunc, 'in_file2') - """ - Smooth each run using SUSAN with the brightness threshold set to 75% - of the median value for each run and a mask consituting the mean - functional - """ - - smooth = create_susan_smooth(separate_masks=False) - - featpreproc.connect(inputnode, 'fwhm', smooth, 'inputnode.fwhm') - featpreproc.connect(maskfunc, 'out_file', smooth, 'inputnode.in_files') - featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), smooth, - 'inputnode.mask_file') - """ - Mask the smoothed data with the dilated mask - """ - - maskfunc3 = pe.MapNode( - interface=fsl.ImageMaths(suffix='_mask', op_string='-mas'), - iterfield=['in_file'], - name='maskfunc3') - featpreproc.connect(smooth, 'outputnode.smoothed_files', maskfunc3, - 'in_file') - featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), - maskfunc3, 'in_file2') - - concatnode = pe.Node(interface=util.Merge(2), name='concat') - 
featpreproc.connect(maskfunc, ('out_file', tolist), concatnode, 'in1') - featpreproc.connect(maskfunc3, ('out_file', tolist), concatnode, 'in2') - """ - The following nodes select smooth or unsmoothed data depending on the - fwhm. This is because SUSAN defaults to smoothing the data with about the - voxel size of the input data if the fwhm parameter is less than 1/3 of the - voxel size. - """ - selectnode = pe.Node(interface=util.Select(), name='select') - - featpreproc.connect(concatnode, 'out', selectnode, 'inlist') - - featpreproc.connect(inputnode, ('fwhm', chooseindex), selectnode, 'index') - featpreproc.connect(selectnode, 'out', outputnode, 'smoothed_files') - """ - Scale the median value of the run is set to 10000 - """ - - meanscale = pe.MapNode( - interface=fsl.ImageMaths(suffix='_gms'), - iterfield=['in_file', 'op_string'], - name='meanscale') - featpreproc.connect(selectnode, 'out', meanscale, 'in_file') - """ - Determine the median value of the functional runs using the mask - """ - - medianval = pe.MapNode( - interface=fsl.ImageStats(op_string='-k %s -p 50'), - iterfield=['in_file'], - name='medianval') - featpreproc.connect(motion_correct, 'out_file', medianval, 'in_file') - featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), - medianval, 'mask_file') - """ - Define a function to get the scaling factor for intensity normalization - """ - - featpreproc.connect(medianval, ('out_stat', getmeanscale), meanscale, - 'op_string') - """ - Perform temporal highpass filtering on the data - """ - - if highpass: - highpass = pe.MapNode( - interface=fsl.ImageMaths(suffix='_tempfilt'), - iterfield=['in_file'], - name='highpass') - featpreproc.connect(inputnode, ('highpass', highpass_operand), - highpass, 'op_string') - featpreproc.connect(meanscale, 'out_file', highpass, 'in_file') - featpreproc.connect(highpass, 'out_file', outputnode, - 'highpassed_files') - - featpreproc.connect(maskflow, ('outputspec.mask_file', pickfirst), - outputnode, 'mask_file') - featpreproc.connect(maskflow, 'outputspec.reg_file', outputnode, - 'reg_file') - featpreproc.connect(maskflow, 'outputspec.reg_cost', outputnode, - 'reg_cost') - - return featpreproc - - -def create_reg_workflow(name='registration'): - """Create a FEAT preprocessing workflow - - Parameters - ---------- - - :: - - name : name of workflow (default: 'registration') - - Inputs:: - - inputspec.source_files : files (filename or list of filenames to register) - inputspec.mean_image : reference image to use - inputspec.anatomical_image : anatomical image to coregister to - inputspec.target_image : registration target - - Outputs:: - - outputspec.func2anat_transform : FLIRT transform - outputspec.anat2target_transform : FLIRT+FNIRT transform - outputspec.transformed_files : transformed files in target space - outputspec.transformed_mean : mean image in target space - - Example - ------- - - """ - - register = pe.Workflow(name=name) - - inputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'source_files', 'mean_image', 'anatomical_image', 'target_image', - 'target_image_brain', 'config_file' - ]), - name='inputspec') - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'func2anat_transform', - 'anat2target_transform', - 'transformed_files', - 'transformed_mean', - ]), - name='outputspec') - """ - Estimate the tissue classes from the anatomical image. But use spm's segment - as FSL appears to be breaking. 
- """ - - stripper = pe.Node(fsl.BET(), name='stripper') - register.connect(inputnode, 'anatomical_image', stripper, 'in_file') - fast = pe.Node(fsl.FAST(), name='fast') - register.connect(stripper, 'out_file', fast, 'in_files') - """ - Binarize the segmentation - """ - - binarize = pe.Node( - fsl.ImageMaths(op_string='-nan -thr 0.5 -bin'), name='binarize') - pickindex = lambda x, i: x[i] - register.connect(fast, ('partial_volume_files', pickindex, 2), binarize, - 'in_file') - """ - Calculate rigid transform from mean image to anatomical image - """ - - mean2anat = pe.Node(fsl.FLIRT(), name='mean2anat') - mean2anat.inputs.dof = 6 - register.connect(inputnode, 'mean_image', mean2anat, 'in_file') - register.connect(stripper, 'out_file', mean2anat, 'reference') - """ - Now use bbr cost function to improve the transform - """ - - mean2anatbbr = pe.Node(fsl.FLIRT(), name='mean2anatbbr') - mean2anatbbr.inputs.dof = 6 - mean2anatbbr.inputs.cost = 'bbr' - mean2anatbbr.inputs.schedule = os.path.join( - os.getenv('FSLDIR'), 'etc/flirtsch/bbr.sch') - register.connect(inputnode, 'mean_image', mean2anatbbr, 'in_file') - register.connect(binarize, 'out_file', mean2anatbbr, 'wm_seg') - register.connect(inputnode, 'anatomical_image', mean2anatbbr, 'reference') - register.connect(mean2anat, 'out_matrix_file', mean2anatbbr, - 'in_matrix_file') - """ - Calculate affine transform from anatomical to target - """ - - anat2target_affine = pe.Node(fsl.FLIRT(), name='anat2target_linear') - anat2target_affine.inputs.searchr_x = [-180, 180] - anat2target_affine.inputs.searchr_y = [-180, 180] - anat2target_affine.inputs.searchr_z = [-180, 180] - register.connect(stripper, 'out_file', anat2target_affine, 'in_file') - register.connect(inputnode, 'target_image_brain', anat2target_affine, - 'reference') - """ - Calculate nonlinear transform from anatomical to target - """ - - anat2target_nonlinear = pe.Node(fsl.FNIRT(), name='anat2target_nonlinear') - anat2target_nonlinear.inputs.fieldcoeff_file = True - register.connect(anat2target_affine, 'out_matrix_file', - anat2target_nonlinear, 'affine_file') - register.connect(inputnode, 'anatomical_image', anat2target_nonlinear, - 'in_file') - register.connect(inputnode, 'config_file', anat2target_nonlinear, - 'config_file') - register.connect(inputnode, 'target_image', anat2target_nonlinear, - 'ref_file') - """ - Transform the mean image. First to anatomical and then to target - """ - - warpmean = pe.Node(fsl.ApplyWarp(interp='spline'), name='warpmean') - register.connect(inputnode, 'mean_image', warpmean, 'in_file') - register.connect(mean2anatbbr, 'out_matrix_file', warpmean, 'premat') - register.connect(inputnode, 'target_image', warpmean, 'ref_file') - register.connect(anat2target_nonlinear, 'fieldcoeff_file', warpmean, - 'field_file') - """ - Transform the remaining images. 
First to anatomical and then to target - """ - - warpall = pe.MapNode( - fsl.ApplyWarp(interp='spline'), - iterfield=['in_file'], - nested=True, - name='warpall') - register.connect(inputnode, 'source_files', warpall, 'in_file') - register.connect(mean2anatbbr, 'out_matrix_file', warpall, 'premat') - register.connect(inputnode, 'target_image', warpall, 'ref_file') - register.connect(anat2target_nonlinear, 'fieldcoeff_file', warpall, - 'field_file') - """ - Assign all the output files - """ - - register.connect(warpmean, 'out_file', outputnode, 'transformed_mean') - register.connect(warpall, 'out_file', outputnode, 'transformed_files') - register.connect(mean2anatbbr, 'out_matrix_file', outputnode, - 'func2anat_transform') - register.connect(anat2target_nonlinear, 'fieldcoeff_file', outputnode, - 'anat2target_transform') - - return register diff --git a/nipype/workflows/fmri/fsl/tests/__init__.py b/nipype/workflows/fmri/fsl/tests/__init__.py deleted file mode 100644 index 2986294d9d..0000000000 --- a/nipype/workflows/fmri/fsl/tests/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -__author__ = 'satra' diff --git a/nipype/workflows/fmri/fsl/tests/test_preprocess.py b/nipype/workflows/fmri/fsl/tests/test_preprocess.py deleted file mode 100644 index 4f382bdc1a..0000000000 --- a/nipype/workflows/fmri/fsl/tests/test_preprocess.py +++ /dev/null @@ -1,25 +0,0 @@ -__author__ = 'oliver' - -from ..preprocess import create_featreg_preproc, pickrun - - -def test_pickrun(): - files = ['1', '2', '3', '4'] - assert pickrun(files, 0) == '1' - assert pickrun(files, 'first') == '1' - assert pickrun(files, -1) == '4' - assert pickrun(files, 'last') == '4' - assert pickrun(files, 'middle') == '3' - - -def test_create_featreg_preproc(): - """smoke test""" - wf = create_featreg_preproc(whichrun=0) - - # test type - import nipype - assert type(wf) == nipype.pipeline.engine.Workflow - - # test methods - assert wf.get_node('extractref') - assert wf._get_dot() diff --git a/nipype/workflows/fmri/spm/__init__.py b/nipype/workflows/fmri/spm/__init__.py deleted file mode 100644 index f974a663db..0000000000 --- a/nipype/workflows/fmri/spm/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -from .preprocess import (create_spm_preproc, create_vbm_preproc, - create_DARTEL_template) diff --git a/nipype/workflows/fmri/spm/estimate.py b/nipype/workflows/fmri/spm/estimate.py deleted file mode 100644 index 99fb243f19..0000000000 --- a/nipype/workflows/fmri/spm/estimate.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/workflows/fmri/spm/preprocess.py b/nipype/workflows/fmri/spm/preprocess.py deleted file mode 100644 index f2957e4b03..0000000000 --- a/nipype/workflows/fmri/spm/preprocess.py +++ /dev/null @@ -1,332 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -import os - -from ....algorithms import rapidart as ra -from ....interfaces import spm as spm -from ....interfaces import utility as niu -from ....pipeline import engine as pe -from ...smri.freesurfer.utils import create_getmask_flow - -from .... import logging -logger = logging.getLogger('nipype.workflow') - - -def create_spm_preproc(name='preproc'): - """Create an spm preprocessing workflow with freesurfer registration and - artifact detection. 
- - The workflow realigns and smooths and registers the functional images with - the subject's freesurfer space. - - Example - ------- - - >>> preproc = create_spm_preproc() - >>> preproc.base_dir = '.' - >>> preproc.inputs.inputspec.fwhm = 6 - >>> preproc.inputs.inputspec.subject_id = 's1' - >>> preproc.inputs.inputspec.subjects_dir = '.' - >>> preproc.inputs.inputspec.functionals = ['f3.nii', 'f5.nii'] - >>> preproc.inputs.inputspec.norm_threshold = 1 - >>> preproc.inputs.inputspec.zintensity_threshold = 3 - - Inputs:: - - inputspec.functionals : functional runs use 4d nifti - inputspec.subject_id : freesurfer subject id - inputspec.subjects_dir : freesurfer subjects dir - inputspec.fwhm : smoothing fwhm - inputspec.norm_threshold : norm threshold for outliers - inputspec.zintensity_threshold : intensity threshold in z-score - - Outputs:: - - outputspec.realignment_parameters : realignment parameter files - outputspec.smoothed_files : smoothed functional files - outputspec.outlier_files : list of outliers - outputspec.outlier_stats : statistics of outliers - outputspec.outlier_plots : images of outliers - outputspec.mask_file : binary mask file in reference image space - outputspec.reg_file : registration file that maps reference image to - freesurfer space - outputspec.reg_cost : cost of registration (useful for detecting misalignment) - """ - """ - Initialize the workflow - """ - - workflow = pe.Workflow(name=name) - """ - Define the inputs to this workflow - """ - - inputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'functionals', 'subject_id', 'subjects_dir', 'fwhm', - 'norm_threshold', 'zintensity_threshold' - ]), - name='inputspec') - """ - Setup the processing nodes and create the mask generation and coregistration - workflow - """ - - poplist = lambda x: x.pop() - realign = pe.Node(spm.Realign(), name='realign') - workflow.connect(inputnode, 'functionals', realign, 'in_files') - maskflow = create_getmask_flow() - workflow.connect([(inputnode, maskflow, - [('subject_id', 'inputspec.subject_id'), - ('subjects_dir', 'inputspec.subjects_dir')])]) - maskflow.inputs.inputspec.contrast_type = 't2' - workflow.connect(realign, 'mean_image', maskflow, 'inputspec.source_file') - smooth = pe.Node(spm.Smooth(), name='smooth') - workflow.connect(inputnode, 'fwhm', smooth, 'fwhm') - workflow.connect(realign, 'realigned_files', smooth, 'in_files') - artdetect = pe.Node( - ra.ArtifactDetect( - mask_type='file', - parameter_source='SPM', - use_differences=[True, False], - use_norm=True, - save_plot=True), - name='artdetect') - workflow.connect([(inputnode, artdetect, - [('norm_threshold', 'norm_threshold'), - ('zintensity_threshold', 'zintensity_threshold')])]) - workflow.connect([(realign, artdetect, [('realigned_files', - 'realigned_files'), - ('realignment_parameters', - 'realignment_parameters')])]) - workflow.connect(maskflow, ('outputspec.mask_file', poplist), artdetect, - 'mask_file') - """ - Define the outputs of the workflow and connect the nodes to the outputnode - """ - - outputnode = pe.Node( - niu.IdentityInterface(fields=[ - "realignment_parameters", "smoothed_files", "mask_file", - "reg_file", "reg_cost", 'outlier_files', 'outlier_stats', - 'outlier_plots' - ]), - name="outputspec") - workflow.connect( - [(maskflow, outputnode, [("outputspec.reg_file", "reg_file")]), - (maskflow, outputnode, - [("outputspec.reg_cost", "reg_cost")]), (maskflow, outputnode, [ - (("outputspec.mask_file", poplist), "mask_file") - ]), (realign, outputnode, [('realignment_parameters', - 
'realignment_parameters')]), - (smooth, outputnode, [('smoothed_files', 'smoothed_files')]), - (artdetect, outputnode, [('outlier_files', 'outlier_files'), - ('statistic_files', 'outlier_stats'), - ('plot_files', 'outlier_plots')])]) - return workflow - - -def create_vbm_preproc(name='vbmpreproc'): - """Create a vbm workflow that generates DARTEL-based warps to MNI space - - Based on: http://www.fil.ion.ucl.ac.uk/~john/misc/VBMclass10.pdf - - Example - ------- - - >>> preproc = create_vbm_preproc() - >>> preproc.inputs.inputspec.fwhm = 8 - >>> preproc.inputs.inputspec.structural_files = [ - ... os.path.abspath('s1.nii'), os.path.abspath('s3.nii')] - >>> preproc.inputs.inputspec.template_prefix = 'Template' - >>> preproc.run() # doctest: +SKIP - - Inputs:: - - inputspec.structural_files : structural data to be used to create templates - inputspec.fwhm: single of triplet for smoothing when normalizing to MNI space - inputspec.template_prefix : prefix for dartel template - - Outputs:: - - outputspec.normalized_files : normalized gray matter files - outputspec.template_file : DARTEL template - outputspec.icv : intracranial volume (cc - assuming dimensions in mm) - - """ - - workflow = pe.Workflow(name=name) - """ - Define the inputs to this workflow - """ - - inputnode = pe.Node( - niu.IdentityInterface( - fields=['structural_files', 'fwhm', 'template_prefix']), - name='inputspec') - - dartel_template = create_DARTEL_template() - - workflow.connect(inputnode, 'template_prefix', dartel_template, - 'inputspec.template_prefix') - workflow.connect(inputnode, 'structural_files', dartel_template, - 'inputspec.structural_files') - - norm2mni = pe.Node(spm.DARTELNorm2MNI(modulate=True), name='norm2mni') - workflow.connect(dartel_template, 'outputspec.template_file', norm2mni, - 'template_file') - workflow.connect(dartel_template, 'outputspec.flow_fields', norm2mni, - 'flowfield_files') - - def getclass1images(class_images): - class1images = [] - for session in class_images: - class1images.extend(session[0]) - return class1images - - workflow.connect(dartel_template, - ('segment.native_class_images', getclass1images), - norm2mni, 'apply_to_files') - workflow.connect(inputnode, 'fwhm', norm2mni, 'fwhm') - - def compute_icv(class_images): - from nibabel import load - from numpy import prod - icv = [] - for session in class_images: - voxel_volume = prod(load(session[0][0]).header.get_zooms()) - img = load(session[0][0]).get_data() + \ - load(session[1][0]).get_data() + \ - load(session[2][0]).get_data() - img_icv = (img > 0.5).astype(int).sum() * voxel_volume * 1e-3 - icv.append(img_icv) - return icv - - calc_icv = pe.Node( - niu.Function( - function=compute_icv, - input_names=['class_images'], - output_names=['icv']), - name='calc_icv') - - workflow.connect(dartel_template, 'segment.native_class_images', calc_icv, - 'class_images') - """ - Define the outputs of the workflow and connect the nodes to the outputnode - """ - - outputnode = pe.Node( - niu.IdentityInterface( - fields=["normalized_files", "template_file", "icv"]), - name="outputspec") - workflow.connect([ - (dartel_template, outputnode, [('outputspec.template_file', - 'template_file')]), - (norm2mni, outputnode, [("normalized_files", "normalized_files")]), - (calc_icv, outputnode, [("icv", "icv")]), - ]) - - return workflow - - -def create_DARTEL_template(name='dartel_template'): - """Create a vbm workflow that generates DARTEL-based template - - - Example - ------- - - >>> preproc = create_DARTEL_template() - >>> 
preproc.inputs.inputspec.structural_files = [ - ... os.path.abspath('s1.nii'), os.path.abspath('s3.nii')] - >>> preproc.inputs.inputspec.template_prefix = 'Template' - >>> preproc.run() # doctest: +SKIP - - Inputs:: - - inputspec.structural_files : structural data to be used to create templates - inputspec.template_prefix : prefix for dartel template - - Outputs:: - - outputspec.template_file : DARTEL template - outputspec.flow_fields : warps from input struct files to the template - - """ - - workflow = pe.Workflow(name=name) - - inputnode = pe.Node( - niu.IdentityInterface(fields=['structural_files', 'template_prefix']), - name='inputspec') - - segment = pe.MapNode( - spm.NewSegment(), iterfield=['channel_files'], name='segment') - workflow.connect(inputnode, 'structural_files', segment, 'channel_files') - - spm_info = spm.Info.getinfo() - if spm_info: - spm_path = spm_info['path'] - if spm_info['name'] == 'SPM8': - tissue1 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 1), 2, - (True, True), (False, False)) - tissue2 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 2), 2, - (True, True), (False, False)) - tissue3 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 3), 2, - (True, False), (False, False)) - tissue4 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 4), 3, - (False, False), (False, False)) - tissue5 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 5), 4, - (False, False), (False, False)) - tissue6 = ((os.path.join(spm_path, 'toolbox/Seg/TPM.nii'), 6), 2, - (False, False), (False, False)) - elif spm_info['name'] == 'SPM12': - spm_path = spm_info['path'] - tissue1 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 1), 1, - (True, True), (False, False)) - tissue2 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 2), 1, - (True, True), (False, False)) - tissue3 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 3), 2, - (True, False), (False, False)) - tissue4 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 4), 3, - (False, False), (False, False)) - tissue5 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 5), 4, - (False, False), (False, False)) - tissue6 = ((os.path.join(spm_path, 'tpm/TPM.nii'), 6), 2, - (False, False), (False, False)) - else: - logger.critical('Unsupported version of SPM') - - segment.inputs.tissues = [ - tissue1, tissue2, tissue3, tissue4, tissue5, tissue6 - ] - else: - logger.critical('SPM not found') - - dartel = pe.Node(spm.DARTEL(), name='dartel') - """Get the gray and white segmentation classes generated by NewSegment - """ - - def get2classes(dartel_files): - class1images = [] - class2images = [] - for session in dartel_files: - class1images.extend(session[0]) - class2images.extend(session[1]) - return [class1images, class2images] - - workflow.connect(segment, ('dartel_input_images', get2classes), dartel, - 'image_files') - workflow.connect(inputnode, 'template_prefix', dartel, 'template_prefix') - - outputnode = pe.Node( - niu.IdentityInterface(fields=["template_file", "flow_fields"]), - name="outputspec") - workflow.connect([ - (dartel, outputnode, [('final_template_file', 'template_file'), - ('dartel_flow_fields', 'flow_fields')]), - ]) - - return workflow diff --git a/nipype/workflows/fmri/spm/tests/__init__.py b/nipype/workflows/fmri/spm/tests/__init__.py deleted file mode 100644 index 2986294d9d..0000000000 --- a/nipype/workflows/fmri/spm/tests/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -__author__ = 'satra' diff --git a/nipype/workflows/graph/__init__.py b/nipype/workflows/graph/__init__.py deleted file mode 100644 index ead6180dc8..0000000000 
--- a/nipype/workflows/graph/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) diff --git a/nipype/workflows/misc/__init__.py b/nipype/workflows/misc/__init__.py deleted file mode 100644 index 40a96afc6f..0000000000 --- a/nipype/workflows/misc/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/workflows/misc/utils.py b/nipype/workflows/misc/utils.py deleted file mode 100644 index b581ec8c54..0000000000 --- a/nipype/workflows/misc/utils.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from builtins import map, range - - -def get_vox_dims(volume): - import nibabel as nb - from nipype.utils import NUMPY_MMAP - if isinstance(volume, list): - volume = volume[0] - nii = nb.load(volume, mmap=NUMPY_MMAP) - hdr = nii.header - voxdims = hdr.get_zooms() - return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] - - -def get_data_dims(volume): - import nibabel as nb - from nipype.utils import NUMPY_MMAP - if isinstance(volume, list): - volume = volume[0] - nii = nb.load(volume, mmap=NUMPY_MMAP) - hdr = nii.header - datadims = hdr.get_data_shape() - return [int(datadims[0]), int(datadims[1]), int(datadims[2])] - - -def get_affine(volume): - import nibabel as nb - from nipype.utils import NUMPY_MMAP - nii = nb.load(volume, mmap=NUMPY_MMAP) - return nii.affine - - -def select_aparc(list_of_files): - for in_file in list_of_files: - if 'aparc+aseg.mgz' in in_file: - idx = list_of_files.index(in_file) - return list_of_files[idx] - - -def select_aparc_annot(list_of_files): - for in_file in list_of_files: - if '.aparc.annot' in in_file: - idx = list_of_files.index(in_file) - return list_of_files[idx] - - -def region_list_from_volume(in_file): - import nibabel as nb - import numpy as np - from nipype.utils import NUMPY_MMAP - segmentation = nb.load(in_file, mmap=NUMPY_MMAP) - segmentationdata = segmentation.get_data() - rois = np.unique(segmentationdata) - region_list = list(rois) - region_list.sort() - region_list.remove(0) - region_list = list(map(int, region_list)) - return region_list - - -def id_list_from_lookup_table(lookup_file, region_list): - import numpy as np - LUTlabelsRGBA = np.loadtxt( - lookup_file, - skiprows=4, - usecols=[0, 1, 2, 3, 4, 5], - comments='#', - dtype={ - 'names': ('index', 'label', 'R', 'G', 'B', 'A'), - 'formats': ('int', '|S30', 'int', 'int', 'int', 'int') - }) - numLUTLabels = np.size(LUTlabelsRGBA) - LUTlabelDict = {} - for labels in range(0, numLUTLabels): - LUTlabelDict[LUTlabelsRGBA[labels][0]] = [ - LUTlabelsRGBA[labels][1], LUTlabelsRGBA[labels][2], - LUTlabelsRGBA[labels][3], LUTlabelsRGBA[labels][4], - LUTlabelsRGBA[labels][5] - ] - id_list = [] - for region in region_list: - label = LUTlabelDict[region][0] - id_list.append(label) - id_list = list(map(str, id_list)) - return id_list diff --git a/nipype/workflows/rsfmri/__init__.py b/nipype/workflows/rsfmri/__init__.py deleted file mode 100644 index bd58039343..0000000000 --- a/nipype/workflows/rsfmri/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from . 
import fsl diff --git a/nipype/workflows/rsfmri/fsl/__init__.py b/nipype/workflows/rsfmri/fsl/__init__.py deleted file mode 100644 index 2e17899066..0000000000 --- a/nipype/workflows/rsfmri/fsl/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# -*- coding: utf-8 -*- -from .resting import create_resting_preproc diff --git a/nipype/workflows/rsfmri/fsl/resting.py b/nipype/workflows/rsfmri/fsl/resting.py deleted file mode 100644 index 176a0ed6f7..0000000000 --- a/nipype/workflows/rsfmri/fsl/resting.py +++ /dev/null @@ -1,162 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from ....interfaces import fsl as fsl # fsl -from ....interfaces import utility as util # utility -from ....pipeline import engine as pe # pypeline engine -from ....algorithms import confounds - - -def select_volume(filename, which): - """Return the middle index of a file - """ - from nibabel import load - import numpy as np - from nipype.utils import NUMPY_MMAP - - if which.lower() == 'first': - idx = 0 - elif which.lower() == 'middle': - idx = int(np.ceil(load(filename, mmap=NUMPY_MMAP).shape[3] / 2)) - else: - raise Exception('unknown value for volume selection : %s' % which) - return idx - - -def create_realign_flow(name='realign'): - """Realign a time series to the middle volume using spline interpolation - - Uses MCFLIRT to realign the time series and ApplyWarp to apply the rigid - body transformations using spline interpolation (unknown order). - - Example - ------- - - >>> wf = create_realign_flow() - >>> wf.inputs.inputspec.func = 'f3.nii' - >>> wf.run() # doctest: +SKIP - - """ - realignflow = pe.Workflow(name=name) - inputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'func', - ]), name='inputspec') - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'realigned_file', - ]), - name='outputspec') - realigner = pe.Node( - fsl.MCFLIRT(save_mats=True, stats_imgs=True), name='realigner') - splitter = pe.Node(fsl.Split(dimension='t'), name='splitter') - warper = pe.MapNode( - fsl.ApplyWarp(interp='spline'), - iterfield=['in_file', 'premat'], - name='warper') - joiner = pe.Node(fsl.Merge(dimension='t'), name='joiner') - - realignflow.connect(inputnode, 'func', realigner, 'in_file') - realignflow.connect(inputnode, ('func', select_volume, 'middle'), - realigner, 'ref_vol') - realignflow.connect(realigner, 'out_file', splitter, 'in_file') - realignflow.connect(realigner, 'mat_file', warper, 'premat') - realignflow.connect(realigner, 'variance_img', warper, 'ref_file') - realignflow.connect(splitter, 'out_files', warper, 'in_file') - realignflow.connect(warper, 'out_file', joiner, 'in_files') - realignflow.connect(joiner, 'merged_file', outputnode, 'realigned_file') - return realignflow - - -def create_resting_preproc(name='restpreproc', base_dir=None): - """Create a "resting" time series preprocessing workflow - - The noise removal is based on Behzadi et al. 
(2007) - - Parameters - ---------- - - name : name of workflow (default: restpreproc) - - Inputs:: - - inputspec.func : functional run (filename or list of filenames) - - Outputs:: - - outputspec.noise_mask_file : voxels used for PCA to derive noise - components - outputspec.filtered_file : bandpass filtered and noise-reduced time - series - - Example - ------- - - >>> TR = 3.0 - >>> wf = create_resting_preproc() - >>> wf.inputs.inputspec.func = 'f3.nii' - >>> wf.inputs.inputspec.num_noise_components = 6 - >>> wf.inputs.inputspec.highpass_sigma = 100/(2*TR) - >>> wf.inputs.inputspec.lowpass_sigma = 12.5/(2*TR) - >>> wf.run() # doctest: +SKIP - - """ - - restpreproc = pe.Workflow(name=name, base_dir=base_dir) - - # Define nodes - inputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'func', 'num_noise_components', 'highpass_sigma', 'lowpass_sigma' - ]), - name='inputspec') - outputnode = pe.Node( - interface=util.IdentityInterface(fields=[ - 'noise_mask_file', - 'filtered_file', - ]), - name='outputspec') - slicetimer = pe.Node(fsl.SliceTimer(), name='slicetimer') - realigner = create_realign_flow() - tsnr = pe.Node(confounds.TSNR(regress_poly=2), name='tsnr') - getthresh = pe.Node( - interface=fsl.ImageStats(op_string='-p 98'), name='getthreshold') - threshold_stddev = pe.Node(fsl.Threshold(), name='threshold') - compcor = pe.Node( - confounds.ACompCor( - components_file="noise_components.txt", pre_filter=False), - name='compcor') - remove_noise = pe.Node( - fsl.FilterRegressor(filter_all=True), name='remove_noise') - bandpass_filter = pe.Node(fsl.TemporalFilter(), name='bandpass_filter') - - # Define connections - restpreproc.connect(inputnode, 'func', slicetimer, 'in_file') - restpreproc.connect(slicetimer, 'slice_time_corrected_file', realigner, - 'inputspec.func') - restpreproc.connect(realigner, 'outputspec.realigned_file', tsnr, - 'in_file') - restpreproc.connect(tsnr, 'stddev_file', threshold_stddev, 'in_file') - restpreproc.connect(tsnr, 'stddev_file', getthresh, 'in_file') - restpreproc.connect(getthresh, 'out_stat', threshold_stddev, 'thresh') - restpreproc.connect(realigner, 'outputspec.realigned_file', compcor, - 'realigned_file') - restpreproc.connect(threshold_stddev, 'out_file', compcor, 'mask_files') - restpreproc.connect(inputnode, 'num_noise_components', compcor, - 'num_components') - restpreproc.connect(tsnr, 'detrended_file', remove_noise, 'in_file') - restpreproc.connect(compcor, 'components_file', remove_noise, - 'design_file') - restpreproc.connect(inputnode, 'highpass_sigma', bandpass_filter, - 'highpass_sigma') - restpreproc.connect(inputnode, 'lowpass_sigma', bandpass_filter, - 'lowpass_sigma') - restpreproc.connect(remove_noise, 'out_file', bandpass_filter, 'in_file') - restpreproc.connect(threshold_stddev, 'out_file', outputnode, - 'noise_mask_file') - restpreproc.connect(bandpass_filter, 'out_file', outputnode, - 'filtered_file') - return restpreproc diff --git a/nipype/workflows/rsfmri/fsl/tests/__init__.py b/nipype/workflows/rsfmri/fsl/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/nipype/workflows/rsfmri/fsl/tests/test_resting.py b/nipype/workflows/rsfmri/fsl/tests/test_resting.py deleted file mode 100644 index eba73a75b1..0000000000 --- a/nipype/workflows/rsfmri/fsl/tests/test_resting.py +++ /dev/null @@ -1,107 +0,0 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -import pytest -import os -import mock -import numpy as np - -from 
.....testing import utils -from .....interfaces import IdentityInterface -from .....pipeline.engine import Node, Workflow - -from ..resting import create_resting_preproc - -ALL_FIELDS = [ - 'func', 'in_file', 'slice_time_corrected_file', 'stddev_file', 'out_stat', - 'thresh', 'num_noise_components', 'detrended_file', 'design_file', - 'highpass_sigma', 'lowpass_sigma', 'out_file', 'noise_mask_file', - 'filtered_file' -] - - -def stub_node_factory(*args, **kwargs): - if 'name' not in kwargs.keys(): - raise Exception() - name = kwargs['name'] - if name == 'compcor': - return Node(*args, **kwargs) - else: # replace with an IdentityInterface - return Node(IdentityInterface(fields=ALL_FIELDS), name=name) - - -def stub_wf(*args, **kwargs): - wflow = Workflow(name='realigner') - inputnode = Node(IdentityInterface(fields=['func']), name='inputspec') - outputnode = Node( - interface=IdentityInterface(fields=['realigned_file']), - name='outputspec') - wflow.connect(inputnode, 'func', outputnode, 'realigned_file') - return wflow - - -class TestResting(): - - in_filenames = { - 'realigned_file': 'rsfmrifunc.nii', - 'mask_file': 'rsfmrimask.nii' - } - - out_filenames = { - 'components_file': 'restpreproc/compcor/noise_components.txt' - } - - num_noise_components = 6 - - @pytest.fixture(autouse=True) - def setup_class(self, tmpdir): - # setup temp folder - tmpdir.chdir() - self.in_filenames = { - key: os.path.abspath(value) - for key, value in self.in_filenames.items() - } - - # create&save input files - utils.save_toy_nii(self.fake_data, self.in_filenames['realigned_file']) - mask = np.zeros(self.fake_data.shape[:3]) - for i in range(mask.shape[0]): - for j in range(mask.shape[1]): - if i == j: - mask[i, j] = 1 - utils.save_toy_nii(mask, self.in_filenames['mask_file']) - - @mock.patch( - 'nipype.workflows.rsfmri.fsl.resting.create_realign_flow', - side_effect=stub_wf) - @mock.patch('nipype.pipeline.engine.Node', side_effect=stub_node_factory) - def test_create_resting_preproc(self, mock_node, mock_realign_wf): - wflow = create_resting_preproc(base_dir=os.getcwd()) - - wflow.inputs.inputspec.num_noise_components = self.num_noise_components - mask_in = wflow.get_node('threshold').inputs - mask_in.out_file = self.in_filenames['mask_file'] - func_in = wflow.get_node('slicetimer').inputs - func_in.slice_time_corrected_file = self.in_filenames['realigned_file'] - - wflow.run() - - # assert - expected_file = os.path.abspath(self.out_filenames['components_file']) - with open(expected_file, 'r') as components_file: - components_data = [line.rstrip().split() - for line in components_file] - num_got_components = len(components_data) - assert (num_got_components == self.num_noise_components or - num_got_components == self.fake_data.shape[3]) - first_two = [row[:2] for row in components_data[1:]] - assert first_two == [['-0.5172356654', '-0.6973053243'], - ['0.2574722644', '0.1645270737'], - ['-0.0806469590', '0.5156853779'], - ['0.7187176051', '-0.3235820287'], - ['-0.3783072450', '0.3406749013']] - - fake_data = np.array([[[[2, 4, 3, 9, 1], [3, 6, 4, 7, 4]], - [[8, 3, 4, 6, 2], [4, 0, 4, 4, 2]]], - [[[9, 7, 5, 5, 7], [7, 8, 4, 8, 4]], - [[0, 4, 7, 1, 7], [6, 8, 8, 8, 7]]]]) diff --git a/nipype/workflows/smri/__init__.py b/nipype/workflows/smri/__init__.py deleted file mode 100644 index b6d7bf5731..0000000000 --- a/nipype/workflows/smri/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) - -from . 
import ants -from . import freesurfer -from . import niftyreg diff --git a/nipype/workflows/smri/ants/ANTSBuildTemplate.py b/nipype/workflows/smri/ants/ANTSBuildTemplate.py deleted file mode 100644 index 5a43d47bac..0000000000 --- a/nipype/workflows/smri/ants/ANTSBuildTemplate.py +++ /dev/null @@ -1,388 +0,0 @@ -# -*- coding: utf-8 -*- -################################################################################# -# Program: Build Template Parallel -# Language: Python -## -# Authors: Jessica Forbes, Grace Murray, and Hans Johnson, University of Iowa -## -# This software is distributed WITHOUT ANY WARRANTY; without even -# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR -# PURPOSE. -## -################################################################################# -from __future__ import print_function -from builtins import map -from builtins import zip -from builtins import range - -from ....pipeline import engine as pe -from ....interfaces import utility as util -from ....interfaces.utility import Function - -from ....interfaces.ants import (ANTS, WarpImageMultiTransform, AverageImages, - MultiplyImages, AverageAffineTransform) - - -def GetFirstListElement(this_list): - return this_list[0] - - -def MakeTransformListWithGradientWarps(averageAffineTranform, - gradientStepWarp): - return [ - averageAffineTranform, gradientStepWarp, gradientStepWarp, - gradientStepWarp, gradientStepWarp - ] - - -def RenestDeformedPassiveImages(deformedPassiveImages, - flattened_image_nametypes): - import os - """ Now make a list of lists of images where the outter list is per image type, - and the inner list is the same size as the number of subjects to be averaged. - In this case, the first element will be a list of all the deformed T2's, and - the second element will be a list of all deformed POSTERIOR_AIR, etc.. - """ - all_images_size = len(deformedPassiveImages) - image_dictionary_of_lists = dict() - nested_imagetype_list = list() - outputAverageImageName_list = list() - image_type_list = list() - # make empty_list, this is not efficient, but it works - for name in flattened_image_nametypes: - image_dictionary_of_lists[name] = list() - for index in range(0, all_images_size): - curr_name = flattened_image_nametypes[index] - curr_file = deformedPassiveImages[index] - image_dictionary_of_lists[curr_name].append(curr_file) - for image_type, image_list in list(image_dictionary_of_lists.items()): - nested_imagetype_list.append(image_list) - outputAverageImageName_list.append('AVG_' + image_type + '.nii.gz') - image_type_list.append('WARP_AVG_' + image_type) - print("\n" * 10) - print("HACK: ", nested_imagetype_list) - print("HACK: ", outputAverageImageName_list) - print("HACK: ", image_type_list) - return nested_imagetype_list, outputAverageImageName_list, image_type_list - - -# Utility Function -# This will make a list of list pairs for defining the concatenation of transforms -# wp=['wp1.nii','wp2.nii','wp3.nii'] -# af=['af1.mat','af2.mat','af3.mat'] -# ll=map(list,zip(af,wp)) -# ll -# #[['af1.mat', 'wp1.nii'], ['af2.mat', 'wp2.nii'], ['af3.mat', 'wp3.nii']] - - -def MakeListsOfTransformLists(warpTransformList, AffineTransformList): - return list(map(list, list(zip(warpTransformList, AffineTransformList)))) - - -# Flatten and return equal length transform and images lists. 
- - -def FlattenTransformAndImagesList(ListOfPassiveImagesDictionaries, - transformation_series): - import sys - print("HACK: DEBUG: ListOfPassiveImagesDictionaries\n{lpi}\n".format( - lpi=ListOfPassiveImagesDictionaries)) - subjCount = len(ListOfPassiveImagesDictionaries) - tranCount = len(transformation_series) - if subjCount != tranCount: - print("ERROR: subjCount must equal tranCount {0} != {1}".format( - subjCount, tranCount)) - sys.exit(-1) - flattened_images = list() - flattened_image_nametypes = list() - flattened_transforms = list() - passiveImagesCount = len(ListOfPassiveImagesDictionaries[0]) - for subjIndex in range(0, subjCount): - # if passiveImagesCount != len(ListOfPassiveImagesDictionaries[subjIndex]): - # print "ERROR: all image lengths must be equal {0} != {1}".format(passiveImagesCount,len(ListOfPassiveImagesDictionaries[subjIndex])) - # sys.exit(-1) - subjImgDictionary = ListOfPassiveImagesDictionaries[subjIndex] - subjToAtlasTransform = transformation_series[subjIndex] - for imgname, img in list(subjImgDictionary.items()): - flattened_images.append(img) - flattened_image_nametypes.append(imgname) - flattened_transforms.append(subjToAtlasTransform) - print("HACK: flattened images {0}\n".format(flattened_images)) - print("HACK: flattened nametypes {0}\n".format(flattened_image_nametypes)) - print("HACK: flattened txfms {0}\n".format(flattened_transforms)) - return flattened_images, flattened_transforms, flattened_image_nametypes - - -def ANTSTemplateBuildSingleIterationWF(iterationPhasePrefix=''): - """ - - Inputs:: - - inputspec.images : - inputspec.fixed_image : - inputspec.ListOfPassiveImagesDictionaries : - - Outputs:: - - outputspec.template : - outputspec.transforms_list : - outputspec.passive_deformed_templates : - """ - - TemplateBuildSingleIterationWF = pe.Workflow( - name='ANTSTemplateBuildSingleIterationWF_' + - str(str(iterationPhasePrefix))) - - inputSpec = pe.Node( - interface=util.IdentityInterface(fields=[ - 'images', 'fixed_image', 'ListOfPassiveImagesDictionaries' - ]), - run_without_submitting=True, - name='inputspec') - # HACK: TODO: Need to move all local functions to a common untility file, or at the top of the file so that - # they do not change due to re-indenting. Otherwise re-indenting for flow control will trigger - # their hash to change. - # HACK: TODO: REMOVE 'transforms_list' it is not used. That will change all the hashes - # HACK: TODO: Need to run all python files through the code beutifiers. It has gotten pretty ugly. - outputSpec = pe.Node( - interface=util.IdentityInterface(fields=[ - 'template', 'transforms_list', 'passive_deformed_templates' - ]), - run_without_submitting=True, - name='outputspec') - - # NOTE MAP NODE! 
warp each of the original images to the provided fixed_image as the template - BeginANTS = pe.MapNode( - interface=ANTS(), name='BeginANTS', iterfield=['moving_image']) - BeginANTS.inputs.dimension = 3 - BeginANTS.inputs.output_transform_prefix = str( - iterationPhasePrefix) + '_tfm' - BeginANTS.inputs.metric = ['CC'] - BeginANTS.inputs.metric_weight = [1.0] - BeginANTS.inputs.radius = [5] - BeginANTS.inputs.transformation_model = 'SyN' - BeginANTS.inputs.gradient_step_length = 0.25 - BeginANTS.inputs.number_of_iterations = [50, 35, 15] - BeginANTS.inputs.number_of_affine_iterations = [ - 10000, 10000, 10000, 10000, 10000 - ] - BeginANTS.inputs.use_histogram_matching = True - BeginANTS.inputs.mi_option = [32, 16000] - BeginANTS.inputs.regularization = 'Gauss' - BeginANTS.inputs.regularization_gradient_field_sigma = 3 - BeginANTS.inputs.regularization_deformation_field_sigma = 0 - TemplateBuildSingleIterationWF.connect(inputSpec, 'images', BeginANTS, - 'moving_image') - TemplateBuildSingleIterationWF.connect(inputSpec, 'fixed_image', BeginANTS, - 'fixed_image') - - MakeTransformsLists = pe.Node( - interface=util.Function( - function=MakeListsOfTransformLists, - input_names=['warpTransformList', 'AffineTransformList'], - output_names=['out']), - run_without_submitting=True, - name='MakeTransformsLists') - MakeTransformsLists.interface.ignore_exception = True - TemplateBuildSingleIterationWF.connect( - BeginANTS, 'warp_transform', MakeTransformsLists, 'warpTransformList') - TemplateBuildSingleIterationWF.connect(BeginANTS, 'affine_transform', - MakeTransformsLists, - 'AffineTransformList') - - # Now warp all the input_images images - wimtdeformed = pe.MapNode( - interface=WarpImageMultiTransform(), - iterfield=['transformation_series', 'input_image'], - name='wimtdeformed') - TemplateBuildSingleIterationWF.connect(inputSpec, 'images', wimtdeformed, - 'input_image') - TemplateBuildSingleIterationWF.connect( - MakeTransformsLists, 'out', wimtdeformed, 'transformation_series') - - # Shape Update Next ===== - # Now Average All input_images deformed images together to create an updated template average - AvgDeformedImages = pe.Node( - interface=AverageImages(), name='AvgDeformedImages') - AvgDeformedImages.inputs.dimension = 3 - AvgDeformedImages.inputs.output_average_image = str( - iterationPhasePrefix) + '.nii.gz' - AvgDeformedImages.inputs.normalize = True - TemplateBuildSingleIterationWF.connect(wimtdeformed, "output_image", - AvgDeformedImages, 'images') - - # Now average all affine transforms together - AvgAffineTransform = pe.Node( - interface=AverageAffineTransform(), name='AvgAffineTransform') - AvgAffineTransform.inputs.dimension = 3 - AvgAffineTransform.inputs.output_affine_transform = 'Avererage_' + str( - iterationPhasePrefix) + '_Affine.mat' - TemplateBuildSingleIterationWF.connect(BeginANTS, 'affine_transform', - AvgAffineTransform, 'transforms') - - # Now average the warp fields togther - AvgWarpImages = pe.Node(interface=AverageImages(), name='AvgWarpImages') - AvgWarpImages.inputs.dimension = 3 - AvgWarpImages.inputs.output_average_image = str( - iterationPhasePrefix) + 'warp.nii.gz' - AvgWarpImages.inputs.normalize = True - TemplateBuildSingleIterationWF.connect(BeginANTS, 'warp_transform', - AvgWarpImages, 'images') - - # Now average the images together - # TODO: For now GradientStep is set to 0.25 as a hard coded default value. 
- GradientStep = 0.25 - GradientStepWarpImage = pe.Node( - interface=MultiplyImages(), name='GradientStepWarpImage') - GradientStepWarpImage.inputs.dimension = 3 - GradientStepWarpImage.inputs.second_input = -1.0 * GradientStep - GradientStepWarpImage.inputs.output_product_image = 'GradientStep0.25_' + str( - iterationPhasePrefix) + '_warp.nii.gz' - TemplateBuildSingleIterationWF.connect( - AvgWarpImages, 'output_average_image', GradientStepWarpImage, - 'first_input') - - # Now create the new template shape based on the average of all deformed images - UpdateTemplateShape = pe.Node( - interface=WarpImageMultiTransform(), name='UpdateTemplateShape') - UpdateTemplateShape.inputs.invert_affine = [1] - TemplateBuildSingleIterationWF.connect( - AvgDeformedImages, 'output_average_image', UpdateTemplateShape, - 'reference_image') - TemplateBuildSingleIterationWF.connect( - AvgAffineTransform, 'affine_transform', UpdateTemplateShape, - 'transformation_series') - TemplateBuildSingleIterationWF.connect(GradientStepWarpImage, - 'output_product_image', - UpdateTemplateShape, 'input_image') - - ApplyInvAverageAndFourTimesGradientStepWarpImage = pe.Node( - interface=util.Function( - function=MakeTransformListWithGradientWarps, - input_names=['averageAffineTranform', 'gradientStepWarp'], - output_names=['TransformListWithGradientWarps']), - run_without_submitting=True, - name='MakeTransformListWithGradientWarps') - ApplyInvAverageAndFourTimesGradientStepWarpImage.interface.ignore_exception = True - - TemplateBuildSingleIterationWF.connect( - AvgAffineTransform, 'affine_transform', - ApplyInvAverageAndFourTimesGradientStepWarpImage, - 'averageAffineTranform') - TemplateBuildSingleIterationWF.connect( - UpdateTemplateShape, 'output_image', - ApplyInvAverageAndFourTimesGradientStepWarpImage, 'gradientStepWarp') - - ReshapeAverageImageWithShapeUpdate = pe.Node( - interface=WarpImageMultiTransform(), - name='ReshapeAverageImageWithShapeUpdate') - ReshapeAverageImageWithShapeUpdate.inputs.invert_affine = [1] - ReshapeAverageImageWithShapeUpdate.inputs.out_postfix = '_Reshaped' - TemplateBuildSingleIterationWF.connect( - AvgDeformedImages, 'output_average_image', - ReshapeAverageImageWithShapeUpdate, 'input_image') - TemplateBuildSingleIterationWF.connect( - AvgDeformedImages, 'output_average_image', - ReshapeAverageImageWithShapeUpdate, 'reference_image') - TemplateBuildSingleIterationWF.connect( - ApplyInvAverageAndFourTimesGradientStepWarpImage, - 'TransformListWithGradientWarps', ReshapeAverageImageWithShapeUpdate, - 'transformation_series') - TemplateBuildSingleIterationWF.connect(ReshapeAverageImageWithShapeUpdate, - 'output_image', outputSpec, - 'template') - - ###### - ###### - # Process all the passive deformed images in a way similar to the main image used for registration - ###### - ###### - ###### - ############################################## - # Now warp all the ListOfPassiveImagesDictionaries images - FlattenTransformAndImagesListNode = pe.Node( - Function( - function=FlattenTransformAndImagesList, - input_names=[ - 'ListOfPassiveImagesDictionaries', 'transformation_series' - ], - output_names=[ - 'flattened_images', 'flattened_transforms', - 'flattened_image_nametypes' - ]), - run_without_submitting=True, - name="99_FlattenTransformAndImagesList") - TemplateBuildSingleIterationWF.connect( - inputSpec, 'ListOfPassiveImagesDictionaries', - FlattenTransformAndImagesListNode, 'ListOfPassiveImagesDictionaries') - TemplateBuildSingleIterationWF.connect(MakeTransformsLists, 'out', - 
FlattenTransformAndImagesListNode, - 'transformation_series') - wimtPassivedeformed = pe.MapNode( - interface=WarpImageMultiTransform(), - iterfield=['transformation_series', 'input_image'], - name='wimtPassivedeformed') - TemplateBuildSingleIterationWF.connect( - AvgDeformedImages, 'output_average_image', wimtPassivedeformed, - 'reference_image') - TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, - 'flattened_images', - wimtPassivedeformed, 'input_image') - TemplateBuildSingleIterationWF.connect( - FlattenTransformAndImagesListNode, 'flattened_transforms', - wimtPassivedeformed, 'transformation_series') - - RenestDeformedPassiveImagesNode = pe.Node( - Function( - function=RenestDeformedPassiveImages, - input_names=['deformedPassiveImages', 'flattened_image_nametypes'], - output_names=[ - 'nested_imagetype_list', 'outputAverageImageName_list', - 'image_type_list' - ]), - run_without_submitting=True, - name="99_RenestDeformedPassiveImages") - TemplateBuildSingleIterationWF.connect(wimtPassivedeformed, 'output_image', - RenestDeformedPassiveImagesNode, - 'deformedPassiveImages') - TemplateBuildSingleIterationWF.connect( - FlattenTransformAndImagesListNode, 'flattened_image_nametypes', - RenestDeformedPassiveImagesNode, 'flattened_image_nametypes') - # Now Average All passive input_images deformed images together to create an updated template average - AvgDeformedPassiveImages = pe.MapNode( - interface=AverageImages(), - iterfield=['images', 'output_average_image'], - name='AvgDeformedPassiveImages') - AvgDeformedPassiveImages.inputs.dimension = 3 - AvgDeformedPassiveImages.inputs.normalize = False - TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, - "nested_imagetype_list", - AvgDeformedPassiveImages, 'images') - TemplateBuildSingleIterationWF.connect( - RenestDeformedPassiveImagesNode, "outputAverageImageName_list", - AvgDeformedPassiveImages, 'output_average_image') - - # -- TODO: Now neeed to reshape all the passive images as well - ReshapeAveragePassiveImageWithShapeUpdate = pe.MapNode( - interface=WarpImageMultiTransform(), - iterfield=['input_image', 'reference_image', 'out_postfix'], - name='ReshapeAveragePassiveImageWithShapeUpdate') - ReshapeAveragePassiveImageWithShapeUpdate.inputs.invert_affine = [1] - TemplateBuildSingleIterationWF.connect( - RenestDeformedPassiveImagesNode, "image_type_list", - ReshapeAveragePassiveImageWithShapeUpdate, 'out_postfix') - TemplateBuildSingleIterationWF.connect( - AvgDeformedPassiveImages, 'output_average_image', - ReshapeAveragePassiveImageWithShapeUpdate, 'input_image') - TemplateBuildSingleIterationWF.connect( - AvgDeformedPassiveImages, 'output_average_image', - ReshapeAveragePassiveImageWithShapeUpdate, 'reference_image') - TemplateBuildSingleIterationWF.connect( - ApplyInvAverageAndFourTimesGradientStepWarpImage, - 'TransformListWithGradientWarps', - ReshapeAveragePassiveImageWithShapeUpdate, 'transformation_series') - TemplateBuildSingleIterationWF.connect( - ReshapeAveragePassiveImageWithShapeUpdate, 'output_image', outputSpec, - 'passive_deformed_templates') - - return TemplateBuildSingleIterationWF diff --git a/nipype/workflows/smri/ants/__init__.py b/nipype/workflows/smri/ants/__init__.py deleted file mode 100644 index 3cb140771c..0000000000 --- a/nipype/workflows/smri/ants/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -from .ANTSBuildTemplate import ANTSTemplateBuildSingleIterationWF -from .antsRegistrationBuildTemplate import 
antsRegistrationTemplateBuildSingleIterationWF diff --git a/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py b/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py deleted file mode 100644 index 3574935fc1..0000000000 --- a/nipype/workflows/smri/ants/antsRegistrationBuildTemplate.py +++ /dev/null @@ -1,535 +0,0 @@ -# -*- coding: utf-8 -*- -################################################################################# -# Program: Build Template Parallel -# Language: Python -## -# Authors: Jessica Forbes, Grace Murray, and Hans Johnson, University of Iowa -## -# This software is distributed WITHOUT ANY WARRANTY; without even -# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR -# PURPOSE. -## -################################################################################# -from __future__ import print_function -from builtins import range - -from ....pipeline import engine as pe -from ....interfaces import utility as util -from ....interfaces.utility import Function - -from ....interfaces.ants import (Registration, ApplyTransforms, AverageImages, - MultiplyImages, AverageAffineTransform) - - -def makeListOfOneElement(inputFile): - outputList = [inputFile] - return outputList - - -def GetFirstListElement(this_list): - return this_list[0] - - -def MakeTransformListWithGradientWarps(averageAffineTranform, - gradientStepWarp): - return [ - averageAffineTranform, gradientStepWarp, gradientStepWarp, - gradientStepWarp, gradientStepWarp - ] - - -def RenestDeformedPassiveImages(deformedPassiveImages, - flattened_image_nametypes, - interpolationMapping): - import os - """ Now make a list of lists of images where the outter list is per image type, - and the inner list is the same size as the number of subjects to be averaged. - In this case, the first element will be a list of all the deformed T2's, and - the second element will be a list of all deformed POSTERIOR_AIR, etc.. - """ - all_images_size = len(deformedPassiveImages) - image_dictionary_of_lists = dict() - nested_imagetype_list = list() - outputAverageImageName_list = list() - image_type_list = list() - nested_interpolation_type = list() - # make empty_list, this is not efficient, but it works - for name in flattened_image_nametypes: - image_dictionary_of_lists[name] = list() - for index in range(0, all_images_size): - curr_name = flattened_image_nametypes[index] - curr_file = deformedPassiveImages[index] - image_dictionary_of_lists[curr_name].append(curr_file) - for image_type, image_list in list(image_dictionary_of_lists.items()): - nested_imagetype_list.append(image_list) - outputAverageImageName_list.append('AVG_' + image_type + '.nii.gz') - image_type_list.append('WARP_AVG_' + image_type) - if image_type in interpolationMapping: - nested_interpolation_type.append(interpolationMapping[image_type]) - else: - nested_interpolation_type.append( - 'Linear') # Linear is the default. 
- print("\n" * 10) - print("HACK: ", nested_imagetype_list) - print("HACK: ", outputAverageImageName_list) - print("HACK: ", image_type_list) - print("HACK: ", nested_interpolation_type) - return nested_imagetype_list, outputAverageImageName_list, image_type_list, nested_interpolation_type - - -def SplitAffineAndWarpComponents(list_of_transforms_lists): - # Nota bene: The outputs will include the initial_moving_transform from Registration (which depends on what - # the invert_initial_moving_transform is set to) - affine_component_list = [] - warp_component_list = [] - for transform in list_of_transforms_lists: - affine_component_list.append(transform[0]) - warp_component_list.append(transform[1]) - print("HACK ", affine_component_list, " ", warp_component_list) - return affine_component_list, warp_component_list - - -# Flatten and return equal length transform and images lists. - - -def FlattenTransformAndImagesList(ListOfPassiveImagesDictionaries, transforms, - invert_transform_flags, - interpolationMapping): - import sys - print("HACK: DEBUG: ListOfPassiveImagesDictionaries\n{lpi}\n".format( - lpi=ListOfPassiveImagesDictionaries)) - subjCount = len(ListOfPassiveImagesDictionaries) - tranCount = len(transforms) - if subjCount != tranCount: - print("ERROR: subjCount must equal tranCount {0} != {1}".format( - subjCount, tranCount)) - sys.exit(-1) - invertTfmsFlagsCount = len(invert_transform_flags) - if subjCount != invertTfmsFlagsCount: - print("ERROR: subjCount must equal invertTfmsFlags {0} != {1}".format( - subjCount, invertTfmsFlagsCount)) - sys.exit(-1) - flattened_images = list() - flattened_image_nametypes = list() - flattened_transforms = list() - flattened_invert_transform_flags = list() - flattened_interpolation_type = list() - passiveImagesCount = len(ListOfPassiveImagesDictionaries[0]) - for subjIndex in range(0, subjCount): - # if passiveImagesCount != len(ListOfPassiveImagesDictionaries[subjIndex]): - # print "ERROR: all image lengths must be equal {0} != {1}".format(passiveImagesCount,len(ListOfPassiveImagesDictionaries[subjIndex])) - # sys.exit(-1) - subjImgDictionary = ListOfPassiveImagesDictionaries[subjIndex] - subjToAtlasTransform = transforms[subjIndex] - subjToAtlasInvertFlags = invert_transform_flags[subjIndex] - for imgname, img in list(subjImgDictionary.items()): - flattened_images.append(img) - flattened_image_nametypes.append(imgname) - flattened_transforms.append(subjToAtlasTransform) - flattened_invert_transform_flags.append(subjToAtlasInvertFlags) - if imgname in interpolationMapping: - flattened_interpolation_type.append( - interpolationMapping[imgname]) - else: - flattened_interpolation_type.append( - 'Linear') # Linear is the default. - print("HACK: flattened images {0}\n".format(flattened_images)) - print("HACK: flattened nametypes {0}\n".format(flattened_image_nametypes)) - print("HACK: flattened txfms {0}\n".format(flattened_transforms)) - print("HACK: flattened txfmsFlags{0}\n".format( - flattened_invert_transform_flags)) - return flattened_images, flattened_transforms, flattened_invert_transform_flags, flattened_image_nametypes, flattened_interpolation_type - - -def GetMovingImages(ListOfImagesDictionaries, registrationImageTypes, - interpolationMapping): - """ This currently ONLY works when registrationImageTypes has - length of exactly 1. When the new multi-variate registration - is introduced, it will be expanded. 
- """ - if len(registrationImageTypes) != 1: - print("ERROR: Multivariate imageing not supported yet!") - return [] - moving_images = [ - mdict[registrationImageTypes[0]] for mdict in ListOfImagesDictionaries - ] - moving_interpolation_type = interpolationMapping[registrationImageTypes[0]] - return moving_images, moving_interpolation_type - - -def GetPassiveImages(ListOfImagesDictionaries, registrationImageTypes): - if len(registrationImageTypes) != 1: - print("ERROR: Multivariate imageing not supported yet!") - return [dict()] - passive_images = list() - for mdict in ListOfImagesDictionaries: - ThisSubjectPassiveImages = dict() - for key, value in list(mdict.items()): - if key not in registrationImageTypes: - ThisSubjectPassiveImages[key] = value - passive_images.append(ThisSubjectPassiveImages) - return passive_images - - -## -# NOTE: The modes can be either 'SINGLE_IMAGE' or 'MULTI' -# 'SINGLE_IMAGE' is quick shorthand when you are building an atlas with a single subject, then registration can -# be short-circuted -# any other string indicates the normal mode that you would expect and replicates the shell script build_template_parallel.sh - - -def antsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''): - """ - - Inputs:: - - inputspec.images : - inputspec.fixed_image : - inputspec.ListOfPassiveImagesDictionaries : - inputspec.interpolationMapping : - - Outputs:: - - outputspec.template : - outputspec.transforms_list : - outputspec.passive_deformed_templates : - """ - TemplateBuildSingleIterationWF = pe.Workflow( - name='antsRegistrationTemplateBuildSingleIterationWF_' + - str(iterationPhasePrefix)) - - inputSpec = pe.Node( - interface=util.IdentityInterface(fields=[ - 'ListOfImagesDictionaries', 'registrationImageTypes', - 'interpolationMapping', 'fixed_image' - ]), - run_without_submitting=True, - name='inputspec') - # HACK: TODO: Need to move all local functions to a common untility file, or at the top of the file so that - # they do not change due to re-indenting. Otherwise re-indenting for flow control will trigger - # their hash to change. - # HACK: TODO: REMOVE 'transforms_list' it is not used. That will change all the hashes - # HACK: TODO: Need to run all python files through the code beutifiers. It has gotten pretty ugly. - outputSpec = pe.Node( - interface=util.IdentityInterface(fields=[ - 'template', 'transforms_list', 'passive_deformed_templates' - ]), - run_without_submitting=True, - name='outputspec') - - # NOTE MAP NODE! 
warp each of the original images to the provided fixed_image as the template - BeginANTS = pe.MapNode( - interface=Registration(), name='BeginANTS', iterfield=['moving_image']) - BeginANTS.inputs.dimension = 3 - BeginANTS.inputs.output_transform_prefix = str( - iterationPhasePrefix) + '_tfm' - BeginANTS.inputs.transforms = ["Affine", "SyN"] - BeginANTS.inputs.transform_parameters = [[0.9], [0.25, 3.0, 0.0]] - BeginANTS.inputs.metric = ['Mattes', 'CC'] - BeginANTS.inputs.metric_weight = [1.0, 1.0] - BeginANTS.inputs.radius_or_number_of_bins = [32, 5] - BeginANTS.inputs.number_of_iterations = [[1000, 1000, 1000], [50, 35, 15]] - BeginANTS.inputs.use_histogram_matching = [True, True] - BeginANTS.inputs.use_estimate_learning_rate_once = [False, False] - BeginANTS.inputs.shrink_factors = [[3, 2, 1], [3, 2, 1]] - BeginANTS.inputs.smoothing_sigmas = [[3, 2, 0], [3, 2, 0]] - BeginANTS.inputs.sigma_units = ["vox"] * 2 - - GetMovingImagesNode = pe.Node( - interface=util.Function( - function=GetMovingImages, - input_names=[ - 'ListOfImagesDictionaries', 'registrationImageTypes', - 'interpolationMapping' - ], - output_names=['moving_images', 'moving_interpolation_type']), - run_without_submitting=True, - name='99_GetMovingImagesNode') - TemplateBuildSingleIterationWF.connect( - inputSpec, 'ListOfImagesDictionaries', GetMovingImagesNode, - 'ListOfImagesDictionaries') - TemplateBuildSingleIterationWF.connect(inputSpec, 'registrationImageTypes', - GetMovingImagesNode, - 'registrationImageTypes') - TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', - GetMovingImagesNode, - 'interpolationMapping') - - TemplateBuildSingleIterationWF.connect( - GetMovingImagesNode, 'moving_images', BeginANTS, 'moving_image') - TemplateBuildSingleIterationWF.connect(GetMovingImagesNode, - 'moving_interpolation_type', - BeginANTS, 'interpolation') - TemplateBuildSingleIterationWF.connect(inputSpec, 'fixed_image', BeginANTS, - 'fixed_image') - - # Now warp all the input_images images - wimtdeformed = pe.MapNode( - interface=ApplyTransforms(), - iterfield=['transforms', 'invert_transform_flags', 'input_image'], - name='wimtdeformed') - wimtdeformed.inputs.interpolation = 'Linear' - wimtdeformed.default_value = 0 - TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', - wimtdeformed, 'transforms') - TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', - wimtdeformed, - 'invert_transform_flags') - TemplateBuildSingleIterationWF.connect( - GetMovingImagesNode, 'moving_images', wimtdeformed, 'input_image') - TemplateBuildSingleIterationWF.connect(inputSpec, 'fixed_image', - wimtdeformed, 'reference_image') - - # Shape Update Next ===== - # Now Average All input_images deformed images together to create an updated template average - AvgDeformedImages = pe.Node( - interface=AverageImages(), name='AvgDeformedImages') - AvgDeformedImages.inputs.dimension = 3 - AvgDeformedImages.inputs.output_average_image = str( - iterationPhasePrefix) + '.nii.gz' - AvgDeformedImages.inputs.normalize = True - TemplateBuildSingleIterationWF.connect(wimtdeformed, "output_image", - AvgDeformedImages, 'images') - - # Now average all affine transforms together - AvgAffineTransform = pe.Node( - interface=AverageAffineTransform(), name='AvgAffineTransform') - AvgAffineTransform.inputs.dimension = 3 - AvgAffineTransform.inputs.output_affine_transform = 'Avererage_' + str( - iterationPhasePrefix) + '_Affine.mat' - - SplitAffineAndWarpsNode = pe.Node( - interface=util.Function( - 
function=SplitAffineAndWarpComponents, - input_names=['list_of_transforms_lists'], - output_names=['affine_component_list', 'warp_component_list']), - run_without_submitting=True, - name='99_SplitAffineAndWarpsNode') - TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', - SplitAffineAndWarpsNode, - 'list_of_transforms_lists') - TemplateBuildSingleIterationWF.connect(SplitAffineAndWarpsNode, - 'affine_component_list', - AvgAffineTransform, 'transforms') - - # Now average the warp fields togther - AvgWarpImages = pe.Node(interface=AverageImages(), name='AvgWarpImages') - AvgWarpImages.inputs.dimension = 3 - AvgWarpImages.inputs.output_average_image = str( - iterationPhasePrefix) + 'warp.nii.gz' - AvgWarpImages.inputs.normalize = True - TemplateBuildSingleIterationWF.connect(SplitAffineAndWarpsNode, - 'warp_component_list', - AvgWarpImages, 'images') - - # Now average the images together - # TODO: For now GradientStep is set to 0.25 as a hard coded default value. - GradientStep = 0.25 - GradientStepWarpImage = pe.Node( - interface=MultiplyImages(), name='GradientStepWarpImage') - GradientStepWarpImage.inputs.dimension = 3 - GradientStepWarpImage.inputs.second_input = -1.0 * GradientStep - GradientStepWarpImage.inputs.output_product_image = 'GradientStep0.25_' + str( - iterationPhasePrefix) + '_warp.nii.gz' - TemplateBuildSingleIterationWF.connect( - AvgWarpImages, 'output_average_image', GradientStepWarpImage, - 'first_input') - - # Now create the new template shape based on the average of all deformed images - UpdateTemplateShape = pe.Node( - interface=ApplyTransforms(), name='UpdateTemplateShape') - UpdateTemplateShape.inputs.invert_transform_flags = [True] - UpdateTemplateShape.inputs.interpolation = 'Linear' - UpdateTemplateShape.default_value = 0 - - TemplateBuildSingleIterationWF.connect( - AvgDeformedImages, 'output_average_image', UpdateTemplateShape, - 'reference_image') - TemplateBuildSingleIterationWF.connect([ - (AvgAffineTransform, UpdateTemplateShape, - [(('affine_transform', makeListOfOneElement), 'transforms')]), - ]) - TemplateBuildSingleIterationWF.connect(GradientStepWarpImage, - 'output_product_image', - UpdateTemplateShape, 'input_image') - - ApplyInvAverageAndFourTimesGradientStepWarpImage = pe.Node( - interface=util.Function( - function=MakeTransformListWithGradientWarps, - input_names=['averageAffineTranform', 'gradientStepWarp'], - output_names=['TransformListWithGradientWarps']), - run_without_submitting=True, - name='99_MakeTransformListWithGradientWarps') - ApplyInvAverageAndFourTimesGradientStepWarpImage.interface.ignore_exception = True - - TemplateBuildSingleIterationWF.connect( - AvgAffineTransform, 'affine_transform', - ApplyInvAverageAndFourTimesGradientStepWarpImage, - 'averageAffineTranform') - TemplateBuildSingleIterationWF.connect( - UpdateTemplateShape, 'output_image', - ApplyInvAverageAndFourTimesGradientStepWarpImage, 'gradientStepWarp') - - ReshapeAverageImageWithShapeUpdate = pe.Node( - interface=ApplyTransforms(), name='ReshapeAverageImageWithShapeUpdate') - ReshapeAverageImageWithShapeUpdate.inputs.invert_transform_flags = [ - True, False, False, False, False - ] - ReshapeAverageImageWithShapeUpdate.inputs.interpolation = 'Linear' - ReshapeAverageImageWithShapeUpdate.default_value = 0 - ReshapeAverageImageWithShapeUpdate.inputs.output_image = 'ReshapeAverageImageWithShapeUpdate.nii.gz' - TemplateBuildSingleIterationWF.connect( - AvgDeformedImages, 'output_average_image', - ReshapeAverageImageWithShapeUpdate, 'input_image') - 
TemplateBuildSingleIterationWF.connect( - AvgDeformedImages, 'output_average_image', - ReshapeAverageImageWithShapeUpdate, 'reference_image') - TemplateBuildSingleIterationWF.connect( - ApplyInvAverageAndFourTimesGradientStepWarpImage, - 'TransformListWithGradientWarps', ReshapeAverageImageWithShapeUpdate, - 'transforms') - TemplateBuildSingleIterationWF.connect(ReshapeAverageImageWithShapeUpdate, - 'output_image', outputSpec, - 'template') - - ###### - ###### - # Process all the passive deformed images in a way similar to the main image used for registration - ###### - ###### - ###### - ############################################## - # Now warp all the ListOfPassiveImagesDictionaries images - FlattenTransformAndImagesListNode = pe.Node( - Function( - function=FlattenTransformAndImagesList, - input_names=[ - 'ListOfPassiveImagesDictionaries', 'transforms', - 'invert_transform_flags', 'interpolationMapping' - ], - output_names=[ - 'flattened_images', 'flattened_transforms', - 'flattened_invert_transform_flags', - 'flattened_image_nametypes', 'flattened_interpolation_type' - ]), - run_without_submitting=True, - name="99_FlattenTransformAndImagesList") - - GetPassiveImagesNode = pe.Node( - interface=util.Function( - function=GetPassiveImages, - input_names=['ListOfImagesDictionaries', 'registrationImageTypes'], - output_names=['ListOfPassiveImagesDictionaries']), - run_without_submitting=True, - name='99_GetPassiveImagesNode') - TemplateBuildSingleIterationWF.connect( - inputSpec, 'ListOfImagesDictionaries', GetPassiveImagesNode, - 'ListOfImagesDictionaries') - TemplateBuildSingleIterationWF.connect(inputSpec, 'registrationImageTypes', - GetPassiveImagesNode, - 'registrationImageTypes') - - TemplateBuildSingleIterationWF.connect( - GetPassiveImagesNode, 'ListOfPassiveImagesDictionaries', - FlattenTransformAndImagesListNode, 'ListOfPassiveImagesDictionaries') - TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', - FlattenTransformAndImagesListNode, - 'interpolationMapping') - TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', - FlattenTransformAndImagesListNode, - 'transforms') - TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', - FlattenTransformAndImagesListNode, - 'invert_transform_flags') - wimtPassivedeformed = pe.MapNode( - interface=ApplyTransforms(), - iterfield=[ - 'transforms', 'invert_transform_flags', 'input_image', - 'interpolation' - ], - name='wimtPassivedeformed') - wimtPassivedeformed.default_value = 0 - TemplateBuildSingleIterationWF.connect( - AvgDeformedImages, 'output_average_image', wimtPassivedeformed, - 'reference_image') - TemplateBuildSingleIterationWF.connect( - FlattenTransformAndImagesListNode, 'flattened_interpolation_type', - wimtPassivedeformed, 'interpolation') - TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, - 'flattened_images', - wimtPassivedeformed, 'input_image') - TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, - 'flattened_transforms', - wimtPassivedeformed, 'transforms') - TemplateBuildSingleIterationWF.connect( - FlattenTransformAndImagesListNode, 'flattened_invert_transform_flags', - wimtPassivedeformed, 'invert_transform_flags') - - RenestDeformedPassiveImagesNode = pe.Node( - Function( - function=RenestDeformedPassiveImages, - input_names=[ - 'deformedPassiveImages', 'flattened_image_nametypes', - 'interpolationMapping' - ], - output_names=[ - 'nested_imagetype_list', 'outputAverageImageName_list', - 'image_type_list', 
'nested_interpolation_type' - ]), - run_without_submitting=True, - name="99_RenestDeformedPassiveImages") - TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', - RenestDeformedPassiveImagesNode, - 'interpolationMapping') - TemplateBuildSingleIterationWF.connect(wimtPassivedeformed, 'output_image', - RenestDeformedPassiveImagesNode, - 'deformedPassiveImages') - TemplateBuildSingleIterationWF.connect( - FlattenTransformAndImagesListNode, 'flattened_image_nametypes', - RenestDeformedPassiveImagesNode, 'flattened_image_nametypes') - # Now Average All passive input_images deformed images together to create an updated template average - AvgDeformedPassiveImages = pe.MapNode( - interface=AverageImages(), - iterfield=['images', 'output_average_image'], - name='AvgDeformedPassiveImages') - AvgDeformedPassiveImages.inputs.dimension = 3 - AvgDeformedPassiveImages.inputs.normalize = False - TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, - "nested_imagetype_list", - AvgDeformedPassiveImages, 'images') - TemplateBuildSingleIterationWF.connect( - RenestDeformedPassiveImagesNode, "outputAverageImageName_list", - AvgDeformedPassiveImages, 'output_average_image') - - # -- TODO: Now neeed to reshape all the passive images as well - ReshapeAveragePassiveImageWithShapeUpdate = pe.MapNode( - interface=ApplyTransforms(), - iterfield=[ - 'input_image', 'reference_image', 'output_image', 'interpolation' - ], - name='ReshapeAveragePassiveImageWithShapeUpdate') - ReshapeAveragePassiveImageWithShapeUpdate.inputs.invert_transform_flags = [ - True, False, False, False, False - ] - ReshapeAveragePassiveImageWithShapeUpdate.default_value = 0 - TemplateBuildSingleIterationWF.connect( - RenestDeformedPassiveImagesNode, 'nested_interpolation_type', - ReshapeAveragePassiveImageWithShapeUpdate, 'interpolation') - TemplateBuildSingleIterationWF.connect( - RenestDeformedPassiveImagesNode, 'outputAverageImageName_list', - ReshapeAveragePassiveImageWithShapeUpdate, 'output_image') - TemplateBuildSingleIterationWF.connect( - AvgDeformedPassiveImages, 'output_average_image', - ReshapeAveragePassiveImageWithShapeUpdate, 'input_image') - TemplateBuildSingleIterationWF.connect( - AvgDeformedPassiveImages, 'output_average_image', - ReshapeAveragePassiveImageWithShapeUpdate, 'reference_image') - TemplateBuildSingleIterationWF.connect( - ApplyInvAverageAndFourTimesGradientStepWarpImage, - 'TransformListWithGradientWarps', - ReshapeAveragePassiveImageWithShapeUpdate, 'transforms') - TemplateBuildSingleIterationWF.connect( - ReshapeAveragePassiveImageWithShapeUpdate, 'output_image', outputSpec, - 'passive_deformed_templates') - - return TemplateBuildSingleIterationWF diff --git a/nipype/workflows/smri/freesurfer/__init__.py b/nipype/workflows/smri/freesurfer/__init__.py deleted file mode 100644 index caa854f9c9..0000000000 --- a/nipype/workflows/smri/freesurfer/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# -*- coding: utf-8 -*- -from .utils import (create_getmask_flow, create_get_stats_flow, - create_tessellation_flow) -from .bem import create_bem_flow -from .recon import create_skullstripped_recon_flow, create_reconall_workflow diff --git a/nipype/workflows/smri/freesurfer/autorecon1.py b/nipype/workflows/smri/freesurfer/autorecon1.py deleted file mode 100644 index 0973e210a7..0000000000 --- a/nipype/workflows/smri/freesurfer/autorecon1.py +++ /dev/null @@ -1,512 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from 
....utils import NUMPY_MMAP -from ....pipeline import engine as pe -from ....interfaces.utility import Function, IdentityInterface -from ....interfaces.freesurfer import * -from .utils import copy_file - - -def checkT1s(T1_files, cw256=False): - """Verifying size of inputs and setting workflow parameters""" - import sys - import nibabel as nb - from nipype.utils.filemanip import ensure_list - - T1_files = ensure_list(T1_files) - if len(T1_files) == 0: - print("ERROR: No T1's Given") - sys.exit(-1) - - shape = nb.load(T1_files[0]).shape - for t1 in T1_files[1:]: - if nb.load(t1, mmap=NUMPY_MMAP).shape != shape: - print("ERROR: T1s not the same size. Cannot process {0} and {1} " - "together".format(T1_files[0], t1)) - sys.exit(-1) - - origvol_names = ["{0:03d}.mgz".format(i + 1) for i in range(len(T1_files))] - - # check if cw256 is set to crop the images if size is larger than 256 - if not cw256 and any(dim > 256 for dim in shape): - print("Setting MRI Convert to crop images to 256 FOV") - cw256 = True - - resample_type = 'cubic' if len(T1_files) > 1 else 'interpolate' - return T1_files, cw256, resample_type, origvol_names - - -def create_AutoRecon1(name="AutoRecon1", - longitudinal=False, - distance=None, - custom_atlas=None, - plugin_args=None, - shrink=None, - stop=None, - fsvernum=5.3): - """Creates the AutoRecon1 workflow in nipype. - - Inputs:: - inputspec.T1_files : T1 files (mandatory) - inputspec.T2_file : T2 file (optional) - inputspec.FLAIR_file : FLAIR file (optional) - inputspec.cw256 : Conform inputs to 256 FOV (optional) - inputspec.num_threads: Number of threads to use with EM Register (default=1) - Outpus:: - - """ - ar1_wf = pe.Workflow(name=name) - inputspec = pe.Node( - interface=IdentityInterface(fields=[ - 'T1_files', 'T2_file', 'FLAIR_file', 'cw256', 'num_threads', - 'reg_template_withskull', 'awk_file' - ]), - run_without_submitting=True, - name='inputspec') - - if not longitudinal: - # single session processing - verify_inputs = pe.Node( - Function(["T1_files", "cw256"], - ["T1_files", "cw256", "resample_type", "origvol_names"], - checkT1s), - name="Check_T1s") - ar1_wf.connect([(inputspec, verify_inputs, [('T1_files', 'T1_files'), - ('cw256', 'cw256')])]) - - # T1 image preparation - # For all T1's mri_convert ${InputVol} ${out_file} - T1_image_preparation = pe.MapNode( - MRIConvert(), iterfield=['in_file', 'out_file'], name="T1_prep") - - ar1_wf.connect([ - (verify_inputs, T1_image_preparation, - [('T1_files', 'in_file'), ('origvol_names', 'out_file')]), - ]) - - def convert_modalities(in_file=None, out_file=None): - """Returns an undefined output if the in_file is not defined""" - from nipype.interfaces.freesurfer import MRIConvert - import os - if in_file: - convert = MRIConvert() - convert.inputs.in_file = in_file - convert.inputs.out_file = out_file - convert.inputs.no_scale = True - out = convert.run() - out_file = os.path.abspath(out.outputs.out_file) - return out_file - - T2_convert = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], - convert_modalities), - name="T2_Convert") - T2_convert.inputs.out_file = 'T2raw.mgz' - ar1_wf.connect([(inputspec, T2_convert, [('T2_file', 'in_file')])]) - - FLAIR_convert = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], - convert_modalities), - name="FLAIR_Convert") - FLAIR_convert.inputs.out_file = 'FLAIRraw.mgz' - ar1_wf.connect([(inputspec, FLAIR_convert, [('FLAIR_file', - 'in_file')])]) - else: - # longitudinal inputs - inputspec = pe.Node( - interface=IdentityInterface(fields=[ - 'T1_files', 
'iscales', 'ltas', 'subj_to_template_lta', - 'template_talairach_xfm', 'template_brainmask' - ]), - run_without_submitting=True, - name='inputspec') - - def output_names(T1_files): - """Create file names that are dependent on the number of T1 inputs""" - iscale_names = list() - lta_names = list() - for i, t1 in enumerate(T1_files): - # assign an input number - file_num = str(i + 1) - while len(file_num) < 3: - file_num = '0' + file_num - iscale_names.append("{0}-iscale.txt".format(file_num)) - lta_names.append("{0}.lta".format(file_num)) - return iscale_names, lta_names - - filenames = pe.Node( - Function(['T1_files'], ['iscale_names', 'lta_names'], - output_names), - name="Longitudinal_Filenames") - ar1_wf.connect([(inputspec, filenames, [('T1_files', 'T1_files')])]) - - copy_ltas = pe.MapNode( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - iterfield=['in_file', 'out_file'], - name='Copy_ltas') - ar1_wf.connect([(inputspec, copy_ltas, [('ltas', 'in_file')]), - (filenames, copy_ltas, [('lta_names', 'out_file')])]) - - copy_iscales = pe.MapNode( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - iterfield=['in_file', 'out_file'], - name='Copy_iscales') - ar1_wf.connect([(inputspec, copy_iscales, [('iscales', 'in_file')]), - (filenames, copy_iscales, [('iscale_names', - 'out_file')])]) - - concatenate_lta = pe.MapNode( - ConcatenateLTA(), iterfield=['in_file'], name="Concatenate_ltas") - ar1_wf.connect([(copy_ltas, concatenate_lta, [('out_file', - 'in_file')]), - (inputspec, concatenate_lta, [('subj_to_template_lta', - 'subj_to_base')])]) - - # Motion Correction - """ - When there are multiple source volumes, this step will correct for small - motions between them and then average them together. The output of the - motion corrected average is mri/rawavg.mgz which is then conformed to - 255 cubed char images (1mm isotropic voxels) in mri/orig.mgz. - """ - - def createTemplate(in_files, out_file): - import os - import shutil - if len(in_files) == 1: - # if only 1 T1 scan given, no need to run RobustTemplate - print( - "WARNING: only one run found. 
This is OK, but motion correction " - + - "cannot be performed on one run, so I'll copy the run to rawavg " - + "and continue.") - shutil.copyfile(in_files[0], out_file) - intensity_scales = None - transforms = None - else: - from nipype.interfaces.freesurfer import RobustTemplate - # if multiple T1 scans are given run RobustTemplate - intensity_scales = [ - os.path.basename(f.replace('.mgz', '-iscale.txt')) - for f in in_files - ] - transforms = [ - os.path.basename(f.replace('.mgz', '.lta')) for f in in_files - ] - robtemp = RobustTemplate() - robtemp.inputs.in_files = in_files - robtemp.inputs.average_metric = 'median' - robtemp.inputs.out_file = out_file - robtemp.inputs.no_iteration = True - robtemp.inputs.fixed_timepoint = True - robtemp.inputs.auto_detect_sensitivity = True - robtemp.inputs.initial_timepoint = 1 - robtemp.inputs.scaled_intensity_outputs = intensity_scales - robtemp.inputs.transform_outputs = transforms - robtemp.inputs.subsample_threshold = 200 - robtemp.inputs.intensity_scaling = True - robtemp_result = robtemp.run() - # collect the outputs from RobustTemplate - out_file = robtemp_result.outputs.out_file - intensity_scales = [ - os.path.abspath(f) - for f in robtemp_result.outputs.scaled_intensity_outputs - ] - transforms = [ - os.path.abspath(f) - for f in robtemp_result.outputs.transform_outputs - ] - out_file = os.path.abspath(out_file) - return out_file, intensity_scales, transforms - - if not longitudinal: - create_template = pe.Node( - Function(['in_files', 'out_file'], - ['out_file', 'intensity_scales', 'transforms'], - createTemplate), - name="Robust_Template") - create_template.inputs.out_file = 'rawavg.mgz' - ar1_wf.connect([(T1_image_preparation, create_template, - [('out_file', 'in_files')])]) - else: - create_template = pe.Node(RobustTemplate(), name="Robust_Template") - create_template.inputs.average_metric = 'median' - create_template.inputs.out_file = 'rawavg.mgz' - create_template.inputs.no_iteration = True - ar1_wf.connect([(concatenate_lta, create_template, - [('out_file', 'initial_transforms')]), - (inputSpec, create_template, [('in_t1s', 'in_files')]), - (copy_iscales, create_template, - [('out_file', 'in_intensity_scales')])]) - - # mri_convert - conform_template = pe.Node(MRIConvert(), name='Conform_Template') - conform_template.inputs.out_file = 'orig.mgz' - if not longitudinal: - conform_template.inputs.conform = True - ar1_wf.connect([(verify_inputs, conform_template, - [('cw256', 'cw256'), ('resample_type', - 'resample_type')])]) - else: - conform_template.inputs.out_datatype = 'uchar' - - ar1_wf.connect([(create_template, conform_template, [('out_file', - 'in_file')])]) - - # Talairach - """ - This computes the affine transform from the orig volume to the MNI305 atlas using Avi Snyders 4dfp - suite of image registration tools, through a FreeSurfer script called talairach_avi. - Several of the downstream programs use talairach coordinates as seed points. 
- """ - - bias_correction = pe.Node(MNIBiasCorrection(), name="Bias_correction") - bias_correction.inputs.iterations = 1 - bias_correction.inputs.protocol_iterations = 1000 - bias_correction.inputs.distance = distance - if stop: - bias_correction.inputs.stop = stop - if shrink: - bias_correction.inputs.shrink = shrink - bias_correction.inputs.no_rescale = True - bias_correction.inputs.out_file = 'orig_nu.mgz' - - ar1_wf.connect([ - (conform_template, bias_correction, [('out_file', 'in_file')]), - ]) - - if not longitudinal: - # single session processing - talairach_avi = pe.Node(TalairachAVI(), name="Compute_Transform") - if custom_atlas is not None: - # allows to specify a custom atlas - talairach_avi.inputs.atlas = custom_atlas - talairach_avi.inputs.out_file = 'talairach.auto.xfm' - ar1_wf.connect([(bias_correction, talairach_avi, [('out_file', - 'in_file')])]) - else: - # longitudinal processing - # Just copy the template xfm - talairach_avi = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_Template_Transform') - talairach_avi.inputs.out_file = 'talairach.auto.xfm' - - ar1_wf.connect([(inputspec, talairach_avi, [('template_talairach_xfm', - 'in_file')])]) - - copy_transform = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_Transform') - copy_transform.inputs.out_file = 'talairach.xfm' - - ar1_wf.connect([(talairach_avi, copy_transform, [('out_file', - 'in_file')])]) - - # In recon-all the talairach.xfm is added to orig.mgz, even though - # it does not exist yet. This is a compromise to keep from - # having to change the time stamp of the orig volume after talairaching. - # Here we are going to add xfm to the header after the xfm has been created. - # This may mess up the timestamp. - - add_xform_to_orig = pe.Node( - AddXFormToHeader(), name="Add_Transform_to_Orig") - add_xform_to_orig.inputs.copy_name = True - add_xform_to_orig.inputs.out_file = conform_template.inputs.out_file - - ar1_wf.connect( - [(conform_template, add_xform_to_orig, [('out_file', 'in_file')]), - (copy_transform, add_xform_to_orig, [('out_file', 'transform')])]) - - # This node adds the transform to the orig_nu.mgz file. This step does not - # exist in the recon-all workflow, because that workflow adds the talairach - # to the orig.mgz file header before the talairach actually exists. - add_xform_to_orig_nu = pe.Node( - AddXFormToHeader(), name="Add_Transform_to_Orig_Nu") - add_xform_to_orig_nu.inputs.copy_name = True - add_xform_to_orig_nu.inputs.out_file = bias_correction.inputs.out_file - - ar1_wf.connect( - [(bias_correction, add_xform_to_orig_nu, [('out_file', 'in_file')]), - (copy_transform, add_xform_to_orig_nu, [('out_file', 'transform')])]) - - # check the alignment of the talairach - # TODO: Figure out how to read output from this node. - check_alignment = pe.Node( - CheckTalairachAlignment(), name="Check_Talairach_Alignment") - check_alignment.inputs.threshold = 0.005 - ar1_wf.connect([ - (copy_transform, check_alignment, [('out_file', 'in_file')]), - ]) - - if not longitudinal: - - def awkfile(in_file, log_file): - """ - This method uses 'awk' which must be installed prior to running the workflow and is not a - part of nipype or freesurfer. 
- """ - import subprocess - import os - command = ['awk', '-f', in_file, log_file] - print(''.join(command)) - subprocess.call(command) - log_file = os.path.abspath(log_file) - return log_file - - awk_logfile = pe.Node( - Function(['in_file', 'log_file'], ['log_file'], awkfile), - name='Awk') - - ar1_wf.connect([(talairach_avi, awk_logfile, [('out_log', - 'log_file')]), - (inputspec, awk_logfile, [('awk_file', 'in_file')])]) - - # TODO datasink the output from TalirachQC...not sure how to do this - tal_qc = pe.Node(TalairachQC(), name="Detect_Aligment_Failures") - ar1_wf.connect([(awk_logfile, tal_qc, [('log_file', 'log_file')])]) - - if fsvernum < 6: - # intensity correction is performed before normalization - intensity_correction = pe.Node( - MNIBiasCorrection(), name="Intensity_Correction") - intensity_correction.inputs.out_file = 'nu.mgz' - intensity_correction.inputs.iterations = 2 - ar1_wf.connect([(add_xform_to_orig, intensity_correction, - [('out_file', 'in_file')]), - (copy_transform, intensity_correction, - [('out_file', 'transform')])]) - - add_to_header_nu = pe.Node(AddXFormToHeader(), name="Add_XForm_to_NU") - add_to_header_nu.inputs.copy_name = True - add_to_header_nu.inputs.out_file = 'nu.mgz' - ar1_wf.connect([(intensity_correction, add_to_header_nu, [ - ('out_file', 'in_file'), - ]), (copy_transform, add_to_header_nu, [('out_file', 'transform')])]) - - # Intensity Normalization - # Performs intensity normalization of the orig volume and places the result in mri/T1.mgz. - # Attempts to correct for fluctuations in intensity that would otherwise make intensity-based - # segmentation much more difficult. Intensities for all voxels are scaled so that the mean - # intensity of the white matter is 110. - - mri_normalize = pe.Node(Normalize(), name="Normalize_T1") - mri_normalize.inputs.gradient = 1 - mri_normalize.inputs.out_file = 'T1.mgz' - - if fsvernum < 6: - ar1_wf.connect([(add_to_header_nu, mri_normalize, [('out_file', - 'in_file')])]) - else: - ar1_wf.connect([(add_xform_to_orig_nu, mri_normalize, [('out_file', - 'in_file')])]) - - ar1_wf.connect([(copy_transform, mri_normalize, [('out_file', - 'transform')])]) - - # Skull Strip - """ - Removes the skull from mri/T1.mgz and stores the result in - mri/brainmask.auto.mgz and mri/brainmask.mgz. Runs the mri_watershed program. 
- """ - if not longitudinal: - mri_em_register = pe.Node(EMRegister(), name="EM_Register") - mri_em_register.inputs.out_file = 'talairach_with_skull.lta' - mri_em_register.inputs.skull = True - if plugin_args: - mri_em_register.plugin_args = plugin_args - - if fsvernum < 6: - ar1_wf.connect(add_to_header_nu, 'out_file', mri_em_register, - 'in_file') - else: - ar1_wf.connect(add_xform_to_orig_nu, 'out_file', mri_em_register, - 'in_file') - - ar1_wf.connect([(inputspec, mri_em_register, - [('num_threads', 'num_threads'), - ('reg_template_withskull', 'template')])]) - - brainmask = pe.Node( - WatershedSkullStrip(), name='Watershed_Skull_Strip') - brainmask.inputs.t1 = True - brainmask.inputs.out_file = 'brainmask.auto.mgz' - ar1_wf.connect([(mri_normalize, brainmask, [('out_file', 'in_file')]), - (mri_em_register, brainmask, [('out_file', - 'transform')]), - (inputspec, brainmask, [('reg_template_withskull', - 'brain_atlas')])]) - else: - copy_template_brainmask = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_Template_Brainmask') - copy_template_brainmask.inputs.out_file = 'brainmask_{0}.mgz'.format( - config['long_template']) - - ar1_wf.connect([(inputspec, copy_template_brainmask, - [('template_brainmask', 'in_file')])]) - - mask1 = pe.Node(ApplyMask(), name="ApplyMask1") - mask1.inputs.keep_mask_deletion_edits = True - mask1.inputs.out_file = 'brainmask.auto.mgz' - - ar1_wf.connect([(mri_normalize, mask1, [('out_file', 'in_file')]), - (copy_template_brainmask, mask1, [('out_file', - 'mask_file')])]) - - brainmask = pe.Node(ApplyMask(), name="ApplyMask2") - brainmask.inputs.keep_mask_deletion_edits = True - brainmask.inputs.transfer = 255 - brainmask.inputs.out_file = mask1.inputs.out_file - - ar1_wf.connect([(mask1, brainmask, [('out_file', 'in_file')]), - (copy_template_brainmask, brainmask, [('out_file', - 'mask_file')])]) - - copy_brainmask = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_Brainmask') - copy_brainmask.inputs.out_file = 'brainmask.mgz' - - ar1_wf.connect([(brainmask, copy_brainmask, [('out_file', 'in_file')])]) - - outputs = [ - 'origvols', 't2_raw', 'flair', 'rawavg', 'orig_nu', 'orig', - 'talairach_auto', 'talairach', 't1', 'talskull', 'brainmask_auto', - 'brainmask', 'braintemplate' - ] - - if fsvernum < 6: - outputspec = pe.Node( - IdentityInterface(fields=outputs + ['nu']), name="outputspec") - ar1_wf.connect([(add_to_header_nu, outputspec, [('out_file', 'nu')])]) - else: - outputspec = pe.Node( - IdentityInterface(fields=outputs), name="outputspec") - - ar1_wf.connect([ - (T1_image_preparation, outputspec, [('out_file', 'origvols')]), - (T2_convert, outputspec, [('out_file', 't2_raw')]), - (FLAIR_convert, outputspec, [('out_file', 'flair')]), - (create_template, outputspec, [('out_file', 'rawavg')]), - (add_xform_to_orig, outputspec, [('out_file', 'orig')]), - (add_xform_to_orig_nu, outputspec, [('out_file', 'orig_nu')]), - (talairach_avi, outputspec, [('out_file', 'talairach_auto')]), - (copy_transform, outputspec, [('out_file', 'talairach')]), - (mri_normalize, outputspec, [('out_file', 't1')]), - (brainmask, outputspec, [('out_file', 'brainmask_auto')]), - (copy_brainmask, outputspec, [('out_file', 'brainmask')]), - ]) - - if not longitudinal: - ar1_wf.connect([ - (mri_em_register, outputspec, [('out_file', 'talskull')]), - ]) - else: - ar1_wf.connect([ - (copy_template_brainmask, outputspec, [('out_file', - 'braintemplate')]), - ]) - - return ar1_wf, outputs diff --git 
a/nipype/workflows/smri/freesurfer/autorecon2.py b/nipype/workflows/smri/freesurfer/autorecon2.py deleted file mode 100644 index a11587412d..0000000000 --- a/nipype/workflows/smri/freesurfer/autorecon2.py +++ /dev/null @@ -1,720 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from ....interfaces.utility import Function, IdentityInterface, Merge -from ....pipeline import engine as pe -from ....interfaces.freesurfer import * -from .utils import copy_file - - -def copy_ltas(in_file, subjects_dir, subject_id, long_template): - import os - out_file = copy_file(in_file, - os.path.basename(in_file).replace( - long_template, subject_id)) - return out_file - - -def create_AutoRecon2(name="AutoRecon2", - longitudinal=False, - plugin_args=None, - fsvernum=5.3, - stop=None, - shrink=None, - distance=None): - # AutoRecon2 - # Workflow - ar2_wf = pe.Workflow(name=name) - - inputspec = pe.Node( - IdentityInterface(fields=[ - 'orig', - 'nu', # version < 6 - 'brainmask', - 'transform', - 'subject_id', - 'template_talairach_lta', - 'template_talairach_m3z', - 'template_label_intensities', - 'template_aseg', - 'subj_to_template_lta', - 'alltps_to_template_ltas', - 'template_lh_white', - 'template_rh_white', - 'template_lh_pial', - 'template_rh_pial', - 'init_wm', - 'timepoints', - 'alltps_segs', - 'alltps_segs_noCC', - 'alltps_norms', - 'num_threads', - 'reg_template', - 'reg_template_withskull' - ]), - run_without_submitting=True, - name='inputspec') - - # Input node - if longitudinal: - # TODO: Work on longitudinal workflow - inputspec.inputs.timepoints = config['timepoints'] - - if fsvernum >= 6: - # NU Intensity Correction - """ - Non-parametric Non-uniform intensity Normalization (N3), corrects for - intensity non-uniformity in MR data, making relatively few assumptions about - the data. This runs the MINC tool 'nu_correct'. - """ - intensity_correction = pe.Node( - MNIBiasCorrection(), name="Intensity_Correction") - intensity_correction.inputs.out_file = 'nu.mgz' - ar2_wf.connect([(inputspec, intensity_correction, - [('orig', 'in_file'), ('brainmask', 'mask'), - ('transform', 'transform')])]) - - # intensity correction parameters are more specific in 6+ - intensity_correction.inputs.iterations = 1 - intensity_correction.inputs.protocol_iterations = 1000 - if stop: - intensity_correction.inputs.stop = stop - if shrink: - intensity_correction.inputs.shrink = shrink - intensity_correction.inputs.distance = distance - - add_to_header_nu = pe.Node(AddXFormToHeader(), name="Add_XForm_to_NU") - add_to_header_nu.inputs.copy_name = True - add_to_header_nu.inputs.out_file = 'nu.mgz' - ar2_wf.connect([(intensity_correction, add_to_header_nu, [ - ('out_file', 'in_file'), - ]), (inputspec, add_to_header_nu, [('transform', 'transform')])]) - - # EM Registration - """ - Computes the transform to align the mri/nu.mgz volume to the default GCA - atlas found in FREESURFER_HOME/average (see -gca flag for more info). 
- """ - if longitudinal: - align_transform = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_Talairach_lta') - align_transform.inputs.out_file = 'talairach.lta' - - ar2_wf.connect([(inputspec, align_transform, - [('template_talairach_lta', 'in_file')])]) - else: - align_transform = pe.Node(EMRegister(), name="Align_Transform") - align_transform.inputs.out_file = 'talairach.lta' - align_transform.inputs.nbrspacing = 3 - if plugin_args: - align_transform.plugin_args = plugin_args - ar2_wf.connect([(inputspec, align_transform, - [('brainmask', 'mask'), ('reg_template', 'template'), - ('num_threads', 'num_threads')])]) - if fsvernum >= 6: - ar2_wf.connect([(add_to_header_nu, align_transform, - [('out_file', 'in_file')])]) - else: - ar2_wf.connect([(inputspec, align_transform, [('nu', 'in_file')])]) - - # CA Normalize - """ - Further normalization, based on GCA model. The normalization is based on an - estimate of the most certain segmentation voxels, which it then uses to - estimate the bias field/scalings. Creates mri/norm.mgz. - """ - ca_normalize = pe.Node(CANormalize(), name='CA_Normalize') - ca_normalize.inputs.out_file = 'norm.mgz' - if not longitudinal: - ca_normalize.inputs.control_points = 'ctrl_pts.mgz' - else: - copy_template_aseg = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_Template_Aseg') - copy_template_aseg.inputs.out_file = 'aseg_{0}.mgz'.format( - config['long_template']) - - ar1_wf.connect( - [(inputspec, copy_template, [('template_aseg', 'in_file')]), - (copy_template, ca_normalize, [('out_file', 'long_file')])]) - - ar2_wf.connect([(align_transform, ca_normalize, [('out_file', - 'transform')]), - (inputspec, ca_normalize, [('brainmask', 'mask'), - ('reg_template', 'atlas')])]) - if fsvernum >= 6: - ar2_wf.connect([(add_to_header_nu, ca_normalize, [('out_file', - 'in_file')])]) - else: - ar2_wf.connect([(inputspec, ca_normalize, [('nu', 'in_file')])]) - - # CA Register - # Computes a nonlinear transform to align with GCA atlas. - ca_register = pe.Node(CARegister(), name='CA_Register') - ca_register.inputs.align = 'after' - ca_register.inputs.no_big_ventricles = True - ca_register.inputs.out_file = 'talairach.m3z' - if plugin_args: - ca_register.plugin_args = plugin_args - ar2_wf.connect([(ca_normalize, ca_register, [('out_file', 'in_file')]), - (inputspec, ca_register, - [('brainmask', 'mask'), ('num_threads', 'num_threads'), - ('reg_template', 'template')])]) - if not longitudinal: - ar2_wf.connect([(align_transform, ca_register, [('out_file', - 'transform')])]) - else: - ca_register.inputs.levels = 2 - ca_register.inputs.A = 1 - ar2_wf.connect([(inputspec, ca_register, [('template_talairach_m3z', - 'l_files')])]) - - # Remove Neck - """ - The neck region is removed from the NU-corrected volume mri/nu.mgz. Makes use - of transform computed from prior CA Register stage. - """ - remove_neck = pe.Node(RemoveNeck(), name='Remove_Neck') - remove_neck.inputs.radius = 25 - remove_neck.inputs.out_file = 'nu_noneck.mgz' - ar2_wf.connect([(ca_register, remove_neck, [('out_file', 'transform')]), - (inputspec, remove_neck, [('reg_template', 'template')])]) - if fsvernum >= 6: - ar2_wf.connect([(add_to_header_nu, remove_neck, [('out_file', - 'in_file')])]) - else: - ar2_wf.connect([(inputspec, remove_neck, [('nu', 'in_file')])]) - - # SkullLTA (EM Registration, with Skull) - # Computes transform to align volume mri/nu_noneck.mgz with GCA volume - # possessing the skull. 
- em_reg_withskull = pe.Node(EMRegister(), name='EM_Register_withSkull') - em_reg_withskull.inputs.skull = True - em_reg_withskull.inputs.out_file = 'talairach_with_skull_2.lta' - if plugin_args: - em_reg_withskull.plugin_args = plugin_args - ar2_wf.connect([(align_transform, em_reg_withskull, [('out_file', - 'transform')]), - (remove_neck, em_reg_withskull, [('out_file', 'in_file')]), - (inputspec, em_reg_withskull, - [('num_threads', 'num_threads'), - ('reg_template_withskull', 'template')])]) - - # SubCort Seg (CA Label) - # Labels subcortical structures, based in GCA model. - if longitudinal: - copy_long_ltas = pe.MapNode( - Function( - ['in_file', 'subjects_dir', 'subject_id', 'long_template'], - ['out_file'], copy_ltas), - iterfield=['in_file'], - name='Copy_long_ltas') - ar2_wf.connect([(inputspec, copy_long_ltas, - [('alltps_to_template_ltas', 'in_file'), - ('subjects_dir', 'subjects_dir'), ('subject_id', - 'subject_id')])]) - copy_long_ltas.inputs.long_template = config['long_template'] - - merge_norms = pe.Node(Merge(2), name="Merge_Norms") - - ar2_wf.connect([(inputspec, merge_norms, [('alltps_norms', 'in1')]), - (ca_normalize, merge_norms, [('out_file', 'in2')])]) - - fuse_segmentations = pe.Node( - FuseSegmentations(), name="Fuse_Segmentations") - - ar2_wf.connect([(inputspec, fuse_segmentations, [ - ('timepoints', 'timepoints'), ('alltps_segs', 'in_segmentations'), - ('alltps_segs_noCC', 'in_segmentations_noCC'), ('subject_id', - 'subject_id') - ]), (merge_norms, fuse_segmentations, [('out', 'in_norms')])]) - fuse_segmentations.inputs.out_file = 'aseg.fused.mgz' - - ca_label = pe.Node(CALabel(), name='CA_Label') - if fsvernum >= 6: - ca_label.inputs.relabel_unlikely = (9, .3) - ca_label.inputs.prior = 0.5 - ca_label.inputs.align = True - ca_label.inputs.out_file = 'aseg.auto_noCCseg.mgz' - if plugin_args: - ca_label.plugin_args = plugin_args - ar2_wf.connect([(ca_normalize, ca_label, [('out_file', 'in_file')]), - (ca_register, ca_label, [('out_file', 'transform')]), - (inputspec, ca_label, [('num_threads', 'num_threads'), - ('reg_template', 'template')])]) - - if longitudinal: - ar2_wf.connect([(fuse_segmentations, ca_label, [('out_file', - 'in_vol')]), - (inputspec, ca_label, [('template_label_intensities', - 'intensities')])]) - - # mri_cc - segments the corpus callosum into five separate labels in the - # subcortical segmentation volume 'aseg.mgz' - segment_cc = pe.Node(SegmentCC(), name="Segment_CorpusCallosum") - segment_cc.inputs.out_rotation = 'cc_up.lta' - segment_cc.inputs.out_file = 'aseg.auto.mgz' - segment_cc.inputs.copy_inputs = True - ar2_wf.connect([ - (ca_label, segment_cc, [('out_file', 'in_file')]), - (ca_normalize, segment_cc, [('out_file', 'in_norm')]), - ]) - - copy_cc = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_CCSegmentation') - copy_cc.inputs.out_file = 'aseg.presurf.mgz' - - ar2_wf.connect([(segment_cc, copy_cc, [('out_file', 'in_file')])]) - - # Normalization2 - """ - Performs a second (major) intensity correction using only the brain volume a - s the input (so that it has to be done after the skull strip). Intensity - normalization works better when the skull has been removed. Creates a new - brain.mgz volume. The -autorecon2-cp stage begins here. 
- """ - normalization2 = pe.Node(Normalize(), name="Normalization2") - normalization2.inputs.out_file = 'brain.mgz' - ar2_wf.connect([(copy_cc, normalization2, [('out_file', 'segmentation')]), - (inputspec, normalization2, [('brainmask', 'mask')]), - (ca_normalize, normalization2, [('out_file', 'in_file')])]) - - # Mask Brain Final Surface - - # Applies brainmask.mgz to brain.mgz to create brain.finalsurfs.mgz. - mri_mask = pe.Node(ApplyMask(), name="Mask_Brain_Final_Surface") - mri_mask.inputs.mask_thresh = 5 - mri_mask.inputs.out_file = 'brain.finalsurfs.mgz' - - ar2_wf.connect([(normalization2, mri_mask, [('out_file', 'in_file')]), - (inputspec, mri_mask, [('brainmask', 'mask_file')])]) - - # WM Segmentation - """ - Attempts to separate white matter from everything else. The input is - mri/brain.mgz, and the output is mri/wm.mgz. Uses intensity, neighborhood, - and smoothness constraints. This is the volume that is edited when manually - fixing defects. Calls mri_segment, mri_edit_wm_with_aseg, and mri_pretess. - """ - - wm_seg = pe.Node(SegmentWM(), name="Segment_WM") - wm_seg.inputs.out_file = 'wm.seg.mgz' - ar2_wf.connect([(normalization2, wm_seg, [('out_file', 'in_file')])]) - - edit_wm = pe.Node(EditWMwithAseg(), name='Edit_WhiteMatter') - edit_wm.inputs.out_file = 'wm.asegedit.mgz' - edit_wm.inputs.keep_in = True - ar2_wf.connect([(wm_seg, edit_wm, [('out_file', 'in_file')]), - (copy_cc, edit_wm, [('out_file', 'seg_file')]), - (normalization2, edit_wm, [('out_file', 'brain_file')])]) - - pretess = pe.Node(MRIPretess(), name="MRI_Pretess") - pretess.inputs.out_file = 'wm.mgz' - pretess.inputs.label = 'wm' - ar2_wf.connect([(edit_wm, pretess, [('out_file', 'in_filled')]), - (ca_normalize, pretess, [('out_file', 'in_norm')])]) - - if longitudinal: - transfer_init_wm = pe.Node(ApplyMask(), name="Transfer_Initial_WM") - transfer_init_wm.inputs.transfer = 255 - transfer_init_wm.inputs.keep_mask_deletion_edits = True - transfer_init_wm.inputs.out_file = 'wm.mgz' - ar2_wf.connect([(pretess, transfer_init_wm, [('out_file', 'in_file')]), - (inputspec, transfer_init_wm, - [('init_wm', 'mask_file'), ('subj_to_template_lta', - 'xfm_file')])]) - # changing the pretess variable so that the rest of the connections still work!!! - pretess = transfer_init_wm - - # Fill - """ This creates the subcortical mass from which the orig surface is created. - The mid brain is cut from the cerebrum, and the hemispheres are cut from each - other. The left hemisphere is binarized to 255. The right hemisphere is binarized - to 127. The input is mri/wm.mgz and the output is mri/filled.mgz. Calls mri_fill. 
- """ - - fill = pe.Node(MRIFill(), name="Fill") - fill.inputs.log_file = 'ponscc.cut.log' - fill.inputs.out_file = 'filled.mgz' - - ar2_wf.connect([ - (pretess, fill, [('out_file', 'in_file')]), - (align_transform, fill, [('out_file', 'transform')]), - (ca_label, fill, [('out_file', 'segmentation')]), - ]) - - ar2_lh = pe.Workflow("AutoRecon2_Left") - ar2_rh = pe.Workflow("AutoRecon2_Right") - - # iterate by hemisphere - for hemisphere in ['lh', 'rh']: - if hemisphere == 'lh': - label = 255 - hemi_wf = ar2_lh - else: - label = 127 - hemi_wf = ar2_rh - - hemi_inputspec = pe.Node( - IdentityInterface(fields=[ - 'norm', 'filled', 'aseg', 't1', 'wm', 'brain', 'num_threads' - ]), - name="inputspec") - - if longitudinal: - # Make White Surf - # Copy files from longitudinal base - copy_template_white = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_Template_White') - copy_template_white.inputs.out_file = '{0}.orig'.format(hemisphere) - - copy_template_orig_white = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_Template_Orig_White') - copy_template_orig_white.inputs.out_file = '{0}.orig_white'.format( - hemisphere) - - copy_template_orig_pial = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_Template_Orig_Pial') - copy_template_orig_pial.inputs.out_file = '{0}.orig_pial'.format( - hemisphere) - - # White - - # This function implicitly calls other inputs based on the subject_id - # wf attempts to make sure files are data sinked to the correct - # folders before calling - make_surfaces = pe.Node(MakeSurfaces(), name="Make_Surfaces") - make_surfaces.inputs.noaparc = True - make_surfaces.inputs.mgz = True - make_surfaces.inputs.white_only = True - make_surfaces.inputs.hemisphere = hemisphere - make_surfaces.inputs.maximum = 3.5 - make_surfaces.inputs.longitudinal = True - make_surfaces.inputs.copy_inputs = True - - hemi_wf.connect([(copy_template_orig_white, make_surfaces, - [('out_file', 'orig_white')]), - (copy_template_white, make_surfaces, - [('out_file', 'in_orig')])]) - - else: - # If running single session - # Tessellate by hemisphere - """ - This is the step where the orig surface (ie, surf/?h.orig.nofix) is created. - The surface is created by covering the filled hemisphere with triangles. - Runs mri_pretess to create a connected WM volume (neighboring voxels must - have faces in common) and then mri_tessellate to create the surface. The - places where the points of the triangles meet are called vertices. Creates - the file surf/?h.orig.nofix Note: the topology fixer will create the surface - ?h.orig. Finally mris_extract_main_component will remove small surface - components, not connected to the main body. 
- """ - pretess2 = pe.Node(MRIPretess(), name='Pretess2') - pretess2.inputs.out_file = 'filled-pretess{0}.mgz'.format(label) - pretess2.inputs.label = label - - hemi_wf.connect([(hemi_inputspec, pretess2, - [('norm', 'in_norm'), ('filled', 'in_filled')])]) - - tesselate = pe.Node(MRITessellate(), name="Tesselation") - tesselate.inputs.out_file = "{0}.orig.nofix".format(hemisphere) - tesselate.inputs.label_value = label - hemi_wf.connect([(pretess2, tesselate, [('out_file', 'in_file')])]) - - extract_main_component = pe.Node( - ExtractMainComponent(), name="Extract_Main_Component") - extract_main_component.inputs.out_file = "{0}.orig.nofix".format( - hemisphere) - hemi_wf.connect([(tesselate, extract_main_component, - [('surface', 'in_file')])]) - - copy_orig = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_Orig') - copy_orig.inputs.out_file = '{0}.orig'.format(hemisphere) - hemi_wf.connect([(extract_main_component, copy_orig, - [('out_file', 'in_file')])]) - - # Orig Surface Smoothing 1 - """ - After tesselation, the orig surface is very jagged because each triangle is - on the edge of a voxel face and so are at right angles to each other. The - vertex positions are adjusted slightly here to reduce the angle. This is - only necessary for the inflation processes. Creates surf/?h.smoothwm(.nofix). - Calls mris_smooth. Smooth1 is the step just after tessellation. - """ - - smooth1 = pe.Node(SmoothTessellation(), name="Smooth1") - smooth1.inputs.disable_estimates = True - smooth1.inputs.seed = 1234 - smooth1.inputs.out_file = '{0}.smoothwm.nofix'.format(hemisphere) - hemi_wf.connect([(extract_main_component, smooth1, [('out_file', - 'in_file')])]) - - # Inflation 1 - """ - Inflation of the surf/?h.smoothwm(.nofix) surface to create surf/?h.inflated. - The inflation attempts to minimize metric distortion so that distances and - areas are preserved (ie, the surface is not stretched). In this sense, it is - like inflating a paper bag and not a balloon. Inflate1 is the step just after - tessellation. - """ - - inflate1 = pe.Node(MRIsInflate(), name="inflate1") - inflate1.inputs.no_save_sulc = True - inflate1.inputs.out_file = '{0}.inflated.nofix'.format(hemisphere) - - copy_inflate1 = pe.Node( - Function(['in_file', 'out_file'], ['out_file'], copy_file), - name='Copy_Inflate1') - copy_inflate1.inputs.out_file = '{0}.inflated'.format(hemisphere) - hemi_wf.connect([ - (smooth1, inflate1, [('surface', 'in_file')]), - (inflate1, copy_inflate1, [('out_file', 'in_file')]), - ]) - - # Sphere - """ - This is the initial step of automatic topology fixing. It is a - quasi-homeomorphic spherical transformation of the inflated surface designed - to localize topological defects for the subsequent automatic topology fixer. - Calls mris_sphere. - """ - - qsphere = pe.Node(Sphere(), name="Sphere") - qsphere.inputs.seed = 1234 - qsphere.inputs.magic = True - qsphere.inputs.out_file = '{0}.qsphere.nofix'.format(hemisphere) - if plugin_args: - qsphere.plugin_args = plugin_args - hemi_wf.connect([(inflate1, qsphere, [('out_file', 'in_file')]), - (hemi_inputspec, qsphere, [('num_threads', - 'num_threads')])]) - - # Automatic Topology Fixer - """ - Finds topological defects (ie, holes in a filled hemisphere) using - surf/?h.qsphere.nofix, and changes the orig surface (surf/?h.orig.nofix) to - remove the defects. Changes the number of vertices. All the defects will be - removed, but the user should check the orig surface in the volume to make - sure that it looks appropriate. 
- - This mris_fix_topology does not take in the {lh,rh}.orig file, but instead takes in the - subject ID and hemisphere and tries to find it from the subjects - directory. - """ - fix_topology = pe.Node(FixTopology(), name="Fix_Topology") - fix_topology.inputs.mgz = True - fix_topology.inputs.ga = True - fix_topology.inputs.seed = 1234 - fix_topology.inputs.hemisphere = hemisphere - fix_topology.inputs.copy_inputs = True - hemi_wf.connect([(copy_orig, fix_topology, - [('out_file', - 'in_orig')]), (copy_inflate1, fix_topology, - [('out_file', 'in_inflated')]), - (qsphere, fix_topology, [('out_file', 'sphere')]), - (hemi_inputspec, fix_topology, - [('wm', 'in_wm'), ('brain', 'in_brain')])]) - - # TODO: halt workflow for bad euler number - euler_number = pe.Node(EulerNumber(), name="Euler_Number") - - hemi_wf.connect([ - (fix_topology, euler_number, [('out_file', 'in_file')]), - ]) - - remove_intersection = pe.Node( - RemoveIntersection(), name="Remove_Intersection") - remove_intersection.inputs.out_file = "{0}.orig".format(hemisphere) - - hemi_wf.connect([(euler_number, remove_intersection, - [('out_file', 'in_file')])]) - - # White - - # This function implicitly calls other inputs based on the subject_id - # need to make sure files are data sinked to the correct folders before - # calling - make_surfaces = pe.Node(MakeSurfaces(), name="Make_Surfaces") - make_surfaces.inputs.noaparc = True - make_surfaces.inputs.mgz = True - make_surfaces.inputs.white_only = True - make_surfaces.inputs.hemisphere = hemisphere - make_surfaces.inputs.copy_inputs = True - hemi_wf.connect([(remove_intersection, make_surfaces, - [('out_file', 'in_orig')]), - (hemi_inputspec, make_surfaces, - [('aseg', 'in_aseg'), ('t1', 'in_T1'), - ('filled', 'in_filled'), ('wm', 'in_wm')])]) - # end of non-longitudinal specific steps - - # Orig Surface Smoothing 2 - """ - After tesselation, the orig surface is very jagged because each triangle is on - the edge of a voxel face and so are at right angles to each other. The vertex - positions are adjusted slightly here to reduce the angle. This is only necessary - for the inflation processes. Smooth2 is the step just after topology - fixing. - """ - smooth2 = pe.Node(SmoothTessellation(), name="Smooth2") - smooth2.inputs.disable_estimates = True - smooth2.inputs.smoothing_iterations = 3 - smooth2.inputs.seed = 1234 - smooth2.inputs.out_file = '{0}.smoothwm'.format(hemisphere) - hemi_wf.connect([(make_surfaces, smooth2, [('out_white', 'in_file')])]) - - # Inflation 2 - """ - Inflation of the surf/?h.smoothwm(.nofix) surface to create surf/?h.inflated. - The inflation attempts to minimize metric distortion so that distances and areas - are preserved (ie, the surface is not stretched). In this sense, it is like - inflating a paper bag and not a balloon. Inflate2 is the step just after - topology fixing. 
- """ - inflate2 = pe.Node(MRIsInflate(), name="inflate2") - inflate2.inputs.out_sulc = '{0}.sulc'.format(hemisphere) - inflate2.inputs.out_file = '{0}.inflated'.format(hemisphere) - hemi_wf.connect([ - (smooth2, inflate2, [('surface', 'in_file')]), - ]) - - # Compute Curvature - """No documentation on this step""" - - curvature1 = pe.Node(Curvature(), name="Curvature1") - curvature1.inputs.save = True - curvature1.inputs.copy_input = True - hemi_wf.connect([ - (make_surfaces, curvature1, [('out_white', 'in_file')]), - ]) - - curvature2 = pe.Node(Curvature(), name="Curvature2") - curvature2.inputs.threshold = .999 - curvature2.inputs.n = True - curvature2.inputs.averages = 5 - curvature2.inputs.save = True - curvature2.inputs.distances = (10, 10) - curvature1.inputs.copy_input = True - hemi_wf.connect([ - (inflate2, curvature2, [('out_file', 'in_file')]), - ]) - - curvature_stats = pe.Node(CurvatureStats(), name="Curvature_Stats") - curvature_stats.inputs.min_max = True - curvature_stats.inputs.write = True - curvature_stats.inputs.values = True - curvature_stats.inputs.hemisphere = hemisphere - curvature_stats.inputs.copy_inputs = True - curvature_stats.inputs.out_file = '{0}.curv.stats'.format(hemisphere) - hemi_wf.connect([ - (smooth2, curvature_stats, [('surface', 'surface')]), - (make_surfaces, curvature_stats, [('out_curv', 'curvfile1')]), - (inflate2, curvature_stats, [('out_sulc', 'curvfile2')]), - ]) - - if longitudinal: - ar2_wf.connect([(inputspec, hemi_wf, - [('template_{0}_white'.format(hemisphere), - 'Copy_Template_White.in_file'), - ('template_{0}_white'.format(hemisphere), - 'Copy_Template_Orig_White.in_file'), - ('template_{0}_pial'.format(hemisphere), - 'Copy_Template_Pial.in_file')])]) - - # Connect inputs for the hemisphere workflows - ar2_wf.connect( - [(ca_normalize, hemi_wf, - [('out_file', 'inputspec.norm')]), (fill, hemi_wf, [ - ('out_file', 'inputspec.filled') - ]), (copy_cc, hemi_wf, [('out_file', 'inputspec.aseg')]), - (mri_mask, hemi_wf, [('out_file', 'inputspec.t1')]), - (pretess, hemi_wf, [('out_file', - 'inputspec.wm')]), (normalization2, hemi_wf, - [('out_file', - 'inputspec.brain')]), - (inputspec, hemi_wf, [('num_threads', 'inputspec.num_threads')])]) - - # Outputs for hemisphere workflow - hemi_outputs = [ - 'orig_nofix', 'orig', 'smoothwm_nofix', 'inflated_nofix', - 'qsphere_nofix', 'white', 'curv', 'area', 'cortex', 'pial_auto', - 'thickness', 'smoothwm', 'sulc', 'inflated', 'white_H', 'white_K', - 'inflated_H', 'inflated_K', 'curv_stats' - ] - - hemi_outputspec = pe.Node( - IdentityInterface(fields=hemi_outputs), name="outputspec") - - hemi_wf.connect( - [(extract_main_component, hemi_outputspec, - [('out_file', 'orig_nofix')]), (inflate1, hemi_outputspec, [ - ('out_file', 'inflated_nofix') - ]), (smooth1, hemi_outputspec, [('surface', 'smoothwm_nofix')]), - (qsphere, hemi_outputspec, [('out_file', 'qsphere_nofix')]), - (remove_intersection, hemi_outputspec, - [('out_file', 'orig')]), (make_surfaces, hemi_outputspec, [ - ('out_white', 'white'), ('out_curv', 'curv'), - ('out_area', 'area'), ('out_cortex', 'cortex'), ('out_pial', - 'pial_auto') - ]), (smooth2, hemi_outputspec, - [('surface', 'smoothwm')]), (inflate2, hemi_outputspec, - [('out_sulc', 'sulc'), - ('out_file', 'inflated')]), - (curvature1, hemi_outputspec, - [('out_mean', 'white_H'), - ('out_gauss', 'white_K')]), (curvature2, hemi_outputspec, [ - ('out_mean', 'inflated_H'), ('out_gauss', 'inflated_K') - ]), (curvature_stats, hemi_outputspec, [('out_file', - 'curv_stats')])]) - - outputs = [ 
- 'nu', 'tal_lta', 'norm', 'ctrl_pts', 'tal_m3z', 'nu_noneck', - 'talskull2', 'aseg_noCC', 'cc_up', 'aseg_auto', 'aseg_presurf', - 'brain', 'brain_finalsurfs', 'wm_seg', 'wm_aseg', 'wm', 'ponscc_log', - 'filled' - ] - for hemi in ('lh', 'rh'): - for field in hemi_outputs: - outputs.append("{0}_".format(hemi) + field) - outputspec = pe.Node(IdentityInterface(fields=outputs), name="outputspec") - - if fsvernum >= 6: - ar2_wf.connect([(add_to_header_nu, outputspec, [('out_file', 'nu')])]) - else: - # add to outputspec to perserve datasinking - ar2_wf.connect([(inputspec, outputspec, [('nu', 'nu')])]) - - ar2_wf.connect([ - (align_transform, outputspec, [('out_file', 'tal_lta')]), - (ca_normalize, outputspec, [('out_file', 'norm')]), - (ca_normalize, outputspec, [('control_points', 'ctrl_pts')]), - (ca_register, outputspec, [('out_file', 'tal_m3z')]), - (remove_neck, outputspec, [('out_file', 'nu_noneck')]), - (em_reg_withskull, outputspec, [('out_file', 'talskull2')]), - (ca_label, outputspec, [('out_file', 'aseg_noCC')]), - (segment_cc, outputspec, [('out_rotation', 'cc_up'), ('out_file', - 'aseg_auto')]), - (copy_cc, outputspec, [('out_file', 'aseg_presurf')]), - (normalization2, outputspec, [('out_file', 'brain')]), - (mri_mask, outputspec, [('out_file', 'brain_finalsurfs')]), - (wm_seg, outputspec, [('out_file', 'wm_seg')]), - (edit_wm, outputspec, [('out_file', 'wm_aseg')]), - (pretess, outputspec, [('out_file', 'wm')]), - (fill, outputspec, [('out_file', 'filled'), ('log_file', - 'ponscc_log')]), - ]) - - for hemi, hemi_wf in [('lh', ar2_lh), ('rh', ar2_rh)]: - for field in hemi_outputs: - output = "{0}_".format(hemi) + field - ar2_wf.connect([(hemi_wf, outputspec, [("outputspec." + field, - output)])]) - - return ar2_wf, outputs diff --git a/nipype/workflows/smri/freesurfer/autorecon3.py b/nipype/workflows/smri/freesurfer/autorecon3.py deleted file mode 100644 index 477198d2da..0000000000 --- a/nipype/workflows/smri/freesurfer/autorecon3.py +++ /dev/null @@ -1,959 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from ....interfaces.utility import IdentityInterface, Merge, Function -from ....pipeline import engine as pe -from ....interfaces.freesurfer import * -from .ba_maps import create_ba_maps_wf -from ....interfaces.io import DataGrabber - - -def create_AutoRecon3(name="AutoRecon3", - qcache=False, - plugin_args=None, - th3=True, - exvivo=True, - entorhinal=True, - fsvernum=5.3): - - # AutoRecon3 - # Workflow - ar3_wf = pe.Workflow(name=name) - - # Input Node - inputspec = pe.Node( - IdentityInterface(fields=[ - 'lh_inflated', 'rh_inflated', 'lh_smoothwm', 'rh_smoothwm', - 'lh_white', 'rh_white', 'lh_white_H', 'rh_white_H', 'lh_white_K', - 'rh_white_K', 'lh_cortex_label', 'rh_cortex_label', 'lh_orig', - 'rh_orig', 'lh_sulc', 'rh_sulc', 'lh_area', 'rh_area', 'lh_curv', - 'rh_curv', 'lh_orig_nofix', 'rh_orig_nofix', 'aseg_presurf', - 'brain_finalsurfs', 'wm', 'filled', 'brainmask', 'transform', - 'orig_mgz', 'rawavg', 'norm', 'lh_atlas', 'rh_atlas', - 'lh_classifier1', 'rh_classifier1', 'lh_classifier2', - 'rh_classifier2', 'lh_classifier3', 'rh_classifier3', - 'lookup_table', 'wm_lookup_table', 'src_subject_id', - 'src_subject_dir', 'color_table', 'num_threads' - ]), - name='inputspec') - - ar3_lh_wf1 = pe.Workflow(name="AutoRecon3_Left_1") - ar3_rh_wf1 = pe.Workflow(name="AutoRecon3_Right_1") - for hemisphere, hemi_wf in [('lh', ar3_lh_wf1), ('rh', ar3_rh_wf1)]: - hemi_inputspec1 = pe.Node( - 
IdentityInterface(fields=[ - 'inflated', 'smoothwm', 'white', 'cortex_label', 'orig', - 'aseg_presurf', 'brain_finalsurfs', 'wm', 'filled', 'sphere', - 'sulc', 'area', 'curv', 'classifier', 'atlas', 'num_threads' - ]), - name="inputspec") - - # Spherical Inflation - - # Inflates the orig surface into a sphere while minimizing metric distortion. - # This step is necessary in order to register the surface to the spherical - # atlas (also known as the spherical morph). Calls mris_sphere. Creates - # surf/?h.sphere. The -autorecon3 stage begins here. - - ar3_sphere = pe.Node(Sphere(), name="Spherical_Inflation") - ar3_sphere.inputs.seed = 1234 - ar3_sphere.inputs.out_file = '{0}.sphere'.format(hemisphere) - if plugin_args: - ar3_sphere.plugin_args = plugin_args - hemi_wf.connect([(hemi_inputspec1, ar3_sphere, - [('inflated', 'in_file'), - ('smoothwm', 'in_smoothwm'), ('num_threads', - 'num_threads')])]) - - # Ipsilateral Surface Registation (Spherical Morph) - - # Registers the orig surface to the spherical atlas through surf/?h.sphere. - # The surfaces are first coarsely registered by aligning the large scale - # folding patterns found in ?h.sulc and then fine tuned using the small-scale - # patterns as in ?h.curv. Calls mris_register. Creates surf/?h.sphere.reg. - - ar3_surfreg = pe.Node(Register(), name="Surface_Registration") - ar3_surfreg.inputs.out_file = '{0}.sphere.reg'.format(hemisphere) - ar3_surfreg.inputs.curv = True - hemi_wf.connect([(ar3_sphere, ar3_surfreg, [('out_file', 'in_surf')]), - (hemi_inputspec1, ar3_surfreg, - [('smoothwm', 'in_smoothwm'), ('sulc', 'in_sulc'), - ('atlas', 'target')])]) - - # Jacobian - - # Computes how much the white surface was distorted in order to register to - # the spherical atlas during the -surfreg step. - - ar3_jacobian = pe.Node(Jacobian(), name="Jacobian") - ar3_jacobian.inputs.out_file = '{0}.jacobian_white'.format(hemisphere) - hemi_wf.connect( - [(hemi_inputspec1, ar3_jacobian, [('white', 'in_origsurf')]), - (ar3_surfreg, ar3_jacobian, [('out_file', 'in_mappedsurf')])]) - - # Average Curvature - - # Resamples the average curvature from the atlas to that of the subject. - # Allows the user to display activity on the surface of an individual - # with the folding pattern (ie, anatomy) of a group. - - ar3_paint = pe.Node(Paint(), name="Average_Curvature") - ar3_paint.inputs.averages = 5 - ar3_paint.inputs.template_param = 6 - ar3_paint.inputs.out_file = "{0}.avg_curv".format(hemisphere) - hemi_wf.connect([(ar3_surfreg, ar3_paint, [('out_file', 'in_surf')]), - (hemi_inputspec1, ar3_paint, [('atlas', - 'template')])]) - - # Cortical Parcellation - - # Assigns a neuroanatomical label to each location on the cortical - # surface. Incorporates both geometric information derived from the - # cortical model (sulcus and curvature), and neuroanatomical convention. 
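The nodes in these files are typically built by wrapping a bare interface in pe.Node and then assigning input traits on node.inputs; passing the same traits to the interface constructor is equivalent. A minimal sketch using the Sphere interface configured just above, assuming the FreeSurfer interfaces import as at the top of this module:

    from nipype.pipeline import engine as pe
    from nipype.interfaces.freesurfer import Sphere

    # an output filename given at construction time ...
    sphere = pe.Node(Sphere(out_file='lh.sphere'), name='Spherical_Inflation')
    # ... behaves the same as a trait assigned afterwards
    sphere.inputs.seed = 1234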
- - ar3_parcellation = pe.Node(MRIsCALabel(), "Cortical_Parcellation") - ar3_parcellation.inputs.seed = 1234 - ar3_parcellation.inputs.hemisphere = hemisphere - ar3_parcellation.inputs.copy_inputs = True - ar3_parcellation.inputs.out_file = "{0}.aparc.annot".format(hemisphere) - if plugin_args: - ar3_parcellation.plugin_args = plugin_args - hemi_wf.connect( - [(hemi_inputspec1, ar3_parcellation, - [('smoothwm', 'smoothwm'), ('cortex_label', 'label'), - ('aseg_presurf', 'aseg'), ('classifier', 'classifier'), - ('curv', 'curv'), ('sulc', 'sulc'), ('num_threads', - 'num_threads')]), - (ar3_surfreg, ar3_parcellation, [('out_file', 'canonsurf')])]) - - # Pial Surface - - ar3_pial = pe.Node(MakeSurfaces(), name="Make_Pial_Surface") - ar3_pial.inputs.mgz = True - ar3_pial.inputs.hemisphere = hemisphere - ar3_pial.inputs.copy_inputs = True - - if fsvernum < 6: - ar3_pial.inputs.white = 'NOWRITE' - hemi_wf.connect(hemi_inputspec1, 'white', ar3_pial, 'in_white') - else: - ar3_pial.inputs.no_white = True - hemi_wf.connect([(hemi_inputspec1, ar3_pial, - [('white', 'orig_pial'), ('white', - 'orig_white')])]) - - hemi_wf.connect( - [(hemi_inputspec1, ar3_pial, - [('wm', 'in_wm'), ('orig', 'in_orig'), ('filled', 'in_filled'), - ('brain_finalsurfs', 'in_T1'), ('aseg_presurf', 'in_aseg')]), - (ar3_parcellation, ar3_pial, [('out_file', 'in_label')])]) - - # Surface Volume - """ - Creates the ?h.volume file by first creating the ?h.mid.area file by - adding ?h.area(.white) to ?h.area.pial, then dividing by two. Then ?h.volume - is created by multiplying ?.mid.area with ?h.thickness. - """ - - ar3_add = pe.Node(MRIsCalc(), name="Add_Pial_Area") - ar3_add.inputs.action = "add" - ar3_add.inputs.out_file = '{0}.area.mid'.format(hemisphere) - hemi_wf.connect([ - (ar3_pial, ar3_add, [('out_area', 'in_file2')]), - (hemi_inputspec1, ar3_add, [('area', 'in_file1')]), - ]) - - ar3_divide = pe.Node(MRIsCalc(), name="Mid_Pial") - ar3_divide.inputs.action = "div" - ar3_divide.inputs.in_int = 2 - ar3_divide.inputs.out_file = '{0}.area.mid'.format(hemisphere) - hemi_wf.connect([ - (ar3_add, ar3_divide, [('out_file', 'in_file1')]), - ]) - - ar3_volume = pe.Node(MRIsCalc(), name="Calculate_Volume") - ar3_volume.inputs.action = "mul" - ar3_volume.inputs.out_file = '{0}.volume'.format(hemisphere) - hemi_wf.connect([ - (ar3_divide, ar3_volume, [('out_file', 'in_file1')]), - (ar3_pial, ar3_volume, [('out_thickness', 'in_file2')]), - ]) - - # Connect the inputs - ar3_wf.connect( - [(inputspec, hemi_wf, - [('{0}_inflated'.format(hemisphere), 'inputspec.inflated'), - ('{0}_smoothwm'.format(hemisphere), - 'inputspec.smoothwm'), ('{0}_white'.format(hemisphere), - 'inputspec.white'), - ('{0}_cortex_label'.format(hemisphere), - 'inputspec.cortex_label'), ('{0}_orig'.format(hemisphere), - 'inputspec.orig'), - ('{0}_sulc'.format(hemisphere), - 'inputspec.sulc'), ('{0}_area'.format(hemisphere), - 'inputspec.area'), - ('{0}_curv'.format(hemisphere), - 'inputspec.curv'), ('aseg_presurf', 'inputspec.aseg_presurf'), - ('brain_finalsurfs', - 'inputspec.brain_finalsurfs'), ('wm', 'inputspec.wm'), - ('filled', 'inputspec.filled'), ('{0}_atlas'.format(hemisphere), - 'inputspec.atlas'), - ('{0}_classifier1'.format(hemisphere), - 'inputspec.classifier'), ('num_threads', - 'inputspec.num_threads')])]) - - # Workflow1 Outputs - hemi_outputs1 = [ - 'sphere', 'sphere_reg', 'jacobian_white', 'avg_curv', - 'aparc_annot', 'area_pial', 'curv_pial', 'pial', 'thickness_pial', - 'area_mid', 'volume' - ] - hemi_outputspec1 = pe.Node( - 
IdentityInterface(fields=hemi_outputs1), name="outputspec") - hemi_wf.connect([(ar3_pial, hemi_outputspec1, [ - ('out_pial', 'pial'), ('out_curv', 'curv_pial'), - ('out_area', 'area_pial'), ('out_thickness', 'thickness_pial') - ]), (ar3_divide, hemi_outputspec1, - [('out_file', 'area_mid')]), (ar3_volume, hemi_outputspec1, - [('out_file', 'volume')]), - (ar3_parcellation, hemi_outputspec1, - [('out_file', 'aparc_annot')]), - (ar3_jacobian, hemi_outputspec1, - [('out_file', - 'jacobian_white')]), (ar3_paint, hemi_outputspec1, - [('out_file', 'avg_curv')]), - (ar3_surfreg, hemi_outputspec1, - [('out_file', - 'sphere_reg')]), (ar3_sphere, hemi_outputspec1, - [('out_file', 'sphere')])]) - - # Cortical Ribbon Mask - """ - Creates binary volume masks of the cortical ribbon - ie, each voxel is either a 1 or 0 depending upon whether it falls in the ribbon or not. - """ - volume_mask = pe.Node(VolumeMask(), name="Mask_Ribbon") - volume_mask.inputs.left_whitelabel = 2 - volume_mask.inputs.left_ribbonlabel = 3 - volume_mask.inputs.right_whitelabel = 41 - volume_mask.inputs.right_ribbonlabel = 42 - volume_mask.inputs.save_ribbon = True - volume_mask.inputs.copy_inputs = True - - ar3_wf.connect([ - (inputspec, volume_mask, [('lh_white', 'lh_white'), ('rh_white', - 'rh_white')]), - (ar3_lh_wf1, volume_mask, [('outputspec.pial', 'lh_pial')]), - (ar3_rh_wf1, volume_mask, [('outputspec.pial', 'rh_pial')]), - ]) - - if fsvernum >= 6: - ar3_wf.connect([(inputspec, volume_mask, [('aseg_presurf', - 'in_aseg')])]) - else: - ar3_wf.connect([(inputspec, volume_mask, [('aseg_presurf', 'aseg')])]) - - ar3_lh_wf2 = pe.Workflow(name="AutoRecon3_Left_2") - ar3_rh_wf2 = pe.Workflow(name="AutoRecon3_Right_2") - - for hemisphere, hemiwf2 in [('lh', ar3_lh_wf2), ('rh', ar3_rh_wf2)]: - if hemisphere == 'lh': - hemiwf1 = ar3_lh_wf1 - else: - hemiwf1 = ar3_rh_wf1 - - hemi_inputs2 = [ - 'wm', - 'lh_white', - 'rh_white', - 'transform', - 'brainmask', - 'aseg_presurf', - 'cortex_label', - 'lh_pial', - 'rh_pial', - 'thickness', - 'aparc_annot', - 'ribbon', - 'smoothwm', - 'sphere_reg', - 'orig_mgz', - 'rawavg', - 'curv', - 'sulc', - 'classifier2', - 'classifier3', - ] - - hemi_inputspec2 = pe.Node( - IdentityInterface(fields=hemi_inputs2), name="inputspec") - - # Parcellation Statistics - """ - Runs mris_anatomical_stats to create a summary table of cortical parcellation statistics for each structure, including - structure name - number of vertices - total surface area (mm^2) - total gray matter volume (mm^3) - average cortical thickness (mm) - standard error of cortical thicknessr (mm) - integrated rectified mean curvature - integrated rectified Gaussian curvature - folding index - intrinsic curvature index. 
- """ - parcellation_stats_white = pe.Node( - ParcellationStats(), - name="Parcellation_Stats_{0}_White".format(hemisphere)) - parcellation_stats_white.inputs.mgz = True - parcellation_stats_white.inputs.th3 = th3 - parcellation_stats_white.inputs.tabular_output = True - parcellation_stats_white.inputs.surface = 'white' - parcellation_stats_white.inputs.hemisphere = hemisphere - parcellation_stats_white.inputs.out_color = 'aparc.annot.ctab' - parcellation_stats_white.inputs.out_table = '{0}.aparc.stats'.format( - hemisphere) - parcellation_stats_white.inputs.copy_inputs = True - - hemiwf2.connect([ - (hemi_inputspec2, parcellation_stats_white, [ - ('wm', 'wm'), - ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), - ('transform', 'transform'), - ('brainmask', 'brainmask'), - ('aseg_presurf', 'aseg'), - ('cortex_label', 'in_cortex'), - ('cortex_label', 'cortex_label'), - ('lh_pial', 'lh_pial'), - ('rh_pial', 'rh_pial'), - ('thickness', 'thickness'), - ('aparc_annot', 'in_annotation'), - ('ribbon', 'ribbon'), - ]), - ]) - - parcellation_stats_pial = pe.Node( - ParcellationStats(), - name="Parcellation_Stats_{0}_Pial".format(hemisphere)) - parcellation_stats_pial.inputs.mgz = True - parcellation_stats_pial.inputs.th3 = th3 - parcellation_stats_pial.inputs.tabular_output = True - parcellation_stats_pial.inputs.surface = 'pial' - parcellation_stats_pial.inputs.hemisphere = hemisphere - parcellation_stats_pial.inputs.copy_inputs = True - parcellation_stats_pial.inputs.out_color = 'aparc.annot.ctab' - parcellation_stats_pial.inputs.out_table = '{0}.aparc.pial.stats'.format( - hemisphere) - - hemiwf2.connect([ - (hemi_inputspec2, parcellation_stats_pial, [ - ('wm', 'wm'), - ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), - ('transform', 'transform'), - ('brainmask', 'brainmask'), - ('aseg_presurf', 'aseg'), - ('cortex_label', 'cortex_label'), - ('cortex_label', 'in_cortex'), - ('lh_pial', 'lh_pial'), - ('rh_pial', 'rh_pial'), - ('thickness', 'thickness'), - ('aparc_annot', 'in_annotation'), - ('ribbon', 'ribbon'), - ]), - ]) - - # Cortical Parcellation 2 - cortical_parcellation_2 = pe.Node( - MRIsCALabel(), - name="Cortical_Parcellation_{0}_2".format(hemisphere)) - cortical_parcellation_2.inputs.out_file = '{0}.aparc.a2009s.annot'.format( - hemisphere) - cortical_parcellation_2.inputs.seed = 1234 - cortical_parcellation_2.inputs.copy_inputs = True - cortical_parcellation_2.inputs.hemisphere = hemisphere - - hemiwf2.connect([(hemi_inputspec2, cortical_parcellation_2, - [('smoothwm', 'smoothwm'), ('aseg_presurf', 'aseg'), - ('cortex_label', 'label'), ('sphere_reg', - 'canonsurf'), ('curv', - 'curv'), - ('sulc', 'sulc'), ('classifier2', 'classifier')])]) - - # Parcellation Statistics 2 - parcellation_stats_white_2 = parcellation_stats_white.clone( - name="Parcellation_Statistics_{0}_2".format(hemisphere)) - parcellation_stats_white_2.inputs.hemisphere = hemisphere - parcellation_stats_white_2.inputs.out_color = 'aparc.annot.a2009s.ctab' - parcellation_stats_white_2.inputs.out_table = '{0}.aparc.a2009s.stats'.format( - hemisphere) - hemiwf2.connect([(hemi_inputspec2, parcellation_stats_white_2, [ - ('wm', 'wm'), - ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), - ('transform', 'transform'), - ('brainmask', 'brainmask'), - ('aseg_presurf', 'aseg'), - ('cortex_label', 'cortex_label'), - ('cortex_label', 'in_cortex'), - ('lh_pial', 'lh_pial'), - ('rh_pial', 'rh_pial'), - ('thickness', 'thickness'), - ('ribbon', 'ribbon'), - ]), (cortical_parcellation_2, parcellation_stats_white_2, - 
[('out_file', 'in_annotation')])]) - - # Cortical Parcellation 3 - cortical_parcellation_3 = pe.Node( - MRIsCALabel(), - name="Cortical_Parcellation_{0}_3".format(hemisphere)) - cortical_parcellation_3.inputs.out_file = '{0}.aparc.DKTatlas40.annot'.format( - hemisphere) - cortical_parcellation_3.inputs.hemisphere = hemisphere - cortical_parcellation_3.inputs.seed = 1234 - cortical_parcellation_3.inputs.copy_inputs = True - hemiwf2.connect([(hemi_inputspec2, cortical_parcellation_3, - [('smoothwm', 'smoothwm'), ('aseg_presurf', 'aseg'), - ('cortex_label', 'label'), ('sphere_reg', - 'canonsurf'), ('curv', - 'curv'), - ('sulc', 'sulc'), ('classifier3', 'classifier')])]) - - # Parcellation Statistics 3 - parcellation_stats_white_3 = parcellation_stats_white.clone( - name="Parcellation_Statistics_{0}_3".format(hemisphere)) - parcellation_stats_white_3.inputs.out_color = 'aparc.annot.DKTatlas40.ctab' - parcellation_stats_white_3.inputs.out_table = '{0}.aparc.DKTatlas40.stats'.format( - hemisphere) - parcellation_stats_white_3.inputs.hemisphere = hemisphere - - hemiwf2.connect([(hemi_inputspec2, parcellation_stats_white_3, [ - ('wm', 'wm'), - ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), - ('transform', 'transform'), - ('brainmask', 'brainmask'), - ('aseg_presurf', 'aseg'), - ('cortex_label', 'cortex_label'), - ('cortex_label', 'in_cortex'), - ('lh_pial', 'lh_pial'), - ('rh_pial', 'rh_pial'), - ('thickness', 'thickness'), - ('ribbon', 'ribbon'), - ]), (cortical_parcellation_3, parcellation_stats_white_3, - [('out_file', 'in_annotation')])]) - - # WM/GM Contrast - contrast = pe.Node( - Contrast(), name="WM_GM_Contrast_{0}".format(hemisphere)) - contrast.inputs.hemisphere = hemisphere - contrast.inputs.copy_inputs = True - - hemiwf2.connect([ - (hemi_inputspec2, contrast, [ - ('orig_mgz', 'orig'), - ('rawavg', 'rawavg'), - ('{0}_white'.format(hemisphere), 'white'), - ('cortex_label', 'cortex'), - ('aparc_annot', 'annotation'), - ('thickness', 'thickness'), - ]), - ]) - - hemi_outputs2 = [ - 'aparc_annot_ctab', - 'aparc_stats', - 'aparc_pial_stats', - 'aparc_a2009s_annot', - 'aparc_a2009s_annot_ctab', - 'aparc_a2009s_annot_stats', - 'aparc_DKTatlas40_annot', - 'aparc_DKTatlas40_annot_ctab', - 'aparc_DKTatlas40_annot_stats', - 'wg_pct_mgh', - 'wg_pct_stats', - 'pctsurfcon_log', - ] - hemi_outputspec2 = pe.Node( - IdentityInterface(fields=hemi_outputs2), name="outputspec") - - hemiwf2.connect([ - (contrast, hemi_outputspec2, - [('out_contrast', 'wg_pct_mgh'), ('out_stats', 'wg_pct_stats'), - ('out_log', 'pctsurfcon_log')]), - (parcellation_stats_white_3, hemi_outputspec2, - [('out_color', 'aparc_DKTatlas40_annot_ctab'), - ('out_table', 'aparc_DKTatlas40_annot_stats')]), - (cortical_parcellation_3, hemi_outputspec2, - [('out_file', 'aparc_DKTatlas40_annot')]), - (parcellation_stats_white_2, hemi_outputspec2, - [('out_color', 'aparc_a2009s_annot_ctab'), - ('out_table', 'aparc_a2009s_annot_stats')]), - (cortical_parcellation_2, hemi_outputspec2, - [('out_file', 'aparc_a2009s_annot')]), - (parcellation_stats_white, hemi_outputspec2, - [('out_color', 'aparc_annot_ctab'), ('out_table', - 'aparc_stats')]), - (parcellation_stats_pial, hemi_outputspec2, - [('out_table', 'aparc_pial_stats')]), - ]) - # connect inputs to hemisphere2 workflow - ar3_wf.connect([ - (inputspec, hemiwf2, [ - ('wm', 'inputspec.wm'), - ('lh_white', 'inputspec.lh_white'), - ('rh_white', 'inputspec.rh_white'), - ('transform', 'inputspec.transform'), - ('brainmask', 'inputspec.brainmask'), - ('aseg_presurf', 
'inputspec.aseg_presurf'), - ('{0}_cortex_label'.format(hemisphere), - 'inputspec.cortex_label'), - ('{0}_smoothwm'.format(hemisphere), 'inputspec.smoothwm'), - ('orig_mgz', 'inputspec.orig_mgz'), - ('rawavg', 'inputspec.rawavg'), - ('{0}_curv'.format(hemisphere), 'inputspec.curv'), - ('{0}_sulc'.format(hemisphere), 'inputspec.sulc'), - ('{0}_classifier2'.format(hemisphere), - 'inputspec.classifier2'), - ('{0}_classifier3'.format(hemisphere), - 'inputspec.classifier3'), - ]), - (ar3_lh_wf1, hemiwf2, [('outputspec.pial', 'inputspec.lh_pial')]), - (ar3_rh_wf1, hemiwf2, [('outputspec.pial', 'inputspec.rh_pial')]), - (hemiwf1, hemiwf2, - [('outputspec.thickness_pial', 'inputspec.thickness'), - ('outputspec.aparc_annot', 'inputspec.aparc_annot'), - ('outputspec.sphere_reg', 'inputspec.sphere_reg')]), - (volume_mask, hemiwf2, [('out_ribbon', 'inputspec.ribbon')]), - ]) - # End hemisphere2 workflow - - # APARC to ASEG - # Adds information from the ribbon into the aseg.mgz (volume parcellation). - aparc_2_aseg = pe.Node(Aparc2Aseg(), name="Aparc2Aseg") - aparc_2_aseg.inputs.volmask = True - aparc_2_aseg.inputs.copy_inputs = True - aparc_2_aseg.inputs.out_file = "aparc+aseg.mgz" - ar3_wf.connect([(inputspec, aparc_2_aseg, [ - ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), - ]), (ar3_lh_wf1, aparc_2_aseg, [ - ('outputspec.pial', 'lh_pial'), - ('outputspec.aparc_annot', 'lh_annotation'), - ]), (ar3_rh_wf1, aparc_2_aseg, [ - ('outputspec.pial', 'rh_pial'), - ('outputspec.aparc_annot', 'rh_annotation'), - ]), (volume_mask, aparc_2_aseg, [ - ('rh_ribbon', 'rh_ribbon'), - ('lh_ribbon', 'lh_ribbon'), - ('out_ribbon', 'ribbon'), - ])]) - if fsvernum < 6: - ar3_wf.connect([(inputspec, aparc_2_aseg, [('aseg_presurf', 'aseg')])]) - else: - # Relabel Hypointensities - relabel_hypos = pe.Node( - RelabelHypointensities(), name="Relabel_Hypointensities") - relabel_hypos.inputs.out_file = 'aseg.presurf.hypos.mgz' - ar3_wf.connect([(inputspec, relabel_hypos, - [('aseg_presurf', 'aseg'), ('lh_white', 'lh_white'), - ('rh_white', 'rh_white')])]) - ar3_wf.connect([(relabel_hypos, aparc_2_aseg, [('out_file', 'aseg')])]) - - aparc_2_aseg_2009 = pe.Node(Aparc2Aseg(), name="Aparc2Aseg_2009") - aparc_2_aseg_2009.inputs.volmask = True - aparc_2_aseg_2009.inputs.a2009s = True - aparc_2_aseg_2009.inputs.copy_inputs = True - aparc_2_aseg_2009.inputs.out_file = "aparc.a2009s+aseg.mgz" - ar3_wf.connect([(inputspec, aparc_2_aseg_2009, [ - ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), - ]), (ar3_lh_wf1, aparc_2_aseg_2009, [ - ('outputspec.pial', 'lh_pial'), - ]), (ar3_lh_wf2, aparc_2_aseg_2009, [('outputspec.aparc_a2009s_annot', - 'lh_annotation')]), - (ar3_rh_wf2, aparc_2_aseg_2009, - [('outputspec.aparc_a2009s_annot', - 'rh_annotation')]), (ar3_rh_wf1, aparc_2_aseg_2009, [ - ('outputspec.pial', 'rh_pial'), - ]), (volume_mask, aparc_2_aseg_2009, - [('rh_ribbon', 'rh_ribbon'), - ('lh_ribbon', 'lh_ribbon'), ('out_ribbon', - 'ribbon')])]) - - if fsvernum >= 6: - apas_2_aseg = pe.Node(Apas2Aseg(), name="Apas_2_Aseg") - ar3_wf.connect([(aparc_2_aseg, apas_2_aseg, [('out_file', 'in_file')]), - (relabel_hypos, aparc_2_aseg_2009, [('out_file', - 'aseg')])]) - else: - # aseg.mgz gets edited in place, so we'll copy and pass it to the - # outputspec once aparc_2_aseg has completed - def out_aseg(in_aparcaseg, in_aseg, out_file): - import shutil - import os - out_file = os.path.abspath(out_file) - shutil.copy(in_aseg, out_file) - return out_file - - apas_2_aseg = pe.Node( - Function(['in_aparcaseg', 'in_aseg', 'out_file'], 
['out_file'], - out_aseg), - name="Aseg") - ar3_wf.connect( - [(aparc_2_aseg, apas_2_aseg, [('out_file', 'in_aparcaseg')]), - (inputspec, apas_2_aseg, [('aseg_presurf', 'in_aseg')]), - (inputspec, aparc_2_aseg_2009, [('aseg_presurf', 'aseg')])]) - - apas_2_aseg.inputs.out_file = "aseg.mgz" - - # Segmentation Stats - """ - Computes statistics on the segmented subcortical structures found in - mri/aseg.mgz. Writes output to file stats/aseg.stats. - """ - - segstats = pe.Node(SegStatsReconAll(), name="Segmentation_Statistics") - segstats.inputs.empty = True - segstats.inputs.brain_vol = 'brain-vol-from-seg' - segstats.inputs.exclude_ctx_gm_wm = True - segstats.inputs.supratent = True - segstats.inputs.subcort_gm = True - segstats.inputs.etiv = True - segstats.inputs.wm_vol_from_surf = True - segstats.inputs.cortex_vol_from_surf = True - segstats.inputs.total_gray = True - segstats.inputs.euler = True - segstats.inputs.exclude_id = 0 - segstats.inputs.intensity_units = "MR" - segstats.inputs.summary_file = 'aseg.stats' - segstats.inputs.copy_inputs = True - - ar3_wf.connect([ - (apas_2_aseg, segstats, [('out_file', 'segmentation_file')]), - (inputspec, segstats, [ - ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), - ('transform', 'transform'), - ('norm', 'in_intensity'), - ('norm', 'partial_volume_file'), - ('brainmask', 'brainmask_file'), - ('lh_orig_nofix', 'lh_orig_nofix'), - ('rh_orig_nofix', 'rh_orig_nofix'), - ('lookup_table', 'color_table_file'), - ]), - (volume_mask, segstats, [('out_ribbon', 'ribbon')]), - (ar3_lh_wf1, segstats, [ - ('outputspec.pial', 'lh_pial'), - ]), - (ar3_rh_wf1, segstats, [ - ('outputspec.pial', 'rh_pial'), - ]), - ]) - - if fsvernum >= 6: - ar3_wf.connect(inputspec, 'aseg_presurf', segstats, 'presurf_seg') - else: - ar3_wf.connect(inputspec, 'aseg_presurf', segstats, 'aseg') - - # White Matter Parcellation - - # Adds WM Parcellation info into the aseg and computes stat. 
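The small helpers defined inline above (out_aseg here, copy_ltas and copy_file earlier) are wrapped in utility Function nodes. Because Nipype re-executes the wrapped function from its source in a fresh namespace at run time, every module the helper needs must be imported inside its body, which is why those helpers import os and shutil internally. A minimal sketch of the same pattern, with an illustrative helper and node name:

    from nipype.interfaces.utility import Function
    from nipype.pipeline import engine as pe

    def copy_to(in_file, out_file):
        # imports live inside the body: the function runs in its own namespace
        import os
        import shutil
        out_file = os.path.abspath(out_file)
        shutil.copy(in_file, out_file)
        return out_file

    copy_node = pe.Node(
        Function(input_names=['in_file', 'out_file'],
                 output_names=['out_file'],
                 function=copy_to),
        name='Copy_Example')
    copy_node.inputs.out_file = 'copy.mgz'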
- - wm_parcellation = pe.Node(Aparc2Aseg(), name="WM_Parcellation") - wm_parcellation.inputs.volmask = True - wm_parcellation.inputs.label_wm = True - wm_parcellation.inputs.hypo_wm = True - wm_parcellation.inputs.rip_unknown = True - wm_parcellation.inputs.copy_inputs = True - wm_parcellation.inputs.out_file = "wmparc.mgz" - - ar3_wf.connect([(inputspec, wm_parcellation, [ - ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), - ]), (ar3_lh_wf1, wm_parcellation, [ - ('outputspec.pial', 'lh_pial'), - ('outputspec.aparc_annot', 'lh_annotation'), - ]), (ar3_rh_wf1, wm_parcellation, [ - ('outputspec.pial', 'rh_pial'), - ('outputspec.aparc_annot', 'rh_annotation'), - ]), (volume_mask, wm_parcellation, [ - ('rh_ribbon', 'rh_ribbon'), - ('lh_ribbon', 'lh_ribbon'), - ('out_ribbon', 'ribbon'), - ]), (apas_2_aseg, wm_parcellation, [('out_file', 'aseg')]), - (aparc_2_aseg, wm_parcellation, [('out_file', 'ctxseg')])]) - - if fsvernum < 6: - ar3_wf.connect([(inputspec, wm_parcellation, [('filled', 'filled')])]) - - # White Matter Segmentation Stats - - wm_segstats = pe.Node( - SegStatsReconAll(), name="WM_Segmentation_Statistics") - wm_segstats.inputs.intensity_units = "MR" - wm_segstats.inputs.wm_vol_from_surf = True - wm_segstats.inputs.etiv = True - wm_segstats.inputs.copy_inputs = True - wm_segstats.inputs.exclude_id = 0 - wm_segstats.inputs.summary_file = "wmparc.stats" - - ar3_wf.connect([ - (wm_parcellation, wm_segstats, [('out_file', 'segmentation_file')]), - (inputspec, wm_segstats, [ - ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), - ('transform', 'transform'), - ('norm', 'in_intensity'), - ('norm', 'partial_volume_file'), - ('brainmask', 'brainmask_file'), - ('lh_orig_nofix', 'lh_orig_nofix'), - ('rh_orig_nofix', 'rh_orig_nofix'), - ('wm_lookup_table', 'color_table_file'), - ]), - (volume_mask, wm_segstats, [('out_ribbon', 'ribbon')]), - (ar3_lh_wf1, wm_segstats, [ - ('outputspec.pial', 'lh_pial'), - ]), - (ar3_rh_wf1, wm_segstats, [ - ('outputspec.pial', 'rh_pial'), - ]), - ]) - - if fsvernum >= 6: - ar3_wf.connect(inputspec, 'aseg_presurf', wm_segstats, 'presurf_seg') - else: - ar3_wf.connect(inputspec, 'aseg_presurf', wm_segstats, 'aseg') - - # add brodman area maps to the workflow - ba_WF, ba_outputs = create_ba_maps_wf( - th3=th3, exvivo=exvivo, entorhinal=entorhinal) - - ar3_wf.connect([(ar3_lh_wf1, ba_WF, [ - ('outputspec.sphere_reg', 'inputspec.lh_sphere_reg'), - ('outputspec.thickness_pial', 'inputspec.lh_thickness'), - ('outputspec.pial', 'inputspec.lh_pial'), - ]), (ar3_rh_wf1, ba_WF, [ - ('outputspec.sphere_reg', 'inputspec.rh_sphere_reg'), - ('outputspec.thickness_pial', 'inputspec.rh_thickness'), - ('outputspec.pial', 'inputspec.rh_pial'), - ]), (inputspec, ba_WF, [ - ('lh_white', 'inputspec.lh_white'), - ('rh_white', 'inputspec.rh_white'), - ('transform', 'inputspec.transform'), - ('aseg_presurf', 'inputspec.aseg'), - ('brainmask', 'inputspec.brainmask'), - ('wm', 'inputspec.wm'), - ('lh_orig', 'inputspec.lh_orig'), - ('rh_orig', 'inputspec.rh_orig'), - ('lh_cortex_label', 'inputspec.lh_cortex_label'), - ('rh_cortex_label', 'inputspec.rh_cortex_label'), - ('src_subject_dir', 'inputspec.src_subject_dir'), - ('src_subject_id', 'inputspec.src_subject_id'), - ('color_table', 'inputspec.color_table'), - ]), (volume_mask, ba_WF, [('out_ribbon', 'inputspec.ribbon')])]) - - if qcache: - source_inputs = ['lh_sphere_reg', 'rh_sphere_reg'] - source_subject = pe.Node( - DataGrabber(outfields=source_inputs), - name="{0}_srcsubject".format(hemisphere)) - 
source_subject.inputs.template = '*' - source_subject.inputs.sort_filelist = False - source_subject.inputs.field_template = dict( - lh_sphere_reg='surf/lh.sphere.reg', - rh_sphere_reg='surf/rh.sphere.reg') - - qcache_wf = pe.Workflow("QCache") - - measurements = [ - 'thickness', 'area', 'area.pial', 'volume', 'curv', 'sulc', - 'white.K', 'white.H', 'jacobian_white', 'w-g.pct.mgh' - ] - - qcache_inputs = list() - for source_file in source_inputs: - qcache_inputs.append('source_' + source_file) - qcache_config = dict() - qcache_outputs = list() - for hemisphere in ['lh', 'rh']: - qcache_config[hemisphere] = dict() - for meas_name in measurements: - qcache_config[hemisphere][meas_name] = dict() - - if meas_name == 'thickness': - meas_file = hemisphere + '_' + meas_name + '_pial' - else: - meas_file = hemisphere + '_' + meas_name.replace( - '.', '_').replace('-', '') - qcache_inputs.append(meas_file) - - preproc_name = "Preproc_{0}".format(meas_file) - preproc_out = '{0}.{1}.{2}.mgh'.format( - hemisphere, meas_name, config['src_subject_id']) - preproc_out_name = preproc_out.replace('.', '_') - qcache_config[hemisphere][meas_name]['preproc'] = dict( - infile=meas_file, - name=preproc_name, - out=preproc_out, - out_name=preproc_out_name) - qcache_outputs.append(preproc_out_name) - - qcache_config[hemisphere][meas_name]['smooth'] = dict() - for value in range(0, 26, 5): - smooth_name = "Smooth_{0}_{1}".format(meas_file, value) - smooth_out = "{0}.{1}.fwhm{2}.{3}.mgh".format( - hemisphere, meas_name, value, config['src_subject_id']) - smooth_out_name = smooth_out.replace('.', '_') - qcache_config[hemisphere][meas_name]['smooth'][ - value] = dict( - name=smooth_name, - out=smooth_out, - out_name=smooth_out_name) - qcache_outputs.append(smooth_out_name) - - qcache_inputs.append(hemisphere + '_sphere_reg') - - qcache_inputspec = pe.Node( - IdentityInterface(fields=qcache_inputs), name="inputspec") - - qcache_outputspec = pe.Node( - IdentityInterface(fields=qcache_outputs), name="outputspec") - - for hemi in qcache_config.iterkeys(): - for meas_config in qcache_config[hemi].itervalues(): - preprocess = pe.Node( - MRISPreprocReconAll(), name=meas_config['preproc']['name']) - target_id = config['src_subject_id'] - preprocess.inputs.out_file = meas_config['preproc']['out'] - preprocess.inputs.target = target_id - preprocess.inputs.hemi = hemi - preprocess.inputs.copy_inputs = True - - qcache_merge = pe.Node( - Merge(2), - name="Merge{0}".format(meas_config['preproc']['name'])) - - qcache_wf.connect([ - (qcache_inputspec, qcache_merge, - [('lh_sphere_reg', 'in1'), ('rh_sphere_reg', 'in2')]), - (qcache_inputspec, preprocess, - [(meas_config['preproc']['infile'], 'surf_measure_file'), - ('source_lh_sphere_reg', 'lh_surfreg_target'), - ('source_rh_sphere_reg', 'rh_surfreg_target')]), - (qcache_merge, preprocess, [('out', 'surfreg_files')]), - (preprocess, qcache_outputspec, - [('out_file', meas_config['preproc']['out_name'])]), - ]) - - for value, val_config in meas_config['smooth'].iteritems(): - surf2surf = pe.Node( - SurfaceSmooth(), name=val_config['name']) - surf2surf.inputs.fwhm = value - surf2surf.inputs.cortex = True - surf2surf.inputs.subject_id = target_id - surf2surf.inputs.hemi = hemisphere - surf2surf.inputs.out_file = val_config['out'] - qcache_wf.connect( - [(preprocess, surf2surf, [('out_file', 'in_file')]), - (surf2surf, qcache_outputspec, - [('out_file', val_config['out_name'])])]) - - # connect qcache inputs - ar3_wf.connect([ - (inputspec, qcache_wf, - [('lh_curv', 'inputspec.lh_curv'), 
('rh_curv', - 'inputspec.rh_curv'), - ('lh_sulc', 'inputspec.lh_sulc'), ('rh_sulc', - 'inputspec.rh_sulc'), - ('lh_white_K', 'inputspec.lh_white_K'), ('rh_white_K', - 'inputspec.rh_white_K'), - ('lh_area', 'inputspec.lh_area'), ('rh_area', - 'inputspec.rh_area')]), - (ar3_lh_wf1, qcache_wf, - [('outputspec.thickness_pial', 'inputspec.lh_thickness_pial'), - ('outputspec.area_pial', - 'inputspec.lh_area_pial'), ('outputspec.volume', - 'inputspec.lh_volume'), - ('outputspec.jacobian_white', - 'inputspec.lh_jacobian_white'), ('outputspec.sphere_reg', - 'inputspec.lh_sphere_reg')]), - (ar3_lh_wf2, qcache_wf, [('outputspec.wg_pct_mgh', - 'inputspec.lh_wg_pct_mgh')]), - (ar3_rh_wf1, qcache_wf, - [('outputspec.thickness_pial', 'inputspec.rh_thickness_pial'), - ('outputspec.area_pial', - 'inputspec.rh_area_pial'), ('outputspec.volume', - 'inputspec.rh_volume'), - ('outputspec.jacobian_white', - 'inputspec.rh_jacobian_white'), ('outputspec.sphere_reg', - 'inputspec.rh_sphere_reg')]), - (ar3_rh_wf2, qcache_wf, [('outputspec.wg_pct_mgh', - 'inputspec.rh_wg_pct_mgh')]), - ]) - for source_file in source_inputs: - ar3_wf.connect([(inputspec, source_subject, [('source_subject_dir', - 'base_directory')]), - (source_subject, qcache_wf, - [(source_file, - 'inputspec.source_' + source_file)])]) - # end qcache workflow - - # Add outputs to outputspec - ar3_outputs = [ - 'aseg', 'wmparc', 'wmparc_stats', 'aseg_stats', 'aparc_a2009s_aseg', - 'aparc_aseg', 'aseg_presurf_hypos', 'ribbon', 'rh_ribbon', 'lh_ribbon' - ] - for output in hemi_outputs1 + hemi_outputs2: - for hemi in ('lh_', 'rh_'): - ar3_outputs.append(hemi + output) - if qcache: - ar3_outputs.extend(qcache_outputs) - - ar3_outputs.extend(ba_outputs) - - outputspec = pe.Node( - IdentityInterface(fields=ar3_outputs), name="outputspec") - - ar3_wf.connect([(apas_2_aseg, outputspec, - [('out_file', 'aseg')]), (wm_parcellation, outputspec, - [('out_file', 'wmparc')]), - (wm_segstats, outputspec, - [('summary_file', - 'wmparc_stats')]), (segstats, outputspec, - [('summary_file', 'aseg_stats')]), - (aparc_2_aseg_2009, outputspec, - [('out_file', - 'aparc_a2009s_aseg')]), (aparc_2_aseg, outputspec, - [('out_file', 'aparc_aseg')]), - (volume_mask, outputspec, - [('out_ribbon', 'ribbon'), ('lh_ribbon', 'lh_ribbon'), - ('rh_ribbon', 'rh_ribbon')])]) - if fsvernum >= 6: - ar3_wf.connect([(relabel_hypos, outputspec, [('out_file', - 'aseg_presurf_hypos')])]) - - for i, outputs in enumerate([hemi_outputs1, hemi_outputs2]): - if i == 0: - lhwf = ar3_lh_wf1 - rhwf = ar3_rh_wf1 - else: - lhwf = ar3_lh_wf2 - rhwf = ar3_rh_wf2 - for output in outputs: - ar3_wf.connect([(lhwf, outputspec, [('outputspec.' + output, - 'lh_' + output)]), - (rhwf, outputspec, [('outputspec.' + output, - 'rh_' + output)])]) - - for output in ba_outputs: - ar3_wf.connect([(ba_WF, outputspec, [('outputspec.' + output, - output)])]) - - if qcache: - for output in qcache_outputs: - ar3_wf.connect([(qcache_wf, outputspec, [('outputspec.' 
+ output, - output)])]) - - return ar3_wf, ar3_outputs diff --git a/nipype/workflows/smri/freesurfer/ba_maps.py b/nipype/workflows/smri/freesurfer/ba_maps.py deleted file mode 100644 index 8a4ae6caf1..0000000000 --- a/nipype/workflows/smri/freesurfer/ba_maps.py +++ /dev/null @@ -1,172 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) -import os -from ....interfaces.utility import Function, IdentityInterface -from ....pipeline import engine as pe # pypeline engine -from ....interfaces.freesurfer import Label2Label, Label2Annot, ParcellationStats -from ....interfaces.io import DataGrabber -from ....interfaces.utility import Merge - - -def create_ba_maps_wf(name="Brodmann_Area_Maps", - th3=True, - exvivo=True, - entorhinal=True): - # Brodmann Area Maps (BA Maps) and Hinds V1 Atlas - inputs = [ - 'lh_sphere_reg', 'rh_sphere_reg', 'lh_white', 'rh_white', 'lh_pial', - 'rh_pial', 'lh_orig', 'rh_orig', 'transform', 'lh_thickness', - 'rh_thickness', 'lh_cortex_label', 'rh_cortex_label', 'brainmask', - 'aseg', 'ribbon', 'wm', 'src_subject_id', 'src_subject_dir', - 'color_table' - ] - - inputspec = pe.Node(IdentityInterface(fields=inputs), name="inputspec") - - ba_WF = pe.Workflow(name=name) - - ba_outputs = [ - 'lh_BAMaps_stats', 'lh_color', 'lh_BAMaps_labels', - 'lh_BAMaps_annotation', 'lh_thresh_BAMaps_stats', 'lh_thresh_color', - 'lh_thresh_BAMaps_labels', 'lh_thresh_BAMaps_annotation', - 'rh_BAMaps_stats', 'rh_color', 'rh_BAMaps_labels', - 'rh_BAMaps_annotation', 'rh_thresh_BAMaps_stats', 'rh_thresh_color', - 'rh_thresh_BAMaps_labels', 'rh_thresh_BAMaps_annotation' - ] - - outputspec = pe.Node( - IdentityInterface(fields=ba_outputs), name="outputspec") - - labels = [ - "BA1", "BA2", "BA3a", "BA3b", "BA4a", "BA4p", "BA6", "BA44", "BA45", - "V1", "V2", "MT", "perirhinal" - ] - if entorhinal: - labels.insert(-1, 'entorhinal') - for hemisphere in ['lh', 'rh']: - for threshold in [True, False]: - field_template = dict( - sphere_reg='surf/{0}.sphere.reg'.format(hemisphere), - white='surf/{0}.white'.format(hemisphere)) - - out_files = list() - source_fields = list() - if threshold: - for label in labels: - if label == 'perirhinal' and not entorhinal: - # versions < 6.0 do not use thresh.perirhinal - continue - if exvivo: - out_file = '{0}.{1}_exvivo.thresh.label'.format( - hemisphere, label) - else: - out_file = '{0}.{1}.thresh.label'.format( - hemisphere, label) - out_files.append(out_file) - field_template[label] = 'label/' + out_file - source_fields.append(label) - node_name = 'BA_Maps_' + hemisphere + '_Thresh' - else: - for label in labels: - if exvivo: - out_file = '{0}.{1}_exvivo.label'.format( - hemisphere, label) - else: - out_file = '{0}.{1}.label'.format(hemisphere, label) - - out_files.append(out_file) - field_template[label] = 'label/' + out_file - source_fields.append(label) - node_name = 'BA_Maps_' + hemisphere - - source_subject = pe.Node( - DataGrabber(outfields=source_fields + ['sphere_reg', 'white']), - name=node_name + "_srcsubject") - source_subject.inputs.template = '*' - source_subject.inputs.sort_filelist = False - source_subject.inputs.field_template = field_template - ba_WF.connect([(inputspec, source_subject, [('src_subject_dir', - 'base_directory')])]) - - merge_labels = pe.Node( - Merge(len(out_files)), name=node_name + "_Merge") - for i, label in enumerate(source_fields): - ba_WF.connect([(source_subject, merge_labels, - [(label, 'in{0}'.format(i + 1))])]) - - node = pe.MapNode( - Label2Label(), - 
name=node_name + '_Label2Label', - iterfield=['source_label', 'out_file']) - node.inputs.hemisphere = hemisphere - node.inputs.out_file = out_files - node.inputs.copy_inputs = True - - ba_WF.connect( - [(merge_labels, node, [('out', 'source_label')]), - (source_subject, node, [('sphere_reg', 'source_sphere_reg'), - ('white', 'source_white')]), - (inputspec, node, [('src_subject_id', 'source_subject')])]) - - label2annot = pe.Node(Label2Annot(), name=node_name + '_2_Annot') - label2annot.inputs.hemisphere = hemisphere - label2annot.inputs.verbose_off = True - label2annot.inputs.keep_max = True - label2annot.inputs.copy_inputs = True - - stats_node = pe.Node( - ParcellationStats(), name=node_name + '_Stats') - stats_node.inputs.hemisphere = hemisphere - stats_node.inputs.mgz = True - stats_node.inputs.th3 = th3 - stats_node.inputs.surface = 'white' - stats_node.inputs.tabular_output = True - stats_node.inputs.copy_inputs = True - - if threshold: - label2annot.inputs.out_annot = "BA_exvivo.thresh" - ba_WF.connect( - [(stats_node, outputspec, - [('out_color', '{0}_thresh_color'.format(hemisphere)), - ('out_table', - '{0}_thresh_BAMaps_stats'.format(hemisphere))]), - (label2annot, outputspec, - [('out_file', - '{0}_thresh_BAMaps_annotation'.format(hemisphere))]), - (node, outputspec, - [('out_file', - '{0}_thresh_BAMaps_labels'.format(hemisphere))])]) - else: - label2annot.inputs.out_annot = "BA_exvivo" - ba_WF.connect( - [(stats_node, outputspec, - [('out_color', '{0}_color'.format(hemisphere)), - ('out_table', '{0}_BAMaps_stats'.format(hemisphere))]), - (label2annot, outputspec, - [('out_file', - '{0}_BAMaps_annotation'.format(hemisphere))]), - (node, outputspec, - [('out_file', '{0}_BAMaps_labels'.format(hemisphere))])]) - - ba_WF.connect( - [(inputspec, node, [ - ('{0}_sphere_reg'.format(hemisphere), 'sphere_reg'), - ('{0}_white'.format(hemisphere), 'white'), - ]), (node, label2annot, [('out_file', 'in_labels')]), - (inputspec, label2annot, - [('{0}_orig'.format(hemisphere), 'orig'), - ('color_table', 'color_table')]), (label2annot, stats_node, - [('out_file', - 'in_annotation')]), - (inputspec, stats_node, - [('{0}_thickness'.format(hemisphere), - 'thickness'), ('{0}_cortex_label'.format(hemisphere), - 'cortex_label'), ('lh_white', 'lh_white'), - ('rh_white', 'rh_white'), ('lh_pial', 'lh_pial'), - ('rh_pial', 'rh_pial'), ('transform', - 'transform'), ('brainmask', - 'brainmask'), - ('aseg', 'aseg'), ('wm', 'wm'), ('ribbon', 'ribbon')])]) - - return ba_WF, ba_outputs diff --git a/nipype/workflows/smri/freesurfer/bem.py b/nipype/workflows/smri/freesurfer/bem.py deleted file mode 100644 index b959de4852..0000000000 --- a/nipype/workflows/smri/freesurfer/bem.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -from ....pipeline import engine as pe -from ....interfaces import mne as mne -from ....interfaces import freesurfer as fs -from ....interfaces import utility as niu - - -def create_bem_flow(name='bem', out_format='stl'): - """Uses MNE's Watershed algorithm to create Boundary Element Meshes (BEM) - for a subject's brain, inner/outer skull, and skin. The surfaces are - returned in the desired (by default, stereolithic .stl) format. - - Example - ------- - >>> from nipype.workflows.smri.freesurfer import create_bem_flow - >>> bemflow = create_bem_flow() - >>> bemflow.inputs.inputspec.subject_id = 'subj1' - >>> bemflow.inputs.inputspec.subjects_dir = '.' 
- >>> bemflow.run() # doctest: +SKIP - - - Inputs:: - - inputspec.subject_id : freesurfer subject id - inputspec.subjects_dir : freesurfer subjects directory - - Outputs:: - - outputspec.meshes : output boundary element meshes in (by default) - stereolithographic (.stl) format - """ - """ - Initialize the workflow - """ - - bemflow = pe.Workflow(name=name) - """ - Define the inputs to the workflow. - """ - - inputnode = pe.Node( - niu.IdentityInterface(fields=['subject_id', 'subjects_dir']), - name='inputspec') - """ - Define all the nodes of the workflow: - - fssource: used to retrieve aseg.mgz - mri_convert : converts aseg.mgz to aseg.nii - tessellate : tessellates regions in aseg.mgz - surfconvert : converts regions to stereolithographic (.stl) format - - """ - - watershed_bem = pe.Node(interface=mne.WatershedBEM(), name='WatershedBEM') - - surfconvert = pe.MapNode( - fs.MRIsConvert(out_datatype=out_format), - iterfield=['in_file'], - name='surfconvert') - """ - Connect the nodes - """ - - bemflow.connect([ - (inputnode, watershed_bem, [('subject_id', 'subject_id'), - ('subjects_dir', 'subjects_dir')]), - (watershed_bem, surfconvert, [('mesh_files', 'in_file')]), - ]) - """ - Setup an outputnode that defines relevant inputs of the workflow. - """ - - outputnode = pe.Node( - niu.IdentityInterface(fields=["meshes"]), name="outputspec") - bemflow.connect([ - (surfconvert, outputnode, [("converted", "meshes")]), - ]) - return bemflow diff --git a/nipype/workflows/smri/freesurfer/recon.py b/nipype/workflows/smri/freesurfer/recon.py deleted file mode 100644 index f7fa593a49..0000000000 --- a/nipype/workflows/smri/freesurfer/recon.py +++ /dev/null @@ -1,604 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from ....pipeline import engine as pe -from ....interfaces import freesurfer as fs -from ....interfaces import utility as niu -from .autorecon1 import create_AutoRecon1 -from .autorecon2 import create_AutoRecon2 -from .autorecon3 import create_AutoRecon3 -from ....interfaces.freesurfer import AddXFormToHeader, Info -from ....interfaces.io import DataSink -from .utils import getdefaultconfig -from .... import logging - -logger = logging.getLogger('nipype.workflow') - - -def create_skullstripped_recon_flow(name="skullstripped_recon_all"): - """Performs recon-all on voulmes that are already skull stripped. - FreeSurfer failes to perform skullstrippig on some volumes (especially - MP2RAGE). This can be avoided by doing skullstripping before running - recon-all (using for example SPECTRE algorithm). 
- - Example - ------- - >>> from nipype.workflows.smri.freesurfer import create_skullstripped_recon_flow - >>> recon_flow = create_skullstripped_recon_flow() - >>> recon_flow.inputs.inputspec.subject_id = 'subj1' - >>> recon_flow.inputs.inputspec.T1_files = 'T1.nii.gz' - >>> recon_flow.run() # doctest: +SKIP - - - Inputs:: - inputspec.T1_files : skullstripped T1_files (mandatory) - inputspec.subject_id : freesurfer subject id (optional) - inputspec.subjects_dir : freesurfer subjects directory (optional) - - Outputs:: - - outputspec.subject_id : freesurfer subject id - outputspec.subjects_dir : freesurfer subjects directory - """ - wf = pe.Workflow(name=name) - - inputnode = pe.Node( - niu.IdentityInterface( - fields=['subject_id', 'subjects_dir', 'T1_files']), - name='inputspec') - - autorecon1 = pe.Node(fs.ReconAll(), name="autorecon1") - autorecon1.plugin_args = {'submit_specs': 'request_memory = 2500'} - autorecon1.inputs.directive = "autorecon1" - autorecon1.inputs.args = "-noskullstrip" - autorecon1._interface._can_resume = False - - wf.connect(inputnode, "T1_files", autorecon1, "T1_files") - wf.connect(inputnode, "subjects_dir", autorecon1, "subjects_dir") - wf.connect(inputnode, "subject_id", autorecon1, "subject_id") - - def link_masks(subjects_dir, subject_id): - import os - os.symlink( - os.path.join(subjects_dir, subject_id, "mri", "T1.mgz"), - os.path.join(subjects_dir, subject_id, "mri", - "brainmask.auto.mgz")) - os.symlink( - os.path.join(subjects_dir, subject_id, "mri", - "brainmask.auto.mgz"), - os.path.join(subjects_dir, subject_id, "mri", "brainmask.mgz")) - return subjects_dir, subject_id - - masks = pe.Node( - niu.Function( - input_names=['subjects_dir', 'subject_id'], - output_names=['subjects_dir', 'subject_id'], - function=link_masks), - name="link_masks") - - wf.connect(autorecon1, "subjects_dir", masks, "subjects_dir") - wf.connect(autorecon1, "subject_id", masks, "subject_id") - - autorecon_resume = pe.Node(fs.ReconAll(), name="autorecon_resume") - autorecon_resume.plugin_args = {'submit_specs': 'request_memory = 2500'} - autorecon_resume.inputs.args = "-no-isrunning" - wf.connect(masks, "subjects_dir", autorecon_resume, "subjects_dir") - wf.connect(masks, "subject_id", autorecon_resume, "subject_id") - - outputnode = pe.Node( - niu.IdentityInterface(fields=['subject_id', 'subjects_dir']), - name='outputspec') - - wf.connect(autorecon_resume, "subjects_dir", outputnode, "subjects_dir") - wf.connect(autorecon_resume, "subject_id", outputnode, "subject_id") - return wf - - -def create_reconall_workflow(name="ReconAll", plugin_args=None): - """Creates the ReconAll workflow in Nipype. This workflow is designed to - run the same commands as FreeSurfer's reconall script but with the added - features that a Nipype workflow provides. Before running this workflow, it - is necessary to have the FREESURFER_HOME environmental variable set to the - directory containing the version of FreeSurfer to be used in this workflow. - - Example - ------- - >>> from nipype.workflows.smri.freesurfer import create_reconall_workflow - >>> recon_all = create_reconall_workflow() - >>> recon_all.inputs.inputspec.subject_id = 'subj1' - >>> recon_all.inputs.inputspec.subjects_dir = '.' 
- >>> recon_all.inputs.inputspec.T1_files = 'T1.nii.gz' - >>> recon_all.run() # doctest: +SKIP - - - Inputs:: - inputspec.subjects_dir : subjects directory (mandatory) - inputspec.subject_id : name of subject (mandatory) - inputspec.T1_files : T1 files (mandatory) - inputspec.T2_file : T2 file (optional) - inputspec.FLAIR_file : FLAIR file (optional) - inputspec.cw256 : Conform inputs to 256 FOV (optional) - inputspec.num_threads: Number of threads on nodes that utilize OpenMP (default=1) - plugin_args : Dictionary of plugin args to set to nodes that utilize OpenMP (optional) - Outputs:: - postdatasink_outputspec.subject_id : name of the datasinked output folder in the subjects directory - - Note: - The input subject_id is not passed to the commands in the workflow. Commands - that require subject_id are reading implicit inputs from - {SUBJECTS_DIR}/{subject_id}. For those commands the subject_id is set to the - default value and SUBJECTS_DIR is set to the node directory. The implicit - inputs are then copied to the node directory in order to mimic a SUBJECTS_DIR - structure. For example, if the command implicitly reads in brainmask.mgz, the - interface would copy that input file to - {node_dir}/{subject_id}/mri/brainmask.mgz and set SUBJECTS_DIR to node_dir. - The workflow only uses the input subject_id to datasink the outputs to - {subjects_dir}/{subject_id}. - """ - reconall = pe.Workflow(name=name) - - inputspec = pe.Node( - niu.IdentityInterface(fields=[ - 'subject_id', 'subjects_dir', 'T1_files', 'T2_file', 'FLAIR_file', - 'num_threads', 'cw256', 'reg_template', 'reg_template_withskull', - 'lh_atlas', 'rh_atlas', 'lh_classifier1', 'rh_classifier1', - 'lh_classifier2', 'rh_classifier2', 'lh_classifier3', - 'rh_classifier3', 'lookup_table', 'wm_lookup_table', - 'src_subject_id', 'src_subject_dir', 'color_table', 'awk_file' - ]), - run_without_submitting=True, - name='inputspec') - - # check freesurfer version and set parameters - fs_version_full = Info.version() - if fs_version_full and ('v6.0' in fs_version_full - or 'dev' in fs_version_full): - # assuming that dev is 6.0 - fsvernum = 6.0 - fs_version = 'v6.0' - th3 = True - shrink = 2 - distance = 200 # 3T should be 50 - stop = 0.0001 - exvivo = True - entorhinal = True - rb_date = "2014-08-21" - else: - # 5.3 is default - fsvernum = 5.3 - if fs_version_full: - if 'v5.3' in fs_version_full: - fs_version = 'v5.3' - else: - fs_version = fs_version_full.split('-')[-1] - logger.info(("Warning: Workflow may not work properly if " - "FREESURFER_HOME environmental variable is not " - "set or if you are using an older version of " - "FreeSurfer")) - else: - fs_version = 5.3 # assume version 5.3 - th3 = False - shrink = None - distance = 50 - stop = None - exvivo = False - entorhinal = False - rb_date = "2008-03-26" - - logger.info("FreeSurfer Version: {0}".format(fs_version)) - - def setconfig(reg_template=None, - reg_template_withskull=None, - lh_atlas=None, - rh_atlas=None, - lh_classifier1=None, - rh_classifier1=None, - lh_classifier2=None, - rh_classifier2=None, - lh_classifier3=None, - rh_classifier3=None, - src_subject_id=None, - src_subject_dir=None, - color_table=None, - lookup_table=None, - wm_lookup_table=None, - awk_file=None, - rb_date=None): - """Set optional configurations to the default""" - - def checkarg(arg, default): - """Returns the value if defined; otherwise default""" - if arg: - return arg - else: - return default - - defaultconfig = getdefaultconfig(exitonfail=True, rb_date=rb_date) - # set the default template and 
classifier files - reg_template = checkarg(reg_template, - defaultconfig['registration_template']) - reg_template_withskull = checkarg( - reg_template_withskull, - defaultconfig['registration_template_withskull']) - lh_atlas = checkarg(lh_atlas, defaultconfig['lh_atlas']) - rh_atlas = checkarg(rh_atlas, defaultconfig['rh_atlas']) - lh_classifier1 = checkarg(lh_classifier1, - defaultconfig['lh_classifier']) - rh_classifier1 = checkarg(rh_classifier1, - defaultconfig['rh_classifier']) - lh_classifier2 = checkarg(lh_classifier2, - defaultconfig['lh_classifier2']) - rh_classifier2 = checkarg(rh_classifier2, - defaultconfig['rh_classifier2']) - lh_classifier3 = checkarg(lh_classifier3, - defaultconfig['lh_classifier3']) - rh_classifier3 = checkarg(rh_classifier3, - defaultconfig['rh_classifier3']) - src_subject_id = checkarg(src_subject_id, - defaultconfig['src_subject_id']) - src_subject_dir = checkarg(src_subject_dir, - defaultconfig['src_subject_dir']) - color_table = checkarg(color_table, defaultconfig['AvgColorTable']) - lookup_table = checkarg(lookup_table, defaultconfig['LookUpTable']) - wm_lookup_table = checkarg(wm_lookup_table, - defaultconfig['WMLookUpTable']) - awk_file = checkarg(awk_file, defaultconfig['awk_file']) - return reg_template, reg_template_withskull, lh_atlas, rh_atlas, \ - lh_classifier1, rh_classifier1, lh_classifier2, rh_classifier2, \ - lh_classifier3, rh_classifier3, src_subject_id, src_subject_dir, \ - color_table, lookup_table, wm_lookup_table, awk_file - - # list of params to check - params = [ - 'reg_template', 'reg_template_withskull', 'lh_atlas', 'rh_atlas', - 'lh_classifier1', 'rh_classifier1', 'lh_classifier2', 'rh_classifier2', - 'lh_classifier3', 'rh_classifier3', 'src_subject_id', - 'src_subject_dir', 'color_table', 'lookup_table', 'wm_lookup_table', - 'awk_file' - ] - - config_node = pe.Node( - niu.Function(params + ['rb_date'], params, setconfig), name="config") - - config_node.inputs.rb_date = rb_date - - for param in params: - reconall.connect(inputspec, param, config_node, param) - - # create AutoRecon1 - ar1_wf, ar1_outputs = create_AutoRecon1( - plugin_args=plugin_args, - stop=stop, - distance=distance, - shrink=shrink, - fsvernum=fsvernum) - # connect inputs for AutoRecon1 - reconall.connect([(inputspec, ar1_wf, [ - ('T1_files', 'inputspec.T1_files'), ('T2_file', 'inputspec.T2_file'), - ('FLAIR_file', 'inputspec.FLAIR_file'), - ('num_threads', 'inputspec.num_threads'), ('cw256', 'inputspec.cw256') - ]), (config_node, ar1_wf, [('reg_template_withskull', - 'inputspec.reg_template_withskull'), - ('awk_file', 'inputspec.awk_file')])]) - # create AutoRecon2 - ar2_wf, ar2_outputs = create_AutoRecon2( - plugin_args=plugin_args, - fsvernum=fsvernum, - stop=stop, - shrink=shrink, - distance=distance) - # connect inputs for AutoRecon2 - reconall.connect( - [(inputspec, ar2_wf, [('num_threads', 'inputspec.num_threads')]), - (config_node, ar2_wf, [('reg_template_withskull', - 'inputspec.reg_template_withskull'), - ('reg_template', 'inputspec.reg_template')]), - (ar1_wf, ar2_wf, [('outputspec.brainmask', 'inputspec.brainmask'), - ('outputspec.talairach', 'inputspec.transform'), - ('outputspec.orig', 'inputspec.orig')])]) - - if fsvernum < 6: - reconall.connect([(ar1_wf, ar2_wf, [('outputspec.nu', - 'inputspec.nu')])]) - - # create AutoRecon3 - ar3_wf, ar3_outputs = create_AutoRecon3( - plugin_args=plugin_args, - th3=th3, - exvivo=exvivo, - entorhinal=entorhinal, - fsvernum=fsvernum) - # connect inputs for AutoRecon3 - reconall.connect( - [(config_node, 
ar3_wf, - [('lh_atlas', 'inputspec.lh_atlas'), - ('rh_atlas', 'inputspec.rh_atlas'), ('lh_classifier1', - 'inputspec.lh_classifier1'), - ('rh_classifier1', - 'inputspec.rh_classifier1'), ('lh_classifier2', - 'inputspec.lh_classifier2'), - ('rh_classifier2', - 'inputspec.rh_classifier2'), ('lh_classifier3', - 'inputspec.lh_classifier3'), - ('rh_classifier3', - 'inputspec.rh_classifier3'), ('lookup_table', - 'inputspec.lookup_table'), - ('wm_lookup_table', - 'inputspec.wm_lookup_table'), ('src_subject_dir', - 'inputspec.src_subject_dir'), - ('src_subject_id', - 'inputspec.src_subject_id'), ('color_table', - 'inputspec.color_table')]), - (ar1_wf, ar3_wf, [('outputspec.brainmask', 'inputspec.brainmask'), - ('outputspec.talairach', 'inputspec.transform'), - ('outputspec.orig', - 'inputspec.orig_mgz'), ('outputspec.rawavg', - 'inputspec.rawavg')]), - (ar2_wf, ar3_wf, - [('outputspec.aseg_presurf', 'inputspec.aseg_presurf'), - ('outputspec.brain_finalsurfs', - 'inputspec.brain_finalsurfs'), ('outputspec.wm', 'inputspec.wm'), - ('outputspec.filled', 'inputspec.filled'), ('outputspec.norm', - 'inputspec.norm')])]) - for hemi in ('lh', 'rh'): - reconall.connect([(ar2_wf, ar3_wf, - [('outputspec.{0}_inflated'.format(hemi), - 'inputspec.{0}_inflated'.format(hemi)), - ('outputspec.{0}_smoothwm'.format(hemi), - 'inputspec.{0}_smoothwm'.format(hemi)), - ('outputspec.{0}_white'.format(hemi), - 'inputspec.{0}_white'.format(hemi)), - ('outputspec.{0}_cortex'.format(hemi), - 'inputspec.{0}_cortex_label'.format(hemi)), - ('outputspec.{0}_area'.format(hemi), - 'inputspec.{0}_area'.format(hemi)), - ('outputspec.{0}_curv'.format(hemi), - 'inputspec.{0}_curv'.format(hemi)), - ('outputspec.{0}_sulc'.format(hemi), - 'inputspec.{0}_sulc'.format(hemi)), - ('outputspec.{0}_orig_nofix'.format(hemi), - 'inputspec.{0}_orig_nofix'.format(hemi)), - ('outputspec.{0}_orig'.format(hemi), - 'inputspec.{0}_orig'.format(hemi)), - ('outputspec.{0}_white_H'.format(hemi), - 'inputspec.{0}_white_H'.format(hemi)), - ('outputspec.{0}_white_K'.format(hemi), - 'inputspec.{0}_white_K'.format(hemi))])]) - - # Add more outputs to outputspec - outputs = ar1_outputs + ar2_outputs + ar3_outputs - outputspec = pe.Node( - niu.IdentityInterface(fields=outputs, mandatory_inputs=True), - name="outputspec") - - for outfields, wf in [(ar1_outputs, ar1_wf), (ar2_outputs, ar2_wf), - (ar3_outputs, ar3_wf)]: - for field in outfields: - reconall.connect([(wf, outputspec, [('outputspec.' + field, - field)])]) - - # PreDataSink: Switch Transforms to datasinked transfrom - # The transforms in the header files of orig.mgz, orig_nu.mgz, and nu.mgz - # are all reference a transform in the cache directory. 
We need to rewrite the - # headers to reference the datasinked transform - - # get the filepath to where the transform will be datasinked - def getDSTransformPath(subjects_dir, subject_id): - import os - transform = os.path.join(subjects_dir, subject_id, 'mri', 'transforms', - 'talairach.xfm') - return transform - - dstransform = pe.Node( - niu.Function(['subjects_dir', 'subject_id'], ['transform'], - getDSTransformPath), - name="PreDataSink_GetTransformPath") - reconall.connect([(inputspec, dstransform, - [('subjects_dir', 'subjects_dir'), ('subject_id', - 'subject_id')])]) - # add the data sink transfrom location to the headers - predatasink_orig = pe.Node(AddXFormToHeader(), name="PreDataSink_Orig") - predatasink_orig.inputs.copy_name = True - predatasink_orig.inputs.out_file = 'orig.mgz' - reconall.connect([(outputspec, predatasink_orig, [('orig', 'in_file')]), - (dstransform, predatasink_orig, [('transform', - 'transform')])]) - predatasink_orig_nu = pe.Node( - AddXFormToHeader(), name="PreDataSink_Orig_Nu") - predatasink_orig_nu.inputs.copy_name = True - predatasink_orig_nu.inputs.out_file = 'orig_nu.mgz' - reconall.connect( - [(outputspec, predatasink_orig_nu, [('orig_nu', 'in_file')]), - (dstransform, predatasink_orig_nu, [('transform', 'transform')])]) - predatasink_nu = pe.Node(AddXFormToHeader(), name="PreDataSink_Nu") - predatasink_nu.inputs.copy_name = True - predatasink_nu.inputs.out_file = 'nu.mgz' - reconall.connect([(outputspec, predatasink_nu, [('nu', 'in_file')]), - (dstransform, predatasink_nu, [('transform', - 'transform')])]) - - # Datasink outputs - datasink = pe.Node(DataSink(), name="DataSink") - datasink.inputs.parameterization = False - - reconall.connect([(inputspec, datasink, - [('subjects_dir', 'base_directory'), ('subject_id', - 'container')])]) - - # assign datasink inputs - reconall.connect([ - (predatasink_orig, datasink, [('out_file', 'mri.@orig')]), - (predatasink_orig_nu, datasink, [('out_file', 'mri.@orig_nu')]), - (predatasink_nu, datasink, [('out_file', 'mri.@nu')]), - (outputspec, datasink, [ - ('origvols', 'mri.orig'), - ('t2_raw', 'mri.orig.@t2raw'), - ('flair', 'mri.orig.@flair'), - ('rawavg', 'mri.@rawavg'), - ('talairach_auto', 'mri.transforms.@tal_auto'), - ('talairach', 'mri.transforms.@tal'), - ('t1', 'mri.@t1'), - ('brainmask_auto', 'mri.@brainmask_auto'), - ('brainmask', 'mri.@brainmask'), - ('braintemplate', 'mri.@braintemplate'), - ('tal_lta', 'mri.transforms.@tal_lta'), - ('norm', 'mri.@norm'), - ('ctrl_pts', 'mri.@ctrl_pts'), - ('tal_m3z', 'mri.transforms.@tal_m3z'), - ('nu_noneck', 'mri.@nu_noneck'), - ('talskull2', 'mri.transforms.@talskull2'), - ('aseg_noCC', 'mri.@aseg_noCC'), - ('cc_up', 'mri.transforms.@cc_up'), - ('aseg_auto', 'mri.@aseg_auto'), - ('aseg_presurf', 'mri.@aseg_presuf'), - ('brain', 'mri.@brain'), - ('brain_finalsurfs', 'mri.@brain_finalsurfs'), - ('wm_seg', 'mri.@wm_seg'), - ('wm_aseg', 'mri.@wm_aseg'), - ('wm', 'mri.@wm'), - ('filled', 'mri.@filled'), - ('ponscc_log', 'mri.@ponscc_log'), - ('lh_orig_nofix', 'surf.@lh_orig_nofix'), - ('lh_orig', 'surf.@lh_orig'), - ('lh_smoothwm_nofix', 'surf.@lh_smoothwm_nofix'), - ('lh_inflated_nofix', 'surf.@lh_inflated_nofix'), - ('lh_qsphere_nofix', 'surf.@lh_qsphere_nofix'), - ('lh_white', 'surf.@lh_white'), - ('lh_curv', 'surf.@lh_curv'), - ('lh_area', 'surf.@lh_area'), - ('lh_cortex', 'label.@lh_cortex'), - ('lh_smoothwm', 'surf.@lh_smoothwm'), - ('lh_sulc', 'surf.@lh_sulc'), - ('lh_inflated', 'surf.@lh_inflated'), - ('lh_white_H', 'surf.@lh_white_H'), - ('lh_white_K', 
'surf.@lh_white_K'), - ('lh_inflated_H', 'surf.@lh_inflated_H'), - ('lh_inflated_K', 'surf.@lh_inflated_K'), - ('lh_curv_stats', 'stats.@lh_curv_stats'), - ('rh_orig_nofix', 'surf.@rh_orig_nofix'), - ('rh_orig', 'surf.@rh_orig'), - ('rh_smoothwm_nofix', 'surf.@rh_smoothwm_nofix'), - ('rh_inflated_nofix', 'surf.@rh_inflated_nofix'), - ('rh_qsphere_nofix', 'surf.@rh_qsphere_nofix'), - ('rh_white', 'surf.@rh_white'), - ('rh_curv', 'surf.@rh_curv'), - ('rh_area', 'surf.@rh_area'), - ('rh_cortex', 'label.@rh_cortex'), - ('rh_smoothwm', 'surf.@rh_smoothwm'), - ('rh_sulc', 'surf.@rh_sulc'), - ('rh_inflated', 'surf.@rh_inflated'), - ('rh_white_H', 'surf.@rh_white_H'), - ('rh_white_K', 'surf.@rh_white_K'), - ('rh_inflated_H', 'surf.@rh_inflated_H'), - ('rh_inflated_K', 'surf.@rh_inflated_K'), - ('rh_curv_stats', 'stats.@rh_curv_stats'), - ('lh_aparc_annot_ctab', 'label.@aparc_annot_ctab'), - ('aseg', 'mri.@aseg'), - ('wmparc', 'mri.@wmparc'), - ('wmparc_stats', 'stats.@wmparc_stats'), - ('aseg_stats', 'stats.@aseg_stats'), - ('aparc_a2009s_aseg', 'mri.@aparc_a2009s_aseg'), - ('aparc_aseg', 'mri.@aparc_aseg'), - ('aseg_presurf_hypos', 'mri.@aseg_presurf_hypos'), - ('ribbon', 'mri.@ribbon'), - ('rh_ribbon', 'mri.@rh_ribbon'), - ('lh_ribbon', 'mri.@lh_ribbon'), - ('lh_sphere', 'surf.@lh_sphere'), - ('rh_sphere', 'surf.@rh_sphere'), - ('lh_sphere_reg', 'surf.@lh_sphere_reg'), - ('rh_sphere_reg', 'surf.@rh_sphere_reg'), - ('lh_jacobian_white', 'surf.@lh_jacobian_white'), - ('rh_jacobian_white', 'surf.@rh_jacobian_white'), - ('lh_avg_curv', 'surf.@lh_avg_curv'), - ('rh_avg_curv', 'surf.@rh_avg_curv'), - ('lh_aparc_annot', 'label.@lh_aparc_annot'), - ('rh_aparc_annot', 'label.@rh_aparc_annot'), - ('lh_area_pial', 'surf.@lh_area_pial'), - ('rh_area_pial', 'surf.@rh_area_pial'), - ('lh_curv_pial', 'surf.@lh_curv_pial'), - ('rh_curv_pial', 'surf.@rh_curv_pial'), - ('lh_pial', 'surf.@lh_pial'), - ('rh_pial', 'surf.@rh_pial'), - ('lh_thickness_pial', 'surf.@lh_thickness_pial'), - ('rh_thickness_pial', 'surf.@rh_thickness_pial'), - ('lh_area_mid', 'surf.@lh_area_mid'), - ('rh_area_mid', 'surf.@rh_area_mid'), - ('lh_volume', 'surf.@lh_volume'), - ('rh_volume', 'surf.@rh_volume'), - ('lh_aparc_annot_ctab', 'label.@lh_aparc_annot_ctab'), - ('rh_aparc_annot_ctab', 'label.@rh_aparc_annot_ctab'), - ('lh_aparc_stats', 'stats.@lh_aparc_stats'), - ('rh_aparc_stats', 'stats.@rh_aparc_stats'), - ('lh_aparc_pial_stats', 'stats.@lh_aparc_pial_stats'), - ('rh_aparc_pial_stats', 'stats.@rh_aparc_pial_stats'), - ('lh_aparc_a2009s_annot', 'label.@lh_aparc_a2009s_annot'), - ('rh_aparc_a2009s_annot', 'label.@rh_aparc_a2009s_annot'), - ('lh_aparc_a2009s_annot_ctab', - 'label.@lh_aparc_a2009s_annot_ctab'), - ('rh_aparc_a2009s_annot_ctab', - 'label.@rh_aparc_a2009s_annot_ctab'), - ('lh_aparc_a2009s_annot_stats', - 'stats.@lh_aparc_a2009s_annot_stats'), - ('rh_aparc_a2009s_annot_stats', - 'stats.@rh_aparc_a2009s_annot_stats'), - ('lh_aparc_DKTatlas40_annot', 'label.@lh_aparc_DKTatlas40_annot'), - ('rh_aparc_DKTatlas40_annot', 'label.@rh_aparc_DKTatlas40_annot'), - ('lh_aparc_DKTatlas40_annot_ctab', - 'label.@lh_aparc_DKTatlas40_annot_ctab'), - ('rh_aparc_DKTatlas40_annot_ctab', - 'label.@rh_aparc_DKTatlas40_annot_ctab'), - ('lh_aparc_DKTatlas40_annot_stats', - 'stats.@lh_aparc_DKTatlas40_annot_stats'), - ('rh_aparc_DKTatlas40_annot_stats', - 'stats.@rh_aparc_DKTatlas40_annot_stats'), - ('lh_wg_pct_mgh', 'surf.@lh_wg_pct_mgh'), - ('rh_wg_pct_mgh', 'surf.@rh_wg_pct_mgh'), - ('lh_wg_pct_stats', 'stats.@lh_wg_pct_stats'), - 
('rh_wg_pct_stats', 'stats.@rh_wg_pct_stats'), - ('lh_pctsurfcon_log', 'log.@lh_pctsurfcon_log'), - ('rh_pctsurfcon_log', 'log.@rh_pctsurfcon_log'), - ('lh_BAMaps_stats', 'stats.@lh_BAMaps_stats'), - ('lh_color', 'label.@lh_color'), - ('lh_thresh_BAMaps_stats', 'stats.@lh_thresh_BAMaps_stats'), - ('lh_thresh_color', 'label.@lh_thresh_color'), - ('rh_BAMaps_stats', 'stats.@rh_BAMaps_stats'), - ('rh_color', 'label.@rh_color'), - ('rh_thresh_BAMaps_stats', 'stats.@rh_thresh_BAMaps_stats'), - ('rh_thresh_color', 'label.@rh_thresh_color'), - ('lh_BAMaps_labels', 'label.@lh_BAMaps_labels'), - ('lh_thresh_BAMaps_labels', 'label.@lh_thresh_BAMaps_labels'), - ('rh_BAMaps_labels', 'label.@rh_BAMaps_labels'), - ('rh_thresh_BAMaps_labels', 'label.@rh_thresh_BAMaps_labels'), - ('lh_BAMaps_annotation', 'label.@lh_BAMaps_annotation'), - ('lh_thresh_BAMaps_annotation', - 'label.@lh_thresh_BAMaps_annotation'), - ('rh_BAMaps_annotation', 'label.@rh_BAMaps_annotation'), - ('rh_thresh_BAMaps_annotation', - 'label.@rh_thresh_BAMaps_annotation'), - ]), - ]) - - # compeltion node - # since recon-all outputs so many files a completion node is added - # that will output the subject_id once the workflow has completed - def completemethod(datasinked_files, subject_id): - print("recon-all has finished executing for subject: {0}".format( - subject_id)) - return subject_id - - completion = pe.Node( - niu.Function(['datasinked_files', 'subject_id'], ['subject_id'], - completemethod), - name="Completion") - - # create a special identity interface for outputing the subject_id - - postds_outputspec = pe.Node( - niu.IdentityInterface(['subject_id']), name="postdatasink_outputspec") - - reconall.connect( - [(datasink, completion, [('out_file', 'datasinked_files')]), - (inputspec, completion, [('subject_id', 'subject_id')]), - (completion, postds_outputspec, [('subject_id', 'subject_id')])]) - - return reconall diff --git a/nipype/workflows/smri/freesurfer/utils.py b/nipype/workflows/smri/freesurfer/utils.py deleted file mode 100644 index 40f1f205b6..0000000000 --- a/nipype/workflows/smri/freesurfer/utils.py +++ /dev/null @@ -1,498 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -from ....pipeline import engine as pe -from ....interfaces import fsl as fsl -from ....interfaces import freesurfer as fs -from ....interfaces import meshfix as mf -from ....interfaces import io as nio -from ....interfaces import utility as niu -from ....algorithms import misc as misc -from ....interfaces.utility import Function -from ....workflows.misc.utils import region_list_from_volume, id_list_from_lookup_table -import os - - -def get_aparc_aseg(files): - """Return the aparc+aseg.mgz file""" - for name in files: - if 'aparc+aseg' in name: - return name - raise ValueError('aparc+aseg.mgz not found') - - -def create_getmask_flow(name='getmask', dilate_mask=True): - """Registers a source file to freesurfer space and create a brain mask in - source space - - Requires fsl tools for initializing registration - - Parameters - ---------- - - name : string - name of workflow - dilate_mask : boolean - indicates whether to dilate mask or not - - Example - ------- - - >>> getmask = create_getmask_flow() - >>> getmask.inputs.inputspec.source_file = 'mean.nii' - >>> getmask.inputs.inputspec.subject_id = 's1' - >>> getmask.inputs.inputspec.subjects_dir = '.' 
- >>> getmask.inputs.inputspec.contrast_type = 't2' - - - Inputs:: - - inputspec.source_file : reference image for mask generation - inputspec.subject_id : freesurfer subject id - inputspec.subjects_dir : freesurfer subjects directory - inputspec.contrast_type : MR contrast of reference image - - Outputs:: - - outputspec.mask_file : binary mask file in reference image space - outputspec.reg_file : registration file that maps reference image to - freesurfer space - outputspec.reg_cost : cost of registration (useful for detecting misalignment) - """ - """ - Initialize the workflow - """ - - getmask = pe.Workflow(name=name) - """ - Define the inputs to the workflow. - """ - - inputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'source_file', 'subject_id', 'subjects_dir', 'contrast_type' - ]), - name='inputspec') - """ - Define all the nodes of the workflow: - - fssource: used to retrieve aseg.mgz - threshold : binarize aseg - register : coregister source file to freesurfer space - voltransform: convert binarized aseg to source file space - """ - - fssource = pe.Node(nio.FreeSurferSource(), name='fssource') - threshold = pe.Node(fs.Binarize(min=0.5, out_type='nii'), name='threshold') - register = pe.MapNode( - fs.BBRegister(init='fsl'), iterfield=['source_file'], name='register') - voltransform = pe.MapNode( - fs.ApplyVolTransform(inverse=True), - iterfield=['source_file', 'reg_file'], - name='transform') - """ - Connect the nodes - """ - - getmask.connect([(inputnode, fssource, [ - ('subject_id', 'subject_id'), ('subjects_dir', 'subjects_dir') - ]), (inputnode, register, - [('source_file', 'source_file'), ('subject_id', 'subject_id'), - ('subjects_dir', 'subjects_dir'), - ('contrast_type', 'contrast_type')]), (inputnode, voltransform, [ - ('subjects_dir', 'subjects_dir'), ('source_file', 'source_file') - ]), (fssource, threshold, [(('aparc_aseg', get_aparc_aseg), - 'in_file')]), - (register, voltransform, [('out_reg_file', 'reg_file')]), - (threshold, voltransform, [('binary_file', - 'target_file')])]) - """ - Add remaining nodes and connections - - dilate : dilate the transformed file in source space - threshold2 : binarize transformed file - """ - - threshold2 = pe.MapNode( - fs.Binarize(min=0.5, out_type='nii'), - iterfield=['in_file'], - name='threshold2') - if dilate_mask: - threshold2.inputs.dilate = 1 - getmask.connect([(voltransform, threshold2, [('transformed_file', - 'in_file')])]) - """ - Setup an outputnode that defines relevant inputs of the workflow. 
- """ - - outputnode = pe.Node( - niu.IdentityInterface(fields=["mask_file", "reg_file", "reg_cost"]), - name="outputspec") - getmask.connect([ - (register, outputnode, [("out_reg_file", "reg_file")]), - (register, outputnode, [("min_cost_file", "reg_cost")]), - (threshold2, outputnode, [("binary_file", "mask_file")]), - ]) - return getmask - - -def create_get_stats_flow(name='getstats', withreg=False): - """Retrieves stats from labels - - Parameters - ---------- - - name : string - name of workflow - withreg : boolean - indicates whether to register source to label - - Example - ------- - - - Inputs:: - - inputspec.source_file : reference image for mask generation - inputspec.label_file : label file from which to get ROIs - - (optionally with registration) - inputspec.reg_file : bbreg file (assumes reg from source to label - inputspec.inverse : boolean whether to invert the registration - inputspec.subjects_dir : freesurfer subjects directory - - Outputs:: - - outputspec.stats_file : stats file - """ - """ - Initialize the workflow - """ - - getstats = pe.Workflow(name=name) - """ - Define the inputs to the workflow. - """ - - if withreg: - inputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'source_file', 'label_file', 'reg_file', 'subjects_dir' - ]), - name='inputspec') - else: - inputnode = pe.Node( - niu.IdentityInterface(fields=['source_file', 'label_file']), - name='inputspec') - - statnode = pe.MapNode( - fs.SegStats(), - iterfield=['segmentation_file', 'in_file'], - name='segstats') - """ - Convert between source and label spaces if registration info is provided - - """ - if withreg: - voltransform = pe.MapNode( - fs.ApplyVolTransform(inverse=True), - iterfield=['source_file', 'reg_file'], - name='transform') - getstats.connect(inputnode, 'reg_file', voltransform, 'reg_file') - getstats.connect(inputnode, 'source_file', voltransform, 'source_file') - getstats.connect(inputnode, 'label_file', voltransform, 'target_file') - getstats.connect(inputnode, 'subjects_dir', voltransform, - 'subjects_dir') - - def switch_labels(inverse, transform_output, source_file, label_file): - if inverse: - return transform_output, source_file - else: - return label_file, transform_output - - chooser = pe.MapNode( - niu.Function( - input_names=[ - 'inverse', 'transform_output', 'source_file', 'label_file' - ], - output_names=['label_file', 'source_file'], - function=switch_labels), - iterfield=['transform_output', 'source_file'], - name='chooser') - getstats.connect(inputnode, 'source_file', chooser, 'source_file') - getstats.connect(inputnode, 'label_file', chooser, 'label_file') - getstats.connect(inputnode, 'inverse', chooser, 'inverse') - getstats.connect(voltransform, 'transformed_file', chooser, - 'transform_output') - getstats.connect(chooser, 'label_file', statnode, 'segmentation_file') - getstats.connect(chooser, 'source_file', statnode, 'in_file') - else: - getstats.connect(inputnode, 'label_file', statnode, - 'segmentation_file') - getstats.connect(inputnode, 'source_file', statnode, 'in_file') - """ - Setup an outputnode that defines relevant inputs of the workflow. 
- """ - - outputnode = pe.Node( - niu.IdentityInterface(fields=["stats_file"]), name="outputspec") - getstats.connect([ - (statnode, outputnode, [("summary_file", "stats_file")]), - ]) - return getstats - - -def create_tessellation_flow(name='tessellate', out_format='stl'): - """Tessellates the input subject's aseg.mgz volume and returns - the surfaces for each region in stereolithic (.stl) format - - Example - ------- - >>> from nipype.workflows.smri.freesurfer import create_tessellation_flow - >>> tessflow = create_tessellation_flow() - >>> tessflow.inputs.inputspec.subject_id = 'subj1' - >>> tessflow.inputs.inputspec.subjects_dir = '.' - >>> tessflow.inputs.inputspec.lookup_file = 'FreeSurferColorLUT.txt' # doctest: +SKIP - >>> tessflow.run() # doctest: +SKIP - - - Inputs:: - - inputspec.subject_id : freesurfer subject id - inputspec.subjects_dir : freesurfer subjects directory - inputspec.lookup_file : lookup file from freesurfer directory - - Outputs:: - - outputspec.meshes : output region meshes in (by default) stereolithographic (.stl) format - """ - """ - Initialize the workflow - """ - - tessflow = pe.Workflow(name=name) - """ - Define the inputs to the workflow. - """ - - inputnode = pe.Node( - niu.IdentityInterface( - fields=['subject_id', 'subjects_dir', 'lookup_file']), - name='inputspec') - """ - Define all the nodes of the workflow: - - fssource: used to retrieve aseg.mgz - mri_convert : converts aseg.mgz to aseg.nii - tessellate : tessellates regions in aseg.mgz - surfconvert : converts regions to stereolithographic (.stl) format - smoother: smooths the tessellated regions - - """ - - fssource = pe.Node(nio.FreeSurferSource(), name='fssource') - volconvert = pe.Node(fs.MRIConvert(out_type='nii'), name='volconvert') - tessellate = pe.MapNode( - fs.MRIMarchingCubes(), - iterfield=['label_value', 'out_file'], - name='tessellate') - surfconvert = pe.MapNode( - fs.MRIsConvert(out_datatype='stl'), - iterfield=['in_file'], - name='surfconvert') - smoother = pe.MapNode( - mf.MeshFix(), iterfield=['in_file1'], name='smoother') - if out_format == 'gii': - stl_to_gifti = pe.MapNode( - fs.MRIsConvert(out_datatype=out_format), - iterfield=['in_file'], - name='stl_to_gifti') - smoother.inputs.save_as_stl = True - smoother.inputs.laplacian_smoothing_steps = 1 - - region_list_from_volume_interface = Function( - input_names=["in_file"], - output_names=["region_list"], - function=region_list_from_volume) - - id_list_from_lookup_table_interface = Function( - input_names=["lookup_file", "region_list"], - output_names=["id_list"], - function=id_list_from_lookup_table) - - region_list_from_volume_node = pe.Node( - interface=region_list_from_volume_interface, - name='region_list_from_volume_node') - id_list_from_lookup_table_node = pe.Node( - interface=id_list_from_lookup_table_interface, - name='id_list_from_lookup_table_node') - """ - Connect the nodes - """ - - tessflow.connect([ - (inputnode, fssource, [('subject_id', 'subject_id'), - ('subjects_dir', 'subjects_dir')]), - (fssource, volconvert, [('aseg', 'in_file')]), - (volconvert, region_list_from_volume_node, [('out_file', 'in_file')]), - (region_list_from_volume_node, tessellate, [('region_list', - 'label_value')]), - (region_list_from_volume_node, id_list_from_lookup_table_node, - [('region_list', 'region_list')]), - (inputnode, id_list_from_lookup_table_node, [('lookup_file', - 'lookup_file')]), - (id_list_from_lookup_table_node, tessellate, [('id_list', - 'out_file')]), - (fssource, tessellate, [('aseg', 'in_file')]), - (tessellate, 
surfconvert, [('surface', 'in_file')]), - (surfconvert, smoother, [('converted', 'in_file1')]), - ]) - """ - Setup an outputnode that defines relevant inputs of the workflow. - """ - - outputnode = pe.Node( - niu.IdentityInterface(fields=["meshes"]), name="outputspec") - - if out_format == 'gii': - tessflow.connect([ - (smoother, stl_to_gifti, [("mesh_file", "in_file")]), - ]) - tessflow.connect([ - (stl_to_gifti, outputnode, [("converted", "meshes")]), - ]) - else: - tessflow.connect([ - (smoother, outputnode, [("mesh_file", "meshes")]), - ]) - return tessflow - - -def copy_files(in_files, out_files): - """ - Create a function to copy a file that can be modified by a following node - without changing the original file - """ - import shutil - import sys - if len(in_files) != len(out_files): - print( - "ERROR: Length of input files must be identical to the length of " - + "outrput files to be copied") - sys.exit(-1) - for i, in_file in enumerate(in_files): - out_file = out_files[i] - print("copying {0} to {1}".format(in_file, out_file)) - shutil.copy(in_file, out_file) - return out_files - - -def copy_file(in_file, out_file=None): - """ - Create a function to copy a file that can be modified by a following node - without changing the original file. - """ - import os - import shutil - if out_file is None: - out_file = os.path.join(os.getcwd(), os.path.basename(in_file)) - if type(in_file) is list and len(in_file) == 1: - in_file = in_file[0] - out_file = os.path.abspath(out_file) - in_file = os.path.abspath(in_file) - print("copying {0} to {1}".format(in_file, out_file)) - shutil.copy(in_file, out_file) - return out_file - - -def mkdir_p(path): - import errno - import os - try: - os.makedirs(path) - except OSError as exc: # Python >2.5 - if exc.errno == errno.EEXIST and os.path.isdir(path): - pass - else: - raise - - -def getdefaultconfig(exitonfail=False, rb_date="2014-08-21"): - config = { - 'custom_atlas': None, - 'cw256': False, - 'field_strength': '1.5T', - 'fs_home': checkenv(exitonfail), - 'longitudinal': False, - 'long_base': None, - 'openmp': None, - 'plugin_args': None, - 'qcache': False, - 'queue': None, - 'recoding_file': None, - 'src_subject_id': 'fsaverage', - 'th3': True - } - - config['src_subject_dir'] = os.path.join(config['fs_home'], 'subjects', - config['src_subject_id']) - config['awk_file'] = os.path.join(config['fs_home'], 'bin', - 'extract_talairach_avi_QA.awk') - config['registration_template'] = os.path.join( - config['fs_home'], 'average', 'RB_all_{0}.gca'.format(rb_date)) - config['registration_template_withskull'] = os.path.join( - config['fs_home'], 'average', - 'RB_all_withskull_{0}.gca'.format(rb_date)) - for hemi in ('lh', 'rh'): - config['{0}_atlas'.format(hemi)] = os.path.join( - config['fs_home'], 'average', - '{0}.average.curvature.filled.buckner40.tif'.format(hemi)) - config['{0}_classifier'.format(hemi)] = os.path.join( - config['fs_home'], 'average', - '{0}.curvature.buckner40.filled.desikan_killiany.2010-03-25.gcs'. 
- format(hemi)) - config['{0}_classifier2'.format(hemi)] = os.path.join( - config['fs_home'], 'average', - '{0}.destrieux.simple.2009-07-29.gcs'.format(hemi)) - config['{0}_classifier3'.format(hemi)] = os.path.join( - config['fs_home'], 'average', '{0}.DKTatlas40.gcs'.format(hemi)) - config['LookUpTable'] = os.path.join(config['fs_home'], 'ASegStatsLUT.txt') - config['WMLookUpTable'] = os.path.join(config['fs_home'], - 'WMParcStatsLUT.txt') - config['AvgColorTable'] = os.path.join(config['fs_home'], 'average', - 'colortable_BA.txt') - - return config - - -def checkenv(exitonfail=False): - """Check for the necessary FS environment variables""" - import sys - fs_home = os.environ.get('FREESURFER_HOME') - path = os.environ.get('PATH') - print("FREESURFER_HOME: {0}".format(fs_home)) - if fs_home is None: - msg = "please set FREESURFER_HOME before running the workflow" - elif not os.path.isdir(fs_home): - msg = "FREESURFER_HOME must be set to a valid directory before running this workflow" - elif os.path.join(fs_home, 'bin') not in path.replace('//', '/'): - print(path) - msg = "Could not find necessary executable in path" - setupscript = os.path.join(fs_home, 'SetUpFreeSurfer.sh') - if os.path.isfile(setupscript): - print("Please source the setup script before running the workflow:" - + "\nsource {0}".format(setupscript)) - else: - print( - "Please ensure that FREESURFER_HOME is set to a valid fs " + - "directory and source the necessary SetUpFreeSurfer.sh script before running " - + "this workflow") - else: - return fs_home - - if exitonfail: - print("ERROR: " + msg) - sys.exit(2) - else: - print("Warning: " + msg) diff --git a/nipype/workflows/smri/niftyreg/__init__.py b/nipype/workflows/smri/niftyreg/__init__.py deleted file mode 100644 index b9d0c9c85b..0000000000 --- a/nipype/workflows/smri/niftyreg/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: - -from .groupwise import (create_groupwise_average, create_nonlinear_gw_step, - create_linear_gw_step) diff --git a/nipype/workflows/smri/niftyreg/groupwise.py b/nipype/workflows/smri/niftyreg/groupwise.py deleted file mode 100644 index fd8d25541b..0000000000 --- a/nipype/workflows/smri/niftyreg/groupwise.py +++ /dev/null @@ -1,384 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -""" -Example of registration workflows using niftyreg, useful for a variety of -pipelines. Including linear and non-linear image co-registration -""" - -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import str, range -from ....interfaces import utility as niu -from ....interfaces import niftyreg as niftyreg -from ....pipeline import engine as pe - - -def create_linear_gw_step(name="linear_gw_niftyreg", - demean=True, - linear_options_hash=None, - use_mask=False, - verbose=False): - """ - Creates a workflow that performs linear co-registration of a set of images - using RegAladin, producing an average image and a set of affine - transformation matrices linking each of the floating images to the average. 
- - Inputs:: - - inputspec.in_files - The input files to be registered - inputspec.ref_file - The initial reference image that the input files - are registered to - inputspec.rmask_file - Mask of the reference image - inputspec.in_aff_files - Initial affine transformation files - - Outputs:: - - outputspec.average_image - The average image - outputspec.aff_files - The affine transformation files - - Optional arguments:: - - linear_options_hash - An options dictionary containing a list of - parameters for RegAladin that take - the same form as given in the interface (default None) - demean - Selects whether to demean the transformation matrices when - performing the averaging (default True) - initial_affines - Selects whether to iterate over initial affine - images, which we generally won't have (default False) - - Example - ------- - - >>> from nipype.workflows.smri.niftyreg import create_linear_gw_step - >>> lgw = create_linear_gw_step('my_linear_coreg') # doctest: +SKIP - >>> lgw.inputs.inputspec.in_files = [ - ... 'file1.nii.gz', 'file2.nii.gz'] # doctest: +SKIP - >>> lgw.inputs.inputspec.ref_file = ['ref.nii.gz'] # doctest: +SKIP - >>> lgw.run() # doctest: +SKIP - - """ - # Create the sub workflow - workflow = pe.Workflow(name=name) - workflow.base_output_dir = name - - # We need to create an input node for the workflow - inputnode = pe.Node( - niu.IdentityInterface(fields=['in_files', 'ref_file', 'rmask_file']), - name='inputspec') - - if linear_options_hash is None: - linear_options_hash = dict() - - # Rigidly register each of the images to the average - lin_reg = pe.MapNode( - interface=niftyreg.RegAladin(**linear_options_hash), - name="lin_reg", - iterfield=['flo_file']) - - if verbose is False: - lin_reg.inputs.verbosity_off_flag = True - - # Average the images - ave_ims = pe.Node(interface=niftyreg.RegAverage(), name="ave_ims") - - # We have a new average image and the affine - # transformations, which are returned as an output node. - outputnode = pe.Node( - niu.IdentityInterface(fields=['average_image', 'trans_files']), - name='outputspec') - - # Connect the inputs to the lin_reg node - workflow.connect([(inputnode, lin_reg, [('ref_file', 'ref_file')]), - (inputnode, lin_reg, [('in_files', 'flo_file')])]) - if use_mask: - workflow.connect(inputnode, 'rmask_file', lin_reg, 'rmask_file') - - if demean: - workflow.connect([(inputnode, ave_ims, [('ref_file', - 'demean1_ref_file')]), - (lin_reg, ave_ims, [('avg_output', 'warp_files')])]) - else: - workflow.connect(lin_reg, 'res_file', ave_ims, 'avg_files') - - # Connect up the output node - workflow.connect([(lin_reg, outputnode, [('aff_file', 'trans_files')]), - (ave_ims, outputnode, [('out_file', 'average_image')])]) - - return workflow - - -def create_nonlinear_gw_step(name="nonlinear_gw_niftyreg", - demean=True, - nonlinear_options_hash=None, - initial_affines=False, - use_mask=False, - verbose=False): - """ - Creates a workflow that perform non-linear co-registrations of a set of - images using RegF3d, producing an non-linear average image and a set of - cpp transformation linking each of the floating images to the average. 
- - Inputs:: - - inputspec.in_files - The input files to be registered - inputspec.ref_file - The initial reference image that the input files - are registered to - inputspec.rmask_file - Mask of the reference image - inputspec.in_trans_files - Initial transformation files (affine or - cpps) - - Outputs:: - - outputspec.average_image - The average image - outputspec.cpp_files - The bspline transformation files - - Optional arguments:: - - nonlinear_options_hash - An options dictionary containing a list of - parameters for RegAladin that take the - same form as given in the interface (default None) - initial_affines - Selects whether to iterate over initial affine - images, which we generally won't have (default False) - - Example - ------- - >>> from nipype.workflows.smri.niftyreg import create_nonlinear_gw_step - >>> nlc = create_nonlinear_gw_step('nonlinear_coreg') # doctest: +SKIP - >>> nlc.inputs.inputspec.in_files = [ - ... 'file1.nii.gz', 'file2.nii.gz'] # doctest: +SKIP - >>> nlc.inputs.inputspec.ref_file = ['ref.nii.gz'] # doctest: +SKIP - >>> nlc.run() # doctest: +SKIP - - """ - - # Create the workflow - workflow = pe.Workflow(name=name) - workflow.base_output_dir = name - - # We need to create an input node for the workflow - inputnode = pe.Node( - niu.IdentityInterface( - fields=['in_files', 'ref_file', 'rmask_file', 'input_aff_files']), - name='inputspec') - - if nonlinear_options_hash is None: - nonlinear_options_hash = dict() - - # non-rigidly register each of the images to the average - # flo_file can take a list of files - # Need to be able to iterate over input affine files, but what about the - # cases where we have no input affine files? - # Passing empty strings are not valid filenames, and undefined fields can - # not be iterated over. - # Current simple solution, as this is not generally required, is to use a - # flag which specifies wherther to iterate - if initial_affines: - nonlin_reg = pe.MapNode( - interface=niftyreg.RegF3D(**nonlinear_options_hash), - name="nonlin_reg", - iterfield=['flo_file', 'aff_file']) - else: - nonlin_reg = pe.MapNode( - interface=niftyreg.RegF3D(**nonlinear_options_hash), - name="nonlin_reg", - iterfield=['flo_file']) - - if verbose is False: - nonlin_reg.inputs.verbosity_off_flag = True - - # Average the images - ave_ims = pe.Node(interface=niftyreg.RegAverage(), name="ave_ims") - - # We have a new centered average image, the resampled original images and - # the affine transformations, which are returned as an output node. 
- outputnode = pe.Node( - niu.IdentityInterface(fields=['average_image', 'trans_files']), - name='outputspec') - - # Connect the inputs to the lin_reg node, which is split over in_files - workflow.connect([(inputnode, nonlin_reg, [('in_files', 'flo_file')]), - (inputnode, nonlin_reg, [('ref_file', 'ref_file')])]) - - if use_mask: - workflow.connect(inputnode, 'rmask_file', nonlin_reg, 'rmask_file') - - # If we have initial affine transforms, we need to connect them in - if initial_affines: - workflow.connect(inputnode, 'input_aff_files', nonlin_reg, 'aff_file') - - if demean: - if 'vel_flag' in list(nonlinear_options_hash.keys()) and \ - nonlinear_options_hash['vel_flag'] is True and \ - initial_affines: - workflow.connect(inputnode, 'ref_file', ave_ims, - 'demean3_ref_file') - else: - workflow.connect(inputnode, 'ref_file', ave_ims, - 'demean2_ref_file') - workflow.connect(nonlin_reg, 'avg_output', ave_ims, 'warp_files') - else: - workflow.connect(nonlin_reg, 'res_file', ave_ims, 'avg_files') - - # Connect up the output node - workflow.connect([(nonlin_reg, outputnode, [('cpp_file', 'trans_files')]), - (ave_ims, outputnode, [('out_file', 'average_image')])]) - - return workflow - - -# Creates an atlas image by iterative registration. An initial reference image -# can be provided, otherwise one will be made. -def create_groupwise_average(name="atlas_creation", - itr_rigid=3, - itr_affine=3, - itr_non_lin=5, - linear_options_hash=None, - nonlinear_options_hash=None, - use_mask=False, - verbose=False): - """ - Create the overall workflow that embeds all the rigid, affine and - non-linear components. - - Inputs:: - - inputspec.in_files - The input files to be registered - inputspec.ref_file - The initial reference image that the input files - are registered to - inputspec.rmask_file - Mask of the reference image - inputspec.in_trans_files - Initial transformation files (affine or - cpps) - - Outputs:: - - outputspec.average_image - The average image - outputspec.cpp_files - The bspline transformation files - - - Example - ------- - - >>> from nipype.workflows.smri.niftyreg import create_groupwise_average - >>> node = create_groupwise_average('groupwise_av') # doctest: +SKIP - >>> node.inputs.inputspec.in_files = [ - ... 
'file1.nii.gz', 'file2.nii.gz'] # doctest: +SKIP - >>> node.inputs.inputspec.ref_file = ['ref.nii.gz'] # doctest: +SKIP - >>> node.inputs.inputspec.rmask_file = ['mask.nii.gz'] # doctest: +SKIP - >>> node.run() # doctest: +SKIP - - """ - # Create workflow - workflow = pe.Workflow(name=name) - - if linear_options_hash is None: - linear_options_hash = dict() - - if nonlinear_options_hash is None: - nonlinear_options_hash = dict() - - # Create the input and output node - inputnode = pe.Node( - niu.IdentityInterface(fields=['in_files', 'ref_file', 'rmask_file']), - name='inputspec') - - outputnode = pe.Node( - niu.IdentityInterface(fields=['average_image', 'trans_files']), - name='outputspec') - - # Create lists to store the rigid, affine and non-linear sub-workflow - lin_workflows = [] - nonlin_workflows = [] - - # Create the linear groupwise registration sub-workflows - for i in range(itr_rigid + itr_affine): - # Define is the sub-workflow is rigid or affine - if i >= itr_rigid: - linear_options_hash['rig_only_flag'] = False - else: - linear_options_hash['rig_only_flag'] = True - - # Define if the average image should be demean to ensure we have a - # barycenter - if (i < itr_rigid) or (i == (itr_rigid + itr_affine - 1)): - demean_arg = False - else: - demean_arg = True - - # Create the rigid or affine sub-workflow and add it to the relevant - # list - wf = create_linear_gw_step( - name='lin_reg' + str(i), - linear_options_hash=linear_options_hash, - demean=demean_arg, - verbose=verbose) - lin_workflows.append(wf) - - # Connect up the input data to the workflow - workflow.connect(inputnode, 'in_files', wf, 'inputspec.in_files') - if use_mask: - workflow.connect(inputnode, 'rmask_file', wf, - 'inputspec.rmask_file') - # If it exist, connect the previous workflow to the current one - if i == 0: - workflow.connect(inputnode, 'ref_file', wf, 'inputspec.ref_file') - else: - workflow.connect(lin_workflows[i - 1], 'outputspec.average_image', - wf, 'inputspec.ref_file') - - demean_arg = True - - # Create the nonlinear groupwise registration sub-workflows - for i in range(itr_non_lin): - - if len(lin_workflows) > 0: - initial_affines_arg = True - if i == (itr_non_lin - 1): - demean_arg = False - - wf = create_nonlinear_gw_step( - name='nonlin' + str(i), - demean=demean_arg, - initial_affines=initial_affines_arg, - nonlinear_options_hash=nonlinear_options_hash, - verbose=verbose) - - # Connect up the input data to the workflows - workflow.connect(inputnode, 'in_files', wf, 'inputspec.in_files') - if use_mask: - workflow.connect(inputnode, 'rmask_file', wf, - 'inputspec.rmask_file') - - if initial_affines_arg: - # Take the final linear registration results and use them to - # initialise the NR - workflow.connect(lin_workflows[-1], 'outputspec.trans_files', wf, - 'inputspec.input_aff_files') - - if i == 0: - if len(lin_workflows) > 0: - workflow.connect(lin_workflows[-1], 'outputspec.average_image', - wf, 'inputspec.ref_file') - else: - workflow.connect(inputnode, 'ref_file', wf, - 'inputspec.ref_file') - else: - workflow.connect(nonlin_workflows[i - 1], - 'outputspec.average_image', wf, - 'inputspec.ref_file') - - nonlin_workflows.append(wf) - - # Set up the last workflow - lw = None - if len(nonlin_workflows) > 0: - lw = nonlin_workflows[-1] - elif len(lin_workflows) > 0: - lw = lin_workflows[-1] - - # Connect the data to return - workflow.connect( - [(lw, outputnode, [('outputspec.average_image', 'average_image')]), - (lw, outputnode, [('outputspec.trans_files', 'trans_files')])]) - - return workflow 
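The groupwise atlas-creation function deleted above is the composite entry point for the linear and non-linear steps defined earlier in this file. A minimal usage sketch, assuming the niflow-nipype1-workflows package added to the Docker image in PATCH 2/3 below keeps the same module layout under niflow.nipype1.workflows as the tree removed here; the iteration defaults and inputspec field names are taken from the deleted code, and the image file names are placeholders:

    from niflow.nipype1.workflows.smri.niftyreg import create_groupwise_average

    # Three rigid, three affine and five non-linear passes (the removed defaults);
    # each pass re-registers the inputs to the average produced by the previous one.
    gw = create_groupwise_average(name='groupwise_av',
                                  itr_rigid=3, itr_affine=3, itr_non_lin=5)
    gw.inputs.inputspec.in_files = ['file1.nii.gz', 'file2.nii.gz']
    gw.inputs.inputspec.ref_file = ['ref.nii.gz']
    gw.inputs.inputspec.rmask_file = ['mask.nii.gz']
    # gw.run()  # requires the niftyreg binaries (reg_aladin, reg_f3d, reg_average) on PATH

Note that the demean flag in the loop above is switched off for the rigid passes and for the final affine and final non-linear passes.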
diff --git a/nipype/workflows/warp/__init__.py b/nipype/workflows/warp/__init__.py
deleted file mode 100644
index 40a96afc6f..0000000000
--- a/nipype/workflows/warp/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# -*- coding: utf-8 -*-

From e41ca1ad63a3045c2557b4b2234dbe527b87fcfd Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Mon, 7 Oct 2019 13:01:17 -0400
Subject: [PATCH 2/3] ENH: Add niflow-nipype1-workflows to Docker file

---
 docker/generate_dockerfiles.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/generate_dockerfiles.sh b/docker/generate_dockerfiles.sh
index 44173ee009..9c389873fa 100755
--- a/docker/generate_dockerfiles.sh
+++ b/docker/generate_dockerfiles.sh
@@ -94,7 +94,7 @@ function generate_main_dockerfile() {
               conda_install='python=${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}
                              libxml2 libxslt matplotlib mkl "numpy!=1.16.0" paramiko
                              pandas psutil scikit-learn scipy traits' \
-              pip_install="pytest-xdist" \
+              pip_install="pytest-xdist niflow-nipype1-workflows" \
               activate=true \
     --copy docker/files/run_builddocs.sh docker/files/run_examples.sh \
            docker/files/run_pytests.sh nipype/external/fsl_imglob.py /usr/bin/ \

From 5c9e5711180dc327de28c0b48256f8a26551b100 Mon Sep 17 00:00:00 2001
From: "Christopher J. Markiewicz"
Date: Mon, 7 Oct 2019 14:19:09 -0400
Subject: [PATCH 3/3] CI: Install rdflib earlier to ensure neurdflib goes later

---
 docker/generate_dockerfiles.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/generate_dockerfiles.sh b/docker/generate_dockerfiles.sh
index 9c389873fa..d6d880bfc5 100755
--- a/docker/generate_dockerfiles.sh
+++ b/docker/generate_dockerfiles.sh
@@ -93,7 +93,7 @@ function generate_main_dockerfile() {
   --miniconda create_env=neuro \
               conda_install='python=${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}
                              libxml2 libxslt matplotlib mkl "numpy!=1.16.0" paramiko
-                             pandas psutil scikit-learn scipy traits' \
+                             pandas psutil scikit-learn scipy traits rdflib' \
               pip_install="pytest-xdist niflow-nipype1-workflows" \
               activate=true \
     --copy docker/files/run_builddocs.sh docker/files/run_examples.sh \