From 1c98d4df6323d64c7cd2714baa396a678d27605d Mon Sep 17 00:00:00 2001 From: dPys Date: Fri, 3 Jan 2020 15:06:48 -0600 Subject: [PATCH 01/48] [BUG] immunize shutil.rmtree to node non-existence for remove_node_directories=True in the case that stop_on_first_crash=False --- nipype/pipeline/plugins/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index 4be8eb232b..599db29418 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -461,7 +461,7 @@ def _remove_node_dirs(self): ) % (self.procs[idx]._id, outdir) ) - shutil.rmtree(outdir) + shutil.rmtree(outdir, ignore_errors=True) class SGELikeBatchManagerBase(DistributedPluginBase): From 28b8b8a00e9134d665d88cbd2f816d56bf62cf7a Mon Sep 17 00:00:00 2001 From: oesteban Date: Fri, 20 Dec 2019 11:20:06 -0800 Subject: [PATCH 02/48] DOC: Deduplicate code for Sphinx's APIdoc generation The ``InterfaceHelpWriter`` class was practically a clone of ``ApiDocWriter``. Both have been merged into one single module, having the Interface helper inherit from the ApiDocWriter. --- tools/apigen.py | 395 +++++++++++++++++++++---- tools/build_interface_docs.py | 42 +-- tools/interfacedocgen.py | 529 ---------------------------------- 3 files changed, 366 insertions(+), 600 deletions(-) delete mode 100644 tools/interfacedocgen.py diff --git a/tools/apigen.py b/tools/apigen.py index e3198664f3..19e47b5c20 100644 --- a/tools/apigen.py +++ b/tools/apigen.py @@ -21,25 +21,59 @@ project. """ import os +import sys import re +import tempfile +import warnings + +from nipype.interfaces.base import BaseInterface +from nipype.pipeline.engine import Workflow +from nipype.utils.misc import trim + +from github import get_file_url + +RST_SECTION_LEVELS = ("*", "=", "-", "~", "^") + +RST_CLASS_BLOCK = """ +.. _{uri}.{cls}: + +.. index:: {cls} + +{cls} +{underline} +`Link to code <{code_url}>`__ + +{body} +""" + +RST_FUNC_BLOCK = """ +.. _{uri}.{name}: + +:func:`{name}` +{underline} +`Link to code <{code_url}>`__ + +{body} + +""" # Functions and classes class ApiDocWriter(object): - """ Class for automatic detection and parsing of API docs - to Sphinx-parsable reST format""" + """Write reST documents for API docs.""" # only separating first two levels - rst_section_levels = ["*", "=", "-", "~", "^"] + rst_section_levels = RST_SECTION_LEVELS def __init__( self, package_name, rst_extension=".rst", - package_skip_patterns=None, - module_skip_patterns=None, + package_skip_patterns=(r"\.tests$",), + module_skip_patterns=(r"\.setup$", r"\._"), ): - """ Initialize package for parsing + r""" + Initialize package for parsing. Parameters ---------- @@ -55,7 +89,7 @@ def __init__( if *package_name* is ``sphinx``, then ``sphinx.util`` will result in ``.util`` being passed for earching by these regexps. If is None, gives default. Default is: - ['\.tests$'] + ``('\.tests$', )``. module_skip_patterns : None or sequence Sequence of strings giving URIs of modules to be excluded Operates on the module name including preceding URI path, @@ -63,22 +97,24 @@ def __init__( ``sphinx.util.console`` results in the string to search of ``.util.console`` If is None, gives default. Default is: - ['\.setup$', '\._'] + ``('\.setup$', '\._')``. 
+ """ - if package_skip_patterns is None: - package_skip_patterns = ["\\.tests$"] - if module_skip_patterns is None: - module_skip_patterns = ["\\.setup$", "\\._"] - self.package_name = package_name + self._skip_patterns = {} self.rst_extension = rst_extension + self.package_name = package_name self.package_skip_patterns = package_skip_patterns self.module_skip_patterns = module_skip_patterns - def get_package_name(self): + @property + def package_name(self): + """Get package name.""" return self._package_name - def set_package_name(self, package_name): - """ Set package_name + @package_name.setter + def package_name(self, name): + """ + Set package_name. >>> docwriter = ApiDocWriter('sphinx') >>> import sphinx @@ -88,19 +124,36 @@ def set_package_name(self, package_name): >>> import docutils >>> docwriter.root_path == docutils.__path__[0] True + """ # It's also possible to imagine caching the module parsing here - self._package_name = package_name - self.root_module = __import__(package_name) + self._package_name = name + self.root_module = __import__(name) self.root_path = self.root_module.__path__[0] self.written_modules = None - package_name = property( - get_package_name, set_package_name, None, "get/set package_name" - ) + @property + def package_skip_patterns(self): + """Get package skip patterns.""" + return self._skip_patterns['package'] + + @package_skip_patterns.setter + def package_skip_patterns(self, pattern): + self._skip_patterns['package'] = _parse_patterns(pattern) + + @property + def module_skip_patterns(self): + """Get module skip patterns.""" + return self._skip_patterns['module'] + + @module_skip_patterns.setter + def module_skip_patterns(self, pattern): + self._skip_patterns['module'] = _parse_patterns(pattern) def _get_object_name(self, line): - """ Get second token in line + """ + Get second token in line. + >>> docwriter = ApiDocWriter('sphinx') >>> docwriter._get_object_name(" def func(): ") u'func' @@ -115,7 +168,8 @@ def _get_object_name(self, line): return name.rstrip(":") def _uri2path(self, uri): - """ Convert uri to absolute filepath + """ + Convert uri to absolute filepath. Parameters ---------- @@ -157,25 +211,25 @@ def _uri2path(self, uri): return path def _path2uri(self, dirpath): - """ Convert directory path to uri """ + """Convert directory path to uri.""" relpath = dirpath.replace(self.root_path, self.package_name) if relpath.startswith(os.path.sep): relpath = relpath[1:] return relpath.replace(os.path.sep, ".") def _parse_module(self, uri): - """ Parse module defined in *uri* """ + """Parse module defined in ``uri``.""" filename = self._uri2path(uri) if filename is None: # nothing that we could handle here. return ([], []) f = open(filename, "rt") - functions, classes = self._parse_lines(f) + functions, classes = self._parse_lines(f, uri) f.close() return functions, classes - def _parse_lines(self, linesource): - """ Parse lines of text for functions and classes """ + def _parse_lines(self, linesource, module=None): + """Parse lines of text for functions and classes.""" functions = [] classes = [] for line in linesource: @@ -196,7 +250,8 @@ def _parse_lines(self, linesource): return functions, classes def generate_api_doc(self, uri): - """Make autodoc documentation template string for a module + """ + Make autodoc documentation template string for a module. 
Parameters ---------- @@ -207,6 +262,7 @@ def generate_api_doc(self, uri): ------- S : string Contents of API doc + """ # get the names of all classes and functions functions, classes = self._parse_module(uri) @@ -272,7 +328,8 @@ def generate_api_doc(self, uri): return ad def _survives_exclude(self, matchstr, match_type): - """ Returns True if *matchstr* does not match patterns + r""" + Return ``True`` if ``matchstr`` does not match patterns. ``self.package_name`` removed from front of string if present @@ -281,41 +338,38 @@ def _survives_exclude(self, matchstr, match_type): >>> dw = ApiDocWriter('sphinx') >>> dw._survives_exclude('sphinx.okpkg', 'package') True - >>> dw.package_skip_patterns.append('^\\.badpkg$') + >>> dw.package_skip_patterns.append(r'^\.badpkg$') >>> dw._survives_exclude('sphinx.badpkg', 'package') False >>> dw._survives_exclude('sphinx.badpkg', 'module') True >>> dw._survives_exclude('sphinx.badmod', 'module') True - >>> dw.module_skip_patterns.append('^\\.badmod$') + >>> dw.module_skip_patterns.append(r'^\.badmod$') >>> dw._survives_exclude('sphinx.badmod', 'module') False + """ - if match_type == "module": - patterns = self.module_skip_patterns - elif match_type == "package": - patterns = self.package_skip_patterns - else: + patterns = self._skip_patterns.get(match_type) + if patterns is None: raise ValueError('Cannot interpret match type "%s"' % match_type) + # Match to URI without package name L = len(self.package_name) if matchstr[:L] == self.package_name: matchstr = matchstr[L:] for pat in patterns: - # print (pat, matchstr, match_type) #dbg try: pat.search except AttributeError: pat = re.compile(pat) - # print (pat.search(matchstr)) #dbg if pat.search(matchstr): return False return True - def discover_modules(self): - """ Return module sequence discovered from ``self.package_name`` - + def discover_modules(self, empty_start=True): + r""" + Return module sequence discovered from ``self.package_name``. Parameters ---------- @@ -336,8 +390,9 @@ def discover_modules(self): >>> 'sphinx.util' in dw.discover_modules() False >>> + """ - modules = [] + modules = [] if empty_start else [self.package_name] # raw directory parsing for dirpath, dirnames, filenames in os.walk(self.root_path): # Check directory names for packages @@ -358,11 +413,10 @@ def discover_modules(self): module_uri, "module" ): modules.append(module_uri) - # print sorted(modules) #dbg return sorted(modules) def write_modules_api(self, modules, outdir): - # write the list + """Generate the list of modules.""" written_modules = [] for m in modules: api_str = self.generate_api_doc(m) @@ -377,7 +431,8 @@ def write_modules_api(self, modules, outdir): self.written_modules = written_modules def write_api_docs(self, outdir): - """Generate API reST files. + """ + Generate API reST files. Parameters ---------- @@ -391,7 +446,8 @@ def write_api_docs(self, outdir): Notes ----- - Sets self.written_modules to list of written modules + Sets ``self.written_modules`` to list of written modules + """ if not os.path.exists(outdir): os.mkdir(outdir) @@ -399,8 +455,10 @@ def write_api_docs(self, outdir): modules = self.discover_modules() self.write_modules_api(modules, outdir) - def write_index(self, outdir, froot="gen", relative_to=None): - """Make a reST API index file from written files + def write_index(self, outdir, froot="gen", relative_to=None, + maxdepth=None): + """ + Make a reST API index file from written files. 
Parameters ---------- @@ -416,6 +474,7 @@ def write_index(self, outdir, froot="gen", relative_to=None): component of the written file path will be removed from outdir, in the generated index. Default is None, meaning, leave path as it is. + """ if self.written_modules is None: raise ValueError("No modules written") @@ -429,7 +488,243 @@ def write_index(self, outdir, froot="gen", relative_to=None): idx = open(path, "wt") w = idx.write w(".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n") - w(".. toctree::\n\n") + if maxdepth is None: + w(".. toctree::\n\n") + else: + w(".. toctree::\n") + w(" :maxdepth: %d\n\n" % maxdepth) for f in self.written_modules: w(" %s\n" % os.path.join(relpath, f)) idx.close() + + +class InterfaceHelpWriter(ApiDocWriter): + """Convert interface specs to rST.""" + + def __init__( + self, + package_name, + class_skip_patterns=None, + **kwargs + ): + """ + Initialize an :py:mod:`ApiDocWriter` for interface specs. + + Additional Parameters + --------------------- + class_skip_patterns : None or sequence + Sequence of strings giving classes to be excluded + Default is: None + + """ + super().__init__(package_name, **kwargs) + self.class_skip_patterns = class_skip_patterns + + @property + def class_skip_patterns(self): + """Get class skip patterns.""" + return self._skip_patterns['class'] + + @class_skip_patterns.setter + def class_skip_patterns(self, pattern): + self._skip_patterns['class'] = _parse_patterns(pattern) + + def _parse_lines(self, linesource, module=None): + """Parse lines of text for functions and classes.""" + functions = [] + classes = [] + for line in linesource: + if line.startswith("def ") and line.count("("): + # exclude private stuff + name = self._get_object_name(line) + if not name.startswith("_"): + functions.append(name) + elif line.startswith("class "): + # exclude private stuff + name = self._get_object_name(line) + if not name.startswith("_") and self._survives_exclude( + ".".join((module, name)), "class" + ): + classes.append(name) + else: + pass + functions.sort() + classes.sort() + return functions, classes + + def _write_graph_section(self, fname, title): + ad = "\n%s\n%s\n\n" % (title, self.rst_section_levels[3] * len(title)) + ad += ".. graphviz::\n\n" + fhandle = open(fname) + for line in fhandle: + ad += "\t" + line + "\n" + + fhandle.close() + os.remove(fname) + bitmap_fname = "{}.png".format(os.path.splitext(fname)[0]) + os.remove(bitmap_fname) + return ad + + def generate_api_doc(self, uri): + """ + Make autodoc documentation template string for a module. + + Parameters + ---------- + uri : string + python location of module - e.g 'sphinx.builder' + + Returns + ------- + S : string + Contents of API doc + + """ + # get the names of all classes and functions + functions, classes = self._parse_module(uri) + workflows = [] + helper_functions = [] + for function in functions: + + try: + __import__(uri) + finst = sys.modules[uri].__dict__[function] + except TypeError: + continue + try: + workflow = finst() + except Exception: + helper_functions.append((function, finst)) + continue + + if isinstance(workflow, Workflow): + workflows.append((workflow, function, finst)) + + if not classes and not workflows and not helper_functions: + print("WARNING: Empty -", uri) # dbg + return "" + + # Make a shorter version of the uri that omits the package name for + # titles + uri_short = re.sub(r"^%s\." % self.package_name, "", uri) + # uri_short = uri + + ad = ".. 
AUTO-GENERATED FILE -- DO NOT EDIT!\n\n" + + chap_title = uri_short + ad += chap_title + "\n" + self.rst_section_levels[1] * len(chap_title) + "\n\n" + + # Set the chapter title to read 'module' for all modules except for the + # main packages + # if '.' in uri: + # title = 'Module: :mod:`' + uri_short + '`' + # else: + # title = ':mod:`' + uri_short + '`' + # ad += title + '\n' + self.rst_section_levels[2] * len(title) + + # ad += '\n' + 'Classes' + '\n' + \ + # self.rst_section_levels[2] * 7 + '\n' + for c in classes: + __import__(uri) + print(c) + try: + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + classinst = sys.modules[uri].__dict__[c] + except Exception as inst: + print(inst) + continue + + if not issubclass(classinst, BaseInterface): + continue + + ad += RST_CLASS_BLOCK.format( + uri=uri, + cls=c, + underline=self.rst_section_levels[2] * len(c), + code_url=get_file_url(classinst), + body=trim(classinst.help(returnhelp=True), self.rst_section_levels[3]) + ) + + if workflows or helper_functions: + ad += "\n.. module:: %s\n\n" % uri + + for workflow, name, finst in workflows: + ad += RST_FUNC_BLOCK.format( + uri=uri, + name=name, + underline=self.rst_section_levels[2] * (len(name) + 8), + code_url=get_file_url(finst), + body=trim(finst.__doc__, self.rst_section_levels[3]) + ) + """ + # use sphinx autodoc for function signature + ad += '\n.. _%s:\n\n' % (uri + '.' + name) + ad += '.. autofunction:: %s\n\n' % name + """ + + (_, fname) = tempfile.mkstemp(suffix=".dot") + workflow.write_graph(dotfilename=fname, graph2use="hierarchical") + ad += self._write_graph_section(fname, "Graph") + "\n" + + for name, finst in helper_functions: + ad += RST_FUNC_BLOCK.format( + uri=uri, + name=name, + underline=self.rst_section_levels[2] * (len(name) + 8), + code_url=get_file_url(finst), + body=trim(finst.__doc__, self.rst_section_levels[3]) + ) + return ad + + def discover_modules(self, empty_start=True): + """Return module sequence discovered from ``self.package_name``.""" + return super().discover_modules(empty_start=False) + + def write_modules_api(self, modules, outdir): + """Generate the list of modules.""" + written_modules = [] + for m in modules: + api_str = self.generate_api_doc(m) + if not api_str: + continue + # write out to file + mvalues = m.split(".") + if len(mvalues) > 3: + index_prefix = ".".join(mvalues[1:3]) + index_dir = os.path.join(outdir, index_prefix) + index_file = index_dir + self.rst_extension + if not os.path.exists(index_dir): + os.makedirs(index_dir) + header = """.. AUTO-GENERATED FILE -- DO NOT EDIT! + +{name} +{underline} + +.. 
toctree:: + :maxdepth: 1 + :glob: + + {name}/* + """.format( + name=index_prefix, underline="=" * len(index_prefix) + ) + with open(index_file, "wt") as fp: + fp.write(header) + m = os.path.join(index_prefix, ".".join(mvalues[3:])) + outfile = os.path.join(outdir, m + self.rst_extension) + fileobj = open(outfile, "wt") + fileobj.write(api_str) + fileobj.close() + written_modules.append(m) + self.written_modules = written_modules + + +def _parse_patterns(pattern): + if pattern is None: + return [] + if isinstance(pattern, str): + return [pattern] + if isinstance(pattern, tuple): + return list(pattern) + return pattern diff --git a/tools/build_interface_docs.py b/tools/build_interface_docs.py index f53c7a0419..d21d19428a 100755 --- a/tools/build_interface_docs.py +++ b/tools/build_interface_docs.py @@ -12,36 +12,36 @@ nipypepath = os.path.abspath("..") sys.path.insert(1, nipypepath) # local imports - from interfacedocgen import InterfaceHelpWriter + from apigen import InterfaceHelpWriter package = "nipype" outdir = os.path.join("interfaces", "generated") docwriter = InterfaceHelpWriter(package) # Packages that should not be included in generated API docs. docwriter.package_skip_patterns += [ - "\.external$", - "\.fixes$", - "\.utils$", - "\.pipeline", - "\.testing", - "\.caching", - "\.scripts", + r"\.external$", + r"\.fixes$", + r"\.utils$", + r"\.pipeline", + r"\.testing", + r"\.caching", + r"\.scripts", ] # Modules that should not be included in generated API docs. docwriter.module_skip_patterns += [ - "\.version$", - "\.interfaces\.base$", - "\.interfaces\.matlab$", - "\.interfaces\.rest$", - "\.interfaces\.pymvpa$", - "\.interfaces\.slicer\.generate_classes$", - "\.interfaces\.spm\.base$", - "\.interfaces\.traits", - "\.pipeline\.alloy$", - "\.pipeline\.s3_node_wrapper$", - "\.testing", - "\.scripts", - "\.conftest", + r"\.version$", + r"\.interfaces\.base$", + r"\.interfaces\.matlab$", + r"\.interfaces\.rest$", + r"\.interfaces\.pymvpa$", + r"\.interfaces\.slicer\.generate_classes$", + r"\.interfaces\.spm\.base$", + r"\.interfaces\.traits", + r"\.pipeline\.alloy$", + r"\.pipeline\.s3_node_wrapper$", + r"\.testing", + r"\.scripts", + r"\.conftest", ] docwriter.class_skip_patterns += [ "AFNICommand", diff --git a/tools/interfacedocgen.py b/tools/interfacedocgen.py deleted file mode 100644 index 27f45ec887..0000000000 --- a/tools/interfacedocgen.py +++ /dev/null @@ -1,529 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""Attempt to generate templates for module reference with Sphinx - -XXX - we exclude extension modules - -To include extension modules, first identify them as valid in the -``_uri2path`` method, then handle them in the ``_parse_module`` script. - -We get functions and classes by parsing the text of .py files. -Alternatively we could import the modules for discovery, and we'd have -to do that for extension modules. This would involve changing the -``_parse_module`` method to work via import and introspection, and -might involve changing ``discover_modules`` (which determines which -files are modules, and therefore which module URIs will be passed to -``_parse_module``). - -NOTE: this is a modified version of a script originally shipped with the -PyMVPA project, which we've adapted for NIPY use. 
PyMVPA is an MIT-licensed -project.""" - -# Stdlib imports -import inspect -import os -import re -import sys -import tempfile -import warnings - -from nipype.interfaces.base import BaseInterface -from nipype.pipeline.engine import Workflow -from nipype.utils.misc import trim - -from github import get_file_url - -# Functions and classes - - -class InterfaceHelpWriter(object): - """ Class for automatic detection and parsing of API docs - to Sphinx-parsable reST format""" - - # only separating first two levels - rst_section_levels = ["*", "=", "-", "~", "^"] - - def __init__( - self, - package_name, - rst_extension=".rst", - package_skip_patterns=None, - module_skip_patterns=None, - class_skip_patterns=None, - ): - """ Initialize package for parsing - - Parameters - ---------- - package_name : string - Name of the top-level package. *package_name* must be the - name of an importable package - rst_extension : string, optional - Extension for reST files, default '.rst' - package_skip_patterns : None or sequence of {strings, regexps} - Sequence of strings giving URIs of packages to be excluded - Operates on the package path, starting at (including) the - first dot in the package path, after *package_name* - so, - if *package_name* is ``sphinx``, then ``sphinx.util`` will - result in ``.util`` being passed for earching by these - regexps. If is None, gives default. Default is: - ['\.tests$'] - module_skip_patterns : None or sequence - Sequence of strings giving URIs of modules to be excluded - Operates on the module name including preceding URI path, - back to the first dot after *package_name*. For example - ``sphinx.util.console`` results in the string to search of - ``.util.console`` - If is None, gives default. Default is: - ['\.setup$', '\._'] - class_skip_patterns : None or sequence - Sequence of strings giving classes to be excluded - Default is: None - - """ - if package_skip_patterns is None: - package_skip_patterns = ["\\.tests$"] - if module_skip_patterns is None: - module_skip_patterns = ["\\.setup$", "\\._"] - if class_skip_patterns: - self.class_skip_patterns = class_skip_patterns - else: - self.class_skip_patterns = [] - self.package_name = package_name - self.rst_extension = rst_extension - self.package_skip_patterns = package_skip_patterns - self.module_skip_patterns = module_skip_patterns - - def get_package_name(self): - return self._package_name - - def set_package_name(self, package_name): - """ Set package_name - - >>> docwriter = ApiDocWriter('sphinx') - >>> import sphinx - >>> docwriter.root_path == sphinx.__path__[0] - True - >>> docwriter.package_name = 'docutils' - >>> import docutils - >>> docwriter.root_path == docutils.__path__[0] - True - """ - # It's also possible to imagine caching the module parsing here - self._package_name = package_name - self.root_module = __import__(package_name) - self.root_path = self.root_module.__path__[0] - self.written_modules = None - - package_name = property( - get_package_name, set_package_name, None, "get/set package_name" - ) - - def _get_object_name(self, line): - """ Get second token in line - >>> docwriter = ApiDocWriter('sphinx') - >>> docwriter._get_object_name(" def func(): ") - u'func' - >>> docwriter._get_object_name(" class Klass(object): ") - 'Klass' - >>> docwriter._get_object_name(" class Klass: ") - 'Klass' - """ - name = line.split()[1].split("(")[0].strip() - # in case we have classes which are not derived from object - # ie. 
old style classes - return name.rstrip(":") - - def _uri2path(self, uri): - """ Convert uri to absolute filepath - - Parameters - ---------- - uri : string - URI of python module to return path for - - Returns - ------- - path : None or string - Returns None if there is no valid path for this URI - Otherwise returns absolute file system path for URI - - Examples - -------- - >>> docwriter = ApiDocWriter('sphinx') - >>> import sphinx - >>> modpath = sphinx.__path__[0] - >>> res = docwriter._uri2path('sphinx.builder') - >>> res == os.path.join(modpath, 'builder.py') - True - >>> res = docwriter._uri2path('sphinx') - >>> res == os.path.join(modpath, '__init__.py') - True - >>> docwriter._uri2path('sphinx.does_not_exist') - - """ - if uri == self.package_name: - return os.path.join(self.root_path, "__init__.py") - path = uri.replace(".", os.path.sep) - path = path.replace(self.package_name + os.path.sep, "") - path = os.path.join(self.root_path, path) - # XXX maybe check for extensions as well? - if os.path.exists(path + ".py"): # file - path += ".py" - elif os.path.exists(os.path.join(path, "__init__.py")): - path = os.path.join(path, "__init__.py") - else: - return None - return path - - def _path2uri(self, dirpath): - """ Convert directory path to uri """ - relpath = dirpath.replace(self.root_path, self.package_name) - if relpath.startswith(os.path.sep): - relpath = relpath[1:] - return relpath.replace(os.path.sep, ".") - - def _parse_module(self, uri): - """ Parse module defined in *uri* """ - filename = self._uri2path(uri) - if filename is None: - # nothing that we could handle here. - return ([], []) - f = open(filename, "rt") - functions, classes = self._parse_lines(f, uri) - f.close() - return functions, classes - - def _parse_lines(self, linesource, module): - """ Parse lines of text for functions and classes """ - functions = [] - classes = [] - for line in linesource: - if line.startswith("def ") and line.count("("): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith("_"): - functions.append(name) - elif line.startswith("class "): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith("_") and self._survives_exclude( - ".".join((module, name)), "class" - ): - classes.append(name) - else: - pass - functions.sort() - classes.sort() - return functions, classes - - def _write_graph_section(self, fname, title): - ad = "\n%s\n%s\n\n" % (title, self.rst_section_levels[3] * len(title)) - ad += ".. 
graphviz::\n\n" - fhandle = open(fname) - for line in fhandle: - ad += "\t" + line + "\n" - - fhandle.close() - os.remove(fname) - bitmap_fname = "{}.png".format(os.path.splitext(fname)[0]) - os.remove(bitmap_fname) - return ad - - def generate_api_doc(self, uri): - """Make autodoc documentation template string for a module - - Parameters - ---------- - uri : string - python location of module - e.g 'sphinx.builder' - - Returns - ------- - S : string - Contents of API doc - """ - # get the names of all classes and functions - functions, classes = self._parse_module(uri) - workflows = [] - helper_functions = [] - for function in functions: - - try: - __import__(uri) - finst = sys.modules[uri].__dict__[function] - except TypeError: - continue - try: - workflow = finst() - except Exception: - helper_functions.append((function, finst)) - continue - - if isinstance(workflow, Workflow): - workflows.append((workflow, function, finst)) - - if not classes and not workflows and not helper_functions: - print("WARNING: Empty -", uri) # dbg - return "" - - # Make a shorter version of the uri that omits the package name for - # titles - uri_short = re.sub(r"^%s\." % self.package_name, "", uri) - # uri_short = uri - - ad = ".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n" - - chap_title = uri_short - ad += chap_title + "\n" + self.rst_section_levels[1] * len(chap_title) + "\n\n" - - # Set the chapter title to read 'module' for all modules except for the - # main packages - # if '.' in uri: - # title = 'Module: :mod:`' + uri_short + '`' - # else: - # title = ':mod:`' + uri_short + '`' - # ad += title + '\n' + self.rst_section_levels[2] * len(title) - - # ad += '\n' + 'Classes' + '\n' + \ - # self.rst_section_levels[2] * 7 + '\n' - for c in classes: - __import__(uri) - print(c) - try: - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - classinst = sys.modules[uri].__dict__[c] - except Exception as inst: - print(inst) - continue - - if not issubclass(classinst, BaseInterface): - continue - - label = uri + "." + c + ":" - ad += "\n.. _%s\n\n" % label - ad += "\n.. index:: %s\n\n" % c - ad += c + "\n" + self.rst_section_levels[2] * len(c) + "\n\n" - ad += "`Link to code <%s>`__\n\n" % get_file_url(classinst) - ad += ( - trim(classinst.help(returnhelp=True), self.rst_section_levels[3]) + "\n" - ) - - if workflows or helper_functions: - ad += "\n.. module:: %s\n\n" % uri - - for workflow, name, finst in workflows: - label = ":func:`" + name + "`" - ad += "\n.. _%s:\n\n" % (uri + "." + name) - ad += "\n".join((label, self.rst_section_levels[2] * len(label))) - ad += "\n\n`Link to code <%s>`__\n\n" % get_file_url(finst) - helpstr = trim(finst.__doc__, self.rst_section_levels[3]) - ad += "\n\n" + helpstr + "\n\n" - """ - # use sphinx autodoc for function signature - ad += '\n.. _%s:\n\n' % (uri + '.' + name) - ad += '.. autofunction:: %s\n\n' % name - """ - - (_, fname) = tempfile.mkstemp(suffix=".dot") - workflow.write_graph(dotfilename=fname, graph2use="hierarchical") - - ad += self._write_graph_section(fname, "Graph") + "\n" - - for name, finst in helper_functions: - label = ":func:`" + name + "`" - ad += "\n.. _%s:\n\n" % (uri + "." 
+ name) - ad += "\n".join((label, self.rst_section_levels[2] * len(label))) - ad += "\n\n`Link to code <%s>`__\n\n" % get_file_url(finst) - helpstr = trim(finst.__doc__, self.rst_section_levels[3]) - ad += "\n\n" + helpstr + "\n\n" - - return ad - - def _survives_exclude(self, matchstr, match_type): - """ Returns True if *matchstr* does not match patterns - - ``self.package_name`` removed from front of string if present - - Examples - -------- - >>> dw = ApiDocWriter('sphinx') - >>> dw._survives_exclude('sphinx.okpkg', 'package') - True - >>> dw.package_skip_patterns.append('^\\.badpkg$') - >>> dw._survives_exclude('sphinx.badpkg', 'package') - False - >>> dw._survives_exclude('sphinx.badpkg', 'module') - True - >>> dw._survives_exclude('sphinx.badmod', 'module') - True - >>> dw.module_skip_patterns.append('^\\.badmod$') - >>> dw._survives_exclude('sphinx.badmod', 'module') - False - """ - if match_type == "module": - patterns = self.module_skip_patterns - elif match_type == "package": - patterns = self.package_skip_patterns - elif match_type == "class": - patterns = self.class_skip_patterns - else: - raise ValueError('Cannot interpret match type "%s"' % match_type) - # Match to URI without package name - L = len(self.package_name) - if matchstr[:L] == self.package_name: - matchstr = matchstr[L:] - for pat in patterns: - try: - pat.search - except AttributeError: - pat = re.compile(pat) - if pat.search(matchstr): - return False - return True - - def discover_modules(self): - """ Return module sequence discovered from ``self.package_name`` - - - Parameters - ---------- - None - - Returns - ------- - mods : sequence - Sequence of module names within ``self.package_name`` - - Examples - -------- - >>> dw = ApiDocWriter('sphinx') - >>> mods = dw.discover_modules() - >>> 'sphinx.util' in mods - True - >>> dw.package_skip_patterns.append('\.util$') - >>> 'sphinx.util' in dw.discover_modules() - False - >>> - """ - modules = [self.package_name] - # raw directory parsing - for dirpath, dirnames, filenames in os.walk(self.root_path): - # Check directory names for packages - root_uri = self._path2uri(os.path.join(self.root_path, dirpath)) - for dirname in dirnames[:]: # copy list - we modify inplace - package_uri = ".".join((root_uri, dirname)) - if self._uri2path(package_uri) and self._survives_exclude( - package_uri, "package" - ): - modules.append(package_uri) - else: - dirnames.remove(dirname) - # Check filenames for modules - for filename in filenames: - module_name = filename[:-3] - module_uri = ".".join((root_uri, module_name)) - if self._uri2path(module_uri) and self._survives_exclude( - module_uri, "module" - ): - modules.append(module_uri) - return sorted(modules) - - def write_modules_api(self, modules, outdir): - # write the list - written_modules = [] - for m in modules: - api_str = self.generate_api_doc(m) - if not api_str: - continue - # write out to file - mvalues = m.split(".") - if len(mvalues) > 3: - index_prefix = ".".join(mvalues[1:3]) - index_dir = os.path.join(outdir, index_prefix) - index_file = index_dir + self.rst_extension - if not os.path.exists(index_dir): - os.makedirs(index_dir) - header = """.. AUTO-GENERATED FILE -- DO NOT EDIT! - -{name} -{underline} - -.. 
toctree:: - :maxdepth: 1 - :glob: - - {name}/* - """.format( - name=index_prefix, underline="=" * len(index_prefix) - ) - with open(index_file, "wt") as fp: - fp.write(header) - m = os.path.join(index_prefix, ".".join(mvalues[3:])) - outfile = os.path.join(outdir, m + self.rst_extension) - fileobj = open(outfile, "wt") - fileobj.write(api_str) - fileobj.close() - written_modules.append(m) - self.written_modules = written_modules - - def write_api_docs(self, outdir): - """Generate API reST files. - - Parameters - ---------- - outdir : string - Directory name in which to store files - We create automatic filenames for each module - - Returns - ------- - None - - Notes - ----- - Sets self.written_modules to list of written modules - """ - if not os.path.exists(outdir): - os.mkdir(outdir) - # compose list of modules - modules = self.discover_modules() - self.write_modules_api(modules, outdir) - - def write_index(self, outdir, froot="gen", relative_to=None): - """Make a reST API index file from written files - - Parameters - ---------- - path : string - Filename to write index to - outdir : string - Directory to which to write generated index file - froot : string, optional - root (filename without extension) of filename to write to - Defaults to 'gen'. We add ``self.rst_extension``. - relative_to : string - path to which written filenames are relative. This - component of the written file path will be removed from - outdir, in the generated index. Default is None, meaning, - leave path as it is. - """ - if self.written_modules is None: - raise ValueError("No modules written") - # Get full filename path - path = os.path.join(outdir, froot + self.rst_extension) - # Path written into index is relative to rootpath - if relative_to is not None: - relpath = outdir.replace(relative_to + os.path.sep, "") - else: - relpath = outdir - idx = open(path, "wt") - w = idx.write - w(".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n") - w(".. toctree::\n") - w(" :maxdepth: 2\n\n") - for f in self.written_modules: - w(" %s\n" % os.path.join(relpath, f)) - idx.close() From 722bd64883437bab07ec0354ec3b63595321ba18 Mon Sep 17 00:00:00 2001 From: oesteban Date: Fri, 20 Dec 2019 17:11:25 -0800 Subject: [PATCH 03/48] DOC: Deep revision of documentation building This PR follows up on #3119 (merge that one first for a clean diff, or diff against ``oesteban:maint/dedup-apigen-code``). In practice, this PR fixes several broken points of our documentation (e.g., the workflows list was empty and now it has been updated, changelog not rendered, API of pure python code not rendered by the Nipype API parser was missing, etc.). CHANGES ------- * Replaced the ``numpydoc`` sphinx extension with ``sphinxcontrib-napoleon``. * Removed autosummary sphinx extension, required by numpydoc * Cleared up ``docs/sphinxext/*``, as nothing is now used from there * Use current sphinx-apidoc/autodoc/autosummary * Removed the modref generation tooling, as it is not necessary anymore after re-enabling apidoc. * Cut building warnings down to 321 - just those we incur because our API generator. This required some fixes of some docstrings. Beyond those corresponding to the Nipype API generator, only missing links remain as warnings (for sections in the navbar). * Updated changelogs to be reStructuredText. 
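
A minimal sketch of the docstring pattern that motivates
``napoleon_custom_sections`` (the function and field names here are
illustrative only, not taken from the codebase): interface docstrings
document their traits under non-standard ``Inputs``/``Outputs`` headers,
which napoleon would silently drop without the mapping to ``Parameters``
added in ``doc/conf.py``::

    def example_interface_help():
        """Summary line for a hypothetical interface.

        Inputs
        ------
        in_file : str
            Existing input image; rendered as a ``Parameters`` block.

        Outputs
        -------
        out_file : str
            Path to the output image; also rendered as ``Parameters``.
        """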
--- doc/Makefile | 13 +- doc/api/index.rst | 13 +- .../{0.X.X-changelog => 0.X.X-changelog.rst} | 8 +- .../{1.X.X-changelog => 1.X.X-changelog.rst} | 42 +-- doc/changes.rst | 4 +- doc/conf.py | 112 +++++--- doc/devel/cmd_interface_devel.rst | 2 +- doc/devel/interface_specs.rst | 3 +- doc/devel/testing_nipype.rst | 2 +- doc/documentation.rst | 31 ++- doc/interfaces/index.rst | 1 - doc/links_names.txt | 2 +- doc/sphinxext/README.txt | 16 -- doc/sphinxext/autosummary_generate.py | 240 ------------------ doc/sphinxext/ipython_console_highlighting.py | 101 -------- doc/users/install.rst | 6 +- doc/version.rst | 6 +- nipype/__init__.py | 8 +- nipype/pipeline/plugins/base.py | 54 ++-- nipype/utils/config.py | 4 +- nipype/utils/filemanip.py | 6 +- nipype/utils/nipype2boutiques.py | 51 ++-- tools/README | 15 -- tools/build_interface_docs.py | 15 +- tools/build_modref_templates.py | 44 ---- tools/update_changes.sh | 2 +- 26 files changed, 234 insertions(+), 567 deletions(-) rename doc/changelog/{0.X.X-changelog => 0.X.X-changelog.rst} (99%) rename doc/changelog/{1.X.X-changelog => 1.X.X-changelog.rst} (94%) delete mode 100644 doc/sphinxext/README.txt delete mode 100755 doc/sphinxext/autosummary_generate.py delete mode 100644 doc/sphinxext/ipython_console_highlighting.py delete mode 100644 tools/README delete mode 100755 tools/build_modref_templates.py diff --git a/doc/Makefile b/doc/Makefile index abe329a57a..25acfeb122 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -5,18 +5,19 @@ SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = +PYTHONPATH = $(PWD) # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d _build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -.PHONY: help clean html api htmlonly latex changes linkcheck doctest +.PHONY: help clean html nipypeapi htmlonly latex changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html make the HTML documentation" - @echo " api make API documents only" + @echo " nipypeapi make interface API documents only" @echo " latex make the LaTeX, you can set PAPER=a4 or PAPER=letter" @echo " pdf make and run the PDF generation" @echo " changes make an overview of all changed/added/deprecated" \ @@ -33,14 +34,12 @@ htmlonly: @echo @echo "Build finished. The HTML pages are in _build/html." -api: - rm -rf api/generated - python -u ../tools/build_modref_templates.py +nipypeapi: rm -rf interfaces/generated python -u ../tools/build_interface_docs.py @echo "Build API docs finished." -html: clean examples2rst api htmlonly +html: clean examples2rst nipypeapi htmlonly @echo "Build HTML and API finished." examples2rst: @@ -48,7 +47,7 @@ examples2rst: ../tools/make_examples.py --no-exec @echo "examples2rst finished." -latex: api +latex: nipypeapi $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex @echo @echo "Build finished; the LaTeX files are in _build/latex." diff --git a/doc/api/index.rst b/doc/api/index.rst index 0e40dbf4ab..0cc9d87e32 100644 --- a/doc/api/index.rst +++ b/doc/api/index.rst @@ -1,10 +1,15 @@ .. _api-index: -### -API -### +########################################### +Library API (application program interface) +########################################### + +Information on specific functions, classes, and methods. :Release: |version| :Date: |today| -.. include:: generated/gen.rst +.. 
toctree:: + :glob: + + generated/* diff --git a/doc/changelog/0.X.X-changelog b/doc/changelog/0.X.X-changelog.rst similarity index 99% rename from doc/changelog/0.X.X-changelog rename to doc/changelog/0.X.X-changelog.rst index b1c16318b2..0c007cade7 100644 --- a/doc/changelog/0.X.X-changelog +++ b/doc/changelog/0.X.X-changelog.rst @@ -1,7 +1,7 @@ 0.14.0 (November 29, 2017) ========================== -###### [Full changelog](https://github.com/nipy/nipype/milestone/13) +(`Full changelog `__) * FIX+MAINT: Revision of the resource monitor (https://github.com/nipy/nipype/pull/2285) * FIX: MultiProc mishandling crashes (https://github.com/nipy/nipype/pull/2301) @@ -385,8 +385,10 @@ Release 0.9.0 (December 20, 2013) - camino.QBallMX - camino.LinRecon - camino.SFPeaks - One outdated interface no longer part of Camino was removed: + + One outdated interface no longer part of Camino was removed: - camino.Conmap + * ENH: Three new mrtrix interfaces were added: - mrtrix.GenerateDirections - mrtrix.FindShPeaks @@ -713,7 +715,7 @@ Features added * General: - - Type checking of inputs and outputs using Traits from ETS_. + - Type checking of inputs and outputs using Traits from ETS. - Support for nested workflows. - Preliminary Slicer and AFNI support. - New flexible DataGrabber node. diff --git a/doc/changelog/1.X.X-changelog b/doc/changelog/1.X.X-changelog.rst similarity index 94% rename from doc/changelog/1.X.X-changelog rename to doc/changelog/1.X.X-changelog.rst index 99d26fa4a8..0e0d661f61 100644 --- a/doc/changelog/1.X.X-changelog +++ b/doc/changelog/1.X.X-changelog.rst @@ -26,7 +26,7 @@ 1.3.0 (November 11, 2019) ========================= -##### [Full changelog](https://github.com/nipy/nipype/milestone/34?closed=1) +(`Full changelog `__) * FIX: Fixed typo in QwarpInputSpec Trait description (https://github.com/nipy/nipype/pull/3079) * FIX: Restore ``AFNICommand._get_fname``, required by some interfaces (https://github.com/nipy/nipype/pull/3071) @@ -52,7 +52,7 @@ Python 1.2.3 will be the last version to support Python 3.4. -##### [Full changelog](https://github.com/nipy/nipype/milestone/35?closed=1) +(`Full changelog `__) * FIX: Patch Path.mkdir for Python 2 (https://github.com/nipy/nipype/pull/3037) * FIX: Drop deprecated message argument to ``FileNotFoundError`` (https://github.com/nipy/nipype/pull/3035) @@ -71,7 +71,7 @@ Python 1.2.3 will be the last version to support Python 3.4. 1.2.2 (September 07, 2019) ========================== -##### [Full changelog](https://github.com/nipy/nipype/milestone/33?closed=1) +(`Full changelog `__) * FIX: Ensure ``loadpkl`` returns a not None value (https://github.com/nipy/nipype/pull/3020) * FIX: ``loadpkl`` failed when pklz file contained versioning info (https://github.com/nipy/nipype/pull/3017) @@ -87,7 +87,7 @@ Python 1.2.3 will be the last version to support Python 3.4. 1.2.1 (August 19, 2019) ======================= -##### [Full changelog](https://github.com/nipy/nipype/milestone/32?closed=1) +(`Full changelog `__) * FIX: Resolve/rebase paths from/to results files (https://github.com/nipy/nipype/pull/2971) * FIX: Use ``load_resultfile`` when loading a results pickle (https://github.com/nipy/nipype/pull/2985) @@ -97,7 +97,7 @@ Python 1.2.3 will be the last version to support Python 3.4. 
* FIX: Docker build (https://github.com/nipy/nipype/pull/2963) * FIX: Remove '=' signs from EddyQuad argument specifications (https://github.com/nipy/nipype/pull/2941) * FIX: Set input model to bedpostx for camino.TrackBedpostxProba (https://github.com/nipy/nipype/pull/2947) - * FIX: Allow ``max_sh``not to be set (auto mode) (https://github.com/nipy/nipype/pull/2940) + * FIX: Allow ``max_sh`` to not be set (auto mode) (https://github.com/nipy/nipype/pull/2940) * ENH: Update mrtrix reconst.py EstimateFOD max_sh to be able to accept list (https://github.com/nipy/nipype/pull/2990) * ENH: Let ``indirectory`` handle ``nipype.utils.filemanip.Path`` (https://github.com/nipy/nipype/pull/2989) * ENH: Add resolve/rebase ``BasePath`` traits methods & tests (https://github.com/nipy/nipype/pull/2970) @@ -114,7 +114,7 @@ Python 1.2.3 will be the last version to support Python 3.4. 1.2.0 (May 09, 2019) ==================== -##### [Full changelog](https://github.com/nipy/nipype/milestone/31?closed=1) +(`Full changelog `__) * FIX: Parsing of filename in AlignEpiAnatPy when filename does not have + (https://github.com/nipy/nipype/pull/2909) * FIX: Import nibabel reorientation bug fix (https://github.com/nipy/nipype/pull/2912) @@ -133,7 +133,7 @@ Python 1.2.3 will be the last version to support Python 3.4. 1.1.9 (February 25, 2019) ========================= -##### [Full changelog](https://github.com/nipy/nipype/milestone/30?closed=1) +(`Full changelog `__) * FIX: Make positional arguments to LaplacianThickness require previous argument (https://github.com/nipy/nipype/pull/2848) * FIX: Import math and csv modules for bids_gen_info (https://github.com/nipy/nipype/pull/2881) @@ -149,7 +149,7 @@ Python 1.2.3 will be the last version to support Python 3.4. 1.1.8 (January 28, 2019) ======================== -##### [Full changelog](https://github.com/nipy/nipype/milestone/29?closed=1) +(`Full changelog `__) * FIX: ANTS LaplacianThickness cmdline opts fixed up (https://github.com/nipy/nipype/pull/2846) * FIX: Resolve LinAlgError during SVD (https://github.com/nipy/nipype/pull/2838) @@ -171,7 +171,7 @@ Python 1.2.3 will be the last version to support Python 3.4. 1.1.7 (December 17, 2018) ========================= -##### [Full changelog](https://github.com/nipy/nipype/milestone/28?closed=1) +(`Full changelog `__) * FIX: Copy node list before generating a flat graph (https://github.com/nipy/nipype/pull/2828) * FIX: Update pytest req'd version to 3.6 (https://github.com/nipy/nipype/pull/2827) @@ -193,7 +193,7 @@ Python 1.2.3 will be the last version to support Python 3.4. 1.1.6 (November 26, 2018) ========================= -##### [Full changelog](https://github.com/nipy/nipype/milestone/27?closed=1) +(`Full changelog `__) * FIX: MapNodes fail when ``MultiProcPlugin`` passed by instance (https://github.com/nipy/nipype/pull/2786) * FIX: --fineTune arguments order for MeshFix command (https://github.com/nipy/nipype/pull/2780) @@ -216,7 +216,7 @@ Python 1.2.3 will be the last version to support Python 3.4. Hotfix release. -##### [Full changelog](https://github.com/nipy/nipype/milestone/26?closed=1) +(`Full changelog `__) * ENH: Allow timeouts during SLURM job status checks (https://github.com/nipy/nipype/pull/2767) * RF: Subclass non-daemon variants of all multiprocessing contexts (https://github.com/nipy/nipype/pull/2771) @@ -225,7 +225,7 @@ Hotfix release. 
1.1.4 (October 31, 2018) ======================== -##### [Full changelog](https://github.com/nipy/nipype/milestone/25?closed=1) +(`Full changelog `__) * FIX: Python 2.7-3.7.1 compatible NonDaemonPool (https://github.com/nipy/nipype/pull/2754) * FIX: VRML typo (VMRL) in MeshFix (https://github.com/nipy/nipype/pull/2757) @@ -253,7 +253,7 @@ Hotfix release. 1.1.3 (September 24, 2018) ========================== -##### [Full changelog](https://github.com/nipy/nipype/milestone/24?closed=1) +(`Full changelog `__) * FIX: Return afni.Qwarp outputs as absolute paths (https://github.com/nipy/nipype/pull/2705) * FIX: Add informative error for interfaces that fail to return valid runtime object (https://github.com/nipy/nipype/pull/2692) @@ -272,7 +272,7 @@ Hotfix release. Hot-fix release, resolving incorrect dependencies in 1.1.1 wheel. -##### [Full changelog](https://github.com/nipy/nipype/milestone/23?closed=1) +(`Full changelog `__) * FIX: Read BIDS config.json under grabbids or layout (https://github.com/nipy/nipype/pull/2679) * FIX: Node __repr__ and detailed graph expansion (https://github.com/nipy/nipype/pull/2669) @@ -287,7 +287,7 @@ Hot-fix release, resolving incorrect dependencies in 1.1.1 wheel. 1.1.1 (July 30, 2018) ===================== -##### [Full changelog](https://github.com/nipy/nipype/milestone/22?closed=1) +(`Full changelog `__) * FIX: Un-set incorrect default options in TOPUP (https://github.com/nipy/nipype/pull/2637) * FIX: Copy FSCommand.version to ReconAll.version (https://github.com/nipy/nipype/pull/2656) @@ -309,7 +309,7 @@ Hot-fix release, resolving incorrect dependencies in 1.1.1 wheel. 1.1.0 (July 04, 2018) ===================== -###### [Full changelog](https://github.com/nipy/nipype/milestone/21?closed=1) +(`Full changelog `__) * RF: Futures-based MultiProc (https://github.com/nipy/nipype/pull/2598) * FIX: Avoid closing file descriptors on Windows (https://github.com/nipy/nipype/pull/2617) @@ -326,7 +326,7 @@ Hot-fix release, resolving incorrect dependencies in 1.1.1 wheel. 1.0.4 (May 29, 2018) ==================== -###### [Full changelog](https://github.com/nipy/nipype/milestone/20?closed=1) +(`Full changelog `__) * FIX: Update logging levels in enable_debug_mode (https://github.com/nipy/nipype/pull/2595) * FIX: Set default result in DistributedPluginBase._clean_queue (https://github.com/nipy/nipype/pull/2596) @@ -349,7 +349,7 @@ Hot-fix release, resolving incorrect dependencies in 1.1.1 wheel. 1.0.3 (April 30, 2018) ====================== -###### [Full changelog](https://github.com/nipy/nipype/milestone/19?closed=1) +(`Full changelog `__) * FIX: Propagate explicit Workflow config to Nodes (https://github.com/nipy/nipype/pull/2559) * FIX: Return non-enhanced volumes from dwi_flirt (https://github.com/nipy/nipype/pull/2547) @@ -376,7 +376,7 @@ Hot-fix release, resolving incorrect dependencies in 1.1.1 wheel. 1.0.2 (March 27, 2018) ====================== -###### [Full changelog](https://github.com/nipy/nipype/milestone/16?closed=1) +(`Full changelog `__) * FIX: dcm2niix interface (https://github.com/nipy/nipype/pull/2498) * FIX: mark .niml.dset as special extension in utils.filemanip (https://github.com/nipy/nipype/pull/2495) @@ -399,7 +399,7 @@ Hot-fix release, resolving incorrect dependencies in 1.1.1 wheel. 
1.0.1 (February 27, 2018) ========================= -###### [Full changelog](https://github.com/nipy/nipype/milestone/16?closed=1) +(`Full changelog `__) * FIX: Small bug in freesurfer label2annot fill_thresh specs [#2377](https://github.com/nipy/nipype/pull/2377) * FIX: Error creating gradients in DTIRecon [#2460](https://github.com/nipy/nipype/pull/2460) @@ -432,7 +432,7 @@ Hot-fix release, resolving incorrect dependencies in 1.1.1 wheel. 1.0.0 (January 24, 2018) ======================== -###### [Full changelog](https://github.com/nipy/nipype/milestone/16?closed=1) +(`Full changelog `__) * FIX: Change to interface workdir within ``Interface.run()`` instead Node (https://github.com/nipy/nipype/pull/2384) * FIX: PBS plugin submissions (https://github.com/nipy/nipype/pull/2344) diff --git a/doc/changes.rst b/doc/changes.rst index 4585c58af8..858a907691 100644 --- a/doc/changes.rst +++ b/doc/changes.rst @@ -6,6 +6,8 @@ Changes in Nipype ================= -.. include:: ../CHANGES +.. include:: changelog/1.X.X-changelog.rst + +.. include:: changelog/0.X.X-changelog.rst .. include:: links_names.txt diff --git a/doc/conf.py b/doc/conf.py index c49f20e514..45bd46b97b 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -12,49 +12,65 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os -from shutil import rmtree - -nipypepath = os.path.abspath('..') -sys.path.insert(1, nipypepath) - +from packaging.version import Version import nipype -if not os.path.exists('users/examples'): - os.mkdir('users/examples') -os.system('python ../tools/make_examples.py --no-exec') +# if not os.path.exists('users/examples'): +# os.mkdir('users/examples') +# os.system('python ../tools/make_examples.py --no-exec') -if os.path.exists('api/generated'): - rmtree('api/generated') -os.system('python ../tools/build_modref_templates.py') -if os.path.exists('interfaces/generated'): - rmtree('interfaces/generated') -os.system('python ../tools/build_interface_docs.py') +# if os.path.exists('interfaces/generated'): +# rmtree('interfaces/generated') +# os.system('python ../tools/build_interface_docs.py') # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.append(os.path.abspath('sphinxext')) +# sys.path.append(os.path.abspath('sphinxext')) # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
-extensions = ['sphinx.ext.todo', - 'sphinx.ext.imgmath', - 'sphinx.ext.inheritance_diagram', - 'sphinx.ext.graphviz', - 'sphinx.ext.autodoc', - 'sphinx.ext.doctest', - 'sphinx.ext.autosummary', - 'numpydoc', - 'matplotlib.sphinxext.plot_directive', - #'matplotlib.sphinxext.only_directives', - 'nipype.sphinxext.plot_workflow', - #'IPython.sphinxext.ipython_directive', - #'IPython.sphinxext.ipython_console_highlighting' - ] +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.graphviz', + 'sphinx.ext.mathjax', + 'sphinx.ext.inheritance_diagram', + 'sphinx.ext.todo', + 'sphinxcontrib.apidoc', + 'sphinxcontrib.napoleon', + 'matplotlib.sphinxext.plot_directive', + 'nipype.sphinxext.plot_workflow', +] + +autodoc_mock_imports = [ + 'matplotlib', + 'nilearn', + 'nipy', + 'nitime', + 'numpy', + 'pandas', + 'seaborn', + 'skimage', + 'svgutils', + 'transforms3d', +] + +# Accept custom section names to be parsed for numpy-style docstrings +# of parameters. +# Requires pinning sphinxcontrib-napoleon to a specific commit while +# https://github.com/sphinx-contrib/napoleon/pull/10 is merged. +napoleon_use_param = False +napoleon_custom_sections = [ + ('Inputs', 'Parameters'), + ('Outputs', 'Parameters'), + ('Attributes', 'Parameters'), +] + + on_rtd = os.environ.get('READTHEDOCS') == 'True' if on_rtd: extensions.append('readthedocs_ext.readthedocs') @@ -80,9 +96,9 @@ # built documents. # # The short X.Y version. -version = nipype.__version__ +version = Version(nipype.__version__).public # The full version, including alpha/beta/rc tags. -release = "1.3.0-rc1" +release = nipype.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -101,6 +117,15 @@ # for source files. exclude_trees = ['_build'] +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [ + '_build', 'Thumbs.db', '.DS_Store', + 'api/generated/gen.rst', + 'interfaces/generated/gen.rst' +] + # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None @@ -248,8 +273,29 @@ # If false, no module index is generated. #latex_use_modindex = True +# -- apidoc extension configuration ------------------------------------------ +apidoc_module_dir = '../nipype' +apidoc_output_dir = 'api/generated' +apidoc_excluded_paths = [ + '*/tests/*', 'tests/*', + 'algorithms/*', + 'external/*', + 'fixes/*', + 'interfaces/*', + 'scripts/*', + 'sphinxext/*', + 'testing/*', + 'workflows/*', + 'conftest.py', + 'info.py', + 'pkg_info.py', + 'refs.py', +] +apidoc_separate_modules = True +apidoc_extra_args = ['--module-first', '-d 1', '-T'] + +# -- Options for intersphinx extension --------------------------------------- # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None} -exclude_patterns = ['interfaces/generated/gen.rst', 'api/generated/gen.rst'] diff --git a/doc/devel/cmd_interface_devel.rst b/doc/devel/cmd_interface_devel.rst index b53dfe9f5b..e0153cf678 100644 --- a/doc/devel/cmd_interface_devel.rst +++ b/doc/devel/cmd_interface_devel.rst @@ -170,7 +170,7 @@ names as arguments on the command line. We have simplified this procedure with three additional metadata terms: ``name_source``, ``name_template``, ``keep_extension``. 
-For example in the :ref:`InvWarp ` class, the +For example in the :ref:`InvWarp ` class, the ``inverse_warp`` parameter is the name of the output file that is created by the routine. diff --git a/doc/devel/interface_specs.rst b/doc/devel/interface_specs.rst index 26623266c6..13d44e1528 100644 --- a/doc/devel/interface_specs.rst +++ b/doc/devel/interface_specs.rst @@ -470,8 +470,7 @@ If you used genfile: And optionally: -* ``_redirect_x``: If set to True it will make Nipype start Xvfb before running the interface and redirect X output to it. This is useful for -commandlines that spawn a graphical user interface. +* ``_redirect_x``: If set to True it will make Nipype start Xvfb before running the interface and redirect X output to it. This is useful for commandlines that spawn a graphical user interface. * ``_format_arg(name, spec, value)``: For extra formatting of the input values before passing them to generic ``_parse_inputs()`` method. diff --git a/doc/devel/testing_nipype.rst b/doc/devel/testing_nipype.rst index 0cce8b4671..5713f6727b 100644 --- a/doc/devel/testing_nipype.rst +++ b/doc/devel/testing_nipype.rst @@ -90,7 +90,7 @@ Testing Nipype using Docker Nipype is tested inside Docker containers and users can use nipype images to test local versions. First, install the `Docker Engine `_. Nipype has one base docker image called nipype/nipype:base, that contains several useful tools - (FreeSurfer, AFNI, FSL, ANTs, etc.), and additional test images +(FreeSurfer, AFNI, FSL, ANTs, etc.), and additional test images for specific Python versions: py27 for Python 2.7 and py36 for Python 3.6. Users can pull the nipype image for Python 3.6 as follows:: diff --git a/doc/documentation.rst b/doc/documentation.rst index 5b4216f8a7..1cf275d630 100644 --- a/doc/documentation.rst +++ b/doc/documentation.rst @@ -4,10 +4,8 @@ Documentation ============= -.. htmlonly:: - - :Release: |version| - :Date: |today| +:Release: |version| +:Date: |today| Previous versions: `1.3.0 `_ `1.2.3 `_ @@ -16,35 +14,42 @@ Previous versions: `1.3.0 `_ `1.2.3 `_. + Be sure to read `Michael's excellent tutorials `__. + + .. admonition:: Nipype Workflows - .. admonition:: Interfaces, Workflows and Examples + The workflows that used to live as a module under + ``nipype.workflows`` have been migrated to the + new project `NiFlows `__. + + .. admonition:: Interfaces and Examples .. hlist:: :columns: 2 - * Workflows + * *In-house* interfaces .. toctree:: :maxdepth: 1 :glob: - interfaces/generated/*workflows* - * Examples + interfaces/generated/*algorithms* + + * Interfaces to third-party tools .. toctree:: :maxdepth: 1 :glob: - users/examples/* - * Interfaces + interfaces/generated/*interfaces* + + * Examples .. toctree:: :maxdepth: 1 :glob: - interfaces/generated/*algorithms* - interfaces/generated/*interfaces* + users/examples/* .. admonition:: Developer Guides diff --git a/doc/interfaces/index.rst b/doc/interfaces/index.rst index 77b9541100..14deeec063 100644 --- a/doc/interfaces/index.rst +++ b/doc/interfaces/index.rst @@ -7,4 +7,3 @@ Interfaces and Algorithms :Release: |version| :Date: |today| -.. include:: generated/gen.rst diff --git a/doc/links_names.txt b/doc/links_names.txt index 4cf07795f7..1a51a6dea3 100644 --- a/doc/links_names.txt +++ b/doc/links_names.txt @@ -74,7 +74,7 @@ .. _EPD: http://www.enthought.com/products/epd.php .. _Traits: http://code.enthought.com/projects/traits/ .. _Miniconda: https://conda.io/miniconda.html -.. _neurodocker: https://github.com/kaczmarj/neurodocker +.. 
_NeuroDocker: https://github.com/kaczmarj/neurodocker .. Python imaging projects .. _PyMVPA: http://www.pymvpa.org diff --git a/doc/sphinxext/README.txt b/doc/sphinxext/README.txt deleted file mode 100644 index 08bcbe9a60..0000000000 --- a/doc/sphinxext/README.txt +++ /dev/null @@ -1,16 +0,0 @@ -=================== - Sphinx Extensions -=================== - -We've copied these sphinx extensions over from nipy-core. Any edits -should be done upstream in nipy-core, not here in nipype! - -These a are a few sphinx extensions we are using to build the nipy -documentation. In this file we list where they each come from, since we intend -to always push back upstream any modifications or improvements we make to them. - -* From numpy: - * numpy_ext - -* From ipython - * ipython_console_highlighting diff --git a/doc/sphinxext/autosummary_generate.py b/doc/sphinxext/autosummary_generate.py deleted file mode 100755 index 658c50e4a4..0000000000 --- a/doc/sphinxext/autosummary_generate.py +++ /dev/null @@ -1,240 +0,0 @@ -#!/usr/bin/env python -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -r""" -autosummary_generate.py OPTIONS FILES - -Generate automatic RST source files for items referred to in -autosummary:: directives. - -Each generated RST file contains a single auto*:: directive which -extracts the docstring of the referred item. - -Example Makefile rule:: - - generate: - ./ext/autosummary_generate.py -o source/generated source/*.rst - -""" -from __future__ import print_function, unicode_literals -from builtins import open - -import re -import inspect -import os -import optparse -import pydoc -from autosummary import import_by_name - -try: - from phantom_import import import_phantom_module -except ImportError: - import_phantom_module = lambda x: x - - -def main(): - p = optparse.OptionParser(__doc__.strip()) - p.add_option("-p", "--phantom", action="store", type="string", - dest="phantom", default=None, - help="Phantom import modules from a file") - p.add_option("-o", "--output-dir", action="store", type="string", - dest="output_dir", default=None, - help=("Write all output files to the given directory " - "(instead of writing them as specified in the " - "autosummary:: directives)")) - options, args = p.parse_args() - - if len(args) == 0: - p.error("wrong number of arguments") - - if options.phantom and os.path.isfile(options.phantom): - import_phantom_module(options.phantom) - - # read - names = {} - for name, loc in list(get_documented(args).items()): - for (filename, sec_title, keyword, toctree) in loc: - if toctree is not None: - path = os.path.join(os.path.dirname(filename), toctree) - names[name] = os.path.abspath(path) - - # write - for name, path in sorted(names.items()): - if options.output_dir is not None: - path = options.output_dir - - if not os.path.isdir(path): - os.makedirs(path) - - try: - obj, name = import_by_name(name) - except ImportError as e: - print("Failed to import '%s': %s" % (name, e)) - continue - - fn = os.path.join(path, '%s.rst' % name) - - if os.path.exists(fn): - # skip - continue - - f = open(fn, 'w') - - try: - f.write('%s\n%s\n\n' % (name, '=' * len(name))) - - if inspect.isclass(obj): - if issubclass(obj, Exception): - f.write(format_modulemember(name, 'autoexception')) - else: - f.write(format_modulemember(name, 'autoclass')) - elif inspect.ismodule(obj): - f.write(format_modulemember(name, 'automodule')) - elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj): - 
f.write(format_classmember(name, 'automethod')) - elif callable(obj): - f.write(format_modulemember(name, 'autofunction')) - elif hasattr(obj, '__get__'): - f.write(format_classmember(name, 'autoattribute')) - else: - f.write(format_modulemember(name, 'autofunction')) - finally: - f.close() - - -def format_modulemember(name, directive): - parts = name.split('.') - mod, name = '.'.join(parts[:-1]), parts[-1] - return ".. currentmodule:: %s\n\n.. %s:: %s\n" % (mod, directive, name) - - -def format_classmember(name, directive): - parts = name.split('.') - mod, name = '.'.join(parts[:-2]), '.'.join(parts[-2:]) - return ".. currentmodule:: %s\n\n.. %s:: %s\n" % (mod, directive, name) - - -def get_documented(filenames): - """ - Find out what items are documented in source/*.rst - See `get_documented_in_lines`. - - """ - documented = {} - for filename in filenames: - f = open(filename, 'r') - lines = f.read().splitlines() - documented.update(get_documented_in_lines(lines, filename=filename)) - f.close() - return documented - - -def get_documented_in_docstring(name, module=None, filename=None): - """ - Find out what items are documented in the given object's docstring. - See `get_documented_in_lines`. - - """ - try: - obj, real_name = import_by_name(name) - lines = pydoc.getdoc(obj).splitlines() - return get_documented_in_lines(lines, module=name, filename=filename) - except AttributeError: - pass - except ImportError as e: - print("Failed to import '%s': %s" % (name, e)) - return {} - - -def get_documented_in_lines(lines, module=None, filename=None): - """ - Find out what items are documented in the given lines - - Returns - ------- - documented : dict of list of (filename, title, keyword, toctree) - Dictionary whose keys are documented names of objects. - The value is a list of locations where the object was documented. - Each location is a tuple of filename, the current section title, - the name of the directive, and the value of the :toctree: argument - (if present) of the directive. - - """ - title_underline_re = re.compile("^[-=*_^#]{3,}\s*$") - autodoc_re = re.compile( - ".. 
auto(function|method|attribute|class|exception|module)::" - "\s*([A-Za-z0-9_.]+)\s*$") - autosummary_re = re.compile(r'^\.\.\s+autosummary::\s*') - module_re = re.compile( - r'^\.\.\s+(current)?module::\s*([a-zA-Z0-9_.]+)\s*$') - autosummary_item_re = re.compile(r'^\s+([_a-zA-Z][a-zA-Z0-9_.]*)\s*.*?') - toctree_arg_re = re.compile(r'^\s+:toctree:\s*(.*?)\s*$') - - documented = {} - - current_title = [] - last_line = None - toctree = None - current_module = module - in_autosummary = False - - for line in lines: - try: - if in_autosummary: - m = toctree_arg_re.match(line) - if m: - toctree = m.group(1) - continue - - if line.strip().startswith(':'): - continue # skip options - - m = autosummary_item_re.match(line) - if m: - name = m.group(1).strip() - if current_module and not name.startswith( - current_module + '.'): - name = "%s.%s" % (current_module, name) - documented.setdefault(name, []).append( - (filename, current_title, 'autosummary', toctree)) - continue - if line.strip() == '': - continue - in_autosummary = False - - m = autosummary_re.match(line) - if m: - in_autosummary = True - continue - - m = autodoc_re.search(line) - if m: - name = m.group(2).strip() - if m.group(1) == "module": - current_module = name - documented.update(get_documented_in_docstring( - name, filename=filename)) - elif current_module and not name.startswith( - current_module + '.'): - name = "%s.%s" % (current_module, name) - documented.setdefault(name, []).append( - (filename, current_title, "auto" + m.group(1), None)) - continue - - m = title_underline_re.match(line) - if m and last_line: - current_title = last_line.strip() - continue - - m = module_re.match(line) - if m: - current_module = m.group(2) - continue - finally: - last_line = line - - return documented - - -if __name__ == "__main__": - main() diff --git a/doc/sphinxext/ipython_console_highlighting.py b/doc/sphinxext/ipython_console_highlighting.py deleted file mode 100644 index a400d3c9c1..0000000000 --- a/doc/sphinxext/ipython_console_highlighting.py +++ /dev/null @@ -1,101 +0,0 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""reST directive for syntax-highlighting ipython interactive sessions. -""" - -# ----------------------------------------------------------------------------- -# Needed modules - -# Standard library -import re - -# Third party -from pygments.lexer import Lexer, do_insertions -from pygments.lexers.agile import (PythonConsoleLexer, PythonLexer, - PythonTracebackLexer) -from pygments.token import Comment, Generic - -from sphinx import highlighting - - -# ----------------------------------------------------------------------------- -# Global constants -line_re = re.compile('.*?\n') - -# ----------------------------------------------------------------------------- -# Code begins - classes and functions - - -class IPythonConsoleLexer(Lexer): - """ - For IPython console output or doctests, such as: - - .. sourcecode:: ipython - - In [1]: a = 'foo' - - In [2]: a - Out[2]: 'foo' - - In [3]: print a - foo - - In [4]: 1 / 0 - - Notes: - - - Tracebacks are not currently supported. - - - It assumes the default IPython prompts, not customized ones. 
- """ - - name = 'IPython console session' - aliases = ['ipython'] - mimetypes = ['text/x-ipython-console'] - input_prompt = re.compile("(In \[[0-9]+\]: )|( \.\.\.+:)") - output_prompt = re.compile("(Out\[[0-9]+\]: )|( \.\.\.+:)") - continue_prompt = re.compile(" \.\.\.+:") - tb_start = re.compile("\-+") - - def get_tokens_unprocessed(self, text): - pylexer = PythonLexer(**self.options) - - curcode = '' - insertions = [] - for match in line_re.finditer(text): - line = match.group() - input_prompt = self.input_prompt.match(line) - continue_prompt = self.continue_prompt.match(line.rstrip()) - output_prompt = self.output_prompt.match(line) - if line.startswith("#"): - insertions.append((len(curcode), - [(0, Comment, line)])) - elif input_prompt is not None: - insertions.append((len( - curcode), [(0, Generic.Prompt, input_prompt.group())])) - curcode += line[input_prompt.end():] - elif continue_prompt is not None: - insertions.append((len( - curcode), [(0, Generic.Prompt, continue_prompt.group())])) - curcode += line[continue_prompt.end():] - elif output_prompt is not None: - insertions.append((len( - curcode), [(0, Generic.Output, output_prompt.group())])) - curcode += line[output_prompt.end():] - else: - if curcode: - for item in do_insertions(insertions, - pylexer.get_tokens_unprocessed( - curcode)): - yield item - curcode = '' - insertions = [] - yield match.start(), Generic.Output, line - if curcode: - for item in do_insertions(insertions, - pylexer.get_tokens_unprocessed(curcode)): - yield item - -# ----------------------------------------------------------------------------- -# Register the extension as a valid pygments lexer -highlighting.lexers['ipython'] = IPythonConsoleLexer() diff --git a/doc/users/install.rst b/doc/users/install.rst index 3a710088e9..a16d41c5df 100644 --- a/doc/users/install.rst +++ b/doc/users/install.rst @@ -16,7 +16,7 @@ image from Docker hub:: docker pull nipype/nipype You may also build custom docker containers with specific versions of software -using Neurodocker_ (see the `Neurodocker tutorial +using NeuroDocker_ (see the `Neurodocker tutorial `_). Using conda @@ -61,7 +61,7 @@ listed below:: Debian and Ubuntu ~~~~~~~~~~~~~~~~~ -Add the `NeuroDebian `_ repository and install +Add the NeuroDebian_ repository and install the ``python-nipype`` package using ``apt-get`` or your favorite package manager. @@ -111,7 +111,7 @@ Interface Dependencies Nipype provides wrappers around many neuroimaging tools and contains some algorithms. These tools will need to be installed for Nipype to run. You can create containers with different versions of these tools installed using -Neurodocker_ (see the :doc:`neurodocker`). +NeuroDocker_. Installation for developers --------------------------- diff --git a/doc/version.rst b/doc/version.rst index c795d991c6..35e3e0a60f 100644 --- a/doc/version.rst +++ b/doc/version.rst @@ -1,6 +1,4 @@ .. _version: -.. htmlonly:: - - :Release: |version| - :Date: |today| +:Release: |version| +:Date: |today| diff --git a/nipype/__init__.py b/nipype/__init__.py index 76b2ba58f9..74c6a42dd7 100644 --- a/nipype/__init__.py +++ b/nipype/__init__.py @@ -67,10 +67,12 @@ def get_info(): def check_latest_version(raise_exception=False): - """Check for the latest version of the library + """ + Check for the latest version of the library. 
- parameters: - raise_exception: boolean + Parameters + ---------- + raise_exception: bool Raise a RuntimeError if a bad version is being used """ import etelemetry diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index f7fcb6dab1..4be8eb232b 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Common graph operations for execution -""" +"""Common graph operations for execution.""" import sys from copy import deepcopy from glob import glob @@ -23,10 +22,7 @@ class PluginBase(object): - """ - Base class for plugins - - """ + """Base class for plugins.""" def __init__(self, plugin_args=None): if plugin_args is None: @@ -37,15 +33,20 @@ def __init__(self, plugin_args=None): def run(self, graph, config, updatehash=False): """ + Instruct the plugin to execute the workflow graph. + The core plugin member that should be implemented by all plugins. - graph: a networkx, flattened :abbr:`DAG (Directed Acyclic Graph)` - to be executed - - config: a nipype.config object - - updatehash: + Parameters + ---------- + graph : + a networkx, flattened :abbr:`DAG (Directed Acyclic Graph)` + to be executed + config : :obj:`~nipype.config` + a nipype.config object + updatehash : :obj:`bool` + whether cached nodes with stale hash should be just updated. """ raise NotImplementedError @@ -55,19 +56,7 @@ class DistributedPluginBase(PluginBase): """ Execute workflow with a distribution engine - Relevant class attributes - ------------------------- - - procs: list (N) of underlying interface elements to be processed - proc_done: a boolean numpy array (N,) signifying whether a process has been - submitted for execution - proc_pending: a boolean numpy array (N,) signifying whether a - process is currently running. - depidx: a boolean matrix (NxN) storing the dependency structure accross - processes. Process dependencies are derived from each column. - - Combinations of ``proc_done`` and ``proc_pending`` - -------------------------------------------------- + Combinations of ``proc_done`` and ``proc_pending``: +------------+---------------+--------------------------------+ | proc_done | proc_pending | outcome | @@ -80,6 +69,21 @@ class DistributedPluginBase(PluginBase): +------------+---------------+--------------------------------+ | False | True | INVALID COMBINATION | +------------+---------------+--------------------------------+ + + Attributes + ---------- + procs : :obj:`list` + list (N) of underlying interface elements to be processed + proc_done : :obj:`numpy.ndarray` + a boolean numpy array (N,) signifying whether a process has been + submitted for execution + proc_pending : :obj:`numpy.ndarray` + a boolean numpy array (N,) signifying whether a + process is currently running. + depidx : :obj:`numpy.matrix` + a boolean matrix (NxN) storing the dependency structure accross + processes. Process dependencies are derived from each column. 
+ """ def __init__(self, plugin_args=None): diff --git a/nipype/utils/config.py b/nipype/utils/config.py index 999ee76307..6f1b385672 100644 --- a/nipype/utils/config.py +++ b/nipype/utils/config.py @@ -71,7 +71,7 @@ [check] interval = 1209600 -""".format +""" def mkdir_p(path): @@ -130,7 +130,7 @@ def cwd(self): def set_default_config(self): """Read default settings template and set into config object""" - default_cfg = DEFAULT_CONFIG_TPL( + default_cfg = DEFAULT_CONFIG_TPL.format( log_dir=os.path.expanduser("~"), # Get $HOME in a platform-agnostic way crashdump_dir=self.cwd, # Read cached cwd ) diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index b0d6c4a0c1..735cc610b6 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -284,7 +284,8 @@ def _generate_cifs_table(): def on_cifs(fname): - """ Checks whether a file path is on a CIFS filesystem mounted in a POSIX + """ + Checks whether a file path is on a CIFS filesystem mounted in a POSIX host (i.e., has the ``mount`` command). On Windows, Docker mounts host directories into containers through CIFS @@ -292,9 +293,10 @@ def on_cifs(fname): the CIFS driver exposes to the OS as symlinks. We have found that under concurrent access to the filesystem, this feature can result in failures to create or read recently-created symlinks, - leading to inconsistent behavior and ``FileNotFoundError``s. + leading to inconsistent behavior and ``FileNotFoundError``. This check is written to support disabling symlinks on CIFS shares. + """ # Only the first match (most recent parent) counts for fspath, fstype in _cifs_table: diff --git a/nipype/utils/nipype2boutiques.py b/nipype/utils/nipype2boutiques.py index 4013714bc2..0a12e59f28 100644 --- a/nipype/utils/nipype2boutiques.py +++ b/nipype/utils/nipype2boutiques.py @@ -34,25 +34,42 @@ def generate_boutiques_descriptor( tags=None, ): """ - Returns a JSON string containing a JSON Boutiques description of a - Nipype interface. - Arguments: - * module: module where the Nipype interface is declared. - * interface_name: name of Nipype interface. - * container_image: name of the container image where the tool is installed - * container_type: type of container image (Docker or Singularity) - * container_index: optional index where the image is available - * verbose: print information messages - * save: True if you want to save descriptor to a file - * save_path: file path for the saved descriptor (defaults to name of the + Generate a JSON Boutiques description of a Nipype interface. + + Arguments + --------- + module : + module where the Nipype interface is declared. + interface_name : + name of Nipype interface. + container_image : + name of the container image where the tool is installed + container_type : + type of container image (Docker or Singularity) + container_index : + optional index where the image is available + verbose : + print information messages + save : + True if you want to save descriptor to a file + save_path : + file path for the saved descriptor (defaults to name of the interface in current directory) - * author: author of the tool (required for publishing) - * ignore_inputs: list of interface inputs to not include in the descriptor - * tags: JSON object containing tags to include in the descriptor, - e.g. 
"{\"key1\": \"value1\"}" (note: the tags 'domain:neuroinformatics' - and 'interface-type:nipype' are included by default) - """ + author : + author of the tool (required for publishing) + ignore_inputs : + list of interface inputs to not include in the descriptor + tags : + JSON object containing tags to include in the descriptor, + e.g. ``{"key1": "value1"}`` (note: the tags 'domain:neuroinformatics' + and 'interface-type:nipype' are included by default) + + Returns + ------- + boutiques : str + string containing a Boutiques' JSON object + """ if not module: raise Exception("Undefined module.") diff --git a/tools/README b/tools/README deleted file mode 100644 index 8d987d4e00..0000000000 --- a/tools/README +++ /dev/null @@ -1,15 +0,0 @@ -============== - Nipype Tools -============== - -This directory contains various tools used by the nipype developers. -Only install tools here that are unique to the nipype project. Any -tools shared with our parent project, nipy, should go in the -nipy/tools directory. - -Exceptions ----------- - -* apigen.py: This is not importable from nipy, so I copied it. -* build_modref_templates.py: This was copied and modified to work with nipype. - diff --git a/tools/build_interface_docs.py b/tools/build_interface_docs.py index d21d19428a..f42adc7904 100755 --- a/tools/build_interface_docs.py +++ b/tools/build_interface_docs.py @@ -26,22 +26,25 @@ r"\.testing", r"\.caching", r"\.scripts", + r"\.sphinxext$", + r"\.workflows" ] # Modules that should not be included in generated API docs. docwriter.module_skip_patterns += [ - r"\.version$", + r"\.conftest", r"\.interfaces\.base$", r"\.interfaces\.matlab$", - r"\.interfaces\.rest$", r"\.interfaces\.pymvpa$", + r"\.interfaces\.rest$", r"\.interfaces\.slicer\.generate_classes$", r"\.interfaces\.spm\.base$", r"\.interfaces\.traits", r"\.pipeline\.alloy$", r"\.pipeline\.s3_node_wrapper$", - r"\.testing", + r"\.pkg_info" r"\.scripts", - r"\.conftest", + r"\.testing", + r"\.version$", ] docwriter.class_skip_patterns += [ "AFNICommand", @@ -52,12 +55,12 @@ "^SPM", "Tester", "Spec$", - "Numpy" + "Numpy", # NipypeTester raises an # exception when instantiated in # InterfaceHelpWriter.generate_api_doc "NipypeTester", ] docwriter.write_api_docs(outdir) - docwriter.write_index(outdir, "gen", relative_to="interfaces") + # docwriter.write_index(outdir, "gen") print("%d files written" % len(docwriter.written_modules)) diff --git a/tools/build_modref_templates.py b/tools/build_modref_templates.py deleted file mode 100755 index 8a4c480f51..0000000000 --- a/tools/build_modref_templates.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""Script to auto-generate our API docs. -""" -# stdlib imports -import os -import sys - -# ***************************************************************************** -if __name__ == "__main__": - nipypepath = os.path.abspath("..") - sys.path.insert(1, nipypepath) - package = "nipype" - # local imports - from apigen import ApiDocWriter - - outdir = os.path.join("api", "generated") - docwriter = ApiDocWriter(package) - # Packages that should not be included in generated API docs. - docwriter.package_skip_patterns += [ - "\.external$", - "\.utils$", - "\.interfaces\.", - "\.workflows$", - "\.pipeline\.plugins$", - "\.testing$", - "\.fixes$", - "\.algorithms$", - "\.scripts$", - ] - # Modules that should not be included in generated API docs. 
- docwriter.module_skip_patterns += [ - "\.version$", - "info", - "\.interfaces\.(?!(base|matlab))", - "\.pipeline\.utils$", - "\.interfaces\.slicer\.generate_classes$", - "\.interfaces\.pymvpa$", - "\.scripts$", - ] - docwriter.write_api_docs(outdir) - docwriter.write_index(outdir, "gen", relative_to="api") - print("%d files written" % len(docwriter.written_modules)) diff --git a/tools/update_changes.sh b/tools/update_changes.sh index 1ba3528b1f..7a12a2d1b4 100755 --- a/tools/update_changes.sh +++ b/tools/update_changes.sh @@ -13,7 +13,7 @@ set -u # Treat unset variables as an error when substituting. set -x # Print command traces before executing command. ROOT=$( git rev-parse --show-toplevel ) -CHANGES=$ROOT/doc/changelog/1.X.X-changelog +CHANGES=$ROOT/doc/changelog/1.X.X-changelog.rst # Check whether the Upcoming release header is present head -1 $CHANGES | grep -q Upcoming From c68e3b9e1792d23a9db15be099e5548e09f1c66e Mon Sep 17 00:00:00 2001 From: oesteban Date: Fri, 20 Dec 2019 17:43:57 -0800 Subject: [PATCH 04/48] fix: last release changelog --- doc/changelog/1.X.X-changelog.rst | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst index 0e0d661f61..3af9ed8ca4 100644 --- a/doc/changelog/1.X.X-changelog.rst +++ b/doc/changelog/1.X.X-changelog.rst @@ -1,7 +1,6 @@ 1.4.0 (December 20, 2019) ========================= - -##### [Full changelog](https://github.com/nipy/nipype/milestone/37?closed=1) +(`Full changelog `__) * FIX: Mark strings containing regex escapes as raw (https://github.com/nipy/nipype/pull/3106) * ENH: Pacify DeprecationWarnings caused by nibabel 3 pre-release (https://github.com/nipy/nipype/pull/3099) @@ -16,16 +15,13 @@ * MNT: Fix Dorota Jarecka ORCID (https://github.com/nipy/nipype/pull/3100) * MNT: Drop Python 2 support (https://github.com/nipy/nipype/pull/2654) - 1.3.1 (November 12, 2019) ========================= - * FIX: Restore checking traits or bunch (https://github.com/nipy/nipype/pull/3094) 1.3.0 (November 11, 2019) ========================= - (`Full changelog `__) * FIX: Fixed typo in QwarpInputSpec Trait description (https://github.com/nipy/nipype/pull/3079) From 48f78d0b40a7382b94a609f545f5ad62d5641586 Mon Sep 17 00:00:00 2001 From: oesteban Date: Fri, 20 Dec 2019 17:50:19 -0800 Subject: [PATCH 05/48] enh: accept a milestone number in the ``update_changes.sh`` script --- tools/update_changes.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tools/update_changes.sh b/tools/update_changes.sh index 7a12a2d1b4..387ec3442f 100755 --- a/tools/update_changes.sh +++ b/tools/update_changes.sh @@ -23,7 +23,10 @@ UPCOMING=$? 
HEADER="$1 ($(date '+%B %d, %Y'))" echo $HEADER >> newchanges echo $( printf "%${#HEADER}s" | tr " " "=" ) >> newchanges -echo "" >> newchanges + +if [[ "x$2" != "x" ]]; then + echo "(\`Full changelog \`__)" >> newchanges +fi # Search for PRs since previous release git log --grep="Merge pull request" `git describe --tags --abbrev=0`..HEAD --pretty='format: * %b %s' | sed 's+Merge pull request \#\([^\d]*\)\ from\ .*+(https://github.com/nipy/nipype/pull/\1)+' >> newchanges From 0ce15de2f6a6985d6d97d3773bb53cad4bac3d08 Mon Sep 17 00:00:00 2001 From: oesteban Date: Fri, 20 Dec 2019 17:54:38 -0800 Subject: [PATCH 06/48] fix: remove unnecessary line [skip ci] --- doc/Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/Makefile b/doc/Makefile index 25acfeb122..2c96edd38b 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -5,7 +5,6 @@ SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = -PYTHONPATH = $(PWD) # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 From a51b95a64da83141591ddb93aa9dc4cd9d28ffd6 Mon Sep 17 00:00:00 2001 From: oesteban Date: Sat, 21 Dec 2019 00:07:57 -0800 Subject: [PATCH 07/48] DOC: Documentation overhaul Building on top of #3119 and #3129, this PR makes a deep revision of the documentation: * Added a new ``build_docs`` job to CircleCI to test how it renders. * Minimized external machinery (under ``/tools/``) when building the documentation: 1. Some minimal modifications to sphinx extensions (apidoc, napoleon) allow the generation of special documentation for nipype interfaces, as it used to be before this PR 2. A new sphinx extension (``nipype.sphinxext.apidoc``) takes care of parsing and rendering inputs and outputs. They now look like the parameters/arguments of functions when formatted with numpydoc. * Revised the description of many interfaces and the documentation of the main class and the input/output specs. * Revised the structure of the navbar, separating out User-Guide/Examples, Interfaces-Index, and Devs' documentation. * Minimized the number of WARNINGS at documentation build to 5 (4 of them coming out from the auto-generated SEM tools). 
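As a minimal sketch of what this rendering amounts to (a hypothetical
helper, not the extension's actual implementation), an interface's input
spec can be flattened into a numpydoc-style ``Parameters`` section::

    from nipype.interfaces import fsl

    def spec_to_numpydoc(interface_class):
        """Sketch: flatten an input spec into a numpy-style section."""
        spec = interface_class.input_spec()
        lines = ['Parameters', '----------']
        for name, trait in sorted(spec.traits(transient=None).items()):
            # ``trait_type`` gives the trait class (File, Bool, ...);
            # ``desc`` carries the help string attached to the trait.
            lines.append('%s : %s' % (name, trait.trait_type.__class__.__name__))
            lines.append('    %s' % (trait.desc or ''))
        return '\n'.join(lines)

    print(spec_to_numpydoc(fsl.BET))

The actual extension wires this kind of formatting into Sphinx's autodoc
pipeline rather than printing it.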
--- .circleci/config.yml | 47 + doc/Makefile | 31 +- doc/README.txt | 28 - doc/_templates/navbar.html | 4 +- doc/about.rst | 2 + doc/api/index.rst | 15 - doc/changes.rst | 1 + doc/conf.py | 20 +- doc/developers.rst | 17 + doc/documentation.rst | 73 - doc/examples.rst | 19 + doc/interfaces.rst | 130 ++ doc/interfaces/.gitignore | 1 - doc/interfaces/index.rst | 9 - doc/links_names.txt | 1 + doc/make.bat | 112 -- doc/quickstart.rst | 2 + doc/requirements.txt | 8 + doc/searchresults.rst | 2 + doc/version.rst | 2 + examples/dmri_connectivity.py | 5 +- examples/dmri_preprocessing.py | 12 + examples/fmri_openfmri.py | 0 examples/fmri_spm_auditory.py | 112 +- examples/fmri_spm_dartel.py | 49 +- examples/fmri_spm_face.py | 42 +- examples/fmri_spm_nested.py | 48 +- nipype/__init__.py | 11 +- nipype/algorithms/confounds.py | 75 +- nipype/algorithms/misc.py | 55 +- nipype/algorithms/modelgen.py | 182 ++- nipype/info.py | 11 +- nipype/interfaces/afni/preprocess.py | 1235 +++++++++-------- nipype/interfaces/afni/utils.py | 529 ++++--- nipype/interfaces/ants/registration.py | 21 +- nipype/interfaces/ants/segmentation.py | 118 +- nipype/interfaces/ants/visualization.py | 18 +- nipype/interfaces/base/core.py | 52 +- nipype/interfaces/base/specs.py | 5 +- nipype/interfaces/base/support.py | 13 +- nipype/interfaces/brainsuite/brainsuite.py | 33 +- nipype/interfaces/c3.py | 4 +- nipype/interfaces/camino/calib.py | 38 +- nipype/interfaces/camino/connectivity.py | 7 +- nipype/interfaces/camino/dti.py | 2 +- nipype/interfaces/camino/odf.py | 37 +- nipype/interfaces/camino2trackvis/__init__.py | 4 +- nipype/interfaces/camino2trackvis/convert.py | 4 +- nipype/interfaces/cmtk/__init__.py | 1 + nipype/interfaces/cmtk/cmtk.py | 19 +- nipype/interfaces/cmtk/nbs.py | 7 +- nipype/interfaces/cmtk/nx.py | 3 +- nipype/interfaces/cmtk/parcellation.py | 30 +- nipype/interfaces/dcm2nii.py | 3 +- nipype/interfaces/dcmstack.py | 3 +- .../interfaces/diffusion_toolkit/__init__.py | 1 + nipype/interfaces/diffusion_toolkit/dti.py | 81 +- nipype/interfaces/diffusion_toolkit/odf.py | 170 ++- nipype/interfaces/dipy/__init__.py | 1 + nipype/interfaces/dipy/tensors.py | 16 +- nipype/interfaces/dtitk/__init__.py | 9 +- nipype/interfaces/dtitk/utils.py | 96 +- nipype/interfaces/dynamic_slicer.py | 6 +- nipype/interfaces/elastix/__init__.py | 4 +- nipype/interfaces/freesurfer/__init__.py | 2 +- nipype/interfaces/freesurfer/model.py | 34 +- nipype/interfaces/freesurfer/preprocess.py | 31 +- nipype/interfaces/freesurfer/registration.py | 3 +- nipype/interfaces/freesurfer/utils.py | 97 +- nipype/interfaces/fsl/__init__.py | 7 +- nipype/interfaces/fsl/aroma.py | 2 +- nipype/interfaces/fsl/epi.py | 4 +- nipype/interfaces/io.py | 451 +++--- nipype/interfaces/matlab.py | 2 +- nipype/interfaces/meshfix.py | 3 +- nipype/interfaces/minc/__init__.py | 4 +- nipype/interfaces/minc/minc.py | 7 +- nipype/interfaces/mipav/__init__.py | 1 + nipype/interfaces/mipav/developer.py | 226 +-- nipype/interfaces/mne/__init__.py | 1 + nipype/interfaces/mrtrix/__init__.py | 1 + nipype/interfaces/mrtrix/tracking.py | 2 +- nipype/interfaces/mrtrix3/__init__.py | 2 +- nipype/interfaces/mrtrix3/tracking.py | 10 +- nipype/interfaces/niftyfit/__init__.py | 8 +- nipype/interfaces/niftyreg/__init__.py | 4 +- nipype/interfaces/niftyseg/maths.py | 288 ++-- nipype/interfaces/niftyseg/stats.py | 136 +- nipype/interfaces/nilearn.py | 4 +- nipype/interfaces/nipy/__init__.py | 1 + nipype/interfaces/nitime/__init__.py | 2 +- nipype/interfaces/nitime/analysis.py | 16 +- 
nipype/interfaces/petpvc.py | 108 +- nipype/interfaces/quickshear.py | 3 +- nipype/interfaces/semtools/__init__.py | 1 + nipype/interfaces/slicer/__init__.py | 6 + nipype/interfaces/spm/__init__.py | 3 +- nipype/interfaces/spm/model.py | 81 +- nipype/interfaces/spm/preprocess.py | 114 +- nipype/interfaces/vista/__init__.py | 1 + nipype/interfaces/vista/vista.py | 8 +- nipype/interfaces/workbench/__init__.py | 2 +- nipype/interfaces/workbench/base.py | 10 +- nipype/sphinxext/apidoc/__init__.py | 188 +++ nipype/sphinxext/apidoc/docstring.py | 159 +++ nipype/sphinxext/documenter.py | 72 + nipype/sphinxext/gh.py | 32 + rtd_requirements.txt | 20 - tools/apigen.py | 730 ---------- tools/build_interface_docs.py | 66 - tools/ex2rst | 6 +- tools/github.py | 109 -- tools/make_examples.py | 16 +- 113 files changed, 3126 insertions(+), 3654 deletions(-) delete mode 100644 doc/README.txt delete mode 100644 doc/api/index.rst create mode 100644 doc/developers.rst delete mode 100644 doc/documentation.rst create mode 100644 doc/examples.rst create mode 100644 doc/interfaces.rst delete mode 100644 doc/interfaces/.gitignore delete mode 100644 doc/interfaces/index.rst delete mode 100644 doc/make.bat create mode 100644 doc/requirements.txt delete mode 100755 examples/fmri_openfmri.py create mode 100644 nipype/sphinxext/apidoc/__init__.py create mode 100644 nipype/sphinxext/apidoc/docstring.py create mode 100644 nipype/sphinxext/documenter.py create mode 100644 nipype/sphinxext/gh.py delete mode 100644 rtd_requirements.txt delete mode 100644 tools/apigen.py delete mode 100755 tools/build_interface_docs.py delete mode 100644 tools/github.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 3eed619d56..5c889193ec 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -394,6 +394,35 @@ jobs: ssh-add ~/.ssh/id_ed25519 /home/circleci/nipype/tools/feedstock.sh + build_docs: + docker: + - image: python:3.7.4 + working_directory: /tmp/src/nipype + environment: + - FSLOUTPUTTYPE: 'NIFTI' + steps: + - checkout + name: Check Python version and upgrade pip + command: | + python --version + python -m pip install -U pip + - run: + name: Install graphviz + command: | + apt-get update + apt-get install -y graphviz + - run: + name: Install Requirements (may contain pinned versions) + command: python -m pip install -r docs/requirements.txt + - run: + name: Install NiPype + command: python -m pip install ".[doc]" + - run: + name: Build documentation + command: make -C doc html + - store_artifacts: + path: /tmp/src/nipype/doc/_build/html + workflows: version: 2 build_test_deploy: @@ -406,21 +435,39 @@ workflows: only: /.*/ - compare_base_dockerfiles: filters: + branches: + ignore: + - /docs?\/.*/ tags: only: /.*/ - test_pytest: filters: + branches: + ignore: + - /docs?\/.*/ tags: only: /.*/ requires: - compare_base_dockerfiles - test_fmri_fsl_spm: + filters: + branches: + ignore: + - /docs?\/.*/ requires: - compare_base_dockerfiles - test_fmri_spm_dartel_multiproc: + filters: + branches: + ignore: + - /docs?\/.*/ requires: - compare_base_dockerfiles - test_fmri_spm_nested_fsl_feeds: + filters: + branches: + ignore: + - /docs?\/.*/ requires: - compare_base_dockerfiles - deploy_dockerhub: diff --git a/doc/Makefile b/doc/Makefile index 2c96edd38b..85d491a70f 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -11,12 +11,11 @@ PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d _build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
-.PHONY: help clean html nipypeapi htmlonly latex changes linkcheck doctest +.PHONY: help clean html htmlonly latex changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html make the HTML documentation" - @echo " nipypeapi make interface API documents only" @echo " latex make the LaTeX, you can set PAPER=a4 or PAPER=letter" @echo " pdf make and run the PDF generation" @echo " changes make an overview of all changed/added/deprecated" \ @@ -33,20 +32,15 @@ htmlonly: @echo @echo "Build finished. The HTML pages are in _build/html." -nipypeapi: - rm -rf interfaces/generated - python -u ../tools/build_interface_docs.py - @echo "Build API docs finished." - -html: clean examples2rst nipypeapi htmlonly +html: clean examples2rst htmlonly @echo "Build HTML and API finished." -examples2rst: +examples2rst: clean mkdir -p users/examples - ../tools/make_examples.py --no-exec + ../tools/make_examples.py -x ../../../examples/test_spm.py --no-exec @echo "examples2rst finished." -latex: nipypeapi +latex: clean $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex @echo @echo "Build finished; the LaTeX files are in _build/latex." @@ -80,18 +74,3 @@ gitwash-update: --project-url=http://nipy.org/nipype \ --project-ml-url=http://mail.scipy.org/mailman/listinfo/nipy-devel @echo "gitwash updated" - -# Sourceforge doesn't appear to have a way of copying the files -# without specifying a username. So we'll probably have one target -# for each project admin -sf_satra_nightly: html - @echo "Copying html files to sourceforge..." - scp -r _build/html/* satra,nipy@web.sourceforge.net:htdocs/nipype-nightly/ - -sf_satra: html - @echo "Copying html files to sourceforge..." - rsync -auv _build/html/. satra,nipy@web.sourceforge.net:htdocs/nipype/. - -sf_filo: html - @echo "Copying html files to sourceforge..." - rsync -auv _build/html/. gorgolewski,nipy@web.sourceforge.net:htdocs/nipype/. diff --git a/doc/README.txt b/doc/README.txt deleted file mode 100644 index e55d4936cf..0000000000 --- a/doc/README.txt +++ /dev/null @@ -1,28 +0,0 @@ -====================== - Nipype Documentation -====================== - -This directory contains the documentation for the Nipype_ project. -The documentation is written in reST_ (reStructuredText) and uses -Sphinx_ to render html documentation from the rst source files. - -A relatively recent version of Sphinx_ is required to build the -documentation, at least 0.6.x. - -Use the ``Makefile`` to build the documentation. Common commands: - -Discover available make targets:: - - make help - -Clean up previous build:: - - make clean - -Build html documentation:: - - make html - - - - diff --git a/doc/_templates/navbar.html b/doc/_templates/navbar.html index 883bedc56b..fd0216b53f 100644 --- a/doc/_templates/navbar.html +++ b/doc/_templates/navbar.html @@ -9,7 +9,9 @@ Home · Quickstart · -Documentation · +User Guide and Examples · +Interfaces Index · +Developers · About · Nipy diff --git a/doc/about.rst b/doc/about.rst index f9d2831f70..45a6e30229 100644 --- a/doc/about.rst +++ b/doc/about.rst @@ -1,3 +1,5 @@ +:orphan: + .. _about: ===== diff --git a/doc/api/index.rst b/doc/api/index.rst deleted file mode 100644 index 0cc9d87e32..0000000000 --- a/doc/api/index.rst +++ /dev/null @@ -1,15 +0,0 @@ -.. _api-index: - -########################################### -Library API (application program interface) -########################################### - -Information on specific functions, classes, and methods. - -:Release: |version| -:Date: |today| - -.. 
toctree:: - :glob: - - generated/* diff --git a/doc/changes.rst b/doc/changes.rst index 858a907691..3fc9469ef8 100644 --- a/doc/changes.rst +++ b/doc/changes.rst @@ -1,3 +1,4 @@ +:orphan: :tocdepth: 2 .. _changes: diff --git a/doc/conf.py b/doc/conf.py index 45bd46b97b..a93cfe7480 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -16,14 +16,6 @@ from packaging.version import Version import nipype -# if not os.path.exists('users/examples'): -# os.mkdir('users/examples') -# os.system('python ../tools/make_examples.py --no-exec') - -# if os.path.exists('interfaces/generated'): -# rmtree('interfaces/generated') -# os.system('python ../tools/build_interface_docs.py') - # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. @@ -41,9 +33,11 @@ 'sphinx.ext.inheritance_diagram', 'sphinx.ext.todo', 'sphinxcontrib.apidoc', - 'sphinxcontrib.napoleon', 'matplotlib.sphinxext.plot_directive', + 'nbsphinx', 'nipype.sphinxext.plot_workflow', + 'nipype.sphinxext.apidoc', + 'nipype.sphinxext.documenter', ] autodoc_mock_imports = [ @@ -57,6 +51,8 @@ 'skimage', 'svgutils', 'transforms3d', + 'tvtk', + 'vtk' ] # Accept custom section names to be parsed for numpy-style docstrings @@ -68,6 +64,8 @@ ('Inputs', 'Parameters'), ('Outputs', 'Parameters'), ('Attributes', 'Parameters'), + ('Mandatory Inputs', 'Parameters'), + ('Optional Inputs', 'Parameters'), ] @@ -122,8 +120,6 @@ # This pattern also affects html_static_path and html_extra_path. exclude_patterns = [ '_build', 'Thumbs.db', '.DS_Store', - 'api/generated/gen.rst', - 'interfaces/generated/gen.rst' ] # The reST default role (used for this markup: `text`) to use for all documents. @@ -278,10 +274,8 @@ apidoc_output_dir = 'api/generated' apidoc_excluded_paths = [ '*/tests/*', 'tests/*', - 'algorithms/*', 'external/*', 'fixes/*', - 'interfaces/*', 'scripts/*', 'sphinxext/*', 'testing/*', diff --git a/doc/developers.rst b/doc/developers.rst new file mode 100644 index 0000000000..d77fbcc946 --- /dev/null +++ b/doc/developers.rst @@ -0,0 +1,17 @@ +:orphan: + +.. _developers: + +================== +Developer's Corner +================== + +.. toctree:: + :maxdepth: 2 + + devel/index + +.. toctree:: + :maxdepth: 3 + + api/generated/nipype diff --git a/doc/documentation.rst b/doc/documentation.rst deleted file mode 100644 index 1cf275d630..0000000000 --- a/doc/documentation.rst +++ /dev/null @@ -1,73 +0,0 @@ -.. _documentation: - -============= -Documentation -============= - -:Release: |version| -:Date: |today| - -Previous versions: `1.3.0 `_ `1.2.3 `_ - - -.. container:: doc2 - - .. admonition:: Michael Notter's Nipype guide - - Be sure to read `Michael's excellent tutorials `__. - - .. admonition:: Nipype Workflows - - The workflows that used to live as a module under - ``nipype.workflows`` have been migrated to the - new project `NiFlows `__. - - .. admonition:: Interfaces and Examples - - .. hlist:: - :columns: 2 - - * *In-house* interfaces - - .. toctree:: - :maxdepth: 1 - :glob: - - interfaces/generated/*algorithms* - - * Interfaces to third-party tools - - .. toctree:: - :maxdepth: 1 - :glob: - - interfaces/generated/*interfaces* - - * Examples - - .. toctree:: - :maxdepth: 1 - :glob: - - users/examples/* - - .. admonition:: Developer Guides - - .. hlist:: - :columns: 2 - - * API - - .. toctree:: - :maxdepth: 2 - - api/index - - * Developer Guide - - .. 
toctree:: - :maxdepth: 2 - - devel/index - -.. include:: links_names.txt diff --git a/doc/examples.rst b/doc/examples.rst new file mode 100644 index 0000000000..5b645fcace --- /dev/null +++ b/doc/examples.rst @@ -0,0 +1,19 @@ +:orphan: + +.. _examples: + +======================= +User Guide and Examples +======================= + +.. admonition:: Michael Notter's User Guide + + Be sure to read `Michael's excellent tutorials `__. + +Examples +~~~~~~~~ + .. toctree:: + :maxdepth: 1 + :glob: + + users/examples/* diff --git a/doc/interfaces.rst b/doc/interfaces.rst new file mode 100644 index 0000000000..177f08fca4 --- /dev/null +++ b/doc/interfaces.rst @@ -0,0 +1,130 @@ +:orphan: + +.. _interfaces: + +======================== +Interfaces and Workflows +======================== +Workflows +--------- +.. important:: + + The workflows that used to live as a module under + ``nipype.workflows`` have been migrated to the + new project `NiFlows `__. + +Interfaces +---------- +An index of all nipype interfaces is found belows. +Nipype provides some *in-house* interfaces to help with workflow +management tasks, basic image manipulations, and filesystem/storage +interfaces: + + * `"Algorithms" `__ + * `Image manipulation `__ + * `I/O Operations `__ + * `Self-reporting interfaces `__ + * `Utilities `__ + +Nipype provides interfaces for the following **third-party** tools: + + * `AFNI `__ + (Analysis of Functional NeuroImages) is a leading software suite of C, Python, + R programs and shell scripts primarily developed for the analysis and display of + anatomical and functional MRI (fMRI) data. + * `ANTs `__ + (Advanced Normalization ToolS) computes high-dimensional mappings to capture + the statistics of brain structure and function. + * `BrainSuite `__ + is a collection of open source software tools that enable largely + automated processing of magnetic resonance images (MRI) of the human brain. + * `BRU2NII `__ + is a simple tool for converting Bruker ParaVision MRI data to NIfTI. + * `Convert3D `__ + is a command-line tool for converting 3D images between common file formats. + * `Camino `__ + is an open-source software toolkit for diffusion MRI processing. + * `Camino-TrackVis `__ + allows interoperability between Camino and TrackVis. + * `Connectome Mapper (CMP) `__ + implements a full processing pipeline for creating multi-variate and + multi-resolution connectomes with dMRI data. + * `dcm2nii `__ + converts images from the proprietary scanner DICOM format to NIfTI + * `DCMStack `__ + allows series of DICOM images to be stacked into multi-dimensional arrays. + * `Diffusion Toolkit `__ + is a set of command-line tools with a GUI frontend that performs data reconstruction + and fiber tracking on diffusion MR images. + * `DIPY `__ + is a free and open source software project for computational neuroanatomy, + focusing mainly on diffusion magnetic resonance imaging (dMRI) analysis. + * `DTITK `__ + is a spatial normalization and atlas construction toolkit optimized for examining + white matter morphometry using DTI data. + * `Elastix `__ + is a toolbox for rigid and nonrigid registration of images. + * `FreeSurfer `__ + is an open source software suite for processing and analyzing (human) brain MRI images. + * `FSL `__ + is a comprehensive library of analysis tools for fMRI, MRI and DTI brain imaging data. + * Matlab `script wrapper `__ + provides interfaces to integrate matlab scripts within workflows. 
+ * `MeshFix `__ + converts a raw digitized polygon mesh to a clean mesh where all the occurrences + of a specific set of "defects" are corrected. + * `MINC Toolkit `__ + contains the most commonly used tools developed at the McConnell Brain Imaging Centre, + Montreal Neurological Institute. + * `MIPAV (Medical Image Processing, Analysis, and Visualization) `__ + enables quantitative analysis and visualization of medical images of numerous + modalities such as PET, MRI, CT, or microscopy. + * `MNE `__ + is a software for exploring, visualizing, and analyzing human neurophysiological + data: MEG, EEG, sEEG, ECoG, and more. + * MRTrix is a set of tools to perform various types of diffusion MRI analyses, from various + forms of tractography through to next-generation group-level analyses + (`MRTrix3 `__, and the deprecated + `MRTrix version 2 `__). + * Nifty Tools: + `NiftyFit `__ + is a software package for multi-parametric model-fitting of 4D MRI; + `NiftyReg `__ + is an open-source software for efficient medical image registration; and + `NiftySeg `__ + contains programs to perform EM based segmentation of images in NIfTI or Analyze format. + * `NiLearn `__ + is a Python module for fast and easy statistical learning on NeuroImaging data. + * `NiPy `__ + is a Python project for analysis of structural and functional neuroimaging data. + * `Nitime `__ + is a library for time-series analysis of data from neuroscience experiments. + * `PETPVC `__ + is toolbox for :abbr:`PVC (partial volume correction)` of + :abbr:`PET (positron emission tomography)` imaging. + * `QuickShear `__ + uses a skull-stripped version of an anatomical images as a reference to deface the + unaltered anatomical image. + * `SEM Tools `__ + are useful tools for Structural Equation Modeling. + * `SPM `__ + (Statistical Parametric Mapping) is a software package for the analysis of brain + imaging data sequences. + * `VistaSoft `__ + contains Matlab code to perform a variety of analysis on MRI data, including + functional MRI and diffusion MRI. + * `Connectome Workbench `__ + is an open source, freely available visualization and discovery tool used to map neuroimaging data, + especially data generated by the Human Connectome Project. + * `3D Slicer `__ + is an open source software platform for medical image informatics, + image processing, and three-dimensional visualization. + +Index of Interfaces +~~~~~~~~~~~~~~~~~~~ + +.. toctree:: + :maxdepth: 3 + + api/generated/nipype.algorithms + api/generated/nipype.interfaces \ No newline at end of file diff --git a/doc/interfaces/.gitignore b/doc/interfaces/.gitignore deleted file mode 100644 index e324eac91f..0000000000 --- a/doc/interfaces/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/generated diff --git a/doc/interfaces/index.rst b/doc/interfaces/index.rst deleted file mode 100644 index 14deeec063..0000000000 --- a/doc/interfaces/index.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. _interface-index: - -######################### -Interfaces and Algorithms -######################### - -:Release: |version| -:Date: |today| - diff --git a/doc/links_names.txt b/doc/links_names.txt index 1a51a6dea3..5f75721f32 100644 --- a/doc/links_names.txt +++ b/doc/links_names.txt @@ -98,6 +98,7 @@ .. _MNE: https://martinos.org/mne/index.html .. _ANTS: http://stnava.github.io/ANTs/ .. _DIPY: http://dipy.org +.. _BrainSuite: http://brainsuite.org/ .. General software .. 
_gcc: http://gcc.gnu.org diff --git a/doc/make.bat b/doc/make.bat deleted file mode 100644 index aa5985eece..0000000000 --- a/doc/make.bat +++ /dev/null @@ -1,112 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -set SPHINXBUILD=sphinx-build -set ALLSPHINXOPTS=-d _build/doctrees %SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. changes to make an overview over all changed/added/deprecated items - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (_build\*) do rmdir /q /s %%i - del /q /s _build\* - goto end -) - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% _build/html - echo. - echo.Build finished. The HTML pages are in _build/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% _build/dirhtml - echo. - echo.Build finished. The HTML pages are in _build/dirhtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% _build/pickle - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% _build/json - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% _build/htmlhelp - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in _build/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% _build/qthelp - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in _build/qthelp, like this: - echo.^> qcollectiongenerator _build\qthelp\nipype.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile _build\qthelp\nipype.ghc - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% _build/latex - echo. - echo.Build finished; the LaTeX files are in _build/latex. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% _build/changes - echo. - echo.The overview file is in _build/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% _build/linkcheck - echo. - echo.Link check complete; look for any errors in the above output ^ -or in _build/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% _build/doctest - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in _build/doctest/output.txt. - goto end -) - -:end diff --git a/doc/quickstart.rst b/doc/quickstart.rst index ee856d5fb2..60baa443c9 100644 --- a/doc/quickstart.rst +++ b/doc/quickstart.rst @@ -1,3 +1,5 @@ +:orphan: + .. 
_quickstart: ========== diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 0000000000..057147c5b5 --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1,8 @@ +dipy +ipython +matplotlib +nbsphinx +sphinx-argparse +sphinx>=2.1.2 +sphinxcontrib-apidoc +sphinxcontrib-napoleon \ No newline at end of file diff --git a/doc/searchresults.rst b/doc/searchresults.rst index d79eaebfbc..06db60bc00 100644 --- a/doc/searchresults.rst +++ b/doc/searchresults.rst @@ -1,3 +1,5 @@ +:orphan: + .. This displays the search results from the Google Custom Search engine. Don't link to it directly. diff --git a/doc/version.rst b/doc/version.rst index 35e3e0a60f..cbbed6c7d8 100644 --- a/doc/version.rst +++ b/doc/version.rst @@ -1,3 +1,5 @@ +:orphan: + .. _version: :Release: |version| diff --git a/examples/dmri_connectivity.py b/examples/dmri_connectivity.py index fc5b51c362..06d212ebb7 100755 --- a/examples/dmri_connectivity.py +++ b/examples/dmri_connectivity.py @@ -26,9 +26,8 @@ * http://db.tt/1vx4vLeP -Along with `Camino `_, -`Camino-Trackvis `_, `FSL `_, -and `Freesurfer `_, you must also have the Connectome File Format +Along with Camino_, Camino2Trackvis_, FSL_, and FreeSurfer_, +you must also have the Connectome File Format library installed as well as the Connectome Mapper. These are written by Stephan Gerhard and can be obtained from: diff --git a/examples/dmri_preprocessing.py b/examples/dmri_preprocessing.py index 21d594d3b7..1537d2897f 100644 --- a/examples/dmri_preprocessing.py +++ b/examples/dmri_preprocessing.py @@ -130,6 +130,7 @@ """ bias = remove_bias() + """ Connect nodes in workflow ========================= @@ -148,6 +149,7 @@ (prep, bias, [('outputnode.out_file', 'inputnode.in_file'), ('outputnode.out_mask', 'inputnode.in_mask')]), (datasource, bias, [('bvals', 'inputnode.in_bval')])]) + """ Run the workflow as command line executable """ @@ -155,3 +157,13 @@ if __name__ == '__main__': wf.run() wf.write_graph() + +""" +References +---------- + +.. [Jeurissen2014] Jeurissen et al., Multi-tissue constrained spherical deconvolution + for improved analysis of multi-shell diffusion MRI data. + NeuroImage 103:411--426. 2014. + doi:`10.1016/j.neuroimage.2014.07.061 + `__. diff --git a/examples/fmri_openfmri.py b/examples/fmri_openfmri.py deleted file mode 100755 index e69de29bb2..0000000000 diff --git a/examples/fmri_spm_auditory.py b/examples/fmri_spm_auditory.py index e4c690421a..178deb42b4 100755 --- a/examples/fmri_spm_auditory.py +++ b/examples/fmri_spm_auditory.py @@ -8,7 +8,6 @@ Introduction ============ - The fmri_spm_auditory.py recreates the classical workflow described in the `SPM8 manual `_ using auditory dataset that can be downloaded from http://www.fil.ion.ucl.ac.uk/spm/data/auditory/:: @@ -36,18 +35,17 @@ # Set the way matlab should be called mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") + """ + Setting up workflows -------------------- - In this tutorial we will be setting up a hierarchical workflow for spm analysis. This will demonstrate how pre-defined workflows can be setup and shared across users, projects and labs. - Setup preprocessing workflow ---------------------------- - This is a generic preprocessing workflow that can be used by different analyses """ @@ -56,10 +54,10 @@ """We strongly encourage to use 4D files insteead of series of 3D for fMRI analyses for many reasons (cleanness and saving and filesystem inodes are among them). 
However, the the workflow presented in the SPM8 manual which this tutorial is based on -uses 3D files. Therefore we leave converting to 4D as an option. We are using `merge_to_4d` +uses 3D files. Therefore we leave converting to 4D as an option. We are using ``merge_to_4d`` variable, because switching between 3d and 4d requires some additional steps (explauned later on). -Use :class:`nipype.interfaces.fsl.Merge` to merge a series of 3D files along the time -dimension creating a 4d file. +Use :ref:`nipype.interfaces.fsl.utils.Merge` to merge a series +of 3D files along the time dimension creating a 4D file. """ merge_to_4d = True @@ -67,26 +65,28 @@ if merge_to_4d: merge = pe.Node(interface=fsl.Merge(), name="merge") merge.inputs.dimension = "t" -"""Use :class:`nipype.interfaces.spm.Realign` for motion correction -and register all images to the mean image. +"""Use :ref:`nipype.interfaces.spm.preprocess.Realign` +for motion correction and register all images to the mean image. """ realign = pe.Node(interface=spm.Realign(), name="realign") -"""Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid -body registration of the functional data to the structural data. +"""Use :ref:`nipype.interfaces.spm.preprocess.Coregister` +to perform a rigid body registration of the functional data to the structural data. """ coregister = pe.Node(interface=spm.Coregister(), name="coregister") coregister.inputs.jobtype = 'estimate' segment = pe.Node(interface=spm.Segment(), name="segment") + """Uncomment the following line for faster execution """ # segment.inputs.gaussians_per_class = [1, 1, 1, 4] + """Warp functional and structural data to SPM's T1 template using -:class:`nipype.interfaces.spm.Normalize`. The tutorial data set -includes the template image, T1.nii. +:ref:`nipype.interfaces.spm.preprocess.Normalize`. +The tutorial data set includes the template image, T1.nii. """ normalize_func = pe.Node(interface=spm.Normalize(), name="normalize_func") @@ -95,16 +95,17 @@ normalize_struc = pe.Node(interface=spm.Normalize(), name="normalize_struc") normalize_struc.inputs.jobtype = "write" """Smooth the functional data using -:class:`nipype.interfaces.spm.Smooth`. +:ref:`nipype.interfaces.spm.preprocess.Smooth`. """ smooth = pe.Node(interface=spm.Smooth(), name="smooth") -"""`write_voxel_sizes` is the input of the normalize interface that is recommended to be set to -the voxel sizes of the target volume. There is no need to set it manually since we van infer it from data + +"""``write_voxel_sizes`` is the input of the normalize interface that is recommended +to be set to the voxel sizes of the target volume. +There is no need to set it manually since we can infer it from data using the following function: """ - def get_vox_dims(volume): import nibabel as nb from nipype.utils import NUMPY_MMAP @@ -116,8 +117,9 @@ def get_vox_dims(volume): return [float(voxdims[0]), float(voxdims[1]), float(voxdims[2])] -"""Here we are connecting all the nodes together. Notice that we add the merge node only if you choose -to use 4D. Also `get_vox_dims` function is passed along the input volume of normalise to set the optimal +"""Here we are connecting all the nodes together. +Notice that we add the merge node only if you choose to use 4D. +Also ``get_vox_dims`` function is passed along the input volume of normalise to set the optimal voxel sizes. 
""" @@ -137,34 +139,38 @@ def get_vox_dims(volume): 'write_voxel_sizes')]), (normalize_func, smooth, [('normalized_files', 'in_files')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using -:class:`nipype.interfaces.spm.SpecifyModel`. +:ref:`nipype.algorithms.modelgen.SpecifyModel`. """ modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") + """Generate a first level SPM.mat file for analysis -:class:`nipype.interfaces.spm.Level1Design`. +:ref:`nipype.interfaces.spm.model.Level1Design`. """ level1design = pe.Node(interface=spm.Level1Design(), name="level1design") level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} -"""Use :class:`nipype.interfaces.spm.EstimateModel` to determine the -parameters of the model. + +"""Use :ref:`nipype.interfaces.spm.model.EstimateModel` +to determine the parameters of the model. """ level1estimate = pe.Node(interface=spm.EstimateModel(), name="level1estimate") level1estimate.inputs.estimation_method = {'Classical': 1} threshold = pe.Node(interface=spm.Threshold(), name="threshold") -"""Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the -first level contrasts specified in a few steps above. + +"""Use :ref:`nipype.interfaces.spm.model.EstimateContrast` +to estimate the first level contrasts specified in a few steps above. """ contrastestimate = pe.Node( @@ -182,16 +188,18 @@ def get_vox_dims(volume): """ Preproc + Analysis pipeline --------------------------- - """ l1pipeline = pe.Workflow(name='firstlevel') l1pipeline.connect([(preproc, l1analysis, [('realign.realignment_parameters', 'modelspec.realignment_parameters')])]) -"""Pluging in `functional_runs` is a bit more complicated, because model spec expects a list of `runs`. -Every run can be a 4D file or a list of 3D files. Therefore for 3D analysis we need a list of lists and -to make one we need a helper function. + +""" +Pluging in ``functional_runs`` is a bit more complicated, +because model spec expects a list of ``runs``. +Every run can be a 4D file or a list of 3D files. +Therefore for 3D analysis we need a list of lists and to make one we need a helper function. """ if merge_to_4d: @@ -209,8 +217,7 @@ def makelist(item): """ Data specific components ------------------------ - -In this tutorial there is only one subject `M00223`. +In this tutorial there is only one subject ``M00223``. Below we set some variables to inform the ``datasource`` about the layout of our data. We specify the location of the data, the subject @@ -231,7 +238,9 @@ def makelist(item): infosource = pe.Node( interface=util.IdentityInterface(fields=['subject_id']), name="infosource") -"""Here we set up iteration over all the subjects. The following line + +""" +Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that it should repeat the analysis on each of the items in the @@ -241,9 +250,10 @@ def makelist(item): """ infosource.iterables = ('subject_id', subject_list) + """ -Now we create a :class:`nipype.interfaces.io.DataGrabber` object and -fill in the information from above about the layout of our data. The +Now we create a :ref:`nipype.interfaces.io.DataGrabber` +object and fill in the information from above about the layout of our data. 
The
:class:`nipype.pipeline.NodeWrapper` module wraps the interface object
and provides additional housekeeping and pipeline specific
functionality.
"""

datasource = pe.Node(
@@ -257,14 +267,14 @@ def makelist(item):
 datasource.inputs.template = '%s%s/%s%s_%03d.img'
 datasource.inputs.template_args = info
 datasource.inputs.sort_filelist = True
+
 """
 Experimental paradigm specific components
 -----------------------------------------
-
 Here we create a structure that provides information about the
 experimental paradigm. This is used by the
-:class:`nipype.interfaces.spm.SpecifyModel` to create the information
-necessary to generate an SPM design matrix.
+:ref:`nipype.algorithms.modelgen.SpecifyModel`
+to create the information necessary to generate an SPM design matrix.
 """

 from nipype.interfaces.base import Bunch
@@ -272,11 +282,13 @@ def makelist(item):
     Bunch(
         conditions=['Task'], onsets=[list(range(6, 84, 12))], durations=[[6]])
 ]
-"""Setup the contrast structure that needs to be evaluated. This is a
+
+"""
+Setup the contrast structure that needs to be evaluated. This is a
 list of lists. The inner list specifies the contrasts and has the
-following format - [Name,Stat,[list of condition names],[weights on
-those conditions]. The condition names must match the `names` listed
-in the `subjectinfo` function described above.
+following format - ``[Name,Stat,[list of condition names],[weights on
+those conditions]]``. The condition names must match the ``names`` listed
+in the ``subjectinfo`` function described above.
 """

 cont1 = ('active > rest', 'T', ['Task'], [1])
@@ -297,10 +309,10 @@ def makelist(item):
 l1pipeline.inputs.analysis.modelspec.subject_info = subjectinfo
 l1pipeline.inputs.analysis.contrastestimate.contrasts = contrasts
 l1pipeline.inputs.analysis.threshold.contrast_index = 1
+
 """
 Setup the pipeline
 ------------------
-
 The nodes created above do not describe the flow of data. They merely
 describe the parameters used for each function. In this section we
 setup the connections between the nodes such that appropriate outputs
@@ -315,7 +327,7 @@ def makelist(item):
 pipeline. Thus for this pipeline there will be subject specific
 sub-directories.

-The ``nipype.pipeline.engine.Pipeline.connect`` function creates the
+The :func:`nipype.pipeline.engine.Workflow.connect` function creates the
 links between the processes, i.e., how data should flow in and out
 of the processing nodes.
 """
@@ -332,24 +344,24 @@ def makelist(item):
 else:
     level1.connect([(datasource, l1pipeline, [('func',
                                                'preproc.realign.in_files')])])
-"""
+
+"""
 Setup storage results
 ---------------------
-
-Use :class:`nipype.interfaces.io.DataSink` to store selected outputs
+Use :ref:`nipype.interfaces.io.DataSink` to store selected outputs
 from the pipeline in a specific location. This allows the user to
 selectively choose important output bits from the analysis and keep
 them.

 The first step is to create a datasink node and then to connect
 outputs from the modules above to storage locations. These take the
-following form directory_name[.[@]subdir] where parts between [] are
+following form ``directory_name[.[@]subdir]`` where parts between ``[]`` are
 optional. For example 'realign.@mean' below creates a directory called
 realign in 'l1output/subject_id/' and stores the mean image output
 from the Realign process in the realign directory. If the @ is left
 out, then a sub-directory with the name 'mean' would be created and
 the mean image would be copied to that directory.
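As a hedged illustration of that naming convention (this connection is a
sketch only; the workflow's actual wiring appears right below)::

    level1.connect([(l1pipeline, datasink,
                     [('preproc.realign.mean_image', 'realign.@mean')])])

With ``'realign.@mean'`` the file lands directly in the ``realign`` folder,
whereas ``'realign.mean'`` would create a ``mean`` sub-directory inside it.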
+ """ datasink = pe.Node(interface=nio.DataSink(), name="datasink") @@ -372,14 +384,14 @@ def getstripdir(subject_id): [('analysis.contrastestimate.con_images', 'contrasts.@con'), ('analysis.contrastestimate.spmT_images', 'contrasts.@T')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the -analysis on the data the ``nipype.pipeline.engine.Pipeline.Run`` +analysis on the data the :func:`nipype.pipeline.engine.Workflow.run` function needs to be called. """ diff --git a/examples/fmri_spm_dartel.py b/examples/fmri_spm_dartel.py index 587ff9b291..9c66ea7aac 100755 --- a/examples/fmri_spm_dartel.py +++ b/examples/fmri_spm_dartel.py @@ -28,11 +28,10 @@ import nipype.algorithms.rapidart as ra # artifact detection import nipype.algorithms.modelgen as model # model specification import os # system functions -""" +""" Preliminaries ------------- - Set any package specific configuration. The output file format for FSL routines is being set to uncompressed NIFTI and a specific version of matlab is being used. The uncompressed format is required @@ -45,10 +44,10 @@ # Set the way matlab should be called # mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") # mlab.MatlabCommand.set_default_paths('/software/spm8') + """ Setting up workflows -------------------- - In this tutorial we will be setting up a hierarchical workflow for spm analysis. This will demonstrate how pre-defined workflows can be setup and shared across users, projects and labs. @@ -56,18 +55,19 @@ Setup preprocessing workflow ---------------------------- - This is a generic preprocessing workflow that can be used by different analyses """ preproc = pe.Workflow(name='preproc') + """Use :class:`nipype.interfaces.spm.Realign` for motion correction and register all images to the mean image. """ realign = pe.Node(spm.Realign(), name="realign") realign.inputs.register_to_mean = True + """Use :class:`nipype.algorithms.rapidart` to determine which of the images in the functional series are outliers based on deviations in intensity or movement. @@ -80,18 +80,21 @@ art.inputs.zintensity_threshold = 3 art.inputs.mask_type = 'file' art.inputs.parameter_source = 'SPM' + """Skull strip structural images using :class:`nipype.interfaces.fsl.BET`. """ skullstrip = pe.Node(fsl.BET(), name="skullstrip") skullstrip.inputs.mask = True + """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid body registration of the functional data to the structural data. """ coregister = pe.Node(spm.Coregister(), name="coregister") coregister.inputs.jobtype = 'estimate' + """Normalize and smooth functional data using DARTEL template """ @@ -99,6 +102,7 @@ spm.DARTELNorm2MNI(modulate=True), name='normalize_and_smooth_func') fwhmlist = [4] normalize_and_smooth_func.iterables = ('fwhm', fwhmlist) + """Normalize structural data using DARTEL template """ @@ -117,41 +121,47 @@ 'realigned_files')]), (skullstrip, art, [('mask_file', 'mask_file')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using :class:`nipype.interfaces.spm.SpecifyModel`. """ modelspec = pe.Node(model.SpecifySPMModel(), name="modelspec") modelspec.inputs.concatenate_runs = True + """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. 
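Besides the basis set configured below, a first-level design also needs timing
information. Purely as an illustration (these values are assumptions, not
taken from this dataset)::

    level1design.inputs.timing_units = 'secs'
    level1design.inputs.interscan_interval = 3.  # assumed TR, in seconds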
""" level1design = pe.Node(spm.Level1Design(), name="level1design") level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} + """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ level1estimate = pe.Node(spm.EstimateModel(), name="level1estimate") level1estimate.inputs.estimation_method = {'Classical': 1} + """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. """ contrastestimate = pe.Node(spm.EstimateContrast(), name="contrastestimate") + """Use :class: `nipype.interfaces.utility.Select` to select each contrast for reporting. """ selectcontrast = pe.Node(niu.Select(), name="selectcontrast") + """Use :class:`nipype.interfaces.fsl.Overlay` to combine the statistical output of the contrast estimate and a background image into one volume. """ @@ -160,6 +170,7 @@ overlaystats.inputs.stat_thresh = (3, 10) overlaystats.inputs.show_negative_stats = True overlaystats.inputs.auto_thresh_bg = True + """Use :class:`nipype.interfaces.fsl.Slicer` to create images of the overlaid statistical volumes for a report of the first-level results. """ @@ -181,10 +192,10 @@ (selectcontrast, overlaystats, [('out', 'stat_image')]), (overlaystats, slicestats, [('out_file', 'in_file')])]) + """ Preproc + Analysis pipeline --------------------------- - """ l1pipeline = pe.Workflow(name='firstlevel') @@ -198,10 +209,10 @@ 'level1design.mask_image'), ('normalize_struct.normalized_files', 'overlaystats.background_image')]), ]) + """ Data specific components ------------------------ - The nipype tutorial contains data for two subjects. Subject data is in two subdirectories, ``s1`` and ``s2``. Each subject directory contains four functional volumes: f3.nii, f5.nii, f7.nii, f10.nii. And @@ -230,6 +241,7 @@ infosource = pe.Node( niu.IdentityInterface(fields=['subject_id']), name="infosource") + """Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that @@ -240,6 +252,7 @@ """ infosource.iterables = ('subject_id', subject_list) + """ Now we create a :class:`nipype.interfaces.io.DataGrabber` object and fill in the information from above about the layout of our data. The @@ -256,6 +269,7 @@ datasource.inputs.template = 'nipype-tutorial/data/%s/%s.nii' datasource.inputs.template_args = info datasource.inputs.sort_filelist = True + """We need to create a separate workflow to make the DARTEL template """ @@ -268,6 +282,7 @@ struct=[['subject_id', 'struct']]) datasource_dartel.inputs.sort_filelist = True datasource_dartel.inputs.subject_id = subject_list + """Here we make sure that struct files have names corresponding to the subject ids. This way we will be able to pick the right field flows later. 
""" @@ -281,10 +296,10 @@ dartel_workflow = spm_wf.create_DARTEL_template(name='dartel_workflow') dartel_workflow.inputs.inputspec.template_prefix = "template" + """This function will allow to pick the right field flow for each subject """ - def pickFieldFlow(dartel_flow_fields, subject_id): from nipype.utils.filemanip import split_filename for f in dartel_flow_fields: @@ -294,17 +309,16 @@ def pickFieldFlow(dartel_flow_fields, subject_id): raise Exception - pick_flow = pe.Node( niu.Function( input_names=['dartel_flow_fields', 'subject_id'], output_names=['dartel_flow_field'], function=pickFieldFlow), name="pick_flow") + """ Experimental paradigm specific components ----------------------------------------- - Here we create a function that returns subject-specific information about the experimental paradigm. This is used by the :class:`nipype.interfaces.spm.SpecifyModel` to create the information @@ -312,7 +326,6 @@ def pickFieldFlow(dartel_flow_fields, subject_id): paradigm was used for every participant. """ - def subjectinfo(subject_id): from nipype.interfaces.base import Bunch from copy import deepcopy @@ -333,7 +346,6 @@ def subjectinfo(subject_id): regressors=None)) return output - """Setup the contrast structure that needs to be evaluated. This is a list of lists. The inner list specifies the contrasts and has the following format - [Name,Stat,[list of condition names],[weights on @@ -360,10 +372,10 @@ def subjectinfo(subject_id): # Iterate over each contrast and create report images. selectcontrast.iterables = ('index', [[i] for i in range(len(contrasts))]) + """ Setup the pipeline ------------------ - The nodes created above do not describe the flow of data. They merely describe the parameters used for each function. In this section we setup the connections between the nodes such that appropriate outputs @@ -411,11 +423,10 @@ def subjectinfo(subject_id): (infosource, l1pipeline, [(('subject_id', subjectinfo), 'analysis.modelspec.subject_info')]), ]) -""" +""" Setup storage results --------------------- - Use :class:`nipype.interfaces.io.DataSink` to store selected outputs from the pipeline in a specific location. This allows the user to selectively choose important output bits from the analysis and keep @@ -457,10 +468,10 @@ def getstripdir(subject_id): (('subject_id', getstripdir), 'strip_dir')]), (l1pipeline, report, [('analysis.slicestats.out_file', '@report')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the @@ -471,10 +482,10 @@ def getstripdir(subject_id): if __name__ == '__main__': level1.run(plugin_args={'n_procs': 4}) level1.write_graph() + """ Setup level 2 pipeline ---------------------- - Use :class:`nipype.interfaces.io.DataGrabber` to extract the contrast images across a group of first level subjects. Unlike the previous pipeline that iterated over subjects, this pipeline will iterate over @@ -490,6 +501,7 @@ def getstripdir(subject_id): # iterate over all contrast images l2source.iterables = [('fwhm', fwhmlist), ('con', contrast_ids)] l2source.inputs.sort_filelist = True + """Use :class:`nipype.interfaces.spm.OneSampleTTestDesign` to perform a simple statistical analysis of the contrasts from the group of subjects (n=2 in this example). 
@@ -503,6 +515,7 @@ def getstripdir(subject_id): cont1 = ('Group', 'T', ['mean'], [1]) l2conestimate.inputs.contrasts = [cont1] l2conestimate.inputs.group_contrast = True + """As before, we setup a pipeline to connect these two nodes (l2source -> onesamplettest). """ @@ -516,10 +529,10 @@ def getstripdir(subject_id): [('spm_mat_file', 'spm_mat_file'), ('beta_images', 'beta_images'), ('residual_image', 'residual_image')]), ]) + """ Execute the second level pipeline --------------------------------- - """ if __name__ == '__main__': diff --git a/examples/fmri_spm_face.py b/examples/fmri_spm_face.py index 5644398d54..bff892bbd8 100755 --- a/examples/fmri_spm_face.py +++ b/examples/fmri_spm_face.py @@ -27,11 +27,10 @@ import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import nipype.algorithms.modelgen as model # model specification -""" +""" Preliminaries ------------- - Set any package specific configuration. The output file format for FSL routines is being set to uncompressed NIFTI and a specific version of matlab is being used. The uncompressed format is required @@ -42,22 +41,20 @@ mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") # If SPM is not in your MATLAB path you should add it here # mlab.MatlabCommand.set_default_paths('/path/to/your/spm8') + """ Setting up workflows -------------------- - In this tutorial we will be setting up a hierarchical workflow for spm analysis. It one is slightly different then the one used in spm_tutorial2. - Setup preprocessing workflow ---------------------------- - This is a generic preprocessing workflow that can be used by different analyses - """ preproc = pe.Workflow(name='preproc') + """Use :class:`nipype.interfaces.spm.Realign` for motion correction and register all images to the mean image. """ @@ -65,6 +62,7 @@ realign = pe.Node(interface=spm.Realign(), name="realign") slice_timing = pe.Node(interface=spm.SliceTiming(), name="slice_timing") + """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid body registration of the functional data to the structural data. """ @@ -74,10 +72,12 @@ segment = pe.Node(interface=spm.Segment(), name="segment") segment.inputs.save_bias_corrected = True + """Uncomment the following line for faster execution """ # segment.inputs.gaussians_per_class = [1, 1, 1, 4] + """Warp functional and structural data to SPM's T1 template using :class:`nipype.interfaces.spm.Normalize`. The tutorial data set includes the template image, T1.nii. @@ -88,11 +88,13 @@ normalize_struc = pe.Node(interface=spm.Normalize(), name="normalize_struc") normalize_struc.inputs.jobtype = "write" + """Smooth the functional data using :class:`nipype.interfaces.spm.Smooth`. """ smooth = pe.Node(interface=spm.Smooth(), name="smooth") + """`write_voxel_sizes` is the input of the normalize interface that is recommended to be set to the voxel sizes of the target volume. There is no need to set it manually since we van infer it from data using the following function: @@ -129,23 +131,26 @@ def get_vox_dims(volume): 'write_voxel_sizes')]), (normalize_func, smooth, [('normalized_files', 'in_files')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using :class:`nipype.interfaces.spm.SpecifyModel`. 
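The concrete inputs for this node are assigned further down in the script;
purely as an illustration (the values here are assumptions)::

    modelspec.inputs.input_units = 'scans'
    modelspec.inputs.time_repetition = 2.  # assumed TR, in seconds
    modelspec.inputs.high_pass_filter_cutoff = 128.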
""" modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") + """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ level1design = pe.Node(interface=spm.Level1Design(), name="level1design") + """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ @@ -154,6 +159,7 @@ def get_vox_dims(volume): level1estimate.inputs.estimation_method = {'Classical': 1} threshold = pe.Node(interface=spm.Threshold(), name="threshold") + """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. """ @@ -176,16 +182,17 @@ def pickfirst(l): (('spmT_images', pickfirst), 'stat_image')]), ]) + """ Preproc + Analysis pipeline --------------------------- - """ l1pipeline = pe.Workflow(name='firstlevel') l1pipeline.connect([(preproc, l1analysis, [('realign.realignment_parameters', 'modelspec.realignment_parameters')])]) + """Pluging in `functional_runs` is a bit more complicated, because model spec expects a list of `runs`. Every run can be a 4D file or a list of 3D files. Therefore for 3D analysis we need a list of lists and to make one we need a helper function. @@ -199,10 +206,10 @@ def makelist(item): l1pipeline.connect([(preproc, l1analysis, [(('smooth.smoothed_files', makelist), 'modelspec.functional_runs')])]) + """ Data specific components ------------------------ - In this tutorial there is only one subject `M03953`. Below we set some variables to inform the ``datasource`` about the @@ -223,6 +230,7 @@ def makelist(item): infosource = pe.Node( interface=util.IdentityInterface(fields=['subject_id']), name="infosource") + """Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that @@ -233,6 +241,7 @@ def makelist(item): """ infosource.iterables = ('subject_id', subject_list) + """ Now we create a :class:`nipype.interfaces.io.DataGrabber` object and fill in the information from above about the layout of our data. The @@ -249,10 +258,10 @@ def makelist(item): datasource.inputs.template = '%s/s%s_%04d%s.img' datasource.inputs.template_args = info datasource.inputs.sort_filelist = True + """ Experimental paradigm specific components ----------------------------------------- - Here we create a structure that provides information about the experimental paradigm. This is used by the :class:`nipype.interfaces.spm.SpecifyModel` to create the information @@ -260,6 +269,7 @@ def makelist(item): """ from nipype.interfaces.base import Bunch + """We're importing the onset times from a mat file (found on http://www.fil.ion.ucl.ac.uk/spm/data/face_rep/) """ @@ -280,6 +290,7 @@ def makelist(item): regressor_names=None, regressors=None) ] + """Setup the contrast structure that needs to be evaluated. This is a list of lists. 
The inner list specifies the contrasts and has the following format - [Name,Stat,[list of condition names],[weights on @@ -322,6 +333,7 @@ def makelist(item): cond1, cond2, cond3, fam1, fam2, fam3, rep1, rep2, rep3, int1, int2, int3, contf1, contf2, contf3, contf4 ] + """Setting up nodes inputs """ @@ -350,6 +362,7 @@ def makelist(item): l1designref.microtime_resolution = slice_timingref.num_slices l1designref.microtime_onset = slice_timingref.ref_slice l1designref.bases = {'hrf': {'derivs': [1, 1]}} + """ The following lines automatically inform SPM to create a default set of contrats for a factorial design. @@ -361,11 +374,13 @@ def makelist(item): l1pipeline.inputs.analysis.modelspec.subject_info = subjectinfo l1pipeline.inputs.analysis.contrastestimate.contrasts = contrasts l1pipeline.inputs.analysis.threshold.contrast_index = 1 + """ Use derivative estimates in the non-parametric model """ l1pipeline.inputs.analysis.contrastestimate.use_derivs = True + """ Setting up parametricvariation of the model """ @@ -402,10 +417,10 @@ def makelist(item): [(preproc, paramanalysis, [('realign.realignment_parameters', 'modelspec.realignment_parameters'), (('smooth.smoothed_files', makelist), 'modelspec.functional_runs')])]) + """ Setup the pipeline ------------------ - The nodes created above do not describe the flow of data. They merely describe the parameters used for each function. In this section we setup the connections between the nodes such that appropriate outputs @@ -432,11 +447,10 @@ def makelist(item): (datasource, l1pipeline, [('struct', 'preproc.coregister.source'), ('func', 'preproc.realign.in_files')])]) -""" +""" Setup storage results --------------------- - Use :class:`nipype.interfaces.io.DataSink` to store selected outputs from the pipeline in a specific location. This allows the user to selectively choose important output bits from the analysis and keep @@ -475,10 +489,10 @@ def getstripdir(subject_id): 'paramcontrasts.@con'), ('paramanalysis.contrastestimate.spmT_images', 'paramcontrasts.@T')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the diff --git a/examples/fmri_spm_nested.py b/examples/fmri_spm_nested.py index 534b8c960d..e63b3a2cde 100755 --- a/examples/fmri_spm_nested.py +++ b/examples/fmri_spm_nested.py @@ -28,11 +28,10 @@ from nipype.pipeline import engine as pe # pypeline engine from nipype.algorithms import rapidart as ra # artifact detection from nipype.algorithms import modelgen as model # model specification -""" +""" Preliminaries ------------- - Set any package specific configuration. The output file format for FSL routines is being set to uncompressed NIFTI and a specific version of matlab is being used. The uncompressed format is required @@ -45,18 +44,16 @@ # Set the way matlab should be called # mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") # mlab.MatlabCommand.set_default_paths('/software/spm8') + """ Setting up workflows -------------------- - In this tutorial we will be setting up a hierarchical workflow for spm analysis. This will demonstrate how pre-defined workflows can be setup and shared across users, projects and labs. 
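Schematically, nesting means connecting whole workflows as if they were nodes.
The following is a hedged sketch only: it assumes each sub-workflow already
contains the nodes referenced in the connection (``realign``, ``modelspec``)::

    preproc = pe.Workflow(name='preproc')
    analysis = pe.Workflow(name='analysis')
    firstlevel = pe.Workflow(name='firstlevel')
    firstlevel.connect([(preproc, analysis,
                         [('realign.realignment_parameters',
                           'modelspec.realignment_parameters')])])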
- Example of how to inline functions in connect() ----------------------------------------------- - """ @@ -66,15 +63,13 @@ def _template_path(in_data): """ - Set-up preprocessing workflow ----------------------------- - This is a generic preprocessing workflow that can be used by different analyses - """ preproc = pe.Workflow(name='preproc') + """ A node called :code:`inputnode` is set to designate the path in which input data are located: @@ -82,12 +77,14 @@ def _template_path(in_data): inputnode = pe.Node( niu.IdentityInterface(fields=['in_data']), name='inputnode') + """Use :class:`nipype.interfaces.spm.Realign` for motion correction and register all images to the mean image. """ realign = pe.Node(spm.Realign(), name="realign") realign.inputs.register_to_mean = True + """Use :class:`nipype.algorithms.rapidart` to determine which of the images in the functional series are outliers based on deviations in intensity or movement. @@ -100,24 +97,28 @@ def _template_path(in_data): art.inputs.zintensity_threshold = 3 art.inputs.mask_type = 'file' art.inputs.parameter_source = 'SPM' + """Skull strip structural images using :class:`nipype.interfaces.fsl.BET`. """ skullstrip = pe.Node(fsl.BET(), name="skullstrip") skullstrip.inputs.mask = True + """Use :class:`nipype.interfaces.spm.Coregister` to perform a rigid body registration of the functional data to the structural data. """ coregister = pe.Node(spm.Coregister(), name="coregister") coregister.inputs.jobtype = 'estimate' + """Warp functional and structural data to SPM's T1 template using :class:`nipype.interfaces.spm.Normalize`. The tutorial data set includes the template image, T1.nii. """ normalize = pe.Node(spm.Normalize(), name="normalize") + """Smooth the functional data using :class:`nipype.interfaces.spm.Smooth`. """ @@ -137,41 +138,47 @@ def _template_path(in_data): (normalize, art, [('normalized_files', 'realigned_files')]), (skullstrip, art, [('mask_file', 'mask_file')]), ]) + """ Set up analysis workflow ------------------------ - """ l1analysis = pe.Workflow(name='analysis') + """Generate SPM-specific design information using :class:`nipype.interfaces.spm.SpecifyModel`. """ modelspec = pe.Node(model.SpecifySPMModel(), name="modelspec") modelspec.inputs.concatenate_runs = True + """Generate a first level SPM.mat file for analysis :class:`nipype.interfaces.spm.Level1Design`. """ level1design = pe.Node(spm.Level1Design(), name="level1design") level1design.inputs.bases = {'hrf': {'derivs': [0, 0]}} + """Use :class:`nipype.interfaces.spm.EstimateModel` to determine the parameters of the model. """ level1estimate = pe.Node(spm.EstimateModel(), name="level1estimate") level1estimate.inputs.estimation_method = {'Classical': 1} + """Use :class:`nipype.interfaces.spm.EstimateContrast` to estimate the first level contrasts specified in a few steps above. """ contrastestimate = pe.Node(spm.EstimateContrast(), name="contrastestimate") + """Use :class: `nipype.interfaces.utility.Select` to select each contrast for reporting. """ selectcontrast = pe.Node(niu.Select(), name="selectcontrast") + """Use :class:`nipype.interfaces.fsl.Overlay` to combine the statistical output of the contrast estimate and a background image into one volume. 
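Downstream, the combined volume is rendered to a report image with
:class:`nipype.interfaces.fsl.Slicer`; the settings here are illustrative (the
script configures its own node below)::

    slicestats = pe.Node(fsl.Slicer(), name="slicestats")
    slicestats.inputs.all_axial = True
    slicestats.inputs.image_width = 750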
""" @@ -180,6 +187,7 @@ def _template_path(in_data): overlaystats.inputs.stat_thresh = (3, 10) overlaystats.inputs.show_negative_stats = True overlaystats.inputs.auto_thresh_bg = True + """Use :class:`nipype.interfaces.fsl.Slicer` to create images of the overlaid statistical volumes for a report of the first-level results. """ @@ -201,10 +209,10 @@ def _template_path(in_data): (selectcontrast, overlaystats, [('out', 'stat_image')]), (overlaystats, slicestats, [('out_file', 'in_file')])]) + """ Preproc + Analysis pipeline --------------------------- - """ l1pipeline = pe.Workflow(name='firstlevel') @@ -218,10 +226,10 @@ def _template_path(in_data): 'level1design.mask_image'), ('normalize.normalized_source', 'overlaystats.background_image')]), ]) + """ Data specific components ------------------------ - The nipype tutorial contains data for two subjects. Subject data is in two subdirectories, ``s1`` and ``s2``. Each subject directory contains four functional volumes: f3.nii, f5.nii, f7.nii, f10.nii. And @@ -236,7 +244,6 @@ def _template_path(in_data): In the example below, run 'f3' is of type 'func' and gets mapped to a nifti filename through a template '%s.nii'. So 'f3' would become 'f3.nii'. - """ # Specify the subject directories @@ -248,6 +255,7 @@ def _template_path(in_data): infosource = pe.Node( niu.IdentityInterface(fields=['subject_id']), name="infosource") + """Here we set up iteration over all the subjects. The following line is a particular example of the flexibility of the system. The ``datasource`` attribute ``iterables`` tells the pipeline engine that @@ -258,6 +266,7 @@ def _template_path(in_data): """ infosource.iterables = ('subject_id', subject_list) + """ Now we create a :class:`nipype.interfaces.io.DataGrabber` object and fill in the information from above about the layout of our data. The @@ -272,10 +281,10 @@ def _template_path(in_data): datasource.inputs.template = 'nipype-tutorial/data/%s/%s.nii' datasource.inputs.template_args = info datasource.inputs.sort_filelist = True + """ Experimental paradigm specific components ----------------------------------------- - Here we create a function that returns subject-specific information about the experimental paradigm. This is used by the :class:`nipype.interfaces.spm.SpecifyModel` to create the information @@ -331,10 +340,10 @@ def subjectinfo(subject_id): # Iterate over each contrast and create report images. selectcontrast.iterables = ('index', [[i] for i in range(len(contrasts))]) + """ Setup the pipeline ------------------ - The nodes created above do not describe the flow of data. They merely describe the parameters used for each function. In this section we setup the connections between the nodes such that appropriate outputs @@ -366,11 +375,10 @@ def subjectinfo(subject_id): (infosource, l1pipeline, [(('subject_id', subjectinfo), 'analysis.modelspec.subject_info')]), ]) -""" +""" Setup storage results --------------------- - Use :class:`nipype.interfaces.io.DataSink` to store selected outputs from the pipeline in a specific location. This allows the user to selectively choose important output bits from the analysis and keep @@ -410,10 +418,10 @@ def getstripdir(subject_id): (('subject_id', getstripdir), 'strip_dir')]), (l1pipeline, report, [('analysis.slicestats.out_file', '@report')]), ]) + """ Execute the pipeline -------------------- - The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. 
To actually run the @@ -424,10 +432,10 @@ def getstripdir(subject_id): if __name__ == '__main__': level1.run('MultiProc') level1.write_graph() + """ Setup level 2 pipeline ---------------------- - Use :class:`nipype.interfaces.io.DataGrabber` to extract the contrast images across a group of first level subjects. Unlike the previous pipeline that iterated over subjects, this pipeline will iterate over @@ -443,6 +451,7 @@ def getstripdir(subject_id): # iterate over all contrast images l2source.iterables = [('fwhm', fwhmlist), ('con', contrast_ids)] l2source.inputs.sort_filelist = True + """Use :class:`nipype.interfaces.spm.OneSampleTTestDesign` to perform a simple statistical analysis of the contrasts from the group of subjects (n=2 in this example). @@ -456,6 +465,7 @@ def getstripdir(subject_id): cont1 = ('Group', 'T', ['mean'], [1]) l2conestimate.inputs.contrasts = [cont1] l2conestimate.inputs.group_contrast = True + """As before, we setup a pipeline to connect these two nodes (l2source -> onesamplettest). """ @@ -469,10 +479,10 @@ def getstripdir(subject_id): [('spm_mat_file', 'spm_mat_file'), ('beta_images', 'beta_images'), ('residual_image', 'residual_image')]), ]) + """ Execute the second level pipeline --------------------------------- - """ if __name__ == '__main__': diff --git a/nipype/__init__.py b/nipype/__init__.py index 74c6a42dd7..18449c5f81 100644 --- a/nipype/__init__.py +++ b/nipype/__init__.py @@ -1,11 +1,20 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +""" +Information on specific functions, classes, and methods. + +:Release: |version| +:Date: |today| + +Top-level module API +-------------------- + +""" import os from distutils.version import LooseVersion from .info import ( - LONG_DESCRIPTION as __doc__, URL as __url__, STATUS as __status__, __version__, diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 3bbf4632f4..251d196d3f 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -395,8 +395,8 @@ class CompCorInputSpec(BaseInterfaceInputSpec): desc=( "One or more mask files that determines " "ROI (3D). When more that one file is " - "provided `merge_method` or " - "`merge_index` must be provided" + "provided ``merge_method`` or " + "``merge_index`` must be provided" ), ) merge_method = traits.Enum( @@ -407,10 +407,10 @@ class CompCorInputSpec(BaseInterfaceInputSpec): requires=["mask_files"], desc=( "Merge method if multiple masks are " - "present - `union` uses voxels included in" - " at least one input mask, `intersect` " + "present - ``union`` uses voxels included in" + " at least one input mask, ``intersect`` " "uses only voxels present in all input " - "masks, `none` performs CompCor on " + "masks, ``none`` performs CompCor on " "each mask individually" ), ) @@ -418,7 +418,7 @@ class CompCorInputSpec(BaseInterfaceInputSpec): low=0, xor=["merge_method"], requires=["mask_files"], - desc=("Position of mask in `mask_files` to use - " "first is the default."), + desc=("Position of mask in ``mask_files`` to use - " "first is the default."), ) mask_names = traits.List( traits.Str, @@ -436,12 +436,12 @@ class CompCorInputSpec(BaseInterfaceInputSpec): traits.Range(low=1), xor=["variance_threshold"], desc="Number of components to return from the decomposition. 
If " - "`num_components` is `all`, then all components will be " + "``num_components`` is ``all``, then all components will be " "retained.", ) # 6 for BOLD, 4 for ASL # automatically instantiated to 6 in CompCor below if neither - # `num_components` nor `variance_threshold` is defined (for + # ``num_components`` nor ``variance_threshold`` is defined (for # backward compatibility) variance_threshold = traits.Range( low=0.0, @@ -451,7 +451,7 @@ class CompCorInputSpec(BaseInterfaceInputSpec): xor=["num_components"], desc="Select the number of components to be returned automatically " "based on their ability to explain variance in the dataset. " - "`variance_threshold` is a fractional value between 0 and 1; " + "``variance_threshold`` is a fractional value between 0 and 1; " "the number of components retained will be equal to the minimum " "number of components necessary to explain the provided " "fraction of variance in the masked time series.", @@ -521,13 +521,14 @@ class CompCorOutputSpec(TraitedSpec): class CompCor(SimpleInterface): """ - Interface with core CompCor computation, used in aCompCor and tCompCor + Interface with core CompCor computation, used in aCompCor and tCompCor. CompCor provides three pre-filter options, all of which include per-voxel mean removal: - - polynomial: Legendre polynomial basis - - cosine: Discrete cosine basis - - False: mean-removal only + + - ``'polynomial'``: Legendre polynomial basis + - ``'cosine'``: Discrete cosine basis + - ``False``: mean-removal only In the case of ``polynomial`` and ``cosine`` filters, a pre-filter file may be saved with a row for each volume/timepoint, and a column for each @@ -545,7 +546,6 @@ class CompCor(SimpleInterface): Example ------- - >>> ccinterface = CompCor() >>> ccinterface.inputs.realigned_file = 'functional.nii' >>> ccinterface.inputs.mask_files = 'mask.nii' @@ -654,10 +654,10 @@ def _run_interface(self, runtime): else: components_criterion = 6 IFLOGGER.warning( - "`num_components` and `variance_threshold` are " + "``num_components`` and ``variance_threshold`` are " "not defined. Setting number of components to 6 " "for backward compatibility. Please set either " - "`num_components` or `variance_threshold`, as " + "``num_components`` or ``variance_threshold``, as " "this feature may be deprecated in the future." ) @@ -812,7 +812,6 @@ class TCompCor(CompCor): Example ------- - >>> ccinterface = TCompCor() >>> ccinterface.inputs.realigned_file = 'functional.nii' >>> ccinterface.inputs.mask_files = 'mask.nii' @@ -906,7 +905,6 @@ class TSNR(BaseInterface): Example ------- - >>> tsnr = TSNR() >>> tsnr.inputs.in_file = 'functional.nii' >>> res = tsnr.run() # doctest: +SKIP @@ -1240,6 +1238,8 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): A helper function for CompCor + Parameters + ---------- mask_files: a list one or more binary mask files mask_method: enum ('union', 'intersect', 'none') @@ -1247,7 +1247,10 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): mask_index: an integer determines which file to return (mutually exclusive with mask_method) - returns: a list of nibabel images + Returns + ------- + masks: a list of nibabel images + """ if isdefined(mask_index) or not isdefined(mask_method): @@ -1310,49 +1313,48 @@ def compute_noise_components( failure_mode="error", mask_names=None, ): - """Compute the noise components from the imgseries for each mask + """ + Compute the noise components from the image series for each mask. 
Parameters ---------- imgseries: nibabel image Time series data to be decomposed. mask_images: list - List of nibabel images. Time series data from `img_series` is subset + List of nibabel images. Time series data from ``img_series`` is subset according to the spatial extent of each mask, and the subset data is then decomposed using principal component analysis. Masks should be coextensive with either anatomical or spatial noise ROIs. components_criterion: float Number of noise components to return. If this is a decimal value - between 0 and 1, then `create_noise_components` will instead return + between 0 and 1, then ``create_noise_components`` will instead return the smallest number of components necessary to explain the indicated - fraction of variance. If `components_criterion` is `all`, then all + fraction of variance. If ``components_criterion`` is ``all``, then all components will be returned. filter_type: str - Type of filter to apply to time series before computing - noise components. - 'polynomial' - Legendre polynomial basis - 'cosine' - Discrete cosine (DCT) basis - False - None (mean-removal only) + Type of filter to apply to time series before computing noise components. + + - 'polynomial' - Legendre polynomial basis + - 'cosine' - Discrete cosine (DCT) basis + - False - None (mean-removal only) + failure_mode: str Action to be taken in the event that any decomposition fails to - identify any components. `error` indicates that the routine should + identify any components. ``error`` indicates that the routine should raise an exception and exit, while any other value indicates that the routine should return a matrix of NaN values equal in size to the requested decomposition matrix. mask_names: list or None - List of names for each image in `mask_images`. This should be equal in - length to `mask_images`, with the ith element of `mask_names` naming - the ith element of `mask_images`. - - Filter options: - + List of names for each image in ``mask_images``. This should be equal in + length to ``mask_images``, with the ith element of ``mask_names`` naming + the ith element of ``mask_images``. degree: int Order of polynomial used to remove trends from the timeseries period_cut: float Minimum period (in sec) for DCT high-pass filter repetition_time: float Time (in sec) between volume acquisitions. This must be defined if - the `filter_type` is `cosine`. + the ``filter_type`` is ``cosine``. Returns ------- @@ -1363,6 +1365,7 @@ def compute_noise_components( metadata: OrderedDict{str: numpy array} Dictionary of eigenvalues, fractional explained variances, and cumulative explained variances. 
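A hedged usage sketch (file names are placeholders; keyword names follow the
parameters documented above)::

    import nibabel as nb
    img = nb.load('functional.nii')   # placeholder 4D series
    masks = [nb.load('mask.nii')]     # placeholder list of mask images
    result = compute_noise_components(
        imgseries=img, mask_images=masks, components_criterion=6,
        filter_type='polynomial', degree=2)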
+ """ basis = np.array([]) if components_criterion == "all": diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index b472039075..7ba401a130 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1,9 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" -Miscellaneous algorithms -""" +"""Miscellaneous algorithms.""" import os import os.path as op @@ -331,7 +329,7 @@ def replaceext(in_list, ext): return out_list -def matlab2csv(in_array, name, reshape): +def _matlab2csv(in_array, name, reshape): output_array = np.asarray(in_array) if reshape: if len(np.shape(output_array)) > 1: @@ -364,19 +362,19 @@ class Matlab2CSVOutputSpec(TraitedSpec): class Matlab2CSV(BaseInterface): - """Simple interface to save the components of a MATLAB .mat file as a text - file with comma-separated values (CSVs). + """ + Save the components of a MATLAB .mat file as a text file with comma-separated values (CSVs). CSV files are easily loaded in R, for use in statistical processing. For further information, see cran.r-project.org/doc/manuals/R-data.pdf Example ------- - >>> from nipype.algorithms import misc >>> mat2csv = misc.Matlab2CSV() >>> mat2csv.inputs.in_file = 'cmatrix.mat' >>> mat2csv.run() # doctest: +SKIP + """ input_spec = Matlab2CSVInputSpec @@ -413,7 +411,7 @@ def _run_interface(self, runtime): variable, type(in_dict[variable]), ) - matlab2csv(in_dict[variable], variable, self.inputs.reshape_matrix) + _matlab2csv(in_dict[variable], variable, self.inputs.reshape_matrix) elif len(saved_variables) == 1: _, name, _ = split_filename(self.inputs.in_file) variable = saved_variables[0] @@ -423,7 +421,7 @@ def _run_interface(self, runtime): iflogger.info( "...Converting %s to CSV from %s", variable, self.inputs.in_file ) - matlab2csv(in_dict[variable], name, self.inputs.reshape_matrix) + _matlab2csv(in_dict[variable], name, self.inputs.reshape_matrix) else: iflogger.error("No values in the MATLAB file?!") return runtime @@ -587,22 +585,23 @@ class MergeCSVFilesOutputSpec(TraitedSpec): class MergeCSVFiles(BaseInterface): - """This interface is designed to facilitate data loading in the R environment. - It takes input CSV files and merges them into a single CSV file. + """ + Merge several CSV files into a single CSV file. + + This interface is designed to facilitate data loading in the R environment. If provided, it will also incorporate column heading names into the resulting CSV file. - CSV files are easily loaded in R, for use in statistical processing. For further information, see cran.r-project.org/doc/manuals/R-data.pdf Example ------- - >>> from nipype.algorithms import misc >>> mat2csv = misc.MergeCSVFiles() >>> mat2csv.inputs.in_files = ['degree.mat','clustering.mat'] >>> mat2csv.inputs.column_headings = ['degree','clustering'] >>> mat2csv.run() # doctest: +SKIP + """ input_spec = MergeCSVFilesInputSpec @@ -722,17 +721,18 @@ class AddCSVColumnOutputSpec(TraitedSpec): class AddCSVColumn(BaseInterface): - """Short interface to add an extra column and field to a text file + """ + Short interface to add an extra column and field to a text file. 
Example ------- - >>> from nipype.algorithms import misc >>> addcol = misc.AddCSVColumn() >>> addcol.inputs.in_file = 'degree.csv' >>> addcol.inputs.extra_column_heading = 'group' >>> addcol.inputs.extra_field = 'male' >>> addcol.run() # doctest: +SKIP + """ input_spec = AddCSVColumnInputSpec @@ -788,7 +788,8 @@ class AddCSVRowOutputSpec(TraitedSpec): class AddCSVRow(BaseInterface): - """Simple interface to add an extra row to a csv file + """ + Simple interface to add an extra row to a CSV file. .. note:: Requires `pandas `_ @@ -801,7 +802,6 @@ class AddCSVRow(BaseInterface): Example ------- - >>> from nipype.algorithms import misc >>> addrow = misc.AddCSVRow() >>> addrow.inputs.in_file = 'scores.csv' @@ -810,6 +810,7 @@ class AddCSVRow(BaseInterface): >>> addrow.inputs.subject_id = 'S400' >>> addrow.inputs.list_of_values = [ 0.4, 0.7, 0.3 ] >>> addrow.run() # doctest: +SKIP + """ input_spec = AddCSVRowInputSpec @@ -917,16 +918,17 @@ class CalculateNormalizedMomentsOutputSpec(TraitedSpec): class CalculateNormalizedMoments(BaseInterface): - """Calculates moments of timeseries. + """ + Calculates moments of timeseries. Example ------- - >>> from nipype.algorithms import misc >>> skew = misc.CalculateNormalizedMoments() >>> skew.inputs.moment = 3 >>> skew.inputs.timeseries_file = 'timeseries.txt' >>> skew.run() # doctest: +SKIP + """ input_spec = CalculateNormalizedMomentsInputSpec @@ -995,7 +997,7 @@ class AddNoiseOutputSpec(TraitedSpec): class AddNoise(BaseInterface): """ - Corrupts with noise the input image + Corrupts with noise the input image. Example @@ -1105,21 +1107,23 @@ class NormalizeProbabilityMapSetOutputSpec(TraitedSpec): class NormalizeProbabilityMapSet(BaseInterface): - """ Returns the input tissue probability maps (tpms, aka volume fractions) - normalized to sum up 1.0 at each voxel within the mask. + """ + Returns the input tissue probability maps (tpms, aka volume fractions). + + The tissue probability maps are normalized to sum up 1.0 at each voxel within the mask. .. note:: Please recall this is not a spatial normalization algorithm Example ------- - >>> from nipype.algorithms import misc >>> normalize = misc.NormalizeProbabilityMapSet() >>> normalize.inputs.in_files = [ 'tpm_00.nii.gz', 'tpm_01.nii.gz', \ 'tpm_02.nii.gz' ] >>> normalize.inputs.in_mask = 'tpms_msk.nii.gz' >>> normalize.run() # doctest: +SKIP + """ input_spec = NormalizeProbabilityMapSetInputSpec @@ -1159,11 +1163,11 @@ class SplitROIsOutputSpec(TraitedSpec): class SplitROIs(BaseInterface): """ Splits a 3D image in small chunks to enable parallel processing. + ROIs keep time series structure in 4D images. Example ------- - >>> from nipype.algorithms import misc >>> rois = misc.SplitROIs() >>> rois.inputs.in_file = 'diffusion.nii' @@ -1215,11 +1219,11 @@ class MergeROIsOutputSpec(TraitedSpec): class MergeROIs(BaseInterface): """ Splits a 3D image in small chunks to enable parallel processing. + ROIs keep time series structure in 4D images. 
Example ------- - >>> from nipype.algorithms import misc >>> rois = misc.MergeROIs() >>> rois.inputs.in_files = ['roi%02d.nii' % i for i in range(1, 6)] @@ -1500,7 +1504,6 @@ class CalculateMedian(BaseInterface): Example ------- - >>> from nipype.algorithms.misc import CalculateMedian >>> mean = CalculateMedian() >>> mean.inputs.in_files = 'functional.nii' diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py index 2457fe8d2f..e0e9530ff4 100644 --- a/nipype/algorithms/modelgen.py +++ b/nipype/algorithms/modelgen.py @@ -6,10 +6,6 @@ subject analysis of task-based fMRI experiments. In particular it also includes algorithms for generating regressors for sparse and sparse-clustered acquisition experiments. - -These functions include: - - * SpecifyModel: allows specification of sparse and non-sparse models """ from copy import deepcopy import csv, math, os @@ -36,10 +32,11 @@ def gcd(a, b): - """Returns the greatest common divisor of two integers - - uses Euclid's algorithm + """ + Return the greatest common divisor of two integers (uses Euclid's algorithm). + Examples + -------- >>> gcd(4, 5) 1 >>> gcd(4, 8) @@ -54,31 +51,33 @@ def gcd(a, b): def spm_hrf(RT, P=None, fMRI_T=16): - """ python implementation of spm_hrf - - see spm_hrf for implementation details - - % RT - scan repeat time - % p - parameters of the response function (two gamma - % functions) - % defaults (seconds) - % p(0) - delay of response (relative to onset) 6 - % p(1) - delay of undershoot (relative to onset) 16 - % p(2) - dispersion of response 1 - % p(3) - dispersion of undershoot 1 - % p(4) - ratio of response to undershoot 6 - % p(5) - onset (seconds) 0 - % p(6) - length of kernel (seconds) 32 - % - % hrf - hemodynamic response function - % p - parameters of the response function - - the following code using scipy.stats.distributions.gamma - doesn't return the same result as the spm_Gpdf function :: + """ + python implementation of spm_hrf + + See ``spm_hrf`` for implementation details:: + % RT - scan repeat time + % p - parameters of the response function (two gamma + % functions) + % defaults (seconds) + % p(0) - delay of response (relative to onset) 6 + % p(1) - delay of undershoot (relative to onset) 16 + % p(2) - dispersion of response 1 + % p(3) - dispersion of undershoot 1 + % p(4) - ratio of response to undershoot 6 + % p(5) - onset (seconds) 0 + % p(6) - length of kernel (seconds) 32 + % + % hrf - hemodynamic response function + % p - parameters of the response function + + The following code using ``scipy.stats.distributions.gamma`` + doesn't return the same result as the ``spm_Gpdf`` function:: hrf = gamma.pdf(u, p[0]/p[2], scale=dt/p[2]) - gamma.pdf(u, p[1]/p[3], scale=dt/p[3])/p[4] + Example + ------- >>> print(spm_hrf(2)) [ 0.00000000e+00 8.65660810e-02 3.74888236e-01 3.84923382e-01 2.16117316e-01 7.68695653e-02 1.62017720e-03 -3.06078117e-02 @@ -131,11 +130,11 @@ def orth(x_in, y_in): def scale_timings(timelist, input_units, output_units, time_repetition): - """Scales timings given input and output units (scans/secs) + """ + Scale timings given input and output units (scans/secs). Parameters ---------- - timelist: list of times to scale input_units: 'secs' or 'scans' output_units: Ibid. @@ -157,11 +156,11 @@ def scale_timings(timelist, input_units, output_units, time_repetition): def bids_gen_info( bids_event_files, condition_column="", amplitude_column=None, time_repetition=False, ): - """Generate subject_info structure from a list of BIDS .tsv event files. 
+ """ + Generate a subject_info structure from a list of BIDS .tsv event files. Parameters ---------- - bids_event_files : list of str Filenames of BIDS .tsv event files containing columns including: 'onset', 'duration', and 'trial_type' or the `condition_column` value. @@ -175,8 +174,8 @@ def bids_gen_info( Returns ------- + subject_info: list of Bunch - list of Bunch """ info = [] for bids_event_file in bids_event_files: @@ -210,8 +209,7 @@ def bids_gen_info( def gen_info(run_event_files): - """Generate subject_info structure from a list of event files - """ + """Generate subject_info structure from a list of event files.""" info = [] for i, event_files in enumerate(run_event_files): runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[]) @@ -245,8 +243,7 @@ class SpecifyModelInputSpec(BaseInterfaceInputSpec): xor=["subject_info", "event_files", "bids_event_file"], desc="Bunch or List(Bunch) subject-specific " "condition information. see " - ":ref:`SpecifyModel` or " - "SpecifyModel.__doc__ for details", + ":ref:`nipype.algorithms.modelgen.SpecifyModel` or for details", ) event_files = InputMultiPath( traits.List(File(exists=True)), @@ -266,17 +263,17 @@ class SpecifyModelInputSpec(BaseInterfaceInputSpec): bids_condition_column = traits.Str( default_value="trial_type", usedefault=True, - desc="Column of the file passed to `bids_event_file` to the " + desc="Column of the file passed to ``bids_event_file`` to the " "unique values of which events will be assigned" "to regressors", ) bids_amplitude_column = traits.Str( - desc="Column of the file passed to `bids_event_file` " + desc="Column of the file passed to ``bids_event_file`` " "according to which to assign amplitudes to events" ) realignment_parameters = InputMultiPath( File(exists=True), - desc="Realignment parameters returned " "by motion correction algorithm", + desc="Realignment parameters returned by motion correction algorithm", copyfile=False, ) parameter_source = traits.Enum( @@ -290,7 +287,7 @@ class SpecifyModelInputSpec(BaseInterfaceInputSpec): ) outlier_files = InputMultiPath( File(exists=True), - desc="Files containing scan outlier indices " "that should be tossed", + desc="Files containing scan outlier indices that should be tossed", copyfile=False, ) functional_runs = InputMultiPath( @@ -326,37 +323,38 @@ class SpecifyModelOutputSpec(TraitedSpec): class SpecifyModel(BaseInterface): - """Makes a model specification compatible with spm/fsl designers. + """ + Makes a model specification compatible with spm/fsl designers. The subject_info field should contain paradigm information in the form of a Bunch or a list of Bunch. The Bunch should contain the following information:: - [Mandatory] - - conditions : list of names - - onsets : lists of onsets corresponding to each condition - - durations : lists of durations corresponding to each condition. Should be - left to a single 0 if all events are being modelled as impulses. - - [Optional] - - regressor_names : list of str - list of names corresponding to each column. Should be None if - automatically assigned. - - regressors : list of lists - values for each regressor - must correspond to the number of - volumes in the functional run - - amplitudes : lists of amplitudes for each event. This will be ignored by - SPM's Level1Design. - - The following two (tmod, pmod) will be ignored by any Level1Design class - other than SPM: - - - tmod : lists of conditions that should be temporally modulated. Should - default to None if not being used. 
- - pmod : list of Bunch corresponding to conditions - - name : name of parametric modulator - - param : values of the modulator - - poly : degree of modulation + [Mandatory] + conditions : list of names + onsets : lists of onsets corresponding to each condition + durations : lists of durations corresponding to each condition. Should be + left to a single 0 if all events are being modelled as impulses. + + [Optional] + regressor_names : list of str + list of names corresponding to each column. Should be None if + automatically assigned. + regressors : list of lists + values for each regressor - must correspond to the number of + volumes in the functional run + amplitudes : lists of amplitudes for each event. This will be ignored by + SPM's Level1Design. + + The following two (tmod, pmod) will be ignored by any Level1Design class + other than SPM: + + tmod : lists of conditions that should be temporally modulated. Should + default to None if not being used. + pmod : list of Bunch corresponding to conditions + - name : name of parametric modulator + - param : values of the modulator + - poly : degree of modulation Alternatively, you can provide information through event files. @@ -367,7 +365,6 @@ class SpecifyModel(BaseInterface): Examples -------- - >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifyModel() @@ -379,8 +376,7 @@ class SpecifyModel(BaseInterface): >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]]) >>> s.inputs.subject_info = [evs_run2, evs_run3] - Using pmod: - + >>> # Using pmod >>> evs_run2 = Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 50], [100, 180]], \ durations=[[0], [0]], pmod=[Bunch(name=['amp'], poly=[2], param=[[1, 2]]), \ None]) @@ -397,9 +393,7 @@ class SpecifyModel(BaseInterface): def _generate_standard_design( self, infolist, functional_runs=None, realignment_parameters=None, outliers=None ): - """ Generates a standard design matrix paradigm given information about - each run - """ + """Generate a standard design matrix paradigm given information about each run.""" sessinfo = [] output_units = "secs" if "output_units" in self.inputs.traits(): @@ -561,26 +555,26 @@ class SpecifySPMModelInputSpec(SpecifyModelInputSpec): concatenate_runs = traits.Bool( False, usedefault=True, - desc="Concatenate all runs to look like a " "single session.", + desc="Concatenate all runs to look like a single session.", ) output_units = traits.Enum( "secs", "scans", usedefault=True, - desc="Units of design event onsets and durations " "(secs or scans)", + desc="Units of design event onsets and durations (secs or scans)", ) class SpecifySPMModel(SpecifyModel): - """Adds SPM specific options to SpecifyModel + """Add SPM specific options to SpecifyModel + + Adds: - adds: - concatenate_runs - output_units Examples -------- - >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifySPMModel() @@ -734,7 +728,7 @@ def _generate_design(self, infolist=None): class SpecifySparseModelInputSpec(SpecifyModelInputSpec): time_acquisition = traits.Float( - 0, mandatory=True, desc="Time in seconds to acquire a single " "image volume" + 0, mandatory=True, desc="Time in seconds to acquire a single image volume" ) volumes_in_cluster = traits.Range( 1, usedefault=True, desc="Number of scan volumes in a cluster" @@ -745,7 +739,7 @@ class SpecifySparseModelInputSpec(SpecifyModelInputSpec): ) use_temporal_deriv = traits.Bool( requires=["model_hrf"], - 
desc="Create a temporal derivative in " "addition to regular regressor", + desc="Create a temporal derivative in addition to regular regressor", ) scale_regressors = traits.Bool( True, desc="Scale regressors by the peak", usedefault=True @@ -754,7 +748,7 @@ class SpecifySparseModelInputSpec(SpecifyModelInputSpec): 0.0, desc="Start of scanning relative to onset of run in secs", usedefault=True ) save_plot = traits.Bool( - desc=("Save plot of sparse design calculation " "(requires matplotlib)") + desc=("Save plot of sparse design calculation (requires matplotlib)") ) @@ -764,18 +758,10 @@ class SpecifySparseModelOutputSpec(SpecifyModelOutputSpec): class SpecifySparseModel(SpecifyModel): - """ Specify a sparse model that is compatible with spm/fsl designers - - References - ---------- - - .. [1] Perrachione TK and Ghosh SS (2013) Optimized design and analysis of - sparse-sampling fMRI experiments. Front. Neurosci. 7:55 - http://journal.frontiersin.org/Journal/10.3389/fnins.2013.00055/abstract + """ Specify a sparse model that is compatible with SPM/FSL designers [1]_. Examples -------- - >>> from nipype.algorithms import modelgen >>> from nipype.interfaces.base import Bunch >>> s = modelgen.SpecifySparseModel() @@ -785,11 +771,17 @@ class SpecifySparseModel(SpecifyModel): >>> s.inputs.time_acquisition = 2 >>> s.inputs.high_pass_filter_cutoff = 128. >>> s.inputs.model_hrf = True - >>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], \ -durations=[[1]]) - >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], \ -durations=[[1]]) - >>> s.inputs.subject_info = [evs_run2, evs_run3] + >>> evs_run2 = Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]], + ... durations=[[1]]) + >>> evs_run3 = Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], + ... durations=[[1]]) + >>> s.inputs.subject_info = [evs_run2, evs_run3] # doctest: +SKIP + + References + ---------- + .. [1] Perrachione TK and Ghosh SS (2013) Optimized design and analysis of + sparse-sampling fMRI experiments. Front. Neurosci. 7:55 + http://journal.frontiersin.org/Journal/10.3389/fnins.2013.00055/abstract """ diff --git a/nipype/info.py b/nipype/info.py index 72cabcbc1e..4ec80ea60c 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -159,7 +159,16 @@ def get_nipype_gitversion(): EXTRA_REQUIRES = { "data": ["datalad"], - "doc": ["Sphinx>=1.4", "numpydoc", "matplotlib", "pydotplus", "pydot>=1.2.3"], + "doc": [ + 'dipy', + 'ipython', + 'matplotlib', + 'nbsphinx', + 'sphinx-argparse', + 'sphinx>=2.1.2', + 'sphinxcontrib-apidoc', + 'sphinxcontrib-napoleon', + ], "duecredit": ["duecredit"], "nipy": ["nitime", "nilearn<0.5.0", "dipy", "nipy", "matplotlib"], "profiler": ["psutil>=5.0"], diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index 3e0c02eda7..172f8f3902 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -138,7 +138,8 @@ class AlignEpiAnatPyOutputSpec(TraitedSpec): class AlignEpiAnatPy(AFNIPythonCommand): - """Align EPI to anatomical datasets or vice versa + """Align EPI to anatomical datasets or vice versa. + This Python script computes the alignment between two datasets, typically an EPI and an anatomical structural dataset, and applies the resulting transformation to one or the other to bring them into alignment. @@ -148,11 +149,12 @@ class AlignEpiAnatPy(AFNIPythonCommand): script combines multiple transformations, thereby minimizing the amount of interpolation applied to the data. 
- Basic Usage: - align_epi_anat.py -anat anat+orig -epi epi+orig -epi_base 5 + Basic Usage:: + + align_epi_anat.py -anat anat+orig -epi epi+orig -epi_base 5 - The user must provide EPI and anatomical datasets and specify the EPI - sub-brick to use as a base in the alignment. + The user must provide :abbr:`EPI (echo-planar imaging)` and anatomical datasets + and specify the EPI sub-brick to use as a base in the alignment. Internally, the script always aligns the anatomical to the EPI dataset, and the resulting transformation is saved to a 1D file. @@ -166,11 +168,8 @@ class AlignEpiAnatPy(AFNIPythonCommand): and requested (with options to turn on and off each of the steps) in order to create the aligned datasets. - For complete details, see the `align_epi_anat.py' Documentation. - `_ - Examples - ======== + -------- >>> from nipype.interfaces import afni >>> al_ea = afni.AlignEpiAnatPy() >>> al_ea.inputs.anat = "structural.nii" @@ -183,6 +182,12 @@ class AlignEpiAnatPy(AFNIPythonCommand): >>> al_ea.cmdline # doctest: +ELLIPSIS 'python2 ...align_epi_anat.py -anat structural.nii -epi_base 0 -epi_strip 3dAutomask -epi functional.nii -save_skullstrip -suffix _al -tshift off -volreg off' >>> res = allineate.run() # doctest: +SKIP + + See Also + -------- + For complete details, see the `align_epi_anat.py documentation. + `__. + """ _cmd = "align_epi_anat.py" @@ -547,8 +552,7 @@ class Allineate(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> allineate = afni.Allineate() >>> allineate.inputs.in_file = 'functional.nii' @@ -651,8 +655,7 @@ class AutoTcorrelate(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> corr = afni.AutoTcorrelate() >>> corr.inputs.in_file = 'functional.nii' @@ -718,8 +721,7 @@ class Automask(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> automask = afni.Automask() >>> automask.inputs.in_file = 'functional.nii' @@ -750,64 +752,70 @@ class AutoTLRCInputSpec(CommandLineInputSpec): copyfile=False, ) base = traits.Str( - desc=" Reference anatomical volume" - " Usually this volume is in some standard space like" - " TLRC or MNI space and with afni dataset view of" - " (+tlrc)." - " Preferably, this reference volume should have had" - " the skull removed but that is not mandatory." - " AFNI's distribution contains several templates." - ' For a longer list, use "whereami -show_templates"' - "TT_N27+tlrc --> Single subject, skull stripped volume." - " This volume is also known as " - " N27_SurfVol_NoSkull+tlrc elsewhere in " - " AFNI and SUMA land." - " (www.loni.ucla.edu, www.bic.mni.mcgill.ca)" - " This template has a full set of FreeSurfer" - " (surfer.nmr.mgh.harvard.edu)" - " surface models that can be used in SUMA. " - " For details, see Talairach-related link:" - " https://afni.nimh.nih.gov/afni/suma" - "TT_icbm452+tlrc --> Average volume of 452 normal brains." - " Skull Stripped. (www.loni.ucla.edu)" - "TT_avg152T1+tlrc --> Average volume of 152 normal brains." - " Skull Stripped.(www.bic.mni.mcgill.ca)" - "TT_EPI+tlrc --> EPI template from spm2, masked as TT_avg152T1" - " TT_avg152 and TT_EPI volume sources are from" - " SPM's distribution. (www.fil.ion.ucl.ac.uk/spm/)" - "If you do not specify a path for the template, the script" - "will attempt to locate the template AFNI's binaries directory." 
-        "NOTE: These datasets have been slightly modified from"
-        " their original size to match the standard TLRC"
-        " dimensions (Jean Talairach and Pierre Tournoux"
-        " Co-Planar Stereotaxic Atlas of the Human Brain"
-        " Thieme Medical Publishers, New York, 1988). "
-        " That was done for internal consistency in AFNI."
-        " You may use the original form of these"
-        " volumes if you choose but your TLRC coordinates"
-        " will not be consistent with AFNI's TLRC database"
-        " (San Antonio Talairach Daemon database), for example.",
+        desc="""\
+Reference anatomical volume.
+Usually this volume is in some standard space like
+TLRC or MNI space and with afni dataset view of
+(+tlrc).
+Preferably, this reference volume should have had
+the skull removed but that is not mandatory.
+AFNI's distribution contains several templates.
+For a longer list, use "whereami -show_templates"
+TT_N27+tlrc --> Single subject, skull stripped volume.
+This volume is also known as
+N27_SurfVol_NoSkull+tlrc elsewhere in
+AFNI and SUMA land.
+(www.loni.ucla.edu, www.bic.mni.mcgill.ca)
+This template has a full set of FreeSurfer
+(surfer.nmr.mgh.harvard.edu)
+surface models that can be used in SUMA.
+For details, see Talairach-related link:
+https://afni.nimh.nih.gov/afni/suma
+TT_icbm452+tlrc --> Average volume of 452 normal brains.
+Skull Stripped. (www.loni.ucla.edu)
+TT_avg152T1+tlrc --> Average volume of 152 normal brains.
+Skull Stripped. (www.bic.mni.mcgill.ca)
+TT_EPI+tlrc --> EPI template from spm2, masked as TT_avg152T1
+TT_avg152 and TT_EPI volume sources are from
+SPM's distribution. (www.fil.ion.ucl.ac.uk/spm/)
+If you do not specify a path for the template, the script
+will attempt to locate the template in AFNI's binaries directory.
+NOTE: These datasets have been slightly modified from
+their original size to match the standard TLRC
+dimensions (Jean Talairach and Pierre Tournoux
+Co-Planar Stereotaxic Atlas of the Human Brain
+Thieme Medical Publishers, New York, 1988).
+That was done for internal consistency in AFNI.
+You may use the original form of these
+volumes if you choose but your TLRC coordinates
+will not be consistent with AFNI's TLRC database
+(San Antonio Talairach Daemon database), for example.""",
         mandatory=True,
         argstr="-base %s",
     )
     no_ss = traits.Bool(
-        desc="Do not strip skull of input data set"
-        "(because skull has already been removed"
-        "or because template still has the skull)"
-        "NOTE: The -no_ss option is not all that optional."
-        " Here is a table of when you should and should not use -no_ss"
-        " Template Template"
-        " WITH skull WITHOUT skull"
-        " Dset."
-        " WITH skull -no_ss xxx "
-        " "
-        " WITHOUT skull No Cigar -no_ss"
-        " "
-        " Template means: Your template of choice"
-        " Dset. means: Your anatomical dataset"
-        " -no_ss means: Skull stripping should not be attempted on Dset"
-        " xxx means: Don't put anything, the script will strip Dset"
-        " No Cigar means: Don't try that combination, it makes no sense.",
+        desc="""\
+Do not strip skull of input data set
+(because skull has already been removed
+or because template still has the skull)
+NOTE: The ``-no_ss`` option is not all that optional.
+Here is a table of when you should and should not use ``-no_ss`` + + +------------------+------------+---------------+ + | Dataset | Template | + +==================+============+===============+ + | | w/ skull | wo/ skull | + +------------------+------------+---------------+ + | WITH skull | ``-no_ss`` | xxx | + +------------------+------------+---------------+ + | WITHOUT skull | No Cigar | ``-no_ss`` | + +------------------+------------+---------------+ + +Template means: Your template of choice +Dset. means: Your anatomical dataset +``-no_ss`` means: Skull stripping should not be attempted on Dset +xxx means: Don't put anything, the script will strip Dset +No Cigar means: Don't try that combination, it makes no sense.""", argstr="-no_ss", ) @@ -819,7 +827,7 @@ class AutoTLRC(AFNICommand): `_ Examples - ======== + -------- >>> from nipype.interfaces import afni >>> autoTLRC = afni.AutoTLRC() >>> autoTLRC.inputs.in_file = 'structural.nii' @@ -931,8 +939,7 @@ class Bandpass(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> from nipype.testing import example_data >>> bandpass = afni.Bandpass() @@ -1000,8 +1007,7 @@ class BlurInMask(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> bim = afni.BlurInMask() >>> bim.inputs.in_file = 'functional.nii' @@ -1056,8 +1062,7 @@ class BlurToFWHM(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> blur = afni.preprocess.BlurToFWHM() >>> blur.inputs.in_file = 'epi.nii' @@ -1113,8 +1118,7 @@ class ClipLevel(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces.afni import preprocess >>> cliplevel = preprocess.ClipLevel() >>> cliplevel.inputs.in_file = 'anatomical.nii' @@ -1196,8 +1200,7 @@ class DegreeCentrality(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> degree = afni.DegreeCentrality() >>> degree.inputs.in_file = 'functional.nii' @@ -1251,8 +1254,7 @@ class Despike(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> despike = afni.Despike() >>> despike.inputs.in_file = 'functional.nii' @@ -1292,8 +1294,7 @@ class Detrend(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> detrend = afni.Detrend() >>> detrend.inputs.in_file = 'functional.nii' @@ -1348,7 +1349,8 @@ class ECMInputSpec(CentralityInputSpec): ) eps = traits.Float( desc="sets the stopping criterion for the power iteration; " - "l2|v_old - v_new| < eps*|v_old|; default = 0.001", + ":math:`l2\\|v_\\text{old} - v_\\text{new}\\| < eps\\|v_\\text{old}\\|`; " + "default = 0.001", argstr="-eps %f", ) max_iter = traits.Int( @@ -1371,8 +1373,7 @@ class ECM(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> ecm = afni.ECM() >>> ecm.inputs.in_file = 'functional.nii' @@ -1428,8 +1429,7 @@ class Fim(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> fim = afni.Fim() >>> fim.inputs.in_file = 'functional.nii' @@ -1480,8 +1480,7 @@ class Fourier(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> fourier = afni.Fourier() >>> fourier.inputs.in_file = 'functional.nii' @@ -1546,8 +1545,7 @@ class Hist(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> hist = afni.Hist() >>> hist.inputs.in_file = 'functional.nii' @@ -1608,8 +1606,7 @@ class LFCD(AFNICommand): `_ 
Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> lfcd = afni.LFCD() >>> lfcd.inputs.in_file = 'functional.nii' @@ -1657,8 +1654,7 @@ class Maskave(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> maskave = afni.Maskave() >>> maskave.inputs.in_file = 'functional.nii' @@ -1712,8 +1708,7 @@ class Means(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> means = afni.Means() >>> means.inputs.in_file_a = 'im1.nii' @@ -1782,7 +1777,7 @@ class OutlierCountInputSpec(CommandLineInputSpec): False, usedefault=True, argstr="-range", - desc="write out the median + 3.5 MAD of outlier count with each " "timepoint", + desc="write out the median + 3.5 MAD of outlier count with each timepoint", ) save_outliers = traits.Bool(False, usedefault=True, desc="enables out_file option") outliers_file = File( @@ -1820,8 +1815,7 @@ class OutlierCount(CommandLine): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> toutcount = afni.OutlierCount() >>> toutcount.inputs.in_file = 'functional.nii' @@ -1909,7 +1903,7 @@ class QualityIndexInputSpec(CommandLineInputSpec): False, usedefault=True, argstr="-range", - desc="write out the median + 3.5 MAD of outlier count with each " "timepoint", + desc="write out the median + 3.5 MAD of outlier count with each timepoint", ) out_file = File( name_template="%s_tqual", @@ -1926,16 +1920,12 @@ class QualityIndexOutputSpec(TraitedSpec): class QualityIndex(CommandLine): - """Computes a `quality index' for each sub-brick in a 3D+time dataset. + """Computes a quality index for each sub-brick in a 3D+time dataset. The output is a 1D time series with the index for each sub-brick. The results are written to stdout. - For complete details, see the `3dTqual Documentation - `_ - Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tqual = afni.QualityIndex() >>> tqual.inputs.in_file = 'functional.nii' @@ -1943,6 +1933,11 @@ class QualityIndex(CommandLine): '3dTqual functional.nii > functional_tqual' >>> res = tqual.run() # doctest: +SKIP + See Also + -------- + For complete details, see the `3dTqual Documentation + `_ + """ _cmd = "3dTqual" @@ -2032,28 +2027,33 @@ class ROIStatsInputSpec(CommandLineInputSpec): ] stat = InputMultiObject( traits.Enum(_stat_names), - desc="statistics to compute. Options include: " - " * mean = Compute the mean using only non_zero voxels." - " Implies the opposite for the mean computed " - " by default.\n" - " * median = Compute the median of nonzero voxels\n" - " * mode = Compute the mode of nonzero voxels." - " (integral valued sets only)\n" - " * minmax = Compute the min/max of nonzero voxels\n" - " * sum = Compute the sum using only nonzero voxels.\n" - " * voxels = Compute the number of nonzero voxels\n" - " * sigma = Compute the standard deviation of nonzero" - " voxels\n" - "Statistics that include zero-valued voxels:\n" - " * zerominmax = Compute the min/max of all voxels.\n" - " * zerosigma = Compute the standard deviation of all" - " voxels.\n" - " * zeromedian = Compute the median of all voxels.\n" - " * zeromode = Compute the mode of all voxels.\n" - " * summary = Only output a summary line with the grand " - " mean across all briks in the input dataset." - " This option cannot be used with nomeanout.\n" - "More that one option can be specified.", + desc="""\ +Statistics to compute. Options include: + + * mean = Compute the mean using only non_zero voxels. 
+ Implies the opposite for the mean computed + by default. + * median = Compute the median of nonzero voxels + * mode = Compute the mode of nonzero voxels. + (integral valued sets only) + * minmax = Compute the min/max of nonzero voxels + * sum = Compute the sum using only nonzero voxels. + * voxels = Compute the number of nonzero voxels + * sigma = Compute the standard deviation of nonzero + voxels + +Statistics that include zero-valued voxels: + + * zerominmax = Compute the min/max of all voxels. + * zerosigma = Compute the standard deviation of all + voxels. + * zeromedian = Compute the median of all voxels. + * zeromode = Compute the mode of all voxels. + * summary = Only output a summary line with the grand + mean across all briks in the input dataset. + This option cannot be used with nomeanout. + +More that one option can be specified.""", argstr="%s...", ) out_file = File( @@ -2077,8 +2077,7 @@ class ROIStats(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> roistats = afni.ROIStats() >>> roistats.inputs.in_file = 'functional.nii' @@ -2192,7 +2191,7 @@ class Retroicor(AFNICommand): `_ Examples - ======== + -------- >>> from nipype.interfaces import afni >>> ret = afni.Retroicor() >>> ret.inputs.in_file = 'functional.nii' @@ -2285,8 +2284,7 @@ class Seg(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces.afni import preprocess >>> seg = preprocess.Seg() >>> seg.inputs.in_file = 'structural.nii' @@ -2343,8 +2341,7 @@ class SkullStrip(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> skullstrip = afni.SkullStrip() >>> skullstrip.inputs.in_file = 'functional.nii' @@ -2531,8 +2528,7 @@ class TCorrMap(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tcm = afni.TCorrMap() >>> tcm.inputs.in_file = 'functional.nii' @@ -2598,8 +2594,7 @@ class TCorrelate(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tcorrelate = afni.TCorrelate() >>> tcorrelate.inputs.xset= 'u_rc1s1_Template.nii' @@ -2637,7 +2632,7 @@ class TNormInputSpec(AFNICommandInputSpec): desc="L2 normalize (sum of squares = 1) [DEFAULT]", argstr="-norm2" ) normR = traits.Bool( - desc="normalize so sum of squares = number of time points * e.g., so RMS = 1.", + desc="normalize so sum of squares = number of time points \\* e.g., so RMS = 1.", argstr="-normR", ) norm1 = traits.Bool( @@ -2647,28 +2642,28 @@ class TNormInputSpec(AFNICommandInputSpec): desc="Scale so max absolute value = 1 (L_infinity norm)", argstr="-normx" ) polort = traits.Int( - desc="""Detrend with polynomials of order p before normalizing - [DEFAULT = don't do this] - * Use '-polort 0' to remove the mean, for example""", + desc="""\ +Detrend with polynomials of order p before normalizing [DEFAULT = don't do this]. +Use '-polort 0' to remove the mean, for example""", argstr="-polort %s", ) L1fit = traits.Bool( - desc="""Detrend with L1 regression (L2 is the default) - * This option is here just for the hell of it""", + desc="""\ +Detrend with L1 regression (L2 is the default) +This option is here just for the hell of it""", argstr="-L1fit", ) class TNorm(AFNICommand): - """Shifts voxel time series from input so that seperate slices are aligned + """Shifts voxel time series from input so that separate slices are aligned to the same temporal origin. For complete details, see the `3dTnorm Documentation. 
`_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tnorm = afni.TNorm() >>> tnorm.inputs.in_file = 'functional.nii' @@ -2702,104 +2697,128 @@ class TProjectInputSpec(AFNICommandInputSpec): name_source="in_file", ) censor = File( - desc="""filename of censor .1D time series - * This is a file of 1s and 0s, indicating which - time points are to be included (1) and which are - to be excluded (0).""", + desc="""\ +Filename of censor .1D time series. +This is a file of 1s and 0s, indicating which +time points are to be included (1) and which are +to be excluded (0).""", argstr="-censor %s", exists=True, ) censortr = traits.List( traits.Str(), - desc="""list of strings that specify time indexes - to be removed from the analysis. Each string is - of one of the following forms: - 37 => remove global time index #37 - 2:37 => remove time index #37 in run #2 - 37..47 => remove global time indexes #37-47 - 37-47 => same as above - 2:37..47 => remove time indexes #37-47 in run #2 - *:0-2 => remove time indexes #0-2 in all runs - +Time indexes within each run start at 0. - +Run indexes start at 1 (just be to confusing). - +N.B.: 2:37,47 means index #37 in run #2 and - global time index 47; it does NOT mean - index #37 in run #2 AND index #47 in run #2.""", + desc="""\ +List of strings that specify time indexes +to be removed from the analysis. Each string is +of one of the following forms: + +* ``37`` => remove global time index #37 +* ``2:37`` => remove time index #37 in run #2 +* ``37..47`` => remove global time indexes #37-47 +* ``37-47`` => same as above +* ``2:37..47`` => remove time indexes #37-47 in run #2 +* ``*:0-2`` => remove time indexes #0-2 in all runs + + * Time indexes within each run start at 0. + * Run indexes start at 1 (just be to confusing). + * N.B.: 2:37,47 means index #37 in run #2 and + global time index 47; it does NOT mean + index #37 in run #2 AND index #47 in run #2. + +""", argstr="-CENSORTR %s", ) cenmode = traits.Enum( "KILL", "ZERO", "NTRP", - desc="""specifies how censored time points are treated in - the output dataset: - + mode = ZERO ==> put zero values in their place - ==> output datset is same length as input - + mode = KILL ==> remove those time points - ==> output dataset is shorter than input - + mode = NTRP ==> censored values are replaced by interpolated - neighboring (in time) non-censored values, - BEFORE any projections, and then the - analysis proceeds without actual removal - of any time points -- this feature is to - keep the Spanish Inquisition happy. - * The default mode is KILL !!!""", + desc="""\ +Specifies how censored time points are treated in +the output dataset: + +* mode = ZERO -- put zero values in their place; + output datset is same length as input +* mode = KILL -- remove those time points; + output dataset is shorter than input +* mode = NTRP -- censored values are replaced by interpolated + neighboring (in time) non-censored values, + BEFORE any projections, and then the + analysis proceeds without actual removal + of any time points -- this feature is to + keep the Spanish Inquisition happy. +* The default mode is KILL !!! + +""", argstr="-cenmode %s", ) concat = File( - desc="""The catenation file, as in 3dDeconvolve, containing the - TR indexes of the start points for each contiguous run - within the input dataset (the first entry should be 0). 
-                ++ Also as in 3dDeconvolve, if the input dataset is
-                automatically catenated from a collection of datasets,
-                then the run start indexes are determined directly,
-                and '-concat' is not needed (and will be ignored).
-                ++ Each run must have at least 9 time points AFTER
-                censoring, or the program will not work!
-                ++ The only use made of this input is in setting up
-                the bandpass/stopband regressors.
-                ++ '-ort' and '-dsort' regressors run through all time
-                points, as read in. If you want separate projections
-                in each run, then you must either break these ort files
-                into appropriate components, OR you must run 3dTproject
-                for each run separately, using the appropriate pieces
-                from the ort files via the '{...}' selector for the
-                1D files and the '[...]' selector for the datasets.""",
+        desc="""\
+The catenation file, as in 3dDeconvolve, containing the
+TR indexes of the start points for each contiguous run
+within the input dataset (the first entry should be 0).
+
+* Also as in 3dDeconvolve, if the input dataset is
+  automatically catenated from a collection of datasets,
+  then the run start indexes are determined directly,
+  and '-concat' is not needed (and will be ignored).
+* Each run must have at least 9 time points AFTER
+  censoring, or the program will not work!
+* The only use made of this input is in setting up
+  the bandpass/stopband regressors.
+* '-ort' and '-dsort' regressors run through all time
+  points, as read in. If you want separate projections
+  in each run, then you must either break these ort files
+  into appropriate components, OR you must run 3dTproject
+  for each run separately, using the appropriate pieces
+  from the ort files via the ``{...}`` selector for the
+  1D files and the ``[...]`` selector for the datasets.
+
+""",
         exists=True,
         argstr="-concat %s",
     )
     noblock = traits.Bool(
-        desc="""Also as in 3dDeconvolve, if you want the program to treat
-                an auto-catenated dataset as one long run, use this option.
-                ++ However, '-noblock' will not affect catenation if you use
-                the '-concat' option.""",
+        desc="""\
+Also as in 3dDeconvolve, if you want the program to treat
+an auto-catenated dataset as one long run, use this option.
+However, '-noblock' will not affect catenation if you use
+the '-concat' option.""",
         argstr="-noblock",
     )
     ort = File(
-        desc="""Remove each column in file
-                ++ Each column will have its mean removed.""",
+        desc="""\
+Remove each column in file.
+Each column will have its mean removed.""",
         exists=True,
         argstr="-ort %s",
     )
     polort = traits.Int(
-        desc="""Remove polynomials up to and including degree pp.
-                ++ Default value is 2.
-                ++ It makes no sense to use a value of pp greater than
-                2, if you are bandpassing out the lower frequencies!
-                ++ For catenated datasets, each run gets a separate set
-                set of pp+1 Legendre polynomial regressors.
-                ++ Use of -polort -1 is not advised (if data mean != 0),
-                even if -ort contains constant terms, as all means are
-                removed.""",
+        desc="""\
+Remove polynomials up to and including degree pp.
+
+* Default value is 2.
+* It makes no sense to use a value of pp greater than
+  2, if you are bandpassing out the lower frequencies!
+* For catenated datasets, each run gets a separate
+  set of pp+1 Legendre polynomial regressors.
+* Use of -polort -1 is not advised (if data mean != 0),
+  even if -ort contains constant terms, as all means are
+  removed.
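+* As an illustrative sketch (not taken from the AFNI docs; file names
+  are hypothetical), ``-polort 2`` projects out Legendre polynomials of
+  orders 0, 1, and 2 -- the mean, linear, and quadratic trends -- from
+  each run::
+
+    3dTproject -input functional.nii -polort 2 -prefix detrended.nii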
+ +""", argstr="-polort %d", ) dsort = InputMultiObject( File(exists=True, copyfile=False), argstr="-dsort %s...", - desc="""Remove the 3D+time time series in dataset fset. - ++ That is, 'fset' contains a different nuisance time - series for each voxel (e.g., from AnatICOR). - ++ Multiple -dsort options are allowed.""", + desc="""\ +Remove the 3D+time time series in dataset fset. + +* That is, 'fset' contains a different nuisance time + series for each voxel (e.g., from AnatICOR). +* Multiple -dsort options are allowed. + +""", ) bandpass = traits.Tuple( traits.Float, @@ -2814,31 +2833,38 @@ class TProjectInputSpec(AFNICommandInputSpec): argstr="-stopband %g %g", ) TR = traits.Float( - desc="""Use time step dd for the frequency calculations, - rather than the value stored in the dataset header.""", + desc="""\ +Use time step dd for the frequency calculations, +rather than the value stored in the dataset header.""", argstr="-TR %g", ) mask = File( exists=True, - desc="""Only operate on voxels nonzero in the mset dataset. - ++ Voxels outside the mask will be filled with zeros. - ++ If no masking option is given, then all voxels - will be processed.""", + desc="""\ +Only operate on voxels nonzero in the mset dataset. + +* Voxels outside the mask will be filled with zeros. +* If no masking option is given, then all voxels + will be processed. + +""", argstr="-mask %s", ) automask = traits.Bool( desc="""Generate a mask automatically""", xor=["mask"], argstr="-automask" ) blur = traits.Float( - desc="""Blur (inside the mask only) with a filter that has - width (FWHM) of fff millimeters. - ++ Spatial blurring (if done) is after the time - series filtering.""", + desc="""\ +Blur (inside the mask only) with a filter that has +width (FWHM) of fff millimeters. +Spatial blurring (if done) is after the time +series filtering.""", argstr="-blur %g", ) norm = traits.Bool( - desc="""Normalize each output time series to have sum of - squares = 1. This is the LAST operation.""", + desc=""" +Normalize each output time series to have sum of +squares = 1. This is the LAST operation.""", argstr="-norm", ) @@ -2848,18 +2874,14 @@ class TProject(AFNICommand): This program projects (detrends) out various 'nuisance' time series from each voxel in the input dataset. Note that all the projections are done via linear regression, including the frequency-based options such - as '-passband'. In this way, you can bandpass time-censored data, and at + as ``-passband``. In this way, you can bandpass time-censored data, and at the same time, remove other time series of no interest (e.g., physiological estimates, motion parameters). Shifts voxel time series from input so that seperate slices are aligned to the same temporal origin. - For complete details, see the `3dTproject Documentation. - `_ - Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tproject = afni.TProject() >>> tproject.inputs.in_file = 'functional.nii' @@ -2871,6 +2893,11 @@ class TProject(AFNICommand): '3dTproject -input functional.nii -automask -bandpass 0.00667 99999 -polort 3 -prefix projected.nii.gz' >>> res = tproject.run() # doctest: +SKIP + See Also + -------- + For complete details, see the `3dTproject Documentation. 
+ `__ + """ _cmd = "3dTproject" @@ -2972,8 +2999,7 @@ class TShift(AFNICommand): `_ Examples - ======== - + -------- Slice timing details may be specified explicitly via the ``slice_timing`` input: @@ -3108,28 +3134,28 @@ class TSmoothInputSpec(AFNICommandInputSpec): desc="Sets the data type of the output dataset", argstr="-datum %s" ) lin = traits.Bool( - desc="3 point linear filter: 0.15*a + 0.70*b + 0.15*c" + desc=r"3 point linear filter: :math:`0.15\,a + 0.70\,b + 0.15\,c`" "[This is the default smoother]", argstr="-lin", ) med = traits.Bool(desc="3 point median filter: median(a,b,c)", argstr="-med") osf = traits.Bool( desc="3 point order statistics filter:" - "0.15*min(a,b,c) + 0.70*median(a,b,c) + 0.15*max(a,b,c)", + r":math:`0.15\,min(a,b,c) + 0.70\,median(a,b,c) + 0.15\,max(a,b,c)`", argstr="-osf", ) lin3 = traits.Int( - desc="3 point linear filter: 0.5*(1-m)*a + m*b + 0.5*(1-m)*c" + desc=r"3 point linear filter: :math:`0.5\,(1-m)\,a + m\,b + 0.5\,(1-m)\,c" "Here, 'm' is a number strictly between 0 and 1.", argstr="-3lin %d", ) hamming = traits.Int( argstr="-hamming %d", - desc="Use N point Hamming windows." "(N must be odd and bigger than 1.)", + desc="Use N point Hamming windows. (N must be odd and bigger than 1.)", ) blackman = traits.Int( argstr="-blackman %d", - desc="Use N point Blackman windows." "(N must be odd and bigger than 1.)", + desc="Use N point Blackman windows. (N must be odd and bigger than 1.)", ) custom = File( argstr="-custom %s", @@ -3150,8 +3176,7 @@ class TSmooth(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> from nipype.testing import example_data >>> smooth = afni.TSmooth() @@ -3252,8 +3277,7 @@ class Volreg(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> volreg = afni.Volreg() >>> volreg.inputs.in_file = 'functional.nii' @@ -3350,14 +3374,10 @@ class WarpOutputSpec(TraitedSpec): class Warp(AFNICommand): - """Use 3dWarp for spatially transforming a dataset - - For complete details, see the `3dWarp Documentation. - `_ + """Use 3dWarp for spatially transforming a dataset. Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> warp = afni.Warp() >>> warp.inputs.in_file = 'structural.nii' @@ -3375,6 +3395,11 @@ class Warp(AFNICommand): '3dWarp -newgrid 1.000000 -prefix trans.nii.gz structural.nii' >>> res = warp_2.run() # doctest: +SKIP + See Also + -------- + For complete details, see the `3dWarp Documentation. + `__. + """ _cmd = "3dWarp" @@ -3422,51 +3447,59 @@ class QwarpInputSpec(AFNICommandInputSpec): name_source=["in_file"], desc="""\ Sets the prefix/suffix for the output datasets. + * The source dataset is warped to match the base -and gets prefix 'ppp'. (Except if '-plusminus' is used + and gets prefix 'ppp'. (Except if '-plusminus' is used * The final interpolation to this output dataset is -done using the 'wsinc5' method. See the output of - 3dAllineate -HELP -(in the "Modifying '-final wsinc5'" section) for -the lengthy technical details. + done using the 'wsinc5' method. See the output of + 3dAllineate -HELP + (in the "Modifying '-final wsinc5'" section) for + the lengthy technical details. * The 3D warp used is saved in a dataset with -prefix 'ppp_WARP' -- this dataset can be used -with 3dNwarpApply and 3dNwarpCat, for example. + prefix 'ppp_WARP' -- this dataset can be used + with 3dNwarpApply and 3dNwarpCat, for example. 
 * To be clear, this is the warp from source dataset
-coordinates to base dataset coordinates, where the
-values at each base grid point are the xyz displacments
-needed to move that grid point's xyz values to the
-corresponding xyz values in the source dataset:
-base( (x,y,z) + WARP(x,y,z) ) matches source(x,y,z)
-Another way to think of this warp is that it 'pulls'
-values back from source space to base space.
+  coordinates to base dataset coordinates, where the
+  values at each base grid point are the xyz displacements
+  needed to move that grid point's xyz values to the
+  corresponding xyz values in the source dataset:
+  base( (x,y,z) + WARP(x,y,z) ) matches source(x,y,z)
+  Another way to think of this warp is that it 'pulls'
+  values back from source space to base space.
 * 3dNwarpApply would use 'ppp_WARP' to transform datasets
-aligned with the source dataset to be aligned with the
-base dataset.
-** If you do NOT want this warp saved, use the option '-nowarp'.
--->> (However, this warp is usually the most valuable possible output!)
+  aligned with the source dataset to be aligned with the
+  base dataset.
+
+**If you do NOT want this warp saved, use the option '-nowarp'**.
+(However, this warp is usually the most valuable possible output!)
+
 * If you want to calculate and save the inverse 3D warp,
-use the option '-iwarp'. This inverse warp will then be
-saved in a dataset with prefix 'ppp_WARPINV'.
+  use the option '-iwarp'. This inverse warp will then be
+  saved in a dataset with prefix 'ppp_WARPINV'.
 * This inverse warp could be used to transform data from base
-space to source space, if you need to do such an operation.
+  space to source space, if you need to do such an operation.
 * You can easily compute the inverse later, say by a command like
- 3dNwarpCat -prefix Z_WARPINV 'INV(Z_WARP+tlrc)'
-or the inverse can be computed as needed in 3dNwarpApply, like
- 3dNwarpApply -nwarp 'INV(Z_WARP+tlrc)' -source Dataset.nii ...""",
-    )
+  3dNwarpCat -prefix Z_WARPINV 'INV(Z_WARP+tlrc)'
+  or the inverse can be computed as needed in 3dNwarpApply, like
+  3dNwarpApply -nwarp 'INV(Z_WARP+tlrc)' -source Dataset.nii ...
+
+""")
     resample = traits.Bool(
-        desc="This option simply resamples the source dataset to match the"
-        "base dataset grid. You can use this if the two datasets"
-        "overlap well (as seen in the AFNI GUI), but are not on the"
-        "same 3D grid."
-        "* If they don't overlap well, allineate them first"
-        "* The reampling here is done with the"
-        "'wsinc5' method, which has very little blurring artifact."
-        "* If the base and source datasets ARE on the same 3D grid,"
-        "then the -resample option will be ignored."
-        "* You CAN use -resample with these 3dQwarp options:"
-        "-plusminus -inilev -iniwarp -duplo",
+        desc="""\
+This option simply resamples the source dataset to match the
+base dataset grid. You can use this if the two datasets
+overlap well (as seen in the AFNI GUI), but are not on the
+same 3D grid.
+
+* If they don't overlap well, allineate them first
+* The resampling here is done with the
+  'wsinc5' method, which has very little blurring artifact.
+* If the base and source datasets ARE on the same 3D grid,
+  then the -resample option will be ignored.
+* You CAN use -resample with these 3dQwarp options: + -plusminus -inilev -iniwarp -duplo + +""", argstr="-resample", ) allineate = traits.Bool( @@ -3477,7 +3510,7 @@ class QwarpInputSpec(AFNICommandInputSpec): argstr="-allineate", ) allineate_opts = traits.Str( - desc="add extra options to the 3dAllineate command to be run by " "3dQwarp.", + desc="add extra options to the 3dAllineate command to be run by 3dQwarp.", argstr="-allineate_opts %s", requires=["allineate"], ) @@ -3489,53 +3522,64 @@ class QwarpInputSpec(AFNICommandInputSpec): ) pear = traits.Bool( desc="Use strict Pearson correlation for matching." - "* Not usually recommended, since the 'clipped Pearson' method" + "Not usually recommended, since the 'clipped Pearson' method" "used by default will reduce the impact of outlier values.", argstr="-pear", ) noneg = traits.Bool( - desc="Replace negative values in either input volume with 0." - "* If there ARE negative input values, and you do NOT use -noneg," - "then strict Pearson correlation will be used, since the 'clipped'" - "method only is implemented for non-negative volumes." - "* '-noneg' is not the default, since there might be situations where" - "you want to align datasets with positive and negative values mixed." - "* But, in many cases, the negative values in a dataset are just the" - "result of interpolation artifacts (or other peculiarities), and so" - "they should be ignored. That is what '-noneg' is for.", + desc="""\ +Replace negative values in either input volume with 0. + +* If there ARE negative input values, and you do NOT use -noneg, + then strict Pearson correlation will be used, since the 'clipped' + method only is implemented for non-negative volumes. +* '-noneg' is not the default, since there might be situations where + you want to align datasets with positive and negative values mixed. +* But, in many cases, the negative values in a dataset are just the + result of interpolation artifacts (or other peculiarities), and so + they should be ignored. That is what '-noneg' is for. + +""", argstr="-noneg", ) nopenalty = traits.Bool( - desc="Replace negative values in either input volume with 0." - "* If there ARE negative input values, and you do NOT use -noneg," - "then strict Pearson correlation will be used, since the 'clipped'" - "method only is implemented for non-negative volumes." - "* '-noneg' is not the default, since there might be situations where" - "you want to align datasets with positive and negative values mixed." - "* But, in many cases, the negative values in a dataset are just the" - "result of interpolation artifacts (or other peculiarities), and so" - "they should be ignored. That is what '-noneg' is for.", + desc="""\ +Replace negative values in either input volume with 0. + +* If there ARE negative input values, and you do NOT use -noneg, + then strict Pearson correlation will be used, since the 'clipped' + method only is implemented for non-negative volumes. +* '-noneg' is not the default, since there might be situations where + you want to align datasets with positive and negative values mixed. +* But, in many cases, the negative values in a dataset are just the + result of interpolation artifacts (or other peculiarities), and so + they should be ignored. That is what '-noneg' is for. + +""", argstr="-nopenalty", ) penfac = traits.Float( - desc="Use this value to weight the penalty." 
-        "The default value is 1.Larger values mean the"
-        "penalty counts more, reducing grid distortions,"
-        "insha'Allah; '-nopenalty' is the same as '-penfac 0'."
-        " -->>* [23 Sep 2013] -- Zhark increased the default value of"
-        " the penalty by a factor of 5, and also made it get"
-        " progressively larger with each level of refinement."
-        " Thus, warping results will vary from earlier instances"
-        " of 3dQwarp."
-        " * The progressive increase in the penalty at higher levels"
-        " means that the 'cost function' can actually look like the"
-        " alignment is getting worse when the levels change."
-        " * IF you wish to turn off this progression, for whatever"
-        " reason (e.g., to keep compatibility with older results),"
-        " use the option '-penold'.To be completely compatible with"
-        " the older 3dQwarp, you'll also have to use '-penfac 0.2'.",
         argstr="-penfac %f",
-    )
+        desc="""\
+Use this value to weight the penalty.
+The default value is 1. Larger values mean the
+penalty counts more, reducing grid distortions,
+insha'Allah; '-nopenalty' is the same as '-penfac 0'.
+On 23 Sep 2013, Zhark increased the default value of
+the penalty by a factor of 5, and also made it get
+progressively larger with each level of refinement.
+Thus, warping results will vary from earlier instances
+of 3dQwarp.
+
+* The progressive increase in the penalty at higher levels
+  means that the 'cost function' can actually look like the
+  alignment is getting worse when the levels change.
+* IF you wish to turn off this progression, for whatever
+  reason (e.g., to keep compatibility with older results),
+  use the option '-penold'. To be completely compatible with
+  the older 3dQwarp, you'll also have to use '-penfac 0.2'.
+
+""")
    noweight = traits.Bool(
        desc="If you want a binary weight (the old default), use this option."
        "That is, each voxel in the base volume automask will be"
        "weighted the same in the computation of the cost functional.",
        argstr="-noweight",
    )
    weight = File(
        desc="Instead of computing the weight from the base dataset,"
        "directly input the weight volume from dataset 'www'."
-        "* Useful if you know what over parts of the base image you"
+        "Useful if you know what over parts of the base image you"
        "want to emphasize or de-emphasize the matching functional.",
        argstr="-weight %s",
        exists=True,
    )
    wball = traits.List(
        traits.Int(),
-        desc="-wball x y z r f"
-        "Enhance automatic weight from '-useweight' by a factor"
-        "of 1+f*Gaussian(FWHM=r) centered in the base image at"
-        "DICOM coordinates (x,y,z) and with radius 'r'. The"
-        "goal of this option is to try and make the alignment"
-        "better in a specific part of the brain."
-        "* Example: -wball 0 14 6 30 40"
-        "to emphasize the thalamic area (in MNI/Talairach space)."
-        "* The 'r' parameter must be positive!"
-        "* The 'f' parameter must be between 1 and 100 (inclusive)."
-        "* '-wball' does nothing if you input your own weight"
-        "with the '-weight' option."
-        "* '-wball' does change the binary weight created by"
-        "the '-noweight' option."
-        "* You can only use '-wball' once in a run of 3dQwarp."
-        "*** The effect of '-wball' is not dramatic. The example"
-        "above makes the average brain image across a collection"
-        "of subjects a little sharper in the thalamic area, which"
-        "might have some small value. If you care enough about"
-        "alignment to use '-wball', then you should examine the"
-        "results from 3dQwarp for each subject, to see if the"
-        "alignments are good enough for your purposes.",
+        desc="""\
+``-wball x y z r f``
+Enhance automatic weight from '-useweight' by a factor
+of 1+f\\*Gaussian(FWHM=r) centered in the base image at
+DICOM coordinates (x,y,z) and with radius 'r'. The
+goal of this option is to try and make the alignment
+better in a specific part of the brain.
+Example: -wball 0 14 6 30 40
+to emphasize the thalamic area (in MNI/Talairach space).
+
+* The 'r' parameter must be positive!
+* The 'f' parameter must be between 1 and 100 (inclusive).
+* '-wball' does nothing if you input your own weight
+  with the '-weight' option.
+* '-wball' does change the binary weight created by
+  the '-noweight' option.
+* You can only use '-wball' once in a run of 3dQwarp.
+
+**The effect of '-wball' is not dramatic.** The example
+above makes the average brain image across a collection
+of subjects a little sharper in the thalamic area, which
+might have some small value. If you care enough about
+alignment to use '-wball', then you should examine the
+results from 3dQwarp for each subject, to see if the
+alignments are good enough for your purposes.""",
        argstr="-wball %s",
        minlen=5,
        maxlen=5,
+        xor=['wmask'],
    )
    traits.Tuple((traits.Float(), traits.Float()), argstr="-bpass %f %f")
    wmask = traits.Tuple(
        (File(exists=True), traits.Float()),
-        desc="-wmask ws f"
-        "Similar to '-wball', but here, you provide a dataset 'ws'"
-        "that indicates where to increase the weight."
-        "* The 'ws' dataset must be on the same 3D grid as the base dataset."
-        "* 'ws' is treated as a mask -- it only matters where it"
-        "is nonzero -- otherwise, the values inside are not used."
-        "* After 'ws' comes the factor 'f' by which to increase the"
-        "automatically computed weight. Where 'ws' is nonzero,"
-        "the weighting will be multiplied by (1+f)."
-        "* As with '-wball', the factor 'f' should be between 1 and 100."
-        "* You cannot use '-wball' and '-wmask' together!",
+        desc="""\
+Similar to '-wball', but here, you provide a dataset 'ws'
+that indicates where to increase the weight.
+
+* The 'ws' dataset must be on the same 3D grid as the base dataset.
+* 'ws' is treated as a mask -- it only matters where it
+  is nonzero -- otherwise, the values inside are not used.
+* After 'ws' comes the factor 'f' by which to increase the
+  automatically computed weight. Where 'ws' is nonzero,
+  the weighting will be multiplied by (1+f).
+* As with '-wball', the factor 'f' should be between 1 and 100.
+
+""",
        argstr="-wpass %s %f",
+        xor=['wball'],
    )
    out_weight_file = File(
        argstr="-wtprefix %s", desc="Write the weight volume to disk as a dataset"
    )
    blur = traits.List(
        traits.Float(),
-        desc="Gaussian blur the input images by 'bb' (FWHM) voxels before"
-        "doing the alignment (the output dataset will not be blurred)."
-        "The default is 2.345 (for no good reason)."
-        "* Optionally, you can provide 2 values for 'bb', and then"
-        "the first one is applied to the base volume, the second"
-        "to the source volume."
-        "-->>* e.g., '-blur 0 3' to skip blurring the base image"
-        "(if the base is a blurry template, for example)."
-        "* A negative blur radius means to use 3D median filtering,"
-        "rather than Gaussian blurring. This type of filtering will"
-        "better preserve edges, which can be important in alignment."
-        "* If the base is a template volume that is already blurry,"
-        "you probably don't want to blur it again, but blurring"
-        "the source volume a little is probably a good idea, to"
-        "help the program avoid trying to match tiny features."
-        "* Note that -duplo will blur the volumes some extra"
-        "amount for the initial small-scale warping, to make"
-        "that phase of the program converge more rapidly.",
+        desc="""\
+Gaussian blur the input images by 'bb' (FWHM) voxels before
+doing the alignment (the output dataset will not be blurred).
+The default is 2.345 (for no good reason).
+
+* Optionally, you can provide 2 values for 'bb', and then
+  the first one is applied to the base volume, the second
+  to the source volume.
+  e.g., '-blur 0 3' to skip blurring the base image
+  (if the base is a blurry template, for example).
+* A negative blur radius means to use 3D median filtering,
+  rather than Gaussian blurring. This type of filtering will
+  better preserve edges, which can be important in alignment.
+* If the base is a template volume that is already blurry,
+  you probably don't want to blur it again, but blurring
+  the source volume a little is probably a good idea, to
+  help the program avoid trying to match tiny features.
+* Note that -duplo will blur the volumes some extra
+  amount for the initial small-scale warping, to make
+  that phase of the program converge more rapidly.
+
+""",
        argstr="-blur %s",
        minlen=1,
        maxlen=2,
    )
    pblur = traits.List(
        traits.Float(),
-        desc="Use progressive blurring; that is, for larger patch sizes,"
-        "the amount of blurring is larger. The general idea is to"
-        "avoid trying to match finer details when the patch size"
-        "and incremental warps are coarse. When '-blur' is used"
-        "as well, it sets a minimum amount of blurring that will"
-        "be used. [06 Aug 2014 -- '-pblur' may become the default someday]."
-        "* You can optionally give the fraction of the patch size that"
-        "is used for the progressive blur by providing a value between"
-        "0 and 0.25 after '-pblur'. If you provide TWO values, the"
-        "the first fraction is used for progressively blurring the"
-        "base image and the second for the source image. The default"
-        "parameters when just '-pblur' is given is the same as giving"
-        "the options as '-pblur 0.09 0.09'."
-        "* '-pblur' is useful when trying to match 2 volumes with high"
-        "amounts of detail; e.g, warping one subject's brain image to"
-        "match another's, or trying to warp to match a detailed template."
-        "* Note that using negative values with '-blur' means that the"
-        "progressive blurring will be done with median filters, rather"
-        "than Gaussian linear blurring."
-        "-->>*** The combination of the -allineate and -pblur options will make"
-        "the results of using 3dQwarp to align to a template somewhat"
-        "less sensitive to initial head position and scaling.",
+        desc="""\
+Use progressive blurring; that is, for larger patch sizes,
+the amount of blurring is larger. The general idea is to
+avoid trying to match finer details when the patch size
+and incremental warps are coarse. When '-blur' is used
+as well, it sets a minimum amount of blurring that will
+be used. [06 Aug 2014 -- '-pblur' may become the default someday].
+
+* You can optionally give the fraction of the patch size that
+  is used for the progressive blur by providing a value between
+  0 and 0.25 after '-pblur'. If you provide TWO values,
+  the first fraction is used for progressively blurring the
+  base image and the second for the source image.
The default + parameters when just '-pblur' is given is the same as giving + the options as '-pblur 0.09 0.09'. +* '-pblur' is useful when trying to match 2 volumes with high + amounts of detail; e.g, warping one subject's brain image to + match another's, or trying to warp to match a detailed template. +* Note that using negative values with '-blur' means that the + progressive blurring will be done with median filters, rather + than Gaussian linear blurring. + +Note: The combination of the -allineate and -pblur options will make +the results of using 3dQwarp to align to a template somewhat +less sensitive to initial head position and scaling.""", argstr="-pblur %s", minlen=1, maxlen=2, @@ -3653,7 +3711,7 @@ class QwarpInputSpec(AFNICommandInputSpec): desc="Here, 'ee' is a dataset to specify a mask of voxels" "to EXCLUDE from the analysis -- all voxels in 'ee'" "that are NONZERO will not be used in the alignment." - "* The base image always automasked -- the emask is" + "The base image always automasked -- the emask is" "extra, to indicate voxels you definitely DON'T want" "included in the matching process, even if they are" "inside the brain.", @@ -3666,192 +3724,236 @@ class QwarpInputSpec(AFNICommandInputSpec): noZdis = traits.Bool(desc="Warp will not displace in z direction", argstr="-noZdis") iniwarp = traits.List( File(exists=True, copyfile=False), - desc="A dataset with an initial nonlinear warp to use." - "* If this option is not used, the initial warp is the identity." - "* You can specify a catenation of warps (in quotes) here, as in" - "program 3dNwarpApply." - "* As a special case, if you just input an affine matrix in a .1D" - "file, that will work also -- it is treated as giving the initial" - 'warp via the string "IDENT(base_dataset) matrix_file.aff12.1D".' - "* You CANNOT use this option with -duplo !!" - "* -iniwarp is usually used with -inilev to re-start 3dQwarp from" - "a previous stopping point.", + desc="""\ +A dataset with an initial nonlinear warp to use. + +* If this option is not used, the initial warp is the identity. +* You can specify a catenation of warps (in quotes) here, as in + program 3dNwarpApply. +* As a special case, if you just input an affine matrix in a .1D + file, that will work also -- it is treated as giving the initial + warp via the string "IDENT(base_dataset) matrix_file.aff12.1D". +* You CANNOT use this option with -duplo !! +* -iniwarp is usually used with -inilev to re-start 3dQwarp from + a previous stopping point. + +""", argstr="-iniwarp %s", xor=["duplo"], ) inilev = traits.Int( - desc="The initial refinement 'level' at which to start." - "* Usually used with -iniwarp; CANNOT be used with -duplo." - "* The combination of -inilev and -iniwarp lets you take the" - "results of a previous 3dQwarp run and refine them further:" - "Note that the source dataset in the second run is the SAME as" - "in the first run. If you don't see why this is necessary," - "then you probably need to seek help from an AFNI guru.", + desc="""\ +The initial refinement 'level' at which to start. + +* Usually used with -iniwarp; CANNOT be used with -duplo. +* The combination of -inilev and -iniwarp lets you take the + results of a previous 3dQwarp run and refine them further: + Note that the source dataset in the second run is the SAME as + in the first run. If you don't see why this is necessary, + then you probably need to seek help from an AFNI guru. 
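+* A sketch of such a two-stage run, loosely adapted from the 3dQwarp
+  help (dataset names and the restart level are illustrative only)::
+
+    3dQwarp -prefix Q25 -source SUB+tlrc -base TEMPLATE+tlrc -minpatch 25
+    3dQwarp -prefix Q11 -source SUB+tlrc -base TEMPLATE+tlrc -minpatch 11 -inilev 7 -iniwarp Q25_WARP+tlrc
+
+  Here the second run resumes refinement from the warp the first run
+  saved under the 'Q25_WARP' prefix.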
+ +""", argstr="-inilev %d", xor=["duplo"], ) minpatch = traits.Int( - desc="* The value of mm should be an odd integer." - "* The default value of mm is 25." - "* For more accurate results than mm=25, try 19 or 13." - "* The smallest allowed patch size is 5." - "* You may want stop at a larger patch size (say 7 or 9) and use" - "the -Qfinal option to run that final level with quintic warps," - "which might run faster and provide the same degree of warp detail." - "* Trying to make two different brain volumes match in fine detail" - "is usually a waste of time, especially in humans. There is too" - "much variability in anatomy to match gyrus to gyrus accurately." - "For this reason, the default minimum patch size is 25 voxels." - "Using a smaller '-minpatch' might try to force the warp to" - "match features that do not match, and the result can be useless" - "image distortions -- another reason to LOOK AT THE RESULTS.", + desc="""\ +The value of mm should be an odd integer. + +* The default value of mm is 25. +* For more accurate results than mm=25, try 19 or 13. +* The smallest allowed patch size is 5. +* You may want stop at a larger patch size (say 7 or 9) and use + the -Qfinal option to run that final level with quintic warps, + which might run faster and provide the same degree of warp detail. +* Trying to make two different brain volumes match in fine detail + is usually a waste of time, especially in humans. There is too + much variability in anatomy to match gyrus to gyrus accurately. + For this reason, the default minimum patch size is 25 voxels. + Using a smaller '-minpatch' might try to force the warp to + match features that do not match, and the result can be useless + image distortions -- another reason to LOOK AT THE RESULTS. + +""", argstr="-minpatch %d", ) maxlev = traits.Int( - desc="The initial refinement 'level' at which to start." - "* Usually used with -iniwarp; CANNOT be used with -duplo." - "* The combination of -inilev and -iniwarp lets you take the" - "results of a previous 3dQwarp run and refine them further:" - "Note that the source dataset in the second run is the SAME as" - "in the first run. If you don't see why this is necessary," - "then you probably need to seek help from an AFNI guru.", + desc="""\ +The initial refinement 'level' at which to start. + +* Usually used with -iniwarp; CANNOT be used with -duplo. +* The combination of -inilev and -iniwarp lets you take the + results of a previous 3dQwarp run and refine them further: + Note that the source dataset in the second run is the SAME as + in the first run. If you don't see why this is necessary, + then you probably need to seek help from an AFNI guru. + +""", argstr="-maxlev %d", xor=["duplo"], position=-1, ) gridlist = File( - desc="This option provides an alternate way to specify the patch" - "grid sizes used in the warp optimization process. 'gl' is" - "a 1D file with a list of patches to use -- in most cases," - "you will want to use it in the following form:" - "-gridlist '1D: 0 151 101 75 51'" - "* Here, a 0 patch size means the global domain. Patch sizes" - "otherwise should be odd integers >= 5." - "* If you use the '0' patch size again after the first position," - "you will actually get an iteration at the size of the" - "default patch level 1, where the patch sizes are 75% of" - "the volume dimension. There is no way to force the program" - "to literally repeat the sui generis step of lev=0." 
-        "* You cannot use -gridlist with -duplo or -plusminus!",
+        desc="""\
+This option provides an alternate way to specify the patch
+grid sizes used in the warp optimization process. 'gl' is
+a 1D file with a list of patches to use -- in most cases,
+you will want to use it in the following form:
+``-gridlist '1D: 0 151 101 75 51'``
+
+* Here, a 0 patch size means the global domain. Patch sizes
+  otherwise should be odd integers >= 5.
+* If you use the '0' patch size again after the first position,
+  you will actually get an iteration at the size of the
+  default patch level 1, where the patch sizes are 75% of
+  the volume dimension. There is no way to force the program
+  to literally repeat the sui generis step of lev=0.
+
+""",
        argstr="-gridlist %s",
        exists=True,
        copyfile=False,
        xor=["duplo", "plusminus"],
    )
    allsave = traits.Bool(
-        desc="This option lets you save the output warps from each level"
-        "of the refinement process. Mostly used for experimenting."
-        "* Cannot be used with -nopadWARP, -duplo, or -plusminus."
-        "* Will only save all the outputs if the program terminates"
-        "normally -- if it crashes, or freezes, then all these"
-        "warps are lost.",
+        desc="""\
+This option lets you save the output warps from each level
+of the refinement process. Mostly used for experimenting.
+Will only save all the outputs if the program terminates
+normally -- if it crashes, or freezes, then all these
+warps are lost.""",
        argstr="-allsave",
        xor=["nopadWARP", "duplo", "plusminus"],
    )
    duplo = traits.Bool(
-        desc="Start off with 1/2 scale versions of the volumes,"
-        "for getting a speedy coarse first alignment."
-        "* Then scales back up to register the full volumes."
-        "The goal is greater speed, and it seems to help this"
-        "positively piggish program to be more expeditious."
-        "* However, accuracy is somewhat lower with '-duplo',"
-        "for reasons that currenly elude Zhark; for this reason,"
-        "the Emperor does not usually use '-duplo'.",
+        desc="""\
+Start off with 1/2 scale versions of the volumes,
+for getting a speedy coarse first alignment.
+
+* Then scales back up to register the full volumes.
+  The goal is greater speed, and it seems to help this
+  positively piggish program to be more expeditious.
+* However, accuracy is somewhat lower with '-duplo',
+  for reasons that currently elude Zhark; for this reason,
+  the Emperor does not usually use '-duplo'.
+
+""",
        argstr="-duplo",
        xor=["gridlist", "maxlev", "inilev", "iniwarp", "plusminus", "allsave"],
    )
    workhard = traits.Bool(
-        desc="Iterate more times, which can help when the volumes are"
-        "hard to align at all, or when you hope to get a more precise"
-        "alignment."
-        "* Slows the program down (possibly a lot), of course."
-        "* When you combine '-workhard' with '-duplo', only the"
-        "full size volumes get the extra iterations."
-        "* For finer control over which refinement levels work hard,"
-        "you can use this option in the form (for example)"
-        "     -workhard:4:7"
-        "which implies the extra iterations will be done at levels"
-        "4, 5, 6, and 7, but not otherwise."
-        "* You can also use '-superhard' to iterate even more, but"
-        "this extra option will REALLY slow things down."
-        "-->>* Under most circumstances, you should not need to use either"
-        "-workhard or -superhard."
-        "-->>* The fastest way to register to a template image is via the"
-        "-duplo option, and without the -workhard or -superhard options."
- "-->>* If you use this option in the form '-Workhard' (first letter" - "in upper case), then the second iteration at each level is" - "done with quintic polynomial warps.", + desc="""\ +Iterate more times, which can help when the volumes are +hard to align at all, or when you hope to get a more precise +alignment. + +* Slows the program down (possibly a lot), of course. +* When you combine '-workhard' with '-duplo', only the + full size volumes get the extra iterations. +* For finer control over which refinement levels work hard, + you can use this option in the form (for example) ``-workhard:4:7`` + which implies the extra iterations will be done at levels + 4, 5, 6, and 7, but not otherwise. +* You can also use '-superhard' to iterate even more, but + this extra option will REALLY slow things down. + + * Under most circumstances, you should not need to use either + ``-workhard`` or ``-superhard``. + * The fastest way to register to a template image is via the + ``-duplo`` option, and without the ``-workhard`` or ``-superhard`` options. + * If you use this option in the form '-Workhard' (first letter + in upper case), then the second iteration at each level is + done with quintic polynomial warps. + +""", argstr="-workhard", xor=["boxopt", "ballopt"], ) Qfinal = traits.Bool( - desc="At the finest patch size (the final level), use Hermite" - "quintic polynomials for the warp instead of cubic polynomials." - "* In a 3D 'patch', there are 2x2x2x3=24 cubic polynomial basis" - "function parameters over which to optimize (2 polynomials" - "dependent on each of the x,y,z directions, and 3 different" - "directions of displacement)." - "* There are 3x3x3x3=81 quintic polynomial parameters per patch." - "* With -Qfinal, the final level will have more detail in" - "the allowed warps, at the cost of yet more CPU time." - "* However, no patch below 7x7x7 in size will be done with quintic" - "polynomials." - "* This option is also not usually needed, and is experimental.", + desc="""\ +At the finest patch size (the final level), use Hermite +quintic polynomials for the warp instead of cubic polynomials. + +* In a 3D 'patch', there are 2x2x2x3=24 cubic polynomial basis + function parameters over which to optimize (2 polynomials + dependent on each of the x,y,z directions, and 3 different + directions of displacement). +* There are 3x3x3x3=81 quintic polynomial parameters per patch. +* With -Qfinal, the final level will have more detail in + the allowed warps, at the cost of yet more CPU time. +* However, no patch below 7x7x7 in size will be done with quintic + polynomials. +* This option is also not usually needed, and is experimental. + +""", argstr="-Qfinal", ) Qonly = traits.Bool( - desc="Use Hermite quintic polynomials at all levels." - "* Very slow (about 4 times longer). Also experimental." - "* Will produce a (discrete representation of a) C2 warp.", + desc="""\ +Use Hermite quintic polynomials at all levels. + +* Very slow (about 4 times longer). Also experimental. +* Will produce a (discrete representation of a) C2 warp. + +""", argstr="-Qonly", ) plusminus = traits.Bool( - desc="Normally, the warp displacements dis(x) are defined to match" - "base(x) to source(x+dis(x)). With this option, the match" - "is between base(x-dis(x)) and source(x+dis(x)) -- the two" - "images 'meet in the middle'." 
-        "* One goal is to mimic the warping done to MRI EPI data by"
-        "field inhomogeneities, when registering between a 'blip up'"
-        "and a 'blip down' down volume, which will have opposite"
-        "distortions."
-        "* Define Wp(x) = x+dis(x) and Wm(x) = x-dis(x). Then since"
-        "base(Wm(x)) matches source(Wp(x)), by substituting INV(Wm(x))"
-        "wherever we see x, we have base(x) matches source(Wp(INV(Wm(x))));"
-        "that is, the warp V(x) that one would get from the 'usual' way"
-        "of running 3dQwarp is V(x) = Wp(INV(Wm(x)))."
-        "* Conversely, we can calculate Wp(x) in terms of V(x) as follows:"
-        "If V(x) = x + dv(x), define Vh(x) = x + dv(x)/2;"
-        "then Wp(x) = V(INV(Vh(x)))"
-        "* With the above formulas, it is possible to compute Wp(x) from"
-        "V(x) and vice-versa, using program 3dNwarpCalc. The requisite"
-        "commands are left as an exercise for the aspiring AFNI Jedi Master."
-        "* You can use the semi-secret '-pmBASE' option to get the V(x)"
-        "warp and the source dataset warped to base space, in addition to"
-        "the Wp(x) '_PLUS' and Wm(x) '_MINUS' warps."
-        "-->>* Alas: -plusminus does not work with -duplo or -allineate :-("
-        "* However, you can use -iniwarp with -plusminus :-)"
-        "-->>* The outputs have _PLUS (from the source dataset) and _MINUS"
-        "(from the base dataset) in their filenames, in addition to"
-        "the prefix. The -iwarp option, if present, will be ignored.",
+        desc="""\
+Normally, the warp displacements dis(x) are defined to match
+base(x) to source(x+dis(x)). With this option, the match
+is between base(x-dis(x)) and source(x+dis(x)) -- the two
+images 'meet in the middle'.
+
+* One goal is to mimic the warping done to MRI EPI data by
+  field inhomogeneities, when registering between a 'blip up'
+  and a 'blip down' volume, which will have opposite
+  distortions.
+* Define Wp(x) = x+dis(x) and Wm(x) = x-dis(x). Then since
+  base(Wm(x)) matches source(Wp(x)), by substituting INV(Wm(x))
+  wherever we see x, we have base(x) matches source(Wp(INV(Wm(x))));
+  that is, the warp V(x) that one would get from the 'usual' way
+  of running 3dQwarp is V(x) = Wp(INV(Wm(x))).
+* Conversely, we can calculate Wp(x) in terms of V(x) as follows:
+  If V(x) = x + dv(x), define Vh(x) = x + dv(x)/2;
+  then Wp(x) = V(INV(Vh(x))).
+* With the above formulas, it is possible to compute Wp(x) from
+  V(x) and vice-versa, using program 3dNwarpCalc. The requisite
+  commands are left as an exercise for the aspiring AFNI Jedi Master.
+* You can use the semi-secret '-pmBASE' option to get the V(x)
+  warp and the source dataset warped to base space, in addition to
+  the Wp(x) '_PLUS' and Wm(x) '_MINUS' warps.
+
+  * Alas: -plusminus does not work with -duplo or -allineate :-(
+  * However, you can use -iniwarp with -plusminus :-)
+  * The outputs have _PLUS (from the source dataset) and _MINUS
+    (from the base dataset) in their filenames, in addition to
+    the prefix. The -iwarp option, if present, will be ignored.
+
+""",
        argstr="-plusminus",
        xor=["duplo", "allsave", "iwarp"],
    )
    nopad = traits.Bool(
-        desc="Do NOT use zero-padding on the 3D base and source images."
-        "[Default == zero-pad, if needed]"
-        "* The underlying model for deformations goes to zero at the"
-        "edge of the volume being warped. However, if there is"
-        "significant data near an edge of the volume, then it won't"
-        "get displaced much, and so the results might not be good."
-        "* Zero padding is designed as a way to work around this potential"
-        "problem. You should NOT need the '-nopad' option for any"
-        "reason that Zhark can think of, but it is here to be symmetrical"
-        "with 3dAllineate."
-        "* Note that the output (warped from source) dataset will be on the"
-        "base dataset grid whether or not zero-padding is allowed. However,"
-        "unless you use the following option, allowing zero-padding (i.e.,"
-        "the default operation) will make the output WARP dataset(s) be"
-        "on a larger grid (also see '-expad' below).",
+        desc="""\
+Do NOT use zero-padding on the 3D base and source images.
+[Default == zero-pad, if needed]
+
+* The underlying model for deformations goes to zero at the
+  edge of the volume being warped. However, if there is
+  significant data near an edge of the volume, then it won't
+  get displaced much, and so the results might not be good.
+* Zero padding is designed as a way to work around this potential
+  problem. You should NOT need the '-nopad' option for any
+  reason that Zhark can think of, but it is here to be symmetrical
+  with 3dAllineate.
+* Note that the output (warped from source) dataset will be on the
+  base dataset grid whether or not zero-padding is allowed. However,
+  unless you use the following option, allowing zero-padding (i.e.,
+  the default operation) will make the output WARP dataset(s) be
+  on a larger grid (also see '-expad' below).
+
+""",
        argstr="-nopad",
    )
    nopadWARP = traits.Bool(
@@ -3864,13 +3966,13 @@ class QwarpInputSpec(AFNICommandInputSpec):
    expad = traits.Int(
        desc="This option instructs the program to pad the warp by an extra"
        "'EE' voxels (and then 3dQwarp starts optimizing it)."
-        "* This option is seldom needed, but can be useful if you"
+        "This option is seldom needed, but can be useful if you"
        "might later catenate the nonlinear warp -- via 3dNwarpCat --"
        "with an affine transformation that contains a large shift."
        "Under that circumstance, the nonlinear warp might be shifted"
        "partially outside its original grid, so expanding that grid"
        "can avoid this problem."
-        "* Note that this option perforce turns off '-nopadWARP'.",
+        "Note that this option perforce turns off '-nopadWARP'.",
        argstr="-expad %d",
        xor=["nopadWARP"],
    )
@@ -3888,7 +3990,7 @@ class QwarpInputSpec(AFNICommandInputSpec):
    baxopt = traits.Bool(
        desc="Use the 'box' optimization limits instead of the 'ball'"
        "[this is the default at present]."
-        "* Note that if '-workhard' is used, then ball and box optimization"
+        "Note that if '-workhard' is used, then ball and box optimization"
        "are alternated in the different iterations at each level, so"
        "these two options have no effect in that case.",
        argstr="-boxopt",
@@ -3917,7 +4019,7 @@ class QwarpInputSpec(AFNICommandInputSpec):
        position=-2,
    )
    lpa = traits.Bool(
-        desc="Local Pearson maximization" "This option has not be extensively tested",
+        desc="Local Pearson maximization. This option has not been extensively tested",
        argstr="-lpa",
        xor=["nmi", "mi", "lpc", "hel", "pear"],
    )
@@ -3937,7 +4039,7 @@ class QwarpInputSpec(AFNICommandInputSpec):
    )
    nmi = traits.Bool(
        desc="Normalized Mutual Information: a matching function for the adventurous"
-        "This option has NOT be extensively tested for usefullness"
+        "This option has NOT been extensively tested for usefulness"
        "and should be considered experimental at this infundibulum.",
        argstr="-nmi",
        xor=["nmi", "hel", "lpc", "lpa", "pear"],
    )
@@ -3965,15 +4067,11 @@ class QwarpOutputSpec(TraitedSpec):


class Qwarp(AFNICommand):
-    """A version of 3dQwarp
+    """
+    Allineate your images prior to passing them to this workflow.
-    For complete details, see the `3dQwarp Documentation.
-    `_
-    Examples
-    ========
-
+    Examples
+    --------
    >>> from nipype.interfaces import afni
    >>> qwarp = afni.Qwarp()
    >>> qwarp.inputs.in_file = 'sub-01_dir-LR_epi.nii.gz'
@@ -4007,6 +4105,7 @@ class Qwarp(AFNICommand):
    >>> qwarp.cmdline
    '3dQwarp -base epi.nii -blur 0.0 3.0 -source structural.nii -iwarp -prefix anatSSQ.nii.gz \
-resample -verb -lpc'
+
    >>> res = qwarp.run()  # doctest: +SKIP

    >>> from nipype.interfaces import afni
@@ -4017,6 +4116,7 @@ class Qwarp(AFNICommand):
    >>> qwarp.inputs.blur = [0,3]
    >>> qwarp.cmdline
    '3dQwarp -base mni.nii -blur 0.0 3.0 -duplo -source structural.nii -prefix ppp_structural'
+
    >>> res = qwarp.run()  # doctest: +SKIP

    >>> from nipype.interfaces import afni
@@ -4029,6 +4129,7 @@ class Qwarp(AFNICommand):
    >>> qwarp.inputs.out_file = 'Q25'
    >>> qwarp.cmdline
    '3dQwarp -base mni.nii -blur 0.0 3.0 -duplo -source structural.nii -minpatch 25 -prefix Q25'
+
    >>> res = qwarp.run()  # doctest: +SKIP

    >>> qwarp2 = afni.Qwarp()
    >>> qwarp2.inputs.in_file = 'structural.nii'
    >>> qwarp2.inputs.base_file = 'mni.nii'
    >>> qwarp2.inputs.blur = [0,2]
    >>> qwarp2.inputs.inilev = 7
    >>> qwarp2.inputs.iniwarp = ['Q25_warp+tlrc.HEAD']
    >>> qwarp2.inputs.out_file = 'Q11'
    >>> qwarp2.cmdline
    '3dQwarp -base mni.nii -blur 0.0 2.0 -source structural.nii -inilev 7 -iniwarp Q25_\
warp+tlrc.HEAD -prefix Q11'
+
    >>> res2 = qwarp2.run()  # doctest: +SKIP

    >>> qwarp3 = afni.Qwarp()
    >>> qwarp3.inputs.in_file = 'structural.nii'
    >>> qwarp3.inputs.base_file = 'mni.nii'
    >>> qwarp3.inputs.allineate = True
    >>> qwarp3.inputs.allineate_opts = '-cose lpa -verb'
    >>> qwarp3.cmdline
    "3dQwarp -allineate -allineate_opts '-cose lpa -verb' -base mni.nii -source structural.nii \
-prefix ppp_structural"
-    >>> res3 = qwarp3.run()  # doctest: +SKIP """
+
+    >>> res3 = qwarp3.run()  # doctest: +SKIP
+
+    See Also
+    --------
+    For complete details, see the `3dQwarp Documentation.
+    `__
+
+    """

    _cmd = "3dQwarp"
    input_spec = QwarpInputSpec
@@ -4176,12 +4286,8 @@ class QwarpPlusMinus(Qwarp):
    """A version of 3dQwarp for performing field susceptibility correction
    using two images with opposing phase encoding directions.

-    For complete details, see the `3dQwarp Documentation.
-    `_
-
    Examples
-    ========
-
+    --------
    >>> from nipype.interfaces import afni
    >>> qwarp = afni.QwarpPlusMinus()
    >>> qwarp.inputs.in_file = 'sub-01_dir-LR_epi.nii.gz'
@@ -4189,9 +4295,14 @@ class QwarpPlusMinus(Qwarp):
    >>> qwarp.inputs.base_file = 'sub-01_dir-RL_epi.nii.gz'
    >>> qwarp.cmdline
    '3dQwarp -prefix Qwarp.nii.gz -plusminus -base sub-01_dir-RL_epi.nii.gz \
-    -source sub-01_dir-LR_epi.nii.gz -nopadWARP'
+-source sub-01_dir-LR_epi.nii.gz -nopadWARP'
    >>> res = qwarp.run()  # doctest: +SKIP

+    See Also
+    --------
+    For complete details, see the `3dQwarp Documentation.
+    `__
+
    """

    input_spec = QwarpPlusMinusInputSpec
diff --git a/nipype/interfaces/afni/utils.py b/nipype/interfaces/afni/utils.py
index 61287b934e..a6515987e0 100644
--- a/nipype/interfaces/afni/utils.py
+++ b/nipype/interfaces/afni/utils.py
@@ -1,14 +1,7 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-"""
-AFNI utility interfaces.
-
-Examples
---------
-See the docstrings of the individual classes for examples.
- -""" +"""AFNI utility interfaces.""" import os import os.path as op import re @@ -76,8 +69,7 @@ class ABoverlap(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> aboverlap = afni.ABoverlap() >>> aboverlap.inputs.in_file_a = 'functional.nii' @@ -149,8 +141,7 @@ class AFNItoNIFTI(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> a2n = afni.AFNItoNIFTI() >>> a2n.inputs.in_file = 'afni_output.3D' @@ -217,8 +208,7 @@ class Autobox(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> abox = afni.Autobox() >>> abox.inputs.in_file = 'structural.nii' @@ -277,7 +267,7 @@ class BrickStatInputSpec(CommandLineInputSpec): traits.Float, traits.Float, desc="p0 ps p1 write the percentile values starting " - "at p0% and ending at p1% at a step of ps%. " + "at p0\\% and ending at p1\\% at a step of ps%. " "only one sub-brick is accepted.", argstr="-percentile %.3f %.3f %.3f", ) @@ -295,8 +285,7 @@ class BrickStat(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> brickstat = afni.BrickStat() >>> brickstat.inputs.in_file = 'functional.nii' @@ -350,46 +339,46 @@ class BucketInputSpec(AFNICommandInputSpec): position=-1, mandatory=True, argstr="%s", - desc="List of tuples of input datasets and subbrick selection strings" - "as described in more detail in the following afni help string" - "Input dataset specified using one of these forms:" - " 'prefix+view', 'prefix+view.HEAD', or 'prefix+view.BRIK'." - "You can also add a sub-brick selection list after the end of the" - "dataset name. This allows only a subset of the sub-bricks to be" - "included into the output (by default, all of the input dataset" - "is copied into the output). A sub-brick selection list looks like" - "one of the following forms:" - " fred+orig[5] ==> use only sub-brick #5" - " fred+orig[5,9,17] ==> use #5, #9, and #17" - " fred+orig[5..8] or [5-8] ==> use #5, #6, #7, and #8" - " fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13" - "Sub-brick indexes start at 0. You can use the character '$'" - "to indicate the last sub-brick in a dataset; for example, you" - "can select every third sub-brick by using the selection list" - " fred+orig[0..$(3)]" - "N.B.: The sub-bricks are output in the order specified, which may" - " not be the order in the original datasets. For example, using" - " fred+orig[0..$(2),1..$(2)]" - " will cause the sub-bricks in fred+orig to be output into the" - " new dataset in an interleaved fashion. Using" - " fred+orig[$..0]" - " will reverse the order of the sub-bricks in the output." - "N.B.: Bucket datasets have multiple sub-bricks, but do NOT have" - " a time dimension. You can input sub-bricks from a 3D+time dataset" - " into a bucket dataset. You can use the '3dinfo' program to see" - " how many sub-bricks a 3D+time or a bucket dataset contains." - "N.B.: In non-bucket functional datasets (like the 'fico' datasets" - " output by FIM, or the 'fitt' datasets output by 3dttest), sub-brick" - " [0] is the 'intensity' and sub-brick [1] is the statistical parameter" - " used as a threshold. 
Thus, to create a bucket dataset using the"
-        "   intensity from dataset A and the threshold from dataset B, and"
-        "   calling the output dataset C, you would type"
-        "    3dbucket -prefix C -fbuc 'A+orig[0]' -fbuc 'B+orig[1]'"
-        "WARNING: using this program, it is possible to create a dataset that"
-        "         has different basic datum types for different sub-bricks"
-        "         (e.g., shorts for brick 0, floats for brick 1)."
-        "         Do NOT do this! Very few AFNI programs will work correctly"
-        "         with such datasets!",
+        desc="""\
+List of tuples of input datasets and subbrick selection strings
+as described in more detail in the following afni help string.
+Input dataset is specified using one of these forms:
+``prefix+view``, ``prefix+view.HEAD``, or ``prefix+view.BRIK``.
+You can also add a sub-brick selection list after the end of the
+dataset name. This allows only a subset of the sub-bricks to be
+included into the output (by default, all of the input dataset
+is copied into the output). A sub-brick selection list looks like
+one of the following forms::
+
+    fred+orig[5]                     ==> use only sub-brick #5
+    fred+orig[5,9,17]                ==> use #5, #9, and #17
+    fred+orig[5..8] or [5-8]         ==> use #5, #6, #7, and #8
+    fred+orig[5..13(2)] or [5-13(2)] ==> use #5, #7, #9, #11, and #13
+
+Sub-brick indexes start at 0. You can use the character '$'
+to indicate the last sub-brick in a dataset; for example, you
+can select every third sub-brick by using the selection list
+``fred+orig[0..$(3)]``
+N.B.: The sub-bricks are output in the order specified, which may
+not be the order in the original datasets. For example, using
+``fred+orig[0..$(2),1..$(2)]``
+will cause the sub-bricks in fred+orig to be output into the
+new dataset in an interleaved fashion. Using ``fred+orig[$..0]``
+will reverse the order of the sub-bricks in the output.
+N.B.: Bucket datasets have multiple sub-bricks, but do NOT have
+a time dimension. You can input sub-bricks from a 3D+time dataset
+into a bucket dataset. You can use the '3dinfo' program to see
+how many sub-bricks a 3D+time or a bucket dataset contains.
+N.B.: In non-bucket functional datasets (like the 'fico' datasets
+output by FIM, or the 'fitt' datasets output by 3dttest), sub-brick
+``[0]`` is the 'intensity' and sub-brick ``[1]`` is the statistical parameter
+used as a threshold. Thus, to create a bucket dataset using the
+intensity from dataset A and the threshold from dataset B, and
+calling the output dataset C, you would type::
+
+    3dbucket -prefix C -fbuc 'A+orig[0]' -fbuc 'B+orig[1]'
+
+""",
    )
    out_file = File(argstr="-prefix %s", name_template="buck")


class Bucket(AFNICommand):
    """Concatenate sub-bricks from input datasets into one big
    'bucket' dataset.

-    For complete details, see the `3dbucket Documentation.
-    `_
+    .. danger::

-    Examples
-    ========
+        Using this program, it is possible to create a dataset that
+        has different basic datum types for different sub-bricks
+        (e.g., shorts for brick 0, floats for brick 1).
+        Do NOT do this! Very few AFNI programs will work correctly
+        with such datasets!

+    Examples
+    --------
    >>> from nipype.interfaces import afni
    >>> bucket = afni.Bucket()
    >>> bucket.inputs.in_file = [('functional.nii',"{2..$}"), ('functional.nii',"{1}")]
    >>> bucket.inputs.out_file = 'vr_base'
    >>> bucket.cmdline
    "3dbucket -prefix vr_base functional.nii'{2..$}' functional.nii'{1}'"
    >>> res = bucket.run()  # doctest: +SKIP

+    See Also
+    --------
+    For complete details, see the `3dbucket Documentation.
+    `__.
+ """ _cmd = "3dbucket" @@ -459,8 +457,7 @@ class Calc(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> calc = afni.Calc() >>> calc.inputs.in_file_a = 'functional.nii' @@ -531,8 +528,7 @@ class CatInputSpec(AFNICommandInputSpec): "fint", "cint", argstr="-form %s", - desc="specify data type for output. Valid types are 'int', " - "'nice', 'double', 'fint', and 'cint'.", + desc="specify data type for output.", xor=["out_int", "out_nice", "out_double", "out_fint", "out_cint"], ) stack = traits.Bool( @@ -578,8 +574,7 @@ class Cat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> cat1d = afni.Cat() >>> cat1d.inputs.sel = "'[0,2]'" @@ -642,8 +637,7 @@ class CatMatvec(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> cmv = afni.CatMatvec() >>> cmv.inputs.in_file = [('structural.BRIK::WARP_DATA','I')] @@ -738,8 +732,7 @@ class CenterMass(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> cm = afni.CenterMass() >>> cm.inputs.in_file = 'structural.nii' @@ -748,6 +741,7 @@ class CenterMass(AFNICommandBase): >>> cm.cmdline '3dCM -roi_vals 2 10 structural.nii > cm.txt' >>> res = 3dcm.run() # doctest: +SKIP + """ _cmd = "3dCM" @@ -806,8 +800,7 @@ class ConvertDset(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> convertdset = afni.ConvertDset() >>> convertdset.inputs.in_file = 'lh.pial_converted.gii' @@ -816,6 +809,7 @@ class ConvertDset(AFNICommandBase): >>> convertdset.cmdline 'ConvertDset -o_niml_asc -input lh.pial_converted.gii -prefix lh.pial_converted.niml.dset' >>> res = convertdset.run() # doctest: +SKIP + """ _cmd = "ConvertDset" @@ -852,11 +846,10 @@ class Copy(AFNICommand): or different type using 3dcopy command For complete details, see the `3dcopy Documentation. - `_ + `__ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> copy3d = afni.Copy() >>> copy3d.inputs.in_file = 'functional.nii' @@ -914,7 +907,7 @@ class DotInputSpec(AFNICommandInputSpec): ) dodot = traits.Bool(desc="Return the dot product (unscaled).", argstr="-dodot") docoef = traits.Bool( - desc="Return the least square fit coefficients {{a,b}} so that dset2 is approximately a + b*dset1", + desc="Return the least square fit coefficients {{a,b}} so that dset2 is approximately a + b\\*dset1", argstr="-docoef", ) dosums = traits.Bool( @@ -943,7 +936,11 @@ class Dot(AFNICommand): """Correlation coefficient between sub-brick pairs. All datasets in in_files list will be concatenated. You can use sub-brick selectors in the file specification. - Note: This program is not efficient when more than two subbricks are input. + + .. warning:: + + This program is not efficient when more than two subbricks are input. + For complete details, see the `3ddot Documentation. `_ @@ -1015,36 +1012,13 @@ class Edge3InputSpec(AFNICommandInputSpec): class Edge3(AFNICommand): """Does 3D Edge detection using the library 3DEdge - by Gregoire Malandain (gregoire.malandain@sophia.inria.fr). + by Gregoire Malandain. For complete details, see the `3dedge3 Documentation. `_ - references_ = [{'entry': BibTeX('@article{Deriche1987,' - 'author={R. 
Deriche},'
-                             'title={Optimal edge detection using recursive filtering},'
-                             'journal={International Journal of Computer Vision},'
-                             'volume={2},',
-                             'pages={167-187},'
-                             'year={1987},'
-                             '}'),
-                      'tags': ['method'],
-                      },
-                     {'entry': BibTeX('@article{MongaDericheMalandainCocquerez1991,'
-                             'author={O. Monga, R. Deriche, G. Malandain, J.P. Cocquerez},'
-                             'title={Recursive filtering and edge tracking: two primary tools for 3D edge detection},'
-                             'journal={Image and vision computing},'
-                             'volume={9},',
-                             'pages={203-214},'
-                             'year={1991},'
-                             '}'),
-                      'tags': ['method'],
-                      },
-                     ]
-
    Examples
-    ========
-
+    --------
    >>> from nipype.interfaces import afni
    >>> edge3 = afni.Edge3()
    >>> edge3.inputs.in_file = 'functional.nii'
@@ -1059,6 +1033,25 @@ class Edge3(AFNICommand):
    _cmd = "3dedge3"
    input_spec = Edge3InputSpec
    output_spec = AFNICommandOutputSpec
+    references_ = [
+        {'entry': BibTeX("""\
+@article{Deriche1987,
+author={R. Deriche},
+title={Optimal edge detection using recursive filtering},
+journal={International Journal of Computer Vision},
+volume={2},
+pages={167-187},
+year={1987},
+}"""), 'tags': ['method']},
+        {'entry': BibTeX("""\
+@article{MongaDericheMalandainCocquerez1991,
+  author={O. Monga, R. Deriche, G. Malandain, J.P. Cocquerez},
+  title={Recursive filtering and edge tracking: two primary tools for 3D edge detection},
+  journal={Image and vision computing},
+  volume={9},
+  pages={203-214},
+  year={1991},
+}"""), 'tags': ['method']}]


class EvalInputSpec(AFNICommandInputSpec):
@@ -1097,8 +1090,7 @@ class Eval(AFNICommand):
    `_

    Examples
-    ========
-
+    --------
    >>> from nipype.interfaces import afni
    >>> eval = afni.Eval()
    >>> eval.inputs.in_file_a = 'seed.1D'
@@ -1252,17 +1244,6 @@ class FWHMx(AFNICommandBase):
    For complete details, see the `3dFWHMx Documentation.
    `_

-    Examples
-    --------
-
-    >>> from nipype.interfaces import afni
-    >>> fwhm = afni.FWHMx()
-    >>> fwhm.inputs.in_file = 'functional.nii'
-    >>> fwhm.cmdline
-    '3dFWHMx -input functional.nii -out functional_subbricks.out > functional_fwhmx.out'
-    >>> res = fwhm.run()  # doctest: +SKIP
-
-
    (Classic) METHOD:

    * Calculate ratio of variance of first differences to data variance.
@@ -1296,14 +1277,12 @@ class FWHMx(AFNICommandBase):
    3dClustSim has also been modified to use the ACF model given above
    to generate noise random fields.

-
    .. note:: TL;DR or summary

      The take-away message is that the 'classic' 3dFWHMx and
      3dClustSim analysis, using a pure Gaussian ACF, is not very
      correct for FMRI data -- I cannot speak for PET or MEG data.

-
    .. warning::

      Do NOT use 3dFWHMx on the statistical results (e.g., '-bucket') from
@@ -1311,7 +1290,6 @@ class FWHMx(AFNICommandBase):
      the smoothness of the time series NOISE, not of the statistics. This
      proscription is especially true if you plan to use 3dClustSim next!!

-
    .. note:: Recommendations

      * For FMRI statistical purposes, you DO NOT want the FWHM to reflect
@@ -1327,7 +1305,6 @@ class FWHMx(AFNICommandBase):
      * If you do not use '-detrend', the program attempts to find non-zero
        spatial structure in the input, and will print a warning message
        if it is detected.

-
    .. note:: Notes on -detrend

      * I recommend this option, and it is not the default only for historical
@@ -1340,6 +1317,14 @@ class FWHMx(AFNICommandBase):
        structure in the image will bias the estimation of the FWHM of the
        image time series NOISE (which is usually the point of using 3dFWHMx).
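As an aside on the two estimation regimes described above, here is a minimal
sketch of requesting both through this interface. It is not part of the patch;
it relies only on the ``detrend`` and ``acf`` inputs whose argument handling
appears in ``_format_arg`` below, and the exact command-line flags they emit
should be treated as assumptions::

    from nipype.interfaces import afni

    # Classic (Gaussian) smoothness estimate, detrended as recommended above.
    classic = afni.FWHMx()
    classic.inputs.in_file = 'functional.nii'
    classic.inputs.detrend = True   # True adds the bare flag; an int sets the polynomial order

    # Mixed-model ACF estimate -- the approach the note above recommends.
    acf_est = afni.FWHMx()
    acf_est.inputs.in_file = 'functional.nii'
    acf_est.inputs.detrend = True
    acf_est.inputs.acf = True       # True adds the bare ACF flag (assumed from _format_arg)

    res = acf_est.run()             # requires AFNI to be installed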
+ Examples + -------- + >>> from nipype.interfaces import afni + >>> fwhm = afni.FWHMx() + >>> fwhm.inputs.in_file = 'functional.nii' + >>> fwhm.cmdline + '3dFWHMx -input functional.nii -out functional_subbricks.out > functional_fwhmx.out' + >>> res = fwhm.run() # doctest: +SKIP """ @@ -1371,21 +1356,19 @@ def _parse_inputs(self, skip=None): def _format_arg(self, name, trait_spec, value): if name == "detrend": - if isinstance(value, bool): - if value: - return trait_spec.argstr - else: - return None + if value is True: + return trait_spec.argstr + elif value is False: + return None elif isinstance(value, int): return trait_spec.argstr + " %d" % value if name == "acf": - if isinstance(value, bool): - if value: - return trait_spec.argstr - else: - self._acf = False - return None + if value is True: + return trait_spec.argstr + elif value is False: + self._acf = False + return None elif isinstance(value, tuple): return trait_spec.argstr + " %s %f" % value elif isinstance(value, (str, bytes)): @@ -1471,30 +1454,33 @@ class LocalBistatInputSpec(AFNICommandInputSpec): stat = InputMultiPath( traits.Enum(_stat_names), mandatory=True, - desc="statistics to compute. Possible names are :" - " * pearson = Pearson correlation coefficient" - " * spearman = Spearman correlation coefficient" - " * quadrant = Quadrant correlation coefficient" - " * mutinfo = Mutual Information" - " * normuti = Normalized Mutual Information" - " * jointent = Joint entropy" - " * hellinger= Hellinger metric" - " * crU = Correlation ratio (Unsymmetric)" - " * crM = Correlation ratio (symmetrized by Multiplication)" - " * crA = Correlation ratio (symmetrized by Addition)" - " * L2slope = slope of least-squares (L2) linear regression of " - " the data from dataset1 vs. the dataset2 " - " (i.e., d2 = a + b*d1 ==> this is 'b')" - " * L1slope = slope of least-absolute-sum (L1) linear " - " regression of the data from dataset1 vs. " - " the dataset2" - " * num = number of the values in the region: " - " with the use of -mask or -automask, " - " the size of the region around any given " - " voxel will vary; this option lets you " - " map that size." - " * ALL = all of the above, in that order" - "More than one option can be used.", + desc="""\ +Statistics to compute. Possible names are: + + * pearson = Pearson correlation coefficient + * spearman = Spearman correlation coefficient + * quadrant = Quadrant correlation coefficient + * mutinfo = Mutual Information + * normuti = Normalized Mutual Information + * jointent = Joint entropy + * hellinger= Hellinger metric + * crU = Correlation ratio (Unsymmetric) + * crM = Correlation ratio (symmetrized by Multiplication) + * crA = Correlation ratio (symmetrized by Addition) + * L2slope = slope of least-squares (L2) linear regression of + the data from dataset1 vs. the dataset2 + (i.e., d2 = a + b*d1 ==> this is 'b') + * L1slope = slope of least-absolute-sum (L1) linear + regression of the data from dataset1 vs. + the dataset2 + * num = number of the values in the region: + with the use of -mask or -automask, + the size of the region around any given + voxel will vary; this option lets you + map that size. 
+ * ALL = all of the above, in that order + +More than one option can be used.""", argstr="-stat %s...", ) mask_file = File( @@ -1534,8 +1520,7 @@ class LocalBistat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> bistat = afni.LocalBistat() >>> bistat.inputs.in_file1 = 'functional.nii' @@ -1608,48 +1593,51 @@ class LocalstatInputSpec(AFNICommandInputSpec): ), ), mandatory=True, - desc="statistics to compute. Possible names are :\n" - " * mean = average of the values\n" - " * stdev = standard deviation\n" - " * var = variance (stdev*stdev)\n" - " * cvar = coefficient of variation = stdev/fabs(mean)\n" - " * median = median of the values\n" - " * MAD = median absolute deviation\n" - " * min = minimum\n" - " * max = maximum\n" - " * absmax = maximum of the absolute values\n" - " * num = number of the values in the region:\n" - " with the use of -mask or -automask," - " the size of the region around any given" - " voxel will vary; this option lets you" - " map that size. It may be useful if you" - " plan to compute a t-statistic (say) from" - " the mean and stdev outputs.\n" - " * sum = sum of the values in the region\n" - " * FWHM = compute (like 3dFWHM) image smoothness" - " inside each voxel's neighborhood. Results" - " are in 3 sub-bricks: FWHMx, FHWMy, and FWHMz." - " Places where an output is -1 are locations" - " where the FWHM value could not be computed" - " (e.g., outside the mask).\n" - " * FWHMbar= Compute just the average of the 3 FWHM values" - " (normally would NOT do this with FWHM also).\n" - " * perc:P0:P1:Pstep = \n" - " Compute percentiles between P0 and P1 with a " - " step of Pstep.\n" - " Default P1 is equal to P0 and default P2 = 1\n" - " * rank = rank of the voxel's intensity\n" - " * frank = rank / number of voxels in neighborhood\n" - " * P2skew = Pearson's second skewness coefficient" - " 3 * (mean - median) / stdev\n" - " * ALL = all of the above, in that order " - " (except for FWHMbar and perc).\n" - " * mMP2s = Exactly the same output as:" - " median, MAD, P2skew," - " but a little faster\n" - " * mmMP2s = Exactly the same output as:" - " mean, median, MAD, P2skew\n" - "More than one option can be used.", + desc="""\ +statistics to compute. Possible names are: + + * mean = average of the values + * stdev = standard deviation + * var = variance (stdev\\*stdev) + * cvar = coefficient of variation = stdev/fabs(mean) + * median = median of the values + * MAD = median absolute deviation + * min = minimum + * max = maximum + * absmax = maximum of the absolute values + * num = number of the values in the region: + with the use of -mask or -automask, + the size of the region around any given + voxel will vary; this option lets you + map that size. It may be useful if you + plan to compute a t-statistic (say) from + the mean and stdev outputs. + * sum = sum of the values in the region + * FWHM = compute (like 3dFWHM) image smoothness + inside each voxel's neighborhood. Results + are in 3 sub-bricks: FWHMx, FHWMy, and FWHMz. + Places where an output is -1 are locations + where the FWHM value could not be computed + (e.g., outside the mask). + * FWHMbar= Compute just the average of the 3 FWHM values + (normally would NOT do this with FWHM also). + * perc:P0:P1:Pstep = + Compute percentiles between P0 and P1 with a + step of Pstep. 
+ Default P1 is equal to P0 and default P2 = 1 + * rank = rank of the voxel's intensity + * frank = rank / number of voxels in neighborhood + * P2skew = Pearson's second skewness coefficient + 3 \\* (mean - median) / stdev + * ALL = all of the above, in that order + (except for FWHMbar and perc). + * mMP2s = Exactly the same output as: + median, MAD, P2skew, + but a little faster + * mmMP2s = Exactly the same output as: + mean, median, MAD, P2skew + +More than one option can be used.""", argstr="-stat %s...", ) mask_file = File( @@ -1664,12 +1652,12 @@ class LocalstatInputSpec(AFNICommandInputSpec): desc="Compute the mask as in program 3dAutomask.", argstr="-automask" ) nonmask = traits.Bool( - desc="Voxels not in the mask WILL have their local statistics " - "computed from all voxels in their neighborhood that ARE in " - "the mask.\n" - " * For instance, this option can be used to compute the " - " average local white matter time series, even at non-WM " - " voxels.", + desc="""\ +Voxels not in the mask WILL have their local statistics +computed from all voxels in their neighborhood that ARE in +the mask. For instance, this option can be used to compute the +average local white matter time series, even at non-WM +voxels.""", argstr="-use_nonmask", ) reduce_grid = traits.Either( @@ -1737,8 +1725,7 @@ class Localstat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> localstat = afni.Localstat() >>> localstat.inputs.in_file = 'functional.nii' @@ -1796,8 +1783,7 @@ class MaskToolInputSpec(AFNICommandInputSpec): "short", "float", argstr="-datum %s", - desc="specify data type for output. Valid types are 'byte', " - "'short' and 'float'.", + desc="specify data type for output.", ) dilate_inputs = Str( desc="Use this option to dilate and/or erode datasets as they are " @@ -1843,8 +1829,7 @@ class MaskTool(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> masktool = afni.MaskTool() >>> masktool.inputs.in_file = 'functional.nii' @@ -1889,8 +1874,7 @@ class Merge(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> merge = afni.Merge() >>> merge.inputs.in_files = ['functional.nii', 'functional2.nii'] @@ -1938,8 +1922,7 @@ class Notes(CommandLine): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> notes = afni.Notes() >>> notes.inputs.in_file = 'functional.HEAD' @@ -1948,6 +1931,7 @@ class Notes(CommandLine): >>> notes.cmdline '3dNotes -a "This note is added." -h "This note is added to history." 
functional.HEAD' >>> res = notes.run() # doctest: +SKIP + """ _cmd = "3dNotes" @@ -2000,8 +1984,7 @@ class NwarpAdjust(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> adjust = afni.NwarpAdjust() >>> adjust.inputs.warps = ['func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz', 'func2anat_InverseWarp.nii.gz'] @@ -2117,8 +2100,7 @@ class NwarpApply(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> nwarp = afni.NwarpApply() >>> nwarp.inputs.in_file = 'Fred+orig' @@ -2215,8 +2197,7 @@ class NwarpCat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> nwarpcat = afni.NwarpCat() >>> nwarpcat.inputs.in_files = ['Q25_warp+tlrc.HEAD', ('IDENT', 'structural.nii')] @@ -2450,8 +2431,7 @@ class Refit(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> refit = afni.Refit() >>> refit.inputs.in_file = 'structural.nii' @@ -2466,6 +2446,7 @@ class Refit(AFNICommandBase): >>> refit_2.cmdline "3drefit -atrfloat IJK_TO_DICOM_REAL '1 0.2 0 0 -0.2 1 0 0 0 0 1 0' structural.nii" >>> res = refit_2.run() # doctest: +SKIP + """ _cmd = "3drefit" @@ -2510,29 +2491,32 @@ class ReHoInputSpec(CommandLineInputSpec): "vertices", xor=["sphere", "ellipsoid"], argstr="-nneigh %s", - desc="voxels in neighborhood. can be: " - "* faces (for voxel and 6 facewise neighbors, only),\n" - "* edges (for voxel and 18 face- and edge-wise neighbors),\n" - "* vertices (for voxel and 26 face-, edge-, and node-wise " - "neighbors).\n", + desc=""" +voxels in neighborhood. can be: +``faces`` (for voxel and 6 facewise neighbors, only), +``edges`` (for voxel and 18 face- and edge-wise neighbors), +``vertices`` (for voxel and 26 face-, edge-, and node-wise neighbors).""", ) sphere = traits.Float( argstr="-neigh_RAD %s", xor=["neighborhood", "ellipsoid"], - desc="for additional voxelwise neighborhood control, the " - "radius R of a desired neighborhood can be put in; R is " - "a floating point number, and must be >1. Examples of " - "the numbers of voxels in a given radius are as follows " - "(you can roughly approximate with the ol' 4*PI*(R^3)/3 " - "thing):\n" - " R=2.0 -> V=33,\n" - " R=2.3 -> V=57, \n" - " R=2.9 -> V=93, \n" - " R=3.1 -> V=123, \n" - " R=3.9 -> V=251, \n" - " R=4.5 -> V=389, \n" - " R=6.1 -> V=949, \n" - "but you can choose most any value.", + desc=r"""\ +For additional voxelwise neighborhood control, the +radius R of a desired neighborhood can be put in; R is +a floating point number, and must be >1. Examples of +the numbers of voxels in a given radius are as follows +(you can roughly approximate with the ol' :math:`4\pi\,R^3/3` +thing): + + * R=2.0 -> V=33 + * R=2.3 -> V=57, + * R=2.9 -> V=93, + * R=3.1 -> V=123, + * R=3.9 -> V=251, + * R=4.5 -> V=389, + * R=6.1 -> V=949, + +but you can choose most any value.""", ) ellipsoid = traits.Tuple( traits.Float, @@ -2540,13 +2524,14 @@ class ReHoInputSpec(CommandLineInputSpec): traits.Float, xor=["sphere", "neighborhood"], argstr="-neigh_X %s -neigh_Y %s -neigh_Z %s", - desc="Tuple indicating the x, y, and z radius of an ellipsoid " - "defining the neighbourhood of each voxel.\n" - "The 'hood is then made according to the following relation:" - "(i/A)^2 + (j/B)^2 + (k/C)^2 <=1.\n" - "which will have approx. V=4*PI*A*B*C/3. 
The impetus for " - "this freedom was for use with data having anisotropic " - "voxel edge lengths.", + desc=r"""\ +Tuple indicating the x, y, and z radius of an ellipsoid +defining the neighbourhood of each voxel. +The 'hood is then made according to the following relation: +:math:`(i/A)^2 + (j/B)^2 + (k/C)^2 \le 1.` +which will have approx. :math:`V=4 \pi \, A B C/3`. The impetus for +this freedom was for use with data having anisotropic +voxel edge lengths.""", ) label_set = File( exists=True, @@ -2572,8 +2557,7 @@ class ReHo(AFNICommandBase): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> reho = afni.ReHo() >>> reho.inputs.in_file = 'functional.nii' @@ -2648,8 +2632,7 @@ class Resample(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> resample = afni.Resample() >>> resample.inputs.in_file = 'functional.nii' @@ -2708,8 +2691,7 @@ class TCat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tcat = afni.TCat() >>> tcat.inputs.in_files = ['functional.nii', 'functional2.nii'] @@ -2760,8 +2742,7 @@ class TCatSubBrick(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tcsb = afni.TCatSubBrick() >>> tcsb.inputs.in_files = [('functional.nii', "'{2..$}'"), ('functional2.nii', "'{2..$}'")] @@ -2808,8 +2789,7 @@ class TStat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> tstat = afni.TStat() >>> tstat.inputs.in_file = 'functional.nii' @@ -2890,8 +2870,7 @@ class To3D(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> to3d = afni.To3D() >>> to3d.inputs.datatype = 'float' @@ -3005,8 +2984,7 @@ class Undump(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> unndump = afni.Undump() >>> unndump.inputs.in_file = 'structural.nii' @@ -3068,7 +3046,7 @@ class UnifizeInputSpec(AFNICommandInputSpec): argstr="-noduplo", ) epi = traits.Bool( - desc="Assume the input dataset is a T2 (or T2*) weighted EPI time " + desc="Assume the input dataset is a T2 (or T2\\*) weighted EPI time " "series. After computing the scaling, apply it to ALL volumes " "(TRs) in the input dataset. That is, a given voxel will be " "scaled by the same factor at each TR. " @@ -3144,8 +3122,7 @@ class Unifize(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> unifize = afni.Unifize() >>> unifize.inputs.in_file = 'structural.nii' @@ -3186,8 +3163,7 @@ class ZCutUp(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> zcutup = afni.ZCutUp() >>> zcutup.inputs.in_file = 'functional.nii' @@ -3243,8 +3219,7 @@ class GCOR(CommandLine): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> gcor = afni.GCOR() >>> gcor.inputs.in_file = 'structural.nii' @@ -3318,10 +3293,10 @@ class Axialize(AFNICommand): with the data brick oriented as axial slices. For complete details, see the `3dcopy Documentation. 
- `_ + `__ Examples - ======== + -------- >>> from nipype.interfaces import afni >>> axial3d = afni.Axialize() >>> axial3d.inputs.in_file = 'functional.nii' @@ -3389,8 +3364,7 @@ class Zcat(AFNICommand): `_ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> zcat = afni.Zcat() >>> zcat.inputs.in_files = ['functional2.nii', 'functional3.nii'] @@ -3398,6 +3372,7 @@ class Zcat(AFNICommand): >>> zcat.cmdline '3dZcat -prefix cat_functional.nii functional2.nii functional3.nii' >>> res = zcat.run() # doctest: +SKIP + """ _cmd = "3dZcat" @@ -3498,11 +3473,10 @@ class Zeropad(AFNICommand): """Adds planes of zeros to a dataset (i.e., pads it out). For complete details, see the `3dZeropad Documentation. - `_ + `__ Examples - ======== - + -------- >>> from nipype.interfaces import afni >>> zeropad = afni.Zeropad() >>> zeropad.inputs.in_files = 'functional.nii' @@ -3516,6 +3490,7 @@ class Zeropad(AFNICommand): >>> zeropad.cmdline '3dZeropad -A 10 -I 10 -L 10 -P 10 -R 10 -S 10 -prefix pad_functional.nii functional.nii' >>> res = zeropad.run() # doctest: +SKIP + """ _cmd = "3dZeropad" diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 6aee26655e..3e74b59924 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -1653,15 +1653,18 @@ class RegistrationSynQuickInputSpec(ANTSCommandInputSpec): "b", "br", argstr="-t %s", - desc=""" - transform type - t: translation - r: rigid - a: rigid + affine - s: rigid + affine + deformable syn (default) - sr: rigid + deformable syn - b: rigid + affine + deformable b-spline syn - br: rigid + deformable b-spline syn""", + desc="""\ +Transform type + + * t: translation + * r: rigid + * a: rigid + affine + * s: rigid + affine + deformable syn (default) + * sr: rigid + deformable syn + * b: rigid + affine + deformable b-spline syn + * br: rigid + deformable b-spline syn + +""", usedefault=True, ) diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index e3fe579844..ec83982191 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -331,7 +331,7 @@ class N4BiasFieldCorrectionInputSpec(ANTSCommandInputSpec): False, mandatory=True, usedefault=True, - desc="copy headers of the original image into the " "output (corrected) file", + desc="copy headers of the original image into the output (corrected) file", ) rescale_intensities = traits.Bool( False, @@ -545,11 +545,11 @@ class CorticalThicknessInputSpec(ANTSCommandInputSpec): "antsCT_", argstr="-o %s", usedefault=True, - desc=("Prefix that is prepended to all output" " files (default = antsCT_)"), + desc=("Prefix that is prepended to all output files"), ) image_suffix = traits.Str( "nii.gz", - desc=("any of standard ITK formats," " nii.gz is default"), + desc=("any of standard ITK formats, nii.gz is default"), argstr="-s %s", usedefault=True, ) @@ -583,11 +583,11 @@ class CorticalThicknessInputSpec(ANTSCommandInputSpec): ) prior_segmentation_weight = traits.Float( argstr="-w %f", - desc=("Atropos spatial prior *probability* weight for" " the segmentation"), + desc=("Atropos spatial prior *probability* weight for the segmentation"), ) segmentation_iterations = traits.Int( argstr="-n %d", - desc=("N4 -> Atropos -> N4 iterations during segmentation" " (default = 3)"), + desc=("N4 -> Atropos -> N4 iterations during segmentation (default = 3)"), ) posterior_formulation = traits.Str( argstr="-b %s", @@ -611,7 +611,7 @@ class 
CorticalThicknessInputSpec(ANTSCommandInputSpec): 1, argstr="-u %d", desc=( - "Use random number generated from system clock in Atropos" " (default = 1)" + "Use random number generated from system clock in Atropos (default = 1)" ), ) b_spline_smoothing = traits.Bool( @@ -856,7 +856,7 @@ class BrainExtractionInputSpec(ANTSCommandInputSpec): argstr="-o %s", usedefault=True, desc=( - "Prefix that is prepended to all output" " files (default = highress001_)" + "Prefix that is prepended to all output files" ), ) @@ -871,7 +871,7 @@ class BrainExtractionInputSpec(ANTSCommandInputSpec): ) image_suffix = traits.Str( "nii.gz", - desc=("any of standard ITK formats," " nii.gz is default"), + desc=("any of standard ITK formats, nii.gz is default"), argstr="-s %s", usedefault=True, ) @@ -880,7 +880,7 @@ class BrainExtractionInputSpec(ANTSCommandInputSpec): 1, argstr="-u %d", desc=( - "Use random number generated from system clock in Atropos" " (default = 1)" + "Use random number generated from system clock in Atropos (default = 1)" ), ) keep_temporary_files = traits.Int( @@ -932,6 +932,8 @@ class BrainExtractionOutputSpec(TraitedSpec): class BrainExtraction(ANTSCommand): """ + Atlas-based brain extraction. + Examples -------- >>> from nipype.interfaces.ants.segmentation import BrainExtraction @@ -941,8 +943,8 @@ class BrainExtraction(ANTSCommand): >>> brainextraction.inputs.brain_template = 'study_template.nii.gz' >>> brainextraction.inputs.brain_probability_mask ='ProbabilityMaskOfStudyTemplate.nii.gz' >>> brainextraction.cmdline - 'antsBrainExtraction.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz -e study_template.nii.gz -d 3 \ --s nii.gz -o highres001_' + 'antsBrainExtraction.sh -a T1.nii.gz -m ProbabilityMaskOfStudyTemplate.nii.gz -e study_template.nii.gz -d 3 -s nii.gz -o highres001_' + """ input_spec = BrainExtractionInputSpec @@ -1121,23 +1123,23 @@ class JointFusionInputSpec(ANTSCommandInputSpec): desc="Warped atlas segmentations", ) method = traits.Str( - default="Joint", + "Joint", argstr="-m %s", usedefault=True, desc=( "Select voting method. Options: Joint (Joint" - " Label Fusion). May be followed by optional" - " parameters in brackets, e.g., -m Joint[0.1,2]" + "Label Fusion). May be followed by optional" + "parameters in brackets, e.g., -m Joint[0.1,2]" ), ) alpha = traits.Float( - default=0.1, + 0.1, usedefault=True, requires=["method"], desc=("Regularization term added to matrix Mx for inverse"), ) beta = traits.Int( - default=2, + 2, usedefault=True, requires=["method"], desc=("Exponent for mapping intensity difference to joint error"), @@ -1155,7 +1157,7 @@ class JointFusionInputSpec(ANTSCommandInputSpec): maxlen=3, argstr="-rp %s", desc=( - "Patch radius for similarity measures, " "scalar or vector. Default: 2x2x2" + "Patch radius for similarity measures, scalar or vector. Default: 2x2x2" ), ) search_radius = traits.ListInt( @@ -1181,9 +1183,10 @@ class JointFusionOutputSpec(TraitedSpec): class JointFusion(ANTSCommand): """ + Segmentation fusion tool. + Examples -------- - >>> from nipype.interfaces.ants import JointFusion >>> at = JointFusion() >>> at.inputs.dimension = 3 @@ -1198,8 +1201,7 @@ class JointFusion(ANTSCommand): ... 
'segmentation1.nii.gz'] >>> at.inputs.target_image = 'T1.nii' >>> at.cmdline - 'jointfusion 3 1 -m Joint[0.1,2] -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz \ --l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' + 'jointfusion 3 1 -m Joint[0.1,2] -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' >>> at.inputs.method = 'Joint' >>> at.inputs.alpha = 0.5 @@ -1207,8 +1209,8 @@ class JointFusion(ANTSCommand): >>> at.inputs.patch_radius = [3,2,1] >>> at.inputs.search_radius = [1,2,3] >>> at.cmdline - 'jointfusion 3 1 -m Joint[0.5,1] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -g im3.nii \ --l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' + 'jointfusion 3 1 -m Joint[0.5,1] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' + """ input_spec = JointFusionInputSpec @@ -1334,6 +1336,7 @@ class DenoiseImage(ANTSCommand): >>> denoise_3.inputs.save_noise = True >>> denoise_3.cmdline 'DenoiseImage -i im1.nii -n Gaussian -o [ im1_noise_corrected.nii, im1_noise.nii ] -s 1' + """ input_spec = DenoiseImageInputSpec @@ -1433,7 +1436,7 @@ class AntsJointFusionInputSpec(ANTSCommandInputSpec): minlen=3, maxlen=3, argstr="-p %s", - desc=("Patch radius for similarity measures." "Default: 2x2x2"), + desc=("Patch radius for similarity measures. Default: 2x2x2"), ) patch_metric = traits.Enum( "PC", @@ -1485,7 +1488,7 @@ class AntsJointFusionInputSpec(ANTSCommandInputSpec): out_label_post_prob_name_format = traits.Str( "antsJointFusionPosterior_%d.nii.gz", requires=["out_label_fusion", "out_intensity_fusion_name_format"], - desc="Optional label posterior probability " "image file name format.", + desc="Optional label posterior probability image file name format.", ) out_atlas_voting_weight_name_format = traits.Str( "antsJointFusionVotingWeight_%d.nii.gz", @@ -1494,7 +1497,7 @@ class AntsJointFusionInputSpec(ANTSCommandInputSpec): "out_intensity_fusion_name_format", "out_label_post_prob_name_format", ], - desc="Optional atlas voting weight image " "file name format.", + desc="Optional atlas voting weight image file name format.", ) verbose = traits.Bool(False, argstr="-v", desc=("Verbose output.")) @@ -1510,7 +1513,6 @@ class AntsJointFusion(ANTSCommand): """ Examples -------- - >>> from nipype.interfaces.ants import AntsJointFusion >>> antsjointfusion = AntsJointFusion() >>> antsjointfusion.inputs.out_label_fusion = 'ants_fusion_label_output.nii' @@ -1518,22 +1520,18 @@ class AntsJointFusion(ANTSCommand): >>> antsjointfusion.inputs.atlas_segmentation_image = ['segmentation0.nii.gz'] >>> antsjointfusion.inputs.target_image = ['im1.nii'] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz \ --b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii']" + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii']" >>> antsjointfusion.inputs.target_image = [ ['im1.nii', 'im2.nii'] ] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz \ --b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz -b 2.0 -o 
ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" >>> antsjointfusion.inputs.atlas_image = [ ['rc1s1.nii','rc1s2.nii'], ... ['rc2s1.nii','rc2s2.nii'] ] >>> antsjointfusion.inputs.atlas_segmentation_image = ['segmentation0.nii.gz', ... 'segmentation1.nii.gz'] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 2.0 -o ants_fusion_label_output.nii \ --s 3x3x3 -t ['im1.nii', 'im2.nii']" + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" >>> antsjointfusion.inputs.dimension = 3 >>> antsjointfusion.inputs.alpha = 0.5 @@ -1541,29 +1539,21 @@ class AntsJointFusion(ANTSCommand): >>> antsjointfusion.inputs.patch_radius = [3,2,1] >>> antsjointfusion.inputs.search_radius = [3] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -o ants_fusion_label_output.nii \ --p 3x2x1 -s 3 -t ['im1.nii', 'im2.nii']" + "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -o ants_fusion_label_output.nii -p 3x2x1 -s 3 -t ['im1.nii', 'im2.nii']" >>> antsjointfusion.inputs.search_radius = ['mask.nii'] >>> antsjointfusion.inputs.verbose = True >>> antsjointfusion.inputs.exclusion_image = ['roi01.nii', 'roi02.nii'] >>> antsjointfusion.inputs.exclusion_image_label = ['1','2'] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] \ --o ants_fusion_label_output.nii -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" + "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] -o ants_fusion_label_output.nii -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" >>> antsjointfusion.inputs.out_label_fusion = 'ants_fusion_label_output.nii' >>> antsjointfusion.inputs.out_intensity_fusion_name_format = 'ants_joint_fusion_intensity_%d.nii.gz' >>> antsjointfusion.inputs.out_label_post_prob_name_format = 'ants_joint_fusion_posterior_%d.nii.gz' >>> antsjointfusion.inputs.out_atlas_voting_weight_name_format = 'ants_joint_fusion_voting_weight_%d.nii.gz' >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ --l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] \ --o [ants_fusion_label_output.nii, ants_joint_fusion_intensity_%d.nii.gz, \ -ants_joint_fusion_posterior_%d.nii.gz, ants_joint_fusion_voting_weight_%d.nii.gz] \ --p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" + "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] -o [ants_fusion_label_output.nii, ants_joint_fusion_intensity_%d.nii.gz, ants_joint_fusion_posterior_%d.nii.gz, ants_joint_fusion_voting_weight_%d.nii.gz] -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" """ @@ -1678,8 +1668,8 @@ class KellyKapowskiInputSpec(ANTSCommandInputSpec): exists=True, argstr='--segmentation-image "%s"', 
mandatory=True, - desc="A segmentation image must be supplied labeling the gray and white matters." - " Default values = 2 and 3, respectively.", + desc="A segmentation image must be supplied labeling the gray and white matters. " + "Default values = 2 and 3, respectively.", ) gray_matter_label = traits.Int( @@ -1711,7 +1701,7 @@ class KellyKapowskiInputSpec(ANTSCommandInputSpec): ) convergence = traits.Str( - default="[50,0.001,10]", + "[50,0.001,10]", argstr='--convergence "%s"', usedefault=True, desc="Convergence is determined by fitting a line to the normalized energy profile of" @@ -1816,12 +1806,7 @@ class KellyKapowski(ANTSCommand): >>> kk.inputs.convergence = "[45,0.0,10]" >>> kk.inputs.thickness_prior_estimate = 10 >>> kk.cmdline - 'KellyKapowski --convergence "[45,0.0,10]" \ ---output "[segmentation0_cortical_thickness.nii.gz,segmentation0_warped_white_matter.nii.gz]" \ ---image-dimensionality 3 --gradient-step 0.025000 \ ---maximum-number-of-invert-displacement-field-iterations 20 --number-of-integration-points 10 \ ---segmentation-image "[segmentation0.nii.gz,2,3]" --smoothing-variance 1.000000 \ ---smoothing-velocity-field-parameter 1.500000 --thickness-prior-estimate 10.000000' + 'KellyKapowski --convergence "[45,0.0,10]" --output "[segmentation0_cortical_thickness.nii.gz,segmentation0_warped_white_matter.nii.gz]" --image-dimensionality 3 --gradient-step 0.025000 --maximum-number-of-invert-displacement-field-iterations 20 --number-of-integration-points 10 --segmentation-image "[segmentation0.nii.gz,2,3]" --smoothing-variance 1.000000 --smoothing-velocity-field-parameter 1.500000 --thickness-prior-estimate 10.000000' """ @@ -1831,20 +1816,19 @@ class KellyKapowski(ANTSCommand): references_ = [ { - "entry": BibTeX( - "@book{Das2009867," - "author={Sandhitsu R. Das and Brian B. Avants and Murray Grossman and James C. Gee}," - "title={Registration based cortical thickness measurement.}," - "journal={NeuroImage}," - "volume={45}," - "number={37}," - "pages={867--879}," - "year={2009}," - "issn={1053-8119}," - "url={http://www.sciencedirect.com/science/article/pii/S1053811908012780}," - "doi={https://doi.org/10.1016/j.neuroimage.2008.12.016}" - "}" - ), + "entry": BibTeX("""\ +@book{Das2009867, + author={Sandhitsu R. Das and Brian B. Avants and Murray Grossman and James C. 
Gee}, + title={Registration based cortical thickness measurement.}, + journal={NeuroImage}, + volume={45}, + number={37}, + pages={867--879}, + year={2009}, + issn={1053-8119}, + url={http://www.sciencedirect.com/science/article/pii/S1053811908012780}, + doi={https://doi.org/10.1016/j.neuroimage.2008.12.016} +}"""), "description": "The details on the implementation of DiReCT.", "tags": ["implementation"], } diff --git a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index 3e3c75be50..a08cfb1764 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -31,14 +31,11 @@ class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): mask_image = File( "none", argstr="%s", exists=True, desc="mask image", position=3, usedefault=True ) - colormap = traits.Str( + colormap = traits.Enum( + "grey", "red", "green", "blue", "copper", "jet", "hsv", "spring", "summer", + "autumn", "winter", "hot", "cool", "overunder", "custom", argstr="%s", - usedefault=True, - desc=( - "Possible colormaps: grey, red, green, " - "blue, copper, jet, hsv, spring, summer, " - "autumn, winter, hot, cool, overunder, custom " - ), + desc="Select a colormap", mandatory=True, position=4, ) @@ -52,10 +49,10 @@ class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): argstr="%d", desc="maximum input", mandatory=True, position=7 ) minimum_RGB_output = traits.Int( - 0, usedefault=True, argstr="%d", desc="", position=8 + 0, usedefault=True, argstr="%d", position=8 ) maximum_RGB_output = traits.Int( - 255, usedefault=True, argstr="%d", desc="", position=9 + 255, usedefault=True, argstr="%d", position=9 ) @@ -65,6 +62,8 @@ class ConvertScalarImageToRGBOutputSpec(TraitedSpec): class ConvertScalarImageToRGB(ANTSCommand): """ + Convert scalar images to RGB. + Examples -------- >>> from nipype.interfaces.ants.visualization import ConvertScalarImageToRGB @@ -76,6 +75,7 @@ class ConvertScalarImageToRGB(ANTSCommand): >>> converter.inputs.maximum_input = 6 >>> converter.cmdline 'ConvertScalarImageToRGB 3 T1.nii.gz rgb.nii.gz none jet none 0 6 0 255' + """ _cmd = "ConvertScalarImageToRGB" diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py index fd4c701fff..a0e7325580 100644 --- a/nipype/interfaces/base/core.py +++ b/nipype/interfaces/base/core.py @@ -128,10 +128,7 @@ def _get_filecopy_info(cls): class BaseInterface(Interface): - """Implements common interface functionality. - - Implements - ---------- + """Implement common interface functionality. * Initializes inputs/outputs from input_spec/output_spec * Provides help based on input_spec and output_spec @@ -144,18 +141,19 @@ class BaseInterface(Interface): This class cannot be instantiated. - - Relevant Interface attributes - ----------------------------- - - ``input_spec`` points to the traited class for the inputs - ``output_spec`` points to the traited class for the outputs - ``_redirect_x`` should be set to ``True`` when the interface requires - connecting to a ``$DISPLAY`` (default is ``False``). - ``resource_monitor`` if ``False`` prevents resource-monitoring this - interface, if ``True`` monitoring will be enabled IFF the general - Nipype config is set on (``resource_monitor = true``). 
-
+    Attributes
+    ----------
+    input_spec: HasTraits
+        points to the traited class for the inputs
+    output_spec: HasTraits
+        points to the traited class for the outputs
+    _redirect_x: bool
+        should be set to ``True`` when the interface requires
+        connecting to a ``$DISPLAY`` (default is ``False``).
+    resource_monitor: bool
+        If ``False``, prevents resource-monitoring this interface.
+        If ``True`` monitoring will be enabled IFF the general
+        Nipype config is set on (``resource_monitor = true``).
 
     """
 
@@ -323,14 +321,15 @@ def run(self, cwd=None, ignore_exception=None, **inputs):
 
         Parameters
         ----------
-        cwd : specify a folder where the interface should be run
         inputs : allows the interface settings to be updated
 
         Returns
         -------
-        results : an InterfaceResult object containing a copy of the instance
-        that was executed, provenance information and, if successful, results
+        results : :obj:`InterfaceResult`
+            A copy of the instance that was executed, provenance information and,
+            if successful, results
+
         """
         from ...utils.profiler import ResourceMonitor
 
@@ -577,7 +576,6 @@ class SimpleInterface(BaseInterface):
 
     Examples
     --------
-
     >>> from nipype.interfaces.base import (
     ...     SimpleInterface, BaseInterfaceInputSpec, TraitedSpec)
 
@@ -602,6 +600,7 @@ class SimpleInterface(BaseInterface):
     >>> dbl.inputs.x = 2
     >>> dbl.run().outputs.doubled
     4.0
+
     """
 
     def __init__(self, from_file=None, resource_monitor=None, **inputs):
@@ -620,14 +619,11 @@ class must be instantiated with a command argument
 
     Parameters
     ----------
-
-    command : string
+    command : str
        define base immutable `command` you wish to run
-
-    args : string, optional
+    args : str, optional
        optional arguments passed to base `command`
 
-
    Examples
    --------
    >>> import pprint
@@ -637,7 +633,7 @@ class must be instantiated with a command argument
    >>> cli.cmdline
    'ls -al'
 
-    # Use get_traitsfree() to check all inputs set
+    >>> # Use get_traitsfree() to check all inputs set
    >>> pprint.pprint(cli.inputs.get_traitsfree())  # doctest:
    {'args': '-al',
     'environ': {'DISPLAY': ':1'}}
 
@@ -758,7 +754,8 @@ def _run_interface(self, runtime, correct_return_codes=(0,)):
 
        Returns
        -------
-        runtime : updated runtime information
+        runtime :
+            updated runtime information
            adds stdout, stderr, merged, cmdline, dependencies, command_path
 
        """
@@ -997,6 +994,7 @@ class MpiCommandLine(CommandLine):
    >>> mpi_cli.inputs.n_procs = 8
    >>> mpi_cli.cmdline
    'mpiexec -n 8 my_mpi_prog -v'
+
    """
 
    input_spec = MpiCommandLineInputSpec
diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py
index 579f97def8..b42a73f501 100644
--- a/nipype/interfaces/base/specs.py
+++ b/nipype/interfaces/base/specs.py
@@ -30,8 +30,7 @@
 
 from ... import config, __version__
 
-
-FLOAT_FORMAT = "{:.10f}".format
+_float_fmt = "{:.10f}".format
 
 nipype_version = Version(__version__)
 
@@ -325,7 +324,7 @@ def _get_sorteddict(
             else:
                 out = hash
         elif isinstance(objekt, float):
-            out = FLOAT_FORMAT(objekt)
+            out = _float_fmt(objekt)
         else:
             out = objekt
         return out
diff --git a/nipype/interfaces/base/support.py b/nipype/interfaces/base/support.py
index e3e1a229f6..4b01754be0 100644
--- a/nipype/interfaces/base/support.py
+++ b/nipype/interfaces/base/support.py
@@ -32,10 +32,11 @@ def __str__(self):
 
 
 class Bunch(object):
-    """Dictionary-like class that provides attribute-style access to it's items.
+    """
+    Dictionary-like class that provides attribute-style access to its items.
 
-    A `Bunch` is a simple container that stores it's items as class
-    attributes. Internally all items are stored in a dictionary and
+    A ``Bunch`` is a simple container that stores its items as class
+    attributes [1]_. Internally all items are stored in a dictionary and
     the class exposes several of the dictionary methods.
 
     Examples
@@ -48,10 +49,8 @@ class Bunch(object):
    >>> inputs
    Bunch(fwhm=6.0, infile='subj.nii', register_to_mean=False)
 
-    Notes
-    -----
-    The Bunch pattern came from the Python Cookbook:
-
+    References
+    ----------
    .. [1] A. Martelli, D. Hudgeon, "Collecting a Bunch of Named
       Items", Python Cookbook, 2nd Ed, Chapter 4.18, 2005.
 
diff --git a/nipype/interfaces/brainsuite/brainsuite.py b/nipype/interfaces/brainsuite/brainsuite.py
index 0d26017ea7..84177a16ad 100644
--- a/nipype/interfaces/brainsuite/brainsuite.py
+++ b/nipype/interfaces/brainsuite/brainsuite.py
@@ -1,4 +1,9 @@
 # -*- coding: utf-8 -*-
+"""This script provides interfaces for BrainSuite command line tools.
+Please see brainsuite.org for more information.
+
+Author: Jason Wong
+"""
 import os
 import re as regex
 
@@ -12,12 +17,6 @@
     isdefined,
 )
 
-"""This script provides interfaces for BrainSuite command line tools.
-Please see brainsuite.org for more information.
-
-Author: Jason Wong
-"""
-
 
 class BseInputSpec(CommandLineInputSpec):
 
@@ -124,7 +123,7 @@ class BfcInputSpec(CommandLineInputSpec):
     )
     inputMaskFile = File(desc="mask file", argstr="-m %s", hash_files=False)
     outputMRIVolume = File(
-        desc="output bias-corrected MRI volume.If unspecified, output file name will be auto generated.",
+        desc="output bias-corrected MRI volume. If unspecified, output file name will be auto generated.",
         argstr="-o %s",
         hash_files=False,
         genfile=True,
@@ -150,7 +149,13 @@ class BfcInputSpec(CommandLineInputSpec):
     histogramType = traits.Enum(
         "ellipse",
         "block",
-        desc="Options for type of histogram\nellipse: use ellipsoid for ROI histogram\nblock :use block for ROI histogram",
+        desc="""\
+Options for type of histogram:
+
+  * ``ellipse``: use ellipsoid for ROI histogram
+  * ``block``: use block for ROI histogram
+
+""",
         argstr="%s",
     )
     iterativeMode = traits.Bool(
@@ -178,8 +183,14 @@ class BfcInputSpec(CommandLineInputSpec):
         "low",
         "medium",
         "high",
-        desc="Preset options for bias_model\n low: small bias model [0.95,1.05]\n"
-        "medium: medium bias model [0.90,1.10]\n high: high bias model [0.80,1.20]",
+        desc="""\
+Preset options for bias_model
+
+  * low: small bias model [0.95,1.05]
+  * medium: medium bias model [0.90,1.10]
+  * high: high bias model [0.80,1.20]
+
+""",
         argstr="%s",
     )
     intermediate_file_type = traits.Enum(
@@ -1314,7 +1325,7 @@ class BDPInputSpec(CommandLineInputSpec):
         "saves derived diffusion tensor parameters (FA, MD, axial, radial, L2, "
         "L3). This is the default behavior if no diffusion modeling flags are "
         "specified. The estimated diffusion tensors can be visualized by loading "
-        "the saved *.eig.nii.gz file in BrainSuite. BDP reports diffusivity (MD, "
+        "the saved ``*.eig.nii.gz`` file in BrainSuite. BDP reports diffusivity (MD, "
         "axial, radial, L2 and L3) in a unit which is reciprocal inverse of the "
         "unit of input b-value. ",
     )
diff --git a/nipype/interfaces/c3.py b/nipype/interfaces/c3.py
index 4eadb98207..c91c02569c 100644
--- a/nipype/interfaces/c3.py
+++ b/nipype/interfaces/c3.py
@@ -1,7 +1,5 @@
 # -*- coding: utf-8 -*-
-"""The ants module provides basic functions for interfacing with ants
-   functions.
-""" +"""Convert3D is a command-line tool for converting 3D images between common file formats.""" import os from glob import glob diff --git a/nipype/interfaces/camino/calib.py b/nipype/interfaces/camino/calib.py index a16dbd9149..0c44b4abea 100644 --- a/nipype/interfaces/camino/calib.py +++ b/nipype/interfaces/camino/calib.py @@ -134,8 +134,8 @@ class SFPICOCalibData(StdOutCommandLine): which stores information about the datafile, is generated along with the datafile. - Example 1 - --------- + Examples + -------- To create a calibration dataset using the default settings >>> import nipype.interfaces.camino as cam @@ -151,8 +151,6 @@ class SFPICOCalibData(StdOutCommandLine): data produced can be varied by specifying the ranges and steps of the parameters for both the one and two fibre datasets used. - Example 2 - --------- To create a custom calibration dataset >>> import nipype.interfaces.camino as cam @@ -172,6 +170,7 @@ class SFPICOCalibData(StdOutCommandLine): simulate the one fibre cases and 72,912 voxels simulate the various two fibre cases. However, care should be taken to ensure that enough data is generated for calculating the LUT. # doctest: +SKIP + """ _cmd = "sfpicocalibdata" @@ -218,14 +217,16 @@ class SFLUTGenInputSpec(StdOutCommandLineInputSpec): "bingham", "watson", argstr="-pdf %s", - desc=( - "Sets the distribution to use for the calibration. The default is the Bingham " - "distribution, which allows elliptical probability density contours. " - "Currently supported options are: " - " bingham - The Bingham distribution, which allows elliptical probability " - " density contours. " - " watson - The Watson distribution. This distribution is rotationally symmetric." - ), + desc="""\ +Sets the distribution to use for the calibration. The default is the Bingham +distribution, which allows elliptical probability density contours. +Currently supported options are: + + * bingham -- The Bingham distribution, which allows elliptical probability + density contours. + * watson -- The Watson distribution. This distribution is rotationally symmetric. + +""", usedefault=True, ) binincsize = traits.Int( @@ -286,9 +287,9 @@ class SFLUTGen(StdOutCommandLine): This utility uses calibration data generated from SFPICOCalibData and peak information created by SFPeaks. - The utility outputs two lut's, *_oneFibreSurfaceCoeffs.Bdouble and - *_twoFibreSurfaceCoeffs.Bdouble. Each of these files contains big- - endian doubles as standard. The format of the output is: :: + The utility outputs two lut's, ``*_oneFibreSurfaceCoeffs.Bdouble`` and + ``*_twoFibreSurfaceCoeffs.Bdouble``. Each of these files contains big-endian doubles + as standard. The format of the output is:: dimensions (1 for Watson, 2 for Bingham) order (the order of the polynomial) @@ -298,12 +299,12 @@ class SFLUTGen(StdOutCommandLine): coefficient_N In the case of the Watson, there is a single set of coefficients, - which are ordered: :: + which are ordered:: constant, x, x^2, ..., x^order. 
In the case of the Bingham, there are two sets of coefficients (one - for each surface), ordered so that: :: + for each surface), ordered so that:: for j = 1 to order for k = 1 to order @@ -311,7 +312,7 @@ class SFLUTGen(StdOutCommandLine): where j+k < order Example - --------- + ------- To create a calibration dataset using the default settings >>> import nipype.interfaces.camino as cam @@ -319,6 +320,7 @@ class SFLUTGen(StdOutCommandLine): >>> lutgen.inputs.in_file = 'QSH_peaks.Bdouble' >>> lutgen.inputs.info_file = 'PICO_calib.info' >>> lutgen.run() # doctest: +SKIP + """ _cmd = "sflutgen" diff --git a/nipype/interfaces/camino/connectivity.py b/nipype/interfaces/camino/connectivity.py index 5ec7fe8c63..2b7d0ff337 100644 --- a/nipype/interfaces/camino/connectivity.py +++ b/nipype/interfaces/camino/connectivity.py @@ -146,8 +146,8 @@ class Conmat(CommandLine): In all cases, distance to the seed point is defined along the streamline path. - Example 1 - --------- + Examples + -------- To create a standard connectivity matrix based on streamline counts. >>> import nipype.interfaces.camino as cam @@ -156,8 +156,6 @@ class Conmat(CommandLine): >>> conmat.inputs.target_file = 'atlas.nii.gz' >>> conmat.run() # doctest: +SKIP - Example 1 - --------- To create a standard connectivity matrix and mean tractwise FA statistics. >>> import nipype.interfaces.camino as cam @@ -167,6 +165,7 @@ class Conmat(CommandLine): >>> conmat.inputs.scalar_file = 'fa.nii.gz' >>> conmat.tract_stat = 'mean' >>> conmat.run() # doctest: +SKIP + """ _cmd = "conmat" diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py index 6a17271bcf..7e74fe8ad6 100644 --- a/nipype/interfaces/camino/dti.py +++ b/nipype/interfaces/camino/dti.py @@ -357,7 +357,7 @@ def _gen_model_options(): # @NoSelf argstr="-fixedmod %s", minlen=4, maxlen=4, - desc="Specifies a spherical acquisition scheme with M measurements with q=0 and N measurements with |q|=Q and diffusion time tau. The N measurements with |q|=Q have unique directions. The program reads in the directions from the files in directory PointSets.", + desc="Specifies a spherical acquisition scheme with M measurements with q=0 and N measurements with :math:`|q|=Q` and diffusion time tau. The N measurements with :math:`|q|=Q` have unique directions. The program reads in the directions from the files in directory PointSets.", ) fixedbvalue = traits.List( diff --git a/nipype/interfaces/camino/odf.py b/nipype/interfaces/camino/odf.py index f152f32762..0cd8b0c49c 100644 --- a/nipype/interfaces/camino/odf.py +++ b/nipype/interfaces/camino/odf.py @@ -73,8 +73,8 @@ class QBallMX(StdOutCommandLine): Generates a reconstruction matrix for Q-Ball. Used in LinRecon with the same scheme file to reconstruct data. - Example 1 - --------- + Examples + -------- To create a linear transform matrix using Spherical Harmonics (sh). >>> import nipype.interfaces.camino as cam @@ -84,8 +84,6 @@ class QBallMX(StdOutCommandLine): >>> qballmx.inputs.order = 6 >>> qballmx.run() # doctest: +SKIP - Example 2 - --------- To create a linear transform matrix using Radial Basis Functions (rbf). This command uses the default setting of rbf sigma = 0.2618 (15 degrees), data smoothing sigma = 0.1309 (7.5 degrees), rbf @@ -106,6 +104,7 @@ class QBallMX(StdOutCommandLine): >>> qballcoeffs.inputs.normalize = True >>> qballcoeffs.inputs.bgmask = 'brain_mask.nii' >>> qballcoeffs.run() # doctest: +SKIP + """ _cmd = "qballmx" @@ -187,7 +186,7 @@ class LinRecon(StdOutCommandLine): are stored row by row. 
Example - --------- + ------- First run QBallMX and create a linear transform matrix using Spherical Harmonics (sh). @@ -206,6 +205,7 @@ class LinRecon(StdOutCommandLine): >>> qballcoeffs.inputs.qball_mat = 'A_qmat.Bdouble' >>> qballcoeffs.inputs.normalize = True >>> qballcoeffs.run() # doctest: +SKIP + """ _cmd = "linrecon" @@ -236,14 +236,19 @@ class MESDInputSpec(StdOutCommandLineInputSpec): argstr="-filter %s", position=2, mandatory=True, - desc=( - "The inversion index specifies the type of inversion to perform on the data." - "The currently available choices are:" - "Inverter name | Inverter parameters" - "---------------|------------------" - "SPIKE | bd (b-value x diffusivity along the fibre.)" - "PAS | r" - ), + desc=""" +The inversion index specifies the type of inversion to perform on the data. +The currently available choices are: + + +----------------+---------------------------------------------+ + | Inverter name | Inverter parameters | + +================+=============================================+ + | SPIKE | bd (b-value x diffusivity along the fibre.) | + +----------------+---------------------------------------------+ + | PAS | r | + +----------------+---------------------------------------------+ + +""", ) inverter_param = traits.Float( argstr="%f", @@ -365,7 +370,7 @@ class MESD(StdOutCommandLine): Sweet and Alexander "Reduced Encoding Persistent Angular Structure" 572 ISMRM 2010. Example - --------- + ------- Run MESD on every voxel of the data file SubjectA.Bfloat using the PASMRI kernel. >>> import nipype.interfaces.camino as cam @@ -375,6 +380,7 @@ class MESD(StdOutCommandLine): >>> mesd.inputs.inverter = 'PAS' >>> mesd.inputs.inverter_param = 1.4 >>> mesd.run() # doctest: +SKIP + """ _cmd = "mesd" @@ -565,7 +571,7 @@ class SFPeaks(StdOutCommandLine): Example - --------- + ------- First run QBallMX and create a linear transform matrix using Spherical Harmonics (sh). 
@@ -577,6 +583,7 @@ class SFPeaks(StdOutCommandLine): >>> sf_peaks.inputs.density = 100 >>> sf_peaks.inputs.searchradius = 1.0 >>> sf_peaks.run() # doctest: +SKIP + """ _cmd = "sfpeaks" diff --git a/nipype/interfaces/camino2trackvis/__init__.py b/nipype/interfaces/camino2trackvis/__init__.py index 94d3e458a7..b132a20f0c 100644 --- a/nipype/interfaces/camino2trackvis/__init__.py +++ b/nipype/interfaces/camino2trackvis/__init__.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Camino2Trackvis top level namespace -""" - +"""Camino-Trackvis allows interoperability between Camino and TrackVis.""" from .convert import Camino2Trackvis, Trackvis2Camino diff --git a/nipype/interfaces/camino2trackvis/convert.py b/nipype/interfaces/camino2trackvis/convert.py index f4e7e7dfd1..63d7a385da 100644 --- a/nipype/interfaces/camino2trackvis/convert.py +++ b/nipype/interfaces/camino2trackvis/convert.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- -""" -Provides interfaces to various commands provided by Camino-Trackvis -""" +"""Provides interfaces to various commands provided by Camino-Trackvis.""" import os diff --git a/nipype/interfaces/cmtk/__init__.py b/nipype/interfaces/cmtk/__init__.py index 60c7d636d5..fc45bc986e 100644 --- a/nipype/interfaces/cmtk/__init__.py +++ b/nipype/interfaces/cmtk/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""CMP implements a full processing pipeline for creating connectomes with dMRI data.""" from .cmtk import ROIGen, CreateMatrix, CreateNodes from .nx import NetworkXMetrics, AverageNetworks from .parcellation import Parcellate diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index 237b092f35..9c2e5815e0 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -133,18 +133,23 @@ def create_allpoints_cmat(streamlines, roiData, voxelSize, n_rois): def create_endpoints_array(fib, voxelSize): - """ Create the endpoints arrays for each fiber + """ Create the endpoints arrays for each fiber. 
+
     Parameters
     ----------
-    fib: the fibers data
-    voxelSize: 3-tuple containing the voxel size of the ROI image
+    fib :
+        the fibers data
+    voxelSize :
+        3-tuple containing the voxel size of the ROI image
+
 
     Returns
     -------
-    (endpoints: matrix of size [#fibers, 2, 3] containing for each fiber the
-    index of its first and last point in the voxelSize volume
-    endpointsmm) : endpoints in milimeter coordinates
-    """
+    endpoints : matrix of size [#fibers, 2, 3]
+        containing for each fiber the index of its first and last point in the voxelSize volume
+    endpointsmm : matrix of size [#fibers, 2, 3]
+        endpoints in millimeter coordinates
+
+    """
     # Init
     n = len(fib)
     endpoints = np.zeros((n, 2, 3))
diff --git a/nipype/interfaces/cmtk/nbs.py b/nipype/interfaces/cmtk/nbs.py
index e224daa082..4e1db9ffb7 100644
--- a/nipype/interfaces/cmtk/nbs.py
+++ b/nipype/interfaces/cmtk/nbs.py
@@ -97,18 +97,19 @@ class NetworkBasedStatistic(LibraryBaseInterface):
     """
     Calculates and outputs the average network given a set of input NetworkX gpickle files
 
+    See Also
+    --------
     For documentation of Network-based statistic parameters:
-
-    https://github.com/LTS5/connectomeviewer/blob/master/cviewer/libs/pyconto/groupstatistics/nbs/_nbs.py
+    https://github.com/LTS5/connectomeviewer/blob/master/cviewer/libs/pyconto/groupstatistics/nbs/_nbs.py
 
     Example
     -------
-
     >>> import nipype.interfaces.cmtk as cmtk
     >>> nbs = cmtk.NetworkBasedStatistic()
     >>> nbs.inputs.in_group1 = ['subj1.pck', 'subj2.pck'] # doctest: +SKIP
     >>> nbs.inputs.in_group2 = ['pat1.pck', 'pat2.pck'] # doctest: +SKIP
     >>> nbs.run()                 # doctest: +SKIP
+
     """
 
     input_spec = NetworkBasedStatisticInputSpec
diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py
index c34d372a7e..3886fe8844 100644
--- a/nipype/interfaces/cmtk/nx.py
+++ b/nipype/interfaces/cmtk/nx.py
@@ -447,11 +447,11 @@ class NetworkXMetrics(BaseInterface):
 
     Example
     -------
-
     >>> import nipype.interfaces.cmtk as cmtk
     >>> nxmetrics = cmtk.NetworkXMetrics()
     >>> nxmetrics.inputs.in_file = 'subj1.pck'
     >>> nxmetrics.run()                 # doctest: +SKIP
+
     """
 
     input_spec = NetworkXMetricsInputSpec
@@ -636,7 +636,6 @@ class AverageNetworks(BaseInterface):
 
     Example
     -------
-
     >>> import nipype.interfaces.cmtk as cmtk
     >>> avg = cmtk.AverageNetworks()
     >>> avg.inputs.in_files = ['subj1.pck', 'subj2.pck']
diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py
index c7397b2133..4868ff3df4 100644
--- a/nipype/interfaces/cmtk/parcellation.py
+++ b/nipype/interfaces/cmtk/parcellation.py
@@ -661,17 +661,25 @@ def crop_and_move_datasets(
 
 
 def extract(Z, shape, position, fill):
-    """ Extract voxel neighbourhood
-Parameters
-----------
-Z: the original data
-shape: tuple containing neighbourhood dimensions
-position: tuple containing central point indexes
-fill: value for the padding of Z
-Returns
--------
-R: the neighbourhood of the specified point in Z
-"""
+    """Extract voxel neighbourhood
+
+    Parameters
+    ----------
+    Z :
+        the original data
+    shape :
+        tuple containing neighbourhood dimensions
+    position :
+        tuple containing central point indexes
+    fill :
+        value for the padding of Z
+
+    Returns
+    -------
+    R :
+        the neighbourhood of the specified point in Z
+
+    """
     R = (
         np.ones(shape, dtype=Z.dtype) * fill
     )  # initialize output block to the fill value
diff --git a/nipype/interfaces/dcm2nii.py b/nipype/interfaces/dcm2nii.py
index 87d9b7b3df..c88f11ba6a 100644
--- a/nipype/interfaces/dcm2nii.py
+++ b/nipype/interfaces/dcm2nii.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
-"""The dcm2nii module 
provides basic functions for dicom conversion -""" +"""dcm2nii converts images from the proprietary scanner DICOM format to NIfTI.""" import os import re from copy import deepcopy diff --git a/nipype/interfaces/dcmstack.py b/nipype/interfaces/dcmstack.py index d7223468c8..f23461814c 100644 --- a/nipype/interfaces/dcmstack.py +++ b/nipype/interfaces/dcmstack.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -"""Provides interfaces to various commands provided by dcmstack -""" +"""dcmstack allows series of DICOM images to be stacked into multi-dimensional arrays.""" import os from os import path as op diff --git a/nipype/interfaces/diffusion_toolkit/__init__.py b/nipype/interfaces/diffusion_toolkit/__init__.py index cef13227c4..c3927800a3 100644 --- a/nipype/interfaces/diffusion_toolkit/__init__.py +++ b/nipype/interfaces/diffusion_toolkit/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""Diffusion Toolkit performs data reconstruction and fiber tracking on diffusion MR images.""" from .base import Info from .postproc import SplineFilter, TrackMerge from .dti import DTIRecon, DTITracker diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index 765ef6d8b9..bc30656b00 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -46,25 +46,28 @@ class DTIReconInputSpec(CommandLineInputSpec): traits.Float(), minlen=6, maxlen=6, - desc="""specify image orientation vectors. if just one argument given, - will treat it as filename and read the orientation vectors from - the file. if 6 arguments are given, will treat them as 6 float - numbers and construct the 1st and 2nd vector and calculate the 3rd - one automatically. - this information will be used to determine image orientation, - as well as to adjust gradient vectors with oblique angle when""", + desc="""\ +Specify image orientation vectors. if just one argument given, +will treat it as filename and read the orientation vectors from +the file. If 6 arguments are given, will treat them as 6 float +numbers and construct the 1st and 2nd vector and calculate the 3rd +one automatically. +This information will be used to determine image orientation, +as well as to adjust gradient vectors with oblique angle when.""", argstr="-iop %f", ) oblique_correction = traits.Bool( - desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not - adjust gradient accordingly, thus it requires adjustment for correct - diffusion tensor calculation""", + desc="""\ +When oblique angle(s) applied, some SIEMENS DTI protocols do not +adjust gradient accordingly, thus it requires adjustment for correct +diffusion tensor calculation""", argstr="-oc", ) b0_threshold = traits.Float( - desc="""program will use b0 image with the given threshold to mask out high - background of fa/adc maps. by default it will calculate threshold - automatically. but if it failed, you need to set it manually.""", + desc="""\ +Program will use b0 image with the given threshold to mask out high +background of fa/adc maps. by default it will calculate threshold +automatically. but if it failed, you need to set it manually.""", argstr="-b0_th", ) @@ -167,12 +170,16 @@ class DTITrackerInputSpec(CommandLineInputSpec): "analyze", "ni1", "nii.gz", - desc="""input and output file type. 
accepted values are:
-            analyze -> analyze format 7.5
-            ni1 -> nifti format saved in seperate .hdr and .img file
-            nii -> nifti format with one .nii file
-            nii.gz -> nifti format with compression
-            default type is 'nii'""",
+        desc="""\
+Input and output file type. Accepted values are:
+
+* analyze -> analyze format 7.5
+* ni1 -> nifti format saved in separate .hdr and .img file
+* nii -> nifti format with one .nii file
+* nii.gz -> nifti format with compression
+
+Default type is 'nii'
+""",
         argstr="-it %s",
     )
     tracking_method = traits.Enum(
         "fact",
         "rk2",
         "tl",
         "sl",
-        desc="""fact -> use FACT method for tracking. this is the default method.
-            rk2 -> use 2nd order runge-kutta method for tracking.
-            tl -> use tensorline method for tracking.
-            sl -> use interpolated streamline method with fixed step-length""",
+        desc="""\
+Tracking algorithm.
+
+* fact -> use FACT method for tracking. this is the default method.
+* rk2 -> use 2nd order runge-kutta method for tracking.
+* tl -> use tensorline method for tracking.
+* sl -> use interpolated streamline method with fixed step-length
+
+""",
         argstr="-%s",
     )
     step_length = traits.Float(
-        desc="""set step length, in the unit of minimum voxel size.
-            default value is 0.5 for interpolated streamline method
-            and 0.1 for other methods""",
+        desc="""\
+Step length, in the unit of minimum voxel size.
+default value is 0.5 for interpolated streamline method
+and 0.1 for other methods""",
         argstr="-l %f",
     )
     angle_threshold = traits.Float(
         desc="set angle threshold. default value is 35 degree", argstr="-at %f"
     )
     angle_threshold_weight = traits.Float(
-        desc="set angle threshold weighting factor. weighting will be be applied \
-            on top of the angle_threshold",
+        desc="set angle threshold weighting factor. weighting will be applied "
+        "on top of the angle_threshold",
         argstr="-atw %f",
     )
     random_seed = traits.Int(
-        desc="use random location in a voxel instead of the center of the voxel \
-            to seed. can also define number of seed per voxel. default is 1",
+        desc="use random location in a voxel instead of the center of the voxel "
+        "to seed. can also define number of seed per voxel. 
default is 1", argstr="-rseed %d", ) invert_x = traits.Bool(desc="invert x component of the vector", argstr="-ix") @@ -215,14 +228,14 @@ class DTITrackerInputSpec(CommandLineInputSpec): desc="first mask image", mandatory=True, argstr="-m %s", position=2 ) mask1_threshold = traits.Float( - desc="threshold value for the first mask image, if not given, the program will \ - try automatically find the threshold", + desc="threshold value for the first mask image, if not given, the program will " + "try automatically find the threshold", position=3, ) mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4) mask2_threshold = traits.Float( - desc="threshold value for the second mask image, if not given, the program will \ - try automatically find the threshold", + desc="threshold value for the second mask image, if not given, the program will " + "try automatically find the threshold", position=5, ) input_data_prefix = traits.Str( diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index 1935015d0c..8d8c5bf9bd 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -33,50 +33,54 @@ class HARDIMatInputSpec(CommandLineInputSpec): ) order = traits.Int( argstr="-order %s", - desc="""maximum order of spherical harmonics. must be even number. default - is 4""", + desc="""maximum order of spherical harmonics. must be even number. default is 4""", ) odf_file = File( exists=True, argstr="-odf %s", - desc="""filename that contains the reconstruction points on a HEMI-sphere. - use the pre-set 181 points by default""", + desc="""\ +Filename that contains the reconstruction points on a HEMI-sphere. +Use the pre-set 181 points by default""", ) reference_file = File( exists=True, argstr="-ref %s", - desc="""provide a dicom or nifti image as the reference for the program to - figure out the image orientation information. if no such info was - found in the given image header, the next 5 options -info, etc., - will be used if provided. if image orientation info can be found - in the given reference, all other 5 image orientation options will - be IGNORED""", + desc="""\ +Provide a dicom or nifti image as the reference for the program to +figure out the image orientation information. if no such info was +found in the given image header, the next 5 options -info, etc., +will be used if provided. if image orientation info can be found +in the given reference, all other 5 image orientation options will +be IGNORED""", ) image_info = File( exists=True, argstr="-info %s", - desc="""specify image information file. the image info file is generated - from original dicom image by diff_unpack program and contains image - orientation and other information needed for reconstruction and - tracking. by default will look into the image folder for .info file""", + desc="""\ +specify image information file. the image info file is generated +from original dicom image by diff_unpack program and contains image +orientation and other information needed for reconstruction and +tracking. by default will look into the image folder for .info file""", ) image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, - desc="""specify image orientation vectors. if just one argument given, - will treat it as filename and read the orientation vectors from - the file. if 6 arguments are given, will treat them as 6 float - numbers and construct the 1st and 2nd vector and calculate the 3rd - one automatically. 
- this information will be used to determine image orientation, - as well as to adjust gradient vectors with oblique angle when""", + desc="""\ +specify image orientation vectors. if just one argument given, +will treat it as filename and read the orientation vectors from +the file. if 6 arguments are given, will treat them as 6 float +numbers and construct the 1st and 2nd vector and calculate the 3rd +one automatically. +this information will be used to determine image orientation, +as well as to adjust gradient vectors with oblique angle when""", argstr="-iop %f", ) oblique_correction = traits.Bool( - desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not - adjust gradient accordingly, thus it requires adjustment for correct - diffusion tensor calculation""", + desc="""\ +when oblique angle(s) applied, some SIEMENS dti protocols do not +adjust gradient accordingly, thus it requires adjustment for correct +diffusion tensor calculation""", argstr="-oc", ) @@ -143,10 +147,11 @@ class ODFReconInputSpec(CommandLineInputSpec): ) n_b0 = traits.Int( argstr="-b0 %s", - desc="""number of b0 scans. by default the program gets this information - from the number of directions and number of volumes in - the raw data. useful when dealing with incomplete raw - data set or only using part of raw data set to reconstruct""", + desc="""\ +number of b0 scans. by default the program gets this information +from the number of directions and number of volumes in +the raw data. useful when dealing with incomplete raw +data set or only using part of raw data set to reconstruct""", mandatory=True, ) output_type = traits.Enum( @@ -159,9 +164,10 @@ class ODFReconInputSpec(CommandLineInputSpec): usedefault=True, ) sharpness = traits.Float( - desc="""smooth or sharpen the raw data. factor > 0 is smoothing. - factor < 0 is sharpening. default value is 0 - NOTE: this option applies to DSI study only""", + desc="""\ +smooth or sharpen the raw data. factor > 0 is smoothing. +factor < 0 is sharpening. default value is 0 +NOTE: this option applies to DSI study only""", argstr="-s %f", ) filter = traits.Bool( @@ -176,19 +182,21 @@ class ODFReconInputSpec(CommandLineInputSpec): traits.Float(), minlen=6, maxlen=6, - desc="""specify image orientation vectors. if just one argument given, - will treat it as filename and read the orientation vectors from - the file. if 6 arguments are given, will treat them as 6 float - numbers and construct the 1st and 2nd vector and calculate the 3rd - one automatically. - this information will be used to determine image orientation, - as well as to adjust gradient vectors with oblique angle when""", + desc="""\ +specify image orientation vectors. if just one argument given, +will treat it as filename and read the orientation vectors from +the file. if 6 arguments are given, will treat them as 6 float +numbers and construct the 1st and 2nd vector and calculate the 3rd +one automatically. 
+this information will be used to determine image orientation, +as well as to adjust gradient vectors with oblique angle when""", argstr="-iop %f", ) oblique_correction = traits.Bool( - desc="""when oblique angle(s) applied, some SIEMENS dti protocols do not - adjust gradient accordingly, thus it requires adjustment for correct - diffusion tensor calculation""", + desc="""\ +when oblique angle(s) applied, some SIEMENS dti protocols do not +adjust gradient accordingly, thus it requires adjustment for correct +diffusion tensor calculation""", argstr="-oc", ) @@ -255,23 +263,27 @@ class ODFTrackerInputSpec(CommandLineInputSpec): ) runge_kutta2 = traits.Bool( argstr="-rk2", - desc="""use 2nd order runge-kutta method for tracking. - default tracking method is non-interpolate streamline""", + desc="""\ +use 2nd order runge-kutta method for tracking. +default tracking method is non-interpolate streamline""", ) step_length = traits.Float( argstr="-l %f", - desc="""set step length, in the unit of minimum voxel size. - default value is 0.1.""", + desc="""\ +set step length, in the unit of minimum voxel size. +default value is 0.1.""", ) angle_threshold = traits.Float( argstr="-at %f", - desc="""set angle threshold. default value is 35 degree for - default tracking method and 25 for rk2""", + desc="""\ +set angle threshold. default value is 35 degree for +default tracking method and 25 for rk2""", ) random_seed = traits.Int( argstr="-rseed %s", - desc="""use random location in a voxel instead of the center of the voxel - to seed. can also define number of seed per voxel. default is 1""", + desc="""\ +use random location in a voxel instead of the center of the voxel +to seed. can also define number of seed per voxel. default is 1""", ) invert_x = traits.Bool(argstr="-ix", desc="invert x component of the vector") invert_y = traits.Bool(argstr="-iy", desc="invert y component of the vector") @@ -284,39 +296,42 @@ class ODFTrackerInputSpec(CommandLineInputSpec): desc="first mask image", mandatory=True, argstr="-m %s", position=2 ) mask1_threshold = traits.Float( - desc="threshold value for the first mask image, if not given, the program will \ - try automatically find the threshold", + desc="threshold value for the first mask image, if not given, the program will " + "try automatically find the threshold", position=3, ) mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4) mask2_threshold = traits.Float( - desc="threshold value for the second mask image, if not given, the program will \ - try automatically find the threshold", + desc="threshold value for the second mask image, if not given, the program will " + "try automatically find the threshold", position=5, ) limit = traits.Int( argstr="-limit %d", - desc="""in some special case, such as heart data, some track may go into - infinite circle and take long time to stop. this option allows - setting a limit for the longest tracking steps (voxels)""", + desc="""\ +in some special case, such as heart data, some track may go into +infinite circle and take long time to stop. this option allows +setting a limit for the longest tracking steps (voxels)""", ) dsi = traits.Bool( argstr="-dsi", - desc=""" specify the input odf data is dsi. because dsi recon uses fixed - pre-calculated matrix, some special orientation patch needs to - be applied to keep dti/dsi/q-ball consistent.""", + desc="""\ +specify the input odf data is dsi. 
because dsi recon uses fixed +pre-calculated matrix, some special orientation patch needs to +be applied to keep dti/dsi/q-ball consistent.""", ) image_orientation_vectors = traits.List( traits.Float(), minlen=6, maxlen=6, - desc="""specify image orientation vectors. if just one argument given, - will treat it as filename and read the orientation vectors from - the file. if 6 arguments are given, will treat them as 6 float - numbers and construct the 1st and 2nd vector and calculate the 3rd - one automatically. - this information will be used to determine image orientation, - as well as to adjust gradient vectors with oblique angle when""", + desc="""\ +specify image orientation vectors. if just one argument given, +will treat it as filename and read the orientation vectors from +the file. if 6 arguments are given, will treat them as 6 float +numbers and construct the 1st and 2nd vector and calculate the 3rd +one automatically. +this information will be used to determine image orientation, +as well as to adjust gradient vectors with oblique angle when""", argstr="-iop %f", ) slice_order = traits.Int( @@ -333,17 +348,18 @@ class ODFTrackerInputSpec(CommandLineInputSpec): "LPS", "LPI", argstr="-vorder %s", - desc="""specify the voxel order in RL/AP/IS (human brain) reference. must be - 3 letters with no space in between. - for example, RAS means the voxel row is from L->R, the column - is from P->A and the slice order is from I->S. - by default voxel order is determined by the image orientation - (but NOT guaranteed to be correct because of various standards). - for example, siemens axial image is LPS, coronal image is LIP and - sagittal image is PIL. - this information also is NOT needed for tracking but will be saved - in the track file and is essential for track display to map onto - the right coordinates""", + desc="""\ +specify the voxel order in RL/AP/IS (human brain) reference. must be +3 letters with no space in between. +for example, RAS means the voxel row is from L->R, the column +is from P->A and the slice order is from I->S. +by default voxel order is determined by the image orientation +(but NOT guaranteed to be correct because of various standards). +for example, siemens axial image is LPS, coronal image is LIP and +sagittal image is PIL. +this information also is NOT needed for tracking but will be saved +in the track file and is essential for track display to map onto +the right coordinates""", ) diff --git a/nipype/interfaces/dipy/__init__.py b/nipype/interfaces/dipy/__init__.py index 1bd5dcb217..ec840871ba 100644 --- a/nipype/interfaces/dipy/__init__.py +++ b/nipype/interfaces/dipy/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""DIPY is a computational neuroimaging tool for diffusion MRI.""" from .tracks import StreamlineTractography, TrackDensityMap from .tensors import TensorMode, DTI from .preprocess import Resample, Denoise diff --git a/nipype/interfaces/dipy/tensors.py b/nipype/interfaces/dipy/tensors.py index 6f77d9a47e..f2b197f372 100644 --- a/nipype/interfaces/dipy/tensors.py +++ b/nipype/interfaces/dipy/tensors.py @@ -94,23 +94,25 @@ class TensorMode(DipyDiffusionInterface): """ Creates a map of the mode of the diffusion tensors given a set of diffusion-weighted images, as well as their associated b-values and - b-vectors. Fits the diffusion tensors and calculates tensor mode + b-vectors [1]_. Fits the diffusion tensors and calculates tensor mode with Dipy. - .. [1] Daniel B. Ennis and G. 
Kindlmann, "Orthogonal Tensor - Invariants and the Analysis of Diffusion Tensor Magnetic Resonance - Images", Magnetic Resonance in Medicine, vol. 55, no. 1, pp. 136-146, - 2006. - Example ------- - >>> import nipype.interfaces.dipy as dipy >>> mode = dipy.TensorMode() >>> mode.inputs.in_file = 'diffusion.nii' >>> mode.inputs.in_bvec = 'bvecs' >>> mode.inputs.in_bval = 'bvals' >>> mode.run() # doctest: +SKIP + + References + ---------- + .. [1] Daniel B. Ennis and G. Kindlmann, "Orthogonal Tensor + Invariants and the Analysis of Diffusion Tensor Magnetic Resonance + Images", Magnetic Resonance in Medicine, vol. 55, no. 1, pp. 136-146, + 2006. + """ input_spec = TensorModeInputSpec diff --git a/nipype/interfaces/dtitk/__init__.py b/nipype/interfaces/dtitk/__init__.py index 6c9569114c..d1420c3afb 100644 --- a/nipype/interfaces/dtitk/__init__.py +++ b/nipype/interfaces/dtitk/__init__.py @@ -1,11 +1,10 @@ -"""The dtitk module provides classes for interfacing with the `Diffusion -Tensor Imaging Toolkit (DTI-TK) +""" +DTI-TK is a spatial normalization and atlas construction toolkit for DTI. + +Interfaces for the `Diffusion Tensor Imaging Toolkit (DTI-TK) `_ command line tools. -Top-level namespace for dti-tk. """ - -# from .base import () from .registration import ( Rigid, Affine, diff --git a/nipype/interfaces/dtitk/utils.py b/nipype/interfaces/dtitk/utils.py index e959fd8f0c..c5850450a6 100644 --- a/nipype/interfaces/dtitk/utils.py +++ b/nipype/interfaces/dtitk/utils.py @@ -66,11 +66,10 @@ class TVAdjustVoxSpOutputSpec(TraitedSpec): class TVAdjustVoxSp(CommandLineDtitk): """ - Adjusts the voxel space of a tensor volume + Adjusts the voxel space of a tensor volume. Example ------- - >>> from nipype.interfaces import dtitk >>> node = dtitk.TVAdjustVoxSp() >>> node.inputs.in_file = 'im1.nii' @@ -78,6 +77,7 @@ class TVAdjustVoxSp(CommandLineDtitk): >>> node.cmdline 'TVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' >>> node.run() # doctest: +SKIP + """ input_spec = TVAdjustVoxSpInputSpec @@ -119,19 +119,19 @@ class SVAdjustVoxSpOutputSpec(TraitedSpec): class SVAdjustVoxSp(CommandLineDtitk): """ - Adjusts the voxel space of a scalar volume + Adjusts the voxel space of a scalar volume. - Example - ------- + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.SVAdjustVoxSp() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.target_file = 'im2.nii' + >>> node.cmdline + 'SVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' + >>> node.run() # doctest: +SKIP - >>> from nipype.interfaces import dtitk - >>> node = dtitk.SVAdjustVoxSp() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.target_file = 'im2.nii' - >>> node.cmdline - 'SVAdjustVoxelspace -in im1.nii -out im1_avs.nii -target im2.nii' - >>> node.run() # doctest: +SKIP - """ + """ input_spec = SVAdjustVoxSpInputSpec output_spec = SVAdjustVoxSpOutputSpec @@ -189,19 +189,19 @@ class TVResampleOutputSpec(TraitedSpec): class TVResample(CommandLineDtitk): """ - Resamples a tensor volume + Resamples a tensor volume. 
- Example - ------- + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.TVResample() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.target_file = 'im2.nii' + >>> node.cmdline + 'TVResample -in im1.nii -out im1_resampled.nii -target im2.nii' + >>> node.run() # doctest: +SKIP - >>> from nipype.interfaces import dtitk - >>> node = dtitk.TVResample() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.target_file = 'im2.nii' - >>> node.cmdline - 'TVResample -in im1.nii -out im1_resampled.nii -target im2.nii' - >>> node.run() # doctest: +SKIP - """ + """ input_spec = TVResampleInputSpec output_spec = TVResampleOutputSpec @@ -256,19 +256,19 @@ class SVResampleOutputSpec(TraitedSpec): class SVResample(CommandLineDtitk): """ - Resamples a scalar volume + Resamples a scalar volume. - Example - ------- + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.SVResample() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.target_file = 'im2.nii' + >>> node.cmdline + 'SVResample -in im1.nii -out im1_resampled.nii -target im2.nii' + >>> node.run() # doctest: +SKIP - >>> from nipype.interfaces import dtitk - >>> node = dtitk.SVResample() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.target_file = 'im2.nii' - >>> node.cmdline - 'SVResample -in im1.nii -out im1_resampled.nii -target im2.nii' - >>> node.run() # doctest: +SKIP - """ + """ input_spec = SVResampleInputSpec output_spec = SVResampleOutputSpec @@ -290,19 +290,19 @@ class TVtoolOutputSpec(TraitedSpec): class TVtool(CommandLineDtitk): """ - Calculates a tensor metric volume from a tensor volume + Calculates a tensor metric volume from a tensor volume. - Example - ------- + Example + ------- + >>> from nipype.interfaces import dtitk + >>> node = dtitk.TVtool() + >>> node.inputs.in_file = 'im1.nii' + >>> node.inputs.in_flag = 'fa' + >>> node.cmdline + 'TVtool -in im1.nii -fa -out im1_fa.nii' + >>> node.run() # doctest: +SKIP - >>> from nipype.interfaces import dtitk - >>> node = dtitk.TVtool() - >>> node.inputs.in_file = 'im1.nii' - >>> node.inputs.in_flag = 'fa' - >>> node.cmdline - 'TVtool -in im1.nii -fa -out im1_fa.nii' - >>> node.run() # doctest: +SKIP - """ + """ input_spec = TVtoolInputSpec output_spec = TVtoolOutputSpec @@ -383,11 +383,10 @@ class BinThreshOutputSpec(TraitedSpec): class BinThresh(CommandLineDtitk): """ - Binarizes an image + Binarizes an image. Example ------- - >>> from nipype.interfaces import dtitk >>> node = dtitk.BinThresh() >>> node.inputs.in_file = 'im1.nii' @@ -398,6 +397,7 @@ class BinThresh(CommandLineDtitk): >>> node.cmdline 'BinaryThresholdImageFilter im1.nii im1_thrbin.nii 0 100 1 0' >>> node.run() # doctest: +SKIP + """ input_spec = BinThreshInputSpec diff --git a/nipype/interfaces/dynamic_slicer.py b/nipype/interfaces/dynamic_slicer.py index 8404aad802..6dc6a7e154 100644 --- a/nipype/interfaces/dynamic_slicer.py +++ b/nipype/interfaces/dynamic_slicer.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: - +"""Experimental Slicer wrapper - Work in progress.""" import os import warnings import xml.dom.minidom @@ -22,9 +22,7 @@ class SlicerCommandLineInputSpec(DynamicTraitedSpec, CommandLineInputSpec): class SlicerCommandLine(CommandLine): - """Experimental Slicer wrapper. Work in progress. - - """ + """Experimental Slicer wrapper. 
Work in progress.""" _cmd = "Slicer3" input_spec = SlicerCommandLineInputSpec diff --git a/nipype/interfaces/elastix/__init__.py b/nipype/interfaces/elastix/__init__.py index e7ddc2a9f7..8f60ed8ff1 100644 --- a/nipype/interfaces/elastix/__init__.py +++ b/nipype/interfaces/elastix/__init__.py @@ -2,8 +2,6 @@ # coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Top-level namespace for elastix.""" - - +"""elastix is a toolbox for rigid and nonrigid registration of images.""" from .registration import Registration, ApplyWarp, AnalyzeWarp, PointsWarp from .utils import EditTransform diff --git a/nipype/interfaces/freesurfer/__init__.py b/nipype/interfaces/freesurfer/__init__.py index 803ac571cb..705cf895e4 100644 --- a/nipype/interfaces/freesurfer/__init__.py +++ b/nipype/interfaces/freesurfer/__init__.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Top-level namespace for freesurfer.""" +"""FreeSurfer is an open source software suite for processing and analyzing brain MRI images.""" from .base import Info, FSCommand, no_freesurfer from .preprocess import ( diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index 8bf7918ae4..5209d731c9 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -116,7 +116,6 @@ class MRISPreproc(FSCommand): Examples -------- - >>> preproc = MRISPreproc() >>> preproc.inputs.target = 'fsaverage' >>> preproc.inputs.hemi = 'lh' @@ -176,8 +175,8 @@ class MRISPreprocReconAllInputSpec(MRISPreprocInputSpec): ) copy_inputs = traits.Bool( desc="If running as a node, set this to True " - + "this will copy some implicit inputs to the " - + "node directory." + "this will copy some implicit inputs to the " + "node directory." 
) @@ -185,7 +184,7 @@ class MRISPreprocReconAll(MRISPreproc): """Extends MRISPreproc to allow it to be used in a recon-all workflow Examples - ======== + -------- >>> preproc = MRISPreprocReconAll() >>> preproc.inputs.target = 'fsaverage' >>> preproc.inputs.hemi = 'lh' @@ -194,6 +193,7 @@ class MRISPreprocReconAll(MRISPreproc): >>> preproc.inputs.out_file = 'concatenated_file.mgz' >>> preproc.cmdline 'mris_preproc --hemi lh --out concatenated_file.mgz --s subject_id --target fsaverage --iv cont1.nii register.dat --iv cont1a.nii register.dat' + """ input_spec = MRISPreprocReconAllInputSpec @@ -451,7 +451,6 @@ class GLMFit(FSCommand): Examples -------- - >>> glmfit = GLMFit() >>> glmfit.inputs.in_file = 'functional.nii' >>> glmfit.inputs.one_sample = True @@ -614,7 +613,6 @@ class Binarize(FSCommand): Examples -------- - >>> binvol = Binarize(in_file='structural.nii', min=10, binary_file='foo_out.nii') >>> binvol.cmdline 'mri_binarize --o foo_out.nii --i structural.nii --min 10.000000' @@ -755,7 +753,6 @@ class Concatenate(FSCommand): Examples -------- - Combine two input volumes into one volume with two frames >>> concat = Concatenate() @@ -974,7 +971,6 @@ class SegStats(FSCommand): Examples -------- - >>> import nipype.interfaces.freesurfer as fs >>> ss = fs.SegStats() >>> ss.inputs.annot = ('PWS04', 'lh', 'aparc') @@ -1077,8 +1073,8 @@ class SegStatsReconAllInputSpec(SegStatsInputSpec): aseg = File(exists=True, desc="Mandatory implicit input in 5.3") copy_inputs = traits.Bool( desc="If running as a node, set this to True " - + "otherwise, this will copy the implicit inputs " - + "to the node directory." + "otherwise, this will copy the implicit inputs " + "to the node directory." ) @@ -1089,7 +1085,7 @@ class SegStatsReconAll(SegStats): To ensure backwards compatability of SegStats, this class was created. Examples - ======== + -------- >>> from nipype.interfaces.freesurfer import SegStatsReconAll >>> segstatsreconall = SegStatsReconAll() >>> segstatsreconall.inputs.annot = ('PWS04', 'lh', 'aparc') @@ -1118,6 +1114,7 @@ class SegStatsReconAll(SegStats): >>> segstatsreconall.inputs.exclude_id = 0 >>> segstatsreconall.cmdline 'mri_segstats --annot PWS04 lh aparc --avgwf ./avgwf.txt --brain-vol-from-seg --surf-ctx-vol --empty --etiv --euler --excl-ctxgmwm --excludeid 0 --subcortgray --subject 10335 --supratent --totalgray --surf-wm-vol --sum ./summary.stats' + """ input_spec = SegStatsReconAllInputSpec @@ -1248,7 +1245,6 @@ class Label2Vol(FSCommand): Examples -------- - >>> binvol = Label2Vol(label_file='cortex.label', template_file='structural.nii', reg_file='register.dat', fill_thresh=0.5, vol_label_file='foo_out.nii') >>> binvol.cmdline 'mri_label2vol --fillthresh 0.5 --label cortex.label --reg register.dat --temp structural.nii --o foo_out.nii' @@ -1340,7 +1336,6 @@ class MS_LDA(FSCommand): Examples -------- - >>> grey_label = 2 >>> white_label = 3 >>> zero_value = 1 @@ -1351,6 +1346,7 @@ class MS_LDA(FSCommand): images=['FLASH1.mgz', 'FLASH2.mgz', 'FLASH3.mgz']) >>> optimalWeights.cmdline 'mri_ms_LDA -conform -label label.mgz -lda 2 3 -shift 1 -W -synth synth_out.mgz -weight weights.txt FLASH1.mgz FLASH2.mgz FLASH3.mgz' + """ _cmd = "mri_ms_LDA" @@ -1625,12 +1621,11 @@ class SphericalAverageInputSpec(FSTraitedSpec): argstr="%s", mandatory=True, position=-5, - desc="""Filename from the average subject directory. - Example: to use rh.entorhinal.label as the input label - filename, set fname to 'rh.entorhinal' and which to - 'label'. 
The program will then search for - '{in_average}/label/rh.entorhinal.label' - """, + desc="""\ +Filename from the average subject directory. +Example: to use rh.entorhinal.label as the input label filename, set fname to 'rh.entorhinal' +and which to 'label'. The program will then search for +``/label/rh.entorhinal.label``""", ) which = traits.Enum( "coords", @@ -1673,6 +1668,7 @@ class SphericalAverage(FSCommand): >>> sphericalavg.inputs.threshold = 5 >>> sphericalavg.cmdline 'mris_spherical_average -erode 2 -o 10335 -t 5.0 label lh.entorhinal lh pial . test.out' + """ _cmd = "mris_spherical_average" diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index f7e09f7629..39a444495c 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -2491,7 +2491,7 @@ class MNIBiasCorrection(FSCommand): >>> correct.cmdline 'mri_nu_correct.mni --distance 50 --i norm.mgz --n 6 --o norm_output.mgz --proto-iters 1000' - References: + References ---------- [http://freesurfer.net/fswiki/mri_nu_correct.mni] [http://www.bic.mni.mcgill.ca/software/N3] @@ -2677,11 +2677,13 @@ class CANormalize(FSCommand): """This program creates a normalized volume using the brain volume and an input gca file. - For complete details, see the `FS Documentation `_ + See Also + -------- + For complete details, see the `FS Documentation + `__. Examples - ======== - + -------- >>> from nipype.interfaces import freesurfer >>> ca_normalize = freesurfer.CANormalize() >>> ca_normalize.inputs.in_file = "T1.mgz" @@ -2689,6 +2691,7 @@ class CANormalize(FSCommand): >>> ca_normalize.inputs.transform = "trans.mat" # in practice use .lta transforms >>> ca_normalize.cmdline 'mri_ca_normalize T1.mgz atlas.nii.gz trans.mat T1_norm.mgz' + """ _cmd = "mri_ca_normalize" @@ -2752,16 +2755,20 @@ class CARegisterOutputSpec(TraitedSpec): class CARegister(FSCommandOpenMP): """Generates a multi-dimensional talairach transform from a gca file and talairach.lta file - For complete details, see the `FS Documentation `_ + See Also + -------- + For complete details, see the `FS Documentation + `__ Examples - ======== + -------- >>> from nipype.interfaces import freesurfer >>> ca_register = freesurfer.CARegister() >>> ca_register.inputs.in_file = "norm.mgz" >>> ca_register.inputs.out_file = "talairach.m3z" >>> ca_register.cmdline 'mri_ca_register norm.mgz talairach.m3z' + """ _cmd = "mri_ca_register" @@ -2851,12 +2858,15 @@ class CALabelOutputSpec(TraitedSpec): class CALabel(FSCommandOpenMP): - """ - For complete details, see the `FS Documentation `_ + """Label subcortical structures based in GCA model. 
- Examples - ======== + See Also + -------- + For complete details, see the `FS Documentation + `__ + Examples + -------- >>> from nipype.interfaces import freesurfer >>> ca_label = freesurfer.CALabel() >>> ca_label.inputs.in_file = "norm.mgz" @@ -2865,6 +2875,7 @@ class CALabel(FSCommandOpenMP): >>> ca_label.inputs.template = "Template_6.nii" # in practice use .gcs extension >>> ca_label.cmdline 'mri_ca_label norm.mgz trans.mat Template_6.nii out.mgz' + """ _cmd = "mri_ca_label" diff --git a/nipype/interfaces/freesurfer/registration.py b/nipype/interfaces/freesurfer/registration.py index c93f813088..d6fd82cc15 100644 --- a/nipype/interfaces/freesurfer/registration.py +++ b/nipype/interfaces/freesurfer/registration.py @@ -48,8 +48,7 @@ class MPRtoMNI305(FSScriptCommand): For complete details, see FreeSurfer documentation Examples - ======== - + -------- >>> from nipype.interfaces.freesurfer import MPRtoMNI305, Info >>> mprtomni305 = MPRtoMNI305() >>> mprtomni305.inputs.target = 'structural.nii' diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index 02a3831bc6..a5bc6f36b5 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -446,7 +446,7 @@ class SurfaceSmoothInputSpec(FSTraitedSpec): True, argstr="--cortex", usedefault=True, - desc="only smooth within $hemi.cortex.label", + desc="only smooth within ``$hemi.cortex.label``", ) reshape = traits.Bool( argstr="--reshape", desc="reshape surface vector to fit in non-mgh format" @@ -468,14 +468,13 @@ class SurfaceSmooth(FSCommand): smoothing process. If the latter, the underlying program will calculate the correct number of iterations internally. - .. seealso:: - - SmoothTessellation() Interface - For smoothing a tessellated surface (e.g. in gifti or .stl) + See Also + -------- + `nipype.interfaces.freesurfer.utils.SmoothTessellation`_ interface for + smoothing a tessellated surface (e.g. in gifti or .stl) Examples -------- - >>> import nipype.interfaces.freesurfer as fs >>> smoother = fs.SurfaceSmooth() >>> smoother.inputs.in_file = "lh.cope1.mgz" @@ -1569,16 +1568,12 @@ class MRIPretess(FSCommand): """ Uses Freesurfer's mri_pretess to prepare volumes to be tessellated. - Description - ----------- - Changes white matter (WM) segmentation so that the neighbors of all voxels labeled as WM have a face in common - no edges or corners allowed. 
Example ------- - >>> import nipype.interfaces.freesurfer as fs >>> pretess = fs.MRIPretess() >>> pretess.inputs.in_filled = 'wm.mgz' @@ -1675,10 +1670,6 @@ def _gen_outfilename(self): class SmoothTessellationInputSpec(FSTraitedSpec): - """ - This program smooths the tessellation of a surface using 'mris_smooth' - """ - in_file = File( exists=True, mandatory=True, @@ -1694,17 +1685,17 @@ class SmoothTessellationInputSpec(FSTraitedSpec): argstr="-n %d", desc="Number of smoothing iterations (default=10)" ) snapshot_writing_iterations = traits.Int( - argstr="-w %d", desc='Write snapshot every "n" iterations' + argstr="-w %d", desc='Write snapshot every *n* iterations' ) use_gaussian_curvature_smoothing = traits.Bool( argstr="-g", desc="Use Gaussian curvature smoothing" ) gaussian_curvature_norm_steps = traits.Int( - argstr="%d ", desc="Use Gaussian curvature smoothing" + argstr="%d", desc="Use Gaussian curvature smoothing" ) gaussian_curvature_smoothing_steps = traits.Int( - argstr="%d", desc="Use Gaussian curvature smoothing" + argstr=" %d", desc="Use Gaussian curvature smoothing" ) disable_estimates = traits.Bool( @@ -1722,10 +1713,10 @@ class SmoothTessellationInputSpec(FSTraitedSpec): desc="output filename or True to generate one", ) out_curvature_file = File( - argstr="-c %s", desc='Write curvature to ?h.curvname (default "curv")' + argstr="-c %s", desc='Write curvature to ``?h.curvname`` (default "curv")' ) out_area_file = File( - argstr="-b %s", desc='Write area to ?h.areaname (default "area")' + argstr="-b %s", desc='Write area to ``?h.areaname`` (default "area")' ) seed = traits.Int( argstr="-seed %d", desc="Seed for setting random number generator" @@ -1737,25 +1728,25 @@ class SmoothTessellationOutputSpec(TraitedSpec): This program smooths the tessellation of a surface using 'mris_smooth' """ - surface = File(exists=True, desc="Smoothed surface file ") + surface = File(exists=True, desc="Smoothed surface file.") class SmoothTessellation(FSCommand): """ - This program smooths the tessellation of a surface using 'mris_smooth' + Smooth a tessellated surface. - .. seealso:: - - SurfaceSmooth() Interface - For smoothing a scalar field along a surface manifold + See Also + -------- + `nipype.interfaces.freesurfer.utils.SurfaceSmooth`_ interface for smoothing a scalar field + along a surface manifold Example ------- - >>> import nipype.interfaces.freesurfer as fs >>> smooth = fs.SmoothTessellation() >>> smooth.inputs.in_file = 'lh.hippocampus.stl' >>> smooth.run() # doctest: +SKIP + """ _cmd = "mris_smooth" @@ -1951,11 +1942,10 @@ class Tkregister2(FSCommand): Examples -------- - Get transform matrix between orig (*tkRAS*) and native (*scannerRAS*) coordinates in Freesurfer. Implements the first step of mapping surfaces to native space in `this guide - `_. + `__. >>> from nipype.interfaces.freesurfer import Tkregister2 >>> tk2 = Tkregister2(reg_file='T1_to_native.dat') @@ -2050,11 +2040,16 @@ class AddXFormToHeaderOutputSpec(TraitedSpec): class AddXFormToHeader(FSCommand): - """ Just adds specified xform to the volume header + """ + Just adds specified xform to the volume header. - (!) WARNING: transform input **MUST** be an absolute path to a DataSink'ed transform or - the output will reference a transform in the workflow cache directory! + .. danger :: + Input transform **MUST** be an absolute path to a DataSink'ed transform or + the output will reference a transform in the workflow cache directory! 
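+
+    A minimal sketch of the safe pattern implied by this warning -- the file name
+    is hypothetical, and ``op.abspath`` stands in for any way of pinning the
+    transform to an absolute, DataSink'ed location:
+
+    >>> import os.path as op
+    >>> from nipype.interfaces.freesurfer import AddXFormToHeader
+    >>> adder = AddXFormToHeader()
+    >>> adder.inputs.in_file = 'norm.mgz'
+    >>> adder.inputs.transform = op.abspath('trans.mat')  # hypothetical file, forced absolute
+    >>> adder.run()  # doctest: +SKIP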
+ + Examples + -------- >>> from nipype.interfaces.freesurfer import AddXFormToHeader >>> adder = AddXFormToHeader() >>> adder.inputs.in_file = 'norm.mgz' @@ -2065,10 +2060,9 @@ class AddXFormToHeader(FSCommand): >>> adder.inputs.copy_name = True >>> adder.cmdline 'mri_add_xform_to_header -c trans.mat norm.mgz output.mgz' - >>> adder.run() # doctest: +SKIP - References: + References ---------- [https://surfer.nmr.mgh.harvard.edu/fswiki/mri_add_xform_to_header] @@ -3829,11 +3823,10 @@ class Aparc2AsegInputSpec(FSTraitedSpec): ctxseg = File(argstr="--ctxseg %s", exists=True, desc="") label_wm = traits.Bool( argstr="--labelwm", - desc=""" - For each voxel labeled as white matter in the aseg, re-assign - its label to be that of the closest cortical point if its - distance is less than dmaxctx - """, + desc="""\ +For each voxel labeled as white matter in the aseg, re-assign +its label to be that of the closest cortical point if its +distance is less than dmaxctx.""", ) hypo_wm = traits.Bool(argstr="--hypo-as-wm", desc="Label hypointensities as WM") rip_unknown = traits.Bool( @@ -3842,8 +3835,8 @@ class Aparc2AsegInputSpec(FSTraitedSpec): a2009s = traits.Bool(argstr="--a2009s", desc="Using the a2009s atlas") copy_inputs = traits.Bool( desc="If running as a node, set this to True." - + "This will copy the input files to the node " - + "directory." + "This will copy the input files to the node " + "directory." ) @@ -3859,17 +3852,17 @@ class Aparc2Aseg(FSCommand): labeled as cortex (3 and 42) and assign it the label of the closest cortical vertex. If the voxel is not in the ribbon (as defined by mri/ lh.ribbon and rh.ribbon), then the voxel is marked as unknown (0). - This can be turned off with --noribbon. The cortical parcellation is + This can be turned off with ``--noribbon``. The cortical parcellation is obtained from subject/label/hemi.aparc.annot which should be based on the curvature.buckner40.filled.desikan_killiany.gcs atlas. The aseg is obtained from subject/mri/aseg.mgz and should be based on the RB40_talairach_2005-07-20.gca atlas. If these atlases are used, then the segmentations can be viewed with tkmedit and the - FreeSurferColorLUT.txt color table found in $FREESURFER_HOME. These - are the default atlases used by recon-all. + FreeSurferColorLUT.txt color table found in ``$FREESURFER_HOME``. These + are the default atlases used by ``recon-all``. Examples - ======== + -------- >>> from nipype.interfaces.freesurfer import Aparc2Aseg >>> aparc2aseg = Aparc2Aseg() >>> aparc2aseg.inputs.lh_white = 'lh.pial' @@ -3886,6 +3879,7 @@ class Aparc2Aseg(FSCommand): >>> aparc2aseg.inputs.rip_unknown = True >>> aparc2aseg.cmdline # doctest: +SKIP 'mri_aparc2aseg --labelwm --o aparc+aseg.mgz --rip-unknown --s subject_id' + """ _cmd = "mri_aparc2aseg" @@ -3947,13 +3941,14 @@ class Apas2Aseg(FSCommand): actual surface (this is not the case with aseg.mgz). Examples - ======== + -------- >>> from nipype.interfaces.freesurfer import Apas2Aseg >>> apas2aseg = Apas2Aseg() >>> apas2aseg.inputs.in_file = 'aseg.mgz' >>> apas2aseg.inputs.out_file = 'output.mgz' >>> apas2aseg.cmdline 'apas2aseg --i aseg.mgz --o output.mgz' + """ _cmd = "apas2aseg" @@ -3989,9 +3984,9 @@ class MRIsExpandInputSpec(FSTraitedSpec): position=-1, usedefault=True, desc=( - "Output surface file\n" - "If no path, uses directory of `in_file`\n" - 'If no path AND missing "lh." or "rh.", derive from `in_file`' + "Output surface file. " + "If no path, uses directory of ``in_file``. " + 'If no path AND missing "lh." 
or "rh.", derive from ``in_file``' ), ) thickness = traits.Bool( @@ -4002,7 +3997,7 @@ class MRIsExpandInputSpec(FSTraitedSpec): copyfile=False, desc=( 'Name of thickness file (implicit: "thickness")\n' - "If no path, uses directory of `in_file`\n" + "If no path, uses directory of ``in_file``\n" 'If no path AND missing "lh." or "rh.", derive from `in_file`' ), ) @@ -4011,8 +4006,8 @@ class MRIsExpandInputSpec(FSTraitedSpec): copyfile=False, desc=( 'Name of pial file (implicit: "pial")\n' - "If no path, uses directory of `in_file`\n" - 'If no path AND missing "lh." or "rh.", derive from `in_file`' + "If no path, uses directory of ``in_file``\n" + 'If no path AND missing "lh." or "rh.", derive from ``in_file``' ), ) sphere = traits.Str( diff --git a/nipype/interfaces/fsl/__init__.py b/nipype/interfaces/fsl/__init__.py index dd7b3d76d7..1bf8e0ada7 100644 --- a/nipype/interfaces/fsl/__init__.py +++ b/nipype/interfaces/fsl/__init__.py @@ -1,10 +1,11 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""The fsl module provides classes for interfacing with the `FSL -`_ command line tools. +""" +FSL is a comprehensive library of analysis tools for fMRI, MRI and DTI brain imaging data. -Top-level namespace for fsl. +The fsl module provides classes for interfacing with the `FSL +`_ command line tools. """ from .base import FSLCommand, Info, check_fsl, no_fsl, no_fsl_course_data diff --git a/nipype/interfaces/fsl/aroma.py b/nipype/interfaces/fsl/aroma.py index c40a285989..4a3eb32034 100644 --- a/nipype/interfaces/fsl/aroma.py +++ b/nipype/interfaces/fsl/aroma.py @@ -2,7 +2,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """This commandline module provides classes for interfacing with the -`ICA-AROMA.py`_ command line tool. +`ICA-AROMA.py `__ command line tool. """ from ..base import ( diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py index eeab08371e..eef38795c7 100644 --- a/nipype/interfaces/fsl/epi.py +++ b/nipype/interfaces/fsl/epi.py @@ -904,7 +904,7 @@ class Eddy(FSLCommand): """ Interface for FSL eddy, a tool for estimating and correcting eddy currents induced distortions. `User guide - `_ and + `__ and `more info regarding acqp file `_. @@ -1648,7 +1648,7 @@ class EddyQuad(FSLCommand): """ Interface for FSL eddy_quad, a tool for generating single subject reports and storing the quality assessment indices for each subject. - `User guide `_ + `User guide `__ Examples -------- diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index 071c834e14..32e0fc76f1 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -277,64 +277,64 @@ class DataSinkOutputSpec(TraitedSpec): # Custom DataSink class class DataSink(IOBase): - """ Generic datasink module to store structured outputs - - Primarily for use within a workflow. This interface allows arbitrary - creation of input attributes. The names of these attributes define the - directory structure to create for storage of the files or directories. - - The attributes take the following form: - - string[[.[@]]string[[.[@]]string]] ... + """ + Generic datasink module to store structured outputs. - where parts between [] are optional. + Primarily for use within a workflow. This interface allows arbitrary + creation of input attributes. The names of these attributes define the + directory structure to create for storage of the files or directories. 
- An attribute such as contrasts.@con will create a 'contrasts' directory - to store the results linked to the attribute. If the @ is left out, such - as in 'contrasts.con', a subdirectory 'con' will be created under - 'contrasts'. + The attributes take the following form:: - the general form of the output is:: + string[[.[@]]string[[.[@]]string]] ... - 'base_directory/container/parameterization/destloc/filename' + where parts between ``[]`` are optional. - destloc = string[[.[@]]string[[.[@]]string]] and - filename comesfrom the input to the connect statement. + An attribute such as contrasts.@con will create a 'contrasts' directory + to store the results linked to the attribute. If the ``@`` is left out, such + as in 'contrasts.con', a subdirectory 'con' will be created under + 'contrasts'. - .. warning:: + The general form of the output is:: - This is not a thread-safe node because it can write to a common - shared location. It will not complain when it overwrites a file. + 'base_directory/container/parameterization/destloc/filename' - .. note:: + ``destloc = string[[.[@]]string[[.[@]]string]]`` and + ``filename`` come from the input to the connect statement. - If both substitutions and regexp_substitutions are used, then - substitutions are applied first followed by regexp_substitutions. + .. warning:: - This interface **cannot** be used in a MapNode as the inputs are - defined only when the connect statement is executed. + This is not a thread-safe node because it can write to a common + shared location. It will not complain when it overwrites a file. - Examples - -------- + .. note:: - >>> ds = DataSink() - >>> ds.inputs.base_directory = 'results_dir' - >>> ds.inputs.container = 'subject' - >>> ds.inputs.structural = 'structural.nii' - >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii']) - >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii']) - >>> ds.run() # doctest: +SKIP + If both substitutions and regexp_substitutions are used, then + substitutions are applied first followed by regexp_substitutions. - To use DataSink in a MapNode, its inputs have to be defined at the - time the interface is created. + This interface **cannot** be used in a MapNode as the inputs are + defined only when the connect statement is executed. - >>> ds = DataSink(infields=['contasts.@con']) - >>> ds.inputs.base_directory = 'results_dir' - >>> ds.inputs.container = 'subject' - >>> ds.inputs.structural = 'structural.nii' - >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii']) - >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii']) - >>> ds.run() # doctest: +SKIP + Examples + -------- + >>> ds = DataSink() + >>> ds.inputs.base_directory = 'results_dir' + >>> ds.inputs.container = 'subject' + >>> ds.inputs.structural = 'structural.nii' + >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii']) + >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii']) + >>> ds.run() # doctest: +SKIP + + To use DataSink in a MapNode, its inputs have to be defined at the + time the interface is created. 
+ + >>> ds = DataSink(infields=['contasts.@con']) + >>> ds.inputs.base_directory = 'results_dir' + >>> ds.inputs.container = 'subject' + >>> ds.inputs.structural = 'structural.nii' + >>> setattr(ds.inputs, 'contrasts.@con', ['cont1.nii', 'cont2.nii']) + >>> setattr(ds.inputs, 'contrasts.alt', ['cont1a.nii', 'cont2a.nii']) + >>> ds.run() # doctest: +SKIP """ @@ -822,7 +822,7 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): False, usedefault=True, desc="Use anonymous connection to s3. If this is set to True, boto may print" - + " a urlopen error, but this does not prevent data from being downloaded.", + " a urlopen error, but this does not prevent data from being downloaded.", ) region = Str("us-east-1", usedefault=True, desc="Region of s3 bucket") bucket = Str(mandatory=True, desc="Amazon S3 bucket where your data is stored") @@ -855,33 +855,36 @@ class S3DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class S3DataGrabber(LibraryBaseInterface, IOBase): - """ Generic datagrabber module that wraps around glob in an - intelligent way for neuroimaging tasks to grab files from - Amazon S3 - - Works exactly like DataGrabber, except, you must specify an - S3 "bucket" and "bucket_path" to search for your data and a - "local_directory" to store the data. "local_directory" - should be a location on HDFS for Spark jobs. Additionally, - "template" uses regex style formatting, rather than the - glob-style found in the original DataGrabber. - - Examples - -------- - - >>> s3grab = S3DataGrabber(infields=['subj_id'], outfields=["func", "anat"]) - >>> s3grab.inputs.bucket = 'openneuro' - >>> s3grab.inputs.sort_filelist = True - >>> s3grab.inputs.template = '*' - >>> s3grab.inputs.anon = True - >>> s3grab.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/' - >>> s3grab.inputs.local_directory = '/tmp' - >>> s3grab.inputs.field_template = {'anat': '%s/anat/%s_T1w.nii.gz', - ... 'func': '%s/func/%s_task-simon_run-1_bold.nii.gz'} - >>> s3grab.inputs.template_args = {'anat': [['subj_id', 'subj_id']], - ... 'func': [['subj_id', 'subj_id']]} - >>> s3grab.inputs.subj_id = 'sub-01' - >>> s3grab.run() # doctest: +SKIP + """ + Pull data from an Amazon S3 Bucket. + + Generic datagrabber module that wraps around glob in an + intelligent way for neuroimaging tasks to grab files from + Amazon S3 + + Works exactly like DataGrabber, except, you must specify an + S3 "bucket" and "bucket_path" to search for your data and a + "local_directory" to store the data. "local_directory" + should be a location on HDFS for Spark jobs. Additionally, + "template" uses regex style formatting, rather than the + glob-style found in the original DataGrabber. + + Examples + -------- + >>> s3grab = S3DataGrabber(infields=['subj_id'], outfields=["func", "anat"]) + >>> s3grab.inputs.bucket = 'openneuro' + >>> s3grab.inputs.sort_filelist = True + >>> s3grab.inputs.template = '*' + >>> s3grab.inputs.anon = True + >>> s3grab.inputs.bucket_path = 'ds000101/ds000101_R2.0.0/uncompressed/' + >>> s3grab.inputs.local_directory = '/tmp' + >>> s3grab.inputs.field_template = {'anat': '%s/anat/%s_T1w.nii.gz', + ... 'func': '%s/func/%s_task-simon_run-1_bold.nii.gz'} + >>> s3grab.inputs.template_args = {'anat': [['subj_id', 'subj_id']], + ... 
'func': [['subj_id', 'subj_id']]} + >>> s3grab.inputs.subj_id = 'sub-01' + >>> s3grab.run() # doctest: +SKIP + """ input_spec = S3DataGrabberInputSpec @@ -1119,54 +1122,55 @@ class DataGrabberInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class DataGrabber(IOBase): - """ Generic datagrabber module that wraps around glob in an - intelligent way for neuroimaging tasks to grab files - + """ + Find files on a filesystem. - .. attention:: + Generic datagrabber module that wraps around glob in an + intelligent way for neuroimaging tasks to grab files - Doesn't support directories currently + .. important:: - Examples - -------- + Doesn't support directories currently - >>> from nipype.interfaces.io import DataGrabber + Examples + -------- + >>> from nipype.interfaces.io import DataGrabber - Pick all files from current directory + Pick all files from current directory - >>> dg = DataGrabber() - >>> dg.inputs.template = '*' + >>> dg = DataGrabber() + >>> dg.inputs.template = '*' - Pick file foo/foo.nii from current directory + Pick file foo/foo.nii from current directory - >>> dg.inputs.template = '%s/%s.dcm' - >>> dg.inputs.template_args['outfiles']=[['dicomdir','123456-1-1.dcm']] + >>> dg.inputs.template = '%s/%s.dcm' + >>> dg.inputs.template_args['outfiles']=[['dicomdir','123456-1-1.dcm']] - Same thing but with dynamically created fields + Same thing but with dynamically created fields - >>> dg = DataGrabber(infields=['arg1','arg2']) - >>> dg.inputs.template = '%s/%s.nii' - >>> dg.inputs.arg1 = 'foo' - >>> dg.inputs.arg2 = 'foo' + >>> dg = DataGrabber(infields=['arg1','arg2']) + >>> dg.inputs.template = '%s/%s.nii' + >>> dg.inputs.arg1 = 'foo' + >>> dg.inputs.arg2 = 'foo' - however this latter form can be used with iterables and iterfield in a - pipeline. + however this latter form can be used with iterables and iterfield in a + pipeline. - Dynamically created, user-defined input and output fields + Dynamically created, user-defined input and output fields - >>> dg = DataGrabber(infields=['sid'], outfields=['func','struct','ref']) - >>> dg.inputs.base_directory = '.' - >>> dg.inputs.template = '%s/%s.nii' - >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] - >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] - >>> dg.inputs.template_args['ref'] = [['sid','ref']] - >>> dg.inputs.sid = 's1' + >>> dg = DataGrabber(infields=['sid'], outfields=['func','struct','ref']) + >>> dg.inputs.base_directory = '.' + >>> dg.inputs.template = '%s/%s.nii' + >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] + >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] + >>> dg.inputs.template_args['ref'] = [['sid','ref']] + >>> dg.inputs.sid = 's1' - Change the template only for output field struct. The rest use the - general template + Change the template only for output field struct. The rest use the + general template - >>> dg.inputs.field_template = dict(struct='%s/struct.nii') - >>> dg.inputs.template_args['struct'] = [['sid']] + >>> dg.inputs.field_template = dict(struct='%s/struct.nii') + >>> dg.inputs.template_args['struct'] = [['sid']] """ @@ -1357,7 +1361,8 @@ class SelectFilesInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class SelectFiles(IOBase): - """Flexibly collect data from disk to feed into workflows. + """ + Flexibly collect data from disk to feed into workflows. 
This interface uses the {}-based string formatting syntax to plug values (possibly known only at workflow execution time) into string @@ -1369,7 +1374,6 @@ class SelectFiles(IOBase): Examples -------- - >>> import pprint >>> from nipype import SelectFiles, Node >>> templates={"T1": "{subject_id}/struct/T1.nii", @@ -1520,7 +1524,7 @@ class DataFinderInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class DataFinder(IOBase): - """Search for paths that match a given regular expression. Allows a less + r"""Search for paths that match a given regular expression. Allows a less proscriptive approach to gathering input files compared to DataGrabber. Will recursively search any subdirectories by default. This can be limited with the min/max depth options. @@ -1530,7 +1534,6 @@ class DataFinder(IOBase): Examples -------- - >>> from nipype.interfaces.io import DataFinder >>> df = DataFinder() >>> df.inputs.root_paths = '.' @@ -1803,11 +1806,10 @@ class FSSourceOutputSpec(TraitedSpec): class FreeSurferSource(IOBase): - """Generates freesurfer subject info from their directories + """Generates freesurfer subject info from their directories. Examples -------- - >>> from nipype.interfaces.io import FreeSurferSource >>> fs = FreeSurferSource() >>> #fs.inputs.subjects_dir = '.' @@ -1891,36 +1893,35 @@ class XNATSourceInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class XNATSource(LibraryBaseInterface, IOBase): - """ Generic XNATSource module that wraps around the pyxnat module in - an intelligent way for neuroimaging tasks to grab files and data - from an XNAT server. - - Examples - -------- - - >>> from nipype.interfaces.io import XNATSource - - Pick all files from current directory - - >>> dg = XNATSource() - >>> dg.inputs.template = '*' - - >>> dg = XNATSource(infields=['project','subject','experiment','assessor','inout']) - >>> dg.inputs.query_template = '/projects/%s/subjects/%s/experiments/%s' \ - '/assessors/%s/%s_resources/files' - >>> dg.inputs.project = 'IMAGEN' - >>> dg.inputs.subject = 'IMAGEN_000000001274' - >>> dg.inputs.experiment = '*SessionA*' - >>> dg.inputs.assessor = '*ADNI_MPRAGE_nii' - >>> dg.inputs.inout = 'out' + """ + Pull data from an XNAT server. - >>> dg = XNATSource(infields=['sid'],outfields=['struct','func']) - >>> dg.inputs.query_template = '/projects/IMAGEN/subjects/%s/experiments/*SessionA*' \ - '/assessors/*%s_nii/out_resources/files' - >>> dg.inputs.query_template_args['struct'] = [['sid','ADNI_MPRAGE']] - >>> dg.inputs.query_template_args['func'] = [['sid','EPI_faces']] - >>> dg.inputs.sid = 'IMAGEN_000000001274' + Generic XNATSource module that wraps around the pyxnat module in + an intelligent way for neuroimaging tasks to grab files and data + from an XNAT server. 
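+
+    The ``query_template`` is filled in with plain ``%``-substitution from the
+    declared infields before the query is handed to pyxnat; for illustration only,
+    using the values from the examples below:
+
+    >>> '/projects/%s/subjects/%s/experiments/%s' % (
+    ...     'IMAGEN', 'IMAGEN_000000001274', '*SessionA*')
+    '/projects/IMAGEN/subjects/IMAGEN_000000001274/experiments/*SessionA*'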
+ Examples + -------- + Pick all files from current directory + + >>> dg = XNATSource() + >>> dg.inputs.template = '*' + + >>> dg = XNATSource(infields=['project','subject','experiment','assessor','inout']) + >>> dg.inputs.query_template = '/projects/%s/subjects/%s/experiments/%s' \ + '/assessors/%s/%s_resources/files' + >>> dg.inputs.project = 'IMAGEN' + >>> dg.inputs.subject = 'IMAGEN_000000001274' + >>> dg.inputs.experiment = '*SessionA*' + >>> dg.inputs.assessor = '*ADNI_MPRAGE_nii' + >>> dg.inputs.inout = 'out' + + >>> dg = XNATSource(infields=['sid'],outfields=['struct','func']) + >>> dg.inputs.query_template = '/projects/IMAGEN/subjects/%s/experiments/*SessionA*' \ + '/assessors/*%s_nii/out_resources/files' + >>> dg.inputs.query_template_args['struct'] = [['sid','ADNI_MPRAGE']] + >>> dg.inputs.query_template_args['func'] = [['sid','EPI_faces']] + >>> dg.inputs.sid = 'IMAGEN_000000001274' """ @@ -2310,22 +2311,23 @@ class SQLiteSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class SQLiteSink(LibraryBaseInterface, IOBase): - """ Very simple frontend for storing values into SQLite database. + """ + Very simple frontend for storing values into SQLite database. - .. warning:: + .. warning:: - This is not a thread-safe node because it can write to a common - shared location. It will not complain when it overwrites a file. + This is not a thread-safe node because it can write to a common + shared location. It will not complain when it overwrites a file. - Examples - -------- + Examples + -------- - >>> sql = SQLiteSink(input_names=['subject_id', 'some_measurement']) - >>> sql.inputs.database_file = 'my_database.db' - >>> sql.inputs.table_name = 'experiment_results' - >>> sql.inputs.subject_id = 's1' - >>> sql.inputs.some_measurement = 11.4 - >>> sql.run() # doctest: +SKIP + >>> sql = SQLiteSink(input_names=['subject_id', 'some_measurement']) + >>> sql.inputs.database_file = 'my_database.db' + >>> sql.inputs.table_name = 'experiment_results' + >>> sql.inputs.subject_id = 's1' + >>> sql.inputs.some_measurement = 11.4 + >>> sql.run() # doctest: +SKIP """ @@ -2377,19 +2379,20 @@ class MySQLSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): class MySQLSink(IOBase): - """ Very simple frontend for storing values into MySQL database. + """ + Very simple frontend for storing values into MySQL database. - Examples - -------- + Examples + -------- - >>> sql = MySQLSink(input_names=['subject_id', 'some_measurement']) - >>> sql.inputs.database_name = 'my_database' - >>> sql.inputs.table_name = 'experiment_results' - >>> sql.inputs.username = 'root' - >>> sql.inputs.password = 'secret' - >>> sql.inputs.subject_id = 's1' - >>> sql.inputs.some_measurement = 11.4 - >>> sql.run() # doctest: +SKIP + >>> sql = MySQLSink(input_names=['subject_id', 'some_measurement']) + >>> sql.inputs.database_name = 'my_database' + >>> sql.inputs.table_name = 'experiment_results' + >>> sql.inputs.username = 'root' + >>> sql.inputs.password = 'secret' + >>> sql.inputs.subject_id = 's1' + >>> sql.inputs.some_measurement = 11.4 + >>> sql.run() # doctest: +SKIP """ @@ -2455,64 +2458,64 @@ class SSHDataGrabberInputSpec(DataGrabberInputSpec): class SSHDataGrabber(LibraryBaseInterface, DataGrabber): - """ Extension of DataGrabber module that downloads the file list and - optionally the files from a SSH server. The SSH operation must - not need user and password so an SSH agent must be active in - where this module is being run. 
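+
+    A note on the agent requirement above: this interface drives the connection
+    through paramiko, so a quick check that a usable agent is visible from Python
+    (an illustrative sketch, not part of the interface) is:
+
+    >>> import paramiko
+    >>> len(paramiko.Agent().get_keys()) > 0  # doctest: +SKIP
+    True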
- + """ + Extension of DataGrabber module that downloads the file list and + optionally the files from a SSH server. The SSH operation must + not need user and password so an SSH agent must be active in + where this module is being run. - .. attention:: - Doesn't support directories currently + .. attention:: - Examples - -------- + Doesn't support directories currently - >>> from nipype.interfaces.io import SSHDataGrabber - >>> dg = SSHDataGrabber() - >>> dg.inputs.hostname = 'test.rebex.net' - >>> dg.inputs.user = 'demo' - >>> dg.inputs.password = 'password' - >>> dg.inputs.base_directory = 'pub/example' + Examples + -------- + >>> from nipype.interfaces.io import SSHDataGrabber + >>> dg = SSHDataGrabber() + >>> dg.inputs.hostname = 'test.rebex.net' + >>> dg.inputs.user = 'demo' + >>> dg.inputs.password = 'password' + >>> dg.inputs.base_directory = 'pub/example' - Pick all files from the base directory + Pick all files from the base directory - >>> dg.inputs.template = '*' + >>> dg.inputs.template = '*' - Pick all files starting with "s" and a number from current directory + Pick all files starting with "s" and a number from current directory - >>> dg.inputs.template_expression = 'regexp' - >>> dg.inputs.template = 'pop[0-9].*' + >>> dg.inputs.template_expression = 'regexp' + >>> dg.inputs.template = 'pop[0-9].*' - Same thing but with dynamically created fields + Same thing but with dynamically created fields - >>> dg = SSHDataGrabber(infields=['arg1','arg2']) - >>> dg.inputs.hostname = 'test.rebex.net' - >>> dg.inputs.user = 'demo' - >>> dg.inputs.password = 'password' - >>> dg.inputs.base_directory = 'pub' - >>> dg.inputs.template = '%s/%s.txt' - >>> dg.inputs.arg1 = 'example' - >>> dg.inputs.arg2 = 'foo' + >>> dg = SSHDataGrabber(infields=['arg1','arg2']) + >>> dg.inputs.hostname = 'test.rebex.net' + >>> dg.inputs.user = 'demo' + >>> dg.inputs.password = 'password' + >>> dg.inputs.base_directory = 'pub' + >>> dg.inputs.template = '%s/%s.txt' + >>> dg.inputs.arg1 = 'example' + >>> dg.inputs.arg2 = 'foo' - however this latter form can be used with iterables and iterfield in a - pipeline. + however this latter form can be used with iterables and iterfield in a + pipeline. - Dynamically created, user-defined input and output fields + Dynamically created, user-defined input and output fields - >>> dg = SSHDataGrabber(infields=['sid'], outfields=['func','struct','ref']) - >>> dg.inputs.hostname = 'myhost.com' - >>> dg.inputs.base_directory = '/main_folder/my_remote_dir' - >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] - >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] - >>> dg.inputs.template_args['ref'] = [['sid','ref']] - >>> dg.inputs.sid = 's1' + >>> dg = SSHDataGrabber(infields=['sid'], outfields=['func','struct','ref']) + >>> dg.inputs.hostname = 'myhost.com' + >>> dg.inputs.base_directory = '/main_folder/my_remote_dir' + >>> dg.inputs.template_args['func'] = [['sid',['f3','f5']]] + >>> dg.inputs.template_args['struct'] = [['sid',['struct']]] + >>> dg.inputs.template_args['ref'] = [['sid','ref']] + >>> dg.inputs.sid = 's1' - Change the template only for output field struct. The rest use the - general template + Change the template only for output field struct. 
The rest use the + general template - >>> dg.inputs.field_template = dict(struct='%s/struct.nii') - >>> dg.inputs.template_args['struct'] = [['sid']] + >>> dg.inputs.field_template = dict(struct='%s/struct.nii') + >>> dg.inputs.template_args['struct'] = [['sid']] """ @@ -2801,26 +2804,25 @@ class JSONFileSink(IOBase): Entries already existing in in_dict will be overridden by matching entries dynamically added as inputs. - .. warning:: - - This is not a thread-safe node because it can write to a common - shared location. It will not complain when it overwrites a file. + .. warning:: - Examples - -------- + This is not a thread-safe node because it can write to a common + shared location. It will not complain when it overwrites a file. - >>> jsonsink = JSONFileSink(input_names=['subject_id', - ... 'some_measurement']) - >>> jsonsink.inputs.subject_id = 's1' - >>> jsonsink.inputs.some_measurement = 11.4 - >>> jsonsink.run() # doctest: +SKIP + Examples + -------- + >>> jsonsink = JSONFileSink(input_names=['subject_id', + ... 'some_measurement']) + >>> jsonsink.inputs.subject_id = 's1' + >>> jsonsink.inputs.some_measurement = 11.4 + >>> jsonsink.run() # doctest: +SKIP - Using a dictionary as input: + Using a dictionary as input: - >>> dictsink = JSONFileSink() - >>> dictsink.inputs.in_dict = {'subject_id': 's1', - ... 'some_measurement': 11.4} - >>> dictsink.run() # doctest: +SKIP + >>> dictsink = JSONFileSink() + >>> dictsink.inputs.in_dict = {'subject_id': 's1', + ... 'some_measurement': 11.4} + >>> dictsink.run() # doctest: +SKIP """ @@ -2898,13 +2900,11 @@ class BIDSDataGrabberInputSpec(DynamicTraitedSpec): class BIDSDataGrabber(LibraryBaseInterface, IOBase): - - """ BIDS datagrabber module that wraps around pybids to allow arbitrary + """BIDS datagrabber module that wraps around pybids to allow arbitrary querying of BIDS datasets. Examples -------- - By default, the BIDSDataGrabber fetches anatomical and functional images from a project, and makes BIDS entities (e.g. subject) available for filtering outputs. @@ -3025,7 +3025,7 @@ class ExportFileOutputSpec(TraitedSpec): class ExportFile(SimpleInterface): - """ Export a file to an absolute path + """Export a file to an absolute path. This interface copies an input file to a named output file. This is useful to save individual files to a specific location, @@ -3033,7 +3033,6 @@ class ExportFile(SimpleInterface): Examples -------- - >>> from nipype.interfaces.io import ExportFile >>> import os.path as op >>> ef = ExportFile() diff --git a/nipype/interfaces/matlab.py b/nipype/interfaces/matlab.py index 488635843e..59c36eb478 100644 --- a/nipype/interfaces/matlab.py +++ b/nipype/interfaces/matlab.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" General matlab interface code """ +"""Interfaces to run MATLAB scripts.""" import os from .. 
import config diff --git a/nipype/interfaces/meshfix.py b/nipype/interfaces/meshfix.py index d1689ad9b4..505426bfe2 100644 --- a/nipype/interfaces/meshfix.py +++ b/nipype/interfaces/meshfix.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" Fixes meshes: -""" +"""MeshFix corrects topological errors in polygonal meshes.""" import os.path as op from ..utils.filemanip import split_filename diff --git a/nipype/interfaces/minc/__init__.py b/nipype/interfaces/minc/__init__.py index c593ea998b..a69e38eeb2 100644 --- a/nipype/interfaces/minc/__init__.py +++ b/nipype/interfaces/minc/__init__.py @@ -1,7 +1,9 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""The minc module provides classes for interfacing with the `MINC +"""The MINC (McConnell Brain Imaging Centre, Montreal Neurological Institute) toolkit. + +The minc module provides classes for interfacing with the `MINC `_ command line tools. This module was written to work with MINC version 2.2.00. diff --git a/nipype/interfaces/minc/minc.py b/nipype/interfaces/minc/minc.py index b4dfa1dac8..14c29f7b1b 100644 --- a/nipype/interfaces/minc/minc.py +++ b/nipype/interfaces/minc/minc.py @@ -5,8 +5,7 @@ `_ command line tools. This module was written to work with MINC version 2.2.00. -Author: Carlo Hamalainen - http://carlo-hamalainen.net +Author: `Carlo Hamalainen `__ """ import glob import os @@ -1263,7 +1262,7 @@ class BBoxInputSpec(StdOutCommandLineInputSpec): xor=_xor_one_two, ) two_lines = traits.Bool( - desc="Output on two lines: start_x y z \n width_x y z", + desc="""Write output with two rows (start and width).""", argstr="-two_lines", xor=_xor_one_two, ) @@ -1298,13 +1297,13 @@ class BBox(StdOutCommandLine): Examples -------- - >>> from nipype.interfaces.minc import BBox >>> from nipype.interfaces.minc.testdata import nonempty_minc_data >>> file0 = nonempty_minc_data(0) >>> bbox = BBox(input_file=file0) >>> bbox.run() # doctest: +SKIP + """ input_spec = BBoxInputSpec diff --git a/nipype/interfaces/mipav/__init__.py b/nipype/interfaces/mipav/__init__.py index 2bdbfef78b..85cc052c1e 100644 --- a/nipype/interfaces/mipav/__init__.py +++ b/nipype/interfaces/mipav/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""MIPAV enables quantitative analysis and visualization of multimodal medical images.""" from .developer import ( JistLaminarVolumetricLayering, JistBrainMgdmSegmentation, diff --git a/nipype/interfaces/mipav/developer.py b/nipype/interfaces/mipav/developer.py index 9bc24b1a80..52e2b01b01 100644 --- a/nipype/interfaces/mipav/developer.py +++ b/nipype/interfaces/mipav/developer.py @@ -106,18 +106,17 @@ class JistLaminarVolumetricLayeringOutputSpec(TraitedSpec): class JistLaminarVolumetricLayering(SEMLikeCommandLine): - """title: Volumetric Layering + """Volumetric Layering. -category: Developer Tools + Builds a continuous layering of the cortex following distance-preserving or volume-preserving + models of cortical folding. -description: Builds a continuous layering of the cortex following distance-preserving or volume-preserving models of cortical folding. -Waehnert MD, Dinse J, Weiss M, Streicher MN, Waehnert P, Geyer S, Turner R, Bazin PL, Anatomically motivated modeling of cortical laminae, Neuroimage, 2013. 
+
+    References
+    ----------
+    Waehnert MD, Dinse J, Weiss M, Streicher MN, Waehnert P, Geyer S, Turner R, Bazin PL,
+    Anatomically motivated modeling of cortical laminae, Neuroimage, 2013.
 
-version: 3.0.RC
-
-contributor: Miriam Waehnert (waehnert@cbs.mpg.de) http://www.cbs.mpg.de/
-
-"""
+    """
 
     input_spec = JistLaminarVolumetricLayeringInputSpec
     output_spec = JistLaminarVolumetricLayeringOutputSpec
@@ -217,15 +216,12 @@ class JistBrainMgdmSegmentationOutputSpec(TraitedSpec):
 
 
 class JistBrainMgdmSegmentation(SEMLikeCommandLine):
-    """title: MGDM Whole Brain Segmentation
-
-category: Developer Tools
-
-description: Estimate brain structures from an atlas for a MRI dataset (multiple input combinations are possible).
+    """MGDM Whole Brain Segmentation.
 
-version: 2.0.RC
+    Estimate brain structures from an atlas for a MRI dataset (multiple input combinations
+    are possible).
 
-"""
+    """
 
     input_spec = JistBrainMgdmSegmentationInputSpec
     output_spec = JistBrainMgdmSegmentationOutputSpec
@@ -279,15 +275,7 @@ class JistLaminarProfileGeometryOutputSpec(TraitedSpec):
 
 
 class JistLaminarProfileGeometry(SEMLikeCommandLine):
-    """title: Profile Geometry
-
-category: Developer Tools
-
-description: Compute various geometric quantities for a cortical layers.
-
-version: 3.0.RC
-
-"""
+    """Compute various geometric quantities for cortical layers."""
 
     input_spec = JistLaminarProfileGeometryInputSpec
     output_spec = JistLaminarProfileGeometryOutputSpec
@@ -330,15 +318,7 @@ class JistLaminarProfileCalculatorOutputSpec(TraitedSpec):
 
 
 class JistLaminarProfileCalculator(SEMLikeCommandLine):
-    """title: Profile Calculator
-
-category: Developer Tools
-
-description: Compute various moments for intensities mapped along a cortical profile.
-
-version: 3.0.RC
-
-"""
+    """Compute various moments for intensities mapped along a cortical profile."""
 
     input_spec = JistLaminarProfileCalculatorInputSpec
     output_spec = JistLaminarProfileCalculatorOutputSpec
@@ -410,15 +390,7 @@ class MedicAlgorithmN3OutputSpec(TraitedSpec):
 
 
 class MedicAlgorithmN3(SEMLikeCommandLine):
-    """title: N3 Correction
-
-category: Developer Tools
-
-description: Non-parametric Intensity Non-uniformity Correction, N3, originally by J.G. Sled.
-
-version: 1.8.R
-
-"""
+    """Non-parametric Intensity Non-uniformity Correction, N3, originally by J.G. Sled."""
 
     input_spec = MedicAlgorithmN3InputSpec
     output_spec = MedicAlgorithmN3OutputSpec
@@ -458,15 +430,7 @@ class JistLaminarROIAveragingOutputSpec(TraitedSpec):
 
 
 class JistLaminarROIAveraging(SEMLikeCommandLine):
-    """title: Profile ROI Averaging
-
-category: Developer Tools
-
-description: Compute an average profile over a given ROI.
-
-version: 3.0.RC
-
-"""
+    """Compute an average profile over a given ROI."""
 
     input_spec = JistLaminarROIAveragingInputSpec
     output_spec = JistLaminarROIAveragingOutputSpec
@@ -639,18 +603,18 @@ class MedicAlgorithmLesionToadsOutputSpec(TraitedSpec):
 
 
 class MedicAlgorithmLesionToads(SEMLikeCommandLine):
-    """title: Lesion TOADS
+    """Algorithm for simultaneous brain structures and MS lesion segmentation of MS Brains.
 
-category: Developer Tools
+    The brain segmentation is topologically consistent and the algorithm can use multiple
+    MR sequences as input data.
 
-description: Algorithm for simulataneous brain structures and MS lesion segmentation of MS Brains. The brain segmentation is topologically consistent and the algorithm can use multiple MR sequences as input data.
-N. Shiee, P.-L. Bazin, A.Z. Ozturk, P.A. Calabresi, D.S. Reich, D.L.
Pham, "A Topology-Preserving Approach to the Segmentation of Brain Images with Multiple Sclerosis", NeuroImage, vol. 49, no. 2, pp. 1524-1535, 2010. + References + ---------- + N. Shiee, P.-L. Bazin, A.Z. Ozturk, P.A. Calabresi, D.S. Reich, D.L. Pham, + "A Topology-Preserving Approach to the Segmentation of Brain Images with Multiple Sclerosis", + NeuroImage, vol. 49, no. 2, pp. 1524-1535, 2010. -version: 1.9.R - -contributor: Navid Shiee (navid.shiee@nih.gov) http://iacl.ece.jhu.edu/~nshiee/ - -""" + """ input_spec = MedicAlgorithmLesionToadsInputSpec output_spec = MedicAlgorithmLesionToadsOutputSpec @@ -728,15 +692,11 @@ class JistBrainMp2rageSkullStrippingOutputSpec(TraitedSpec): class JistBrainMp2rageSkullStripping(SEMLikeCommandLine): - """title: MP2RAGE Skull Stripping - -category: Developer Tools - -description: Estimate a brain mask for a MP2RAGE dataset. At least a T1-weighted or a T1 map image is required. + """Estimate a brain mask for a MP2RAGE dataset. -version: 3.0.RC + At least a T1-weighted or a T1 map image is required. -""" + """ input_spec = JistBrainMp2rageSkullStrippingInputSpec output_spec = JistBrainMp2rageSkullStrippingOutputSpec @@ -804,18 +764,14 @@ class JistCortexSurfaceMeshInflationOutputSpec(TraitedSpec): class JistCortexSurfaceMeshInflation(SEMLikeCommandLine): - """title: Surface Mesh Inflation + """Inflates a cortical surface mesh. -category: Developer Tools + References + ---------- + D. Tosun, M. E. Rettmann, X. Han, X. Tao, C. Xu, S. M. Resnick, D. Pham, and J. L. Prince, + Cortical Surface Segmentation and Mapping, NeuroImage, vol. 23, pp. S108--S118, 2004. -description: Inflates a cortical surface mesh. -D. Tosun, M. E. Rettmann, X. Han, X. Tao, C. Xu, S. M. Resnick, D. Pham, and J. L. Prince, Cortical Surface Segmentation and Mapping, NeuroImage, vol. 23, pp. S108--S118, 2004. - -version: 3.0.RC - -contributor: Duygu Tosun - -""" + """ input_spec = JistCortexSurfaceMeshInflationInputSpec output_spec = JistCortexSurfaceMeshInflationOutputSpec @@ -861,17 +817,7 @@ class RandomVolOutputSpec(TraitedSpec): class RandomVol(SEMLikeCommandLine): - """title: Random Volume Generator - -category: Developer Tools - -description: Generate a random scalar volume. - -version: 1.12.RC - -documentation-url: http://www.nitrc.org/projects/jist/ - -""" + """Generate a volume of random scalars.""" input_spec = RandomVolInputSpec output_spec = RandomVolOutputSpec @@ -918,17 +864,11 @@ class MedicAlgorithmImageCalculatorOutputSpec(TraitedSpec): class MedicAlgorithmImageCalculator(SEMLikeCommandLine): - """title: Image Calculator - -category: Developer Tools - -description: Perform simple image calculator operations on two images. The operations include 'Add', 'Subtract', 'Multiply', and 'Divide' - -version: 1.10.RC + """Perform simple image calculator operations on two images. -documentation-url: http://www.iacl.ece.jhu.edu/ + The operations include 'Add', 'Subtract', 'Multiply', and 'Divide' -""" + """ input_spec = MedicAlgorithmImageCalculatorInputSpec output_spec = MedicAlgorithmImageCalculatorOutputSpec @@ -975,15 +915,7 @@ class JistBrainMp2rageDuraEstimationOutputSpec(TraitedSpec): class JistBrainMp2rageDuraEstimation(SEMLikeCommandLine): - """title: MP2RAGE Dura Estimation - -category: Developer Tools - -description: Filters a MP2RAGE brain image to obtain a probability map of dura matter. 
- -version: 3.0.RC - -""" + """Filters a MP2RAGE brain image to obtain a probability map of dura matter.""" input_spec = JistBrainMp2rageDuraEstimationInputSpec output_spec = JistBrainMp2rageDuraEstimationOutputSpec @@ -1029,15 +961,7 @@ class JistLaminarProfileSamplingOutputSpec(TraitedSpec): class JistLaminarProfileSampling(SEMLikeCommandLine): - """title: Profile Sampling - -category: Developer Tools - -description: Sample some intensity image along a cortical profile across layer surfaces. - -version: 3.0.RC - -""" + """Sample some intensity image along a cortical profile across layer surfaces.""" input_spec = JistLaminarProfileSamplingInputSpec output_spec = JistLaminarProfileSamplingOutputSpec @@ -1142,15 +1066,7 @@ class MedicAlgorithmMipavReorientOutputSpec(TraitedSpec): class MedicAlgorithmMipavReorient(SEMLikeCommandLine): - """title: Reorient Volume - -category: Developer Tools - -description: Reorient a volume to a particular anatomical orientation. - -version: .alpha - -""" + """Reorient a volume to a particular anatomical orientation.""" input_spec = MedicAlgorithmMipavReorientInputSpec output_spec = MedicAlgorithmMipavReorientOutputSpec @@ -1409,26 +1325,20 @@ class MedicAlgorithmSPECTRE2010OutputSpec(TraitedSpec): class MedicAlgorithmSPECTRE2010(SEMLikeCommandLine): - """title: SPECTRE 2010 - -category: Developer Tools - -description: Simple Paradigm for Extra-Cranial Tissue REmoval - -Algorithm Version: 1.6 -GUI Version: 1.10 + """SPECTRE 2010: Simple Paradigm for Extra-Cranial Tissue REmoval [1]_, [2]_. -A. Carass, M.B. Wheeler, J. Cuzzocreo, P.-L. Bazin, S.S. Bassett, and J.L. Prince, 'A Joint Registration and Segmentation Approach to Skull Stripping', Fourth IEEE International Symposium on Biomedical Imaging (ISBI 2007), Arlington, VA, April 12-15, 2007. -A. Carass, J. Cuzzocreo, M.B. Wheeler, P.-L. Bazin, S.M. Resnick, and J.L. Prince, 'Simple paradigm for extra-cerebral tissue removal: Algorithm and analysis', NeuroImage 56(4):1982-1992, 2011. + References + ---------- -version: 1.6.R + .. [1] A. Carass, M.B. Wheeler, J. Cuzzocreo, P.-L. Bazin, S.S. Bassett, and J.L. Prince, + 'A Joint Registration and Segmentation Approach to Skull Stripping', + Fourth IEEE International Symposium on Biomedical Imaging (ISBI 2007), Arlington, VA, + April 12-15, 2007. + .. [2] A. Carass, J. Cuzzocreo, M.B. Wheeler, P.-L. Bazin, S.M. Resnick, and J.L. Prince, + 'Simple paradigm for extra-cerebral tissue removal: Algorithm and analysis', + NeuroImage 56(4):1982-1992, 2011. -documentation-url: http://www.iacl.ece.jhu.edu/ - -contributor: Aaron Carass (aaron_carass@jhu.edu) http://www.iacl.ece.jhu.edu/ -Hanlin Wan (hanlinwan@gmail.com) - -""" + """ input_spec = MedicAlgorithmSPECTRE2010InputSpec output_spec = MedicAlgorithmSPECTRE2010OutputSpec @@ -1484,15 +1394,11 @@ class JistBrainPartialVolumeFilterOutputSpec(TraitedSpec): class JistBrainPartialVolumeFilter(SEMLikeCommandLine): - """title: Partial Volume Filter - -category: Developer Tools - -description: Filters an image for regions of partial voluming assuming a ridge-like model of intensity. + """Partial Volume Filter. -version: 2.0.RC + Filters an image for regions of partial voluming assuming a ridge-like model of intensity. 
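+
+    These auto-generated MIPAV/JIST wrappers expose their parameters as input
+    traits; a quick way to list them (an illustrative sketch, output elided):
+
+    >>> from nipype.interfaces import mipav
+    >>> mipav.JistBrainPartialVolumeFilter().help()  # doctest: +SKIP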
-""" + """ input_spec = JistBrainPartialVolumeFilterInputSpec output_spec = JistBrainPartialVolumeFilterOutputSpec @@ -1575,15 +1481,7 @@ class JistIntensityMp2rageMaskingOutputSpec(TraitedSpec): class JistIntensityMp2rageMasking(SEMLikeCommandLine): - """title: MP2RAGE Background Masking - -category: Developer Tools - -description: Estimate a background signal mask for a MP2RAGE dataset. - -version: 3.0.RC - -""" + """Estimate a background signal mask for a MP2RAGE dataset.""" input_spec = JistIntensityMp2rageMaskingInputSpec output_spec = JistIntensityMp2rageMaskingOutputSpec @@ -1628,17 +1526,11 @@ class MedicAlgorithmThresholdToBinaryMaskOutputSpec(TraitedSpec): class MedicAlgorithmThresholdToBinaryMask(SEMLikeCommandLine): - """title: Threshold to Binary Mask - -category: Developer Tools - -description: Given a volume and an intensity range create a binary mask for values within that range. - -version: 1.2.RC + """Threshold to Binary Mask. -documentation-url: http://www.iacl.ece.jhu.edu/ + Given a volume and an intensity range create a binary mask for values within that range. -""" + """ input_spec = MedicAlgorithmThresholdToBinaryMaskInputSpec output_spec = MedicAlgorithmThresholdToBinaryMaskOutputSpec diff --git a/nipype/interfaces/mne/__init__.py b/nipype/interfaces/mne/__init__.py index 8bf3db28ed..820780e54d 100644 --- a/nipype/interfaces/mne/__init__.py +++ b/nipype/interfaces/mne/__init__.py @@ -1,2 +1,3 @@ # -*- coding: utf-8 -*- +"""MNE is a software for exploring, visualizing, and analyzing human neurophysiological data.""" from .base import WatershedBEM diff --git a/nipype/interfaces/mrtrix/__init__.py b/nipype/interfaces/mrtrix/__init__.py index 917d576eda..3aafdc1db7 100644 --- a/nipype/interfaces/mrtrix/__init__.py +++ b/nipype/interfaces/mrtrix/__init__.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +"""MRTrix version 2 (DEPRECATED) -- tools to perform various types of diffusion MRI analyses.""" from .tracking import ( Tracks2Prob, FilterTracks, diff --git a/nipype/interfaces/mrtrix/tracking.py b/nipype/interfaces/mrtrix/tracking.py index c922c4fba4..b7465cdbf2 100644 --- a/nipype/interfaces/mrtrix/tracking.py +++ b/nipype/interfaces/mrtrix/tracking.py @@ -324,7 +324,7 @@ class StreamlineTrackInputSpec(CommandLineInputSpec): argstr="-number %d", desc="Sets the desired number of tracks." 
"The program will continue to generate tracks until this number of tracks have been selected and written to the output file" - "(default is 100 for *_STREAM methods, 1000 for *_PROB methods).", + "(default is 100 for ``*_STREAM`` methods, 1000 for ``*_PROB`` methods).", ) maximum_number_of_tracks = traits.Int( argstr="-maxnum %d", diff --git a/nipype/interfaces/mrtrix3/__init__.py b/nipype/interfaces/mrtrix3/__init__.py index 0dbe3bb872..2970918844 100644 --- a/nipype/interfaces/mrtrix3/__init__.py +++ b/nipype/interfaces/mrtrix3/__init__.py @@ -1,7 +1,7 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: # -*- coding: utf-8 -*- - +"""MRTrix3 provides software tools to perform various types of diffusion MRI analyses.""" from .utils import ( Mesh2PVE, Generate5tt, diff --git a/nipype/interfaces/mrtrix3/tracking.py b/nipype/interfaces/mrtrix3/tracking.py index e71d9cd37a..301f5deeff 100644 --- a/nipype/interfaces/mrtrix3/tracking.py +++ b/nipype/interfaces/mrtrix3/tracking.py @@ -40,7 +40,8 @@ class TractographyInputSpec(MRTrix3BaseInputSpec): "Tensor_Prob", usedefault=True, argstr="-algorithm %s", - desc="tractography algorithm to be used", + desc="Tractography algorithm to be used -- References:" + "[FACT]_, [iFOD1]_, [iFOD2]_, [Nulldist]_, [Tensor_Det]_, [Tensor_Prob]_.", ) # ROIs processing options @@ -311,8 +312,10 @@ class TractographyOutputSpec(TraitedSpec): class Tractography(MRTrix3Base): """ - Performs streamlines tractography after selecting the appropriate - algorithm. + Performs streamlines tractography after selecting the appropriate algorithm. + + References + ---------- .. [FACT] Mori, S.; Crain, B. J.; Chacko, V. P. & van Zijl, P. C. M. Three-dimensional tracking of axonal projections in the @@ -340,7 +343,6 @@ class Tractography(MRTrix3Base): Tracking Using the Wild Bootstrap With Diffusion Tensor MRI. IEEE Transactions on Medical Imaging, 2008, 27, 1268-1274 - Example ------- diff --git a/nipype/interfaces/niftyfit/__init__.py b/nipype/interfaces/niftyfit/__init__.py index b9d4725496..d945991fa0 100644 --- a/nipype/interfaces/niftyfit/__init__.py +++ b/nipype/interfaces/niftyfit/__init__.py @@ -1,12 +1,12 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ -The niftyfit module provides classes for interfacing with the `NiftyFit`_ -command line tools. +NiftyFit is a software package for multi-parametric model-fitting of 4D MRI. -Top-level namespace for niftyfit. -""" +The niftyfit module provides classes for interfacing with the `NiftyFit +`__ command line tools. +""" from .asl import FitAsl from .dwi import FitDwi, DwiTool from .qt1 import FitQt1 diff --git a/nipype/interfaces/niftyreg/__init__.py b/nipype/interfaces/niftyreg/__init__.py index 2ea7b95b26..1bc01a9ad5 100644 --- a/nipype/interfaces/niftyreg/__init__.py +++ b/nipype/interfaces/niftyreg/__init__.py @@ -2,12 +2,12 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ +NiftyReg is an open-source software for efficient medical image registration. + The niftyreg module provides classes for interfacing with the `NiftyReg `_ command line tools. -Top-level namespace for niftyreg. 
""" - from .base import get_custom_path from .reg import RegAladin, RegF3D from .regutils import ( diff --git a/nipype/interfaces/niftyseg/maths.py b/nipype/interfaces/niftyseg/maths.py index 0afea087a0..80ceca1da1 100644 --- a/nipype/interfaces/niftyseg/maths.py +++ b/nipype/interfaces/niftyseg/maths.py @@ -6,9 +6,6 @@ The maths module provides higher-level interfaces to some of the operations that can be performed with the niftysegmaths (seg_maths) command-line program. -Examples --------- -See the docstrings of the individual classes for examples. """ import os @@ -125,69 +122,43 @@ class UnaryMathsInput(MathsInput): argstr="-%s", position=4, mandatory=True, - desc="operation to perform", - ) + desc="""\ +Operation to perform: + + * sqrt - Square root of the image). + * exp - Exponential root of the image. + * log - Log of the image. + * recip - Reciprocal (1/I) of the image. + * abs - Absolute value of the image. + * bin - Binarise the image. + * otsu - Otsu thresholding of the current image. + * lconcomp - Take the largest connected component + * concomp6 - Label the different connected components with a 6NN kernel + * concomp26 - Label the different connected components with a 26NN kernel + * fill - Fill holes in binary object (e.g. fill ventricle in brain mask). + * euc - Euclidean distance transform + * tpmax - Get the time point with the highest value (binarise 4D probabilities) + * tmean - Mean value of all time points. + * tmax - Max value of all time points. + * tmin - Mean value of all time points. + * splitlab - Split the integer labels into multiple timepoints + * removenan - Remove all NaNs and replace then with 0 + * isnan - Binary image equal to 1 if the value is NaN and 0 otherwise + * subsamp2 - Subsample the image by 2 using NN sampling (qform and sform scaled) + * scl - Reset scale and slope info. + * 4to5 - Flip the 4th and 5th dimension. + * range - Reset the image range to the min max. + +""") class UnaryMaths(MathsCommand): - """Interface for executable seg_maths from NiftySeg platform. - - Interface to use any unary mathematical operations that can be performed - - with the seg_maths command-line program. - - See below for those operations:: - - sqrt - Square root of the image). - - exp - Exponential root of the image. - - log - Log of the image. - - recip - Reciprocal (1/I) of the image. - - abs - Absolute value of the image. - - bin - Binarise the image. - - otsu - Otsu thresholding of the current image. - - lconcomp - Take the largest connected component - - concomp6 - Label the different connected components with a 6NN kernel - - concomp26 - Label the different connected components with a 26NN kernel - - fill - Fill holes in binary object (e.g. fill ventricle in brain mask). - - euc - Euclidean distance trasnform - - tpmax - Get the time point with the highest value (binarise 4D \ -probabilities) - - tmean - Mean value of all time points. - - tmax - Max value of all time points. - - tmin - Mean value of all time points. - - splitlab - Split the integer labels into multiple timepoints - - removenan - Remove all NaNs and replace then with 0 - - isnan - Binary image equal to 1 if the value is NaN and 0 otherwise + """Unary mathematical operations. - subsamp2 - Subsample the image by 2 using NN sampling (qform and sform \ -scaled) - - scl - Reset scale and slope info. - - 4to5 - Flip the 4th and 5th dimension. - - range - Reset the image range to the min max. 
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__
 
     Examples
     --------
@@ -196,30 +167,35 @@ class UnaryMaths(MathsCommand):
     >>> unary = niftyseg.UnaryMaths()
     >>> unary.inputs.output_datatype = 'float'
     >>> unary.inputs.in_file = 'im1.nii'
+
     >>> # Test sqrt operation
     >>> unary_sqrt = copy.deepcopy(unary)
     >>> unary_sqrt.inputs.operation = 'sqrt'
     >>> unary_sqrt.cmdline
     'seg_maths im1.nii -sqrt -odt float im1_sqrt.nii'
     >>> unary_sqrt.run() # doctest: +SKIP
+
     >>> # Test abs operation
     >>> unary_abs = copy.deepcopy(unary)
     >>> unary_abs.inputs.operation = 'abs'
     >>> unary_abs.cmdline
     'seg_maths im1.nii -abs -odt float im1_abs.nii'
     >>> unary_abs.run() # doctest: +SKIP
+
     >>> # Test bin operation
     >>> unary_bin = copy.deepcopy(unary)
     >>> unary_bin.inputs.operation = 'bin'
     >>> unary_bin.cmdline
     'seg_maths im1.nii -bin -odt float im1_bin.nii'
     >>> unary_bin.run() # doctest: +SKIP
+
     >>> # Test otsu operation
     >>> unary_otsu = copy.deepcopy(unary)
     >>> unary_otsu.inputs.operation = 'otsu'
     >>> unary_otsu.cmdline
     'seg_maths im1.nii -otsu -odt float im1_otsu.nii'
     >>> unary_otsu.run() # doctest: +SKIP
+
     >>> # Test isnan operation
     >>> unary_isnan = copy.deepcopy(unary)
     >>> unary_isnan.inputs.operation = 'isnan'
@@ -257,7 +233,32 @@ class BinaryMathsInput(MathsInput):
         mandatory=True,
         argstr="-%s",
         position=4,
-        desc="operation to perform",
+        desc="""\
+Operation to perform:
+
+    * mul - <float/file> - Multiply image <float> value or by other image.
+    * div - <float/file> - Divide image by <float> or by other image.
+    * add - <float/file> - Add image by <float> or by other image.
+    * sub - <float/file> - Subtract image by <float> or by other image.
+    * pow - <float> - Image to the power of <float>.
+    * thr - <float> - Threshold the image below <float>.
+    * uthr - <float> - Threshold image above <float>.
+    * smo - <float> - Gaussian smoothing by std <float> (in voxels and up to 4-D).
+    * edge - <float> - Calculate the edges of the image using a threshold <float>.
+    * sobel3 - <float> - Calculate the edges of all timepoints using a Sobel filter
+      with a 3x3x3 kernel and applying <float> gaussian smoothing.
+    * sobel5 - <float> - Calculate the edges of all timepoints using a Sobel filter
+      with a 5x5x5 kernel and applying <float> gaussian smoothing.
+    * min - <file> - Get the min per voxel between <current> and <file>.
+    * smol - <float> - Gaussian smoothing of a 3D label image.
+    * geo - <float/file> - Geodesic distance according to the speed function <float/file>
+    * llsnorm <file_norm> - Linear LS normalisation between current and <file_norm>
+    * masknan <file_norm> - Assign everything outside the mask (mask==0) with NaNs
+    * hdr_copy <file> - Copy header from working image to <file> and save in <output>.
+    * splitinter <x/y/z> - Split interleaved slices in direction <x/y/z>
+      into separate time points
+
+""",
     )
 
     operand_file = File(
         exists=True,
@@ -291,61 +292,12 @@ class BinaryMathsInput(MathsInput):
 
 
 class BinaryMaths(MathsCommand):
-    """Interface for executable seg_maths from NiftySeg platform.
-
-    Interface to use any binary mathematical operations that can be performed
-
-    with the seg_maths command-line program.
-
-    See below for those operations::
-
-        mul - - Multiply image value or by other image.
-
-        div - - Divide image by or by other image.
-
-        add - - Add image by or by other image.
-
-        sub - - Subtract image by or by other image.
-
-        pow - - Image to the power of .
-
-        thr - - Threshold the image below .
-
-        uthr - - Threshold image above .
-
-        smo - - Gaussian smoothing by std (in voxels and up to \
-4-D).
-
-        edge - - Calculate the edges of the image using a threshold <\
-float>.
+    """Binary mathematical operations.
 
-        sobel3 - - Calculate the edges of all timepoints using a Sobel \
-filter with a 3x3x3 kernel and applying gaussian smoothing.
-
-        sobel5 - - Calculate the edges of all timepoints using a Sobel \
-filter with a 5x5x5 kernel and applying gaussian smoothing.
-
-        min - - Get the min per voxel between and .
-
-        smol - - Gaussian smoothing of a 3D label image.
-
-        geo - - Geodesic distance according to the speed function \
-
-
-        llsnorm - Linear LS normalisation between current and \
-
-
-        masknan - Assign everything outside the mask (mask==0) \
-with NaNs
-
-        hdr_copy - Copy header from working image to and save in \
-.
-
-        splitinter - Split interleaved slices in direction into \
-separate time points
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__
 
     Examples
     --------
@@ -354,6 +306,7 @@ class BinaryMaths(MathsCommand):
     >>> binary = niftyseg.BinaryMaths()
     >>> binary.inputs.in_file = 'im1.nii'
     >>> binary.inputs.output_datatype = 'float'
+
     >>> # Test sub operation
     >>> binary_sub = copy.deepcopy(binary)
     >>> binary_sub.inputs.operation = 'sub'
@@ -361,6 +314,7 @@
     >>> binary_sub.cmdline
     'seg_maths im1.nii -sub im2.nii -odt float im1_sub.nii'
     >>> binary_sub.run() # doctest: +SKIP
+
     >>> # Test mul operation
     >>> binary_mul = copy.deepcopy(binary)
     >>> binary_mul.inputs.operation = 'mul'
@@ -368,6 +322,7 @@
     >>> binary_mul.cmdline
     'seg_maths im1.nii -mul 2.00000000 -odt float im1_mul.nii'
     >>> binary_mul.run() # doctest: +SKIP
+
     >>> # Test llsnorm operation
     >>> binary_llsnorm = copy.deepcopy(binary)
     >>> binary_llsnorm.inputs.operation = 'llsnorm'
@@ -375,6 +330,7 @@
     >>> binary_llsnorm.cmdline
     'seg_maths im1.nii -llsnorm im2.nii -odt float im1_llsnorm.nii'
     >>> binary_llsnorm.run() # doctest: +SKIP
+
     >>> # Test splitinter operation
     >>> binary_splitinter = copy.deepcopy(binary)
     >>> binary_splitinter.inputs.operation = 'splitinter'
@@ -440,8 +396,17 @@ class BinaryMathsInputInteger(MathsInput):
         mandatory=True,
         argstr="-%s",
         position=4,
-        desc="operation to perform",
-    )
+        desc="""\
+Operation to perform:
+
+    * equal - <int> - Get voxels equal to <int>
+    * dil - <int> - Dilate the image <int> times (in voxels).
+    * ero - <int> - Erode the image <int> times (in voxels).
+    * tp - <int> - Extract time point <int>
+    * crop - <int> - Crop <int> voxels around each 3D volume.
+    * pad - <int> - Pad <int> voxels with NaN value around each 3D volume.
+
+""")
 
     operand_value = traits.Int(
         argstr="%d",
@@ -452,28 +417,12 @@ class BinaryMathsInteger(MathsCommand):
-    """Interface for executable seg_maths from NiftySeg platform.
-
-    Interface to use any integer mathematical operations that can be performed
-
-    with the seg_maths command-line program.
-
-    See below for those operations:: (requiring integer values)
-
-        equal - - Get voxels equal to
-
-        dil - - Dilate the image times (in voxels).
+    """Integer mathematical operations.
-
-        ero - - Erode the image times (in voxels).
-
-        tp - - Extract time point
-
-        crop - - Crop voxels around each 3D volume.
-
-        pad - - Pad voxels with NaN value around each 3D volume.
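The integer-valued operations follow the same pattern; a hedged sketch (same doctest-style
assumptions as above -- nipype importable, ``im1.nii`` present in the working directory)::

    from nipype.interfaces import niftyseg

    dil = niftyseg.BinaryMathsInteger()
    dil.inputs.in_file = "im1.nii"
    dil.inputs.output_datatype = "float"
    dil.inputs.operation = "dil"              # dilate the image ...
    dil.inputs.operand_value = 2              # ... twice, in voxels
    print(dil.cmdline)                        # seg_maths im1.nii -dil 2 -odt float im1_dil.nii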
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__
 
     Examples
     --------
@@ -519,8 +468,14 @@ class TupleMathsInput(MathsInput):
         mandatory=True,
         argstr="-%s",
         position=4,
-        desc="operation to perform",
-    )
+        desc="""\
+Operation to perform:
+
+    * lncc <file> <std> Local CC between current img and <file> on a kernel with <std>
+    * lssd <file> <std> Local SSD between current img and <file> on a kernel with <std>
+    * lltsnorm <file_norm> <float> Linear LTS normalisation assuming <float> percent outliers
+
+""")
 
     operand_file1 = File(
         exists=True,
@@ -552,25 +507,12 @@ class TupleMaths(MathsCommand):
-    """Interface for executable seg_maths from NiftySeg platform.
-
-    Interface to use any tuple mathematical operations that can be performed
-
-    with the seg_maths command-line program.
-
-    See below for those operations::
+    """Mathematical operations on tuples.
 
-        lncc Local CC between current img and on a kernel \
-with
-
-        lssd Local SSD between current img and on a kernel \
-with
-
-        lltsnorm Linear LTS normalisation assuming \
-percent outliers
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__
 
     Examples
     --------
@@ -604,9 +546,9 @@ class TupleMaths(MathsCommand):
     >>> tuple_lltsnorm.inputs.operand_file1 = 'im2.nii'
     >>> tuple_lltsnorm.inputs.operand_value2 = 0.01
     >>> tuple_lltsnorm.cmdline
-    'seg_maths im1.nii -lltsnorm im2.nii 0.01000000 -odt float \
-im1_lltsnorm.nii'
+    'seg_maths im1.nii -lltsnorm im2.nii 0.01000000 -odt float im1_lltsnorm.nii'
     >>> tuple_lltsnorm.run() # doctest: +SKIP
+
     """
 
     input_spec = TupleMathsInput
@@ -616,27 +558,19 @@ class MergeInput(MathsInput):
     """Input Spec for seg_maths merge operation."""
 
     dimension = traits.Int(mandatory=True, desc="Dimension to merge the images.")
-
-    desc = "List of images to merge to the working image ."
     merge_files = traits.List(
-        File(exists=True), argstr="%s", mandatory=True, position=4, desc=desc
+        File(exists=True), argstr="%s", mandatory=True, position=4,
+        desc="List of images to merge to the working image."
    )
 
 
 class Merge(MathsCommand):
-    """Interface for executable seg_maths from NiftySeg platform.
+    """Merge image files.
-
-    Interface to use the merge operation that can be performed
-
-    with the seg_maths command-line program.
-
-    See below for this option::
-
-        merge Merge images and the working image in the \
- dimension
-
-    `Source code `_ |
-    `Documentation `_
+
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__
 
     Examples
     --------
diff --git a/nipype/interfaces/niftyseg/stats.py b/nipype/interfaces/niftyseg/stats.py
index 611f293b42..4d9e598ddf 100644
--- a/nipype/interfaces/niftyseg/stats.py
+++ b/nipype/interfaces/niftyseg/stats.py
@@ -101,59 +101,39 @@ class UnaryStatsInput(StatsInput):
         argstr="-%s",
         position=4,
         mandatory=True,
-        desc="operation to perform",
-    )
+        desc="""\
+Operation to perform:
+
+    * r - The range of all voxels.
+    * R - The robust range (assuming 2% outliers on both sides) of all voxels
+    * a - Average of all voxels
+    * s - Standard deviation of all voxels
+    * v - Volume of all voxels above 0 (<# voxels> * <volume per voxel>)
+    * vl - Volume of each integer label (<# voxels per label> x <volume per voxel>)
+    * vp - Volume of all probabilistic voxels (sum(<in>) x <volume per voxel>)
+    * n - Count of all voxels above 0 (<# voxels>)
+    * np - Sum of all fuzzy voxels (sum(<in>))
+    * e - Entropy of all voxels
+    * ne - Normalized entropy of all voxels
+    * x - Location (i j k x y z) of the smallest value in the image
+    * X - Location (i j k x y z) of the largest value in the image
+    * c - Location (i j k x y z) of the centre of mass of the object
+    * B - Bounding box of all nonzero voxels [ xmin xsize ymin ysize zmin zsize ]
+    * xvox - Output the number of voxels in the x direction.
+      Replace x with y/z for other directions.
+    * xdim - Output the voxel dimension in the x direction.
+      Replace x with y/z for other directions.
+
+""")
 
 
 class UnaryStats(StatsCommand):
-    """
-    Interface for executable seg_stats from NiftySeg platform.
-
-    Interface to use any unary statistical operations that can be performed
-
-    with the seg_stats command-line program.
-
-    See below for those operations::
-
-        r - The range of all voxels.
-
-        R - The robust range (assuming 2% outliers on both sides) of all voxels
-
-        a - Average of all voxels
-
-        s - Standard deviation of all voxels
-
-        v - Volume of all voxels above 0 (<# voxels> * )
-
-        vl - Volume of each integer label (<# voxels per label> * \
-)
-
-        vp - Volume of all probabilsitic voxels (sum() * )
-
-        n - Count of all voxels above 0 (<# voxels>)
-
-        np - Sum of all fuzzy voxels (sum())
-
-        e - Entropy of all voxels
-
-        ne - Normalized entropy of all voxels
-
-        x - Location (i j k x y z) of the smallest value in the image
-
-        X - Location (i j k x y z) of the largest value in the image
-
-        c - Location (i j k x y z) of the centre of mass of the object
-
-        B - Bounding box of all nonzero voxels [ xmin xsize ymin ysize zmin zsize ]
-
-        xvox - Output the number of voxels in the x direction. Replace x with \
-y/z for other directions.
+    """Unary statistical operations.
-
-        xdim - Output the voxel dimention in the x direction. Replace x with \
-y/z for other directions.
-
-    `Source code `_ |
-    `Documentation `_
+    See Also
+    --------
+    `Source code `__ --
+    `Documentation `__
 
     Examples
     --------
@@ -161,18 +141,21 @@ class UnaryStats(StatsCommand):
     >>> import copy
     >>> from nipype.interfaces import niftyseg
     >>> unary = niftyseg.UnaryStats()
     >>> unary.inputs.in_file = 'im1.nii'
+
     >>> # Test v operation
     >>> unary_v = copy.deepcopy(unary)
     >>> unary_v.inputs.operation = 'v'
     >>> unary_v.cmdline
     'seg_stats im1.nii -v'
     >>> unary_v.run() # doctest: +SKIP
+
     >>> # Test vl operation
     >>> unary_vl = copy.deepcopy(unary)
     >>> unary_vl.inputs.operation = 'vl'
     >>> unary_vl.cmdline
     'seg_stats im1.nii -vl'
     >>> unary_vl.run() # doctest: +SKIP
+
     >>> # Test x operation
     >>> unary_x = copy.deepcopy(unary)
     >>> unary_x.inputs.operation = 'x'
@@ -202,8 +185,21 @@ class BinaryStatsInput(StatsInput):
         mandatory=True,
         argstr="-%s",
         position=4,
-        desc="operation to perform",
-    )
+        desc="""\
+Operation to perform:
+
+    * p - <float> - The <float>th percentile of all voxels intensity (float=[0,100])
+    * sa - <ax> - Average of all voxels
+    * ss - <ax> - Standard deviation of all voxels
+    * svp - <float> - Volume of all probabilistic voxels (sum(<in>) x <volume per voxel>)
+    * al - <in2> - Average value in <in> for each label in <in2>
+    * d - <in2> - Calculate the Dice score between all classes in <in> and <in2>
+    * ncc - <in2> - Normalized cross correlation between <in> and <in2>
+    * nmi - <in2> - Normalized Mutual Information between <in> and <in2>
+    * Vl - <csv> - Volume of each integer label <in>. Save to <csv> file.
+    * Nl - <csv> - Count of each label <in>. Save to <csv> file.
+
+""")
 
     operand_file = File(
         exists=True,
@@ -224,40 +220,12 @@ class BinaryStats(StatsCommand):
-    """
-    Interface for executable seg_stats from NiftySeg platform.
-
-    Interface to use any binary statistical operations that can be performed
-
-    with the seg_stats command-line program.
-
-    See below for those operations::
-
-        p - - The th percentile of all voxels intensity \
-(float=[0,100])
+    """Binary statistical operations.
 
-        sa - - Average of all voxels
-
-        ss - - Standard deviation of all voxels
-
-        svp - - Volume of all probabilsitic voxels (sum() * \
-)
-
-        al - - Average value in for each label in
-
-        d - - Calculate the Dice score between all classes in \
-and
-
-        ncc - - Normalized cross correlation between and
-
-        nmi - - Normalized Mutual Information between and
-
-        Vl - - Volume of each integer label . Save to file.
-
-        Nl - - Count of each label . Save to file.
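A hedged sketch of the binary statistical operations (same doctest-style assumptions;
``im1.nii`` and ``im2.nii`` are placeholder file names that must exist)::

    from nipype.interfaces import niftyseg

    stats = niftyseg.BinaryStats()
    stats.inputs.in_file = "im1.nii"
    stats.inputs.operation = "ncc"            # normalized cross correlation
    stats.inputs.operand_file = "im2.nii"
    print(stats.cmdline)                      # seg_stats im1.nii -ncc im2.nii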
- - `Source code `_ | - `Documentation `_ + See Also + -------- + `Source code `__ -- + `Documentation `__ Examples -------- diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py index 82da210fac..68f88b51e7 100644 --- a/nipype/interfaces/nilearn.py +++ b/nipype/interfaces/nilearn.py @@ -1,9 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -""" -Algorithms to compute statistics on :abbr:`fMRI (functional MRI)` -""" +"""Nilearn is a Python module for fast and easy statistical learning on NeuroImaging data.""" import os import numpy as np diff --git a/nipype/interfaces/nipy/__init__.py b/nipype/interfaces/nipy/__init__.py index 19d030b61a..72317edae0 100644 --- a/nipype/interfaces/nipy/__init__.py +++ b/nipype/interfaces/nipy/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""NIPY is a python project for analysis of structural and functional neuroimaging data.""" from .model import FitGLM, EstimateContrast from .preprocess import ComputeMask, SpaceTimeRealigner from .utils import Similarity diff --git a/nipype/interfaces/nitime/__init__.py b/nipype/interfaces/nitime/__init__.py index f237859eb6..f3fc84079a 100644 --- a/nipype/interfaces/nitime/__init__.py +++ b/nipype/interfaces/nitime/__init__.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: - +"""Nitime is a library for time-series analysis of data from neuroscience experiments.""" from .analysis import ( CoherenceAnalyzerInputSpec, CoherenceAnalyzerOutputSpec, diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py index 93787c1964..8abc0db75a 100644 --- a/nipype/interfaces/nitime/analysis.py +++ b/nipype/interfaces/nitime/analysis.py @@ -2,7 +2,6 @@ # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ - Interfaces to functionality from nitime for time-series analysis of fmri data - nitime.analysis.CoherenceAnalyzer: Coherence/y @@ -48,7 +47,7 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): # If you gave just a file name, you need to specify the sampling_rate: TR = traits.Float( - desc=("The TR used to collect the data" "in your csv file ") + desc=("The TR used to collect the data in your csv file ") ) in_TS = traits.Any(desc="a nitime TimeSeries object") @@ -87,11 +86,11 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): ) output_csv_file = File( - desc="File to write outputs (coherence,time-delay) with file-names: file_name_ {coherence,timedelay}" + desc="File to write outputs (coherence,time-delay) with file-names: ``file_name_{coherence,timedelay}``" ) output_figure_file = File( - desc="File to write output figures (coherence,time-delay) with file-names: file_name_{coherence,timedelay}. Possible formats: .png,.svg,.pdf,.jpg,..." + desc="File to write output figures (coherence,time-delay) with file-names: ``file_name_{coherence,timedelay}``. Possible formats: .png,.svg,.pdf,.jpg,..." 
) figure_type = traits.Enum( @@ -109,19 +108,19 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): class CoherenceAnalyzerOutputSpec(TraitedSpec): coherence_array = traits.Array( - desc=("The pairwise coherence values" "between the ROIs") + desc=("The pairwise coherence values between the ROIs") ) timedelay_array = traits.Array( - desc=("The pairwise time delays between the" "ROIs (in seconds)") + desc=("The pairwise time delays between the ROIs (in seconds)") ) coherence_csv = File( - desc=("A csv file containing the pairwise " "coherence values") + desc=("A csv file containing the pairwise coherence values") ) timedelay_csv = File( - desc=("A csv file containing the pairwise " "time delay values") + desc=("A csv file containing the pairwise time delay values") ) coherence_fig = File(desc=("Figure representing coherence values")) @@ -129,6 +128,7 @@ class CoherenceAnalyzerOutputSpec(TraitedSpec): class CoherenceAnalyzer(NitimeBaseInterface): + """Wraps nitime.analysis.CoherenceAnalyzer: Coherence/y""" input_spec = CoherenceAnalyzerInputSpec output_spec = CoherenceAnalyzerOutputSpec diff --git a/nipype/interfaces/petpvc.py b/nipype/interfaces/petpvc.py index c59b7f2777..311f770848 100644 --- a/nipype/interfaces/petpvc.py +++ b/nipype/interfaces/petpvc.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: - +"""PETPVC is a toolbox for partial volume correction in positron emission tomography.""" import os from .base import ( @@ -48,7 +48,34 @@ class PETPVCInputSpec(CommandLineInputSpec): desc="Mask image file", exists=True, mandatory=True, argstr="-m %s" ) pvc = traits.Enum( - pvc_methods, desc="Desired PVC method", mandatory=True, argstr="-p %s" + pvc_methods, mandatory=True, argstr="-p %s", + desc="""\ +Desired PVC method: + + * Geometric transfer matrix -- ``GTM`` + * Labbe approach -- ``LABBE`` + * Richardson-Lucy -- ``RL`` + * Van-Cittert -- ``VC`` + * Region-based voxel-wise correction -- ``RBV`` + * RBV with Labbe -- ``LABBE+RBV`` + * RBV with Van-Cittert -- ``RBV+VC`` + * RBV with Richardson-Lucy -- ``RBV+RL`` + * RBV with Labbe and Van-Cittert -- ``LABBE+RBV+VC`` + * RBV with Labbe and Richardson-Lucy -- ``LABBE+RBV+RL`` + * Multi-target correction -- ``MTC`` + * MTC with Labbe -- ``LABBE+MTC`` + * MTC with Van-Cittert -- ``MTC+VC`` + * MTC with Richardson-Lucy -- ``MTC+RL`` + * MTC with Labbe and Van-Cittert -- ``LABBE+MTC+VC`` + * MTC with Labbe and Richardson-Lucy -- ``LABBE+MTC+RL`` + * Iterative Yang -- ``IY`` + * Iterative Yang with Van-Cittert -- ``IY+VC`` + * Iterative Yang with Richardson-Lucy -- ``IY+RL`` + * Muller Gartner -- ``MG`` + * Muller Gartner with Van-Cittert -- ``MG+VC`` + * Muller Gartner with Richardson-Lucy -- ``MG+RL`` + +""" ) fwhm_x = traits.Float( desc="The full-width at half maximum in mm along x-axis", @@ -93,75 +120,11 @@ class PETPVCOutputSpec(TraitedSpec): class PETPVC(CommandLine): - """ Use PETPVC for partial volume correction of PET images. + """Use PETPVC for partial volume correction of PET images. - PETPVC is a software from the Nuclear Medicine Department + PETPVC ([1]_, [2]_) is a software from the Nuclear Medicine Department of the UCL University Hospital, London, UK. - Its source code is here: https://github.com/UCL/PETPVC - - The methods that it implement are explained here: - K. Erlandsson, I. Buvat, P. H. Pretorius, B. A. Thomas, and B. F. 
Hutton, - "A review of partial volume correction techniques for emission tomography - and their applications in neurology, cardiology and oncology," Phys. Med. - Biol., vol. 57, no. 21, p. R119, 2012. - - Its command line help shows this: - - -i --input < filename > - = PET image file - -o --output < filename > - = Output file - [ -m --mask < filename > ] - = Mask image file - -p --pvc < keyword > - = Desired PVC method - -x < X > - = The full-width at half maximum in mm along x-axis - -y < Y > - = The full-width at half maximum in mm along y-axis - -z < Z > - = The full-width at half maximum in mm along z-axis - [ -d --debug ] - = Prints debug information - [ -n --iter [ Val ] ] - = Number of iterations - With: Val (Default = 10) - [ -k [ Val ] ] - = Number of deconvolution iterations - With: Val (Default = 10) - [ -a --alpha [ aval ] ] - = Alpha value - With: aval (Default = 1.5) - [ -s --stop [ stopval ] ] - = Stopping criterion - With: stopval (Default = 0.01) - - Technique - keyword - ------------------- - - Geometric transfer matrix - "GTM" - - Labbe approach - "LABBE" - - Richardson-Lucy - "RL" - - Van-Cittert - "VC" - - Region-based voxel-wise correction - "RBV" - - RBV with Labbe - "LABBE+RBV" - - RBV with Van-Cittert - "RBV+VC" - - RBV with Richardson-Lucy - "RBV+RL" - - RBV with Labbe and Van-Cittert - "LABBE+RBV+VC" - - RBV with Labbe and Richardson-Lucy- "LABBE+RBV+RL" - - Multi-target correction - "MTC" - - MTC with Labbe - "LABBE+MTC" - - MTC with Van-Cittert - "MTC+VC" - - MTC with Richardson-Lucy - "MTC+RL" - - MTC with Labbe and Van-Cittert - "LABBE+MTC+VC" - - MTC with Labbe and Richardson-Lucy- "LABBE+MTC+RL" - - Iterative Yang - "IY" - - Iterative Yang with Van-Cittert - "IY+VC" - - Iterative Yang with Richardson-Lucy - "IY+RL" - - Muller Gartner - "MG" - - Muller Gartner with Van-Cittert - "MG+VC" - - Muller Gartner with Richardson-Lucy - "MG+RL" - Examples -------- >>> from ..testing import example_data @@ -175,6 +138,15 @@ class PETPVC(CommandLine): >>> pvc.inputs.fwhm_y = 2.0 >>> pvc.inputs.fwhm_z = 2.0 >>> outs = pvc.run() #doctest: +SKIP + + References + ---------- + .. [1] K. Erlandsson, I. Buvat, P. H. Pretorius, B. A. Thomas, and B. F. Hutton, + "A review of partial volume correction techniques for emission tomography + and their applications in neurology, cardiology and oncology," Phys. Med. + Biol., vol. 57, no. 21, p. R119, 2012. + .. 
[2] https://github.com/UCL/PETPVC + """ input_spec = PETPVCInputSpec diff --git a/nipype/interfaces/quickshear.py b/nipype/interfaces/quickshear.py index 7725abfeb5..b7409fdbf3 100644 --- a/nipype/interfaces/quickshear.py +++ b/nipype/interfaces/quickshear.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- -""" Quickshear is a simple geometric defacing algorithm -""" +"""Quickshear is a simple geometric defacing algorithm.""" from .base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File from ..external.due import BibTeX diff --git a/nipype/interfaces/semtools/__init__.py b/nipype/interfaces/semtools/__init__.py index 243e3a43a0..a09c926c37 100644 --- a/nipype/interfaces/semtools/__init__.py +++ b/nipype/interfaces/semtools/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- +"""SEM Tools are useful tools for Structural Equation Modeling.""" from .diffusion import * from .featurecreator import GenerateCsfClippedFromClassifiedImage from .segmentation import * diff --git a/nipype/interfaces/slicer/__init__.py b/nipype/interfaces/slicer/__init__.py index bef4698d03..91c56b131f 100644 --- a/nipype/interfaces/slicer/__init__.py +++ b/nipype/interfaces/slicer/__init__.py @@ -1,4 +1,10 @@ # -*- coding: utf-8 -*- +""" +3D Slicer is a platform for medical image informatics processing and visualization. + +For an EXPERIMENTAL implementation of an interface for the ``3dSlicer`` full framework, +please check `"dynamic" Slicer `__. +""" from .diffusion import * from .segmentation import * from .filtering import * diff --git a/nipype/interfaces/spm/__init__.py b/nipype/interfaces/spm/__init__.py index 0d5c91abfb..fcb6926eb6 100644 --- a/nipype/interfaces/spm/__init__.py +++ b/nipype/interfaces/spm/__init__.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Top-level namespace for spm.""" - +"""SPM is a software package for the analysis of brain imaging data sequences.""" from .base import Info, SPMCommand, logger, no_spm, scans_for_fname, scans_for_fnames from .preprocess import ( FieldMap, diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index c2f50f56fc..e230ceb9bc 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -45,52 +45,49 @@ class Level1DesignInputSpec(SPMCommandInputSpec): field="timing.RT", desc="Interscan interval in secs", mandatory=True ) microtime_resolution = traits.Int( - field="timing.fmri_t", desc=("Number of time-bins per scan " "in secs (opt)") + field="timing.fmri_t", desc=("Number of time-bins per scan in secs (opt)") ) microtime_onset = traits.Float( field="timing.fmri_t0", - desc=("The onset/time-bin in seconds for " "alignment (opt)"), + desc=("The onset/time-bin in seconds for alignment (opt)"), ) session_info = traits.Any( field="sess", - desc=("Session specific information generated " "by ``modelgen.SpecifyModel``"), + desc=("Session specific information generated by ``modelgen.SpecifyModel``"), mandatory=True, ) factor_info = traits.List( traits.Dict(traits.Enum("name", "levels")), field="fact", - desc=("Factor specific information " "file (opt)"), + desc=("Factor specific information file (opt)"), ) bases = traits.Dict( traits.Enum("hrf", "fourier", "fourier_han", "gamma", "fir"), field="bases", - desc=""" - dict {'name':{'basesparam1':val,...}} - name : string - Name of basis function (hrf, fourier, fourier_han, - gamma, fir) - - hrf : - derivs : 2-element list - Model HRF Derivatives. 
No derivatives: [0,0], - Time derivatives : [1,0], Time and Dispersion - derivatives: [1,1] - fourier, fourier_han, gamma, fir: - length : int - Post-stimulus window length (in seconds) - order : int - Number of basis functions + desc="""\ +Dictionary names of the basis function to parameters: + + * hrf + + * derivs -- (2-element list) Model HRF Derivatives. No derivatives: [0,0], + Time derivatives : [1,0], Time and Dispersion derivatives: [1,1] + + * fourier, fourier_han, gamma, or fir: + + * length -- (int) Post-stimulus window length (in seconds) + * order -- (int) Number of basis functions + """, mandatory=True, ) volterra_expansion_order = traits.Enum( - 1, 2, field="volt", desc=("Model interactions - " "yes:1, no:2") + 1, 2, field="volt", desc=("Model interactions - yes:1, no:2") ) global_intensity_normalization = traits.Enum( "none", "scaling", field="global", - desc=("Global intensity " "normalization - " "scaling or none"), + desc=("Global intensity normalization - scaling or none"), ) mask_image = File( exists=True, field="mask", desc="Image for explicitly masking the analysis" @@ -192,7 +189,7 @@ def _make_matlab_command(self, content): postscript += ( "SPM.xM.TH = ones(size(SPM.xM.TH))*(%s);\n" % self.inputs.mask_threshold ) - postscript += "SPM.xM.xs = struct('Masking', " "'explicit masking only');\n" + postscript += "SPM.xM.xs = struct('Masking', 'explicit masking only');\n" postscript += "save SPM SPM;\n" else: postscript = None @@ -220,7 +217,7 @@ class EstimateModelInputSpec(SPMCommandInputSpec): field="method", mandatory=True, desc=( - "Dictionary of either Classical: 1, Bayesian: 1, " "or Bayesian2: 1 (dict)" + "Dictionary of either Classical: 1, Bayesian: 1, or Bayesian2: 1 (dict)" ), ) write_residuals = traits.Bool( @@ -388,7 +385,7 @@ class EstimateContrastInputSpec(SPMCommandInputSpec): ) beta_images = InputMultiPath( File(exists=True), - desc=("Parameter estimates of the " "design matrix"), + desc=("Parameter estimates of the design matrix"), copyfile=False, mandatory=True, ) @@ -474,7 +471,7 @@ def _make_matlab_command(self, _): script += "condnames=names;\n" else: if self.inputs.use_derivs: - script += r"pat = 'Sn\([0-9]*\) (.*)';" "\n" + script += r"pat = 'Sn\([0-9]*\) (.*)';\n" else: script += ( r"pat = 'Sn\([0-9]*\) (.*)\*bf\(1\)|Sn\([0-9]*\) " @@ -483,7 +480,7 @@ def _make_matlab_command(self, _): ) script += "t = regexp(names,pat,'tokens');\n" # get sessidx for columns - script += r"pat1 = 'Sn\(([0-9].*)\)\s.*';" "\n" + script += r"pat1 = 'Sn\(([0-9].*)\)\s.*';\n" script += "t1 = regexp(names,pat1,'tokens');\n" script += ( "for i0=1:numel(t),condnames{i0}='';condsess(i0)=0;if " @@ -506,7 +503,7 @@ def _make_matlab_command(self, _): for sno, sw in enumerate(contrast.sessions): script += "sidx = find(condsess(idx)==%d);\n" % (sno + 1) script += ( - "consess{%d}.tcon.convec(idx(sidx)) " "= %f;\n" + "consess{%d}.tcon.convec(idx(sidx)) = %f;\n" ) % (i + 1, sw * contrast.weights[c0]) else: script += "consess{%d}.tcon.convec(idx) = %f;\n" % ( @@ -526,7 +523,7 @@ def _make_matlab_command(self, _): "to the F contrasts" ) script += ( - "consess{%d}.fcon.convec{%d} = " "consess{%d}.tcon.convec;\n" + "consess{%d}.fcon.convec{%d} = consess{%d}.tcon.convec;\n" ) % (i + 1, cl0 + 1, tidx + 1) script += "jobs{1}.stats{1}.con.consess = consess;\n" script += ( @@ -588,23 +585,23 @@ class ThresholdInputSpec(SPMCommandInputSpec): use_topo_fdr = traits.Bool( True, usedefault=True, - desc=("whether to use FDR over cluster extent " "probabilities"), + desc=("whether to use FDR over 
cluster extent probabilities"), ) height_threshold = traits.Float( 0.05, usedefault=True, - desc=("value for initial thresholding " "(defining clusters)"), + desc=("value for initial thresholding (defining clusters)"), ) height_threshold_type = traits.Enum( "p-value", "stat", usedefault=True, - desc=("Is the cluster forming " "threshold a stat value or " "p-value?"), + desc=("Is the cluster forming threshold a stat value or p-value?"), ) extent_fdr_p_threshold = traits.Float( 0.05, usedefault=True, - desc=("p threshold on FDR corrected " "cluster size probabilities"), + desc=("p threshold on FDR corrected cluster size probabilities"), ) extent_threshold = traits.Int( 0, usedefault=True, desc="Minimum cluster size in voxels" @@ -825,7 +822,7 @@ class ThresholdStatisticsInputSpec(SPMCommandInputSpec): mandatory=True, desc="which contrast in the SPM.mat to use" ) height_threshold = traits.Float( - desc=("stat value for initial " "thresholding (defining clusters)"), + desc=("stat value for initial thresholding (defining clusters)"), mandatory=True, ) extent_threshold = traits.Int( @@ -946,7 +943,7 @@ class FactorialDesignInputSpec(SPMCommandInputSpec): key_trait=traits.Enum("vector", "name", "interaction", "centering") ), field="cov", - desc=("covariate dictionary {vector, name, " "interaction, centering}"), + desc=("covariate dictionary {vector, name, interaction, centering}"), ) threshold_mask_none = traits.Bool( field="masking.tm.tm_none", @@ -961,10 +958,10 @@ class FactorialDesignInputSpec(SPMCommandInputSpec): threshold_mask_relative = traits.Float( field="masking.tm.tmr.rthresh", xor=["threshold_mask_absolute", "threshold_mask_none"], - desc=("threshold using a " "proportion of the global " "value"), + desc=("threshold using a proportion of the global value"), ) use_implicit_threshold = traits.Bool( - field="masking.im", desc=("use implicit mask NaNs or " "zeros to threshold") + field="masking.im", desc=("use implicit mask NaNs or zeros to threshold") ) explicit_mask_file = File( field="masking.em", # requires cell @@ -987,14 +984,14 @@ class FactorialDesignInputSpec(SPMCommandInputSpec): desc="omit global calculation", ) no_grand_mean_scaling = traits.Bool( - field="globalm.gmsca.gmsca_no", desc=("do not perform grand mean " "scaling") + field="globalm.gmsca.gmsca_no", desc=("do not perform grand mean scaling") ) global_normalization = traits.Enum( 1, 2, 3, field="globalm.glonorm", - desc=("global normalization None-1, " "Proportional-2, ANCOVA-3"), + desc=("global normalization None-1, Proportional-2, ANCOVA-3"), ) @@ -1099,11 +1096,11 @@ class TwoSampleTTestDesignInputSpec(FactorialDesignInputSpec): desc="Group 2 input files", ) dependent = traits.Bool( - field="des.t2.dept", desc=("Are the measurements dependent between " "levels") + field="des.t2.dept", desc=("Are the measurements dependent between levels") ) unequal_variance = traits.Bool( field="des.t2.variance", - desc=("Are the variances equal or unequal " "between groups"), + desc=("Are the variances equal or unequal between groups"), ) @@ -1183,7 +1180,7 @@ class MultipleRegressionDesignInputSpec(FactorialDesignInputSpec): user_covariates = InputMultiPath( traits.Dict(key_trait=traits.Enum("vector", "name", "centering")), field="des.mreg.mcov", - desc=("covariate dictionary {vector, " "name, centering}"), + desc=("covariate dictionary {vector, name, centering}"), ) diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index b6a705fa8e..528fbc282d 100644 --- 
a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -205,9 +205,10 @@ class FieldMap(SPMCommand): http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=173 - To do - ----- - Deal with real/imag magnitude images and with the two phase files case. + .. important:: + + This interface does not deal with real/imag magnitude images neither + with the two phase files case. Examples -------- @@ -266,18 +267,18 @@ class SliceTimingInputSpec(SPMCommandInputSpec): ) time_repetition = traits.Float( field="tr", - desc=("time between volume acquisitions" "(start to start time)"), + desc=("time between volume acquisitions (start to start time)"), mandatory=True, ) time_acquisition = traits.Float( field="ta", - desc=("time of volume acquisition. usually" "calculated as TR-(TR/num_slices)"), + desc=("time of volume acquisition. usually calculated as TR-(TR/num_slices)"), mandatory=True, ) slice_order = traits.List( traits.Float(), field="so", - desc=("1-based order or onset (in ms) in which " "slices are acquired"), + desc=("1-based order or onset (in ms) in which slices are acquired"), mandatory=True, ) ref_slice = traits.Int( @@ -378,7 +379,7 @@ class RealignInputSpec(SPMCommandInputSpec): ) register_to_mean = traits.Bool( field="eoptions.rtm", - desc=("Indicate whether realignment is " "done to the mean image"), + desc=("Indicate whether realignment is done to the mean image"), ) weight_img = File( exists=True, field="eoptions.weight", desc="filename of weighting image" @@ -408,14 +409,14 @@ class RealignInputSpec(SPMCommandInputSpec): low=0, high=7, field="roptions.interp", - desc=("degree of b-spline used for " "interpolation"), + desc=("degree of b-spline used for interpolation"), ) write_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, field="roptions.wrap", - desc=("Check if interpolation should wrap in " "[x,y,z]"), + desc=("Check if interpolation should wrap in [x,y,z]"), ) write_mask = traits.Bool(field="roptions.mask", desc="True/False mask output image") out_prefix = traits.String( @@ -448,7 +449,7 @@ class RealignOutputSpec(TraitedSpec): ), ) realignment_parameters = OutputMultiPath( - File(exists=True), desc=("Estimated translation and " "rotation parameters") + File(exists=True), desc=("Estimated translation and rotation parameters") ) @@ -872,14 +873,14 @@ class CoregisterInputSpec(SPMCommandInputSpec): low=0, high=7, field="roptions.interp", - desc=("degree of b-spline used for " "interpolation"), + desc=("degree of b-spline used for interpolation"), ) write_wrap = traits.List( traits.Int(), minlen=3, maxlen=3, field="roptions.wrap", - desc=("Check if interpolation should wrap in " "[x,y,z]"), + desc=("Check if interpolation should wrap in [x,y,z]"), ) write_mask = traits.Bool(field="roptions.mask", desc="True/False mask output image") out_prefix = traits.String( @@ -1025,7 +1026,7 @@ class NormalizeInputSpec(SPMCommandInputSpec): field="eoptions.cutoff", desc="Cutoff of for DCT bases" ) nonlinear_iterations = traits.Int( - field="eoptions.nits", desc=("Number of iterations of " "nonlinear warping") + field="eoptions.nits", desc=("Number of iterations of nonlinear warping") ) nonlinear_regularization = traits.Float( field="eoptions.reg", @@ -1053,12 +1054,12 @@ class NormalizeInputSpec(SPMCommandInputSpec): low=0, high=7, field="roptions.interp", - desc=("degree of b-spline used for " "interpolation"), + desc=("degree of b-spline used for interpolation"), ) write_wrap = traits.List( traits.Int(), field="roptions.wrap", - desc=("Check if interpolation 
should wrap in " "[x,y,z] - list of bools"), + desc=("Check if interpolation should wrap in [x,y,z] - list of bools"), ) out_prefix = traits.String( "w", field="roptions.prefix", usedefault=True, desc="normalized output prefix" @@ -1068,7 +1069,7 @@ class NormalizeInputSpec(SPMCommandInputSpec): class NormalizeOutputSpec(TraitedSpec): normalization_parameters = OutputMultiPath( File(exists=True), - desc=("MAT files containing " "the normalization " "parameters"), + desc=("MAT files containing the normalization parameters"), ) normalized_source = OutputMultiPath( File(exists=True), desc="Normalized source files" @@ -1175,7 +1176,7 @@ class Normalize12InputSpec(SPMCommandInputSpec): image_to_align = ImageFileSPM( exists=True, field="subj.vol", - desc=("file to estimate normalization parameters " "with"), + desc=("file to estimate normalization parameters with"), xor=["deformation_file"], mandatory=True, copyfile=True, @@ -1235,7 +1236,7 @@ class Normalize12InputSpec(SPMCommandInputSpec): tpm = File( exists=True, field="eoptions.tpm", - desc=("template in form of tissue probablitiy maps to " "normalize to"), + desc=("template in form of tissue probablitiy maps to normalize to"), xor=["deformation_file"], copyfile=False, ) @@ -1247,15 +1248,15 @@ class Normalize12InputSpec(SPMCommandInputSpec): field="eoptions.reg", minlen=5, maxlen=5, - desc=("controls balance between " "parameters and data"), + desc=("controls balance between parameters and data"), ) smoothness = traits.Float( field="eoptions.fwhm", - desc=("value (in mm) to smooth the data before " "normalization"), + desc=("value (in mm) to smooth the data before normalization"), ) sampling_distance = traits.Float( field="eoptions.samp", - desc=("Sampling distance on data for " "parameter estimation"), + desc=("Sampling distance on data for parameter estimation"), ) write_bounding_box = traits.List( traits.List(traits.Float(), minlen=3, maxlen=3), @@ -1283,7 +1284,7 @@ class Normalize12InputSpec(SPMCommandInputSpec): low=0, high=7, field="woptions.interp", - desc=("degree of b-spline used for " "interpolation"), + desc=("degree of b-spline used for interpolation"), ) out_prefix = traits.String( "w", field="woptions.prefix", usedefault=True, desc="Normalized output prefix" @@ -1301,7 +1302,7 @@ class Normalize12OutputSpec(TraitedSpec): ), ) normalized_image = OutputMultiPath( - File(exists=True), desc=("Normalized file that needed to " "be aligned") + File(exists=True), desc=("Normalized file that needed to be aligned") ) normalized_files = OutputMultiPath(File(exists=True), desc="Normalized other files") @@ -1456,24 +1457,24 @@ class SegmentInputSpec(SPMCommandInputSpec): Modulated + Unmodulated Normalised: [True,True,False]""", ) save_bias_corrected = traits.Bool( - field="output.biascor", desc=("True/False produce a bias " "corrected image") + field="output.biascor", desc=("True/False produce a bias corrected image") ) clean_masks = traits.Enum( "no", "light", "thorough", field="output.cleanup", - desc=("clean using estimated brain mask " "('no','light','thorough')"), + desc=("clean using estimated brain mask ('no','light','thorough')"), ) tissue_prob_maps = traits.List( File(exists=True), field="opts.tpm", - desc=("list of gray, white & csf prob. " "(opt,)"), + desc=("list of gray, white & csf prob. 
(opt,)"), ) gaussians_per_class = traits.List( traits.Int(), field="opts.ngaus", - desc=("num Gaussians capture intensity " "distribution"), + desc=("num Gaussians capture intensity distribution"), ) affine_regularization = traits.Enum( "mni", @@ -1490,7 +1491,7 @@ class SegmentInputSpec(SPMCommandInputSpec): ), ) warping_regularization = traits.Float( - field="opts.warpreg", desc=("Controls balance between " "parameters and data") + field="opts.warpreg", desc=("Controls balance between parameters and data") ) warp_frequency_cutoff = traits.Float( field="opts.warpco", desc="Cutoff of DCT bases" @@ -1525,7 +1526,7 @@ class SegmentInputSpec(SPMCommandInputSpec): ) sampling_distance = traits.Float( field="opts.samp", - desc=("Sampling distance on data for " "parameter estimation"), + desc=("Sampling distance on data for parameter estimation"), ) mask_image = File( exists=True, @@ -1537,13 +1538,13 @@ class SegmentInputSpec(SPMCommandInputSpec): class SegmentOutputSpec(TraitedSpec): native_gm_image = File(desc="native space grey probability map") normalized_gm_image = File(desc="normalized grey probability map",) - modulated_gm_image = File(desc=("modulated, normalized grey " "probability map")) + modulated_gm_image = File(desc=("modulated, normalized grey probability map")) native_wm_image = File(desc="native space white probability map") normalized_wm_image = File(desc="normalized white probability map") - modulated_wm_image = File(desc=("modulated, normalized white " "probability map")) + modulated_wm_image = File(desc=("modulated, normalized white probability map")) native_csf_image = File(desc="native space csf probability map") normalized_csf_image = File(desc="normalized csf probability map") - modulated_csf_image = File(desc=("modulated, normalized csf " "probability map")) + modulated_csf_image = File(desc=("modulated, normalized csf probability map")) modulated_input_image = File( deprecated="0.10", new_name="bias_corrected_image", @@ -1682,14 +1683,14 @@ class NewSegmentInputSpec(SPMCommandInputSpec): ) sampling_distance = traits.Float( field="warp.samp", - desc=("Sampling distance on data for " "parameter estimation"), + desc=("Sampling distance on data for parameter estimation"), ) write_deformation_fields = traits.List( traits.Bool(), minlen=2, maxlen=2, field="warp.write", - desc=("Which deformation fields to " "write:[Inverse, Forward]"), + desc=("Which deformation fields to write:[Inverse, Forward]"), ) @@ -1704,7 +1705,7 @@ class NewSegmentOutputSpec(TraitedSpec): traits.List(File(exists=True)), desc="normalized class images" ) modulated_class_images = traits.List( - traits.List(File(exists=True)), desc=("modulated+normalized class " "images") + traits.List(File(exists=True)), desc=("modulated+normalized class images") ) transformation_mat = OutputMultiPath( File(exists=True), desc="Normalization transformation" @@ -1883,7 +1884,7 @@ class SmoothInputSpec(SPMCommandInputSpec): ) data_type = traits.Int(field="dtype", desc="Data type of the output images") implicit_masking = traits.Bool( - field="im", desc=("A mask implied by a particular" "voxel value") + field="im", desc=("A mask implied by a particular voxel value") ) out_prefix = traits.String( "s", field="prefix", usedefault=True, desc="smoothed output prefix" @@ -1957,7 +1958,7 @@ class DARTELInputSpec(SPMCommandInputSpec): "Membrane", "Bending", field="warp.settings.rform", - desc=("Form of regularization energy " "term"), + desc=("Form of regularization energy term"), ) iteration_parameters = traits.List( traits.Tuple( 
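The hunk below turns two free-form DARTEL ``desc`` strings into bullet lists; a hedged sketch
of how those per-iteration tuples are supplied (the values are illustrative only, and the
``rc*`` file names are placeholders that must exist, as in the package doctests)::

    from nipype.interfaces import spm

    dartel = spm.DARTEL()
    dartel.inputs.image_files = [["rc1s1.nii", "rc1s2.nii"],
                                 ["rc2s1.nii", "rc2s2.nii"]]
    dartel.inputs.iteration_parameters = [
        # (inner iterations, regularization parameters, time points, smoothing)
        (3, (4.0, 2.0, 1e-06), 16, 1),
    ]
    dartel.run()  # doctest: +SKIP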
@@ -1969,31 +1970,36 @@ class DARTELInputSpec(SPMCommandInputSpec): minlen=3, maxlen=12, field="warp.settings.param", - desc="""List of tuples for each iteration - - Inner iterations - - Regularization parameters - - Time points for deformation model - - smoothing parameter - """, + desc="""\ +List of tuples for each iteration + + * Inner iterations + * Regularization parameters + * Time points for deformation model + * smoothing parameter + +""", ) optimization_parameters = traits.Tuple( traits.Float, traits.Range(1, 8), traits.Range(1, 8), field="warp.settings.optim", - desc=""" - Optimization settings a tuple - - LM regularization - - cycles of multigrid solver - - relaxation iterations - """, + desc="""\ +Optimization settings a tuple: + + * LM regularization + * cycles of multigrid solver + * relaxation iterations + +""", ) class DARTELOutputSpec(TraitedSpec): final_template_file = File(exists=True, desc="final DARTEL template") template_files = traits.List( - File(exists=True), desc=("Templates from different stages of " "iteration") + File(exists=True), desc=("Templates from different stages of iteration") ) dartel_flow_fields = traits.List(File(exists=True), desc="DARTEL flow fields") @@ -2104,7 +2110,7 @@ class DARTELNorm2MNIInputSpec(SPMCommandInputSpec): ) modulate = traits.Bool( field="mni_norm.preserve", - desc=("Modulate out images - no modulation " "preserves concentrations"), + desc=("Modulate out images - no modulation preserves concentrations"), ) fwhm = traits.Either( traits.List(traits.Float(), minlen=3, maxlen=3), @@ -2119,7 +2125,7 @@ class DARTELNorm2MNIOutputSpec(TraitedSpec): File(exists=True), desc="Normalized files in MNI space" ) normalization_parameter_file = File( - exists=True, desc=("Transform parameters to MNI " "space") + exists=True, desc=("Transform parameters to MNI space") ) @@ -2203,7 +2209,7 @@ class CreateWarpedInputSpec(SPMCommandInputSpec): iterations = traits.Range( low=0, high=9, - desc=("The number of iterations: log2(number of " "time steps)"), + desc=("The number of iterations: log2(number of time steps)"), field="crt_warped.K", ) interp = traits.Range( @@ -2491,7 +2497,7 @@ class VBMSegmentOuputSpec(TraitedSpec): traits.List(File(exists=True)), desc="normalized class images" ) modulated_class_images = traits.List( - traits.List(File(exists=True)), desc=("modulated+normalized class " "images") + traits.List(File(exists=True)), desc=("modulated+normalized class images") ) transformation_mat = OutputMultiPath( File(exists=True), desc="Normalization transformation" diff --git a/nipype/interfaces/vista/__init__.py b/nipype/interfaces/vista/__init__.py index c44c4678d3..928ff19fc2 100644 --- a/nipype/interfaces/vista/__init__.py +++ b/nipype/interfaces/vista/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: +"""VistaSoft contains Matlab code to perform a variety of analysis on MRI data.""" from .vista import Vnifti2Image, VtoMat diff --git a/nipype/interfaces/vista/vista.py b/nipype/interfaces/vista/vista.py index 4bb941c7f9..12823b10cc 100644 --- a/nipype/interfaces/vista/vista.py +++ b/nipype/interfaces/vista/vista.py @@ -31,12 +31,12 @@ class Vnifti2Image(CommandLine): Example ------- - >>> vimage = Vnifti2Image() >>> vimage.inputs.in_file = 'image.nii' >>> vimage.cmdline 'vnifti2image -in image.nii -out image.v' - >>> vimage.run() # doctest: +SKIP + >>> vimage.run() # doctest: +SKIP + """ _cmd = "vnifti2image" @@ -69,12 +69,12 @@ 
class VtoMat(CommandLine): Example ------- - >>> vimage = VtoMat() >>> vimage.inputs.in_file = 'image.v' >>> vimage.cmdline 'vtomat -in image.v -out image.mat' - >>> vimage.run() # doctest: +SKIP + >>> vimage.run() # doctest: +SKIP + """ _cmd = "vtomat" diff --git a/nipype/interfaces/workbench/__init__.py b/nipype/interfaces/workbench/__init__.py index 1de46f8953..fb68624c88 100644 --- a/nipype/interfaces/workbench/__init__.py +++ b/nipype/interfaces/workbench/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: - +"""Connectome Workbench is a visualization for neuroimaging data, esp. derived from HCP data.""" from .metric import MetricResample from .cifti import CiftiSmooth diff --git a/nipype/interfaces/workbench/base.py b/nipype/interfaces/workbench/base.py index 82c12420b9..d91c85d9f6 100644 --- a/nipype/interfaces/workbench/base.py +++ b/nipype/interfaces/workbench/base.py @@ -5,9 +5,9 @@ The workbench module provides classes for interfacing with `connectome workbench `_ tools. -`Connectome Workbench is an open source, freely available visualization and - discovery tool used to map neuroimaging data, especially data generated by the - Human Connectome Project. +Connectome Workbench is an open source, freely available visualization and +discovery tool used to map neuroimaging data, especially data generated by the +Human Connectome Project. """ import os @@ -21,9 +21,7 @@ class Info(PackageInfo): - """ - Handle `wb_command` version information. - """ + """Handle Connectome Workbench version information.""" version_cmd = "wb_command -version" diff --git a/nipype/sphinxext/apidoc/__init__.py b/nipype/sphinxext/apidoc/__init__.py new file mode 100644 index 0000000000..a7b45241d3 --- /dev/null +++ b/nipype/sphinxext/apidoc/__init__.py @@ -0,0 +1,188 @@ +# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- +# vi: set ft=python sts=4 ts=4 sw=4 et: +"""Settings for sphinxext.interfaces and connection to sphinx-apidoc.""" +import re +from sphinxcontrib.napoleon import ( + Config as NapoleonConfig, + _patch_python_domain, + _skip_member as _napoleon_skip_member +) + +from ... import __version__ +from ...interfaces.base import BaseInterface, TraitedSpec +from .docstring import NipypeDocstring, InterfaceDocstring + + +class Config(NapoleonConfig): + r""" + Sphinx-nipype extension settings in ``conf.py``. + + Listed below are all the settings used by this extension + and their default values. + These settings can be changed in the Sphinx's ``conf.py`` file. + Make sure that ``nipype.sphinxext.interfaces`` is enabled + in ``conf.py``:: + + # conf.py + + # Add this extension to the corresponding list: + extensions = ['nipype.sphinxext.interfaces'] + + # NiPype settings + nipype_references = False + + Attributes + ---------- + nipype_skip_classes: :obj:`bool` (Defaults to True) + True to include referenced publications with the interface + (requires duecredit to be installed). + + """ + _config_values = { + 'nipype_skip_classes': ([ + "AFNI(Python)?Command", + "ANTS", + "FSLCommand", + "FS(Command|Script)", + "Info", + "^SPM", + "Tester", + "InputSpec", + "OutputSpec", + "Numpy", + "NipypeTester", + ], 'env'), + **NapoleonConfig._config_values + } + + +def setup(app): + # type: (Sphinx) -> Dict[unicode, Any] + """ + Sphinx extension setup function. 
+ + When the extension is loaded, Sphinx imports this module and executes + the ``setup()`` function, which in turn notifies Sphinx of everything + the extension offers. + + Parameters + ---------- + app : sphinx.application.Sphinx + Application object representing the Sphinx process + + See Also + -------- + `The Sphinx documentation on Extensions + `_ + `The Extension Tutorial `_ + `The Extension API `_ + + """ + from sphinx.application import Sphinx + if not isinstance(app, Sphinx): + # probably called by tests + return {'version': __version__, 'parallel_read_safe': True} + + _patch_python_domain() + + app.setup_extension('sphinx.ext.autodoc') + app.connect('autodoc-process-docstring', _process_docstring) + app.connect('autodoc-skip-member', _skip_member) + + for name, (default, rebuild) in Config._config_values.items(): + app.add_config_value(name, default, rebuild) + return {'version': __version__, 'parallel_read_safe': True} + + +def _process_docstring(app, what, name, obj, options, lines): + # type: (Sphinx, unicode, unicode, Any, Any, List[unicode]) -> None + """Process the docstring for a given python object. + Called when autodoc has read and processed a docstring. `lines` is a list + of docstring lines that `_process_docstring` modifies in place to change + what Sphinx outputs. + The following settings in conf.py control what styles of docstrings will + be parsed: + * ``napoleon_google_docstring`` -- parse Google style docstrings + * ``napoleon_numpy_docstring`` -- parse NumPy style docstrings + Parameters + ---------- + app : sphinx.application.Sphinx + Application object representing the Sphinx process. + what : str + A string specifying the type of the object to which the docstring + belongs. Valid values: "module", "class", "exception", "function", + "method", "attribute". + name : str + The fully qualified name of the object. + obj : module, class, exception, function, method, or attribute + The object to which the docstring belongs. + options : sphinx.ext.autodoc.Options + The options given to the directive: an object with attributes + inherited_members, undoc_members, show_inheritance and noindex that + are True if the flag option of same name was given to the auto + directive. + lines : list of str + The lines of the docstring, see above. + .. note:: `lines` is modified *in place* + """ + result_lines = lines + # Parse Nipype Interfaces + if what == "class" and issubclass(obj, BaseInterface): + result_lines[:] = InterfaceDocstring( + result_lines, app.config, app, what, name, obj, options).lines() + + result_lines = NipypeDocstring(result_lines, app.config, app, what, name, + obj, options).lines() + lines[:] = result_lines[:] + + +def _skip_member(app, what, name, obj, skip, options): + # type: (Sphinx, unicode, unicode, Any, bool, Any) -> bool + """ + Determine if private and special class members are included in docs. + + Parameters + ---------- + app : sphinx.application.Sphinx + Application object representing the Sphinx process + what : str + A string specifying the type of the object to which the member + belongs. Valid values: "module", "class", "exception", "function", + "method", "attribute". + name : str + The name of the member. + obj : module, class, exception, function, method, or attribute. + For example, if the member is the __init__ method of class A, then + `obj` will be `A.__init__`. 
+    skip : bool
+        A boolean indicating if autodoc will skip this member if `_skip_member`
+        does not override the decision
+    options : sphinx.ext.autodoc.Options
+        The options given to the directive: an object with attributes
+        inherited_members, undoc_members, show_inheritance and noindex that
+        are True if the flag option of same name was given to the auto
+        directive.
+
+    Returns
+    -------
+    bool
+        True if the member should be skipped during creation of the docs,
+        False if it should be included in the docs.
+
+    """
+    # Parse Nipype Interfaces
+    patterns = [pat if hasattr(pat, 'search') else re.compile(pat)
+                for pat in app.config.nipype_skip_classes]
+    isbase = False
+    try:
+        isbase = issubclass(obj, BaseInterface)
+        if issubclass(obj, TraitedSpec):
+            return True
+    except TypeError:
+        pass
+
+    if isbase:
+        for pattern in patterns:
+            if pattern.search(name):
+                return True
+
+    return _napoleon_skip_member(app, what, name, obj, skip, options)

diff --git a/nipype/sphinxext/apidoc/docstring.py b/nipype/sphinxext/apidoc/docstring.py
new file mode 100644
index 0000000000..f5191d5155
--- /dev/null
+++ b/nipype/sphinxext/apidoc/docstring.py
@@ -0,0 +1,159 @@
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+"""Reformat interface docstrings."""
+import re
+from sphinxcontrib.napoleon._upstream import _
+from sphinxcontrib.napoleon.docstring import NumpyDocstring
+
+
+class NipypeDocstring(NumpyDocstring):
+    """Patch the NumpyDocstring from napoleon to get special section headers."""
+
+    def _parse_parameters_section(self, section):
+        # type: (unicode) -> List[unicode]
+        labels = {
+            'args': _('Parameters'),
+            'arguments': _('Parameters'),
+            'parameters': _('Parameters'),
+        }  # type: Dict[unicode, unicode]
+        label = labels.get(section.lower(), section)
+
+        fields = self._consume_fields()
+        if self._config.napoleon_use_param:
+            return self._format_docutils_params(fields)
+
+        return self._format_fields(label, fields)
+
+
+class InterfaceDocstring(NipypeDocstring):
+    """
+    Convert docstrings of Nipype Interfaces to reStructuredText.
+
+    Parameters
+    ----------
+    docstring : :obj:`str` or :obj:`list` of :obj:`str`
+        The docstring to parse, given either as a string or split into
+        individual lines.
+    config : :obj:`sphinxcontrib.napoleon.Config` or :obj:`sphinx.config.Config`
+        The configuration settings to use. If not given, defaults to the
+        config object on `app`; or if `app` is not given, defaults to a
+        new :class:`nipype.sphinxext.apidoc.Config` object.
+
+    Other Parameters
+    ----------------
+    app : :class:`sphinx.application.Sphinx`, optional
+        Application object representing the Sphinx process.
+    what : :obj:`str`, optional
+        A string specifying the type of the object to which the docstring
+        belongs. Valid values: "module", "class", "exception", "function",
+        "method", "attribute".
+    name : :obj:`str`, optional
+        The fully qualified name of the object.
+    obj : module, class, exception, function, method, or attribute
+        The object to which the docstring belongs.
+    options : :class:`sphinx.ext.autodoc.Options`, optional
+        The options given to the directive: an object with attributes
+        inherited_members, undoc_members, show_inheritance and noindex that
+        are True if the flag option of same name was given to the auto
+        directive.
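(Editor's aside — driving this class outside of Sphinx could look like the
sketch below; ``BET`` is just an example interface, and passing ``config=None``
lets napoleon fall back to a default configuration:)

    from nipype.interfaces.fsl import BET
    from nipype.sphinxext.apidoc.docstring import InterfaceDocstring

    doc = InterfaceDocstring(BET.__doc__, name="BET", obj=BET)
    print("\n".join(doc.lines()))  # reST with the wrapped executable plus inputs/outputs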
+
+    """
+
+    _name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
+                           r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)
+
+    def __init__(self, docstring, config=None, app=None, what='', name='',
+                 obj=None, options=None):
+        # type: (Union[unicode, List[unicode]], SphinxConfig, Sphinx, unicode, unicode, Any, Any) -> None  # NOQA
+        super().__init__(docstring, config, app, what, name, obj, options)
+
+        cmd = getattr(obj, '_cmd', '')
+        if cmd and cmd.strip():
+            self._parsed_lines = [
+                'Wrapped executable: ``%s``.' % cmd.strip(),
+                ''] + self._parsed_lines
+
+        if obj is not None:
+            self._parsed_lines += _parse_interface(obj)
+
+
+def _parse_interface(obj):
+    """Return a reST-formatted description of the interface's inputs and outputs."""
+    parsed = []
+    if obj.input_spec:
+        inputs = obj.input_spec()
+        mandatory_items = sorted(inputs.traits(mandatory=True).items())
+        if mandatory_items:
+            parsed += ["", "Mandatory Inputs"]
+            parsed += ["-" * len(parsed[-1])]
+            for name, spec in mandatory_items:
+                parsed += _parse_spec(inputs, name, spec)
+
+        mandatory_keys = {item[0] for item in mandatory_items}
+        optional_items = sorted([
+            (name, val) for name, val in inputs.traits(transient=None).items()
+            if name not in mandatory_keys
+        ])
+        if optional_items:
+            parsed += ["", "Optional Inputs"]
+            parsed += ["-" * len(parsed[-1])]
+            for name, spec in optional_items:
+                parsed += _parse_spec(inputs, name, spec)
+
+    if obj.output_spec:
+        outputs = obj.output_spec()
+        output_items = sorted(outputs.traits(transient=None).items())
+        if output_items:
+            parsed += ["", "Outputs"]
+            parsed += ["-" * len(parsed[-1])]
+            for name, spec in output_items:
+                parsed += _parse_spec(outputs, name, spec)
+
+    return parsed
+
+
+def _indent(lines, n=4):
+    # type: (List[unicode], int) -> List[unicode]
+    return [(' ' * n) + line for line in lines]
+
+
+def _parse_spec(inputs, name, spec):
+    """Parse a HasTraits object into a Numpy-style docstring."""
+    desc_lines = []
+    if spec.desc:
+        desc = ''.join([spec.desc[0].capitalize(), spec.desc[1:]])
+        if not desc.endswith('.') and not desc.endswith('\n'):
+            desc = '%s.' % desc
+        desc_lines += desc.splitlines()
+
+    argstr = spec.argstr
+    if argstr and argstr.strip():
+        pos = spec.position
+        if pos is None:
+            desc_lines += ["""Maps to a command-line argument: :code:`{arg}`.""".format(
+                arg=argstr.strip())]
+        else:
+            desc_lines += [
+                """Maps to a command-line argument: :code:`{arg}` (position: {pos}).""".format(
+                    arg=argstr.strip(), pos=pos)]
+
+    xor = spec.xor
+    if xor:
+        desc_lines += ["Mutually **exclusive** with inputs: %s." % ", ".join(
+            ["``%s``" % x for x in xor])]
+
+    requires = spec.requires
+    if requires:
+        desc_lines += ["**Requires** inputs: %s." % ", ".join(
+            ["``%s``" % x for x in requires])]
+
+    if spec.usedefault:
+        default = spec.default_value()[1]
+        if isinstance(default, (bytes, str)) and not default:
+            default = '""'
+
+        desc_lines += ["(Nipype **default** value: ``%s``)" % str(default)]
+
+    out_rst = ["{name} : {type}".format(name=name, type=spec.full_info(inputs, name, None))]
+    out_rst += _indent(desc_lines, 4)
+
+    return out_rst

diff --git a/nipype/sphinxext/documenter.py b/nipype/sphinxext/documenter.py
new file mode 100644
index 0000000000..9e15c57f49
--- /dev/null
+++ b/nipype/sphinxext/documenter.py
@@ -0,0 +1,72 @@
+"""sphinx autodoc ext."""
+from sphinx.locale import _
+from sphinx.ext import autodoc
+from nipype.interfaces.base import BaseInterface
+from .gh import get_url
+
+_ClassDocumenter = autodoc.ClassDocumenter
+RST_CLASS_BLOCK = """
+.. index:: {name}
+
+..
_{module}.{name}: + +{name} +{underline} +`Link to code <{code_url}>`__ + +""" + + +class NipypeClassDocumenter(_ClassDocumenter): # type: ignore + priority = 20 + + def add_directive_header(self, sig: str) -> None: + if self.doc_as_attr: + self.directivetype = 'attribute' + + # Copied from super + domain = getattr(self, 'domain', 'py') + directive = getattr(self, 'directivetype', self.objtype) + name = self.format_name() + sourcename = self.get_sourcename() + + is_interface = False + try: + is_interface = issubclass(self.object, BaseInterface) + except TypeError: + pass + + if is_interface is True: + lines = RST_CLASS_BLOCK.format( + code_url=get_url(self.object), + module=self.modname, + name=name, + underline='=' * len(name), + ) + for line in lines.splitlines(): + self.add_line(line, sourcename) + else: + self.add_line('.. %s:%s:: %s%s' % (domain, directive, name, sig), + sourcename) + if self.options.noindex: + self.add_line(' :noindex:', sourcename) + if self.objpath: + # Be explicit about the module, this is necessary since .. class:: + # etc. don't support a prepended module name + self.add_line(' :module: %s' % self.modname, sourcename) + + # add inheritance info, if wanted + if not self.doc_as_attr and self.options.show_inheritance: + sourcename = self.get_sourcename() + self.add_line('', sourcename) + if hasattr(self.object, '__bases__') and len(self.object.__bases__): + bases = [':class:`%s`' % b.__name__ + if b.__module__ in ('__builtin__', 'builtins') + else ':class:`%s.%s`' % (b.__module__, b.__name__) + for b in self.object.__bases__] + self.add_line(' ' + _('Bases: %s') % ', '.join(bases), + sourcename) + + +def setup(app): + app.add_autodocumenter(NipypeClassDocumenter) diff --git a/nipype/sphinxext/gh.py b/nipype/sphinxext/gh.py new file mode 100644 index 0000000000..6658fda361 --- /dev/null +++ b/nipype/sphinxext/gh.py @@ -0,0 +1,32 @@ +"""Build a file URL.""" +import os +import inspect +import subprocess + +REVISION_CMD = 'git rev-parse --short HEAD' + + +def _get_git_revision(): + # Comes from scikit-learn + # https://github.com/scikit-learn/scikit-learn/blob/master/doc/sphinxext/github_link.py + try: + revision = subprocess.check_output(REVISION_CMD.split()).strip() + except (subprocess.CalledProcessError, OSError): + return None + return revision.decode('utf-8') + + +def get_url(obj): + """Return local or remote url for an object.""" + filename = inspect.getsourcefile(obj) + uri = "file://%s" % filename + revision = _get_git_revision() + if revision is not None: + shortfile = os.path.join("nipype", filename.split("nipype/")[-1]) + uri = "http://github.com/nipy/nipype/blob/%s/%s" % ( + revision, + shortfile, + ) + lines, lstart = inspect.getsourcelines(obj) + lend = len(lines) + lstart + return '%s#L%d-L%d' % (uri, lstart, lend) diff --git a/rtd_requirements.txt b/rtd_requirements.txt deleted file mode 100644 index 8cb274347a..0000000000 --- a/rtd_requirements.txt +++ /dev/null @@ -1,20 +0,0 @@ -configparser -funcsigs -future>=0.16.0 -matplotlib -mock -networkx>=1.9 -nibabel>=2.1.0 -numpy>=1.9.0 -numpydoc -packaging -prov>=1.5.2 -neurdflib -psutil -pydot>=1.2.3 -pydotplus -pytest>=3.0 -python-dateutil>=2.2 -scipy>=0.14 -simplejson>=3.8.0 -traits>=4.6 diff --git a/tools/apigen.py b/tools/apigen.py deleted file mode 100644 index 19e47b5c20..0000000000 --- a/tools/apigen.py +++ /dev/null @@ -1,730 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""Attempt to generate 
templates for module reference with Sphinx - -XXX - we exclude extension modules - -To include extension modules, first identify them as valid in the -``_uri2path`` method, then handle them in the ``_parse_module`` script. - -We get functions and classes by parsing the text of .py files. -Alternatively we could import the modules for discovery, and we'd have -to do that for extension modules. This would involve changing the -``_parse_module`` method to work via import and introspection, and -might involve changing ``discover_modules`` (which determines which -files are modules, and therefore which module URIs will be passed to -``_parse_module``). - -NOTE: this is a modified version of a script originally shipped with the -PyMVPA project, which we've adapted for NIPY use. PyMVPA is an MIT-licensed -project. -""" -import os -import sys -import re -import tempfile -import warnings - -from nipype.interfaces.base import BaseInterface -from nipype.pipeline.engine import Workflow -from nipype.utils.misc import trim - -from github import get_file_url - -RST_SECTION_LEVELS = ("*", "=", "-", "~", "^") - -RST_CLASS_BLOCK = """ -.. _{uri}.{cls}: - -.. index:: {cls} - -{cls} -{underline} -`Link to code <{code_url}>`__ - -{body} -""" - -RST_FUNC_BLOCK = """ -.. _{uri}.{name}: - -:func:`{name}` -{underline} -`Link to code <{code_url}>`__ - -{body} - -""" - - -# Functions and classes -class ApiDocWriter(object): - """Write reST documents for API docs.""" - - # only separating first two levels - rst_section_levels = RST_SECTION_LEVELS - - def __init__( - self, - package_name, - rst_extension=".rst", - package_skip_patterns=(r"\.tests$",), - module_skip_patterns=(r"\.setup$", r"\._"), - ): - r""" - Initialize package for parsing. - - Parameters - ---------- - package_name : string - Name of the top-level package. *package_name* must be the - name of an importable package - rst_extension : string, optional - Extension for reST files, default '.rst' - package_skip_patterns : None or sequence of {strings, regexps} - Sequence of strings giving URIs of packages to be excluded - Operates on the package path, starting at (including) the - first dot in the package path, after *package_name* - so, - if *package_name* is ``sphinx``, then ``sphinx.util`` will - result in ``.util`` being passed for earching by these - regexps. If is None, gives default. Default is: - ``('\.tests$', )``. - module_skip_patterns : None or sequence - Sequence of strings giving URIs of modules to be excluded - Operates on the module name including preceding URI path, - back to the first dot after *package_name*. For example - ``sphinx.util.console`` results in the string to search of - ``.util.console`` - If is None, gives default. Default is: - ``('\.setup$', '\._')``. - - """ - self._skip_patterns = {} - self.rst_extension = rst_extension - self.package_name = package_name - self.package_skip_patterns = package_skip_patterns - self.module_skip_patterns = module_skip_patterns - - @property - def package_name(self): - """Get package name.""" - return self._package_name - - @package_name.setter - def package_name(self, name): - """ - Set package_name. 
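(Editor's aside — the setter below, removed along with the rest of this module,
resolves a package's filesystem root from its import name; the modern
equivalent of its ``__import__(name).__path__[0]`` idiom is:)

    import importlib

    root_path = importlib.import_module("sphinx").__path__[0]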
- - >>> docwriter = ApiDocWriter('sphinx') - >>> import sphinx - >>> docwriter.root_path == sphinx.__path__[0] - True - >>> docwriter.package_name = 'docutils' - >>> import docutils - >>> docwriter.root_path == docutils.__path__[0] - True - - """ - # It's also possible to imagine caching the module parsing here - self._package_name = name - self.root_module = __import__(name) - self.root_path = self.root_module.__path__[0] - self.written_modules = None - - @property - def package_skip_patterns(self): - """Get package skip patterns.""" - return self._skip_patterns['package'] - - @package_skip_patterns.setter - def package_skip_patterns(self, pattern): - self._skip_patterns['package'] = _parse_patterns(pattern) - - @property - def module_skip_patterns(self): - """Get module skip patterns.""" - return self._skip_patterns['module'] - - @module_skip_patterns.setter - def module_skip_patterns(self, pattern): - self._skip_patterns['module'] = _parse_patterns(pattern) - - def _get_object_name(self, line): - """ - Get second token in line. - - >>> docwriter = ApiDocWriter('sphinx') - >>> docwriter._get_object_name(" def func(): ") - u'func' - >>> docwriter._get_object_name(" class Klass(object): ") - 'Klass' - >>> docwriter._get_object_name(" class Klass: ") - 'Klass' - """ - name = line.split()[1].split("(")[0].strip() - # in case we have classes which are not derived from object - # ie. old style classes - return name.rstrip(":") - - def _uri2path(self, uri): - """ - Convert uri to absolute filepath. - - Parameters - ---------- - uri : string - URI of python module to return path for - - Returns - ------- - path : None or string - Returns None if there is no valid path for this URI - Otherwise returns absolute file system path for URI - - Examples - -------- - >>> docwriter = ApiDocWriter('sphinx') - >>> import sphinx - >>> modpath = sphinx.__path__[0] - >>> res = docwriter._uri2path('sphinx.builder') - >>> res == os.path.join(modpath, 'builder.py') - True - >>> res = docwriter._uri2path('sphinx') - >>> res == os.path.join(modpath, '__init__.py') - True - >>> docwriter._uri2path('sphinx.does_not_exist') - - """ - if uri == self.package_name: - return os.path.join(self.root_path, "__init__.py") - path = uri.replace(".", os.path.sep) - path = path.replace(self.package_name + os.path.sep, "") - path = os.path.join(self.root_path, path) - # XXX maybe check for extensions as well? - if os.path.exists(path + ".py"): # file - path += ".py" - elif os.path.exists(os.path.join(path, "__init__.py")): - path = os.path.join(path, "__init__.py") - else: - return None - return path - - def _path2uri(self, dirpath): - """Convert directory path to uri.""" - relpath = dirpath.replace(self.root_path, self.package_name) - if relpath.startswith(os.path.sep): - relpath = relpath[1:] - return relpath.replace(os.path.sep, ".") - - def _parse_module(self, uri): - """Parse module defined in ``uri``.""" - filename = self._uri2path(uri) - if filename is None: - # nothing that we could handle here. 
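(Editor's aside — the resolution order implemented by ``_uri2path`` above,
distilled into a self-contained sketch: try ``<path>.py``, then
``<path>/__init__.py``, else give up; the special case for the package itself
is omitted here:)

    import os

    def uri2path(root_path, package_name, uri):
        path = os.path.join(
            root_path,
            uri.replace(package_name + ".", "", 1).replace(".", os.path.sep))
        if os.path.exists(path + ".py"):                       # plain module
            return path + ".py"
        if os.path.exists(os.path.join(path, "__init__.py")):  # package
            return os.path.join(path, "__init__.py")
        return None                                            # not handled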
- return ([], []) - f = open(filename, "rt") - functions, classes = self._parse_lines(f, uri) - f.close() - return functions, classes - - def _parse_lines(self, linesource, module=None): - """Parse lines of text for functions and classes.""" - functions = [] - classes = [] - for line in linesource: - if line.startswith("def ") and line.count("("): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith("_"): - functions.append(name) - elif line.startswith("class "): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith("_"): - classes.append(name) - else: - pass - functions.sort() - classes.sort() - return functions, classes - - def generate_api_doc(self, uri): - """ - Make autodoc documentation template string for a module. - - Parameters - ---------- - uri : string - python location of module - e.g 'sphinx.builder' - - Returns - ------- - S : string - Contents of API doc - - """ - # get the names of all classes and functions - functions, classes = self._parse_module(uri) - if not len(functions) and not len(classes): - print(("WARNING: Empty -", uri)) # dbg - return "" - - # Make a shorter version of the uri that omits the package name for - # titles - uri_short = re.sub(r"^%s\." % self.package_name, "", uri) - - ad = ".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n" - - chap_title = uri_short - ad += chap_title + "\n" + self.rst_section_levels[1] * len(chap_title) + "\n\n" - - # Set the chapter title to read 'module' for all modules except for the - # main packages - if "." in uri: - title = "Module: :mod:`" + uri_short + "`" - else: - title = ":mod:`" + uri_short + "`" - ad += title + "\n" + self.rst_section_levels[2] * len(title) - - if len(classes): - ad += "\nInheritance diagram for ``%s``:\n\n" % uri - ad += ".. inheritance-diagram:: %s \n" % uri - ad += " :parts: 2\n" - - ad += "\n.. automodule:: " + uri + "\n" - ad += "\n.. currentmodule:: " + uri + "\n" - multi_class = len(classes) > 1 - multi_fx = len(functions) > 1 - if multi_class: - ad += "\n" + "Classes" + "\n" + self.rst_section_levels[2] * 7 + "\n" - elif len(classes) and multi_fx: - ad += "\n" + "Class" + "\n" + self.rst_section_levels[2] * 5 + "\n" - for c in classes: - ad += ( - "\n:class:`" - + c - + "`\n" - + self.rst_section_levels[multi_class + 2] * (len(c) + 9) - + "\n\n" - ) - ad += "\n.. autoclass:: " + c + "\n" - # must NOT exclude from index to keep cross-refs working - ad += ( - " :members:\n" - " :undoc-members:\n" - " :show-inheritance:\n" - " :inherited-members:\n" - "\n" - " .. automethod:: __init__\n" - ) - if multi_fx: - ad += "\n" + "Functions" + "\n" + self.rst_section_levels[2] * 9 + "\n\n" - elif len(functions) and multi_class: - ad += "\n" + "Function" + "\n" + self.rst_section_levels[2] * 8 + "\n\n" - for f in functions: - # must NOT exclude from index to keep cross-refs working - ad += "\n.. autofunction:: " + uri + "." + f + "\n\n" - return ad - - def _survives_exclude(self, matchstr, match_type): - r""" - Return ``True`` if ``matchstr`` does not match patterns. 
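(Editor's aside — the matching rule documented below, restated standalone:
strip the package prefix, then any ``re.search`` hit excludes the URI:)

    import re

    patterns = [r"\.tests$", r"\._"]
    matchstr = "nipype.interfaces.tests"[len("nipype"):]  # -> ".interfaces.tests"
    survives = not any(re.search(pat, matchstr) for pat in patterns)
    assert survives is False  # excluded by the first pattern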
- - ``self.package_name`` removed from front of string if present - - Examples - -------- - >>> dw = ApiDocWriter('sphinx') - >>> dw._survives_exclude('sphinx.okpkg', 'package') - True - >>> dw.package_skip_patterns.append(r'^\.badpkg$') - >>> dw._survives_exclude('sphinx.badpkg', 'package') - False - >>> dw._survives_exclude('sphinx.badpkg', 'module') - True - >>> dw._survives_exclude('sphinx.badmod', 'module') - True - >>> dw.module_skip_patterns.append(r'^\.badmod$') - >>> dw._survives_exclude('sphinx.badmod', 'module') - False - - """ - patterns = self._skip_patterns.get(match_type) - if patterns is None: - raise ValueError('Cannot interpret match type "%s"' % match_type) - - # Match to URI without package name - L = len(self.package_name) - if matchstr[:L] == self.package_name: - matchstr = matchstr[L:] - for pat in patterns: - try: - pat.search - except AttributeError: - pat = re.compile(pat) - if pat.search(matchstr): - return False - return True - - def discover_modules(self, empty_start=True): - r""" - Return module sequence discovered from ``self.package_name``. - - Parameters - ---------- - None - - Returns - ------- - mods : sequence - Sequence of module names within ``self.package_name`` - - Examples - -------- - >>> dw = ApiDocWriter('sphinx') - >>> mods = dw.discover_modules() - >>> 'sphinx.util' in mods - True - >>> dw.package_skip_patterns.append('\.util$') - >>> 'sphinx.util' in dw.discover_modules() - False - >>> - - """ - modules = [] if empty_start else [self.package_name] - # raw directory parsing - for dirpath, dirnames, filenames in os.walk(self.root_path): - # Check directory names for packages - root_uri = self._path2uri(os.path.join(self.root_path, dirpath)) - for dirname in dirnames[:]: # copy list - we modify inplace - package_uri = ".".join((root_uri, dirname)) - if self._uri2path(package_uri) and self._survives_exclude( - package_uri, "package" - ): - modules.append(package_uri) - else: - dirnames.remove(dirname) - # Check filenames for modules - for filename in filenames: - module_name = filename[:-3] - module_uri = ".".join((root_uri, module_name)) - if self._uri2path(module_uri) and self._survives_exclude( - module_uri, "module" - ): - modules.append(module_uri) - return sorted(modules) - - def write_modules_api(self, modules, outdir): - """Generate the list of modules.""" - written_modules = [] - for m in modules: - api_str = self.generate_api_doc(m) - if not api_str: - continue - # write out to file - outfile = os.path.join(outdir, m + self.rst_extension) - fileobj = open(outfile, "wt") - fileobj.write(api_str) - fileobj.close() - written_modules.append(m) - self.written_modules = written_modules - - def write_api_docs(self, outdir): - """ - Generate API reST files. - - Parameters - ---------- - outdir : string - Directory name in which to store files - We create automatic filenames for each module - - Returns - ------- - None - - Notes - ----- - Sets ``self.written_modules`` to list of written modules - - """ - if not os.path.exists(outdir): - os.mkdir(outdir) - # compose list of modules - modules = self.discover_modules() - self.write_modules_api(modules, outdir) - - def write_index(self, outdir, froot="gen", relative_to=None, - maxdepth=None): - """ - Make a reST API index file from written files. - - Parameters - ---------- - path : string - Filename to write index to - outdir : string - Directory to which to write generated index file - froot : string, optional - root (filename without extension) of filename to write to - Defaults to 'gen'. 
We add ``self.rst_extension``. - relative_to : string - path to which written filenames are relative. This - component of the written file path will be removed from - outdir, in the generated index. Default is None, meaning, - leave path as it is. - - """ - if self.written_modules is None: - raise ValueError("No modules written") - # Get full filename path - path = os.path.join(outdir, froot + self.rst_extension) - # Path written into index is relative to rootpath - if relative_to is not None: - relpath = outdir.replace(relative_to + os.path.sep, "") - else: - relpath = outdir - idx = open(path, "wt") - w = idx.write - w(".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n") - if maxdepth is None: - w(".. toctree::\n\n") - else: - w(".. toctree::\n") - w(" :maxdepth: %d\n\n" % maxdepth) - for f in self.written_modules: - w(" %s\n" % os.path.join(relpath, f)) - idx.close() - - -class InterfaceHelpWriter(ApiDocWriter): - """Convert interface specs to rST.""" - - def __init__( - self, - package_name, - class_skip_patterns=None, - **kwargs - ): - """ - Initialize an :py:mod:`ApiDocWriter` for interface specs. - - Additional Parameters - --------------------- - class_skip_patterns : None or sequence - Sequence of strings giving classes to be excluded - Default is: None - - """ - super().__init__(package_name, **kwargs) - self.class_skip_patterns = class_skip_patterns - - @property - def class_skip_patterns(self): - """Get class skip patterns.""" - return self._skip_patterns['class'] - - @class_skip_patterns.setter - def class_skip_patterns(self, pattern): - self._skip_patterns['class'] = _parse_patterns(pattern) - - def _parse_lines(self, linesource, module=None): - """Parse lines of text for functions and classes.""" - functions = [] - classes = [] - for line in linesource: - if line.startswith("def ") and line.count("("): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith("_"): - functions.append(name) - elif line.startswith("class "): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith("_") and self._survives_exclude( - ".".join((module, name)), "class" - ): - classes.append(name) - else: - pass - functions.sort() - classes.sort() - return functions, classes - - def _write_graph_section(self, fname, title): - ad = "\n%s\n%s\n\n" % (title, self.rst_section_levels[3] * len(title)) - ad += ".. graphviz::\n\n" - fhandle = open(fname) - for line in fhandle: - ad += "\t" + line + "\n" - - fhandle.close() - os.remove(fname) - bitmap_fname = "{}.png".format(os.path.splitext(fname)[0]) - os.remove(bitmap_fname) - return ad - - def generate_api_doc(self, uri): - """ - Make autodoc documentation template string for a module. 
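(Editor's aside — to make the templating below concrete, ``RST_CLASS_BLOCK``
from the top of this deleted module renders one class entry as follows; every
value here is illustrative only:)

    entry = RST_CLASS_BLOCK.format(
        uri="nipype.interfaces.vtomat",  # hypothetical module URI
        cls="VtoMat",
        underline="-" * len("VtoMat"),
        code_url="http://github.com/nipy/nipype/blob/master/...",  # elided
        body="    Interface help text, trimmed to the right section level.",
    )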
- - Parameters - ---------- - uri : string - python location of module - e.g 'sphinx.builder' - - Returns - ------- - S : string - Contents of API doc - - """ - # get the names of all classes and functions - functions, classes = self._parse_module(uri) - workflows = [] - helper_functions = [] - for function in functions: - - try: - __import__(uri) - finst = sys.modules[uri].__dict__[function] - except TypeError: - continue - try: - workflow = finst() - except Exception: - helper_functions.append((function, finst)) - continue - - if isinstance(workflow, Workflow): - workflows.append((workflow, function, finst)) - - if not classes and not workflows and not helper_functions: - print("WARNING: Empty -", uri) # dbg - return "" - - # Make a shorter version of the uri that omits the package name for - # titles - uri_short = re.sub(r"^%s\." % self.package_name, "", uri) - # uri_short = uri - - ad = ".. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n" - - chap_title = uri_short - ad += chap_title + "\n" + self.rst_section_levels[1] * len(chap_title) + "\n\n" - - # Set the chapter title to read 'module' for all modules except for the - # main packages - # if '.' in uri: - # title = 'Module: :mod:`' + uri_short + '`' - # else: - # title = ':mod:`' + uri_short + '`' - # ad += title + '\n' + self.rst_section_levels[2] * len(title) - - # ad += '\n' + 'Classes' + '\n' + \ - # self.rst_section_levels[2] * 7 + '\n' - for c in classes: - __import__(uri) - print(c) - try: - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - classinst = sys.modules[uri].__dict__[c] - except Exception as inst: - print(inst) - continue - - if not issubclass(classinst, BaseInterface): - continue - - ad += RST_CLASS_BLOCK.format( - uri=uri, - cls=c, - underline=self.rst_section_levels[2] * len(c), - code_url=get_file_url(classinst), - body=trim(classinst.help(returnhelp=True), self.rst_section_levels[3]) - ) - - if workflows or helper_functions: - ad += "\n.. module:: %s\n\n" % uri - - for workflow, name, finst in workflows: - ad += RST_FUNC_BLOCK.format( - uri=uri, - name=name, - underline=self.rst_section_levels[2] * (len(name) + 8), - code_url=get_file_url(finst), - body=trim(finst.__doc__, self.rst_section_levels[3]) - ) - """ - # use sphinx autodoc for function signature - ad += '\n.. _%s:\n\n' % (uri + '.' + name) - ad += '.. autofunction:: %s\n\n' % name - """ - - (_, fname) = tempfile.mkstemp(suffix=".dot") - workflow.write_graph(dotfilename=fname, graph2use="hierarchical") - ad += self._write_graph_section(fname, "Graph") + "\n" - - for name, finst in helper_functions: - ad += RST_FUNC_BLOCK.format( - uri=uri, - name=name, - underline=self.rst_section_levels[2] * (len(name) + 8), - code_url=get_file_url(finst), - body=trim(finst.__doc__, self.rst_section_levels[3]) - ) - return ad - - def discover_modules(self, empty_start=True): - """Return module sequence discovered from ``self.package_name``.""" - return super().discover_modules(empty_start=False) - - def write_modules_api(self, modules, outdir): - """Generate the list of modules.""" - written_modules = [] - for m in modules: - api_str = self.generate_api_doc(m) - if not api_str: - continue - # write out to file - mvalues = m.split(".") - if len(mvalues) > 3: - index_prefix = ".".join(mvalues[1:3]) - index_dir = os.path.join(outdir, index_prefix) - index_file = index_dir + self.rst_extension - if not os.path.exists(index_dir): - os.makedirs(index_dir) - header = """.. AUTO-GENERATED FILE -- DO NOT EDIT! - -{name} -{underline} - -.. 
toctree:: - :maxdepth: 1 - :glob: - - {name}/* - """.format( - name=index_prefix, underline="=" * len(index_prefix) - ) - with open(index_file, "wt") as fp: - fp.write(header) - m = os.path.join(index_prefix, ".".join(mvalues[3:])) - outfile = os.path.join(outdir, m + self.rst_extension) - fileobj = open(outfile, "wt") - fileobj.write(api_str) - fileobj.close() - written_modules.append(m) - self.written_modules = written_modules - - -def _parse_patterns(pattern): - if pattern is None: - return [] - if isinstance(pattern, str): - return [pattern] - if isinstance(pattern, tuple): - return list(pattern) - return pattern diff --git a/tools/build_interface_docs.py b/tools/build_interface_docs.py deleted file mode 100755 index f42adc7904..0000000000 --- a/tools/build_interface_docs.py +++ /dev/null @@ -1,66 +0,0 @@ -#!/usr/bin/env python -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -"""Script to auto-generate interface docs. -""" -# stdlib imports -import os -import sys - -# ***************************************************************************** -if __name__ == "__main__": - nipypepath = os.path.abspath("..") - sys.path.insert(1, nipypepath) - # local imports - from apigen import InterfaceHelpWriter - - package = "nipype" - outdir = os.path.join("interfaces", "generated") - docwriter = InterfaceHelpWriter(package) - # Packages that should not be included in generated API docs. - docwriter.package_skip_patterns += [ - r"\.external$", - r"\.fixes$", - r"\.utils$", - r"\.pipeline", - r"\.testing", - r"\.caching", - r"\.scripts", - r"\.sphinxext$", - r"\.workflows" - ] - # Modules that should not be included in generated API docs. - docwriter.module_skip_patterns += [ - r"\.conftest", - r"\.interfaces\.base$", - r"\.interfaces\.matlab$", - r"\.interfaces\.pymvpa$", - r"\.interfaces\.rest$", - r"\.interfaces\.slicer\.generate_classes$", - r"\.interfaces\.spm\.base$", - r"\.interfaces\.traits", - r"\.pipeline\.alloy$", - r"\.pipeline\.s3_node_wrapper$", - r"\.pkg_info" - r"\.scripts", - r"\.testing", - r"\.version$", - ] - docwriter.class_skip_patterns += [ - "AFNICommand", - "ANTS", - "FSLCommand", - "FS", - "Info", - "^SPM", - "Tester", - "Spec$", - "Numpy", - # NipypeTester raises an - # exception when instantiated in - # InterfaceHelpWriter.generate_api_doc - "NipypeTester", - ] - docwriter.write_api_docs(outdir) - # docwriter.write_index(outdir, "gen") - print("%d files written" % len(docwriter.written_modules)) diff --git a/tools/ex2rst b/tools/ex2rst index df24df0340..2434d16ccc 100755 --- a/tools/ex2rst +++ b/tools/ex2rst @@ -143,7 +143,7 @@ def exfile2rst(filename): else: if doc2code: doc2code = False - s += '\n::\n' + s += '\n\n.. code-block :: python\n' # has to be code s += ' %s' % line @@ -284,8 +284,8 @@ Name of the project that contains the examples. 
This name is used in the toparse.append(t) # filter parse list - if not opts.excluded is None: - toparse = [t for t in toparse if not t in opts.excluded] + if opts.excluded is not None: + toparse = [t for t in toparse if t not in opts.excluded] toparse_list = toparse toparse = set(toparse) diff --git a/tools/github.py b/tools/github.py deleted file mode 100644 index fab02772e9..0000000000 --- a/tools/github.py +++ /dev/null @@ -1,109 +0,0 @@ -# -*- coding: utf-8 -*- -import http.client -import inspect -import simplejson -import os -from subprocess import Popen, PIPE - -import nipype - - -def is_git_repo(): - """Does the current nipype module have a git folder - """ - sourcepath = os.path.realpath( - os.path.join(os.path.dirname(nipype.__file__), os.path.pardir) - ) - gitpathgit = os.path.join(sourcepath, ".git") - if os.path.exists(gitpathgit): - return True - else: - return False - - -def get_local_branch(): - """Determine current branch - """ - if is_git_repo(): - o, _ = Popen( - 'git branch | grep "\* "', - shell=True, - stdout=PIPE, - cwd=os.path.dirname(nipype.__file__), - ).communicate() - return o.strip()[2:] - else: - return None - - -def get_remote_branch(): - """Get remote branch for current branch - """ - - pass - - -def create_hash_map(): - """Create a hash map for all objects - """ - - hashmap = {} - from base64 import encodestring as base64 - import pwd - - login_name = pwd.getpwuid(os.geteuid())[0] - conn = http.client.HTTPSConnection("api.github.com") - conn.request( - "GET", - "/repos/nipy/nipype", - headers={"Authorization": "Basic %s" % base64(login_name)}, - ) - try: - conn.request("GET", "/repos/nipy/nipype/git/trees/master?recursive=1") - except: - pass - else: - r1 = conn.getresponse() - if r1.reason != "OK": - raise Exception("HTTP Response %s:%s" % (r1.status, r1.reason)) - payload = simplejson.loads(r1.read()) - for infodict in payload["tree"]: - if infodict["type"] == "blob": - hashmap[infodict["sha"]] = infodict["path"] - return hashmap - - -def get_repo_url(force_github=False): - """Returns github url or local url - - Returns - ------- - URI: str - filesystem path or github repo url - """ - sourcepath = os.path.realpath( - os.path.join(os.path.dirname(nipype.__file__), os.path.pardir) - ) - gitpathgit = os.path.join(sourcepath, ".git") - if not os.path.exists(gitpathgit) and not force_github: - uri = "file://%s" % sourcepath - else: - uri = "http://github.com/nipy/nipype/blob/master" - return uri - - -def get_file_url(object): - """Returns local or remote url for an object - """ - filename = inspect.getsourcefile(object) - lines = inspect.getsourcelines(object) - uri = "file://%s#L%d" % (filename, lines[1]) - if is_git_repo(): - info = nipype.get_info() - shortfile = os.path.join("nipype", filename.split("nipype/")[-1]) - uri = "http://github.com/nipy/nipype/tree/%s/%s#L%d" % ( - info["commit_hash"], - shortfile, - lines[1], - ) - return uri diff --git a/tools/make_examples.py b/tools/make_examples.py index 785d06af33..f91d42b0fe 100755 --- a/tools/make_examples.py +++ b/tools/make_examples.py @@ -3,8 +3,6 @@ This also creates the index.rst file appropriately, makes figures, etc. 
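(Editor's aside — the hunk below drops ``past.builtins.execfile``; the
Python 3 idiom it migrates to appears at the end of this diff, sketched here
with an optional ``compile`` step that keeps the filename in tracebacks:)

    with open("script.py", "rt") as f:
        exec(compile(f.read(), "script.py", "exec"))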
""" -from past.builtins import execfile - # ----------------------------------------------------------------------------- # Library imports # ----------------------------------------------------------------------------- @@ -66,16 +64,19 @@ def show(): # Main script # ----------------------------------------------------------------------------- +exclude_files = ['-x %s' % sys.argv[i + 1] for i, arg in enumerate(sys.argv) if arg == '-x'] + # Work in examples directory cd("users/examples") if not os.getcwd().endswith("users/examples"): raise OSError("This must be run from doc/examples directory") # Run the conversion from .py to rst file -sh("../../../tools/ex2rst --project Nipype --outdir . ../../../examples") -sh( - "../../../tools/ex2rst --project Nipype " - "--outdir . ../../../examples/frontiers_paper" +sh("../../../tools/ex2rst %s --project Nipype --outdir . ../../../examples" % + ' '.join(exclude_files)) +sh("""\ +../../../tools/ex2rst --project Nipype %s --outdir . ../../../examples/frontiers_paper \ +""" % ' '.join(exclude_files) ) # Make the index.rst file @@ -99,5 +100,6 @@ def show(): for script in glob("*.py"): figure_basename = pjoin("fig", os.path.splitext(script)[0]) - execfile(script) + with open(script, 'rt') as f: + exec(f.read()) plt.close("all") From 204c0bde751c8973d8e7e3b8cf65e62e53158b72 Mon Sep 17 00:00:00 2001 From: oesteban Date: Sun, 29 Dec 2019 15:07:36 -0800 Subject: [PATCH 08/48] fix: circle.yml file --- .circleci/config.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5c889193ec..b8150456cf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -99,7 +99,6 @@ _run_codecov_smoke: &_run_codecov_smoke version: 2 jobs: - compare_base_dockerfiles: docker: - image: docker:17.10.0-ce-git @@ -402,6 +401,7 @@ jobs: - FSLOUTPUTTYPE: 'NIFTI' steps: - checkout + - run: name: Check Python version and upgrade pip command: | python --version @@ -413,7 +413,7 @@ jobs: apt-get install -y graphviz - run: name: Install Requirements (may contain pinned versions) - command: python -m pip install -r docs/requirements.txt + command: python -m pip install -r doc/requirements.txt - run: name: Install NiPype command: python -m pip install ".[doc]" @@ -427,6 +427,10 @@ workflows: version: 2 build_test_deploy: jobs: + - build_docs: + filters: + tags: + only: /.*/ - pypi_precheck: filters: branches: From 6de90c57af3a98c35007ac875b236b03666f5c3d Mon Sep 17 00:00:00 2001 From: oesteban Date: Sun, 29 Dec 2019 16:58:01 -0800 Subject: [PATCH 09/48] fix: add modified specs --- .../test_auto_ConvertScalarImageToRGB.py | 28 +++++++-------- .../tests/test_auto_SmoothTessellation.py | 34 +++++++++---------- 2 files changed, 31 insertions(+), 31 deletions(-) diff --git a/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py b/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py index 7e8c5605f7..419c090958 100644 --- a/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py +++ b/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py @@ -4,19 +4,19 @@ def test_ConvertScalarImageToRGB_inputs(): input_map = dict( - args=dict(argstr="%s",), - colormap=dict(argstr="%s", mandatory=True, position=4, usedefault=True,), - custom_color_map_file=dict(argstr="%s", position=5, usedefault=True,), - dimension=dict(argstr="%d", mandatory=True, position=0, usedefault=True,), - environ=dict(nohash=True, usedefault=True,), - input_image=dict(argstr="%s", extensions=None, mandatory=True, 
position=1,), - mask_image=dict(argstr="%s", extensions=None, position=3, usedefault=True,), - maximum_RGB_output=dict(argstr="%d", position=9, usedefault=True,), - maximum_input=dict(argstr="%d", mandatory=True, position=7,), - minimum_RGB_output=dict(argstr="%d", position=8, usedefault=True,), - minimum_input=dict(argstr="%d", mandatory=True, position=6,), - num_threads=dict(nohash=True, usedefault=True,), - output_image=dict(argstr="%s", position=2, usedefault=True,), + args=dict(argstr="%s"), + colormap=dict(argstr="%s", mandatory=True, position=4), + custom_color_map_file=dict(argstr="%s", position=5, usedefault=True), + dimension=dict(argstr="%d", mandatory=True, position=0, usedefault=True), + environ=dict(nohash=True, usedefault=True), + input_image=dict(argstr="%s", extensions=None, mandatory=True, position=1), + mask_image=dict(argstr="%s", extensions=None, position=3, usedefault=True), + maximum_RGB_output=dict(argstr="%d", position=9, usedefault=True), + maximum_input=dict(argstr="%d", mandatory=True, position=7), + minimum_RGB_output=dict(argstr="%d", position=8, usedefault=True), + minimum_input=dict(argstr="%d", mandatory=True, position=6), + num_threads=dict(nohash=True, usedefault=True), + output_image=dict(argstr="%s", position=2, usedefault=True), ) inputs = ConvertScalarImageToRGB.input_spec() @@ -26,7 +26,7 @@ def test_ConvertScalarImageToRGB_inputs(): def test_ConvertScalarImageToRGB_outputs(): - output_map = dict(output_image=dict(extensions=None,),) + output_map = dict(output_image=dict(extensions=None)) outputs = ConvertScalarImageToRGB.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py b/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py index aed52899f5..983296b4bd 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py @@ -4,25 +4,25 @@ def test_SmoothTessellation_inputs(): input_map = dict( - args=dict(argstr="%s",), - curvature_averaging_iterations=dict(argstr="-a %d",), - disable_estimates=dict(argstr="-nw",), - environ=dict(nohash=True, usedefault=True,), - gaussian_curvature_norm_steps=dict(argstr="%d ",), - gaussian_curvature_smoothing_steps=dict(argstr="%d",), + args=dict(argstr="%s"), + curvature_averaging_iterations=dict(argstr="-a %d"), + disable_estimates=dict(argstr="-nw"), + environ=dict(nohash=True, usedefault=True), + gaussian_curvature_norm_steps=dict(argstr="%d"), + gaussian_curvature_smoothing_steps=dict(argstr=" %d"), in_file=dict( - argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2, + argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2 ), - normalize_area=dict(argstr="-area",), - out_area_file=dict(argstr="-b %s", extensions=None,), - out_curvature_file=dict(argstr="-c %s", extensions=None,), - out_file=dict(argstr="%s", extensions=None, genfile=True, position=-1,), - seed=dict(argstr="-seed %d",), - smoothing_iterations=dict(argstr="-n %d",), - snapshot_writing_iterations=dict(argstr="-w %d",), + normalize_area=dict(argstr="-area"), + out_area_file=dict(argstr="-b %s", extensions=None), + out_curvature_file=dict(argstr="-c %s", extensions=None), + out_file=dict(argstr="%s", extensions=None, genfile=True, position=-1), + seed=dict(argstr="-seed %d"), + smoothing_iterations=dict(argstr="-n %d"), + snapshot_writing_iterations=dict(argstr="-w %d"), subjects_dir=dict(), - 
use_gaussian_curvature_smoothing=dict(argstr="-g",), - use_momentum=dict(argstr="-m",), + use_gaussian_curvature_smoothing=dict(argstr="-g"), + use_momentum=dict(argstr="-m"), ) inputs = SmoothTessellation.input_spec() @@ -32,7 +32,7 @@ def test_SmoothTessellation_inputs(): def test_SmoothTessellation_outputs(): - output_map = dict(surface=dict(extensions=None,),) + output_map = dict(surface=dict(extensions=None)) outputs = SmoothTessellation.output_spec() for key, metadata in list(output_map.items()): From f763008442d88d8ce00ec266698268389415f8d6 Mon Sep 17 00:00:00 2001 From: oesteban Date: Sun, 29 Dec 2019 17:47:51 -0800 Subject: [PATCH 10/48] sty: black --- nipype/info.py | 16 ++--- nipype/interfaces/afni/preprocess.py | 10 ++-- nipype/interfaces/afni/utils.py | 19 ++++-- nipype/interfaces/ants/segmentation.py | 22 +++---- nipype/interfaces/ants/visualization.py | 25 +++++--- nipype/interfaces/diffusion_toolkit/dti.py | 8 +-- nipype/interfaces/diffusion_toolkit/odf.py | 4 +- nipype/interfaces/freesurfer/model.py | 8 +-- nipype/interfaces/freesurfer/utils.py | 6 +- nipype/interfaces/niftyseg/maths.py | 16 +++-- nipype/interfaces/niftyseg/stats.py | 6 +- nipype/interfaces/nitime/analysis.py | 8 +-- nipype/interfaces/petpvc.py | 6 +- nipype/interfaces/spm/model.py | 11 ++-- nipype/interfaces/spm/preprocess.py | 9 +-- nipype/sphinxext/apidoc/__init__.py | 58 ++++++++++-------- nipype/sphinxext/apidoc/docstring.py | 70 ++++++++++++++-------- nipype/sphinxext/documenter.py | 34 ++++++----- nipype/sphinxext/gh.py | 11 ++-- 19 files changed, 195 insertions(+), 152 deletions(-) diff --git a/nipype/info.py b/nipype/info.py index 4ec80ea60c..0a5e1a0e2e 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -160,14 +160,14 @@ def get_nipype_gitversion(): EXTRA_REQUIRES = { "data": ["datalad"], "doc": [ - 'dipy', - 'ipython', - 'matplotlib', - 'nbsphinx', - 'sphinx-argparse', - 'sphinx>=2.1.2', - 'sphinxcontrib-apidoc', - 'sphinxcontrib-napoleon', + "dipy", + "ipython", + "matplotlib", + "nbsphinx", + "sphinx-argparse", + "sphinx>=2.1.2", + "sphinxcontrib-apidoc", + "sphinxcontrib-napoleon", ], "duecredit": ["duecredit"], "nipy": ["nitime", "nilearn<0.5.0", "dipy", "nipy", "matplotlib"], diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index 172f8f3902..189a7f0c03 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -3483,7 +3483,8 @@ class QwarpInputSpec(AFNICommandInputSpec): or the inverse can be computed as needed in 3dNwarpApply, like 3dNwarpApply -nwarp 'INV(Z_WARP+tlrc)' -source Dataset.nii ... -""") +""", + ) resample = traits.Bool( desc="""\ This option simply resamples the source dataset to match the @@ -3579,7 +3580,8 @@ class QwarpInputSpec(AFNICommandInputSpec): use the option '-penold'.To be completely compatible with the older 3dQwarp, you'll also have to use '-penfac 0.2'. -""") +""", + ) noweight = traits.Bool( desc="If you want a binary weight (the old default), use this option." 
"That is, each voxel in the base volume automask will be" @@ -3624,7 +3626,7 @@ class QwarpInputSpec(AFNICommandInputSpec): argstr="-wball %s", minlen=5, maxlen=5, - xor=['wmask'], + xor=["wmask"], ) traits.Tuple((traits.Float(), traits.Float()), argstr="-bpass %f %f") wmask = traits.Tuple( @@ -3643,7 +3645,7 @@ class QwarpInputSpec(AFNICommandInputSpec): """, argstr="-wpass %s %f", - xor=['wball'], + xor=["wball"], ) out_weight_file = File( argstr="-wtprefix %s", desc="Write the weight volume to disk as a dataset" diff --git a/nipype/interfaces/afni/utils.py b/nipype/interfaces/afni/utils.py index a6515987e0..9eb0031a37 100644 --- a/nipype/interfaces/afni/utils.py +++ b/nipype/interfaces/afni/utils.py @@ -1034,7 +1034,9 @@ class Edge3(AFNICommand): input_spec = Edge3InputSpec output_spec = AFNICommandOutputSpec references_ = [ - {'entry': BibTeX("""\ + { + "entry": BibTeX( + """\ @article{Deriche1987, author={R. Deriche}, title={Optimal edge detection using recursive filtering}, @@ -1042,8 +1044,13 @@ class Edge3(AFNICommand): volume={2},' pages={167-187}, year={1987}, -}"""), 'tags': ['method']}, - {'entry': BibTeX("""\ +}""" + ), + "tags": ["method"], + }, + { + "entry": BibTeX( + """\ @article{MongaDericheMalandainCocquerez1991, author={O. Monga, R. Deriche, G. Malandain, J.P. Cocquerez}, title={Recursive filtering and edge tracking: two primary tools for 3D edge detection}, @@ -1051,7 +1058,11 @@ class Edge3(AFNICommand): volume={9},' pages={203-214}, year={1991}, -}"""), 'tags': ['method']}] +}""" + ), + "tags": ["method"], + }, + ] class EvalInputSpec(AFNICommandInputSpec): diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index ec83982191..07f2d6e819 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -610,9 +610,7 @@ class CorticalThicknessInputSpec(ANTSCommandInputSpec): 0, 1, argstr="-u %d", - desc=( - "Use random number generated from system clock in Atropos (default = 1)" - ), + desc=("Use random number generated from system clock in Atropos (default = 1)"), ) b_spline_smoothing = traits.Bool( argstr="-v", @@ -855,9 +853,7 @@ class BrainExtractionInputSpec(ANTSCommandInputSpec): "highres001_", argstr="-o %s", usedefault=True, - desc=( - "Prefix that is prepended to all output files" - ), + desc=("Prefix that is prepended to all output files"), ) extraction_registration_mask = File( @@ -879,9 +875,7 @@ class BrainExtractionInputSpec(ANTSCommandInputSpec): 0, 1, argstr="-u %d", - desc=( - "Use random number generated from system clock in Atropos (default = 1)" - ), + desc=("Use random number generated from system clock in Atropos (default = 1)"), ) keep_temporary_files = traits.Int( argstr="-k %d", @@ -1156,9 +1150,7 @@ class JointFusionInputSpec(ANTSCommandInputSpec): minlen=3, maxlen=3, argstr="-rp %s", - desc=( - "Patch radius for similarity measures, scalar or vector. Default: 2x2x2" - ), + desc=("Patch radius for similarity measures, scalar or vector. Default: 2x2x2"), ) search_radius = traits.ListInt( minlen=3, maxlen=3, argstr="-rs %s", desc="Local search radius. Default: 3x3x3" @@ -1816,7 +1808,8 @@ class KellyKapowski(ANTSCommand): references_ = [ { - "entry": BibTeX("""\ + "entry": BibTeX( + """\ @book{Das2009867, author={Sandhitsu R. Das and Brian B. Avants and Murray Grossman and James C. 
Gee}, title={Registration based cortical thickness measurement.}, @@ -1828,7 +1821,8 @@ class KellyKapowski(ANTSCommand): issn={1053-8119}, url={http://www.sciencedirect.com/science/article/pii/S1053811908012780}, doi={https://doi.org/10.1016/j.neuroimage.2008.12.016} -}"""), +}""" + ), "description": "The details on the implementation of DiReCT.", "tags": ["implementation"], } diff --git a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index a08cfb1764..a443a76651 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -32,8 +32,21 @@ class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): "none", argstr="%s", exists=True, desc="mask image", position=3, usedefault=True ) colormap = traits.Enum( - "grey", "red", "green", "blue", "copper", "jet", "hsv", "spring", "summer", - "autumn", "winter", "hot", "cool", "overunder", "custom", + "grey", + "red", + "green", + "blue", + "copper", + "jet", + "hsv", + "spring", + "summer", + "autumn", + "winter", + "hot", + "cool", + "overunder", + "custom", argstr="%s", desc="Select a colormap", mandatory=True, @@ -48,12 +61,8 @@ class ConvertScalarImageToRGBInputSpec(ANTSCommandInputSpec): maximum_input = traits.Int( argstr="%d", desc="maximum input", mandatory=True, position=7 ) - minimum_RGB_output = traits.Int( - 0, usedefault=True, argstr="%d", position=8 - ) - maximum_RGB_output = traits.Int( - 255, usedefault=True, argstr="%d", position=9 - ) + minimum_RGB_output = traits.Int(0, usedefault=True, argstr="%d", position=8) + maximum_RGB_output = traits.Int(255, usedefault=True, argstr="%d", position=9) class ConvertScalarImageToRGBOutputSpec(TraitedSpec): diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index bc30656b00..fe60cf5281 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -210,12 +210,12 @@ class DTITrackerInputSpec(CommandLineInputSpec): ) angle_threshold_weight = traits.Float( desc="set angle threshold weighting factor. weighting will be be applied " - "on top of the angle_threshold", + "on top of the angle_threshold", argstr="-atw %f", ) random_seed = traits.Int( desc="use random location in a voxel instead of the center of the voxel " - "to seed. can also define number of seed per voxel. default is 1", + "to seed. can also define number of seed per voxel. 
default is 1", argstr="-rseed %d", ) invert_x = traits.Bool(desc="invert x component of the vector", argstr="-ix") @@ -229,13 +229,13 @@ class DTITrackerInputSpec(CommandLineInputSpec): ) mask1_threshold = traits.Float( desc="threshold value for the first mask image, if not given, the program will " - "try automatically find the threshold", + "try automatically find the threshold", position=3, ) mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4) mask2_threshold = traits.Float( desc="threshold value for the second mask image, if not given, the program will " - "try automatically find the threshold", + "try automatically find the threshold", position=5, ) input_data_prefix = traits.Str( diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index 8d8c5bf9bd..f25f975bd4 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -297,13 +297,13 @@ class ODFTrackerInputSpec(CommandLineInputSpec): ) mask1_threshold = traits.Float( desc="threshold value for the first mask image, if not given, the program will " - "try automatically find the threshold", + "try automatically find the threshold", position=3, ) mask2_file = File(desc="second mask image", argstr="-m2 %s", position=4) mask2_threshold = traits.Float( desc="threshold value for the second mask image, if not given, the program will " - "try automatically find the threshold", + "try automatically find the threshold", position=5, ) limit = traits.Int( diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index 5209d731c9..9e7d35d096 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -175,8 +175,8 @@ class MRISPreprocReconAllInputSpec(MRISPreprocInputSpec): ) copy_inputs = traits.Bool( desc="If running as a node, set this to True " - "this will copy some implicit inputs to the " - "node directory." + "this will copy some implicit inputs to the " + "node directory." ) @@ -1073,8 +1073,8 @@ class SegStatsReconAllInputSpec(SegStatsInputSpec): aseg = File(exists=True, desc="Mandatory implicit input in 5.3") copy_inputs = traits.Bool( desc="If running as a node, set this to True " - "otherwise, this will copy the implicit inputs " - "to the node directory." + "otherwise, this will copy the implicit inputs " + "to the node directory." ) diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index a5bc6f36b5..a2b3b7a47b 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -1685,7 +1685,7 @@ class SmoothTessellationInputSpec(FSTraitedSpec): argstr="-n %d", desc="Number of smoothing iterations (default=10)" ) snapshot_writing_iterations = traits.Int( - argstr="-w %d", desc='Write snapshot every *n* iterations' + argstr="-w %d", desc="Write snapshot every *n* iterations" ) use_gaussian_curvature_smoothing = traits.Bool( @@ -3835,8 +3835,8 @@ class Aparc2AsegInputSpec(FSTraitedSpec): a2009s = traits.Bool(argstr="--a2009s", desc="Using the a2009s atlas") copy_inputs = traits.Bool( desc="If running as a node, set this to True." - "This will copy the input files to the node " - "directory." + "This will copy the input files to the node " + "directory." 
) diff --git a/nipype/interfaces/niftyseg/maths.py b/nipype/interfaces/niftyseg/maths.py index 80ceca1da1..c297940695 100644 --- a/nipype/interfaces/niftyseg/maths.py +++ b/nipype/interfaces/niftyseg/maths.py @@ -149,7 +149,8 @@ class UnaryMathsInput(MathsInput): * 4to5 - Flip the 4th and 5th dimension. * range - Reset the image range to the min max. -""") +""", + ) class UnaryMaths(MathsCommand): @@ -406,7 +407,8 @@ class BinaryMathsInputInteger(MathsInput): * crop - - Crop voxels around each 3D volume. * pad - - Pad voxels with NaN value around each 3D volume. -""") +""", + ) operand_value = traits.Int( argstr="%d", @@ -475,7 +477,8 @@ class TupleMathsInput(MathsInput): * lssd Local SSD between current img and on a kernel with * lltsnorm Linear LTS normalisation assuming percent outliers -""") +""", + ) operand_file1 = File( exists=True, @@ -559,8 +562,11 @@ class MergeInput(MathsInput): dimension = traits.Int(mandatory=True, desc="Dimension to merge the images.") merge_files = traits.List( - File(exists=True), argstr="%s", mandatory=True, position=4, - desc="List of images to merge to the working image ." + File(exists=True), + argstr="%s", + mandatory=True, + position=4, + desc="List of images to merge to the working image .", ) diff --git a/nipype/interfaces/niftyseg/stats.py b/nipype/interfaces/niftyseg/stats.py index 4d9e598ddf..d872e180eb 100644 --- a/nipype/interfaces/niftyseg/stats.py +++ b/nipype/interfaces/niftyseg/stats.py @@ -124,7 +124,8 @@ class UnaryStatsInput(StatsInput): * xdim - Output the voxel dimention in the x direction. Replace x with y/z for other directions. -""") +""", + ) class UnaryStats(StatsCommand): @@ -199,7 +200,8 @@ class BinaryStatsInput(StatsInput): * Vl - - Volume of each integer label . Save to file. * Nl - - Count of each label . Save to file. 
-""") +""", + ) operand_file = File( exists=True, diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py index 8abc0db75a..38bfb849a7 100644 --- a/nipype/interfaces/nitime/analysis.py +++ b/nipype/interfaces/nitime/analysis.py @@ -115,13 +115,9 @@ class CoherenceAnalyzerOutputSpec(TraitedSpec): desc=("The pairwise time delays between the ROIs (in seconds)") ) - coherence_csv = File( - desc=("A csv file containing the pairwise coherence values") - ) + coherence_csv = File(desc=("A csv file containing the pairwise coherence values")) - timedelay_csv = File( - desc=("A csv file containing the pairwise time delay values") - ) + timedelay_csv = File(desc=("A csv file containing the pairwise time delay values")) coherence_fig = File(desc=("Figure representing coherence values")) timedelay_fig = File(desc=("Figure representing coherence values")) diff --git a/nipype/interfaces/petpvc.py b/nipype/interfaces/petpvc.py index 311f770848..6d0b8f7e04 100644 --- a/nipype/interfaces/petpvc.py +++ b/nipype/interfaces/petpvc.py @@ -48,7 +48,9 @@ class PETPVCInputSpec(CommandLineInputSpec): desc="Mask image file", exists=True, mandatory=True, argstr="-m %s" ) pvc = traits.Enum( - pvc_methods, mandatory=True, argstr="-p %s", + pvc_methods, + mandatory=True, + argstr="-p %s", desc="""\ Desired PVC method: @@ -75,7 +77,7 @@ class PETPVCInputSpec(CommandLineInputSpec): * Muller Gartner with Van-Cittert -- ``MG+VC`` * Muller Gartner with Richardson-Lucy -- ``MG+RL`` -""" +""", ) fwhm_x = traits.Float( desc="The full-width at half maximum in mm along x-axis", diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index e230ceb9bc..ffcef6801e 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -216,9 +216,7 @@ class EstimateModelInputSpec(SPMCommandInputSpec): traits.Enum("Classical", "Bayesian2", "Bayesian"), field="method", mandatory=True, - desc=( - "Dictionary of either Classical: 1, Bayesian: 1, or Bayesian2: 1 (dict)" - ), + desc=("Dictionary of either Classical: 1, Bayesian: 1, or Bayesian2: 1 (dict)"), ) write_residuals = traits.Bool( field="write_residuals", desc="Write individual residual images" @@ -502,9 +500,10 @@ def _make_matlab_command(self, _): if contrast.sessions: for sno, sw in enumerate(contrast.sessions): script += "sidx = find(condsess(idx)==%d);\n" % (sno + 1) - script += ( - "consess{%d}.tcon.convec(idx(sidx)) = %f;\n" - ) % (i + 1, sw * contrast.weights[c0]) + script += ("consess{%d}.tcon.convec(idx(sidx)) = %f;\n") % ( + i + 1, + sw * contrast.weights[c0], + ) else: script += "consess{%d}.tcon.convec(idx) = %f;\n" % ( i + 1, diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index 528fbc282d..d434709062 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -1068,8 +1068,7 @@ class NormalizeInputSpec(SPMCommandInputSpec): class NormalizeOutputSpec(TraitedSpec): normalization_parameters = OutputMultiPath( - File(exists=True), - desc=("MAT files containing the normalization parameters"), + File(exists=True), desc=("MAT files containing the normalization parameters"), ) normalized_source = OutputMultiPath( File(exists=True), desc="Normalized source files" @@ -1525,8 +1524,7 @@ class SegmentInputSpec(SPMCommandInputSpec): desc="FWHM of Gaussian smoothness of bias", ) sampling_distance = traits.Float( - field="opts.samp", - desc=("Sampling distance on data for parameter estimation"), + field="opts.samp", desc=("Sampling distance on 
data for parameter estimation"), ) mask_image = File( exists=True, @@ -1682,8 +1680,7 @@ class NewSegmentInputSpec(SPMCommandInputSpec): ), ) sampling_distance = traits.Float( - field="warp.samp", - desc=("Sampling distance on data for parameter estimation"), + field="warp.samp", desc=("Sampling distance on data for parameter estimation"), ) write_deformation_fields = traits.List( traits.Bool(), diff --git a/nipype/sphinxext/apidoc/__init__.py b/nipype/sphinxext/apidoc/__init__.py index a7b45241d3..cb46ff5b06 100644 --- a/nipype/sphinxext/apidoc/__init__.py +++ b/nipype/sphinxext/apidoc/__init__.py @@ -5,7 +5,7 @@ from sphinxcontrib.napoleon import ( Config as NapoleonConfig, _patch_python_domain, - _skip_member as _napoleon_skip_member + _skip_member as _napoleon_skip_member, ) from ... import __version__ @@ -39,20 +39,23 @@ class Config(NapoleonConfig): """ _config_values = { - 'nipype_skip_classes': ([ - "AFNI(Python)?Command", - "ANTS", - "FSLCommand", - "FS(Command|Script)", - "Info", - "^SPM", - "Tester", - "InputSpec", - "OutputSpec", - "Numpy", - "NipypeTester", - ], 'env'), - **NapoleonConfig._config_values + "nipype_skip_classes": ( + [ + "AFNI(Python)?Command", + "ANTS", + "FSLCommand", + "FS(Command|Script)", + "Info", + "^SPM", + "Tester", + "InputSpec", + "OutputSpec", + "Numpy", + "NipypeTester", + ], + "env", + ), + **NapoleonConfig._config_values, } @@ -79,19 +82,20 @@ def setup(app): """ from sphinx.application import Sphinx + if not isinstance(app, Sphinx): # probably called by tests - return {'version': __version__, 'parallel_read_safe': True} + return {"version": __version__, "parallel_read_safe": True} _patch_python_domain() - app.setup_extension('sphinx.ext.autodoc') - app.connect('autodoc-process-docstring', _process_docstring) - app.connect('autodoc-skip-member', _skip_member) + app.setup_extension("sphinx.ext.autodoc") + app.connect("autodoc-process-docstring", _process_docstring) + app.connect("autodoc-skip-member", _skip_member) for name, (default, rebuild) in Config._config_values.items(): app.add_config_value(name, default, rebuild) - return {'version': __version__, 'parallel_read_safe': True} + return {"version": __version__, "parallel_read_safe": True} def _process_docstring(app, what, name, obj, options, lines): @@ -129,10 +133,12 @@ def _process_docstring(app, what, name, obj, options, lines): # Parse Nipype Interfaces if what == "class" and issubclass(obj, BaseInterface): result_lines[:] = InterfaceDocstring( - result_lines, app.config, app, what, name, obj, options).lines() + result_lines, app.config, app, what, name, obj, options + ).lines() - result_lines = NipypeDocstring(result_lines, app.config, app, what, name, - obj, options).lines() + result_lines = NipypeDocstring( + result_lines, app.config, app, what, name, obj, options + ).lines() lines[:] = result_lines[:] @@ -170,8 +176,10 @@ def _skip_member(app, what, name, obj, skip, options): """ # Parse Nipype Interfaces - patterns = [pat if hasattr(pat, 'search') else re.compile(pat) - for pat in app.config.nipype_skip_classes] + patterns = [ + pat if hasattr(pat, "search") else re.compile(pat) + for pat in app.config.nipype_skip_classes + ] isbase = False try: isbase = issubclass(obj, BaseInterface) diff --git a/nipype/sphinxext/apidoc/docstring.py b/nipype/sphinxext/apidoc/docstring.py index f5191d5155..445a95bfa2 100644 --- a/nipype/sphinxext/apidoc/docstring.py +++ b/nipype/sphinxext/apidoc/docstring.py @@ -12,9 +12,9 @@ class NipypeDocstring(NumpyDocstring): def _parse_parameters_section(self, 
section): # type: (unicode) -> List[unicode]
 labels = {
- 'args': _('Parameters'),
- 'arguments': _('Parameters'),
- 'parameters': _('Parameters'),
+ "args": _("Parameters"),
+ "arguments": _("Parameters"),
+ "parameters": _("Parameters"),
 } # type: Dict[unicode, unicode]
 label = labels.get(section.lower(), section)
@@ -59,19 +59,24 @@ class InterfaceDocstring(NipypeDocstring):
 """
- _name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
- r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)
+ _name_rgx = re.compile(
+ r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
+ r" (?P<name2>[a-zA-Z0-9_.-]+))\s*",
+ re.X,
+ )

- def __init__(self, docstring, config=None, app=None, what='', name='',
- obj=None, options=None):
+ def __init__(
+ self, docstring, config=None, app=None, what="", name="", obj=None, options=None
+ ):
 # type: (Union[unicode, List[unicode]], SphinxConfig, Sphinx, unicode, unicode, Any, Any) -> None # NOQA
 super().__init__(docstring, config, app, what, name, obj, options)

- cmd = getattr(obj, '_cmd', '')
+ cmd = getattr(obj, "_cmd", "")
 if cmd and cmd.strip():
 self._parsed_lines = [
- 'Wrapped executable: ``%s``.' % cmd.strip(),
- ''] + self._parsed_lines
+ "Wrapped executable: ``%s``." % cmd.strip(),
+ "",
+ ] + self._parsed_lines

 if obj is not None:
 self._parsed_lines += _parse_interface(obj)
@@ -90,10 +95,13 @@ def _parse_interface(obj):
 parsed += _parse_spec(inputs, name, spec)

 mandatory_keys = {item[0] for item in mandatory_items}
- optional_items = sorted([
- (name, val) for name, val in inputs.traits(transient=None).items()
- if name not in mandatory_keys
- ])
+ optional_items = sorted(
+ [
+ (name, val)
+ for name, val in inputs.traits(transient=None).items()
+ if name not in mandatory_keys
+ ]
+ )
 if optional_items:
 parsed += ["", "Optional Inputs"]
 parsed += ["-" * len(parsed[-1])]
@@ -113,38 +121,46 @@ def _parse_interface(obj):

 def _indent(lines, n=4):
 # type: (List[unicode], int) -> List[unicode]
- return [(' ' * n) + line for line in lines]
+ return [(" " * n) + line for line in lines]


 def _parse_spec(inputs, name, spec):
 """Parse a HasTraits object into a Numpy-style docstring."""
 desc_lines = []
 if spec.desc:
- desc = ''.join([spec.desc[0].capitalize(), spec.desc[1:]])
- if not desc.endswith('.') and not desc.endswith('\n'):
- desc = '%s.' % desc
+ desc = "".join([spec.desc[0].capitalize(), spec.desc[1:]])
+ if not desc.endswith(".") and not desc.endswith("\n"):
+ desc = "%s." % desc
 desc_lines += desc.splitlines()

 argstr = spec.argstr
 if argstr and argstr.strip():
 pos = spec.position
 if pos is None:
- desc_lines += ["""Maps to a command-line argument: :code:`{arg}`.""".format(
- arg=argstr.strip())]
+ desc_lines += [
+ """Maps to a command-line argument: :code:`{arg}`.""".format(
+ arg=argstr.strip()
+ )
+ ]
 else:
 desc_lines += [
 """Maps to a command-line argument: :code:`{arg}` (position: {pos}).""".format(
- arg=argstr.strip(), pos=pos)]
+ arg=argstr.strip(), pos=pos
+ )
+ ]

 xor = spec.xor
 if xor:
- desc_lines += ["Mutually **exclusive** with inputs: %s." % ", ".join(
- ["``%s``" % x for x in xor])]
+ desc_lines += [
+ "Mutually **exclusive** with inputs: %s."
+ % ", ".join(["``%s``" % x for x in xor])
+ ]

 requires = spec.requires
 if requires:
- desc_lines += ["**Requires** inputs: %s." % ", ".join(
- ["``%s``" % x for x in requires])]
+ desc_lines += [
+ "**Requires** inputs: %s."
% ", ".join(["``%s``" % x for x in requires]) + ] if spec.usedefault: default = spec.default_value()[1] @@ -153,7 +169,9 @@ def _parse_spec(inputs, name, spec): desc_lines += ["(Nipype **default** value: ``%s``)" % str(default)] - out_rst = ["{name} : {type}".format(name=name, type=spec.full_info(inputs, name, None))] + out_rst = [ + "{name} : {type}".format(name=name, type=spec.full_info(inputs, name, None)) + ] out_rst += _indent(desc_lines, 4) return out_rst diff --git a/nipype/sphinxext/documenter.py b/nipype/sphinxext/documenter.py index 9e15c57f49..de84fd9412 100644 --- a/nipype/sphinxext/documenter.py +++ b/nipype/sphinxext/documenter.py @@ -22,11 +22,11 @@ class NipypeClassDocumenter(_ClassDocumenter): # type: ignore def add_directive_header(self, sig: str) -> None: if self.doc_as_attr: - self.directivetype = 'attribute' + self.directivetype = "attribute" # Copied from super - domain = getattr(self, 'domain', 'py') - directive = getattr(self, 'directivetype', self.objtype) + domain = getattr(self, "domain", "py") + directive = getattr(self, "directivetype", self.objtype) name = self.format_name() sourcename = self.get_sourcename() @@ -41,31 +41,33 @@ def add_directive_header(self, sig: str) -> None: code_url=get_url(self.object), module=self.modname, name=name, - underline='=' * len(name), + underline="=" * len(name), ) for line in lines.splitlines(): self.add_line(line, sourcename) else: - self.add_line('.. %s:%s:: %s%s' % (domain, directive, name, sig), - sourcename) + self.add_line( + ".. %s:%s:: %s%s" % (domain, directive, name, sig), sourcename + ) if self.options.noindex: - self.add_line(' :noindex:', sourcename) + self.add_line(" :noindex:", sourcename) if self.objpath: # Be explicit about the module, this is necessary since .. class:: # etc. 
don't support a prepended module name - self.add_line(' :module: %s' % self.modname, sourcename) + self.add_line(" :module: %s" % self.modname, sourcename) # add inheritance info, if wanted if not self.doc_as_attr and self.options.show_inheritance: sourcename = self.get_sourcename() - self.add_line('', sourcename) - if hasattr(self.object, '__bases__') and len(self.object.__bases__): - bases = [':class:`%s`' % b.__name__ - if b.__module__ in ('__builtin__', 'builtins') - else ':class:`%s.%s`' % (b.__module__, b.__name__) - for b in self.object.__bases__] - self.add_line(' ' + _('Bases: %s') % ', '.join(bases), - sourcename) + self.add_line("", sourcename) + if hasattr(self.object, "__bases__") and len(self.object.__bases__): + bases = [ + ":class:`%s`" % b.__name__ + if b.__module__ in ("__builtin__", "builtins") + else ":class:`%s.%s`" % (b.__module__, b.__name__) + for b in self.object.__bases__ + ] + self.add_line(" " + _("Bases: %s") % ", ".join(bases), sourcename) def setup(app): diff --git a/nipype/sphinxext/gh.py b/nipype/sphinxext/gh.py index 6658fda361..c373f84c1a 100644 --- a/nipype/sphinxext/gh.py +++ b/nipype/sphinxext/gh.py @@ -3,7 +3,7 @@ import inspect import subprocess -REVISION_CMD = 'git rev-parse --short HEAD' +REVISION_CMD = "git rev-parse --short HEAD" def _get_git_revision(): @@ -13,7 +13,7 @@ def _get_git_revision(): revision = subprocess.check_output(REVISION_CMD.split()).strip() except (subprocess.CalledProcessError, OSError): return None - return revision.decode('utf-8') + return revision.decode("utf-8") def get_url(obj): @@ -23,10 +23,7 @@ def get_url(obj): revision = _get_git_revision() if revision is not None: shortfile = os.path.join("nipype", filename.split("nipype/")[-1]) - uri = "http://github.com/nipy/nipype/blob/%s/%s" % ( - revision, - shortfile, - ) + uri = "http://github.com/nipy/nipype/blob/%s/%s" % (revision, shortfile,) lines, lstart = inspect.getsourcelines(obj) lend = len(lines) + lstart - return '%s#L%d-L%d' % (uri, lstart, lend) + return "%s#L%d-L%d" % (uri, lstart, lend) From 1e55bc677243fa8e621800848096eb441a3ae621 Mon Sep 17 00:00:00 2001 From: oesteban Date: Sun, 29 Dec 2019 17:50:19 -0800 Subject: [PATCH 11/48] fix: make specs after black --- .../interfaces/afni/tests/test_auto_Qwarp.py | 4 +-- .../afni/tests/test_auto_QwarpPlusMinus.py | 4 +-- .../test_auto_ConvertScalarImageToRGB.py | 28 +++++++-------- .../tests/test_auto_SmoothTessellation.py | 34 +++++++++---------- 4 files changed, 35 insertions(+), 35 deletions(-) diff --git a/nipype/interfaces/afni/tests/test_auto_Qwarp.py b/nipype/interfaces/afni/tests/test_auto_Qwarp.py index 14ff6192d5..e8659ec4c3 100644 --- a/nipype/interfaces/afni/tests/test_auto_Qwarp.py +++ b/nipype/interfaces/afni/tests/test_auto_Qwarp.py @@ -68,9 +68,9 @@ def test_Qwarp_inputs(): quiet=dict(argstr="-quiet", xor=["verb"],), resample=dict(argstr="-resample",), verb=dict(argstr="-verb", xor=["quiet"],), - wball=dict(argstr="-wball %s",), + wball=dict(argstr="-wball %s", xor=["wmask"],), weight=dict(argstr="-weight %s", extensions=None,), - wmask=dict(argstr="-wpass %s %f",), + wmask=dict(argstr="-wpass %s %f", xor=["wball"],), workhard=dict(argstr="-workhard", xor=["boxopt", "ballopt"],), ) inputs = Qwarp.input_spec() diff --git a/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py b/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py index 6b56af1006..c74e192b8f 100644 --- a/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py +++ b/nipype/interfaces/afni/tests/test_auto_QwarpPlusMinus.py @@ 
-77,9 +77,9 @@ def test_QwarpPlusMinus_inputs(): new_name="in_file", ), verb=dict(argstr="-verb", xor=["quiet"],), - wball=dict(argstr="-wball %s",), + wball=dict(argstr="-wball %s", xor=["wmask"],), weight=dict(argstr="-weight %s", extensions=None,), - wmask=dict(argstr="-wpass %s %f",), + wmask=dict(argstr="-wpass %s %f", xor=["wball"],), workhard=dict(argstr="-workhard", xor=["boxopt", "ballopt"],), ) inputs = QwarpPlusMinus.input_spec() diff --git a/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py b/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py index 419c090958..eb0a7d48b0 100644 --- a/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py +++ b/nipype/interfaces/ants/tests/test_auto_ConvertScalarImageToRGB.py @@ -4,19 +4,19 @@ def test_ConvertScalarImageToRGB_inputs(): input_map = dict( - args=dict(argstr="%s"), - colormap=dict(argstr="%s", mandatory=True, position=4), - custom_color_map_file=dict(argstr="%s", position=5, usedefault=True), - dimension=dict(argstr="%d", mandatory=True, position=0, usedefault=True), - environ=dict(nohash=True, usedefault=True), - input_image=dict(argstr="%s", extensions=None, mandatory=True, position=1), - mask_image=dict(argstr="%s", extensions=None, position=3, usedefault=True), - maximum_RGB_output=dict(argstr="%d", position=9, usedefault=True), - maximum_input=dict(argstr="%d", mandatory=True, position=7), - minimum_RGB_output=dict(argstr="%d", position=8, usedefault=True), - minimum_input=dict(argstr="%d", mandatory=True, position=6), - num_threads=dict(nohash=True, usedefault=True), - output_image=dict(argstr="%s", position=2, usedefault=True), + args=dict(argstr="%s",), + colormap=dict(argstr="%s", mandatory=True, position=4,), + custom_color_map_file=dict(argstr="%s", position=5, usedefault=True,), + dimension=dict(argstr="%d", mandatory=True, position=0, usedefault=True,), + environ=dict(nohash=True, usedefault=True,), + input_image=dict(argstr="%s", extensions=None, mandatory=True, position=1,), + mask_image=dict(argstr="%s", extensions=None, position=3, usedefault=True,), + maximum_RGB_output=dict(argstr="%d", position=9, usedefault=True,), + maximum_input=dict(argstr="%d", mandatory=True, position=7,), + minimum_RGB_output=dict(argstr="%d", position=8, usedefault=True,), + minimum_input=dict(argstr="%d", mandatory=True, position=6,), + num_threads=dict(nohash=True, usedefault=True,), + output_image=dict(argstr="%s", position=2, usedefault=True,), ) inputs = ConvertScalarImageToRGB.input_spec() @@ -26,7 +26,7 @@ def test_ConvertScalarImageToRGB_inputs(): def test_ConvertScalarImageToRGB_outputs(): - output_map = dict(output_image=dict(extensions=None)) + output_map = dict(output_image=dict(extensions=None,),) outputs = ConvertScalarImageToRGB.output_spec() for key, metadata in list(output_map.items()): diff --git a/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py b/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py index 983296b4bd..517a223432 100644 --- a/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py +++ b/nipype/interfaces/freesurfer/tests/test_auto_SmoothTessellation.py @@ -4,25 +4,25 @@ def test_SmoothTessellation_inputs(): input_map = dict( - args=dict(argstr="%s"), - curvature_averaging_iterations=dict(argstr="-a %d"), - disable_estimates=dict(argstr="-nw"), - environ=dict(nohash=True, usedefault=True), - gaussian_curvature_norm_steps=dict(argstr="%d"), - gaussian_curvature_smoothing_steps=dict(argstr=" %d"), + args=dict(argstr="%s",), 
+ curvature_averaging_iterations=dict(argstr="-a %d",), + disable_estimates=dict(argstr="-nw",), + environ=dict(nohash=True, usedefault=True,), + gaussian_curvature_norm_steps=dict(argstr="%d",), + gaussian_curvature_smoothing_steps=dict(argstr=" %d",), in_file=dict( - argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2 + argstr="%s", copyfile=True, extensions=None, mandatory=True, position=-2, ), - normalize_area=dict(argstr="-area"), - out_area_file=dict(argstr="-b %s", extensions=None), - out_curvature_file=dict(argstr="-c %s", extensions=None), - out_file=dict(argstr="%s", extensions=None, genfile=True, position=-1), - seed=dict(argstr="-seed %d"), - smoothing_iterations=dict(argstr="-n %d"), - snapshot_writing_iterations=dict(argstr="-w %d"), + normalize_area=dict(argstr="-area",), + out_area_file=dict(argstr="-b %s", extensions=None,), + out_curvature_file=dict(argstr="-c %s", extensions=None,), + out_file=dict(argstr="%s", extensions=None, genfile=True, position=-1,), + seed=dict(argstr="-seed %d",), + smoothing_iterations=dict(argstr="-n %d",), + snapshot_writing_iterations=dict(argstr="-w %d",), subjects_dir=dict(), - use_gaussian_curvature_smoothing=dict(argstr="-g"), - use_momentum=dict(argstr="-m"), + use_gaussian_curvature_smoothing=dict(argstr="-g",), + use_momentum=dict(argstr="-m",), ) inputs = SmoothTessellation.input_spec() @@ -32,7 +32,7 @@ def test_SmoothTessellation_inputs(): def test_SmoothTessellation_outputs(): - output_map = dict(surface=dict(extensions=None)) + output_map = dict(surface=dict(extensions=None,),) outputs = SmoothTessellation.output_spec() for key, metadata in list(output_map.items()): From f5f7314a723a10ae87ae232e3cc36652c584a94f Mon Sep 17 00:00:00 2001 From: oesteban Date: Sun, 29 Dec 2019 17:59:12 -0800 Subject: [PATCH 12/48] maint: remove unused ``build_docs.py`` script --- MANIFEST.in | 1 - build_docs.py | 197 -------------------------------------------------- 2 files changed, 198 deletions(-) delete mode 100644 build_docs.py diff --git a/MANIFEST.in b/MANIFEST.in index 7986c197d1..f21701cba7 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,7 +4,6 @@ include MANIFEST.in include README include THANKS include Makefile -include build_docs.py include setup_egg.py include doc/documentation.zip include nipype/COMMIT_INFO.txt diff --git a/build_docs.py b/build_docs.py deleted file mode 100644 index c39a8da473..0000000000 --- a/build_docs.py +++ /dev/null @@ -1,197 +0,0 @@ -# -*- coding: utf-8 -*- -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -""" -Code to build the documentation in the setup.py - -To use this code, run:: - - python setup.py build_sphinx -""" -from __future__ import (print_function, division, unicode_literals, - absolute_import) -from builtins import open, str - -# Standard library imports -import sys -import os -from os.path import join as pjoin -import zipfile -import warnings -import shutil -from distutils.cmd import Command -from distutils.command.clean import clean - -_info_fname = pjoin(os.path.dirname(__file__), 'nipype', 'info.py') -INFO_VARS = {} -exec(str(open(_info_fname, 'rt').read()), {}, INFO_VARS) - -DOC_BUILD_DIR = os.path.join('doc', '_build', 'html') -DOC_DOCTREES_DIR = os.path.join('doc', '_build', 'doctrees') - -############################################################################### -# Distutils Command class for installing nipype to a temporary location. 
- - -class TempInstall(Command): - temp_install_dir = os.path.join('build', 'install') - - def run(self): - """ build and install nipype in a temporary location. """ - install = self.distribution.get_command_obj('install') - install.install_scripts = self.temp_install_dir - install.install_base = self.temp_install_dir - install.install_platlib = self.temp_install_dir - install.install_purelib = self.temp_install_dir - install.install_data = self.temp_install_dir - install.install_lib = self.temp_install_dir - install.install_headers = self.temp_install_dir - install.run() - - # Horrible trick to reload nipype with our temporary instal - for key in list(sys.modules.keys()): - if key.startswith('nipype'): - sys.modules.pop(key, None) - sys.path.append(os.path.abspath(self.temp_install_dir)) - # Pop the cwd - sys.path.pop(0) - import nipype - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - -############################################################################### -# Distutils Command class for API generation -class APIDocs(TempInstall): - description = \ - """generate API docs """ - - user_options = [ - ('None', None, 'this command has no options'), - ] - - def run(self): - # First build the project and install it to a temporary location. - TempInstall.run(self) - os.chdir('doc') - try: - # We are running the API-building script via an - # system call, but overriding the import path. - toolsdir = os.path.abspath(pjoin('..', 'tools')) - for docbuilder in ['build_interface_docs.py']: - build_templates = pjoin(toolsdir, docbuilder) - cmd = """%s -c 'import sys; sys.path.append("%s"); sys.path.append("%s"); execfile("%s", dict(__name__="__main__"))'""" \ - % (sys.executable, - toolsdir, - self.temp_install_dir, - build_templates) - os.system(cmd) - finally: - os.chdir('..') - - -############################################################################### -# Code to copy the sphinx-generated html docs in the distribution. -def relative_path(filename): - """ Return the relative path to the file, assuming the file is - in the DOC_BUILD_DIR directory. - """ - length = len(os.path.abspath(DOC_BUILD_DIR)) + 1 - return os.path.abspath(filename)[length:] - - -############################################################################### -# Distutils Command class build the docs -# Sphinx import. -try: - from sphinx.setup_command import BuildDoc -except: - MyBuildDoc = None -else: - class MyBuildDoc(BuildDoc): - """ Sub-class the standard sphinx documentation building system, to - add logics for API generation and matplotlib's plot directive. - """ - - def run(self): - self.run_command('api_docs') - # We need to be in the doc directory for to plot_directive - # and API generation to work - """ - os.chdir('doc') - try: - BuildDoc.run(self) - finally: - os.chdir('..') - """ - # It put's the build in a doc/doc/_build directory with the - # above?!?! I'm leaving the code above here but commented out - # in case I'm missing something? - BuildDoc.run(self) - self.zip_docs() - - def zip_docs(self): - if not os.path.exists(DOC_BUILD_DIR): - raise OSError('Doc directory does not exist.') - target_file = os.path.join('doc', 'documentation.zip') - # ZIP_DEFLATED actually compresses the archive. However, there - # will be a RuntimeError if zlib is not installed, so we check - # for it. ZIP_STORED produces an uncompressed zip, but does not - # require zlib. 
- try:
- zf = zipfile.ZipFile(target_file, 'w',
- compression=zipfile.ZIP_DEFLATED)
- except RuntimeError:
- warnings.warn('zlib not installed, storing the docs '
- 'without compression')
- zf = zipfile.ZipFile(target_file, 'w',
- compression=zipfile.ZIP_STORED)
-
- for root, dirs, files in os.walk(DOC_BUILD_DIR):
- relative = relative_path(root)
- if not relative.startswith('.doctrees'):
- for f in files:
- zf.write(os.path.join(root, f),
- os.path.join(relative, 'html_docs', f))
- zf.close()
-
- def finalize_options(self):
- """ Override the default for the documentation build
- directory.
- """
- self.build_dir = os.path.join(*DOC_BUILD_DIR.split(os.sep)[:-1])
- BuildDoc.finalize_options(self)
-
-###############################################################################
-# Distutils Command class to clean
-
-
-class Clean(clean):
-
- def run(self):
- clean.run(self)
- api_path = os.path.join('doc', 'api', 'generated')
- if os.path.exists(api_path):
- print("Removing %s" % api_path)
- shutil.rmtree(api_path)
- interface_path = os.path.join('doc', 'interfaces', 'generated')
- if os.path.exists(interface_path):
- print("Removing %s" % interface_path)
- shutil.rmtree(interface_path)
- if os.path.exists(DOC_BUILD_DIR):
- print("Removing %s" % DOC_BUILD_DIR)
- shutil.rmtree(DOC_BUILD_DIR)
- if os.path.exists(DOC_DOCTREES_DIR):
- print("Removing %s" % DOC_DOCTREES_DIR)
- shutil.rmtree(DOC_DOCTREES_DIR)
-
-
-# The command classes for distutils, used by the setup.py
-cmdclass = {'build_sphinx': MyBuildDoc,
- 'api_docs': APIDocs,
- 'clean': Clean,
- }

From 9967f257b4d6c3355c055408b9f8e65a3001743c Mon Sep 17 00:00:00 2001
From: oesteban 
Date: Sun, 29 Dec 2019 18:36:03 -0800
Subject: [PATCH 13/48] fix: small typos, pep8 errors, etc.

---
 doc/interfaces.rst | 2 +-
 examples/dmri_preprocessing.py | 2 +
 nipype/interfaces/camino/dti.py | 260 +++++++++++++++++++-------------
 3 files changed, 155 insertions(+), 109 deletions(-)

diff --git a/doc/interfaces.rst b/doc/interfaces.rst
index 177f08fca4..662eaf285c 100644
--- a/doc/interfaces.rst
+++ b/doc/interfaces.rst
@@ -15,7 +15,7 @@ Workflows
 Interfaces
 ----------
-An index of all nipype interfaces is found belows.
+An index of all nipype interfaces is found below.
 Nipype provides some *in-house* interfaces to help with workflow management tasks, basic image manipulations, and filesystem/storage interfaces:

diff --git a/examples/dmri_preprocessing.py b/examples/dmri_preprocessing.py
index 1537d2897f..9b0ad53be3 100644
--- a/examples/dmri_preprocessing.py
+++ b/examples/dmri_preprocessing.py
@@ -167,3 +167,5 @@
 NeuroImage 103:411--426. 2014.
 doi:`10.1016/j.neuroimage.2014.07.061
 <https://doi.org/10.1016/j.neuroimage.2014.07.061>`__.
+
+"""
diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py
index 7e74fe8ad6..0504def61e 100644
--- a/nipype/interfaces/camino/dti.py
+++ b/nipype/interfaces/camino/dti.py
@@ -47,7 +47,8 @@ class DTIFitInputSpec(StdOutCommandLineInputSpec):
 non_linear = traits.Bool(
 argstr="-nonlinear",
 position=3,
- desc="Use non-linear fitting instead of the default linear regression to the log measurements. ",
+ desc="Use non-linear fitting instead of the default linear regression "
+ "to the log measurements. ",
 )

@@ -57,25 +58,31 @@ class DTIFitOutputSpec(TraitedSpec):
 class DTIFit(StdOutCommandLine):
 """
- Reads diffusion MRI data, acquired using the acquisition scheme detailed in the scheme file, from the data file.
+ Reads diffusion MRI data, acquired using the acquisition scheme detailed in the scheme file,
+ from the data file. 
Use non-linear fitting instead of the default linear regression to the log measurements. - The data file stores the diffusion MRI data in voxel order with the measurements stored in big-endian format and ordered as in the scheme file. - The default input data type is four-byte float. The default output data type is eight-byte double. + The data file stores the diffusion MRI data in voxel order with the measurements stored + in big-endian format and ordered as in the scheme file. + The default input data type is four-byte float. + The default output data type is eight-byte double. See modelfit and camino for the format of the data file and scheme file. The program fits the diffusion tensor to each voxel and outputs the results, in voxel order and as big-endian eight-byte doubles, to the standard output. - The program outputs eight values in each voxel: [exit code, ln(S(0)), D_xx, D_xy, D_xz, D_yy, D_yz, D_zz]. - An exit code of zero indicates no problems. For a list of other exit codes, see modelfit(1). The entry S(0) is an estimate of the signal at q=0. + The program outputs eight values in each voxel: + [exit code, ln(S(0)), D_xx, D_xy, D_xz, D_yy, D_yz, D_zz]. + An exit code of zero indicates no problems. + For a list of other exit codes, see modelfit(1). + The entry S(0) is an estimate of the signal at q=0. Example ------- - >>> import nipype.interfaces.camino as cmon >>> fit = cmon.DTIFit() >>> fit.inputs.scheme_file = 'A.scheme' >>> fit.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> fit.run() # doctest: +SKIP + """ _cmd = "dtfit" @@ -115,10 +122,7 @@ class DTMetricInputSpec(CommandLineInputSpec): "cs", argstr="-stat %s", mandatory=True, - desc=( - "Specifies the metric to compute. Possible choices are: " - '"fa", "md", "rd", "l1", "l2", "l3", "tr", "ra", "2dfa", "cl", "cp" or "cs".' - ), + desc="Specifies the metric to compute.", ) inputdatatype = traits.Enum( @@ -130,12 +134,7 @@ class DTMetricInputSpec(CommandLineInputSpec): "char", argstr="-inputdatatype %s", usedefault=True, - desc=( - "Specifies the data type of the input data. " - "The data type can be any of the following strings: " - '"char", "short", "int", "long", "float" or "double".' - "Default is double data type" - ), + desc="Specifies the data type of the input data.", ) outputdatatype = traits.Enum( @@ -147,12 +146,7 @@ class DTMetricInputSpec(CommandLineInputSpec): "char", argstr="-outputdatatype %s", usedefault=True, - desc=( - "Specifies the data type of the output data. " - "The data type can be any of the following strings: " - '"char", "short", "int", "long", "float" or "double".' - "Default is double data type" - ), + desc="Specifies the data type of the output data.", ) data_header = File( @@ -215,6 +209,7 @@ class DTMetric(CommandLine): >>> dtmetric.inputs.metric = 'cp' >>> dtmetric.inputs.outputdatatype = 'float' >>> dtmetric.run() # doctest: +SKIP + """ _cmd = "dtshape" @@ -301,7 +296,9 @@ def _gen_model_options(): # @NoSelf "long", "double", argstr="-inputdatatype %s", - desc='Specifies the data type of the input file: "char", "short", "int", "long", "float" or "double". The input file must have BIG-ENDIAN ordering. By default, the input type is "float".', + desc="Specifies the data type of the input file. " + "The input file must have BIG-ENDIAN ordering. 
" + "By default, the input type is ``float``.", ) scheme_file = File( @@ -316,40 +313,54 @@ def _gen_model_options(): # @NoSelf outlier = File( argstr="-outliermap %s", exists=True, - desc="Specifies the name of the file to contain the outlier map generated by the RESTORE algorithm.", + desc="Specifies the name of the file to contain the outlier map generated by " + "the RESTORE algorithm.", ) noisemap = File( argstr="-noisemap %s", exists=True, - desc="Specifies the name of the file to contain the estimated noise variance on the diffusion-weighted signal, generated by a weighted tensor fit. The data type of this file is big-endian double.", + desc="Specifies the name of the file to contain the estimated noise variance on the " + "diffusion-weighted signal, generated by a weighted tensor fit. " + "The data type of this file is big-endian double.", ) residualmap = File( argstr="-residualmap %s", exists=True, - desc="Specifies the name of the file to contain the weighted residual errors after computing a weighted linear tensor fit. One value is produced per measurement, in voxel order.The data type of this file is big-endian double. Images of the residuals for each measurement can be extracted with shredder.", + desc="Specifies the name of the file to contain the weighted residual errors after " + "computing a weighted linear tensor fit. " + "One value is produced per measurement, in voxel order. " + "The data type of this file is big-endian double. " + "Images of the residuals for each measurement can be extracted with shredder.", ) sigma = traits.Float( argstr="-sigma %G", - desc="Specifies the standard deviation of the noise in the data. Required by the RESTORE algorithm.", + desc="Specifies the standard deviation of the noise in the data. " + "Required by the RESTORE algorithm.", ) bgthresh = traits.Float( argstr="-bgthresh %G", - desc="Sets a threshold on the average q=0 measurement to separate foreground and background. The program does not process background voxels, but outputs the same number of values in background voxels and foreground voxels. Each value is zero in background voxels apart from the exit code which is -1.", + desc="Sets a threshold on the average q=0 measurement to separate " + "foreground and background. The program does not process background voxels, " + "but outputs the same number of values in background voxels and foreground voxels. " + "Each value is zero in background voxels apart from the exit code which is -1.", ) bgmask = File( argstr="-bgmask %s", exists=True, - desc="Provides the name of a file containing a background mask computed using, for example, FSL's bet2 program. The mask file contains zero in background voxels and non-zero in foreground.", + desc="Provides the name of a file containing a background mask computed using, " + "for example, FSL's bet2 program. The mask file contains zero in background voxels " + "and non-zero in foreground.", ) cfthresh = traits.Float( argstr="-csfthresh %G", - desc="Sets a threshold on the average q=0 measurement to determine which voxels are CSF. This program does not treat CSF voxels any different to other voxels.", + desc="Sets a threshold on the average q=0 measurement to determine which voxels " + "are CSF. 
This program does not treat CSF voxels any different to other voxels.", ) fixedmodq = traits.List( @@ -357,7 +368,10 @@ def _gen_model_options(): # @NoSelf argstr="-fixedmod %s", minlen=4, maxlen=4, - desc="Specifies a spherical acquisition scheme with M measurements with q=0 and N measurements with :math:`|q|=Q` and diffusion time tau. The N measurements with :math:`|q|=Q` have unique directions. The program reads in the directions from the files in directory PointSets.", + desc="Specifies a spherical acquisition scheme with M measurements " + "with q=0 and N measurements with :math:`|q|=Q` and diffusion time tau. " + "The N measurements with :math:`|q|=Q` have unique directions. The program reads in " + "the directions from the files in directory PointSets.", ) fixedbvalue = traits.List( @@ -365,12 +379,15 @@ def _gen_model_options(): # @NoSelf argstr="-fixedbvalue %s", minlen=3, maxlen=3, - desc="As above, but specifies . The resulting scheme is the same whether you specify b directly or indirectly using -fixedmodq.", + desc="As above, but specifies . The resulting scheme is the same whether " + "you specify b directly or indirectly using -fixedmodq.", ) tau = traits.Float( argstr="-tau %G", - desc="Sets the diffusion time separately. This overrides the diffusion time specified in a scheme file or by a scheme index for both the acquisition scheme and in the data synthesis.", + desc="Sets the diffusion time separately. This overrides the diffusion time " + "specified in a scheme file or by a scheme index for both the acquisition scheme " + "and in the data synthesis.", ) @@ -390,13 +407,13 @@ class ModelFit(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> fit = cmon.ModelFit() >>> fit.model = 'dt' >>> fit.inputs.scheme_file = 'A.scheme' >>> fit.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> fit.run() # doctest: +SKIP + """ _cmd = "modelfit" @@ -417,9 +434,11 @@ class DTLUTGenInputSpec(StdOutCommandLineInputSpec): lrange = traits.List( traits.Float, desc="Index to one-tensor LUTs. This is the ratio L1/L3 and L2 / L3." - "The LUT is square, with half the values calculated (because L2 / L3 cannot be less than L1 / L3 by definition)." - "The minimum must be >= 1. For comparison, a ratio L1 / L3 = 10 with L2 / L3 = 1 corresponds to an FA of 0.891, " - "and L1 / L3 = 15 with L2 / L3 = 1 corresponds to an FA of 0.929. The default range is 1 to 10.", + "The LUT is square, with half the values calculated (because L2 / L3 cannot be " + "less than L1 / L3 by definition)." + "The minimum must be >= 1. For comparison, a ratio L1 / L3 = 10 with L2 / L3 = 1 " + "corresponds to an FA of 0.891, and L1 / L3 = 15 with L2 / L3 = 1 corresponds " + "to an FA of 0.929. The default range is 1 to 10.", argstr="-lrange %s", minlen=2, maxlen=2, @@ -450,7 +469,8 @@ class DTLUTGenInputSpec(StdOutCommandLineInputSpec): samples = traits.Int( argstr="-samples %d", units="NA", - desc="The number of synthetic measurements to generate at each point in the LUT. The default is 2000.", + desc="The number of synthetic measurements to generate at each point in the LUT. " + "The default is 2000.", ) snr = traits.Float( @@ -478,7 +498,8 @@ class DTLUTGenInputSpec(StdOutCommandLineInputSpec): trace = traits.Float( argstr="-trace %G", units="NA", - desc="Trace of the diffusion tensor(s) used in the test function in the LUT generation. The default is 2100E-12 m^2 s^-1.", + desc="Trace of the diffusion tensor(s) used in the test function in the LUT generation. 
" + "The default is 2100E-12 m^2 s^-1.", ) scheme_file = File( @@ -498,9 +519,11 @@ class DTLUTGen(StdOutCommandLine): Calibrates the PDFs for PICo probabilistic tractography. This program needs to be run once for every acquisition scheme. - It outputs a lookup table that is used by the dtpicoparams program to find PICo PDF parameters for an image. - The default single tensor LUT contains parameters of the Bingham distribution and is generated by supplying - a scheme file and an estimated signal to noise in white matter regions of the (q=0) image. + It outputs a lookup table that is used by the dtpicoparams program to find PICo PDF + parameters for an image. + The default single tensor LUT contains parameters of the Bingham distribution and is + generated by supplying a scheme file and an estimated signal to noise in white matter + regions of the (q=0) image. The default inversion is linear (inversion index 1). Advanced users can control several options, including the extent and resolution of the LUT, @@ -508,12 +531,12 @@ class DTLUTGen(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> dtl = cmon.DTLUTGen() >>> dtl.inputs.snr = 16 >>> dtl.inputs.scheme_file = 'A.scheme' >>> dtl.run() # doctest: +SKIP + """ _cmd = "dtlutgen" @@ -554,8 +577,10 @@ class PicoPDFsInputSpec(StdOutCommandLineInputSpec): argstr="-luts %s", mandatory=True, desc="Files containing the lookup tables." - "For tensor data, one lut must be specified for each type of inversion used in the image (one-tensor, two-tensor, three-tensor)." - "For pds, the number of LUTs must match -numpds (it is acceptable to use the same LUT several times - see example, above)." + "For tensor data, one lut must be specified for each type of inversion used in the " + "image (one-tensor, two-tensor, three-tensor)." + "For pds, the number of LUTs must match -numpds (it is acceptable to use the same " + "LUT several times - see example, above)." "These LUTs may be generated with dtlutgen.", ) @@ -565,23 +590,30 @@ class PicoPDFsInputSpec(StdOutCommandLineInputSpec): "acg", argstr="-pdf %s", position=4, - desc=" Specifies the PDF to use. There are three choices:" - "watson - The Watson distribution. This distribution is rotationally symmetric." - "bingham - The Bingham distributionn, which allows elliptical probability density contours." - "acg - The Angular Central Gaussian distribution, which also allows elliptical probability density contours", + desc="""\ +Specifies the PDF to use. There are three choices: + + * watson - The Watson distribution. This distribution is rotationally symmetric. + * bingham - The Bingham distributionn, which allows elliptical probability density contours. + * acg - The Angular Central Gaussian distribution, which also allows elliptical probability + density contours. + +""", usedefault=True, ) directmap = traits.Bool( argstr="-directmap", - desc="Only applicable when using pds as the inputmodel. Use direct mapping between the eigenvalues and the distribution parameters instead of the log of the eigenvalues.", + desc="Only applicable when using pds as the inputmodel. Use direct mapping between " + "the eigenvalues and the distribution parameters instead of the log of the eigenvalues.", ) maxcomponents = traits.Int( argstr="-maxcomponents %d", units="NA", desc="The maximum number of tensor components in a voxel (default 2) for multitensor data." 
- "Currently, only the default is supported, but future releases may allow the input of three-tensor data using this option.", + "Currently, only the default is supported, but future releases may allow the input " + "of three-tensor data using this option.", ) numpds = traits.Int( @@ -604,13 +636,13 @@ class PicoPDFs(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> pdf = cmon.PicoPDFs() >>> pdf.inputs.inputmodel = 'dt' >>> pdf.inputs.luts = ['lut_file'] >>> pdf.inputs.in_file = 'voxel-order_data.Bfloat' >>> pdf.run() # doctest: +SKIP + """ _cmd = "picopdfs" @@ -852,13 +884,13 @@ class Track(CommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> track = cmon.Track() >>> track.inputs.inputmodel = 'dt' >>> track.inputs.in_file = 'data.Bfloat' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ _cmd = "track" @@ -896,12 +928,12 @@ class TrackDT(Track): Example ------- - >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackDT() >>> track.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ def __init__(self, command=None, **inputs): @@ -927,16 +959,16 @@ class TrackPICoInputSpec(TrackInputSpec): class TrackPICo(Track): """ - Performs streamline tractography using the Probabilistic Index of Connectivity (PICo) algorithm + Performs streamline tractography using Probabilistic Index of Connectivity (PICo). Example ------- - >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackPICo() >>> track.inputs.in_file = 'pdfs.Bfloat' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ input_spec = TrackPICoInputSpec @@ -981,12 +1013,12 @@ class TrackBedpostxDeter(Track): Example ------- - >>> import nipype.interfaces.camino as cam >>> track = cam.TrackBedpostxDeter() >>> track.inputs.bedpostxdir = 'bedpostxout' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ input_spec = TrackBedpostxDeterInputSpec @@ -1017,9 +1049,7 @@ class TrackBedpostxProbaInputSpec(TrackInputSpec): iterations = traits.Int( argstr="-iterations %d", units="NA", - desc=( - "Number of streamlines to generate at each " "seed point. The default is 1." - ), + desc="Number of streamlines to generate at each " "seed point. The default is 1.", ) @@ -1043,13 +1073,13 @@ class TrackBedpostxProba(Track): Example ------- - >>> import nipype.interfaces.camino as cam >>> track = cam.TrackBedpostxProba() >>> track.inputs.bedpostxdir = 'bedpostxout' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.inputs.iterations = 100 >>> track.run() # doctest: +SKIP + """ input_spec = TrackBedpostxProbaInputSpec @@ -1081,35 +1111,52 @@ class TrackBayesDiracInputSpec(TrackInputSpec): "watson", "acg", argstr="-pdf %s", - desc='Specifies the model for PICo priors (not the curvature priors). The default is "bingham".', + desc="Specifies the model for PICo priors (not the curvature priors). " + "The default is 'bingham'.", ) pointset = traits.Int( argstr="-pointset %s", - desc="Index to the point set to use for Bayesian likelihood calculation. The index specifies a set of evenly distributed points on the unit sphere, where each point x defines two possible step directions (x or -x) for the streamline path. A larger number indexes a larger point set, which gives higher angular resolution at the expense of computation time. 
The default is index 1, which gives 1922 points, index 0 gives 1082 points, index 2 gives 3002 points.", + desc="""\ +Index to the point set to use for Bayesian likelihood calculation. The index +specifies a set of evenly distributed points on the unit sphere, where each point x +defines two possible step directions (x or -x) for the streamline path. A larger +number indexes a larger point set, which gives higher angular resolution at the +expense of computation time. The default is index 1, which gives 1922 points, index 0 +gives 1082 points, index 2 gives 3002 points.""", ) datamodel = traits.Enum( "cylsymmdt", "ballstick", argstr="-datamodel %s", - desc='Model of the data for Bayesian tracking. The default model is "cylsymmdt", a diffusion tensor with cylindrical symmetry about e_1, ie L1 >= L_2 = L_3. The other model is "ballstick", the partial volume model (see ballstickfit).', + desc="""\ +Model of the data for Bayesian tracking. The default model is "cylsymmdt", a diffusion +tensor with cylindrical symmetry about e_1, ie L1 >= L_2 = L_3. The other model is +"ballstick", the partial volume model (see ballstickfit).""", ) curvepriork = traits.Float( argstr="-curvepriork %G", - desc="Concentration parameter for the prior distribution on fibre orientations given the fibre orientation at the previous step. Larger values of k make curvature less likely.", + desc="""\ +Concentration parameter for the prior distribution on fibre orientations given the fibre +orientation at the previous step. Larger values of k make curvature less likely.""", ) curvepriorg = traits.Float( argstr="-curvepriorg %G", - desc="Concentration parameter for the prior distribution on fibre orientations given the fibre orientation at the previous step. Larger values of g make curvature less likely.", + desc="""\ +Concentration parameter for the prior distribution on fibre orientations given +the fibre orientation at the previous step. Larger values of g make curvature less likely.""", ) extpriorfile = File( exists=True, argstr="-extpriorfile %s", - desc="Path to a PICo image produced by picopdfs. The PDF in each voxel is used as a prior for the fibre orientation in Bayesian tracking. The prior image must be in the same space as the diffusion data.", + desc="""\ +Path to a PICo image produced by picopdfs. The PDF in each voxel is used as a prior for +the fibre orientation in Bayesian tracking. The prior image must be in the same space +as the diffusion data.""", ) extpriordatatype = traits.Enum( @@ -1122,7 +1169,7 @@ class TrackBayesDiracInputSpec(TrackInputSpec): class TrackBayesDirac(Track): """ - Performs streamline tractography using a Bayesian tracking with Dirac priors + Perform streamline tractography using a Bayesian tracking with Dirac priors. Example ------- @@ -1133,6 +1180,7 @@ class TrackBayesDirac(Track): >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.inputs.scheme_file = 'bvecs.scheme' >>> track.run() # doctest: +SKIP + """ input_spec = TrackBayesDiracInputSpec @@ -1148,12 +1196,12 @@ class TrackBallStick(Track): Example ------- - >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackBallStick() >>> track.inputs.in_file = 'ballstickfit_data.Bfloat' >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ def __init__(self, command=None, **inputs): @@ -1177,20 +1225,27 @@ class TrackBootstrapInputSpec(TrackInputSpec): inversion = traits.Int( argstr="-inversion %s", - desc="Tensor reconstruction algorithm for repetition bootstrapping. 
Default is 1 (linear reconstruction, single tensor).", + desc="""\ +Tensor reconstruction algorithm for repetition bootstrapping. +Default is 1 (linear reconstruction, single tensor).""", ) bsdatafiles = traits.List( File(exists=True), mandatory=True, argstr="-bsdatafile %s", - desc="Specifies files containing raw data for repetition bootstrapping. Use -inputfile for wild bootstrap data.", + desc="""\ +Specifies files containing raw data for repetition bootstrapping. +Use -inputfile for wild bootstrap data.""", ) bgmask = File( argstr="-bgmask %s", exists=True, - desc="Provides the name of a file containing a background mask computed using, for example, FSL's bet2 program. The mask file contains zero in background voxels and non-zero in foreground.", + desc="""\ +Provides the name of a file containing a background mask computed using, for example, +FSL's bet2 program. +The mask file contains zero in background voxels and non-zero in foreground.""", ) @@ -1200,7 +1255,6 @@ class TrackBootstrap(Track): Example ------- - >>> import nipype.interfaces.camino as cmon >>> track = cmon.TrackBootstrap() >>> track.inputs.inputmodel='repbs_dt' @@ -1208,6 +1262,7 @@ class TrackBootstrap(Track): >>> track.inputs.bsdatafiles = ['fitted_data1.Bfloat', 'fitted_data2.Bfloat'] >>> track.inputs.seed_file = 'seed_mask.nii' >>> track.run() # doctest: +SKIP + """ input_spec = TrackBootstrapInputSpec @@ -1239,10 +1294,10 @@ class ComputeMeanDiffusivityInputSpec(CommandLineInputSpec): "twotensor", "threetensor", argstr="-inputmodel %s", - desc="Specifies the model that the input tensor data contains parameters for." - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' - '"threetensor" (three-tensor data). By default, the program assumes that the input data ' - "contains a single diffusion tensor in each voxel.", + desc="""\ +Specifies the model that the input tensor data contains parameters for. +By default, the program assumes that the input data +contains a single diffusion tensor in each voxel.""", ) inputdatatype = traits.Enum( @@ -1253,8 +1308,7 @@ class ComputeMeanDiffusivityInputSpec(CommandLineInputSpec): "float", "double", argstr="-inputdatatype %s", - desc="Specifies the data type of the input file. The data type can be any of the" - 'following strings: "char", "short", "int", "long", "float" or "double".', + desc="Specifies the data type of the input file.", ) outputdatatype = traits.Enum( @@ -1265,8 +1319,7 @@ class ComputeMeanDiffusivityInputSpec(CommandLineInputSpec): "float", "double", argstr="-outputdatatype %s", - desc="Specifies the data type of the output data. The data type can be any of the" - 'following strings: "char", "short", "int", "long", "float" or "double".', + desc="Specifies the data type of the output data.", ) @@ -1280,12 +1333,12 @@ class ComputeMeanDiffusivity(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> md = cmon.ComputeMeanDiffusivity() >>> md.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> md.inputs.scheme_file = 'A.scheme' >>> md.run() # doctest: +SKIP + """ _cmd = "md" @@ -1324,10 +1377,10 @@ class ComputeFractionalAnisotropyInputSpec(StdOutCommandLineInputSpec): "threetensor", "multitensor", argstr="-inputmodel %s", - desc="Specifies the model that the input tensor data contains parameters for." - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' - '"threetensor" (three-tensor data). 
By default, the program assumes that the input data ' - "contains a single diffusion tensor in each voxel.", + desc="""\ +Specifies the model that the input tensor data contains parameters for. +By default, the program assumes that the input data +contains a single diffusion tensor in each voxel.""", ) inputdatatype = traits.Enum( @@ -1338,8 +1391,7 @@ class ComputeFractionalAnisotropyInputSpec(StdOutCommandLineInputSpec): "float", "double", argstr="-inputdatatype %s", - desc="Specifies the data type of the input file. The data type can be any of the" - 'following strings: "char", "short", "int", "long", "float" or "double".', + desc="Specifies the data type of the input file.", ) outputdatatype = traits.Enum( @@ -1350,8 +1402,7 @@ class ComputeFractionalAnisotropyInputSpec(StdOutCommandLineInputSpec): "float", "double", argstr="-outputdatatype %s", - desc="Specifies the data type of the output data. The data type can be any of the" - 'following strings: "char", "short", "int", "long", "float" or "double".', + desc="Specifies the data type of the output data.", ) @@ -1371,12 +1422,12 @@ class ComputeFractionalAnisotropy(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> fa = cmon.ComputeFractionalAnisotropy() >>> fa.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> fa.inputs.scheme_file = 'A.scheme' >>> fa.run() # doctest: +SKIP + """ _cmd = "fa" @@ -1415,10 +1466,10 @@ class ComputeTensorTraceInputSpec(StdOutCommandLineInputSpec): "threetensor", "multitensor", argstr="-inputmodel %s", - desc="Specifies the model that the input tensor data contains parameters for." - 'Possible model types are: "dt" (diffusion-tensor data), "twotensor" (two-tensor data), ' - '"threetensor" (three-tensor data). By default, the program assumes that the input data ' - "contains a single diffusion tensor in each voxel.", + desc="""\ +Specifies the model that the input tensor data contains parameters for. +By default, the program assumes that the input data +contains a single diffusion tensor in each voxel.""", ) inputdatatype = traits.Enum( @@ -1429,8 +1480,7 @@ class ComputeTensorTraceInputSpec(StdOutCommandLineInputSpec): "float", "double", argstr="-inputdatatype %s", - desc="Specifies the data type of the input file. The data type can be any of the" - 'following strings: "char", "short", "int", "long", "float" or "double".', + desc="Specifies the data type of the input file.", ) outputdatatype = traits.Enum( @@ -1441,8 +1491,7 @@ class ComputeTensorTraceInputSpec(StdOutCommandLineInputSpec): "float", "double", argstr="-outputdatatype %s", - desc="Specifies the data type of the output data. The data type can be any of the" - 'following strings: "char", "short", "int", "long", "float" or "double".', + desc="Specifies the data type of the output data.", ) @@ -1464,12 +1513,12 @@ class ComputeTensorTrace(StdOutCommandLine): Example ------- - >>> import nipype.interfaces.camino as cmon >>> trace = cmon.ComputeTensorTrace() >>> trace.inputs.in_file = 'tensor_fitted_data.Bdouble' >>> trace.inputs.scheme_file = 'A.scheme' >>> trace.run() # doctest: +SKIP + """ _cmd = "trd" @@ -1499,7 +1548,7 @@ class ComputeEigensystemInputSpec(StdOutCommandLineInputSpec): "dt", "multitensor", argstr="-inputmodel %s", - desc='Specifies the model that the input data contains parameters for. 
Possible model types are: "dt" (diffusion-tensor data) and "multitensor"', + desc="Specifies the model that the input data contains parameters for", ) maxcomponents = traits.Int( @@ -1533,12 +1582,7 @@ class ComputeEigensystemInputSpec(StdOutCommandLineInputSpec): "char", argstr="-outputdatatype %s", usedefault=True, - desc=( - "Specifies the data type of the output data. " - "The data type can be any of the following strings: " - '"char", "short", "int", "long", "float" or "double".' - "Default is double data type" - ), + desc="Specifies the data type of the output data.", ) From 45a6bfce8b94cfc2631a3e53095e15983f354642 Mon Sep 17 00:00:00 2001 From: Hrvoje Stojic Date: Mon, 23 Dec 2019 08:04:39 +0000 Subject: [PATCH 14/48] Update .zenodo.json - added Hrvoje Stojic --- .zenodo.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.zenodo.json b/.zenodo.json index ed94d94ab3..57605f0503 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -514,6 +514,11 @@ { "name": "Blair, Ross" }, + { + "affiliation": "Max Planck UCL Centre for Computational Psychiatry and Ageing Research, University College London", + "name": "Stojic, Hrvoje", + "orcid": "0000-0002-9699-9052" + }, { "affiliation": "The University of Texas at Austin", "name": "Floren, Andrew", From 7c90bd8ef203e48b1b2bdbb26af7b9629a01a6bf Mon Sep 17 00:00:00 2001 From: "Christopher J. Markiewicz" Date: Mon, 30 Dec 2019 16:06:27 -0500 Subject: [PATCH 15/48] MNT: Update mailmap --- .mailmap | 1 + 1 file changed, 1 insertion(+) diff --git a/.mailmap b/.mailmap index a5126e69c1..b96d00f538 100644 --- a/.mailmap +++ b/.mailmap @@ -67,6 +67,7 @@ Gio Piantoni Guillaume Flandin Hans Johnson Horea Christian +Hrvoje Stojic Isaac Schwabacher Jakub Kaczmarzyk James Kent From d7bef76d4f5c19c332e19d059dd06935b3bd3325 Mon Sep 17 00:00:00 2001 From: BenjaminMey Date: Fri, 20 Dec 2019 11:49:28 -0500 Subject: [PATCH 16/48] change ANTS number_of_time_steps from Float to Int --- nipype/interfaces/ants/registration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 3e74b59924..cb8e363c04 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -87,7 +87,7 @@ class ANTSInputSpec(ANTSCommandInputSpec): desc="", ) gradient_step_length = traits.Float(requires=["transformation_model"], desc="") - number_of_time_steps = traits.Float(requires=["gradient_step_length"], desc="") + number_of_time_steps = traits.Int(requires=["gradient_step_length"], desc="") delta_time = traits.Float(requires=["number_of_time_steps"], desc="") symmetry_type = traits.Float(requires=["delta_time"], desc="") From 218a63de16f86e81de197f95df9f025dd1e7735c Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 31 Dec 2019 16:41:27 -0800 Subject: [PATCH 17/48] Apply suggestions from code review Co-Authored-By: Chris Markiewicz --- doc/interfaces.rst | 4 ++-- examples/fmri_spm_auditory.py | 2 +- nipype/algorithms/modelgen.py | 1 + nipype/interfaces/base/core.py | 4 ++-- nipype/interfaces/base/support.py | 4 ++-- nipype/interfaces/cmtk/cmtk.py | 8 ++++---- nipype/interfaces/cmtk/parcellation.py | 8 ++++---- nipype/interfaces/diffusion_toolkit/dti.py | 4 ++-- nipype/interfaces/diffusion_toolkit/odf.py | 2 +- 9 files changed, 19 insertions(+), 18 deletions(-) diff --git a/doc/interfaces.rst b/doc/interfaces.rst index 662eaf285c..e3c5299a54 100644 --- a/doc/interfaces.rst +++ b/doc/interfaces.rst @@ -20,7 +20,7 @@ Nipype provides some *in-house* 
interfaces to help with workflow management tasks, basic image manipulations, and filesystem/storage interfaces: - * `"Algorithms" `__ + * `Algorithms `__ * `Image manipulation `__ * `I/O Operations `__ * `Self-reporting interfaces `__ @@ -127,4 +127,4 @@ Index of Interfaces :maxdepth: 3 api/generated/nipype.algorithms - api/generated/nipype.interfaces \ No newline at end of file + api/generated/nipype.interfaces diff --git a/examples/fmri_spm_auditory.py b/examples/fmri_spm_auditory.py index 178deb42b4..158f4d7aa2 100755 --- a/examples/fmri_spm_auditory.py +++ b/examples/fmri_spm_auditory.py @@ -327,7 +327,7 @@ def makelist(item): pipeline. Thus for this pipeline there will be subject specific sub-directories. -The :func:`nipype.pipeline.engine.Pipeline.connect` function creates the +The :func:`nipype.pipeline.engine.Workflow.connect` function creates the links between the processes, i.e., how data should flow in and out of the processing nodes. """ diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py index e0e9530ff4..f6e73afeeb 100644 --- a/nipype/algorithms/modelgen.py +++ b/nipype/algorithms/modelgen.py @@ -55,6 +55,7 @@ def spm_hrf(RT, P=None, fMRI_T=16): python implementation of spm_hrf See ``spm_hrf`` for implementation details:: + % RT - scan repeat time % p - parameters of the response function (two gamma % functions) diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py index a0e7325580..78b6584cd8 100644 --- a/nipype/interfaces/base/core.py +++ b/nipype/interfaces/base/core.py @@ -143,7 +143,7 @@ class BaseInterface(Interface): Attributes ---------- - input_spec: HasTraits + input_spec: :obj:`nipype.interfaces.base.spec.TraitedSpec` points to the traited class for the inputs output_spec: HasTraits points to the traited class for the outputs @@ -326,7 +326,7 @@ def run(self, cwd=None, ignore_exception=None, **inputs): Returns ------- - results : :obj:`InterfaceResult` + results : :obj:`nipype.interfaces.base.core.InterfaceResult` A copy of the instance that was executed, provenance information and, if successful, results diff --git a/nipype/interfaces/base/support.py b/nipype/interfaces/base/support.py index 4b01754be0..80835604a0 100644 --- a/nipype/interfaces/base/support.py +++ b/nipype/interfaces/base/support.py @@ -33,9 +33,9 @@ def __str__(self): class Bunch(object): """ - Dictionary-like class that provides attribute-style access to it's items. + Dictionary-like class that provides attribute-style access to its items. - A ``Bunch`` is a simple container that stores it's items as class + A ``Bunch`` is a simple container that stores its items as class attributes [1]_. Internally all items are stored in a dictionary and the class exposes several of the dictionary methods. 
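As a rough usage sketch of the ``Bunch`` semantics described above (assuming
``nipype.interfaces.base.Bunch``; the attribute names are only illustrative)::

    from nipype.interfaces.base import Bunch

    # Items are stored in an internal dictionary but read back as attributes.
    events = Bunch(conditions=["task"], onsets=[[0, 30, 60]], durations=[[15]])
    assert events.conditions == ["task"]
    # Several dictionary methods remain exposed.
    assert ("conditions", ["task"]) in events.items()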
diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index 9c2e5815e0..2444aa2d56 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -137,16 +137,16 @@ def create_endpoints_array(fib, voxelSize): Parameters ---------- - fib : + fib : array-like the fibers data - voxelSize: + voxelSize : tuple 3-tuple containing the voxel size of the ROI image Returns ------- - endpoints : matrix of size [#fibers, 2, 3] + endpoints : ndarray of size [#fibers, 2, 3] containing for each fiber the ndex of its first and last point in the voxelSize volume - endpointsmm : matrix of size [#fibers, 2, 3] + endpointsmm : ndarray of size [#fibers, 2, 3] endpoints in milimeter coordinates """ diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py index 4868ff3df4..1cfc95dd31 100644 --- a/nipype/interfaces/cmtk/parcellation.py +++ b/nipype/interfaces/cmtk/parcellation.py @@ -665,18 +665,18 @@ def extract(Z, shape, position, fill): Parameters ---------- - Z : + Z : array-like the original data - shape : + shape : tuple tuple containing neighbourhood dimensions position : tuple containing central point indexes - fill : + fill : float value for the padding of Z Returns ------- - R : + R : ndarray the neighbourhood of the specified point in Z """ diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index fe60cf5281..f5715fb443 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -190,8 +190,8 @@ class DTITrackerInputSpec(CommandLineInputSpec): desc="""\ Tracking algorithm. -* fact -> use FACT method for tracking. this is the default method. -* rk2 -> use 2nd order runge-kutta method for tracking. +* fact -> use FACT method for tracking. This is the default method. +* rk2 -> use 2nd order Runge-Kutta method for tracking. * tl -> use tensorline method for tracking. * sl -> use interpolated streamline method with fixed step-length diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index f25f975bd4..b3d6574099 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -264,7 +264,7 @@ class ODFTrackerInputSpec(CommandLineInputSpec): runge_kutta2 = traits.Bool( argstr="-rk2", desc="""\ -use 2nd order runge-kutta method for tracking. +use 2nd order Runge-Kutta method for tracking. default tracking method is non-interpolate streamline""", ) step_length = traits.Float( From 9224a6f59c57103a97b0727abe07efe69c23bb37 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 31 Dec 2019 16:49:35 -0800 Subject: [PATCH 18/48] Apply suggestions from code review Co-Authored-By: Chris Markiewicz --- doc/examples.rst | 3 +++ doc/interfaces.rst | 3 +++ examples/fmri_spm_auditory.py | 2 +- nipype/interfaces/cmtk/parcellation.py | 2 +- 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/doc/examples.rst b/doc/examples.rst index 5b645fcace..ec16ad372a 100644 --- a/doc/examples.rst +++ b/doc/examples.rst @@ -5,6 +5,9 @@ ======================= User Guide and Examples ======================= +The following examples are `literate programming `__ +documents which solve specific problems using Nipype. +In some cases, the same problem is solved with multiple underlying tools. .. 
admonition:: Michael Notter's User Guide diff --git a/doc/interfaces.rst b/doc/interfaces.rst index e3c5299a54..1c66363eea 100644 --- a/doc/interfaces.rst +++ b/doc/interfaces.rst @@ -12,6 +12,9 @@ Workflows The workflows that used to live as a module under ``nipype.workflows`` have been migrated to the new project `NiFlows `__. + These may be installed with the + `niflow-nipype1-examples `__ + package, but their continued use is discouraged. Interfaces ---------- diff --git a/examples/fmri_spm_auditory.py b/examples/fmri_spm_auditory.py index 158f4d7aa2..00cc5f6b1f 100755 --- a/examples/fmri_spm_auditory.py +++ b/examples/fmri_spm_auditory.py @@ -391,7 +391,7 @@ def getstripdir(subject_id): The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the -analysis on the data the :func:`nipype.pipeline.engine.Workflow.run` +analysis on the data the :func:`nipype.pipeline.engine.workflows.Workflow.run` function needs to be called. """ diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py index 1cfc95dd31..77e22bd6f7 100644 --- a/nipype/interfaces/cmtk/parcellation.py +++ b/nipype/interfaces/cmtk/parcellation.py @@ -669,7 +669,7 @@ def extract(Z, shape, position, fill): the original data shape : tuple tuple containing neighbourhood dimensions - position : + position : tuple tuple containing central point indexes fill : float value for the padding of Z From be8d9a923a231f63d9dba4784c1575bc00f74272 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 31 Dec 2019 17:02:53 -0800 Subject: [PATCH 19/48] fix: minimal fixes in ordering and links --- doc/examples.rst | 7 ++++--- nipype/interfaces/base/core.py | 4 ++-- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/doc/examples.rst b/doc/examples.rst index ec16ad372a..c8299bf6d1 100644 --- a/doc/examples.rst +++ b/doc/examples.rst @@ -5,9 +5,6 @@ ======================= User Guide and Examples ======================= -The following examples are `literate programming `__ -documents which solve specific problems using Nipype. -In some cases, the same problem is solved with multiple underlying tools. .. admonition:: Michael Notter's User Guide @@ -15,6 +12,10 @@ In some cases, the same problem is solved with multiple underlying tools. Examples ~~~~~~~~ +The following examples are `literate programming `__ +documents which solve specific problems using Nipype. +In some cases, the same problem is solved with multiple underlying tools. + .. 
toctree:: :maxdepth: 1 :glob: diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py index 78b6584cd8..d282f8fe99 100644 --- a/nipype/interfaces/base/core.py +++ b/nipype/interfaces/base/core.py @@ -143,7 +143,7 @@ class BaseInterface(Interface): Attributes ---------- - input_spec: :obj:`nipype.interfaces.base.spec.TraitedSpec` + input_spec: :obj:`nipype.interfaces.base.specs.TraitedSpec` points to the traited class for the inputs output_spec: HasTraits points to the traited class for the outputs @@ -326,7 +326,7 @@ def run(self, cwd=None, ignore_exception=None, **inputs): Returns ------- - results : :obj:`nipype.interfaces.base.core.InterfaceResult` + results : :obj:`nipype.interfaces.base.support.InterfaceResult` A copy of the instance that was executed, provenance information and, if successful, results From be900d107cd3de27fd1a9cee10d91be440b5ac97 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Tue, 31 Dec 2019 17:33:18 -0800 Subject: [PATCH 20/48] fix: links to interface bases when showing inheritance --- nipype/sphinxext/documenter.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/nipype/sphinxext/documenter.py b/nipype/sphinxext/documenter.py index de84fd9412..5300d81165 100644 --- a/nipype/sphinxext/documenter.py +++ b/nipype/sphinxext/documenter.py @@ -60,14 +60,24 @@ def add_directive_header(self, sig: str) -> None: if not self.doc_as_attr and self.options.show_inheritance: sourcename = self.get_sourcename() self.add_line("", sourcename) - if hasattr(self.object, "__bases__") and len(self.object.__bases__): - bases = [ - ":class:`%s`" % b.__name__ - if b.__module__ in ("__builtin__", "builtins") - else ":class:`%s.%s`" % (b.__module__, b.__name__) - for b in self.object.__bases__ - ] - self.add_line(" " + _("Bases: %s") % ", ".join(bases), sourcename) + bases = getattr(self.object, "__bases__", []) + bases_links = [] + + for b in bases: + based_interface = False + try: + based_interface = issubclass(b, BaseInterface) + except TypeError: + pass + + if b.__module__ in ("__builtin__", "builtins"): + bases_links.append(":class:`%s`" % b.__name__) + elif based_interface: + bases_links.append(":ref:`%s.%s`" % (b.__module__, b.__name__)) + else: + bases_links.append(":class:`%s.%s`" % (b.__module__, b.__name__)) + + self.add_line(" " + _("Bases: %s") % ", ".join(bases_links), sourcename) def setup(app): From 8b5dab18fea4f70daf7c87d9c3159ad926e41daa Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 1 Jan 2020 08:01:41 -0800 Subject: [PATCH 21/48] Apply suggestions from code review Co-Authored-By: Chris Markiewicz --- examples/fmri_spm_auditory.py | 10 +++++----- nipype/algorithms/confounds.py | 4 ++-- nipype/interfaces/afni/preprocess.py | 2 +- nipype/interfaces/base/core.py | 2 +- nipype/interfaces/cmtk/cmtk.py | 4 ++-- nipype/interfaces/nilearn.py | 2 +- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/examples/fmri_spm_auditory.py b/examples/fmri_spm_auditory.py index 00cc5f6b1f..8a13a8a4c3 100755 --- a/examples/fmri_spm_auditory.py +++ b/examples/fmri_spm_auditory.py @@ -148,7 +148,7 @@ def get_vox_dims(volume): l1analysis = pe.Workflow(name='analysis') """Generate SPM-specific design information using -:ref:`nipype.algorithms.modelgen.SpecifyModel`. +:ref:`nipype.algorithms.modelgen.SpecifySPMModel`. 
""" modelspec = pe.Node(interface=model.SpecifySPMModel(), name="modelspec") @@ -273,7 +273,7 @@ def makelist(item): ----------------------------------------- Here we create a structure that provides information about the experimental paradigm. This is used by the -:ref:`nipype.algorithms.modelgen.SpecifyModel` +:ref:`nipype.algorithms.modelgen.SpecifySPMModel` to create the information necessary to generate an SPM design matrix. """ @@ -327,7 +327,7 @@ def makelist(item): pipeline. Thus for this pipeline there will be subject specific sub-directories. -The :func:`nipype.pipeline.engine.Workflow.connect` function creates the +The :func:`~nipype.pipeline.engine.workflows.Workflow.connect` method creates the links between the processes, i.e., how data should flow in and out of the processing nodes. """ @@ -391,8 +391,8 @@ def getstripdir(subject_id): The code discussed above sets up all the necessary data structures with appropriate parameters and the connectivity between the processes, but does not generate any output. To actually run the -analysis on the data the :func:`nipype.pipeline.engine.workflows.Workflow.run` -function needs to be called. +analysis on the data the :func:`~nipype.pipeline.engine.workflows.Workflow.run` +method needs to be called. """ if __name__ == '__main__': diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 251d196d3f..40704f2e6f 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -654,10 +654,10 @@ def _run_interface(self, runtime): else: components_criterion = 6 IFLOGGER.warning( - "``num_components`` and ``variance_threshold`` are " + "`num_components` and `variance_threshold` are " "not defined. Setting number of components to 6 " "for backward compatibility. Please set either " - "``num_components`` or ``variance_threshold``, as " + "`num_components` or `variance_threshold`, as " "this feature may be deprecated in the future." 
) diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index 189a7f0c03..777596efd1 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -3145,7 +3145,7 @@ class TSmoothInputSpec(AFNICommandInputSpec): argstr="-osf", ) lin3 = traits.Int( - desc=r"3 point linear filter: :math:`0.5\,(1-m)\,a + m\,b + 0.5\,(1-m)\,c" + desc=r"3 point linear filter: :math:`0.5\,(1-m)\,a + m\,b + 0.5\,(1-m)\,c`" "Here, 'm' is a number strictly between 0 and 1.", argstr="-3lin %d", ) diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py index d282f8fe99..6c11084032 100644 --- a/nipype/interfaces/base/core.py +++ b/nipype/interfaces/base/core.py @@ -145,7 +145,7 @@ class BaseInterface(Interface): ---------- input_spec: :obj:`nipype.interfaces.base.specs.TraitedSpec` points to the traited class for the inputs - output_spec: HasTraits + output_spec: :obj:`nipype.interfaces.base.specs.TraitedSpec` points to the traited class for the outputs _redirect_x: bool should be set to ``True`` when the interface requires diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index 2444aa2d56..6587dcc291 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -145,9 +145,9 @@ def create_endpoints_array(fib, voxelSize): Returns ------- endpoints : ndarray of size [#fibers, 2, 3] - containing for each fiber the ndex of its first and last point in the voxelSize volume + containing for each fiber the index of its first and last point in the voxelSize volume endpointsmm : ndarray of size [#fibers, 2, 3] - endpoints in milimeter coordinates + endpoints in millimeter coordinates """ # Init diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py index 68f88b51e7..55f6982213 100644 --- a/nipype/interfaces/nilearn.py +++ b/nipype/interfaces/nilearn.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Nilearn is a Python module for fast and easy statistical learning on NeuroImaging data.""" +"""Nilearn is a Python library for fast and easy statistical learning on NeuroImaging data.""" import os import numpy as np From 46d8f2b723708a439e01fa6eb3362cb60e4f8854 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 1 Jan 2020 08:09:28 -0800 Subject: [PATCH 22/48] Apply suggestions from code review Co-Authored-By: Chris Markiewicz --- nipype/interfaces/spm/model.py | 4 ++-- nipype/interfaces/spm/preprocess.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index ffcef6801e..045051af6e 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -70,7 +70,7 @@ class Level1DesignInputSpec(SPMCommandInputSpec): * hrf * derivs -- (2-element list) Model HRF Derivatives. 
No derivatives: [0,0], - Time derivatives : [1,0], Time and Dispersion derivatives: [1,1] + Time derivatives : [1,0], Time and Dispersion derivatives: [1,1] * fourier, fourier_han, gamma, or fir: @@ -500,7 +500,7 @@ def _make_matlab_command(self, _): if contrast.sessions: for sno, sw in enumerate(contrast.sessions): script += "sidx = find(condsess(idx)==%d);\n" % (sno + 1) - script += ("consess{%d}.tcon.convec(idx(sidx)) = %f;\n") % ( + script += "consess{%d}.tcon.convec(idx(sidx)) = %f;\n" % ( i + 1, sw * contrast.weights[c0], ) diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index d434709062..c9bba24c1a 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -207,7 +207,7 @@ class FieldMap(SPMCommand): .. important:: - This interface does not deal with real/imag magnitude images neither + This interface does not deal with real/imag magnitude images nor with the two phase files case. Examples From 022dea5b541b4ee8ab1fa882b2c2e07d8fffc93b Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 1 Jan 2020 10:02:58 -0800 Subject: [PATCH 23/48] Apply suggestions from code review Co-Authored-By: Chris Markiewicz --- nipype/algorithms/confounds.py | 2 +- nipype/interfaces/afni/preprocess.py | 4 ++-- nipype/interfaces/afni/utils.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 40704f2e6f..4a24ab8224 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -418,7 +418,7 @@ class CompCorInputSpec(BaseInterfaceInputSpec): low=0, xor=["merge_method"], requires=["mask_files"], - desc=("Position of mask in ``mask_files`` to use - " "first is the default."), + desc="Position of mask in ``mask_files`` to use - first is the default.", ) mask_names = traits.List( traits.Str, diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index 777596efd1..f6d3f7c334 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -3135,7 +3135,7 @@ class TSmoothInputSpec(AFNICommandInputSpec): ) lin = traits.Bool( desc=r"3 point linear filter: :math:`0.15\,a + 0.70\,b + 0.15\,c`" - "[This is the default smoother]", + " [This is the default smoother]", argstr="-lin", ) med = traits.Bool(desc="3 point median filter: median(a,b,c)", argstr="-med") @@ -3146,7 +3146,7 @@ class TSmoothInputSpec(AFNICommandInputSpec): ) lin3 = traits.Int( desc=r"3 point linear filter: :math:`0.5\,(1-m)\,a + m\,b + 0.5\,(1-m)\,c`" - "Here, 'm' is a number strictly between 0 and 1.", + " Here, 'm' is a number strictly between 0 and 1.", argstr="-3lin %d", ) hamming = traits.Int( diff --git a/nipype/interfaces/afni/utils.py b/nipype/interfaces/afni/utils.py index 9eb0031a37..8ece6a8765 100644 --- a/nipype/interfaces/afni/utils.py +++ b/nipype/interfaces/afni/utils.py @@ -267,7 +267,7 @@ class BrickStatInputSpec(CommandLineInputSpec): traits.Float, traits.Float, desc="p0 ps p1 write the percentile values starting " - "at p0\\% and ending at p1\\% at a step of ps%. " + "at p0% and ending at p1% at a step of ps%. 
" "only one sub-brick is accepted.", argstr="-percentile %.3f %.3f %.3f", ) From ff7dd7d3f2cb45e946df1082183bd46d95095d2c Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 1 Jan 2020 10:05:16 -0800 Subject: [PATCH 24/48] Update doc/interfaces.rst --- doc/interfaces.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/interfaces.rst b/doc/interfaces.rst index 1c66363eea..ff489df51a 100644 --- a/doc/interfaces.rst +++ b/doc/interfaces.rst @@ -97,7 +97,7 @@ Nipype provides interfaces for the following **third-party** tools: `NiftySeg `__ contains programs to perform EM based segmentation of images in NIfTI or Analyze format. * `NiLearn `__ - is a Python module for fast and easy statistical learning on NeuroImaging data. + is a Python library for fast and easy statistical learning on NeuroImaging data. * `NiPy `__ is a Python project for analysis of structural and functional neuroimaging data. * `Nitime `__ From da67d7150e4ae406045610409b6f7a97d9f76570 Mon Sep 17 00:00:00 2001 From: Oscar Esteban Date: Wed, 1 Jan 2020 10:26:56 -0800 Subject: [PATCH 25/48] fix: typos and links in fmri auditory example --- examples/fmri_spm_auditory.py | 53 +++++++++++++++++++---------------- 1 file changed, 29 insertions(+), 24 deletions(-) diff --git a/examples/fmri_spm_auditory.py b/examples/fmri_spm_auditory.py index 8a13a8a4c3..86acaad101 100755 --- a/examples/fmri_spm_auditory.py +++ b/examples/fmri_spm_auditory.py @@ -33,15 +33,15 @@ """ -# Set the way matlab should be called +# Set the way Matlab should be called mlab.MatlabCommand.set_default_matlab_cmd("matlab -nodesktop -nosplash") """ Setting up workflows -------------------- -In this tutorial we will be setting up a hierarchical workflow for spm -analysis. This will demonstrate how pre-defined workflows can be setup +In this tutorial we will be setting up a hierarchical workflow for SPM +analysis. This will demonstrate how predefined workflows can be setup and shared across users, projects and labs. Setup preprocessing workflow @@ -51,11 +51,11 @@ """ preproc = pe.Workflow(name='preproc') -"""We strongly encourage to use 4D files insteead of series of 3D for fMRI analyses +"""We strongly encourage to use 4D files instead of series of 3D for fMRI analyses for many reasons (cleanness and saving and filesystem inodes are among them). However, the the workflow presented in the SPM8 manual which this tutorial is based on uses 3D files. Therefore we leave converting to 4D as an option. We are using ``merge_to_4d`` -variable, because switching between 3d and 4d requires some additional steps (explauned later on). +variable, because switching between 3D and 4dD requires some additional steps (explained later on). Use :ref:`nipype.interfaces.fsl.utils.Merge` to merge a series of 3D files along the time dimension creating a 4D file. """ @@ -119,8 +119,8 @@ def get_vox_dims(volume): """Here we are connecting all the nodes together. Notice that we add the merge node only if you choose to use 4D. -Also ``get_vox_dims`` function is passed along the input volume of normalise to set the optimal -voxel sizes. +Also, the ``get_vox_dims`` function is passed along the input volume of +:ref:`nipype.interfaces.spm.preprocess.Normalize` to set the optimal voxel sizes. 
""" if merge_to_4d: @@ -186,8 +186,8 @@ def get_vox_dims(volume): ('spmT_images', 'stat_image')]), ]) """ -Preproc + Analysis pipeline ---------------------------- +Preprocessing and analysis pipeline +----------------------------------- """ l1pipeline = pe.Workflow(name='firstlevel') @@ -196,7 +196,7 @@ def get_vox_dims(volume): 'modelspec.realignment_parameters')])]) """ -Pluging in ``functional_runs`` is a bit more complicated, +Plugging in ``functional_runs`` is a bit more complicated, because model spec expects a list of ``runs``. Every run can be a 4D file or a list of 3D files. Therefore for 3D analysis we need a list of lists and to make one we need a helper function. @@ -253,10 +253,7 @@ def makelist(item): """ Now we create a :ref:`nipype.interfaces.io.DataGrabber` -object and fill in the information from above about the layout of our data. The -:class:`nipype.pipeline.NodeWrapper` module wraps the interface object -and provides additional housekeeping and pipeline specific -functionality. +object and fill in the information from above about the layout of our data. """ datasource = pe.Node( @@ -318,18 +315,26 @@ def makelist(item): setup the connections between the nodes such that appropriate outputs from nodes are piped into appropriate inputs of other nodes. -Use the :class:`nipype.pipeline.engine.Pipeline` to create a -graph-based execution pipeline for first level analysis. The config -options tells the pipeline engine to use `workdir` as the disk -location to use when running the processes and keeping their -outputs. The `use_parameterized_dirs` tells the engine to create -sub-directories under `workdir` corresponding to the iterables in the -pipeline. Thus for this pipeline there will be subject specific -sub-directories. +Use the :class:`~nipype.pipeline.engine.workflows.Workflow` to create a +graph-based execution pipeline for first level analysis. +Set the :py:attr:`~nipype.pipeline.engine.workflows.Workflow.base_dir` +option to instruct the pipeline engine to use ``spm_auditory_tutorial/workingdir`` +as the filesystem location to use when running the processes and keeping their +outputs. +Other options can be set via `the configuration file +`__. +For example, ``use_parameterized_dirs`` tells the engine to create +sub-directories under :py:attr:`~nipype.pipeline.engine.workflows.Workflow.base_dir`, +corresponding to the iterables in the pipeline. +Thus, for this pipeline there will be subject specific sub-directories. + +When building a workflow, interface objects are wrapped within +a :class:`~nipype.pipeline.engine.nodes.Node` so that they can be inserted +in the workflow. The :func:`~nipype.pipeline.engine.workflows.Workflow.connect` method creates the -links between the processes, i.e., how data should flow in and out of -the processing nodes. +links between :class:`~nipype.pipeline.engine.nodes.Node` instances, i.e., +how data should flow in and out of the processing nodes. """ level1 = pe.Workflow(name="level1") From 5bdaaf2b9f301955089466b1f77d3bd5ec4f5e2d Mon Sep 17 00:00:00 2001 From: oesteban Date: Wed, 1 Jan 2020 10:47:28 -0800 Subject: [PATCH 26/48] fix: updates to ``make_examples.py`` --- tools/make_examples.py | 34 ++++++++++++---------------------- 1 file changed, 12 insertions(+), 22 deletions(-) diff --git a/tools/make_examples.py b/tools/make_examples.py index f91d42b0fe..748e615043 100755 --- a/tools/make_examples.py +++ b/tools/make_examples.py @@ -2,18 +2,13 @@ """Run the py->rst conversion and run all examples. 
This also creates the index.rst file appropriately, makes figures, etc.
 
-"""
-# -----------------------------------------------------------------------------
-# Library imports
-# -----------------------------------------------------------------------------
-# Stdlib imports
+
+"""
 import os
 import sys
-
 from glob import glob
-
-# Third-party imports
+import runpy
+from toollib import sh
 
 # We must configure the mpl backend before making any further mpl imports
 import matplotlib
@@ -21,10 +16,6 @@
 matplotlib.use("Agg")
 import matplotlib.pyplot as plt
-from matplotlib._pylab_helpers import Gcf
-
-# Local tools
-from toollib import *
 
 # -----------------------------------------------------------------------------
 # Globals
@@ -52,6 +43,7 @@
 
 
 def show():
+    from matplotlib._pylab_helpers import Gcf
     allfm = Gcf.get_all_fig_managers()
     for fcount, fm in enumerate(allfm):
         fm.canvas.figure.savefig("%s_%02i.png" % (figure_basename, fcount + 1))
@@ -66,18 +58,17 @@ def show():
 
 exclude_files = ['-x %s' % sys.argv[i + 1]
                  for i, arg in enumerate(sys.argv) if arg == '-x']
 
+tools_path = os.path.abspath(os.path.dirname(__file__))
+ex2rst = os.path.join(tools_path, 'ex2rst')
 # Work in examples directory
-cd("users/examples")
+os.chdir("users/examples")
 if not os.getcwd().endswith("users/examples"):
     raise OSError("This must be run from doc/examples directory")
 
 # Run the conversion from .py to rst file
-sh("../../../tools/ex2rst %s --project Nipype --outdir . ../../../examples" %
-   ' '.join(exclude_files))
-sh("""\
-../../../tools/ex2rst --project Nipype %s --outdir . ../../../examples/frontiers_paper \
-""" % ' '.join(exclude_files)
-)
+sh("%s %s --project Nipype --outdir . ../../../examples" % (ex2rst, ' '.join(exclude_files)))
+sh("""%s --project Nipype %s --outdir . ../../../examples/frontiers_paper""" % (
+    ex2rst, ' '.join(exclude_files)))
 
 # Make the index.rst file
 """
@@ -99,7 +90,6 @@
 os.mkdir("fig")
 
 for script in glob("*.py"):
-    figure_basename = pjoin("fig", os.path.splitext(script)[0])
-    with open(script, 'rt') as f:
-        exec(f.read())
+    figure_basename = os.path.join("fig", os.path.splitext(script)[0])
+    runpy.run_path(script)
     plt.close("all")

From 9f99aaf481e286fbaeaf0a2c90ac345aebf30fef Mon Sep 17 00:00:00 2001
From: oesteban
Date: Wed, 1 Jan 2020 12:27:35 -0800
Subject: [PATCH 27/48] fix: miscellaneous improvements and fixes

In particular, once interfaces started printing their inheritance,
links to the ``ZZZCommandBase`` interfaces were broken: those classes
were originally filtered out of the API docs, so their pages were never
built. Now they are built, and we are back to 5 warnings.
---
 doc/interfaces.rst                  |  5 +++++
 examples/fmri_spm_auditory.py       |  2 +-
 nipype/interfaces/afni/__init__.py  |  7 +++---
 nipype/interfaces/afni/base.py      | 27 +++++++++++++----------
 nipype/pipeline/engine/base.py      | 33 ++++++++++++++++++++---------
 nipype/sphinxext/apidoc/__init__.py |  6 ------
 6 files changed, 49 insertions(+), 31 deletions(-)

diff --git a/doc/interfaces.rst b/doc/interfaces.rst
index ff489df51a..32aabf7d0b 100644
--- a/doc/interfaces.rst
+++ b/doc/interfaces.rst
@@ -5,6 +5,11 @@
 ========================
 Interfaces and Workflows
 ========================
+:Release: |version|
+:Date: |today|
+
+Previous versions: `1.3.0 `_ `1.2.3 `_
+
 Workflows
 ---------
 .. important::
diff --git a/examples/fmri_spm_auditory.py b/examples/fmri_spm_auditory.py
index 86acaad101..3d621c2f91 100755
--- a/examples/fmri_spm_auditory.py
+++ b/examples/fmri_spm_auditory.py
@@ -317,7 +317,7 @@ def makelist(item):
 
 Use the :class:`~nipype.pipeline.engine.workflows.Workflow` to create a
 graph-based execution pipeline for first level analysis.
-Set the :py:attr:`~nipype.pipeline.engine.workflows.Workflow.base_dir`
+Set the :py:attr:`~nipype.pipeline.engine.workflows.base.EngineBase.base_dir`
 option to instruct the pipeline engine to use ``spm_auditory_tutorial/workingdir``
 as the filesystem location to use when running the processes and keeping their
 outputs.
diff --git a/nipype/interfaces/afni/__init__.py b/nipype/interfaces/afni/__init__.py
index f795e347a3..d5f2bb4361 100644
--- a/nipype/interfaces/afni/__init__.py
+++ b/nipype/interfaces/afni/__init__.py
@@ -1,10 +1,11 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-"""The afni module provides classes for interfacing with the `AFNI
-`_ command line tools.
+"""
+AFNI_ is a software suite for the analysis and display of anatomical and functional MRI data.
+
+.. include:: ../../../doc/links_names.txt
 
-Top-level namespace for afni.
 """
 
 from .base import Info
diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py
index c1b181b85d..3ea7272448 100644
--- a/nipype/interfaces/afni/base.py
+++ b/nipype/interfaces/afni/base.py
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
 # vi: set ft=python sts=4 ts=4 sw=4 et:
-"""Provide interface to AFNI commands."""
+"""Provide a base interface to AFNI commands."""
 import os
 from sys import platform
 from distutils import spawn
@@ -108,12 +108,17 @@ def standard_image(img_name):
 
 class AFNICommandBase(CommandLine):
     """
-    A base class to fix a linking problem in OSX and afni.
+    A base class to fix a linking problem in OSX and AFNI.
+
+    See Also
+    --------
+    `This thread
+    <http://afni.nimh.nih.gov/afni/community/board/read.php?1,145346,145347#msg-145347>`__
+    about the particular environment variable that fixes this problem.
- See http://afni.nimh.nih.gov/afni/community/board/read.php?1,145346,145347#msg-145347 """ - def _run_interface(self, runtime): + def _run_interface(self, runtime, correct_return_codes=(0,)): if platform == "darwin": runtime.environ["DYLD_FALLBACK_LIBRARY_PATH"] = "/usr/local/afni/" return super(AFNICommandBase, self)._run_interface(runtime) @@ -294,13 +299,6 @@ def _gen_fname(self, basename, cwd=None, suffix=None, change_ext=True, ext=None) return fname -def no_afni(): - """Check whether AFNI is not available.""" - if Info.version() is None: - return True - return False - - class AFNIPythonCommandInputSpec(CommandLineInputSpec): outputtype = traits.Enum( "AFNI", list(Info.ftypes.keys()), desc="AFNI output filetype" @@ -323,3 +321,10 @@ def cmd(self): @property def _cmd_prefix(self): return "{} ".format(self.inputs.py27_path) + + +def no_afni(): + """Check whether AFNI is not available.""" + if Info.version() is None: + return True + return False diff --git a/nipype/pipeline/engine/base.py b/nipype/pipeline/engine/base.py index 6735c19d49..a041fd12e0 100644 --- a/nipype/pipeline/engine/base.py +++ b/nipype/pipeline/engine/base.py @@ -1,11 +1,7 @@ -#!/usr/bin/env python # -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: -"""Defines functionality for pipelined execution of interfaces - -The `EngineBase` class implements the more general view of a task. -""" +"""Defines functionality for pipelined execution of interfaces.""" from copy import deepcopy import re import numpy as np @@ -16,10 +12,15 @@ class EngineBase(object): - """Defines common attributes and functions for workflows and nodes.""" + """ + Defines common attributes and functions for workflows and nodes. + + Implements the more general view of a task. + """ def __init__(self, name=None, base_dir=None): - """ Initialize base parameters of a workflow or node + """ + Initialize base parameters of a workflow or node. Parameters ---------- @@ -31,15 +32,19 @@ def __init__(self, name=None, base_dir=None): default=None, which results in the use of mkdtemp """ + self._name = None self._hierarchy = None self.name = name self._id = self.name # for compatibility with node expansion using iterables self.base_dir = base_dir + """Define the work directory for this instance of workflow element.""" + self.config = deepcopy(config._sections) @property def name(self): + """Set the unique name of this workflow element.""" return self._name @name.setter @@ -50,6 +55,7 @@ def name(self, name): @property def fullname(self): + """Build the full name down the hierarchy.""" if self._hierarchy: return "%s.%s" % (self._hierarchy, self.name) return self.name @@ -64,20 +70,22 @@ def outputs(self): @property def itername(self): - """Name for expanded iterable""" + """Get the name of the expanded iterable.""" itername = self._id if self._hierarchy: itername = "%s.%s" % (self._hierarchy, self._id) return itername def clone(self, name): - """Clone an EngineBase object + """ + Clone an EngineBase object. Parameters ---------- name : string (mandatory) A clone of node or workflow must have a new name + """ if name == self.name: raise ValueError('Cloning requires a new name, "%s" is ' "in use." 
% name) @@ -96,15 +104,20 @@ def _check_inputs(self, parameter): return hasattr(self.inputs, parameter) def __str__(self): + """Convert to string.""" return self.fullname def __repr__(self): + """Get Python representation.""" return self.itername def save(self, filename=None): + """Store this workflow element to a file.""" if filename is None: filename = "temp.pklz" savepkl(filename, self) - def load(self, filename): + @staticmethod + def load(filename): + """Load this workflow element from a file.""" return loadpkl(filename) diff --git a/nipype/sphinxext/apidoc/__init__.py b/nipype/sphinxext/apidoc/__init__.py index cb46ff5b06..67cb00c59a 100644 --- a/nipype/sphinxext/apidoc/__init__.py +++ b/nipype/sphinxext/apidoc/__init__.py @@ -41,12 +41,6 @@ class Config(NapoleonConfig): _config_values = { "nipype_skip_classes": ( [ - "AFNI(Python)?Command", - "ANTS", - "FSLCommand", - "FS(Command|Script)", - "Info", - "^SPM", "Tester", "InputSpec", "OutputSpec", From 879c5562094e60613fe6ac06640aed1b500a6be0 Mon Sep 17 00:00:00 2001 From: oesteban Date: Wed, 1 Jan 2020 15:54:20 -0800 Subject: [PATCH 28/48] fix: documentation build at readthedocs --- .dockerignore | 5 ++--- doc/requirements.txt | 21 ++++++++++++++++++++- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/.dockerignore b/.dockerignore index fb4be03ec9..d6aeda95ff 100644 --- a/.dockerignore +++ b/.dockerignore @@ -22,13 +22,12 @@ src/ .git # other -docs/**/* -docs/ +doc/**/* +doc/ .cache/ .circle/**/* .circle/ circle.yml -rtd_requirements.txt Vagrantfile .travis.yml .mailmap diff --git a/doc/requirements.txt b/doc/requirements.txt index 057147c5b5..772afb185f 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1,8 +1,27 @@ +configparser dipy +funcsigs +future>=0.16.0 ipython matplotlib +mock nbsphinx +networkx>=1.9 +neurdflib +nibabel>=2.1.0 +numpy>=1.9.0 +numpydoc +packaging +prov>=1.5.2 +psutil +pydot>=1.2.3 +pydotplus +pytest>=3.0 +python-dateutil>=2.2 +scipy>=0.14 +simplejson>=3.8.0 sphinx-argparse sphinx>=2.1.2 sphinxcontrib-apidoc -sphinxcontrib-napoleon \ No newline at end of file +sphinxcontrib-napoleon +traits>=4.6 \ No newline at end of file From 3335c1b1701139333abf5c981d47a65848547a06 Mon Sep 17 00:00:00 2001 From: oesteban Date: Wed, 1 Jan 2020 16:34:25 -0800 Subject: [PATCH 29/48] doc: fix sphinx's latex build --- doc/Makefile | 2 +- doc/conf.py | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/doc/Makefile b/doc/Makefile index 85d491a70f..6d824162e1 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -40,7 +40,7 @@ examples2rst: clean ../tools/make_examples.py -x ../../../examples/test_spm.py --no-exec @echo "examples2rst finished." -latex: clean +latex: clean examples2rst $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex @echo @echo "Build finished; the LaTeX files are in _build/latex." diff --git a/doc/conf.py b/doc/conf.py index a93cfe7480..23f5ddf205 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -248,8 +248,12 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). 
latex_documents = [ - ('documentation', 'nipype.tex', u'nipype Documentation', - u'Neuroimaging in Python team', 'manual'), + ('interfaces', 'interfaces.tex', 'Nipype Interfaces Documentation', + 'Neuroimaging in Python team', 'manual'), + # ('developers', 'developers.tex', 'Nipype API', + # 'Neuroimaging in Python team', 'manual'), + ('examples', 'examples.tex', 'Nipype Examples', + 'Neuroimaging in Python team', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of From 21ecd6a4a4b15b4366f2743bc266917bd50c3fe8 Mon Sep 17 00:00:00 2001 From: oesteban Date: Wed, 1 Jan 2020 17:32:57 -0800 Subject: [PATCH 30/48] fix: strings too long --- nipype/interfaces/afni/preprocess.py | 19 +++++++------ nipype/interfaces/ants/segmentation.py | 37 +++++++++++++++++++------- 2 files changed, 39 insertions(+), 17 deletions(-) diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index f6d3f7c334..ac75566c08 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -180,7 +180,8 @@ class AlignEpiAnatPy(AFNIPythonCommand): >>> al_ea.inputs.tshift = 'off' >>> al_ea.inputs.save_skullstrip = True >>> al_ea.cmdline # doctest: +ELLIPSIS - 'python2 ...align_epi_anat.py -anat structural.nii -epi_base 0 -epi_strip 3dAutomask -epi functional.nii -save_skullstrip -suffix _al -tshift off -volreg off' + 'python2 ...align_epi_anat.py -anat structural.nii -epi_base 0 -epi_strip 3dAutomask -epi \ +functional.nii -save_skullstrip -suffix _al -tshift off -volreg off' >>> res = allineate.run() # doctest: +SKIP See Also @@ -3285,7 +3286,8 @@ class Volreg(AFNICommand): >>> volreg.inputs.zpad = 4 >>> volreg.inputs.outputtype = 'NIFTI' >>> volreg.cmdline # doctest: +ELLIPSIS - '3dvolreg -Fourier -twopass -1Dfile functional.1D -1Dmatrix_save functional.aff12.1D -prefix functional_volreg.nii -zpad 4 -maxdisp1D functional_md.1D functional.nii' + '3dvolreg -Fourier -twopass -1Dfile functional.1D -1Dmatrix_save functional.aff12.1D -prefix \ +functional_volreg.nii -zpad 4 -maxdisp1D functional_md.1D functional.nii' >>> res = volreg.run() # doctest: +SKIP >>> from nipype.interfaces import afni @@ -3299,7 +3301,8 @@ class Volreg(AFNICommand): >>> volreg.inputs.oned_file = 'dfile.r1.1D' >>> volreg.inputs.oned_matrix_save = 'mat.r1.tshift+orig.1D' >>> volreg.cmdline - '3dvolreg -cubic -1Dfile dfile.r1.1D -1Dmatrix_save mat.r1.tshift+orig.1D -prefix rm.epi.volreg.r1 -verbose -base functional.nii -zpad 1 -maxdisp1D functional_md.1D functional.nii' + '3dvolreg -cubic -1Dfile dfile.r1.1D -1Dmatrix_save mat.r1.tshift+orig.1D -prefix \ +rm.epi.volreg.r1 -verbose -base functional.nii -zpad 1 -maxdisp1D functional_md.1D functional.nii' >>> res = volreg.run() # doctest: +SKIP """ @@ -3406,8 +3409,8 @@ class Warp(AFNICommand): input_spec = WarpInputSpec output_spec = WarpOutputSpec - def _run_interface(self, runtime): - runtime = super(Warp, self)._run_interface(runtime) + def _run_interface(self, runtime, correct_return_codes=(0,)): + runtime = super(Warp, self)._run_interface(runtime, correct_return_codes) if self.inputs.save_warp: import numpy as np @@ -4168,10 +4171,10 @@ class Qwarp(AFNICommand): input_spec = QwarpInputSpec output_spec = QwarpOutputSpec - def _format_arg(self, name, spec, value): + def _format_arg(self, name, trait_spec, value): if name == "allineate_opts": - return spec.argstr % ("'" + value + "'") - return super(Qwarp, self)._format_arg(name, spec, value) + return trait_spec.argstr % ("'" + value + "'") + return 
super(Qwarp, self)._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index 07f2d6e819..faba90dc82 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -1193,7 +1193,8 @@ class JointFusion(ANTSCommand): ... 'segmentation1.nii.gz'] >>> at.inputs.target_image = 'T1.nii' >>> at.cmdline - 'jointfusion 3 1 -m Joint[0.1,2] -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' + 'jointfusion 3 1 -m Joint[0.1,2] -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz \ +-l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' >>> at.inputs.method = 'Joint' >>> at.inputs.alpha = 0.5 @@ -1201,7 +1202,8 @@ class JointFusion(ANTSCommand): >>> at.inputs.patch_radius = [3,2,1] >>> at.inputs.search_radius = [1,2,3] >>> at.cmdline - 'jointfusion 3 1 -m Joint[0.5,1] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -g im3.nii -l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' + 'jointfusion 3 1 -m Joint[0.5,1] -rp 3x2x1 -rs 1x2x3 -tg T1.nii -g im1.nii -g im2.nii -g im3.nii \ +-l segmentation0.nii.gz -l segmentation1.nii.gz -l segmentation1.nii.gz fusion_labelimage_output.nii' """ @@ -1512,18 +1514,22 @@ class AntsJointFusion(ANTSCommand): >>> antsjointfusion.inputs.atlas_segmentation_image = ['segmentation0.nii.gz'] >>> antsjointfusion.inputs.target_image = ['im1.nii'] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii']" + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz \ +-b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii']" >>> antsjointfusion.inputs.target_image = [ ['im1.nii', 'im2.nii'] ] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -l segmentation0.nii.gz \ +-b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" >>> antsjointfusion.inputs.atlas_image = [ ['rc1s1.nii','rc1s2.nii'], ... ['rc2s1.nii','rc2s2.nii'] ] >>> antsjointfusion.inputs.atlas_segmentation_image = ['segmentation0.nii.gz', ... 
'segmentation1.nii.gz'] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 2.0 -o ants_fusion_label_output.nii -s 3x3x3 -t ['im1.nii', 'im2.nii']" + "antsJointFusion -a 0.1 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ +-l segmentation0.nii.gz -l segmentation1.nii.gz -b 2.0 -o ants_fusion_label_output.nii \ +-s 3x3x3 -t ['im1.nii', 'im2.nii']" >>> antsjointfusion.inputs.dimension = 3 >>> antsjointfusion.inputs.alpha = 0.5 @@ -1531,21 +1537,29 @@ class AntsJointFusion(ANTSCommand): >>> antsjointfusion.inputs.patch_radius = [3,2,1] >>> antsjointfusion.inputs.search_radius = [3] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -o ants_fusion_label_output.nii -p 3x2x1 -s 3 -t ['im1.nii', 'im2.nii']" + "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ +-l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -o ants_fusion_label_output.nii \ +-p 3x2x1 -s 3 -t ['im1.nii', 'im2.nii']" >>> antsjointfusion.inputs.search_radius = ['mask.nii'] >>> antsjointfusion.inputs.verbose = True >>> antsjointfusion.inputs.exclusion_image = ['roi01.nii', 'roi02.nii'] >>> antsjointfusion.inputs.exclusion_image_label = ['1','2'] >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] -o ants_fusion_label_output.nii -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" + "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ +-l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] \ +-o ants_fusion_label_output.nii -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" >>> antsjointfusion.inputs.out_label_fusion = 'ants_fusion_label_output.nii' >>> antsjointfusion.inputs.out_intensity_fusion_name_format = 'ants_joint_fusion_intensity_%d.nii.gz' >>> antsjointfusion.inputs.out_label_post_prob_name_format = 'ants_joint_fusion_posterior_%d.nii.gz' >>> antsjointfusion.inputs.out_atlas_voting_weight_name_format = 'ants_joint_fusion_voting_weight_%d.nii.gz' >>> antsjointfusion.cmdline - "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] -l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] -o [ants_fusion_label_output.nii, ants_joint_fusion_intensity_%d.nii.gz, ants_joint_fusion_posterior_%d.nii.gz, ants_joint_fusion_voting_weight_%d.nii.gz] -p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" + "antsJointFusion -a 0.5 -g ['rc1s1.nii', 'rc1s2.nii'] -g ['rc2s1.nii', 'rc2s2.nii'] \ +-l segmentation0.nii.gz -l segmentation1.nii.gz -b 1.0 -d 3 -e 1[roi01.nii] -e 2[roi02.nii] \ +-o [ants_fusion_label_output.nii, ants_joint_fusion_intensity_%d.nii.gz, \ +ants_joint_fusion_posterior_%d.nii.gz, ants_joint_fusion_voting_weight_%d.nii.gz] \ +-p 3x2x1 -s mask.nii -t ['im1.nii', 'im2.nii'] -v" """ @@ -1798,7 +1812,12 @@ class KellyKapowski(ANTSCommand): >>> kk.inputs.convergence = "[45,0.0,10]" >>> kk.inputs.thickness_prior_estimate = 10 >>> kk.cmdline - 'KellyKapowski --convergence "[45,0.0,10]" --output "[segmentation0_cortical_thickness.nii.gz,segmentation0_warped_white_matter.nii.gz]" --image-dimensionality 3 --gradient-step 0.025000 
--maximum-number-of-invert-displacement-field-iterations 20 --number-of-integration-points 10 --segmentation-image "[segmentation0.nii.gz,2,3]" --smoothing-variance 1.000000 --smoothing-velocity-field-parameter 1.500000 --thickness-prior-estimate 10.000000' + 'KellyKapowski --convergence "[45,0.0,10]" \ +--output "[segmentation0_cortical_thickness.nii.gz,segmentation0_warped_white_matter.nii.gz]" \ +--image-dimensionality 3 --gradient-step 0.025000 \ +--maximum-number-of-invert-displacement-field-iterations 20 --number-of-integration-points 10 \ +--segmentation-image "[segmentation0.nii.gz,2,3]" --smoothing-variance 1.000000 \ +--smoothing-velocity-field-parameter 1.500000 --thickness-prior-estimate 10.000000' """ From 9ef8c95047a8665be0a0d242eb94b79c22fc4a1f Mon Sep 17 00:00:00 2001 From: oesteban Date: Wed, 1 Jan 2020 18:04:16 -0800 Subject: [PATCH 31/48] fix: codacy issues --- nipype/interfaces/afni/base.py | 2 +- nipype/interfaces/afni/preprocess.py | 19 ++++++++----------- nipype/interfaces/nitime/analysis.py | 7 +++++-- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py index 3ea7272448..dbe0882d8a 100644 --- a/nipype/interfaces/afni/base.py +++ b/nipype/interfaces/afni/base.py @@ -121,7 +121,7 @@ class AFNICommandBase(CommandLine): def _run_interface(self, runtime, correct_return_codes=(0,)): if platform == "darwin": runtime.environ["DYLD_FALLBACK_LIBRARY_PATH"] = "/usr/local/afni/" - return super(AFNICommandBase, self)._run_interface(runtime) + return super(AFNICommandBase, self)._run_interface(runtime, correct_return_codes) class AFNICommandInputSpec(CommandLineInputSpec): diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index ac75566c08..cace949d3c 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -591,7 +591,7 @@ def _list_outputs(self): outputs["out_weight_file"] = op.abspath(self.inputs.out_weight_file) if self.inputs.out_matrix: - path, base, ext = split_filename(self.inputs.out_matrix) + ext = split_filename(self.inputs.out_matrix)[-1] if ext.lower() not in [".1d", ".1D"]: outputs["out_matrix"] = self._gen_fname( self.inputs.out_matrix, suffix=".aff12.1D" @@ -600,7 +600,7 @@ def _list_outputs(self): outputs["out_matrix"] = op.abspath(self.inputs.out_matrix) if self.inputs.out_param_file: - path, base, ext = split_filename(self.inputs.out_param_file) + ext = split_filename(self.inputs.out_param_file)[-1] if ext.lower() not in [".1d", ".1D"]: outputs["out_param_file"] = self._gen_fname( self.inputs.out_param_file, suffix=".param.1D" @@ -1220,9 +1220,6 @@ class DegreeCentrality(AFNICommand): # Re-define generated inputs def _list_outputs(self): - # Import packages - import os - # Update outputs dictionary if oned file is defined outputs = super(DegreeCentrality, self)._list_outputs() if self.inputs.oned_file: @@ -1844,8 +1841,8 @@ def _parse_inputs(self, skip=None): skip += ["outliers_file"] return super(OutlierCount, self)._parse_inputs(skip) - def _run_interface(self, runtime): - runtime = super(OutlierCount, self)._run_interface(runtime) + def _run_interface(self, runtime, correct_return_codes=(0,)): + runtime = super(OutlierCount, self)._run_interface(runtime, correct_return_codes) # Read from runtime.stdout or runtime.merged with open(op.abspath(self.inputs.out_file), "w") as outfh: @@ -2096,7 +2093,7 @@ class ROIStats(AFNICommandBase): input_spec = ROIStatsInputSpec output_spec = ROIStatsOutputSpec - def 
_format_arg(self, name, spec, value): + def _format_arg(self, name, trait_spec, value): _stat_dict = { "mean": "-nzmean", "median": "-nzmedian", @@ -2113,7 +2110,7 @@ def _format_arg(self, name, spec, value): } if name == "stat": value = [_stat_dict[v] for v in value] - return super(ROIStats, self)._format_arg(name, spec, value) + return super(ROIStats, self)._format_arg(name, trait_spec, value) class RetroicorInputSpec(AFNICommandInputSpec): @@ -3146,8 +3143,8 @@ class TSmoothInputSpec(AFNICommandInputSpec): argstr="-osf", ) lin3 = traits.Int( - desc=r"3 point linear filter: :math:`0.5\,(1-m)\,a + m\,b + 0.5\,(1-m)\,c`" - " Here, 'm' is a number strictly between 0 and 1.", + desc=r"3 point linear filter: :math:`0.5\,(1-m)\,a + m\,b + 0.5\,(1-m)\,c`. " + "Here, 'm' is a number strictly between 0 and 1.", argstr="-3lin %d", ) hamming = traits.Int( diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py index 38bfb849a7..ffaf5380ce 100644 --- a/nipype/interfaces/nitime/analysis.py +++ b/nipype/interfaces/nitime/analysis.py @@ -86,11 +86,14 @@ class CoherenceAnalyzerInputSpec(BaseInterfaceInputSpec): ) output_csv_file = File( - desc="File to write outputs (coherence,time-delay) with file-names: ``file_name_{coherence,timedelay}``" + desc="File to write outputs (coherence,time-delay) with file-names: " + "``file_name_{coherence,timedelay}``" ) output_figure_file = File( - desc="File to write output figures (coherence,time-delay) with file-names: ``file_name_{coherence,timedelay}``. Possible formats: .png,.svg,.pdf,.jpg,..." + desc="""\ +File to write output figures (coherence,time-delay) with file-names: +``file_name_{coherence,timedelay}``. Possible formats: .png,.svg,.pdf,.jpg,...""" ) figure_type = traits.Enum( From 3e2e89f1c17bfda49b4193b3b906f9d7a0f8b093 Mon Sep 17 00:00:00 2001 From: oesteban Date: Wed, 1 Jan 2020 22:30:29 -0800 Subject: [PATCH 32/48] DOC: Bring examples generation back to ``doc/conf.py`` --- doc/Makefile | 4 ++-- doc/conf.py | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/doc/Makefile b/doc/Makefile index 6d824162e1..e224c20cbc 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -32,7 +32,7 @@ htmlonly: @echo @echo "Build finished. The HTML pages are in _build/html." -html: clean examples2rst htmlonly +html: clean htmlonly @echo "Build HTML and API finished." examples2rst: clean @@ -40,7 +40,7 @@ examples2rst: clean ../tools/make_examples.py -x ../../../examples/test_spm.py --no-exec @echo "examples2rst finished." -latex: clean examples2rst +latex: clean $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex @echo @echo "Build finished; the LaTeX files are in _build/latex." diff --git a/doc/conf.py b/doc/conf.py index 23f5ddf205..e1da69f130 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -16,6 +16,9 @@ from packaging.version import Version import nipype +os.makedirs('users/examples', exist_ok=True) +os.system('python ../tools/make_examples.py -x ../../../examples/test_spm.py --no-exec') + # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. From d8b89aa3eb88588b80609bb65be93e8671a67f1b Mon Sep 17 00:00:00 2001 From: oesteban Date: Thu, 2 Jan 2020 00:02:04 -0800 Subject: [PATCH 33/48] DOC: Revise generation of examples to work in RTD Amends #3131. 
--- doc/Makefile | 6 --- doc/conf.py | 10 ++++- tools/ex2rst | 13 +++--- tools/make_examples.py | 95 ------------------------------------------ 4 files changed, 16 insertions(+), 108 deletions(-) delete mode 100755 tools/make_examples.py diff --git a/doc/Makefile b/doc/Makefile index e224c20cbc..bcb7ac2e8f 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -22,7 +22,6 @@ help: "items (ChangeLog)" @echo " linkcheck check all external links for integrity" @echo " doctest run all doctests embedded in the documentation" - @echo " sf_satra copy html files to sourceforge (satra only)" clean: -rm -rf _build/* *~ api/generated interfaces/generated users/examples documentation.zip @@ -35,11 +34,6 @@ htmlonly: html: clean htmlonly @echo "Build HTML and API finished." -examples2rst: clean - mkdir -p users/examples - ../tools/make_examples.py -x ../../../examples/test_spm.py --no-exec - @echo "examples2rst finished." - latex: clean $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex @echo diff --git a/doc/conf.py b/doc/conf.py index e1da69f130..56d6935270 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -16,8 +16,16 @@ from packaging.version import Version import nipype +doc_path = os.path.abspath(os.path.dirname(__file__)) os.makedirs('users/examples', exist_ok=True) -os.system('python ../tools/make_examples.py -x ../../../examples/test_spm.py --no-exec') + +os.chdir(os.path.join(doc_path, 'users', 'examples')) +os.system("""python ../../../tools/ex2rst -x ../../../examples/test_spm.py \ +--project Nipype --outdir . ../../../examples""") +os.system("""python ../../../tools/ex2rst --project Nipype --outdir . \ +../../../examples/frontiers_paper""") +os.chdir(doc_path) + # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the diff --git a/tools/ex2rst b/tools/ex2rst index 2434d16ccc..dc3c6d5f37 100755 --- a/tools/ex2rst +++ b/tools/ex2rst @@ -158,9 +158,11 @@ def exfile2rstfile(filename, opts): """ # doc filename dfilename = os.path.basename(filename[:-3]) + '.rst' + dfilepath = os.path.join(opts.outdir, os.path.basename(dfilename)) + print("Creating file %s." % os.path.abspath(dfilepath)) # open dest file - dfile = open(os.path.join(opts.outdir, os.path.basename(dfilename)), 'w') + dfile = open(dfilepath, 'w') # place header dfile.write('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n') @@ -196,8 +198,8 @@ def exfile2rstfile(filename, opts): def main(): - parser = OptionParser( \ - usage="%prog [options] [...]", \ + parser = OptionParser( + usage="%prog [options] [...]", version="%prog 0.1", description="""\ %prog converts Python scripts into restructered text (ReST) format suitable for integration into the Sphinx documentation framework. Its key feature is that it @@ -217,7 +219,7 @@ the respective indentation is removed in the ReST output. The parser algorithm automatically excludes file headers and starts with the first (module-level) docstring instead. -""" ) #' +""") # define options parser.add_option( @@ -293,8 +295,7 @@ Name of the project that contains the examples. 
This name is used in the if len(toparse) != len(toparse_list): print('Ignoring duplicate parse targets.') - if not os.path.exists(opts.outdir): - os.mkdir(outdir) + os.makedirs(opts.outdir, exist_ok=True) # finally process all examples for t in toparse: diff --git a/tools/make_examples.py b/tools/make_examples.py deleted file mode 100755 index 748e615043..0000000000 --- a/tools/make_examples.py +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env python -"""Run the py->rst conversion and run all examples. - -This also creates the index.rst file appropriately, makes figures, etc. - -""" -import os -import sys -from glob import glob -import runpy -from toollib import sh - -# We must configure the mpl backend before making any further mpl imports -import matplotlib - -matplotlib.use("Agg") -import matplotlib.pyplot as plt - - -# ----------------------------------------------------------------------------- -# Globals -# ----------------------------------------------------------------------------- - -examples_header = """ - -.. _examples: - -Examples -======== - -.. note_about_examples -""" -# ----------------------------------------------------------------------------- -# Function defintions -# ----------------------------------------------------------------------------- - -# These global variables let show() be called by the scripts in the usual -# manner, but when generating examples, we override it to write the figures to -# files with a known name (derived from the script name) plus a counter -figure_basename = None - -# We must change the show command to save instead - - -def show(): - from matplotlib._pylab_helpers import Gcf - allfm = Gcf.get_all_fig_managers() - for fcount, fm in enumerate(allfm): - fm.canvas.figure.savefig("%s_%02i.png" % (figure_basename, fcount + 1)) - - -_mpl_show = plt.show -plt.show = show - -# ----------------------------------------------------------------------------- -# Main script -# ----------------------------------------------------------------------------- - -exclude_files = ['-x %s' % sys.argv[i + 1] for i, arg in enumerate(sys.argv) if arg == '-x'] - -tools_path = os.path.abspath(os.path.dirname(__file__)) -ex2rst = os.path.join(tools_path, 'ex2rst') -# Work in examples directory -os.chdir("users/examples") -if not os.getcwd().endswith("users/examples"): - raise OSError("This must be run from doc/examples directory") - -# Run the conversion from .py to rst file -sh("%s %s --project Nipype --outdir . ../../../examples" % (ex2rst, ' '.join(exclude_files))) -sh("""%s --project Nipype %s --outdir . ../../../examples/frontiers_paper""" % ( - ex2rst, ' '.join(exclude_files))) - -# Make the index.rst file -""" -index = open('index.rst', 'w') -index.write(examples_header) -for name in [os.path.splitext(f)[0] for f in glob('*.rst')]: - #Don't add the index in there to avoid sphinx errors and don't add the - #note_about examples again (because it was added at the top): - if name not in(['index','note_about_examples']): - index.write(' %s\n' % name) -index.close() -""" - -# Execute each python script in the directory. 
-if "--no-exec" in sys.argv: - pass -else: - if not os.path.isdir("fig"): - os.mkdir("fig") - - for script in glob("*.py"): - figure_basename = os.path.join("fig", os.path.splitext(script)[0]) - runpy.run_path(script) - plt.close("all") From 4b8d467efe75b1265d1a3182a5a23ba505480aa4 Mon Sep 17 00:00:00 2001 From: oesteban Date: Thu, 2 Jan 2020 10:13:28 -0800 Subject: [PATCH 34/48] FIX: Repair aftermath of docs refactor Bringing CircleCI back to green after #3124, #3131, and #3132. --- .circleci/build_docs.sh | 3 --- .circleci/config.yml | 7 ------- 2 files changed, 10 deletions(-) delete mode 100644 .circleci/build_docs.sh diff --git a/.circleci/build_docs.sh b/.circleci/build_docs.sh deleted file mode 100644 index a050caf66c..0000000000 --- a/.circleci/build_docs.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /src/nipype/doc "${DOCKER_IMAGE}:py36" /usr/bin/run_builddocs.sh diff --git a/.circleci/config.yml b/.circleci/config.yml index b8150456cf..c8058c48b7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -157,13 +157,6 @@ jobs: - run: *_run_codecov_coverage - store_artifacts: *store_artifacts_kwds - store_test_results: *store_artifacts_kwds - - run: - name: Build docs - no_output_timeout: 30m - environment: *test_environment - command: bash -ux /home/circleci/nipype/.circleci/build_docs.sh - - store_artifacts: - path: /home/circleci/work/docs - run: name: Save Docker images to workspace if on master no_output_timeout: 60m From d576418a9a5abd5115853cbff3b9575322b15307 Mon Sep 17 00:00:00 2001 From: oesteban Date: Thu, 2 Jan 2020 11:23:37 -0800 Subject: [PATCH 35/48] fix: revise some r-strings in SPM's EstimateContrast By removing some space concatenation of strings, some of them were r-strings and the concatenated one contained ``\n``, effectively escaping the special return-carriage. Instead of concatenating strings, the interface now accumulates the lines in a list that is joined in the end. 
--- nipype/interfaces/spm/model.py | 111 ++++++++++++++++----------------- 1 file changed, 55 insertions(+), 56 deletions(-) diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index 045051af6e..9390e254b7 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -437,8 +437,7 @@ class EstimateContrast(SPMCommand): _jobname = "con" def _make_matlab_command(self, _): - """validates spm options and generates job structure - """ + """Validate spm options and generate job structure.""" contrasts = [] cname = [] for i, cont in enumerate(self.inputs.contrasts): @@ -457,80 +456,80 @@ def _make_matlab_command(self, _): contrasts[i].weights = cont[3] if len(cont) >= 5: contrasts[i].sessions = cont[4] - script = "% generated by nipype.interfaces.spm\n" - script += "spm_defaults;\n" - script += "jobs{1}.stats{1}.con.spmmat = {'%s'};\n" % self.inputs.spm_mat_file - script += "load(jobs{1}.stats{1}.con.spmmat{:});\n" - script += "SPM.swd = '%s';\n" % os.getcwd() - script += "save(jobs{1}.stats{1}.con.spmmat{:},'SPM');\n" - script += "names = SPM.xX.name;\n" + script = ["""\ +% generated by nipype.interfaces.spm +spm_defaults; +jobs{1}.stats{1}.con.spmmat = {'%s'}; +load(jobs{1}.stats{1}.con.spmmat{:}); +SPM.swd = '%s'; +save(jobs{1}.stats{1}.con.spmmat{:},'SPM'); +names = SPM.xX.name; +""" % (self.inputs.spm_mat_file, os.getcwd())] # get names for columns if isdefined(self.inputs.group_contrast) and self.inputs.group_contrast: - script += "condnames=names;\n" + script += ["condnames=names;"] else: if self.inputs.use_derivs: - script += r"pat = 'Sn\([0-9]*\) (.*)';\n" + script += [r"pat = 'Sn\([0-9]*\) (.*)';"] else: - script += ( - r"pat = 'Sn\([0-9]*\) (.*)\*bf\(1\)|Sn\([0-9]*\) " - r".*\*bf\([2-9]\)|Sn\([0-9]*\) (.*)';" - "\n" - ) - script += "t = regexp(names,pat,'tokens');\n" + script += [r"""\ +pat = 'Sn\([0-9]*\) (.*)\*bf\(1\)|Sn\([0-9]*\) .*\*bf\([2-9]\)|Sn\([0-9]*\) (.*)';"""] + + script += ["t = regexp(names,pat,'tokens');"] # get sessidx for columns - script += r"pat1 = 'Sn\(([0-9].*)\)\s.*';\n" - script += "t1 = regexp(names,pat1,'tokens');\n" - script += ( - "for i0=1:numel(t),condnames{i0}='';condsess(i0)=0;if " - "~isempty(t{i0}{1}),condnames{i0} = t{i0}{1}{1};" - "condsess(i0)=str2num(t1{i0}{1}{1});end;end;\n" - ) + script += [r"pat1 = 'Sn\(([0-9].*)\)\s.*';"] + script += ["t1 = regexp(names,pat1,'tokens');"] + script += ["""\ +for i0=1:numel(t) + condnames{i0}=''; + condsess(i0)=0; + if ~isempty(t{i0}{1}) + condnames{i0} = t{i0}{1}{1}; + condsess(i0)=str2num(t1{i0}{1}{1}); + end; +end; +"""] + # BUILD CONTRAST SESSION STRUCTURE for i, contrast in enumerate(contrasts): if contrast.stat == "T": - script += "consess{%d}.tcon.name = '%s';\n" % (i + 1, contrast.name) - script += "consess{%d}.tcon.convec = zeros(1,numel(names));\n" % (i + 1) + script += ["consess{%d}.tcon.name = '%s';" % (i + 1, contrast.name)] + script += ["consess{%d}.tcon.convec = zeros(1,numel(names));" % (i + 1)] for c0, cond in enumerate(contrast.conditions): - script += "idx = strmatch('%s',condnames,'exact');\n" % (cond) - script += ( - "if isempty(idx), throw(MException(" - "'CondName:Chk', sprintf('Condition %%s not " - "found in design','%s'))); end;\n" - ) % cond + script += ["idx = strmatch('%s',condnames,'exact');" % cond] + script += ["""\ +if isempty(idx) + throw(MException('CondName:Chk', sprintf('Condition %%s not found in design','%s'))); +end; +""" % cond] if contrast.sessions: for sno, sw in enumerate(contrast.sessions): - script += "sidx = 
find(condsess(idx)==%d);\n" % (sno + 1) - script += "consess{%d}.tcon.convec(idx(sidx)) = %f;\n" % ( + script += ["sidx = find(condsess(idx)==%d);" % (sno + 1)] + script += ["consess{%d}.tcon.convec(idx(sidx)) = %f;" % ( i + 1, sw * contrast.weights[c0], - ) + )] else: - script += "consess{%d}.tcon.convec(idx) = %f;\n" % ( + script += ["consess{%d}.tcon.convec(idx) = %f;" % ( i + 1, contrast.weights[c0], - ) + )] for i, contrast in enumerate(contrasts): if contrast.stat == "F": - script += "consess{%d}.fcon.name = '%s';\n" % (i + 1, contrast.name) + script += ["consess{%d}.fcon.name = '%s';" % (i + 1, contrast.name)] for cl0, fcont in enumerate(contrast.conditions): - try: - tidx = cname.index(fcont[0]) - except: - Exception( - "Contrast Estimate: could not get index of" - " T contrast. probably not defined prior " - "to the F contrasts" - ) - script += ( - "consess{%d}.fcon.convec{%d} = consess{%d}.tcon.convec;\n" - ) % (i + 1, cl0 + 1, tidx + 1) - script += "jobs{1}.stats{1}.con.consess = consess;\n" - script += ( - "if strcmp(spm('ver'),'SPM8'), spm_jobman('initcfg');" - "jobs=spm_jobman('spm5tospm8',{jobs});end\n" - ) - script += "spm_jobman('run',jobs);" - return script + tidx = cname.index(fcont[0]) + script += ["consess{%d}.fcon.convec{%d} = consess{%d}.tcon.convec;" % + (i + 1, cl0 + 1, tidx + 1)] + script += ["jobs{1}.stats{1}.con.consess = consess;"] + script += ["""\ +if strcmp(spm('ver'),'SPM8') + spm_jobman('initcfg'); + jobs=spm_jobman('spm5tospm8',{jobs}); +end; +"""] + script += ["spm_jobman('run',jobs);"] + return "\n".join(script) def _list_outputs(self): import scipy.io as sio From aaf677a87f64c485f3e305799e4a5dc73b69e5fb Mon Sep 17 00:00:00 2001 From: oesteban Date: Thu, 2 Jan 2020 11:29:05 -0800 Subject: [PATCH 36/48] sty: black --- nipype/interfaces/afni/base.py | 4 +- nipype/interfaces/afni/preprocess.py | 4 +- nipype/interfaces/camino/dti.py | 2 +- nipype/interfaces/spm/model.py | 60 ++++++++++++++++------------ nipype/sphinxext/apidoc/__init__.py | 8 +--- 5 files changed, 43 insertions(+), 35 deletions(-) diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py index dbe0882d8a..20a4a9b4d6 100644 --- a/nipype/interfaces/afni/base.py +++ b/nipype/interfaces/afni/base.py @@ -121,7 +121,9 @@ class AFNICommandBase(CommandLine): def _run_interface(self, runtime, correct_return_codes=(0,)): if platform == "darwin": runtime.environ["DYLD_FALLBACK_LIBRARY_PATH"] = "/usr/local/afni/" - return super(AFNICommandBase, self)._run_interface(runtime, correct_return_codes) + return super(AFNICommandBase, self)._run_interface( + runtime, correct_return_codes + ) class AFNICommandInputSpec(CommandLineInputSpec): diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index cace949d3c..1d53aac98c 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -1842,7 +1842,9 @@ def _parse_inputs(self, skip=None): return super(OutlierCount, self)._parse_inputs(skip) def _run_interface(self, runtime, correct_return_codes=(0,)): - runtime = super(OutlierCount, self)._run_interface(runtime, correct_return_codes) + runtime = super(OutlierCount, self)._run_interface( + runtime, correct_return_codes + ) # Read from runtime.stdout or runtime.merged with open(op.abspath(self.inputs.out_file), "w") as outfh: diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py index 0504def61e..6d210c1b0b 100644 --- a/nipype/interfaces/camino/dti.py +++ b/nipype/interfaces/camino/dti.py @@ -1049,7 
+1049,7 @@ class TrackBedpostxProbaInputSpec(TrackInputSpec): iterations = traits.Int( argstr="-iterations %d", units="NA", - desc="Number of streamlines to generate at each " "seed point. The default is 1.", + desc="Number of streamlines to generate at each seed point. The default is 1.", ) diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index 9390e254b7..2d09f23e90 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -456,15 +456,17 @@ def _make_matlab_command(self, _): contrasts[i].weights = cont[3] if len(cont) >= 5: contrasts[i].sessions = cont[4] - script = ["""\ -% generated by nipype.interfaces.spm + script = [ + """\ +%% generated by nipype.interfaces.spm spm_defaults; jobs{1}.stats{1}.con.spmmat = {'%s'}; load(jobs{1}.stats{1}.con.spmmat{:}); SPM.swd = '%s'; save(jobs{1}.stats{1}.con.spmmat{:},'SPM'); -names = SPM.xX.name; -""" % (self.inputs.spm_mat_file, os.getcwd())] +names = SPM.xX.name;""" + % (self.inputs.spm_mat_file, os.getcwd()) + ] # get names for columns if isdefined(self.inputs.group_contrast) and self.inputs.group_contrast: script += ["condnames=names;"] @@ -472,14 +474,17 @@ def _make_matlab_command(self, _): if self.inputs.use_derivs: script += [r"pat = 'Sn\([0-9]*\) (.*)';"] else: - script += [r"""\ -pat = 'Sn\([0-9]*\) (.*)\*bf\(1\)|Sn\([0-9]*\) .*\*bf\([2-9]\)|Sn\([0-9]*\) (.*)';"""] + script += [ + r"pat = 'Sn\([0-9]*\) (.*)\*bf\(1\)|Sn\([0-9]*\) " + r".*\*bf\([2-9]\)|Sn\([0-9]*\) (.*)';" + ] script += ["t = regexp(names,pat,'tokens');"] # get sessidx for columns script += [r"pat1 = 'Sn\(([0-9].*)\)\s.*';"] script += ["t1 = regexp(names,pat1,'tokens');"] - script += ["""\ + script += [ + """\ for i0=1:numel(t) condnames{i0}=''; condsess(i0)=0; @@ -487,8 +492,8 @@ def _make_matlab_command(self, _): condnames{i0} = t{i0}{1}{1}; condsess(i0)=str2num(t1{i0}{1}{1}); end; -end; -"""] +end;""" + ] # BUILD CONTRAST SESSION STRUCTURE for i, contrast in enumerate(contrasts): @@ -497,37 +502,42 @@ def _make_matlab_command(self, _): script += ["consess{%d}.tcon.convec = zeros(1,numel(names));" % (i + 1)] for c0, cond in enumerate(contrast.conditions): script += ["idx = strmatch('%s',condnames,'exact');" % cond] - script += ["""\ + script += [ + """\ if isempty(idx) throw(MException('CondName:Chk', sprintf('Condition %%s not found in design','%s'))); -end; -""" % cond] +end;""" + % cond + ] if contrast.sessions: for sno, sw in enumerate(contrast.sessions): script += ["sidx = find(condsess(idx)==%d);" % (sno + 1)] - script += ["consess{%d}.tcon.convec(idx(sidx)) = %f;" % ( - i + 1, - sw * contrast.weights[c0], - )] + script += [ + "consess{%d}.tcon.convec(idx(sidx)) = %f;" + % (i + 1, sw * contrast.weights[c0],) + ] else: - script += ["consess{%d}.tcon.convec(idx) = %f;" % ( - i + 1, - contrast.weights[c0], - )] + script += [ + "consess{%d}.tcon.convec(idx) = %f;" + % (i + 1, contrast.weights[c0],) + ] for i, contrast in enumerate(contrasts): if contrast.stat == "F": script += ["consess{%d}.fcon.name = '%s';" % (i + 1, contrast.name)] for cl0, fcont in enumerate(contrast.conditions): tidx = cname.index(fcont[0]) - script += ["consess{%d}.fcon.convec{%d} = consess{%d}.tcon.convec;" % - (i + 1, cl0 + 1, tidx + 1)] + script += [ + "consess{%d}.fcon.convec{%d} = consess{%d}.tcon.convec;" + % (i + 1, cl0 + 1, tidx + 1) + ] script += ["jobs{1}.stats{1}.con.consess = consess;"] - script += ["""\ + script += [ + """\ if strcmp(spm('ver'),'SPM8') spm_jobman('initcfg'); jobs=spm_jobman('spm5tospm8',{jobs}); -end; -"""] 
+end;""" + ] script += ["spm_jobman('run',jobs);"] return "\n".join(script) diff --git a/nipype/sphinxext/apidoc/__init__.py b/nipype/sphinxext/apidoc/__init__.py index 67cb00c59a..9c64cb4fb9 100644 --- a/nipype/sphinxext/apidoc/__init__.py +++ b/nipype/sphinxext/apidoc/__init__.py @@ -40,13 +40,7 @@ class Config(NapoleonConfig): """ _config_values = { "nipype_skip_classes": ( - [ - "Tester", - "InputSpec", - "OutputSpec", - "Numpy", - "NipypeTester", - ], + ["Tester", "InputSpec", "OutputSpec", "Numpy", "NipypeTester",], "env", ), **NapoleonConfig._config_values, From 3b8f22845fc108b2a29e3f576a3ebde099014e6a Mon Sep 17 00:00:00 2001 From: oesteban Date: Thu, 2 Jan 2020 11:46:53 -0800 Subject: [PATCH 37/48] FIX: Can't seem to import workflows from niflows in CircleCI Master is broken -- this PR relocates the pip install of ``niflow-nipype1-workflows`` so that it happens AFTER nipype was installed. --- docker/generate_dockerfiles.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docker/generate_dockerfiles.sh b/docker/generate_dockerfiles.sh index d6d880bfc5..01a8e62781 100755 --- a/docker/generate_dockerfiles.sh +++ b/docker/generate_dockerfiles.sh @@ -94,7 +94,7 @@ function generate_main_dockerfile() { conda_install='python=${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR} libxml2 libxslt matplotlib mkl "numpy!=1.16.0" paramiko pandas psutil scikit-learn scipy traits rdflib' \ - pip_install="pytest-xdist niflow-nipype1-workflows" \ + pip_install="pytest-xdist" \ activate=true \ --copy docker/files/run_builddocs.sh docker/files/run_examples.sh \ docker/files/run_pytests.sh nipype/external/fsl_imglob.py /usr/bin/ \ @@ -110,6 +110,8 @@ function generate_main_dockerfile() { --miniconda use_env=neuro \ pip_opts="-e" \ pip_install="/src/nipype[all] https://github.com/bids-standard/pybids/tarball/0.7.0" \ + --miniconda use_env=neuro \ + pip_install="niflow-nipype1-workflows" \ --workdir /work \ --label org.label-schema.build-date='$BUILD_DATE' \ org.label-schema.name="NIPYPE" \ From 5bc27775e631dc5cc9ca2bd035287e1424dbd6fa Mon Sep 17 00:00:00 2001 From: oesteban Date: Thu, 2 Jan 2020 12:40:47 -0800 Subject: [PATCH 38/48] fix: update imports nipype.workflows -> niflow.nipyp1.workflows --- examples/dmri_connectivity_advanced.py | 8 ++++---- examples/dmri_dtk_dti.py | 2 +- examples/dmri_dtk_odf.py | 2 +- examples/dmri_fsl_dti.py | 2 +- examples/dmri_group_connectivity_camino.py | 4 ++-- examples/dmri_group_connectivity_mrtrix.py | 4 ++-- examples/dmri_preprocessing.py | 4 ++-- examples/dmri_tbss_nki.py | 4 ++-- examples/fmri_ants_openfmri.py | 2 +- examples/fmri_fsl_feeds.py | 2 +- examples/fmri_fsl_reuse.py | 2 +- examples/fmri_spm_dartel.py | 2 +- examples/frontiers_paper/smoothing_comparison.py | 2 +- examples/nipype_tutorial.ipynb | 2 +- examples/smri_ants_build_template.py | 2 +- examples/smri_antsregistration_build_template.py | 2 +- examples/smri_fsreconall.py | 2 +- examples/tessellation_tutorial.py | 4 ++-- nipype/interfaces/fsl/epi.py | 2 +- nipype/sphinxext/plot_workflow.py | 4 ++-- 20 files changed, 29 insertions(+), 29 deletions(-) diff --git a/examples/dmri_connectivity_advanced.py b/examples/dmri_connectivity_advanced.py index 1c97eab5cb..c25f1fe488 100755 --- a/examples/dmri_connectivity_advanced.py +++ b/examples/dmri_connectivity_advanced.py @@ -58,12 +58,12 @@ import inspect import os import os.path as op # system functions -from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline -from nipype.workflows.dmri.camino.connectivity_mapping import 
select_aparc_annot +from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline +from niflow.nipype1.workflows.dmri.camino.connectivity_mapping import select_aparc_annot from nipype.utils.misc import package_check import warnings -from nipype.workflows.dmri.connectivity.nx import create_networkx_pipeline, create_cmats_to_csv_pipeline -from nipype.workflows.smri.freesurfer import create_tessellation_flow +from niflow.nipype1.workflows.dmri.connectivity.nx import create_networkx_pipeline, create_cmats_to_csv_pipeline +from niflow.nipype1.workflows.smri.freesurfer import create_tessellation_flow try: package_check('cmp') diff --git a/examples/dmri_dtk_dti.py b/examples/dmri_dtk_dti.py index 2946e305cc..cd02d16391 100755 --- a/examples/dmri_dtk_dti.py +++ b/examples/dmri_dtk_dti.py @@ -26,7 +26,7 @@ import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import os # system functions -from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline +from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline """ Confirm package dependencies are installed. (This is only for the tutorial, rarely would you put this in your own code.) diff --git a/examples/dmri_dtk_odf.py b/examples/dmri_dtk_odf.py index 14367668b6..42a3b0e03a 100755 --- a/examples/dmri_dtk_odf.py +++ b/examples/dmri_dtk_odf.py @@ -26,7 +26,7 @@ import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import os # system functions -from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline +from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline """ Confirm package dependencies are installed. (This is only for the tutorial, rarely would you put this in your own code.) diff --git a/examples/dmri_fsl_dti.py b/examples/dmri_fsl_dti.py index 1ac833a98d..ffd114d2b3 100755 --- a/examples/dmri_fsl_dti.py +++ b/examples/dmri_fsl_dti.py @@ -25,7 +25,7 @@ import nipype.interfaces.utility as util # utility import nipype.pipeline.engine as pe # pypeline engine import os # system functions -from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline,\ +from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline,\ create_bedpostx_pipeline """ Confirm package dependencies are installed. 
(This is only for the diff --git a/examples/dmri_group_connectivity_camino.py b/examples/dmri_group_connectivity_camino.py index b244adee86..8dbceb606c 100644 --- a/examples/dmri_group_connectivity_camino.py +++ b/examples/dmri_group_connectivity_camino.py @@ -57,8 +57,8 @@ import nipype.interfaces.freesurfer as fs # freesurfer import os.path as op # system functions import cmp -from nipype.workflows.dmri.camino.group_connectivity import create_group_connectivity_pipeline -from nipype.workflows.dmri.connectivity.group_connectivity import ( +from niflow.nipype1.workflows.dmri.camino.group_connectivity import create_group_connectivity_pipeline +from niflow.nipype1.workflows.dmri.connectivity.group_connectivity import ( create_merge_networks_by_group_workflow, create_merge_group_networks_workflow, create_average_networks_by_group_workflow) diff --git a/examples/dmri_group_connectivity_mrtrix.py b/examples/dmri_group_connectivity_mrtrix.py index e709b9ade9..cbe7ef7003 100644 --- a/examples/dmri_group_connectivity_mrtrix.py +++ b/examples/dmri_group_connectivity_mrtrix.py @@ -57,8 +57,8 @@ import nipype.interfaces.freesurfer as fs # freesurfer import os.path as op # system functions import cmp -from nipype.workflows.dmri.mrtrix.group_connectivity import create_group_connectivity_pipeline -from nipype.workflows.dmri.connectivity.group_connectivity import ( +from niflow.nipype1.workflows.dmri.mrtrix.group_connectivity import create_group_connectivity_pipeline +from niflow.nipype1.workflows.dmri.connectivity.group_connectivity import ( create_merge_network_results_by_group_workflow, create_merge_group_network_results_workflow, create_average_networks_by_group_workflow) diff --git a/examples/dmri_preprocessing.py b/examples/dmri_preprocessing.py index 9b0ad53be3..1efc4e2e05 100644 --- a/examples/dmri_preprocessing.py +++ b/examples/dmri_preprocessing.py @@ -32,13 +32,13 @@ from nipype.interfaces import ants """ Load specific nipype's workflows for preprocessing of dMRI data: -:class:`nipype.workflows.dmri.preprocess.epi.all_peb_pipeline`, +:class:`niflow.nipype1.workflows.dmri.preprocess.epi.all_peb_pipeline`, as data include a *b0* volume with reverse encoding direction (*P>>>A*, or *y*), in contrast with the general acquisition encoding that is *A>>>P* or *-y* (in RAS systems). """ -from nipype.workflows.dmri.fsl.artifacts import all_fsl_pipeline, remove_bias +from niflow.nipype1.workflows.dmri.fsl.artifacts import all_fsl_pipeline, remove_bias """ Map field names into individual subject runs """ diff --git a/examples/dmri_tbss_nki.py b/examples/dmri_tbss_nki.py index 5f2f3d5a01..d14b74dda9 100755 --- a/examples/dmri_tbss_nki.py +++ b/examples/dmri_tbss_nki.py @@ -10,8 +10,8 @@ """ -from nipype.workflows.dmri.fsl.dti import create_eddy_correct_pipeline -from nipype.workflows.dmri.fsl.tbss import create_tbss_non_FA, create_tbss_all +from niflow.nipype1.workflows.dmri.fsl.dti import create_eddy_correct_pipeline +from niflow.nipype1.workflows.dmri.fsl.tbss import create_tbss_non_FA, create_tbss_all """ Tell python where to find the appropriate functions. 
""" diff --git a/examples/fmri_ants_openfmri.py b/examples/fmri_ants_openfmri.py index 35684cf595..5a88638003 100755 --- a/examples/fmri_ants_openfmri.py +++ b/examples/fmri_ants_openfmri.py @@ -38,7 +38,7 @@ from nipype.interfaces.io import FreeSurferSource import nipype.interfaces.utility as niu from nipype.interfaces.utility import Merge, IdentityInterface -from nipype.workflows.fmri.fsl import (create_featreg_preproc, +from niflow.nipype1.workflows.fmri.fsl import (create_featreg_preproc, create_modelfit_workflow, create_fixed_effects_flow) from nipype.utils import NUMPY_MMAP diff --git a/examples/fmri_fsl_feeds.py b/examples/fmri_fsl_feeds.py index f7b0aaf91d..5a90bf9213 100755 --- a/examples/fmri_fsl_feeds.py +++ b/examples/fmri_fsl_feeds.py @@ -22,7 +22,7 @@ from nipype.interfaces import fsl # fsl from nipype.pipeline import engine as pe # pypeline engine from nipype.algorithms import modelgen as model # model generation -from nipype.workflows.fmri.fsl import ( +from niflow.nipype1.workflows.fmri.fsl import ( create_featreg_preproc, create_modelfit_workflow, create_reg_workflow) from nipype.interfaces.base import Bunch """ diff --git a/examples/fmri_fsl_reuse.py b/examples/fmri_fsl_reuse.py index 7b24dc24b8..5375f8a780 100755 --- a/examples/fmri_fsl_reuse.py +++ b/examples/fmri_fsl_reuse.py @@ -28,7 +28,7 @@ import nipype.algorithms.modelgen as model # model generation import nipype.algorithms.rapidart as ra # artifact detection -from nipype.workflows.fmri.fsl import (create_featreg_preproc, +from niflow.nipype1.workflows.fmri.fsl import (create_featreg_preproc, create_modelfit_workflow, create_fixed_effects_flow) """ diff --git a/examples/fmri_spm_dartel.py b/examples/fmri_spm_dartel.py index 9c66ea7aac..815ce5a62d 100755 --- a/examples/fmri_spm_dartel.py +++ b/examples/fmri_spm_dartel.py @@ -21,7 +21,7 @@ import nipype.interfaces.io as nio # Data i/o import nipype.interfaces.spm as spm # spm -import nipype.workflows.fmri.spm as spm_wf # spm +import niflow.nipype1.workflows.fmri.spm as spm_wf # spm import nipype.interfaces.fsl as fsl # fsl from nipype.interfaces import utility as niu # Utilities import nipype.pipeline.engine as pe # pypeline engine diff --git a/examples/frontiers_paper/smoothing_comparison.py b/examples/frontiers_paper/smoothing_comparison.py index c4a31dad39..696e8a94b8 100644 --- a/examples/frontiers_paper/smoothing_comparison.py +++ b/examples/frontiers_paper/smoothing_comparison.py @@ -16,7 +16,7 @@ import nipype.interfaces.utility as util import nipype.pipeline.engine as pe # pypeline engine import nipype.algorithms.modelgen as model # model specification -import nipype.workflows.fmri.fsl as fsl_wf +import niflow.nipype1.workflows.fmri.fsl as fsl_wf from nipype.interfaces.base import Bunch import os # system functions diff --git a/examples/nipype_tutorial.ipynb b/examples/nipype_tutorial.ipynb index 9a7678dfd1..90a06a631e 100644 --- a/examples/nipype_tutorial.ipynb +++ b/examples/nipype_tutorial.ipynb @@ -1615,7 +1615,7 @@ "cell_type": "code", "collapsed": false, "input": [ - "from nipype.workflows.fmri.fsl.preprocess import create_susan_smooth\n", + "from niflow.nipype1.workflows.fmri.fsl.preprocess import create_susan_smooth\n", "\n", "smooth = create_susan_smooth()\n", "smooth.inputs.inputnode.in_files = opap('output/realigned/_subject_id_sub044/rbold_out.nii')\n", diff --git a/examples/smri_ants_build_template.py b/examples/smri_ants_build_template.py index a75c0f6783..53f3981428 100644 --- a/examples/smri_ants_build_template.py +++ 
b/examples/smri_ants_build_template.py @@ -23,7 +23,7 @@ import nipype.interfaces.io as io import nipype.pipeline.engine as pe # pypeline engine -from nipype.workflows.smri.ants import ANTSTemplateBuildSingleIterationWF +from niflow.nipype1.workflows.smri.ants import ANTSTemplateBuildSingleIterationWF """ 2. Download T1 volumes into home directory """ diff --git a/examples/smri_antsregistration_build_template.py b/examples/smri_antsregistration_build_template.py index ecc214265c..e84fc5b509 100644 --- a/examples/smri_antsregistration_build_template.py +++ b/examples/smri_antsregistration_build_template.py @@ -22,7 +22,7 @@ import nipype.interfaces.io as io import nipype.pipeline.engine as pe # pypeline engine -from nipype.workflows.smri.ants import antsRegistrationTemplateBuildSingleIterationWF +from niflow.nipype1.workflows.smri.ants import antsRegistrationTemplateBuildSingleIterationWF """ 2. Download T1 volumes into home directory """ diff --git a/examples/smri_fsreconall.py b/examples/smri_fsreconall.py index 6a9fc5446b..16d0b4c9f3 100644 --- a/examples/smri_fsreconall.py +++ b/examples/smri_fsreconall.py @@ -20,7 +20,7 @@ import nipype.pipeline.engine as pe import nipype.interfaces.io as nio -from nipype.workflows.smri.freesurfer import create_reconall_workflow +from niflow.nipype1.workflows.smri.freesurfer import create_reconall_workflow from nipype.interfaces.freesurfer.utils import MakeAverageSubject from nipype.interfaces.utility import IdentityInterface """ diff --git a/examples/tessellation_tutorial.py b/examples/tessellation_tutorial.py index 832ad9cad2..58bae095cc 100644 --- a/examples/tessellation_tutorial.py +++ b/examples/tessellation_tutorial.py @@ -7,7 +7,7 @@ Introduction ============ -This script, tessellation_tutorial.py, demonstrates the use of create_tessellation_flow from nipype.workflows.smri.freesurfer, and it can be run with:: +This script, tessellation_tutorial.py, demonstrates the use of create_tessellation_flow from niflow.nipype1.workflows.smri.freesurfer, and it can be run with:: python tessellation_tutorial.py @@ -39,7 +39,7 @@ import nipype.interfaces.io as nio # Data i/o import os import os.path as op -from nipype.workflows.smri.freesurfer import create_tessellation_flow +from niflow.nipype1.workflows.smri.freesurfer import create_tessellation_flow """ Directories =========== diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py index eef38795c7..88250eb1b2 100644 --- a/nipype/interfaces/fsl/epi.py +++ b/nipype/interfaces/fsl/epi.py @@ -1418,7 +1418,7 @@ def __init__(self, **inputs): warnings.warn( ( "Deprecated: Please use " - "nipype.workflows.dmri.preprocess.epi.sdc_fmb instead" + "niflow.nipype1.workflows.dmri.preprocess.epi.sdc_fmb instead" ), DeprecationWarning, ) diff --git a/nipype/sphinxext/plot_workflow.py b/nipype/sphinxext/plot_workflow.py index b1a36e36ea..78b5f71384 100644 --- a/nipype/sphinxext/plot_workflow.py +++ b/nipype/sphinxext/plot_workflow.py @@ -21,7 +21,7 @@ :graph2use: flat :simple_form: no - from nipype.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline + from niflow.nipype1.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline wf = create_connectivity_pipeline() @@ -32,7 +32,7 @@ :graph2use: flat :simple_form: no - from nipype.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline + from niflow.nipype1.workflows.dmri.camino.connectivity_mapping import create_connectivity_pipeline wf = create_connectivity_pipeline() From 
f1223235b08d841c383c1a05d6d0f1a5721bb519 Mon Sep 17 00:00:00 2001
From: oesteban
Date: Thu, 2 Jan 2020 14:43:30 -0800
Subject: [PATCH 39/48] pin: new niflows version 0.4.0

---
 docker/generate_dockerfiles.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/generate_dockerfiles.sh b/docker/generate_dockerfiles.sh
index 01a8e62781..dd31b804ac 100755
--- a/docker/generate_dockerfiles.sh
+++ b/docker/generate_dockerfiles.sh
@@ -111,7 +111,7 @@ function generate_main_dockerfile() {
       pip_opts="-e" \
       pip_install="/src/nipype[all] https://github.com/bids-standard/pybids/tarball/0.7.0" \
     --miniconda use_env=neuro \
-      pip_install="niflow-nipype1-workflows" \
+      pip_install='"niflow-nipype1-workflows>=0.4.0"' \
     --workdir /work \
     --label org.label-schema.build-date='$BUILD_DATE' \
           org.label-schema.name="NIPYPE" \

From 414fa39d21c8b70a27392ca89a906142cd84b9f6 Mon Sep 17 00:00:00 2001
From: Satrajit Ghosh
Date: Sun, 5 Jan 2020 13:46:09 -0500
Subject: [PATCH 40/48] fix: mapnode to generate result file when crashes in
 single node mode

---
 nipype/info.py                             | 10 +++++-
 nipype/pipeline/engine/nodes.py            | 38 ++++++++++++++++++----
 nipype/pipeline/engine/tests/test_nodes.py | 24 ++++++++++++++
 3 files changed, 65 insertions(+), 7 deletions(-)

diff --git a/nipype/info.py b/nipype/info.py
index 0a5e1a0e2e..7980f3719b 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -155,7 +155,15 @@ def get_nipype_gitversion():
 # https://github.com/nipy/nipype/pull/2961#issuecomment-512035484
 REQUIRES += ["neurdflib"]

-TESTS_REQUIRES = ["codecov", "coverage<5", "mock", "pytest", "pytest-cov", "pytest-env"]
+TESTS_REQUIRES = [
+    "codecov",
+    "coverage<5",
+    "mock",
+    "pytest",
+    "pytest-cov",
+    "pytest-env",
+    "pytest-timeout",
+]

 EXTRA_REQUIRES = {
     "data": ["datalad"],

diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py
index 09822cc7ff..aeff5f12da 100644
--- a/nipype/pipeline/engine/nodes.py
+++ b/nipype/pipeline/engine/nodes.py
@@ -1366,13 +1366,39 @@ def _run_interface(self, execute=True, updatehash=False):
             nodenames = [nnametpl.format(i) for i in range(nitems)]

         # Run mapnode
-        result = self._collate_results(
-            _node_runner(
-                self._make_nodes(cwd),
-                updatehash=updatehash,
-                stop_first=str2bool(self.config["execution"]["stop_on_first_crash"]),
-            )
+        outdir = self.output_dir()
+        result = InterfaceResult(
+            interface=self._interface.__class__,
+            runtime=Bunch(
+                cwd=outdir,
+                returncode=1,
+                environ=dict(os.environ),
+                hostname=socket.gethostname(),
+            ),
+            inputs=self._interface.inputs.get_traitsfree(),
         )
+        try:
+            result = self._collate_results(
+                _node_runner(
+                    self._make_nodes(cwd),
+                    updatehash=updatehash,
+                    stop_first=str2bool(
+                        self.config["execution"]["stop_on_first_crash"]
+                    ),
+                )
+            )
+        except Exception as msg:
+            result.runtime.stderr = "%s\n\n%s" % (
+                getattr(result.runtime, "stderr", ""), msg
+            )
+            _save_resultfile(
+                result,
+                outdir,
+                self.name,
+                rebase=str2bool(self.config["execution"]["use_relative_paths"]),
+            )
+            raise
+
         # And store results
         _save_resultfile(result, cwd, self.name, rebase=False)
         # remove any node directories no longer required
diff --git a/nipype/pipeline/engine/tests/test_nodes.py b/nipype/pipeline/engine/tests/test_nodes.py
index 6fd88011ee..a7d47a1d3d 100644
--- a/nipype/pipeline/engine/tests/test_nodes.py
+++ b/nipype/pipeline/engine/tests/test_nodes.py
@@ -314,3 +314,27 @@ def test_outputmultipath_collapse(tmpdir):
     assert ifres.outputs.out == [4]
     assert ndres.outputs.out == [4]
     assert select_nd.result.outputs.out == [4]
+
+
+@pytest.mark.timeout(30) +def test_mapnode_single(tmpdir): + tmpdir.chdir() + + def _producer(num=1, output_file=None, deadly_num=7): + if num == deadly_num: + raise RuntimeError("Got the deadly num (%d)." % num) + if output_file is None: + output_file = "producer_output_%05d" % num + with open(output_file, "w") as ofile: + ofile.write("%d" % num) + return output_file + + pnode = pe.MapNode( + niu.Function(function=_producer), name="ProducerNode", iterfield=["num"] + ) + pnode.inputs.num = [7] + wf = pe.Workflow(name="PC_Workflow") + wf.add_nodes([pnode]) + wf.base_dir = os.path.abspath("./test_output") + with pytest.raises(RuntimeError): + wf.run(plugin="MultiProc") From 6df78425c1138ac425988f339cdcf6dccb60591b Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Sun, 5 Jan 2020 13:54:56 -0500 Subject: [PATCH 41/48] enh: simplify test further --- nipype/pipeline/engine/tests/test_nodes.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/nipype/pipeline/engine/tests/test_nodes.py b/nipype/pipeline/engine/tests/test_nodes.py index a7d47a1d3d..f5e2d5016c 100644 --- a/nipype/pipeline/engine/tests/test_nodes.py +++ b/nipype/pipeline/engine/tests/test_nodes.py @@ -320,14 +320,10 @@ def test_outputmultipath_collapse(tmpdir): def test_mapnode_single(tmpdir): tmpdir.chdir() - def _producer(num=1, output_file=None, deadly_num=7): + def _producer(num=1, deadly_num=7): if num == deadly_num: raise RuntimeError("Got the deadly num (%d)." % num) - if output_file is None: - output_file = "producer_output_%05d" % num - with open(output_file, "w") as ofile: - ofile.write("%d" % num) - return output_file + return num + 1 pnode = pe.MapNode( niu.Function(function=_producer), name="ProducerNode", iterfield=["num"] From 4953418181cecffe9e81325db4c28a3464a0130f Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 6 Jan 2020 10:56:59 -0500 Subject: [PATCH 42/48] DOC: Update changelog to include backported PRs --- doc/changelog/1.X.X-changelog.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst index 3af9ed8ca4..4e9c1a6521 100644 --- a/doc/changelog/1.X.X-changelog.rst +++ b/doc/changelog/1.X.X-changelog.rst @@ -1,3 +1,17 @@ +1.4.1 (To Be Determined) +======================== +(`Full changelog `__) + + * FIX: mapnode to generate result file when crashes in single node mode (https://github.com/nipy/nipype/pull/3143) + * FIX: Can't seem to import workflows from niflows in CircleCI (https://github.com/nipy/nipype/pull/3134) + * FIX: Repair aftermath of docs refactor (https://github.com/nipy/nipype/pull/3133) + * FIX: change ANTS number_of_time_steps from Float to Int (https://github.com/nipy/nipype/pull/3118) + * DOC: Revise generation of examples to work in RTD (https://github.com/nipy/nipype/pull/3132) + * DOC: Bring examples generation back to ``doc/conf.py`` (https://github.com/nipy/nipype/pull/3131) + * DOC: Documentation overhaul (https://github.com/nipy/nipype/pull/3124) + * DOC: Deep revision of documentation building (https://github.com/nipy/nipype/pull/3120) + * DOC: Deduplicate code for Sphinx's APIdoc generation (https://github.com/nipy/nipype/pull/3119) + 1.4.0 (December 20, 2019) ========================= (`Full changelog `__) From fcc544be7af20f936b8538fe5557bd6108c034c8 Mon Sep 17 00:00:00 2001 From: oesteban Date: Wed, 1 Jan 2020 15:54:20 -0800 Subject: [PATCH 43/48] fix: documentation build at readthedocs --- doc/requirements.txt | 21 ++++++++++++++++++++- 1 file changed, 20 
insertions(+), 1 deletion(-)

diff --git a/doc/requirements.txt b/doc/requirements.txt
index 057147c5b5..772afb185f 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -1,8 +1,27 @@
+configparser
 dipy
+funcsigs
+future>=0.16.0
 ipython
 matplotlib
+mock
 nbsphinx
+networkx>=1.9
+neurdflib
+nibabel>=2.1.0
+numpy>=1.9.0
+numpydoc
+packaging
+prov>=1.5.2
+psutil
+pydot>=1.2.3
+pydotplus
+pytest>=3.0
+python-dateutil>=2.2
+scipy>=0.14
+simplejson>=3.8.0
 sphinx-argparse
 sphinx>=2.1.2
 sphinxcontrib-apidoc
-sphinxcontrib-napoleon
\ No newline at end of file
+sphinxcontrib-napoleon
+traits>=4.6
\ No newline at end of file

From 38354a33cbfb02478f16888f3055c66bce89d346 Mon Sep 17 00:00:00 2001
From: Satrajit Ghosh
Date: Sun, 5 Jan 2020 13:46:09 -0500
Subject: [PATCH 44/48] fix: mapnode to generate result file when crashes in
 single node mode

---
 nipype/info.py                             | 10 +++++-
 nipype/pipeline/engine/nodes.py            | 38 ++++++++++++++++++----
 nipype/pipeline/engine/tests/test_nodes.py | 24 ++++++++++++++
 3 files changed, 65 insertions(+), 7 deletions(-)

diff --git a/nipype/info.py b/nipype/info.py
index c09c1e9e4d..7a2e4ae70e 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -155,7 +155,15 @@ def get_nipype_gitversion():
 # https://github.com/nipy/nipype/pull/2961#issuecomment-512035484
 REQUIRES += ["neurdflib"]

-TESTS_REQUIRES = ["codecov", "coverage<5", "mock", "pytest", "pytest-cov", "pytest-env"]
+TESTS_REQUIRES = [
+    "codecov",
+    "coverage<5",
+    "mock",
+    "pytest",
+    "pytest-cov",
+    "pytest-env",
+    "pytest-timeout",
+]

 EXTRA_REQUIRES = {
     "data": ["datalad"],

diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py
index 09822cc7ff..aeff5f12da 100644
--- a/nipype/pipeline/engine/nodes.py
+++ b/nipype/pipeline/engine/nodes.py
@@ -1366,13 +1366,39 @@ def _run_interface(self, execute=True, updatehash=False):
             nodenames = [nnametpl.format(i) for i in range(nitems)]

         # Run mapnode
-        result = self._collate_results(
-            _node_runner(
-                self._make_nodes(cwd),
-                updatehash=updatehash,
-                stop_first=str2bool(self.config["execution"]["stop_on_first_crash"]),
-            )
+        outdir = self.output_dir()
+        result = InterfaceResult(
+            interface=self._interface.__class__,
+            runtime=Bunch(
+                cwd=outdir,
+                returncode=1,
+                environ=dict(os.environ),
+                hostname=socket.gethostname(),
+            ),
+            inputs=self._interface.inputs.get_traitsfree(),
         )
+        try:
+            result = self._collate_results(
+                _node_runner(
+                    self._make_nodes(cwd),
+                    updatehash=updatehash,
+                    stop_first=str2bool(
+                        self.config["execution"]["stop_on_first_crash"]
+                    ),
+                )
+            )
+        except Exception as msg:
+            result.runtime.stderr = "%s\n\n%s" % (
+                getattr(result.runtime, "stderr", ""), msg
+            )
+            _save_resultfile(
+                result,
+                outdir,
+                self.name,
+                rebase=str2bool(self.config["execution"]["use_relative_paths"]),
+            )
+            raise
+
         # And store results
         _save_resultfile(result, cwd, self.name, rebase=False)
         # remove any node directories no longer required
diff --git a/nipype/pipeline/engine/tests/test_nodes.py b/nipype/pipeline/engine/tests/test_nodes.py
index 6fd88011ee..a7d47a1d3d 100644
--- a/nipype/pipeline/engine/tests/test_nodes.py
+++ b/nipype/pipeline/engine/tests/test_nodes.py
@@ -314,3 +314,27 @@ def test_outputmultipath_collapse(tmpdir):
     assert ifres.outputs.out == [4]
     assert ndres.outputs.out == [4]
     assert select_nd.result.outputs.out == [4]
+
+
+@pytest.mark.timeout(30)
+def test_mapnode_single(tmpdir):
+    tmpdir.chdir()
+
+    def _producer(num=1, output_file=None, deadly_num=7):
+        if num == deadly_num:
+            raise RuntimeError("Got the deadly num (%d)."
% num) + if output_file is None: + output_file = "producer_output_%05d" % num + with open(output_file, "w") as ofile: + ofile.write("%d" % num) + return output_file + + pnode = pe.MapNode( + niu.Function(function=_producer), name="ProducerNode", iterfield=["num"] + ) + pnode.inputs.num = [7] + wf = pe.Workflow(name="PC_Workflow") + wf.add_nodes([pnode]) + wf.base_dir = os.path.abspath("./test_output") + with pytest.raises(RuntimeError): + wf.run(plugin="MultiProc") From 333fd8a9bfffbd8d3cf0c623e414fc641344854c Mon Sep 17 00:00:00 2001 From: Satrajit Ghosh Date: Sun, 5 Jan 2020 13:54:56 -0500 Subject: [PATCH 45/48] enh: simplify test further --- nipype/pipeline/engine/tests/test_nodes.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/nipype/pipeline/engine/tests/test_nodes.py b/nipype/pipeline/engine/tests/test_nodes.py index a7d47a1d3d..f5e2d5016c 100644 --- a/nipype/pipeline/engine/tests/test_nodes.py +++ b/nipype/pipeline/engine/tests/test_nodes.py @@ -320,14 +320,10 @@ def test_outputmultipath_collapse(tmpdir): def test_mapnode_single(tmpdir): tmpdir.chdir() - def _producer(num=1, output_file=None, deadly_num=7): + def _producer(num=1, deadly_num=7): if num == deadly_num: raise RuntimeError("Got the deadly num (%d)." % num) - if output_file is None: - output_file = "producer_output_%05d" % num - with open(output_file, "w") as ofile: - ofile.write("%d" % num) - return output_file + return num + 1 pnode = pe.MapNode( niu.Function(function=_producer), name="ProducerNode", iterfield=["num"] From 84bc62f22b522159a814c435fe1ecb12da9c2ecd Mon Sep 17 00:00:00 2001 From: Chris Markiewicz Date: Mon, 6 Jan 2020 10:56:59 -0500 Subject: [PATCH 46/48] DOC: Update changelog to include backported PRs --- doc/changelog/1.X.X-changelog.rst | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/doc/changelog/1.X.X-changelog.rst b/doc/changelog/1.X.X-changelog.rst index 3af9ed8ca4..4e9c1a6521 100644 --- a/doc/changelog/1.X.X-changelog.rst +++ b/doc/changelog/1.X.X-changelog.rst @@ -1,3 +1,17 @@ +1.4.1 (To Be Determined) +======================== +(`Full changelog `__) + + * FIX: mapnode to generate result file when crashes in single node mode (https://github.com/nipy/nipype/pull/3143) + * FIX: Can't seem to import workflows from niflows in CircleCI (https://github.com/nipy/nipype/pull/3134) + * FIX: Repair aftermath of docs refactor (https://github.com/nipy/nipype/pull/3133) + * FIX: change ANTS number_of_time_steps from Float to Int (https://github.com/nipy/nipype/pull/3118) + * DOC: Revise generation of examples to work in RTD (https://github.com/nipy/nipype/pull/3132) + * DOC: Bring examples generation back to ``doc/conf.py`` (https://github.com/nipy/nipype/pull/3131) + * DOC: Documentation overhaul (https://github.com/nipy/nipype/pull/3124) + * DOC: Deep revision of documentation building (https://github.com/nipy/nipype/pull/3120) + * DOC: Deduplicate code for Sphinx's APIdoc generation (https://github.com/nipy/nipype/pull/3119) + 1.4.0 (December 20, 2019) ========================= (`Full changelog `__) From 4e4b2a205855e700294b746de78485a06d1b3b89 Mon Sep 17 00:00:00 2001 From: dPys Date: Mon, 6 Jan 2020 12:33:00 -0600 Subject: [PATCH 47/48] [FIX] immunize shutil.rmtree to node non-existence for remove_node_directories=True in the case that stop_on_first_crash=False --- nipype/pipeline/plugins/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index 4be8eb232b..599db29418 
100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -461,7 +461,7 @@ def _remove_node_dirs(self): ) % (self.procs[idx]._id, outdir) ) - shutil.rmtree(outdir) + shutil.rmtree(outdir, ignore_errors=True) class SGELikeBatchManagerBase(DistributedPluginBase): From 572bf37e85bbc02d614b7d75611c2fe7758cbe80 Mon Sep 17 00:00:00 2001 From: dPys Date: Mon, 6 Jan 2020 12:33:00 -0600 Subject: [PATCH 48/48] [FIX] immunize shutil.rmtree to node non-existence for remove_node_directories=True in the case that stop_on_first_crash=False --- nipype/pipeline/plugins/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index 4be8eb232b..599db29418 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -461,7 +461,7 @@ def _remove_node_dirs(self): ) % (self.procs[idx]._id, outdir) ) - shutil.rmtree(outdir) + shutil.rmtree(outdir, ignore_errors=True) class SGELikeBatchManagerBase(DistributedPluginBase):
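
The behavioral difference the last two patches rely on, in isolation
(illustrative snippet, not from the codebase)::

    import shutil

    try:
        shutil.rmtree("/tmp/missing-node-dir")          # raises FileNotFoundError
    except FileNotFoundError:
        pass
    shutil.rmtree("/tmp/missing-node-dir", ignore_errors=True)  # never raises

With ``stop_on_first_crash=False`` a crashed node may never have created
its working directory, so the unconditional ``rmtree`` in
``_remove_node_dirs`` could itself crash the cleanup; passing
``ignore_errors=True`` turns it into a best-effort removal.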