run_examples.py
# -*- coding: utf-8 -*-
import os
import sys
from shutil import rmtree
from multiprocessing import cpu_count


def run_examples(example, pipelines, data_path, plugin=None, rm_base_dir=True):
    """Import an example module and run the requested workflows with the given plugin."""
    from nipype import config
    from nipype.interfaces.base import CommandLine

    if plugin is None:
        plugin = "MultiProc"

    print("running example: %s with plugin: %s" % (example, plugin))
    config.enable_debug_mode()
    config.enable_provenance()
    CommandLine.set_default_terminal_output("stream")

    plugin_args = {}
    if plugin == "MultiProc":
        # Allow the worker count to be capped via an environment variable
        plugin_args["n_procs"] = int(os.getenv("NIPYPE_NUMBER_OF_CPUS", cpu_count()))

    __import__(example)
    for pipeline in pipelines:
        wf = getattr(sys.modules[example], pipeline)
        wf.base_dir = os.path.join(os.getcwd(), "output", example, plugin)

        results_dir = os.path.join(wf.base_dir, wf.name)
        if rm_base_dir and os.path.exists(results_dir):
            rmtree(results_dir)

        # Handle a logging directory
        log_dir = os.path.join(os.getcwd(), "logs", example)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        wf.config = {
            "execution": {
                "hash_method": "timestamp",
                "stop_on_first_rerun": "true",
                "write_provenance": "true",
                "poll_sleep_duration": 2,
            },
            "logging": {"log_directory": log_dir, "log_to_file": True},
        }
        try:
            wf.inputs.inputnode.in_data = os.path.abspath(data_path)
        except AttributeError:
            pass  # the workflow does not have inputnode.in_data

        wf.run(plugin=plugin, plugin_args=plugin_args)
        # run twice to check if nothing is rerunning
        wf.run(plugin=plugin)
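
# Illustrative invocation (the data path below is a placeholder, not a value
# taken from this file):
#
#     python run_examples.py fmri_fsl_reuse MultiProc /path/to/example/data level1_workflow
#
# Positional arguments: example module name, plugin, data path, then one or
# more workflow attribute names defined in that module.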
if __name__ == "__main__":
    path, file = os.path.split(__file__)
    sys.path.insert(0, os.path.realpath(os.path.join(path, "..", "examples")))

    # Known example/pipeline combinations, kept for reference; the actual
    # arguments are read from the command line below.
    examples = {
        "fmri_fsl_reuse": ["level1_workflow"],
        "fmri_spm_nested": ["level1", "l2pipeline"],
        # 'fmri_spm_dartel':['level1','l2pipeline'],
        # 'fmri_fsl_feeds':['l1pipeline']
    }
    example = sys.argv[1]
    plugin = sys.argv[2]
    data_path = sys.argv[3]
    pipelines = sys.argv[4:]
    run_examples(example, pipelines, data_path, plugin)