Skip to content

Commit

Permalink
BUG: Need to propagate FS failures from sub-scripts.
Browse files Browse the repository at this point in the history
  • Loading branch information
hjmjohnson committed Jan 24, 2013
1 parent 794e9f7 commit 87f2953
Show file tree
Hide file tree
Showing 5 changed files with 56 additions and 75 deletions.
1 change: 1 addition & 0 deletions AutoWorkup/ShortWorkupT1T2.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,6 +290,7 @@ def AccumulateLikeTissuePosteriors(posteriorImages):


def ShortWorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentBaseDirectoryResults, ExperimentDatabase, atlas_fname_wpath, BCD_model_path,
GLOBAL_DATA_SINK_REWRITE,
InterpolationMode="Linear", Mode=10, DwiList=[], WORKFLOW_COMPONENTS=[], CLUSTER_QUEUE='', CLUSTER_QUEUE_LONG=''):
"""
Run autoworkup on all subjects data defined in the ExperimentDatabase
Expand Down
21 changes: 12 additions & 9 deletions AutoWorkup/WorkupT1T2.py
Original file line number Diff line number Diff line change
Expand Up @@ -300,7 +300,7 @@ def MakeNewAtlasTemplate(t1_image, deformed_list,
###########################################################################
###########################################################################
def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentBaseDirectoryResults, ExperimentDatabase, atlas_fname_wpath, BCD_model_path,
InterpolationMode="Linear", Mode=10, DwiList=[], WORKFLOW_COMPONENTS=[], CLUSTER_QUEUE='', CLUSTER_QUEUE_LONG='', SGE_JOB_SCRIPT='#!/bin/bash'):
GLOBAL_DATA_SINK_REWRITE, InterpolationMode="Linear", Mode=10, DwiList=[], WORKFLOW_COMPONENTS=[], CLUSTER_QUEUE='', CLUSTER_QUEUE_LONG='', SGE_JOB_SCRIPT='#!/bin/bash'):
"""
Run autoworkup on all subjects data defined in the ExperimentDatabase
Expand Down Expand Up @@ -1079,9 +1079,10 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB
FS_DS[sessionid].inputs.regexp_substitutions = [
('/_uid_(?P<myuid>[^/]*)', r'/\g<myuid>')
]
baw200.connect(myLocalFSWF[sessionid], 'outputspec.FreeSurferOutputDirectory', FS_DS[sessionid], 'FREESURFER_SUBJ.@FreeSurferOutputDirectory')
baw200.connect(myLocalFSWF[sessionid], 'outputspec.FreeSurferOutputDirectory', FS_DS[sessionid], 'FREESURFER52_SUBJECTS.@FreeSurferOutputDirectory')
### Now define where the final organized outputs should go.
FSPREP_DataSink[sessionid] = pe.Node(nio.DataSink(), name="FREESURFER_PREP_" + str(subjectid) + "_" + str(sessionid))
FSPREP_DataSink[sessionid] = pe.Node(nio.DataSink(), name="FREESURFER_PREP_DS_" + str(subjectid) + "_" + str(sessionid))
FSPREP_DataSink[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE
FSPREP_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
FREESURFER_PREP_PATTERNS = GenerateOutputPattern(projectid, subjectid, sessionid, 'FREESURFER_PREP')
FSPREP_DataSink[sessionid].inputs.regexp_substitutions = FREESURFER_PREP_PATTERNS
Expand All @@ -1095,7 +1096,8 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB
baw200.connect(myLocalFSWF[sessionid], 'outputspec.cnr_optimal_image', FSPREP_DataSink[sessionid], 'FREESURFER_PREP.@cnr_optimal_image')

#} end of "for sessionid in allSessions:"
#{ Do longitudinal
#{ Do template building
"""
##HACK : Move later
FS_TEMPLATE_oneSubjWorkflow = CreateFreeSurferSubjectTemplate(projectid,
subjectid,
Expand All @@ -1115,12 +1117,12 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB
baw200.connect(FreeSurferSessionID_MergeNode[subjectid],'out', FS_TEMPLATE_oneSubjWorkflow, 'inputspec.FreeSurferSession_IDs')
FSTEMP_DataSink[subjectid] = pe.Node(nio.DataSink(), name='FREESURFER_TEMP_' + str(subjectid))
FSTEMP_DataSink[subjectid].overwrite = GLOBAL_DATA_SINK_REWRITE
FREESURFER_TEMP_PATTERNS = GenerateOutputPattern(projectid, subjectid, 'FREESURFER_TEMP', '')
FSTEMP_DataSink[subjectid].inputs.regexp_substitutions = FREESURFER_TEMP_PATTERNS
FSTEMP_DataSink[subjectid].overwrite = GLOBAL_DATA_SINK_REWRITE
baw200.connect(FS_TEMPLATE_oneSubjWorkflow, 'outputspec.FreeSurferTemplateDir', FSTEMP_DataSink[subjectid], 'FREESURFER_SUBJ.@FreeSurferTemplateDir')
baw200.connect(FS_TEMPLATE_oneSubjWorkflow, 'outputspec.FreeSurferTemplateDir', FSTEMP_DataSink[subjectid], 'FREESURFER52_SUBJECTS.@FreeSurferTemplateDir')
#}
#{
#{ Do longitudinal analysis
FSLONG_DataSink = dict()
FS_LONG_oneSubjWorkflow = dict()
for sessionid in allSessions:
Expand All @@ -1136,14 +1138,15 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB
baw200.connect(FS_TEMPLATE_oneSubjWorkflow, 'outputspec.FreeSurferTemplateDir', FS_LONG_oneSubjWorkflow[sessionid], 'inputspec.SingleSubject_ID')
# baw200.connect(FREESURFER_SUBJ_ID, 'subjectTemplate_id', FS_LONG_oneSubjWorkflow[sessionid], 'inputspec.SingleSubject_ID')
FSLONG_DataSink[sessionid] = pe.Node(nio.DataSink(), name='_'.join(['FREESURFER_LONG', str(subjectid), str(sessionid)]))
FSLONG_DataSink[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE
FSLONG_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
FREESURFER_LONG_PATTERNS = GenerateOutputPattern(projectid, subjectid, sessionid, 'FREESURFER_LONG')
FSLONG_DataSink[sessionid].inputs.regexp_substitutions = FREESURFER_LONG_PATTERNS
FSLONG_DataSink[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE
baw200.connect(FS_LONG_oneSubjWorkflow[sessionid], 'outputspec.FreeSurferLongitudinalDir', FSLONG_DataSink[sessionid], 'FREESURFER_SUBJ.@longitudinalDirs')
baw200.connect(FS_LONG_oneSubjWorkflow[sessionid], 'outputspec.FreeSurferLongitudinalDir', FSLONG_DataSink[sessionid], 'FREESURFER52_SUBJECTS.@longitudinalDirs')
#} end of "for sessionid in allSessions:"
"""
else:
print "Skipping freesurfer"
return baw200
6 changes: 3 additions & 3 deletions AutoWorkup/WorkupT1T2FreeSurfer_custom.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def CreateFreeSurferWorkflow_custom(projectid, subjectid, sessionid, WFname, CLU
freesurfer_sge_options_dictionary = {'qsub_args': '-S /bin/bash -pe smp1 1 -l h_vmem=18G,mem_free=8G -o /dev/null -e /dev/null ' + CLUSTER_QUEUE, 'overwrite': True}
if RunAllFSComponents == True:
print("""Run FreeSurfer ReconAll at""")
fs_reconall = pe.Node(interface=fswrap.FSScript(), name="FS52_custom")
fs_reconall = pe.Node(interface=fswrap.FSScript(), name="FS52_cross_"+str(sessionid))
fs_reconall.plugin_args = freesurfer_sge_options_dictionary
fs_reconall.inputs.subcommand = 'autorecon'
# fs_reconall.inputs.directive = 'all'
Expand Down Expand Up @@ -108,7 +108,7 @@ def CreateFreeSurferSubjectTemplate(projectid, subjectid, session_ids, WFname, C
os.environ['SUBJECTS_DIR'] = constructed_FS_SUBJECTS_DIR
inputsSpec.inputs.subjects_dir = constructed_FS_SUBJECTS_DIR # HACK
print("""Run FreeSurfer Within Subject Template at""")
fs_template = pe.Node(interface=fswrap.FSScript(), name="FS55_template_build_"+str(subjectid))
fs_template = pe.Node(interface=fswrap.FSScript(), name="FS52_base_"+str(subjectid))
fs_template.plugin_args = freesurfer_sge_options_dictionary
fs_template.inputs.session_ids = session_ids
fs_template.inputs.subcommand = 'template'
Expand All @@ -133,7 +133,7 @@ def CreateFreeSurferLongitudinalWorkflow(projectid, sessionid, subjectid, WFname
os.environ['SUBJECTS_DIR'] = constructed_FS_SUBJECTS_DIR
inputsSpec.inputs.subjects_dir = constructed_FS_SUBJECTS_DIR # HACK

fs_longitudinal = pe.Node(interface=fswrap.FSScript(), name="FS55_longitudinal_"+str(sessionid))
fs_longitudinal = pe.Node(interface=fswrap.FSScript(), name="FS52_long_"+str(sessionid))
fs_longitudinal.plugin_args = freesurfer_sge_options_dictionary
fs_longitudinal.inputs.subcommand = 'longitudinal'
fs_longitudinal.inputs.session_id = sessionid
Expand Down
19 changes: 11 additions & 8 deletions AutoWorkup/baw_exp.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,6 @@
import sys

##############################################################################
GLOBAL_DATA_SINK_REWRITE = True


def get_global_sge_script(pythonPathsList, binPathsList, customEnvironment={}):
"""This is a wrapper script for running commands on an SGE cluster
Expand Down Expand Up @@ -96,17 +94,18 @@ def setGlobalDatasinkRewrite(cli, cfg):
:param cfg: configuration file value
:type cfg: bool
Sets the global variable `GLOBAL_DATA_SINK_REWRITE` constant flag used in :mod:`WorkupT1T2()`
Sets the variable `GLOBAL_DATA_SINK_REWRITE` constant flag used in :mod:`WorkupT1T2()`
"""
assert isinstance(cli, bool) and isinstance(cfg, bool), \
"Inputs are not boolean: {0}, {1}".format(cli, cfg)
global GLOBAL_DATA_SINK_REWRITE
GLOBAL_DATA_SINK_REWRITE=False
if cli or cfg:
print "*** Ignoring datasinks for pipeline rewriting ***" # TODO: Use logging
GLOBAL_DATA_SINK_REWRITE = False
else:
GLOBAL_DATA_SINK_REWRITE = True
return GLOBAL_DATA_SINK_REWRITE


def main(argv=None):
Expand Down Expand Up @@ -140,8 +139,8 @@ def main(argv=None):
expConfig.read(input_arguments.ExperimentConfig)

# Pipeline-specific information
ignore_datasinks = expConfig.getboolean('PIPELINE', 'IGNORE_DATASINKS')
setGlobalDatasinkRewrite(input_arguments.ignore_datasinks, ignore_datasinks)
ignore_datasinks = expConfig.getboolean('PIPELINE', 'GLOBAL_DATA_SINK_REWRITE')
GLOBAL_DATA_SINK_REWRITE=setGlobalDatasinkRewrite(input_arguments.ignore_datasinks, ignore_datasinks)

# Experiment specific information
subject_data_file = expConfig.get('EXPERIMENT_DATA', 'SESSION_DB')
Expand Down Expand Up @@ -303,14 +302,18 @@ def main(argv=None):
ExperimentBaseDirectoryResults,
ExperimentDatabase,
CACHE_ATLASPATH,
CACHE_BCDMODELPATH, WORKFLOW_COMPONENTS=WORKFLOW_COMPONENTS, CLUSTER_QUEUE=CLUSTER_QUEUE, CLUSTER_QUEUE_LONG=CLUSTER_QUEUE_LONG)
CACHE_BCDMODELPATH,
GLOBAL_DATA_SINK_REWRITE,
WORKFLOW_COMPONENTS=WORKFLOW_COMPONENTS, CLUSTER_QUEUE=CLUSTER_QUEUE, CLUSTER_QUEUE_LONG=CLUSTER_QUEUE_LONG)
else:
baw200 = WorkupT1T2.WorkupT1T2(subjectid, mountPrefix,
os.path.join(ExperimentBaseDirectoryCache, str(subjectid)),
ExperimentBaseDirectoryResults,
ExperimentDatabase,
CACHE_ATLASPATH,
CACHE_BCDMODELPATH, WORKFLOW_COMPONENTS=WORKFLOW_COMPONENTS, CLUSTER_QUEUE=CLUSTER_QUEUE, CLUSTER_QUEUE_LONG=CLUSTER_QUEUE_LONG, SGE_JOB_SCRIPT=JOB_SCRIPT)
CACHE_BCDMODELPATH,
GLOBAL_DATA_SINK_REWRITE,
WORKFLOW_COMPONENTS=WORKFLOW_COMPONENTS, CLUSTER_QUEUE=CLUSTER_QUEUE, CLUSTER_QUEUE_LONG=CLUSTER_QUEUE_LONG, SGE_JOB_SCRIPT=JOB_SCRIPT)
print "Start Processing"

SGEFlavor = 'SGE'
Expand Down
Loading

0 comments on commit 87f2953

Please sign in to comment.