From e9c2f06b8a1049b5d117ca9d6b8d90a9be313144 Mon Sep 17 00:00:00 2001 From: Hans Johnson Date: Tue, 5 Feb 2013 15:09:44 -0600 Subject: [PATCH] BUG: Freesurfer modules re-written. The freesurfer scripts were not internally consistent, nor were they correct. A line-by-line review was done diagramming what the inputs/outputs were for each stage, regularizing the variable names and meanings across the different stages of running, and verifying that the correct paths were being created. --- AutoWorkup/WorkupT1T2.py | 106 ++++++++++------------ AutoWorkup/WorkupT1T2FreeSurfer_custom.py | 46 ++++------ AutoWorkup/fsscript.py | 85 ++++++++--------- AutoWorkup/fswrap.py | 44 +++++---- 4 files changed, 135 insertions(+), 146 deletions(-) diff --git a/AutoWorkup/WorkupT1T2.py b/AutoWorkup/WorkupT1T2.py index a9ddba6c..154dcaee 100644 --- a/AutoWorkup/WorkupT1T2.py +++ b/AutoWorkup/WorkupT1T2.py @@ -700,8 +700,8 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB myLocalSegWF = dict() SEGMENTATION_DataSink = dict() STAPLE_SEGMENTATION_DataSink = dict() - myLocalFSWF = dict() - FSPREP_DataSink = dict() + FSCROSS_WF = dict() + FSPREP_DS = dict() FSCROSS_DS = dict() MergeStage2AverageImages = dict() @@ -713,7 +713,7 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB LinearSubjectToAtlasANTsApplyTransforms = dict() MultiLabelSubjectToAtlasANTsApplyTransforms = dict() Subj2Atlas_DS = dict() - FSBASE_DataSink = dict() + FSBASE_DS = dict() if 'SEGMENTATION' in WORKFLOW_COMPONENTS: # Run the ANTS Registration from Atlas to Subject for BCut spatial priors propagation. import PipeLineFunctionHelpers @@ -1055,60 +1055,51 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB if RunMultiMode: # If multi-modal, then create synthesized image before running print("HACK FREESURFER len(global_All3T_T2s) > 0 ") - myLocalFSWF[sessionid] = CreateFreeSurferWorkflow_custom(projectid, subjectid, sessionid, + FSCROSS_WF[sessionid] = CreateFreeSurferWorkflow_custom(projectid, subjectid, sessionid, "FSCROSS", CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, RunAllFSComponents, RunMultiMode, constructed_FS_SUBJECTS_DIR) - FREESURFER_ID[sessionid] = pe.Node(interface=IdentityInterface(fields=['FreeSurfer_ID']), + FREESURFER_ID[sessionid] = pe.Node(interface=IdentityInterface(fields=['subj_session_id']), run_without_submitting=True, name='99_FSNodeName' + str(subjectid) + "_" + str(sessionid)) - FREESURFER_ID[sessionid].inputs.FreeSurfer_ID = str(subjectid) + "_" + str(sessionid) + FREESURFER_ID[sessionid].inputs.subj_session_id = str(subjectid) + "_" + str(sessionid) - baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', myLocalFSWF[sessionid], 'inputspec.T1_files') - baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t2_average', myLocalFSWF[sessionid], 'inputspec.T2_files') - baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLabels', myLocalFSWF[sessionid], 'inputspec.label_file') + baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', FSCROSS_WF[sessionid], 'inputspec.T1_files') + baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t2_average', FSCROSS_WF[sessionid], 'inputspec.T2_files') + baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLabels', FSCROSS_WF[sessionid], 'inputspec.label_file') from PipeLineFunctionHelpers import GetOnePosteriorImageFromDictionaryFunction - baw200.connect([(PHASE_2_oneSubjWorkflow[sessionid], 
myLocalFSWF[sessionid], + baw200.connect([(PHASE_2_oneSubjWorkflow[sessionid], FSCROSS_WF[sessionid], [(('outputspec.posteriorImages', GetOnePosteriorImageFromDictionaryFunction, 'WM'), 'inputspec.wm_prob')])]) - baw200.connect(FREESURFER_ID[sessionid], 'FreeSurfer_ID', myLocalFSWF[sessionid], 'inputspec.FreeSurfer_ID') + baw200.connect(FREESURFER_ID[sessionid], 'subj_session_id', FSCROSS_WF[sessionid], 'inputspec.subj_session_id') ### Now define where the final organized outputs should go. - if RunAllFSComponents == True: - FSCROSS_DS_Name="FSCROSS_DS_" + str(subjectid) + "_" + str(sessionid) - FSCROSS_DS[sessionid] = pe.Node(nio.DataSink(), name=FSCROSS_DS_Name) - FSCROSS_DS[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE - FSCROSS_DS[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults - FSCROSS_DS[sessionid].inputs.regexp_substitutions = [ - ('/_uid_(?P[^/]*)', r'/\g') - ] - baw200.connect(myLocalFSWF[sessionid], 'outputspec.FreeSurferOutputDirectory', FSCROSS_DS[sessionid], 'FREESURFER52_SUBJECTS.@FreeSurferOutputDirectory') + FSCROSS_DS_Name="FSCROSS_DS_" + str(subjectid) + "_" + str(sessionid) + FSCROSS_DS[sessionid] = pe.Node(nio.DataSink(), name=FSCROSS_DS_Name) + FSCROSS_DS[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE + FSCROSS_DS[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults + FSCROSS_DS[sessionid].inputs.regexp_substitutions = [ ('/_uid_(?P[^/]*)', r'/\g') ] + baw200.connect(FSCROSS_WF[sessionid], 'outputspec.full_path_FS_output', FSCROSS_DS[sessionid], 'FREESURFER52_SUBJECTS.@full_path_FS_output') + ### Now define where the final organized outputs should go. - FSPREP_DataSink_Name="FREESURFER_PREP_DS_" + str(subjectid) + "_" + str(sessionid) - FSPREP_DataSink[sessionid] = pe.Node(nio.DataSink(), name = FSPREP_DataSink_Name) - FSPREP_DataSink[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE - FSPREP_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults - FREESURFER_PREP_PATTERNS = GenerateOutputPattern(projectid, subjectid, sessionid, FSPREP_DataSink_Name) - FSPREP_DataSink[sessionid].inputs.regexp_substitutions = FREESURFER_PREP_PATTERNS - print "=========================" - print "=========================" - print "=========================" - print FREESURFER_PREP_PATTERNS - print "=========================" - print "=========================" - print "=========================" - baw200.connect(myLocalFSWF[sessionid], 'outputspec.cnr_optimal_image', FSPREP_DataSink[sessionid], 'FREESURFER_PREP.@cnr_optimal_image') - baw200.connect(myLocalFSWF[sessionid], 'outputspec.subj_session_id', - FreeSurferSessionID_MergeNode[subjectid], 'in' + str(FSindex)) + baw200.connect(FSCROSS_WF[sessionid], 'outputspec.processed_output_name', FreeSurferSessionID_MergeNode[subjectid], 'in' + str(FSindex)) FSindex += 1 + ### Now write out the prep image. 
+ FSPREP_DS_Name="FREESURFER_PREP_DS_" + str(subjectid) + "_" + str(sessionid) + FSPREP_DS[sessionid] = pe.Node(nio.DataSink(), name = FSPREP_DS_Name) + FSPREP_DS[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE + FSPREP_DS[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults + FREESURFER_PREP_PATTERNS = GenerateOutputPattern(projectid, subjectid, sessionid, FSPREP_DS_Name) + FSPREP_DS[sessionid].inputs.regexp_substitutions = FREESURFER_PREP_PATTERNS + baw200.connect(FSCROSS_WF[sessionid], 'outputspec.cnr_optimal_image', FSPREP_DS[sessionid], 'FREESURFER_PREP.@cnr_optimal_image') #} end of "for sessionid in allSessions:" #{ Do template building ##HACK : Move later - FSBASE_oneSubjWorkflow = CreateFreeSurferSubjectTemplate(projectid, + FSBASE_WF = CreateFreeSurferSubjectTemplate(projectid, subjectid, "FS52_BASE", CLUSTER_QUEUE, @@ -1116,29 +1107,29 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB True, True, constructed_FS_SUBJECTS_DIR) - baw200.connect(FreeSurferSessionID_MergeNode[subjectid], 'out', - FSBASE_oneSubjWorkflow, 'inputspec.FreeSurferSession_IDs') - FREESURFER_SUBJ_ID = pe.Node(interface=IdentityInterface(fields=['subjectTemplate_id']), + FREESURFER_SUBJ_ID = pe.Node(interface=IdentityInterface(fields=['base_template_id']), run_without_submitting=True, name='99_FSNodeName_' + str(subjectid) + "_template") - FREESURFER_SUBJ_ID.inputs.subjectTemplate_id = str(subjectid) + "_template" + FREESURFER_SUBJ_ID.inputs.base_template_id = str(subjectid) + "_template" - baw200.connect(FREESURFER_SUBJ_ID, 'subjectTemplate_id', - FSBASE_oneSubjWorkflow, 'inputspec.subjectTemplate_id') + baw200.connect(FREESURFER_SUBJ_ID, 'base_template_id', FSBASE_WF, 'inputspec.base_template_id') + baw200.connect(FreeSurferSessionID_MergeNode[subjectid], 'out', FSBASE_WF, 'inputspec.list_all_subj_session_ids') FSBASE_DS_NAME='FS52_BASE_DS' + str(subjectid) - FSBASE_DataSink[subjectid] = pe.Node(nio.DataSink(), name=FSBASE_DS_NAME) - FSBASE_DataSink[subjectid].overwrite = GLOBAL_DATA_SINK_REWRITE - FREESURFER_TEMP_PATTERNS = GenerateOutputPattern(projectid, subjectid, FSBASE_DS_NAME, '') - FSBASE_DataSink[subjectid].inputs.regexp_substitutions = FREESURFER_TEMP_PATTERNS - baw200.connect(FSBASE_oneSubjWorkflow, 'outputspec.FreeSurferTemplateDir', FSBASE_DataSink[subjectid], 'FREESURFER52_SUBJECTS.@FreeSurferTemplateDir') + FSBASE_DS[subjectid] = pe.Node(nio.DataSink(), name=FSBASE_DS_NAME) + FSBASE_DS[subjectid].overwrite = GLOBAL_DATA_SINK_REWRITE + FSBASE_DS[subjectid].inputs.base_directory = ExperimentBaseDirectoryResults + #FREESURFER_TEMP_PATTERNS = GenerateOutputPattern(projectid, subjectid, sessionid, FSBASE_DS_NAME) + #FSBASE_DS[subjectid].inputs.regexp_substitutions = FREESURFER_TEMP_PATTERNS + FSBASE_DS[subjectid].inputs.regexp_substitutions = [ ('.*/(?P_template)', r'/\g_template') ] + baw200.connect(FSBASE_WF, 'outputspec.full_path_FS_output', FSBASE_DS[subjectid], 'FREESURFER52_SUBJECTS.@full_path_FS_output') #} #{ Do longitudinal analysis FSLONG_DataSink = dict() - FSLONG_oneSubjWorkflow = dict() + FSLONG_WF = dict() for sessionid in allSessions: - FSLONG_oneSubjWorkflow[sessionid] = CreateFreeSurferLongitudinalWorkflow(projectid, + FSLONG_WF[sessionid] = CreateFreeSurferLongitudinalWorkflow(projectid, subjectid, sessionid, "FS52_LONG", @@ -1147,18 +1138,17 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB True, True, constructed_FS_SUBJECTS_DIR) - baw200.connect(myLocalFSWF[sessionid], 
'outputspec.subj_session_id', - FSLONG_oneSubjWorkflow[sessionid], 'inputspec.FreeSurferSession_ID') - baw200.connect(FSBASE_oneSubjWorkflow, 'outputspec.FreeSurferTemplateDir', - FSLONG_oneSubjWorkflow[sessionid], 'inputspec.SingleSubject_ID') - FSLONG_DataSink_Name='_'.join(['FREESURFER_LONG', str(subjectid), str(sessionid)]) - FSLONG_DataSink[sessionid] = pe.Node(nio.DataSink(), name=FSLONG_DataSink_Name) + baw200.connect(FSCROSS_WF[sessionid], 'outputspec.processed_output_name', FSLONG_WF[sessionid], 'inputspec.subj_session_id') + baw200.connect(FSBASE_WF, 'outputspec.processed_output_name', FSLONG_WF[sessionid], 'inputspec.base_template_id') + + FSLONG_DS='_'.join(['FREESURFER_LONG', str(subjectid), str(sessionid)]) + FSLONG_DataSink[sessionid] = pe.Node(nio.DataSink(), name=FSLONG_DS) FSLONG_DataSink[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE FSLONG_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults - FREESURFER_LONG_PATTERNS = GenerateOutputPattern(projectid, subjectid, sessionid, FSLONG_DataSink_Name) + FREESURFER_LONG_PATTERNS = GenerateOutputPattern(projectid, subjectid, sessionid, FSLONG_DS) FSLONG_DataSink[sessionid].inputs.regexp_substitutions = FREESURFER_LONG_PATTERNS - baw200.connect(FSLONG_oneSubjWorkflow[sessionid], 'outputspec.FreeSurferLongitudinalDir', FSLONG_DataSink[sessionid], 'FREESURFER52_SUBJECTS.@longitudinalDirs') + baw200.connect(FSLONG_WF[sessionid], 'outputspec.full_path_FS_output', FSLONG_DataSink[sessionid], 'FREESURFER52_SUBJECTS.@longitudinalDirs') #} end of "for sessionid in allSessions:" else: diff --git a/AutoWorkup/WorkupT1T2FreeSurfer_custom.py b/AutoWorkup/WorkupT1T2FreeSurfer_custom.py index 5f66a93a..b1025451 100644 --- a/AutoWorkup/WorkupT1T2FreeSurfer_custom.py +++ b/AutoWorkup/WorkupT1T2FreeSurfer_custom.py @@ -19,11 +19,6 @@ """ -def MakeFreeSurferOutputDirectory(subjects_dir, subj_session_id): - import os - return os.path.join(subjects_dir, subj_session_id) - - def GenerateWFName(projectid, subjectid, sessionid, WFName): return WFName + '_' + str(subjectid) + "_" + str(sessionid) + "_" + str(projectid) @@ -31,10 +26,9 @@ def GenerateWFName(projectid, subjectid, sessionid, WFName): def CreateFreeSurferWorkflow_custom(projectid, subjectid, sessionid, WFname, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, RunAllFSComponents=True, RunMultiMode=True, constructed_FS_SUBJECTS_DIR='/never_use_this'): freesurferWF = pe.Workflow(name=GenerateWFName(projectid, subjectid, sessionid, WFname)) - inputsSpec = pe.Node(interface=IdentityInterface(fields=['FreeSurfer_ID', 'T1_files', 'T2_files', 'subjects_dir', + inputsSpec = pe.Node(interface=IdentityInterface(fields=['subj_session_id', 'T1_files', 'T2_files', 'subjects_dir', 'wm_prob', 'label_file', 'mask_file']), name='inputspec') - outputsSpec = pe.Node(interface=IdentityInterface(fields=['subj_session_id', 'subjects_dir', - 'FreeSurferOutputDirectory', 'cnr_optimal_image']), name='outputspec') + outputsSpec = pe.Node(interface=IdentityInterface(fields=['full_path_FS_output', 'processed_output_name','cnr_optimal_image']), name='outputspec') ### HACK: the nipype interface requires that this environmental variable is set before running print "HACK SETTING SUBJECTS_DIR {0}".format(constructed_FS_SUBJECTS_DIR) @@ -75,7 +69,7 @@ def CreateFreeSurferWorkflow_custom(projectid, subjectid, sessionid, WFname, CLU # fs_reconall.inputs.directive = 'all' # fs_reconall.inputs.fs_env_script = '' # NOTE: NOT NEEDED HERE 'FreeSurferEnv.sh' # fs_reconall.inputs.fs_home = '' # NOTE: NOT NEEDED HERE - 
freesurferWF.connect(inputsSpec, 'FreeSurfer_ID', fs_reconall, 'subj_session_id') + freesurferWF.connect(inputsSpec, 'subj_session_id', fs_reconall, 'subj_session_id') if RunMultiMode: ## Use the output of the synthesized T1 with maximized contrast ## HACK: REMOVE FOR NOW - NEEDS FURTHER TESTING @@ -86,18 +80,10 @@ def CreateFreeSurferWorkflow_custom(projectid, subjectid, sessionid, WFname, CLU ## Use the output of the T1 only image freesurferWF.connect(inputsSpec, 'T1_files', fs_reconall, 'T1_files') - computeFinalDirectory = pe.Node(Function(function=MakeFreeSurferOutputDirectory, - input_names=['subjects_dir', 'subj_session_id'], - output_names=['FreeSurferOutputDirectory']), - run_without_submitting=True, - name="99_computeFreeSurferOutputDirectory") - freesurferWF.connect(inputsSpec, 'subjects_dir', computeFinalDirectory, 'subjects_dir') - freesurferWF.connect(inputsSpec, 'FreeSurfer_ID', computeFinalDirectory, 'subj_session_id') - freesurferWF.connect(inputsSpec, 'label_file', fs_reconall, 'brainmask') freesurferWF.connect(inputsSpec, 'subjects_dir', fs_reconall, 'subjects_dir') - freesurferWF.connect(fs_reconall, 'subj_session_id', outputsSpec, 'subj_session_id') - freesurferWF.connect(computeFinalDirectory, 'FreeSurferOutputDirectory', outputsSpec, 'FreeSurferOutputDirectory') + freesurferWF.connect(fs_reconall, 'outDir', outputsSpec, 'full_path_FS_output') + freesurferWF.connect(fs_reconall, 'processed_output_name', outputsSpec, 'processed_output_name') return freesurferWF def CreateFreeSurferSubjectTemplate(projectid, subjectid, WFname, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, RunAllFSComponents=True, RunMultiMode=True, constructed_FS_SUBJECTS_DIR='/never_use_this', subcommand='template'): @@ -105,7 +91,7 @@ def CreateFreeSurferSubjectTemplate(projectid, subjectid, WFname, CLUSTER_QUEUE, Step 1: Construct the within-subject cross-sectional template (using all subject's sessions) """ subjectTemplate_freesurferWF = pe.Workflow(name=GenerateWFName(projectid, subjectid, '', WFname)) - inputsSpec = pe.Node(interface=IdentityInterface(fields=['subjectTemplate_id', 'subjects_dir','FreeSurferSession_IDs' ]), + inputsSpec = pe.Node(interface=IdentityInterface(fields=['base_template_id', 'subjects_dir','list_all_subj_session_ids' ]), name='inputspec') ### HACK: the nipype interface requires that this environmental variable is set before running print "HACK SETTING SUBJECTS_DIR {0}".format(constructed_FS_SUBJECTS_DIR) @@ -116,12 +102,13 @@ def CreateFreeSurferSubjectTemplate(projectid, subjectid, WFname, CLUSTER_QUEUE, freesurfer_sge_options_dictionary = {'qsub_args': '-S /bin/bash -pe smp1 1 -l h_vmem=18G,mem_free=8G ' + CLUSTER_QUEUE, 'overwrite': True} fs_template.plugin_args = freesurfer_sge_options_dictionary fs_template.inputs.subcommand = 'template' - subjectTemplate_freesurferWF.connect(inputsSpec, 'subjectTemplate_id', fs_template, 'subjectTemplate_id') - subjectTemplate_freesurferWF.connect(inputsSpec, 'FreeSurferSession_IDs', fs_template, 'session_ids') subjectTemplate_freesurferWF.connect(inputsSpec, 'subjects_dir', fs_template, 'subjects_dir') + subjectTemplate_freesurferWF.connect(inputsSpec, 'base_template_id', fs_template, 'base_template_id') + subjectTemplate_freesurferWF.connect(inputsSpec, 'list_all_subj_session_ids', fs_template, 'list_all_subj_session_ids') - outputsSpec = pe.Node(interface=IdentityInterface(fields=['FreeSurferTemplateDir']), name='outputspec') - subjectTemplate_freesurferWF.connect(fs_template, 'outDir', outputsSpec, 'FreeSurferTemplateDir') + outputsSpec = 
pe.Node(interface=IdentityInterface(fields=['full_path_FS_output','processed_output_name']), name='outputspec') + subjectTemplate_freesurferWF.connect(fs_template, 'outDir', outputsSpec, 'full_path_FS_output') + subjectTemplate_freesurferWF.connect(fs_template, 'processed_output_name', outputsSpec, 'processed_output_name') return subjectTemplate_freesurferWF @@ -130,7 +117,7 @@ def CreateFreeSurferLongitudinalWorkflow(projectid, subjectid, sessionid, WFname Step 2: Construct the longitudinal subject results (for each session individually) """ long_freesurferWF = pe.Workflow(name=GenerateWFName(projectid, subjectid, sessionid, WFname)) - inputsSpec = pe.Node(interface=IdentityInterface(fields=['SingleSubject_ID', 'FreeSurferSession_ID', 'subjects_dir']), name='inputspec') + inputsSpec = pe.Node(interface=IdentityInterface(fields=['base_template_id', 'subj_session_id', 'subjects_dir']), name='inputspec') ### HACK: the nipype interface requires that this environmental variable is set before running print "HACK SETTING SUBJECTS_DIR {0}".format(constructed_FS_SUBJECTS_DIR) os.environ['SUBJECTS_DIR'] = constructed_FS_SUBJECTS_DIR @@ -140,11 +127,12 @@ def CreateFreeSurferLongitudinalWorkflow(projectid, subjectid, sessionid, WFname freesurfer_sge_options_dictionary = {'qsub_args': '-S /bin/bash -pe smp1 1 -l h_vmem=18G,mem_free=8G ' + CLUSTER_QUEUE, 'overwrite': True} fs_longitudinal.plugin_args = freesurfer_sge_options_dictionary fs_longitudinal.inputs.subcommand = 'longitudinal' - long_freesurferWF.connect(inputsSpec, 'SingleSubject_ID', fs_longitudinal, 'template_id') - long_freesurferWF.connect(inputsSpec, 'FreeSurferSession_ID', fs_longitudinal, 'session_id') long_freesurferWF.connect(inputsSpec, 'subjects_dir', fs_longitudinal, 'subjects_dir') - outputsSpec = pe.Node(interface=IdentityInterface(fields=['FreeSurferLongitudinalDir']), name='outputspec') - long_freesurferWF.connect(fs_longitudinal, 'outDir', outputsSpec, 'FreeSurferLongitudinalDir') + long_freesurferWF.connect(inputsSpec, 'subj_session_id', fs_longitudinal, 'subj_session_id') + long_freesurferWF.connect(inputsSpec, 'base_template_id', fs_longitudinal, 'base_template_id') + outputsSpec = pe.Node(interface=IdentityInterface(fields=['full_path_FS_output','processed_output_name']), name='outputspec') + long_freesurferWF.connect(fs_longitudinal, 'outDir', outputsSpec, 'full_path_FS_output') + long_freesurferWF.connect(fs_longitudinal, 'processed_output_name', outputsSpec, 'processed_output_name') return long_freesurferWF diff --git a/AutoWorkup/fsscript.py b/AutoWorkup/fsscript.py index 87384104..c12cde8e 100755 --- a/AutoWorkup/fsscript.py +++ b/AutoWorkup/fsscript.py @@ -40,8 +40,8 @@ def IsFirstNewerThanSecond(firstFile, secondFile): def run_mri_convert_script(niftiVol, mgzVol, subjects_dir, subj_session_id, FREESURFER_HOME, FS_SCRIPT): FS_SCRIPT_FN = os.path.join(FREESURFER_HOME, FS_SCRIPT) mri_convert_script = """#!/bin/bash -#$ -o {FSSUBJDIR}/{SUBJID}/scripts/mri_convert_qsub.out -#$ -e {FSSUBJDIR}/{SUBJID}/scripts/mri_convert_qsub.err +#$ -o {FSSUBJDIR}/{SUBJ_SESSION_ID}/scripts/mri_convert_qsub.out +#$ -e {FSSUBJDIR}/{SUBJ_SESSION_ID}/scripts/mri_convert_qsub.err #$ -cwd export FREESURFER_HOME={FSHOME} export SUBJECTS_DIR={FSSUBJDIR} @@ -52,7 +52,7 @@ def run_mri_convert_script(niftiVol, mgzVol, subjects_dir, subj_session_id, FREE """.format(SOURCE_SCRIPT=FS_SCRIPT_FN, FSHOME=FREESURFER_HOME, FSSUBJDIR=subjects_dir, - SUBJID=subj_session_id, + SUBJ_SESSION_ID=subj_session_id, invol=niftiVol, outvol=mgzVol) script_name = 
mgzVol + '_convert.sh' @@ -74,8 +74,8 @@ def run_mri_convert_script(niftiVol, mgzVol, subjects_dir, subj_session_id, FREE def run_mri_mask_script(output_brainmask_fn_mgz, output_custom_brainmask_fn_mgz, output_nu_fn_mgz, subjects_dir, subj_session_id, FREESURFER_HOME, FS_SCRIPT): FS_SCRIPT_FN = os.path.join(FREESURFER_HOME, FS_SCRIPT) mri_mask_script = """#!/bin/bash -#$ -o {FSSUBJDIR}/{SUBJID}/scripts/mri_mask_scipt_qsub.out -#$ -e {FSSUBJDIR}/{SUBJID}/scripts/mri_mask_scipt_qsub.err +#$ -o {FSSUBJDIR}/{SUBJ_SESSION_ID}/scripts/mri_mask_scipt_qsub.out +#$ -e {FSSUBJDIR}/{SUBJ_SESSION_ID}/scripts/mri_mask_scipt_qsub.err #$ -cwd export FREESURFER_HOME={FSHOME} export SUBJECTS_DIR={FSSUBJDIR} @@ -87,7 +87,7 @@ def run_mri_mask_script(output_brainmask_fn_mgz, output_custom_brainmask_fn_mgz, """.format(SOURCE_SCRIPT=FS_SCRIPT_FN, FSHOME=FREESURFER_HOME, FSSUBJDIR=subjects_dir, - SUBJID=subj_session_id, + SUBJ_SESSION_ID=subj_session_id, CURRENT=os.path.dirname(output_custom_brainmask_fn_mgz), invol=output_nu_fn_mgz, maskvol=output_custom_brainmask_fn_mgz, @@ -154,23 +154,23 @@ def runAutoReconStage(subj_session_id, StageToRun, t1_fn, subjects_dir, FREESURF mkdir_p(os.path.dirname(orig_001_mgz_fn)) run_mri_convert_script(t1_fn, orig_001_mgz_fn, subjects_dir, subj_session_id, FREESURFER_HOME, FS_SCRIPT) auto_recon_script="""#!/bin/bash -#$ -o {FSSUBJDIR}/{SUBJID}/scripts/autorecon{AUTORECONSTAGE}_qsub.out -#$ -e {FSSUBJDIR}/{SUBJID}/scripts/autorecon{AUTORECONSTAGE}_qsub.err +#$ -o {FSSUBJDIR}/{SUBJ_SESSION_ID}/scripts/autorecon{AUTORECONSTAGE}_qsub.out +#$ -e {FSSUBJDIR}/{SUBJ_SESSION_ID}/scripts/autorecon{AUTORECONSTAGE}_qsub.err #$ -cwd export FREESURFER_HOME={FSHOME} export SUBJECTS_DIR={FSSUBJDIR} source {SOURCE_SCRIPT} ## Need to delete the "IsRunning" flags, if nipype pipeline is running this, then nipype prevents duplicates -rm -f {FSSUBJDIR}/{SUBJID}/scripts/Is* +rm -f {FSSUBJDIR}/{SUBJ_SESSION_ID}/scripts/Is* -{FSHOME}/bin/recon-all -debug -subjid {SUBJID} -make autorecon{AUTORECONSTAGE} +{FSHOME}/bin/recon-all -debug -subjid {SUBJ_SESSION_ID} -make autorecon{AUTORECONSTAGE} status=$? 
exit $status """.format(SOURCE_SCRIPT=FS_SCRIPT_FN, FSHOME=FREESURFER_HOME, FSSUBJDIR=subjects_dir, AUTORECONSTAGE=StageToRun, - SUBJID=subj_session_id) + SUBJ_SESSION_ID=subj_session_id) base_run_dir = os.path.join(subjects_dir, subj_session_id, 'scripts') mkdir_p(base_run_dir) script_name = os.path.join(base_run_dir,'run_autorecon_stage'+str(StageToRun)+'.sh') @@ -191,20 +191,20 @@ def runAutoReconStage(subj_session_id, StageToRun, t1_fn, subjects_dir, FREESURF def runSubjectTemplate(args, FREESURFER_HOME, FS_SCRIPT): """ Create the within-subject template """ - subjectTemplate_id = args.subjectTemplate_id - session_ids = args.session_ids + base_template_id = args.base_template_id + list_all_subj_session_ids = args.list_all_subj_session_ids subjects_dir = args.subjects_dir print "X"*80 - print "subjectTemplate_id :{0}:".format(subjectTemplate_id) - print "Input a list of session_ids :{0}:".format(session_ids) + print "base_template_id :{0}:".format(base_template_id) + print "Input a list of list_all_subj_session_ids :{0}:".format(list_all_subj_session_ids) print "subjects_dir :{0}:".format(subjects_dir) print "X"*80 - assert isinstance(session_ids, list), "Must input a list of session_ids :{0}:".format(session_ids) + assert isinstance(list_all_subj_session_ids, list), "Must input a list of list_all_subj_session_ids :{0}:".format(list_all_subj_session_ids) StageToRun = "Within-SubjectTemplate" FS_SCRIPT_FN = os.path.join(FREESURFER_HOME, FS_SCRIPT) allTimePointFlags = "" - for session_id in session_ids: - allTimePointFlags += " -tp {timepoint}".format(timepoint=session_id) + for subj_session_id in list_all_subj_session_ids: + allTimePointFlags += " -tp {timepoint}".format(timepoint=subj_session_id) allTimePointFlags += " -all" auto_recon_script="""#!/bin/bash #$ -o {FSSUBJDIR}/{TEMPLATEID}/scripts/base_{TEMPLATEID}_qsub.out @@ -227,9 +227,9 @@ def runSubjectTemplate(args, FREESURFER_HOME, FS_SCRIPT): """.format(SOURCE_SCRIPT=FS_SCRIPT_FN, FSHOME=FREESURFER_HOME, FSSUBJDIR=subjects_dir, - TEMPLATEID=subjectTemplate_id, + TEMPLATEID=base_template_id, ALL_TIME_POINTS=allTimePointFlags) - base_run_dir = os.path.join(subjects_dir, subjectTemplate_id, 'scripts') + base_run_dir = os.path.join(subjects_dir, base_template_id, 'scripts') mkdir_p(base_run_dir) script_name = os.path.join(base_run_dir,'run_autorecon_stage_'+str(StageToRun)+'.sh') script = open(script_name, 'w') @@ -238,21 +238,21 @@ def runSubjectTemplate(args, FREESURFER_HOME, FS_SCRIPT): os.chmod(script_name, 0777) script_name_stdout = script_name + '_out' script_name_stdout_fid = open(script_name_stdout, 'w') - print "Starting auto_recon Stage: {0} for SubjectSession {1}".format(StageToRun, subjectTemplate_id) + print "Starting auto_recon Stage: {0} for SubjectSession {1}".format(StageToRun, base_template_id) scriptStatus = subprocess.check_call([script_name], stdout=script_name_stdout_fid, stderr=subprocess.STDOUT, shell='/bin/bash') if scriptStatus != 0: sys.exit(scriptStatus) - print "Ending auto_recon Stage: {0} for SubjectSession {1}".format(StageToRun, subjectTemplate_id) + print "Ending auto_recon Stage: {0} for SubjectSession {1}".format(StageToRun, base_template_id) script_name_stdout_fid.close() return def runLongitudinal(args, FREESURFER_HOME, FS_SCRIPT): """ Create the longitudinal analysis """ - session_long_id = args.session_id + subj_session_id = args.subj_session_id subjects_dir = args.subjects_dir - template_id = args.template_id - assert isinstance(session_long_id, str), "Must input a singel session_id as string 
:{0}:".format(session_long_id)
+    base_template_id = args.base_template_id
+    assert isinstance(subj_session_id, str), "Must input a single subj_session_id as string :{0}:".format(subj_session_id)
     StageToRun = "Longitudinal"
     FS_SCRIPT_FN = os.path.join(FREESURFER_HOME, FS_SCRIPT)
     auto_recon_script = """#!/bin/bash
@@ -265,21 +265,21 @@ def runLongitudinal(args, FREESURFER_HOME, FS_SCRIPT):
 ## Need to delete the "IsRunning" flags, if nipype pipeline is running this, then nipype prevents duplicates
 rm -f {FSSUBJDIR}/{LONGSESSIONID}/scripts/Is*
-if [ -f {FSSUBJDIR}/{TEMPLATEID}_{LONGSESSIONID}.long/stats/rh.entorhinal_exvivo.stats ]; then
- echo "--- SKIPPING: {LONGSESSIONID}.long.{TEMPLATEID} file already exits: {FSSUBJDIR}/{TEMPLATEID}_{LONGSESSIONID}.long/stats/rh.entorhinal_exvivo.stats"
+if [ -f {FSSUBJDIR}/{LONGSESSIONID}.long.{TEMPLATEID}/stats/rh.entorhinal_exvivo.stats ]; then
+ echo "--- SKIPPING: {LONGSESSIONID}.long.{TEMPLATEID} file already exists: {FSSUBJDIR}/{LONGSESSIONID}.long.{TEMPLATEID}/stats/rh.entorhinal_exvivo.stats"
 status=$?
 else
- {FSHOME}/bin/recon-all -debug -long {LONGSESSIONID} {TEMPLATEID} -all
- status=$?
- # mv -n {FSSUBJDIR}/{LONGSESSIONID}.long.{TEMPLATEID} {FSSUBJDIR}/{TEMPLATEID}_{LONGSESSIONID}.long
+ echo "--- RUNNING: {LONGSESSIONID}.long.{TEMPLATEID} because output does not yet exist: {FSSUBJDIR}/{LONGSESSIONID}.long.{TEMPLATEID}/stats/rh.entorhinal_exvivo.stats"
+ {FSHOME}/bin/recon-all -debug -long {LONGSESSIONID} {TEMPLATEID} -all
+ status=$?
 fi
 exit $status
 """.format(SOURCE_SCRIPT=FS_SCRIPT_FN,
 FSHOME=FREESURFER_HOME,
 FSSUBJDIR=subjects_dir,
- TEMPLATEID=template_id,
- LONGSESSIONID=session_long_id)
- base_run_dir = os.path.join(subjects_dir, session_long_id, 'scripts')
+ TEMPLATEID=base_template_id,
+ LONGSESSIONID=subj_session_id)
+ base_run_dir = os.path.join(subjects_dir, subj_session_id, 'scripts')
 mkdir_p(base_run_dir)
 script_name = os.path.join(base_run_dir,'run_autorecon_stage_'+str(StageToRun)+'.sh')
 script = open(script_name, 'w')
@@ -288,11 +288,11 @@ def runLongitudinal(args, FREESURFER_HOME, FS_SCRIPT):
 os.chmod(script_name, 0777)
 script_name_stdout = script_name + '_out'
 script_name_stdout_fid = open(script_name_stdout, 'w')
- print "Starting auto_recon Stage: {0} for SubjectSession {1}".format(StageToRun, template_id)
+ print "Starting auto_recon Stage: {0} for SubjectSession {1}".format(StageToRun, base_template_id)
 scriptStatus = subprocess.check_call([script_name], stdout=script_name_stdout_fid, stderr=subprocess.STDOUT, shell='/bin/bash')
 if scriptStatus != 0:
 sys.exit(scriptStatus)
- print "Ending auto_recon Stage: {0} for SubjectSession {1}".format(StageToRun, template_id)
+ print "Ending auto_recon Stage: {0} for SubjectSession {1}".format(StageToRun, base_template_id)
 script_name_stdout_fid.close()
 return
@@ -338,29 +338,30 @@ def runAutoRecon(args, FREESURFER_HOME, FS_SCRIPT):
 # local_FS_SCRIPT = os.path.join(local_FREESURFER_HOME,'FreeSurferEnv.sh')
 local_FS_SCRIPT = 'FreeSurferEnv.sh'
 except KeyError, err:
+ print KeyError
+ print err
 raise KeyError
 ### END HACK
 subparsers = parser.add_subparsers(help='Currently supported subprocesses: "autorecon", "template", "longitudinal"')
 # Create -make subparser
 autorecon = subparsers.add_parser('autorecon', help='Link to recon-all i/o table: http://surfer.nmr.mgh.harvard.edu/fswiki/ReconAllDevTable')
- autorecon.add_argument('--T1_files', action='store', dest='T1_files', required=True, help='Original T1 image')
+ autorecon.add_argument('--subjects_dir', action='store', dest='subjects_dir', 
help='FreeSurfer subjects directory') autorecon.add_argument('--subj_session_id', action='store', dest='subj_session_id', required=True, help='Subject_Session') + autorecon.add_argument('--T1_files', action='store', dest='T1_files', required=True, help='Original T1 image') autorecon.add_argument('--brainmask', action='store', dest='brainmask', required=True, help='The normalized T1 image with the skull removed. Normalized 0-110 where white matter=110.') - autorecon.add_argument('--subjects_dir', action='store', dest='subjects_dir', help='FreeSurfer subjects directory') autorecon.set_defaults(func=runAutoRecon) # Create -base subparser template = subparsers.add_parser('template', help='Link to recon-all longitudinal processing: http://surfer.nmr.mgh.harvard.edu/fswiki/LongitudinalProcessing') - template.add_argument('--subjectTemplate_id', action='store', dest='subjectTemplate_id', required=True, help='Subject_template') - template.add_argument('--session_ids', action='store', dest='session_ids', nargs='+', required=True, help='List of sessions for a subject template') template.add_argument('--subjects_dir', action='store', dest='subjects_dir',required=True, help='FreeSurfer subjects directory') - #template.add_argument('--subj_session_id', action='store', dest='subj_session_id',required=True, help='Subject_Session') + template.add_argument('--base_template_id', action='store', dest='base_template_id', required=True, help='Subject_template') + template.add_argument('--list_all_subj_session_ids', action='store', dest='list_all_subj_session_ids', nargs='+', required=True, help='List of sessions for a subject template') template.set_defaults(func=runSubjectTemplate) # Create -long subparser longitudinal = subparsers.add_parser('longitudinal', help='Link to recon-all longitudinal processing: http://surfer.nmr.mgh.harvard.edu/fswiki/LongitudinalProcessing') longitudinal.add_argument('--subjects_dir', action='store', dest='subjects_dir', required=True, help='FreeSurfer subjects directory') - longitudinal.add_argument('--session_id', action='store', dest='session_id', required=True, help='Session for a subject longitudinal analysis (in --session_ids from "template" option)') - longitudinal.add_argument('--template_id', action='store', dest='template_id', required=True, help='Template folder name (--subjectTemplate_id from "template" option)') + longitudinal.add_argument('--subj_session_id', action='store', dest='subj_session_id', required=True, help='Subject_Session') + longitudinal.add_argument('--base_template_id', action='store', dest='base_template_id', required=True, help='Template folder name (--base_template_id from "template" option)') longitudinal.set_defaults(func=runLongitudinal) # Parse inputs and run correct function all_args = parser.parse_args() diff --git a/AutoWorkup/fswrap.py b/AutoWorkup/fswrap.py index 60196835..9bec57cc 100644 --- a/AutoWorkup/fswrap.py +++ b/AutoWorkup/fswrap.py @@ -7,26 +7,33 @@ class FSScriptInputSpec(CommandLineInputSpec): - subj_session_id = traits.Str(argstr='--subj_session_id %s', desc='Subject_Session') - subjects_dir = Directory(argstr='--subjects_dir %s', desc='FreeSurfer subjects directory') +## CROSS first cross sectional analysis +## BASE second generate a subject specific base reference (template building) +## LONG third use the BASE, and CROSS to generate a new better informed cross sectional result. 
+## Universal used subcommand = traits.Str('autorecon', argstr='%s', position=0, usedefault=True, desc='Define which subcommand to run: options ="autorecon", "template", "longitudinal"') + subjects_dir = Directory(argstr='--subjects_dir %s', desc='FreeSurfer subjects directory') + +## auto-recon CROSS flags T1_files = File(argstr='--T1_files %s', exists=True, desc='Original T1 image') brainmask = File(argstr='--brainmask %s', exists=True, desc='The normalized T1 image with the skull removed. Normalized 0-110 where white matter=110.') - subjectTemplate_id = traits.Str(argstr='--subjectTemplate_id %s', desc='Subject_template') - session_ids = traits.List(traits.Str(), argstr='--session_ids %s', desc='List of sessions for a subject template') - session_id = traits.Str(argstr='--session_id %s', desc='Session for a subject longitudinal analysis') - template_id = traits.Str(argstr='--template_id %s', desc='Template ID used in longitudinal processing') - # TODO: fs_env_script = traits.Str(argstr='--FSSource %s', default='${FREESURFER_HOME}/FreeSurferEnv.sh', desc='') - # TODO: fs_home = Directory(argstr='--FSHomeDir %s', desc='Location of FreeSurfer (differs for Mac and Linux environments') +## CROSS and LONG flags + subj_session_id = traits.Str(argstr='--subj_session_id %s', desc='Subject_Session used for "-subjid <> in cross sectional and used in -long <> for longitudinal') + +## BASE/Template building flags + list_all_subj_session_ids = traits.List(traits.Str(), argstr='--list_all_subj_session_ids %s', desc='List of sessions for a subject template') + +## LONG and BASE flags + base_template_id = traits.Str(argstr='--base_template_id %s', desc='The name of the result subdirectory (not full path) for the base/template processing to occur') class FSScriptOutputSpec(TraitedSpec): T1_out = File(exist=True, desc='brain.mgz') label1_out = File(exist=True, desc='aparc+aseg.nii.gz') label2_out = File(exist=True, desc='aparc.a2009+aseg.nii.gz') - subj_session_id = traits.Str(desc='Subject_Session, pass-through from input') - outDir = Directory(exist=True, desc='Template directory for subject') + processed_output_name = traits.Str(desc='The name of the subdirectory (not a full path) for this processing stage') + outDir = Directory(exist=True, desc='Full path to the output directory for this stage of processing') class FSScript(CommandLine): """ @@ -50,13 +57,16 @@ def _format_arg(self, opt, spec, val): def _list_outputs(self): outputs = self._outputs().get() if self.inputs.subcommand == 'autorecon': - outputs['T1_out'] = os.path.join(os.getcwd(), 'mri', 'brain.mgz') - outputs['label1_out'] = os.path.join(os.getcwd(), 'mri_nifti', 'aparc+aseg.nii.gz') - outputs['label2_out'] = os.path.join(os.getcwd(), 'mri_nifti', 'aparc.a2009+aseg.nii.gz') - outputs['subj_session_id'] = self.inputs.subj_session_id + outputs['T1_out'] = os.path.join(self.inputs.subjects_dir, 'mri', 'brain.mgz') + outputs['label1_out'] = os.path.join(self.inputs.subjects_dir, 'mri_nifti', 'aparc+aseg.nii.gz') + outputs['label2_out'] = os.path.join(self.inputs.subjects_dir, 'mri_nifti', 'aparc.a2009+aseg.nii.gz') + outputs['processed_output_name'] = self.inputs.subj_session_id + outputs['outDir'] = os.path.join(self.inputs.subjects_dir, self.inputs.subj_session_id) elif self.inputs.subcommand == 'template': - outputs['outDir'] = os.path.join(os.getcwd(), self.inputs.subjectTemplate_id) + outputs['processed_output_name'] = self.inputs.base_template_id + outputs['outDir'] = os.path.join(self.inputs.subjects_dir, self.inputs.base_template_id) 
elif self.inputs.subcommand == 'longitudinal': - templateFile = self.inputs.template_id + "_" + self.inputs.session_id + ".long" - outputs['outDir'] = os.path.join(os.getcwd(), templateFile) + longitudinal_processed_output_name = self.inputs.subj_session_id + ".long." + self.inputs.base_template_id + outputs['processed_output_name'] = longitudinal_processed_output_name + outputs['outDir'] = os.path.join(self.inputs.subjects_dir, longitudinal_processed_output_name) return outputs
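For reference, a minimal sketch (not part of the patch itself) of the SUBJECTS_DIR naming convention that the rewritten FSScript._list_outputs above converges on: the cross-sectional (CROSS) stage populates <subjectid>_<sessionid>, the template (BASE) stage populates the base_template_id directory, and the longitudinal (LONG) stage populates <subj_session_id>.long.<base_template_id>, all directly under subjects_dir rather than under os.getcwd(). The helper name and the example ids/path below are hypothetical and are not part of fswrap.py.

import os

def expected_fs_output_dir(subjects_dir, subcommand,
                           subj_session_id=None, base_template_id=None):
    """Hypothetical helper mirroring the directory names used by
    FSScript._list_outputs for each FreeSurfer processing stage."""
    if subcommand == 'autorecon':        # CROSS: one directory per subject_session
        name = subj_session_id
    elif subcommand == 'template':       # BASE: the within-subject template directory
        name = base_template_id
    elif subcommand == 'longitudinal':   # LONG: <subj_session_id>.long.<base_template_id>
        name = subj_session_id + '.long.' + base_template_id
    else:
        raise ValueError('unknown subcommand: ' + subcommand)
    return os.path.join(subjects_dir, name)

# Example with hypothetical ids:
#   expected_fs_output_dir('/fullpath/SUBJECTS_DIR', 'longitudinal',
#                          subj_session_id='subject01_session01',
#                          base_template_id='subject01_template')
# returns '/fullpath/SUBJECTS_DIR/subject01_session01.long.subject01_template'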