Skip to content

Commit

Permalink
ENH: Fixed data types for new format, and updated Freesurfer processing.
Browse files Browse the repository at this point in the history
  • Loading branch information
hjmjohnson committed Apr 20, 2012
1 parent cd3d052 commit faf1bc5
Show file tree
Hide file tree
Showing 3 changed files with 57 additions and 46 deletions.
60 changes: 28 additions & 32 deletions AutoWorkup/WorkupT1T2.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,6 @@
from BRAINSTools.WarpImageMultiTransform import *
from BRAINSTools.WarpAllAtlas import *

default_sge_options_dictionary={'qsub_args': '-S /bin/bash -q all.q -pe smp1 2-4 -o /dev/null -e /dev/null ', 'overwrite': True}

####################### HACK: Needed to make some global variables for quick
####################### processing needs
Expand Down Expand Up @@ -174,7 +173,7 @@ def create_BRAINSCut_XML(rho,phi,theta,model,

XMLSTRING="""<AutoSegProcessDescription>
<RegistrationConfiguration
ImageTypeToUse="T1"
ImageTypeToUse="T1-30"
ID="BSpline_ROI"
BRAINSROIAutoDilateSize="1"
/>
Expand Down Expand Up @@ -205,8 +204,8 @@ def create_BRAINSCut_XML(rho,phi,theta,model,
<ProbabilityMap StructureID="l_{structure}" Gaussian="0.5" GenerateVector="true" Filename="{l_probabilityMap}"/>
<ProbabilityMap StructureID="r_{structure}" Gaussian="0.5" GenerateVector="true" Filename="{r_probabilityMap}"/>
<DataSet Type="Atlas" Name="template">
<Image Type="T1" Filename="{atlasT1}"/>
<Image Type="T2" Filename="na"/>
<Image Type="T1-30" Filename="{atlasT1}"/>
<Image Type="T2-30" Filename="na"/>
{EXTRA_FLAGS}
<Image Type="SGGAD" Filename="na"/>
Expand All @@ -215,8 +214,8 @@ def create_BRAINSCut_XML(rho,phi,theta,model,
<SpatialLocation Type="theta" Filename="{theta}"/>
</DataSet>
<DataSet Name="sessionID" Type="Apply" OutputDir="./">
<Image Type="T1" Filename="{subjT1}"/>
<Image Type="T2" Filename="{subjT2}"/>
<Image Type="T1-30" Filename="{subjT1}"/>
<Image Type="T2-30" Filename="{subjT2}"/>
<Image Type="T1GAD" Filename="{subjT1GAD}"/>
<Image Type="T2GAD" Filename="{subjT2GAD}"/>
<Image Type="SGGAD" Filename="{subjSGGAD}"/>
Expand Down Expand Up @@ -271,41 +270,41 @@ def getFirstT1(uid, dbfile):
with open(dbfile) as fp:
db = load(fp)
print("uid:= {0}, dbfile: {1}".format(uid,dbfile))
print("result:= {0}".format(db[uid]["T1"]))
return db[uid]["T1"][0]
print("result:= {0}".format(db[uid]["T1-30"]))
return db[uid]["T1-30"][0]

def getT1s(uid, dbfile):
from cPickle import load
with open(dbfile) as fp:
db = load(fp)
#print("uid:= {0}, dbfile: {1}".format(uid,dbfile))
#print("result:= {0}".format(db[uid]["T1"]))
return db[uid]["T1"]
#print("result:= {0}".format(db[uid]["T1-30"]))
return db[uid]["T1-30"]

def getT1sLength(uid, dbfile):
from cPickle import load
with open(dbfile) as fp:
db = load(fp)
#print("uid:= {0}, dbfile: {1}".format(uid,dbfile))
#print("result:= {0}".format(db[uid]["T1"]))
return len(db[uid]["T1"])
#print("result:= {0}".format(db[uid]["T1-30"]))
return len(db[uid]["T1-30"])

def getT2s(uid, dbfile):
from cPickle import load
with open(dbfile) as fp:
db = load(fp)
#print("uid:= {0}, dbfile: {1}".format(uid,dbfile))
#print("result:= {0}".format(db[uid]["T1"]))
return db[uid]["T2"]
#print("result:= {0}".format(db[uid]["T1-30"]))
return db[uid]["T2-30"]

def getT1sT2s(uid, dbfile,altT1):
from cPickle import load
with open(dbfile) as fp:
db = load(fp)
#print("uid:= {0}, dbfile: {1}".format(uid,dbfile))
#print("result:= {0}".format(db[uid]["T1"]))
temp=db[uid]["T1"]
temp.append(db[uid]["T2"])
#print("result:= {0}".format(db[uid]["T1-30"]))
temp=db[uid]["T1-30"]
temp.append(db[uid]["T2-30"])
temp[0]=altT1
return temp

Expand All @@ -325,7 +324,7 @@ def getT1sT2s(uid, dbfile,altT1):
###########################################################################
###########################################################################
def WorkupT1T2(processingLevel,mountPrefix,ExperimentBaseDirectory, subject_data_file, atlas_fname_wpath, BCD_model_path,
InterpolationMode="Linear", Mode=10,DwiList=[] ):
InterpolationMode="Linear", Mode=10,DwiList=[],WORKFLOW_COMPONENTS=[],CLUSTER_QUEUE=''):
"""
Run autoworkup on all subjects data defined in the subject_data_file
Expand Down Expand Up @@ -372,6 +371,7 @@ def WorkupT1T2(processingLevel,mountPrefix,ExperimentBaseDirectory, subject_data
else:
print "ERROR: Invalid number of elements in row"
print row
print "DICTIONARY",multiLevel
from cPickle import dump
dump(multiLevel, open(subjectDatabaseFile,'w'))

Expand Down Expand Up @@ -442,7 +442,7 @@ def WorkupT1T2(processingLevel,mountPrefix,ExperimentBaseDirectory, subject_data
# Entries below are of the form:
baw200.connect( [ (uidSource, BCD, [(('uid', getFirstT1, subjectDatabaseFile) , 'inputVolume')] ), ])

if processingLevel > 0:
if 'BASIC' in WORKFLOW_COMPONENTS:
########################################################
# Run BLI atlas_to_subject
########################################################
Expand All @@ -457,7 +457,7 @@ def WorkupT1T2(processingLevel,mountPrefix,ExperimentBaseDirectory, subject_data
(BAtlas,BLI,[('template_landmark_weights_csv','inputWeightFilename')])
])

if processingLevel > 0:
if 'AUXLMK' in WORKFLOW_COMPONENTS:
########################################################
# Run BLI subject_to_atlas
########################################################
Expand All @@ -479,7 +479,7 @@ def WorkupT1T2(processingLevel,mountPrefix,ExperimentBaseDirectory, subject_data
baw200.connect(BLI2Atlas,'outputTransformFilename',Resample2Atlas,'warpTransform')
baw200.connect(BAtlas,'template_t1',Resample2Atlas,'referenceVolume')

if processingLevel > 1:
if 'TISSUE_CLASSIFY' in WORKFLOW_COMPONENTS:
########################################################
# Run BABC on Multi-modal images
########################################################
Expand Down Expand Up @@ -644,8 +644,7 @@ def printFullPath(outFileFullPath):
baw200.connect(GADT1,'outputVolume',SGI,'inputVolume1')
baw200.connect(GADT2,'outputVolume',SGI,'inputVolume2')

#if processingLevel > 1:
if processingLevel == -123:
if 'SEGMENTATION' in WORKFLOW_COMPONENTS:
"""
Load the BRAINSCut models & probability maps.
"""
Expand Down Expand Up @@ -753,9 +752,9 @@ def printFullPath(outFileFullPath):
"""

## Make deformed Atlas image space
if processingLevel > 2:
if 'ANTS' in WORKFLOW_COMPONENTS:

many_cpu_sge_options_dictionary={'qsub_args': '-S /bin/bash -q all.q -pe smp1 4-12 -l mem_free=5000M -o /dev/null -e /dev/null ', 'overwrite': True}
many_cpu_sge_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 4-12 -l mem_free=5000M -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
print("""
Run ANTS Registration at processingLevel={0}
""".format(processingLevel) )
Expand All @@ -769,7 +768,7 @@ def printFullPath(outFileFullPath):
baw200.connect( BAtlas,'template_t1', ComputeAtlasToSubjectTransform,"moving_T2_image")
baw200.connect(BLI,'outputTransformFilename',ComputeAtlasToSubjectTransform,'initialTransform')

if processingLevel == -123:
if 'ANTSWARP_FIXME' in WORKFLOW_COMPONENTS:
WarpAtlas = pe.Node(interface=WarpAllAtlas(), name = "19_WarpAtlas")
WarpAtlas.inputs.moving_atlas = atlas_fname_wpath
WarpAtlas.inputs.deformed_atlas = "./template_t2.nii.gz"
Expand All @@ -778,17 +777,14 @@ def printFullPath(outFileFullPath):
baw200.connect( ComputeAtlasToSubjectTransform,'output_warp', WarpAtlas,"deformation_field")
baw200.connect( SplitAvgBABC,'avgBABCT1', WarpAtlas, 'reference_image')


if processingLevel > 3:
if 'FREESURFER' in WORKFLOW_COMPONENTS:
print("""
Run Freesurfer ReconAll at processingLevel={0}
""".format(processingLevel) )
subj_id = os.path.basename(os.path.dirname(os.path.dirname(baw200.base_dir)))
scan_id = os.path.basename(os.path.dirname(baw200.base_dir))
reconall = pe.Node(interface=ReconAll(),name="41_FS510")
freesurfer_sge_options_dictionary={'qsub_args': '-S /bin/bash -q all.q -pe smp1 1 -l mem_free=3100M -o /dev/null -e /dev/null ', 'overwrite': True}
freesurfer_sge_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 1 -l mem_free=3100M -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
reconall.plugin_args=freesurfer_sge_options_dictionary
reconall.inputs.subject_id = subj_id+'_'+scan_id
reconall.inputs.subject_id = 'ThisSubject'
reconall.inputs.directive = 'all'
reconall.inputs.subjects_dir = '.'
baw200.connect(SplitAvgBABC,'avgBABCT1',reconall,'T1_files')
Expand Down
41 changes: 28 additions & 13 deletions AutoWorkup/baw_exp.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def get_global_sge_script(pythonPathsList,binPathsList,customEnvironment={}):

custEnvString=""
for key,value in customEnvironment.items():
custEnvString+=key+"="+value+"\n"
custEnvString+="export "+key+"="+value+"\n"

PYTHONPATH=":".join(pythonPathsList)
BASE_BUILDS=":".join(binPathsList)
Expand All @@ -43,7 +43,7 @@ def main(argv=None):
import os
import csv
import string

if argv == None:
argv = sys.argv

Expand All @@ -67,20 +67,22 @@ def main(argv=None):

expConfig = ConfigParser.ConfigParser()
expConfig.read(input_arguments.ExperimentConfig)

# Experiment specific information
session_db=expConfig.get('EXPERIMENT_DATA','SESSION_DB')
ExperimentName=expConfig.get('EXPERIMENT_DATA','EXPERIMENTNAME')

WORKFLOW_COMPONENTS_STRING=expConfig.get('EXPERIMENT_DATA','WORKFLOW_COMPONENTS')
WORKFLOW_COMPONENTS=eval(WORKFLOW_COMPONENTS_STRING)

# Platform specific information
# Prepend the python search paths
PYTHON_AUX_PATHS=expConfig.get(input_arguments.processingEnvironment,'PYTHON_AUX_PATHS')
PYTHON_AUX_PATHS=PYTHON_AUX_PATHS.split(';')
PYTHON_AUX_PATHS=PYTHON_AUX_PATHS.split(':')
PYTHON_AUX_PATHS.extend(sys.path)
sys.path=PYTHON_AUX_PATHS
# Prepend the shell environment search paths
PROGRAM_PATHS=expConfig.get(input_arguments.processingEnvironment,'PROGRAM_PATHS')
PROGRAM_PATHS=PROGRAM_PATHS.split(';')
PROGRAM_PATHS=PROGRAM_PATHS.split(':')
PROGRAM_PATHS.extend(os.environ['PATH'].split(':'))
os.environ['PATH']=':'.join(PROGRAM_PATHS)
# Define platform specific output write paths
Expand All @@ -95,15 +97,28 @@ def main(argv=None):
BCDMODELPATH=expConfig.get(input_arguments.processingEnvironment,'BCDMODELPATH')
CUSTOM_ENVIRONMENT=expConfig.get(input_arguments.processingEnvironment,'CUSTOM_ENVIRONMENT')
CUSTOM_ENVIRONMENT=eval(CUSTOM_ENVIRONMENT)
## Set custom environmental variables so that subprocesses work properly (e.g. for Freesurfer)
#print CUSTOM_ENVIRONMENT
for key,value in CUSTOM_ENVIRONMENT.items():
#print "SETTING: ", key, value
os.putenv(key,value)
os.environ[key]=value
print os.environ
#sys.exit(-1)

## If freesurfer is requested, then ensure that a sane environment is available
if 'FREESURFER' in WORKFLOW_COMPONENTS:
print "FREESURFER NEEDS TO CHECK FOR SANE ENVIRONMENT HERE."

CLUSTER_QUEUE=expConfig.get(input_arguments.processingEnvironment,'CLUSTER_QUEUE')

print "Configuring Pipeline"
import WorkupT1T2 ## NOTE: This needs to occur AFTER the PYTHON_AUX_PATHS has been modified
baw200=WorkupT1T2.WorkupT1T2(input_arguments.processingLevel, mountPrefix,
ExperimentBaseDirectory,
session_db,
ATLASPATH,
BCDMODELPATH)

BCDMODELPATH,WORKFLOW_COMPONENTS=WORKFLOW_COMPONENTS,CLUSTER_QUEUE=CLUSTER_QUEUE)
print "Start Processing"

## Create the shell wrapper script for ensuring that all jobs running on remote hosts from SGE
Expand All @@ -112,10 +127,11 @@ def main(argv=None):
print JOB_SCRIPT
if input_arguments.wfrun == 'helium_all.q':
baw200.run(plugin='SGE',
plugin_args=dict(template=JOB_SCRIPT,qsub_args="-S /bin/bash -q all.q -pe smp1 2-4 -o /dev/null -e /dev/null "))
plugin_args=dict(template=JOB_SCRIPT,qsub_args="-S /bin/bash -pe smp1 2-4 -o /dev/null -e /dev/null "+CLUSTER_QUEUE))
elif input_arguments.wfrun == 'ipl_OSX':
print "Running On ipl_OSX"
baw200.run(plugin='SGE',
plugin_args=dict(template=JOB_SCRIPT,qsub_args="-S /bin/bash -q OSX -pe smp1 2-4 -o /dev/null -e /dev/null "))
plugin_args=dict(template=JOB_SCRIPT,qsub_args="-S /bin/bash -pe smp1 2-4 -o /dev/null -e /dev/null "+CLUSTER_QUEUE))
elif input_arguments.wfrun == 'local_4':
print "Running with 4 parallel processes on local machine"
baw200.run(plugin='MultiProc', plugin_args={'n_procs' : 4})
Expand All @@ -127,9 +143,8 @@ def main(argv=None):
baw200.run()
else:
print "You must specify the run environment type."
sys.exit(-1)

#baw200.write_graph()
sys.exit(-1)
baw200.write_graph()

if __name__ == "__main__":
sys.exit(main())
2 changes: 1 addition & 1 deletion AutoWorkup/install_python_tools.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
mkdir ./python_install_stuff
cd ./python_install_stuff

export INSTALL_DIR=/scratch/PREDICT/Experiments/NewExperiment/python-site-packages
export INSTALL_DIR=/ipldev/sharedopt/20120201/Darwin_i386/PYTHON_MODULES/
export PYTHONPATH=${INSTALL_DIR}
export THIS_DIR=$(pwd)

Expand Down

0 comments on commit faf1bc5

Please sign in to comment.