Skip to content

Commit

Permalink
BUG: This version of BRAINSCut is a temporary implementation
Browse files Browse the repository at this point in the history
The version of BRAINSCut used here is a temporary implementation that
will need to be replaced with a more generic version later.
Currently, BRAINSCut requires about 30 GB of memory, and its trained
model filename is hard-coded to a specific path.
  • Loading branch information
hjmjohnson committed Sep 11, 2012
1 parent 2d93912 commit 0f68431
Show file tree
Hide file tree
Showing 4 changed files with 100 additions and 50 deletions.
46 changes: 24 additions & 22 deletions AutoWorkup/BRAINSTools/BRAINSCutCMD.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import subprocess

def addProbabilityMapElement( probabilityMap, maskName, outputStream ):
outputStream.write( " <ProbabilityMap StructureID = \""+ maskName + "\" \n")
outputStream.write( " <ProbabilityMap StructureID = \""+ maskName + "\"\n")
outputStream.write( " Gaussian = \"0.5\"\n")
outputStream.write( " GenerateVector = \"true\"\n")
outputStream.write( " Filename = \""+ probabilityMap+"\"\n")
Expand Down Expand Up @@ -40,38 +40,39 @@ def xmlGenerator( args ):
# Registration
#
outputStream.write( " <RegistrationConfiguration \n")
outputStream.write( " ImageTypeToUse = \"T1\" \n")
outputStream.write( " ID = \""+registrationID+"\" \n")
outputStream.write( " BRAINSROIAutoDilateSize= \"1\" \n")
outputStream.write( " ImageTypeToUse = \"T1\"\n")
outputStream.write( " ID = \""+registrationID+"\"\n")
outputStream.write( " BRAINSROIAutoDilateSize= \"1\"\n")
outputStream.write( " />\n")

#
# training vector configuration (feature vector)
#

outputStream.write( " <NeuralNetParams MaskSmoothingValue = \"0.0\" \n")
outputStream.write( " <NeuralNetParams MaskSmoothingValue = \"0.0\"\n")
outputStream.write( " GradientProfileSize = \"1\"\n")
outputStream.write( " TrainingVectorFilename = \""+args.trainingVectorFilename+"\" \n")
outputStream.write( " TrainingModelFilename = \""+args.modelFileBasename+"\" \n")
outputStream.write( " TestVectorFilename = \"na\" \n")
outputStream.write( " Normalization = \""+args.vectorNormalization+"\" \n")
outputStream.write( " TrainingVectorFilename = \""+args.trainingVectorFilename+"\"\n")
# outputStream.write( " TrainingModelFilename = \""+args.modelFileBasename+"\"\n")
outputStream.write( " TrainingModelFilename = \"/nfsscratch/PREDICT/TEST_BRAINSCut/20120828ANNModel_Model_RF100.txt\"\n")
outputStream.write( " TestVectorFilename = \"na\"\n")
outputStream.write( " Normalization = \""+args.vectorNormalization+"\"\n")
outputStream.write( " />\n")

#
# random forest parameters
#
outputStream.write( " <RandomForestParameters \n")
outputStream.write( " MaxDepth= \"100\" \n") #dummy
outputStream.write( " MaxTreeCount= \"100\" \n") # dummy
outputStream.write( " MaxDepth= \"100\"\n") #dummy
outputStream.write( " MaxTreeCount= \"100\"\n") # dummy
outputStream.write( " MinSampleCount= \"5\"\n")
outputStream.write( " UseSurrogates= \"false\" \n")
outputStream.write( " UseSurrogates= \"false\"\n")
outputStream.write( " CalcVarImportance= \"false\"\n")
outputStream.write( " />\n")

#
# ANN Parameters
#
outputStream.write( " <ANNParameters Iterations = \"5\" \n")
outputStream.write( " <ANNParameters Iterations = \"5\"\n")
outputStream.write( " MaximumVectorsPerEpoch = \"70000\"\n")
outputStream.write( " EpochIterations = \"100\"\n")
outputStream.write( " ErrorInterval = \"1\"\n")
Expand All @@ -85,8 +86,8 @@ def xmlGenerator( args ):
# apply conditions
#
outputStream.write( "<ApplyModel CutOutThresh = \"0.05\"\n")
outputStream.write( " MaskThresh = \"0.5\" \n")
outputStream.write( " GaussianSmoothingSigma = \"0.0\" \n")
outputStream.write( " MaskThresh = \"0.5\"\n")
outputStream.write( " GaussianSmoothingSigma = \"0.0\"\n")
outputStream.write( " />\n")

#
Expand Down Expand Up @@ -129,7 +130,7 @@ def xmlGenerator( args ):
outputStream.write( ' <Registration SubjToAtlasRegistrationFilename="'+args.deformationFromSubjectToTemplate+'"\n')
else:
outputStream.write( ' <Registration SubjToAtlasRegistrationFilename="" \n')
outputStream.write( " AtlasToSubjRegistrationFilename=\""+args.deformationFromTemplateToSubject+"\" \n")
outputStream.write( " AtlasToSubjRegistrationFilename=\""+args.deformationFromTemplateToSubject+"\"\n")
outputStream.write( " ID=\""+registrationID+"\" /> \n")
outputStream.write( " </DataSet>\n")

Expand Down Expand Up @@ -212,14 +213,15 @@ def xmlGenerator( args ):
else:
print("no xml filename is given to process")

# " --modelFilename " + args.modelFilename +

subprocess.call(["BRAINSCut" +
" --applyModel " +
BRAINSCutCommand=["BRAINSCut" + " --applyModel " +
" --netConfiguration " + args.xmlFilename +
" --modelFilename " + args.modelFilename +
" --method RandomForest"
],
shell=True)
" --method RandomForest" +
" --numberOfTrees 100 --randomTreeDepth 100"
]
print("HACK: BRAINCUT COMMAND: {0}".format(BRAINSCutCommand))
subprocess.call(BRAINSCutCommand, shell=True)
"""
script to be run
/ipldev/scratch/eunyokim/src/BRAINS20111028/build-Darwin-Debug/lib/BRAINSCut --applyModel --netConfiguration /ipldev/scratch/eunyokim/src/BRAINS20111028/build-Darwin/BRAINSTools-build/BRAINSCut/TestSuite/TestSuite/NetConfigurations/output.xml --modelFilename /hjohnson/HDNI/PREDICT_TRAINING/regina_ann/TrainingModels/BRAINSAutoWorkUpTest/GadSG/Test9/TrainedModels/20110919ANNModel_allSubcorticals.txtD0050NT0050 --method RandomForest
Expand Down
2 changes: 1 addition & 1 deletion AutoWorkup/BRAINSTools/RF8BRAINSCutWrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ class RF8BRAINSCutWrapperCLOutputSpec(TraitedSpec):
outputBinaryRightPutamen = File( desc = "Output binary file of right putamen", exists = True, mandatory = True)

outputBinaryLeftThalamus = File( desc = "Output binary file of left thalamus", exists = True, mandatory = True)
outputBinaryRightThalamus = File( desc = "Output binary file:0 of right thalamus", exists = True, mandatory = True)
outputBinaryRightThalamus = File( desc = "Output binary file of right thalamus", exists = True, mandatory = True)

class RF8BRAINSCutWrapper(CommandLine):
"""
Expand Down
27 changes: 25 additions & 2 deletions AutoWorkup/WorkupT1T2.py
Original file line number Diff line number Diff line change
Expand Up @@ -334,13 +334,19 @@ def WorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, ExperimentBas
allSessions = ExperimentDatabase.getSessionsFromSubject(subjectid)
print("Running sessions: {ses} for subject {sub}".format(ses=allSessions,sub=subjectid))
BAtlas[subjectid] = MakeAtlasNode(atlas_fname_wpath,"BAtlas_"+str(subjectid)) ## Call function to create node


for sessionid in allSessions:
global_AllT1s=ExperimentDatabase.getFilenamesByScantype(sessionid,['T1-30','T1-15'])
global_AllT2s=ExperimentDatabase.getFilenamesByScantype(sessionid,['T2-30','T2-15'])
global_AllPDs=ExperimentDatabase.getFilenamesByScantype(sessionid,['PD-30','PD-15'])
global_AllFLs=ExperimentDatabase.getFilenamesByScantype(sessionid,['FL-30','FL-15'])
global_AllOthers=ExperimentDatabase.getFilenamesByScantype(sessionid,['OTHER-30','OTHER-15'])
#print("HACK: all FLs: {0}".format(ExperimentDatabase.getFilenamesByScantype(sessionid,['FL-30','FL-15'])))
print("HACK: all T1s: {0} {1}".format(global_AllT1s, len(global_AllT1s) ))
print("HACK: all T2s: {0} {1}".format(global_AllT2s, len(global_AllT2s) ))
print("HACK: all PDs: {0} {1}".format(global_AllPDs, len(global_AllPDs) ))
print("HACK: all FLs: {0} {1}".format(global_AllFLs, len(global_AllFLs) ))
print("HACK: all Others: {0} {1}".format(global_AllOthers, len(global_AllOthers) ))

projectid = ExperimentDatabase.getProjFromSession(sessionid)
print("PROJECT: {0} SUBJECT: {1} SESSION: {2}".format(projectid,subjectid,sessionid))
Expand Down Expand Up @@ -598,7 +604,22 @@ def WorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, ExperimentBas
baw200.connect(ClipT1ImageWithBrainMaskNode[sessionid], 'clipped_file', AtlasToSubjectantsRegistration[subjectid], 'fixed_image')
baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'OutputSpec.atlasToSubjectTransform',AtlasToSubjectantsRegistration[subjectid],'initial_moving_transform')

if 'SEGMENTATION' in WORKFLOW_COMPONENTS and len(global_AllT2s) > 0: # Currently only works with multi-modal_data
global_AllT1s=ExperimentDatabase.getFilenamesByScantype(sessionid,['T1-30','T1-15'])
global_AllT2s=ExperimentDatabase.getFilenamesByScantype(sessionid,['T2-30','T2-15'])
global_AllPDs=ExperimentDatabase.getFilenamesByScantype(sessionid,['PD-30','PD-15'])
global_AllFLs=ExperimentDatabase.getFilenamesByScantype(sessionid,['FL-30','FL-15'])
global_AllOthers=ExperimentDatabase.getFilenamesByScantype(sessionid,['OTHER-30','OTHER-15'])
print("HACK2: all T1s: {0} {1}".format(global_AllT1s, len(global_AllT1s) ))
print("HACK2: all T2s: {0} {1}".format(global_AllT2s, len(global_AllT2s) ))
print("HACK2: all PDs: {0} {1}".format(global_AllPDs, len(global_AllPDs) ))
print("HACK2: all FLs: {0} {1}".format(global_AllFLs, len(global_AllFLs) ))
print("HACK2: all Others: {0} {1}".format(global_AllOthers, len(global_AllOthers) ))
if ( 'SEGMENTATION' in WORKFLOW_COMPONENTS ) : # Currently only works with multi-modal_data
print("HACK SEGMENTATION IN WORKFLOW_COMPONENTS")
if ( len(global_AllT2s) > 0 ): # Currently only works with multi-modal_data
print("HACK len(global_AllT2s) > 0 ")
print("HACK")
if ( 'SEGMENTATION' in WORKFLOW_COMPONENTS ) and ( len(global_AllT2s) > 0 ): # Currently only works with multi-modal_data
from WorkupT1T2BRAINSCut import CreateBRAINSCutWorkflow
myLocalSegWF[subjectid] = CreateBRAINSCutWorkflow(projectid, subjectid, sessionid,'Segmentation',CLUSTER_QUEUE,BAtlas[subjectid]) ##Note: Passing in the entire BAtlas Object here!
baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'OutputSpec.t1_average', myLocalSegWF[subjectid], "InputSpec.T1Volume" )
Expand All @@ -625,6 +646,8 @@ def WorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, ExperimentBas
baw200.connect(myLocalSegWF[subjectid], 'OutputSpec.outputBinaryRightThalamus',SEGMENTATION_DataSink[subjectid], 'BRAINSCut.@outputBinaryRightThalamus')
baw200.connect(myLocalSegWF[subjectid], 'OutputSpec.outputLabelImageName', SEGMENTATION_DataSink[subjectid],'BRAINSCut.@outputLabelImageName')
baw200.connect(myLocalSegWF[subjectid], 'OutputSpec.outputCSVFileName', SEGMENTATION_DataSink[subjectid],'BRAINSCut.@outputCSVFileName')
else:
print("SKIPPING SEGMENTATION PHASE FOR {0} {1} {2}, lenT2s {3}".format(projectid, subjectid, sessionid, len(global_AllT2s) ))

if 'FREESURFER' in WORKFLOW_COMPONENTS and len(global_AllT2s) > 0: # Currently only works with multi-modal_data
RunAllFSComponents=True ## A hack to avoid 26 hour run of freesurfer
Expand Down
75 changes: 50 additions & 25 deletions AutoWorkup/WorkupT1T2BRAINSCut.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,36 +17,48 @@ def CreateLabelMap(listOfImages,LabelImageName,CSVFileName):
A function to create a consolidated label map and a
csv file of volume measurements.
"""

"""
subjectANNLabel_l_caudate.nii.gz
subjectANNLabel_l_hippocampus.nii.gz
subjectANNLabel_l_putamen.nii.gz
subjectANNLabel_l_thalamus.nii.gz
subjectANNLabel_r_caudate.nii.gz
subjectANNLabel_r_hippocampus.nii.gz
subjectANNLabel_r_putamen.nii.gz
subjectANNLabel_r_thalamus.nii.gz
"""
import SimpleITK as sitk
import os
import csv
orderOfPriority = [
"l_Caudate_seg" ,
"r_Caudate_seg" ,
"l_Putamen_seg" ,
"r_Putamen_seg" ,
"l_Hippocampus_seg" ,
"r_Hippocampus_seg" ,
"l_Thalamus_seg" ,
"r_Thalamus_seg"
"l_caudate" ,
"r_caudate" ,
"l_putamen" ,
"r_putamen" ,
"l_hippocampus" ,
"r_hippocampus" ,
"l_thalamus" ,
"r_thalamus"
]

valueDict={
"l_Caudate_seg" : 1,
"r_Caudate_seg" : 2,
"l_Putamen_seg" : 3,
"r_Putamen_seg" : 4,
"l_Hippocampus_seg" : 5,
"r_Hippocampus_seg" : 6,
"l_Thalamus_seg" : 7,
"r_Thalamus_seg" : 8
"l_caudate" : 1,
"r_caudate" : 2,
"l_putamen" : 3,
"r_putamen" : 4,
"l_hippocampus" : 5,
"r_hippocampus" : 6,
"l_thalamus" : 7,
"r_thalamus" : 8
}

labelImage = None
for segFN in listOfImages:
im = sitk.ReadImage(segFN)
im.GetSize()
structName=os.path.basename(segFN.replace(".nii.gz",""))
remove_pre_postfix=os.path.basename(segFN.replace(".nii.gz","").replace("subjectANNLabel_","").replace("_seg",""))
structName=remove_pre_postfix.lower()
if labelImage is None:
labelImage = im*valueDict[structName]
else:
Expand Down Expand Up @@ -139,14 +151,25 @@ def CreateBRAINSCutWorkflow(projectid, subjectid, sessionid,WFName,CLUSTER_QUEUE
RF8BC.inputs.trainingVectorFilename = "trainingVectorFilename.txt"
RF8BC.inputs.xmlFilename = "BRAINSCutSegmentationDefinition.xml"

RF8BC.inputs.outputBinaryLeftCaudate= 'l_Caudate_seg.nii.gz'
RF8BC.inputs.outputBinaryRightCaudate= 'r_Caudate_seg.nii.gz'
RF8BC.inputs.outputBinaryLeftHippocampus= 'l_Hippocampus_seg.nii.gz'
RF8BC.inputs.outputBinaryRightHippocampus= 'r_Hippocampus_seg.nii.gz'
RF8BC.inputs.outputBinaryLeftPutamen= 'l_Putamen_seg.nii.gz'
RF8BC.inputs.outputBinaryRightPutamen= 'r_Putamen_seg.nii.gz'
RF8BC.inputs.outputBinaryLeftThalamus= 'l_Thalamus_seg.nii.gz'
RF8BC.inputs.outputBinaryRightThalamus= 'r_Thalamus_seg.nii.gz'
""" HACK These should be l_Caudate_seg.nii.gz
subjectANNLabel_l_caudate.nii.gz
subjectANNLabel_l_hippocampus.nii.gz
subjectANNLabel_l_putamen.nii.gz
subjectANNLabel_l_thalamus.nii.gz
subjectANNLabel_r_caudate.nii.gz
subjectANNLabel_r_hippocampus.nii.gz
subjectANNLabel_r_putamen.nii.gz
subjectANNLabel_r_thalamus.nii.gz
"""

RF8BC.inputs.outputBinaryLeftCaudate= 'subjectANNLabel_l_caudate.nii.gz'
RF8BC.inputs.outputBinaryRightCaudate= 'subjectANNLabel_r_caudate.nii.gz'
RF8BC.inputs.outputBinaryLeftHippocampus= 'subjectANNLabel_l_hippocampus.nii.gz'
RF8BC.inputs.outputBinaryRightHippocampus= 'subjectANNLabel_r_hippocampus.nii.gz'
RF8BC.inputs.outputBinaryLeftPutamen= 'subjectANNLabel_l_putamen.nii.gz'
RF8BC.inputs.outputBinaryRightPutamen= 'subjectANNLabel_r_putamen.nii.gz'
RF8BC.inputs.outputBinaryLeftThalamus= 'subjectANNLabel_l_thalamus.nii.gz'
RF8BC.inputs.outputBinaryRightThalamus= 'subjectANNLabel_r_thalamus.nii.gz'

cutWF.connect(inputsSpec,'T1Volume',RF8BC,'inputSubjectT1Filename')
cutWF.connect(inputsSpec,'T2Volume',RF8BC,'inputSubjectT2Filename')
Expand All @@ -171,6 +194,8 @@ def CreateBRAINSCutWorkflow(projectid, subjectid, sessionid,WFName,CLUSTER_QUEUE
cutWF.connect(atlasObject,'r_thalamus_ProbabilityMap',RF8BC,'probabilityMapsRightThalamus')
##TODO:
cutWF.connect(atlasObject,'RandomForestAllSubcorticalsBalancedModel_txtD0060NT0060_gz',RF8BC,'modelFilename')
##HACK: Needs to be fixed
#RF8BC.inputs.modelFilename='/nfsscratch/PREDICT/TEST_BRAINSCut/20120828ANNModel_Model_RF100.txt'

cutWF.connect(inputsSpec,'atlasToSubjectTransform',RF8BC,'deformationFromTemplateToSubject')

Expand Down

0 comments on commit 0f68431

Please sign in to comment.