Skip to content

Commit

Permalink
ENH: Clean up labels maps with prior probabilities.
Browse files Browse the repository at this point in the history
Trying to find the right morphological operators
to make nice clean looking segmentation maps.
  • Loading branch information
hjmjohnson committed Feb 2, 2013
1 parent 8d42246 commit a16921c
Show file tree
Hide file tree
Showing 2 changed files with 99 additions and 17 deletions.
9 changes: 5 additions & 4 deletions AutoWorkup/WorkupT1T2.py
Original file line number Diff line number Diff line change
Expand Up @@ -800,12 +800,12 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB
baw200.connect(AtlasToSubjectantsRegistration[sessionid], 'composite_transform', myLocalSegWF[sessionid], 'inputspec.atlasToSubjectTransform')

### Now define where the final organized outputs should go.
SEGMENTATION_DataSink[sessionid] = pe.Node(nio.DataSink(), name="DenoisedSegmentation_DS_" + str(subjectid) + "_" + str(sessionid))
SEGMENTATION_DataSink[sessionid] = pe.Node(nio.DataSink(), name="CleanedDenoisedSegmentation_DS_" + str(subjectid) + "_" + str(sessionid))
SEGMENTATION_DataSink[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE
SEGMENTATION_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
# SEGMENTATION_DataSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'BRAINSCut')
# SEGMENTATION_DataSink[sessionid].inputs.regexp_substitutions = GenerateBRAINSCutImagesOutputPattern(projectid, subjectid, sessionid)
SEGMENTATION_DataSink[sessionid].inputs.substitutions = [('Segmentations', os.path.join(projectid, subjectid, sessionid, 'DenoisedRFSegmentations')),
SEGMENTATION_DataSink[sessionid].inputs.substitutions = [('Segmentations', os.path.join(projectid, subjectid, sessionid, 'CleanedDenoisedRFSegmentations')),
('subjectANNLabel_', ''),
('ANNContinuousPrediction',''),
('subject.nii.gz','.nii.gz'),
Expand All @@ -825,6 +825,7 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB
baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightGlobus', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightGlobus')
baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputLabelImageName', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputLabelImageName')
baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputCSVFileName', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputCSVFileName')
#baw200.connect(myLocalSegWF[sessionid], 'outputspec.cleaned_labels', SEGMENTATION_DataSink[sessionid], 'Segmentations.@cleaned_labels')

MergeStage2BinaryVolumesName = "99_MergeStage2BinaryVolumes_" + str(sessionid)
MergeStage2BinaryVolumes[sessionid] = pe.Node(interface=Merge(12),
Expand Down Expand Up @@ -1023,12 +1024,12 @@ def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentB
STAPLE_SEGMENTATION_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
# STAPLE_SEGMENTATION_DataSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'BRAINSCut')
# STAPLE_SEGMENTATION_DataSink[sessionid].inputs.regexp_substitutions = GenerateBRAINSCutImagesOutputPattern(projectid, subjectid, sessionid)
STAPLE_SEGMENTATION_DataSink[sessionid].inputs.substitutions = [('DenoisedSTAPLESegmentations', os.path.join(projectid, subjectid, sessionid, 'STAPLERFSegmentations')),
STAPLE_SEGMENTATION_DataSink[sessionid].inputs.substitutions = [('CleanedDenoisedSTAPLESegmentations', os.path.join(projectid, subjectid, sessionid, 'STAPLERFSegmentations')),
('subjectANNLabel_', ''),
('.nii.gz', '_seg.nii.gz')
]
baw200.connect(ANTSLabelWarpFromSubjectAtlasToSession[sessionid], 'output_image',
STAPLE_SEGMENTATION_DataSink[sessionid], 'DenoisedSTAPLESegmentations.@output_image')
STAPLE_SEGMENTATION_DataSink[sessionid], 'CleanedDenoisedSTAPLESegmentations.@output_image')

else:
print("SKIPPING SEGMENTATION PHASE FOR {0} {1} {2}, lenT2s {3}".format(projectid, subjectid, sessionid, len(global_AllT2s[sessionid])))
Expand Down
107 changes: 94 additions & 13 deletions AutoWorkup/WorkupT1T2BRAINSCut.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def GenerateWFName(projectid, subjectid, sessionid, WFName):
return WFName + '_' + str(subjectid) + "_" + str(sessionid) + "_" + str(projectid)


def CreateLabelMap(listOfImages, LabelImageName, CSVFileName, projectid, subjectid, sessionid):
def CreateLabelMap(listOfImages, LabelImageName, CSVFileName, posteriorDictionary, projectid, subjectid, sessionid):
"""
A function to create a consolidated label map and a
csv file of volume measurements.
Expand All @@ -25,6 +25,33 @@ def CreateLabelMap(listOfImages, LabelImageName, CSVFileName, projectid, subject
import SimpleITK as sitk
import os
import csv
def CleanUpSegmentationsWithExclusionProbabilityMaps( initial_seg, probMapOfExclusion, percentageThreshold = 0.85 ):
    """Remove voxels from a binary segmentation that likely belong to another tissue class.

    initial_seg        -- binary segmentation image (SimpleITK image); cast to UInt8.
    probMapOfExclusion -- probability map of the tissue to exclude (e.g. a WM or CSF
                          posterior); presumably values are in [0, 1] -- TODO confirm.
    percentageThreshold -- voxels whose exclusion probability lies in
                          [percentageThreshold, 1.0] are zeroed out (default 0.85).

    Returns the cleaned segmentation as a UInt8 image.
    """
    seg = sitk.Cast( initial_seg, sitk.sitkUInt8 )
    # Debug output: the input image and its intensity statistics.
    print "AA", initial_seg
    print "BB", dict(sitk.Statistics(seg))
    # BinaryThreshold maps prob in [threshold, 1.0] -> 0 (insideValue) and
    # everything else -> 1 (outsideValue), so multiplying by this mask clears
    # exactly the high-probability exclusion voxels.
    exclude_Mask = sitk.Cast( sitk.BinaryThreshold( probMapOfExclusion, percentageThreshold, 1.0, 0, 1), sitk.sitkUInt8 )
    print "CC", dict(sitk.Statistics(exclude_Mask))
    cleanedUpSeg = seg * exclude_Mask
    print "DD", dict(sitk.Statistics(cleanedUpSeg))
    return cleanedUpSeg

def CleanUpGMSegmentationWithWMCSF( initial_seg_fn, posteriorDictionary, WMThreshold, CSFThreshold ):
    """Read a binary GM segmentation from disk and strip voxels that are
    confidently white matter or CSF according to the posterior probability maps.

    initial_seg_fn      -- filename of the binary segmentation to clean.
    posteriorDictionary -- dict with 'WM' and 'CSF' keys mapping to posterior
                           probability image filenames.
    WMThreshold         -- probability at/above which a voxel is treated as WM.
    CSFThreshold        -- probability at/above which a voxel is treated as CSF.

    Returns a UInt8 image with both WM-like and CSF-like voxels removed.
    """
    initial_seg = sitk.Cast(sitk.ReadImage(initial_seg_fn), sitk.sitkUInt8 )

    WM_FN = posteriorDictionary['WM']
    WM_PROB = sitk.ReadImage(WM_FN)
    WM_removed = CleanUpSegmentationsWithExclusionProbabilityMaps( initial_seg, WM_PROB, WMThreshold )

    CSF_FN = posteriorDictionary['CSF']
    CSF_PROB = sitk.ReadImage(CSF_FN)
    # BUG FIX: chain the two removals.  Previously the CSF pass started again
    # from initial_seg, silently discarding WM_removed, so the WM threshold
    # was never applied to the returned image.
    CSF_removed = CleanUpSegmentationsWithExclusionProbabilityMaps( WM_removed, CSF_PROB, CSFThreshold )
    return CSF_removed


orderOfPriority = [
"l_caudate",
"r_caudate",
Expand Down Expand Up @@ -55,22 +82,33 @@ def CreateLabelMap(listOfImages, LabelImageName, CSVFileName, projectid, subject
"r_globus": 12
}

cleaned_labels_map = dict()
labelImage = None
print "ZZZ"
x=0
for segFN in listOfImages:
im = sitk.ReadImage(segFN)
im = sitk.Cast(sitk.BinaryThreshold(im,0.51,1.01,1,0),sitk.sitkUInt8)
im.GetSize()
remove_pre_postfix = os.path.basename(segFN.replace(".nii.gz", "").replace("subjectANNLabel_", "").replace("_seg", ""))
remove_pre_postfix = os.path.basename(segFN.replace(".nii.gz", "").replace("ANNContinuousPrediction", "").replace("subject", ""))
x=x+1
print x, segFN
## Clean up the segmentations
curr_segROI = CleanUpGMSegmentationWithWMCSF(segFN, posteriorDictionary, 0.85, 0.85 )
print "Y"
curr_segROI.GetSize()
remove_pre_postfix = segFN.replace(".nii.gz", "")
remove_pre_postfix = os.path.basename(remove_pre_postfix.replace("subjectANNLabel_", "").replace("_seg", ""))
remove_pre_postfix = os.path.basename(remove_pre_postfix.replace("ANNContinuousPrediction", "").replace("subject", ""))
structName = remove_pre_postfix.lower()
cleaned_fileName = os.path.join(os.path.dirname(segFN),"cleaned_"+structName+"_seg.nii.gz")
print "="*20,structName," ",cleaned_fileName
cleaned_labels_map[structName]=cleaned_fileName
sitk.WriteImage(curr_segROI, cleaned_fileName)
if labelImage is None:
labelImage = im * valueDict[structName]
labelImage = curr_segROI * valueDict[structName]
else:
mask = sitk.Not(im)
not_mask = sitk.Not(curr_segROI)
## Clear out an empty space for the next mask to be inserted
labelImage *= mask
labelImage *= not_mask
## Add in the mask image with its proper label
labelImage = labelImage + im * valueDict[structName]
labelImage = labelImage + curr_segROI * valueDict[structName]
sitk.WriteImage(labelImage, LabelImageName)

ls = sitk.LabelStatisticsImageFilter()
Expand All @@ -97,7 +135,20 @@ def CreateLabelMap(listOfImages, LabelImageName, CSVFileName, projectid, subject
writeDictionary['subjectid'] = subjectid
writeDictionary['sessionid'] = sessionid
dWriter.writerow(writeDictionary)
return os.path.abspath(LabelImageName), os.path.abspath(CSVFileName)

CleanedLeftCaudate = cleaned_labels_map['l_caudate']
CleanedRightCaudate = cleaned_labels_map['r_caudate']
CleanedLeftHippocampus = cleaned_labels_map['l_hippocampus']
CleanedRightHippocampus = cleaned_labels_map['r_hippocampus']
CleanedLeftPutamen = cleaned_labels_map['l_putamen']
CleanedRightPutamen = cleaned_labels_map['r_putamen']
CleanedLeftThalamus = cleaned_labels_map['l_thalamus']
CleanedRightThalamus = cleaned_labels_map['r_thalamus']
CleanedLeftAccumben = cleaned_labels_map['l_accumben']
CleanedRightAccumben = cleaned_labels_map['r_accumben']
CleanedLeftGlobus = cleaned_labels_map['l_globus']
CleanedRightGlobus = cleaned_labels_map['r_globus']
return os.path.abspath(LabelImageName), os.path.abspath(CSVFileName), CleanedLeftCaudate, CleanedRightCaudate, CleanedLeftHippocampus, CleanedRightHippocampus, CleanedLeftPutamen, CleanedRightPutamen, CleanedLeftThalamus, CleanedRightThalamus, CleanedLeftAccumben, CleanedRightAccumben, CleanedLeftGlobus, CleanedRightGlobus

#==============================================
#==============================================
Expand Down Expand Up @@ -207,7 +258,6 @@ def CreateBRAINSCutWorkflow(projectid,
subjectANNLabel_r_thalamus.nii.gz
"""

"""
RF12BC.inputs.outputBinaryLeftCaudate = 'subjectANNLabel_l_caudate.nii.gz'
RF12BC.inputs.outputBinaryRightCaudate = 'subjectANNLabel_r_caudate.nii.gz'
RF12BC.inputs.outputBinaryLeftHippocampus = 'subjectANNLabel_l_hippocampus.nii.gz'
Expand All @@ -234,6 +284,7 @@ def CreateBRAINSCutWorkflow(projectid,
RF12BC.inputs.outputBinaryRightAccumben = 'ANNContinuousPredictionr_accumbensubject.nii.gz'
RF12BC.inputs.outputBinaryLeftGlobus = 'ANNContinuousPredictionl_globussubject.nii.gz'
RF12BC.inputs.outputBinaryRightGlobus = 'ANNContinuousPredictionr_globussubject.nii.gz'
"""

cutWF.connect( DenoisedT1, 'outputVolume', RF12BC, 'inputSubjectT1Filename')

Expand Down Expand Up @@ -300,14 +351,29 @@ def ChangeModelPathDirectory(multiModalFileName):
cutWF.connect(RF12BC, 'outputBinaryRightGlobus', mergeAllLabels, 'in12')

computeOneLabelMap = pe.Node(interface=Function(['listOfImages', 'LabelImageName', 'CSVFileName',
'posteriorDictionary',
'projectid', 'subjectid', 'sessionid'],
['outputLabelImageName', 'outputCSVFileName'],
['outputLabelImageName', 'outputCSVFileName',
'CleanedLeftCaudate',
'CleanedRightCaudate',
'CleanedLeftHippocampus',
'CleanedRightHippocampus',
'CleanedLeftPutamen',
'CleanedRightPutamen',
'CleanedLeftThalamus',
'CleanedRightThalamus',
'CleanedLeftAccumben',
'CleanedRightAccumben',
'CleanedLeftGlobus',
'CleanedRightGlobus'
],
function=CreateLabelMap), name="ComputeOneLabelMap")
computeOneLabelMap.inputs.projectid = projectid
computeOneLabelMap.inputs.subjectid = subjectid
computeOneLabelMap.inputs.sessionid = sessionid
computeOneLabelMap.inputs.LabelImageName = "allLabels.nii.gz"
computeOneLabelMap.inputs.CSVFileName = "allLabels_seg.csv"
cutWF.connect(inputsSpec, 'posteriorDictionary', computeOneLabelMap, 'posteriorDictionary')
cutWF.connect(mergeAllLabels, 'out', computeOneLabelMap, 'listOfImages')

outputsSpec = pe.Node(interface=IdentityInterface(fields=[
Expand All @@ -322,6 +388,20 @@ def ChangeModelPathDirectory(multiModalFileName):

cutWF.connect(computeOneLabelMap, 'outputLabelImageName', outputsSpec, 'outputLabelImageName')
cutWF.connect(computeOneLabelMap, 'outputCSVFileName', outputsSpec, 'outputCSVFileName')

cutWF.connect(computeOneLabelMap, 'CleanedLeftCaudate', outputsSpec, 'outputBinaryLeftCaudate')
cutWF.connect(computeOneLabelMap, 'CleanedRightCaudate', outputsSpec, 'outputBinaryRightCaudate')
cutWF.connect(computeOneLabelMap, 'CleanedLeftHippocampus', outputsSpec, 'outputBinaryLeftHippocampus')
cutWF.connect(computeOneLabelMap, 'CleanedRightHippocampus', outputsSpec, 'outputBinaryRightHippocampus')
cutWF.connect(computeOneLabelMap, 'CleanedLeftPutamen', outputsSpec, 'outputBinaryLeftPutamen')
cutWF.connect(computeOneLabelMap, 'CleanedRightPutamen', outputsSpec, 'outputBinaryRightPutamen')
cutWF.connect(computeOneLabelMap, 'CleanedLeftThalamus', outputsSpec, 'outputBinaryLeftThalamus')
cutWF.connect(computeOneLabelMap, 'CleanedRightThalamus', outputsSpec, 'outputBinaryRightThalamus')
cutWF.connect(computeOneLabelMap, 'CleanedLeftAccumben', outputsSpec, 'outputBinaryLeftAccumben')
cutWF.connect(computeOneLabelMap, 'CleanedRightAccumben', outputsSpec, 'outputBinaryRightAccumben')
cutWF.connect(computeOneLabelMap, 'CleanedLeftGlobus', outputsSpec, 'outputBinaryLeftGlobus')
cutWF.connect(computeOneLabelMap, 'CleanedRightGlobus', outputsSpec, 'outputBinaryRightGlobus')
"""
cutWF.connect(RF12BC, 'outputBinaryLeftCaudate', outputsSpec, 'outputBinaryLeftCaudate')
cutWF.connect(RF12BC, 'outputBinaryRightCaudate', outputsSpec, 'outputBinaryRightCaudate')
cutWF.connect(RF12BC, 'outputBinaryLeftHippocampus', outputsSpec, 'outputBinaryLeftHippocampus')
Expand All @@ -334,6 +414,7 @@ def ChangeModelPathDirectory(multiModalFileName):
cutWF.connect(RF12BC, 'outputBinaryRightAccumben', outputsSpec, 'outputBinaryRightAccumben')
cutWF.connect(RF12BC, 'outputBinaryLeftGlobus', outputsSpec, 'outputBinaryLeftGlobus')
cutWF.connect(RF12BC, 'outputBinaryRightGlobus', outputsSpec, 'outputBinaryRightGlobus')
"""
cutWF.connect(RF12BC, 'xmlFilename', outputsSpec, 'xmlFilename')

return cutWF

0 comments on commit a16921c

Please sign in to comment.