diff --git a/AutoWorkup/BAWantsRegistrationBuildTemplate.py b/AutoWorkup/BAWantsRegistrationBuildTemplate.py index 56cc7b37..c269da70 100644 --- a/AutoWorkup/BAWantsRegistrationBuildTemplate.py +++ b/AutoWorkup/BAWantsRegistrationBuildTemplate.py @@ -15,62 +15,66 @@ from nipype.interfaces.utility import Function from nipype.interfaces.ants import ( - Registration, - ApplyTransforms, - AverageImages, MultiplyImages, - AverageAffineTransform) + Registration, + ApplyTransforms, + AverageImages, MultiplyImages, + AverageAffineTransform) + def makeListOfOneElement(inputFile): - outputList=[inputFile] + outputList = [inputFile] return outputList + def GetFirstListElement(this_list): return this_list[0] + def MakeTransformListWithGradientWarps(averageAffineTranform, gradientStepWarp): return [averageAffineTranform, gradientStepWarp, gradientStepWarp, gradientStepWarp, gradientStepWarp] -def RenestDeformedPassiveImages(deformedPassiveImages,flattened_image_nametypes,interpolationMapping): +def RenestDeformedPassiveImages(deformedPassiveImages, flattened_image_nametypes, interpolationMapping): import os """ Now make a list of lists of images where the outter list is per image type, and the inner list is the same size as the number of subjects to be averaged. In this case, the first element will be a list of all the deformed T2's, and the second element will be a list of all deformed POSTERIOR_AIR, etc.. """ - all_images_size=len(deformedPassiveImages) - image_dictionary_of_lists=dict() - nested_imagetype_list=list() - outputAverageImageName_list=list() - image_type_list=list() - nested_interpolation_type=list() + all_images_size = len(deformedPassiveImages) + image_dictionary_of_lists = dict() + nested_imagetype_list = list() + outputAverageImageName_list = list() + image_type_list = list() + nested_interpolation_type = list() ## make empty_list, this is not efficient, but it works for name in flattened_image_nametypes: - image_dictionary_of_lists[name]=list() - for index in range(0,all_images_size): - curr_name=flattened_image_nametypes[index] - curr_file=deformedPassiveImages[index] + image_dictionary_of_lists[name] = list() + for index in range(0, all_images_size): + curr_name = flattened_image_nametypes[index] + curr_file = deformedPassiveImages[index] image_dictionary_of_lists[curr_name].append(curr_file) - for image_type,image_list in image_dictionary_of_lists.items(): + for image_type, image_list in image_dictionary_of_lists.items(): nested_imagetype_list.append(image_list) - outputAverageImageName_list.append('AVG_'+image_type+'.nii.gz') - image_type_list.append('WARP_AVG_'+image_type) - if interpolationMapping.has_key(image_type): + outputAverageImageName_list.append('AVG_' + image_type + '.nii.gz') + image_type_list.append('WARP_AVG_' + image_type) + if image_type in interpolationMapping: nested_interpolation_type.append(interpolationMapping[image_type]) else: - nested_interpolation_type.append('Linear') #Linear is the default. - print "\n"*10 + nested_interpolation_type.append('Linear') # Linear is the default. 
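# --- Aside (not part of the patch): a minimal sketch, with hypothetical file names,
# of the regrouping RenestDeformedPassiveImages performs. The flattened per-subject
# (nametype, file) pairs are re-nested into one list per image type, each inner list
# as long as the subject count, so every type can be averaged independently.
deformed = ['s1_T2.nii.gz', 's1_AIR.nii.gz', 's2_T2.nii.gz', 's2_AIR.nii.gz']
nametypes = ['T2', 'POSTERIOR_AIR', 'T2', 'POSTERIOR_AIR']
grouped = {}
for name, image in zip(nametypes, deformed):
    grouped.setdefault(name, []).append(image)  # bucket each file under its image type
assert grouped == {'T2': ['s1_T2.nii.gz', 's2_T2.nii.gz'],
                   'POSTERIOR_AIR': ['s1_AIR.nii.gz', 's2_AIR.nii.gz']}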
+ print "\n" * 10 print "HACK: ", nested_imagetype_list print "HACK: ", outputAverageImageName_list print "HACK: ", image_type_list print "HACK: ", nested_interpolation_type - return nested_imagetype_list,outputAverageImageName_list,image_type_list,nested_interpolation_type + return nested_imagetype_list, outputAverageImageName_list, image_type_list, nested_interpolation_type + def SplitAffineAndWarpComponents(list_of_transforms_lists): ### Nota bene: The outputs will include the initial_moving_transform from Registration (which depends on what ### the invert_initial_moving_transform is set to) affine_component_list = [] - warp_component_list = [] + warp_component_list = [] for transform in list_of_transforms_lists: affine_component_list.append(transform[0]) warp_component_list.append(transform[1]) @@ -78,69 +82,72 @@ def SplitAffineAndWarpComponents(list_of_transforms_lists): return affine_component_list, warp_component_list ## Flatten and return equal length transform and images lists. -def FlattenTransformAndImagesList(ListOfPassiveImagesDictionaries,transforms,invert_transform_flags,interpolationMapping): + + +def FlattenTransformAndImagesList(ListOfPassiveImagesDictionaries, transforms, invert_transform_flags, interpolationMapping): import sys print("HACK: DEBUG: ListOfPassiveImagesDictionaries\n{lpi}\n".format(lpi=ListOfPassiveImagesDictionaries)) - subjCount=len(ListOfPassiveImagesDictionaries) - tranCount=len(transforms) + subjCount = len(ListOfPassiveImagesDictionaries) + tranCount = len(transforms) if subjCount != tranCount: - print "ERROR: subjCount must equal tranCount {0} != {1}".format(subjCount,tranCount) + print "ERROR: subjCount must equal tranCount {0} != {1}".format(subjCount, tranCount) sys.exit(-1) - invertTfmsFlagsCount=len(invert_transform_flags) + invertTfmsFlagsCount = len(invert_transform_flags) if subjCount != invertTfmsFlagsCount: - print "ERROR: subjCount must equal invertTfmsFlags {0} != {1}".format(subjCount,invertTfmsFlagsCount) + print "ERROR: subjCount must equal invertTfmsFlags {0} != {1}".format(subjCount, invertTfmsFlagsCount) sys.exit(-1) - flattened_images=list() - flattened_image_nametypes=list() - flattened_transforms=list() - flattened_invert_transform_flags=list() - flattened_interpolation_type=list() + flattened_images = list() + flattened_image_nametypes = list() + flattened_transforms = list() + flattened_invert_transform_flags = list() + flattened_interpolation_type = list() passiveImagesCount = len(ListOfPassiveImagesDictionaries[0]) - for subjIndex in range(0,subjCount): - #if passiveImagesCount != len(ListOfPassiveImagesDictionaries[subjIndex]): + for subjIndex in range(0, subjCount): + # if passiveImagesCount != len(ListOfPassiveImagesDictionaries[subjIndex]): # print "ERROR: all image lengths must be equal {0} != {1}".format(passiveImagesCount,len(ListOfPassiveImagesDictionaries[subjIndex])) # sys.exit(-1) - subjImgDictionary=ListOfPassiveImagesDictionaries[subjIndex] - subjToAtlasTransform=transforms[subjIndex] - subjToAtlasInvertFlags=invert_transform_flags[subjIndex] - for imgname,img in subjImgDictionary.items(): + subjImgDictionary = ListOfPassiveImagesDictionaries[subjIndex] + subjToAtlasTransform = transforms[subjIndex] + subjToAtlasInvertFlags = invert_transform_flags[subjIndex] + for imgname, img in subjImgDictionary.items(): flattened_images.append(img) flattened_image_nametypes.append(imgname) flattened_transforms.append(subjToAtlasTransform) flattened_invert_transform_flags.append(subjToAtlasInvertFlags) - if 
interpolationMapping.has_key(imgname): + if imgname in interpolationMapping: flattened_interpolation_type.append(interpolationMapping[imgname]) else: - flattened_interpolation_type.append('Linear') #Linear is the default. + flattened_interpolation_type.append('Linear') # Linear is the default. print("HACK: flattened images {0}\n".format(flattened_images)) print("HACK: flattened nametypes {0}\n".format(flattened_image_nametypes)) print("HACK: flattened txfms {0}\n".format(flattened_transforms)) print("HACK: flattened txfmsFlags{0}\n".format(flattened_invert_transform_flags)) - return flattened_images,flattened_transforms,flattened_invert_transform_flags,flattened_image_nametypes,flattened_interpolation_type + return flattened_images, flattened_transforms, flattened_invert_transform_flags, flattened_image_nametypes, flattened_interpolation_type -def GetMovingImages(ListOfImagesDictionaries,registrationImageTypes,interpolationMapping): +def GetMovingImages(ListOfImagesDictionaries, registrationImageTypes, interpolationMapping): """ This currently ONLY works when registrationImageTypes has length of exactly 1. When the new multi-variate registration is introduced, it will be expanded. """ - if len(registrationImageTypes) !=1: + if len(registrationImageTypes) != 1: print("ERROR: Multivariate imageing not supported yet!") return [] - moving_images=[ mdict[ registrationImageTypes[0] ] for mdict in ListOfImagesDictionaries ] - moving_interpolation_type=interpolationMapping[ registrationImageTypes[0] ] - return moving_images,moving_interpolation_type + moving_images = [mdict[registrationImageTypes[0]] for mdict in ListOfImagesDictionaries] + moving_interpolation_type = interpolationMapping[registrationImageTypes[0]] + return moving_images, moving_interpolation_type -def GetPassiveImages(ListOfImagesDictionaries,registrationImageTypes): - if len(registrationImageTypes) !=1: + +def GetPassiveImages(ListOfImagesDictionaries, registrationImageTypes): + if len(registrationImageTypes) != 1: print("ERROR: Multivariate imageing not supported yet!") return [dict()] - passive_images=list() + passive_images = list() for mdict in ListOfImagesDictionaries: - ThisSubjectPassiveImages=dict() - for key,value in mdict.items(): + ThisSubjectPassiveImages = dict() + for key, value in mdict.items(): if key not in registrationImageTypes: - ThisSubjectPassiveImages[key]=value + ThisSubjectPassiveImages[key] = value passive_images.append(ThisSubjectPassiveImages) return passive_images @@ -149,13 +156,15 @@ def GetPassiveImages(ListOfImagesDictionaries,registrationImageTypes): ## 'SINGLE_IMAGE' is quick shorthand when you are building an atlas with a single subject, then registration can ## be short-circuted ## any other string indicates the normal mode that you would expect and replicates the shell script build_template_parallel.sh + + def BAWantsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''): """ Inputs:: inputspec.images : - inputspec.fixed_image : + inputspec.fixed_image : inputspec.ListOfPassiveImagesDictionaries : inputspec.interpolationMapping : @@ -163,136 +172,135 @@ def BAWantsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''): outputspec.template : outputspec.transforms_list : - outputspec.passive_deformed_templates : + outputspec.passive_deformed_templates : """ - TemplateBuildSingleIterationWF = pe.Workflow(name = 'antsRegistrationTemplateBuildSingleIterationWF_'+str(iterationPhasePrefix) ) + TemplateBuildSingleIterationWF = 
pe.Workflow(name='antsRegistrationTemplateBuildSingleIterationWF_' + str(iterationPhasePrefix)) inputSpec = pe.Node(interface=util.IdentityInterface(fields=[ - 'ListOfImagesDictionaries', 'registrationImageTypes', - #'maskRegistrationImageType', - 'interpolationMapping','fixed_image']), - run_without_submitting=True, - name='inputspec') + 'ListOfImagesDictionaries', 'registrationImageTypes', + #'maskRegistrationImageType', + 'interpolationMapping', 'fixed_image']), + run_without_submitting=True, + name='inputspec') ## HACK: TODO: We need to have the AVG_AIR.nii.gz be warped with a default voxel value of 1.0 ## HACK: TODO: Need to move all local functions to a common untility file, or at the top of the file so that ## they do not change due to re-indenting. Otherwise re-indenting for flow control will trigger ## their hash to change. ## HACK: TODO: REMOVE 'transforms_list' it is not used. That will change all the hashes ## HACK: TODO: Need to run all python files through the code beutifiers. It has gotten pretty ugly. - outputSpec = pe.Node(interface=util.IdentityInterface(fields=['template','transforms_list', - 'passive_deformed_templates']), - run_without_submitting=True, - name='outputspec') - + outputSpec = pe.Node(interface=util.IdentityInterface(fields=['template', 'transforms_list', + 'passive_deformed_templates']), + run_without_submitting=True, + name='outputspec') ### NOTE MAP NODE! warp each of the original images to the provided fixed_image as the template - BeginANTS=pe.MapNode(interface=Registration(), name = 'BeginANTS', iterfield=['moving_image']) + BeginANTS = pe.MapNode(interface=Registration(), name='BeginANTS', iterfield=['moving_image']) BeginANTS.inputs.dimension = 3 """ This is the recommended set of parameters from the ANTS developers """ - BeginANTS.inputs.output_transform_prefix = str(iterationPhasePrefix)+'_tfm' - BeginANTS.inputs.transforms = ["Rigid", "Affine", "SyN"] - BeginANTS.inputs.transform_parameters = [[0.1], [0.1], [0.15,3.0,0.0]] - BeginANTS.inputs.metric = ['Mattes', 'Mattes', 'CC'] - BeginANTS.inputs.sampling_strategy = ['Regular', 'Regular', None] - BeginANTS.inputs.sampling_percentage = [0.1, 0.1, 1.0] - BeginANTS.inputs.metric_weight = [1.0, 1.0, 1.0] - BeginANTS.inputs.radius_or_number_of_bins = [32, 32, 4] - BeginANTS.inputs.number_of_iterations = [[2000,2000,2000], [1000, 1000, 1000], [10000,500,500,200]] - BeginANTS.inputs.convergence_threshold = [1e-9, 1e-9, 1e-9] - BeginANTS.inputs.convergence_window_size = [15, 15, 15] - BeginANTS.inputs.use_histogram_matching = [True, True, True] - BeginANTS.inputs.shrink_factors = [[4,2,1], [4,2,1], [6,4,2,1]] - BeginANTS.inputs.smoothing_sigmas = [[4,2,0], [4,2,0], [6,4,2,0]] - BeginANTS.inputs.use_estimate_learning_rate_once = [False, False, False] - BeginANTS.inputs.write_composite_transform=True - BeginANTS.inputs.collapse_output_transforms=True + BeginANTS.inputs.output_transform_prefix = str(iterationPhasePrefix) + '_tfm' + BeginANTS.inputs.transforms = ["Rigid", "Affine", "SyN"] + BeginANTS.inputs.transform_parameters = [[0.1], [0.1], [0.15, 3.0, 0.0]] + BeginANTS.inputs.metric = ['Mattes', 'Mattes', 'CC'] + BeginANTS.inputs.sampling_strategy = ['Regular', 'Regular', None] + BeginANTS.inputs.sampling_percentage = [0.1, 0.1, 1.0] + BeginANTS.inputs.metric_weight = [1.0, 1.0, 1.0] + BeginANTS.inputs.radius_or_number_of_bins = [32, 32, 4] + BeginANTS.inputs.number_of_iterations = [[2000, 2000, 2000], [1000, 1000, 1000], [10000, 500, 500, 200]] + BeginANTS.inputs.convergence_threshold = [1e-9, 1e-9, 
1e-9] + BeginANTS.inputs.convergence_window_size = [15, 15, 15] + BeginANTS.inputs.use_histogram_matching = [True, True, True] + BeginANTS.inputs.shrink_factors = [[4, 2, 1], [4, 2, 1], [6, 4, 2, 1]] + BeginANTS.inputs.smoothing_sigmas = [[4, 2, 0], [4, 2, 0], [6, 4, 2, 0]] + BeginANTS.inputs.use_estimate_learning_rate_once = [False, False, False] + BeginANTS.inputs.write_composite_transform = True + BeginANTS.inputs.collapse_output_transforms = True BeginANTS.inputs.output_warped_image = 'atlas2subject.nii.gz' BeginANTS.inputs.output_inverse_warped_image = 'subject2atlas.nii.gz' GetMovingImagesNode = pe.Node(interface=util.Function(function=GetMovingImages, - input_names=['ListOfImagesDictionaries','registrationImageTypes','interpolationMapping'], - output_names=['moving_images','moving_interpolation_type']), - run_without_submitting=True, - name='99_GetMovingImagesNode') + input_names=['ListOfImagesDictionaries', 'registrationImageTypes', 'interpolationMapping'], + output_names=['moving_images', 'moving_interpolation_type']), + run_without_submitting=True, + name='99_GetMovingImagesNode') TemplateBuildSingleIterationWF.connect(inputSpec, 'ListOfImagesDictionaries', GetMovingImagesNode, 'ListOfImagesDictionaries') TemplateBuildSingleIterationWF.connect(inputSpec, 'registrationImageTypes', GetMovingImagesNode, 'registrationImageTypes') - TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping',GetMovingImagesNode,'interpolationMapping') + TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', GetMovingImagesNode, 'interpolationMapping') TemplateBuildSingleIterationWF.connect(GetMovingImagesNode, 'moving_images', BeginANTS, 'moving_image') TemplateBuildSingleIterationWF.connect(GetMovingImagesNode, 'moving_interpolation_type', BeginANTS, 'interpolation') TemplateBuildSingleIterationWF.connect(inputSpec, 'fixed_image', BeginANTS, 'fixed_image') ## Now warp all the input_images images - wimtdeformed = pe.MapNode(interface = ApplyTransforms(), - iterfield=['transforms','invert_transform_flags','input_image'], - name ='wimtdeformed') + wimtdeformed = pe.MapNode(interface=ApplyTransforms(), + iterfield=['transforms', 'invert_transform_flags', 'input_image'], + name='wimtdeformed') wimtdeformed.inputs.interpolation = 'Linear' wimtdeformed.default_value = 0 - #HACK: Should try using forward_composite_transform - TemplateBuildSingleIterationWF.connect(BeginANTS,'forward_transforms',wimtdeformed,'transforms') - TemplateBuildSingleIterationWF.connect(BeginANTS,'forward_invert_flags',wimtdeformed,'invert_transform_flags') + # HACK: Should try using forward_composite_transform + TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', wimtdeformed, 'transforms') + TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', wimtdeformed, 'invert_transform_flags') TemplateBuildSingleIterationWF.connect(GetMovingImagesNode, 'moving_images', wimtdeformed, 'input_image') TemplateBuildSingleIterationWF.connect(inputSpec, 'fixed_image', wimtdeformed, 'reference_image') ## Shape Update Next ===== ## Now Average All input_images deformed images together to create an updated template average - AvgDeformedImages=pe.Node(interface=AverageImages(), name='AvgDeformedImages') + AvgDeformedImages = pe.Node(interface=AverageImages(), name='AvgDeformedImages') AvgDeformedImages.inputs.dimension = 3 - AvgDeformedImages.inputs.output_average_image = str(iterationPhasePrefix)+'.nii.gz' + AvgDeformedImages.inputs.output_average_image = str(iterationPhasePrefix) 
+ '.nii.gz' AvgDeformedImages.inputs.normalize = True TemplateBuildSingleIterationWF.connect(wimtdeformed, "output_image", AvgDeformedImages, 'images') ## Now average all affine transforms together - AvgAffineTransform = pe.Node(interface=AverageAffineTransform(), name = 'AvgAffineTransform') + AvgAffineTransform = pe.Node(interface=AverageAffineTransform(), name='AvgAffineTransform') AvgAffineTransform.inputs.dimension = 3 - AvgAffineTransform.inputs.output_affine_transform = 'Avererage_'+str(iterationPhasePrefix)+'_Affine.h5' + AvgAffineTransform.inputs.output_affine_transform = 'Avererage_' + str(iterationPhasePrefix) + '_Affine.h5' SplitAffineAndWarpsNode = pe.Node(interface=util.Function(function=SplitAffineAndWarpComponents, input_names=['list_of_transforms_lists'], output_names=['affine_component_list', 'warp_component_list']), run_without_submitting=True, name='99_SplitAffineAndWarpsNode') - TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms',SplitAffineAndWarpsNode,'list_of_transforms_lists') + TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', SplitAffineAndWarpsNode, 'list_of_transforms_lists') TemplateBuildSingleIterationWF.connect(SplitAffineAndWarpsNode, 'affine_component_list', AvgAffineTransform, 'transforms') ## Now average the warp fields togther - AvgWarpImages=pe.Node(interface=AverageImages(), name='AvgWarpImages') + AvgWarpImages = pe.Node(interface=AverageImages(), name='AvgWarpImages') AvgWarpImages.inputs.dimension = 3 - AvgWarpImages.inputs.output_average_image = str(iterationPhasePrefix)+'warp.nii.gz' + AvgWarpImages.inputs.output_average_image = str(iterationPhasePrefix) + 'warp.nii.gz' AvgWarpImages.inputs.normalize = True TemplateBuildSingleIterationWF.connect(SplitAffineAndWarpsNode, 'warp_component_list', AvgWarpImages, 'images') ## Now average the images together ## TODO: For now GradientStep is set to 0.25 as a hard coded default value. 
GradientStep = 0.25 - GradientStepWarpImage=pe.Node(interface=MultiplyImages(), name='GradientStepWarpImage') + GradientStepWarpImage = pe.Node(interface=MultiplyImages(), name='GradientStepWarpImage') GradientStepWarpImage.inputs.dimension = 3 GradientStepWarpImage.inputs.second_input = -1.0 * GradientStep - GradientStepWarpImage.inputs.output_product_image = 'GradientStep0.25_'+str(iterationPhasePrefix)+'_warp.nii.gz' + GradientStepWarpImage.inputs.output_product_image = 'GradientStep0.25_' + str(iterationPhasePrefix) + '_warp.nii.gz' TemplateBuildSingleIterationWF.connect(AvgWarpImages, 'output_average_image', GradientStepWarpImage, 'first_input') ## Now create the new template shape based on the average of all deformed images - UpdateTemplateShape = pe.Node(interface = ApplyTransforms(), name = 'UpdateTemplateShape') + UpdateTemplateShape = pe.Node(interface=ApplyTransforms(), name='UpdateTemplateShape') UpdateTemplateShape.inputs.invert_transform_flags = [True] UpdateTemplateShape.inputs.interpolation = 'Linear' UpdateTemplateShape.default_value = 0 TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', UpdateTemplateShape, 'reference_image') - TemplateBuildSingleIterationWF.connect( [ (AvgAffineTransform, UpdateTemplateShape, [(('affine_transform', makeListOfOneElement ), 'transforms')] ), ]) + TemplateBuildSingleIterationWF.connect([(AvgAffineTransform, UpdateTemplateShape, [(('affine_transform', makeListOfOneElement), 'transforms')]), ]) TemplateBuildSingleIterationWF.connect(GradientStepWarpImage, 'output_product_image', UpdateTemplateShape, 'input_image') ApplyInvAverageAndFourTimesGradientStepWarpImage = pe.Node(interface=util.Function(function=MakeTransformListWithGradientWarps, - input_names=['averageAffineTranform', 'gradientStepWarp'], - output_names=['TransformListWithGradientWarps']), - run_without_submitting=True, - name='99_MakeTransformListWithGradientWarps') + input_names=['averageAffineTranform', 'gradientStepWarp'], + output_names=['TransformListWithGradientWarps']), + run_without_submitting=True, + name='99_MakeTransformListWithGradientWarps') ApplyInvAverageAndFourTimesGradientStepWarpImage.inputs.ignore_exception = True TemplateBuildSingleIterationWF.connect(AvgAffineTransform, 'affine_transform', ApplyInvAverageAndFourTimesGradientStepWarpImage, 'averageAffineTranform') TemplateBuildSingleIterationWF.connect(UpdateTemplateShape, 'output_image', ApplyInvAverageAndFourTimesGradientStepWarpImage, 'gradientStepWarp') - ReshapeAverageImageWithShapeUpdate = pe.Node(interface = ApplyTransforms(), name = 'ReshapeAverageImageWithShapeUpdate') - ReshapeAverageImageWithShapeUpdate.inputs.invert_transform_flags = [ True, False, False, False, False ] + ReshapeAverageImageWithShapeUpdate = pe.Node(interface=ApplyTransforms(), name='ReshapeAverageImageWithShapeUpdate') + ReshapeAverageImageWithShapeUpdate.inputs.invert_transform_flags = [True, False, False, False, False] ReshapeAverageImageWithShapeUpdate.inputs.interpolation = 'Linear' ReshapeAverageImageWithShapeUpdate.default_value = 0 ReshapeAverageImageWithShapeUpdate.inputs.output_image = 'ReshapeAverageImageWithShapeUpdate.nii.gz' @@ -309,57 +317,57 @@ def BAWantsRegistrationTemplateBuildSingleIterationWF(iterationPhasePrefix=''): ###### ############################################## ## Now warp all the ListOfPassiveImagesDictionaries images - FlattenTransformAndImagesListNode = pe.Node( Function(function=FlattenTransformAndImagesList, - input_names = 
['ListOfPassiveImagesDictionaries','transforms', - 'invert_transform_flags','interpolationMapping'], - output_names = ['flattened_images','flattened_transforms','flattened_invert_transform_flags', - 'flattened_image_nametypes','flattened_interpolation_type']), - run_without_submitting=True, name="99_FlattenTransformAndImagesList") + FlattenTransformAndImagesListNode = pe.Node(Function(function=FlattenTransformAndImagesList, + input_names=['ListOfPassiveImagesDictionaries', 'transforms', + 'invert_transform_flags', 'interpolationMapping'], + output_names=['flattened_images', 'flattened_transforms', 'flattened_invert_transform_flags', + 'flattened_image_nametypes', 'flattened_interpolation_type']), + run_without_submitting=True, name="99_FlattenTransformAndImagesList") GetPassiveImagesNode = pe.Node(interface=util.Function(function=GetPassiveImages, - input_names=['ListOfImagesDictionaries','registrationImageTypes'], - output_names=['ListOfPassiveImagesDictionaries']), - run_without_submitting=True, - name='99_GetPassiveImagesNode') + input_names=['ListOfImagesDictionaries', 'registrationImageTypes'], + output_names=['ListOfPassiveImagesDictionaries']), + run_without_submitting=True, + name='99_GetPassiveImagesNode') TemplateBuildSingleIterationWF.connect(inputSpec, 'ListOfImagesDictionaries', GetPassiveImagesNode, 'ListOfImagesDictionaries') TemplateBuildSingleIterationWF.connect(inputSpec, 'registrationImageTypes', GetPassiveImagesNode, 'registrationImageTypes') - TemplateBuildSingleIterationWF.connect( GetPassiveImagesNode,'ListOfPassiveImagesDictionaries', FlattenTransformAndImagesListNode, 'ListOfPassiveImagesDictionaries' ) - TemplateBuildSingleIterationWF.connect( inputSpec,'interpolationMapping', FlattenTransformAndImagesListNode, 'interpolationMapping' ) - TemplateBuildSingleIterationWF.connect( BeginANTS,'forward_transforms', FlattenTransformAndImagesListNode, 'transforms' ) - TemplateBuildSingleIterationWF.connect( BeginANTS,'forward_invert_flags', FlattenTransformAndImagesListNode, 'invert_transform_flags' ) - wimtPassivedeformed = pe.MapNode(interface = ApplyTransforms(), - iterfield=['transforms','invert_transform_flags', 'input_image','interpolation'], - name ='wimtPassivedeformed') + TemplateBuildSingleIterationWF.connect(GetPassiveImagesNode, 'ListOfPassiveImagesDictionaries', FlattenTransformAndImagesListNode, 'ListOfPassiveImagesDictionaries') + TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', FlattenTransformAndImagesListNode, 'interpolationMapping') + TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_transforms', FlattenTransformAndImagesListNode, 'transforms') + TemplateBuildSingleIterationWF.connect(BeginANTS, 'forward_invert_flags', FlattenTransformAndImagesListNode, 'invert_transform_flags') + wimtPassivedeformed = pe.MapNode(interface=ApplyTransforms(), + iterfield=['transforms', 'invert_transform_flags', 'input_image', 'interpolation'], + name='wimtPassivedeformed') wimtPassivedeformed.default_value = 0 - TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image',wimtPassivedeformed,'reference_image') + TemplateBuildSingleIterationWF.connect(AvgDeformedImages, 'output_average_image', wimtPassivedeformed, 'reference_image') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_interpolation_type', wimtPassivedeformed, 'interpolation') - TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_images', wimtPassivedeformed, 'input_image') + 
TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_images', wimtPassivedeformed, 'input_image') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_transforms', wimtPassivedeformed, 'transforms') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_invert_transform_flags', wimtPassivedeformed, 'invert_transform_flags') - RenestDeformedPassiveImagesNode = pe.Node( Function(function=RenestDeformedPassiveImages, - input_names = ['deformedPassiveImages','flattened_image_nametypes','interpolationMapping'], - output_names = ['nested_imagetype_list','outputAverageImageName_list', - 'image_type_list','nested_interpolation_type']), - run_without_submitting=True, name="99_RenestDeformedPassiveImages") + RenestDeformedPassiveImagesNode = pe.Node(Function(function=RenestDeformedPassiveImages, + input_names=['deformedPassiveImages', 'flattened_image_nametypes', 'interpolationMapping'], + output_names=['nested_imagetype_list', 'outputAverageImageName_list', + 'image_type_list', 'nested_interpolation_type']), + run_without_submitting=True, name="99_RenestDeformedPassiveImages") TemplateBuildSingleIterationWF.connect(inputSpec, 'interpolationMapping', RenestDeformedPassiveImagesNode, 'interpolationMapping') TemplateBuildSingleIterationWF.connect(wimtPassivedeformed, 'output_image', RenestDeformedPassiveImagesNode, 'deformedPassiveImages') TemplateBuildSingleIterationWF.connect(FlattenTransformAndImagesListNode, 'flattened_image_nametypes', RenestDeformedPassiveImagesNode, 'flattened_image_nametypes') ## Now Average All passive input_images deformed images together to create an updated template average - AvgDeformedPassiveImages=pe.MapNode(interface=AverageImages(), - iterfield=['images','output_average_image'], - name='AvgDeformedPassiveImages') + AvgDeformedPassiveImages = pe.MapNode(interface=AverageImages(), + iterfield=['images', 'output_average_image'], + name='AvgDeformedPassiveImages') AvgDeformedPassiveImages.inputs.dimension = 3 AvgDeformedPassiveImages.inputs.normalize = False TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, "nested_imagetype_list", AvgDeformedPassiveImages, 'images') TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, "outputAverageImageName_list", AvgDeformedPassiveImages, 'output_average_image') ## -- TODO: Now neeed to reshape all the passive images as well - ReshapeAveragePassiveImageWithShapeUpdate = pe.MapNode(interface = ApplyTransforms(), - iterfield=['input_image','reference_image','output_image','interpolation'], - name = 'ReshapeAveragePassiveImageWithShapeUpdate') - ReshapeAveragePassiveImageWithShapeUpdate.inputs.invert_transform_flags = [ True, False, False, False, False ] + ReshapeAveragePassiveImageWithShapeUpdate = pe.MapNode(interface=ApplyTransforms(), + iterfield=['input_image', 'reference_image', 'output_image', 'interpolation'], + name='ReshapeAveragePassiveImageWithShapeUpdate') + ReshapeAveragePassiveImageWithShapeUpdate.inputs.invert_transform_flags = [True, False, False, False, False] ReshapeAveragePassiveImageWithShapeUpdate.default_value = 0 TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, 'nested_interpolation_type', ReshapeAveragePassiveImageWithShapeUpdate, 'interpolation') TemplateBuildSingleIterationWF.connect(RenestDeformedPassiveImagesNode, 'outputAverageImageName_list', ReshapeAveragePassiveImageWithShapeUpdate, 'output_image') diff --git a/AutoWorkup/BRAINSABCext.py 
b/AutoWorkup/BRAINSABCext.py index 97034a38..427a6d2c 100644 --- a/AutoWorkup/BRAINSABCext.py +++ b/AutoWorkup/BRAINSABCext.py @@ -1,9 +1,11 @@ from nipype.interfaces.base import CommandLine, CommandLineInputSpec, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath import os -from SEMTools.segmentation.specialized import BRAINSABCOutputSpec, BRAINSABCInputSpec, BRAINSABC -#from SEMTools import BRAINSABCInputSpec,BRAINSABCOutputSpec,BRAINSABC +from SEMTools.segmentation.specialized import BRAINSABCOutputSpec, BRAINSABCInputSpec, BRAINSABC +# from SEMTools import BRAINSABCInputSpec,BRAINSABCOutputSpec,BRAINSABC from xml.etree import ElementTree as et + + class GetPosteriorsFromAtlasXML(): def __init__(self, xmlFile): @@ -36,7 +38,7 @@ def getPosteriorFileNameList(self, posteriorTemplate): for priorType in self.priorTypeNameList: posteriorFileNameList.append("POSTERIOR_{priorT}.nii.gz".format(priorT=priorType)) ## HACK: The following is correct from the command line posteriorTemplate arguments - #posteriorFileNameList.append(posteriorTemplate % priorType) + # posteriorFileNameList.append(posteriorTemplate % priorType) return posteriorFileNameList """ @@ -45,17 +47,19 @@ class BRAINSABCextInputSpec(BRAINSABCInputSpec): posteriorImages = traits.Either(traits.Bool(True,desc="The automatically generated posterior images"), InputMultiPath(File(),), hash_files = False,argstr = "") """ + class BRAINSABCextOutputSpec(BRAINSABCOutputSpec): # Not convenient outputAverageImages = OutputMultiPath(File(exists=True), exists = True) - outputT1AverageImage = traits.Either( File(exists=True), None ) - outputT2AverageImage = traits.Either( File(exists=True), None ) - outputPDAverageImage = traits.Either( File(exists=True), None ) - outputFLAverageImage = traits.Either( File(exists=True), None ) + outputT1AverageImage = traits.Either(File(exists=True), None) + outputT2AverageImage = traits.Either(File(exists=True), None) + outputPDAverageImage = traits.Either(File(exists=True), None) + outputFLAverageImage = traits.Either(File(exists=True), None) posteriorImages = OutputMultiPath(File(exists=True), exists=True) - atlasToSubjectInverseTransform = traits.Either( File(exists=True), None ) + atlasToSubjectInverseTransform = traits.Either(File(exists=True), None) + class BRAINSABCext(BRAINSABC): - #input_spec= BRAINSABCextInputSpec + # input_spec= BRAINSABCextInputSpec output_spec = BRAINSABCextOutputSpec def _list_outputs(self): @@ -65,17 +69,17 @@ def _list_outputs(self): 'outputPDAverageImage', 'outputFLAverageImage', 'atlasToSubjectInverseTransform' - ] + ] full_outputs = self.output_spec().get() pruned_outputs = dict() for key, value in full_outputs.iteritems(): if key not in custom_implied_outputs_with_no_inputs: pruned_outputs[key] = value - outputs = super(BRAINSABCext,self)._outputs_from_inputs( pruned_outputs ) - input_check = {'T1':('outputT1AverageImage', 't1_average_BRAINSABC.nii.gz'), - 'T2':('outputT2AverageImage', 't2_average_BRAINSABC.nii.gz'), - 'PD':('outputPDAverageImage', 'pd_average_BRAINSABC.nii.gz'), - 'FL':('outputFLAverageImage', 'fl_average_BRAINSABC.nii.gz')} + outputs = super(BRAINSABCext, self)._outputs_from_inputs(pruned_outputs) + input_check = {'T1': ('outputT1AverageImage', 't1_average_BRAINSABC.nii.gz'), + 'T2': ('outputT2AverageImage', 't2_average_BRAINSABC.nii.gz'), + 'PD': ('outputPDAverageImage', 'pd_average_BRAINSABC.nii.gz'), + 'FL': ('outputFLAverageImage', 'fl_average_BRAINSABC.nii.gz')} for key, values in input_check.iteritems(): if key in 
self.inputs.inputVolumeTypes: outputs[values[0]] = os.path.abspath(values[1]) @@ -86,10 +90,9 @@ def _list_outputs(self): PosteriorPaths = PosteriorOutputs.getPosteriorFileNameList(self.inputs.posteriorTemplate) outputs['posteriorImages'] = [os.path.abspath(postPath) for postPath in PosteriorPaths] - fixed_inverse_name=os.path.abspath(outputs['atlasToSubjectTransform'].replace(".h5","_Inverse.h5")) + fixed_inverse_name = os.path.abspath(outputs['atlasToSubjectTransform'].replace(".h5", "_Inverse.h5")) if os.path.exists(fixed_inverse_name): outputs['atlasToSubjectInverseTransform'] = fixed_inverse_name else: outputs['atlasToSubjectInverseTransform'] = None return outputs - diff --git a/AutoWorkup/BRAINSCutCMD.py b/AutoWorkup/BRAINSCutCMD.py index 1ce8755f..db50f5ef 100755 --- a/AutoWorkup/BRAINSCutCMD.py +++ b/AutoWorkup/BRAINSCutCMD.py @@ -7,151 +7,152 @@ import argparse import subprocess -def addProbabilityMapElement( probabilityMap, maskName, outputStream ): - outputStream.write( " \n") +def addProbabilityMapElement(probabilityMap, maskName, outputStream): + outputStream.write(" \n") -def xmlGenerator( args, roi="" ): + +def xmlGenerator(args, roi=""): xmlFilename = args.xmlFilename + roi + ".xml" - outputStream = open( xmlFilename, 'w') - registrationID="BSpline_ROI" + outputStream = open(xmlFilename, 'w') + registrationID = "BSpline_ROI" - outputStream.write( "\n" ) + outputStream.write("\n") # # template # - outputStream.write( " \n") - outputStream.write( " \n".format(fn=args.inputTemplateT1)) + outputStream.write(" \n") + outputStream.write(" \n".format(fn=args.inputTemplateT1)) if args.inputSubjectT2Filename is not None: - outputStream.write( " \n".format(fn="na")) - outputStream.write( " \n".format(fn="na")) - #outputStream.write( " \n".format(fn="na")) - #outputStream.write( " \n".format(fn=args.inputTemplateRegistrationROIFilename)) + outputStream.write(" \n".format(fn="na")) + outputStream.write(" \n".format(fn="na")) + # outputStream.write( " \n".format(fn="na")) + # outputStream.write( " \n".format(fn=args.inputTemplateRegistrationROIFilename)) - outputStream.write( " \n") - outputStream.write( " \n") - outputStream.write( " \n") - outputStream.write( " \n") + outputStream.write(" \n") + outputStream.write(" \n") + outputStream.write(" \n") + outputStream.write(" \n") # # Registration # - outputStream.write( " \n") + outputStream.write(" \n") # # training vector configuration (feature vector) # - outputStream.write( " \n") + outputStream.write(" \n") # # random forest parameters # - outputStream.write( " \n") + outputStream.write(" \n") # # ANN Parameters # - outputStream.write( " \n") + outputStream.write(" \n") # # apply conditions # - outputStream.write( "\n") + outputStream.write("\n") # # add probability maps (ROIs) # if roi == "caudate": - addProbabilityMapElement( args.probabilityMapsLeftCaudate, "l_caudate", outputStream); - addProbabilityMapElement( args.probabilityMapsRightCaudate, "r_caudate", outputStream); + addProbabilityMapElement(args.probabilityMapsLeftCaudate, "l_caudate", outputStream) + addProbabilityMapElement(args.probabilityMapsRightCaudate, "r_caudate", outputStream) elif roi == 'putamen': - addProbabilityMapElement( args.probabilityMapsLeftPutamen, "l_putamen", outputStream); - addProbabilityMapElement( args.probabilityMapsRightPutamen, "r_putamen", outputStream); + addProbabilityMapElement(args.probabilityMapsLeftPutamen, "l_putamen", outputStream) + addProbabilityMapElement(args.probabilityMapsRightPutamen, "r_putamen", outputStream) elif roi == 
'thalamus': - addProbabilityMapElement( args.probabilityMapsLeftThalamus, "l_thalamus", outputStream); - addProbabilityMapElement( args.probabilityMapsRightThalamus, "r_thalamus", outputStream); + addProbabilityMapElement(args.probabilityMapsLeftThalamus, "l_thalamus", outputStream) + addProbabilityMapElement(args.probabilityMapsRightThalamus, "r_thalamus", outputStream) elif roi == 'hippocampus': - addProbabilityMapElement( args.probabilityMapsLeftHippocampus, "l_hippocampus", outputStream); - addProbabilityMapElement( args.probabilityMapsRightHippocampus,"r_hippocampus", outputStream); + addProbabilityMapElement(args.probabilityMapsLeftHippocampus, "l_hippocampus", outputStream) + addProbabilityMapElement(args.probabilityMapsRightHippocampus, "r_hippocampus", outputStream) elif roi == 'accumben': - addProbabilityMapElement( args.probabilityMapsLeftAccumben, "l_accumben", outputStream); - addProbabilityMapElement( args.probabilityMapsRightAccumben, "r_accumben", outputStream); + addProbabilityMapElement(args.probabilityMapsLeftAccumben, "l_accumben", outputStream) + addProbabilityMapElement(args.probabilityMapsRightAccumben, "r_accumben", outputStream) elif roi == 'globus': - addProbabilityMapElement( args.probabilityMapsLeftGlobus, "l_globus", outputStream); - addProbabilityMapElement( args.probabilityMapsRightGlobus, "r_globus", outputStream); + addProbabilityMapElement(args.probabilityMapsLeftGlobus, "l_globus", outputStream) + addProbabilityMapElement(args.probabilityMapsRightGlobus, "r_globus", outputStream) # # subject # - outputStream.write( " \n") - outputStream.write( " \n") + outputStream.write(" \n") + outputStream.write(" \n") if args.inputSubjectT2Filename is not None: - outputStream.write( " \n") - outputStream.write( " \n") - #outputStream.write( " \n".format(fn=args.inputSubjectTotalGMFilename)) - #outputStream.write( " \n".format(fn=args.inputSubjectRegistrationROIFilename)) - - #outputStream.write( " \n") - #outputStream.write( " \n") - #outputStream.write( " \n") - #outputStream.write( " \n") - #outputStream.write( " \n") - #outputStream.write( " \n") - #outputStream.write( " \n") - #outputStream.write( " \n") - #outputStream.write( " \n") - #outputStream.write( " \n") - #outputStream.write( " \n") - #outputStream.write( " \n") - - #if args.inputSubjectBrainMaskFilename != "NA": + outputStream.write(" \n") + outputStream.write(" \n") + # outputStream.write( " \n".format(fn=args.inputSubjectTotalGMFilename)) + # outputStream.write( " \n".format(fn=args.inputSubjectRegistrationROIFilename)) + + # outputStream.write( " \n") + # outputStream.write( " \n") + # outputStream.write( " \n") + # outputStream.write( " \n") + # outputStream.write( " \n") + # outputStream.write( " \n") + # outputStream.write( " \n") + # outputStream.write( " \n") + # outputStream.write( " \n") + # outputStream.write( " \n") + # outputStream.write( " \n") + # outputStream.write( " \n") + + # if args.inputSubjectBrainMaskFilename != "NA": # outputStream.write( " \n") if not args.deformationFromSubjectToTemplate is None: - outputStream.write( ' \n") - outputStream.write( " \n") + outputStream.write(' \n") + outputStream.write(" \n") - outputStream.write( "\n" ) + outputStream.write("\n") outputStream.close() return xmlFilename @@ -160,43 +161,43 @@ def xmlGenerator( args, roi="" ): ## main ## -brainscutParser = argparse.ArgumentParser( description ='BRAINSCut command line argument parser') +brainscutParser = argparse.ArgumentParser(description='BRAINSCut command line argument parser') # # input 
arguments # -brainscutParser.add_argument('--inputSubjectT1Filename', help='T1 subject filename', required=True ) -brainscutParser.add_argument('--inputSubjectT2Filename', help='T2 subject filename', required=False ) -#brainscutParser.add_argument('--inputSubjectTotalGMFilename', help='TotalGM filename', required=True ) -brainscutParser.add_argument('--inputSubjectGadSGFilename', help='GadSG subject filename', required=False ) -#brainscutParser.add_argument('--inputSubjectBrainMaskFilename', help='BrainMask subject filename' ) -#brainscutParser.add_argument('--inputSubjectRegistrationROIFilename', help='template brain mask filename' ) - -brainscutParser.add_argument('--inputTemplateT1', help='template T1-weighted filename', required=True ) -#brainscutParser.add_argument('--inputTemplateRegistrationROIFilename', help='template brain region filename', required=True ) - -brainscutParser.add_argument('--inputTemplateRhoFilename', help='template rho filename', required=True ) -brainscutParser.add_argument('--inputTemplatePhiFilename', help='template phi filename', required=True ) -brainscutParser.add_argument('--inputTemplateThetaFilename', help='template theta filename', required=True ) - -brainscutParser.add_argument('--trainingVectorFilename', help='training vector filename', default="NA" ) -#brainscutParser.add_argument('--modelFileBasename', help='model filei base name for net configuration file (xml).', default="NA" ) -brainscutParser.add_argument('--modelFilename', help='model filename', default="NA", required=True ) +brainscutParser.add_argument('--inputSubjectT1Filename', help='T1 subject filename', required=True) +brainscutParser.add_argument('--inputSubjectT2Filename', help='T2 subject filename', required=False) +# brainscutParser.add_argument('--inputSubjectTotalGMFilename', help='TotalGM filename', required=True ) +brainscutParser.add_argument('--inputSubjectGadSGFilename', help='GadSG subject filename', required=False) +# brainscutParser.add_argument('--inputSubjectBrainMaskFilename', help='BrainMask subject filename' ) +# brainscutParser.add_argument('--inputSubjectRegistrationROIFilename', help='template brain mask filename' ) + +brainscutParser.add_argument('--inputTemplateT1', help='template T1-weighted filename', required=True) +# brainscutParser.add_argument('--inputTemplateRegistrationROIFilename', help='template brain region filename', required=True ) + +brainscutParser.add_argument('--inputTemplateRhoFilename', help='template rho filename', required=True) +brainscutParser.add_argument('--inputTemplatePhiFilename', help='template phi filename', required=True) +brainscutParser.add_argument('--inputTemplateThetaFilename', help='template theta filename', required=True) + +brainscutParser.add_argument('--trainingVectorFilename', help='training vector filename', default="NA") +# brainscutParser.add_argument('--modelFileBasename', help='model filei base name for net configuration file (xml).', default="NA" ) +brainscutParser.add_argument('--modelFilename', help='model filename', default="NA", required=True) brainscutParser.add_argument('--vectorNormalization', help='feature vector normalization (IQR,Linear,Sigmoid_Q01,Sigmoid_Q05,ZScore,NONE)', required=True) # probability maps -brainscutParser.add_argument('--probabilityMapsLeftCaudate', help='model probability maps for left caudate' , required=True) -brainscutParser.add_argument('--probabilityMapsRightCaudate', help='model probability maps for right caudate' , required=True) 
-brainscutParser.add_argument('--probabilityMapsLeftPutamen', help='model probability maps for left putamen' , required=True) -brainscutParser.add_argument('--probabilityMapsRightPutamen', help='model probability maps for right putamen' , required=True) -brainscutParser.add_argument('--probabilityMapsLeftThalamus', help='model probability maps for left thalamus' , required=True) -brainscutParser.add_argument('--probabilityMapsRightThalamus', help='model probability maps for right thalamus' , required=True) -brainscutParser.add_argument('--probabilityMapsLeftHippocampus', help='model probability maps for left hippocampus' , required=True) -brainscutParser.add_argument('--probabilityMapsRightHippocampus', help='model probability maps for right hippocampus' , required=True) -brainscutParser.add_argument('--probabilityMapsLeftAccumben', help='model probability maps for left accumben' , required=True) -brainscutParser.add_argument('--probabilityMapsRightAccumben', help='model probability maps for right accumben' , required=True) -brainscutParser.add_argument('--probabilityMapsLeftGlobus', help='model probability maps for left globus' , required=True) -brainscutParser.add_argument('--probabilityMapsRightGlobus', help='model probability maps for right globus' , required=True) +brainscutParser.add_argument('--probabilityMapsLeftCaudate', help='model probability maps for left caudate', required=True) +brainscutParser.add_argument('--probabilityMapsRightCaudate', help='model probability maps for right caudate', required=True) +brainscutParser.add_argument('--probabilityMapsLeftPutamen', help='model probability maps for left putamen', required=True) +brainscutParser.add_argument('--probabilityMapsRightPutamen', help='model probability maps for right putamen', required=True) +brainscutParser.add_argument('--probabilityMapsLeftThalamus', help='model probability maps for left thalamus', required=True) +brainscutParser.add_argument('--probabilityMapsRightThalamus', help='model probability maps for right thalamus', required=True) +brainscutParser.add_argument('--probabilityMapsLeftHippocampus', help='model probability maps for left hippocampus', required=True) +brainscutParser.add_argument('--probabilityMapsRightHippocampus', help='model probability maps for right hippocampus', required=True) +brainscutParser.add_argument('--probabilityMapsLeftAccumben', help='model probability maps for left accumben', required=True) +brainscutParser.add_argument('--probabilityMapsRightAccumben', help='model probability maps for right accumben', required=True) +brainscutParser.add_argument('--probabilityMapsLeftGlobus', help='model probability maps for left globus', required=True) +brainscutParser.add_argument('--probabilityMapsRightGlobus', help='model probability maps for right globus', required=True) brainscutParser.add_argument('--deformationFromTemplateToSubject', help="deformationFromTemplateToSubject") brainscutParser.add_argument('--deformationFromSubjectToTemplate', help="deformationFromSubjectToTemplate") @@ -204,41 +205,41 @@ def xmlGenerator( args, roi="" ): # # output arguments # -brainscutParser.add_argument('--outputBinaryLeftCaudate', help='output binary file name for left caudate' ) -brainscutParser.add_argument('--outputBinaryRightCaudate', help='output binary file name for right caudate' ) -brainscutParser.add_argument('--outputBinaryLeftPutamen', help='output binary file name for left putamen' ) -brainscutParser.add_argument('--outputBinaryRightPutamen', help='output binary file name for right putamen' 
)
-brainscutParser.add_argument('--outputBinaryLeftThalamus', help='output binary file name for left thalamus' )
-brainscutParser.add_argument('--outputBinaryRightThalamus', help='output binary file name for right thalamus' )
-brainscutParser.add_argument('--outputBinaryLeftHippocampus', help='output binary file name for left hippocampus' )
-brainscutParser.add_argument('--outputBinaryRightHippocampus', help='output binary file name for right hippocampus' )
-brainscutParser.add_argument('--outputBinaryLeftAccumben', help='output binary file name for left accumben' )
-brainscutParser.add_argument('--outputBinaryRightAccumben', help='output binary file name for right accumben' )
-brainscutParser.add_argument('--outputBinaryLeftGlobus', help='output binary file name for left globus' )
-brainscutParser.add_argument('--outputBinaryRightGlobus', help='output binary file name for right globus' )
-
-brainscutParser.add_argument('--xmlFilename',help='BRAINSCut xml configuration filename', default="output.xml")
-
-args=brainscutParser.parse_args()
+brainscutParser.add_argument('--outputBinaryLeftCaudate', help='output binary file name for left caudate')
+brainscutParser.add_argument('--outputBinaryRightCaudate', help='output binary file name for right caudate')
+brainscutParser.add_argument('--outputBinaryLeftPutamen', help='output binary file name for left putamen')
+brainscutParser.add_argument('--outputBinaryRightPutamen', help='output binary file name for right putamen')
+brainscutParser.add_argument('--outputBinaryLeftThalamus', help='output binary file name for left thalamus')
+brainscutParser.add_argument('--outputBinaryRightThalamus', help='output binary file name for right thalamus')
+brainscutParser.add_argument('--outputBinaryLeftHippocampus', help='output binary file name for left hippocampus')
+brainscutParser.add_argument('--outputBinaryRightHippocampus', help='output binary file name for right hippocampus')
+brainscutParser.add_argument('--outputBinaryLeftAccumben', help='output binary file name for left accumben')
+brainscutParser.add_argument('--outputBinaryRightAccumben', help='output binary file name for right accumben')
+brainscutParser.add_argument('--outputBinaryLeftGlobus', help='output binary file name for left globus')
+brainscutParser.add_argument('--outputBinaryRightGlobus', help='output binary file name for right globus')
+
+brainscutParser.add_argument('--xmlFilename', help='BRAINSCut xml configuration filename', default="output.xml")
+
+args = brainscutParser.parse_args()

 ## HACK: DOUBLE CHECK THAT IQR IS USED
 if args.vectorNormalization != "IQR":
-    print "ERROR: ONLY IQR SUPPORTED AT THE MOMENT"
-    exit -1
-
+    print "ERROR: ONLY IQR SUPPORTED AT THE MOMENT"
+    exit(-1)
+

-print( args )
+print(args)

 roiList = ['accumben', 'caudate', 'putamen', 'globus', 'thalamus', 'hippocampus']

 for roi in roiList:
-    currentXmlFilename = xmlGenerator( args, roi )
-    currentModelFilename = args.modelFilename[:-3] + '_' + roi + '.gz' # trainModelFile.txtD0060NT0060_accumben.gz
-
-    BRAINSCutCommand=["BRAINSCut" + " --applyModel " +
-                      " --netConfiguration " + currentXmlFilename +
-                      " --modelFilename " + currentModelFilename +
-                      " --method RandomForest" +
-                      " --numberOfTrees 60 --randomTreeDepth 60"
-                     ]
+    currentXmlFilename = xmlGenerator(args, roi)
+    currentModelFilename = args.modelFilename[:-3] + '_' + roi + '.gz'  # trainModelFile.txtD0060NT0060_accumben.gz
+
+    BRAINSCutCommand = ["BRAINSCut" + " --applyModel " +
+                        " --netConfiguration " + currentXmlFilename +
+                        " --modelFilename " +
currentModelFilename + + " --method RandomForest" + + " --numberOfTrees 60 --randomTreeDepth 60" + ] print("HACK: BRAINCUT COMMAND: {0}".format(BRAINSCutCommand)) subprocess.call(BRAINSCutCommand, shell=True) """ diff --git a/AutoWorkup/PipeLineFunctionHelpers.py b/AutoWorkup/PipeLineFunctionHelpers.py index 6eb00afc..fb91217b 100644 --- a/AutoWorkup/PipeLineFunctionHelpers.py +++ b/AutoWorkup/PipeLineFunctionHelpers.py @@ -17,139 +17,143 @@ 'HIPPOCAMPUS', 'CRBLGM', 'CRBLWM', 'CSF', 'VB', 'NOTCSF', 'NOTGM', 'NOTWM', 'NOTVB', 'AIR'] + def makeListOfValidImages(imageFile): - if imageFile is None: - return [] # an empty iterable is a valid input to a data string - else: + if imageFile is None: + return [] # an empty iterable is a valid input to a data string + else: return imageFile -def getListIndex( imageList, index): +def getListIndex(imageList, index): return imageList[index] -def ClipT1ImageWithBrainMask(t1_image,brain_labels,clipped_file_name): + +def ClipT1ImageWithBrainMask(t1_image, brain_labels, clipped_file_name): import os import sys import SimpleITK as sitk ## Now clean up the posteriors based on anatomical knowlege. ## sometimes the posteriors are not relevant for priors ## due to anomolies around the edges. - t1=sitk.Cast(sitk.ReadImage(t1_image),sitk.sitkFloat32) - bl=sitk.Cast(sitk.ReadImage(brain_labels),sitk.sitkFloat32) - bl_binary=sitk.Cast(sitk.BinaryThreshold(bl,1,1000000),sitk.sitkFloat32) - clipped=t1*bl_binary - sitk.WriteImage(clipped,clipped_file_name) - clipped_file=os.path.realpath(clipped_file_name) + t1 = sitk.Cast(sitk.ReadImage(t1_image), sitk.sitkFloat32) + bl = sitk.Cast(sitk.ReadImage(brain_labels), sitk.sitkFloat32) + bl_binary = sitk.Cast(sitk.BinaryThreshold(bl, 1, 1000000), sitk.sitkFloat32) + clipped = t1 * bl_binary + sitk.WriteImage(clipped, clipped_file_name) + clipped_file = os.path.realpath(clipped_file_name) return clipped_file + def UnwrapPosteriorImagesFromDictionaryFunction(postDict): return postDict.values() -def GetOnePosteriorImageFromDictionaryFunction(postDict,key): + +def GetOnePosteriorImageFromDictionaryFunction(postDict, key): return postDict[key] -def FixWMPartitioning(brainMask,PosteriorsList): + +def FixWMPartitioning(brainMask, PosteriorsList): """"There were some errors in mis-classifications for WM/NON_WM""" import SimpleITK as sitk import os - def FillHolePreserveEdge(inputMask,HOLE_FILL_SIZE): + def FillHolePreserveEdge(inputMask, HOLE_FILL_SIZE): """This function fills holes and tries to preserve the exterior topology. Holes that are within 3 units of the exterior topology may not be completely filled. Any voxel in the original mask will be guanteed to be in the returned mask.""" return sitk.BinaryThreshold( - inputMask + - sitk.ErodeObjectMorphology( - sitk.VotingBinaryHoleFilling(BM,[HOLE_FILL_SIZE,HOLE_FILL_SIZE,HOLE_FILL_SIZE]) - ,HOLE_FILL_SIZE) - ,1,10000) - - BM=sitk.BinaryThreshold(sitk.ReadImage(brainMask),1,1000) - BM_FILLED= FillHolePreserveEdge(BM,3) - - NOTCSF_index=None #Note: Purposfully using '-1' as it will force an error. 
- CSF_index=None - NOTGM_index=None - GM_index=None - NOTWM_index=None - WM_index=None - NOTVB_index=None - VB_index=None - AIR_index=None - for i in range(0,len(PosteriorsList)): + inputMask + + sitk.ErodeObjectMorphology( + sitk.VotingBinaryHoleFilling(BM, [HOLE_FILL_SIZE, HOLE_FILL_SIZE, HOLE_FILL_SIZE]), HOLE_FILL_SIZE), 1, 10000) + + BM = sitk.BinaryThreshold(sitk.ReadImage(brainMask), 1, 1000) + BM_FILLED = FillHolePreserveEdge(BM, 3) + + NOTCSF_index = None # Note: Purposfully using '-1' as it will force an error. + CSF_index = None + NOTGM_index = None + GM_index = None + NOTWM_index = None + WM_index = None + NOTVB_index = None + VB_index = None + AIR_index = None + for i in range(0, len(PosteriorsList)): if os.path.basename(PosteriorsList[i]) == 'POSTERIOR_NOTCSF.nii.gz': - NOTCSF_index=i + NOTCSF_index = i elif os.path.basename(PosteriorsList[i]) == 'POSTERIOR_CSF.nii.gz': - CSF_index=i + CSF_index = i elif os.path.basename(PosteriorsList[i]) == 'POSTERIOR_NOTGM.nii.gz': - NOTGM_index=i + NOTGM_index = i elif os.path.basename(PosteriorsList[i]) == 'POSTERIOR_SURFGM.nii.gz': - GM_index=i + GM_index = i elif os.path.basename(PosteriorsList[i]) == 'POSTERIOR_NOTWM.nii.gz': - NOTWM_index=i + NOTWM_index = i elif os.path.basename(PosteriorsList[i]) == 'POSTERIOR_WM.nii.gz': - WM_index=i + WM_index = i elif os.path.basename(PosteriorsList[i]) == 'POSTERIOR_NOTVB.nii.gz': - NOTVB_index=i + NOTVB_index = i elif os.path.basename(PosteriorsList[i]) == 'POSTERIOR_VB.nii.gz': - VB_index=i + VB_index = i elif os.path.basename(PosteriorsList[i]) == 'POSTERIOR_AIR.nii.gz': - AIR_index=i - - def ShiftValueForHardPartition(BM_FILLED,ShiftPosteriorsList,NOTREGION_index,REGION_index,REGION_NAME,NOTREGION_NAME): - NOTREGION=sitk.ReadImage(ShiftPosteriorsList[NOTREGION_index]) - REGION=sitk.ReadImage(ShiftPosteriorsList[REGION_index]) - ALL_REGION=NOTREGION+REGION - NEW_REGION=ALL_REGION*sitk.Cast(BM_FILLED,sitk.sitkFloat32) - NEW_NOTREGION=ALL_REGION*sitk.Cast((1-BM_FILLED),sitk.sitkFloat32) - NEW_REGION_FN=os.path.realpath('POSTERIOR_{0}.nii.gz'.format(REGION_NAME)) - NEW_NOTREGION_FN=os.path.realpath('POSTERIOR_{0}.nii.gz'.format(NOTREGION_NAME)) - sitk.WriteImage(NEW_REGION,NEW_REGION_FN) - sitk.WriteImage(NEW_NOTREGION,NEW_NOTREGION_FN) - ShiftPosteriorsList[NOTREGION_index]=NEW_NOTREGION_FN - ShiftPosteriorsList[REGION_index]=NEW_REGION_FN + AIR_index = i + + def ShiftValueForHardPartition(BM_FILLED, ShiftPosteriorsList, NOTREGION_index, REGION_index, REGION_NAME, NOTREGION_NAME): + NOTREGION = sitk.ReadImage(ShiftPosteriorsList[NOTREGION_index]) + REGION = sitk.ReadImage(ShiftPosteriorsList[REGION_index]) + ALL_REGION = NOTREGION + REGION + NEW_REGION = ALL_REGION * sitk.Cast(BM_FILLED, sitk.sitkFloat32) + NEW_NOTREGION = ALL_REGION * sitk.Cast((1 - BM_FILLED), sitk.sitkFloat32) + NEW_REGION_FN = os.path.realpath('POSTERIOR_{0}.nii.gz'.format(REGION_NAME)) + NEW_NOTREGION_FN = os.path.realpath('POSTERIOR_{0}.nii.gz'.format(NOTREGION_NAME)) + sitk.WriteImage(NEW_REGION, NEW_REGION_FN) + sitk.WriteImage(NEW_NOTREGION, NEW_NOTREGION_FN) + ShiftPosteriorsList[NOTREGION_index] = NEW_NOTREGION_FN + ShiftPosteriorsList[REGION_index] = NEW_REGION_FN return ShiftPosteriorsList - UpdatedPosteriorsList=list(PosteriorsList) - UpdatedPosteriorsList = ShiftValueForHardPartition(BM_FILLED,UpdatedPosteriorsList,NOTCSF_index,CSF_index,'CSF','NOTCSF') - UpdatedPosteriorsList = ShiftValueForHardPartition(BM_FILLED,UpdatedPosteriorsList,NOTGM_index,GM_index,'SURFGM','NOTGM') - UpdatedPosteriorsList = 
ShiftValueForHardPartition(BM_FILLED,UpdatedPosteriorsList,NOTWM_index,WM_index,'WM','NOTWM') - UpdatedPosteriorsList = ShiftValueForHardPartition(BM_FILLED,UpdatedPosteriorsList,NOTVB_index,VB_index,'VB','NOTVB') - - AirMask=sitk.BinaryThreshold( sitk.ReadImage(PosteriorsList[AIR_index]),0.50,1000000) - nonAirMask=sitk.Cast(1-AirMask,sitk.sitkUInt8) - nonAirRegionMask=os.path.realpath('NonAirMask.nii.gz') - sitk.WriteImage(nonAirMask,nonAirRegionMask) - - POSTERIOR_LABELS=dict() #(FG,Label) - POSTERIOR_LABELS["POSTERIOR_ACCUMBEN.nii.gz"] = (1,20) - POSTERIOR_LABELS["POSTERIOR_AIR.nii.gz"] = (0,0) - POSTERIOR_LABELS["POSTERIOR_CAUDATE.nii.gz"] = (1,21) - POSTERIOR_LABELS["POSTERIOR_CRBLGM.nii.gz"] = (1,11) - POSTERIOR_LABELS["POSTERIOR_CRBLWM.nii.gz"] = (1,12) - POSTERIOR_LABELS["POSTERIOR_CSF.nii.gz"] = (1,4) - POSTERIOR_LABELS["POSTERIOR_GLOBUS.nii.gz"] = (1,23) - POSTERIOR_LABELS["POSTERIOR_HIPPOCAMPUS.nii.gz"] = (1,25) - POSTERIOR_LABELS["POSTERIOR_NOTCSF.nii.gz"] = (0,6) - POSTERIOR_LABELS["POSTERIOR_NOTGM.nii.gz"] = (0,7) - POSTERIOR_LABELS["POSTERIOR_NOTVB.nii.gz"] = (0,9) - POSTERIOR_LABELS["POSTERIOR_NOTWM.nii.gz"] = (0,8) - POSTERIOR_LABELS["POSTERIOR_PUTAMEN.nii.gz"] = (1,22) - POSTERIOR_LABELS["POSTERIOR_SURFGM.nii.gz"] = (1,2) - POSTERIOR_LABELS["POSTERIOR_THALAMUS.nii.gz"] = (1,24) - POSTERIOR_LABELS["POSTERIOR_VB.nii.gz"] = (1,5) - POSTERIOR_LABELS["POSTERIOR_WM.nii.gz"] = (1,1) - - MatchingFGCodeList=list() - MatchingLabelList=list() + UpdatedPosteriorsList = list(PosteriorsList) + UpdatedPosteriorsList = ShiftValueForHardPartition(BM_FILLED, UpdatedPosteriorsList, NOTCSF_index, CSF_index, 'CSF', 'NOTCSF') + UpdatedPosteriorsList = ShiftValueForHardPartition(BM_FILLED, UpdatedPosteriorsList, NOTGM_index, GM_index, 'SURFGM', 'NOTGM') + UpdatedPosteriorsList = ShiftValueForHardPartition(BM_FILLED, UpdatedPosteriorsList, NOTWM_index, WM_index, 'WM', 'NOTWM') + UpdatedPosteriorsList = ShiftValueForHardPartition(BM_FILLED, UpdatedPosteriorsList, NOTVB_index, VB_index, 'VB', 'NOTVB') + + AirMask = sitk.BinaryThreshold(sitk.ReadImage(PosteriorsList[AIR_index]), 0.50, 1000000) + nonAirMask = sitk.Cast(1 - AirMask, sitk.sitkUInt8) + nonAirRegionMask = os.path.realpath('NonAirMask.nii.gz') + sitk.WriteImage(nonAirMask, nonAirRegionMask) + + POSTERIOR_LABELS = dict() # (FG,Label) + POSTERIOR_LABELS["POSTERIOR_ACCUMBEN.nii.gz"] = (1, 20) + POSTERIOR_LABELS["POSTERIOR_AIR.nii.gz"] = (0, 0) + POSTERIOR_LABELS["POSTERIOR_CAUDATE.nii.gz"] = (1, 21) + POSTERIOR_LABELS["POSTERIOR_CRBLGM.nii.gz"] = (1, 11) + POSTERIOR_LABELS["POSTERIOR_CRBLWM.nii.gz"] = (1, 12) + POSTERIOR_LABELS["POSTERIOR_CSF.nii.gz"] = (1, 4) + POSTERIOR_LABELS["POSTERIOR_GLOBUS.nii.gz"] = (1, 23) + POSTERIOR_LABELS["POSTERIOR_HIPPOCAMPUS.nii.gz"] = (1, 25) + POSTERIOR_LABELS["POSTERIOR_NOTCSF.nii.gz"] = (0, 6) + POSTERIOR_LABELS["POSTERIOR_NOTGM.nii.gz"] = (0, 7) + POSTERIOR_LABELS["POSTERIOR_NOTVB.nii.gz"] = (0, 9) + POSTERIOR_LABELS["POSTERIOR_NOTWM.nii.gz"] = (0, 8) + POSTERIOR_LABELS["POSTERIOR_PUTAMEN.nii.gz"] = (1, 22) + POSTERIOR_LABELS["POSTERIOR_SURFGM.nii.gz"] = (1, 2) + POSTERIOR_LABELS["POSTERIOR_THALAMUS.nii.gz"] = (1, 24) + POSTERIOR_LABELS["POSTERIOR_VB.nii.gz"] = (1, 5) + POSTERIOR_LABELS["POSTERIOR_WM.nii.gz"] = (1, 1) + + MatchingFGCodeList = list() + MatchingLabelList = list() for full_post_path_fn in UpdatedPosteriorsList: - post_key=os.path.basename(full_post_path_fn) + post_key = os.path.basename(full_post_path_fn) MatchingFGCodeList.append(POSTERIOR_LABELS[post_key][0]) 
MatchingLabelList.append(POSTERIOR_LABELS[post_key][1]) - return UpdatedPosteriorsList,MatchingFGCodeList,MatchingLabelList,nonAirRegionMask + return UpdatedPosteriorsList, MatchingFGCodeList, MatchingLabelList, nonAirRegionMask + def AccumulateLikeTissuePosteriors(posteriorImages): import os @@ -159,74 +163,74 @@ def AccumulateLikeTissuePosteriors(posteriorImages): ## sometimes the posteriors are not relevant for priors ## due to anomalies around the edges. - load_images_list=dict() + load_images_list = dict() for full_pathname in posteriorImages: - base_name=os.path.basename(full_pathname) - load_images_list[base_name]=sitk.ReadImage(full_pathname) - GM_ACCUM=[ - 'POSTERIOR_ACCUMBEN.nii.gz', - 'POSTERIOR_CAUDATE.nii.gz', - 'POSTERIOR_CRBLGM.nii.gz', - 'POSTERIOR_HIPPOCAMPUS.nii.gz', - 'POSTERIOR_PUTAMEN.nii.gz', - 'POSTERIOR_THALAMUS.nii.gz', - 'POSTERIOR_SURFGM.nii.gz', - ] - WM_ACCUM=[ - 'POSTERIOR_CRBLWM.nii.gz', - 'POSTERIOR_WM.nii.gz' - ] - CSF_ACCUM=[ - 'POSTERIOR_CSF.nii.gz', - ] - VB_ACCUM=[ - 'POSTERIOR_VB.nii.gz', - ] - GLOBUS_ACCUM=[ - 'POSTERIOR_GLOBUS.nii.gz', - ] - BACKGROUND_ACCUM=[ - 'POSTERIOR_AIR.nii.gz', - 'POSTERIOR_NOTCSF.nii.gz', - 'POSTERIOR_NOTGM.nii.gz', - 'POSTERIOR_NOTVB.nii.gz', - 'POSTERIOR_NOTWM.nii.gz', - ] + base_name = os.path.basename(full_pathname) + load_images_list[base_name] = sitk.ReadImage(full_pathname) + GM_ACCUM = [ + 'POSTERIOR_ACCUMBEN.nii.gz', + 'POSTERIOR_CAUDATE.nii.gz', + 'POSTERIOR_CRBLGM.nii.gz', + 'POSTERIOR_HIPPOCAMPUS.nii.gz', + 'POSTERIOR_PUTAMEN.nii.gz', + 'POSTERIOR_THALAMUS.nii.gz', + 'POSTERIOR_SURFGM.nii.gz', + ] + WM_ACCUM = [ + 'POSTERIOR_CRBLWM.nii.gz', + 'POSTERIOR_WM.nii.gz' + ] + CSF_ACCUM = [ + 'POSTERIOR_CSF.nii.gz', + ] + VB_ACCUM = [ + 'POSTERIOR_VB.nii.gz', + ] + GLOBUS_ACCUM = [ + 'POSTERIOR_GLOBUS.nii.gz', + ] + BACKGROUND_ACCUM = [ + 'POSTERIOR_AIR.nii.gz', + 'POSTERIOR_NOTCSF.nii.gz', + 'POSTERIOR_NOTGM.nii.gz', + 'POSTERIOR_NOTVB.nii.gz', + 'POSTERIOR_NOTWM.nii.gz', + ] ## The next 2 items MUST be synchronized - AccumulatePriorsNames=['POSTERIOR_GM_TOTAL.nii.gz','POSTERIOR_WM_TOTAL.nii.gz', - 'POSTERIOR_CSF_TOTAL.nii.gz','POSTERIOR_VB_TOTAL.nii.gz', - 'POSTERIOR_GLOBUS_TOTAL.nii.gz','POSTERIOR_BACKGROUND_TOTAL.nii.gz'] - ForcedOrderingLists=[GM_ACCUM,WM_ACCUM,CSF_ACCUM,VB_ACCUM,GLOBUS_ACCUM,BACKGROUND_ACCUM] - AccumulatePriorsList=list() - for index in range(0,len(ForcedOrderingLists)): - outname=AccumulatePriorsNames[index] - inlist=ForcedOrderingLists[index] - accum_image= load_images_list[ inlist[0] ] # copy first image - for curr_image in range(1,len(inlist)): - accum_image=accum_image + load_images_list[ inlist[curr_image] ] - sitk.WriteImage(accum_image,outname) + AccumulatePriorsNames = ['POSTERIOR_GM_TOTAL.nii.gz', 'POSTERIOR_WM_TOTAL.nii.gz', + 'POSTERIOR_CSF_TOTAL.nii.gz', 'POSTERIOR_VB_TOTAL.nii.gz', + 'POSTERIOR_GLOBUS_TOTAL.nii.gz', 'POSTERIOR_BACKGROUND_TOTAL.nii.gz'] + ForcedOrderingLists = [GM_ACCUM, WM_ACCUM, CSF_ACCUM, VB_ACCUM, GLOBUS_ACCUM, BACKGROUND_ACCUM] + AccumulatePriorsList = list() + for index in range(0, len(ForcedOrderingLists)): + outname = AccumulatePriorsNames[index] + inlist = ForcedOrderingLists[index] + accum_image = load_images_list[inlist[0]] # copy first image + for curr_image in range(1, len(inlist)): + accum_image = accum_image + load_images_list[inlist[curr_image]] + sitk.WriteImage(accum_image, outname) AccumulatePriorsList.append(os.path.realpath(outname)) print "HACK \n\n\n\n\n\n\n HACK \n\n\n: {APL}\n".format(APL=AccumulatePriorsList) print ": 
{APN}\n".format(APN=AccumulatePriorsNames) - return AccumulatePriorsList,AccumulatePriorsNames + return AccumulatePriorsList, AccumulatePriorsNames def mkdir_p(path): """ Safely make a new directory, checking if it already exists""" try: os.makedirs(path) - except OSError as exc: # Python >2.5 + except OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(path): pass - else: raise + else: + raise def make_dummy_file(fn): """This function just makes a file with the correct name and time stamp""" import time mkdir_p(os.path.dirname(fn)) - ff=open(fn,'w') + ff = open(fn, 'w') ff.write("DummyFile with Proper time stamp") - time.sleep(1) # 1 second + time.sleep(1) # 1 second ff.close() - diff --git a/AutoWorkup/RF12BRAINSCutWrapper.py b/AutoWorkup/RF12BRAINSCutWrapper.py index ac760fbe..fd7e76f7 100644 --- a/AutoWorkup/RF12BRAINSCutWrapper.py +++ b/AutoWorkup/RF12BRAINSCutWrapper.py @@ -9,88 +9,92 @@ Requirements: <<< Interface specifications >>> """ -from nipype.interfaces.base import ( File, TraitedSpec, Interface, CommandLineInputSpec, CommandLine, traits, isdefined) +from nipype.interfaces.base import (File, TraitedSpec, Interface, CommandLineInputSpec, CommandLine, traits, isdefined) import sys import os import warnings ### CommandLine + + class RF12BRAINSCutWrapperCLInputSpec(CommandLineInputSpec): ### subject specific - inputSubjectT1Filename = File( desc="Subject T1 Volume", exists=True, mandatory=True, argstr="--inputSubjectT1Filename %s") - inputSubjectT2Filename = File( desc="Subject T2 Volume", exists=True, mandatory=False, argstr="--inputSubjectT2Filename %s") - inputSubjectGadSGFilename = File( desc="Subject SG Volume", exists=True, mandatory=False, argstr="--inputSubjectGadSGFilename %s") - vectorNormalization = traits.Enum("IQR","Linear","Sigmoid_Q01","Sigmoid_Q05","ZScore","NONE", - desc="The type of intensity normalization to use",exists=True,mandatory=True,argstr="--vectorNormalization %s") + inputSubjectT1Filename = File(desc="Subject T1 Volume", exists=True, mandatory=True, argstr="--inputSubjectT1Filename %s") + inputSubjectT2Filename = File(desc="Subject T2 Volume", exists=True, mandatory=False, argstr="--inputSubjectT2Filename %s") + inputSubjectGadSGFilename = File(desc="Subject SG Volume", exists=True, mandatory=False, argstr="--inputSubjectGadSGFilename %s") + vectorNormalization = traits.Enum("IQR", "Linear", "Sigmoid_Q01", "Sigmoid_Q05", "ZScore", "NONE", + desc="The type of intensity normalization to use", exists=True, mandatory=True, argstr="--vectorNormalization %s") ### model specific - modelFilename = File( desc="modelFilename", exists=True, mandatory=True, argstr="--modelFilename %s") - trainingVectorFilename = File( desc="training vectof file name", exists=False, mandatory=False, argstr="--trainingVectorFilename %s") - inputTemplateT1 = File( desc="Atlas Template T1 image", exists=False, mandatory=False, argstr="--inputTemplateT1 %s") - inputTemplateRhoFilename = File( desc="Atlas Template rho image", exists=False, mandatory=False, argstr="--inputTemplateRhoFilename %s") - inputTemplatePhiFilename = File( desc="Atlas Template phi image", exists=False, mandatory=False, argstr="--inputTemplatePhiFilename %s") - inputTemplateThetaFilename = File( desc="Atlas Template theta image", exists=False, mandatory=False, argstr="--inputTemplateThetaFilename %s") - deformationFromTemplateToSubject = File( desc="Atlas To subject Deformation", exists=False, mandatory=False, argstr="--deformationFromTemplateToSubject %s") + modelFilename = 
File(desc="modelFilename", exists=True, mandatory=True, argstr="--modelFilename %s") + trainingVectorFilename = File(desc="training vectof file name", exists=False, mandatory=False, argstr="--trainingVectorFilename %s") + inputTemplateT1 = File(desc="Atlas Template T1 image", exists=False, mandatory=False, argstr="--inputTemplateT1 %s") + inputTemplateRhoFilename = File(desc="Atlas Template rho image", exists=False, mandatory=False, argstr="--inputTemplateRhoFilename %s") + inputTemplatePhiFilename = File(desc="Atlas Template phi image", exists=False, mandatory=False, argstr="--inputTemplatePhiFilename %s") + inputTemplateThetaFilename = File(desc="Atlas Template theta image", exists=False, mandatory=False, argstr="--inputTemplateThetaFilename %s") + deformationFromTemplateToSubject = File(desc="Atlas To subject Deformation", exists=False, mandatory=False, argstr="--deformationFromTemplateToSubject %s") ### probability maps - probabilityMapsLeftAccumben = File( desc="Spatial probability map of left accumben", exists=True, mandatory=True, argstr="--probabilityMapsLeftAccumben %s") - probabilityMapsRightAccumben = File( desc="Spatial probability map of right accumben", exists=True, mandatory=True, argstr="--probabilityMapsRightAccumben %s") + probabilityMapsLeftAccumben = File(desc="Spatial probability map of left accumben", exists=True, mandatory=True, argstr="--probabilityMapsLeftAccumben %s") + probabilityMapsRightAccumben = File(desc="Spatial probability map of right accumben", exists=True, mandatory=True, argstr="--probabilityMapsRightAccumben %s") - probabilityMapsLeftCaudate = File( desc="Spatial probability map of left caudate", exists=True, mandatory=True, argstr="--probabilityMapsLeftCaudate %s") - probabilityMapsRightCaudate = File( desc="Spatial probability map of right caudate", exists=True, mandatory=True, argstr="--probabilityMapsRightCaudate %s") + probabilityMapsLeftCaudate = File(desc="Spatial probability map of left caudate", exists=True, mandatory=True, argstr="--probabilityMapsLeftCaudate %s") + probabilityMapsRightCaudate = File(desc="Spatial probability map of right caudate", exists=True, mandatory=True, argstr="--probabilityMapsRightCaudate %s") - probabilityMapsLeftGlobus = File( desc="Spatial probability map of left globus", exists=True, mandatory=True, argstr="--probabilityMapsLeftGlobus %s") - probabilityMapsRightGlobus = File( desc="Spatial probability map of right globus", exists=True, mandatory=True, argstr="--probabilityMapsRightGlobus %s") + probabilityMapsLeftGlobus = File(desc="Spatial probability map of left globus", exists=True, mandatory=True, argstr="--probabilityMapsLeftGlobus %s") + probabilityMapsRightGlobus = File(desc="Spatial probability map of right globus", exists=True, mandatory=True, argstr="--probabilityMapsRightGlobus %s") - probabilityMapsLeftHippocampus = File( desc="Spatial probability map of left hippocampus", exists=True, mandatory=True, argstr="--probabilityMapsLeftHippocampus %s") - probabilityMapsRightHippocampus = File( desc="Spatial probability map of right hippocampus", exists=True, mandatory=True, argstr="--probabilityMapsRightHippocampus %s") + probabilityMapsLeftHippocampus = File(desc="Spatial probability map of left hippocampus", exists=True, mandatory=True, argstr="--probabilityMapsLeftHippocampus %s") + probabilityMapsRightHippocampus = File(desc="Spatial probability map of right hippocampus", exists=True, mandatory=True, argstr="--probabilityMapsRightHippocampus %s") - probabilityMapsLeftPutamen = File( desc="Spatial 
probability map of left putamen", exists=True, mandatory=True, argstr="--probabilityMapsLeftPutamen %s") - probabilityMapsRightPutamen = File( desc="Spatial probability map of right putamen", exists=True, mandatory=True, argstr="--probabilityMapsRightPutamen %s") + probabilityMapsLeftPutamen = File(desc="Spatial probability map of left putamen", exists=True, mandatory=True, argstr="--probabilityMapsLeftPutamen %s") + probabilityMapsRightPutamen = File(desc="Spatial probability map of right putamen", exists=True, mandatory=True, argstr="--probabilityMapsRightPutamen %s") - probabilityMapsLeftThalamus = File( desc="Spatial probability map of left thalamus", exists=True, mandatory=True, argstr="--probabilityMapsLeftThalamus %s") - probabilityMapsRightThalamus = File( desc="Spatial probability map of right thalamus", exists=True, mandatory=True, argstr="--probabilityMapsRightThalamus %s") + probabilityMapsLeftThalamus = File(desc="Spatial probability map of left thalamus", exists=True, mandatory=True, argstr="--probabilityMapsLeftThalamus %s") + probabilityMapsRightThalamus = File(desc="Spatial probability map of right thalamus", exists=True, mandatory=True, argstr="--probabilityMapsRightThalamus %s") - xmlFilename = File( desc = "Net configuration xml file", exists = False, mandatory = False, argstr="--xmlFilename %s") + xmlFilename = File(desc="Net configuration xml file", exists=False, mandatory=False, argstr="--xmlFilename %s") - outputBinaryLeftAccumben = File( desc = "Output binary file of left accumben", exists = False, mandatory = True, argstr="--outputBinaryLeftAccumben %s") - outputBinaryRightAccumben = File( desc = "Output binary file of right accumben", exists = False, mandatory = True, argstr="--outputBinaryRightAccumben %s") + outputBinaryLeftAccumben = File(desc="Output binary file of left accumben", exists=False, mandatory=True, argstr="--outputBinaryLeftAccumben %s") + outputBinaryRightAccumben = File(desc="Output binary file of right accumben", exists=False, mandatory=True, argstr="--outputBinaryRightAccumben %s") - outputBinaryLeftCaudate = File( desc = "Output binary file of left caudate", exists = False, mandatory = True, argstr="--outputBinaryLeftCaudate %s") - outputBinaryRightCaudate = File( desc = "Output binary file of right caudate", exists = False, mandatory = True, argstr="--outputBinaryRightCaudate %s") + outputBinaryLeftCaudate = File(desc="Output binary file of left caudate", exists=False, mandatory=True, argstr="--outputBinaryLeftCaudate %s") + outputBinaryRightCaudate = File(desc="Output binary file of right caudate", exists=False, mandatory=True, argstr="--outputBinaryRightCaudate %s") - outputBinaryLeftGlobus = File( desc = "Output binary file of left globus", exists = False, mandatory = True, argstr="--outputBinaryLeftGlobus %s") - outputBinaryRightGlobus = File( desc = "Output binary file of right globus", exists = False, mandatory = True, argstr="--outputBinaryRightGlobus %s") + outputBinaryLeftGlobus = File(desc="Output binary file of left globus", exists=False, mandatory=True, argstr="--outputBinaryLeftGlobus %s") + outputBinaryRightGlobus = File(desc="Output binary file of right globus", exists=False, mandatory=True, argstr="--outputBinaryRightGlobus %s") - outputBinaryLeftHippocampus = File( desc = "Output binary file of left hippocampus", exists = False, mandatory = True, argstr="--outputBinaryLeftHippocampus %s") - outputBinaryRightHippocampus = File( desc = "Output binary file of right hippocampus", exists = False, mandatory = True, 
argstr="--outputBinaryRightHippocampus %s") + outputBinaryLeftHippocampus = File(desc="Output binary file of left hippocampus", exists=False, mandatory=True, argstr="--outputBinaryLeftHippocampus %s") + outputBinaryRightHippocampus = File(desc="Output binary file of right hippocampus", exists=False, mandatory=True, argstr="--outputBinaryRightHippocampus %s") - outputBinaryLeftPutamen = File( desc = "Output binary file of left putamen", exists = False, mandatory = True, argstr="--outputBinaryLeftPutamen %s") - outputBinaryRightPutamen = File( desc = "Output binary file of right putamen", exists = False, mandatory = True, argstr="--outputBinaryRightPutamen %s") + outputBinaryLeftPutamen = File(desc="Output binary file of left putamen", exists=False, mandatory=True, argstr="--outputBinaryLeftPutamen %s") + outputBinaryRightPutamen = File(desc="Output binary file of right putamen", exists=False, mandatory=True, argstr="--outputBinaryRightPutamen %s") + + outputBinaryLeftThalamus = File(desc="Output binary file of left thalamus", exists=False, mandatory=True, argstr="--outputBinaryLeftThalamus %s") + outputBinaryRightThalamus = File(desc="Output binary file of right thalamus", exists=False, mandatory=True, argstr="--outputBinaryRightThalamus %s") - outputBinaryLeftThalamus = File( desc = "Output binary file of left thalamus", exists = False, mandatory = True, argstr="--outputBinaryLeftThalamus %s") - outputBinaryRightThalamus = File( desc = "Output binary file of right thalamus", exists = False, mandatory = True, argstr="--outputBinaryRightThalamus %s") class RF12BRAINSCutWrapperCLOutputSpec(TraitedSpec): - xmlFilename = File( desc = "Net configuration xml file", exists = False, mandatory = False) + xmlFilename = File(desc="Net configuration xml file", exists=False, mandatory=False) + + outputBinaryLeftAccumben = File(desc="Output binary file of left accumben", exists=True, mandatory=True) + outputBinaryRightAccumben = File(desc="Output binary file of right accumben", exists=True, mandatory=True) - outputBinaryLeftAccumben = File( desc = "Output binary file of left accumben", exists = True, mandatory = True) - outputBinaryRightAccumben = File( desc = "Output binary file of right accumben", exists = True, mandatory = True) + outputBinaryLeftCaudate = File(desc="Output binary file of left caudate", exists=True, mandatory=True) + outputBinaryRightCaudate = File(desc="Output binary file of right caudate", exists=True, mandatory=True) - outputBinaryLeftCaudate = File( desc = "Output binary file of left caudate", exists = True, mandatory = True) - outputBinaryRightCaudate = File( desc = "Output binary file of right caudate", exists = True, mandatory = True) + outputBinaryLeftGlobus = File(desc="Output binary file of left globus", exists=True, mandatory=True) + outputBinaryRightGlobus = File(desc="Output binary file of right globus", exists=True, mandatory=True) - outputBinaryLeftGlobus = File( desc = "Output binary file of left globus", exists = True, mandatory = True) - outputBinaryRightGlobus = File( desc = "Output binary file of right globus", exists = True, mandatory = True) + outputBinaryLeftHippocampus = File(desc="Output binary file of left hippocampus", exists=True, mandatory=True) + outputBinaryRightHippocampus = File(desc="Output binary file of right hippocampus", exists=True, mandatory=True) - outputBinaryLeftHippocampus = File( desc = "Output binary file of left hippocampus", exists = True, mandatory = True) - outputBinaryRightHippocampus = File( desc = "Output binary file of right 
hippocampus", exists = True, mandatory = True) + outputBinaryLeftPutamen = File(desc="Output binary file of left putamen", exists=True, mandatory=True) + outputBinaryRightPutamen = File(desc="Output binary file of right putamen", exists=True, mandatory=True) - outputBinaryLeftPutamen = File( desc = "Output binary file of left putamen", exists = True, mandatory = True) - outputBinaryRightPutamen = File( desc = "Output binary file of right putamen", exists = True, mandatory = True) + outputBinaryLeftThalamus = File(desc="Output binary file of left thalamus", exists=True, mandatory=True) + outputBinaryRightThalamus = File(desc="Output binary file of right thalamus", exists=True, mandatory=True) - outputBinaryLeftThalamus = File( desc = "Output binary file of left thalamus", exists = True, mandatory = True) - outputBinaryRightThalamus = File( desc = "Output binary file of right thalamus", exists = True, mandatory = True) class RF12BRAINSCutWrapper(CommandLine): """ @@ -117,6 +121,6 @@ def _outputs_from_inputs(self, outputs): outputs[name] = os.path.abspath(coresponding_input) return outputs -#if __name__ == '__main__': +# if __name__ == '__main__': # RF12Test = RF12BRAINSCutWrapper(sys.argv) # RF12Test.run() diff --git a/AutoWorkup/RF8BRAINSCutWrapper.py b/AutoWorkup/RF8BRAINSCutWrapper.py index 6c87f51c..09cc3885 100644 --- a/AutoWorkup/RF8BRAINSCutWrapper.py +++ b/AutoWorkup/RF8BRAINSCutWrapper.py @@ -9,71 +9,75 @@ Requirements: <<< Interface specifications >>> """ -from nipype.interfaces.base import ( File, TraitedSpec, Interface, CommandLineInputSpec, CommandLine, traits , isdefined) +from nipype.interfaces.base import (File, TraitedSpec, Interface, CommandLineInputSpec, CommandLine, traits, isdefined) import sys import os import warnings ### CommandLine + + class RF8BRAINSCutWrapperCLInputSpec(CommandLineInputSpec): ### subject specific - inputSubjectT1Filename = File( desc="Subject T1 Volume", exists=True, mandatory=True, argstr="--inputSubjectT1Filename %s") - inputSubjectT2Filename = File( desc="Subject T2 Volume", exists=True, mandatory=True, argstr="--inputSubjectT2Filename %s") - inputSubjectGadSGFilename = File( desc="Subject SG Volume", exists=True, mandatory=True, argstr="--inputSubjectGadSGFilename %s") - inputSubjectTotalGMFilename = File( desc="Subject Total GM Volume", exists=True, mandatory=True, argstr="--inputSubjectTotalGMFilename %s") - inputSubjectRegistrationROIFilename = File( desc="Subject Registration ROIMask Volume", exists=True, mandatory=True, argstr="--inputSubjectRegistrationROIFilename %s") + inputSubjectT1Filename = File(desc="Subject T1 Volume", exists=True, mandatory=True, argstr="--inputSubjectT1Filename %s") + inputSubjectT2Filename = File(desc="Subject T2 Volume", exists=True, mandatory=True, argstr="--inputSubjectT2Filename %s") + inputSubjectGadSGFilename = File(desc="Subject SG Volume", exists=True, mandatory=True, argstr="--inputSubjectGadSGFilename %s") + inputSubjectTotalGMFilename = File(desc="Subject Total GM Volume", exists=True, mandatory=True, argstr="--inputSubjectTotalGMFilename %s") + inputSubjectRegistrationROIFilename = File(desc="Subject Registration ROIMask Volume", exists=True, mandatory=True, argstr="--inputSubjectRegistrationROIFilename %s") ### model specific - modelFilename = File( desc="modelFilename", exists=True, mandatory=True, argstr="--modelFilename %s") - trainingVectorFilename = File( desc="training vectof file name", exists=False, mandatory=False, argstr="--trainingVectorFilename %s") - inputTemplateT1 = File( desc="Atlas 
Template T1 image", exists=False, mandatory=False, argstr="--inputTemplateT1 %s") - inputTemplateRegistrationROIFilename = File( desc="Atlas Template MaskROI image", exists=False, mandatory=False, argstr="--inputTemplateRegistrationROIFilename %s") - inputTemplateRhoFilename = File( desc="Atlas Template rho image", exists=False, mandatory=False, argstr="--inputTemplateRhoFilename %s") - inputTemplatePhiFilename = File( desc="Atlas Template phi image", exists=False, mandatory=False, argstr="--inputTemplatePhiFilename %s") - inputTemplateThetaFilename = File( desc="Atlas Template theta image", exists=False, mandatory=False, argstr="--inputTemplateThetaFilename %s") - deformationFromTemplateToSubject = File( desc="Atlas To subject Deformation", exists=False, mandatory=False, argstr="--deformationFromTemplateToSubject %s") + modelFilename = File(desc="modelFilename", exists=True, mandatory=True, argstr="--modelFilename %s") + trainingVectorFilename = File(desc="training vectof file name", exists=False, mandatory=False, argstr="--trainingVectorFilename %s") + inputTemplateT1 = File(desc="Atlas Template T1 image", exists=False, mandatory=False, argstr="--inputTemplateT1 %s") + inputTemplateRegistrationROIFilename = File(desc="Atlas Template MaskROI image", exists=False, mandatory=False, argstr="--inputTemplateRegistrationROIFilename %s") + inputTemplateRhoFilename = File(desc="Atlas Template rho image", exists=False, mandatory=False, argstr="--inputTemplateRhoFilename %s") + inputTemplatePhiFilename = File(desc="Atlas Template phi image", exists=False, mandatory=False, argstr="--inputTemplatePhiFilename %s") + inputTemplateThetaFilename = File(desc="Atlas Template theta image", exists=False, mandatory=False, argstr="--inputTemplateThetaFilename %s") + deformationFromTemplateToSubject = File(desc="Atlas To subject Deformation", exists=False, mandatory=False, argstr="--deformationFromTemplateToSubject %s") ### probability maps - probabilityMapsLeftCaudate = File( desc="Spatial probability map of left caudate", exists=True, mandatory=True, argstr="--probabilityMapsLeftCaudate %s") - probabilityMapsRightCaudate = File( desc="Spatial probability map of right caudate", exists=True, mandatory=True, argstr="--probabilityMapsRightCaudate %s") + probabilityMapsLeftCaudate = File(desc="Spatial probability map of left caudate", exists=True, mandatory=True, argstr="--probabilityMapsLeftCaudate %s") + probabilityMapsRightCaudate = File(desc="Spatial probability map of right caudate", exists=True, mandatory=True, argstr="--probabilityMapsRightCaudate %s") - probabilityMapsLeftHippocampus = File( desc="Spatial probability map of left hippocampus", exists=True, mandatory=True, argstr="--probabilityMapsLeftHippocampus %s") - probabilityMapsRightHippocampus = File( desc="Spatial probability map of right hippocampus", exists=True, mandatory=True, argstr="--probabilityMapsRightHippocampus %s") + probabilityMapsLeftHippocampus = File(desc="Spatial probability map of left hippocampus", exists=True, mandatory=True, argstr="--probabilityMapsLeftHippocampus %s") + probabilityMapsRightHippocampus = File(desc="Spatial probability map of right hippocampus", exists=True, mandatory=True, argstr="--probabilityMapsRightHippocampus %s") - probabilityMapsLeftPutamen = File( desc="Spatial probability map of left putamen", exists=True, mandatory=True, argstr="--probabilityMapsLeftPutamen %s") - probabilityMapsRightPutamen = File( desc="Spatial probability map of right putamen", exists=True, mandatory=True, 
argstr="--probabilityMapsRightPutamen %s") + probabilityMapsLeftPutamen = File(desc="Spatial probability map of left putamen", exists=True, mandatory=True, argstr="--probabilityMapsLeftPutamen %s") + probabilityMapsRightPutamen = File(desc="Spatial probability map of right putamen", exists=True, mandatory=True, argstr="--probabilityMapsRightPutamen %s") - probabilityMapsLeftThalamus = File( desc="Spatial probability map of left thalamus", exists=True, mandatory=True, argstr="--probabilityMapsLeftThalamus %s") - probabilityMapsRightThalamus = File( desc="Spatial probability map of right thalamus", exists=True, mandatory=True, argstr="--probabilityMapsRightThalamus %s") + probabilityMapsLeftThalamus = File(desc="Spatial probability map of left thalamus", exists=True, mandatory=True, argstr="--probabilityMapsLeftThalamus %s") + probabilityMapsRightThalamus = File(desc="Spatial probability map of right thalamus", exists=True, mandatory=True, argstr="--probabilityMapsRightThalamus %s") - xmlFilename = File( desc = "Net configuration xml file", exists = False, mandatory = False, argstr="--xmlFilename %s") + xmlFilename = File(desc="Net configuration xml file", exists=False, mandatory=False, argstr="--xmlFilename %s") - outputBinaryLeftCaudate = File( desc = "Output binary file of left caudate", exists = False, mandatory = True, argstr="--outputBinaryLeftCaudate %s") - outputBinaryRightCaudate = File( desc = "Output binary file of right caudate", exists = False, mandatory = True, argstr="--outputBinaryRightCaudate %s") + outputBinaryLeftCaudate = File(desc="Output binary file of left caudate", exists=False, mandatory=True, argstr="--outputBinaryLeftCaudate %s") + outputBinaryRightCaudate = File(desc="Output binary file of right caudate", exists=False, mandatory=True, argstr="--outputBinaryRightCaudate %s") - outputBinaryLeftHippocampus = File( desc = "Output binary file of left hippocampus", exists = False, mandatory = True, argstr="--outputBinaryLeftHippocampus %s") - outputBinaryRightHippocampus = File( desc = "Output binary file of right hippocampus", exists = False, mandatory = True, argstr="--outputBinaryRightHippocampus %s") + outputBinaryLeftHippocampus = File(desc="Output binary file of left hippocampus", exists=False, mandatory=True, argstr="--outputBinaryLeftHippocampus %s") + outputBinaryRightHippocampus = File(desc="Output binary file of right hippocampus", exists=False, mandatory=True, argstr="--outputBinaryRightHippocampus %s") - outputBinaryLeftPutamen = File( desc = "Output binary file of left putamen", exists = False, mandatory = True, argstr="--outputBinaryLeftPutamen %s") - outputBinaryRightPutamen = File( desc = "Output binary file of right putamen", exists = False, mandatory = True, argstr="--outputBinaryRightPutamen %s") + outputBinaryLeftPutamen = File(desc="Output binary file of left putamen", exists=False, mandatory=True, argstr="--outputBinaryLeftPutamen %s") + outputBinaryRightPutamen = File(desc="Output binary file of right putamen", exists=False, mandatory=True, argstr="--outputBinaryRightPutamen %s") + + outputBinaryLeftThalamus = File(desc="Output binary file of left thalamus", exists=False, mandatory=True, argstr="--outputBinaryLeftThalamus %s") + outputBinaryRightThalamus = File(desc="Output binary file of right thalamus", exists=False, mandatory=True, argstr="--outputBinaryRightThalamus %s") - outputBinaryLeftThalamus = File( desc = "Output binary file of left thalamus", exists = False, mandatory = True, argstr="--outputBinaryLeftThalamus %s") - 
outputBinaryRightThalamus = File( desc = "Output binary file of right thalamus", exists = False, mandatory = True, argstr="--outputBinaryRightThalamus %s") class RF8BRAINSCutWrapperCLOutputSpec(TraitedSpec): - xmlFilename = File( desc = "Net configuration xml file", exists = True, mandatory = True) + xmlFilename = File(desc="Net configuration xml file", exists=True, mandatory=True) + + outputBinaryLeftCaudate = File(desc="Output binary file of left caudate", exists=True, mandatory=True) + outputBinaryRightCaudate = File(desc="Output binary file of right caudate", exists=True, mandatory=True) - outputBinaryLeftCaudate = File( desc = "Output binary file of left caudate", exists = True, mandatory = True) - outputBinaryRightCaudate = File( desc = "Output binary file of right caudate", exists = True, mandatory = True) + outputBinaryLeftHippocampus = File(desc="Output binary file of left hippocampus", exists=True, mandatory=True) + outputBinaryRightHippocampus = File(desc="Output binary file of right hippocampus", exists=True, mandatory=True) - outputBinaryLeftHippocampus = File( desc = "Output binary file of left hippocampus", exists = True, mandatory = True) - outputBinaryRightHippocampus = File( desc = "Output binary file of right hippocampus", exists = True, mandatory = True) + outputBinaryLeftPutamen = File(desc="Output binary file of left putamen", exists=True, mandatory=True) + outputBinaryRightPutamen = File(desc="Output binary file of right putamen", exists=True, mandatory=True) - outputBinaryLeftPutamen = File( desc = "Output binary file of left putamen", exists = True, mandatory = True) - outputBinaryRightPutamen = File( desc = "Output binary file of right putamen", exists = True, mandatory = True) + outputBinaryLeftThalamus = File(desc="Output binary file of left thalamus", exists=True, mandatory=True) + outputBinaryRightThalamus = File(desc="Output binary file of right thalamus", exists=True, mandatory=True) - outputBinaryLeftThalamus = File( desc = "Output binary file of left thalamus", exists = True, mandatory = True) - outputBinaryRightThalamus = File( desc = "Output binary file of right thalamus", exists = True, mandatory = True) class RF8BRAINSCutWrapper(CommandLine): """ @@ -100,6 +104,6 @@ def _outputs_from_inputs(self, outputs): outputs[name] = os.path.abspath(coresponding_input) return outputs -#if __name__ == '__main__': +# if __name__ == '__main__': # RF8Test = RF8BRAINSCutWrapper(sys.argv) # RF8Test.run() diff --git a/AutoWorkup/SessionDB.py b/AutoWorkup/SessionDB.py index 64ea5177..875553bc 100644 --- a/AutoWorkup/SessionDB.py +++ b/AutoWorkup/SessionDB.py @@ -2,20 +2,21 @@ import sqlite3 as lite import csv + class SessionDB(): - def __init__(self, defaultDBName='TempFileForDB.db',subject_list=[]): + def __init__(self, defaultDBName='TempFileForDB.db', subject_list=[]): self.MasterTableName = "MasterDB" self.dbName = defaultDBName self._local_openDB() - subject_filter="( " + subject_filter = "( " for curr_subject in subject_list: - subject_filter+= "'"+curr_subject+"'," - subject_filter=subject_filter.rstrip(',') # Remove last , - subject_filter+=" )" + subject_filter += "'" + curr_subject + "'," + subject_filter = subject_filter.rstrip(',') # Remove last , + subject_filter += " )" self.MasterQueryFilter = "SELECT * FROM {_tablename} WHERE subj IN {_subjid}".format( - _tablename=self.MasterTableName, - _subjid=subject_filter) + _tablename=self.MasterTableName, + _subjid=subject_filter) def _local_openDB(self): self.connection = lite.connect(self.dbName) @@ -26,8 +27,8 
@@ def _local_fillDB_AndClose(self, sqlCommandList): for sqlCommand in sqlCommandList: self.cursor.execute(sqlCommand) self.connection.commit() - #self.cursor.close() - #self.connection.close() + # self.cursor.close() + # self.connection.close() print "Finished filling SQLite database SessionDB.py" def MakeNewDB(self, subject_data_file, mountPrefix): @@ -38,12 +39,12 @@ def MakeNewDB(self, subject_data_file, mountPrefix): os.remove(self.dbName) self._local_openDB() - dbColTypes = "project TEXT, subj TEXT, session TEXT, type TEXT, Qpos INT, filename TEXT" - self.cursor.execute("CREATE TABLE {tablename}({coltypes});".format(tablename=self.MasterTableName,coltypes=dbColTypes)) + dbColTypes = "project TEXT, subj TEXT, session TEXT, type TEXT, Qpos INT, filename TEXT" + self.cursor.execute("CREATE TABLE {tablename}({coltypes});".format(tablename=self.MasterTableName, coltypes=dbColTypes)) self.connection.commit() sqlCommandList = list() print "Building Subject returnList: " + subject_data_file - subjData=csv.reader(open(subject_data_file,'rb'), delimiter=',', quotechar='"') + subjData = csv.reader(open(subject_data_file, 'rb'), delimiter=',', quotechar='"') for row in subjData: if len(row) < 1: # continue if it is an empty row continue @@ -54,24 +55,24 @@ def MakeNewDB(self, subject_data_file, mountPrefix): if row[0] == 'project': # continue if header line continue - currDict=dict() - validEntry=True + currDict = dict() + validEntry = True if len(row) == 4: currDict = {'project': row[0], 'subj': row[1], 'session': row[2]} - rawDict=eval(row[3]) + rawDict = eval(row[3]) for imageType in rawDict.keys(): currDict['type'] = imageType - fullPaths=[ mountPrefix+i for i in rawDict[imageType] ] + fullPaths = [mountPrefix + i for i in rawDict[imageType]] if len(fullPaths) < 1: print("Invalid Entry! {0}".format(currDict)) - validEntry=False + validEntry = False for i in range(len(fullPaths)): imagePath = fullPaths[i] if not os.path.exists(imagePath): print("Missing File: {0}".format(imagePath)) - validEntry=False + validEntry = False if validEntry == True: currDict['Qpos'] = str(i) currDict['filename'] = imagePath
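Each valid CSV row above becomes one MasterDB record per image file, keyed by (project, subj, session, type, Qpos). To make that concrete, a hypothetical driver that uses only methods defined in this class (the database name, CSV path, and subject ID are placeholders):

from SessionDB import SessionDB

db = SessionDB('TempFileForDB.db', subject_list=['0001'])  # '0001' is a made-up subject ID
db.MakeNewDB('subject_data.csv', mountPrefix='')  # placeholder CSV path
for session in db.getSessionsFromSubject('0001'):
    print db.getFirstT1(session)  # the Qpos 0 'T1-30' file for each session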
{0}".format(currDict)) - validEntry=False + validEntry = False for i in range(len(fullPaths)): imagePath = fullPaths[i] if not os.path.exists(imagePath): print("Missing File: {0}".format(imagePath)) - validEntry=False + validEntry = False if validEntry == True: currDict['Qpos'] = str(i) currDict['filename'] = imagePath @@ -92,12 +93,12 @@ def makeSQLiteCommand(self, imageDict): col_names = ",".join(keys) values = ', '.join(map(lambda x: "'" + x + "'", vals)) sqlCommand = "INSERT INTO {_tablename} ({_col_names}) VALUES ({_values});".format( - _tablename=self.MasterTableName, - _col_names=col_names, _values=values) + _tablename=self.MasterTableName, + _col_names=col_names, _values=values) return sqlCommand def getInfoFromDB(self, sqlCommand): - #print("getInfoFromDB({0})".format(sqlCommand)) + # print("getInfoFromDB({0})".format(sqlCommand)) self.cursor.execute(sqlCommand) dbInfo = self.cursor.fetchall() return dbInfo @@ -111,13 +112,13 @@ def getFirstScan(self, sessionid, scantype): return filename def getFirstT1(self, sessionid): - scantype='T1-30' + scantype = 'T1-30' sqlCommand = "SELECT filename FROM ({_master_query}) WHERE session='{_sessionid}' AND type='{_scantype}' AND Qpos='0';".format( - _master_query=self.MasterQueryFilter, - _sessionid=sessionid, _scantype=scantype) + _master_query=self.MasterQueryFilter, + _sessionid=sessionid, _scantype=scantype) val = self.getInfoFromDB(sqlCommand) - #print "HACK: ",sqlCommand - #print "HACK: ", val + # print "HACK: ",sqlCommand + # print "HACK: ", val filename = str(val[0][0]) return filename @@ -133,13 +134,13 @@ def getFilenamesByScantype(self, sessionid, scantypelist): return returnList def findScanTypeLength(self, sessionid, scantypelist): - countList=self.getFilenamesByScantype(sessionid,scantypelist) + countList = self.getFilenamesByScantype(sessionid, scantypelist) return len(countlist) def getT1sT2s(self, sessionid): sqlCommand = "SELECT filename FROM ({_master_query}) WHERE session='{_sessionid}' ORDER BY type ASC, Qpos ASC;".format( - _master_query=self.MasterQueryFilter, - _sessionid=sessionid) + _master_query=self.MasterQueryFilter, + _sessionid=sessionid) val = self.getInfoFromDB(sqlCommand) returnList = list() for i in val: @@ -163,7 +164,7 @@ def getAllSubjects(self): return returnList def getAllSessions(self): - #print("HACK: This is a temporary until complete re-write") + # print("HACK: This is a temporary until complete re-write") sqlCommand = "SELECT DISTINCT session FROM ({_master_query});".format(_master_query=self.MasterQueryFilter) val = self.getInfoFromDB(sqlCommand) returnList = list() @@ -171,10 +172,10 @@ def getAllSessions(self): returnList.append(str(i[0])) return returnList - def getSessionsFromSubject(self,subj): + def getSessionsFromSubject(self, subj): sqlCommand = "SELECT DISTINCT session FROM ({_master_query}) WHERE subj='{_subjid}';".format( - _master_query=self.MasterQueryFilter, - _subjid=subj) + _master_query=self.MasterQueryFilter, + _subjid=subj) val = self.getInfoFromDB(sqlCommand) returnList = list() for i in val: @@ -189,21 +190,20 @@ def getEverything(self): returnList.append(i) return returnList - def getSubjectsFromProject(self,project): + def getSubjectsFromProject(self, project): sqlCommand = "SELECT DISTINCT subj FROM ({_master_query}) WHERE project='{_projectid}';".format( - _master_query=self.MasterQueryFilter, - _projectid=project) + _master_query=self.MasterQueryFilter, + _projectid=project) val = self.getInfoFromDB(sqlCommand) returnList = list() for i in val: 
returnList.append(str(i[0])) return returnList - - def getSubjFromSession(self,session): + def getSubjFromSession(self, session): sqlCommand = "SELECT DISTINCT subj FROM ({_master_query}) WHERE session='{_sessionid}';".format( - _master_query=self.MasterQueryFilter, - _sessionid=session) + _master_query=self.MasterQueryFilter, + _sessionid=session) val = self.getInfoFromDB(sqlCommand) returnList = list() for i in val: @@ -213,10 +213,10 @@ def getSubjFromSession(self,session): sys.exit(-1) return returnList[0] - def getProjFromSession(self,session): + def getProjFromSession(self, session): sqlCommand = "SELECT DISTINCT project FROM ({_master_query}) WHERE session='{_sessionid}';".format( - _master_query=self.MasterQueryFilter, - _sessionid=session) + _master_query=self.MasterQueryFilter, + _sessionid=session) val = self.getInfoFromDB(sqlCommand) returnList = list() for i in val: diff --git a/AutoWorkup/ShortWorkupT1T2.py b/AutoWorkup/ShortWorkupT1T2.py index 0f683527..c77e9028 100644 --- a/AutoWorkup/ShortWorkupT1T2.py +++ b/AutoWorkup/ShortWorkupT1T2.py @@ -17,9 +17,9 @@ import string import argparse #"""Import necessary modules from nipype.""" -#from nipype.utils.config import config -#config.set('logging', 'log_to_file', 'false') -#config.set_log_dir(os.getcwd()) +# from nipype.utils.config import config +# config.set('logging', 'log_to_file', 'false') +# config.set_log_dir(os.getcwd()) #--config.set('logging', 'workflow_level', 'DEBUG') #--config.set('logging', 'interface_level', 'DEBUG') #--config.set('execution','remove_unnecessary_outputs','false') @@ -32,7 +32,7 @@ from nipype.interfaces.freesurfer import ReconAll from nipype.utils.misc import package_check -#package_check('nipype', '5.4', 'tutorial1') ## HACK: Check nipype version +# package_check('nipype', '5.4', 'tutorial1') ## HACK: Check nipype version package_check('numpy', '1.3', 'tutorial1') package_check('scipy', '0.7', 'tutorial1') package_check('networkx', '1.0', 'tutorial1') @@ -43,7 +43,9 @@ from WorkupT1T2AtlasNode import MakeAtlasNode from PipeLineFunctionHelpers import getListIndex -#HACK: [('buildTemplateIteration2', 'SUBJECT_TEMPLATES/0249/buildTemplateIteration2')] +# HACK: [('buildTemplateIteration2', 'SUBJECT_TEMPLATES/0249/buildTemplateIteration2')] + + def GenerateSubjectOutputPattern(subjectid): """ This function generates output path substitutions for workflows and nodes that conform to a common standard. 
HACK: [('ANTSTemplate/Iteration02_Reshaped.nii.gz', 'SUBJECT_TEMPLATES/0668/T1_RESHAPED.nii.gz'), @@ -63,100 +65,105 @@ def GenerateSubjectOutputPattern(subjectid): pe.sub(subs,test) """ - patternList=[] + patternList = [] - find_pat=os.path.join('ANTSTemplate','Iteration02_Reshaped.nii.gz') - replace_pat=os.path.join('SUBJECT_TEMPLATES',subjectid,r'AVG_T1.nii.gz') - patternList.append( (find_pat,replace_pat) ) + find_pat = os.path.join('ANTSTemplate', 'Iteration02_Reshaped.nii.gz') + replace_pat = os.path.join('SUBJECT_TEMPLATES', subjectid, r'AVG_T1.nii.gz') + patternList.append((find_pat, replace_pat)) - find_pat=os.path.join('ANTSTemplate',r'_ReshapeAveragePassiveImageWithShapeUpdate[0-9]*/AVG_[A-Z0-9]*WARP_(?P<structure>AVG_[A-Z0-9]*.nii.gz)') - replace_pat=os.path.join('SUBJECT_TEMPLATES',subjectid,r'\g<structure>') - patternList.append( (find_pat,replace_pat) ) + find_pat = os.path.join('ANTSTemplate', r'_ReshapeAveragePassiveImageWithShapeUpdate[0-9]*/AVG_[A-Z0-9]*WARP_(?P<structure>AVG_[A-Z0-9]*.nii.gz)') + replace_pat = os.path.join('SUBJECT_TEMPLATES', subjectid, r'\g<structure>') + patternList.append((find_pat, replace_pat)) - find_pat=os.path.join('ANTSTemplate',r'CLIPPED_AVG_[A-Z]*WARP_(?P<structure>AVG_[A-Z]*.nii.gz)') - replace_pat=os.path.join('SUBJECT_TEMPLATES',subjectid,r'\g<structure>') - patternList.append( (find_pat,replace_pat) ) + find_pat = os.path.join('ANTSTemplate', r'CLIPPED_AVG_[A-Z]*WARP_(?P<structure>AVG_[A-Z]*.nii.gz)') + replace_pat = os.path.join('SUBJECT_TEMPLATES', subjectid, r'\g<structure>') + patternList.append((find_pat, replace_pat)) print "HACK: ", patternList return patternList -def GenerateOutputPattern(projectid, subjectid, sessionid,DefaultNodeName): + +def GenerateOutputPattern(projectid, subjectid, sessionid, DefaultNodeName): """ This function generates output path substitutions for workflows and nodes that conform to a common standard. """ - patternList=[] - find_pat=os.path.join(DefaultNodeName) - replace_pat=os.path.join(projectid,subjectid,sessionid,DefaultNodeName) - patternList.append( (find_pat,replace_pat) ) + patternList = [] + find_pat = os.path.join(DefaultNodeName) + replace_pat = os.path.join(projectid, subjectid, sessionid, DefaultNodeName) + patternList.append((find_pat, replace_pat)) print "HACK: ", patternList return patternList + def GenerateAccumulatorImagesOutputPattern(projectid, subjectid, sessionid): """ This function generates output path substitutions for workflows and nodes that conform to a common standard. 
""" - patternList=[] - find_pat="POSTERIOR_" - replace_pat=os.path.join(projectid,subjectid,sessionid)+"/POSTERIOR_" - patternList.append( (find_pat,replace_pat) ) + patternList = [] + find_pat = "POSTERIOR_" + replace_pat = os.path.join(projectid, subjectid, sessionid) + "/POSTERIOR_" + patternList.append((find_pat, replace_pat)) print "HACK: ", patternList return patternList ## This takes several lists and merges them, but it also removes all empty values from the lists -def MergeByExtendListElements(t2_averageList,pd_averageList,fl_averageList,outputLabels_averageList,ListOfPosteriorImagesDictionary): - for t2_index in range(0,len(t2_averageList)): + + +def MergeByExtendListElements(t2_averageList, pd_averageList, fl_averageList, outputLabels_averageList, ListOfPosteriorImagesDictionary): + for t2_index in range(0, len(t2_averageList)): if t2_averageList[t2_index] is not None: - ListOfPosteriorImagesDictionary[t2_index]['T2']=t2_averageList[t2_index] + ListOfPosteriorImagesDictionary[t2_index]['T2'] = t2_averageList[t2_index] if pd_averageList[t2_index] is not None: - ListOfPosteriorImagesDictionary[t2_index]['PD']=pd_averageList[t2_index] + ListOfPosteriorImagesDictionary[t2_index]['PD'] = pd_averageList[t2_index] if fl_averageList[t2_index] is not None: - ListOfPosteriorImagesDictionary[t2_index]['FL']=fl_averageList[t2_index] + ListOfPosteriorImagesDictionary[t2_index]['FL'] = fl_averageList[t2_index] if outputLabels_averageList[t2_index] is not None: - ListOfPosteriorImagesDictionary[t2_index]['BRAINMASK']=outputLabels_averageList[t2_index] + ListOfPosteriorImagesDictionary[t2_index]['BRAINMASK'] = outputLabels_averageList[t2_index] return ListOfPosteriorImagesDictionary -def MakeNewAtlasTemplate(t1_image,deformed_list, - AtlasTemplate,outDefinition): + +def MakeNewAtlasTemplate(t1_image, deformed_list, + AtlasTemplate, outDefinition): import os import sys import SimpleITK as sitk - patternDict= { - 'AVG_AIRWARP_AVG_AIR.nii.gz':'@ATLAS_DIRECTORY@/EXTENDED_AIR.nii.gz', - 'AVG_BGMWARP_AVG_BGM.nii.gz':'@ATLAS_DIRECTORY@/EXTENDED_BASALTISSUE.nii.gz', - 'AVG_CRBLGMWARP_AVG_CRBLGM.nii.gz':'@ATLAS_DIRECTORY@/EXTENDED_CRBLGM.nii.gz', - 'AVG_CRBLWMWARP_AVG_CRBLWM.nii.gz':'@ATLAS_DIRECTORY@/EXTENDED_CRBLWM.nii.gz', + patternDict = { + 'AVG_AIRWARP_AVG_AIR.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_AIR.nii.gz', + 'AVG_BGMWARP_AVG_BGM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_BASALTISSUE.nii.gz', + 'AVG_CRBLGMWARP_AVG_CRBLGM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_CRBLGM.nii.gz', + 'AVG_CRBLWMWARP_AVG_CRBLWM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_CRBLWM.nii.gz', 'AVG_CSFWARP_AVG_CSF.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_CSF.nii.gz', - 'AVG_NOTCSFWARP_AVG_NOTCSF.nii.gz' :'@ATLAS_DIRECTORY@/EXTENDED_NOTCSF.nii.gz', + 'AVG_NOTCSFWARP_AVG_NOTCSF.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_NOTCSF.nii.gz', 'AVG_NOTGMWARP_AVG_NOTGM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_NOTGM.nii.gz', 'AVG_NOTVBWARP_AVG_NOTVB.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_NOTVB.nii.gz', 'AVG_NOTWMWARP_AVG_NOTWM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_NOTWM.nii.gz', 'AVG_SURFGMWARP_AVG_SURFGM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_SURFGM.nii.gz', 'AVG_VBWARP_AVG_VB.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_VB.nii.gz', - 'AVG_WMWARP_AVG_WM.nii.gz':'@ATLAS_DIRECTORY@/EXTENDED_WM.nii.gz', + 'AVG_WMWARP_AVG_WM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_WM.nii.gz', 'AVG_ACCUMBENWARP_AVG_ACCUMBEN.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_ACCUMBEN.nii.gz', 'AVG_CAUDATEWARP_AVG_CAUDATE.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_CAUDATE.nii.gz', 'AVG_PUTAMENWARP_AVG_PUTAMEN.nii.gz': 
'@ATLAS_DIRECTORY@/EXTENDED_PUTAMEN.nii.gz', 'AVG_GLOBUSWARP_AVG_GLOBUS.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_GLOBUS.nii.gz', 'AVG_THALAMUSWARP_AVG_THALAMUS.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_THALAMUS.nii.gz', 'AVG_HIPPOCAMPUSWARP_AVG_HIPPOCAMPUS.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_HIPPOCAMPUS.nii.gz', - 'AVG_T2WARP_AVG_T2.nii.gz':'@ATLAS_DIRECTORY@/template_t2.nii.gz', - 'AVG_BRAINMASKWARP_AVG_BRAINMASK.nii.gz':'@ATLAS_DIRECTORY@/template_brain.nii.gz', - 'T1_RESHAPED.nii.gz':'@ATLAS_DIRECTORY@/template_t1.nii.gz' - } - templateFile = open(AtlasTemplate,'r') + 'AVG_T2WARP_AVG_T2.nii.gz': '@ATLAS_DIRECTORY@/template_t2.nii.gz', + 'AVG_BRAINMASKWARP_AVG_BRAINMASK.nii.gz': '@ATLAS_DIRECTORY@/template_brain.nii.gz', + 'T1_RESHAPED.nii.gz': '@ATLAS_DIRECTORY@/template_t1.nii.gz' + } + templateFile = open(AtlasTemplate, 'r') content = templateFile.read() # read entire file into memory templateFile.close() ## Now clean up the posteriors based on anatomical knowledge. ## sometimes the posteriors are not relevant for priors ## due to anomalies around the edges. - load_images_list=dict() + load_images_list = dict() for full_pathname in deformed_list: - base_name=os.path.basename(full_pathname) + base_name = os.path.basename(full_pathname) if base_name in patternDict.keys(): - load_images_list[base_name]=sitk.ReadImage(full_pathname) + load_images_list[base_name] = sitk.ReadImage(full_pathname) ## Make binary dilated mask - binmask=sitk.BinaryThreshold(load_images_list['AVG_BRAINMASKWARP_AVG_BRAINMASK.nii.gz'],1,1000000) - dilated5=sitk.DilateObjectMorphology(binmask,5) - dilated5=sitk.Cast(dilated5,sitk.sitkFloat32) # Convert to Float32 for multiply + binmask = sitk.BinaryThreshold(load_images_list['AVG_BRAINMASKWARP_AVG_BRAINMASK.nii.gz'], 1, 1000000) + dilated5 = sitk.DilateObjectMorphology(binmask, 5) + dilated5 = sitk.Cast(dilated5, sitk.sitkFloat32) # Convert to Float32 for multiply ## Now clip the interior brain mask with dilated5 interiorPriors = [ 'AVG_BGMWARP_AVG_BGM.nii.gz', @@ -172,43 +179,44 @@ def MakeNewAtlasTemplate(t1_image,deformed_list, 'AVG_GLOBUSWARP_AVG_GLOBUS.nii.gz', 'AVG_THALAMUSWARP_AVG_THALAMUS.nii.gz', 'AVG_HIPPOCAMPUSWARP_AVG_HIPPOCAMPUS.nii.gz', - ] - clean_deformed_list=deformed_list - for index in range(0,len(deformed_list)): - full_pathname=deformed_list[index] - base_name=os.path.basename(full_pathname) + ] + clean_deformed_list = deformed_list + for index in range(0, len(deformed_list)): + full_pathname = deformed_list[index] + base_name = os.path.basename(full_pathname) if base_name == 'AVG_BRAINMASKWARP_AVG_BRAINMASK.nii.gz': ### Make Brain Mask Binary - clipped_name='CLIPPED_'+base_name - patternDict[clipped_name]=patternDict[base_name] - sitk.WriteImage(binmask,clipped_name) - clean_deformed_list[index]=os.path.realpath(clipped_name) + clipped_name = 'CLIPPED_' + base_name + patternDict[clipped_name] = patternDict[base_name] + sitk.WriteImage(binmask, clipped_name) + clean_deformed_list[index] = os.path.realpath(clipped_name) if base_name in interiorPriors: ### Make clipped posteriors for brain regions - curr=sitk.Cast(sitk.ReadImage(full_pathname),sitk.sitkFloat32) - curr=curr*dilated5 - clipped_name='CLIPPED_'+base_name - patternDict[clipped_name]=patternDict[base_name] - sitk.WriteImage(curr,clipped_name) - clean_deformed_list[index]=os.path.realpath(clipped_name) + curr = sitk.Cast(sitk.ReadImage(full_pathname), sitk.sitkFloat32) + curr = curr * dilated5 + clipped_name = 'CLIPPED_' + base_name + patternDict[clipped_name] = patternDict[base_name] + sitk.WriteImage(curr, clipped_name) + clean_deformed_list[index] = os.path.realpath(clipped_name) print "HACK: ", clean_deformed_list[index] - curr=None - binmask=None - dilated5=None + curr = None + binmask = None + dilated5 = None for full_pathname in clean_deformed_list: - base_name=os.path.basename(full_pathname) + base_name = os.path.basename(full_pathname) if base_name in patternDict.keys(): - content=content.replace(patternDict[base_name],full_pathname) - content=content.replace('@ATLAS_DIRECTORY@/template_t1.nii.gz',t1_image) + content = content.replace(patternDict[base_name], full_pathname) + content = content.replace('@ATLAS_DIRECTORY@/template_t1.nii.gz', t1_image) ## NOTE: HEAD REGION CAN JUST BE T1 image. - content=content.replace('@ATLAS_DIRECTORY@/template_headregion.nii.gz',t1_image) + content = content.replace('@ATLAS_DIRECTORY@/template_headregion.nii.gz', t1_image) ## NOTE: BRAIN REGION CAN JUST BE the label images. - outAtlasFullPath=os.path.realpath(outDefinition) + outAtlasFullPath = os.path.realpath(outDefinition) newFile = open(outAtlasFullPath, 'w') newFile.write(content) # write the file with the text substitution newFile.close() - return outAtlasFullPath,clean_deformed_list + return outAtlasFullPath, clean_deformed_list + def AccumulateLikeTissuePosteriors(posteriorImages): import os
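The clipping loop in MakeNewAtlasTemplate above enforces a simple rule: an interior prior may not extend more than 5 voxels beyond the averaged brain mask. Reduced to a single image, the operation is a dilation followed by a voxel-wise multiply; a sketch using file names this function produces (assumed to sit in the working directory):

import SimpleITK as sitk

binmask = sitk.BinaryThreshold(sitk.ReadImage('AVG_BRAINMASKWARP_AVG_BRAINMASK.nii.gz'), 1, 1000000)
dilated5 = sitk.Cast(sitk.DilateObjectMorphology(binmask, 5), sitk.sitkFloat32)
prior = sitk.Cast(sitk.ReadImage('AVG_HIPPOCAMPUSWARP_AVG_HIPPOCAMPUS.nii.gz'), sitk.sitkFloat32)
sitk.WriteImage(prior * dilated5, 'CLIPPED_AVG_HIPPOCAMPUSWARP_AVG_HIPPOCAMPUS.nii.gz')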
@@ -217,56 +225,56 @@ def AccumulateLikeTissuePosteriors(posteriorImages): ## Now clean up the posteriors based on anatomical knowledge. ## sometimes the posteriors are not relevant for priors ## due to anomalies around the edges. - load_images_list=dict() + load_images_list = dict() for full_pathname in posteriorImages.values(): - base_name=os.path.basename(full_pathname) - load_images_list[base_name]=sitk.ReadImage(full_pathname) - GM_ACCUM=[ - 'POSTERIOR_ACCUMBEN.nii.gz', - 'POSTERIOR_CAUDATE.nii.gz', - 'POSTERIOR_CRBLGM.nii.gz', - 'POSTERIOR_HIPPOCAMPUS.nii.gz', - 'POSTERIOR_PUTAMEN.nii.gz', - 'POSTERIOR_THALAMUS.nii.gz', - 'POSTERIOR_SURFGM.nii.gz', - ] - WM_ACCUM=[ - 'POSTERIOR_CRBLWM.nii.gz', - 'POSTERIOR_WM.nii.gz' - ] - CSF_ACCUM=[ - 'POSTERIOR_CSF.nii.gz', - ] - VB_ACCUM=[ - 'POSTERIOR_VB.nii.gz', - ] - GLOBUS_ACCUM=[ - 'POSTERIOR_GLOBUS.nii.gz', - ] - BACKGROUND_ACCUM=[ - 'POSTERIOR_AIR.nii.gz', - 'POSTERIOR_NOTCSF.nii.gz', - 'POSTERIOR_NOTGM.nii.gz', - 'POSTERIOR_NOTVB.nii.gz', - 'POSTERIOR_NOTWM.nii.gz', - ] + base_name = os.path.basename(full_pathname) + load_images_list[base_name] = sitk.ReadImage(full_pathname) + GM_ACCUM = [ + 'POSTERIOR_ACCUMBEN.nii.gz', + 'POSTERIOR_CAUDATE.nii.gz', + 'POSTERIOR_CRBLGM.nii.gz', + 'POSTERIOR_HIPPOCAMPUS.nii.gz', + 'POSTERIOR_PUTAMEN.nii.gz', + 'POSTERIOR_THALAMUS.nii.gz', + 'POSTERIOR_SURFGM.nii.gz', + ] + WM_ACCUM = [ + 'POSTERIOR_CRBLWM.nii.gz', + 'POSTERIOR_WM.nii.gz' + ] + CSF_ACCUM = [ + 'POSTERIOR_CSF.nii.gz', + ] + VB_ACCUM = [ + 'POSTERIOR_VB.nii.gz', + ] + GLOBUS_ACCUM = [ + 'POSTERIOR_GLOBUS.nii.gz', + ] + BACKGROUND_ACCUM = [ + 'POSTERIOR_AIR.nii.gz', + 'POSTERIOR_NOTCSF.nii.gz', + 'POSTERIOR_NOTGM.nii.gz', + 'POSTERIOR_NOTVB.nii.gz', + 'POSTERIOR_NOTWM.nii.gz', + ] ## The next 2 items MUST be synchronized - AccumulatePriorsNames=['POSTERIOR_GM_TOTAL.nii.gz','POSTERIOR_WM_TOTAL.nii.gz', - 'POSTERIOR_CSF_TOTAL.nii.gz','POSTERIOR_VB_TOTAL.nii.gz', - 'POSTERIOR_GLOBUS_TOTAL.nii.gz','POSTERIOR_BACKGROUND_TOTAL.nii.gz'] - ForcedOrderingLists=[GM_ACCUM,WM_ACCUM,CSF_ACCUM,VB_ACCUM,GLOBUS_ACCUM,BACKGROUND_ACCUM] - AccumulatePriorsList=list() - for index in range(0,len(ForcedOrderingLists)): - outname=AccumulatePriorsNames[index] - 
inlist=ForcedOrderingLists[index] - accum_image= load_images_list[ inlist[0] ] # copy first image - for curr_image in range(1,len(inlist)): - accum_image=accum_image + load_images_list[ inlist[curr_image] ] - sitk.WriteImage(accum_image,outname) + AccumulatePriorsNames = ['POSTERIOR_GM_TOTAL.nii.gz', 'POSTERIOR_WM_TOTAL.nii.gz', + 'POSTERIOR_CSF_TOTAL.nii.gz', 'POSTERIOR_VB_TOTAL.nii.gz', + 'POSTERIOR_GLOBUS_TOTAL.nii.gz', 'POSTERIOR_BACKGROUND_TOTAL.nii.gz'] + ForcedOrderingLists = [GM_ACCUM, WM_ACCUM, CSF_ACCUM, VB_ACCUM, GLOBUS_ACCUM, BACKGROUND_ACCUM] + AccumulatePriorsList = list() + for index in range(0, len(ForcedOrderingLists)): + outname = AccumulatePriorsNames[index] + inlist = ForcedOrderingLists[index] + accum_image = load_images_list[inlist[0]] # copy first image + for curr_image in range(1, len(inlist)): + accum_image = accum_image + load_images_list[inlist[curr_image]] + sitk.WriteImage(accum_image, outname) AccumulatePriorsList.append(os.path.realpath(outname)) print "HACK \n\n\n\n\n\n\n HACK \n\n\n: {APL}\n".format(APL=AccumulatePriorsList) print ": {APN}\n".format(APN=AccumulatePriorsNames) - return AccumulatePriorsList,AccumulatePriorsNames + return AccumulatePriorsList, AccumulatePriorsNames ########################################################################### ########################################################################### ########################################################################### @@ -279,8 +287,10 @@ def AccumulateLikeTissuePosteriors(posteriorImages): ########################################################################### ########################################################################### ########################################################################### -def ShortWorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, ExperimentBaseDirectoryResults, ExperimentDatabase, atlas_fname_wpath, BCD_model_path, - InterpolationMode="Linear", Mode=10,DwiList=[],WORKFLOW_COMPONENTS=[],CLUSTER_QUEUE='',CLUSTER_QUEUE_LONG=''): + + +def ShortWorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentBaseDirectoryResults, ExperimentDatabase, atlas_fname_wpath, BCD_model_path, + InterpolationMode="Linear", Mode=10, DwiList=[], WORKFLOW_COMPONENTS=[], CLUSTER_QUEUE='', CLUSTER_QUEUE_LONG=''): """ Run autoworkup on all subjects data defined in the ExperimentDatabase @@ -294,133 +304,131 @@ def ShortWorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, Experime ########### PIPELINE INITIALIZATION ############# baw200 = pe.Workflow(name="BAW_20120813") baw200.config['execution'] = { - 'plugin':'Linear', - #'stop_on_first_crash':'true', - #'stop_on_first_rerun': 'true', - 'stop_on_first_crash':'false', - 'stop_on_first_rerun': 'false', ## This stops at first attempt to rerun, before running, and before deleting previous results. - 'hash_method': 'timestamp', - 'single_thread_matlab':'true', ## Multi-core 2011a multi-core for matrix multiplication. - 'remove_unnecessary_outputs':'false', - 'use_relative_paths':'false', ## relative paths should be on, require hash update when changed. - 'remove_node_directories':'false', ## Experimental - 'local_hash_check':'true', ## - 'job_finished_timeout':15 ## - } + 'plugin': 'Linear', + #'stop_on_first_crash':'true', + #'stop_on_first_rerun': 'true', + 'stop_on_first_crash': 'false', + 'stop_on_first_rerun': 'false', # This stops at first attempt to rerun, before running, and before deleting previous results. 
+ 'hash_method': 'timestamp', + 'single_thread_matlab': 'true', # Multi-core 2011a multi-core for matrix multiplication. + 'remove_unnecessary_outputs': 'false', + 'use_relative_paths': 'false', # relative paths should be on, require hash update when changed. + 'remove_node_directories': 'false', # Experimental + 'local_hash_check': 'true', + 'job_finished_timeout': 15 + } baw200.config['logging'] = { - 'workflow_level':'DEBUG', - 'filemanip_level':'DEBUG', - 'interface_level':'DEBUG', - 'log_directory': ExperimentBaseDirectoryCache + 'workflow_level': 'DEBUG', + 'filemanip_level': 'DEBUG', + 'interface_level': 'DEBUG', + 'log_directory': ExperimentBaseDirectoryCache } baw200.base_dir = ExperimentBaseDirectoryCache - import WorkupT1T2Single - MergeT1s=dict() - MergeT2s=dict() - MergePDs=dict() - MergeFLs=dict() - MergeOutputLabels=dict() - MergePosteriors=dict() - BAtlas=dict() + MergeT1s = dict() + MergeT2s = dict() + MergePDs = dict() + MergeFLs = dict() + MergeOutputLabels = dict() + MergePosteriors = dict() + BAtlas = dict() if True: print("===================== SUBJECT: {0} ===========================".format(subjectid)) - PHASE_1_oneSubjWorkflow=dict() - PHASE_1_subjInfoNode=dict() + PHASE_1_oneSubjWorkflow = dict() + PHASE_1_subjInfoNode = dict() allSessions = ExperimentDatabase.getSessionsFromSubject(subjectid) - print("Running sessions: {ses} for subject {sub}".format(ses=allSessions,sub=subjectid)) - BAtlas[subjectid] = MakeAtlasNode(atlas_fname_wpath,"BAtlas_"+str(subjectid)) ## Call function to create node + print("Running sessions: {ses} for subject {sub}".format(ses=allSessions, sub=subjectid)) + BAtlas[subjectid] = MakeAtlasNode(atlas_fname_wpath, "BAtlas_" + str(subjectid)) # Call function to create node - for sessionid in allSessions: - global_AllT1s=ExperimentDatabase.getFilenamesByScantype(sessionid,['T1-30','T1-15']) - global_AllT2s=ExperimentDatabase.getFilenamesByScantype(sessionid,['T2-30','T2-15']) - global_AllPDs=ExperimentDatabase.getFilenamesByScantype(sessionid,['PD-30','PD-15']) - global_AllFLs=ExperimentDatabase.getFilenamesByScantype(sessionid,['FL-30','FL-15']) - global_AllOthers=ExperimentDatabase.getFilenamesByScantype(sessionid,['OTHER-30','OTHER-15']) - print("HACK: all T1s: {0} {1}".format(global_AllT1s, len(global_AllT1s) )) - print("HACK: all T2s: {0} {1}".format(global_AllT2s, len(global_AllT2s) )) - print("HACK: all PDs: {0} {1}".format(global_AllPDs, len(global_AllPDs) )) - print("HACK: all FLs: {0} {1}".format(global_AllFLs, len(global_AllFLs) )) - print("HACK: all Others: {0} {1}".format(global_AllOthers, len(global_AllOthers) )) + global_AllT1s = ExperimentDatabase.getFilenamesByScantype(sessionid, ['T1-30', 'T1-15']) + global_AllT2s = ExperimentDatabase.getFilenamesByScantype(sessionid, ['T2-30', 'T2-15']) + global_AllPDs = ExperimentDatabase.getFilenamesByScantype(sessionid, ['PD-30', 'PD-15']) + global_AllFLs = ExperimentDatabase.getFilenamesByScantype(sessionid, ['FL-30', 'FL-15']) + global_AllOthers = ExperimentDatabase.getFilenamesByScantype(sessionid, ['OTHER-30', 'OTHER-15']) + print("HACK: all T1s: {0} {1}".format(global_AllT1s, len(global_AllT1s))) + print("HACK: all T2s: {0} {1}".format(global_AllT2s, len(global_AllT2s))) + print("HACK: all PDs: {0} {1}".format(global_AllPDs, len(global_AllPDs))) + print("HACK: all FLs: {0} {1}".format(global_AllFLs, len(global_AllFLs))) + print("HACK: all Others: {0} {1}".format(global_AllOthers, len(global_AllOthers))) projectid = ExperimentDatabase.getProjFromSession(sessionid) - print("PROJECT: 
{0} SUBJECT: {1} SESSION: {2}".format(projectid,subjectid,sessionid)) + print("PROJECT: {0} SUBJECT: {1} SESSION: {2}".format(projectid, subjectid, sessionid)) PHASE_1_subjInfoNode[sessionid] = pe.Node(interface=IdentityInterface(fields= - ['sessionid','subjectid','projectid', - 'allT1s', - 'allT2s', - 'allPDs', - 'allFLs', - 'allOthers']), - run_without_submitting=True, - name='99_PHASE_1_SubjInfoNode_'+str(subjectid)+"_"+str(sessionid) ) - PHASE_1_subjInfoNode[sessionid].inputs.projectid=projectid - PHASE_1_subjInfoNode[sessionid].inputs.subjectid=subjectid - PHASE_1_subjInfoNode[sessionid].inputs.sessionid=sessionid - PHASE_1_subjInfoNode[sessionid].inputs.allT1s=global_AllT1s - PHASE_1_subjInfoNode[sessionid].inputs.allT2s=global_AllT2s - PHASE_1_subjInfoNode[sessionid].inputs.allPDs=global_AllPDs - PHASE_1_subjInfoNode[sessionid].inputs.allFLs=global_AllFLs - PHASE_1_subjInfoNode[sessionid].inputs.allOthers=global_AllOthers - - PROCESSING_PHASE='PHASE_1' - PHASE_1_WORKFLOW_COMPONENTS = ['BASIC','TISSUE_CLASSIFY'] - PHASE_1_oneSubjWorkflow[sessionid]=WorkupT1T2Single.MakeOneSubWorkFlow( - projectid, subjectid, sessionid,PROCESSING_PHASE, - PHASE_1_WORKFLOW_COMPONENTS, - BCD_model_path, InterpolationMode, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG) - baw200.connect(PHASE_1_subjInfoNode[sessionid],'projectid',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.projectid') - baw200.connect(PHASE_1_subjInfoNode[sessionid],'subjectid',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.subjectid') - baw200.connect(PHASE_1_subjInfoNode[sessionid],'sessionid',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.sessionid') - baw200.connect(PHASE_1_subjInfoNode[sessionid],'allT1s',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.allT1s') - baw200.connect(PHASE_1_subjInfoNode[sessionid],'allT2s',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.allT2s') - baw200.connect(PHASE_1_subjInfoNode[sessionid],'allPDs',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.allPDs') - baw200.connect(PHASE_1_subjInfoNode[sessionid],'allFLs',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.allFLs') - baw200.connect(PHASE_1_subjInfoNode[sessionid],'allOthers',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.allOthers') - - baw200.connect(BAtlas[subjectid],'template_landmarks_31_fcsv', PHASE_1_oneSubjWorkflow[sessionid],'inputspec.template_landmarks_31_fcsv') - baw200.connect(BAtlas[subjectid],'template_landmark_weights_31_csv', PHASE_1_oneSubjWorkflow[sessionid],'inputspec.template_landmark_weights_31_csv') - baw200.connect(BAtlas[subjectid],'template_t1', PHASE_1_oneSubjWorkflow[sessionid],'inputspec.template_t1') - baw200.connect(BAtlas[subjectid],'ExtendedAtlasDefinition_xml', PHASE_1_oneSubjWorkflow[sessionid],'inputspec.atlasDefinition') - - BASIC_DataSink=dict() - TC_DataSink=dict() - AccumulateLikeTissuePosteriorsNode=dict() - AddLikeTissueSink=dict() + ['sessionid', 'subjectid', 'projectid', + 'allT1s', + 'allT2s', + 'allPDs', + 'allFLs', + 'allOthers']), + run_without_submitting=True, + name='99_PHASE_1_SubjInfoNode_' + str(subjectid) + "_" + str(sessionid)) + PHASE_1_subjInfoNode[sessionid].inputs.projectid = projectid + PHASE_1_subjInfoNode[sessionid].inputs.subjectid = subjectid + PHASE_1_subjInfoNode[sessionid].inputs.sessionid = sessionid + PHASE_1_subjInfoNode[sessionid].inputs.allT1s = global_AllT1s + PHASE_1_subjInfoNode[sessionid].inputs.allT2s = global_AllT2s + PHASE_1_subjInfoNode[sessionid].inputs.allPDs = global_AllPDs + PHASE_1_subjInfoNode[sessionid].inputs.allFLs = global_AllFLs + PHASE_1_subjInfoNode[sessionid].inputs.allOthers = 
global_AllOthers + + PROCESSING_PHASE = 'PHASE_1' + PHASE_1_WORKFLOW_COMPONENTS = ['BASIC', 'TISSUE_CLASSIFY'] + PHASE_1_oneSubjWorkflow[sessionid] = WorkupT1T2Single.MakeOneSubWorkFlow( + projectid, subjectid, sessionid, PROCESSING_PHASE, + PHASE_1_WORKFLOW_COMPONENTS, + BCD_model_path, InterpolationMode, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG) + baw200.connect(PHASE_1_subjInfoNode[sessionid], 'projectid', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.projectid') + baw200.connect(PHASE_1_subjInfoNode[sessionid], 'subjectid', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.subjectid') + baw200.connect(PHASE_1_subjInfoNode[sessionid], 'sessionid', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.sessionid') + baw200.connect(PHASE_1_subjInfoNode[sessionid], 'allT1s', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.allT1s') + baw200.connect(PHASE_1_subjInfoNode[sessionid], 'allT2s', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.allT2s') + baw200.connect(PHASE_1_subjInfoNode[sessionid], 'allPDs', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.allPDs') + baw200.connect(PHASE_1_subjInfoNode[sessionid], 'allFLs', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.allFLs') + baw200.connect(PHASE_1_subjInfoNode[sessionid], 'allOthers', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.allOthers') + + baw200.connect(BAtlas[subjectid], 'template_landmarks_31_fcsv', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.template_landmarks_31_fcsv') + baw200.connect(BAtlas[subjectid], 'template_landmark_weights_31_csv', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.template_landmark_weights_31_csv') + baw200.connect(BAtlas[subjectid], 'template_t1', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.template_t1') + baw200.connect(BAtlas[subjectid], 'ExtendedAtlasDefinition_xml', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.atlasDefinition') + + BASIC_DataSink = dict() + TC_DataSink = dict() + AccumulateLikeTissuePosteriorsNode = dict() + AddLikeTissueSink = dict() if True: ### Now define where the final organized outputs should go. 
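+                # DataSink nodes copy the named outputs into ExperimentBaseDirectoryResults;
+                # regexp_substitutions rewrites the sink sub-folder into a per-session path,
+                # e.g. (illustrative values): 'ACPCAlign' -> '<projectid>/<subjectid>/<sessionid>/ACPCAlign'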
-                BASIC_DataSink[sessionid]=pe.Node(nio.DataSink(),name="BASIC_DS_"+str(subjectid)+"_"+str(sessionid))
-                BASIC_DataSink[sessionid].inputs.base_directory=ExperimentBaseDirectoryResults
-                BASIC_DataSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'ACPCAlign')
+                BASIC_DataSink[sessionid] = pe.Node(nio.DataSink(), name="BASIC_DS_" + str(subjectid) + "_" + str(sessionid))
+                BASIC_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
+                BASIC_DataSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid, 'ACPCAlign')

-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.outputLandmarksInACPCAlignedSpace',BASIC_DataSink[sessionid],'ACPCAlign.@outputLandmarksInACPCAlignedSpace')
-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.BCD_ACPC_T1',BASIC_DataSink[sessionid],'ACPCAlign.@BCD_ACPC_T1')
-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.outputLandmarksInInputSpace',BASIC_DataSink[sessionid],'ACPCAlign.@outputLandmarksInInputSpace')
-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.outputTransform',BASIC_DataSink[sessionid],'ACPCAlign.@outputTransform')
-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.atlasToSubjectTransform',BASIC_DataSink[sessionid],'ACPCAlign.@atlasToSubjectTransform')
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.outputLandmarksInACPCAlignedSpace', BASIC_DataSink[sessionid], 'ACPCAlign.@outputLandmarksInACPCAlignedSpace')
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.BCD_ACPC_T1', BASIC_DataSink[sessionid], 'ACPCAlign.@BCD_ACPC_T1')
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.outputLandmarksInInputSpace', BASIC_DataSink[sessionid], 'ACPCAlign.@outputLandmarksInInputSpace')
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.outputTransform', BASIC_DataSink[sessionid], 'ACPCAlign.@outputTransform')
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.atlasToSubjectTransform', BASIC_DataSink[sessionid], 'ACPCAlign.@atlasToSubjectTransform')

                 ### Now define where the final organized outputs should go.
-                TC_DataSink[sessionid]=pe.Node(nio.DataSink(),name="TISSUE_CLASSIFY_DS_"+str(subjectid)+"_"+str(sessionid))
-                TC_DataSink[sessionid].inputs.base_directory=ExperimentBaseDirectoryResults
-                TC_DataSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'TissueClassify')
-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.TissueClassifyOutputDir', TC_DataSink[sessionid],'TissueClassify.@TissueClassifyOutputDir')
+                TC_DataSink[sessionid] = pe.Node(nio.DataSink(), name="TISSUE_CLASSIFY_DS_" + str(subjectid) + "_" + str(sessionid))
+                TC_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
+                TC_DataSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid, 'TissueClassify')
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.TissueClassifyOutputDir', TC_DataSink[sessionid], 'TissueClassify.@TissueClassifyOutputDir')

             ### Now clean up by adding together many of the items PHASE_1_oneSubjWorkflow
-            currentAccumulateLikeTissuePosteriorsName='AccumulateLikeTissuePosteriors_'+str(subjectid)+"_"+str(sessionid)
+            currentAccumulateLikeTissuePosteriorsName = 'AccumulateLikeTissuePosteriors_' + str(subjectid) + "_" + str(sessionid)
             AccumulateLikeTissuePosteriorsNode[sessionid] = pe.Node(interface=Function(function=AccumulateLikeTissuePosteriors,
-                                                                    input_names=['posteriorImages'],
-                                                                    output_names=['AccumulatePriorsList','AccumulatePriorsNames']),
-                                                                    name=currentAccumulateLikeTissuePosteriorsName)
-            baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.posteriorImages',
-                           AccumulateLikeTissuePosteriorsNode[sessionid],'posteriorImages')
+                                                                    input_names=['posteriorImages'],
+                                                                    output_names=['AccumulatePriorsList', 'AccumulatePriorsNames']),
+                                                                    name=currentAccumulateLikeTissuePosteriorsName)
+            baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.posteriorImages',
+                           AccumulateLikeTissuePosteriorsNode[sessionid], 'posteriorImages')

             ### Now define where the final organized outputs should go.
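+            # AccumulateLikeTissuePosteriors sums the per-structure posteriors into one
+            # probability map per tissue class; e.g. POSTERIOR_GM_TOTAL.nii.gz is the
+            # voxel-wise sum of the ACCUMBEN, CAUDATE, CRBLGM, HIPPOCAMPUS, PUTAMEN,
+            # THALAMUS, and SURFGM posteriors.  The sink below stores those *_TOTAL images.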
-            AddLikeTissueSink[sessionid]=pe.Node(nio.DataSink(),name="ACCUMULATED_POSTERIORS_"+str(subjectid)+"_"+str(sessionid))
-            AddLikeTissueSink[sessionid].inputs.base_directory=ExperimentBaseDirectoryResults
-            #AddLikeTissueSink[sessionid].inputs.regexp_substitutions = GenerateAccumulatorImagesOutputPattern(projectid, subjectid, sessionid)
-            AddLikeTissueSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'ACCUMULATED_POSTERIORS')
-            baw200.connect(AccumulateLikeTissuePosteriorsNode[sessionid], 'AccumulatePriorsList', AddLikeTissueSink[sessionid],'ACCUMULATED_POSTERIORS.@AccumulateLikeTissuePosteriorsOutputDir')
+            AddLikeTissueSink[sessionid] = pe.Node(nio.DataSink(), name="ACCUMULATED_POSTERIORS_" + str(subjectid) + "_" + str(sessionid))
+            AddLikeTissueSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
+            # AddLikeTissueSink[sessionid].inputs.regexp_substitutions = GenerateAccumulatorImagesOutputPattern(projectid, subjectid, sessionid)
+            AddLikeTissueSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid, 'ACCUMULATED_POSTERIORS')
+            baw200.connect(AccumulateLikeTissuePosteriorsNode[sessionid], 'AccumulatePriorsList', AddLikeTissueSink[sessionid], 'ACCUMULATED_POSTERIORS.@AccumulateLikeTissuePosteriorsOutputDir')

     return baw200
diff --git a/AutoWorkup/WorkupT1T2.py b/AutoWorkup/WorkupT1T2.py
index a6edaefb..02a7d991 100644
--- a/AutoWorkup/WorkupT1T2.py
+++ b/AutoWorkup/WorkupT1T2.py
@@ -24,7 +24,7 @@ from nipype.interfaces.freesurfer import ReconAll
 from nipype.utils.misc import package_check

-#package_check('nipype', '5.4', 'tutorial1') ## HACK: Check nipype version
+# package_check('nipype', '5.4', 'tutorial1') ## HACK: Check nipype version
 package_check('numpy', '1.3', 'tutorial1')
 package_check('scipy', '0.7', 'tutorial1')
 package_check('networkx', '1.0', 'tutorial1')
@@ -41,7 +41,9 @@
 GLOBAL_DATA_SINK_REWRITE = True

-#HACK:  [('buildTemplateIteration2', 'SUBJECT_TEMPLATES/0249/buildTemplateIteration2')]
+# HACK:  [('buildTemplateIteration2', 'SUBJECT_TEMPLATES/0249/buildTemplateIteration2')]
+
+
 def GenerateSubjectOutputPattern(subjectid):
     """ This function generates output path substitutions for workflows and nodes that conform to a common standard.
     HACK:  [('ANTSTemplate/Iteration02_Reshaped.nii.gz',
             'SUBJECT_TEMPLATES/0668/T1_RESHAPED.nii.gz'),
@@ -63,46 +65,50 @@ def GenerateSubjectOutputPattern(subjectid):
    /nfsscratch/PREDICT/johnsonhj/ExpandedExperiment/20120801.SubjectOrganized_Results/ANTSTemplate/CLIPPED_AVG_CSFWARP_AVG_CSF.nii.gz
    -> /nfsscratch/PREDICT/johnsonhj/ExpandedExperiment/20120801.SubjectOrganized_Results/SUBJECT_TEMPLATES/2013/AVG_CSF.nii.gz
     """
-    patternList=[]
+    patternList = []

-    find_pat=os.path.join('ANTSTemplate','ReshapeAverageImageWithShapeUpdate.nii.gz')
-    replace_pat=os.path.join('SUBJECT_TEMPLATES',subjectid,r'AVG_T1.nii.gz')
-    patternList.append( (find_pat,replace_pat) )
+    find_pat = os.path.join('ANTSTemplate', 'ReshapeAverageImageWithShapeUpdate.nii.gz')
+    replace_pat = os.path.join('SUBJECT_TEMPLATES', subjectid, r'AVG_T1.nii.gz')
+    patternList.append((find_pat, replace_pat))

-#find_pat=os.path.join('ANTSTemplate',r'_ReshapeAveragePassiveImageWithShapeUpdate[0-9]*/AVG_[A-Z0-9]*WARP_(?PAVG_[A-Z0-9]*.nii.gz)')
-    find_pat=os.path.join('ANTSTemplate',r'_ReshapeAveragePassiveImageWithShapeUpdate[0-9]*/AVG_(?P.*.nii.gz)')
-    replace_pat=os.path.join('SUBJECT_TEMPLATES',subjectid,r'AVG_\g')
-    patternList.append( (find_pat,replace_pat) )
+# find_pat=os.path.join('ANTSTemplate',r'_ReshapeAveragePassiveImageWithShapeUpdate[0-9]*/AVG_[A-Z0-9]*WARP_(?PAVG_[A-Z0-9]*.nii.gz)')
+    find_pat = os.path.join('ANTSTemplate', r'_ReshapeAveragePassiveImageWithShapeUpdate[0-9]*/AVG_(?P.*.nii.gz)')
+    replace_pat = os.path.join('SUBJECT_TEMPLATES', subjectid, r'AVG_\g')
+    patternList.append((find_pat, replace_pat))

-    find_pat=os.path.join('ANTSTemplate',r'CLIPPED_AVG_(?P.*.nii.gz)')
-    replace_pat=os.path.join('SUBJECT_TEMPLATES',subjectid,r'AVG_\g')
-    patternList.append( (find_pat,replace_pat) )
+    find_pat = os.path.join('ANTSTemplate', r'CLIPPED_AVG_(?P.*.nii.gz)')
+    replace_pat = os.path.join('SUBJECT_TEMPLATES', subjectid, r'AVG_\g')
+    patternList.append((find_pat, replace_pat))

     print "HACK: ", patternList
     return patternList

-def GenerateOutputPattern(projectid, subjectid, sessionid,DefaultNodeName):
+
+
+def GenerateOutputPattern(projectid, subjectid, sessionid, DefaultNodeName):
     """ This function generates output path substitutions for workflows and nodes that conform to a common standard.
     """
-    patternList=[]
-    find_pat=os.path.join(DefaultNodeName)
-    replace_pat=os.path.join(projectid,subjectid,sessionid,DefaultNodeName)
-    patternList.append( (find_pat,replace_pat) )
+    patternList = []
+    find_pat = os.path.join(DefaultNodeName)
+    replace_pat = os.path.join(projectid, subjectid, sessionid, DefaultNodeName)
+    patternList.append((find_pat, replace_pat))
     print "HACK: ", patternList
     return patternList

+
 def GenerateAccumulatorImagesOutputPattern(projectid, subjectid, sessionid):
     """ This function generates output path substitutions for workflows and nodes that conform to a common standard.
     """
-    patternList=[]
-    find_pat="POSTERIOR_"
-    replace_pat=os.path.join(projectid,subjectid,sessionid)+"/POSTERIOR_"
-    patternList.append( (find_pat,replace_pat) )
+    patternList = []
+    find_pat = "POSTERIOR_"
+    replace_pat = os.path.join(projectid, subjectid, sessionid) + "/POSTERIOR_"
+    patternList.append((find_pat, replace_pat))
     print "HACK: ", patternList
     return patternList

 ## This takes several lists and merges them, but it also removes all empty values from the lists
-def MergeByExtendListElements(t1_averageList,t2_averageList,pd_averageList,fl_averageList,outputLabels_averageList,ListOfPosteriorImagesDictionary):
+
+
+def MergeByExtendListElements(t1_averageList, t2_averageList, pd_averageList, fl_averageList, outputLabels_averageList, ListOfPosteriorImagesDictionary):
     """
     ListOfImagesDictionaries=[ {'T1':os.path.join(mydatadir,'01_T1_half.nii.gz'),'INV_T1':os.path.join(mydatadir,'01_T1_inv_half.nii.gz'),'LABEL_MAP':os.path.join(mydatadir,'01_T1_inv_half.nii
@@ -119,81 +125,82 @@ def MergeByExtendListElements(t1_averageList,t2_averageList,pd_averageList,fl_av
     t2_averageList = ['t2_average_BRAINSABC.nii.gz', 't2_average_BRAINSABC.nii.gz']
     """
-    print "t1_averageList",t1_averageList
-    print "t2_averageList",t2_averageList
-    print "pd_averageList",pd_averageList
-    print "fl_averageList",fl_averageList
-    print "outputLabels_averageList",outputLabels_averageList
+    print "t1_averageList", t1_averageList
+    print "t2_averageList", t2_averageList
+    print "pd_averageList", pd_averageList
+    print "fl_averageList", fl_averageList
+    print "outputLabels_averageList", outputLabels_averageList
     print "$$$$$$$$$$$$$$$$$$$$$$$"
-    print "ListOfPosteriorImagesDictionary",ListOfPosteriorImagesDictionary
+    print "ListOfPosteriorImagesDictionary", ListOfPosteriorImagesDictionary
     ## Initial list with empty dictionaries
-    ListOfImagesDictionaries=[dict() for i in range(0,len(t1_averageList))]
+    ListOfImagesDictionaries = [dict() for i in range(0, len(t1_averageList))]
     ## HACK: Need to make it so that AVG_AIR.nii.gz has a background value of 1
-    registrationImageTypes=['T1'] ## ['T1','T2'] someday.
-    #DefaultContinuousInterpolationType='LanczosWindowedSinc' ## Could also be Linear for speed.
-    DefaultContinuousInterpolationType='Linear'
-    interpolationMapping={'T1':DefaultContinuousInterpolationType,
-                          'T2':DefaultContinuousInterpolationType,
-                          'PD':DefaultContinuousInterpolationType,
-                          'FL':DefaultContinuousInterpolationType,
-                          'BRAINMASK':'MultiLabel'
-                          }
+    registrationImageTypes = ['T1']  # ['T1','T2'] someday.
+    # DefaultContinuousInterpolationType='LanczosWindowedSinc' ## Could also be Linear for speed.
+    DefaultContinuousInterpolationType = 'Linear'
+    interpolationMapping = {'T1': DefaultContinuousInterpolationType,
+                            'T2': DefaultContinuousInterpolationType,
+                            'PD': DefaultContinuousInterpolationType,
+                            'FL': DefaultContinuousInterpolationType,
+                            'BRAINMASK': 'MultiLabel'
+                            }
     ## NOTE: All input lists MUST have the same number of elements (even if they are null)
-    for list_index in range(0,len(t1_averageList)):
+    for list_index in range(0, len(t1_averageList)):
         if t1_averageList[list_index] is not None:
-            ListOfImagesDictionaries[list_index]['T1']=t1_averageList[list_index]
+            ListOfImagesDictionaries[list_index]['T1'] = t1_averageList[list_index]
         if t2_averageList[list_index] is not None:
-            ListOfImagesDictionaries[list_index]['T2']=t2_averageList[list_index]
+            ListOfImagesDictionaries[list_index]['T2'] = t2_averageList[list_index]
         if pd_averageList[list_index] is not None:
-            ListOfImagesDictionaries[list_index]['PD']=pd_averageList[list_index]
+            ListOfImagesDictionaries[list_index]['PD'] = pd_averageList[list_index]
         if fl_averageList[list_index] is not None:
-            ListOfImagesDictionaries[list_index]['FL']=fl_averageList[list_index]
+            ListOfImagesDictionaries[list_index]['FL'] = fl_averageList[list_index]
         if outputLabels_averageList[list_index] is not None:
-            ListOfImagesDictionaries[list_index]['BRAINMASK']=outputLabels_averageList[list_index]
-        this_subj_posteriors=ListOfPosteriorImagesDictionary[list_index]
+            ListOfImagesDictionaries[list_index]['BRAINMASK'] = outputLabels_averageList[list_index]
+        this_subj_posteriors = ListOfPosteriorImagesDictionary[list_index]
         for post_items in this_subj_posteriors.items():
-            print "post_items",post_items
+            print "post_items", post_items
             ListOfImagesDictionaries[list_index][post_items[0]] = post_items[1]
     print "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
-    print "ListOfImagesDictionaries",ListOfImagesDictionaries
-    print "registrationImageTypes",registrationImageTypes
-    print "interpolationMapping",interpolationMapping
-    return ListOfImagesDictionaries,registrationImageTypes,interpolationMapping
+    print "ListOfImagesDictionaries", ListOfImagesDictionaries
+    print "registrationImageTypes", registrationImageTypes
+    print "interpolationMapping", interpolationMapping
+    return ListOfImagesDictionaries, registrationImageTypes, interpolationMapping

-def MakeNewAtlasTemplate(t1_image,deformed_list,
-                         AtlasTemplate,outDefinition):
+
+def MakeNewAtlasTemplate(t1_image, deformed_list,
+                         AtlasTemplate, outDefinition):
     import os
     import sys
     import SimpleITK as sitk
-    patternDict= {
-        'AVG_AIR.nii.gz':'@ATLAS_DIRECTORY@/EXTENDED_AIR.nii.gz',
-        'AVG_BGM.nii.gz':'@ATLAS_DIRECTORY@/EXTENDED_BASALTISSUE.nii.gz',
-        'AVG_CRBLGM.nii.gz':'@ATLAS_DIRECTORY@/EXTENDED_CRBLGM.nii.gz',
-        'AVG_CRBLWM.nii.gz':'@ATLAS_DIRECTORY@/EXTENDED_CRBLWM.nii.gz',
+    patternDict = {
+        'AVG_AIR.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_AIR.nii.gz',
+        'AVG_BGM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_BASALTISSUE.nii.gz',
+        'AVG_CRBLGM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_CRBLGM.nii.gz',
+        'AVG_CRBLWM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_CRBLWM.nii.gz',
         'AVG_CSF.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_CSF.nii.gz',
-        'AVG_NOTCSF.nii.gz' :'@ATLAS_DIRECTORY@/EXTENDED_NOTCSF.nii.gz',
+        'AVG_NOTCSF.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_NOTCSF.nii.gz',
         'AVG_NOTGM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_NOTGM.nii.gz',
         'AVG_NOTVB.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_NOTVB.nii.gz',
         'AVG_NOTWM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_NOTWM.nii.gz',
         'AVG_SURFGM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_SURFGM.nii.gz',
         'AVG_VB.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_VB.nii.gz',
-        'AVG_WM.nii.gz':'@ATLAS_DIRECTORY@/EXTENDED_WM.nii.gz',
+        'AVG_WM.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_WM.nii.gz',
         'AVG_ACCUMBEN.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_ACCUMBEN.nii.gz',
         'AVG_CAUDATE.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_CAUDATE.nii.gz',
         'AVG_PUTAMEN.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_PUTAMEN.nii.gz',
         'AVG_GLOBUS.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_GLOBUS.nii.gz',
         'AVG_THALAMUS.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_THALAMUS.nii.gz',
         'AVG_HIPPOCAMPUS.nii.gz': '@ATLAS_DIRECTORY@/EXTENDED_HIPPOCAMPUS.nii.gz',
-        'AVG_BRAINMASK.nii.gz':'@ATLAS_DIRECTORY@/template_brain.nii.gz',
-        'T1_RESHAPED.nii.gz':'@ATLAS_DIRECTORY@/template_t1.nii.gz',
-        'AVG_T2.nii.gz':'@ATLAS_DIRECTORY@/template_t2.nii.gz',
-        'AVG_PD.nii.gz':'@ATLAS_DIRECTORY@/template_t2.nii.gz',
-        'AVG_FL.nii.gz':'@ATLAS_DIRECTORY@/template_t2.nii.gz'
-    }
-    templateFile = open(AtlasTemplate,'r')
+        'AVG_BRAINMASK.nii.gz': '@ATLAS_DIRECTORY@/template_brain.nii.gz',
+        'T1_RESHAPED.nii.gz': '@ATLAS_DIRECTORY@/template_t1.nii.gz',
+        'AVG_T2.nii.gz': '@ATLAS_DIRECTORY@/template_t2.nii.gz',
+        'AVG_PD.nii.gz': '@ATLAS_DIRECTORY@/template_t2.nii.gz',
+        'AVG_FL.nii.gz': '@ATLAS_DIRECTORY@/template_t2.nii.gz'
+    }
+    templateFile = open(AtlasTemplate, 'r')
     content = templateFile.read()  # read entire file into memory
     templateFile.close()
@@ -201,17 +208,17 @@ def MakeNewAtlasTemplate(t1_image,deformed_list,
     ## sometimes the posteriors are not relevant for priors
     ## due to anomalies around the edges.
     print("\n\n\nALL_FILES: {0}\n\n\n".format(deformed_list))
-    load_images_list=dict()
+    load_images_list = dict()
     for full_pathname in deformed_list:
-        base_name=os.path.basename(full_pathname)
+        base_name = os.path.basename(full_pathname)
         if base_name in patternDict.keys():
-            load_images_list[base_name]=sitk.ReadImage(full_pathname)
+            load_images_list[base_name] = sitk.ReadImage(full_pathname)
         else:
             print("MISSING FILE FROM patternDict: {0}".format(base_name))
     ## Make binary dilated mask
-    binmask=sitk.BinaryThreshold(load_images_list['AVG_BRAINMASK.nii.gz'],1,1000000)
-    dilated5=sitk.DilateObjectMorphology(binmask,5)
-    dilated5=sitk.Cast(dilated5,sitk.sitkFloat32)  # Convert to Float32 for multiply
+    binmask = sitk.BinaryThreshold(load_images_list['AVG_BRAINMASK.nii.gz'], 1, 1000000)
+    dilated5 = sitk.DilateObjectMorphology(binmask, 5)
+    dilated5 = sitk.Cast(dilated5, sitk.sitkFloat32)  # Convert to Float32 for multiply
     ## Now clip the interior brain mask with dilated5
     interiorPriors = [
         'AVG_BGMWARP_AVG_BGM.nii.gz',
@@ -227,54 +234,54 @@ def MakeNewAtlasTemplate(t1_image,deformed_list,
         'AVG_GLOBUSWARP_AVG_GLOBUS.nii.gz',
         'AVG_THALAMUSWARP_AVG_THALAMUS.nii.gz',
         'AVG_HIPPOCAMPUSWARP_AVG_HIPPOCAMPUS.nii.gz',
-        ]
-    clean_deformed_list=deformed_list
-    T2File=None
-    PDFile=None
-    for index in range(0,len(deformed_list)):
-        full_pathname=deformed_list[index]
-        base_name=os.path.basename(full_pathname)
+    ]
+    clean_deformed_list = deformed_list
+    T2File = None
+    PDFile = None
+    for index in range(0, len(deformed_list)):
+        full_pathname = deformed_list[index]
+        base_name = os.path.basename(full_pathname)
         if base_name == 'AVG_BRAINMASK.nii.gz':
             ### Make Brain Mask Binary
-            clipped_name='CLIPPED_'+base_name
-            patternDict[clipped_name]=patternDict[base_name]
-            sitk.WriteImage(binmask,clipped_name)
-            clean_deformed_list[index]=os.path.realpath(clipped_name)
+            clipped_name = 'CLIPPED_' + base_name
+            patternDict[clipped_name] = patternDict[base_name]
+            sitk.WriteImage(binmask, clipped_name)
+            clean_deformed_list[index] = os.path.realpath(clipped_name)
         if base_name == 'AVG_T2WARP_AVG_T2.nii.gz':
-            T2File=full_pathname
+            T2File = full_pathname
         if base_name == 'AVG_PDWARP_AVG_PD.nii.gz':
-            PDFile=full_pathname
+            PDFile = full_pathname
         if base_name in interiorPriors:
             ### Make clipped posteriors for brain regions
-            curr=sitk.Cast(sitk.ReadImage(full_pathname),sitk.sitkFloat32)
-            curr=curr*dilated5
-            clipped_name='CLIPPED_'+base_name
-            patternDict[clipped_name]=patternDict[base_name]
-            sitk.WriteImage(curr,clipped_name)
-            clean_deformed_list[index]=os.path.realpath(clipped_name)
+            curr = sitk.Cast(sitk.ReadImage(full_pathname), sitk.sitkFloat32)
+            curr = curr * dilated5
+            clipped_name = 'CLIPPED_' + base_name
+            patternDict[clipped_name] = patternDict[base_name]
+            sitk.WriteImage(curr, clipped_name)
+            clean_deformed_list[index] = os.path.realpath(clipped_name)
             print "HACK: ", clean_deformed_list[index]
-    curr=None
-    binmask=None
-    dilated5=None
+    curr = None
+    binmask = None
+    dilated5 = None
     for full_pathname in clean_deformed_list:
-        base_name=os.path.basename(full_pathname)
+        base_name = os.path.basename(full_pathname)
         if base_name in patternDict.keys():
-            content=content.replace(patternDict[base_name],full_pathname)
+            content = content.replace(patternDict[base_name], full_pathname)
     ## If there is no T2, then use the PD image
     if T2File is not None:
-        content=content.replace('@ATLAS_DIRECTORY@/template_t2.nii.gz',T2File)
+        content = content.replace('@ATLAS_DIRECTORY@/template_t2.nii.gz', T2File)
     elif PDFile is not None:
-        content=content.replace('@ATLAS_DIRECTORY@/template_t2.nii.gz',PDFile)
-    content=content.replace('@ATLAS_DIRECTORY@/template_t1.nii.gz',t1_image)
+        content = content.replace('@ATLAS_DIRECTORY@/template_t2.nii.gz', PDFile)
+    content = content.replace('@ATLAS_DIRECTORY@/template_t1.nii.gz', t1_image)
     ## NOTE: HEAD REGION CAN JUST BE T1 image.
-    content=content.replace('@ATLAS_DIRECTORY@/template_headregion.nii.gz',t1_image)
+    content = content.replace('@ATLAS_DIRECTORY@/template_headregion.nii.gz', t1_image)
     ## NOTE: BRAIN REGION CAN JUST BE the label images.
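+    # The AtlasTemplate definition is plain text containing @ATLAS_DIRECTORY@
+    # placeholder paths; each placeholder matched in patternDict was string-replaced
+    # above with the corresponding subject-specific deformed average, so the file
+    # written below is a self-contained, subject-specific atlas definition.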
-    outAtlasFullPath=os.path.realpath(outDefinition)
+    outAtlasFullPath = os.path.realpath(outDefinition)
     newFile = open(outAtlasFullPath, 'w')
     newFile.write(content)  # write the file with the text substitution
     newFile.close()
-    return outAtlasFullPath,clean_deformed_list
+    return outAtlasFullPath, clean_deformed_list


 ###########################################################################
@@ -289,8 +296,8 @@ def MakeNewAtlasTemplate(t1_image,deformed_list,
 ###########################################################################
 ###########################################################################
 ###########################################################################
-def WorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, ExperimentBaseDirectoryResults, ExperimentDatabase, atlas_fname_wpath, BCD_model_path,
-               InterpolationMode="Linear", Mode=10,DwiList=[],WORKFLOW_COMPONENTS=[],CLUSTER_QUEUE='',CLUSTER_QUEUE_LONG='',SGE_JOB_SCRIPT='#!/bin/bash'):
+def WorkupT1T2(subjectid, mountPrefix, ExperimentBaseDirectoryCache, ExperimentBaseDirectoryResults, ExperimentDatabase, atlas_fname_wpath, BCD_model_path,
+               InterpolationMode="Linear", Mode=10, DwiList=[], WORKFLOW_COMPONENTS=[], CLUSTER_QUEUE='', CLUSTER_QUEUE_LONG='', SGE_JOB_SCRIPT='#!/bin/bash'):
     """
     Run autoworkup on all subjects' data defined in the ExperimentDatabase
@@ -302,183 +309,180 @@ def WorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, ExperimentBas
     print "Building Pipeline"
     ########### PIPELINE INITIALIZATION #############
-    baw200 = pe.Workflow(name="BAW_20120813")  ### HACK: This needs to be specified in the config file.
+    baw200 = pe.Workflow(name="BAW_20120813")  # HACK: This needs to be specified in the config file.
     baw200.config['execution'] = {
-        'plugin':'Linear',
-        #'stop_on_first_crash':'true',
-        #'stop_on_first_rerun': 'true',
-        'stop_on_first_crash':'false',
-        'stop_on_first_rerun': 'false',  ## This stops at first attempt to rerun, before running, and before deleting previous results.
-        'hash_method': 'timestamp',
-        'single_thread_matlab':'true',  ## Multi-core 2011a multi-core for matrix multiplication.
-        'remove_unnecessary_outputs':'false',
-        'use_relative_paths':'false',  ## relative paths should be on, require hash update when changed.
-        'remove_node_directories':'false',  ## Experimental
-        'local_hash_check':'true',  ##
-        'job_finished_timeout':45  ##
-    }
+        'plugin': 'Linear',
+        #'stop_on_first_crash':'true',
+        #'stop_on_first_rerun': 'true',
+        'stop_on_first_crash': 'false',
+        'stop_on_first_rerun': 'false',  # This stops at first attempt to rerun, before running, and before deleting previous results.
+        'hash_method': 'timestamp',
+        'single_thread_matlab': 'true',  # Multi-core 2011a multi-core for matrix multiplication.
+        'remove_unnecessary_outputs': 'false',
+        'use_relative_paths': 'false',  # relative paths should be on, require hash update when changed.
+        'remove_node_directories': 'false',  # Experimental
+        'local_hash_check': 'true',
+        'job_finished_timeout': 45
+    }
     baw200.config['logging'] = {
-        'workflow_level':'DEBUG',
-        'filemanip_level':'DEBUG',
-        'interface_level':'DEBUG',
-        'log_directory': ExperimentBaseDirectoryCache
+        'workflow_level': 'DEBUG',
+        'filemanip_level': 'DEBUG',
+        'interface_level': 'DEBUG',
+        'log_directory': ExperimentBaseDirectoryCache
     }
     baw200.base_dir = ExperimentBaseDirectoryCache

-    import WorkupT1T2Single
-    MergeT1s=dict()
-    MergeT2s=dict()
-    MergePDs=dict()
-    MergeFLs=dict()
-    MergeOutputLabels=dict()
-    MergePosteriors=dict()
-    BAtlas=dict()
-    FREESURFER_ID=dict()
-    FixWMPartitioningNode=dict()
-    BRAINSCreateLabelMapFromProbabilityMapsNode=dict()
+    MergeT1s = dict()
+    MergeT2s = dict()
+    MergePDs = dict()
+    MergeFLs = dict()
+    MergeOutputLabels = dict()
+    MergePosteriors = dict()
+    BAtlas = dict()
+    FREESURFER_ID = dict()
+    FixWMPartitioningNode = dict()
+    BRAINSCreateLabelMapFromProbabilityMapsNode = dict()
     if True:
         print("===================== SUBJECT: {0} ===========================".format(subjectid))
-        PHASE_1_oneSubjWorkflow=dict()
-        PHASE_1_subjInfoNode=dict()
+        PHASE_1_oneSubjWorkflow = dict()
+        PHASE_1_subjInfoNode = dict()
         allSessions = ExperimentDatabase.getSessionsFromSubject(subjectid)
-        print("Running sessions: {ses} for subject {sub}".format(ses=allSessions,sub=subjectid))
-        BAtlas[subjectid] = MakeAtlasNode(atlas_fname_wpath,"BAtlas_"+str(subjectid))  ## Call function to create node
-
-
-        global_AllT1s=dict()
-        global_AllT2s=dict()
-        global_AllPDs=dict()
-        global_AllFLs=dict()
-        global_AllOthers=dict()
+        print("Running sessions: {ses} for subject {sub}".format(ses=allSessions, sub=subjectid))
+        BAtlas[subjectid] = MakeAtlasNode(atlas_fname_wpath, "BAtlas_" + str(subjectid))  # Call function to create node
+
+        global_AllT1s = dict()
+        global_AllT2s = dict()
+        global_AllPDs = dict()
+        global_AllFLs = dict()
+        global_AllOthers = dict()
         for sessionid in allSessions:
-            global_AllT1s[sessionid]=ExperimentDatabase.getFilenamesByScantype(sessionid,['T1-30','T1-15'])
-            global_AllT2s[sessionid]=ExperimentDatabase.getFilenamesByScantype(sessionid,['T2-30','T2-15'])
-            global_AllPDs[sessionid]=ExperimentDatabase.getFilenamesByScantype(sessionid,['PD-30','PD-15'])
-            global_AllFLs[sessionid]=ExperimentDatabase.getFilenamesByScantype(sessionid,['FL-30','FL-15'])
-            global_AllOthers[sessionid]=ExperimentDatabase.getFilenamesByScantype(sessionid,['OTHER-30','OTHER-15'])
-            print("HACK: all T1s: {0} {1}".format(global_AllT1s[sessionid], len(global_AllT1s[sessionid]) ))
-            print("HACK: all T2s: {0} {1}".format(global_AllT2s[sessionid], len(global_AllT2s[sessionid]) ))
-            print("HACK: all PDs: {0} {1}".format(global_AllPDs[sessionid], len(global_AllPDs[sessionid]) ))
-            print("HACK: all FLs: {0} {1}".format(global_AllFLs[sessionid], len(global_AllFLs[sessionid]) ))
-            print("HACK: all Others: {0} {1}".format(global_AllOthers[sessionid], len(global_AllOthers[sessionid]) ))
+            global_AllT1s[sessionid] = ExperimentDatabase.getFilenamesByScantype(sessionid, ['T1-30', 'T1-15'])
+            global_AllT2s[sessionid] = ExperimentDatabase.getFilenamesByScantype(sessionid, ['T2-30', 'T2-15'])
+            global_AllPDs[sessionid] = ExperimentDatabase.getFilenamesByScantype(sessionid, ['PD-30', 'PD-15'])
+            global_AllFLs[sessionid] = ExperimentDatabase.getFilenamesByScantype(sessionid, ['FL-30', 'FL-15'])
+            global_AllOthers[sessionid] = ExperimentDatabase.getFilenamesByScantype(sessionid, ['OTHER-30', 'OTHER-15'])
+            print("HACK: all T1s: {0} {1}".format(global_AllT1s[sessionid], len(global_AllT1s[sessionid])))
+            print("HACK: all T2s: {0} {1}".format(global_AllT2s[sessionid], len(global_AllT2s[sessionid])))
+            print("HACK: all PDs: {0} {1}".format(global_AllPDs[sessionid], len(global_AllPDs[sessionid])))
+            print("HACK: all FLs: {0} {1}".format(global_AllFLs[sessionid], len(global_AllFLs[sessionid])))
+            print("HACK: all Others: {0} {1}".format(global_AllOthers[sessionid], len(global_AllOthers[sessionid])))
             projectid = ExperimentDatabase.getProjFromSession(sessionid)
-            print("PROJECT: {0} SUBJECT: {1} SESSION: {2}".format(projectid,subjectid,sessionid))
+            print("PROJECT: {0} SUBJECT: {1} SESSION: {2}".format(projectid, subjectid, sessionid))
             PHASE_1_subjInfoNode[sessionid] = pe.Node(interface=IdentityInterface(fields=
-                                                      ['sessionid','subjectid','projectid',
-                                                       'allT1s',
-                                                       'allT2s',
-                                                       'allPDs',
-                                                       'allFLs',
-                                                       'allOthers']),
-                                                      run_without_submitting=True,
-                                                      name='99_PHASE_1_SubjInfoNode_'+str(subjectid)+"_"+str(sessionid) )
-            PHASE_1_subjInfoNode[sessionid].inputs.projectid=projectid
-            PHASE_1_subjInfoNode[sessionid].inputs.subjectid=subjectid
-            PHASE_1_subjInfoNode[sessionid].inputs.sessionid=sessionid
-            PHASE_1_subjInfoNode[sessionid].inputs.allT1s=global_AllT1s[sessionid]
-            PHASE_1_subjInfoNode[sessionid].inputs.allT2s=global_AllT2s[sessionid]
-            PHASE_1_subjInfoNode[sessionid].inputs.allPDs=global_AllPDs[sessionid]
-            PHASE_1_subjInfoNode[sessionid].inputs.allFLs=global_AllFLs[sessionid]
-            PHASE_1_subjInfoNode[sessionid].inputs.allOthers=global_AllOthers[sessionid]
-
-            PROCESSING_PHASE='PHASE_1'
-            PHASE_1_WORKFLOW_COMPONENTS = ['BASIC','TISSUE_CLASSIFY']
-            PHASE_1_oneSubjWorkflow[sessionid]=WorkupT1T2Single.MakeOneSubWorkFlow(
-                projectid, subjectid, sessionid,PROCESSING_PHASE,
-                PHASE_1_WORKFLOW_COMPONENTS,
-                BCD_model_path, InterpolationMode, CLUSTER_QUEUE,CLUSTER_QUEUE_LONG)
-            baw200.connect(PHASE_1_subjInfoNode[sessionid],'projectid',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.projectid')
-            baw200.connect(PHASE_1_subjInfoNode[sessionid],'subjectid',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.subjectid')
-            baw200.connect(PHASE_1_subjInfoNode[sessionid],'sessionid',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.sessionid')
-            baw200.connect(PHASE_1_subjInfoNode[sessionid],'allT1s',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.allT1s')
-            baw200.connect(PHASE_1_subjInfoNode[sessionid],'allT2s',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.allT2s')
-            baw200.connect(PHASE_1_subjInfoNode[sessionid],'allPDs',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.allPDs')
-            baw200.connect(PHASE_1_subjInfoNode[sessionid],'allFLs',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.allFLs')
-            baw200.connect(PHASE_1_subjInfoNode[sessionid],'allOthers',PHASE_1_oneSubjWorkflow[sessionid],'inputspec.allOthers')
-
-            baw200.connect(BAtlas[subjectid],'template_landmarks_31_fcsv', PHASE_1_oneSubjWorkflow[sessionid],'inputspec.template_landmarks_31_fcsv')
-            baw200.connect(BAtlas[subjectid],'template_landmark_weights_31_csv', PHASE_1_oneSubjWorkflow[sessionid],'inputspec.template_landmark_weights_31_csv')
-            baw200.connect(BAtlas[subjectid],'template_t1', PHASE_1_oneSubjWorkflow[sessionid],'inputspec.template_t1')
-            baw200.connect(BAtlas[subjectid],'ExtendedAtlasDefinition_xml', PHASE_1_oneSubjWorkflow[sessionid],'inputspec.atlasDefinition')
-
-        numSessions=len(allSessions)
-        if True or numSessions > 1:  ## Merge all BCD_Results into a global average
-            mergeSubjectSessionNamesT1="99_MergeAllSessions_T1_"+str(subjectid)
+                                                      ['sessionid', 'subjectid', 'projectid',
+                                                       'allT1s',
+                                                       'allT2s',
+                                                       'allPDs',
+                                                       'allFLs',
+                                                       'allOthers']),
+                                                      run_without_submitting=True,
+                                                      name='99_PHASE_1_SubjInfoNode_' + str(subjectid) + "_" + str(sessionid))
+            PHASE_1_subjInfoNode[sessionid].inputs.projectid = projectid
+            PHASE_1_subjInfoNode[sessionid].inputs.subjectid = subjectid
+            PHASE_1_subjInfoNode[sessionid].inputs.sessionid = sessionid
+            PHASE_1_subjInfoNode[sessionid].inputs.allT1s = global_AllT1s[sessionid]
+            PHASE_1_subjInfoNode[sessionid].inputs.allT2s = global_AllT2s[sessionid]
+            PHASE_1_subjInfoNode[sessionid].inputs.allPDs = global_AllPDs[sessionid]
+            PHASE_1_subjInfoNode[sessionid].inputs.allFLs = global_AllFLs[sessionid]
+            PHASE_1_subjInfoNode[sessionid].inputs.allOthers = global_AllOthers[sessionid]
+
+            PROCESSING_PHASE = 'PHASE_1'
+            PHASE_1_WORKFLOW_COMPONENTS = ['BASIC', 'TISSUE_CLASSIFY']
+            PHASE_1_oneSubjWorkflow[sessionid] = WorkupT1T2Single.MakeOneSubWorkFlow(
+                projectid, subjectid, sessionid, PROCESSING_PHASE,
+                PHASE_1_WORKFLOW_COMPONENTS,
+                BCD_model_path, InterpolationMode, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG)
+            baw200.connect(PHASE_1_subjInfoNode[sessionid], 'projectid', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.projectid')
+            baw200.connect(PHASE_1_subjInfoNode[sessionid], 'subjectid', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.subjectid')
+            baw200.connect(PHASE_1_subjInfoNode[sessionid], 'sessionid', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.sessionid')
+            baw200.connect(PHASE_1_subjInfoNode[sessionid], 'allT1s', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.allT1s')
+            baw200.connect(PHASE_1_subjInfoNode[sessionid], 'allT2s', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.allT2s')
+            baw200.connect(PHASE_1_subjInfoNode[sessionid], 'allPDs', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.allPDs')
+            baw200.connect(PHASE_1_subjInfoNode[sessionid], 'allFLs', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.allFLs')
+            baw200.connect(PHASE_1_subjInfoNode[sessionid], 'allOthers', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.allOthers')
+
+            baw200.connect(BAtlas[subjectid], 'template_landmarks_31_fcsv', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.template_landmarks_31_fcsv')
+            baw200.connect(BAtlas[subjectid], 'template_landmark_weights_31_csv', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.template_landmark_weights_31_csv')
+            baw200.connect(BAtlas[subjectid], 'template_t1', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.template_t1')
+            baw200.connect(BAtlas[subjectid], 'ExtendedAtlasDefinition_xml', PHASE_1_oneSubjWorkflow[sessionid], 'inputspec.atlasDefinition')
+
+        numSessions = len(allSessions)
+        if True or numSessions > 1:  # Merge all BCD_Results into a global average
+            mergeSubjectSessionNamesT1 = "99_MergeAllSessions_T1_" + str(subjectid)
             MergeT1s[subjectid] = pe.Node(interface=Merge(numSessions),
                                           run_without_submitting=True,
                                           name=mergeSubjectSessionNamesT1)
-            mergeSubjectSessionNamesT2="99_MergeAllSessions_T2_"+str(subjectid)
+            mergeSubjectSessionNamesT2 = "99_MergeAllSessions_T2_" + str(subjectid)
             MergeT2s[subjectid] = pe.Node(interface=Merge(numSessions),
                                           run_without_submitting=True,
                                           name=mergeSubjectSessionNamesT2)
-            mergeSubjectSessionNamesPD="99_MergeAllSessions_PD_"+str(subjectid)
+            mergeSubjectSessionNamesPD = "99_MergeAllSessions_PD_" + str(subjectid)
             MergePDs[subjectid] = pe.Node(interface=Merge(numSessions),
                                           run_without_submitting=True,
                                           name=mergeSubjectSessionNamesPD)
-            mergeSubjectSessionNamesFL="99_MergeAllSessions_FL_"+str(subjectid)
+            mergeSubjectSessionNamesFL = "99_MergeAllSessions_FL_" + str(subjectid)
             MergeFLs[subjectid] = pe.Node(interface=Merge(numSessions),
                                           run_without_submitting=True,
                                           name=mergeSubjectSessionNamesFL)
-            mergeSubjectSessionNamesoutputLabels="99_MergeAllSessions_outputLabels_"+str(subjectid)
+            mergeSubjectSessionNamesoutputLabels = "99_MergeAllSessions_outputLabels_" + str(subjectid)
             MergeOutputLabels[subjectid] = pe.Node(interface=Merge(numSessions),
-                                          run_without_submitting=True,
-                                          name=mergeSubjectSessionNamesoutputLabels)
-            mergeSubjectSessionNamesPosteriors="99_MergeAllSessions_Posteriors_"+str(subjectid)
+                                                   run_without_submitting=True,
+                                                   name=mergeSubjectSessionNamesoutputLabels)
+            mergeSubjectSessionNamesPosteriors = "99_MergeAllSessions_Posteriors_" + str(subjectid)
             MergePosteriors[subjectid] = pe.Node(interface=Merge(numSessions),
-                                          run_without_submitting=True,
-                                          name=mergeSubjectSessionNamesPosteriors)
-            index=1
-            #print("HACK: HACK: HACK: {0}".format(allSessions))
+                                                 run_without_submitting=True,
+                                                 name=mergeSubjectSessionNamesPosteriors)
+            index = 1
+            # print("HACK: HACK: HACK: {0}".format(allSessions))
             for sessionid in allSessions:
-                index_name='in'+str(index)
-                index+=1
-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.t1_average',MergeT1s[subjectid],index_name)
-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.t2_average',MergeT2s[subjectid],index_name)
-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.pd_average',MergePDs[subjectid],index_name)
-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.fl_average',MergeFLs[subjectid],index_name)
-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.outputLabels',MergeOutputLabels[subjectid],index_name)
-                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid],'outputspec.posteriorImages',MergePosteriors[subjectid],index_name)
-
-            MergeByExtendListElementsNode = pe.Node( Function(function=MergeByExtendListElements,
-                                                    input_names = ['t1_averageList','t2_averageList',
-                                                                   'pd_averageList','fl_averageList',
-                                                                   'outputLabels_averageList','ListOfPosteriorImagesDictionary'],
-                                                    output_names = ['ListOfImagesDictionaries','registrationImageTypes','interpolationMapping']),
-                                                    run_without_submitting=True, name="99_MergeByExtendListElements")
-            baw200.connect( MergeT1s[subjectid],'out', MergeByExtendListElementsNode, 't1_averageList' )
-            baw200.connect( MergeT2s[subjectid],'out', MergeByExtendListElementsNode, 't2_averageList' )
-            baw200.connect( MergePDs[subjectid],'out', MergeByExtendListElementsNode, 'pd_averageList' )
-            baw200.connect( MergeFLs[subjectid],'out', MergeByExtendListElementsNode, 'fl_averageList' )
-            baw200.connect( MergeOutputLabels[subjectid],'out', MergeByExtendListElementsNode, 'outputLabels_averageList' )
-            baw200.connect( MergePosteriors[subjectid],'out', MergeByExtendListElementsNode, 'ListOfPosteriorImagesDictionary' )
-
+                index_name = 'in' + str(index)
+                index += 1
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.t1_average', MergeT1s[subjectid], index_name)
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.t2_average', MergeT2s[subjectid], index_name)
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.pd_average', MergePDs[subjectid], index_name)
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.fl_average', MergeFLs[subjectid], index_name)
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.outputLabels', MergeOutputLabels[subjectid], index_name)
+                baw200.connect(PHASE_1_oneSubjWorkflow[sessionid], 'outputspec.posteriorImages', MergePosteriors[subjectid], index_name)
+
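+            # Each Merge(numSessions) node fans the per-session averages into a single
+            # list (inputs in1..inN); MergeByExtendListElements then zips those
+            # equal-length lists into one {modality: file} dictionary per session,
+            # skipping modalities that are None for that session.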
MergeByExtendListElementsNode = pe.Node(Function(function=MergeByExtendListElements, + input_names=['t1_averageList', 't2_averageList', + 'pd_averageList', 'fl_averageList', + 'outputLabels_averageList', 'ListOfPosteriorImagesDictionary'], + output_names=['ListOfImagesDictionaries', 'registrationImageTypes', 'interpolationMapping']), + run_without_submitting=True, name="99_MergeByExtendListElements") + baw200.connect(MergeT1s[subjectid], 'out', MergeByExtendListElementsNode, 't1_averageList') + baw200.connect(MergeT2s[subjectid], 'out', MergeByExtendListElementsNode, 't2_averageList') + baw200.connect(MergePDs[subjectid], 'out', MergeByExtendListElementsNode, 'pd_averageList') + baw200.connect(MergeFLs[subjectid], 'out', MergeByExtendListElementsNode, 'fl_averageList') + baw200.connect(MergeOutputLabels[subjectid], 'out', MergeByExtendListElementsNode, 'outputLabels_averageList') + baw200.connect(MergePosteriors[subjectid], 'out', MergeByExtendListElementsNode, 'ListOfPosteriorImagesDictionary') ### USE ANTS import nipype.interfaces.ants as ants - myInitAvgWF = pe.Node(interface=ants.AverageImages(), name ='Phase1_antsSimpleAverage') + myInitAvgWF = pe.Node(interface=ants.AverageImages(), name='Phase1_antsSimpleAverage') myInitAvgWF.inputs.dimension = 3 myInitAvgWF.inputs.normalize = True baw200.connect(MergeT1s[subjectid], 'out', myInitAvgWF, "images") - TEMPLATE_BUILD_RUN_MODE='MULTI_IMAGE' + TEMPLATE_BUILD_RUN_MODE = 'MULTI_IMAGE' if numSessions == 1: - TEMPLATE_BUILD_RUN_MODE='SINGLE_IMAGE' + TEMPLATE_BUILD_RUN_MODE = 'SINGLE_IMAGE' ### USE ANTS REGISTRATION - #from nipype.workflows.smri.ants import antsRegistrationTemplateBuildSingleIterationWF + # from nipype.workflows.smri.ants import antsRegistrationTemplateBuildSingleIterationWF from BAWantsRegistrationBuildTemplate import BAWantsRegistrationTemplateBuildSingleIterationWF - buildTemplateIteration1=BAWantsRegistrationTemplateBuildSingleIterationWF('iteration01') + buildTemplateIteration1 = BAWantsRegistrationTemplateBuildSingleIterationWF('iteration01') ## TODO: Change these parameters BeginANTS_iter1 = buildTemplateIteration1.get_node("BeginANTS") - BeginANTS_iter1.plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 4-8 -l mem_free=9000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE_LONG), 'overwrite': True} + BeginANTS_iter1.plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 4-8 -l mem_free=9000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE_LONG), 'overwrite': True} wimtdeformed_iter1 = buildTemplateIteration1.get_node("wimtdeformed") - wimtdeformed_iter1.plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 1-2 -l mem_free=2000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} + wimtdeformed_iter1.plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 1-2 -l mem_free=2000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} AvgAffineTransform_iter1 = buildTemplateIteration1.get_node("AvgAffineTransform") - AvgAffineTransform_iter1.plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 1 -l mem_free=2000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} + AvgAffineTransform_iter1.plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 1 -l mem_free=2000M -o /dev/null -e /dev/null 
{QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} wimtPassivedeformed_iter1 = buildTemplateIteration1.get_node("wimtPassivedeformed") - wimtPassivedeformed_iter1.plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 1-2 -l mem_free=2000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} + wimtPassivedeformed_iter1.plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 1-2 -l mem_free=2000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} baw200.connect(myInitAvgWF, 'output_average_image', buildTemplateIteration1, 'inputspec.fixed_image') baw200.connect(MergeByExtendListElementsNode, 'ListOfImagesDictionaries', buildTemplateIteration1, 'inputspec.ListOfImagesDictionaries') @@ -489,14 +493,13 @@ def WorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, ExperimentBas buildTemplateIteration2 = BAWantsRegistrationTemplateBuildSingleIterationWF('Iteration02') ## TODO: Change these parameters BeginANTS_iter2 = buildTemplateIteration2.get_node("BeginANTS") - BeginANTS_iter2.plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 4-8 -l mem_free=9000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE_LONG), 'overwrite': True} + BeginANTS_iter2.plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 4-8 -l mem_free=9000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE_LONG), 'overwrite': True} wimtdeformed_iter2 = buildTemplateIteration2.get_node("wimtdeformed") - wimtdeformed_iter2.plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 1-2 -l mem_free=2000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} + wimtdeformed_iter2.plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 1-2 -l mem_free=2000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} AvgAffineTransform_iter2 = buildTemplateIteration2.get_node("AvgAffineTransform") - AvgAffineTransform_iter2.plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 1 -l mem_free=2000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} + AvgAffineTransform_iter2.plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 1 -l mem_free=2000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} wimtPassivedeformed_iter2 = buildTemplateIteration2.get_node("wimtPassivedeformed") - wimtPassivedeformed_iter2.plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 1-2 -l mem_free=2000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} - + wimtPassivedeformed_iter2.plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 1-2 -l mem_free=2000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} baw200.connect(buildTemplateIteration1, 'outputspec.template', buildTemplateIteration2, 'inputspec.fixed_image') baw200.connect(MergeByExtendListElementsNode, 'ListOfImagesDictionaries', buildTemplateIteration2, 'inputspec.ListOfImagesDictionaries') @@ -504,103 +507,104 @@ def WorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, ExperimentBas 
baw200.connect(MergeByExtendListElementsNode, 'interpolationMapping', buildTemplateIteration2, 'inputspec.interpolationMapping') ### Now define where the final organized outputs should go. - SubjectTemplate_DataSink=pe.Node(nio.DataSink(),name="SubjectTemplate_DS") - SubjectTemplate_DataSink.overwrite=GLOBAL_DATA_SINK_REWRITE - SubjectTemplate_DataSink.inputs.base_directory=ExperimentBaseDirectoryResults + SubjectTemplate_DataSink = pe.Node(nio.DataSink(), name="SubjectTemplate_DS") + SubjectTemplate_DataSink.overwrite = GLOBAL_DATA_SINK_REWRITE + SubjectTemplate_DataSink.inputs.base_directory = ExperimentBaseDirectoryResults SubjectTemplate_DataSink.inputs.regexp_substitutions = GenerateSubjectOutputPattern(subjectid) - baw200.connect(buildTemplateIteration2,'outputspec.template',SubjectTemplate_DataSink,'ANTSTemplate.@template') + baw200.connect(buildTemplateIteration2, 'outputspec.template', SubjectTemplate_DataSink, 'ANTSTemplate.@template') MakeNewAtlasTemplateNode = pe.Node(interface=Function(function=MakeNewAtlasTemplate, - input_names=['t1_image', 'deformed_list','AtlasTemplate','outDefinition'], - output_names=['outAtlasFullPath','clean_deformed_list']), - # This is a lot of work, so submit it run_without_submitting=True, - run_without_submitting=True, ### HACK: THIS NODE REALLY SHOULD RUN ON THE CLUSTER! - name='99_MakeNewAtlasTemplate') - MakeNewAtlasTemplateNode.plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 1-1 -l mem_free=1000M -o /nfsscratch/PREDICT/hjohnson/TrackOn/scripts/MNA_out.out -e /nfsscratch/PREDICT/hjohnson/TrackOn/scripts/MNA_err.err {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True} - MakeNewAtlasTemplateNode.inputs.outDefinition='AtlasDefinition_'+subjectid+'.xml' - baw200.connect(BAtlas[subjectid],'ExtendedAtlasDefinition_xml_in',MakeNewAtlasTemplateNode,'AtlasTemplate') - baw200.connect(buildTemplateIteration2,'outputspec.template',MakeNewAtlasTemplateNode,'t1_image') - baw200.connect(buildTemplateIteration2,'outputspec.passive_deformed_templates',MakeNewAtlasTemplateNode,'deformed_list') - baw200.connect(MakeNewAtlasTemplateNode,'clean_deformed_list',SubjectTemplate_DataSink,'ANTSTemplate.@passive_deformed_templates') + input_names=['t1_image', 'deformed_list', 'AtlasTemplate', 'outDefinition'], + output_names=['outAtlasFullPath', 'clean_deformed_list']), + # This is a lot of work, so submit it run_without_submitting=True, + run_without_submitting=True, # HACK: THIS NODE REALLY SHOULD RUN ON THE CLUSTER! 
+                                       name='99_MakeNewAtlasTemplate')
+    MakeNewAtlasTemplateNode.plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 1-1 -l mem_free=1000M -o /nfsscratch/PREDICT/hjohnson/TrackOn/scripts/MNA_out.out -e /nfsscratch/PREDICT/hjohnson/TrackOn/scripts/MNA_err.err {QUEUE_OPTIONS}'.format(
+        QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True}
+    MakeNewAtlasTemplateNode.inputs.outDefinition = 'AtlasDefinition_' + subjectid + '.xml'
+    baw200.connect(BAtlas[subjectid], 'ExtendedAtlasDefinition_xml_in', MakeNewAtlasTemplateNode, 'AtlasTemplate')
+    baw200.connect(buildTemplateIteration2, 'outputspec.template', MakeNewAtlasTemplateNode, 't1_image')
+    baw200.connect(buildTemplateIteration2, 'outputspec.passive_deformed_templates', MakeNewAtlasTemplateNode, 'deformed_list')
+    baw200.connect(MakeNewAtlasTemplateNode, 'clean_deformed_list', SubjectTemplate_DataSink, 'ANTSTemplate.@passive_deformed_templates')

     ###### Starting Phase II
-    PHASE_2_oneSubjWorkflow=dict()
-    PHASE_2_subjInfoNode=dict()
-    BASIC_DataSink=dict()
-    TC_DataSink=dict()
-    AddLikeTissueSink=dict()
-    AccumulateLikeTissuePosteriorsNode=dict()
+    PHASE_2_oneSubjWorkflow = dict()
+    PHASE_2_subjInfoNode = dict()
+    BASIC_DataSink = dict()
+    TC_DataSink = dict()
+    AddLikeTissueSink = dict()
+    AccumulateLikeTissuePosteriorsNode = dict()
     for sessionid in allSessions:
         projectid = ExperimentDatabase.getProjFromSession(sessionid)
-        print("PHASE II PROJECT: {0} SUBJECT: {1} SESSION: {2}".format(projectid,subjectid,sessionid))
+        print("PHASE II PROJECT: {0} SUBJECT: {1} SESSION: {2}".format(projectid, subjectid, sessionid))
         PHASE_2_subjInfoNode[sessionid] = pe.Node(interface=IdentityInterface(fields=
-                                                                             ['sessionid','subjectid','projectid',
-                                                                              'allT1s',
-                                                                              'allT2s',
-                                                                              'allPDs',
-                                                                              'allFLs',
-                                                                              'allOthers']),
-                                                  run_without_submitting=True,
-                                                  name='99_PHASE_2_SubjInfoNode_'+str(subjectid)+"_"+str(sessionid) )
-        PHASE_2_subjInfoNode[sessionid].inputs.projectid=projectid
-        PHASE_2_subjInfoNode[sessionid].inputs.subjectid=subjectid
-        PHASE_2_subjInfoNode[sessionid].inputs.sessionid=sessionid
-        PHASE_2_subjInfoNode[sessionid].inputs.allT1s=ExperimentDatabase.getFilenamesByScantype(sessionid,['T1-30','T1-15'])
-        PHASE_2_subjInfoNode[sessionid].inputs.allT2s=ExperimentDatabase.getFilenamesByScantype(sessionid,['T2-30','T2-15'])
-        PHASE_2_subjInfoNode[sessionid].inputs.allPDs=ExperimentDatabase.getFilenamesByScantype(sessionid,['PD-30','PD-15'])
-        PHASE_2_subjInfoNode[sessionid].inputs.allFLs=ExperimentDatabase.getFilenamesByScantype(sessionid,['FL-30','FL-15'])
-        PHASE_2_subjInfoNode[sessionid].inputs.allOthers=ExperimentDatabase.getFilenamesByScantype(sessionid,['OTHER-30','OTHER-15'])
-
-        PROCESSING_PHASE='PHASE_2'
-        PHASE_2_oneSubjWorkflow[sessionid]=WorkupT1T2Single.MakeOneSubWorkFlow(
-            projectid, subjectid, sessionid,PROCESSING_PHASE,
+                                                  ['sessionid', 'subjectid', 'projectid',
+                                                   'allT1s',
+                                                   'allT2s',
+                                                   'allPDs',
+                                                   'allFLs',
+                                                   'allOthers']),
+                                                  run_without_submitting=True,
+                                                  name='99_PHASE_2_SubjInfoNode_' + str(subjectid) + "_" + str(sessionid))
+        PHASE_2_subjInfoNode[sessionid].inputs.projectid = projectid
+        PHASE_2_subjInfoNode[sessionid].inputs.subjectid = subjectid
+        PHASE_2_subjInfoNode[sessionid].inputs.sessionid = sessionid
+        PHASE_2_subjInfoNode[sessionid].inputs.allT1s = ExperimentDatabase.getFilenamesByScantype(sessionid, ['T1-30', 'T1-15'])
+        PHASE_2_subjInfoNode[sessionid].inputs.allT2s = ExperimentDatabase.getFilenamesByScantype(sessionid, ['T2-30', 'T2-15'])
+        PHASE_2_subjInfoNode[sessionid].inputs.allPDs = ExperimentDatabase.getFilenamesByScantype(sessionid, ['PD-30', 'PD-15'])
+        PHASE_2_subjInfoNode[sessionid].inputs.allFLs = ExperimentDatabase.getFilenamesByScantype(sessionid, ['FL-30', 'FL-15'])
+        PHASE_2_subjInfoNode[sessionid].inputs.allOthers = ExperimentDatabase.getFilenamesByScantype(sessionid, ['OTHER-30', 'OTHER-15'])
+
+        PROCESSING_PHASE = 'PHASE_2'
+        PHASE_2_oneSubjWorkflow[sessionid] = WorkupT1T2Single.MakeOneSubWorkFlow(
+            projectid, subjectid, sessionid, PROCESSING_PHASE,
             WORKFLOW_COMPONENTS,
-            BCD_model_path, InterpolationMode, CLUSTER_QUEUE,CLUSTER_QUEUE_LONG)
-        baw200.connect(PHASE_2_subjInfoNode[sessionid],'projectid',PHASE_2_oneSubjWorkflow[sessionid],'inputspec.projectid')
-        baw200.connect(PHASE_2_subjInfoNode[sessionid],'subjectid',PHASE_2_oneSubjWorkflow[sessionid],'inputspec.subjectid')
-        baw200.connect(PHASE_2_subjInfoNode[sessionid],'sessionid',PHASE_2_oneSubjWorkflow[sessionid],'inputspec.sessionid')
-        baw200.connect(PHASE_2_subjInfoNode[sessionid],'allT1s',PHASE_2_oneSubjWorkflow[sessionid],'inputspec.allT1s')
-        baw200.connect(PHASE_2_subjInfoNode[sessionid],'allT2s',PHASE_2_oneSubjWorkflow[sessionid],'inputspec.allT2s')
-        baw200.connect(PHASE_2_subjInfoNode[sessionid],'allPDs',PHASE_2_oneSubjWorkflow[sessionid],'inputspec.allPDs')
-        baw200.connect(PHASE_2_subjInfoNode[sessionid],'allFLs',PHASE_2_oneSubjWorkflow[sessionid],'inputspec.allFLs')
-        baw200.connect(PHASE_2_subjInfoNode[sessionid],'allOthers',PHASE_2_oneSubjWorkflow[sessionid],'inputspec.allOthers')
-
-        baw200.connect(BAtlas[subjectid],'template_landmarks_31_fcsv', PHASE_2_oneSubjWorkflow[sessionid],'inputspec.template_landmarks_31_fcsv')
-        baw200.connect(BAtlas[subjectid],'template_landmark_weights_31_csv', PHASE_2_oneSubjWorkflow[sessionid],'inputspec.template_landmark_weights_31_csv')
-        baw200.connect(buildTemplateIteration2,'outputspec.template', PHASE_2_oneSubjWorkflow[sessionid],'inputspec.template_t1')
-        baw200.connect(MakeNewAtlasTemplateNode,'outAtlasFullPath', PHASE_2_oneSubjWorkflow[sessionid],'inputspec.atlasDefinition')
+            BCD_model_path, InterpolationMode, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG)
+        baw200.connect(PHASE_2_subjInfoNode[sessionid], 'projectid', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.projectid')
+        baw200.connect(PHASE_2_subjInfoNode[sessionid], 'subjectid', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.subjectid')
+        baw200.connect(PHASE_2_subjInfoNode[sessionid], 'sessionid', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.sessionid')
+        baw200.connect(PHASE_2_subjInfoNode[sessionid], 'allT1s', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.allT1s')
+        baw200.connect(PHASE_2_subjInfoNode[sessionid], 'allT2s', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.allT2s')
+        baw200.connect(PHASE_2_subjInfoNode[sessionid], 'allPDs', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.allPDs')
+        baw200.connect(PHASE_2_subjInfoNode[sessionid], 'allFLs', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.allFLs')
+        baw200.connect(PHASE_2_subjInfoNode[sessionid], 'allOthers', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.allOthers')
+
+        baw200.connect(BAtlas[subjectid], 'template_landmarks_31_fcsv', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.template_landmarks_31_fcsv')
+        baw200.connect(BAtlas[subjectid], 'template_landmark_weights_31_csv', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.template_landmark_weights_31_csv')
+        baw200.connect(buildTemplateIteration2, 'outputspec.template', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.template_t1')
+        baw200.connect(MakeNewAtlasTemplateNode, 'outAtlasFullPath', PHASE_2_oneSubjWorkflow[sessionid], 'inputspec.atlasDefinition')

         ### Now define where the final organized outputs should go.
-        BASIC_DataSink[sessionid]=pe.Node(nio.DataSink(),name="BASIC_DS_"+str(subjectid)+"_"+str(sessionid))
-        BASIC_DataSink[sessionid].overwrite=GLOBAL_DATA_SINK_REWRITE
-        BASIC_DataSink[sessionid].inputs.base_directory=ExperimentBaseDirectoryResults
-        BASIC_DataSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'ACPCAlign')
-
-        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.outputLandmarksInACPCAlignedSpace',BASIC_DataSink[sessionid],'ACPCAlign.@outputLandmarksInACPCAlignedSpace')
-        #baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.BCD_ACPC_T1',BASIC_DataSink[sessionid],'ACPCAlign.@BCD_ACPC_T1')
-        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.BCD_ACPC_T1_CROPPED',BASIC_DataSink[sessionid],'ACPCAlign.@BCD_ACPC_T1_CROPPED')
-        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.outputLandmarksInInputSpace',BASIC_DataSink[sessionid],'ACPCAlign.@outputLandmarksInInputSpace')
-        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.outputTransform',BASIC_DataSink[sessionid],'ACPCAlign.@outputTransform')
-        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.LMIatlasToSubjectTransform',BASIC_DataSink[sessionid],'ACPCAlign.@LMIatlasToSubjectTransform')
-        #baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.TissueClassifyatlasToSubjectTransform',BASIC_DataSink[sessionid],'ACPCAlign.@TissueClassifyatlasToSubjectTransform')
-
-        currentFixWMPartitioningName='FixWMPartitioning_'+str(subjectid)+"_"+str(sessionid)
+        BASIC_DataSink[sessionid] = pe.Node(nio.DataSink(), name="BASIC_DS_" + str(subjectid) + "_" + str(sessionid))
+        BASIC_DataSink[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE
+        BASIC_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
+        BASIC_DataSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid, 'ACPCAlign')
+
+        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLandmarksInACPCAlignedSpace', BASIC_DataSink[sessionid], 'ACPCAlign.@outputLandmarksInACPCAlignedSpace')
+        # baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.BCD_ACPC_T1',BASIC_DataSink[sessionid],'ACPCAlign.@BCD_ACPC_T1')
+        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.BCD_ACPC_T1_CROPPED', BASIC_DataSink[sessionid], 'ACPCAlign.@BCD_ACPC_T1_CROPPED')
+        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLandmarksInInputSpace', BASIC_DataSink[sessionid], 'ACPCAlign.@outputLandmarksInInputSpace')
+        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputTransform', BASIC_DataSink[sessionid], 'ACPCAlign.@outputTransform')
+        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.LMIatlasToSubjectTransform', BASIC_DataSink[sessionid], 'ACPCAlign.@LMIatlasToSubjectTransform')
+        # baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.TissueClassifyatlasToSubjectTransform',BASIC_DataSink[sessionid],'ACPCAlign.@TissueClassifyatlasToSubjectTransform')

+        currentFixWMPartitioningName = 'FixWMPartitioning_' + str(subjectid) + "_" + str(sessionid)
         FixWMPartitioningNode[sessionid] = pe.Node(interface=Function(function=FixWMPartitioning,
-                                                                      input_names=['brainMask','PosteriorsList'],
-                                                                      output_names=['UpdatedPosteriorsList','MatchingFGCodeList','MatchingLabelList','nonAirRegionMask']),
+                                                                      input_names=['brainMask', 'PosteriorsList'],
+                                                                      output_names=['UpdatedPosteriorsList', 'MatchingFGCodeList', 'MatchingLabelList', 'nonAirRegionMask']),
                                                    name=currentFixWMPartitioningName)
-        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLabels',FixWMPartitioningNode[sessionid],'brainMask')
-        baw200.connect( [ ( PHASE_2_oneSubjWorkflow[sessionid], FixWMPartitioningNode[sessionid],
-                            [ ( ( 'outputspec.posteriorImages', UnwrapPosteriorImagesFromDictionaryFunction ), 'PosteriorsList')] ) ] )
+        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLabels', FixWMPartitioningNode[sessionid], 'brainMask')
+        baw200.connect([(PHASE_2_oneSubjWorkflow[sessionid], FixWMPartitioningNode[sessionid],
+                         [(('outputspec.posteriorImages', UnwrapPosteriorImagesFromDictionaryFunction), 'PosteriorsList')])])

-        currentBRAINSCreateLabelMapFromProbabilityMapsName='BRAINSCreateLabelMapFromProbabilityMaps_'+str(subjectid)+"_"+str(sessionid)
+        currentBRAINSCreateLabelMapFromProbabilityMapsName = 'BRAINSCreateLabelMapFromProbabilityMaps_' + str(subjectid) + "_" + str(sessionid)
         BRAINSCreateLabelMapFromProbabilityMapsNode[sessionid] = pe.Node(interface=BRAINSCreateLabelMapFromProbabilityMaps(), name=currentBRAINSCreateLabelMapFromProbabilityMapsName)
-        baw200.connect(FixWMPartitioningNode[sessionid],'UpdatedPosteriorsList',BRAINSCreateLabelMapFromProbabilityMapsNode[sessionid],'inputProbabilityVolume')
-        baw200.connect(FixWMPartitioningNode[sessionid],'MatchingFGCodeList',BRAINSCreateLabelMapFromProbabilityMapsNode[sessionid],'foregroundPriors')
-        baw200.connect(FixWMPartitioningNode[sessionid],'MatchingLabelList',BRAINSCreateLabelMapFromProbabilityMapsNode[sessionid],'priorLabelCodes')
-        baw200.connect(FixWMPartitioningNode[sessionid],'nonAirRegionMask',BRAINSCreateLabelMapFromProbabilityMapsNode[sessionid],'nonAirRegionMask')
+        baw200.connect(FixWMPartitioningNode[sessionid], 'UpdatedPosteriorsList', BRAINSCreateLabelMapFromProbabilityMapsNode[sessionid], 'inputProbabilityVolume')
+        baw200.connect(FixWMPartitioningNode[sessionid], 'MatchingFGCodeList', BRAINSCreateLabelMapFromProbabilityMapsNode[sessionid], 'foregroundPriors')
+        baw200.connect(FixWMPartitioningNode[sessionid], 'MatchingLabelList', BRAINSCreateLabelMapFromProbabilityMapsNode[sessionid], 'priorLabelCodes')
+        baw200.connect(FixWMPartitioningNode[sessionid], 'nonAirRegionMask', BRAINSCreateLabelMapFromProbabilityMapsNode[sessionid], 'nonAirRegionMask')

         ## TODO: Fix the file names
         BRAINSCreateLabelMapFromProbabilityMapsNode[sessionid].inputs.dirtyLabelVolume = 'fixed_headlabels_seg.nii.gz'
@@ -609,8 +613,8 @@ def WorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, ExperimentBas
         ### Now define where the final organized outputs should go.
         ### Now define where the final organized outputs should go.
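
The nested-list connect form in the hunk above is nipype's on-the-fly transform idiom: the tuple (('outputspec.posteriorImages', UnwrapPosteriorImagesFromDictionaryFunction), 'PosteriorsList') routes the source output through the named function before it reaches the destination input. A minimal sketch of the same pattern, using a hypothetical workflow wf and nodes src/dst (names not taken from this file):

    def values_only(d):
        # runs on src's 'out_dict' output before it is handed to dst's 'in_list'
        return list(d.values())

    wf.connect([(src, dst, [(('out_dict', values_only), 'in_list')])])
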
-        TC_DataSink[sessionid] = pe.Node(nio.DataSink(), name="TISSUE_CLASSIFY_DS_"+str(subjectid)+"_"+str(sessionid))
-        TC_DataSink[sessionid].overwrite=GLOBAL_DATA_SINK_REWRITE
+        TC_DataSink[sessionid] = pe.Node(nio.DataSink(), name="TISSUE_CLASSIFY_DS_" + str(subjectid) + "_" + str(sessionid))
+        TC_DataSink[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE
         TC_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
         TC_DataSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid, 'TissueClassify')
@@ -619,378 +623,373 @@ def WorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, ExperimentBas
         from PipeLineFunctionHelpers import makeListOfValidImages
         if len(global_AllT1s[sessionid]) > 0:
-            baw200.connect( [ ( PHASE_2_oneSubjWorkflow[sessionid], TC_DataSink[sessionid], [ ( ( 'outputspec.t1_average', makeListOfValidImages ), 'TissueClassify.@t1_average' ) ] ) ] )
+            baw200.connect([(PHASE_2_oneSubjWorkflow[sessionid], TC_DataSink[sessionid], [(('outputspec.t1_average', makeListOfValidImages), 'TissueClassify.@t1_average')])])
         if len(global_AllT2s[sessionid]) > 0:
             print "XXXXYYYY {0}".format(global_AllT2s[sessionid])
-            baw200.connect( [ ( PHASE_2_oneSubjWorkflow[sessionid], TC_DataSink[sessionid], [ ( ( 'outputspec.t2_average', makeListOfValidImages ), 'TissueClassify.@t2_average' ) ] ) ] )
+            baw200.connect([(PHASE_2_oneSubjWorkflow[sessionid], TC_DataSink[sessionid], [(('outputspec.t2_average', makeListOfValidImages), 'TissueClassify.@t2_average')])])
         if len(global_AllPDs[sessionid]) > 0:
-            baw200.connect( [ ( PHASE_2_oneSubjWorkflow[sessionid], TC_DataSink[sessionid], [ ( ( 'outputspec.pd_average', makeListOfValidImages ), 'TissueClassify.@pd_average' ) ] ) ] )
+            baw200.connect([(PHASE_2_oneSubjWorkflow[sessionid], TC_DataSink[sessionid], [(('outputspec.pd_average', makeListOfValidImages), 'TissueClassify.@pd_average')])])
         if len(global_AllFLs[sessionid]) > 0:
-            baw200.connect( [ ( PHASE_2_oneSubjWorkflow[sessionid], TC_DataSink[sessionid], [ ( ( 'outputspec.fl_average', makeListOfValidImages ), 'TissueClassify.@fl_average' ) ] ) ] )
+            baw200.connect([(PHASE_2_oneSubjWorkflow[sessionid], TC_DataSink[sessionid], [(('outputspec.fl_average', makeListOfValidImages), 'TissueClassify.@fl_average')])])
         baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.TissueClassifyatlasToSubjectTransform', TC_DataSink[sessionid], 'TissueClassify.@atlasToSubjectTransform')
         baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.TissueClassifyatlasToSubjectInverseTransform', TC_DataSink[sessionid], 'TissueClassify.@atlasToSubjectInverseTransform')
-        baw200.connect( FixWMPartitioningNode[sessionid], 'UpdatedPosteriorsList',TC_DataSink[sessionid],'TissueClassify.@posteriors')
+        baw200.connect(FixWMPartitioningNode[sessionid], 'UpdatedPosteriorsList', TC_DataSink[sessionid], 'TissueClassify.@posteriors')

         ### Now clean up by adding together many of the items PHASE_2_oneSubjWorkflow
-        currentAccumulateLikeTissuePosteriorsName='AccumulateLikeTissuePosteriors_'+str(subjectid)+"_"+str(sessionid)
+        currentAccumulateLikeTissuePosteriorsName = 'AccumulateLikeTissuePosteriors_' + str(subjectid) + "_" + str(sessionid)
         AccumulateLikeTissuePosteriorsNode[sessionid] = pe.Node(interface=Function(function=AccumulateLikeTissuePosteriors,
                                                                                    input_names=['posteriorImages'],
-                                                                                   output_names=['AccumulatePriorsList','AccumulatePriorsNames']),
+                                                                                   output_names=['AccumulatePriorsList', 'AccumulatePriorsNames']),
                                                                 name=currentAccumulateLikeTissuePosteriorsName)
-        baw200.connect( FixWMPartitioningNode[sessionid],'UpdatedPosteriorsList', AccumulateLikeTissuePosteriorsNode[sessionid],'posteriorImages')
+        baw200.connect(FixWMPartitioningNode[sessionid], 'UpdatedPosteriorsList', AccumulateLikeTissuePosteriorsNode[sessionid], 'posteriorImages')

         ### Now define where the final organized outputs should go.
-        AddLikeTissueSink[sessionid]=pe.Node(nio.DataSink(),name="ACCUMULATED_POSTERIORS_"+str(subjectid)+"_"+str(sessionid))
-        AddLikeTissueSink[sessionid].inputs.base_directory=ExperimentBaseDirectoryResults
-        #AddLikeTissueSink[sessionid].inputs.regexp_substitutions = GenerateAccumulatorImagesOutputPattern(projectid, subjectid, sessionid)
-        AddLikeTissueSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'ACCUMULATED_POSTERIORS')
-        baw200.connect(AccumulateLikeTissuePosteriorsNode[sessionid], 'AccumulatePriorsList', AddLikeTissueSink[sessionid],'ACCUMULATED_POSTERIORS.@AccumulateLikeTissuePosteriorsOutputDir')
-
-        ClipT1ImageWithBrainMaskNode=dict()
-        AtlasToSubjectantsRegistration=dict()
-        AntsLabelWarpToSubject=dict()
-        AntsLabelWarpedToSubject_DS=dict()
-        myLocalSegWF=dict()
-        SEGMENTATION_DataSink=dict()
-        myLocalFSWF=dict()
-        FSPREP_DataSink=dict()
-        FS_DS=dict()
-
-        MergeStage2AverageImages=dict()
-        MergeStage2BinaryVolumes=dict()
-        SnapShotWriter=dict()
-
-        MergeSessionSubjectToAtlas=dict()
-        MergeMultiLabelSessionSubjectToAtlas=dict()
-        LinearSubjectToAtlasANTsApplyTransforms=dict()
-        MultiLabelSubjectToAtlasANTsApplyTransforms=dict()
-        Subj2Atlas_DS=dict()
-
-        if 'SEGMENTATION' in WORKFLOW_COMPONENTS: ## Run the ANTS Registration from Atlas to Subject for BCut spatial priors propagation.
+        AddLikeTissueSink[sessionid] = pe.Node(nio.DataSink(), name="ACCUMULATED_POSTERIORS_" + str(subjectid) + "_" + str(sessionid))
+        AddLikeTissueSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
+        # AddLikeTissueSink[sessionid].inputs.regexp_substitutions = GenerateAccumulatorImagesOutputPattern(projectid, subjectid, sessionid)
+        AddLikeTissueSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid, 'ACCUMULATED_POSTERIORS')
+        baw200.connect(AccumulateLikeTissuePosteriorsNode[sessionid], 'AccumulatePriorsList', AddLikeTissueSink[sessionid], 'ACCUMULATED_POSTERIORS.@AccumulateLikeTissuePosteriorsOutputDir')
+
+        ClipT1ImageWithBrainMaskNode = dict()
+        AtlasToSubjectantsRegistration = dict()
+        AntsLabelWarpToSubject = dict()
+        AntsLabelWarpedToSubject_DS = dict()
+        myLocalSegWF = dict()
+        SEGMENTATION_DataSink = dict()
+        myLocalFSWF = dict()
+        FSPREP_DataSink = dict()
+        FS_DS = dict()
+
+        MergeStage2AverageImages = dict()
+        MergeStage2BinaryVolumes = dict()
+        SnapShotWriter = dict()
+
+        MergeSessionSubjectToAtlas = dict()
+        MergeMultiLabelSessionSubjectToAtlas = dict()
+        LinearSubjectToAtlasANTsApplyTransforms = dict()
+        MultiLabelSubjectToAtlasANTsApplyTransforms = dict()
+        Subj2Atlas_DS = dict()
+
+        if 'SEGMENTATION' in WORKFLOW_COMPONENTS:  # Run the ANTS Registration from Atlas to Subject for BCut spatial priors propagation.
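
The SEGMENTATION branch below drives a two-stage ANTs registration (Affine, then SyN). In nipype's Registration interface every per-stage option is a parallel list: element 0 configures the Affine stage, element 1 the SyN stage, and multi-resolution options carry one inner entry per pyramid level of that stage. A minimal sketch of the pattern, assuming only the Registration interface imported in this branch; the values mirror the ones set below:

    reg = Registration()
    reg.inputs.transforms = ['Affine', 'SyN']                # stage 0, stage 1
    reg.inputs.metric = ['Mattes', 'CC']                     # one metric per stage
    reg.inputs.shrink_factors = [[4, 2, 1], [6, 4, 2, 1]]    # 3 levels, then 4 levels
    reg.inputs.smoothing_sigmas = [[4, 2, 0], [6, 4, 2, 0]]  # must match the level counts
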
             import PipeLineFunctionHelpers

            ## Second clip to brain tissue region
            ### Now clean up by adding together many of the items PHASE_2_oneSubjWorkflow
-            currentClipT1ImageWithBrainMaskName='ClipT1ImageWithBrainMask_'+str(subjectid)+"_"+str(sessionid)
+            currentClipT1ImageWithBrainMaskName = 'ClipT1ImageWithBrainMask_' + str(subjectid) + "_" + str(sessionid)
             ClipT1ImageWithBrainMaskNode[sessionid] = pe.Node(interface=Function(function=PipeLineFunctionHelpers.ClipT1ImageWithBrainMask,
-                                                                                 input_names=['t1_image','brain_labels','clipped_file_name'],
+                                                                                 input_names=['t1_image', 'brain_labels', 'clipped_file_name'],
                                                                                  output_names=['clipped_file']),
                                                               name=currentClipT1ImageWithBrainMaskName)
             ClipT1ImageWithBrainMaskNode[sessionid].inputs.clipped_file_name = 'clipped_from_BABC_labels_t1.nii.gz'
-            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.t1_average',ClipT1ImageWithBrainMaskNode[sessionid],'t1_image')
-            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.outputLabels',ClipT1ImageWithBrainMaskNode[sessionid],'brain_labels')
+            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', ClipT1ImageWithBrainMaskNode[sessionid], 't1_image')
+            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLabels', ClipT1ImageWithBrainMaskNode[sessionid], 'brain_labels')

-            from nipype.interfaces.ants import ( Registration, ApplyTransforms)
-            currentAtlasToSubjectantsRegistration='AtlasToSubjectantsRegistration_'+str(subjectid)+"_"+str(sessionid)
-            AtlasToSubjectantsRegistration[sessionid]=pe.Node(interface=Registration(), name = currentAtlasToSubjectantsRegistration)
+            from nipype.interfaces.ants import (Registration, ApplyTransforms)
+            currentAtlasToSubjectantsRegistration = 'AtlasToSubjectantsRegistration_' + str(subjectid) + "_" + str(sessionid)
+            AtlasToSubjectantsRegistration[sessionid] = pe.Node(interface=Registration(), name=currentAtlasToSubjectantsRegistration)
             AtlasToSubjectantsRegistration[sessionid].inputs.dimension = 3
-            AtlasToSubjectantsRegistration[sessionid].inputs.transforms = ["Affine", "SyN"]
-            AtlasToSubjectantsRegistration[sessionid].inputs.transform_parameters = [[0.1], [0.15,3.0,0.0]]
-            AtlasToSubjectantsRegistration[sessionid].inputs.metric = ['Mattes', 'CC']
-            AtlasToSubjectantsRegistration[sessionid].inputs.sampling_strategy = ['Regular', None]
-            AtlasToSubjectantsRegistration[sessionid].inputs.sampling_percentage = [ 0.1, 1.0]
-            AtlasToSubjectantsRegistration[sessionid].inputs.metric_weight = [ 1.0, 1.0]
-            AtlasToSubjectantsRegistration[sessionid].inputs.radius_or_number_of_bins = [ 32, 4]
-            AtlasToSubjectantsRegistration[sessionid].inputs.number_of_iterations = [ [1000, 1000, 1000], [10000,500,500,200]]
-            AtlasToSubjectantsRegistration[sessionid].inputs.convergence_threshold = [ 1e-9, 1e-9]
-            AtlasToSubjectantsRegistration[sessionid].inputs.convergence_window_size = [ 15, 15]
-            AtlasToSubjectantsRegistration[sessionid].inputs.use_histogram_matching = [ True, True]
-            AtlasToSubjectantsRegistration[sessionid].inputs.shrink_factors = [ [4,2,1], [6,4,2,1]]
-            AtlasToSubjectantsRegistration[sessionid].inputs.smoothing_sigmas = [ [4,2,0], [6,4,2,0]]
-            AtlasToSubjectantsRegistration[sessionid].inputs.use_estimate_learning_rate_once = [False, False]
-            AtlasToSubjectantsRegistration[sessionid].inputs.write_composite_transform=True
-            AtlasToSubjectantsRegistration[sessionid].inputs.collapse_output_transforms=True
+            AtlasToSubjectantsRegistration[sessionid].inputs.transforms = ["Affine", "SyN"]
+            AtlasToSubjectantsRegistration[sessionid].inputs.transform_parameters = [[0.1], [0.15, 3.0, 0.0]]
+            AtlasToSubjectantsRegistration[sessionid].inputs.metric = ['Mattes', 'CC']
+            AtlasToSubjectantsRegistration[sessionid].inputs.sampling_strategy = ['Regular', None]
+            AtlasToSubjectantsRegistration[sessionid].inputs.sampling_percentage = [0.1, 1.0]
+            AtlasToSubjectantsRegistration[sessionid].inputs.metric_weight = [1.0, 1.0]
+            AtlasToSubjectantsRegistration[sessionid].inputs.radius_or_number_of_bins = [32, 4]
+            AtlasToSubjectantsRegistration[sessionid].inputs.number_of_iterations = [[1000, 1000, 1000], [10000, 500, 500, 200]]
+            AtlasToSubjectantsRegistration[sessionid].inputs.convergence_threshold = [1e-9, 1e-9]
+            AtlasToSubjectantsRegistration[sessionid].inputs.convergence_window_size = [15, 15]
+            AtlasToSubjectantsRegistration[sessionid].inputs.use_histogram_matching = [True, True]
+            AtlasToSubjectantsRegistration[sessionid].inputs.shrink_factors = [[4, 2, 1], [6, 4, 2, 1]]
+            AtlasToSubjectantsRegistration[sessionid].inputs.smoothing_sigmas = [[4, 2, 0], [6, 4, 2, 0]]
+            AtlasToSubjectantsRegistration[sessionid].inputs.use_estimate_learning_rate_once = [False, False]
+            AtlasToSubjectantsRegistration[sessionid].inputs.write_composite_transform = True
+            AtlasToSubjectantsRegistration[sessionid].inputs.collapse_output_transforms = True
             AtlasToSubjectantsRegistration[sessionid].inputs.output_transform_prefix = 'AtlasToSubject_'
             AtlasToSubjectantsRegistration[sessionid].inputs.winsorize_lower_quantile = 0.025
             AtlasToSubjectantsRegistration[sessionid].inputs.winsorize_upper_quantile = 0.975
             AtlasToSubjectantsRegistration[sessionid].inputs.collapse_linear_transforms_to_fixed_image_header = False
             AtlasToSubjectantsRegistration[sessionid].inputs.output_warped_image = 'atlas2subject.nii.gz'
             AtlasToSubjectantsRegistration[sessionid].inputs.output_inverse_warped_image = 'subject2atlas.nii.gz'
-            AtlasToSubjectantsRegistration[sessionid].plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 4-8 -l mem_free=9000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE_LONG), 'overwrite': True}
-
-            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.t1_average', AtlasToSubjectantsRegistration[sessionid], 'fixed_image')
-            baw200.connect(BAtlas[subjectid],'template_t1',AtlasToSubjectantsRegistration[sessionid], 'moving_image')
-            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.LMIatlasToSubjectTransform',AtlasToSubjectantsRegistration[sessionid],'initial_moving_transform')
-            #baw200.connect(BAtlas[subjectid],'template_t1_clipped',AtlasToSubjectantsRegistration[sessionid], 'moving_image')
-            #baw200.connect(ClipT1ImageWithBrainMaskNode[sessionid], 'clipped_file', AtlasToSubjectantsRegistration[sessionid], 'fixed_image')
-
-            global_AllT1s[sessionid]=ExperimentDatabase.getFilenamesByScantype(sessionid,['T1-30','T1-15'])
-            global_AllT2s[sessionid]=ExperimentDatabase.getFilenamesByScantype(sessionid,['T2-30','T2-15'])
-            global_AllPDs[sessionid]=ExperimentDatabase.getFilenamesByScantype(sessionid,['PD-30','PD-15'])
-            global_AllFLs[sessionid]=ExperimentDatabase.getFilenamesByScantype(sessionid,['FL-30','FL-15'])
-            global_AllOthers[sessionid]=ExperimentDatabase.getFilenamesByScantype(sessionid,['OTHER-30','OTHER-15'])
-            print("HACK2: all T1s: {0} {1}".format(global_AllT1s[sessionid], len(global_AllT1s[sessionid]) ))
-            print("HACK2: all T2s: {0} {1}".format(global_AllT2s[sessionid], len(global_AllT2s[sessionid]) ))
-            print("HACK2: all PDs: {0} {1}".format(global_AllPDs[sessionid], len(global_AllPDs[sessionid]) ))
-            print("HACK2: all FLs: {0} {1}".format(global_AllFLs[sessionid], len(global_AllFLs[sessionid]) ))
-            print("HACK2: all Others: {0} {1}".format(global_AllOthers[sessionid], len(global_AllOthers[sessionid]) ))
-            if ( 'SEGMENTATION' in WORKFLOW_COMPONENTS ) : # Currently only works with multi-modal_data
+            AtlasToSubjectantsRegistration[sessionid].plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 4-8 -l mem_free=9000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE_LONG), 'overwrite': True}
+
+            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', AtlasToSubjectantsRegistration[sessionid], 'fixed_image')
+            baw200.connect(BAtlas[subjectid], 'template_t1', AtlasToSubjectantsRegistration[sessionid], 'moving_image')
+            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.LMIatlasToSubjectTransform', AtlasToSubjectantsRegistration[sessionid], 'initial_moving_transform')
+            # baw200.connect(BAtlas[subjectid],'template_t1_clipped',AtlasToSubjectantsRegistration[sessionid], 'moving_image')
+            # baw200.connect(ClipT1ImageWithBrainMaskNode[sessionid], 'clipped_file', AtlasToSubjectantsRegistration[sessionid], 'fixed_image')
+
+            global_AllT1s[sessionid] = ExperimentDatabase.getFilenamesByScantype(sessionid, ['T1-30', 'T1-15'])
+            global_AllT2s[sessionid] = ExperimentDatabase.getFilenamesByScantype(sessionid, ['T2-30', 'T2-15'])
+            global_AllPDs[sessionid] = ExperimentDatabase.getFilenamesByScantype(sessionid, ['PD-30', 'PD-15'])
+            global_AllFLs[sessionid] = ExperimentDatabase.getFilenamesByScantype(sessionid, ['FL-30', 'FL-15'])
+            global_AllOthers[sessionid] = ExperimentDatabase.getFilenamesByScantype(sessionid, ['OTHER-30', 'OTHER-15'])
+            print("HACK2: all T1s: {0} {1}".format(global_AllT1s[sessionid], len(global_AllT1s[sessionid])))
+            print("HACK2: all T2s: {0} {1}".format(global_AllT2s[sessionid], len(global_AllT2s[sessionid])))
+            print("HACK2: all PDs: {0} {1}".format(global_AllPDs[sessionid], len(global_AllPDs[sessionid])))
+            print("HACK2: all FLs: {0} {1}".format(global_AllFLs[sessionid], len(global_AllFLs[sessionid])))
+            print("HACK2: all Others: {0} {1}".format(global_AllOthers[sessionid], len(global_AllOthers[sessionid])))
+            if ('SEGMENTATION' in WORKFLOW_COMPONENTS):  # Currently only works with multi-modal_data
                 print("HACK SEGMENTATION IN WORKFLOW_COMPONENTS {0}".format(WORKFLOW_COMPONENTS))
-                if ( len(global_AllT2s[sessionid]) > 0 ): # Currently only works with multi-modal_data
-                    print("HACK len(global_AllT2s[sessionid]) > 0 : {0}".format(len(global_AllT2s[sessionid]) ))
+                if (len(global_AllT2s[sessionid]) > 0):  # Currently only works with multi-modal_data
+                    print("HACK len(global_AllT2s[sessionid]) > 0 : {0}".format(len(global_AllT2s[sessionid])))
                     print("HACK")
-                    if ( 'SEGMENTATION' in WORKFLOW_COMPONENTS ):
+                    if ('SEGMENTATION' in WORKFLOW_COMPONENTS):
                         from WorkupT1T2BRAINSCut import CreateBRAINSCutWorkflow
-                        t1Only = not( len(global_AllT2s[sessionid]) > 0 )
-                        myLocalSegWF[sessionid] = CreateBRAINSCutWorkflow(projectid, subjectid, sessionid,'Segmentation',
-                                                                          CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,BAtlas[subjectid], t1Only) ##Note: Passing in the entire BAtlas Object here!
-
-                        baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', myLocalSegWF[sessionid], "inputspec.T1Volume" )
+                        t1Only = not(len(global_AllT2s[sessionid]) > 0)
+                        myLocalSegWF[sessionid] = CreateBRAINSCutWorkflow(projectid, subjectid, sessionid, 'Segmentation',
+                                                                          CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, BAtlas[subjectid], t1Only)  # Note: Passing in the entire BAtlas Object here!
-                        if ( len(global_AllT2s[sessionid]) > 0 ):
-                            baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t2_average', myLocalSegWF[sessionid], "inputspec.T2Volume")
+                        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', myLocalSegWF[sessionid], "inputspec.T1Volume")
-                        baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLabels', myLocalSegWF[sessionid],"inputspec.RegistrationROI")
+                        if (len(global_AllT2s[sessionid]) > 0):
+                            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t2_average', myLocalSegWF[sessionid], "inputspec.T2Volume")
+
+                        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLabels', myLocalSegWF[sessionid], "inputspec.RegistrationROI")
                         ## NOTE: Element 0 of AccumulatePriorsList is the accumulated GM tissue
-                        baw200.connect( [ ( AccumulateLikeTissuePosteriorsNode[sessionid], myLocalSegWF[sessionid],
-                                            [ (( 'AccumulatePriorsList', getListIndex, 0 ), "inputspec.TotalGM")] ),
-                                        ] )
+                        baw200.connect([(AccumulateLikeTissuePosteriorsNode[sessionid], myLocalSegWF[sessionid],
+                                         [(('AccumulatePriorsList', getListIndex, 0), "inputspec.TotalGM")]),
+                                        ])
-                        baw200.connect( AtlasToSubjectantsRegistration[sessionid],'composite_transform',myLocalSegWF[sessionid],'inputspec.atlasToSubjectTransform')
+                        baw200.connect(AtlasToSubjectantsRegistration[sessionid], 'composite_transform', myLocalSegWF[sessionid], 'inputspec.atlasToSubjectTransform')

                         ### Now define where the final organized outputs should go.
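
A note on the DataSink configured just below: its substitutions are ordered plain-string replacements applied to every path the sink writes. Under the three pairs set here, a hypothetical output path (the filename is illustrative, not taken from the pipeline)

    Segmentations/subjectANNLabel_l_caudate.nii.gz

would be rewritten, pair by pair, to

    <projectid>/<subjectid>/<sessionid>/SingleRFSegmentations/l_caudate_seg.nii.gz
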
-                        SEGMENTATION_DataSink[sessionid]=pe.Node(nio.DataSink(),name="SEGMENTATION_DS_"+str(subjectid)+"_"+str(sessionid))
-                        SEGMENTATION_DataSink[sessionid].overwrite=GLOBAL_DATA_SINK_REWRITE
-                        SEGMENTATION_DataSink[sessionid].inputs.base_directory=ExperimentBaseDirectoryResults
-                        #SEGMENTATION_DataSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'BRAINSCut')
-                        #SEGMENTATION_DataSink[sessionid].inputs.regexp_substitutions = GenerateBRAINSCutImagesOutputPattern(projectid, subjectid, sessionid)
-                        SEGMENTATION_DataSink[sessionid].inputs.substitutions = [ ( 'Segmentations',os.path.join(projectid, subjectid, sessionid,'SingleRFSegmentations') ),
-                                                                                  ( 'subjectANNLabel_', '' ),
-                                                                                  ( '.nii.gz', '_seg.nii.gz')
+                        SEGMENTATION_DataSink[sessionid] = pe.Node(nio.DataSink(), name="SEGMENTATION_DS_" + str(subjectid) + "_" + str(sessionid))
+                        SEGMENTATION_DataSink[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE
+                        SEGMENTATION_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
+                        # SEGMENTATION_DataSink[sessionid].inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'BRAINSCut')
+                        # SEGMENTATION_DataSink[sessionid].inputs.regexp_substitutions = GenerateBRAINSCutImagesOutputPattern(projectid, subjectid, sessionid)
+                        SEGMENTATION_DataSink[sessionid].inputs.substitutions = [('Segmentations', os.path.join(projectid, subjectid, sessionid, 'SingleRFSegmentations')),
+                                                                                 ('subjectANNLabel_', ''),
+                                                                                 ('.nii.gz', '_seg.nii.gz')
                                                                                  ]
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftCaudate',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftCaudate')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightCaudate',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightCaudate')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftHippocampus',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftHippocampus')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightHippocampus',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightHippocampus')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftPutamen',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftPutamen')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightPutamen',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightPutamen')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftThalamus',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftThalamus')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightThalamus',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightThalamus')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftAccumben',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftAccumben')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightAccumben',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightAccumben')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftGlobus',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftGlobus')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightGlobus',SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightGlobus')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputLabelImageName', SEGMENTATION_DataSink[sessionid],'Segmentations.@outputLabelImageName')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputCSVFileName', SEGMENTATION_DataSink[sessionid],'Segmentations.@outputCSVFileName')
-
-                        MergeStage2BinaryVolumesName="99_MergeStage2BinaryVolumes_"+str(sessionid)
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftCaudate', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftCaudate')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightCaudate', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightCaudate')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftHippocampus', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftHippocampus')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightHippocampus', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightHippocampus')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftPutamen', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftPutamen')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightPutamen', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightPutamen')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftThalamus', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftThalamus')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightThalamus', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightThalamus')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftAccumben', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftAccumben')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightAccumben', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightAccumben')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftGlobus', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryLeftGlobus')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightGlobus', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputBinaryRightGlobus')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputLabelImageName', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputLabelImageName')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputCSVFileName', SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputCSVFileName')
+
+                        MergeStage2BinaryVolumesName = "99_MergeStage2BinaryVolumes_" + str(sessionid)
                         MergeStage2BinaryVolumes[sessionid] = pe.Node(interface=Merge(12), run_without_submitting=True, name=MergeStage2BinaryVolumesName)
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftAccumben', MergeStage2BinaryVolumes[sessionid], 'in1')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftCaudate', MergeStage2BinaryVolumes[sessionid], 'in2')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftPutamen', MergeStage2BinaryVolumes[sessionid], 'in3')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftGlobus', MergeStage2BinaryVolumes[sessionid], 'in4')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftThalamus', MergeStage2BinaryVolumes[sessionid], 'in5')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftHippocampus', MergeStage2BinaryVolumes[sessionid], 'in6')
-
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightAccumben', MergeStage2BinaryVolumes[sessionid], 'in7')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightCaudate', MergeStage2BinaryVolumes[sessionid], 'in8')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightPutamen', MergeStage2BinaryVolumes[sessionid], 'in9')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightGlobus', MergeStage2BinaryVolumes[sessionid], 'in10')
-                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightThalamus', MergeStage2BinaryVolumes[sessionid], 'in11')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftAccumben', MergeStage2BinaryVolumes[sessionid], 'in1')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftCaudate', MergeStage2BinaryVolumes[sessionid], 'in2')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftPutamen', MergeStage2BinaryVolumes[sessionid], 'in3')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftGlobus', MergeStage2BinaryVolumes[sessionid], 'in4')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftThalamus', MergeStage2BinaryVolumes[sessionid], 'in5')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftHippocampus', MergeStage2BinaryVolumes[sessionid], 'in6')
+
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightAccumben', MergeStage2BinaryVolumes[sessionid], 'in7')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightCaudate', MergeStage2BinaryVolumes[sessionid], 'in8')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightPutamen', MergeStage2BinaryVolumes[sessionid], 'in9')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightGlobus', MergeStage2BinaryVolumes[sessionid], 'in10')
+                        baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightThalamus', MergeStage2BinaryVolumes[sessionid], 'in11')
                         baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightHippocampus', MergeStage2BinaryVolumes[sessionid], 'in12')

-                        MergeStage2AverageImagesName="99_mergeAvergeStage2Images_"+str(sessionid)
+                        MergeStage2AverageImagesName = "99_mergeAvergeStage2Images_" + str(sessionid)
                         MergeStage2AverageImages[sessionid] = pe.Node(interface=Merge(2), run_without_submitting=True, name=MergeStage2AverageImagesName)
-                        baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', MergeStage2AverageImages[sessionid], 'in1')
-                        if ( len(global_AllT2s[sessionid]) > 0 ):
-                            baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t2_average', MergeStage2AverageImages[sessionid], 'in2')
-
+                        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', MergeStage2AverageImages[sessionid], 'in1')
+                        if (len(global_AllT2s[sessionid]) > 0):
+                            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t2_average', MergeStage2AverageImages[sessionid], 'in2')
                        ## SnapShotWriter[sessionid] for Segmented result checking:
-                        SnapShotWriterNodeName="SnapShotWriter_"+str(sessionid)
-                        SnapShotWriter[sessionid]=pe.Node( interface=BRAINSSnapShotWriter(), name=SnapShotWriterNodeName)
+                        SnapShotWriterNodeName = "SnapShotWriter_" + str(sessionid)
+                        SnapShotWriter[sessionid] = pe.Node(interface=BRAINSSnapShotWriter(), name=SnapShotWriterNodeName)
                         ## output specification
-                        SnapShotWriter[sessionid].inputs.outputFilename = 'snapShot'+str(sessionid)+'.png'
+                        SnapShotWriter[sessionid].inputs.outputFilename = 'snapShot' + str(sessionid) + '.png'
                         ## neccessary parameters (FIXED)
-                        SnapShotWriter[sessionid].inputs.inputPlaneDirection = [2,1,1,1,1,0,0]
-                        SnapShotWriter[sessionid].inputs.inputSliceToExtractInPhysicalPoint = [-3,-7,-3,5,7,22,-22]
+                        SnapShotWriter[sessionid].inputs.inputPlaneDirection = [2, 1, 1, 1, 1, 0, 0]
+                        SnapShotWriter[sessionid].inputs.inputSliceToExtractInPhysicalPoint = [-3, -7, -3, 5, 7, 22, -22]
                         ## connect SnapShotWriter[sessionid] to the baw200
-                        baw200.connect( MergeStage2AverageImages[sessionid], 'out', SnapShotWriter[sessionid], 'inputVolumes')
-                        baw200.connect( MergeStage2BinaryVolumes[sessionid], 'out', SnapShotWriter[sessionid], 'inputBinaryVolumes')
+                        baw200.connect(MergeStage2AverageImages[sessionid], 'out', SnapShotWriter[sessionid], 'inputVolumes')
+                        baw200.connect(MergeStage2BinaryVolumes[sessionid], 'out', SnapShotWriter[sessionid], 'inputBinaryVolumes')
                         #####
                         ### Now define where the final organized outputs should go.
-                        baw200.connect( SnapShotWriter[sessionid], 'outputFilename',
+                        baw200.connect(SnapShotWriter[sessionid], 'outputFilename',
                                        SEGMENTATION_DataSink[sessionid], 'Segmentations.@outputSnapShot')
                         #####
                         ### Nec atlas label to subject space warping (WORKING)
                         from nipype.interfaces.ants import ApplyTransforms
-                        currentAntsLabelWarpToSubject='AntsLabelWarpToSubject'+str(subjectid)+"_"+str(sessionid)
+                        currentAntsLabelWarpToSubject = 'AntsLabelWarpToSubject' + str(subjectid) + "_" + str(sessionid)
                         AntsLabelWarpToSubject[sessionid] = pe.Node(interface=ApplyTransforms(),
-                                                                    name = currentAntsLabelWarpToSubject)
-
-                        AntsLabelWarpToSubject[sessionid].inputs.dimension = 3
-                        AntsLabelWarpToSubject[sessionid].inputs.output_image = 'warped_hncma_atlas_seg.nii.gz'
-                        AntsLabelWarpToSubject[sessionid].inputs.interpolation = "MultiLabel"
-                        baw200.connect( AtlasToSubjectantsRegistration[sessionid], 'composite_transform', # check with Hans, why not sessionid???
-                                        AntsLabelWarpToSubject[sessionid], 'transforms')
-                        baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average',
-                                        AntsLabelWarpToSubject[sessionid], 'reference_image')
-                        baw200.connect( BAtlas[subjectid], 'hncma-atlas',
-                                        AntsLabelWarpToSubject[sessionid], 'input_image')
+                                                                    name=currentAntsLabelWarpToSubject)
+
+                        AntsLabelWarpToSubject[sessionid].inputs.dimension = 3
+                        AntsLabelWarpToSubject[sessionid].inputs.output_image = 'warped_hncma_atlas_seg.nii.gz'
+                        AntsLabelWarpToSubject[sessionid].inputs.interpolation = "MultiLabel"
+                        baw200.connect(AtlasToSubjectantsRegistration[sessionid], 'composite_transform', # check with Hans, why not sessionid???
+                                       AntsLabelWarpToSubject[sessionid], 'transforms')
+                        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average',
+                                       AntsLabelWarpToSubject[sessionid], 'reference_image')
+                        baw200.connect(BAtlas[subjectid], 'hncma-atlas',
+                                       AntsLabelWarpToSubject[sessionid], 'input_image')
                         #####
                         ### Now define where the final organized outputs should go.
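
The AntsLabelWarpToSubject node above deliberately uses interpolation = "MultiLabel" rather than 'Linear': the hncma atlas is an integer label map, and linearly blending label codes would manufacture meaningless in-between values at region boundaries, whereas MultiLabel assigns each output voxel a single winning label. A minimal stand-alone sketch of the same call, with hypothetical file names:

    from nipype.interfaces.ants import ApplyTransforms

    warp = ApplyTransforms()
    warp.inputs.dimension = 3
    warp.inputs.interpolation = 'MultiLabel'                  # never 'Linear' for label maps
    warp.inputs.input_image = 'hncma-atlas.nii.gz'            # hypothetical path
    warp.inputs.reference_image = 't1_average.nii.gz'         # hypothetical path
    warp.inputs.transforms = ['AtlasToSubject_Composite.h5']  # hypothetical transform file
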
-                        AntsLabelWarpedToSubject_DSName="AntsLabelWarpedToSubject_DS_"+str(sessionid)
-                        AntsLabelWarpedToSubject_DS[sessionid]=pe.Node(nio.DataSink(),name=AntsLabelWarpedToSubject_DSName)
-                        AntsLabelWarpedToSubject_DS[sessionid].overwrite=GLOBAL_DATA_SINK_REWRITE
-                        AntsLabelWarpedToSubject_DS[sessionid].inputs.base_directory=ExperimentBaseDirectoryResults
-                        AntsLabelWarpedToSubject_DS[sessionid].inputs.substitutions = [ ( 'AntsLabelWarpedToSubject',os.path.join(projectid, subjectid, sessionid,'AntsLabelWarpedToSubject') )]
-                        baw200.connect( AntsLabelWarpToSubject[sessionid], 'output_image',
+                        AntsLabelWarpedToSubject_DSName = "AntsLabelWarpedToSubject_DS_" + str(sessionid)
+                        AntsLabelWarpedToSubject_DS[sessionid] = pe.Node(nio.DataSink(), name=AntsLabelWarpedToSubject_DSName)
+                        AntsLabelWarpedToSubject_DS[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE
+                        AntsLabelWarpedToSubject_DS[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
+                        AntsLabelWarpedToSubject_DS[sessionid].inputs.substitutions = [('AntsLabelWarpedToSubject', os.path.join(projectid, subjectid, sessionid, 'AntsLabelWarpedToSubject'))]
+                        baw200.connect(AntsLabelWarpToSubject[sessionid], 'output_image',
                                        AntsLabelWarpedToSubject_DS[sessionid], 'AntsLabelWarpedToSubject')
                         #####
                         #=============================================================================================================================
                         #======== Start warping subject to atlas images
-                        MergeSessionSubjectToAtlasName="99_MergeSessionSubjectToAtlas_"+str(sessionid)
-                        if ( len(global_AllT2s[sessionid]) > 0 ):
+                        MergeSessionSubjectToAtlasName = "99_MergeSessionSubjectToAtlas_" + str(sessionid)
+                        if (len(global_AllT2s[sessionid]) > 0):
                             MergeSessionSubjectToAtlas[sessionid] = pe.Node(interface=Merge(15), run_without_submitting=True, name=MergeSessionSubjectToAtlasName)
-                            baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', MergeSessionSubjectToAtlas[sessionid], 'in1')
-                            baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t2_average', MergeSessionSubjectToAtlas[sessionid], 'in2')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftAccumben', MergeSessionSubjectToAtlas[sessionid], 'in3')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftCaudate', MergeSessionSubjectToAtlas[sessionid], 'in4')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftPutamen', MergeSessionSubjectToAtlas[sessionid], 'in5')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftGlobus', MergeSessionSubjectToAtlas[sessionid], 'in6')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftThalamus', MergeSessionSubjectToAtlas[sessionid], 'in7')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftHippocampus', MergeSessionSubjectToAtlas[sessionid], 'in8')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightAccumben', MergeSessionSubjectToAtlas[sessionid], 'in9')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightCaudate', MergeSessionSubjectToAtlas[sessionid], 'in10')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightPutamen', MergeSessionSubjectToAtlas[sessionid], 'in11')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightGlobus', MergeSessionSubjectToAtlas[sessionid], 'in12')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightThalamus', MergeSessionSubjectToAtlas[sessionid], 'in13')
+                            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', MergeSessionSubjectToAtlas[sessionid], 'in1')
+                            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t2_average', MergeSessionSubjectToAtlas[sessionid], 'in2')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftAccumben', MergeSessionSubjectToAtlas[sessionid], 'in3')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftCaudate', MergeSessionSubjectToAtlas[sessionid], 'in4')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftPutamen', MergeSessionSubjectToAtlas[sessionid], 'in5')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftGlobus', MergeSessionSubjectToAtlas[sessionid], 'in6')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftThalamus', MergeSessionSubjectToAtlas[sessionid], 'in7')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftHippocampus', MergeSessionSubjectToAtlas[sessionid], 'in8')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightAccumben', MergeSessionSubjectToAtlas[sessionid], 'in9')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightCaudate', MergeSessionSubjectToAtlas[sessionid], 'in10')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightPutamen', MergeSessionSubjectToAtlas[sessionid], 'in11')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightGlobus', MergeSessionSubjectToAtlas[sessionid], 'in12')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightThalamus', MergeSessionSubjectToAtlas[sessionid], 'in13')
                             baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightHippocampus', MergeSessionSubjectToAtlas[sessionid], 'in14')
-                            baw200.connect( FixWMPartitioningNode[sessionid],'UpdatedPosteriorsList' ,MergeSessionSubjectToAtlas[sessionid], 'in15')
+                            baw200.connect(FixWMPartitioningNode[sessionid], 'UpdatedPosteriorsList', MergeSessionSubjectToAtlas[sessionid], 'in15')
                         else:
                             MergeSessionSubjectToAtlas[sessionid] = pe.Node(interface=Merge(14), run_without_submitting=True, name=MergeSessionSubjectToAtlasName)
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftAccumben', MergeSessionSubjectToAtlas[sessionid], 'in1')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftCaudate', MergeSessionSubjectToAtlas[sessionid], 'in2')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftPutamen', MergeSessionSubjectToAtlas[sessionid], 'in3')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftGlobus', MergeSessionSubjectToAtlas[sessionid], 'in4')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftThalamus', MergeSessionSubjectToAtlas[sessionid], 'in5')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftHippocampus', MergeSessionSubjectToAtlas[sessionid], 'in6')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightAccumben', MergeSessionSubjectToAtlas[sessionid], 'in7')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightCaudate', MergeSessionSubjectToAtlas[sessionid], 'in8')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightPutamen', MergeSessionSubjectToAtlas[sessionid], 'in9')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightGlobus', MergeSessionSubjectToAtlas[sessionid], 'in10')
-                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightThalamus', MergeSessionSubjectToAtlas[sessionid], 'in11')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftAccumben', MergeSessionSubjectToAtlas[sessionid], 'in1')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftCaudate', MergeSessionSubjectToAtlas[sessionid], 'in2')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftPutamen', MergeSessionSubjectToAtlas[sessionid], 'in3')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftGlobus', MergeSessionSubjectToAtlas[sessionid], 'in4')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftThalamus', MergeSessionSubjectToAtlas[sessionid], 'in5')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryLeftHippocampus', MergeSessionSubjectToAtlas[sessionid], 'in6')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightAccumben', MergeSessionSubjectToAtlas[sessionid], 'in7')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightCaudate', MergeSessionSubjectToAtlas[sessionid], 'in8')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightPutamen', MergeSessionSubjectToAtlas[sessionid], 'in9')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightGlobus', MergeSessionSubjectToAtlas[sessionid], 'in10')
+                            baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightThalamus', MergeSessionSubjectToAtlas[sessionid], 'in11')
                             baw200.connect(myLocalSegWF[sessionid], 'outputspec.outputBinaryRightHippocampus', MergeSessionSubjectToAtlas[sessionid], 'in12')
-                            baw200.connect( FixWMPartitioningNode[sessionid],'UpdatedPosteriorsList' ,MergeSessionSubjectToAtlas[sessionid], 'in13')
-                            baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', MergeSessionSubjectToAtlas[sessionid], 'in14')
+                            baw200.connect(FixWMPartitioningNode[sessionid], 'UpdatedPosteriorsList', MergeSessionSubjectToAtlas[sessionid], 'in13')
+                            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', MergeSessionSubjectToAtlas[sessionid], 'in14')
                             ## NOTE: SKIPPING baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t2_average', MergeSessionSubjectToAtlas[sessionid], 'in2')
-
-                        LinearSubjectToAtlasANTsApplyTransformsName='LinearSubjectToAtlasANTsApplyTransforms_'+str(sessionid)
-                        LinearSubjectToAtlasANTsApplyTransforms[sessionid] = pe.MapNode(interface=ApplyTransforms(), iterfield=['input_image'],name=LinearSubjectToAtlasANTsApplyTransformsName)
-                        LinearSubjectToAtlasANTsApplyTransforms[sessionid].plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 1 -l mem_free=1000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True}
+                        LinearSubjectToAtlasANTsApplyTransformsName = 'LinearSubjectToAtlasANTsApplyTransforms_' + str(sessionid)
+                        LinearSubjectToAtlasANTsApplyTransforms[sessionid] = pe.MapNode(interface=ApplyTransforms(), iterfield=['input_image'], name=LinearSubjectToAtlasANTsApplyTransformsName)
+                        LinearSubjectToAtlasANTsApplyTransforms[sessionid].plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 1 -l mem_free=1000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True}
                         LinearSubjectToAtlasANTsApplyTransforms[sessionid].inputs.interpolation = 'Linear'
                         baw200.connect(AtlasToSubjectantsRegistration[sessionid], 'reverse_transforms', LinearSubjectToAtlasANTsApplyTransforms[sessionid], 'transforms')
                         baw200.connect(AtlasToSubjectantsRegistration[sessionid], 'reverse_invert_flags', LinearSubjectToAtlasANTsApplyTransforms[sessionid], 'invert_transform_flags')
-                        baw200.connect(BAtlas[subjectid],'template_t1', LinearSubjectToAtlasANTsApplyTransforms[sessionid], 'reference_image')
+                        baw200.connect(BAtlas[subjectid], 'template_t1', LinearSubjectToAtlasANTsApplyTransforms[sessionid], 'reference_image')
                         baw200.connect(MergeSessionSubjectToAtlas[sessionid], 'out', LinearSubjectToAtlasANTsApplyTransforms[sessionid], 'input_image')
-
-                        MergeMultiLabelSessionSubjectToAtlasName="99_MergeMultiLabelSessionSubjectToAtlas_"+str(sessionid)
+                        MergeMultiLabelSessionSubjectToAtlasName = "99_MergeMultiLabelSessionSubjectToAtlas_" + str(sessionid)
                         MergeMultiLabelSessionSubjectToAtlas[sessionid] = pe.Node(interface=Merge(2), run_without_submitting=True, name=MergeMultiLabelSessionSubjectToAtlasName)
-                        baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLabels', MergeMultiLabelSessionSubjectToAtlas[sessionid], 'in1')
-                        baw200.connect( PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputHeadLabels', MergeMultiLabelSessionSubjectToAtlas[sessionid], 'in2')
+                        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLabels', MergeMultiLabelSessionSubjectToAtlas[sessionid], 'in1')
+                        baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputHeadLabels', MergeMultiLabelSessionSubjectToAtlas[sessionid], 'in2')

                         ### This is taking this sessions RF label map back into NAC atlas space.
                         #{
-                        MultiLabelSubjectToAtlasANTsApplyTransformsName='MultiLabelSubjectToAtlasANTsApplyTransforms_'+str(sessionid)
-                        MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid] = pe.MapNode(interface=ApplyTransforms(), iterfield=['input_image'],name=MultiLabelSubjectToAtlasANTsApplyTransformsName)
-                        MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid].plugin_args={'template':SGE_JOB_SCRIPT,'qsub_args': '-S /bin/bash -pe smp1 1 -l mem_free=1000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True}
+                        MultiLabelSubjectToAtlasANTsApplyTransformsName = 'MultiLabelSubjectToAtlasANTsApplyTransforms_' + str(sessionid)
+                        MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid] = pe.MapNode(interface=ApplyTransforms(), iterfield=['input_image'], name=MultiLabelSubjectToAtlasANTsApplyTransformsName)
+                        MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid].plugin_args = {'template': SGE_JOB_SCRIPT, 'qsub_args': '-S /bin/bash -pe smp1 1 -l mem_free=1000M -o /dev/null -e /dev/null {QUEUE_OPTIONS}'.format(QUEUE_OPTIONS=CLUSTER_QUEUE), 'overwrite': True}
                         MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid].inputs.interpolation = 'MultiLabel'
                         baw200.connect(AtlasToSubjectantsRegistration[sessionid], 'reverse_transforms', MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid], 'transforms')
                         baw200.connect(AtlasToSubjectantsRegistration[sessionid], 'reverse_invert_flags', MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid], 'invert_transform_flags')
-                        baw200.connect(BAtlas[subjectid],'template_t1', MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid], 'reference_image')
+                        baw200.connect(BAtlas[subjectid], 'template_t1', MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid], 'reference_image')
                         baw200.connect(MergeMultiLabelSessionSubjectToAtlas[sessionid], 'out', MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid], 'input_image')
                         #}
                         ### Now we must take the sessions to THIS SUBJECTS personalized atlas.
                         #{
                         #}
-
                         ### Now define where the final organized outputs should go.
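
Note the economy in the subject-to-atlas resampling above: no second registration is run. The atlas-to-subject Registration node already exposes reverse_transforms and reverse_invert_flags, which list the same transform files in reverse application order together with per-transform inversion flags, and those outputs are wired directly into both ApplyTransforms MapNodes. Conceptually (file names hypothetical):

    # atlas -> subject was estimated as Affine followed by SyN, so subject -> atlas is:
    transforms = ['AtlasToSubject_InverseWarp.nii.gz', 'AtlasToSubject_Affine.mat']
    invert_transform_flags = [False, True]  # the warp ships an explicit inverse; the affine is inverted on the fly
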
- Subj2Atlas_DSName="SubjectToAtlas_DS_"+str(sessionid) - Subj2Atlas_DS[sessionid]=pe.Node(nio.DataSink(),name=Subj2Atlas_DSName) - Subj2Atlas_DS[sessionid].overwrite=GLOBAL_DATA_SINK_REWRITE - Subj2Atlas_DS[sessionid].inputs.base_directory=ExperimentBaseDirectoryResults - #Subj2Atlas_DS[sessionid].inputs.regexp_substitutions = GenerateSubjectOutputPattern(subjectid) + Subj2Atlas_DSName = "SubjectToAtlas_DS_" + str(sessionid) + Subj2Atlas_DS[sessionid] = pe.Node(nio.DataSink(), name=Subj2Atlas_DSName) + Subj2Atlas_DS[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE + Subj2Atlas_DS[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults + # Subj2Atlas_DS[sessionid].inputs.regexp_substitutions = GenerateSubjectOutputPattern(subjectid) Subj2Atlas_DS[sessionid].inputs.regexp_substitutions = [ - (r'_LinearSubjectToAtlasANTsApplyTransforms_[^/]*' ,r'' + sessionid + '/' ) + (r'_LinearSubjectToAtlasANTsApplyTransforms_[^/]*', r'' + sessionid + '/') ] - baw200.connect(LinearSubjectToAtlasANTsApplyTransforms[sessionid],'output_image',Subj2Atlas_DS[sessionid],'SubjectToAtlasWarped.@linear_output_images') - #baw200.connect(MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid],'output_image',Subj2Atlas_DS[sessionid],'SubjectToAtlasWarped.@multilabel_output_images') + baw200.connect(LinearSubjectToAtlasANTsApplyTransforms[sessionid], 'output_image', Subj2Atlas_DS[sessionid], 'SubjectToAtlasWarped.@linear_output_images') + # baw200.connect(MultiLabelSubjectToAtlasANTsApplyTransforms[sessionid],'output_image',Subj2Atlas_DS[sessionid],'SubjectToAtlasWarped.@multilabel_output_images') print("HACK: DEBUGGING HERE") else: - print("SKIPPING SEGMENTATION PHASE FOR {0} {1} {2}, lenT2s {3}".format(projectid, subjectid, sessionid, len(global_AllT2s[sessionid]) )) - + print("SKIPPING SEGMENTATION PHASE FOR {0} {1} {2}, lenT2s {3}".format(projectid, subjectid, sessionid, len(global_AllT2s[sessionid]))) ## Synthesized images are only valid for 3T where the T2 and T1 have approximately the same resolution. 
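## Aside: a minimal standalone sketch (not part of the patch) of what the
## DataSink regexp_substitutions above do.  Each (pattern, replacement) pair is
## applied to every output path with re.sub(); the session id and path below
## are hypothetical stand-ins.
import re

substitutions = [(r'_LinearSubjectToAtlasANTsApplyTransforms_[^/]*', '10001' + '/')]
path = 'SubjectToAtlasWarped/_LinearSubjectToAtlasANTsApplyTransforms_10001/AVG_T1.nii.gz'
for pattern, replacement in substitutions:
    path = re.sub(pattern, replacement, path)
print path  # the MapNode working-directory segment is rewritten to the session id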
        ## Synthesized images are only valid for 3T where the T2 and T1 have approximately the same resolution.
-        global_All3T_T1s=ExperimentDatabase.getFilenamesByScantype(sessionid,['T1-30'])
-        global_All3T_T2s=ExperimentDatabase.getFilenamesByScantype(sessionid,['T2-30'])
-        #RunAllFSComponents=False ## A hack to avoid 26 hour run of freesurfer
-        RunAllFSComponents=True ## A hack to avoid 26 hour run of freesurfer
-        if 'FREESURFER' in WORKFLOW_COMPONENTS: # and ( ( len(global_All3T_T2s) > 0 ) or RunAllFSComponents == True ):
+        global_All3T_T1s = ExperimentDatabase.getFilenamesByScantype(sessionid, ['T1-30'])
+        global_All3T_T2s = ExperimentDatabase.getFilenamesByScantype(sessionid, ['T2-30'])
+        # RunAllFSComponents=False ## A hack to avoid 26 hour run of freesurfer
+        RunAllFSComponents = True  # A hack to avoid 26 hour run of freesurfer
+        if 'FREESURFER' in WORKFLOW_COMPONENTS:  # and ( ( len(global_All3T_T2s) > 0 ) or RunAllFSComponents == True ):
             print "Doing Freesurfer"
             from PipeLineFunctionHelpers import mkdir_p
-            constructed_FS_SUBJECTS_DIR=os.path.join(ExperimentBaseDirectoryCache,'BAWFS_SUBJECTS')
+            constructed_FS_SUBJECTS_DIR = os.path.join(ExperimentBaseDirectoryCache, 'BAWFS_SUBJECTS')
             mkdir_p(constructed_FS_SUBJECTS_DIR)
             from WorkupT1T2FreeSurfer_custom import CreateFreeSurferWorkflow_custom
-            if ( len(global_All3T_T2s) > 0 ): # If multi-modal, then create synthesized image before running
+            if (len(global_All3T_T2s) > 0):  # If multi-modal, then create synthesized image before running
                 print("HACK FREESURFER len(global_All3T_T2s) > 0 ")
-                myLocalFSWF[sessionid]= CreateFreeSurferWorkflow_custom(projectid, subjectid, sessionid,"Level1_FSTest",
-                    CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,RunAllFSComponents,True,constructed_FS_SUBJECTS_DIR)
+                myLocalFSWF[sessionid] = CreateFreeSurferWorkflow_custom(projectid, subjectid, sessionid, "Level1_FSTest",
+                                                                         CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, RunAllFSComponents, True, constructed_FS_SUBJECTS_DIR)
             else:
-                myLocalFSWF[sessionid]= CreateFreeSurferWorkflow_custom(projectid, subjectid, sessionid,"Level1_FSTest",
-                    CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,RunAllFSComponents,False,constructed_FS_SUBJECTS_DIR)
+                myLocalFSWF[sessionid] = CreateFreeSurferWorkflow_custom(projectid, subjectid, sessionid, "Level1_FSTest",
+                                                                         CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, RunAllFSComponents, False, constructed_FS_SUBJECTS_DIR)
-
-            FREESURFER_ID[sessionid]= pe.Node(interface=IdentityInterface(fields=['FreeSurfer_ID']),
+            FREESURFER_ID[sessionid] = pe.Node(interface=IdentityInterface(fields=['FreeSurfer_ID']),
                                               run_without_submitting=True,
-                                              name='99_FSNodeName'+str(subjectid)+"_"+str(sessionid) )
-            FREESURFER_ID[sessionid].inputs.FreeSurfer_ID=str(subjectid)+"_"+str(sessionid)
+                                              name='99_FSNodeName' + str(subjectid) + "_" + str(sessionid))
+            FREESURFER_ID[sessionid].inputs.FreeSurfer_ID = str(subjectid) + "_" + str(sessionid)
-
-            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.t1_average',myLocalFSWF[sessionid],'inputspec.T1_files')
-            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.t2_average',myLocalFSWF[sessionid],'inputspec.T2_files')
-            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.outputLabels',myLocalFSWF[sessionid],'inputspec.label_file')
+            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t1_average', myLocalFSWF[sessionid], 'inputspec.T1_files')
+            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.t2_average', myLocalFSWF[sessionid], 'inputspec.T2_files')
+            baw200.connect(PHASE_2_oneSubjWorkflow[sessionid], 'outputspec.outputLabels', myLocalFSWF[sessionid], 'inputspec.label_file')
-
-            from PipeLineFunctionHelpers import GetOnePosteriorImageFromDictionaryFunction
-            baw200.connect( [ ( PHASE_2_oneSubjWorkflow[sessionid], myLocalFSWF[sessionid],
-                  [ ( ( 'outputspec.posteriorImages', GetOnePosteriorImageFromDictionaryFunction, 'WM' ), 'inputspec.wm_prob')] ) ] )
-            baw200.connect(FREESURFER_ID[sessionid],'FreeSurfer_ID',myLocalFSWF[sessionid],'inputspec.FreeSurfer_ID')
-            #baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.outputLabels',myLocalFSWF[sessionid],'inputspec.mask_file') #Yes, the same file as label_file!
+            from PipeLineFunctionHelpers import GetOnePosteriorImageFromDictionaryFunction
+            baw200.connect([(PHASE_2_oneSubjWorkflow[sessionid], myLocalFSWF[sessionid],
+                             [(('outputspec.posteriorImages', GetOnePosteriorImageFromDictionaryFunction, 'WM'), 'inputspec.wm_prob')])])
+            baw200.connect(FREESURFER_ID[sessionid], 'FreeSurfer_ID', myLocalFSWF[sessionid], 'inputspec.FreeSurfer_ID')
+            # baw200.connect(PHASE_2_oneSubjWorkflow[sessionid],'outputspec.outputLabels',myLocalFSWF[sessionid],'inputspec.mask_file') #Yes, the same file as label_file!

             ### Now define where the final organized outputs should go.
             if RunAllFSComponents == True:
-                FS_DS[sessionid]=pe.Node(nio.DataSink(),name="FREESURFER_DS_"+str(subjectid)+"_"+str(sessionid))
-                FS_DS[sessionid].overwrite=GLOBAL_DATA_SINK_REWRITE
-                FS_DS[sessionid].inputs.base_directory=ExperimentBaseDirectoryResults
+                FS_DS[sessionid] = pe.Node(nio.DataSink(), name="FREESURFER_DS_" + str(subjectid) + "_" + str(sessionid))
+                FS_DS[sessionid].overwrite = GLOBAL_DATA_SINK_REWRITE
+                FS_DS[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
                 FS_DS[sessionid].inputs.regexp_substitutions = [
-                    ('/_uid_(?P<myuid>[^/]*)',r'/\g<myuid>')
+                    ('/_uid_(?P<myuid>[^/]*)', r'/\g<myuid>')
                 ]
-                baw200.connect(myLocalFSWF[sessionid], 'outputspec.FreesurferOutputDirectory', FS_DS[sessionid],'FREESURFER_SUBJ.@FreesurferOutputDirectory')
+                baw200.connect(myLocalFSWF[sessionid], 'outputspec.FreesurferOutputDirectory', FS_DS[sessionid], 'FREESURFER_SUBJ.@FreesurferOutputDirectory')

             ### Now define where the final organized outputs should go.
-            FSPREP_DataSink[sessionid]=pe.Node(nio.DataSink(),name="FREESURFER_PREP_"+str(subjectid)+"_"+str(sessionid))
-            FSPREP_DataSink[sessionid].inputs.base_directory=ExperimentBaseDirectoryResults
-            FREESURFER_PREP_PATTERNS = GenerateOutputPattern(projectid, subjectid, sessionid,'FREESURFER_PREP')
+            FSPREP_DataSink[sessionid] = pe.Node(nio.DataSink(), name="FREESURFER_PREP_" + str(subjectid) + "_" + str(sessionid))
+            FSPREP_DataSink[sessionid].inputs.base_directory = ExperimentBaseDirectoryResults
+            FREESURFER_PREP_PATTERNS = GenerateOutputPattern(projectid, subjectid, sessionid, 'FREESURFER_PREP')
             FSPREP_DataSink[sessionid].inputs.regexp_substitutions = FREESURFER_PREP_PATTERNS
             print "========================="
             print "========================="
@@ -999,7 +998,7 @@ def WorkupT1T2(subjectid,mountPrefix,ExperimentBaseDirectoryCache, ExperimentBas
             print "========================="
             print "========================="
             print "========================="
-            baw200.connect(myLocalFSWF[sessionid], 'outputspec.cnr_optimal_image', FSPREP_DataSink[sessionid],'FREESURFER_PREP.@cnr_optimal_image')
+            baw200.connect(myLocalFSWF[sessionid], 'outputspec.cnr_optimal_image', FSPREP_DataSink[sessionid], 'FREESURFER_PREP.@cnr_optimal_image')
         else:
             print "Skipping freesurfer"

     return baw200
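## Aside: a minimal sketch (not part of the patch) of the two-stage pattern the
## next file (CreateANTSRegistrationWorkflow) implements: a fast affine fit
## (BRAINSFit in the real workflow) seeds the deformable SyN stage through
## 'initial_moving_transform'.  This sketch uses the present-day nipype
## Registration interface, which differs slightly from the antsRegistration
## wrapper used below; file names here are hypothetical.
import nipype.pipeline.engine as pe
from nipype.interfaces.ants import Registration

syn = pe.Node(interface=Registration(), name='syn_sketch')
syn.inputs.fixed_image = 'subject_T1.nii.gz'       # hypothetical
syn.inputs.moving_image = 'atlas_T1.nii.gz'        # hypothetical
syn.inputs.initial_moving_transform = 'affine.h5'  # output of the affine stage
syn.inputs.transforms = ['SyN']
syn.inputs.transform_parameters = [(0.25,)]
syn.inputs.metric = ['CC']
syn.inputs.metric_weight = [1.0]
syn.inputs.radius_or_number_of_bins = [5]
syn.inputs.number_of_iterations = [[250, 100, 20]]
syn.inputs.shrink_factors = [[3, 2, 1]]
syn.inputs.smoothing_sigmas = [[0, 0, 0]]
syn.inputs.output_warped_image = 'moving_to_fixed.nii.gz'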
diff --git a/AutoWorkup/WorkupT1T2ANTS.py b/AutoWorkup/WorkupT1T2ANTS.py
index 5be0171f..2d558458 100644
--- a/AutoWorkup/WorkupT1T2ANTS.py
+++ b/AutoWorkup/WorkupT1T2ANTS.py
@@ -17,91 +17,93 @@
 ANTSWF.connect( BAtlas,'template_t1', myLocalAntsWF,"inputspec.movingVolumesList")
 ANTSWF.connect(myLocalLMIWF,'outputspec.atlasToSubjectTransform',myLocalAntsWF,'inputspec.initial_moving_transform')
 """
-def CreateANTSRegistrationWorkflow(WFname,CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,NumberOfThreads=-1):
-    ANTSWF= pe.Workflow(name=WFname)
-    inputsSpec = pe.Node(interface=IdentityInterface(fields=['fixedVolumesList','movingVolumesList','initial_moving_transform',
-                                                             'fixedBinaryVolume','movingBinaryVolume','warpFixedVolumesList'
-                                                             ]), name='inputspec' )
+
+def CreateANTSRegistrationWorkflow(WFname, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, NumberOfThreads=-1):
+    ANTSWF = pe.Workflow(name=WFname)
+
+    inputsSpec = pe.Node(interface=IdentityInterface(fields=['fixedVolumesList', 'movingVolumesList', 'initial_moving_transform',
+                                                             'fixedBinaryVolume', 'movingBinaryVolume', 'warpFixedVolumesList'
+                                                             ]), name='inputspec')
     print("""Run ANTS Registration""")

-    BFitAtlasToSubject = pe.Node(interface=BRAINSFit(),name="bfA2S")
-    BF_cpu_sge_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 2-12 -l h_vmem=14G,mem_free=4G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
-    BFitAtlasToSubject.plugin_args=BF_cpu_sge_options_dictionary
-    BFitAtlasToSubject.inputs.costMetric="MMI"
-    BFitAtlasToSubject.inputs.numberOfSamples=1000000
-    BFitAtlasToSubject.inputs.numberOfIterations=[1500]
-    BFitAtlasToSubject.inputs.numberOfHistogramBins=50
-    BFitAtlasToSubject.inputs.maximumStepLength=0.2
-    BFitAtlasToSubject.inputs.minimumStepLength=[0.000005]
-    BFitAtlasToSubject.inputs.useAffine=True ## Using initial transform from BRAINSABC
-    BFitAtlasToSubject.inputs.maskInferiorCutOffFromCenter=65
-    BFitAtlasToSubject.inputs.outputVolume="Trial_Initializer_Output.nii.gz"
+    BFitAtlasToSubject = pe.Node(interface=BRAINSFit(), name="bfA2S")
+    BF_cpu_sge_options_dictionary = {'qsub_args': '-S /bin/bash -pe smp1 2-12 -l h_vmem=14G,mem_free=4G -o /dev/null -e /dev/null ' + CLUSTER_QUEUE, 'overwrite': True}
+    BFitAtlasToSubject.plugin_args = BF_cpu_sge_options_dictionary
+    BFitAtlasToSubject.inputs.costMetric = "MMI"
+    BFitAtlasToSubject.inputs.numberOfSamples = 1000000
+    BFitAtlasToSubject.inputs.numberOfIterations = [1500]
+    BFitAtlasToSubject.inputs.numberOfHistogramBins = 50
+    BFitAtlasToSubject.inputs.maximumStepLength = 0.2
+    BFitAtlasToSubject.inputs.minimumStepLength = [0.000005]
+    BFitAtlasToSubject.inputs.useAffine = True  # Using initial transform from BRAINSABC
+    BFitAtlasToSubject.inputs.maskInferiorCutOffFromCenter = 65
+    BFitAtlasToSubject.inputs.outputVolume = "Trial_Initializer_Output.nii.gz"
     # Bug in BRAINSFit PREDICTIMG-1379 BFitAtlasToSubject.inputs.outputFixedVolumeROI="FixedROI.nii.gz"
     # Bug in BRAINSFit PREDICTIMG-1379 BFitAtlasToSubject.inputs.outputMovingVolumeROI="MovingROI.nii.gz"
-    BFitAtlasToSubject.inputs.outputTransform="Trial_Initializer_Output.h5"
-    BFitAtlasToSubject.inputs.maskProcessingMode="ROIAUTO"
-    BFitAtlasToSubject.inputs.ROIAutoDilateSize=4
-    #BFitAtlasToSubject.inputs.maskProcessingMode="ROI"
+    BFitAtlasToSubject.inputs.outputTransform = "Trial_Initializer_Output.h5"
+    BFitAtlasToSubject.inputs.maskProcessingMode = "ROIAUTO"
+    BFitAtlasToSubject.inputs.ROIAutoDilateSize = 4
+    # BFitAtlasToSubject.inputs.maskProcessingMode="ROI"
     # ANTSWF.connect(inputsSpec,'fixedBinaryVolume',BFitAtlasToSubject,'fixedBinaryVolume')
     # ANTSWF.connect(inputsSpec,'movingBinaryVolume',BFitAtlasToSubject,'movingBinaryVolume')
-    ANTSWF.connect(inputsSpec,'fixedVolumesList',BFitAtlasToSubject,'fixedVolume')
-    ANTSWF.connect(inputsSpec,'movingVolumesList',BFitAtlasToSubject,'movingVolume')
-    ANTSWF.connect(inputsSpec,'initial_moving_transform',BFitAtlasToSubject,'initialTransform')
+    ANTSWF.connect(inputsSpec, 'fixedVolumesList', BFitAtlasToSubject, 'fixedVolume')
+    ANTSWF.connect(inputsSpec, 'movingVolumesList', BFitAtlasToSubject, 'movingVolume')
+    ANTSWF.connect(inputsSpec, 'initial_moving_transform', BFitAtlasToSubject, 'initialTransform')

     ComputeAtlasToSubjectTransform = pe.Node(interface=antsRegistration(), name="antsA2S")
-    many_cpu_sge_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 5-12 -l h_vmem=17G,mem_free=9G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
-    ComputeAtlasToSubjectTransform.plugin_args=many_cpu_sge_options_dictionary
-
-    ComputeAtlasToSubjectTransform.inputs.dimension=3
-    ComputeAtlasToSubjectTransform.inputs.metric='CC' ## This is a family of interfaces, CC,MeanSquares,Demons,GC,MI,Mattes
-    ComputeAtlasToSubjectTransform.inputs.transform='SyN[0.25,3.0,0.0]'
-    ComputeAtlasToSubjectTransform.inputs.number_of_iterations=[250,100,20]
-    ComputeAtlasToSubjectTransform.inputs.convergence_threshold=1e-7
-    ComputeAtlasToSubjectTransform.inputs.smoothing_sigmas=[0,0,0]
-    ComputeAtlasToSubjectTransform.inputs.shrink_factors=[3,2,1]
-    ComputeAtlasToSubjectTransform.inputs.use_estimate_learning_rate_once=True
-    ComputeAtlasToSubjectTransform.inputs.use_histogram_matching=True
-    ComputeAtlasToSubjectTransform.inputs.invert_initial_moving_transform=False
-    ComputeAtlasToSubjectTransform.inputs.output_transform_prefix='antsRegPrefix_'
-    ComputeAtlasToSubjectTransform.inputs.output_warped_image='moving_to_fixed.nii.gz'
-    ComputeAtlasToSubjectTransform.inputs.output_inverse_warped_image='fixed_to_moving.nii.gz'
+    many_cpu_sge_options_dictionary = {'qsub_args': '-S /bin/bash -pe smp1 5-12 -l h_vmem=17G,mem_free=9G -o /dev/null -e /dev/null ' + CLUSTER_QUEUE, 'overwrite': True}
+    ComputeAtlasToSubjectTransform.plugin_args = many_cpu_sge_options_dictionary
+
+    ComputeAtlasToSubjectTransform.inputs.dimension = 3
+    ComputeAtlasToSubjectTransform.inputs.metric = 'CC'  # This is a family of interfaces, CC,MeanSquares,Demons,GC,MI,Mattes
+    ComputeAtlasToSubjectTransform.inputs.transform = 'SyN[0.25,3.0,0.0]'
+    ComputeAtlasToSubjectTransform.inputs.number_of_iterations = [250, 100, 20]
+    ComputeAtlasToSubjectTransform.inputs.convergence_threshold = 1e-7
+    ComputeAtlasToSubjectTransform.inputs.smoothing_sigmas = [0, 0, 0]
+    ComputeAtlasToSubjectTransform.inputs.shrink_factors = [3, 2, 1]
+    ComputeAtlasToSubjectTransform.inputs.use_estimate_learning_rate_once = True
+    ComputeAtlasToSubjectTransform.inputs.use_histogram_matching = True
+    ComputeAtlasToSubjectTransform.inputs.invert_initial_moving_transform = False
+    ComputeAtlasToSubjectTransform.inputs.output_transform_prefix = 'antsRegPrefix_'
+    ComputeAtlasToSubjectTransform.inputs.output_warped_image = 'moving_to_fixed.nii.gz'
+    ComputeAtlasToSubjectTransform.inputs.output_inverse_warped_image = 'fixed_to_moving.nii.gz'
-    #ComputeAtlasToSubjectTransform.inputs.num_threads=-1
-    #if os.environ.has_key('NSLOTS'):
+    # ComputeAtlasToSubjectTransform.inputs.num_threads=-1
+    # if os.environ.has_key('NSLOTS'):
     #     ComputeAtlasToSubjectTransform.inputs.num_threads=int(os.environ.has_key('NSLOTS'))
-    #else:
+    # else:
     #     ComputeAtlasToSubjectTransform.inputs.num_threads=NumberOfThreads
     #  ComputeAtlasToSubjectTransform.inputs.fixedMask=SUBJ_A_small_T2_mask.nii.gz
     #  ComputeAtlasToSubjectTransform.inputs.movingMask=SUBJ_B_small_T2_mask.nii.gz
-    ANTSWF.connect( inputsSpec,'fixedVolumesList', ComputeAtlasToSubjectTransform,"fixed_image")
-    ANTSWF.connect( inputsSpec,'movingVolumesList',ComputeAtlasToSubjectTransform,"moving_image")
-    ANTSWF.connect( BFitAtlasToSubject,'outputTransform', ComputeAtlasToSubjectTransform,'initial_moving_transform')
+    ANTSWF.connect(inputsSpec, 'fixedVolumesList', ComputeAtlasToSubjectTransform, "fixed_image")
+    ANTSWF.connect(inputsSpec, 'movingVolumesList', ComputeAtlasToSubjectTransform, "moving_image")
+    ANTSWF.connect(BFitAtlasToSubject, 'outputTransform', ComputeAtlasToSubjectTransform, 'initial_moving_transform')

     if 1 == 1:
-        mergeAffineWarp = pe.Node(interface=Merge(2),name="Merge_AffineWarp")
-        ANTSWF.connect(ComputeAtlasToSubjectTransform,'warp_transform', mergeAffineWarp,'in1')
-        ANTSWF.connect(BFitAtlasToSubject,'outputTransform', mergeAffineWarp,'in2')
+        mergeAffineWarp = pe.Node(interface=Merge(2), name="Merge_AffineWarp")
+        ANTSWF.connect(ComputeAtlasToSubjectTransform, 'warp_transform', mergeAffineWarp, 'in1')
+        ANTSWF.connect(BFitAtlasToSubject, 'outputTransform', mergeAffineWarp, 'in2')

         from nipype.interfaces.ants import WarpImageMultiTransform
         debugWarpTest = pe.Node(interface=WarpImageMultiTransform(), name="dbgWarpTest")
         # Not allowed as an input
         debugWarpTest.inputs.output_image = 'debugWarpedMovingToFixed.nii.gz'
         ANTSWF.connect(inputsSpec, 'fixedVolumesList', debugWarpTest, 'reference_image')
-        ANTSWF.connect(inputsSpec, 'movingVolumesList',debugWarpTest, 'moving_image')
-        ANTSWF.connect(mergeAffineWarp,'out', debugWarpTest, 'transformation_series')
+        ANTSWF.connect(inputsSpec, 'movingVolumesList', debugWarpTest, 'moving_image')
+        ANTSWF.connect(mergeAffineWarp, 'out', debugWarpTest, 'transformation_series')

     #############
-    outputsSpec = pe.Node(interface=IdentityInterface(fields=['warped_image','inverse_warped_image','warp_transform',
-                                                              'inverse_warp_transform','affine_transform'
-                                                              ]), name='outputspec' )
+    outputsSpec = pe.Node(interface=IdentityInterface(fields=['warped_image', 'inverse_warped_image', 'warp_transform',
+                                                              'inverse_warp_transform', 'affine_transform'
+                                                              ]), name='outputspec')

-    ANTSWF.connect(ComputeAtlasToSubjectTransform,'warped_image',          outputsSpec,'warped_image')
-    ANTSWF.connect(ComputeAtlasToSubjectTransform,'inverse_warped_image',  outputsSpec,'inverse_warped_image')
-    ANTSWF.connect(ComputeAtlasToSubjectTransform,'warp_transform',        outputsSpec,'warp_transform')
-    ANTSWF.connect(ComputeAtlasToSubjectTransform,'inverse_warp_transform',outputsSpec,'inverse_warp_transform')
-    ANTSWF.connect(BFitAtlasToSubject,'outputTransform',                   outputsSpec,'affine_transform')
+    ANTSWF.connect(ComputeAtlasToSubjectTransform, 'warped_image', outputsSpec, 'warped_image')
+    ANTSWF.connect(ComputeAtlasToSubjectTransform, 'inverse_warped_image', outputsSpec, 'inverse_warped_image')
+    ANTSWF.connect(ComputeAtlasToSubjectTransform, 'warp_transform', outputsSpec, 'warp_transform')
+    ANTSWF.connect(ComputeAtlasToSubjectTransform, 'inverse_warp_transform', outputsSpec, 'inverse_warp_transform')
+    ANTSWF.connect(BFitAtlasToSubject, 'outputTransform', outputsSpec, 'affine_transform')

     return ANTSWF

@@ -110,56 +112,58 @@ def CreateANTSRegistrationWorkflow(WFname,CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,Numbe
 ####
 ####
 ####
+
+
 def TempHolderForOldAnts():
     if 0 == 1:
         ANTS_AtlasToSubjectTransform = pe.Node(interface=ANTS(), name="ANTS_ANTS_AtlasToSubjectTransform")
-        ANTS_AtlasToSubjectTransform.plugin_args=many_cpu_sge_options_dictionary
+        ANTS_AtlasToSubjectTransform.plugin_args = many_cpu_sge_options_dictionary

-        ANTS_AtlasToSubjectTransform.inputs.dimension=3
-        ANTS_AtlasToSubjectTransform.inputs.output_transform_prefix='antsRegPrefix_'
-        ANTS_AtlasToSubjectTransform.inputs.metric=['CC'] ## This is a family of interfaces, CC,MeanSquares,Demons,GC,MI,Mattes
-        ANTS_AtlasToSubjectTransform.inputs.metric_weight= [1.0]
+        ANTS_AtlasToSubjectTransform.inputs.dimension = 3
+        ANTS_AtlasToSubjectTransform.inputs.output_transform_prefix = 'antsRegPrefix_'
+        ANTS_AtlasToSubjectTransform.inputs.metric = ['CC']  # This is a family of interfaces, CC,MeanSquares,Demons,GC,MI,Mattes
+        ANTS_AtlasToSubjectTransform.inputs.metric_weight = [1.0]
         ANTS_AtlasToSubjectTransform.inputs.radius = [5]
         ANTS_AtlasToSubjectTransform.inputs.radius = [5]
-        ANTS_AtlasToSubjectTransform.inputs.affine_gradient_descent_option = [0.25,0.05,0.0001,0.0001]
+        ANTS_AtlasToSubjectTransform.inputs.affine_gradient_descent_option = [0.25, 0.05, 0.0001, 0.0001]
         ANTS_AtlasToSubjectTransform.inputs.transformation_model = 'SyN'
         ANTS_AtlasToSubjectTransform.inputs.gradient_step_length = 0.25
         ANTS_AtlasToSubjectTransform.inputs.number_of_time_steps = 3.0
         ANTS_AtlasToSubjectTransform.inputs.delta_time = 0.0
-        ANTS_AtlasToSubjectTransform.inputs.number_of_iterations = [100,35,10]
-        ANTS_AtlasToSubjectTransform.inputs.subsampling_factors = [3,2,1]
-        ANTS_AtlasToSubjectTransform.inputs.smoothing_sigmas = [0,0,0]
+        ANTS_AtlasToSubjectTransform.inputs.number_of_iterations = [100, 35, 10]
+        ANTS_AtlasToSubjectTransform.inputs.subsampling_factors = [3, 2, 1]
+        ANTS_AtlasToSubjectTransform.inputs.smoothing_sigmas = [0, 0, 0]
         ANTS_AtlasToSubjectTransform.inputs.use_histogram_matching = True
-        #ANTS_AtlasToSubjectTransform.inputs.output_warped_image='fixed_to_moving.nii.gz'
-        #ANTS_AtlasToSubjectTransform.inputs.output_inverse_warped_image='moving_to_fixed.nii.gz'
-        #if os.environ.has_key('NSLOTS'):
+        # ANTS_AtlasToSubjectTransform.inputs.output_warped_image='fixed_to_moving.nii.gz'
+        # ANTS_AtlasToSubjectTransform.inputs.output_inverse_warped_image='moving_to_fixed.nii.gz'
+        # if os.environ.has_key('NSLOTS'):
         #     ANTS_AtlasToSubjectTransform.inputs.num_threads=int(os.environ.has_key('NSLOTS'))
-        #else:
+        # else:
         #     ANTS_AtlasToSubjectTransform.inputs.num_threads=NumberOfThreads
         #  ANTS_AtlasToSubjectTransform.inputs.fixedMask=SUBJ_A_small_T2_mask.nii.gz
         #  ANTS_AtlasToSubjectTransform.inputs.movingMask=SUBJ_B_small_T2_mask.nii.gz
-        ANTSWF.connect( inputsSpec,'fixedVolumesList', ANTS_AtlasToSubjectTransform,"fixed_image")
-        ANTSWF.connect( inputsSpec,'movingVolumesList',ANTS_AtlasToSubjectTransform,"moving_image")
-        #ANTSWF.connect( BFitAtlasToSubject,'outputTransform', ANTS_AtlasToSubjectTransform,'initial_moving_transform')
+        ANTSWF.connect(inputsSpec, 'fixedVolumesList', ANTS_AtlasToSubjectTransform, "fixed_image")
+        ANTSWF.connect(inputsSpec, 'movingVolumesList', ANTS_AtlasToSubjectTransform, "moving_image")
+        # ANTSWF.connect( BFitAtlasToSubject,'outputTransform', ANTS_AtlasToSubjectTransform,'initial_moving_transform')

         #############
-        outputsSpec = pe.Node(interface=IdentityInterface(fields=['warped_image','inverse_warped_image','warp_transform',
-                                                                  'inverse_warp_transform','affine_transform'
-                                                                  ]), name='outputspec' )
+        outputsSpec = pe.Node(interface=IdentityInterface(fields=['warped_image', 'inverse_warped_image', 'warp_transform',
+                                                                  'inverse_warp_transform', 'affine_transform'
+                                                                  ]), name='outputspec')

-        #ANTSWF.connect(ANTS_AtlasToSubjectTransform,'warped_image', outputsSpec,'warped_image')
-        #ANTSWF.connect(ANTS_AtlasToSubjectTransform,'inverse_warped_image', outputsSpec,'inverse_warped_image')
-        ANTSWF.connect(ANTS_AtlasToSubjectTransform,'affine_transform', outputsSpec,'affine_transform')
-        ANTSWF.connect(ANTS_AtlasToSubjectTransform,'warp_transform', outputsSpec,'warp_transform')
-        ANTSWF.connect(ANTS_AtlasToSubjectTransform,'inverse_warp_transform',outputsSpec,'inverse_warp_transform')
+        # ANTSWF.connect(ANTS_AtlasToSubjectTransform,'warped_image', outputsSpec,'warped_image')
+        # ANTSWF.connect(ANTS_AtlasToSubjectTransform,'inverse_warped_image', outputsSpec,'inverse_warped_image')
+        ANTSWF.connect(ANTS_AtlasToSubjectTransform, 'affine_transform', outputsSpec, 'affine_transform')
+        ANTSWF.connect(ANTS_AtlasToSubjectTransform, 'warp_transform', outputsSpec, 'warp_transform')
+        ANTSWF.connect(ANTS_AtlasToSubjectTransform, 'inverse_warp_transform', outputsSpec, 'inverse_warp_transform')

     if 0 == 1:
-        TestResampleMovingImage=pe.Node(interface=BRAINSResample(),name="99_TestAffineRegistration")
+        TestResampleMovingImage = pe.Node(interface=BRAINSResample(), name="99_TestAffineRegistration")
         TestResampleMovingImage.inputs.interpolationMode = "Linear"
         TestResampleMovingImage.inputs.outputVolume = "atlasToSubjectTest.nii.gz"
-        ANTSWF.connect(inputsSpec,'initial_moving_transform',TestResampleMovingImage,'warpTransform')
-        ANTSWF.connect(inputsSpec,'fixedVolumesList',TestResampleMovingImage,'referenceVolume')
-        ANTSWF.connect(inputsSpec,'movingVolumesList',TestResampleMovingImage,'inputVolume')
+        ANTSWF.connect(inputsSpec, 'initial_moving_transform', TestResampleMovingImage, 'warpTransform')
+        ANTSWF.connect(inputsSpec, 'fixedVolumesList', TestResampleMovingImage, 'referenceVolume')
+        ANTSWF.connect(inputsSpec, 'movingVolumesList', TestResampleMovingImage, 'inputVolume')
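## Aside: the commented-out NSLOTS logic kept in the file above has a latent
## bug -- os.environ.has_key('NSLOTS') returns a bool, so int(...) would always
## yield 0 or 1 rather than the granted slot count.  A corrected sketch of the
## intent (the helper name is hypothetical):
import os

def GetNumberOfThreadsFromSGE(default_threads=-1):
    ## SGE exports NSLOTS as the number of slots granted to the job
    if 'NSLOTS' in os.environ:
        return int(os.environ['NSLOTS'])
    return default_threads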
diff --git a/AutoWorkup/WorkupT1T2AtlasNode.py b/AutoWorkup/WorkupT1T2AtlasNode.py
index b8236229..bfccf04c 100644
--- a/AutoWorkup/WorkupT1T2AtlasNode.py
+++ b/AutoWorkup/WorkupT1T2AtlasNode.py
@@ -8,58 +8,59 @@
 ####################### HACK: Needed to make some global variables for quick
 ####################### processing needs
-#Generate by running a file system list "ls -1 $AtlasDir *.nii.gz *.xml *.fcsv *.wgts"
-#atlas_file_names=atlas_file_list.split(' ')
+# Generate by running a file system list "ls -1 $AtlasDir *.nii.gz *.xml *.fcsv *.wgts"
+# atlas_file_names=atlas_file_list.split(' ')
 ## HACK
-atlas_file_names=["AtlasPVDefinition.xml","AtlasPVDefinition.xml.in",
-                  "ExtendedAtlasDefinition.xml","ExtendedAtlasDefinition.xml.in",
-                  "avg_t1.nii.gz","avg_t2.nii.gz","tempNOTVBBOX.nii.gz",
-                  "template_ABC_labels.nii.gz","template_WMPM2_labels.nii.gz",
-                  "template_WMPM2_labels.txt","template_brain.nii.gz",
-                  "template_cerebellum.nii.gz","template_class.nii.gz",
-                  "template_headregion.nii.gz","template_leftHemisphere.nii.gz",
-                  "template_nac_labels.nii.gz","template_nac_labels.txt",
-                  "hncma-atlas.nii.gz","hncma-atlas-lut-mod2.ctbl",
-                  "template_rightHemisphere.nii.gz","template_t1.nii.gz",
-                  "template_t1_clipped.nii.gz","template_t2.nii.gz",
-                  "template_t2_clipped.nii.gz","template_ventricles.nii.gz",
-                  "template_landmarks.fcsv","template_landmark_weights.csv",
-                  "template_landmarks_31.fcsv","template_landmark_weights_31.csv",
+atlas_file_names = ["AtlasPVDefinition.xml", "AtlasPVDefinition.xml.in",
+                    "ExtendedAtlasDefinition.xml", "ExtendedAtlasDefinition.xml.in",
+                    "avg_t1.nii.gz", "avg_t2.nii.gz", "tempNOTVBBOX.nii.gz",
+                    "template_ABC_labels.nii.gz", "template_WMPM2_labels.nii.gz",
+                    "template_WMPM2_labels.txt", "template_brain.nii.gz",
+                    "template_cerebellum.nii.gz", "template_class.nii.gz",
+                    "template_headregion.nii.gz", "template_leftHemisphere.nii.gz",
+                    "template_nac_labels.nii.gz", "template_nac_labels.txt",
+                    "hncma-atlas.nii.gz", "hncma-atlas-lut-mod2.ctbl",
+                    "template_rightHemisphere.nii.gz", "template_t1.nii.gz",
+                    "template_t1_clipped.nii.gz", "template_t2.nii.gz",
+                    "template_t2_clipped.nii.gz", "template_ventricles.nii.gz",
+                    "template_landmarks.fcsv", "template_landmark_weights.csv",
+                    "template_landmarks_31.fcsv", "template_landmark_weights_31.csv",

-                  "probabilityMaps/l_accumben_ProbabilityMap.nii.gz",
-                  "probabilityMaps/r_accumben_ProbabilityMap.nii.gz",
-                  "probabilityMaps/l_caudate_ProbabilityMap.nii.gz",
-                  "probabilityMaps/r_caudate_ProbabilityMap.nii.gz",
-                  "probabilityMaps/l_globus_ProbabilityMap.nii.gz",
-                  "probabilityMaps/r_globus_ProbabilityMap.nii.gz",
-                  "probabilityMaps/l_hippocampus_ProbabilityMap.nii.gz",
-                  "probabilityMaps/r_hippocampus_ProbabilityMap.nii.gz",
-                  "probabilityMaps/l_putamen_ProbabilityMap.nii.gz",
-                  "probabilityMaps/r_putamen_ProbabilityMap.nii.gz",
-                  "probabilityMaps/l_thalamus_ProbabilityMap.nii.gz",
-                  "probabilityMaps/r_thalamus_ProbabilityMap.nii.gz",
+                    "probabilityMaps/l_accumben_ProbabilityMap.nii.gz",
+                    "probabilityMaps/r_accumben_ProbabilityMap.nii.gz",
+                    "probabilityMaps/l_caudate_ProbabilityMap.nii.gz",
+                    "probabilityMaps/r_caudate_ProbabilityMap.nii.gz",
+                    "probabilityMaps/l_globus_ProbabilityMap.nii.gz",
+                    "probabilityMaps/r_globus_ProbabilityMap.nii.gz",
+                    "probabilityMaps/l_hippocampus_ProbabilityMap.nii.gz",
+                    "probabilityMaps/r_hippocampus_ProbabilityMap.nii.gz",
+                    "probabilityMaps/l_putamen_ProbabilityMap.nii.gz",
+                    "probabilityMaps/r_putamen_ProbabilityMap.nii.gz",
+                    "probabilityMaps/l_thalamus_ProbabilityMap.nii.gz",
+                    "probabilityMaps/r_thalamus_ProbabilityMap.nii.gz",

-                  "spatialImages/phi.nii.gz",
-                  "spatialImages/rho.nii.gz",
-                  "spatialImages/theta.nii.gz",
+                    "spatialImages/phi.nii.gz",
+                    "spatialImages/rho.nii.gz",
+                    "spatialImages/theta.nii.gz",

-                  "modelFiles/trainModelFile.txtD0060NT0060.gz"
-                  ]
+                    "modelFiles/trainModelFile.txtD0060NT0060.gz"
+                    ]

 ## Remove filename extensions for images, but replace . with _ for other file types
-atlas_file_keys=[os.path.basename(fn).replace('.nii.gz','').replace('.','_') for fn in atlas_file_names]
-atlas_outputs_filename_match = dict(zip(atlas_file_keys,atlas_file_names))
+atlas_file_keys = [os.path.basename(fn).replace('.nii.gz', '').replace('.', '_') for fn in atlas_file_names]
+atlas_outputs_filename_match = dict(zip(atlas_file_keys, atlas_file_names))

-def MakeAtlasNode(atlasDirectory,AtlasNodeName):
+
+def MakeAtlasNode(atlasDirectory, AtlasNodeName):
     BAtlas = pe.Node(interface=nio.DataGrabber(outfields=atlas_file_keys),
-                     run_without_submitting=True,
-                     name=AtlasNodeName)
+                     run_without_submitting=True,
+                     name=AtlasNodeName)
     BAtlas.inputs.base_directory = atlasDirectory
     BAtlas.inputs.template = '*'
     ## Prefix every filename with atlasDirectory
-    atlas_search_paths=['{0}'.format(fn) for fn in atlas_file_names]
-    BAtlas.inputs.field_template = dict(zip(atlas_file_keys,atlas_search_paths))
+    atlas_search_paths = ['{0}'.format(fn) for fn in atlas_file_names]
+    BAtlas.inputs.field_template = dict(zip(atlas_file_keys, atlas_search_paths))
     ## Give 'atlasDirectory' as the substitution argument
-    atlas_template_args_match=[ [[]] for i in atlas_file_keys ] ##build a list of proper lenght with repeated entries
-    BAtlas.inputs.template_args = dict(zip(atlas_file_keys,atlas_template_args_match))
+    atlas_template_args_match = [[[]] for i in atlas_file_keys]  # build a list of proper length with repeated entries
+    BAtlas.inputs.template_args = dict(zip(atlas_file_keys, atlas_template_args_match))
     return BAtlas
diff --git a/AutoWorkup/WorkupT1T2BRAINSCut.py b/AutoWorkup/WorkupT1T2BRAINSCut.py
index da015451..442be895 100644
--- a/AutoWorkup/WorkupT1T2BRAINSCut.py
+++ b/AutoWorkup/WorkupT1T2BRAINSCut.py
@@ -11,10 +11,12 @@
 from PipeLineFunctionHelpers import getListIndex

-def GenerateWFName(projectid, subjectid, sessionid,WFName):
-    return WFName+'_'+str(subjectid)+"_"+str(sessionid)+"_"+str(projectid)
-def CreateLabelMap(listOfImages,LabelImageName,CSVFileName,projectid, subjectid, sessionid):
+def GenerateWFName(projectid, subjectid, sessionid, WFName):
+    return WFName + '_' + str(subjectid) + "_" + str(sessionid) + "_" + str(projectid)
+
+
+def CreateLabelMap(listOfImages, LabelImageName, CSVFileName, projectid, subjectid, sessionid):
     """
     A function to create a consolidated label map and a csv file of volume measurements.
     """
@@ -24,76 +26,76 @@ def CreateLabelMap(listOfImages,LabelImageName,CSVFileName,projectid, subjectid,
     import os
     import csv
     orderOfPriority = [
-        "l_caudate" ,
-        "r_caudate" ,
-        "l_putamen" ,
-        "r_putamen" ,
-        "l_hippocampus" ,
-        "r_hippocampus" ,
-        "l_thalamus" ,
-        "r_thalamus" ,
-        "l_accumben" ,
-        "r_accumben" ,
-        "l_globus" ,
-        "r_globus"
+        "l_caudate",
+        "r_caudate",
+        "l_putamen",
+        "r_putamen",
+        "l_hippocampus",
+        "r_hippocampus",
+        "l_thalamus",
+        "r_thalamus",
+        "l_accumben",
+        "r_accumben",
+        "l_globus",
+        "r_globus"
     ]

-    valueDict={
-        "l_caudate" : 1,
-        "r_caudate" : 2,
-        "l_putamen" : 3,
-        "r_putamen" : 4,
-        "l_hippocampus" : 5,
-        "r_hippocampus" : 6,
-        "l_thalamus" : 7,
-        "r_thalamus" : 8,
-        "l_accumben" : 9,
-        "r_accumben" :10,
-        "l_globus" :11,
-        "r_globus" :12
+    valueDict = {
+        "l_caudate": 1,
+        "r_caudate": 2,
+        "l_putamen": 3,
+        "r_putamen": 4,
+        "l_hippocampus": 5,
+        "r_hippocampus": 6,
+        "l_thalamus": 7,
+        "r_thalamus": 8,
+        "l_accumben": 9,
+        "r_accumben": 10,
+        "l_globus": 11,
+        "r_globus": 12
     }

     labelImage = None
     for segFN in listOfImages:
         im = sitk.ReadImage(segFN)
         im.GetSize()
-        remove_pre_postfix=os.path.basename(segFN.replace(".nii.gz","").replace("subjectANNLabel_","").replace("_seg",""))
-        structName=remove_pre_postfix.lower()
+        remove_pre_postfix = os.path.basename(segFN.replace(".nii.gz", "").replace("subjectANNLabel_", "").replace("_seg", ""))
+        structName = remove_pre_postfix.lower()
         if labelImage is None:
-            labelImage = im*valueDict[structName]
+            labelImage = im * valueDict[structName]
         else:
-            mask=sitk.Not(im)
+            mask = sitk.Not(im)
             ## Clear out an empty space for the next mask to be inserted
             labelImage *= mask
             ## Add in the mask image with it's proper label
-            labelImage = labelImage + im*valueDict[structName]
-    sitk.WriteImage(labelImage,LabelImageName)
+            labelImage = labelImage + im * valueDict[structName]
+    sitk.WriteImage(labelImage, LabelImageName)

     ls = sitk.LabelStatisticsImageFilter()
-    ls.Execute(labelImage,labelImage)
-    ImageSpacing=labelImage.GetSpacing()
-    csvFile=open(CSVFileName,'w')
-    dWriter=csv.DictWriter(csvFile,['projectid', 'subjectid', 'sessionid','Structure','LabelCode','Volume_mm3'],restval='', extrasaction='raise', dialect='excel')
+    ls.Execute(labelImage, labelImage)
+    ImageSpacing = labelImage.GetSpacing()
+    csvFile = open(CSVFileName, 'w')
+    dWriter = csv.DictWriter(csvFile, ['projectid', 'subjectid', 'sessionid', 'Structure', 'LabelCode', 'Volume_mm3'], restval='', extrasaction='raise', dialect='excel')
     dWriter.writeheader()
-    writeDictionary=dict()
+    writeDictionary = dict()
     for name in orderOfPriority:
         value = valueDict[name]
         if ls.HasLabel(value):
-            #print "Displaying: ", name, value
+            # print "Displaying: ", name, value
             myMeasurementMap = ls.GetMeasurementMap(value)
-            dictKeys=myMeasurementMap.GetVectorOfMeasurementNames()
-            dictValues=myMeasurementMap.GetVectorOfMeasurementValues()
-            measurementDict=dict(zip(dictKeys, dictValues))
-            structVolume=ImageSpacing[0]*ImageSpacing[1]*ImageSpacing[2]*measurementDict['Count']
-            writeDictionary['Volume_mm3']=structVolume
-            writeDictionary['Structure']=name
-            writeDictionary['LabelCode']=value
-            #writeDictionary['FileName']=os.path.abspath(LabelImageName)
-            writeDictionary['projectid']=projectid
-            writeDictionary['subjectid']=subjectid
-            writeDictionary['sessionid']=sessionid
+            dictKeys = myMeasurementMap.GetVectorOfMeasurementNames()
+            dictValues = myMeasurementMap.GetVectorOfMeasurementValues()
+            measurementDict = dict(zip(dictKeys, dictValues))
+            structVolume = ImageSpacing[0] * ImageSpacing[1] * ImageSpacing[2] * measurementDict['Count']
+            writeDictionary['Volume_mm3'] = structVolume
+            writeDictionary['Structure'] = name
+            writeDictionary['LabelCode'] = value
+            # writeDictionary['FileName']=os.path.abspath(LabelImageName)
+            writeDictionary['projectid'] = projectid
+            writeDictionary['subjectid'] = subjectid
+            writeDictionary['sessionid'] = sessionid
             dWriter.writerow(writeDictionary)
-    return os.path.abspath(LabelImageName),os.path.abspath(CSVFileName)
+    return os.path.abspath(LabelImageName), os.path.abspath(CSVFileName)

 #==============================================
 #==============================================
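## Aside: a minimal standalone sketch (not part of the patch) of the label
## compositing rule CreateLabelMap applies above: each binary mask first clears
## its own footprint with Not() so that later structures overwrite earlier
## ones, then stamps in its integer code.
import SimpleITK as sitk

def CompositeLabels(binary_masks, label_values):
    """binary_masks: 0/1 images; label_values: matching integer label codes."""
    label_image = None
    for mask, value in zip(binary_masks, label_values):
        if label_image is None:
            label_image = mask * value
        else:
            label_image *= sitk.Not(mask)              # zero out voxels claimed by this mask
            label_image = label_image + mask * value   # stamp in the new label code
    return label_image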
@@ -111,56 +113,58 @@ def CreateLabelMap(listOfImages,LabelImageName,CSVFileName,projectid, subjectid,
     cutWF.connect(BAtlas,'template_brain',myLocalcutWF,'movingBinaryVolume')
     cutWF.connect(BLI,'outputTransformFilename',myLocalcutWF,'initialTransform')
     """
-def CreateBRAINSCutWorkflow( projectid,
-                             subjectid,
-                             sessionid,
-                             WFName,
-                             CLUSTER_QUEUE,
-                             CLUSTER_QUEUE_LONG,
-                             atlasObject,
-                             t1Only = False ):
-    cutWF= pe.Workflow(name=GenerateWFName(projectid, subjectid, sessionid,WFName))
-
-    inputsSpec = pe.Node(interface=IdentityInterface(fields=['T1Volume','T2Volume',
-                                                             'TotalGM','RegistrationROI',
-                                                             'atlasToSubjectTransform']), name='inputspec' )
+
+
+def CreateBRAINSCutWorkflow(projectid,
+                            subjectid,
+                            sessionid,
+                            WFName,
+                            CLUSTER_QUEUE,
+                            CLUSTER_QUEUE_LONG,
+                            atlasObject,
+                            t1Only=False):
+    cutWF = pe.Workflow(name=GenerateWFName(projectid, subjectid, sessionid, WFName))
+
+    inputsSpec = pe.Node(interface=IdentityInterface(fields=['T1Volume', 'T2Volume',
+                                                             'TotalGM', 'RegistrationROI',
+                                                             'atlasToSubjectTransform']), name='inputspec')

     if not t1Only:
         """
         Gradient Anistropic Diffusion images for BRAINSCut
         """
-        GADT1=pe.Node(interface=GradientAnisotropicDiffusionImageFilter(),name="GADT1")
+        GADT1 = pe.Node(interface=GradientAnisotropicDiffusionImageFilter(), name="GADT1")
         GADT1.inputs.timeStep = 0.025
         GADT1.inputs.conductance = 1
         GADT1.inputs.numberOfIterations = 5
         GADT1.inputs.outputVolume = "GADT1.nii.gz"
-        cutWF.connect(inputsSpec,'T1Volume',GADT1,'inputVolume')
+        cutWF.connect(inputsSpec, 'T1Volume', GADT1, 'inputVolume')

-        GADT2=pe.Node(interface=GradientAnisotropicDiffusionImageFilter(),name="GADT2")
+        GADT2 = pe.Node(interface=GradientAnisotropicDiffusionImageFilter(), name="GADT2")
        GADT2.inputs.timeStep = 0.025
         GADT2.inputs.conductance = 1
         GADT2.inputs.numberOfIterations = 5
         GADT2.inputs.outputVolume = "GADT2.nii.gz"
-        cutWF.connect(inputsSpec,'T2Volume',GADT2,'inputVolume')
+        cutWF.connect(inputsSpec, 'T2Volume', GADT2, 'inputVolume')

         """
         Sum the gradient images for BRAINSCut
         """
-        SGI=pe.Node(interface=GenerateSummedGradientImage(),name="SGI")
+        SGI = pe.Node(interface=GenerateSummedGradientImage(), name="SGI")
         SGI.inputs.outputFileName = "SummedGradImage.nii.gz"
-        cutWF.connect(GADT1,'outputVolume',SGI,'inputVolume1')
-        cutWF.connect(GADT2,'outputVolume',SGI,'inputVolume2')
+        cutWF.connect(GADT1, 'outputVolume', SGI, 'inputVolume1')
+        cutWF.connect(GADT2, 'outputVolume', SGI, 'inputVolume2')

     """
     BRAINSCut
     """
-    RF12BC = pe.Node(interface=RF12BRAINSCutWrapper(),name="IQR_NORM_SEP_RF12_BRAINSCut")
-    many_cpu_RF12BC_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 8-8 -l h_vmem=24G,mem_free=20G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
-#many_cpu_RF12BC_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 2-8 -l big_mem=true,h_vmem=60G,mem_free=30G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
-#many_cpu_RF12BC_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 4-6 -l big_mem=true,h_vmem=22G,mem_free=22G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
-    RF12BC.plugin_args=many_cpu_RF12BC_options_dictionary
+    RF12BC = pe.Node(interface=RF12BRAINSCutWrapper(), name="IQR_NORM_SEP_RF12_BRAINSCut")
+    many_cpu_RF12BC_options_dictionary = {'qsub_args': '-S /bin/bash -pe smp1 8-8 -l h_vmem=24G,mem_free=20G -o /dev/null -e /dev/null ' + CLUSTER_QUEUE, 'overwrite': True}
+# many_cpu_RF12BC_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 2-8 -l big_mem=true,h_vmem=60G,mem_free=30G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
+# many_cpu_RF12BC_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 4-6 -l big_mem=true,h_vmem=22G,mem_free=22G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
+    RF12BC.plugin_args = many_cpu_RF12BC_options_dictionary
     RF12BC.inputs.trainingVectorFilename = "trainingVectorFilename.txt"
     RF12BC.inputs.xmlFilename = "BRAINSCutSegmentationDefinition.xml"
     RF12BC.inputs.vectorNormalization = "IQR"
@@ -176,109 +180,109 @@ def CreateBRAINSCutWorkflow( projectid,
     subjectANNLabel_r_thalamus.nii.gz
     """
-    RF12BC.inputs.outputBinaryLeftCaudate= 'subjectANNLabel_l_caudate.nii.gz'
-    RF12BC.inputs.outputBinaryRightCaudate= 'subjectANNLabel_r_caudate.nii.gz'
-    RF12BC.inputs.outputBinaryLeftHippocampus= 'subjectANNLabel_l_hippocampus.nii.gz'
-    RF12BC.inputs.outputBinaryRightHippocampus= 'subjectANNLabel_r_hippocampus.nii.gz'
-    RF12BC.inputs.outputBinaryLeftPutamen= 'subjectANNLabel_l_putamen.nii.gz'
-    RF12BC.inputs.outputBinaryRightPutamen= 'subjectANNLabel_r_putamen.nii.gz'
-    RF12BC.inputs.outputBinaryLeftThalamus= 'subjectANNLabel_l_thalamus.nii.gz'
-    RF12BC.inputs.outputBinaryRightThalamus= 'subjectANNLabel_r_thalamus.nii.gz'
-    RF12BC.inputs.outputBinaryLeftAccumben= 'subjectANNLabel_l_accumben.nii.gz'
-    RF12BC.inputs.outputBinaryRightAccumben= 'subjectANNLabel_r_accumben.nii.gz'
-    RF12BC.inputs.outputBinaryLeftGlobus= 'subjectANNLabel_l_globus.nii.gz'
-    RF12BC.inputs.outputBinaryRightGlobus= 'subjectANNLabel_r_globus.nii.gz'
-
-    cutWF.connect(inputsSpec,'T1Volume',RF12BC,'inputSubjectT1Filename')
+    RF12BC.inputs.outputBinaryLeftCaudate = 'subjectANNLabel_l_caudate.nii.gz'
+    RF12BC.inputs.outputBinaryRightCaudate = 'subjectANNLabel_r_caudate.nii.gz'
+    RF12BC.inputs.outputBinaryLeftHippocampus = 'subjectANNLabel_l_hippocampus.nii.gz'
+    RF12BC.inputs.outputBinaryRightHippocampus = 'subjectANNLabel_r_hippocampus.nii.gz'
+    RF12BC.inputs.outputBinaryLeftPutamen = 'subjectANNLabel_l_putamen.nii.gz'
+    RF12BC.inputs.outputBinaryRightPutamen = 'subjectANNLabel_r_putamen.nii.gz'
+    RF12BC.inputs.outputBinaryLeftThalamus = 'subjectANNLabel_l_thalamus.nii.gz'
+    RF12BC.inputs.outputBinaryRightThalamus = 'subjectANNLabel_r_thalamus.nii.gz'
+    RF12BC.inputs.outputBinaryLeftAccumben = 'subjectANNLabel_l_accumben.nii.gz'
+    RF12BC.inputs.outputBinaryRightAccumben = 'subjectANNLabel_r_accumben.nii.gz'
+    RF12BC.inputs.outputBinaryLeftGlobus = 'subjectANNLabel_l_globus.nii.gz'
+    RF12BC.inputs.outputBinaryRightGlobus = 'subjectANNLabel_r_globus.nii.gz'
+
+    cutWF.connect(inputsSpec, 'T1Volume', RF12BC, 'inputSubjectT1Filename')
     if not t1Only:
-        cutWF.connect(inputsSpec,'T2Volume',RF12BC,'inputSubjectT2Filename')
-        #cutWF.connect(inputsSpec,'TotalGM',RF12BC,'inputSubjectTotalGMFilename')
-        #cutWF.connect(inputsSpec,'RegistrationROI',RF12BC,'inputSubjectRegistrationROIFilename')
+        cutWF.connect(inputsSpec, 'T2Volume', RF12BC, 'inputSubjectT2Filename')
+        # cutWF.connect(inputsSpec,'TotalGM',RF12BC,'inputSubjectTotalGMFilename')
+        # cutWF.connect(inputsSpec,'RegistrationROI',RF12BC,'inputSubjectRegistrationROIFilename')
         # Error cutWF.connect(SGI,'outputVolume',RF12BC,'inputSubjectGadSGFilename')
-        cutWF.connect(SGI,'outputFileName',RF12BC,'inputSubjectGadSGFilename')
-    cutWF.connect(atlasObject,'template_t1',RF12BC,'inputTemplateT1')
-    #cutWF.connect(atlasObject,'template_brain',RF12BC,'inputTemplateRegistrationROIFilename')
-
-    cutWF.connect(atlasObject,'rho',RF12BC,'inputTemplateRhoFilename')
-    cutWF.connect(atlasObject,'phi',RF12BC,'inputTemplatePhiFilename')
-    cutWF.connect(atlasObject,'theta',RF12BC,'inputTemplateThetaFilename')
-
-    cutWF.connect(atlasObject,'l_caudate_ProbabilityMap',RF12BC,'probabilityMapsLeftCaudate')
-    cutWF.connect(atlasObject,'r_caudate_ProbabilityMap',RF12BC,'probabilityMapsRightCaudate')
-    cutWF.connect(atlasObject,'l_hippocampus_ProbabilityMap',RF12BC,'probabilityMapsLeftHippocampus')
-    cutWF.connect(atlasObject,'r_hippocampus_ProbabilityMap',RF12BC,'probabilityMapsRightHippocampus')
-    cutWF.connect(atlasObject,'l_putamen_ProbabilityMap',RF12BC,'probabilityMapsLeftPutamen')
-    cutWF.connect(atlasObject,'r_putamen_ProbabilityMap',RF12BC,'probabilityMapsRightPutamen')
-    cutWF.connect(atlasObject,'l_thalamus_ProbabilityMap',RF12BC,'probabilityMapsLeftThalamus')
-    cutWF.connect(atlasObject,'r_thalamus_ProbabilityMap',RF12BC,'probabilityMapsRightThalamus')
-    cutWF.connect(atlasObject,'l_accumben_ProbabilityMap',RF12BC,'probabilityMapsLeftAccumben')
-    cutWF.connect(atlasObject,'r_accumben_ProbabilityMap',RF12BC,'probabilityMapsRightAccumben')
-    cutWF.connect(atlasObject,'l_globus_ProbabilityMap',RF12BC,'probabilityMapsLeftGlobus')
-    cutWF.connect(atlasObject,'r_globus_ProbabilityMap',RF12BC,'probabilityMapsRightGlobus')
-    ##TODO:
+        cutWF.connect(SGI, 'outputFileName', RF12BC, 'inputSubjectGadSGFilename')
+    cutWF.connect(atlasObject, 'template_t1', RF12BC, 'inputTemplateT1')
+    # cutWF.connect(atlasObject,'template_brain',RF12BC,'inputTemplateRegistrationROIFilename')
+
+    cutWF.connect(atlasObject, 'rho', RF12BC, 'inputTemplateRhoFilename')
+    cutWF.connect(atlasObject, 'phi', RF12BC, 'inputTemplatePhiFilename')
+    cutWF.connect(atlasObject, 'theta', RF12BC, 'inputTemplateThetaFilename')
+
+    cutWF.connect(atlasObject, 'l_caudate_ProbabilityMap', RF12BC, 'probabilityMapsLeftCaudate')
+    cutWF.connect(atlasObject, 'r_caudate_ProbabilityMap', RF12BC, 'probabilityMapsRightCaudate')
+    cutWF.connect(atlasObject, 'l_hippocampus_ProbabilityMap', RF12BC, 'probabilityMapsLeftHippocampus')
+    cutWF.connect(atlasObject, 'r_hippocampus_ProbabilityMap', RF12BC, 'probabilityMapsRightHippocampus')
+    cutWF.connect(atlasObject, 'l_putamen_ProbabilityMap', RF12BC, 'probabilityMapsLeftPutamen')
+    cutWF.connect(atlasObject, 'r_putamen_ProbabilityMap', RF12BC, 'probabilityMapsRightPutamen')
+    cutWF.connect(atlasObject, 'l_thalamus_ProbabilityMap', RF12BC, 'probabilityMapsLeftThalamus')
+    cutWF.connect(atlasObject, 'r_thalamus_ProbabilityMap', RF12BC, 'probabilityMapsRightThalamus')
+    cutWF.connect(atlasObject, 'l_accumben_ProbabilityMap', RF12BC, 'probabilityMapsLeftAccumben')
+    cutWF.connect(atlasObject, 'r_accumben_ProbabilityMap', RF12BC, 'probabilityMapsRightAccumben')
+    cutWF.connect(atlasObject, 'l_globus_ProbabilityMap', RF12BC, 'probabilityMapsLeftGlobus')
+    cutWF.connect(atlasObject, 'r_globus_ProbabilityMap', RF12BC, 'probabilityMapsRightGlobus')
+    # TODO:
     if not t1Only:
-        cutWF.connect(atlasObject,'trainModelFile_txtD0060NT0060_gz',RF12BC,'modelFilename')
+        cutWF.connect(atlasObject, 'trainModelFile_txtD0060NT0060_gz', RF12BC, 'modelFilename')
     else:
         ### TODO: Replace with proper atlasObject name in the future!!!  This is a HACK
         ### to avoid changing the hash keys of the input files from the atlas.
         def ChangeModelPathDirectory(multiModalFileName):
-            return multiModalFileName.replace('modelFiles','T1OnlyModels')
-        cutWF.connect( [ ( atlasObject, RF12BC,
-            [ ( ( 'trainModelFile_txtD0060NT0060_gz', ChangeModelPathDirectory ), 'modelFilename')] ) ] )
+            return multiModalFileName.replace('modelFiles', 'T1OnlyModels')
+        cutWF.connect([(atlasObject, RF12BC,
+                        [(('trainModelFile_txtD0060NT0060_gz', ChangeModelPathDirectory), 'modelFilename')])])

     ## Need to index from next line cutWF.connect(inputsSpec,'atlasToSubjectTransform',RF12BC,'deformationFromTemplateToSubject')
-    cutWF.connect( [ ( inputsSpec, RF12BC, [ (( 'atlasToSubjectTransform', getListIndex, 0 ), 'deformationFromTemplateToSubject')]), ] )
+    cutWF.connect([(inputsSpec, RF12BC, [(('atlasToSubjectTransform', getListIndex, 0), 'deformationFromTemplateToSubject')]), ])

-    mergeAllLabels=pe.Node(interface=Merge(12),name="labelMergeNode")
+    mergeAllLabels = pe.Node(interface=Merge(12), name="labelMergeNode")
     # NOTE: Ordering is important
-    cutWF.connect(RF12BC,'outputBinaryLeftCaudate',mergeAllLabels,'in1')
-    cutWF.connect(RF12BC,'outputBinaryRightCaudate',mergeAllLabels,'in2')
-    cutWF.connect(RF12BC,'outputBinaryLeftPutamen',mergeAllLabels,'in3')
-    cutWF.connect(RF12BC,'outputBinaryRightPutamen',mergeAllLabels,'in4')
-    cutWF.connect(RF12BC,'outputBinaryLeftHippocampus',mergeAllLabels,'in5')
-    cutWF.connect(RF12BC,'outputBinaryRightHippocampus',mergeAllLabels,'in6')
-    cutWF.connect(RF12BC,'outputBinaryLeftThalamus',mergeAllLabels,'in7')
-    cutWF.connect(RF12BC,'outputBinaryRightThalamus',mergeAllLabels,'in8') ## HACK: CHECK ORDERING
-    cutWF.connect(RF12BC,'outputBinaryLeftAccumben',mergeAllLabels,'in9')
-    cutWF.connect(RF12BC,'outputBinaryRightAccumben',mergeAllLabels,'in10')
-    cutWF.connect(RF12BC,'outputBinaryLeftGlobus',mergeAllLabels,'in11')
-    cutWF.connect(RF12BC,'outputBinaryRightGlobus',mergeAllLabels,'in12')
-
-    computeOneLabelMap = pe.Node(interface=Function(['listOfImages','LabelImageName','CSVFileName',
-                                                     'projectid', 'subjectid', 'sessionid' ],
-                                                    ['outputLabelImageName','outputCSVFileName'],
-                                                    function=CreateLabelMap),name="ComputeOneLabelMap")
-    computeOneLabelMap.inputs.projectid=projectid
-    computeOneLabelMap.inputs.subjectid=subjectid
-    computeOneLabelMap.inputs.sessionid=sessionid
-    computeOneLabelMap.inputs.LabelImageName="allLabels.nii.gz"
+    cutWF.connect(RF12BC, 'outputBinaryLeftCaudate', mergeAllLabels, 'in1')
+    cutWF.connect(RF12BC, 'outputBinaryRightCaudate', mergeAllLabels, 'in2')
+    cutWF.connect(RF12BC, 'outputBinaryLeftPutamen', mergeAllLabels, 'in3')
+    cutWF.connect(RF12BC, 'outputBinaryRightPutamen', mergeAllLabels, 'in4')
+    cutWF.connect(RF12BC, 'outputBinaryLeftHippocampus', mergeAllLabels, 'in5')
+    cutWF.connect(RF12BC, 'outputBinaryRightHippocampus', mergeAllLabels, 'in6')
+    cutWF.connect(RF12BC, 'outputBinaryLeftThalamus', mergeAllLabels, 'in7')
+    cutWF.connect(RF12BC, 'outputBinaryRightThalamus', mergeAllLabels, 'in8')  # HACK: CHECK ORDERING
+    cutWF.connect(RF12BC, 'outputBinaryLeftAccumben', mergeAllLabels, 'in9')
+    cutWF.connect(RF12BC, 'outputBinaryRightAccumben', mergeAllLabels, 'in10')
+    cutWF.connect(RF12BC, 'outputBinaryLeftGlobus', mergeAllLabels, 'in11')
+    cutWF.connect(RF12BC, 'outputBinaryRightGlobus', mergeAllLabels, 'in12')
+
+    computeOneLabelMap = pe.Node(interface=Function(['listOfImages', 'LabelImageName', 'CSVFileName',
+                                                     'projectid', 'subjectid', 'sessionid'],
+                                                    ['outputLabelImageName', 'outputCSVFileName'],
+                                                    function=CreateLabelMap), name="ComputeOneLabelMap")
+    computeOneLabelMap.inputs.projectid = projectid
+    computeOneLabelMap.inputs.subjectid = subjectid
+    computeOneLabelMap.inputs.sessionid = sessionid
+    computeOneLabelMap.inputs.LabelImageName = "allLabels.nii.gz"
     computeOneLabelMap.inputs.CSVFileName = "allLabels_seg.csv"
-    cutWF.connect(mergeAllLabels,'out',computeOneLabelMap,'listOfImages')
+    cutWF.connect(mergeAllLabels, 'out', computeOneLabelMap, 'listOfImages')

     outputsSpec = pe.Node(interface=IdentityInterface(fields=[
-        'outputBinaryLeftCaudate','outputBinaryRightCaudate',
-        'outputBinaryLeftHippocampus','outputBinaryRightHippocampus',
-        'outputBinaryLeftPutamen','outputBinaryRightPutamen',
-        'outputBinaryLeftThalamus','outputBinaryRightThalamus',
-        'outputBinaryLeftAccumben','outputBinaryRightAccumben',
-        'outputBinaryLeftGlobus','outputBinaryRightGlobus',
-        'outputLabelImageName','outputCSVFileName',
-        'xmlFilename']), name='outputspec' )
-
-    cutWF.connect(computeOneLabelMap,'outputLabelImageName',outputsSpec,'outputLabelImageName')
-    cutWF.connect(computeOneLabelMap,'outputCSVFileName',outputsSpec,'outputCSVFileName')
-    cutWF.connect(RF12BC,'outputBinaryLeftCaudate',outputsSpec,'outputBinaryLeftCaudate')
-    cutWF.connect(RF12BC,'outputBinaryRightCaudate',outputsSpec,'outputBinaryRightCaudate')
-    cutWF.connect(RF12BC,'outputBinaryLeftHippocampus',outputsSpec,'outputBinaryLeftHippocampus')
-    cutWF.connect(RF12BC,'outputBinaryRightHippocampus',outputsSpec,'outputBinaryRightHippocampus')
-    cutWF.connect(RF12BC,'outputBinaryLeftPutamen',outputsSpec,'outputBinaryLeftPutamen')
-    cutWF.connect(RF12BC,'outputBinaryRightPutamen',outputsSpec,'outputBinaryRightPutamen')
-    cutWF.connect(RF12BC,'outputBinaryLeftThalamus',outputsSpec,'outputBinaryLeftThalamus')
-    cutWF.connect(RF12BC,'outputBinaryRightThalamus',outputsSpec,'outputBinaryRightThalamus')
-    cutWF.connect(RF12BC,'outputBinaryLeftAccumben',outputsSpec,'outputBinaryLeftAccumben')
-    cutWF.connect(RF12BC,'outputBinaryRightAccumben',outputsSpec,'outputBinaryRightAccumben')
-    cutWF.connect(RF12BC,'outputBinaryLeftGlobus',outputsSpec,'outputBinaryLeftGlobus')
-    cutWF.connect(RF12BC,'outputBinaryRightGlobus',outputsSpec,'outputBinaryRightGlobus')
-    cutWF.connect(RF12BC,'xmlFilename',outputsSpec,'xmlFilename')
+        'outputBinaryLeftCaudate', 'outputBinaryRightCaudate',
+        'outputBinaryLeftHippocampus', 'outputBinaryRightHippocampus',
+        'outputBinaryLeftPutamen', 'outputBinaryRightPutamen',
+        'outputBinaryLeftThalamus', 'outputBinaryRightThalamus',
+        'outputBinaryLeftAccumben', 'outputBinaryRightAccumben',
+        'outputBinaryLeftGlobus', 'outputBinaryRightGlobus',
+        'outputLabelImageName', 'outputCSVFileName',
+        'xmlFilename']), name='outputspec')

+    cutWF.connect(computeOneLabelMap, 'outputLabelImageName', outputsSpec, 'outputLabelImageName')
+    cutWF.connect(computeOneLabelMap, 'outputCSVFileName', outputsSpec, 'outputCSVFileName')
+    cutWF.connect(RF12BC, 'outputBinaryLeftCaudate', outputsSpec, 'outputBinaryLeftCaudate')
+    cutWF.connect(RF12BC, 'outputBinaryRightCaudate', outputsSpec, 'outputBinaryRightCaudate')
+    cutWF.connect(RF12BC, 'outputBinaryLeftHippocampus', outputsSpec, 'outputBinaryLeftHippocampus')
+    cutWF.connect(RF12BC, 'outputBinaryRightHippocampus', outputsSpec, 'outputBinaryRightHippocampus')
+    cutWF.connect(RF12BC, 'outputBinaryLeftPutamen', outputsSpec, 'outputBinaryLeftPutamen')
+    cutWF.connect(RF12BC, 'outputBinaryRightPutamen', outputsSpec, 'outputBinaryRightPutamen')
+    cutWF.connect(RF12BC, 'outputBinaryLeftThalamus', outputsSpec, 'outputBinaryLeftThalamus')
+    cutWF.connect(RF12BC, 'outputBinaryRightThalamus', outputsSpec, 'outputBinaryRightThalamus')
+    cutWF.connect(RF12BC, 'outputBinaryLeftAccumben', outputsSpec, 'outputBinaryLeftAccumben')
+    cutWF.connect(RF12BC, 'outputBinaryRightAccumben', outputsSpec, 'outputBinaryRightAccumben')
+    cutWF.connect(RF12BC, 'outputBinaryLeftGlobus', outputsSpec, 'outputBinaryLeftGlobus')
+    cutWF.connect(RF12BC, 'outputBinaryRightGlobus', outputsSpec, 'outputBinaryRightGlobus')
+    cutWF.connect(RF12BC, 'xmlFilename', outputsSpec, 'xmlFilename')

     return cutWF
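## Aside: a minimal sketch (not part of the patch) of the "function modifier"
## connect form used above: a (source_output, function, argument) tuple pushes
## the source value through the function before it reaches the destination
## input.  Node names here are hypothetical.
import nipype.pipeline.engine as pe
from nipype.interfaces.utility import IdentityInterface

def getListIndex(in_list, index):
    return in_list[index]

src = pe.Node(interface=IdentityInterface(fields=['transform_list']), name='src_sketch')
dst = pe.Node(interface=IdentityInterface(fields=['one_transform']), name='dst_sketch')

wf = pe.Workflow(name='connect_modifier_sketch')
wf.connect([(src, dst,
             [(('transform_list', getListIndex, 0), 'one_transform')])])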
diff --git a/AutoWorkup/WorkupT1T2FreeSurfer.py b/AutoWorkup/WorkupT1T2FreeSurfer.py
index 7b15ec62..0258d7e9 100644
--- a/AutoWorkup/WorkupT1T2FreeSurfer.py
+++ b/AutoWorkup/WorkupT1T2FreeSurfer.py
@@ -16,64 +16,66 @@
 baw200.connect(SplitAvgBABC,'avgBABCT1',myLocalFSWF,'inputspec.T1_files')
 """
-def MakeFreesurferOutputDirectory(subjects_dir,subject_id):
-    return subjects_dir+'/'+subject_id
-def GenerateWFName(projectid, subjectid, sessionid,WFName):
-    return WFName+'_'+str(subjectid)+"_"+str(sessionid)+"_"+str(projectid)
+def MakeFreesurferOutputDirectory(subjects_dir, subject_id):
+    return subjects_dir + '/' + subject_id

-def CreateFreeSurferWorkflow(projectid, subjectid, sessionid,WFname,CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,RunAllFSComponents=True,RunMultiMode=True):
-    freesurferWF= pe.Workflow(name=GenerateWFName(projectid, subjectid, sessionid,WFname))
-    inputsSpec = pe.Node(interface=IdentityInterface(fields=['FreeSurfer_ID','T1_files','T2_files',
-                                                             'label_file','mask_file']), name='inputspec' )
-    outputsSpec = pe.Node(interface=IdentityInterface(fields=['subject_id','subjects_dir',
-                                                              'FreesurferOutputDirectory','cnr_optimal_image']), name='outputspec' )
+def GenerateWFName(projectid, subjectid, sessionid, WFName):
+    return WFName + '_' + str(subjectid) + "_" + str(sessionid) + "_" + str(projectid)
+
+
+def CreateFreeSurferWorkflow(projectid, subjectid, sessionid, WFname, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, RunAllFSComponents=True, RunMultiMode=True):
+    freesurferWF = pe.Workflow(name=GenerateWFName(projectid, subjectid, sessionid, WFname))
+
+    inputsSpec = pe.Node(interface=IdentityInterface(fields=['FreeSurfer_ID', 'T1_files', 'T2_files',
+                                                             'label_file', 'mask_file']), name='inputspec')
+    outputsSpec = pe.Node(interface=IdentityInterface(fields=['subject_id', 'subjects_dir',
+                                                              'FreesurferOutputDirectory', 'cnr_optimal_image']), name='outputspec')

     if RunMultiMode:
-        mergeT1T2 = pe.Node(interface=Merge(2),name="Merge_T1T2")
-        freesurferWF.connect(inputsSpec,'T1_files', mergeT1T2,'in1')
-        freesurferWF.connect(inputsSpec,'T2_files', mergeT1T2,'in2')
+        mergeT1T2 = pe.Node(interface=Merge(2), name="Merge_T1T2")
+        freesurferWF.connect(inputsSpec, 'T1_files', mergeT1T2, 'in1')
+        freesurferWF.connect(inputsSpec, 'T2_files', mergeT1T2, 'in2')

-        #Some constants based on assumpts about the label_file from BRAINSABC
+        # Some constants based on assumptions about the label_file from BRAINSABC
         white_label = 1
         grey_label = 2

-        msLDA_GenerateWeights = pe.Node(interface=MS_LDA(),name="MS_LDA")
-        MSLDA_sge_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 1 -l h_vmem=12G,mem_free=2G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
-        msLDA_GenerateWeights.plugin_args=MSLDA_sge_options_dictionary
-        msLDA_GenerateWeights.inputs.lda_labels=[white_label,grey_label]
+        msLDA_GenerateWeights = pe.Node(interface=MS_LDA(), name="MS_LDA")
+        MSLDA_sge_options_dictionary = {'qsub_args': '-S /bin/bash -pe smp1 1 -l h_vmem=12G,mem_free=2G -o /dev/null -e /dev/null ' + CLUSTER_QUEUE, 'overwrite': True}
+        msLDA_GenerateWeights.plugin_args = MSLDA_sge_options_dictionary
+        msLDA_GenerateWeights.inputs.lda_labels = [white_label, grey_label]
         msLDA_GenerateWeights.inputs.weight_file = 'weights.txt'
-        msLDA_GenerateWeights.inputs.use_weights=False
+        msLDA_GenerateWeights.inputs.use_weights = False
         msLDA_GenerateWeights.inputs.output_synth = 'synth_out.nii.gz'
-        #msLDA_GenerateWeights.inputs.vol_synth_file = 'synth_out.nii.gz'
-        #msLDA_GenerateWeights.inputs.shift = 0 # value to shift by
+        # msLDA_GenerateWeights.inputs.vol_synth_file = 'synth_out.nii.gz'
+        # msLDA_GenerateWeights.inputs.shift = 0 # value to shift by

-        freesurferWF.connect(mergeT1T2,'out', msLDA_GenerateWeights,'images')
-        freesurferWF.connect(inputsSpec,'label_file', msLDA_GenerateWeights,'label_file')
-        #freesurferWF.connect(inputsSpec,'mask_file', msLDA_GenerateWeights,'mask_file') ## Mask file MUST be unsigned char
-        freesurferWF.connect(msLDA_GenerateWeights,'vol_synth_file',outputsSpec,'cnr_optimal_image')
+        freesurferWF.connect(mergeT1T2, 'out', msLDA_GenerateWeights, 'images')
+        freesurferWF.connect(inputsSpec, 'label_file', msLDA_GenerateWeights, 'label_file')
+        # freesurferWF.connect(inputsSpec,'mask_file', msLDA_GenerateWeights,'mask_file') ## Mask file MUST be unsigned char
+        freesurferWF.connect(msLDA_GenerateWeights, 'vol_synth_file', outputsSpec, 'cnr_optimal_image')

     if RunAllFSComponents == True:
         print("""Run Freesurfer ReconAll at""")
-        fs_reconall = pe.Node(interface=ReconAll(),name="FS510")
-        freesurfer_sge_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 4 -l h_vmem=18G,mem_free=8G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
-        fs_reconall.plugin_args=freesurfer_sge_options_dictionary
+        fs_reconall = pe.Node(interface=ReconAll(), name="FS510")
+        freesurfer_sge_options_dictionary = {'qsub_args': '-S /bin/bash -pe smp1 4 -l h_vmem=18G,mem_free=8G -o /dev/null -e /dev/null ' + CLUSTER_QUEUE, 'overwrite': True}
+        fs_reconall.plugin_args = freesurfer_sge_options_dictionary
         fs_reconall.inputs.directive = 'all'
-        freesurferWF.connect(inputsSpec,'subject_id',fs_reconall,'subject_id')
+        freesurferWF.connect(inputsSpec, 'subject_id', fs_reconall, 'subject_id')
         if RunMultiMode:
             ## Use the output of the synthesized T1 with maximized contrast
-            freesurferWF.connect(msLDA_GenerateWeights,'vol_synth_file', fs_reconall,'T1_files')
+            freesurferWF.connect(msLDA_GenerateWeights, 'vol_synth_file', fs_reconall, 'T1_files')
         else:
             ## Use the output of the T1 only image
-            freesurferWF.connect(inputsSpec,'T1_files', fs_reconall,'T1_files')
-
-        computeFinalDirectory = pe.Node( Function(function=MakeFreesurferOutputDirectory, input_names = ['subjects_dir','subject_id'], output_names = ['FreesurferOutputDirectory']), run_without_submitting=True, name="99_computeFreesurferOutputDirectory")
-        freesurferWF.connect(fs_reconall,'subjects_dir',computeFinalDirectory,'subjects_dir')
-        freesurferWF.connect(fs_reconall,'subject_id',computeFinalDirectory,'subject_id')
+            freesurferWF.connect(inputsSpec, 'T1_files', fs_reconall, 'T1_files')

-        freesurferWF.connect(fs_reconall,'subject_id',outputsSpec,'subject_id')
-        freesurferWF.connect(fs_reconall,'subjects_dir',outputsSpec,'subjects_dir')
-        freesurferWF.connect(computeFinalDirectory,'FreesurferOutputDirectory',outputsSpec,'FreesurferOutputDirectory')
+        computeFinalDirectory = pe.Node(Function(function=MakeFreesurferOutputDirectory, input_names=['subjects_dir', 'subject_id'], output_names=['FreesurferOutputDirectory']), run_without_submitting=True, name="99_computeFreesurferOutputDirectory")
+        freesurferWF.connect(fs_reconall, 'subjects_dir', computeFinalDirectory, 'subjects_dir')
+        freesurferWF.connect(fs_reconall, 'subject_id', computeFinalDirectory, 'subject_id')

+        freesurferWF.connect(fs_reconall, 'subject_id', outputsSpec, 'subject_id')
+        freesurferWF.connect(fs_reconall, 'subjects_dir', outputsSpec, 'subjects_dir')
+        freesurferWF.connect(computeFinalDirectory, 'FreesurferOutputDirectory', outputsSpec, 'FreesurferOutputDirectory')
     return freesurferWF
inputsSpec.inputs.subjects_dir=constructed_FS_SUBJECTS_DIR ## HACK + os.environ['SUBJECTS_DIR'] = constructed_FS_SUBJECTS_DIR + inputsSpec.inputs.subjects_dir = constructed_FS_SUBJECTS_DIR # HACK if RunMultiMode: - mergeT1T2 = pe.Node(interface=Merge(2),name="Merge_T1T2") - freesurferWF.connect(inputsSpec,'T1_files', mergeT1T2,'in1') - freesurferWF.connect(inputsSpec,'T2_files', mergeT1T2,'in2') + mergeT1T2 = pe.Node(interface=Merge(2), name="Merge_T1T2") + freesurferWF.connect(inputsSpec, 'T1_files', mergeT1T2, 'in1') + freesurferWF.connect(inputsSpec, 'T2_files', mergeT1T2, 'in2') - #Some constants based on assumpts about the label_file from BRAINSABC + # Some constants based on assumptions about the label_file from BRAINSABC white_label = 1 grey_label = 2 - - msLDA_GenerateWeights = pe.Node(interface=MS_LDA(),name="MS_LDA") - MSLDA_sge_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 1 -l h_vmem=12G,mem_free=2G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True} - msLDA_GenerateWeights.plugin_args=MSLDA_sge_options_dictionary - msLDA_GenerateWeights.inputs.lda_labels=[white_label,grey_label] + msLDA_GenerateWeights = pe.Node(interface=MS_LDA(), name="MS_LDA") + MSLDA_sge_options_dictionary = {'qsub_args': '-S /bin/bash -pe smp1 1 -l h_vmem=12G,mem_free=2G -o /dev/null -e /dev/null ' + CLUSTER_QUEUE, 'overwrite': True} + msLDA_GenerateWeights.plugin_args = MSLDA_sge_options_dictionary + msLDA_GenerateWeights.inputs.lda_labels = [white_label, grey_label] msLDA_GenerateWeights.inputs.weight_file = 'weights.txt' - msLDA_GenerateWeights.inputs.use_weights=False + msLDA_GenerateWeights.inputs.use_weights = False msLDA_GenerateWeights.inputs.vol_synth_file = 'synth_out.nii.gz' - #msLDA_GenerateWeights.inputs.vol_synth_file = 'synth_out.nii.gz' - #msLDA_GenerateWeights.inputs.shift = 0 # value to shift by + # msLDA_GenerateWeights.inputs.vol_synth_file = 'synth_out.nii.gz' + # msLDA_GenerateWeights.inputs.shift = 0 # value to shift by - freesurferWF.connect(mergeT1T2,'out', msLDA_GenerateWeights,'images') - #freesurferWF.connect(inputsSpec,'subjects_dir', msLDA_GenerateWeights,'subjects_dir') - freesurferWF.connect(inputsSpec,'label_file', msLDA_GenerateWeights,'label_file') - #freesurferWF.connect(inputsSpec,'mask_file', msLDA_GenerateWeights,'mask_file') ## Mask file MUST be unsigned char - freesurferWF.connect(msLDA_GenerateWeights,'vol_synth_file',outputsSpec,'cnr_optimal_image') + freesurferWF.connect(mergeT1T2, 'out', msLDA_GenerateWeights, 'images') + # freesurferWF.connect(inputsSpec,'subjects_dir', msLDA_GenerateWeights,'subjects_dir') + freesurferWF.connect(inputsSpec, 'label_file', msLDA_GenerateWeights, 'label_file') + # freesurferWF.connect(inputsSpec,'mask_file', msLDA_GenerateWeights,'mask_file') ## Mask file MUST be unsigned char + freesurferWF.connect(msLDA_GenerateWeights, 'vol_synth_file', outputsSpec, 'cnr_optimal_image') if RunAllFSComponents == True: print("""Run Freesurfer ReconAll at""") - fs_reconall = pe.Node(interface=fswrap.FSScript(),name="FS52_custom") - freesurfer_sge_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 1 -l h_vmem=18G,mem_free=8G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True} - fs_reconall.plugin_args=freesurfer_sge_options_dictionary - #fs_reconall.inputs.directive = 'all' - #fs_reconall.inputs.fs_env_script = '' # NOTE: NOT NEEDED HERE 'FreeSurferEnv.sh' - #fs_reconall.inputs.fs_home = '' # NOTE: NOT NEEDED HERE - freesurferWF.connect(inputsSpec,'FreeSurfer_ID',fs_reconall,'subject_id') + fs_reconall =
pe.Node(interface=fswrap.FSScript(), name="FS52_custom") + freesurfer_sge_options_dictionary = {'qsub_args': '-S /bin/bash -pe smp1 1 -l h_vmem=18G,mem_free=8G -o /dev/null -e /dev/null ' + CLUSTER_QUEUE, 'overwrite': True} + fs_reconall.plugin_args = freesurfer_sge_options_dictionary + # fs_reconall.inputs.directive = 'all' + # fs_reconall.inputs.fs_env_script = '' # NOTE: NOT NEEDED HERE 'FreeSurferEnv.sh' + # fs_reconall.inputs.fs_home = '' # NOTE: NOT NEEDED HERE + freesurferWF.connect(inputsSpec, 'FreeSurfer_ID', fs_reconall, 'subject_id') if RunMultiMode: ## Use the output of the synthesized T1 with maximized contrast - freesurferWF.connect(msLDA_GenerateWeights,'vol_synth_file', fs_reconall,'T1_files') + freesurferWF.connect(msLDA_GenerateWeights, 'vol_synth_file', fs_reconall, 'T1_files') else: ## Use the output of the T1 only image - freesurferWF.connect(inputsSpec,'T1_files', fs_reconall,'T1_files') + freesurferWF.connect(inputsSpec, 'T1_files', fs_reconall, 'T1_files') - computeFinalDirectory = pe.Node( Function(function=MakeFreesurferOutputDirectory, input_names = ['subjects_dir','subject_id'], output_names = ['FreesurferOutputDirectory']), run_without_submitting=True, name="99_computeFreesurferOutputDirectory") - freesurferWF.connect(inputsSpec,'subjects_dir',computeFinalDirectory,'subjects_dir') - freesurferWF.connect(inputsSpec,'FreeSurfer_ID',computeFinalDirectory,'subject_id') + computeFinalDirectory = pe.Node(Function(function=MakeFreesurferOutputDirectory, input_names=['subjects_dir', 'subject_id'], output_names=['FreesurferOutputDirectory']), run_without_submitting=True, name="99_computeFreesurferOutputDirectory") + freesurferWF.connect(inputsSpec, 'subjects_dir', computeFinalDirectory, 'subjects_dir') + freesurferWF.connect(inputsSpec, 'FreeSurfer_ID', computeFinalDirectory, 'subject_id') - freesurferWF.connect(inputsSpec,'label_file',fs_reconall,'brainmask') - freesurferWF.connect(inputsSpec,'wm_prob',fs_reconall,'wm_prob') - freesurferWF.connect(inputsSpec,'subjects_dir',fs_reconall,'subjects_dir') + freesurferWF.connect(inputsSpec, 'label_file', fs_reconall, 'brainmask') + freesurferWF.connect(inputsSpec, 'wm_prob', fs_reconall, 'wm_prob') + freesurferWF.connect(inputsSpec, 'subjects_dir', fs_reconall, 'subjects_dir') - freesurferWF.connect(inputsSpec,'FreeSurfer_ID',outputsSpec,'subject_id') - freesurferWF.connect(inputsSpec,'subjects_dir',outputsSpec,'subjects_dir') - freesurferWF.connect(computeFinalDirectory,'FreesurferOutputDirectory',outputsSpec,'FreesurferOutputDirectory') + freesurferWF.connect(inputsSpec, 'FreeSurfer_ID', outputsSpec, 'subject_id') + freesurferWF.connect(inputsSpec, 'subjects_dir', outputsSpec, 'subjects_dir') + freesurferWF.connect(computeFinalDirectory, 'FreesurferOutputDirectory', outputsSpec, 'FreesurferOutputDirectory') return freesurferWF diff --git a/AutoWorkup/WorkupT1T2LandmarkInitialization.py b/AutoWorkup/WorkupT1T2LandmarkInitialization.py index f55552cb..b4020765 100644 --- a/AutoWorkup/WorkupT1T2LandmarkInitialization.py +++ b/AutoWorkup/WorkupT1T2LandmarkInitialization.py @@ -19,44 +19,45 @@ landmarkInitializeWF.connect(BAtlas,'template_t1',myLocalLMIWF,'inputsSpec.atlasVolume') """ -def CreateLandmarkInitializeWorkflow(WFname,BCD_model_path,InterpolationMode,DoReverseInit=False): - landmarkInitializeWF= pe.Workflow(name=WFname) + + +def CreateLandmarkInitializeWorkflow(WFname, BCD_model_path, InterpolationMode, DoReverseInit=False): + landmarkInitializeWF = pe.Workflow(name=WFname) ############# inputsSpec = 
pe.Node(interface=IdentityInterface(fields=['inputVolume', - 'atlasLandmarkFilename','atlasWeightFilename','atlasVolume']), + 'atlasLandmarkFilename', 'atlasWeightFilename', 'atlasVolume']), run_without_submitting=True, - name='inputspec' ) + name='inputspec') ############# outputsSpec = pe.Node(interface=IdentityInterface(fields=['outputLandmarksInACPCAlignedSpace', - 'outputResampledVolume','outputResampledCroppedVolume', - 'outputLandmarksInInputSpace', - 'outputTransform','outputMRML','atlasToSubjectTransform' - ]), - run_without_submitting=True, - name='outputspec' ) - + 'outputResampledVolume', 'outputResampledCroppedVolume', + 'outputLandmarksInInputSpace', + 'outputTransform', 'outputMRML', 'atlasToSubjectTransform' + ]), + run_without_submitting=True, + name='outputspec') ########################################################/ # Run ACPC Detect on first T1 Image - Base Image ######################################################## BCD = pe.Node(interface=BRAINSConstellationDetector(), name="BCD") ## Use program default BCD.inputs.inputTemplateModel = T1ACPCModelFile - ##BCD.inputs.outputVolume = "BCD_OUT" + "_ACPC_InPlace.nii.gz" #$# T1AcpcImageList - BCD.inputs.outputTransform = "BCD" + "_Original2ACPC_transform.h5" + # BCD.inputs.outputVolume = "BCD_OUT" + "_ACPC_InPlace.nii.gz" #$# T1AcpcImageList + BCD.inputs.outputTransform = "BCD" + "_Original2ACPC_transform.h5" BCD.inputs.outputResampledVolume = "BCD" + "_ACPC.nii.gz" BCD.inputs.outputLandmarksInInputSpace = "BCD" + "_Original.fcsv" BCD.inputs.outputLandmarksInACPCAlignedSpace = "BCD" + "_ACPC_Landmarks.fcsv" - #BCD.inputs.outputMRML = "BCD" + "_Scene.mrml" + # BCD.inputs.outputMRML = "BCD" + "_Scene.mrml" BCD.inputs.interpolationMode = InterpolationMode BCD.inputs.houghEyeDetectorMode = 1 # Look for dark eyes like on a T1 image, 0=Look for bright eyes like in a T2 image - BCD.inputs.acLowerBound = 80.0 # Chop the data set 80mm below the AC PC point. - BCD.inputs.LLSModel = os.path.join(BCD_model_path,'LLSModel-2ndVersion.h5') - BCD.inputs.inputTemplateModel = os.path.join(BCD_model_path,'T1-2ndVersion.mdl') + BCD.inputs.acLowerBound = 80.0 # Chop the data set 80mm below the AC PC point. + BCD.inputs.LLSModel = os.path.join(BCD_model_path, 'LLSModel-2ndVersion.h5') + BCD.inputs.inputTemplateModel = os.path.join(BCD_model_path, 'T1-2ndVersion.mdl') # Entries below are of the form: - landmarkInitializeWF.connect( inputsSpec , 'inputVolume', BCD, 'inputVolume') + landmarkInitializeWF.connect(inputsSpec, 'inputVolume', BCD, 'inputVolume') ######################################################## # Run BLI atlas_to_subject @@ -65,8 +66,8 @@ def CreateLandmarkInitializeWorkflow(WFname,BCD_model_path,InterpolationMode,DoR BLI.inputs.outputTransformFilename = "landmarkInitializer_atlas_to_subject_transform.h5" landmarkInitializeWF.connect(inputsSpec, 'atlasWeightFilename', BLI, 'inputWeightFilename') - landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BLI, 'inputMovingLandmarkFilename' ) - landmarkInitializeWF.connect(BCD,'outputLandmarksInACPCAlignedSpace', BLI,'inputFixedLandmarkFilename'), + landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BLI, 'inputMovingLandmarkFilename') + landmarkInitializeWF.connect(BCD, 'outputLandmarksInACPCAlignedSpace', BLI, 'inputFixedLandmarkFilename'), ## This is for debugging purposes, and it is not intended for general use. 
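Before the reverse-initialization debugging branch that the comment above introduces, a minimal usage sketch (not part of the diff) of how this landmark-initialize sub-workflow is consumed from a parent pipeline; the model and data paths below are hypothetical:

from WorkupT1T2LandmarkInitialization import CreateLandmarkInitializeWorkflow

lmiWF = CreateLandmarkInitializeWorkflow('LandmarkInitialize',
                                         BCD_model_path='/opt/bcd_models',  # hypothetical path
                                         InterpolationMode='Linear',
                                         DoReverseInit=False)
# Populate the inputspec fields declared above; downstream nodes then consume
# 'outputspec.outputResampledCroppedVolume' and 'outputspec.atlasToSubjectTransform'.
lmiWF.inputs.inputspec.inputVolume = '/data/subject_T1.nii.gz'               # hypothetical path
lmiWF.inputs.inputspec.atlasLandmarkFilename = '/data/atlas_landmarks.fcsv'  # hypothetical path
lmiWF.inputs.inputspec.atlasWeightFilename = '/data/landmark_weights.csv'    # hypothetical path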
if DoReverseInit == True: @@ -77,39 +78,39 @@ def CreateLandmarkInitializeWorkflow(WFname,BCD_model_path,InterpolationMode,DoR BLI2Atlas.inputs.outputTransformFilename = "landmarkInitializer_subject_to_atlas_transform.h5" landmarkInitializeWF.connect(inputsSpec, 'atlasWeightFilename', BLI2Atlas, 'inputWeightFilename') - landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BLI2Atlas, 'inputFixedLandmarkFilename' ) - landmarkInitializeWF.connect(BCD,'outputLandmarksInInputSpace',BLI2Atlas,'inputMovingLandmarkFilename') + landmarkInitializeWF.connect(inputsSpec, 'atlasLandmarkFilename', BLI2Atlas, 'inputFixedLandmarkFilename') + landmarkInitializeWF.connect(BCD, 'outputLandmarksInInputSpace', BLI2Atlas, 'inputMovingLandmarkFilename') - Resample2Atlas=pe.Node(interface=BRAINSResample(),name="Resample2Atlas") + Resample2Atlas = pe.Node(interface=BRAINSResample(), name="Resample2Atlas") Resample2Atlas.inputs.interpolationMode = "Linear" Resample2Atlas.inputs.outputVolume = "subject2atlas.nii.gz" - landmarkInitializeWF.connect( inputsSpec , 'inputVolume', Resample2Atlas, 'inputVolume') - landmarkInitializeWF.connect(BLI2Atlas,'outputTransformFilename',Resample2Atlas,'warpTransform') - landmarkInitializeWF.connect(inputsSpec,'atlasVolume',Resample2Atlas,'referenceVolume') + landmarkInitializeWF.connect(inputsSpec, 'inputVolume', Resample2Atlas, 'inputVolume') + landmarkInitializeWF.connect(BLI2Atlas, 'outputTransformFilename', Resample2Atlas, 'warpTransform') + landmarkInitializeWF.connect(inputsSpec, 'atlasVolume', Resample2Atlas, 'referenceVolume') DO_DEBUG = True if (DoReverseInit == True) and (DO_DEBUG == True): - ResampleFromAtlas=pe.Node(interface=BRAINSResample(),name="ResampleFromAtlas") + ResampleFromAtlas = pe.Node(interface=BRAINSResample(), name="ResampleFromAtlas") ResampleFromAtlas.inputs.interpolationMode = "Linear" ResampleFromAtlas.inputs.outputVolume = "atlas2subject.nii.gz" - landmarkInitializeWF.connect( inputsSpec , 'atlasVolume', ResampleFromAtlas, 'inputVolume') - landmarkInitializeWF.connect(BLI,'outputTransformFilename',ResampleFromAtlas,'warpTransform') - landmarkInitializeWF.connect(BCD,'outputResampledVolume',ResampleFromAtlas,'referenceVolume') + landmarkInitializeWF.connect(inputsSpec, 'atlasVolume', ResampleFromAtlas, 'inputVolume') + landmarkInitializeWF.connect(BLI, 'outputTransformFilename', ResampleFromAtlas, 'warpTransform') + landmarkInitializeWF.connect(BCD, 'outputResampledVolume', ResampleFromAtlas, 'referenceVolume') BROIAUTO = pe.Node(interface=BRAINSROIAuto(), name="BROIAuto_cropped") - BROIAUTO.inputs.outputVolume="Cropped_BCD_ACPC_Aligned.nii.gz" - BROIAUTO.inputs.ROIAutoDilateSize=10 - BROIAUTO.inputs.cropOutput=True - landmarkInitializeWF.connect(BCD,'outputResampledVolume', BROIAUTO,'inputVolume') - - landmarkInitializeWF.connect(BROIAUTO,'outputVolume',outputsSpec,'outputResampledCroppedVolume') - landmarkInitializeWF.connect(BCD,'outputLandmarksInACPCAlignedSpace',outputsSpec,'outputLandmarksInACPCAlignedSpace') - landmarkInitializeWF.connect(BCD,'outputResampledVolume',outputsSpec,'outputResampledVolume') - landmarkInitializeWF.connect(BCD,'outputLandmarksInInputSpace',outputsSpec,'outputLandmarksInInputSpace') - landmarkInitializeWF.connect(BCD,'outputTransform',outputsSpec,'outputTransform') - landmarkInitializeWF.connect(BCD,'outputMRML',outputsSpec,'outputMRML') - landmarkInitializeWF.connect(BLI,'outputTransformFilename',outputsSpec,'atlasToSubjectTransform') + BROIAUTO.inputs.outputVolume = "Cropped_BCD_ACPC_Aligned.nii.gz" + 
BROIAUTO.inputs.ROIAutoDilateSize = 10 + BROIAUTO.inputs.cropOutput = True + landmarkInitializeWF.connect(BCD, 'outputResampledVolume', BROIAUTO, 'inputVolume') + + landmarkInitializeWF.connect(BROIAUTO, 'outputVolume', outputsSpec, 'outputResampledCroppedVolume') + landmarkInitializeWF.connect(BCD, 'outputLandmarksInACPCAlignedSpace', outputsSpec, 'outputLandmarksInACPCAlignedSpace') + landmarkInitializeWF.connect(BCD, 'outputResampledVolume', outputsSpec, 'outputResampledVolume') + landmarkInitializeWF.connect(BCD, 'outputLandmarksInInputSpace', outputsSpec, 'outputLandmarksInInputSpace') + landmarkInitializeWF.connect(BCD, 'outputTransform', outputsSpec, 'outputTransform') + landmarkInitializeWF.connect(BCD, 'outputMRML', outputsSpec, 'outputMRML') + landmarkInitializeWF.connect(BLI, 'outputTransformFilename', outputsSpec, 'atlasToSubjectTransform') return landmarkInitializeWF diff --git a/AutoWorkup/WorkupT1T2PERSISTANCE_CHECK.py b/AutoWorkup/WorkupT1T2PERSISTANCE_CHECK.py index 72965bf1..b54f4f76 100644 --- a/AutoWorkup/WorkupT1T2PERSISTANCE_CHECK.py +++ b/AutoWorkup/WorkupT1T2PERSISTANCE_CHECK.py @@ -18,38 +18,39 @@ PERSISTANCE_CHECKWF.connect(BLI,'outputTransformFilename',myLocalPERSISTANCE_CHECKWF,'initialTransform') """ + def CreatePERSISTANCE_CHECKWorkflow(WFname): """ The purpose of this workflow is to debug the automatic deletion of files from the output directory. """ - PERSISTANCE_CHECKWF= pe.Workflow(name=WFname) + PERSISTANCE_CHECKWF = pe.Workflow(name=WFname) - inputsSpec = pe.Node(interface=IdentityInterface(fields=['fixedVolume','fixedBinaryVolume','movingVolume','movingBinaryVolume','initialTransform']), name='inputspec' ) - PERSISTANCE_CHECKWF.connect(inputsSpec,'subject_id',fs_reconall,'subject_id') - PERSISTANCE_CHECKWF.connect(inputsSpec,'T1_files', fs_reconall,'T1_files') + inputsSpec = pe.Node(interface=IdentityInterface(fields=['fixedVolume', 'fixedBinaryVolume', 'movingVolume', 'movingBinaryVolume', 'initialTransform']), name='inputspec') + PERSISTANCE_CHECKWF.connect(inputsSpec, 'subject_id', fs_reconall, 'subject_id') + PERSISTANCE_CHECKWF.connect(inputsSpec, 'T1_files', fs_reconall, 'T1_files') print("DOING FILE PERSISTANCE CHECK") - PERSISTANCE_CHECK = pe.Node(interface=BRAINSFit(),name="99999_PERSISTANCE_CHECK_PERSISTANCE_CHECK") - PERSISTANCE_CHECK.inputs.costMetric="MMI" - PERSISTANCE_CHECK.inputs.debugLevel=10 - PERSISTANCE_CHECK.inputs.maskProcessingMode="ROI" - PERSISTANCE_CHECK.inputs.numberOfSamples=1000 - PERSISTANCE_CHECK.inputs.numberOfIterations=[1500] - PERSISTANCE_CHECK.inputs.numberOfHistogramBins=50 - PERSISTANCE_CHECK.inputs.maximumStepLength=0.2 - PERSISTANCE_CHECK.inputs.minimumStepLength=[0.005] - PERSISTANCE_CHECK.inputs.transformType= ["Affine"] - PERSISTANCE_CHECK.inputs.maxBSplineDisplacement= 7 - PERSISTANCE_CHECK.inputs.maskInferiorCutOffFromCenter=65 - PERSISTANCE_CHECK.inputs.splineGridSize=[28,20,24] - PERSISTANCE_CHECK.inputs.outputVolume="Trial_Initializer_Output.nii.gz" - PERSISTANCE_CHECK.inputs.outputTransform="Trial_Initializer_Output.h5" - - PERSISTANCE_CHECKWF.connect(inputsSpec,'fixedVolume', PERSISTANCE_CHECK,'fixedVolume') - PERSISTANCE_CHECKWF.connect(inputsSpec,'fixedBinaryVolume', PERSISTANCE_CHECK,'fixedBinaryVolume') - PERSISTANCE_CHECKWF.connect(inputsSpec,'movingVolume', PERSISTANCE_CHECK,'movingVolume') - PERSISTANCE_CHECKWF.connect(inputsSpec,'movingBinaryVolume',PERSISTANCE_CHECK,'movingBinaryVolume') - PERSISTANCE_CHECKWF.connect(inputsSpec,'initialTransform', PERSISTANCE_CHECK,'initialTransform') - - 
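An aside before this workflow's outputspec: every heavyweight node in these files attaches an sge_options dictionary via plugin_args, always following the same pattern. A minimal sketch, assuming execution with nipype's SGE plugin; the node name and queue strings are hypothetical:

import nipype.pipeline.engine as pe
from nipype.interfaces.freesurfer.model import MS_LDA

example_node = pe.Node(interface=MS_LDA(), name='example_node')  # hypothetical node
example_node.plugin_args = {
    # Extra arguments handed to qsub for this node only
    'qsub_args': '-S /bin/bash -pe smp1 4 -l h_vmem=18G,mem_free=8G -q example.q',
    # overwrite=True replaces the plugin-wide qsub_args instead of appending to them
    'overwrite': True,
}
# some_workflow.run(plugin='SGE', plugin_args={'qsub_args': '-q default.q'})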
outputsSpec = pe.Node(interface=IdentityInterface(fields=['outputVolume','outputTransform']), name='outputspec' ) + PERSISTANCE_CHECK = pe.Node(interface=BRAINSFit(), name="99999_PERSISTANCE_CHECK_PERSISTANCE_CHECK") + PERSISTANCE_CHECK.inputs.costMetric = "MMI" + PERSISTANCE_CHECK.inputs.debugLevel = 10 + PERSISTANCE_CHECK.inputs.maskProcessingMode = "ROI" + PERSISTANCE_CHECK.inputs.numberOfSamples = 1000 + PERSISTANCE_CHECK.inputs.numberOfIterations = [1500] + PERSISTANCE_CHECK.inputs.numberOfHistogramBins = 50 + PERSISTANCE_CHECK.inputs.maximumStepLength = 0.2 + PERSISTANCE_CHECK.inputs.minimumStepLength = [0.005] + PERSISTANCE_CHECK.inputs.transformType = ["Affine"] + PERSISTANCE_CHECK.inputs.maxBSplineDisplacement = 7 + PERSISTANCE_CHECK.inputs.maskInferiorCutOffFromCenter = 65 + PERSISTANCE_CHECK.inputs.splineGridSize = [28, 20, 24] + PERSISTANCE_CHECK.inputs.outputVolume = "Trial_Initializer_Output.nii.gz" + PERSISTANCE_CHECK.inputs.outputTransform = "Trial_Initializer_Output.h5" + + PERSISTANCE_CHECKWF.connect(inputsSpec, 'fixedVolume', PERSISTANCE_CHECK, 'fixedVolume') + PERSISTANCE_CHECKWF.connect(inputsSpec, 'fixedBinaryVolume', PERSISTANCE_CHECK, 'fixedBinaryVolume') + PERSISTANCE_CHECKWF.connect(inputsSpec, 'movingVolume', PERSISTANCE_CHECK, 'movingVolume') + PERSISTANCE_CHECKWF.connect(inputsSpec, 'movingBinaryVolume', PERSISTANCE_CHECK, 'movingBinaryVolume') + PERSISTANCE_CHECKWF.connect(inputsSpec, 'initialTransform', PERSISTANCE_CHECK, 'initialTransform') + + outputsSpec = pe.Node(interface=IdentityInterface(fields=['outputVolume', 'outputTransform']), name='outputspec') return PERSISTANCE_CHECKWF diff --git a/AutoWorkup/WorkupT1T2Single.py b/AutoWorkup/WorkupT1T2Single.py index bf1bac9f..6af2ddfa 100644 --- a/AutoWorkup/WorkupT1T2Single.py +++ b/AutoWorkup/WorkupT1T2Single.py @@ -17,9 +17,9 @@ import string import argparse #"""Import necessary modules from nipype.""" -#from nipype.utils.config import config -#config.set('logging', 'log_to_file', 'false') -#config.set_log_dir(os.getcwd()) +# from nipype.utils.config import config +# config.set('logging', 'log_to_file', 'false') +# config.set_log_dir(os.getcwd()) #--config.set('logging', 'workflow_level', 'DEBUG') #--config.set('logging', 'interface_level', 'DEBUG') #--config.set('execution','remove_unnecessary_outputs','false') @@ -32,7 +32,7 @@ from nipype.interfaces.freesurfer import ReconAll from nipype.utils.misc import package_check -#package_check('nipype', '5.4', 'tutorial1') ## HACK: Check nipype version +# package_check('nipype', '5.4', 'tutorial1') ## HACK: Check nipype version package_check('numpy', '1.3', 'tutorial1') package_check('scipy', '0.7', 'tutorial1') package_check('networkx', '1.0', 'tutorial1') @@ -47,11 +47,14 @@ ## Utility functions for the pipeline ############################################################################# ############################################################################# -def get_first_T1_and_T2(in_files,T1_count): + + +def get_first_T1_and_T2(in_files, T1_count): ''' Returns the first T1 and T2 file in in_files, based on offset in T1_count. 
''' - return in_files[0],in_files[T1_count] + return in_files[0], in_files[T1_count] + def GetExtensionlessBaseName(filename): ''' @@ -66,19 +69,25 @@ def GetExtensionlessBaseName(filename): currExt = os.path.splitext(currBaseName)[1] return currBaseName -def get_list_element( nestedList, index ): + +def get_list_element(nestedList, index): return nestedList[index] + + def getAllT1sLength(allT1s): return len(allT1s) -def get_list_element( nestedList, index ): + +def get_list_element(nestedList, index): return nestedList[index] -def MakeList(firstElement,secondElement): + +def MakeList(firstElement, secondElement): return [firstElement, secondElement] -def GenerateWFName(projectid, subjectid, sessionid,processing_phase): - return 'WF_'+str(subjectid)+"_"+str(sessionid)+"_"+str(projectid)+"_"+processing_phase + +def GenerateWFName(projectid, subjectid, sessionid, processing_phase): + return 'WF_' + str(subjectid) + "_" + str(sessionid) + "_" + str(projectid) + "_" + processing_phase ########################################################################### ########################################################################### @@ -92,7 +101,9 @@ def GenerateWFName(projectid, subjectid, sessionid,processing_phase): ########################################################################### ########################################################################### ########################################################################### -def MakeOneSubWorkFlow(projectid, subjectid, sessionid,processing_phase, WORKFLOW_COMPONENTS, BCD_model_path, InterpolationMode, CLUSTER_QUEUE,CLUSTER_QUEUE_LONG): + + +def MakeOneSubWorkFlow(projectid, subjectid, sessionid, processing_phase, WORKFLOW_COMPONENTS, BCD_model_path, InterpolationMode, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG): """ Run autoworkup on a single Subject @@ -102,12 +113,12 @@ def MakeOneSubWorkFlow(projectid, subjectid, sessionid,processing_phase, WORKFLO the path and filename of the atlas to use. 
""" - print "Building Pipeline for ",sessionid + print "Building Pipeline for ", sessionid ########### PIPELINE INITIALIZATION ############# - T1T2WorkupSingle = pe.Workflow(name=GenerateWFName(projectid, subjectid, sessionid,processing_phase)) + T1T2WorkupSingle = pe.Workflow(name=GenerateWFName(projectid, subjectid, sessionid, processing_phase)) inputsSpec = pe.Node(interface=IdentityInterface(fields= - ['sessionid','subjectid','projectid', + ['sessionid', 'subjectid', 'projectid', 'allT1s', 'allT2s', 'allPDs', @@ -117,71 +128,71 @@ def MakeOneSubWorkFlow(projectid, subjectid, sessionid,processing_phase, WORKFLO 'template_landmark_weights_31_csv', 'template_t1', 'atlasDefinition' - ]), + ]), run_without_submitting=True, - name='inputspec' ) + name='inputspec') outputsSpec = pe.Node(interface=IdentityInterface(fields=[ - 't1_average','t2_average', - 'pd_average','fl_average', - 'posteriorImages', - 'outputLabels','outputHeadLabels', - #'TissueClassifyOutputDir', - 'TissueClassifyatlasToSubjectTransform', - 'TissueClassifyatlasToSubjectInverseTransform', - -# 'BCD_ACPC_T1', - 'BCD_ACPC_T1_CROPPED', - 'outputLandmarksInACPCAlignedSpace', - 'outputLandmarksInInputSpace', - 'outputTransform','LMIatlasToSubjectTransform' - ]), - run_without_submitting=True, - name='outputspec' ) - - if True: #'BASIC' in WORKFLOW_COMPONENTS: + 't1_average', 't2_average', + 'pd_average', 'fl_average', + 'posteriorImages', + 'outputLabels', 'outputHeadLabels', + #'TissueClassifyOutputDir', + 'TissueClassifyatlasToSubjectTransform', + 'TissueClassifyatlasToSubjectInverseTransform', + + # 'BCD_ACPC_T1', + 'BCD_ACPC_T1_CROPPED', + 'outputLandmarksInACPCAlignedSpace', + 'outputLandmarksInInputSpace', + 'outputTransform', 'LMIatlasToSubjectTransform' + ]), + run_without_submitting=True, + name='outputspec') + + if True: # 'BASIC' in WORKFLOW_COMPONENTS: from WorkupT1T2LandmarkInitialization import CreateLandmarkInitializeWorkflow DoReverseMapping = False # Set to true for debugging outputs if 'AUXLMK' in WORKFLOW_COMPONENTS: DoReverseMapping = True - myLocalLMIWF= CreateLandmarkInitializeWorkflow("LandmarkInitialize", BCD_model_path, InterpolationMode,DoReverseMapping) + myLocalLMIWF = CreateLandmarkInitializeWorkflow("LandmarkInitialize", BCD_model_path, InterpolationMode, DoReverseMapping) - T1T2WorkupSingle.connect( [ ( inputsSpec, myLocalLMIWF, [ ( ( 'allT1s', get_list_element, 0 ), 'inputspec.inputVolume') ] ), ] ) - T1T2WorkupSingle.connect( inputsSpec, 'template_landmarks_31_fcsv', myLocalLMIWF,'inputspec.atlasLandmarkFilename') - T1T2WorkupSingle.connect( inputsSpec, 'template_landmark_weights_31_csv', myLocalLMIWF,'inputspec.atlasWeightFilename') + T1T2WorkupSingle.connect([(inputsSpec, myLocalLMIWF, [(('allT1s', get_list_element, 0), 'inputspec.inputVolume')]), ]) + T1T2WorkupSingle.connect(inputsSpec, 'template_landmarks_31_fcsv', myLocalLMIWF, 'inputspec.atlasLandmarkFilename') + T1T2WorkupSingle.connect(inputsSpec, 'template_landmark_weights_31_csv', myLocalLMIWF, 'inputspec.atlasWeightFilename') if 'AUXLMK' in WORKFLOW_COMPONENTS: - T1T2WorkupSingle.connect(inputsSpec,'template_t1',myLocalLMIWF,'inputspec.atlasVolume') + T1T2WorkupSingle.connect(inputsSpec, 'template_t1', myLocalLMIWF, 'inputspec.atlasVolume') ### Now connect outputspec # T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.outputResampledVolume', outputsSpec, 'BCD_ACPC_T1' ) - T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.outputResampledCroppedVolume', outputsSpec, 'BCD_ACPC_T1_CROPPED' ) - 
T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.outputLandmarksInACPCAlignedSpace',outputsSpec,'outputLandmarksInACPCAlignedSpace') - T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.outputLandmarksInInputSpace',outputsSpec,'outputLandmarksInInputSpace') - T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.outputTransform',outputsSpec,'outputTransform') - T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.atlasToSubjectTransform',outputsSpec,'LMIatlasToSubjectTransform') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputResampledCroppedVolume', outputsSpec, 'BCD_ACPC_T1_CROPPED') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputLandmarksInACPCAlignedSpace', outputsSpec, 'outputLandmarksInACPCAlignedSpace') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputLandmarksInInputSpace', outputsSpec, 'outputLandmarksInInputSpace') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputTransform', outputsSpec, 'outputTransform') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.atlasToSubjectTransform', outputsSpec, 'LMIatlasToSubjectTransform') if 'TISSUE_CLASSIFY' in WORKFLOW_COMPONENTS: from WorkupT1T2TissueClassify import CreateTissueClassifyWorkflow - myLocalTCWF= CreateTissueClassifyWorkflow("TissueClassify",CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,InterpolationMode) - T1T2WorkupSingle.connect( inputsSpec, 'allT1s', myLocalTCWF, 'inputspec.T1List') - T1T2WorkupSingle.connect( inputsSpec, 'allT2s', myLocalTCWF, 'inputspec.T2List') - T1T2WorkupSingle.connect( inputsSpec, 'allPDs', myLocalTCWF, 'inputspec.PDList') - T1T2WorkupSingle.connect( inputsSpec, 'allFLs', myLocalTCWF, 'inputspec.FLList') - T1T2WorkupSingle.connect( inputsSpec, 'allOthers', myLocalTCWF, 'inputspec.OtherList') - T1T2WorkupSingle.connect( [ (inputsSpec, myLocalTCWF, [(('allT1s', getAllT1sLength), 'inputspec.T1_count')] ), ]) - T1T2WorkupSingle.connect( inputsSpec,'atlasDefinition',myLocalTCWF,'inputspec.atlasDefinition') - T1T2WorkupSingle.connect( myLocalLMIWF, 'outputspec.outputResampledCroppedVolume', myLocalTCWF, 'inputspec.PrimaryT1' ) - T1T2WorkupSingle.connect( myLocalLMIWF,'outputspec.atlasToSubjectTransform',myLocalTCWF,'inputspec.atlasToSubjectInitialTransform') + myLocalTCWF = CreateTissueClassifyWorkflow("TissueClassify", CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, InterpolationMode) + T1T2WorkupSingle.connect(inputsSpec, 'allT1s', myLocalTCWF, 'inputspec.T1List') + T1T2WorkupSingle.connect(inputsSpec, 'allT2s', myLocalTCWF, 'inputspec.T2List') + T1T2WorkupSingle.connect(inputsSpec, 'allPDs', myLocalTCWF, 'inputspec.PDList') + T1T2WorkupSingle.connect(inputsSpec, 'allFLs', myLocalTCWF, 'inputspec.FLList') + T1T2WorkupSingle.connect(inputsSpec, 'allOthers', myLocalTCWF, 'inputspec.OtherList') + T1T2WorkupSingle.connect([(inputsSpec, myLocalTCWF, [(('allT1s', getAllT1sLength), 'inputspec.T1_count')]), ]) + T1T2WorkupSingle.connect(inputsSpec, 'atlasDefinition', myLocalTCWF, 'inputspec.atlasDefinition') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputResampledCroppedVolume', myLocalTCWF, 'inputspec.PrimaryT1') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.atlasToSubjectTransform', myLocalTCWF, 'inputspec.atlasToSubjectInitialTransform') ### Now connect outputspec - T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.t1_average', outputsSpec,'t1_average') - T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.t2_average', outputsSpec,'t2_average') - T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.pd_average', outputsSpec,'pd_average') - T1T2WorkupSingle.connect(myLocalTCWF, 
'outputspec.fl_average', outputsSpec,'fl_average') - T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.posteriorImages', outputsSpec,'posteriorImages') - T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.outputLabels', outputsSpec,'outputLabels') - T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.outputHeadLabels', outputsSpec,'outputHeadLabels') - #T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.TissueClassifyOutputDir', outputsSpec,'TissueClassifyOutputDir') - T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform', outputsSpec,'TissueClassifyatlasToSubjectTransform') - T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.atlasToSubjectInverseTransform', outputsSpec,'TissueClassifyatlasToSubjectInverseTransform') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.t1_average', outputsSpec, 't1_average') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.t2_average', outputsSpec, 't2_average') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.pd_average', outputsSpec, 'pd_average') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.fl_average', outputsSpec, 'fl_average') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.posteriorImages', outputsSpec, 'posteriorImages') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.outputLabels', outputsSpec, 'outputLabels') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.outputHeadLabels', outputsSpec, 'outputHeadLabels') + # T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.TissueClassifyOutputDir', outputsSpec,'TissueClassifyOutputDir') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform', outputsSpec, 'TissueClassifyatlasToSubjectTransform') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.atlasToSubjectInverseTransform', outputsSpec, 'TissueClassifyatlasToSubjectInverseTransform') return T1T2WorkupSingle diff --git a/AutoWorkup/WorkupT1T2SingleSubjectPhaseII.py b/AutoWorkup/WorkupT1T2SingleSubjectPhaseII.py index a0f4b535..03eeddb5 100644 --- a/AutoWorkup/WorkupT1T2SingleSubjectPhaseII.py +++ b/AutoWorkup/WorkupT1T2SingleSubjectPhaseII.py @@ -17,9 +17,9 @@ import string import argparse #"""Import necessary modules from nipype.""" -#from nipype.utils.config import config -#config.set('logging', 'log_to_file', 'false') -#config.set_log_dir(os.getcwd()) +# from nipype.utils.config import config +# config.set('logging', 'log_to_file', 'false') +# config.set_log_dir(os.getcwd()) #--config.set('logging', 'workflow_level', 'DEBUG') #--config.set('logging', 'interface_level', 'DEBUG') #--config.set('execution','remove_unnecessary_outputs','false') @@ -32,7 +32,7 @@ from nipype.interfaces.freesurfer import ReconAll from nipype.utils.misc import package_check -#package_check('nipype', '5.4', 'tutorial1') ## HACK: Check nipype version +# package_check('nipype', '5.4', 'tutorial1') ## HACK: Check nipype version package_check('numpy', '1.3', 'tutorial1') package_check('scipy', '0.7', 'tutorial1') package_check('networkx', '1.0', 'tutorial1') @@ -47,11 +47,12 @@ ## Utility functions for the pipeline ############################################################################# ############################################################################# -def get_first_T1_and_T2(in_files,T1_count): +def get_first_T1_and_T2(in_files, T1_count): ''' Returns the first T1 and T2 file in in_files, based on offset in T1_count. 
''' - return in_files[0],in_files[T1_count] + return in_files[0], in_files[T1_count] + def GetExtensionlessBaseName(filename): ''' @@ -66,29 +67,38 @@ def GetExtensionlessBaseName(filename): currExt = os.path.splitext(currBaseName)[1] return currBaseName -def get_list_element( nestedList, index ): + +def get_list_element(nestedList, index): return nestedList[index] + + def getAllT1sLength(allT1s): return len(allT1s) -def get_list_element( nestedList, index ): + + +def get_list_element(nestedList, index): return nestedList[index] -def MakeList(firstElement,secondElement): + + +def MakeList(firstElement, secondElement): return [firstElement, secondElement] + def GenerateWFName(projectid, subjectid, sessionid): - return 'WF_'+str(subjectid)+"_"+str(sessionid)+"_"+str(projectid) + return 'WF_' + str(subjectid) + "_" + str(sessionid) + "_" + str(projectid) + def GenerateOutputPattern(projectid, subjectid, sessionid, DefaultNodeName, uidIsFirst): """ This function generates output path substitutions for workflows and nodes that conform to a common standard. """ - WFName=""#GenerateWFName(projectid,subjectid,sessionid) + WFName = "" # GenerateWFName(projectid,subjectid,sessionid) patternList = [] if uidIsFirst == True: find_pat = os.path.join(DefaultNodeName, WFName) else: find_pat = os.path.join(WFName, DefaultNodeName) replace_pat = os.path.join(projectid, subjectid, sessionid, DefaultNodeName) - patternList.append((find_pat,replace_pat)) + patternList.append((find_pat, replace_pat)) return patternList ########################################################################### @@ -103,7 +113,9 @@ def GenerateOutputPattern(projectid, subjectid, sessionid, DefaultNodeName, uidI ########################################################################### ########################################################################### ########################################################################### -def MakeOneSubWorkFlow(projectid, subjectid, sessionid, BAtlas, WORKFLOW_COMPONENTS, BCD_model_path, InterpolationMode, CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,ExperimentBaseDirectoryResults): + + +def MakeOneSubWorkFlow(projectid, subjectid, sessionid, BAtlas, WORKFLOW_COMPONENTS, BCD_model_path, InterpolationMode, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, ExperimentBaseDirectoryResults): """ Run autoworkup on a single Subject @@ -118,58 +130,58 @@ def MakeOneSubWorkFlow(projectid, subjectid, sessionid, BAtlas, WORKFLOW_COMPONE T1T2WorkupSingle = pe.Workflow(name=GenerateWFName(projectid, subjectid, sessionid)) inputsSpec = pe.Node(interface=IdentityInterface(fields= - ['sessionid','subjectid','projectid', + ['sessionid', 'subjectid', 'projectid', 'allT1s', 'allT2s', 'allPDs', 'allOthers' - ]), + ]), run_without_submitting=True, - name='inputspec' ) + name='inputspec') outputsSpec = pe.Node(interface=IdentityInterface(fields=['BCD_ACPC_T1', - 't1_average','t2_average' - ]), - run_without_submitting=True, - name='outputspec' ) + 't1_average', 't2_average' + ]), + run_without_submitting=True, + name='outputspec') if 'BASIC' in WORKFLOW_COMPONENTS: from WorkupT1T2LandmarkInitialization import CreateLandmarkInitializeWorkflow DoReverseMapping = False # Set to true for debugging outputs if 'AUXLMK' in WORKFLOW_COMPONENTS: DoReverseMapping = True - myLocalLMIWF= CreateLandmarkInitializeWorkflow("LandmarkInitialize", BCD_model_path, InterpolationMode,DoReverseMapping) + myLocalLMIWF = CreateLandmarkInitializeWorkflow("LandmarkInitialize", BCD_model_path, InterpolationMode, DoReverseMapping) - T1T2WorkupSingle.connect( 
[ ( inputsSpec, myLocalLMIWF, [ ( ( 'allT1s', get_list_element, 0 ), 'inputspec.inputVolume') ] ), ] ) - T1T2WorkupSingle.connect( BAtlas, 'template_landmarks_31_fcsv', myLocalLMIWF,'inputspec.atlasLandmarkFilename') - T1T2WorkupSingle.connect( BAtlas, 'template_landmark_weights_31_csv', myLocalLMIWF,'inputspec.atlasWeightFilename') + T1T2WorkupSingle.connect([(inputsSpec, myLocalLMIWF, [(('allT1s', get_list_element, 0), 'inputspec.inputVolume')]), ]) + T1T2WorkupSingle.connect(BAtlas, 'template_landmarks_31_fcsv', myLocalLMIWF, 'inputspec.atlasLandmarkFilename') + T1T2WorkupSingle.connect(BAtlas, 'template_landmark_weights_31_csv', myLocalLMIWF, 'inputspec.atlasWeightFilename') if 'AUXLMK' in WORKFLOW_COMPONENTS: - T1T2WorkupSingle.connect(BAtlas,'template_t1',myLocalLMIWF,'inputspec.atlasVolume') + T1T2WorkupSingle.connect(BAtlas, 'template_t1', myLocalLMIWF, 'inputspec.atlasVolume') ### Now define where the final organized outputs should go. - BASIC_DataSink=pe.Node(nio.DataSink(),name="BASIC_DS") - BASIC_DataSink.inputs.base_directory=ExperimentBaseDirectoryResults - BASIC_DataSink.inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'ACPCAlign',False) - - T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.outputLandmarksInACPCAlignedSpace',BASIC_DataSink,'ACPCAlign.@outputLandmarksInACPCAlignedSpace') - T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.outputResampledVolume',BASIC_DataSink,'ACPCAlign.@outputResampledVolume') - T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.outputLandmarksInInputSpace',BASIC_DataSink,'ACPCAlign.@outputLandmarksInInputSpace') - T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.outputTransform',BASIC_DataSink,'ACPCAlign.@outputTransform') - T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.atlasToSubjectTransform',BASIC_DataSink,'ACPCAlign.@atlasToSubjectTransform') + BASIC_DataSink = pe.Node(nio.DataSink(), name="BASIC_DS") + BASIC_DataSink.inputs.base_directory = ExperimentBaseDirectoryResults + BASIC_DataSink.inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid, 'ACPCAlign', False) + + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputLandmarksInACPCAlignedSpace', BASIC_DataSink, 'ACPCAlign.@outputLandmarksInACPCAlignedSpace') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputResampledVolume', BASIC_DataSink, 'ACPCAlign.@outputResampledVolume') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputLandmarksInInputSpace', BASIC_DataSink, 'ACPCAlign.@outputLandmarksInInputSpace') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputTransform', BASIC_DataSink, 'ACPCAlign.@outputTransform') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.atlasToSubjectTransform', BASIC_DataSink, 'ACPCAlign.@atlasToSubjectTransform') ### Now connect outputspec - T1T2WorkupSingle.connect( myLocalLMIWF, 'outputspec.outputResampledVolume', outputsSpec, 'BCD_ACPC_T1' ) + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputResampledVolume', outputsSpec, 'BCD_ACPC_T1') if 'TISSUE_CLASSIFY' in WORKFLOW_COMPONENTS: from WorkupT1T2TissueClassify import CreateTissueClassifyWorkflow - myLocalTCWF= CreateTissueClassifyWorkflow("TissueClassify",CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,InterpolationMode) - T1T2WorkupSingle.connect( inputsSpec, 'allT1s', myLocalTCWF, 'inputspec.T1List') - T1T2WorkupSingle.connect( inputsSpec, 'allT2s', myLocalTCWF, 'inputspec.T2List') - T1T2WorkupSingle.connect( inputsSpec, 'allPDs', myLocalTCWF, 'inputspec.PDList') - T1T2WorkupSingle.connect( inputsSpec, 
'allOthers', myLocalTCWF, 'inputspec.OtherList') - T1T2WorkupSingle.connect( [ (inputsSpec, myLocalTCWF, [(('allT1s', getAllT1sLength), 'inputspec.T1_count')] ), ]) - T1T2WorkupSingle.connect( BAtlas,'ExtendedAtlasDefinition.xml',myLocalTCWF,'inputspec.atlasDefinition') - T1T2WorkupSingle.connect( myLocalLMIWF, 'outputspec.outputResampledVolume', myLocalTCWF, 'inputspec.PrimaryT1' ) - T1T2WorkupSingle.connect( myLocalLMIWF,'outputspec.atlasToSubjectTransform',myLocalTCWF,'inputspec.atlasToSubjectInitialTransform') + myLocalTCWF = CreateTissueClassifyWorkflow("TissueClassify", CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, InterpolationMode) + T1T2WorkupSingle.connect(inputsSpec, 'allT1s', myLocalTCWF, 'inputspec.T1List') + T1T2WorkupSingle.connect(inputsSpec, 'allT2s', myLocalTCWF, 'inputspec.T2List') + T1T2WorkupSingle.connect(inputsSpec, 'allPDs', myLocalTCWF, 'inputspec.PDList') + T1T2WorkupSingle.connect(inputsSpec, 'allOthers', myLocalTCWF, 'inputspec.OtherList') + T1T2WorkupSingle.connect([(inputsSpec, myLocalTCWF, [(('allT1s', getAllT1sLength), 'inputspec.T1_count')]), ]) + T1T2WorkupSingle.connect(BAtlas, 'ExtendedAtlasDefinition.xml', myLocalTCWF, 'inputspec.atlasDefinition') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputResampledVolume', myLocalTCWF, 'inputspec.PrimaryT1') + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.atlasToSubjectTransform', myLocalTCWF, 'inputspec.atlasToSubjectInitialTransform') ### Now define where the final organized outputs should go. ### For posterior probability files, we need to use a MapNode for the keys from the @@ -187,79 +199,79 @@ def MakeOneSubWorkFlow(projectid, subjectid, sessionid, BAtlas, WORKFLOW_COMPONE T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.t2_average', TC_DataSink, 'TissueClassify.@t2_average') T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.posteriorImages', TC_DataSink, 'TissueClassify.@posteriorImages') ### Now connect outputspec - T1T2WorkupSingle.connect(TC_DataSink, 'TissueClassify.@t1_average', outputsSpec,'t1_average') - T1T2WorkupSingle.connect(TC_DataSink, 'TissueClassify.@t2_average', outputsSpec,'t2_average') + T1T2WorkupSingle.connect(TC_DataSink, 'TissueClassify.@t1_average', outputsSpec, 't1_average') + T1T2WorkupSingle.connect(TC_DataSink, 'TissueClassify.@t2_average', outputsSpec, 't2_average') ## Make deformed Atlas image space if 'ANTS' in WORKFLOW_COMPONENTS: from WorkupT1T2ANTS import CreateANTSRegistrationWorkflow - myLocalAntsWF = CreateANTSRegistrationWorkflow("ANTSRegistration",CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,-1) - T1T2WorkupSingle.connect( myLocalTCWF,'outputspec.t1_average',myLocalAntsWF,"inputspec.fixedVolumesList") - T1T2WorkupSingle.connect( BAtlas,'template_t1', myLocalAntsWF,"inputspec.movingVolumesList") - T1T2WorkupSingle.connect(myLocalLMIWF,'outputspec.atlasToSubjectTransform',myLocalAntsWF,'inputspec.initial_moving_transform') + myLocalAntsWF = CreateANTSRegistrationWorkflow("ANTSRegistration", CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, -1) + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.t1_average', myLocalAntsWF, "inputspec.fixedVolumesList") + T1T2WorkupSingle.connect(BAtlas, 'template_t1', myLocalAntsWF, "inputspec.movingVolumesList") + T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.atlasToSubjectTransform', myLocalAntsWF, 'inputspec.initial_moving_transform') # Must register the entire head, not just the brain! 
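A hedged sketch (not part of the diff) of how this ANTS sub-workflow is fed; the head-mask connects that the comment above calls for follow immediately below. The inputspec field names come from this hunk, while the queue strings and paths are hypothetical:

from WorkupT1T2ANTS import CreateANTSRegistrationWorkflow

antsWF = CreateANTSRegistrationWorkflow('ANTSRegistration', 'example.q', 'example-long.q', -1)
# Whole-head binary masks (outputHeadLabels / template_headregion) are supplied below so
# that the registration is driven by the entire head, per the comment above, not just brain.
antsWF.inputs.inputspec.fixedVolumesList = ['/data/t1_average.nii.gz']    # hypothetical path
antsWF.inputs.inputspec.movingVolumesList = ['/data/template_t1.nii.gz']  # hypothetical path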
- T1T2WorkupSingle.connect(myLocalTCWF,'outputspec.outputHeadLabels',myLocalAntsWF,'inputspec.fixedBinaryVolume') - T1T2WorkupSingle.connect(BAtlas,'template_headregion',myLocalAntsWF,'inputspec.movingBinaryVolume') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.outputHeadLabels', myLocalAntsWF, 'inputspec.fixedBinaryVolume') + T1T2WorkupSingle.connect(BAtlas, 'template_headregion', myLocalAntsWF, 'inputspec.movingBinaryVolume') ### Now define where the final organized outputs should go. - ANTS_DataSink=pe.Node(nio.DataSink(),name="ANTSRegistration_DS") - ANTS_DataSink.inputs.base_directory=ExperimentBaseDirectoryResults - ANTS_DataSink.inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'ANTSRegistration',False) - T1T2WorkupSingle.connect(myLocalAntsWF, 'outputspec.warped_image', ANTS_DataSink,'ANTSRegistration.@warped_image') - T1T2WorkupSingle.connect(myLocalAntsWF, 'outputspec.inverse_warped_image', ANTS_DataSink,'ANTSRegistration.@inverse_warped_image') - T1T2WorkupSingle.connect(myLocalAntsWF, 'outputspec.affine_transform', ANTS_DataSink,'ANTSRegistration.@affine_transform') - T1T2WorkupSingle.connect(myLocalAntsWF, 'outputspec.warp_transform', ANTS_DataSink,'ANTSRegistration.@warp_transform') - T1T2WorkupSingle.connect(myLocalAntsWF, 'outputspec.inverse_warp_transform', ANTS_DataSink,'ANTSRegistration.@inverse_warp_transform') + ANTS_DataSink = pe.Node(nio.DataSink(), name="ANTSRegistration_DS") + ANTS_DataSink.inputs.base_directory = ExperimentBaseDirectoryResults + ANTS_DataSink.inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid, 'ANTSRegistration', False) + T1T2WorkupSingle.connect(myLocalAntsWF, 'outputspec.warped_image', ANTS_DataSink, 'ANTSRegistration.@warped_image') + T1T2WorkupSingle.connect(myLocalAntsWF, 'outputspec.inverse_warped_image', ANTS_DataSink, 'ANTSRegistration.@inverse_warped_image') + T1T2WorkupSingle.connect(myLocalAntsWF, 'outputspec.affine_transform', ANTS_DataSink, 'ANTSRegistration.@affine_transform') + T1T2WorkupSingle.connect(myLocalAntsWF, 'outputspec.warp_transform', ANTS_DataSink, 'ANTSRegistration.@warp_transform') + T1T2WorkupSingle.connect(myLocalAntsWF, 'outputspec.inverse_warp_transform', ANTS_DataSink, 'ANTSRegistration.@inverse_warp_transform') if 'SEGMENTATION' in WORKFLOW_COMPONENTS: from WorkupT1T2BRAINSCut import CreateBRAINSCutWorkflow ## TODO: Remove BAtlas From Here as well! - myLocalSegWF = CreateBRAINSCutWorkflow("Segmentation",CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,BAtlas) ##Note: Passing in the entire BAtlas Object here! - T1T2WorkupSingle.connect( myLocalTCWF,'outputspec.t1_average',myLocalSegWF,'inputspec.T1Volume') - T1T2WorkupSingle.connect( myLocalTCWF,'outputspec.t2_average',myLocalSegWF,'inputspec.T2Volume') - T1T2WorkupSingle.connect( myLocalTCWF,'outputspec.atlasToSubjectTransform',myLocalSegWF,'inputspec.atlasToSubjectTransform') + myLocalSegWF = CreateBRAINSCutWorkflow("Segmentation", CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, BAtlas) # Note: Passing in the entire BAtlas Object here! + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.t1_average', myLocalSegWF, 'inputspec.T1Volume') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.t2_average', myLocalSegWF, 'inputspec.T2Volume') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.atlasToSubjectTransform', myLocalSegWF, 'inputspec.atlasToSubjectTransform') ### Now define where the final organized outputs should go. 
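The SEGMENTATION DataSink configured just below reuses GenerateOutputPattern from earlier in this file; a small self-contained illustration (argument values hypothetical) of the single (find, replace) pair it emits:

import os

def GenerateOutputPattern(projectid, subjectid, sessionid, DefaultNodeName, uidIsFirst):
    # Same logic as the helper defined earlier in this file
    WFName = ""
    if uidIsFirst == True:
        find_pat = os.path.join(DefaultNodeName, WFName)
    else:
        find_pat = os.path.join(WFName, DefaultNodeName)
    replace_pat = os.path.join(projectid, subjectid, sessionid, DefaultNodeName)
    return [(find_pat, replace_pat)]

print(GenerateOutputPattern('proj01', 'subj042', 'sess0001', 'BRAINSCut', False))
# -> [('BRAINSCut', 'proj01/subj042/sess0001/BRAINSCut')]
# The DataSink then rewrites any output path containing 'BRAINSCut' into the
# project/subject/session hierarchy.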
- SEGMENTATION_DataSink=pe.Node(nio.DataSink(),name="SEGMENTATION_DS") - SEGMENTATION_DataSink.inputs.base_directory=ExperimentBaseDirectoryResults - SEGMENTATION_DataSink.inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid,'BRAINSCut',False) - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftAccumben',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftAccumben') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryRightAccumben',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightAccumben') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftCaudate',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftCaudate') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryRightCaudate',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightCaudate') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftGlobus',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftGlobus') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryRightGlobus',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightGlobus') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftHippocampus',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftHippocampus') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryRightHippocampus',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightHippocampus') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftPutamen',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftPutamen') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryRightPutamen',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightPutamen') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftThalamus',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftThalamus') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryRightThalamus',SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightThalamus') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputLabelImageName', SEGMENTATION_DataSink,'BRAINSCut.@outputLabelImageName') - T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputCSVFileName', SEGMENTATION_DataSink,'BRAINSCut.@outputCSVFileName') + SEGMENTATION_DataSink = pe.Node(nio.DataSink(), name="SEGMENTATION_DS") + SEGMENTATION_DataSink.inputs.base_directory = ExperimentBaseDirectoryResults + SEGMENTATION_DataSink.inputs.regexp_substitutions = GenerateOutputPattern(projectid, subjectid, sessionid, 'BRAINSCut', False) + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftAccumben', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftAccumben') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryRightAccumben', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightAccumben') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftCaudate', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftCaudate') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryRightCaudate', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightCaudate') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftGlobus', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftGlobus') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryRightGlobus', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightGlobus') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftHippocampus', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftHippocampus') + T1T2WorkupSingle.connect(myLocalSegWF, 
'outputspec.outputBinaryRightHippocampus', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightHippocampus') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftPutamen', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftPutamen') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryRightPutamen', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightPutamen') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryLeftThalamus', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryLeftThalamus') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputBinaryRightThalamus', SEGMENTATION_DataSink, 'BRAINSCut.@outputBinaryRightThalamus') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputLabelImageName', SEGMENTATION_DataSink, 'BRAINSCut.@outputLabelImageName') + T1T2WorkupSingle.connect(myLocalSegWF, 'outputspec.outputCSVFileName', SEGMENTATION_DataSink, 'BRAINSCut.@outputCSVFileName') if 'FREESURFER' in WORKFLOW_COMPONENTS: - RunAllFSComponents=True ## A hack to avoid 26 hour run of freesurfer + RunAllFSComponents = True # A hack to avoid 26 hour run of freesurfer from WorkupT1T2FreeSurfer import CreateFreeSurferWorkflow - myLocalFSWF= CreateFreeSurferWorkflow("Level1_FSTest",CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,RunAllFSComponents) - T1T2WorkupSingle.connect(inputsSpec,'sessionid',myLocalFSWF,'inputspec.subject_id') - T1T2WorkupSingle.connect(myLocalTCWF,'outputspec.t1_average',myLocalFSWF,'inputspec.T1_files') - T1T2WorkupSingle.connect(myLocalTCWF,'outputspec.t2_average',myLocalFSWF,'inputspec.T2_files') - T1T2WorkupSingle.connect(myLocalTCWF,'outputspec.outputLabels',myLocalFSWF,'inputspec.label_file') - #T1T2WorkupSingle.connect(myLocalTCWF,'outputspec.outputLabels',myLocalFSWF,'inputspec.mask_file') #Yes, the same file as label_file! + myLocalFSWF = CreateFreeSurferWorkflow("Level1_FSTest", CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, RunAllFSComponents) + T1T2WorkupSingle.connect(inputsSpec, 'sessionid', myLocalFSWF, 'inputspec.subject_id') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.t1_average', myLocalFSWF, 'inputspec.T1_files') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.t2_average', myLocalFSWF, 'inputspec.T2_files') + T1T2WorkupSingle.connect(myLocalTCWF, 'outputspec.outputLabels', myLocalFSWF, 'inputspec.label_file') + # T1T2WorkupSingle.connect(myLocalTCWF,'outputspec.outputLabels',myLocalFSWF,'inputspec.mask_file') #Yes, the same file as label_file! ### Now define where the final organized outputs should go. if RunAllFSComponents == True: - T1T2WorkupSingleDataSink=pe.Node(nio.DataSink(),name="FREESURFER_DS") - T1T2WorkupSingleDataSink.inputs.base_directory=ExperimentBaseDirectoryResults + T1T2WorkupSingleDataSink = pe.Node(nio.DataSink(), name="FREESURFER_DS") + T1T2WorkupSingleDataSink.inputs.base_directory = ExperimentBaseDirectoryResults T1T2WorkupSingleDataSink.inputs.regexp_substitutions = [ - ('/_uid_(?P<myuid>[^/]*)',r'/\g<myuid>') - ] - T1T2WorkupSingle.connect(myLocalFSWF, 'outputspec.FreesurferOutputDirectory', T1T2WorkupSingleDataSink,'FREESURFER_SUBJ.@FreesurferOutputDirectory') + ('/_uid_(?P<myuid>[^/]*)', r'/\g<myuid>') + ] + T1T2WorkupSingle.connect(myLocalFSWF, 'outputspec.FreesurferOutputDirectory', T1T2WorkupSingleDataSink, 'FREESURFER_SUBJ.@FreesurferOutputDirectory') ### Now define where the final organized outputs should go.
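Before the FREESURFER_PREP sink below, a quick re.sub illustration of the named-group substitution that FREESURFER_DS applies above; the group name was reconstructed from the garbled pattern, and the example path and uid value are hypothetical:

import re

path = 'FREESURFER_SUBJ/_uid_PHD001_0042_91300/mri/T1.mgz'  # hypothetical sink path
print(re.sub('/_uid_(?P<myuid>[^/]*)', r'/\g<myuid>', path))
# -> 'FREESURFER_SUBJ/PHD001_0042_91300/mri/T1.mgz': the '_uid_' wrapper is stripped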
         ### Now define where the final organized outputs should go.
-        FSPREP_DataSink=pe.Node(nio.DataSink(),name="FREESURFER_PREP")
-        FSPREP_DataSink.inputs.base_directory=ExperimentBaseDirectoryResults
-        FREESURFER_PREP_PATTERNS = GenerateOutputPattern(projectid, subjectid, sessionid,'FREESURFER_PREP',False)
+        FSPREP_DataSink = pe.Node(nio.DataSink(), name="FREESURFER_PREP")
+        FSPREP_DataSink.inputs.base_directory = ExperimentBaseDirectoryResults
+        FREESURFER_PREP_PATTERNS = GenerateOutputPattern(projectid, subjectid, sessionid, 'FREESURFER_PREP', False)
         FSPREP_DataSink.inputs.regexp_substitutions = FREESURFER_PREP_PATTERNS
         print "========================="
         print "========================="
@@ -268,10 +280,10 @@ def MakeOneSubWorkFlow(projectid, subjectid, sessionid, BAtlas, WORKFLOW_COMPONE
         print "========================="
         print "========================="
         print "========================="
-        T1T2WorkupSingle.connect(myLocalFSWF, 'outputspec.cnr_optimal_image', FSPREP_DataSink,'FREESURFER_PREP.@cnr_optimal_image')
+        T1T2WorkupSingle.connect(myLocalFSWF, 'outputspec.cnr_optimal_image', FSPREP_DataSink, 'FREESURFER_PREP.@cnr_optimal_image')
     else:
         pass
-        #print "Skipping freesurfer"
+        # print "Skipping freesurfer"
     """
     try:
         T1T2WorkupSingle.write_graph()
@@ -284,11 +296,11 @@ def MakeOneSubWorkFlow(projectid, subjectid, sessionid, BAtlas, WORKFLOW_COMPONE
    #############
    ## The following are just notes, and not really part of this script.
    ##
-    #T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputLandmarksInACPCAlignedSpace', T1T2WorkupSingleDataSink,'foo.@outputLandmarksInACPCAlignedSpace')
-    #T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputResampledVolume', T1T2WorkupSingleDataSink,'foo.@outputResampledVolume')
-    #T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputLandmarksInInputSpace', T1T2WorkupSingleDataSink,'foo.@outputLandmarksInInputSpace')
-    #T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputTransform', T1T2WorkupSingleDataSink,'foo.@outputTransform')
-    #T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputMRML', T1T2WorkupSingleDataSink,'foo.@outputMRML')
+    # T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputLandmarksInACPCAlignedSpace', T1T2WorkupSingleDataSink,'foo.@outputLandmarksInACPCAlignedSpace')
+    # T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputResampledVolume', T1T2WorkupSingleDataSink,'foo.@outputResampledVolume')
+    # T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputLandmarksInInputSpace', T1T2WorkupSingleDataSink,'foo.@outputLandmarksInInputSpace')
+    # T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputTransform', T1T2WorkupSingleDataSink,'foo.@outputTransform')
+    # T1T2WorkupSingle.connect(myLocalLMIWF, 'outputspec.outputMRML', T1T2WorkupSingleDataSink,'foo.@outputMRML')
    """
    subs=r'test/\g<project>/\g<subject>/\g<session>'
    pe.sub(subs,test)
diff --git a/AutoWorkup/WorkupT1T2TissueClassify.py b/AutoWorkup/WorkupT1T2TissueClassify.py
index 2df1ceda..af6f3e7d 100644
--- a/AutoWorkup/WorkupT1T2TissueClassify.py
+++ b/AutoWorkup/WorkupT1T2TissueClassify.py
@@ -19,12 +19,13 @@
 tissueClassifyWF.connect(BLI,'outputTransformFilename',myLocalTCWF,'atlasToSubjectInitialTransform')
 """

-def MakeOneFileList(T1List,T2List,PDList,FLList,OtherList,PrimaryT1):
+
+def MakeOneFileList(T1List, T2List, PDList, FLList, OtherList, PrimaryT1):
     """
     This function uses PrimaryT1 for the first T1, and then appends the rest of the T1's and T2's
     """
-    imagePathList=list()
-    imagePathList.append(PrimaryT1) # Force replacement of the first element
+    imagePathList = list()
+    imagePathList.append(PrimaryT1)  # Force replacement of the first element
     for i in T1List[1:]:
-        imagePathList.append(i) # The reset of the elements
+        imagePathList.append(i)  # The rest of the elements
     for i in T2List[0:]:
         imagePathList.append(i)
     for i in PDList[0:]:
@@ -34,15 +35,18 @@ def MakeOneFileList(T1List,T2List,PDList,FLList,OtherList,PrimaryT1):
     for i in OtherList[0:]:
         imagePathList.append(i)
     return imagePathList
-def MakeOneFileTypeList(T1List,T2List,PDList,FLList,OtherList):
-    input_types = ["T1"]*len(T1List)
-    input_types.extend( ["T2"]*len(T2List) )
-    input_types.extend( ["PD"]*len(PDList) )
-    input_types.extend( ["FL"]*len(FLList) )
-    input_types.extend( ["OTHER"]*len(OtherList) )
+
+
+def MakeOneFileTypeList(T1List, T2List, PDList, FLList, OtherList):
+    input_types = ["T1"] * len(T1List)
+    input_types.extend(["T2"] * len(T2List))
+    input_types.extend(["PD"] * len(PDList))
+    input_types.extend(["FL"] * len(FLList))
+    input_types.extend(["OTHER"] * len(OtherList))
     return input_types

-def MakeOutFileList(T1List,T2List,PDList,FLList,OtherList):
+
+def MakeOutFileList(T1List, T2List, PDList, FLList, OtherList):
     def GetExtBaseName(filename):
         '''
         Get the filename without the extension.  Works for .ext and .ext.gz
@@ -55,22 +59,24 @@ def GetExtBaseName(filename):
         currBaseName = os.path.splitext(currBaseName)[0]
         currExt = os.path.splitext(currBaseName)[1]
         return currBaseName
-    all_files=T1List
+    all_files = list(T1List)  # copy, so the caller's T1List is not mutated by the extends below
     all_files.extend(T2List)
     all_files.extend(PDList)
     all_files.extend(FLList)
     all_files.extend(OtherList)
-    out_corrected_names=[]
+    out_corrected_names = []
     for i in all_files:
-        out_name=GetExtBaseName(i)+"_corrected.nii.gz"
+        out_name = GetExtBaseName(i) + "_corrected.nii.gz"
         out_corrected_names.append(out_name)
     return out_corrected_names

-def getListIndexOrNoneIfOutOfRange( imageList, index):
+
+def getListIndexOrNoneIfOutOfRange(imageList, index):
     if index < len(imageList):
-       return imageList[index]
+        return imageList[index]
     else:
-       return None
+        return None
+

 def MakePosteriorDictionaryFunc(posteriorImages):
     from PipeLineFunctionHelpers import POSTERIORS
@@ -81,19 +87,20 @@ def MakePosteriorDictionaryFunc(posteriorImages):
     temp_dictionary = dict(zip(POSTERIORS, posteriorImages))
     return temp_dictionary
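With hypothetical two-T1/one-T2 inputs, the list helpers above line up files and type labels index for index, and MakeOneFileList forces the chosen PrimaryT1 into the first slot:

    # Hypothetical file names, for illustration only.
    t1s = ['T1_a.nii.gz', 'T1_b.nii.gz']
    t2s = ['T2_a.nii.gz']
    print MakeOneFileList(t1s, t2s, [], [], [], t1s[0])
    # -> ['T1_a.nii.gz', 'T1_b.nii.gz', 'T2_a.nii.gz']
    print MakeOneFileTypeList(t1s, t2s, [], [], [])
    # -> ['T1', 'T1', 'T2']
    print MakeOutFileList(t1s, t2s, [], [], [])
    # -> ['T1_a_corrected.nii.gz', 'T1_b_corrected.nii.gz', 'T2_a_corrected.nii.gz']

Note that the file and type lists only stay aligned when PrimaryT1 is T1List[0]; passing any other T1 duplicates that file in the path list while the type list is unchanged.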

-def CreateTissueClassifyWorkflow(WFname,CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,InterpolationMode):
-    tissueClassifyWF= pe.Workflow(name=WFname)
+
+def CreateTissueClassifyWorkflow(WFname, CLUSTER_QUEUE, CLUSTER_QUEUE_LONG, InterpolationMode):
+    tissueClassifyWF = pe.Workflow(name=WFname)
     inputsSpec = pe.Node(interface=IdentityInterface(fields=['T1List', 'T2List', 'PDList', 'FLList', 'OtherList', 'T1_count', 'PrimaryT1', 'atlasDefinition', 'atlasToSubjectInitialTransform']),
-                         run_without_submitting=True,
-                         name='inputspec' )
+                         run_without_submitting=True,
+                         name='inputspec')
     outputsSpec = pe.Node(interface=IdentityInterface(fields=['atlasToSubjectTransform', 'atlasToSubjectInverseTransform',
                                                               'outputLabels',
-                                                              'outputHeadLabels', # ???
+                                                              'outputHeadLabels',  # ???
                                                               #'t1_corrected', 't2_corrected',
                                                               't1_average',
                                                               't2_average',
@@ -101,67 +108,67 @@ def CreateTissueClassifyWorkflow(WFname,CLUSTER_QUEUE,CLUSTER_QUEUE_LONG,Interpo
                                                               'fl_average',
                                                               'posteriorImages']),
                          run_without_submitting=True,
-                         name='outputspec' )
+                         name='outputspec')

     ########################################################
     # Run BABCext on Multi-modal images
     ########################################################
-    makeImagePathList = pe.Node( Function(function=MakeOneFileList,
-                                          input_names = ['T1List','T2List','PDList','FLList','OtherList','PrimaryT1'],
-                                          output_names = ['imagePathList']), run_without_submitting=True, name="99_makeImagePathList")
-    tissueClassifyWF.connect( inputsSpec, 'T1List', makeImagePathList, 'T1List' )
-    tissueClassifyWF.connect( inputsSpec, 'T2List', makeImagePathList, 'T2List' )
-    tissueClassifyWF.connect( inputsSpec, 'PDList', makeImagePathList, 'PDList' )
-    tissueClassifyWF.connect( inputsSpec, 'FLList', makeImagePathList, 'FLList' )
-    tissueClassifyWF.connect( inputsSpec, 'OtherList', makeImagePathList, 'OtherList' )
+    makeImagePathList = pe.Node(Function(function=MakeOneFileList,
+                                         input_names=['T1List', 'T2List', 'PDList', 'FLList', 'OtherList', 'PrimaryT1'],
+                                         output_names=['imagePathList']), run_without_submitting=True, name="99_makeImagePathList")
+    tissueClassifyWF.connect(inputsSpec, 'T1List', makeImagePathList, 'T1List')
+    tissueClassifyWF.connect(inputsSpec, 'T2List', makeImagePathList, 'T2List')
+    tissueClassifyWF.connect(inputsSpec, 'PDList', makeImagePathList, 'PDList')
+    tissueClassifyWF.connect(inputsSpec, 'FLList', makeImagePathList, 'FLList')
+    tissueClassifyWF.connect(inputsSpec, 'OtherList', makeImagePathList, 'OtherList')
     # -- Standard mode to make 256^3 images
-    tissueClassifyWF.connect( inputsSpec, 'PrimaryT1', makeImagePathList, 'PrimaryT1' )
-
-    makeImageTypeList = pe.Node( Function(function=MakeOneFileTypeList,
-                                          input_names = ['T1List','T2List','PDList','FLList','OtherList'],
-                                          output_names = ['imageTypeList']), run_without_submitting=True, name="99_makeImageTypeList")
-    tissueClassifyWF.connect( inputsSpec, 'T1List', makeImageTypeList, 'T1List' )
-    tissueClassifyWF.connect( inputsSpec, 'T2List', makeImageTypeList, 'T2List' )
-    tissueClassifyWF.connect( inputsSpec, 'PDList', makeImageTypeList, 'PDList' )
-    tissueClassifyWF.connect( inputsSpec, 'FLList', makeImageTypeList, 'FLList' )
-    tissueClassifyWF.connect( inputsSpec, 'OtherList', makeImageTypeList, 'OtherList' )
-
-    makeOutImageList = pe.Node( Function(function=MakeOutFileList,
-                                         input_names = ['T1List','T2List','PDList','FLList','OtherList'],
-                                         output_names = ['outImageList']), run_without_submitting=True, name="99_makeOutImageList")
-    tissueClassifyWF.connect( inputsSpec, 'T1List', makeOutImageList, 'T1List' )
-    tissueClassifyWF.connect( inputsSpec, 'T2List', makeOutImageList, 'T2List' )
-    tissueClassifyWF.connect( inputsSpec, 'PDList', makeOutImageList, 'PDList' )
-    makeOutImageList.inputs.FLList=[] ## an emptyList HACK
-    #HACK tissueClassifyWF.connect( inputsSpec, 'FLList', makeOutImageList, 'FLList' )
-    tissueClassifyWF.connect( inputsSpec, 'OtherList', makeOutImageList, 'OtherList' )
-
-    BABCext= pe.Node(interface=BRAINSABCext(), name="BABC")
-    many_cpu_BABC_options_dictionary={'qsub_args': '-S /bin/bash -pe smp1 4-4 -l h_vmem=23G,mem_free=8G -o /dev/null -e /dev/null '+CLUSTER_QUEUE, 'overwrite': True}
-    BABCext.plugin_args=many_cpu_BABC_options_dictionary
-    tissueClassifyWF.connect(makeImagePathList,'imagePathList',BABCext,'inputVolumes')
-    tissueClassifyWF.connect(makeImageTypeList,'imageTypeList',BABCext,'inputVolumeTypes')
-    tissueClassifyWF.connect(makeOutImageList,'outImageList',BABCext,'outputVolumes')
+    tissueClassifyWF.connect(inputsSpec, 'PrimaryT1', makeImagePathList, 'PrimaryT1')
+
+    makeImageTypeList = pe.Node(Function(function=MakeOneFileTypeList,
+                                         input_names=['T1List', 'T2List', 'PDList', 'FLList', 'OtherList'],
+                                         output_names=['imageTypeList']), run_without_submitting=True, name="99_makeImageTypeList")
+    tissueClassifyWF.connect(inputsSpec, 'T1List', makeImageTypeList, 'T1List')
+    tissueClassifyWF.connect(inputsSpec, 'T2List', makeImageTypeList, 'T2List')
+    tissueClassifyWF.connect(inputsSpec, 'PDList', makeImageTypeList, 'PDList')
+    tissueClassifyWF.connect(inputsSpec, 'FLList', makeImageTypeList, 'FLList')
+    tissueClassifyWF.connect(inputsSpec, 'OtherList', makeImageTypeList, 'OtherList')
+
+    makeOutImageList = pe.Node(Function(function=MakeOutFileList,
+                                        input_names=['T1List', 'T2List', 'PDList', 'FLList', 'OtherList'],
+                                        output_names=['outImageList']), run_without_submitting=True, name="99_makeOutImageList")
+    tissueClassifyWF.connect(inputsSpec, 'T1List', makeOutImageList, 'T1List')
+    tissueClassifyWF.connect(inputsSpec, 'T2List', makeOutImageList, 'T2List')
+    tissueClassifyWF.connect(inputsSpec, 'PDList', makeOutImageList, 'PDList')
+    makeOutImageList.inputs.FLList = []  # an emptyList HACK
+    # HACK tissueClassifyWF.connect( inputsSpec, 'FLList', makeOutImageList, 'FLList' )
+    tissueClassifyWF.connect(inputsSpec, 'OtherList', makeOutImageList, 'OtherList')
+
+    BABCext = pe.Node(interface=BRAINSABCext(), name="BABC")
+    many_cpu_BABC_options_dictionary = {'qsub_args': '-S /bin/bash -pe smp1 4-4 -l h_vmem=23G,mem_free=8G -o /dev/null -e /dev/null ' + CLUSTER_QUEUE, 'overwrite': True}
+    BABCext.plugin_args = many_cpu_BABC_options_dictionary
+    tissueClassifyWF.connect(makeImagePathList, 'imagePathList', BABCext, 'inputVolumes')
+    tissueClassifyWF.connect(makeImageTypeList, 'imageTypeList', BABCext, 'inputVolumeTypes')
+    tissueClassifyWF.connect(makeOutImageList, 'outImageList', BABCext, 'outputVolumes')
     BABCext.inputs.debuglevel = 0
     BABCext.inputs.maxIterations = 3
     BABCext.inputs.maxBiasDegree = 4
     BABCext.inputs.filterIteration = 3
     BABCext.inputs.filterMethod = 'GradientAnisotropicDiffusion'
     BABCext.inputs.atlasToSubjectTransformType = 'SyN'
-    #BABCext.inputs.atlasToSubjectTransformType = 'BSpline'
-    #BABCext.inputs.gridSize = [28,20,24]
-    BABCext.inputs.gridSize = [10,10,10]
+    # BABCext.inputs.atlasToSubjectTransformType = 'BSpline'
+    # BABCext.inputs.gridSize = [28,20,24]
+    BABCext.inputs.gridSize = [10, 10, 10]
     BABCext.inputs.outputFormat = "NIFTI"
     BABCext.inputs.outputLabels = "brain_label_seg.nii.gz"
     BABCext.inputs.outputDirtyLabels = "volume_label_seg.nii.gz"
     BABCext.inputs.posteriorTemplate = "POSTERIOR_%s.nii.gz"
     BABCext.inputs.atlasToSubjectTransform = "atlas_to_subject.h5"
-    #BABCext.inputs.implicitOutputs = ['t1_average_BRAINSABC.nii.gz', 't2_average_BRAINSABC.nii.gz']
+    # BABCext.inputs.implicitOutputs = ['t1_average_BRAINSABC.nii.gz', 't2_average_BRAINSABC.nii.gz']
     BABCext.inputs.interpolationMode = InterpolationMode
     BABCext.inputs.outputDir = './'
-    tissueClassifyWF.connect(inputsSpec,'atlasDefinition',BABCext,'atlasDefinition')
-    tissueClassifyWF.connect(inputsSpec,'atlasToSubjectInitialTransform',BABCext,'atlasToSubjectInitialTransform')
+    tissueClassifyWF.connect(inputsSpec, 'atlasDefinition', BABCext, 'atlasDefinition')
+    tissueClassifyWF.connect(inputsSpec, 'atlasToSubjectInitialTransform', BABCext, 'atlasToSubjectInitialTransform')

     """
     Get the first T1 and T2 corrected images from BABCext
@@ -187,31 +194,31 @@ def get_first_T1_and_T2(in_files,T1_count):
     """
     #############
-    tissueClassifyWF.connect(BABCext,'atlasToSubjectTransform',outputsSpec,'atlasToSubjectTransform')
+    tissueClassifyWF.connect(BABCext, 'atlasToSubjectTransform', outputsSpec, 'atlasToSubjectTransform')

     def MakeInverseTransformFileName(TransformFileName):
         """### HACK:  This function is to work around a deficiency in BRAINSABCext where the inverse transform name
         is not being computed properly in the list outputs"""
-        fixed_inverse_name=TransformFileName.replace(".h5","_Inverse.h5")
+        fixed_inverse_name = TransformFileName.replace(".h5", "_Inverse.h5")
         return fixed_inverse_name
-    tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'atlasToSubjectTransform', MakeInverseTransformFileName ), "atlasToSubjectInverseTransform")] ), ] )
-    tissueClassifyWF.connect(BABCext,'outputLabels',outputsSpec,'outputLabels')
-    tissueClassifyWF.connect(BABCext,'outputDirtyLabels',outputsSpec,'outputHeadLabels')
+    tissueClassifyWF.connect([(BABCext, outputsSpec, [(('atlasToSubjectTransform', MakeInverseTransformFileName), "atlasToSubjectInverseTransform")]), ])
+    tissueClassifyWF.connect(BABCext, 'outputLabels', outputsSpec, 'outputLabels')
+    tissueClassifyWF.connect(BABCext, 'outputDirtyLabels', outputsSpec, 'outputHeadLabels')

-    tissueClassifyWF.connect( BABCext , 'outputT1AverageImage', outputsSpec, 't1_average')
-    tissueClassifyWF.connect( BABCext , 'outputT2AverageImage', outputsSpec, 't2_average')
-    tissueClassifyWF.connect( BABCext , 'outputPDAverageImage', outputsSpec, 'pd_average')
-    tissueClassifyWF.connect( BABCext , 'outputFLAverageImage', outputsSpec, 'fl_average')
+    tissueClassifyWF.connect(BABCext, 'outputT1AverageImage', outputsSpec, 't1_average')
+    tissueClassifyWF.connect(BABCext, 'outputT2AverageImage', outputsSpec, 't2_average')
+    tissueClassifyWF.connect(BABCext, 'outputPDAverageImage', outputsSpec, 'pd_average')
+    tissueClassifyWF.connect(BABCext, 'outputFLAverageImage', outputsSpec, 'fl_average')
     ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', getListIndexOrNoneIfOutOfRange, 0 ), "t1_average")] ), ] )
     ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', getListIndexOrNoneIfOutOfRange, 1 ), "t2_average")] ), ] )
     ## remove tissueClassifyWF.connect( [ ( BABCext, outputsSpec, [ (( 'outputAverageImages', getListIndexOrNoneIfOutOfRange, 2 ), "pd_average")] ), ] )

-    MakePosteriorDictionaryNode = pe.Node( Function(function=MakePosteriorDictionaryFunc,
-                                                    input_names = ['posteriorImages'],
-                                                    output_names = ['posteriorDictionary']), run_without_submitting=True, name="99_makePosteriorDictionary")
-    tissueClassifyWF.connect(BABCext,'posteriorImages',MakePosteriorDictionaryNode,'posteriorImages')
+    MakePosteriorDictionaryNode = pe.Node(Function(function=MakePosteriorDictionaryFunc,
+                                                   input_names=['posteriorImages'],
+                                                   output_names=['posteriorDictionary']), run_without_submitting=True, name="99_makePosteriorDictionary")
+    tissueClassifyWF.connect(BABCext, 'posteriorImages', MakePosteriorDictionaryNode, 'posteriorImages')

-    tissueClassifyWF.connect(MakePosteriorDictionaryNode,'posteriorDictionary',outputsSpec,'posteriorImages')
+    tissueClassifyWF.connect(MakePosteriorDictionaryNode, 'posteriorDictionary', outputsSpec, 'posteriorImages')

     return tissueClassifyWF
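The list-form connect above uses nipype's function-in-tuple syntax: (('atlasToSubjectTransform', MakeInverseTransformFileName), 'atlasToSubjectInverseTransform') applies the function to the source value in transit, so only the rewritten string reaches outputsSpec. A quick stand-alone check of the renaming hack (hypothetical file name):

    print MakeInverseTransformFileName('atlas_to_subject.h5')
    # -> atlas_to_subject_Inverse.h5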
diff --git a/AutoWorkup/baw_exp.py b/AutoWorkup/baw_exp.py
index 13c24459..18445a31 100644
--- a/AutoWorkup/baw_exp.py
+++ b/AutoWorkup/baw_exp.py
@@ -15,17 +15,19 @@ import sys

 ##############################################################################

-def get_global_sge_script(pythonPathsList,binPathsList,customEnvironment={}):
+
+
+def get_global_sge_script(pythonPathsList, binPathsList, customEnvironment={}):
     """This is a wrapper script for running commands on an SGE cluster
 so that all the python modules and commands are pathed properly"""
-    custEnvString=""
-    for key,value in customEnvironment.items():
-        custEnvString+="export "+key+"="+value+"\n"
+    custEnvString = ""
+    for key, value in customEnvironment.items():
+        custEnvString += "export " + key + "=" + value + "\n"

-    PYTHONPATH=":".join(pythonPathsList)
-    BASE_BUILDS=":".join(binPathsList)
-    GLOBAL_SGE_SCRIPT="""#!/bin/bash
+    PYTHONPATH = ":".join(pythonPathsList)
+    BASE_BUILDS = ":".join(binPathsList)
+    GLOBAL_SGE_SCRIPT = """#!/bin/bash
 echo "STARTED at: $(date +'%F-%T')"
 echo "Ran on: $(hostname)"
 export PATH={BINPATH}
@@ -40,22 +42,24 @@ def get_global_sge_script(pythonPathsList,binPathsList,customEnvironment={}):
 echo {CUSTENV}
 {CUSTENV}
 ## NOTE: nipype inserts the actual commands that need running below this section.
-""".format(PYTHONPATH=PYTHONPATH,BINPATH=BASE_BUILDS,CUSTENV=custEnvString)
+""".format(PYTHONPATH=PYTHONPATH, BINPATH=BASE_BUILDS, CUSTENV=custEnvString)
     return GLOBAL_SGE_SCRIPT

 # From http://stackoverflow.com/questions/1597649/replace-strings-in-files-by-python
+
+
 def file_replace(fname, out_fname, pat, s_after):
     if fname == out_fname:
         print "ERROR: input and output file names can not match"
         sys.exit(-1)
-        return #input and output files can not match
+        return  # input and output files can not match
     # first, see if the pattern is even in the file.
     with open(fname) as f:
         if not any(re.search(pat, line) for line in f):
             print "ERROR: substitution pattern not found in reference file"
             sys.exit(-1)
-            return # pattern does not occur in file so we are done.
+            return  # pattern does not occur in file so we are done.

     # pattern is in the file, so perform replace operation.
     with open(fname) as f:
@@ -64,6 +68,7 @@ def file_replace(fname, out_fname, pat, s_after):
             out.write(re.sub(pat, s_after, line))
         out.close()

+
 def main(argv=None):
     import argparse
     import ConfigParser
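file_replace is used further down to instantiate the atlas-definition template; a minimal usage sketch (paths are placeholders, not the real experiment layout):

    # Copy the .xml.in template to a concrete .xml, substituting the atlas token.
    file_replace('/tmp/ExtendedAtlasDefinition.xml.in',
                 '/tmp/ExtendedAtlasDefinition.xml',
                 '@ATLAS_DIRECTORY@',
                 '/tmp/Atlas')

Note that both failure branches call sys.exit(-1), so the return statements after them are unreachable and serve only as documentation.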
@@ -92,23 +97,23 @@ def main(argv=None):
     expConfig.read(input_arguments.ExperimentConfig)

     # Experiment specific information
-    subject_data_file=expConfig.get('EXPERIMENT_DATA','SESSION_DB')
-    ExperimentName=expConfig.get('EXPERIMENT_DATA','EXPERIMENTNAME')
-    WORKFLOW_COMPONENTS_STRING=expConfig.get('EXPERIMENT_DATA','WORKFLOW_COMPONENTS')
-    WORKFLOW_COMPONENTS=eval(WORKFLOW_COMPONENTS_STRING)
+    subject_data_file = expConfig.get('EXPERIMENT_DATA', 'SESSION_DB')
+    ExperimentName = expConfig.get('EXPERIMENT_DATA', 'EXPERIMENTNAME')
+    WORKFLOW_COMPONENTS_STRING = expConfig.get('EXPERIMENT_DATA', 'WORKFLOW_COMPONENTS')
+    WORKFLOW_COMPONENTS = eval(WORKFLOW_COMPONENTS_STRING)

     # Platform specific information
     # Prepend the python search paths
-    PYTHON_AUX_PATHS=expConfig.get(input_arguments.processingEnvironment,'PYTHON_AUX_PATHS')
-    PYTHON_AUX_PATHS=PYTHON_AUX_PATHS.split(':')
+    PYTHON_AUX_PATHS = expConfig.get(input_arguments.processingEnvironment, 'PYTHON_AUX_PATHS')
+    PYTHON_AUX_PATHS = PYTHON_AUX_PATHS.split(':')
     PYTHON_AUX_PATHS.extend(sys.path)
-    sys.path=PYTHON_AUX_PATHS
+    sys.path = PYTHON_AUX_PATHS
     ######################################################################################
     ###### Now ensure that all the required packages can be read in from this custom path
     #\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/
-    #print sys.path
-    from nipype import config  ## NOTE: This needs to occur AFTER the PYTHON_AUX_PATHS has been modified
-    config.enable_debug_mode() ## NOTE: This needs to occur AFTER the PYTHON_AUX_PATHS has been modified
+    # print sys.path
+    from nipype import config  # NOTE: This needs to occur AFTER the PYTHON_AUX_PATHS has been modified
+    config.enable_debug_mode()  # NOTE: This needs to occur AFTER the PYTHON_AUX_PATHS has been modified
     ##############################################################################
     from nipype.interfaces.base import CommandLine, CommandLineInputSpec, TraitedSpec, File, Directory
     from nipype.interfaces.base import traits, isdefined, BaseInterface
@@ -117,9 +122,9 @@
     import nipype.pipeline.engine as pe  # pypeline engine

     from nipype.interfaces.freesurfer import ReconAll
     from nipype.utils.misc import package_check
-    #package_check('nipype', '5.4', 'tutorial1') ## HACK: Check nipype version
+    # package_check('nipype', '5.4', 'tutorial1') ## HACK: Check nipype version
     package_check('numpy', '1.3', 'tutorial1')
     package_check('scipy', '0.7', 'tutorial1')
     package_check('networkx', '1.0', 'tutorial1')
@@ -130,16 +134,16 @@
     #\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/
     #####################################################################################
     # Prepend the shell environment search paths
-    PROGRAM_PATHS=expConfig.get(input_arguments.processingEnvironment,'PROGRAM_PATHS')
-    PROGRAM_PATHS=PROGRAM_PATHS.split(':')
+    PROGRAM_PATHS = expConfig.get(input_arguments.processingEnvironment, 'PROGRAM_PATHS')
+    PROGRAM_PATHS = PROGRAM_PATHS.split(':')
     PROGRAM_PATHS.extend(os.environ['PATH'].split(':'))
-    os.environ['PATH']=':'.join(PROGRAM_PATHS)
+    os.environ['PATH'] = ':'.join(PROGRAM_PATHS)
     # Define platform specific output write paths
-    mountPrefix=expConfig.get(input_arguments.processingEnvironment,'MOUNTPREFIX')
-    BASEOUTPUTDIR=expConfig.get(input_arguments.processingEnvironment,'BASEOUTPUTDIR')
-    ExperimentBaseDirectoryPrefix=os.path.realpath(os.path.join(BASEOUTPUTDIR,ExperimentName))
-    ExperimentBaseDirectoryCache=ExperimentBaseDirectoryPrefix+"_CACHE"
-    ExperimentBaseDirectoryResults=ExperimentBaseDirectoryPrefix +"_Results"
+    mountPrefix = expConfig.get(input_arguments.processingEnvironment, 'MOUNTPREFIX')
+    BASEOUTPUTDIR = expConfig.get(input_arguments.processingEnvironment, 'BASEOUTPUTDIR')
+    ExperimentBaseDirectoryPrefix = os.path.realpath(os.path.join(BASEOUTPUTDIR, ExperimentName))
+    ExperimentBaseDirectoryCache = ExperimentBaseDirectoryPrefix + "_CACHE"
+    ExperimentBaseDirectoryResults = ExperimentBaseDirectoryPrefix + "_Results"
     if not os.path.exists(ExperimentBaseDirectoryCache):
         os.makedirs(ExperimentBaseDirectoryCache)
     if not os.path.exists(ExperimentBaseDirectoryResults):
@@ -147,17 +151,17 @@ def main(argv=None):
     # Define workup common reference data sets
     #    The ATLAS needs to be copied to the ExperimentBaseDirectoryPrefix
     #    The ATLAS pathing must stay constant
-    ATLASPATH=expConfig.get(input_arguments.processingEnvironment,'ATLASPATH')
+    ATLASPATH = expConfig.get(input_arguments.processingEnvironment, 'ATLASPATH')
     if not os.path.exists(ATLASPATH):
         print("ERROR: Invalid Path for Atlas: {0}".format(ATLASPATH))
         sys.exit(-1)
-    CACHE_ATLASPATH=os.path.realpath(os.path.join(ExperimentBaseDirectoryCache,'Atlas'))
+    CACHE_ATLASPATH = os.path.realpath(os.path.join(ExperimentBaseDirectoryCache, 'Atlas'))
     from distutils.dir_util import copy_tree
     if not os.path.exists(CACHE_ATLASPATH):
-        print("Copying a reference of the atlas to the experiment cache directory:\n    from: {0}\n    to: {1}".format(ATLASPATH,CACHE_ATLASPATH))
-        copy_tree(ATLASPATH,CACHE_ATLASPATH,preserve_mode=1,preserve_times=1)
+        print("Copying a reference of the atlas to the experiment cache directory:\n    from: {0}\n    to: {1}".format(ATLASPATH, CACHE_ATLASPATH))
+        copy_tree(ATLASPATH, CACHE_ATLASPATH, preserve_mode=1, preserve_times=1)
         ## Now generate the xml file with the correct pathing
-        file_replace(os.path.join(ATLASPATH,'ExtendedAtlasDefinition.xml.in'),os.path.join(CACHE_ATLASPATH,'ExtendedAtlasDefinition.xml'),"@ATLAS_DIRECTORY@",CACHE_ATLASPATH)
+        file_replace(os.path.join(ATLASPATH, 'ExtendedAtlasDefinition.xml.in'), os.path.join(CACHE_ATLASPATH, 'ExtendedAtlasDefinition.xml'), "@ATLAS_DIRECTORY@", CACHE_ATLASPATH)
     else:
         print("Atlas already exists in experiment cache directory: {0}".format(CACHE_ATLASPATH))
     # Just to be safe, copy the model file as well
@@ -169,7 +173,7 @@ def main(argv=None):
         BCDModelFile = os.path.join('Transforms_h5', BCDModelFile)
         orig = os.path.join(BCDMODELPATH, BCDModelFile)
         new = os.path.join(CACHE_BCDMODELPATH, BCDModelFile)
-        new = new.replace('Transforms_h5/','') # Flatten back out, even if you needed to get files from subdirectory.
+        new = new.replace('Transforms_h5/', '')  # Flatten back out, even if you needed to get files from subdirectory.
         if not os.path.exists(CACHE_BCDMODELPATH):
             os.mkdir(CACHE_BCDMODELPATH)
         if not os.path.exists(new):
@@ -178,28 +182,27 @@ def main(argv=None):
         else:
             print("BCD Model exists in cache directory: {0}".format(new))

-
-    CUSTOM_ENVIRONMENT=expConfig.get(input_arguments.processingEnvironment,'CUSTOM_ENVIRONMENT')
-    CUSTOM_ENVIRONMENT=eval(CUSTOM_ENVIRONMENT)
+    CUSTOM_ENVIRONMENT = expConfig.get(input_arguments.processingEnvironment, 'CUSTOM_ENVIRONMENT')
+    CUSTOM_ENVIRONMENT = eval(CUSTOM_ENVIRONMENT)
     ## Set custom environmental variables so that subproceses work properly (i.e. for Freesurfer)
-    #print CUSTOM_ENVIRONMENT
-    for key,value in CUSTOM_ENVIRONMENT.items():
-        #print "SETTING: ", key, value
-        os.putenv(key,value)
-        os.environ[key]=value
-    #print os.environ
-    #sys.exit(-1)
+    # print CUSTOM_ENVIRONMENT
+    for key, value in CUSTOM_ENVIRONMENT.items():
+        # print "SETTING: ", key, value
+        os.putenv(key, value)
+        os.environ[key] = value
+    # print os.environ
+    # sys.exit(-1)

     ## If freesurfer is requested, then ensure that a sane environment is available
     if 'FREESURFER' in WORKFLOW_COMPONENTS:
         print "FREESURFER NEEDS TO CHECK FOR SANE ENVIRONMENT HERE."

-    CLUSTER_QUEUE=expConfig.get(input_arguments.processingEnvironment,'CLUSTER_QUEUE')
-    CLUSTER_QUEUE_LONG=expConfig.get(input_arguments.processingEnvironment,'CLUSTER_QUEUE_LONG')
+    CLUSTER_QUEUE = expConfig.get(input_arguments.processingEnvironment, 'CLUSTER_QUEUE')
+    CLUSTER_QUEUE_LONG = expConfig.get(input_arguments.processingEnvironment, 'CLUSTER_QUEUE_LONG')

     ## Setup environment for CPU load balancing of ITK based programs.
     import multiprocessing
-    total_CPUS=multiprocessing.cpu_count()
+    total_CPUS = multiprocessing.cpu_count()
     if input_arguments.wfrun == 'helium_all.q':
         pass
     elif input_arguments.wfrun == 'helium_all.q_graph':
@@ -207,13 +210,13 @@ def main(argv=None):
     elif input_arguments.wfrun == 'ipl_OSX':
         pass
     elif input_arguments.wfrun == 'local_4':
-        os.environ['NSLOTS']="{0}".format(total_CPUS/4)
+        os.environ['NSLOTS'] = "{0}".format(total_CPUS / 4)
     elif input_arguments.wfrun == 'local_12':
-        os.environ['NSLOTS']="{0}".format(total_CPUS/12)
+        os.environ['NSLOTS'] = "{0}".format(total_CPUS / 12)
     elif input_arguments.wfrun == 'local':
-        os.environ['NSLOTS']="{0}".format(total_CPUS/1)
+        os.environ['NSLOTS'] = "{0}".format(total_CPUS / 1)
     elif input_arguments.wfrun == 'ds_runner':
-        os.environ['NSLOTS']="{0}".format(total_CPUS/1)
+        os.environ['NSLOTS'] = "{0}".format(total_CPUS / 1)
     else:
         print "FAILED RUN: You must specify the run environment type. [helium_all.q,helium_all.q_graph,ipl_OSX,local_4,local_12,local,ds_runner]"
         print input_arguments.wfrun
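One caveat in the NSLOTS block above: under Python 2, total_CPUS / 4 is integer division, so a machine with fewer than four cores would export NSLOTS=0. A defensive variant (an editorial sketch, not what the patch does) clamps the value:

    import multiprocessing
    import os
    total_CPUS = multiprocessing.cpu_count()
    os.environ['NSLOTS'] = "{0}".format(max(1, total_CPUS // 4))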
@@ -221,17 +224,17 @@
     print "Configuring Pipeline"
     import SessionDB
-    subjectDatabaseFile=os.path.join( ExperimentBaseDirectoryCache,'InternalWorkflowSubjectDB.db')
-    subject_list=input_arguments.subject.split(',')
+    subjectDatabaseFile = os.path.join(ExperimentBaseDirectoryCache, 'InternalWorkflowSubjectDB.db')
+    subject_list = input_arguments.subject.split(',')
     ## TODO: Only make DB if db is older than subject_data_file.
-    if ( not os.path.exists(subjectDatabaseFile) ) or ( os.path.getmtime(subjectDatabaseFile) < os.path.getmtime(subject_data_file) ):
-        ExperimentDatabase=SessionDB.SessionDB(subjectDatabaseFile,subject_list)
-        ExperimentDatabase.MakeNewDB(subject_data_file,mountPrefix)
-        ExperimentDatabase=None
-        ExperimentDatabase=SessionDB.SessionDB(subjectDatabaseFile,subject_list)
+    if (not os.path.exists(subjectDatabaseFile)) or (os.path.getmtime(subjectDatabaseFile) < os.path.getmtime(subject_data_file)):
+        ExperimentDatabase = SessionDB.SessionDB(subjectDatabaseFile, subject_list)
+        ExperimentDatabase.MakeNewDB(subject_data_file, mountPrefix)
+        ExperimentDatabase = None
+        ExperimentDatabase = SessionDB.SessionDB(subjectDatabaseFile, subject_list)
     else:
         print("Using cached database, {0}".format(subjectDatabaseFile))
-        ExperimentDatabase=SessionDB.SessionDB(subjectDatabaseFile,subject_list)
+        ExperimentDatabase = SessionDB.SessionDB(subjectDatabaseFile, subject_list)
     print "ENTIRE DB for {_subjid}: ".format(_subjid=ExperimentDatabase.getSubjectFilter())
     print "^^^^^^^^^^^^^"
     for row in ExperimentDatabase.getEverything():
@@ -240,52 +243,52 @@
     ## Create the shell wrapper script for ensuring that all jobs running on remote hosts from SGE
     #  have the same environment as the job submission host.
-    JOB_SCRIPT=get_global_sge_script(sys.path,PROGRAM_PATHS,CUSTOM_ENVIRONMENT)
+    JOB_SCRIPT = get_global_sge_script(sys.path, PROGRAM_PATHS, CUSTOM_ENVIRONMENT)
     print JOB_SCRIPT

-    import WorkupT1T2 ## NOTE: This needs to occur AFTER the PYTHON_AUX_PATHS has been modified
+    import WorkupT1T2  # NOTE: This needs to occur AFTER the PYTHON_AUX_PATHS has been modified
     print "TESTER"
     import ShortWorkupT1T2
     for subjectid in ExperimentDatabase.getAllSubjects():
         if input_arguments.doshort:
-            baw200=ShortWorkupT1T2.ShortWorkupT1T2(subjectid,mountPrefix,
-              os.path.join(ExperimentBaseDirectoryCache,str(subjectid)),
-              ExperimentBaseDirectoryResults,
-              ExperimentDatabase,
-              CACHE_ATLASPATH,
-              CACHE_BCDMODELPATH,WORKFLOW_COMPONENTS=WORKFLOW_COMPONENTS,CLUSTER_QUEUE=CLUSTER_QUEUE,CLUSTER_QUEUE_LONG=CLUSTER_QUEUE_LONG)
+            baw200 = ShortWorkupT1T2.ShortWorkupT1T2(subjectid, mountPrefix,
+                                                     os.path.join(ExperimentBaseDirectoryCache, str(subjectid)),
+                                                     ExperimentBaseDirectoryResults,
+                                                     ExperimentDatabase,
+                                                     CACHE_ATLASPATH,
+                                                     CACHE_BCDMODELPATH, WORKFLOW_COMPONENTS=WORKFLOW_COMPONENTS, CLUSTER_QUEUE=CLUSTER_QUEUE, CLUSTER_QUEUE_LONG=CLUSTER_QUEUE_LONG)
         else:
-            baw200=WorkupT1T2.WorkupT1T2(subjectid,mountPrefix,
-              os.path.join(ExperimentBaseDirectoryCache,str(subjectid)),
-              ExperimentBaseDirectoryResults,
-              ExperimentDatabase,
-              CACHE_ATLASPATH,
-              CACHE_BCDMODELPATH,WORKFLOW_COMPONENTS=WORKFLOW_COMPONENTS,CLUSTER_QUEUE=CLUSTER_QUEUE,CLUSTER_QUEUE_LONG=CLUSTER_QUEUE_LONG,SGE_JOB_SCRIPT=JOB_SCRIPT)
+            baw200 = WorkupT1T2.WorkupT1T2(subjectid, mountPrefix,
+                                           os.path.join(ExperimentBaseDirectoryCache, str(subjectid)),
+                                           ExperimentBaseDirectoryResults,
+                                           ExperimentDatabase,
+                                           CACHE_ATLASPATH,
+                                           CACHE_BCDMODELPATH, WORKFLOW_COMPONENTS=WORKFLOW_COMPONENTS, CLUSTER_QUEUE=CLUSTER_QUEUE, CLUSTER_QUEUE_LONG=CLUSTER_QUEUE_LONG, SGE_JOB_SCRIPT=JOB_SCRIPT)
         print "Start Processing"

-        SGEFlavor='SGE'
+        SGEFlavor = 'SGE'
         try:
             if input_arguments.wfrun == 'helium_all.q':
                 baw200.run(plugin=SGEFlavor,
-                    plugin_args=dict(template=JOB_SCRIPT,qsub_args="-S /bin/bash -pe smp1 1-12 -l h_vmem=19G,mem_free=9G -o /dev/null -e /dev/null "+CLUSTER_QUEUE))
+                           plugin_args=dict(template=JOB_SCRIPT, qsub_args="-S /bin/bash -pe smp1 1-12 -l h_vmem=19G,mem_free=9G -o /dev/null -e /dev/null " + CLUSTER_QUEUE))
             elif input_arguments.wfrun == 'helium_all.q_graph':
-                SGEFlavor='SGEGraph' #Use the SGEGraph processing
+                SGEFlavor = 'SGEGraph'  # Use the SGEGraph processing
                 baw200.run(plugin=SGEFlavor,
-                    plugin_args=dict(template=JOB_SCRIPT,qsub_args="-S /bin/bash -pe smp1 1-12 -l h_vmem=19G,mem_free=9G -o /dev/null -e /dev/null "+CLUSTER_QUEUE))
+                           plugin_args=dict(template=JOB_SCRIPT, qsub_args="-S /bin/bash -pe smp1 1-12 -l h_vmem=19G,mem_free=9G -o /dev/null -e /dev/null " + CLUSTER_QUEUE))
             elif input_arguments.wfrun == 'ipl_OSX':
                 baw200.write_graph()
                 print "Running On ipl_OSX"
                 baw200.run(plugin=SGEFlavor,
-                    plugin_args=dict(template=JOB_SCRIPT,qsub_args="-S /bin/bash -pe smp1 1-12 -l h_vmem=19G,mem_free=9G -o /dev/null -e /dev/null "+CLUSTER_QUEUE))
+                           plugin_args=dict(template=JOB_SCRIPT, qsub_args="-S /bin/bash -pe smp1 1-12 -l h_vmem=19G,mem_free=9G -o /dev/null -e /dev/null " + CLUSTER_QUEUE))
             elif input_arguments.wfrun == 'local_4':
                 baw200.write_graph()
                 print "Running with 4 parallel processes on local machine"
-                baw200.run(plugin='MultiProc', plugin_args={'n_procs' : 4})
+                baw200.run(plugin='MultiProc', plugin_args={'n_procs': 4})
             elif input_arguments.wfrun == 'local_12':
                 baw200.write_graph()
                 print "Running with 12 parallel processes on local machine"
-                baw200.run(plugin='MultiProc', plugin_args={'n_procs' : 12})
-            elif input_arguments.wfrun =='ds_runner':
+                baw200.run(plugin='MultiProc', plugin_args={'n_procs': 12})
+            elif input_arguments.wfrun == 'ds_runner':
                 class ds_runner(object):
                     def run(self, graph, **kwargs):
                         for node in graph.nodes():
@@ -298,13 +301,13 @@ def run(self, graph, **kwargs):
                 except:
                     pass
                 print "Running sequentially on local machine"
-                #baw200.run(updatehash=True)
+                # baw200.run(updatehash=True)
                 baw200.run()
             else:
                 print "You must specify the run environment type. [helium_all.q,helium_all.q_graph,ipl_OSX,local_4,local_12,local]"
                 print input_arguments.wfrun
                 sys.exit(-1)
-        except Exception,err:
+        except Exception, err:
             print("ERROR: EXCEPTION CAUGHT IN RUNNING SUBJECT {0}".format(subjectid))
             raise err
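The handler above keeps the Python 2-only spelling `except Exception, err`. If this script ever needs to run under a 2/3-compatible interpreter, the equivalent form is (sketch):

    try:
        baw200.run()
    except Exception as err:
        print("ERROR: EXCEPTION CAUGHT IN RUNNING SUBJECT {0}".format(subjectid))
        raise  # a bare raise preserves the original traceback, unlike 'raise err'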
diff --git a/AutoWorkup/local_generate_classes.py b/AutoWorkup/local_generate_classes.py
index e69dff78..c5440806 100644
--- a/AutoWorkup/local_generate_classes.py
+++ b/AutoWorkup/local_generate_classes.py
@@ -7,110 +7,110 @@
 # build with all options turned on (even the non-default options)
 # export PATH=~/src/BSA-clang31/bin:${PATH}
 # export PYTHONPATH=~/src/BSA-clang31/NIPYPE
-#cd ~/src/BRAINSStandAlone/AutoWorkup/SEMTools/; rm -rf ~/src/BRAINSStandAlone/AutoWorkup/SEMTools/* ; python ../local_generate_classes.py
+# cd ~/src/BRAINSStandAlone/AutoWorkup/SEMTools/; rm -rf ~/src/BRAINSStandAlone/AutoWorkup/SEMTools/* ; python ../local_generate_classes.py

 from nipype.interfaces.slicer.generate_classes import generate_all_classes

 modules_list = [
-                'BRAINSMultiSTAPLE',
-                'AssignArray',
-                'AverageBrainGenerator',
-                'AverageScalarsByResampling',
-                'BRAINSABC',
-                'BRAINSAlignMSP',
-                'BRAINSApplySurfaceLabels',
-                'BRAINSAssignSurfaceFeatures',
-                'BRAINSClipInferior',
-                'BRAINSConstellationDetector',
-                'BRAINSConstellationModeler',
-                'BRAINSContinuousClass',
-                'BRAINSCreateLabelMapFromProbabilityMaps',
-                'BRAINSCut',
-                'BRAINSDemonWarp',
-                'BRAINSEyeDetector',
-                'BRAINSFit',
-                'BRAINSFitEZ',
-                'BRAINSInitializedControlPoints',
-                'BRAINSLandmarkInitializer',
-                'BRAINSLinearModelerEPCA',
-                'BRAINSLmkTransform',
-                'BRAINSMeasureSurface',
-                'BRAINSMultiModeSegment',
-                'BRAINSMush',
-                'BRAINSPosteriorToContinuousClass',
-                'BRAINSROIAuto',
-                'BRAINSResample',
-                'BRAINSResize',
-                'BRAINSSnapShotWriter',
-                'BRAINSSurfaceFlattening',
-                'BRAINSSurfaceGeneration',
-                'BRAINSTransformConvert',
-                'BRAINSTransformFromFiducials',
-                'BRAINSTrimForegroundInDirection',
-                'BinaryMaskEditorBasedOnLandmarks',
-                'CannyEdge',
-                'CannySegmentationLevelSetImageFilter',
-                'CleanUpOverlapLabels',
-                'CombineLabels',
-                'CompareSurfaces',
-                'CreateMask',
-                'DWIConvert',
-                'DilateImage',
-                'DilateMask',
-                'DistanceMaps',
-                'ESLR',
-                'ErodeImage',
-                'FlippedDifference',
-                'GenerateBrainClippedImage',
-                'GenerateCsfClippedFromClassifiedImage',
-                'GenerateLabelMapFromProbabilityMap',
-                'GenerateSummedGradientImage',
-                'GenerateTestImage',
-                'GradientAnisotropicDiffusionImageFilter',
-                'HammerAttributeCreator',
-                'HistogramMatchingFilter',
-                'IcosahedronResampler',
-                'ImageRegionPlotter',
-                'JointHistogram',
-                'LabelMaps',
-                'MultiResolutionRegistration',
-                'NeighborhoodMean',
-                'NeighborhoodMedian',
-                'NoiseGenerator',
-                'ProbabilityLabels',
-                'QuadEdgeMeshClampScalars',
-                'QuadEdgeMeshHistogramMatching',
-                'QuadEdgeMeshPiecewiseRescale',
-                'QuadEdgeMeshSimilarity',
-                'RearrangeSurfaceLabels',
-                'RemoveTinyLabels',
-                'ResampleQuadEdgeMesh',
-                'STAPLEAnalysis',
-                'ShuffleVectorsModule',
-                'SimilarityIndex',
-                'SurfaceColor',
-                'SurfaceLabelCleanUp',
-                'TextureFromNoiseImageFilter',
-                'TextureMeasureFilter',
-                'VBRAINSDemonWarp',
-                'WarpQuadEdgeMesh',
-                'compareTractInclusion',
-                'extractNrrdVectorIndex',
-                'fcsv_to_hdf5',
-                'gtractAnisotropyMap',
-                'gtractAverageBvalues',
-                'gtractClipAnisotropy',
-                'gtractCoRegAnatomy',
-                'gtractConcatDwi',
-                'gtractCopyImageOrientation',
-                'gtractCoregBvalues',
-                'gtractCostFastMarching',
-                'gtractCreateGuideFiber',
-                'gtractFastMarchingTracking',
-                'gtractFiberTracking',
-                'gtractImageConformity',
-                'gtractInvertBSplineTransform',
-                'gtractInvertDisplacementField',
+    'BRAINSMultiSTAPLE',
+    'AssignArray',
+    'AverageBrainGenerator',
+    'AverageScalarsByResampling',
+    'BRAINSABC',
+    'BRAINSAlignMSP',
+    'BRAINSApplySurfaceLabels',
+    'BRAINSAssignSurfaceFeatures',
+    'BRAINSClipInferior',
+    'BRAINSConstellationDetector',
+    'BRAINSConstellationModeler',
+    'BRAINSContinuousClass',
+    'BRAINSCreateLabelMapFromProbabilityMaps',
+    'BRAINSCut',
+    'BRAINSDemonWarp',
+    'BRAINSEyeDetector',
+    'BRAINSFit',
+    'BRAINSFitEZ',
+    'BRAINSInitializedControlPoints',
+    'BRAINSLandmarkInitializer',
+    'BRAINSLinearModelerEPCA',
+    'BRAINSLmkTransform',
+    'BRAINSMeasureSurface',
+    'BRAINSMultiModeSegment',
+    'BRAINSMush',
+    'BRAINSPosteriorToContinuousClass',
+    'BRAINSROIAuto',
+    'BRAINSResample',
+    'BRAINSResize',
+    'BRAINSSnapShotWriter',
+    'BRAINSSurfaceFlattening',
+    'BRAINSSurfaceGeneration',
+    'BRAINSTransformConvert',
+    'BRAINSTransformFromFiducials',
+    'BRAINSTrimForegroundInDirection',
+    'BinaryMaskEditorBasedOnLandmarks',
+    'CannyEdge',
+    'CannySegmentationLevelSetImageFilter',
+    'CleanUpOverlapLabels',
+    'CombineLabels',
+    'CompareSurfaces',
+    'CreateMask',
+    'DWIConvert',
+    'DilateImage',
+    'DilateMask',
+    'DistanceMaps',
+    'ESLR',
+    'ErodeImage',
+    'FlippedDifference',
+    'GenerateBrainClippedImage',
+    'GenerateCsfClippedFromClassifiedImage',
+    'GenerateLabelMapFromProbabilityMap',
+    'GenerateSummedGradientImage',
+    'GenerateTestImage',
+    'GradientAnisotropicDiffusionImageFilter',
+    'HammerAttributeCreator',
+    'HistogramMatchingFilter',
+    'IcosahedronResampler',
+    'ImageRegionPlotter',
+    'JointHistogram',
+    'LabelMaps',
+    'MultiResolutionRegistration',
+    'NeighborhoodMean',
+    'NeighborhoodMedian',
+    'NoiseGenerator',
+    'ProbabilityLabels',
+    'QuadEdgeMeshClampScalars',
+    'QuadEdgeMeshHistogramMatching',
+    'QuadEdgeMeshPiecewiseRescale',
+    'QuadEdgeMeshSimilarity',
+    'RearrangeSurfaceLabels',
+    'RemoveTinyLabels',
+    'ResampleQuadEdgeMesh',
+    'STAPLEAnalysis',
+    'ShuffleVectorsModule',
+    'SimilarityIndex',
+    'SurfaceColor',
+    'SurfaceLabelCleanUp',
+    'TextureFromNoiseImageFilter',
+    'TextureMeasureFilter',
+    'VBRAINSDemonWarp',
+    'WarpQuadEdgeMesh',
+    'compareTractInclusion',
+    'extractNrrdVectorIndex',
+    'fcsv_to_hdf5',
+    'gtractAnisotropyMap',
+    'gtractAverageBvalues',
+    'gtractClipAnisotropy',
+    'gtractCoRegAnatomy',
+    'gtractConcatDwi',
+    'gtractCopyImageOrientation',
+    'gtractCoregBvalues',
+    'gtractCostFastMarching',
+    'gtractCreateGuideFiber',
+    'gtractFastMarchingTracking',
+    'gtractFiberTracking',
+    'gtractImageConformity',
+    'gtractInvertBSplineTransform',
+    'gtractInvertDisplacementField',
     'gtractInvertRigidTransform',
     'gtractResampleAnisotropy',
     'gtractResampleB0',
@@ -126,5 +126,5 @@
     'SmoothingMeshScalars'
 ]

-launcher=['']
+launcher = ['']
 generate_all_classes(modules_list=modules_list, launcher=[])
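Note that the launcher assignment above is dead code: generate_all_classes is called with a fresh launcher=[] rather than the launcher variable. If the assignment is meant to take effect, the call would presumably read (a guess at intent, not part of the patch):

    launcher = ['']
    generate_all_classes(modules_list=modules_list, launcher=launcher)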
diff --git a/AutoWorkup/runOneAW.py b/AutoWorkup/runOneAW.py
index 4d5c7ed2..f2cb80d7 100644
--- a/AutoWorkup/runOneAW.py
+++ b/AutoWorkup/runOneAW.py
@@ -3,6 +3,7 @@
 import csv
 import sys

+
 class runOneAW():

     def main(self):
@@ -24,9 +25,9 @@ def executeAW(self):
            -ExperimentConfig {configFile} \
            -pe LOCAL_ENVIRONMENT \
            -wfrun local \
-           -subject {subject} \n""".format(brainsToolsScriptsDir= input_arguments.brainsToolsScriptsDir,
+           -subject {subject} \n""".format(brainsToolsScriptsDir=input_arguments.brainsToolsScriptsDir,
                                            configFile=self.configPath, subject=input_arguments.subject)
-        print '-'*80
+        print '-' * 80
         print '\nExecuting command: \n{bawCommand}'.format(bawCommand=bawCommand)
         os.system(bawCommand)

@@ -139,10 +140,10 @@ def generateConfigFile(self):
 ATLASPATH=%(_BRAINSTOOLS_BUILD_PATH)s/ReferenceAtlas-build/Atlas/Atlas_20120830
 # The path to the model files to be used by BCD.
 BCDMODELPATH=%(_BRAINSTOOLS_BUILD_PATH)s/BRAINSTools-build/TestData"""
-        firstReplace = configString.replace('[replaceme_sessionDB]',self.sessionPath)
-        secondReplace = firstReplace.replace('[replaceme_outputDir]',input_arguments.experimentOutputDir)
-        thirdReplace = secondReplace.replace('[replaceme_brainsToolsScriptsDir]',input_arguments.brainsToolsScriptsDir)
-        newConfigString = thirdReplace.replace('[replaceme_brainsToolsBuildDir]',input_arguments.brainsToolsBuildDir)
+        firstReplace = configString.replace('[replaceme_sessionDB]', self.sessionPath)
+        secondReplace = firstReplace.replace('[replaceme_outputDir]', input_arguments.experimentOutputDir)
+        thirdReplace = secondReplace.replace('[replaceme_brainsToolsScriptsDir]', input_arguments.brainsToolsScriptsDir)
+        newConfigString = thirdReplace.replace('[replaceme_brainsToolsBuildDir]', input_arguments.brainsToolsBuildDir)
         handle = open(self.configPath, 'w')
         handle.write(newConfigString)
         handle.close()
@@ -171,7 +172,7 @@ def generateConfigFile(self):
     group.add_argument('-brainsToolsBuildDir', action="store", dest='brainsToolsBuildDir', required=True, help='The build directory for BRAINSSTANDALONE ')
     parser.add_argument('--version', action='version', version='%(prog)s 1.0')
-    #parser.add_argument('-v', action='store_false', dest='verbose', default=True,
+    # parser.add_argument('-v', action='store_false', dest='verbose', default=True,
     #                    help='If not present, prints the locations')
     input_arguments = parser.parse_args()
diff --git a/AutoWorkup/updateAutoWorkupFile.py b/AutoWorkup/updateAutoWorkupFile.py
index d3177ade..b3ef0d99 100644
--- a/AutoWorkup/updateAutoWorkupFile.py
+++ b/AutoWorkup/updateAutoWorkupFile.py
@@ -3,6 +3,7 @@
 import os
 import textwrap

+
 class UpdateAutoWorkup():

     def _getBlackList(self):
diff --git a/AutoWorkup/updateAutoWorkupFile_DWI.py b/AutoWorkup/updateAutoWorkupFile_DWI.py
index f156d5dd..e634aa6f 100644
--- a/AutoWorkup/updateAutoWorkupFile_DWI.py
+++ b/AutoWorkup/updateAutoWorkupFile_DWI.py
@@ -4,6 +4,7 @@
 import textwrap
 import sqlite3 as lite

+
 class UpdateAutoWorkup():

     def updateAutoWorkup(self):
@@ -37,6 +38,7 @@ def _generateNewPathName(self):
         newPath = os.path.join(dirname, "{}_{}".format(inputArguments.modality, basename))
         return newPath

+
 class MakeNewImageDict():

     def __init__(self):
@@ -51,7 +53,7 @@ def __init__(self):
         self._fillDB()

     def _makeNewImagesFile(self):
-        command = 'find %s -name "*_DWI_CONCAT_QCed.nrrd" |awk -F/ \'{print "%s," $5 "," $6 "," $7 "," $0}\' |tee %s'%(
+        command = 'find %s -name "*_DWI_CONCAT_QCed.nrrd" |awk -F/ \'{print "%s," $5 "," $6 "," $7 "," $0}\' |tee %s' % (
             inputArguments.inputDir, inputArguments.modality, self.newImagesFilepath)
         os.system(command)
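The find|awk|tee pipeline above emits one CSV row per QCed DWI file: the modality, then the 5th through 7th '/'-separated path components (expected to be project, subject, and session at the depth implied by inputDir), then the full path. A hypothetical emitted row, with all names invented for illustration:

    # DWI,PROJ_X,0001,12345,/some/mount/point/PROJ_X/0001/12345/0001_12345_DWI_CONCAT_QCed.nrrd

Because awk counts fields positionally, the row is only well formed when inputDir sits exactly three components below the filesystem root of that layout.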
@@ -60,7 +62,7 @@ def _createCommandList(self):
         for row in handle:
             if handle.line_num > 1:
                 if len(row) == 5:
-                    imageInfo = {'modality': row[0],'project': row[1], 'subject': row[2],
+                    imageInfo = {'modality': row[0], 'project': row[1], 'subject': row[2],
                                  'session': row[3], 'filepath': row[4]}
                     sqlCommand = self._makeSQLiteCommand(imageInfo)
                     self._appendCommand(sqlCommand)
@@ -70,10 +72,10 @@ def _createCommandList(self):

     def _makeDB(self):
         if os.path.exists(self.dbName):
             os.remove(self.dbName)
-        dbColTypes = "modality TEXT, project TEXT,  subject TEXT, session TEXT, filepath TEXT"
+        dbColTypes = "modality TEXT, project TEXT, subject TEXT, session TEXT, filepath TEXT"
         con = lite.connect(self.dbName)
         dbCur = con.cursor()
-        dbCur.execute("CREATE TABLE {dbTableName}({dbColTypes});".format(dbColTypes=dbColTypes,dbTableName=self.dbTableName))
+        dbCur.execute("CREATE TABLE {dbTableName}({dbColTypes});".format(dbColTypes=dbColTypes, dbTableName=self.dbTableName))
         dbCur.close()

     def _fillDB(self):
@@ -97,7 +99,7 @@ def _appendCommand(self, val):
         self.commandList.append(val)

     def getNewImagesList(self, project, subject, session):
-        sqlQuery = self._makeDBquery( project, subject, session)
+        sqlQuery = self._makeDBquery(project, subject, session)
         dbInfo = self._getInfoFromDB(sqlQuery)
         newImages = list()
         for item in dbInfo: