From 36cf9161a85f263ab481c54a4f3225e7fc1c83d0 Mon Sep 17 00:00:00 2001
From: "Diego Alvarez S."
Date: Mon, 9 Dec 2024 03:03:43 -0300
Subject: [PATCH] Solve languageserver errors in subworkflows

---
 subworkflows/local/binning.nf                |  6 +--
 subworkflows/local/binning_preparation.nf    | 14 +++--
 subworkflows/local/binning_refinement.nf     | 20 ++++----
 subworkflows/local/depths.nf                 |  8 +--
 subworkflows/local/gtdbtk.nf                 |  5 +-
 subworkflows/local/longread_preprocessing.nf |  2 +-
 subworkflows/local/tiara.nf                  | 54 ++++++++++----------
 7 files changed, 57 insertions(+), 52 deletions(-)

diff --git a/subworkflows/local/binning.nf b/subworkflows/local/binning.nf
index 51caaeb9..eb6a7661 100644
--- a/subworkflows/local/binning.nf
+++ b/subworkflows/local/binning.nf
@@ -16,7 +16,7 @@ include { FASTA_BINNING_CONCOCT } from '../../subworkflows/nf-co
 workflow BINNING {
     take:
     assemblies // channel: [ val(meta), path(assembly), path(bams), path(bais) ]
-    reads      // channel: [ val(meta), [ reads ] ]
+    _reads     // channel: [ val(meta), [ reads ] ]
 
     main:
 
@@ -24,7 +24,7 @@ workflow BINNING {
 
     // generate coverage depths for each contig
     ch_summarizedepth_input = assemblies
-        .map { meta, assembly, bams, bais ->
+        .map { meta, _assembly, bams, bais ->
             [ meta, bams, bais ]
         }
 
@@ -45,7 +45,7 @@ workflow BINNING {
             [ meta_new, assembly, bams, bais ]
         }
         .join( ch_metabat_depths, by: 0 )
-        .map { meta, assembly, bams, bais, depths ->
+        .map { meta, assembly, _bams, _bais, depths ->
             [ meta, assembly, depths ]
         }
 
diff --git a/subworkflows/local/binning_preparation.nf b/subworkflows/local/binning_preparation.nf
index 60f63a26..9170594b 100644
--- a/subworkflows/local/binning_preparation.nf
+++ b/subworkflows/local/binning_preparation.nf
@@ -21,20 +21,24 @@ workflow BINNING_PREPARATION {
             .combine(reads)
     } else if (params.binning_map_mode == 'group'){
         // combine assemblies with reads of samples from same group
-        ch_reads_bowtie2 = reads.map{ meta, reads -> [ meta.group, meta, reads ] }
+        ch_reads_bowtie2 = reads.map{ meta, sample_reads -> [ meta.group, meta, sample_reads ] }
         ch_bowtie2_input = BOWTIE2_ASSEMBLY_BUILD.out.assembly_index
             .map { meta, assembly, index -> [ meta.group, meta, assembly, index ] }
             .combine(ch_reads_bowtie2, by: 0)
-            .map { group, assembly_meta, assembly, index, reads_meta, reads -> [ assembly_meta, assembly, index, reads_meta, reads ] }
+            .map { _group, assembly_meta, assembly, index, reads_meta, sample_reads ->
+                [ assembly_meta, assembly, index, reads_meta, sample_reads ]
+            }
 
     } else {
         // i.e. --binning_map_mode 'own'
         // combine assemblies (not co-assembled) with reads from own sample
-        ch_reads_bowtie2 = reads.map{ meta, reads -> [ meta.id, meta, reads ] }
+        ch_reads_bowtie2 = reads.map{ meta, sample_reads -> [ meta.id, meta, sample_reads ] }
         ch_bowtie2_input = BOWTIE2_ASSEMBLY_BUILD.out.assembly_index
             .map { meta, assembly, index -> [ meta.id, meta, assembly, index ] }
             .combine(ch_reads_bowtie2, by: 0)
-            .map { id, assembly_meta, assembly, index, reads_meta, reads -> [ assembly_meta, assembly, index, reads_meta, reads ] }
+            .map { _id, assembly_meta, assembly, index, reads_meta, sample_reads ->
+                [ assembly_meta, assembly, index, reads_meta, sample_reads ]
+            }
     }
 
 
@@ -45,7 +49,7 @@ workflow BINNING_PREPARATION {
         .map { meta, assembly, bams, bais -> [ meta, assembly.sort()[0], bams, bais ] } // multiple symlinks to the same assembly -> use first of sorted list
 
     emit:
-    bowtie2_assembly_multiqc = BOWTIE2_ASSEMBLY_ALIGN.out.log.map { assembly_meta, reads_meta, log -> [ log ] }
+    bowtie2_assembly_multiqc = BOWTIE2_ASSEMBLY_ALIGN.out.log.map { _assembly_meta, _reads_meta, log -> [ log ] }
     bowtie2_version          = BOWTIE2_ASSEMBLY_ALIGN.out.versions
     grouped_mappings         = ch_grouped_mappings
 }
diff --git a/subworkflows/local/binning_refinement.nf b/subworkflows/local/binning_refinement.nf
index f92bf0cb..ba3210ea 100644
--- a/subworkflows/local/binning_refinement.nf
+++ b/subworkflows/local/binning_refinement.nf
@@ -16,7 +16,7 @@ include { RENAME_POSTDASTOOL } from
 workflow BINNING_REFINEMENT {
     take:
     ch_contigs_for_dastool // channel: [ val(meta), path(contigs) ]
-    bins // channel: [ val(meta), path(bins) ]
+    bins                   // channel: [ val(meta), path(bins) ]
 
     main:
     ch_versions = Channel.empty()
@@ -24,13 +24,13 @@ workflow BINNING_REFINEMENT {
     // remove domain information, will add it back later
     // everything here is either unclassified or a prokaryote
     ch_bins = bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta - meta.subMap(['domain','refinement'])
-            [meta_new, bins]
+            [meta_new, bin_list]
        }
        .groupTuple()
        .map {
-            meta, bins -> [meta, bins.flatten()]
+            meta, bin_list -> [meta, bin_list.flatten()]
        }
 
     // prepare bins
@@ -87,27 +87,27 @@
         }
         .groupTuple()
         .map {
-            meta, bins ->
+            meta, bin_list ->
             def domain_class = params.bin_domain_classification ? 'prokarya' : 'unclassified'
             def meta_new = meta + [refinement: 'dastool_refined', domain: domain_class]
-            [ meta_new, bins ]
+            [ meta_new, bin_list ]
         }
 
     ch_input_for_renamedastool = DASTOOL_DASTOOL.out.bins
         .map {
-            meta, bins ->
+            meta, bin_list ->
             def domain_class = params.bin_domain_classification ? 'prokarya' : 'unclassified'
             def meta_new = meta + [refinement: 'dastool_refined', binner: 'DASTool', domain: domain_class]
-            [ meta_new, bins ]
+            [ meta_new, bin_list ]
         }
 
     RENAME_POSTDASTOOL ( ch_input_for_renamedastool )
 
     refined_unbins = RENAME_POSTDASTOOL.out.refined_unbins
         .map {
-            meta, bins ->
+            meta, bin_list ->
             def meta_new = meta + [refinement: 'dastool_refined_unbinned']
-            [meta_new, bins]
+            [meta_new, bin_list]
         }
 
     emit:
diff --git a/subworkflows/local/depths.nf b/subworkflows/local/depths.nf
index a2b69c95..73dbfb06 100644
--- a/subworkflows/local/depths.nf
+++ b/subworkflows/local/depths.nf
@@ -6,7 +6,7 @@ include { MAG_DEPTHS_SUMMARY } from '../../modules/local/mag_
  * Get number of columns in file (first line)
  */
 def getColNo(filename) {
-    lines = file(filename).readLines()
+    def lines = file(filename).readLines()
     return lines[0].split('\t').size()
 }
 
@@ -14,7 +14,7 @@
  * Get number of rows in a file
  */
 def getRowNo(filename) {
-    lines = file(filename).readLines()
+    def lines = file(filename).readLines()
     return lines.size()
 }
 
@@ -40,7 +40,7 @@ workflow DEPTHS {
         .combine(depths, by: 0)
         .transpose()
         .map {
-            meta_combine, meta, bins, depth ->
+            _meta_combine, meta, bins, depth ->
             def meta_new = meta - meta.subMap('domain','refinement')
             [meta_new, bins, depth]
         }
@@ -58,7 +58,7 @@ workflow DEPTHS {
     // Plot bin depths heatmap for each assembly and mapped samples (according to `binning_map_mode`)
     // create file containing group information for all samples
     ch_sample_groups = reads
-        .collectFile(name:'sample_groups.tsv'){ meta, reads -> meta.id + '\t' + meta.group + '\n' }
+        .collectFile(name:'sample_groups.tsv'){ meta, _sample_reads -> meta.id + '\t' + meta.group + '\n' }
 
     // Filter MAG depth files: use only those for plotting that contain depths for > 2 samples
     // as well as > 2 bins
diff --git a/subworkflows/local/gtdbtk.nf b/subworkflows/local/gtdbtk.nf
index 6da5680d..9d01dea6 100644
--- a/subworkflows/local/gtdbtk.nf
+++ b/subworkflows/local/gtdbtk.nf
@@ -24,7 +24,8 @@ workflow GTDBTK {
             .map { row ->
                 def completeness = -1
                 def contamination = -1
-                def missing, duplicated
+                def missing
+                def duplicated
                 def busco_db = file(params.busco_db)
                 if (busco_db.getBaseName().contains('odb10')) {
                     missing = row.'%Missing (specific)' // TODO or just take '%Complete'?
@@ -54,7 +55,7 @@ workflow GTDBTK {
         .transpose()
         .map { meta, bin -> [bin.getName(), bin, meta]}
         .join(ch_bin_metrics, failOnDuplicate: true)
-        .map { bin_name, bin, meta, completeness, contamination -> [meta, bin, completeness, contamination] }
+        .map { _bin_name, bin, meta, completeness, contamination -> [meta, bin, completeness, contamination] }
         .branch {
             passed: (it[2] != -1 && it[2] >= params.gtdbtk_min_completeness && it[3] != -1 && it[3] <= params.gtdbtk_max_contamination)
                 return [it[0], it[1]]
diff --git a/subworkflows/local/longread_preprocessing.nf b/subworkflows/local/longread_preprocessing.nf
index ec434858..8fb257f3 100644
--- a/subworkflows/local/longread_preprocessing.nf
+++ b/subworkflows/local/longread_preprocessing.nf
@@ -67,7 +67,7 @@ workflow LONGREAD_PREPROCESSING {
         ch_short_and_long_reads = ch_long_reads
             .map { meta, lr -> [ meta.id, meta, lr ] }
             .join(ch_short_reads_tmp, by: 0)
-            .map { id, meta_lr, lr, meta_sr, sr -> [ meta_lr, sr, lr ] } // should not occur for single-end, since SPAdes (hybrid) does not support single-end
+            .map { _id, meta_lr, lr, _meta_sr, sr -> [ meta_lr, sr, lr ] } // should not occur for single-end, since SPAdes (hybrid) does not support single-end
 
         FILTLONG (
             ch_short_and_long_reads
diff --git a/subworkflows/local/tiara.nf b/subworkflows/local/tiara.nf
index ab274cc8..8ea29bea 100644
--- a/subworkflows/local/tiara.nf
+++ b/subworkflows/local/tiara.nf
@@ -13,16 +13,16 @@ workflow TIARA {
     ch_versions = Channel.empty()
 
     bins = bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
             def meta_new = meta + [bin: 'bins']
             meta_new.bin = 'bins'
-            [meta_new, bins]
+            [meta_new, bin_list]
         }
 
     unbins = unbins
-        .map { meta, unbins ->
+        .map { meta, unbin_list ->
             def meta_new = meta + [bin: 'unbins']
-            [meta_new, unbins]
+            [meta_new, unbin_list]
         }
 
     ch_tiara_input = bins.mix(unbins)
@@ -38,54 +38,54 @@ workflow TIARA {
     // Have to remove binner information from the meta map to do this
     ch_contigs_to_bin_tiara = DASTOOL_FASTATOCONTIG2BIN_TIARA.out.fastatocontig2bin
         .combine(ch_tiara_input, by: 0)
-        .map { meta, contig2bin, bins ->
+        .map { meta, contig2bin, bin_list ->
            def meta_join = meta - meta.subMap('binner', 'bin')
-            [ meta_join, meta, contig2bin, bins ]
+            [ meta_join, meta, contig2bin, bin_list ]
        }
 
     ch_tiara_classify_input = ch_contigs_to_bin_tiara
         .combine( TIARA_TIARA.out.classifications, by: 0)
-        .map { meta_join, meta, contig2bin, bins, classifications ->
-            [ meta, classifications, contig2bin, bins ]
+        .map { _meta_join, meta, contig2bin, bin_list, classifications ->
+            [ meta, classifications, contig2bin, bin_list ]
        }
 
     TIARA_CLASSIFY( ch_tiara_classify_input )
     ch_versions = ch_versions.mix(TIARA_CLASSIFY.out.versions.first())
 
     ch_eukarya_bins = TIARA_CLASSIFY.out.eukarya_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
             def meta_new = meta + [domain: 'eukarya']
-            [meta_new, bins]
+            [meta_new, bin_list]
         }
 
     ch_prokarya_bins = TIARA_CLASSIFY.out.prokarya_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
             def meta_new = meta + [domain: 'prokarya']
-            [meta_new, bins]
+            [meta_new, bin_list]
         }
 
     ch_bacteria_bins = TIARA_CLASSIFY.out.bacteria_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
             def meta_new = meta + [domain: 'bacteria']
-            [meta_new, bins]
+            [meta_new, bin_list]
         }
 
     ch_archaea_bins = TIARA_CLASSIFY.out.archaea_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
             def meta_new = meta + [domain: 'archaea']
-            [meta_new, bins]
+            [meta_new, bin_list]
         }
 
     ch_organelle_bins = TIARA_CLASSIFY.out.organelle_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
             def meta_new = meta + [domain: 'organelle']
-            [meta_new, bins]
+            [meta_new, bin_list]
         }
 
     ch_unknown_bins = TIARA_CLASSIFY.out.unknown_bins
-        .map { meta, bins ->
+        .map { meta, bin_list ->
             def meta_new = meta + [domain: 'unknown']
-            [meta_new, bins]
+            [meta_new, bin_list]
         }
 
     ch_classified_bins_unbins = ch_eukarya_bins
@@ -96,25 +96,25 @@ workflow TIARA {
         .mix(ch_unknown_bins)
 
     ch_classified_bins = ch_classified_bins_unbins
-        .filter { meta, bins ->
+        .filter { meta, _bin_list ->
            meta.bin == "bins"
        }
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta - meta.subMap('bin')
-            [meta_new, bins]
+            [meta_new, bin_list]
        }
 
     ch_classified_unbins = ch_classified_bins_unbins
-        .filter { meta, bins ->
+        .filter { meta, _bin_list ->
            meta.bin == "unbins"
        }
-        .map { meta, bins ->
+        .map { meta, bin_list ->
            def meta_new = meta - meta.subMap('bin')
-            [meta_new, bins]
+            [meta_new, bin_list]
        }
 
     ch_bin_classifications = TIARA_CLASSIFY.out.bin_classifications
-        .map { meta, classification ->
+        .map { _meta, classification ->
             [ classification ]
         }
         .collect()
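
For reference, the pattern the patch applies across all seven subworkflows can be summarized in a minimal standalone sketch (the DEMO workflow and its channel shape below are made up for illustration and are not part of nf-core/mag): prefix closure or take parameters that are never used with an underscore, rename inner closure parameters so they no longer shadow an outer channel name such as reads or bins, and declare closure-local variables with def, so that the Nextflow language server stops reporting the corresponding diagnostics.

    workflow DEMO {
        take:
        reads // channel: [ val(meta), [ reads ] ]

        main:
        // The inner parameter is renamed so it does not shadow the `reads` channel,
        // underscore-prefixed because the closure never uses it, and the local
        // variable is declared with `def` instead of being created implicitly.
        ch_ids = reads
            .map { meta, _sample_reads ->
                def sample_id = meta.id
                [ sample_id, meta ]
            }

        emit:
        ids = ch_ids
    }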