diff --git a/.editorconfig b/.editorconfig
index 72dda289..6d9b74cc 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -31,3 +31,7 @@ indent_size = unset
# ignore python and markdown
[*.{py,md}]
indent_style = unset
+
+# ignore ro-crate metadata files
+[**/ro-crate-metadata.json]
+insert_final_newline = unset
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index ff3408fe..424b4138 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -1,4 +1,4 @@
-# nf-core/oncoanalyser: Contributing Guidelines
+# `nf-core/oncoanalyser`: Contributing Guidelines
Hi there!
Many thanks for taking an interest in improving nf-core/oncoanalyser.
@@ -55,9 +55,9 @@ These tests are run both with the latest available version of `Nextflow` and als
:warning: Only in the unlikely and regretful event of a release happening with a bug.
-- On your own fork, make a new branch `patch` based on `upstream/master`.
+- On your own fork, make a new branch `patch` based on `upstream/main` or `upstream/master`.
- Fix the bug, and bump version (X.Y.Z+1).
-- A PR should be made on `master` from patch to directly this particular bug.
+- Open a pull-request from `patch` to `main`/`master` with the changes.
## Getting help
@@ -65,13 +65,13 @@ For further information/help, please consult the [nf-core/oncoanalyser documenta
## Pipeline contribution conventions
-To make the nf-core/oncoanalyser code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written.
+To make the `nf-core/oncoanalyser` code and processing logic more understandable for new contributors and to ensure quality, we semi-standardise the way the code and other contributions are written.
### Adding a new step
If you wish to contribute a new step, please use the following coding standards:
-1. Define the corresponding input channel into your new process from the expected previous process channel
+1. Define the corresponding input channel into your new process from the expected previous process channel.
2. Write the process block (see below).
3. Define the output channel if needed (see below).
4. Add any new parameters to `nextflow.config` with a default (see below).
@@ -84,7 +84,7 @@ If you wish to contribute a new step, please use the following coding standards:
### Default values
-Parameters should be initialised / defined with default values in `nextflow.config` under the `params` scope.
+Parameters should be initialised / defined with default values within the `params` scope in `nextflow.config`.
Once there, use `nf-core pipelines schema build` to add to `nextflow_schema.json`.
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 93b3c04b..08d84845 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -9,7 +9,6 @@ body:
- [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting)
- [nf-core/oncoanalyser pipeline documentation](https://nf-co.re/oncoanalyser/usage)
-
- type: textarea
id: description
attributes:
diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml
index 02d90d9d..202b473b 100644
--- a/.github/workflows/awsfulltest.yml
+++ b/.github/workflows/awsfulltest.yml
@@ -1,11 +1,12 @@
name: nf-core AWS full size tests
-# This workflow is triggered on PRs opened against the master branch.
+# This workflow is triggered on PRs opened against the main/master branch.
# It can be additionally triggered manually with GitHub actions workflow dispatch button.
# It runs the -profile 'test_full' on AWS batch
on:
pull_request:
branches:
+ - main
- master
workflow_dispatch:
pull_request_review:
@@ -18,18 +19,30 @@ jobs:
if: github.repository == 'nf-core/oncoanalyser' && github.event.review.state == 'approved' && github.event.pull_request.base.ref == 'master' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
steps:
- - uses: octokit/request-action@v2.x
+ - name: Get PR reviews
+ uses: octokit/request-action@v2.x
+ if: github.event_name != 'workflow_dispatch'
id: check_approvals
+ continue-on-error: true
with:
- route: GET /repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews
+ route: GET /repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/reviews?per_page=100
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- - id: test_variables
+
+ - name: Check for approvals
+ if: ${{ failure() && github.event_name != 'workflow_dispatch' }}
+ run: |
+ echo "No review approvals found. At least 2 approvals are required to run this action automatically."
+ exit 1
+
+ - name: Check for enough approvals (>=2)
+ id: test_variables
if: github.event_name != 'workflow_dispatch'
run: |
JSON_RESPONSE='${{ steps.check_approvals.outputs.data }}'
CURRENT_APPROVALS_COUNT=$(echo $JSON_RESPONSE | jq -c '[.[] | select(.state | contains("APPROVED")) ] | length')
test $CURRENT_APPROVALS_COUNT -ge 2 || exit 1 # At least 2 approvals are required
+
- name: Launch workflow via Seqera Platform
uses: seqeralabs/action-tower-launch@v2
# TODO nf-core: You can customise AWS full pipeline tests as required
diff --git a/.github/workflows/branch.yml b/.github/workflows/branch.yml
index 0494f13f..026488ea 100644
--- a/.github/workflows/branch.yml
+++ b/.github/workflows/branch.yml
@@ -1,15 +1,17 @@
name: nf-core branch protection
-# This workflow is triggered on PRs to master branch on the repository
-# It fails when someone tries to make a PR against the nf-core `master` branch instead of `dev`
+# This workflow is triggered on PRs to `main`/`master` branch on the repository
+# It fails when someone tries to make a PR against the nf-core `main`/`master` branch instead of `dev`
on:
pull_request_target:
- branches: [master]
+ branches:
+ - main
+ - master
jobs:
test:
runs-on: ubuntu-latest
steps:
- # PRs to the nf-core repo master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches
+ # PRs to the nf-core repo main/master branch are only ok if coming from the nf-core repo `dev` or any `patch` branches
- name: Check PRs
if: github.repository == 'nf-core/oncoanalyser'
run: |
@@ -22,7 +24,7 @@ jobs:
uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2
with:
message: |
- ## This PR is against the `master` branch :x:
+ ## This PR is against the `${{github.event.pull_request.base.ref}}` branch :x:
* Do not close this PR
* Click _Edit_ and change the `base` to `dev`
@@ -32,9 +34,9 @@ jobs:
Hi @${{ github.event.pull_request.user.login }},
- It looks like this pull-request is has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `master` branch.
- The `master` branch on nf-core repositories should always contain code from the latest release.
- Because of this, PRs to `master` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch.
+      It looks like this pull-request has been made against the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `${{github.event.pull_request.base.ref}}` branch.
+      The `${{github.event.pull_request.base.ref}}` branch on nf-core repositories should always contain code from the latest release.
+      Because of this, PRs to `${{github.event.pull_request.base.ref}}` are only allowed if they come from the [${{github.event.pull_request.head.repo.full_name }}](https://github.com/${{github.event.pull_request.head.repo.full_name }}) `dev` branch.
You do not need to close this PR, you can change the target branch to `dev` by clicking the _"Edit"_ button at the top of this page.
Note that even after this, the test will continue to show as failing until you push a new commit.
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 76eef1d7..d7a1b268 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -45,7 +45,9 @@ jobs:
profile: "singularity"
steps:
- name: Check out pipeline code
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+ with:
+ fetch-depth: 0
- name: Set up Nextflow
uses: nf-core/setup-nextflow@v2
diff --git a/.github/workflows/download_pipeline.yml b/.github/workflows/download_pipeline.yml
index 50eae814..ab06316e 100644
--- a/.github/workflows/download_pipeline.yml
+++ b/.github/workflows/download_pipeline.yml
@@ -2,7 +2,7 @@ name: Test successful pipeline download with 'nf-core pipelines download'
# Run the workflow when:
# - dispatched manually
-# - when a PR is opened or reopened to master branch
+# - when a PR is opened or reopened to main/master branch
# - the head branch of the pull request is updated, i.e. if fixes for a release are pushed last minute to dev.
on:
workflow_dispatch:
@@ -17,17 +17,34 @@ on:
- edited
- synchronize
branches:
+ - main
- master
pull_request_target:
branches:
+ - main
- master
env:
NXF_ANSI_LOG: false
jobs:
+ configure:
+ runs-on: ubuntu-latest
+ outputs:
+ REPO_LOWERCASE: ${{ steps.get_repo_properties.outputs.REPO_LOWERCASE }}
+ REPOTITLE_LOWERCASE: ${{ steps.get_repo_properties.outputs.REPOTITLE_LOWERCASE }}
+ REPO_BRANCH: ${{ steps.get_repo_properties.outputs.REPO_BRANCH }}
+ steps:
+ - name: Get the repository name and current branch
+ id: get_repo_properties
+ run: |
+ echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> "$GITHUB_OUTPUT"
+ echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> "$GITHUB_OUTPUT"
+ echo "REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> "$GITHUB_OUTPUT"
+
download:
runs-on: ubuntu-latest
+ needs: configure
steps:
- name: Install Nextflow
uses: nf-core/setup-nextflow@v2
@@ -35,7 +52,7 @@ jobs:
- name: Disk space cleanup
uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1
- - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5
+ - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5
with:
python-version: "3.12"
architecture: "x64"
@@ -50,12 +67,6 @@ jobs:
python -m pip install --upgrade pip
pip install git+https://github.com/nf-core/tools.git@dev
- - name: Get the repository name and current branch set as environment variable
- run: |
- echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV}
- echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV}
- echo "REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> ${GITHUB_ENV}
-
- name: Make a cache directory for the container images
run: |
mkdir -p ./singularity_container_images
@@ -64,24 +75,27 @@ jobs:
env:
NXF_SINGULARITY_CACHEDIR: ./singularity_container_images
run: |
- nf-core pipelines download ${{ env.REPO_LOWERCASE }} \
- --revision ${{ env.REPO_BRANCH }} \
- --outdir ./${{ env.REPOTITLE_LOWERCASE }} \
+ nf-core pipelines download ${{ needs.configure.outputs.REPO_LOWERCASE }} \
+ --revision ${{ needs.configure.outputs.REPO_BRANCH }} \
+ --outdir ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }} \
--compress "none" \
--container-system 'singularity' \
- --container-library "quay.io" -l "docker.io" -l "community.wave.seqera.io" \
+ --container-library "quay.io" -l "docker.io" -l "community.wave.seqera.io/library/" \
--container-cache-utilisation 'amend' \
--download-configuration 'yes'
- name: Inspect download
- run: tree ./${{ env.REPOTITLE_LOWERCASE }}
+ run: tree ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }}
+
+ - name: Inspect container images
+ run: tree ./singularity_container_images | tee ./container_initial
- name: Count the downloaded number of container images
id: count_initial
run: |
image_count=$(ls -1 ./singularity_container_images | wc -l | xargs)
echo "Initial container image count: $image_count"
- echo "IMAGE_COUNT_INITIAL=$image_count" >> ${GITHUB_ENV}
+ echo "IMAGE_COUNT_INITIAL=$image_count" >> "$GITHUB_OUTPUT"
- name: Run the downloaded pipeline (stub)
id: stub_run_pipeline
@@ -89,30 +103,31 @@ jobs:
env:
NXF_SINGULARITY_CACHEDIR: ./singularity_container_images
NXF_SINGULARITY_HOME_MOUNT: true
- run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test_stub,singularity --outdir ./results
+ run: nextflow run ./${{needs.configure.outputs.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ needs.configure.outputs.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results
- name: Run the downloaded pipeline (stub run not supported)
id: run_pipeline
- if: ${{ job.steps.stub_run_pipeline.status == failure() }}
+ if: ${{ steps.stub_run_pipeline.outcome == 'failure' }}
env:
NXF_SINGULARITY_CACHEDIR: ./singularity_container_images
NXF_SINGULARITY_HOME_MOUNT: true
- run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -profile test_stub,singularity --outdir ./results
+ run: nextflow run ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ needs.configure.outputs.REPO_BRANCH }}) -profile test,singularity --outdir ./results
- name: Count the downloaded number of container images
id: count_afterwards
run: |
image_count=$(ls -1 ./singularity_container_images | wc -l | xargs)
echo "Post-pipeline run container image count: $image_count"
- echo "IMAGE_COUNT_AFTER=$image_count" >> ${GITHUB_ENV}
+ echo "IMAGE_COUNT_AFTER=$image_count" >> "$GITHUB_OUTPUT"
- name: Compare container image counts
run: |
- if [ "${{ env.IMAGE_COUNT_INITIAL }}" -ne "${{ env.IMAGE_COUNT_AFTER }}" ]; then
- initial_count=${{ env.IMAGE_COUNT_INITIAL }}
- final_count=${{ env.IMAGE_COUNT_AFTER }}
+ if [ "${{ steps.count_initial.outputs.IMAGE_COUNT_INITIAL }}" -ne "${{ steps.count_afterwards.outputs.IMAGE_COUNT_AFTER }}" ]; then
+ initial_count=${{ steps.count_initial.outputs.IMAGE_COUNT_INITIAL }}
+ final_count=${{ steps.count_afterwards.outputs.IMAGE_COUNT_AFTER }}
difference=$((final_count - initial_count))
echo "$difference additional container images were \n downloaded at runtime . The pipeline has no support for offline runs!"
- tree ./singularity_container_images
+ tree ./singularity_container_images > ./container_afterwards
+ diff ./container_initial ./container_afterwards
exit 1
else
echo "The pipeline can be downloaded successfully!"
diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml
index 02ca9338..ba1787ce 100644
--- a/.github/workflows/fix-linting.yml
+++ b/.github/workflows/fix-linting.yml
@@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest
steps:
# Use the @nf-core-bot token to check out so we can push later
- - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
token: ${{ secrets.nf_core_bot_auth_token }}
@@ -32,7 +32,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}
# Install and run pre-commit
- - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5
+ - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5
with:
python-version: "3.12"
diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml
index a502573c..dbd52d5a 100644
--- a/.github/workflows/linting.yml
+++ b/.github/workflows/linting.yml
@@ -14,10 +14,10 @@ jobs:
pre-commit:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4
+ - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Set up Python 3.12
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5
+ uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5
with:
python-version: "3.12"
@@ -31,12 +31,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out pipeline code
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- name: Install Nextflow
uses: nf-core/setup-nextflow@v2
- - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5
+ - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5
with:
python-version: "3.12"
architecture: "x64"
@@ -74,7 +74,7 @@ jobs:
- name: Upload linting log file artifact
if: ${{ always() }}
- uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4
+ uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4
with:
name: linting-logs
path: |
diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml
index 42e519bf..0bed96d3 100644
--- a/.github/workflows/linting_comment.yml
+++ b/.github/workflows/linting_comment.yml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download lint results
- uses: dawidd6/action-download-artifact@bf251b5aa9c2f7eeb574a96ee720e24f801b7c11 # v6
+ uses: dawidd6/action-download-artifact@80620a5d27ce0ae443b965134db88467fc607b43 # v7
with:
workflow: linting.yml
workflow_conclusion: completed
diff --git a/.github/workflows/release-announcements.yml b/.github/workflows/release-announcements.yml
index c6ba35df..450b1d5e 100644
--- a/.github/workflows/release-announcements.yml
+++ b/.github/workflows/release-announcements.yml
@@ -31,7 +31,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5
+ - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5
with:
python-version: "3.10"
- name: Install dependencies
diff --git a/.github/workflows/template_version_comment.yml b/.github/workflows/template_version_comment.yml
index e8aafe44..537529bc 100644
--- a/.github/workflows/template_version_comment.yml
+++ b/.github/workflows/template_version_comment.yml
@@ -9,7 +9,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out pipeline code
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
ref: ${{ github.event.pull_request.head.sha }}
diff --git a/.gitpod.yml b/.gitpod.yml
index 46118637..83599f63 100644
--- a/.gitpod.yml
+++ b/.gitpod.yml
@@ -6,12 +6,5 @@ tasks:
nextflow self-update
vscode:
- extensions: # based on nf-core.nf-core-extensionpack
- #- esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code
- - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files
- - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar
- - mechatroner.rainbow-csv # Highlight columns in csv files in different colors
- - nextflow.nextflow # Nextflow syntax highlighting
- - oderwat.indent-rainbow # Highlight indentation level
- - streetsidesoftware.code-spell-checker # Spelling checker for source code
- - charliermarsh.ruff # Code linter Ruff
+ extensions:
+ - nf-core.nf-core-extensionpack # https://github.com/nf-core/vscode-extensionpack
diff --git a/.nf-core.yml b/.nf-core.yml
index 61198068..87579679 100644
--- a/.nf-core.yml
+++ b/.nf-core.yml
@@ -1,5 +1,16 @@
+lint:
+ actions_ci: false
+ files_exist:
+ - lib/Utils.groovy
+ - lib/WorkflowMain.groovy
+ - lib/WorkflowOncoanalyser.groovy
+ multiqc_config: false
+ nextflow_config:
+ - config_defaults:
+ - params.fastp_umi_length
+ - params.fastp_umi_skip
+nf_core_version: 3.1.2
repository_type: pipeline
-nf_core_version: 3.0.2
template:
author: Stephen Watts
description: A comprehensive cancer DNA/RNA analysis and reporting pipeline
@@ -8,19 +19,4 @@ template:
name: oncoanalyser
org: nf-core
outdir: .
- skip_features: null
version: 1.1.0dev
-lint:
- actions_ci: False
- multiqc_config: False
- files_exist:
- - lib/Utils.groovy
- - lib/WorkflowMain.groovy
- - lib/WorkflowOncoanalyser.groovy
- nextflow_config:
- - config_defaults:
- - params.fastp_umi_length
- - params.fastp_umi_skip
-bump_version: null
-org_path: null
-update: null
diff --git a/.prettierignore b/.prettierignore
index 437d763d..edd29f01 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -10,3 +10,4 @@ testing/
testing*
*.pyc
bin/
+ro-crate-metadata.json
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000..a33b527c
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,3 @@
+{
+ "markdown.styles": ["public/vscode_markdown.css"]
+}
diff --git a/LICENSE b/LICENSE
index f92ae0c3..4e7e7bd7 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) Stephen Watts
+Copyright (c) The nf-core/oncoanalyser team
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/README.md b/README.md
index 10e016c2..24797dc2 100644
--- a/README.md
+++ b/README.md
@@ -151,8 +151,10 @@ channel](https://nfcore.slack.com/channels/oncoanalyser) (you can join with [thi
## Citations
-You can cite the `oncoanalyser` zenodo record for a specific version using the following doi:
-[10.5281/zenodo.XXXXXXX](https://doi.org/10.5281/zenodo.XXXXXXX)
+
+
+
+
An extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md)
file.
diff --git a/conf/base.config b/conf/base.config
index 2036ebb7..132d1776 100644
--- a/conf/base.config
+++ b/conf/base.config
@@ -20,7 +20,7 @@ process {
maxErrors = '-1'
// Process-specific resource requirements
- // NOTE - Please try and re-use the labels below as much as possible.
+ // NOTE - Please try and reuse the labels below as much as possible.
// These labels are used and recognised by default in DSL2 files hosted on nf-core/modules.
// If possible, it would be nice to keep the same label naming convention when
// adding in your local modules too.
diff --git a/docs/usage.md b/docs/usage.md
index 0f28947f..cdbc2948 100644
--- a/docs/usage.md
+++ b/docs/usage.md
@@ -21,7 +21,9 @@ These features enable `oncoanalyser` to be run in a highly flexible way. For exa
existing PURPLE data as the starting point and skip variant calling processes. Additionally, reference/resource files
can be staged locally to optimise execution or modified to create user-defined driver gene panels.
-:::danger
+
+
+:::danger{title=Caution}
When starting from BAMs rather than FASTQ it is expected that:
@@ -48,16 +50,8 @@ samplesheet. The supported analysis types for each workflow are listed below.
\* Supported analyses relate to the TSO500 panel only
-:::note
-
-The default settings of `oncoanalyser` will accommodate typical sequencing depths for sample inputs and each individual
-tool is generally sequencing depth agnostic. However, variant calling is optimised for 100x tumor and 40x normal when
-invoked in `wgts` mode and expects sparse high-depth read data characteristic of panel sequencing when run in `targeted`
-mode. For atypical input sequence data you may consult the [hmftools
-documentation](https://github.com/hartwigmedical/hmftools) and [configure](#custom-tool-arguments) `oncoanalyser`
-accordingly.
-
-:::
+> [!NOTE]
+> The default settings of `oncoanalyser` will accommodate typical sequencing depths for sample inputs and each individual tool is generally sequencing depth agnostic. However, variant calling is optimised for 100x tumor and 40x normal when invoked in `wgts` mode and expects sparse high-depth read data characteristic of panel sequencing when run in `targeted` mode. For atypical input sequence data you may consult the [hmftools documentation](https://github.com/hartwigmedical/hmftools) and [configure](#custom-tool-arguments) `oncoanalyser` accordingly.
## Samplesheet
@@ -72,11 +66,8 @@ during execution such as the PURPLE output directory. The full list of recognise
#### FASTQ
-:::note
-
-Currently only non-interleaved paired-end reads are accepted as FASTQ input.
-
-:::
+> [!NOTE]
+> Currently only non-interleaved paired-end reads are accepted as FASTQ input.
```csv title="samplesheet.csv"
group_id,subject_id,sample_id,sample_type,sequence_type,filetype,info,filepath
@@ -87,6 +78,8 @@ P1_wgts,P1,SC,tumor,rna,fastq,library_id:SC_library;lane:001,/path/to/P1.SC.tumo
#### BAM
+
+
:::note
Inputs with the `bam` filetype will be processed by MarkDups as required by hmftools. Where an input BAM has already
@@ -166,13 +159,8 @@ nextflow run nf-core/oncoanalyser \
This will launch the pipeline with the `docker` configuration profile. See below for more information about profiles.
-:::note
-
-Reference data will be retrieved by `oncoanalyser` for every analysis run. It is therefore strongly recommended when
-running multiple analyses to pre-stage reference data locally to avoid it being retrieved multiple times. See [Staging
-reference data](#staging-reference-data).
-
-:::
+> [!NOTE]
+> Reference data will be retrieved by `oncoanalyser` for every analysis run. It is therefore strongly recommended when running multiple analyses to pre-stage reference data locally to avoid it being retrieved multiple times. See [Staging reference data](#staging-reference-data).
Note that the pipeline will create the following files in your working directory:
@@ -187,11 +175,8 @@ If you wish to repeatedly use the same parameters for multiple runs, rather than
Pipeline settings can be provided in a `yaml` or `json` file via `-params-file `.
-:::warning
-
-Do not use `-c ` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args).
-
-:::
+> [!WARNING]
+> Do not use `-c ` to specify parameters as this will result in errors. Custom config files specified with `-c` must only be used for [tuning process resource specifications](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources), other infrastructural tweaks (such as output directories), or module arguments (args).
The above pipeline run specified with a params file in yaml format:
@@ -221,7 +206,7 @@ nextflow pull nf-core/oncoanalyser
### Reproducibility
-It is a good idea to specify a pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since.
+It is a good idea to specify the pipeline version when running the pipeline on your data. This ensures that a specific version of the pipeline code and software are used when you run your pipeline. If you keep using the same tag, you'll be running the same version of the pipeline, even if there have been changes to the code since.
First, go to the [nf-core/oncoanalyser releases page](https://github.com/nf-core/oncoanalyser/releases) and find the latest pipeline version - numeric only (eg. `1.0.0`). Then specify this when running the pipeline with `-r` (one hyphen) - eg. `-r 1.0.0`. Of course, you can switch to another version by changing the number after the `-r` flag.
@@ -229,9 +214,8 @@ This version number will be logged in reports when you run the pipeline, so that
To further assist in reproducbility, you can share and re-use [parameter files](#running-the-pipeline) to repeat pipeline runs with the same settings without having to write out a command with every single parameter.
-:::tip
-If you wish to share such profile (such as upload as supplementary material for academic publications), make sure to NOT include cluster specific paths to files, nor institutional specific profiles.
-:::
+> [!TIP]
+> If you wish to share such a profile (for example, uploading it as supplementary material for academic publications), make sure to NOT include cluster-specific paths to files, nor institution-specific profiles.
## Advanced usage
@@ -243,12 +227,8 @@ Multiple processes can be given as a comma-separated list. While there are some
skipping resource intensive processes such as VIRUSBreakend), it becomes more powerful when combined with existing
inputs as described in the following section.
-:::warning
-
-When skipping components no checks are done to identify orphan processes in the execution DAG or for redundant
-processes.
-
-:::
+> [!WARNING]
+> When skipping components no checks are done to identify orphan processes in the execution DAG or for redundant processes.
### Existing inputs
@@ -269,12 +249,8 @@ P1_wgts,P1,SB,tumor,dna,bam,/path/to/P1.SB.tumor.dna.wgs.bam
P1_wgts,P1,SB,tumor,dna,purple_dir,/path/to/P1.purple_dir/
```
-:::note
-
-The original source input file (i.e. BAM or FASTQ) must always be provided for `oncoanalyser` to infer the correct
-analysis type.
-
-:::
+> [!NOTE]
+> The original source input file (i.e. BAM or FASTQ) must always be provided for `oncoanalyser` to infer the correct analysis type.
And now run and skip variant calling:
@@ -289,12 +265,8 @@ nextflow run nf-core/oncoanalyser \
--outdir output/
```
-:::warning
-
-Providing existing inputs will cause `oncoanalyser` to skip the corresponding process but _not any_ of the upstream
-processes. It is the responsibility of the user to skip all relevant processes.
-
-:::
+> [!WARNING]
+> Providing existing inputs will cause `oncoanalyser` to skip the corresponding process but _not any_ of the upstream processes. It is the responsibility of the user to skip all relevant processes.
### Configuring reference data
@@ -441,13 +413,8 @@ params {
Each index required for the analysis will first be created before running the rest of `oncoanalyser` with the following
command:
-:::note
-
-In a process similar to [staging reference data](#staging-reference-data), you can first generate the required indexes
-by setting `--prepare_reference_only` and then provide the prepared reference files to `oncoanalyser` through a custom
-config file. This avoids having to regenerate indexes for each new analysis.
-
-:::
+> [!NOTE]
+> In a process similar to [staging reference data](#staging-reference-data), you can first generate the required indexes by setting `--prepare_reference_only` and then provide the prepared reference files to `oncoanalyser` through a custom config file. This avoids having to regenerate indexes for each new analysis.
```bash
nextflow run nf-core/oncoanalyser \
@@ -473,12 +440,8 @@ Creation of a STAR index also requires transcript annotations, please provide ei
- GRCh38: [GENCODE v37 (Ensembl v74)
annotations](https://ftp.ebi.ac.uk/pub/databases/gencode/Gencode_human/release_19/gencode.v19.annotation.gtf.gz)
-:::warning
-
-STAR index must use transcript annotations from Ensembl versions that match hmftools resource data (GRCh37: v74; GRCh38:
-v104).
-
-:::
+> [!WARNING]
+> STAR index must use transcript annotations from Ensembl versions that match hmftools resource data (GRCh37: v74; GRCh38: v104).
When creating indexes for reference genomes with alternative haplotypes, an ALT file must be given with
`--ref_data_genome_alt`. Importantly, a STAR index will not be generated for reference genomes with alternative
@@ -486,9 +449,8 @@ haplotypes since this requires careful processing and is hence left to the user.
## Core Nextflow arguments
-:::note
-These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen).
-:::
+> [!NOTE]
+> These options are part of Nextflow and use a _single_ hyphen (pipeline parameters use a double-hyphen).
### `-profile`
@@ -496,11 +458,10 @@ Use this parameter to choose a configuration profile. Profiles can give configur
Several generic profiles are bundled with the pipeline which instruct the pipeline to use software packaged using different methods (Docker, Singularity, Podman, Shifter, Charliecloud, Apptainer, Conda) - see below.
-:::info
-We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported.
-:::
+> [!IMPORTANT]
+> We highly recommend the use of Docker or Singularity containers for full pipeline reproducibility, however when this is not possible, Conda is also supported.
-The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to see if your system is available in these configs please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation).
+The pipeline also dynamically loads configurations from [https://github.com/nf-core/configs](https://github.com/nf-core/configs) when it runs, making multiple config profiles for various institutional clusters available at run time. For more information and to check if your system is supported, please see the [nf-core/configs documentation](https://github.com/nf-core/configs#documentation).
Note that multiple profiles can be loaded, for example: `-profile test,docker` - the order of arguments is important!
They are loaded in sequence, so later profiles can overwrite earlier profiles.
@@ -541,13 +502,13 @@ Specify the path to a specific config file (this is a core Nextflow command). Se
### Resource requests
-Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the steps in the pipeline, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped.
+Whilst the default requirements set within the pipeline will hopefully work for most people and with most input data, you may find that you want to customise the compute resources that the pipeline requests. Each step in the pipeline has a default set of requirements for number of CPUs, memory and time. For most of the pipeline steps, if the job exits with any of the error codes specified [here](https://github.com/nf-core/rnaseq/blob/4c27ef5610c87db00c3c5a3eed10b1d161abf575/conf/base.config#L18) it will automatically be resubmitted with higher resource requests (2 x original, then 3 x original). If it still fails after the third attempt then the pipeline execution is stopped.
To change the resource requests, please see the [max resources](https://nf-co.re/docs/usage/configuration#max-resources) and [tuning workflow resources](https://nf-co.re/docs/usage/configuration#tuning-workflow-resources) section of the nf-core website.
### Custom containers
-In some cases you may wish to change which container or conda environment a step of the pipeline uses for a particular tool. By default nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However in some cases the pipeline specified version maybe out of date.
+In some cases, you may wish to change the container or conda environment used by a pipeline step for a particular tool. By default, nf-core pipelines use containers and software from the [biocontainers](https://biocontainers.pro/) or [bioconda](https://bioconda.github.io/) projects. However, in some cases the pipeline specified version may be out of date.
To use a different container from the default container or conda environment specified in a pipeline, please see the [updating tool versions](https://nf-co.re/docs/usage/configuration#updating-tool-versions) section of the nf-core website.
diff --git a/modules.json b/modules.json
index bca83177..2d149fd5 100644
--- a/modules.json
+++ b/modules.json
@@ -56,17 +56,17 @@
"nf-core": {
"utils_nextflow_pipeline": {
"branch": "master",
- "git_sha": "3aa0aec1d52d492fe241919f0c6100ebf0074082",
+ "git_sha": "c2b22d85f30a706a3073387f30380704fcae013b",
"installed_by": ["subworkflows"]
},
"utils_nfcore_pipeline": {
"branch": "master",
- "git_sha": "1b6b9a3338d011367137808b49b923515080e3ba",
+ "git_sha": "51ae5406a030d4da1e49e4dab49756844fdd6c7a",
"installed_by": ["subworkflows"]
},
"utils_nfschema_plugin": {
"branch": "master",
- "git_sha": "bbd5a41f4535a8defafe6080e00ea74c45f4f96c",
+ "git_sha": "2fd2cd6d0e7b273747f32e465fdc6bcc3ae0814e",
"installed_by": ["subworkflows"]
}
}
diff --git a/nextflow.config b/nextflow.config
index fdb47d5c..d4f3b12f 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -61,6 +61,7 @@ params {
show_hidden = false
version = false
pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/oncoanalyser'
+ trace_report_suffix = new java.util.Date().format('yyyy-MM-dd_HH-mm-ss')
// Config options
config_profile_name = null
@@ -189,6 +190,13 @@ profiles {
executor.name = 'local'
executor.cpus = 4
executor.memory = 8.GB
+ process {
+ resourceLimits = [
+ memory: 8.GB,
+ cpus : 4,
+ time : 1.h
+ ]
+ }
}
test { includeConfig 'conf/test.config' }
test_full { includeConfig 'conf/test_full.config' }
@@ -235,30 +243,48 @@ set -C # No clobber - prevent output redirection from overwriting files.
// Disable process selector warnings by default. Use debug profile to enable warnings.
nextflow.enable.configProcessNamesValidation = false
-def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
timeline {
enabled = true
- file = "${params.outdir}/pipeline_info/execution_timeline_${trace_timestamp}.html"
+ file = "${params.outdir}/pipeline_info/execution_timeline_${params.trace_report_suffix}.html"
}
report {
enabled = true
- file = "${params.outdir}/pipeline_info/execution_report_${trace_timestamp}.html"
+ file = "${params.outdir}/pipeline_info/execution_report_${params.trace_report_suffix}.html"
}
trace {
enabled = true
- file = "${params.outdir}/pipeline_info/execution_trace_${trace_timestamp}.txt"
+ file = "${params.outdir}/pipeline_info/execution_trace_${params.trace_report_suffix}.txt"
}
dag {
enabled = true
- file = "${params.outdir}/pipeline_info/pipeline_dag_${trace_timestamp}.html"
+ file = "${params.outdir}/pipeline_info/pipeline_dag_${params.trace_report_suffix}.html"
}
manifest {
name = 'nf-core/oncoanalyser'
author = """Stephen Watts"""
+ contributors = [
+ [
+ name: 'Stephen Watts',
+ affiliation: '',
+ email: '',
+ github: '',
+ contribution: ['author'],
+ orcid: ''
+ ],
+ [
+ name: 'Hartwig Medical Foundation Australia',
+ affiliation: '',
+ email: '',
+ github: '',
+ contribution: ['contributor'],
+ orcid: ''
+ ],
+ ]
homePage = 'https://github.com/nf-core/oncoanalyser'
description = """A comprehensive cancer DNA/RNA analysis and reporting pipeline"""
mainScript = 'main.nf'
+ defaultBranch = 'master'
nextflowVersion = '!>=24.04.2'
version = '1.1.0dev'
doi = ''
@@ -266,7 +292,7 @@ manifest {
// Nextflow plugins
plugins {
- id 'nf-schema@2.1.1' // Validation of pipeline parameters and creation of an input channel from a sample sheet
+ id 'nf-schema@2.3.0' // Validation of pipeline parameters and creation of an input channel from a sample sheet
}
validation {
@@ -301,9 +327,11 @@ validation {
lenientMode = true
+ monochromeLogs = params.monochrome_logs
+
help {
enabled = true
- command = "nextflow run $manifest.name -profile <docker/singularity/.../institute> --input samplesheet.csv --outdir <OUTDIR>"
+ command = "nextflow run nf-core/oncoanalyser -profile <docker/singularity/.../institute> --input samplesheet.csv --outdir <OUTDIR>"
fullParameter = "help_full"
showHiddenParameter = "show_hidden"
beforeText = """
@@ -313,15 +341,15 @@ validation {
\033[0;34m |\\ | |__ __ / ` / \\ |__) |__ \033[0;33m} {\033[0m
\033[0;34m | \\| | \\__, \\__/ | \\ |___ \033[0;32m\\`-._,-`-,\033[0m
\033[0;32m`._,._,\'\033[0m
-\033[0;35m ${manifest.name} ${manifest.version}\033[0m
+\033[0;35m nf-core/oncoanalyser ${manifest.version}\033[0m
-\033[2m----------------------------------------------------\033[0m-
"""
- afterText = """${manifest.doi ? "* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? "\n" : ""}
+ afterText = """${manifest.doi ? "\n* The pipeline\n" : ""}${manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${manifest.doi ? "\n" : ""}
* The nf-core framework
https://doi.org/10.1038/s41587-020-0439-x
* Software dependencies
- https://github.com/${manifest.name}/blob/master/CITATIONS.md
+ https://github.com/nf-core/oncoanalyser/blob/master/CITATIONS.md
"""
}
summary {
diff --git a/nextflow_schema.json b/nextflow_schema.json
index 79126c59..bd4557f9 100644
--- a/nextflow_schema.json
+++ b/nextflow_schema.json
@@ -390,6 +390,12 @@
"description": "Base URL or local path to location of pipeline test dataset files",
"default": "https://raw.githubusercontent.com/nf-core/test-datasets/oncoanalyser",
"hidden": true
+ },
+ "trace_report_suffix": {
+ "type": "string",
+ "fa_icon": "far calendar",
+ "description": "Suffix to add to the trace report filename. Default is the date and time in the format yyyy-MM-dd_HH-mm-ss.",
+ "hidden": true
}
}
}
diff --git a/ro-crate-metadata.json b/ro-crate-metadata.json
new file mode 100644
index 00000000..7e14bcb5
--- /dev/null
+++ b/ro-crate-metadata.json
@@ -0,0 +1,311 @@
+{
+ "@context": [
+ "https://w3id.org/ro/crate/1.1/context",
+ {
+ "GithubService": "https://w3id.org/ro/terms/test#GithubService",
+ "JenkinsService": "https://w3id.org/ro/terms/test#JenkinsService",
+ "PlanemoEngine": "https://w3id.org/ro/terms/test#PlanemoEngine",
+ "TestDefinition": "https://w3id.org/ro/terms/test#TestDefinition",
+ "TestInstance": "https://w3id.org/ro/terms/test#TestInstance",
+ "TestService": "https://w3id.org/ro/terms/test#TestService",
+ "TestSuite": "https://w3id.org/ro/terms/test#TestSuite",
+ "TravisService": "https://w3id.org/ro/terms/test#TravisService",
+ "definition": "https://w3id.org/ro/terms/test#definition",
+ "engineVersion": "https://w3id.org/ro/terms/test#engineVersion",
+ "instance": "https://w3id.org/ro/terms/test#instance",
+ "resource": "https://w3id.org/ro/terms/test#resource",
+ "runsOn": "https://w3id.org/ro/terms/test#runsOn"
+ }
+ ],
+ "@graph": [
+ {
+ "@id": "./",
+ "@type": "Dataset",
+ "creativeWorkStatus": "InProgress",
+ "datePublished": "2025-01-20T14:36:26+00:00",
+ "description": " \n \n\n\n[![GitHub Actions CI Status](https://github.com/nf-core/oncoanalyser/actions/workflows/ci.yml/badge.svg)](https://github.com/nf-core/oncoanalyser/actions/workflows/ci.yml)\n[![GitHub Actions Linting Status](https://github.com/nf-core/oncoanalyser/actions/workflows/linting.yml/badge.svg)](https://github.com/nf-core/oncoanalyser/actions/workflows/linting.yml)[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/oncoanalyser/results)[![Cite with Zenodo](http://img.shields.io/badge/DOI-10.5281/zenodo.XXXXXXX-1073c8?labelColor=000000)](https://doi.org/10.5281/zenodo.XXXXXXX)\n[![nf-test](https://img.shields.io/badge/unit_tests-nf--test-337ab7.svg)](https://www.nf-test.com)\n\n[![Nextflow](https://img.shields.io/badge/nextflow%20DSL2-%E2%89%A524.04.2-23aa62.svg)](https://www.nextflow.io/)\n[![run with conda](http://img.shields.io/badge/run%20with-conda-3EB049?labelColor=000000&logo=anaconda)](https://docs.conda.io/en/latest/)\n[![run with docker](https://img.shields.io/badge/run%20with-docker-0db7ed?labelColor=000000&logo=docker)](https://www.docker.com/)\n[![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/)\n[![Launch on Seqera Platform](https://img.shields.io/badge/Launch%20%F0%9F%9A%80-Seqera%20Platform-%234256e7)](https://cloud.seqera.io/launch?pipeline=https://github.com/nf-core/oncoanalyser)\n\n[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23oncoanalyser-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/oncoanalyser)[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core)[![Follow on Mastodon](https://img.shields.io/badge/mastodon-nf__core-6364ff?labelColor=FFFFFF&logo=mastodon)](https://mstdn.science/@nf_core)[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core)\n\n## Introduction\n\n**nf-core/oncoanalyser** is a bioinformatics pipeline that ...\n\n\n\n\n1. Read QC ([`FastQC`](https://www.bioinformatics.babraham.ac.uk/projects/fastqc/))2. Present QC for raw reads ([`MultiQC`](http://multiqc.info/))\n\n## Usage\n\n> [!NOTE]\n> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set-up Nextflow. Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data.\n\n\n\nNow, you can run the pipeline using:\n\n\n\n```bash\nnextflow run nf-core/oncoanalyser \\\n -profile \\\n --input samplesheet.csv \\\n --outdir \n```\n\n> [!WARNING]\n> Please provide pipeline parameters via the CLI or Nextflow `-params-file` option. Custom config files including those provided by the `-c` Nextflow option can be used to provide any configuration _**except for parameters**_; see [docs](https://nf-co.re/docs/usage/getting_started/configuration#custom-configuration-files).\n\nFor more details and further functionality, please refer to the [usage documentation](https://nf-co.re/oncoanalyser/usage) and the [parameter documentation](https://nf-co.re/oncoanalyser/parameters).\n\n## Pipeline output\n\nTo see the results of an example test run with a full size dataset refer to the [results](https://nf-co.re/oncoanalyser/results) tab on the nf-core website pipeline page.\nFor more details about the output files and reports, please refer to the\n[output documentation](https://nf-co.re/oncoanalyser/output).\n\n## Credits\n\nnf-core/oncoanalyser was originally written by Stephen Watts.\n\nWe thank the following people for their extensive assistance in the development of this pipeline:\n\n\n\n## Contributions and Support\n\nIf you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md).\n\nFor further information or help, don't hesitate to get in touch on the [Slack `#oncoanalyser` channel](https://nfcore.slack.com/channels/oncoanalyser) (you can join with [this invite](https://nf-co.re/join/slack)).\n\n## Citations\n\n\n\n\n\n\nAn extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file.\n\nYou can cite the `nf-core` publication as follows:\n\n> **The nf-core framework for community-curated bioinformatics pipelines.**\n>\n> Philip Ewels, Alexander Peltzer, Sven Fillinger, Harshil Patel, Johannes Alneberg, Andreas Wilm, Maxime Ulysse Garcia, Paolo Di Tommaso & Sven Nahnsen.\n>\n> _Nat Biotechnol._ 2020 Feb 13. doi: [10.1038/s41587-020-0439-x](https://dx.doi.org/10.1038/s41587-020-0439-x).\n",
+ "hasPart": [
+ {
+ "@id": "main.nf"
+ },
+ {
+ "@id": "assets/"
+ },
+ {
+ "@id": "conf/"
+ },
+ {
+ "@id": "docs/"
+ },
+ {
+ "@id": "docs/images/"
+ },
+ {
+ "@id": "modules/"
+ },
+ {
+ "@id": "modules/nf-core/"
+ },
+ {
+ "@id": "workflows/"
+ },
+ {
+ "@id": "subworkflows/"
+ },
+ {
+ "@id": "nextflow.config"
+ },
+ {
+ "@id": "README.md"
+ },
+ {
+ "@id": "nextflow_schema.json"
+ },
+ {
+ "@id": "CHANGELOG.md"
+ },
+ {
+ "@id": "LICENSE"
+ },
+ {
+ "@id": "CODE_OF_CONDUCT.md"
+ },
+ {
+ "@id": "CITATIONS.md"
+ },
+ {
+ "@id": "modules.json"
+ },
+ {
+ "@id": "docs/usage.md"
+ },
+ {
+ "@id": "docs/output.md"
+ },
+ {
+ "@id": ".nf-core.yml"
+ },
+ {
+ "@id": ".pre-commit-config.yaml"
+ },
+ {
+ "@id": ".prettierignore"
+ }
+ ],
+ "isBasedOn": "https://github.com/nf-core/oncoanalyser",
+ "license": "MIT",
+ "mainEntity": {
+ "@id": "main.nf"
+ },
+ "mentions": [
+ {
+ "@id": "#288ddb36-2068-49ee-b3c0-0a3bedf0b03b"
+ }
+ ],
+ "name": "nf-core/oncoanalyser"
+ },
+ {
+ "@id": "ro-crate-metadata.json",
+ "@type": "CreativeWork",
+ "about": {
+ "@id": "./"
+ },
+ "conformsTo": [
+ {
+ "@id": "https://w3id.org/ro/crate/1.1"
+ },
+ {
+ "@id": "https://w3id.org/workflowhub/workflow-ro-crate/1.0"
+ }
+ ]
+ },
+ {
+ "@id": "main.nf",
+ "@type": ["File", "SoftwareSourceCode", "ComputationalWorkflow"],
+ "creator": [
+ {
+ "@id": "#hello@stephen.ac"
+ }
+ ],
+ "dateCreated": "",
+ "dateModified": "2025-01-20T14:36:26Z",
+ "dct:conformsTo": "https://bioschemas.org/profiles/ComputationalWorkflow/1.0-RELEASE/",
+ "keywords": ["nf-core", "nextflow"],
+ "license": ["MIT"],
+ "maintainer": [
+ {
+ "@id": "#hello@stephen.ac"
+ }
+ ],
+ "name": ["nf-core/oncoanalyser"],
+ "programmingLanguage": {
+ "@id": "https://w3id.org/workflowhub/workflow-ro-crate#nextflow"
+ },
+ "sdPublisher": {
+ "@id": "https://nf-co.re/"
+ },
+ "url": ["https://github.com/nf-core/oncoanalyser", "https://nf-co.re/oncoanalyser/dev/"],
+ "version": ["1.1.0dev"]
+ },
+ {
+ "@id": "https://w3id.org/workflowhub/workflow-ro-crate#nextflow",
+ "@type": "ComputerLanguage",
+ "identifier": {
+ "@id": "https://www.nextflow.io/"
+ },
+ "name": "Nextflow",
+ "url": {
+ "@id": "https://www.nextflow.io/"
+ },
+ "version": "!>=24.04.2"
+ },
+ {
+ "@id": "#288ddb36-2068-49ee-b3c0-0a3bedf0b03b",
+ "@type": "TestSuite",
+ "instance": [
+ {
+ "@id": "#5587a0fa-66a0-406b-9de0-0eafc24864d0"
+ }
+ ],
+ "mainEntity": {
+ "@id": "main.nf"
+ },
+ "name": "Test suite for nf-core/oncoanalyser"
+ },
+ {
+ "@id": "#5587a0fa-66a0-406b-9de0-0eafc24864d0",
+ "@type": "TestInstance",
+ "name": "GitHub Actions workflow for testing nf-core/oncoanalyser",
+ "resource": "repos/nf-core/oncoanalyser/actions/workflows/ci.yml",
+ "runsOn": {
+ "@id": "https://w3id.org/ro/terms/test#GithubService"
+ },
+ "url": "https://api.github.com"
+ },
+ {
+ "@id": "https://w3id.org/ro/terms/test#GithubService",
+ "@type": "TestService",
+ "name": "Github Actions",
+ "url": {
+ "@id": "https://github.com"
+ }
+ },
+ {
+ "@id": "assets/",
+ "@type": "Dataset",
+ "description": "Additional files"
+ },
+ {
+ "@id": "conf/",
+ "@type": "Dataset",
+ "description": "Configuration files"
+ },
+ {
+ "@id": "docs/",
+ "@type": "Dataset",
+ "description": "Markdown files for documenting the pipeline"
+ },
+ {
+ "@id": "docs/images/",
+ "@type": "Dataset",
+ "description": "Images for the documentation files"
+ },
+ {
+ "@id": "modules/",
+ "@type": "Dataset",
+ "description": "Modules used by the pipeline"
+ },
+ {
+ "@id": "modules/nf-core/",
+ "@type": "Dataset",
+ "description": "nf-core modules"
+ },
+ {
+ "@id": "workflows/",
+ "@type": "Dataset",
+ "description": "Main pipeline workflows to be executed in main.nf"
+ },
+ {
+ "@id": "subworkflows/",
+ "@type": "Dataset",
+ "description": "Smaller subworkflows"
+ },
+ {
+ "@id": "nextflow.config",
+ "@type": "File",
+ "description": "Main Nextflow configuration file"
+ },
+ {
+ "@id": "README.md",
+ "@type": "File",
+ "description": "Basic pipeline usage information"
+ },
+ {
+ "@id": "nextflow_schema.json",
+ "@type": "File",
+ "description": "JSON schema for pipeline parameter specification"
+ },
+ {
+ "@id": "CHANGELOG.md",
+ "@type": "File",
+ "description": "Information on changes made to the pipeline"
+ },
+ {
+ "@id": "LICENSE",
+ "@type": "File",
+ "description": "The license - should be MIT"
+ },
+ {
+ "@id": "CODE_OF_CONDUCT.md",
+ "@type": "File",
+ "description": "The nf-core code of conduct"
+ },
+ {
+ "@id": "CITATIONS.md",
+ "@type": "File",
+ "description": "Citations needed when using the pipeline"
+ },
+ {
+ "@id": "modules.json",
+ "@type": "File",
+ "description": "Version information for modules from nf-core/modules"
+ },
+ {
+ "@id": "docs/usage.md",
+ "@type": "File",
+ "description": "Usage documentation"
+ },
+ {
+ "@id": "docs/output.md",
+ "@type": "File",
+ "description": "Output documentation"
+ },
+ {
+ "@id": ".nf-core.yml",
+ "@type": "File",
+ "description": "nf-core configuration file, configuring template features and linting rules"
+ },
+ {
+ "@id": ".pre-commit-config.yaml",
+ "@type": "File",
+ "description": "Configuration file for pre-commit hooks"
+ },
+ {
+ "@id": ".prettierignore",
+ "@type": "File",
+ "description": "Ignore file for prettier"
+ },
+ {
+ "@id": "https://nf-co.re/",
+ "@type": "Organization",
+ "name": "nf-core",
+ "url": "https://nf-co.re/"
+ },
+ {
+ "@id": "#hello@stephen.ac",
+ "@type": "Person",
+ "email": "hello@stephen.ac",
+ "name": "Stephen Watts"
+ }
+ ]
+}
diff --git a/subworkflows/local/utils_nfcore_oncoanalyser_pipeline/main.nf b/subworkflows/local/utils_nfcore_oncoanalyser_pipeline/main.nf
index f5d139cf..1d8a7613 100644
--- a/subworkflows/local/utils_nfcore_oncoanalyser_pipeline/main.nf
+++ b/subworkflows/local/utils_nfcore_oncoanalyser_pipeline/main.nf
@@ -102,7 +102,6 @@ workflow PIPELINE_COMPLETION {
plaintext_email,
outdir,
monochrome_logs,
- multiqc_report.toList()
)
}
@@ -202,7 +201,7 @@ def toolBibliographyText() {
}
def methodsDescriptionText(mqc_methods_yaml) {
- // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file
+ // Convert to a named map so can be used as with familiar NXF ${workflow} variable syntax in the MultiQC YML file
def meta = [:]
meta.workflow = workflow.toMap()
meta["manifest_map"] = workflow.manifest.toMap()
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/main.nf b/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
index 0fcbf7b3..d6e593e8 100644
--- a/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/main.nf
@@ -92,10 +92,12 @@ def checkCondaChannels() {
channels = config.channels
}
catch (NullPointerException e) {
+ log.debug(e)
log.warn("Could not verify conda channel configuration.")
return null
}
catch (IOException e) {
+ log.debug(e)
log.warn("Could not verify conda channel configuration.")
return null
}
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
index ca964ce8..02dbf094 100644
--- a/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
+++ b/subworkflows/nf-core/utils_nextflow_pipeline/tests/main.workflow.nf.test
@@ -52,10 +52,12 @@ nextflow_workflow {
}
then {
- assertAll(
- { assert workflow.success },
- { assert workflow.stdout.contains("nextflow_workflow v9.9.9") }
- )
+ expect {
+ with(workflow) {
+ assert success
+ assert "nextflow_workflow v9.9.9" in stdout
+ }
+ }
}
}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
index 5cb7bafe..bfd25876 100644
--- a/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/main.nf
@@ -56,21 +56,6 @@ def checkProfileProvided(nextflow_cli_args) {
}
}
-//
-// Citation string for pipeline
-//
-def workflowCitation() {
- def temp_doi_ref = ""
- def manifest_doi = workflow.manifest.doi.tokenize(",")
- // Handling multiple DOIs
- // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers
- // Removing ` ` since the manifest.doi is a string and not a proper list
- manifest_doi.each { doi_ref ->
- temp_doi_ref += " https://doi.org/${doi_ref.replace('https://doi.org/', '').replace(' ', '')}\n"
- }
- return "If you use ${workflow.manifest.name} for your analysis please cite:\n\n" + "* The pipeline\n" + temp_doi_ref + "\n" + "* The nf-core framework\n" + " https://doi.org/10.1038/s41587-020-0439-x\n\n" + "* Software dependencies\n" + " https://github.com/${workflow.manifest.name}/blob/master/CITATIONS.md"
-}
-
//
// Generate workflow version string
//
@@ -150,33 +135,6 @@ def paramsSummaryMultiqc(summary_params) {
return yaml_file_text
}
-//
-// nf-core logo
-//
-def nfCoreLogo(monochrome_logs=true) {
- def colors = logColours(monochrome_logs) as Map
- String.format(
- """\n
- ${dashedLine(monochrome_logs)}
- ${colors.green},--.${colors.black}/${colors.green},-.${colors.reset}
- ${colors.blue} ___ __ __ __ ___ ${colors.green}/,-._.--~\'${colors.reset}
- ${colors.blue} |\\ | |__ __ / ` / \\ |__) |__ ${colors.yellow}} {${colors.reset}
- ${colors.blue} | \\| | \\__, \\__/ | \\ |___ ${colors.green}\\`-._,-`-,${colors.reset}
- ${colors.green}`._,._,\'${colors.reset}
- ${colors.purple} ${workflow.manifest.name} ${getWorkflowVersion()}${colors.reset}
- ${dashedLine(monochrome_logs)}
- """.stripIndent()
- )
-}
-
-//
-// Return dashed line
-//
-def dashedLine(monochrome_logs=true) {
- def colors = logColours(monochrome_logs) as Map
- return "-${colors.dim}----------------------------------------------------${colors.reset}-"
-}
-
//
// ANSII colours used for terminal logging
//
@@ -245,28 +203,24 @@ def logColours(monochrome_logs=true) {
return colorcodes
}
-//
-// Attach the multiqc report to email
-//
-def attachMultiqcReport(multiqc_report) {
- def mqc_report = null
- try {
- if (workflow.success) {
- mqc_report = multiqc_report.getVal()
- if (mqc_report.getClass() == ArrayList && mqc_report.size() >= 1) {
- if (mqc_report.size() > 1) {
- log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one")
- }
- mqc_report = mqc_report[0]
- }
+// Return a single report from an object that may be a Path or List
+//
+def getSingleReport(multiqc_reports) {
+ if (multiqc_reports instanceof Path) {
+ return multiqc_reports
+ } else if (multiqc_reports instanceof List) {
+ if (multiqc_reports.size() == 0) {
+ log.warn("[${workflow.manifest.name}] No reports found from process 'MULTIQC'")
+ return null
+ } else if (multiqc_reports.size() == 1) {
+ return multiqc_reports.first()
+ } else {
+ log.warn("[${workflow.manifest.name}] Found multiple reports from process 'MULTIQC', will use only one")
+ return multiqc_reports.first()
}
+ } else {
+ return null
}
- catch (Exception all) {
- if (multiqc_report) {
- log.warn("[${workflow.manifest.name}] Could not attach MultiQC report to summary email")
- }
- }
- return mqc_report
}
//
@@ -320,7 +274,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi
email_fields['summary'] = summary << misc_fields
// On success try attach the multiqc report
- def mqc_report = attachMultiqcReport(multiqc_report)
+ def mqc_report = getSingleReport(multiqc_report)
// Check if we are only sending emails on failure
def email_address = email
@@ -340,7 +294,7 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi
def email_html = html_template.toString()
// Render the sendmail template
- def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as nextflow.util.MemoryUnit
+ def max_multiqc_email_size = (params.containsKey('max_multiqc_email_size') ? params.max_multiqc_email_size : 0) as MemoryUnit
def smail_fields = [email: email_address, subject: subject, email_txt: email_txt, email_html: email_html, projectDir: "${workflow.projectDir}", mqcFile: mqc_report, mqcMaxSize: max_multiqc_email_size.toBytes()]
def sf = new File("${workflow.projectDir}/assets/sendmail_template.txt")
def sendmail_template = engine.createTemplate(sf).make(smail_fields)
@@ -351,14 +305,17 @@ def completionEmail(summary_params, email, email_on_fail, plaintext_email, outdi
if (email_address) {
try {
if (plaintext_email) {
-new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML') }
+ new org.codehaus.groovy.GroovyException('Send plaintext e-mail, not HTML')
+ }
// Try to send HTML e-mail using sendmail
def sendmail_tf = new File(workflow.launchDir.toString(), ".sendmail_tmp.html")
sendmail_tf.withWriter { w -> w << sendmail_html }
['sendmail', '-t'].execute() << sendmail_html
log.info("-${colors.purple}[${workflow.manifest.name}]${colors.green} Sent summary e-mail to ${email_address} (sendmail)-")
}
- catch (Exception all) {
+ catch (Exception msg) {
+ log.debug(msg.toString())
+ log.debug("Trying with mail instead of sendmail")
// Catch failures and try with plaintext
def mail_cmd = ['mail', '-s', subject, '--content-type=text/html', email_address]
mail_cmd.execute() << email_html
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
index 1dc317f8..f117040c 100644
--- a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test
@@ -41,26 +41,14 @@ nextflow_function {
}
}
- test("Test Function workflowCitation") {
-
- function "workflowCitation"
-
- then {
- assertAll(
- { assert function.success },
- { assert snapshot(function.result).match() }
- )
- }
- }
-
- test("Test Function nfCoreLogo") {
+ test("Test Function without logColours") {
- function "nfCoreLogo"
+ function "logColours"
when {
function {
"""
- input[0] = false
+ input[0] = true
"""
}
}
@@ -73,9 +61,8 @@ nextflow_function {
}
}
- test("Test Function dashedLine") {
-
- function "dashedLine"
+ test("Test Function with logColours") {
+ function "logColours"
when {
function {
@@ -93,14 +80,13 @@ nextflow_function {
}
}
- test("Test Function without logColours") {
-
- function "logColours"
+ test("Test Function getSingleReport with a single file") {
+ function "getSingleReport"
when {
function {
"""
- input[0] = true
+ input[0] = file(params.modules_testdata_base_path + '/generic/tsv/test.tsv', checkIfExists: true)
"""
}
}
@@ -108,18 +94,22 @@ nextflow_function {
then {
assertAll(
{ assert function.success },
- { assert snapshot(function.result).match() }
+ { assert function.result.contains("test.tsv") }
)
}
}
- test("Test Function with logColours") {
- function "logColours"
+ test("Test Function getSingleReport with multiple files") {
+ function "getSingleReport"
when {
function {
"""
- input[0] = false
+ input[0] = [
+ file(params.modules_testdata_base_path + '/generic/tsv/test.tsv', checkIfExists: true),
+ file(params.modules_testdata_base_path + '/generic/tsv/network.tsv', checkIfExists: true),
+ file(params.modules_testdata_base_path + '/generic/tsv/expression.tsv', checkIfExists: true)
+ ]
"""
}
}
@@ -127,7 +117,9 @@ nextflow_function {
then {
assertAll(
{ assert function.success },
- { assert snapshot(function.result).match() }
+ { assert function.result.contains("test.tsv") },
+ { assert !function.result.contains("network.tsv") },
+ { assert !function.result.contains("expression.tsv") }
)
}
}
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
index 1037232c..02c67014 100644
--- a/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
+++ b/subworkflows/nf-core/utils_nfcore_pipeline/tests/main.function.nf.test.snap
@@ -17,26 +17,6 @@
},
"timestamp": "2024-02-28T12:02:59.729647"
},
- "Test Function nfCoreLogo": {
- "content": [
- "\n\n-\u001b[2m----------------------------------------------------\u001b[0m-\n \u001b[0;32m,--.\u001b[0;30m/\u001b[0;32m,-.\u001b[0m\n\u001b[0;34m ___ __ __ __ ___ \u001b[0;32m/,-._.--~'\u001b[0m\n\u001b[0;34m |\\ | |__ __ / ` / \\ |__) |__ \u001b[0;33m} {\u001b[0m\n\u001b[0;34m | \\| | \\__, \\__/ | \\ |___ \u001b[0;32m\\`-._,-`-,\u001b[0m\n \u001b[0;32m`._,._,'\u001b[0m\n\u001b[0;35m nextflow_workflow v9.9.9\u001b[0m\n-\u001b[2m----------------------------------------------------\u001b[0m-\n"
- ],
- "meta": {
- "nf-test": "0.8.4",
- "nextflow": "23.10.1"
- },
- "timestamp": "2024-02-28T12:03:10.562934"
- },
- "Test Function workflowCitation": {
- "content": [
- "If you use nextflow_workflow for your analysis please cite:\n\n* The pipeline\n https://doi.org/10.5281/zenodo.5070524\n\n* The nf-core framework\n https://doi.org/10.1038/s41587-020-0439-x\n\n* Software dependencies\n https://github.com/nextflow_workflow/blob/master/CITATIONS.md"
- ],
- "meta": {
- "nf-test": "0.8.4",
- "nextflow": "23.10.1"
- },
- "timestamp": "2024-02-28T12:03:07.019761"
- },
"Test Function without logColours": {
"content": [
{
@@ -95,16 +75,6 @@
},
"timestamp": "2024-02-28T12:03:17.969323"
},
- "Test Function dashedLine": {
- "content": [
- "-\u001b[2m----------------------------------------------------\u001b[0m-"
- ],
- "meta": {
- "nf-test": "0.8.4",
- "nextflow": "23.10.1"
- },
- "timestamp": "2024-02-28T12:03:14.366181"
- },
"Test Function with logColours": {
"content": [
{
diff --git a/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test
index 842dc432..8fb30164 100644
--- a/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test
+++ b/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test
@@ -42,7 +42,7 @@ nextflow_workflow {
params {
test_data = ''
- outdir = 1
+ outdir = null
}
workflow {
@@ -94,7 +94,7 @@ nextflow_workflow {
params {
test_data = ''
- outdir = 1
+ outdir = null
}
workflow {