diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index b290e09..97c8c97 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -1,20 +1,20 @@
{
"name": "nfcore",
- "image": "nfcore/gitpod:latest",
- "remoteUser": "gitpod",
- "runArgs": ["--privileged"],
+ "image": "nfcore/devcontainer:latest",
- // Configure tool-specific properties.
- "customizations": {
- // Configure properties specific to VS Code.
- "vscode": {
- // Set *default* container specific settings.json values on container create.
- "settings": {
- "python.defaultInterpreterPath": "/opt/conda/bin/python"
- },
+ "remoteUser": "root",
+ "privileged": true,
- // Add the IDs of extensions you want installed when the container is created.
- "extensions": ["ms-python.python", "ms-python.vscode-pylance", "nf-core.nf-core-extensionpack"]
- }
+ "remoteEnv": {
+ // Workspace path on the host for mounting with docker-outside-of-docker
+ "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}"
+ },
+
+ "onCreateCommand": "./.devcontainer/setup.sh",
+
+ "hostRequirements": {
+ "cpus": 4,
+ "memory": "16gb",
+ "storage": "32gb"
}
}
diff --git a/.devcontainer/setup.sh b/.devcontainer/setup.sh
new file mode 100755
index 0000000..057e746
--- /dev/null
+++ b/.devcontainer/setup.sh
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+# Customise the terminal command prompt
+echo "export PROMPT_DIRTRIM=2" >> $HOME/.bashrc
+echo "export PS1='\[\e[3;36m\]\w ->\[\e[0m\\] '" >> $HOME/.bashrc
+export PROMPT_DIRTRIM=2
+export PS1='\[\e[3;36m\]\w ->\[\e[0m\] '
+
+# Update Nextflow
+nextflow self-update
+
+# Update welcome message
+echo "Welcome to the nf-cmgg/sampletracking devcontainer!" > /usr/local/etc/vscode-dev-containers/first-run-notice.txt
diff --git a/.editorconfig b/.editorconfig
deleted file mode 100644
index cbf9a83..0000000
--- a/.editorconfig
+++ /dev/null
@@ -1,51 +0,0 @@
-root = true
-
-[*]
-charset = utf-8
-end_of_line = lf
-insert_final_newline = true
-trim_trailing_whitespace = true
-indent_size = 4
-indent_style = space
-
-[*.{md,yml,yaml,html,css,scss,js}]
-indent_size = 2
-
-# These files are edited and tested upstream in nf-core/modules
-[/modules/nf-core/**]
-charset = unset
-end_of_line = unset
-insert_final_newline = unset
-trim_trailing_whitespace = unset
-indent_style = unset
-[/subworkflows/nf-core/**]
-charset = unset
-end_of_line = unset
-insert_final_newline = unset
-trim_trailing_whitespace = unset
-indent_style = unset
-
-[/assets/email*]
-indent_size = unset
-
-[/assets/haplotype_maps/*]
-charset = unset
-end_of_line = unset
-insert_final_newline = unset
-trim_trailing_whitespace = unset
-indent_style = unset
-
-[/utils/nf-cmgg-sampletracking/*]
-charset = unset
-end_of_line = unset
-insert_final_newline = unset
-trim_trailing_whitespace = unset
-indent_style = unset
-
-# ignore Readme
-[README.md]
-indent_style = unset
-
-# ignore python
-[*.{py,md}]
-indent_style = unset
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 5876edf..8101d53 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -71,7 +71,7 @@ If you wish to contribute a new step, please use the following coding standards:
5. Add any new parameters to `nextflow_schema.json` with help text (via the `nf-core pipelines schema build` tool).
6. Add sanity checks and validation for all relevant parameters.
7. Perform local tests to validate that the new code works as expected.
-8. If applicable, add a new test command in `.github/workflow/ci.yml`.
+8. If applicable, add a new test in the `tests` directory.
9. Update MultiQC config `assets/multiqc_config.yml` so relevant suffixes, file name clean up and module plots are in the appropriate order. If applicable, add a [MultiQC](https://https://multiqc.info/) module.
10. Add a description of the output files and if relevant any appropriate images from the MultiQC report to `docs/output.md`.
diff --git a/.github/actions/get-shards/action.yml b/.github/actions/get-shards/action.yml
new file mode 100644
index 0000000..3408527
--- /dev/null
+++ b/.github/actions/get-shards/action.yml
@@ -0,0 +1,69 @@
+name: "Get number of shards"
+description: "Get the number of nf-test shards for the current CI job"
+inputs:
+ max_shards:
+ description: "Maximum number of shards allowed"
+ required: true
+ paths:
+ description: "Component paths to test"
+ required: false
+ tags:
+ description: "Tags to pass as argument for nf-test --tag parameter"
+ required: false
+outputs:
+ shard:
+ description: "Array of shard numbers"
+ value: ${{ steps.shards.outputs.shard }}
+ total_shards:
+ description: "Total number of shards"
+ value: ${{ steps.shards.outputs.total_shards }}
+runs:
+ using: "composite"
+ steps:
+ - name: Install nf-test
+ uses: nf-core/setup-nf-test@v1
+ with:
+ version: ${{ env.NFT_VER }}
+ - name: Get number of shards
+ id: shards
+ shell: bash
+ run: |
+ # Run nf-test with dynamic parameter
+ nftest_output=$(nf-test test \
+ --profile +docker \
+ $(if [ -n "${{ inputs.tags }}" ]; then echo "--tag ${{ inputs.tags }}"; fi) \
+ --dry-run \
+ --ci \
+ --changed-since HEAD^) || {
+ echo "nf-test command failed with exit code $?"
+ echo "Full output: $nftest_output"
+ exit 1
+ }
+ echo "nf-test dry-run output: $nftest_output"
+
+ # Default values for shard and total_shards
+ shard="[]"
+ total_shards=0
+
+ # Check if there are related tests
+ if echo "$nftest_output" | grep -q 'No tests to execute'; then
+ echo "No related tests found."
+ else
+ # Extract the number of related tests
+ number_of_shards=$(echo "$nftest_output" | sed -n 's|.*Executed \([0-9]*\) tests.*|\1|p')
+ if [[ -n "$number_of_shards" && "$number_of_shards" -gt 0 ]]; then
+ shards_to_run=$(( $number_of_shards < ${{ inputs.max_shards }} ? $number_of_shards : ${{ inputs.max_shards }} ))
+ shard=$(seq 1 "$shards_to_run" | jq -R . | jq -c -s .)
+ total_shards="$shards_to_run"
+ else
+ echo "Unexpected output format. Falling back to default values."
+ fi
+ fi
+
+ # Write to GitHub Actions outputs
+ echo "shard=$shard" >> $GITHUB_OUTPUT
+ echo "total_shards=$total_shards" >> $GITHUB_OUTPUT
+
+ # Debugging output
+ echo "Final shard array: $shard"
+ echo "Total number of shards: $total_shards"
diff --git a/.github/actions/nf-test/action.yml b/.github/actions/nf-test/action.yml
new file mode 100644
index 0000000..3b9724c
--- /dev/null
+++ b/.github/actions/nf-test/action.yml
@@ -0,0 +1,111 @@
+name: "nf-test Action"
+description: "Runs nf-test with common setup steps"
+inputs:
+ profile:
+ description: "Profile to use"
+ required: true
+ shard:
+ description: "Shard number for this CI job"
+ required: true
+ total_shards:
+ description: "Total number of test shards (NOT the total number of matrix jobs)"
+ required: true
+ paths:
+ description: "Test paths"
+ required: true
+ tags:
+ description: "Tags to pass as argument for nf-test --tag parameter"
+ required: false
+runs:
+ using: "composite"
+ steps:
+ - name: Setup Nextflow
+ uses: nf-core/setup-nextflow@v2
+ with:
+ version: "${{ env.NXF_VERSION }}"
+
+ - name: Set up Python
+ uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6
+ with:
+ python-version: "3.14"
+
+ - name: Install nf-test
+ uses: nf-core/setup-nf-test@v1
+ with:
+ version: "${{ env.NFT_VER }}"
+ install-pdiff: true
+
+ - name: Setup apptainer
+ if: contains(inputs.profile, 'singularity')
+ uses: eWaterCycle/setup-apptainer@main
+
+ - name: Set up Singularity
+ if: contains(inputs.profile, 'singularity')
+ shell: bash
+ run: |
+ mkdir -p $NXF_SINGULARITY_CACHEDIR
+ mkdir -p $NXF_SINGULARITY_LIBRARYDIR
+
+ - name: Conda setup
+ if: contains(inputs.profile, 'conda')
+ uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3
+ with:
+ auto-update-conda: true
+ conda-solver: libmamba
+ channels: conda-forge
+ channel-priority: strict
+ conda-remove-defaults: true
+
+ - name: Run nf-test
+ shell: bash
+ env:
+ NFT_WORKDIR: ${{ env.NFT_WORKDIR }}
+ run: |
+ nf-test test \
+ --profile=+${{ inputs.profile }} \
+ $(if [ -n "${{ inputs.tags }}" ]; then echo "--tag ${{ inputs.tags }}"; fi) \
+ --ci \
+ --changed-since HEAD^ \
+ --verbose \
+ --tap=test.tap \
+ --shard ${{ inputs.shard }}/${{ inputs.total_shards }}
+
+ # Save the absolute path of the test.tap file to the output
+ echo "tap_file_path=$(realpath test.tap)" >> $GITHUB_OUTPUT
+
+ - name: Generate test summary
+ if: always()
+ shell: bash
+ run: |
+ # Add header if it doesn't exist (using a token file to track this)
+ if [ ! -f ".summary_header" ]; then
+ echo "# 🚀 nf-test results" >> $GITHUB_STEP_SUMMARY
+ echo "" >> $GITHUB_STEP_SUMMARY
+ echo "| Status | Test Name | Profile | Shard |" >> $GITHUB_STEP_SUMMARY
+ echo "|:------:|-----------|---------|-------|" >> $GITHUB_STEP_SUMMARY
+ touch .summary_header
+ fi
+
+ if [ -f test.tap ]; then
+ while IFS= read -r line; do
+ if [[ $line =~ ^ok ]]; then
+ test_name="${line#ok }"
+ # Remove the test number from the beginning
+ test_name="${test_name#* }"
+ echo "| ✅ | ${test_name} | ${{ inputs.profile }} | ${{ inputs.shard }}/${{ inputs.total_shards }} |" >> $GITHUB_STEP_SUMMARY
+ elif [[ $line =~ ^not\ ok ]]; then
+ test_name="${line#not ok }"
+ # Remove the test number from the beginning
+ test_name="${test_name#* }"
+ echo "| ❌ | ${test_name} | ${{ inputs.profile }} | ${{ inputs.shard }}/${{ inputs.total_shards }} |" >> $GITHUB_STEP_SUMMARY
+ fi
+ done < test.tap
+ else
+ echo "| ⚠️ | No test results found | ${{ inputs.profile }} | ${{ inputs.shard }}/${{ inputs.total_shards }} |" >> $GITHUB_STEP_SUMMARY
+ fi
+
+ - name: Clean up
+ if: always()
+ shell: bash
+ run: |
+ sudo rm -rf /home/ubuntu/tests/
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
deleted file mode 100644
index bae2e47..0000000
--- a/.github/workflows/ci.yml
+++ /dev/null
@@ -1,94 +0,0 @@
-name: nf-core CI
-# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
-on:
- push:
- branches:
- - dev
- pull_request:
- release:
- types: [published]
-
-env:
- NXF_ANSI_LOG: false
- NFT_VER: "0.8.4"
- NFT_WORKDIR: "~"
- NFT_DIFF: "pdiff"
- NFT_DIFF_ARGS: "--line-numbers --expand-tabs=2"
- AWS_ACCESS_KEY_ID: ${{ secrets.UGENT_S3_ACCESS_KEY }}
- AWS_SECRET_ACCESS_KEY: ${{ secrets.UGENT_S3_SECRET_KEY }}
-
-concurrency:
- group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}"
- cancel-in-progress: true
-
-jobs:
- test:
- name: ${{ matrix.tags }} ${{ matrix.profile }} NF-${{ matrix.NXF_VER }}
- # Only run on push if this is the nf-core dev branch (merged PRs)
- if: "${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-cmgg/sampletracking') }}"
- runs-on: ubuntu-latest
- strategy:
- matrix:
- NXF_VER:
- - "24.04.1"
- - "latest-stable"
- tags:
- - "workflows"
- - "pipeline"
- steps:
- - name: Check out pipeline code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
-
- - name: Install Nextflow
- uses: nf-core/setup-nextflow@v2
- with:
- version: "${{ matrix.NXF_VER }}"
-
- - name: Disk space cleanup
- uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1
-
- - uses: actions/setup-python@v4
- with:
- python-version: "3.11"
- architecture: "x64"
-
- - name: Install pdiff to see diff between nf-test snapshots
- run: |
- python -m pip install --upgrade pip
- pip install pdiff
-
- - name: Cache nf-test installation
- id: cache-software
- uses: actions/cache@v3
- with:
- path: |
- /usr/local/bin/nf-test
- /home/runner/.nf-test/nf-test.jar
- key: ${{ runner.os }}-${{ env.NFT_VER }}-nftest
-
- - name: Install nf-test
- if: steps.cache-software.outputs.cache-hit != 'true'
- run: |
- wget -qO- https://code.askimed.com/install/nf-test | bash
- sudo mv nf-test /usr/local/bin/
-
- - name: Run nf-test
- run: |
- nf-test test --verbose --tag ${{ matrix.tags }} --junitxml=test.xml --tap=test.tap
-
- - uses: pcolby/tap-summary@v1
- with:
- path: >-
- test.tap
-
- - name: Output log on failure
- if: failure()
- run: |
- sudo apt install bat > /dev/null
- batcat --decorations=always --color=always ${{ github.workspace }}/.nf-test/tests/*/meta/nextflow.log
-
- - name: Publish Test Report
- uses: mikepenz/action-junit-report@v3
- if: always() # always run even if the previous step fails
- with:
- report_paths: test.xml
diff --git a/.github/workflows/clean-up.yml b/.github/workflows/clean-up.yml
index 0b6b1f2..6adb0ff 100644
--- a/.github/workflows/clean-up.yml
+++ b/.github/workflows/clean-up.yml
@@ -10,7 +10,7 @@ jobs:
issues: write
pull-requests: write
steps:
- - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9
+ - uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10
with:
stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days."
stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful."
diff --git a/.github/workflows/download_pipeline.yml b/.github/workflows/download_pipeline.yml
index f361db6..6d94bcb 100644
--- a/.github/workflows/download_pipeline.yml
+++ b/.github/workflows/download_pipeline.yml
@@ -1,72 +1,134 @@
-name: Test successful pipeline download with 'nf-core download'
+name: Test successful pipeline download with 'nf-core pipelines download'
# Run the workflow when:
# - dispatched manually
-# - when a PR is opened or reopened to master branch
+# - when a PR is opened or reopened to main/master branch
# - the head branch of the pull request is updated, i.e. if fixes for a release are pushed last minute to dev.
on:
workflow_dispatch:
inputs:
testbranch:
- description: "The specific branch you wish to utilize for the test execution of nf-core download."
+ description: "The specific branch you wish to utilize for the test execution of nf-core pipelines download."
required: true
default: "dev"
pull_request:
- types:
- - opened
- branches:
- - master
- pull_request_target:
branches:
+ - main
- master
env:
NXF_ANSI_LOG: false
jobs:
+ configure:
+ runs-on: ubuntu-latest
+ outputs:
+ REPO_LOWERCASE: ${{ steps.get_repo_properties.outputs.REPO_LOWERCASE }}
+ REPOTITLE_LOWERCASE: ${{ steps.get_repo_properties.outputs.REPOTITLE_LOWERCASE }}
+ REPO_BRANCH: ${{ steps.get_repo_properties.outputs.REPO_BRANCH }}
+ steps:
+ - name: Get the repository name and current branch
+ id: get_repo_properties
+ run: |
+ echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> "$GITHUB_OUTPUT"
+ echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> "$GITHUB_OUTPUT"
+ echo "REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> "$GITHUB_OUTPUT"
+
download:
runs-on: ubuntu-latest
+ needs: configure
steps:
- name: Install Nextflow
uses: nf-core/setup-nextflow@v2
- - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
+ - name: Disk space cleanup
+ uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1
+
+ - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6
with:
- python-version: "3.11"
+ python-version: "3.14"
architecture: "x64"
- - uses: eWaterCycle/setup-singularity@931d4e31109e875b13309ae1d07c70ca8fbc8537 # v7
+
+ - name: Setup Apptainer
+ uses: eWaterCycle/setup-apptainer@4bb22c52d4f63406c49e94c804632975787312b3 # v2.0.0
with:
- singularity-version: 3.8.3
+ apptainer-version: 1.3.4
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- pip install git+https://github.com/nf-core/tools.git@dev
+ pip install git+https://github.com/nf-core/tools.git
- - name: Get the repository name and current branch set as environment variable
+ - name: Make a cache directory for the container images
run: |
- echo "REPO_LOWERCASE=${GITHUB_REPOSITORY,,}" >> ${GITHUB_ENV}
- echo "REPOTITLE_LOWERCASE=$(basename ${GITHUB_REPOSITORY,,})" >> ${GITHUB_ENV}
- echo "REPO_BRANCH=${{ github.event.inputs.testbranch || 'dev' }}" >> ${GITHUB_ENV}
+ mkdir -p ./singularity_container_images
- name: Download the pipeline
env:
- NXF_SINGULARITY_CACHEDIR: ./
+ NXF_SINGULARITY_CACHEDIR: ./singularity_container_images
run: |
- nf-core download ${{ env.REPO_LOWERCASE }} \
- --revision ${{ env.REPO_BRANCH }} \
- --outdir ./${{ env.REPOTITLE_LOWERCASE }} \
+ nf-core pipelines download ${{ needs.configure.outputs.REPO_LOWERCASE }} \
+ --revision ${{ needs.configure.outputs.REPO_BRANCH }} \
+ --outdir ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }} \
--compress "none" \
--container-system 'singularity' \
- --container-library "quay.io" -l "docker.io" -l "ghcr.io" \
+ --container-library "quay.io" -l "docker.io" -l "community.wave.seqera.io/library/" \
--container-cache-utilisation 'amend' \
- --download-configuration
+ --download-configuration 'yes'
- name: Inspect download
- run: tree ./${{ env.REPOTITLE_LOWERCASE }}
+ run: tree ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }}
- - name: Run the downloaded pipeline
+ - name: Inspect container images
+ run: tree ./singularity_container_images | tee ./container_initial
+
+ - name: Count the downloaded number of container images
+ id: count_initial
+ run: |
+ image_count=$(ls -1 ./singularity_container_images | wc -l | xargs)
+ echo "Initial container image count: $image_count"
+ echo "IMAGE_COUNT_INITIAL=$image_count" >> "$GITHUB_OUTPUT"
+
+ - name: Run the downloaded pipeline (stub)
+ id: stub_run_pipeline
+ continue-on-error: true
+ env:
+ NXF_SINGULARITY_CACHEDIR: ./singularity_container_images
+ NXF_SINGULARITY_HOME_MOUNT: true
+ run: nextflow run ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ needs.configure.outputs.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results
+ - name: Run the downloaded pipeline (stub run not supported)
+ id: run_pipeline
+ if: ${{ steps.stub_run_pipeline.outcome == 'failure' }}
env:
- NXF_SINGULARITY_CACHEDIR: ./
+ NXF_SINGULARITY_CACHEDIR: ./singularity_container_images
NXF_SINGULARITY_HOME_MOUNT: true
- run: nextflow run ./${{ env.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ env.REPO_BRANCH }}) -stub -profile test,singularity --outdir ./results
+ run: nextflow run ./${{ needs.configure.outputs.REPOTITLE_LOWERCASE }}/$( sed 's/\W/_/g' <<< ${{ needs.configure.outputs.REPO_BRANCH }}) -profile test,singularity --outdir ./results
+
+ - name: Count the downloaded number of container images
+ id: count_afterwards
+ run: |
+ image_count=$(ls -1 ./singularity_container_images | wc -l | xargs)
+ echo "Post-pipeline run container image count: $image_count"
+ echo "IMAGE_COUNT_AFTER=$image_count" >> "$GITHUB_OUTPUT"
+
+ - name: Compare container image counts
+ id: count_comparison
+ run: |
+ if [ "${{ steps.count_initial.outputs.IMAGE_COUNT_INITIAL }}" -ne "${{ steps.count_afterwards.outputs.IMAGE_COUNT_AFTER }}" ]; then
+ initial_count=${{ steps.count_initial.outputs.IMAGE_COUNT_INITIAL }}
+ final_count=${{ steps.count_afterwards.outputs.IMAGE_COUNT_AFTER }}
+ difference=$((final_count - initial_count))
+ echo "$difference additional container images were downloaded at runtime. The pipeline has no support for offline runs!"
+ tree ./singularity_container_images > ./container_afterwards
+ diff ./container_initial ./container_afterwards
+ exit 1
+ else
+ echo "The pipeline can be downloaded successfully!"
+ fi
+
+ - name: Upload Nextflow logfile for debugging purposes
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+ with:
+ name: nextflow_logfile.txt
+ path: .nextflow.log*
+ include-hidden-files: true
diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix_linting.yml
similarity index 80%
rename from .github/workflows/fix-linting.yml
rename to .github/workflows/fix_linting.yml
index 85d4dae..ab714bd 100644
--- a/.github/workflows/fix-linting.yml
+++ b/.github/workflows/fix_linting.yml
@@ -13,13 +13,13 @@ jobs:
runs-on: ubuntu-latest
steps:
# Use the @nf-core-bot token to check out so we can push later
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
with:
token: ${{ secrets.nf_core_bot_auth_token }}
# indication that the linting is being fixed
- name: React on comment
- uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v5
with:
comment-id: ${{ github.event.comment.id }}
reactions: eyes
@@ -32,9 +32,9 @@ jobs:
GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}
# Install and run pre-commit
- - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
+ - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6
with:
- python-version: 3.11
+ python-version: "3.14"
- name: Install pre-commit
run: pip install pre-commit
@@ -47,7 +47,7 @@ jobs:
# indication that the linting has finished
- name: react if linting finished succesfully
if: steps.pre-commit.outcome == 'success'
- uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v5
with:
comment-id: ${{ github.event.comment.id }}
reactions: "+1"
@@ -67,21 +67,21 @@ jobs:
- name: react if linting errors were fixed
id: react-if-fixed
if: steps.commit-and-push.outcome == 'success'
- uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v5
with:
comment-id: ${{ github.event.comment.id }}
reactions: hooray
- name: react if linting errors were not fixed
if: steps.commit-and-push.outcome == 'failure'
- uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v5
with:
comment-id: ${{ github.event.comment.id }}
reactions: confused
- name: react if linting errors were not fixed
if: steps.commit-and-push.outcome == 'failure'
- uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+ uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v5
with:
issue-number: ${{ github.event.issue.number }}
body: |
diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml
index ecabc61..30e6602 100644
--- a/.github/workflows/linting.yml
+++ b/.github/workflows/linting.yml
@@ -1,11 +1,8 @@
name: nf-core linting
# This workflow is triggered on pushes and PRs to the repository.
-# It runs the `nf-core lint` and markdown lint tests to ensure
+# It runs the `nf-core pipelines lint` and markdown lint tests to ensure
# that the code meets the nf-core guidelines.
on:
- push:
- branches:
- - dev
pull_request:
release:
types: [published]
@@ -14,45 +11,59 @@ jobs:
pre-commit:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- - name: Set up Python 3.11
- uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
+ - name: Set up Python 3.14
+ uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6
with:
- python-version: 3.11
- cache: "pip"
+ python-version: "3.14"
- name: Install pre-commit
run: pip install pre-commit
- name: Run pre-commit
- run: pre-commit run
+ run: pre-commit run --all-files
nf-core:
runs-on: ubuntu-latest
steps:
- name: Check out pipeline code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4
+ uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- name: Install Nextflow
uses: nf-core/setup-nextflow@v2
- - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
+ - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6
with:
- python-version: "3.11"
+ python-version: "3.14"
architecture: "x64"
+ - name: read .nf-core.yml
+ uses: pietrobolcato/action-read-yaml@9f13718d61111b69f30ab4ac683e67a56d254e1d # 1.1.0
+ id: read_yml
+ with:
+ config: ${{ github.workspace }}/.nf-core.yml
+
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- pip install nf-core
+ pip install nf-core==${{ steps.read_yml.outputs['nf_core_version'] }}
+
+ - name: Run nf-core pipelines lint
+ if: ${{ github.base_ref != 'master' }}
+ env:
+ GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }}
+ run: nf-core -l lint_log.txt pipelines lint --dir ${GITHUB_WORKSPACE} --markdown lint_results.md
- - name: Run nf-core lint
+ - name: Run nf-core pipelines lint --release
+ if: ${{ github.base_ref == 'master' }}
env:
GITHUB_COMMENTS_URL: ${{ github.event.pull_request.comments_url }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_PR_COMMIT: ${{ github.event.pull_request.head.sha }}
- run: nf-core -l lint_log.txt lint --dir ${GITHUB_WORKSPACE} --markdown lint_results.md
+ run: nf-core -l lint_log.txt pipelines lint --release --dir ${GITHUB_WORKSPACE} --markdown lint_results.md
- name: Save PR number
if: ${{ always() }}
@@ -60,7 +71,7 @@ jobs:
- name: Upload linting log file artifact
if: ${{ always() }}
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4
+ uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: linting-logs
path: |
diff --git a/.github/workflows/linting_comment.yml b/.github/workflows/linting_comment.yml
index 0bed96d..e6e9bc2 100644
--- a/.github/workflows/linting_comment.yml
+++ b/.github/workflows/linting_comment.yml
@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Download lint results
- uses: dawidd6/action-download-artifact@80620a5d27ce0ae443b965134db88467fc607b43 # v7
+ uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11
with:
workflow: linting.yml
workflow_conclusion: completed
@@ -21,7 +21,7 @@ jobs:
run: echo "pr_number=$(cat linting-logs/PR_number.txt)" >> $GITHUB_OUTPUT
- name: Post PR comment
- uses: marocchino/sticky-pull-request-comment@331f8f5b4215f0445d3c07b4967662a32a2d3e31 # v2
+ uses: marocchino/sticky-pull-request-comment@773744901bac0e8cbb5a0dc842800d45e9b2b405 # v2
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
number: ${{ steps.pr_number.outputs.pr_number }}
diff --git a/.github/workflows/nf-test.yml b/.github/workflows/nf-test.yml
new file mode 100644
index 0000000..fc21bf0
--- /dev/null
+++ b/.github/workflows/nf-test.yml
@@ -0,0 +1,141 @@
+name: Run nf-test
+on:
+ pull_request:
+ paths-ignore:
+ - "docs/**"
+ - "**/meta.yml"
+ - "**/*.md"
+ - "**/*.png"
+ - "**/*.svg"
+ release:
+ types: [published]
+ workflow_dispatch:
+
+# Cancel if a newer run is started
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+ cancel-in-progress: true
+
+env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ NFT_VER: "0.9.3"
+ NFT_WORKDIR: "~"
+ NXF_ANSI_LOG: false
+ NXF_SINGULARITY_CACHEDIR: ${{ github.workspace }}/.singularity
+ NXF_SINGULARITY_LIBRARYDIR: ${{ github.workspace }}/.singularity
+
+jobs:
+ nf-test-changes:
+ name: nf-test-changes
+ runs-on: # use GitHub runners
+ - "ubuntu-latest"
+ outputs:
+ shard: ${{ steps.set-shards.outputs.shard }}
+ total_shards: ${{ steps.set-shards.outputs.total_shards }}
+ steps:
+ - name: Clean Workspace # Purge the workspace in case it's running on a self-hosted runner
+ run: |
+ ls -la ./
+ rm -rf ./* || true
+ rm -rf ./.??* || true
+ ls -la ./
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
+ with:
+ fetch-depth: 0
+
+ - name: get number of shards
+ id: set-shards
+ uses: ./.github/actions/get-shards
+ env:
+ NFT_VER: ${{ env.NFT_VER }}
+ with:
+ max_shards: 7
+
+ - name: debug
+ run: |
+ echo ${{ steps.set-shards.outputs.shard }}
+ echo ${{ steps.set-shards.outputs.total_shards }}
+
+ nf-test:
+ name: "${{ matrix.profile }} | ${{ matrix.NXF_VER }} | ${{ matrix.shard }}/${{ needs.nf-test-changes.outputs.total_shards }}"
+ needs: [nf-test-changes]
+ if: ${{ needs.nf-test-changes.outputs.total_shards != '0' }}
+ runs-on: # use GitHub runners
+ - "ubuntu-latest"
+ strategy:
+ fail-fast: false
+ matrix:
+ shard: ${{ fromJson(needs.nf-test-changes.outputs.shard) }}
+ profile: [conda, docker, singularity]
+ isMain:
+ - ${{ github.base_ref == 'master' || github.base_ref == 'main' }}
+ # Exclude conda and singularity on dev
+ exclude:
+ - isMain: false
+ profile: "conda"
+ - isMain: false
+ profile: "singularity"
+ NXF_VER:
+ - "25.10.0"
+ - "latest-everything"
+ env:
+ NXF_ANSI_LOG: false
+ TOTAL_SHARDS: ${{ needs.nf-test-changes.outputs.total_shards }}
+
+ steps:
+ - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
+ with:
+ fetch-depth: 0
+
+ - name: Run nf-test
+ id: run_nf_test
+ uses: ./.github/actions/nf-test
+ continue-on-error: ${{ matrix.NXF_VER == 'latest-everything' }}
+ env:
+ NFT_WORKDIR: ${{ env.NFT_WORKDIR }}
+ NXF_VERSION: ${{ matrix.NXF_VER }}
+ with:
+ profile: ${{ matrix.profile }}
+ shard: ${{ matrix.shard }}
+ total_shards: ${{ env.TOTAL_SHARDS }}
+
+ - name: Report test status
+ if: ${{ always() }}
+ run: |
+ if [[ "${{ steps.run_nf_test.outcome }}" == "failure" ]]; then
+ echo "::error::Test with ${{ matrix.NXF_VER }} failed"
+ # Add to workflow summary
+ echo "## ❌ Test failed: ${{ matrix.profile }} | ${{ matrix.NXF_VER }} | Shard ${{ matrix.shard }}/${{ env.TOTAL_SHARDS }}" >> $GITHUB_STEP_SUMMARY
+ if [[ "${{ matrix.NXF_VER }}" == "latest-everything" ]]; then
+ echo "::warning::Test with latest-everything failed but will not cause workflow failure. Please check if the error is expected or if it needs fixing."
+ fi
+ if [[ "${{ matrix.NXF_VER }}" != "latest-everything" ]]; then
+ exit 1
+ fi
+ fi
+
+ confirm-pass:
+ needs: [nf-test]
+ if: always()
+ runs-on: # use GitHub runners
+ - "ubuntu-latest"
+ steps:
+ - name: One or more tests failed (excluding latest-everything)
+ if: ${{ contains(needs.*.result, 'failure') }}
+ run: exit 1
+
+ - name: One or more tests cancelled
+ if: ${{ contains(needs.*.result, 'cancelled') }}
+ run: exit 1
+
+ - name: All tests ok
+ if: ${{ contains(needs.*.result, 'success') }}
+ run: exit 0
+
+ - name: debug-print
+ if: always()
+ run: |
+ echo "::group::DEBUG: \`needs\` Contents"
+ echo "DEBUG: toJSON(needs) = ${{ toJSON(needs) }}"
+ echo "DEBUG: toJSON(needs.*.result) = ${{ toJSON(needs.*.result) }}"
+ echo "::endgroup::"
diff --git a/.github/workflows/release-announcements.yml b/.github/workflows/release-announcements.yml
deleted file mode 100644
index d468aea..0000000
--- a/.github/workflows/release-announcements.yml
+++ /dev/null
@@ -1,75 +0,0 @@
-name: release-announcements
-# Automatic release toot and tweet anouncements
-on:
- release:
- types: [published]
- workflow_dispatch:
-
-jobs:
- toot:
- runs-on: ubuntu-latest
- steps:
- - name: get topics and convert to hashtags
- id: get_topics
- run: |
- curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ' >> $GITHUB_OUTPUT
-
- - uses: rzr/fediverse-action@master
- with:
- access-token: ${{ secrets.MASTODON_ACCESS_TOKEN }}
- host: "mstdn.science" # custom host if not "mastodon.social" (default)
- # GitHub event payload
- # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#release
- message: |
- Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}!
-
- Please see the changelog: ${{ github.event.release.html_url }}
-
- ${{ steps.get_topics.outputs.GITHUB_OUTPUT }} #nfcore #openscience #nextflow #bioinformatics
-
- send-tweet:
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5
- with:
- python-version: "3.10"
- - name: Install dependencies
- run: pip install tweepy==4.14.0
- - name: Send tweet
- shell: python
- run: |
- import os
- import tweepy
-
- client = tweepy.Client(
- access_token=os.getenv("TWITTER_ACCESS_TOKEN"),
- access_token_secret=os.getenv("TWITTER_ACCESS_TOKEN_SECRET"),
- consumer_key=os.getenv("TWITTER_CONSUMER_KEY"),
- consumer_secret=os.getenv("TWITTER_CONSUMER_SECRET"),
- )
- tweet = os.getenv("TWEET")
- client.create_tweet(text=tweet)
- env:
- TWEET: |
- Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}!
-
- Please see the changelog: ${{ github.event.release.html_url }}
- TWITTER_CONSUMER_KEY: ${{ secrets.TWITTER_CONSUMER_KEY }}
- TWITTER_CONSUMER_SECRET: ${{ secrets.TWITTER_CONSUMER_SECRET }}
- TWITTER_ACCESS_TOKEN: ${{ secrets.TWITTER_ACCESS_TOKEN }}
- TWITTER_ACCESS_TOKEN_SECRET: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }}
-
- bsky-post:
- runs-on: ubuntu-latest
- steps:
- - uses: zentered/bluesky-post-action@80dbe0a7697de18c15ad22f4619919ceb5ccf597 # v0.1.0
- with:
- post: |
- Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}!
-
- Please see the changelog: ${{ github.event.release.html_url }}
- env:
- BSKY_IDENTIFIER: ${{ secrets.BSKY_IDENTIFIER }}
- BSKY_PASSWORD: ${{ secrets.BSKY_PASSWORD }}
- #
diff --git a/.github/workflows/template-version-comment.yml b/.github/workflows/template-version-comment.yml
new file mode 100644
index 0000000..c5988af
--- /dev/null
+++ b/.github/workflows/template-version-comment.yml
@@ -0,0 +1,46 @@
+name: nf-core template version comment
+# This workflow is triggered on PRs to check if the pipeline template version matches the latest nf-core version.
+# It posts a comment to the PR, even if it comes from a fork.
+
+on: pull_request_target
+
+jobs:
+ template_version:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Check out pipeline code
+ uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
+
+ - name: Read template version from .nf-core.yml
+ uses: nichmor/minimal-read-yaml@1f7205277e25e156e1f63815781db80a6d490b8f # v0.0.2
+ id: read_yml
+ with:
+ config: ${{ github.workspace }}/.nf-core.yml
+
+ - name: Install nf-core
+ run: |
+ python -m pip install --upgrade pip
+ pip install nf-core==${{ steps.read_yml.outputs['nf_core_version'] }}
+
+ - name: Check nf-core outdated
+ id: nf_core_outdated
+ run: echo "OUTPUT=$(pip list --outdated | grep nf-core)" >> ${GITHUB_ENV}
+
+ - name: Post nf-core template version comment
+ uses: mshick/add-pr-comment@b8f338c590a895d50bcbfa6c5859251edc8952fc # v2
+ if: |
+ contains(env.OUTPUT, 'nf-core')
+ with:
+ repo-token: ${{ secrets.NF_CORE_BOT_AUTH_TOKEN }}
+ allow-repeats: false
+ message: |
+ > [!WARNING]
+ > Newer version of the nf-core template is available.
+ >
+ > Your pipeline is using an old version of the nf-core template: ${{ steps.read_yml.outputs['nf_core_version'] }}.
+ > Please update your pipeline to the latest version.
+ >
+ > For more documentation on how to update your pipeline, please see the [nf-core documentation](https://github.com/nf-core/tools?tab=readme-ov-file#sync-a-pipeline-with-the-template) and [Synchronisation documentation](https://nf-co.re/docs/contributing/sync).
+ #
diff --git a/.gitignore b/.gitignore
index e492601..ca0d4f6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,6 +6,6 @@ results/
testing/
testing*
*.pyc
+null/
.env
.nf-test*
-null
diff --git a/.nf-core.yml b/.nf-core.yml
index 1ffd9c7..f27f7cc 100644
--- a/.nf-core.yml
+++ b/.nf-core.yml
@@ -1,26 +1,39 @@
+repository_type: pipeline
+
+nf_core_version: 3.4.1
+
lint:
- files_exist:
- - CODE_OF_CONDUCT.md
- - assets/nf-core-sampletracking_logo_light.png
- - docs/images/nf-core-sampletracking_logo_light.png
- - docs/images/nf-core-sampletracking_logo_dark.png
- - .github/ISSUE_TEMPLATE/config.yml
- - .github/workflows/awstest.yml
- - .github/workflows/awsfulltest.yml
files_unchanged:
- CODE_OF_CONDUCT.md
- assets/nf-core-sampletracking_logo_light.png
- docs/images/nf-core-sampletracking_logo_light.png
- docs/images/nf-core-sampletracking_logo_dark.png
- - .github/workflows/linting.yml
- - .gitignore
- multiqc_config:
- - report_comment
+ - .github/ISSUE_TEMPLATE/bug_report.yml
+ - .github/CONTRIBUTING.md
+ - .github/PULL_REQUEST_TEMPLATE.md
+ - assets/email_template.txt
+ - docs/README.md
nextflow_config:
- manifest.name
- manifest.homePage
-repository_type: pipeline
+ multiqc_config:
+ - report_comment
+ files_exist:
+ - CODE_OF_CONDUCT.md
+ - assets/nf-core-sampletracking_logo_light.png
+ - docs/images/nf-core-sampletracking_logo_light.png
+ - docs/images/nf-core-sampletracking_logo_dark.png
+ - .github/ISSUE_TEMPLATE/config.yml
+ - .github/workflows/awstest.yml
+ - .github/workflows/awsfulltest.yml
+ nf_test_content: false
+
template:
- prefix: nf-cmgg
- skip:
- - igenomes
+ org: nf-cmgg
+ name: sampletracking
+ description: CMGG Sampletracking workflow
+ author: Matthias De Smet
+ version: 1.0.1dev
+ force: true
+ outdir: .
+ is_nfcore: false
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index af57081..d06777a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -3,8 +3,25 @@ repos:
rev: "v3.1.0"
hooks:
- id: prettier
- - repo: https://github.com/editorconfig-checker/editorconfig-checker.python
- rev: "2.7.3"
+ additional_dependencies:
+ - prettier@3.6.2
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v6.0.0
hooks:
- - id: editorconfig-checker
- alias: ec
+ - id: trailing-whitespace
+ args: [--markdown-linebreak-ext=md]
+ exclude: |
+ (?x)^(
+ .*ro-crate-metadata.json$|
+ modules/nf-core/.*|
+ subworkflows/nf-core/.*|
+ .*\.snap$
+ )$
+ - id: end-of-file-fixer
+ exclude: |
+ (?x)^(
+ .*ro-crate-metadata.json$|
+ modules/nf-core/.*|
+ subworkflows/nf-core/.*|
+ .*\.snap$
+ )$
diff --git a/.prettierignore b/.prettierignore
index 437d763..2255e3e 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -10,3 +10,5 @@ testing/
testing*
*.pyc
bin/
+.nf-test/
+ro-crate-metadata.json
diff --git a/.prettierrc.yml b/.prettierrc.yml
index c81f9a7..07dbd8b 100644
--- a/.prettierrc.yml
+++ b/.prettierrc.yml
@@ -1 +1,6 @@
printWidth: 120
+tabWidth: 4
+overrides:
+ - files: "*.{md,yml,yaml,html,css,scss,js,cff}"
+ options:
+ tabWidth: 2
diff --git a/README.md b/README.md
index cf7d6da..b4a44ea 100644
--- a/README.md
+++ b/README.md
@@ -1,14 +1,16 @@
#  
-[](https://github.com/nf-cmgg/sampletracking/actions/workflows/ci.yml)
+[](https://github.com/codespaces/new/nf-cmgg/sampletracking)
+[](https://github.com/nf-cmgg/sampletracking/actions/workflows/nf-test.yml)
[](https://github.com/nf-cmgg/sampletracking/actions/workflows/linting.yml)[](https://doi.org/10.5281/zenodo.XXXXXXX)
[](https://www.nf-test.com)
-[](https://www.nextflow.io/)
+[](https://www.nextflow.io/)
+[](https://github.com/nf-core/tools/releases/tag/3.4.1)
[](https://docs.conda.io/en/latest/)
[](https://www.docker.com/)
[](https://sylabs.io/docs/)
-[](https://tower.nf/launch?pipeline=https://github.com/nf-cmgg/sampletracking)
+[](https://cloud.seqera.io/launch?pipeline=https://github.com/nf-cmgg/sampletracking)
## Introduction
diff --git a/assets/adaptivecard.json b/assets/adaptivecard.json
index 0b78f95..89d6af5 100644
--- a/assets/adaptivecard.json
+++ b/assets/adaptivecard.json
@@ -54,7 +54,8 @@
"body": [
{
"type": "FactSet",
- "facts": [<% out << summary.collect{ k,v -> "{\"title\": \"$k\", \"value\" : \"$v\"}"}.join(",\n") %>
+ "facts": [<% out << summary.collect{ k,v -> "{\"title\": \"$k\", \"value\" : \"$v\"}"
+ }.join(",\n") %>
]
}
]
diff --git a/assets/multiqc_config.yml b/assets/multiqc_config.yml
index e315a35..e4351cc 100644
--- a/assets/multiqc_config.yml
+++ b/assets/multiqc_config.yml
@@ -1,6 +1,5 @@
report_comment: >
- This report has been generated by the nf-cmgg/sampletracking analysis pipeline.
+ This report has been generated by the nf-cmgg/sampletracking analysis pipeline.
report_section_order:
"nf-cmgg-sampletracking-methods-description":
order: -1000
@@ -16,11 +15,11 @@ disable_version_detection: true
table_cond_formatting_rules:
match_certainty:
pass:
- - s_eq: 100
+ - s_eq: 100
warn:
- - lt: 99
+ - lt: 99
fail:
- - lt: 50
+ - lt: 50
table_columns_name:
sex_prediction:
@@ -38,9 +37,9 @@ table_columns_name:
custom_plot_config:
sex_prediction:
defaultsort:
- - column: "Certainty of sex match"
- direction: asc
+ - column: "Certainty of sex match"
+ direction: asc
picard-crosscheckfingerprints-sample-table:
defaultsort:
- - column: "Best match LOD"
- direction: asc
+ - column: "Best match LOD"
+ direction: asc
diff --git a/assets/samplesheet.csv b/assets/samplesheet.csv
index a17cbde..f5472ee 100644
--- a/assets/samplesheet.csv
+++ b/assets/samplesheet.csv
@@ -1,3 +1,3 @@
sample,pool,sex,sample_bam,sample_bam_index,snp_fastq_1,snp_fastq_2,snp_bam,snp_bam_index
-sample1,pool1,U,https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/illumina/exome/bam/sample1.sort.bam,https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/illumina/exome/bam/sample1.sort.bam.bai,https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/illumina/snp/fastq/snp_sample1_R1.fastq.gz,https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/illumina/snp/fastq/snp_sample1_R2.fastq.gz,,
-sample2,pool1,U,https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/illumina/exome/cram/sample2.sorted.cram,https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/illumina/exome/cram/sample2.sorted.cram.crai,,,https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/illumina/snp/cram/snp_sample2.sorted.cram,https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/illumina/snp/cram/snp_sample2.sorted.cram.crai
+sample1,pool1,U,https://github.com/nf-cmgg/test-datasets/raw/refs/heads/sampletracking/data/genomics/homo_sapiens/illumina/exome/bam/sample1.sort.bam,https://github.com/nf-cmgg/test-datasets/raw/refs/heads/sampletracking/data/genomics/homo_sapiens/illumina/exome/bam/sample1.sort.bam.bai,,,https://github.com/nf-cmgg/test-datasets/raw/refs/heads/sampletracking/data/genomics/homo_sapiens/illumina/snp/bam/snp_sample1.sorted.bam,https://github.com/nf-cmgg/test-datasets/raw/refs/heads/sampletracking/data/genomics/homo_sapiens/illumina/snp/bam/snp_sample1.sorted.bam.bai
+sample2,pool1,U,https://github.com/nf-cmgg/test-datasets/raw/refs/heads/sampletracking/data/genomics/homo_sapiens/illumina/exome/bam/sample2.sort.bam,https://github.com/nf-cmgg/test-datasets/raw/refs/heads/sampletracking/data/genomics/homo_sapiens/illumina/exome/bam/sample2.sort.bam.bai,,,https://github.com/nf-cmgg/test-datasets/raw/refs/heads/sampletracking/data/genomics/homo_sapiens/illumina/snp/bam/snp_sample2.sorted.bam,https://github.com/nf-cmgg/test-datasets/raw/refs/heads/sampletracking/data/genomics/homo_sapiens/illumina/snp/bam/snp_sample2.sorted.bam.bai
diff --git a/assets/slackreport.json b/assets/slackreport.json
index 7e12ad9..9350cca 100644
--- a/assets/slackreport.json
+++ b/assets/slackreport.json
@@ -22,10 +22,17 @@
},
{
"title": "Pipeline configuration",
- "value": "<% out << summary.collect{ k,v -> k == "hook_url" ? "_${k}_: (_hidden_)" : ( ( v.class.toString().contains('Path') || ( v.class.toString().contains('String') && v.contains('/') ) ) ? "_${k}_: `${v}`" : (v.class.toString().contains('DateTime') ? ("_${k}_: " + v.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM))) : "_${k}_: ${v}") ) }.join(",\n") %>",
+ "value": "<% out << summary.collect{ k,v -> k == "hook_url" ? "_${k
+ }_: (_hidden_)" : ( ( v.class.toString().contains('Path') || ( v.class.toString().contains('String') && v.contains('/') ) ) ? "_${k
+ }_: `${v
+ }`" : (v.class.toString().contains('DateTime') ? ("_${k
+ }_: " + v.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM))) : "_${k
+ }_: ${v
+ }") ) }.join(",\n") %>",
"short": false
}
- <% }
+ <%
+ }
%>
],
"footer": "Completed at <% out << dateComplete.format(java.time.format.DateTimeFormatter.ofLocalizedDateTime(java.time.format.FormatStyle.MEDIUM)) %> (duration: ${duration})"
diff --git a/conf/base.config b/conf/base.config
index 3ecf7a1..13d529f 100644
--- a/conf/base.config
+++ b/conf/base.config
@@ -11,32 +11,31 @@
process {
cpus = { 1 * task.attempt }
- memory = { 8.GB * task.attempt }
+ memory = { 6.GB * task.attempt }
time = { 4.h * task.attempt }
- errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' }
+ errorStrategy = { task.exitStatus in ((130..145) + 104 + 175) ? 'retry' : 'finish' }
maxRetries = 1
maxErrors = '-1'
- // Process-specific resource requirements
withLabel:process_single {
cpus = { 1 }
- memory = { 8.GB * task.attempt }
+ memory = { 6.GB * task.attempt }
time = { 4.h * task.attempt }
}
withLabel:process_low {
cpus = { 2 * task.attempt }
- memory = { 16.GB * task.attempt }
+ memory = { 12.GB * task.attempt }
time = { 4.h * task.attempt }
}
withLabel:process_medium {
- cpus = { 4 * task.attempt }
- memory = { 32.GB * task.attempt }
+ cpus = { 6 * task.attempt }
+ memory = { 36.GB * task.attempt }
time = { 8.h * task.attempt }
}
withLabel:process_high {
- cpus = { 8 * task.attempt }
- memory = { 64.GB * task.attempt }
+ cpus = { 12 * task.attempt }
+ memory = { 72.GB * task.attempt }
time = { 16.h * task.attempt }
}
withLabel:process_long {
@@ -52,4 +51,8 @@ process {
errorStrategy = 'retry'
maxRetries = 2
}
+ withLabel: process_gpu {
+ ext.use_gpu = { workflow.profile.contains('gpu') }
+ accelerator = { workflow.profile.contains('gpu') ? 1 : null }
+ }
}
diff --git a/conf/empty_genomes.config b/conf/empty_genomes.config
deleted file mode 100644
index a90c75d..0000000
--- a/conf/empty_genomes.config
+++ /dev/null
@@ -1,3 +0,0 @@
-// DON'T REMOVE THIS FILE!
-
-params.genomes = [:]
diff --git a/conf/modules.config b/conf/modules.config
index 98dd94d..3227877 100644
--- a/conf/modules.config
+++ b/conf/modules.config
@@ -40,6 +40,7 @@ process {
"--CALCULATE_TUMOR_AWARE_RESULTS false",
"--LOD_THRESHOLD 5.0",
"--EXIT_CODE_WHEN_MISMATCH 0",
+ "--EXIT_CODE_WHEN_NO_VALID_CHECKS 0",
"--VALIDATION_STRINGENCY LENIENT",
"--ALLOW_DUPLICATE_READS true",
].join(" ").trim()}
diff --git a/conf/slurm.config b/conf/slurm.config
index b07aaa1..508002e 100644
--- a/conf/slurm.config
+++ b/conf/slurm.config
@@ -25,4 +25,3 @@ process {
report {
overwrite = true
}
-
diff --git a/conf/test.config b/conf/test.config
index 7471bcb..744489a 100644
--- a/conf/test.config
+++ b/conf/test.config
@@ -10,6 +10,30 @@
----------------------------------------------------------------------------------------
*/
+process {
+ resourceLimits = [
+ cpus: 4,
+ memory: '15.GB',
+ time: '1.h'
+ ]
+
+ // Picard CrosscheckFingerprints
+ withName: "PICARD_CROSSCHECKFINGERPRINTS" {
+ publishDir = [ enabled:false ]
+ ext.args = {[
+ "--CROSSCHECK_BY SAMPLE",
+ "--CROSSCHECK_MODE CHECK_ALL_OTHERS",
+ "--CALCULATE_TUMOR_AWARE_RESULTS false",
+ "--LOD_THRESHOLD 0.0",
+ "--EXIT_CODE_WHEN_MISMATCH 0",
+ "--EXIT_CODE_WHEN_NO_VALID_CHECKS 0",
+ "--VALIDATION_STRINGENCY LENIENT",
+ "--ALLOW_DUPLICATE_READS true",
+ ].join(" ").trim()}
+ }
+
+}
+
params {
config_profile_name = 'Test profile'
config_profile_description = 'Minimal test dataset to check pipeline function'
@@ -18,10 +42,11 @@ params {
bwa_index = "s3://test-data/genomics/homo_sapiens/genome/bwa/"
fasta = "https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/genome/seq/GCA_000001405.15_GRCh38_full_plus_hs38d1_analysis_set_chr21.fna"
fai = "https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/genome/seq/GCA_000001405.15_GRCh38_full_plus_hs38d1_analysis_set_chr21.fna.fai"
- haplotype_map = "https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/genome/picard/haplotype_map.txt"
+ haplotype_map = "s3://test-data/sampletracking/haplotype_map.txt"
}
aws {
+ profile = "ugent"
client {
endpoint = "https://s3.ugent.be"
protocol = "https"
@@ -29,13 +54,3 @@ aws {
connectionTimeout = 60000
}
}
-
-process {
- // Limit resources so that this can run on GitHub Actions
- resourceLimits = [
- cpus : 2,
- memory: 6.GB,
- time : 6.h
- ]
-}
-
diff --git a/conf/test_full.config b/conf/test_full.config
index f782738..e24aa6c 100644
--- a/conf/test_full.config
+++ b/conf/test_full.config
@@ -17,7 +17,7 @@ params {
// Input data for full size test
// TODO nf-core: Specify the paths to your full test data ( on nf-core/test-datasets or directly in repositories, e.g. SRA)
// TODO nf-core: Give any required params for the test so that command line flags are not needed
- input = 'https://raw.githubusercontent.com/nf-core/test-datasets/viralrecon/samplesheet/samplesheet_full_illumina_amplicon.csv'
+ input = params.pipelines_testdata_base_path + 'viralrecon/samplesheet/samplesheet_full_illumina_amplicon.csv'
// Genome references
genome = 'R64-1-1'
diff --git a/main.nf b/main.nf
index 8dfa2ac..007f38b 100644
--- a/main.nf
+++ b/main.nf
@@ -13,15 +13,44 @@
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
-include { SAMPLETRACKING } from './workflows/sampletracking'
-include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_sampletracking_pipeline'
-include { PIPELINE_COMPLETION } from './subworkflows/local/utils_nfcore_sampletracking_pipeline'
+include { SAMPLETRACKING } from './workflows/sampletracking'
+include { PIPELINE_INITIALISATION } from './subworkflows/local/utils_nfcore_sampletracking_pipeline'
+include { PIPELINE_COMPLETION } from './subworkflows/local/utils_nfcore_sampletracking_pipeline'
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
NAMED WORKFLOWS FOR PIPELINE
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
+//
+// WORKFLOW: Run main analysis pipeline depending on type of input
+//
+workflow NFCMGG_SAMPLETRACKING {
+
+ take:
+ samplesheet // channel: samplesheet read in from --input
+ bwa_index // channel: bwa index file
+ genome_fasta // channel: genome fasta file
+ haplotype_map // channel: haplotype map file
+ outdir // channel: output directory
+
+ main:
+
+ //
+ // WORKFLOW: Run pipeline
+ //
+ SAMPLETRACKING (
+ samplesheet,
+ bwa_index,
+ genome_fasta,
+ haplotype_map,
+ outdir
+ )
+ emit:
+ multiqc_report = SAMPLETRACKING.out.multiqc_report // channel: /path/to/multiqc_report.html
+}
+/*
+
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -32,22 +61,25 @@ include { PIPELINE_COMPLETION } from './subworkflows/local/utils_nfcore_sa
workflow {
main:
-
//
// SUBWORKFLOW: Run initialisation tasks
//
PIPELINE_INITIALISATION (
params.version,
params.validate_params,
+ params.monochrome_logs,
args,
params.outdir,
- params.input
+ params.input,
+ params.help,
+ params.help_full,
+ params.show_hidden
)
//
// WORKFLOW: Run main workflow
//
- SAMPLETRACKING (
+ NFCMGG_SAMPLETRACKING (
PIPELINE_INITIALISATION.out.samplesheet,
Channel.value([
[id: "bwa"],
@@ -62,12 +94,8 @@ workflow {
[[id:"haplotype_map"],
file(params.haplotype_map, checkIfExists: true)
]),
- params.outdir,
- params.multiqc_config,
- params.multiqc_logo,
- params.multiqc_methods_description
+ params.outdir
)
-
//
// SUBWORKFLOW: Run completion tasks
//
@@ -78,7 +106,7 @@ workflow {
params.outdir,
params.monochrome_logs,
params.hook_url,
- SAMPLETRACKING.out.multiqc_report
+ NFCMGG_SAMPLETRACKING.out.multiqc_report
)
}
diff --git a/modules.json b/modules.json
index 68b2dfe..0c1ce60 100644
--- a/modules.json
+++ b/modules.json
@@ -7,33 +7,36 @@
"nf-core": {
"bwa/index": {
"branch": "master",
- "git_sha": "2d20463181b1c38981a02e90d3084b5f9fa8d540",
- "installed_by": [
- "modules"
- ]
+ "git_sha": "1c46359c837ef768b004519f535c30378e8289fc",
+ "installed_by": ["modules"]
},
"bwa/mem": {
"branch": "master",
- "git_sha": "2d20463181b1c38981a02e90d3084b5f9fa8d540",
- "installed_by": [
- "modules"
- ]
+ "git_sha": "1c46359c837ef768b004519f535c30378e8289fc",
+ "installed_by": ["modules"]
},
"multiqc": {
"branch": "master",
- "git_sha": "f80914f78fb7fa1c00b14cfeb29575ee12240d9c",
- "installed_by": [
- "modules"
- ],
+ "git_sha": "af27af1be706e6a2bb8fe454175b0cdf77f47b49",
+ "installed_by": ["modules"],
"patch": "modules/nf-core/multiqc/multiqc.diff"
},
+ "ngsbits/samplegender": {
+ "branch": "master",
+ "git_sha": "ae8668d026462283f3bdc70ad2d6c78f8919fe68",
+ "installed_by": ["modules"],
+ "patch": "modules/nf-core/ngsbits/samplegender/ngsbits-samplegender.diff"
+ },
"picard/crosscheckfingerprints": {
"branch": "master",
- "git_sha": "49f4e50534fe4b64101e62ea41d5dc43b1324358",
- "installed_by": [
- "modules"
- ],
+ "git_sha": "df124e87c74d8b40285199f8cc20151f5aa57255",
+ "installed_by": ["modules"],
"patch": "modules/nf-core/picard/crosscheckfingerprints/picard-crosscheckfingerprints.diff"
+ },
+ "samtools/index": {
+ "branch": "master",
+ "git_sha": "c8be52dba1166c678e74cda9c3a3c221635c8bb1",
+ "installed_by": ["modules"]
}
}
},
@@ -41,27 +44,21 @@
"nf-core": {
"utils_nextflow_pipeline": {
"branch": "master",
- "git_sha": "c2b22d85f30a706a3073387f30380704fcae013b",
- "installed_by": [
- "subworkflows"
- ]
+ "git_sha": "05954dab2ff481bcb999f24455da29a5828af08d",
+ "installed_by": ["subworkflows"]
},
"utils_nfcore_pipeline": {
"branch": "master",
- "git_sha": "51ae5406a030d4da1e49e4dab49756844fdd6c7a",
- "installed_by": [
- "subworkflows"
- ]
+ "git_sha": "05954dab2ff481bcb999f24455da29a5828af08d",
+ "installed_by": ["subworkflows"]
},
"utils_nfschema_plugin": {
"branch": "master",
- "git_sha": "2fd2cd6d0e7b273747f32e465fdc6bcc3ae0814e",
- "installed_by": [
- "subworkflows"
- ]
+ "git_sha": "4b406a74dc0449c0401ed87d5bfff4252fd277fd",
+ "installed_by": ["subworkflows"]
}
}
}
}
}
-}
\ No newline at end of file
+}
diff --git a/modules/nf-core/bwa/index/environment.yml b/modules/nf-core/bwa/index/environment.yml
index d8789a2..54e6794 100644
--- a/modules/nf-core/bwa/index/environment.yml
+++ b/modules/nf-core/bwa/index/environment.yml
@@ -1,5 +1,13 @@
+---
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json
channels:
- conda-forge
- bioconda
+
dependencies:
- - bioconda::bwa=0.7.18
+ # renovate: datasource=conda depName=bioconda/bwa
+ - bioconda::bwa=0.7.19
+ # renovate: datasource=conda depName=bioconda/htslib
+ - bioconda::htslib=1.22.1
+ # renovate: datasource=conda depName=bioconda/samtools
+ - bioconda::samtools=1.22.1
diff --git a/modules/nf-core/bwa/index/main.nf b/modules/nf-core/bwa/index/main.nf
index 29d9957..1860ecf 100644
--- a/modules/nf-core/bwa/index/main.nf
+++ b/modules/nf-core/bwa/index/main.nf
@@ -1,11 +1,13 @@
process BWA_INDEX {
tag "$fasta"
- label 'process_single'
+ // NOTE requires 5.37N memory where N is the size of the database
+ // source: https://bio-bwa.sourceforge.net/bwa.shtml#8
+ memory { 6.B * fasta.size() }
conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/bwa:0.7.18--he4a0461_0' :
- 'biocontainers/bwa:0.7.18--he4a0461_0' }"
+ 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/d7/d7e24dc1e4d93ca4d3a76a78d4c834a7be3985b0e1e56fddd61662e047863a8a/data' :
+ 'community.wave.seqera.io/library/bwa_htslib_samtools:83b50ff84ead50d0' }"
input:
tuple val(meta), path(fasta)
diff --git a/modules/nf-core/bwa/index/meta.yml b/modules/nf-core/bwa/index/meta.yml
index ce5cb8f..1781586 100644
--- a/modules/nf-core/bwa/index/meta.yml
+++ b/modules/nf-core/bwa/index/meta.yml
@@ -14,7 +14,7 @@ tools:
documentation: https://bio-bwa.sourceforge.net/bwa.shtml
arxiv: arXiv:1303.3997
licence: ["GPL-3.0-or-later"]
- identifier: ""
+ identifier: "biotools:bwa"
input:
- - meta:
type: map
@@ -24,25 +24,31 @@ input:
- fasta:
type: file
description: Input genome fasta file
+ ontologies:
+ - edam: "http://edamontology.org/data_2044" # Sequence
+ - edam: "http://edamontology.org/format_1929" # FASTA
output:
- - index:
- - meta:
+ index:
+ - - meta:
type: map
description: |
Groovy Map containing reference information.
e.g. [ id:'test', single_end:false ]
- pattern: "*.{amb,ann,bwt,pac,sa}"
- bwa:
type: map
description: |
Groovy Map containing reference information.
e.g. [ id:'test', single_end:false ]
pattern: "*.{amb,ann,bwt,pac,sa}"
- - versions:
- - versions.yml:
- type: file
- description: File containing software versions
- pattern: "versions.yml"
+ ontologies:
+ - edam: "http://edamontology.org/data_3210" # Genome index
+ versions:
+ - versions.yml:
+ type: file
+ description: File containing software versions
+ pattern: "versions.yml"
+ ontologies:
+ - edam: http://edamontology.org/format_3750 # YAML
authors:
- "@drpatelh"
- "@maxulysse"
diff --git a/modules/nf-core/bwa/index/tests/main.nf.test.snap b/modules/nf-core/bwa/index/tests/main.nf.test.snap
index 7c8f046..8fdb482 100644
--- a/modules/nf-core/bwa/index/tests/main.nf.test.snap
+++ b/modules/nf-core/bwa/index/tests/main.nf.test.snap
@@ -17,7 +17,7 @@
]
],
"1": [
- "versions.yml:md5,a64462ac7dfb21f4ade9b02e7f65c5bb"
+ "versions.yml:md5,9a94c646009e4e01912bde135de16400"
],
"index": [
[
@@ -34,14 +34,14 @@
]
],
"versions": [
- "versions.yml:md5,a64462ac7dfb21f4ade9b02e7f65c5bb"
+ "versions.yml:md5,9a94c646009e4e01912bde135de16400"
]
}
],
"meta": {
- "nf-test": "0.8.4",
- "nextflow": "23.10.1"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.7"
},
- "timestamp": "2024-05-16T11:40:09.925307"
+ "timestamp": "2025-09-23T11:05:03.657185748"
}
}
\ No newline at end of file
diff --git a/modules/nf-core/bwa/index/tests/tags.yml b/modules/nf-core/bwa/index/tests/tags.yml
deleted file mode 100644
index 28bb483..0000000
--- a/modules/nf-core/bwa/index/tests/tags.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-bwa/index:
- - modules/nf-core/bwa/index/**
diff --git a/modules/nf-core/bwa/mem/environment.yml b/modules/nf-core/bwa/mem/environment.yml
index ef7b966..54e6794 100644
--- a/modules/nf-core/bwa/mem/environment.yml
+++ b/modules/nf-core/bwa/mem/environment.yml
@@ -1,8 +1,13 @@
+---
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json
channels:
- conda-forge
- bioconda
dependencies:
- - bwa=0.7.18
- - htslib=1.20.0
- - samtools=1.20
+ # renovate: datasource=conda depName=bioconda/bwa
+ - bioconda::bwa=0.7.19
+ # renovate: datasource=conda depName=bioconda/htslib
+ - bioconda::htslib=1.22.1
+ # renovate: datasource=conda depName=bioconda/samtools
+ - bioconda::samtools=1.22.1
diff --git a/modules/nf-core/bwa/mem/main.nf b/modules/nf-core/bwa/mem/main.nf
index d18cd93..6893b04 100644
--- a/modules/nf-core/bwa/mem/main.nf
+++ b/modules/nf-core/bwa/mem/main.nf
@@ -4,8 +4,8 @@ process BWA_MEM {
conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:1bd8542a8a0b42e0981337910954371d0230828e-0' :
- 'biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:1bd8542a8a0b42e0981337910954371d0230828e-0' }"
+ 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/d7/d7e24dc1e4d93ca4d3a76a78d4c834a7be3985b0e1e56fddd61662e047863a8a/data' :
+ 'community.wave.seqera.io/library/bwa_htslib_samtools:83b50ff84ead50d0' }"
input:
tuple val(meta) , path(reads)
diff --git a/modules/nf-core/bwa/mem/meta.yml b/modules/nf-core/bwa/mem/meta.yml
index 37467d2..e1265ab 100644
--- a/modules/nf-core/bwa/mem/meta.yml
+++ b/modules/nf-core/bwa/mem/meta.yml
@@ -17,7 +17,7 @@ tools:
documentation: https://bio-bwa.sourceforge.net/bwa.shtml
arxiv: arXiv:1303.3997
licence: ["GPL-3.0-or-later"]
- identifier: ""
+ identifier: "biotools:bwa"
input:
- - meta:
type: map
@@ -29,6 +29,9 @@ input:
description: |
List of input FastQ files of size 1 and 2 for single-end and paired-end data,
respectively.
+ ontologies:
+ - edam: "http://edamontology.org/data_2044" # Sequence
+ - edam: "http://edamontology.org/format_1930" # FASTQ
- - meta2:
type: map
description: |
@@ -38,6 +41,8 @@ input:
type: file
description: BWA genome index files
pattern: "Directory containing BWA index *.{amb,ann,bwt,pac,sa}"
+ ontologies:
+ - edam: "http://edamontology.org/data_3210" # Genome index
- - meta3:
type: map
description: |
@@ -47,52 +52,60 @@ input:
type: file
description: Reference genome in FASTA format
pattern: "*.{fasta,fa}"
- - - sort_bam:
- type: boolean
- description: use samtools sort (true) or samtools view (false)
- pattern: "true or false"
+ ontologies:
+ - edam: "http://edamontology.org/data_2044" # Sequence
+ - edam: "http://edamontology.org/format_1929" # FASTA
+ - sort_bam:
+ type: boolean
+ description: use samtools sort (true) or samtools view (false)
+ pattern: "true or false"
output:
- - bam:
- - meta:
- type: file
- description: Output BAM file containing read alignments
- pattern: "*.{bam}"
+ bam:
+ - - meta:
+ type: map
+ description: Groovy Map containing sample information
- "*.bam":
type: file
description: Output BAM file containing read alignments
pattern: "*.{bam}"
- - cram:
- - meta:
+ ontologies:
+ - edam: "http://edamontology.org/format_2572" # BAM
+ cram:
+ - - meta:
type: file
- description: Output CRAM file containing read alignments
- pattern: "*.{cram}"
+ description: Output BAM file containing read alignments
+ ontologies: []
- "*.cram":
type: file
description: Output CRAM file containing read alignments
pattern: "*.{cram}"
- - csi:
- - meta:
- type: file
- description: Optional index file for BAM file
- pattern: "*.{csi}"
+ ontologies:
+ - edam: "http://edamontology.org/format_3462" # CRAM
+ csi:
+ - - meta:
+ type: map
+ description: Groovy Map containing sample information
- "*.csi":
type: file
description: Optional index file for BAM file
pattern: "*.{csi}"
- - crai:
- - meta:
- type: file
- description: Optional index file for CRAM file
- pattern: "*.{crai}"
+ ontologies: []
+ crai:
+ - - meta:
+ type: map
+ description: Groovy Map containing sample information
- "*.crai":
type: file
description: Optional index file for CRAM file
pattern: "*.{crai}"
- - versions:
- - versions.yml:
- type: file
- description: File containing software versions
- pattern: "versions.yml"
+ ontologies: []
+ versions:
+ - versions.yml:
+ type: file
+ description: File containing software versions
+ pattern: "versions.yml"
+ ontologies:
+ - edam: http://edamontology.org/format_3750 # YAML
authors:
- "@drpatelh"
- "@jeremy1805"
diff --git a/modules/nf-core/bwa/mem/tests/main.nf.test.snap b/modules/nf-core/bwa/mem/tests/main.nf.test.snap
index 2079ea2..51496a3 100644
--- a/modules/nf-core/bwa/mem/tests/main.nf.test.snap
+++ b/modules/nf-core/bwa/mem/tests/main.nf.test.snap
@@ -11,16 +11,16 @@
],
[
- "versions.yml:md5,478b816fbd37871f5e8c617833d51d80"
+ "versions.yml:md5,f882069f942ae2a95e2c91f82e4445c6"
],
- "b6d9cb250261a4c125413c5d867d87a7",
+ "37b4ee1649480bd1ff98666447f64fa5",
"798439cbd7fd81cbcc5078022dc5479d"
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.7"
},
- "timestamp": "2024-08-02T12:22:28.051598"
+ "timestamp": "2025-09-23T11:05:11.396076472"
},
"Single-End Sort": {
"content": [
@@ -34,16 +34,16 @@
],
[
- "versions.yml:md5,478b816fbd37871f5e8c617833d51d80"
+ "versions.yml:md5,f882069f942ae2a95e2c91f82e4445c6"
],
- "848434ae4b79cfdcb2281c60b33663ce",
+ "57106634fcaf3bf503d5487a7717c5d3",
"94fcf617f5b994584c4e8d4044e16b4f"
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.7"
},
- "timestamp": "2024-08-02T12:22:39.671154"
+ "timestamp": "2025-09-23T11:05:19.529514701"
},
"Paired-End": {
"content": [
@@ -57,16 +57,16 @@
],
[
- "versions.yml:md5,478b816fbd37871f5e8c617833d51d80"
+ "versions.yml:md5,f882069f942ae2a95e2c91f82e4445c6"
],
- "5b34d31be84478761f789e3e2e805e31",
+ "57770ff7c7186ed40c42f3d71c16ce3c",
"57aeef88ed701a8ebc8e2f0a381b2a6"
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.7"
},
- "timestamp": "2024-08-02T12:22:51.919479"
+ "timestamp": "2025-09-23T11:05:27.433790935"
},
"Paired-End Sort": {
"content": [
@@ -80,16 +80,16 @@
],
[
- "versions.yml:md5,478b816fbd37871f5e8c617833d51d80"
+ "versions.yml:md5,f882069f942ae2a95e2c91f82e4445c6"
],
- "69003376d9a8952622d8587b39c3eaae",
+ "8f5d8f83b485dcfa1f47a73ae645e3a7",
"af8628d9df18b2d3d4f6fd47ef2bb872"
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.7"
},
- "timestamp": "2024-08-02T12:23:00.833562"
+ "timestamp": "2025-09-23T11:05:35.775774487"
},
"Single-end - stub": {
"content": [
@@ -125,7 +125,7 @@
]
],
"4": [
- "versions.yml:md5,478b816fbd37871f5e8c617833d51d80"
+ "versions.yml:md5,f882069f942ae2a95e2c91f82e4445c6"
],
"bam": [
[
@@ -158,15 +158,15 @@
]
],
"versions": [
- "versions.yml:md5,478b816fbd37871f5e8c617833d51d80"
+ "versions.yml:md5,f882069f942ae2a95e2c91f82e4445c6"
]
}
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.7"
},
- "timestamp": "2024-08-02T12:31:29.46282"
+ "timestamp": "2025-09-23T11:05:51.638917351"
},
"Paired-End - no fasta": {
"content": [
@@ -180,16 +180,16 @@
],
[
- "versions.yml:md5,478b816fbd37871f5e8c617833d51d80"
+ "versions.yml:md5,f882069f942ae2a95e2c91f82e4445c6"
],
- "5b34d31be84478761f789e3e2e805e31",
+ "57770ff7c7186ed40c42f3d71c16ce3c",
"57aeef88ed701a8ebc8e2f0a381b2a6"
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.7"
},
- "timestamp": "2024-08-02T12:23:09.942545"
+ "timestamp": "2025-09-23T11:05:43.764589371"
},
"Paired-end - stub": {
"content": [
@@ -225,7 +225,7 @@
]
],
"4": [
- "versions.yml:md5,478b816fbd37871f5e8c617833d51d80"
+ "versions.yml:md5,f882069f942ae2a95e2c91f82e4445c6"
],
"bam": [
[
@@ -258,14 +258,14 @@
]
],
"versions": [
- "versions.yml:md5,478b816fbd37871f5e8c617833d51d80"
+ "versions.yml:md5,f882069f942ae2a95e2c91f82e4445c6"
]
}
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.7"
},
- "timestamp": "2024-08-02T12:31:37.757037"
+ "timestamp": "2025-09-23T11:05:59.642014144"
}
}
\ No newline at end of file
diff --git a/modules/nf-core/bwa/mem/tests/tags.yml b/modules/nf-core/bwa/mem/tests/tags.yml
deleted file mode 100644
index 82992d1..0000000
--- a/modules/nf-core/bwa/mem/tests/tags.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-bwa/mem:
- - modules/nf-core/bwa/index/**
- - modules/nf-core/bwa/mem/**
diff --git a/modules/nf-core/multiqc/environment.yml b/modules/nf-core/multiqc/environment.yml
index 439a7c2..d02016a 100644
--- a/modules/nf-core/multiqc/environment.yml
+++ b/modules/nf-core/multiqc/environment.yml
@@ -1,5 +1,7 @@
+---
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json
channels:
- conda-forge
- bioconda
dependencies:
- - bioconda::multiqc=1.26
+ - bioconda::multiqc=1.32
diff --git a/modules/nf-core/multiqc/main.nf b/modules/nf-core/multiqc/main.nf
index b8dc40a..d3d9307 100644
--- a/modules/nf-core/multiqc/main.nf
+++ b/modules/nf-core/multiqc/main.nf
@@ -3,11 +3,11 @@ process MULTIQC {
conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/multiqc:1.26--pyhdfd78af_0' :
- 'biocontainers/multiqc:1.26--pyhdfd78af_0' }"
+ 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/8c/8c6c120d559d7ee04c7442b61ad7cf5a9e8970be5feefb37d68eeaa60c1034eb/data' :
+ 'community.wave.seqera.io/library/multiqc:1.32--d58f60e4deb769bf' }"
input:
- tuple val(meta), path(multiqc_files)
+ tuple val(meta), path(multiqc_files, stageAs: "?/*")
path(multiqc_config)
path(extra_multiqc_config)
path(multiqc_logo)
@@ -25,7 +25,10 @@ process MULTIQC {
script:
def args = task.ext.args ?: ''
- def prefix = task.ext.prefix ? "--filename ${task.ext.prefix}.html" : ''
+ def prefix = ''
+ if(args.contains("--title")) {
+ prefix = args.split("--title ")[-1].split(" ")[0] + "_"
+ }
def config = multiqc_config ? "--config $multiqc_config" : ''
def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : ''
def logo = multiqc_logo ? "--cl-config 'custom_logo: \"${multiqc_logo}\"'" : ''
diff --git a/modules/nf-core/multiqc/meta.yml b/modules/nf-core/multiqc/meta.yml
index b16c187..ce30eb7 100644
--- a/modules/nf-core/multiqc/meta.yml
+++ b/modules/nf-core/multiqc/meta.yml
@@ -15,57 +15,71 @@ tools:
licence: ["GPL-3.0-or-later"]
identifier: biotools:multiqc
input:
- - - multiqc_files:
- type: file
- description: |
- List of reports / files recognised by MultiQC, for example the html and zip output of FastQC
- - - multiqc_config:
- type: file
- description: Optional config yml for MultiQC
- pattern: "*.{yml,yaml}"
- - - extra_multiqc_config:
- type: file
- description: Second optional config yml for MultiQC. Will override common sections
- in multiqc_config.
- pattern: "*.{yml,yaml}"
- - - multiqc_logo:
+ - multiqc_files:
+ type: file
+ description: |
+ List of reports / files recognised by MultiQC, for example the html and zip output of FastQC
+ ontologies: []
+ - multiqc_config:
+ type: file
+ description: Optional config yml for MultiQC
+ pattern: "*.{yml,yaml}"
+ ontologies:
+ - edam: http://edamontology.org/format_3750 # YAML
+ - extra_multiqc_config:
+ type: file
+ description: Second optional config yml for MultiQC. Will override common sections
+ in multiqc_config.
+ pattern: "*.{yml,yaml}"
+ ontologies:
+ - edam: http://edamontology.org/format_3750 # YAML
+ - multiqc_logo:
+ type: file
+ description: Optional logo file for MultiQC
+ pattern: "*.{png}"
+ ontologies: []
+ - replace_names:
+ type: file
+ description: |
+ Optional two-column sample renaming file. First column a set of
+ patterns, second column a set of corresponding replacements. Passed via
+ MultiQC's `--replace-names` option.
+ pattern: "*.{tsv}"
+ ontologies:
+ - edam: http://edamontology.org/format_3475 # TSV
+ - sample_names:
+ type: file
+ description: |
+ Optional TSV file with headers, passed to the MultiQC --sample_names
+ argument.
+ pattern: "*.{tsv}"
+ ontologies:
+ - edam: http://edamontology.org/format_3475 # TSV
+output:
+ report:
+ - "*multiqc_report.html":
type: file
- description: Optional logo file for MultiQC
- pattern: "*.{png}"
- - - replace_names:
+ description: MultiQC report file
+ pattern: "multiqc_report.html"
+ ontologies: []
+ data:
+ - "*_data":
+ type: directory
+ description: MultiQC data dir
+ pattern: "multiqc_data"
+ plots:
+ - "*_plots":
type: file
- description: |
- Optional two-column sample renaming file. First column a set of
- patterns, second column a set of corresponding replacements. Passed via
- MultiQC's `--replace-names` option.
- pattern: "*.{tsv}"
- - - sample_names:
+ description: Plots created by MultiQC
+ pattern: "*_data"
+ ontologies: []
+ versions:
+ - versions.yml:
type: file
- description: |
- Optional TSV file with headers, passed to the MultiQC --sample_names
- argument.
- pattern: "*.{tsv}"
-output:
- - report:
- - "*multiqc_report.html":
- type: file
- description: MultiQC report file
- pattern: "multiqc_report.html"
- - data:
- - "*_data":
- type: directory
- description: MultiQC data dir
- pattern: "multiqc_data"
- - plots:
- - "*_plots":
- type: file
- description: Plots created by MultiQC
- pattern: "*_data"
- - versions:
- - versions.yml:
- type: file
- description: File containing software versions
- pattern: "versions.yml"
+ description: File containing software versions
+ pattern: "versions.yml"
+ ontologies:
+ - edam: http://edamontology.org/format_3750 # YAML
authors:
- "@abhi18av"
- "@bunop"
diff --git a/modules/nf-core/multiqc/multiqc.diff b/modules/nf-core/multiqc/multiqc.diff
index 99ee16a..ed42434 100644
--- a/modules/nf-core/multiqc/multiqc.diff
+++ b/modules/nf-core/multiqc/multiqc.diff
@@ -3,15 +3,27 @@ Changes in 'multiqc/main.nf':
--- modules/nf-core/multiqc/main.nf
+++ modules/nf-core/multiqc/main.nf
@@ -7,7 +7,7 @@
- 'biocontainers/multiqc:1.26--pyhdfd78af_0' }"
+ 'community.wave.seqera.io/library/multiqc:1.32--d58f60e4deb769bf' }"
input:
- path multiqc_files, stageAs: "?/*"
-+ tuple val(meta), path(multiqc_files)
++ tuple val(meta), path(multiqc_files, stageAs: "?/*")
path(multiqc_config)
path(extra_multiqc_config)
path(multiqc_logo)
-@@ -50,10 +50,15 @@
+@@ -25,7 +25,10 @@
+
+ script:
+ def args = task.ext.args ?: ''
+- def prefix = task.ext.prefix ? "--filename ${task.ext.prefix}.html" : ''
++ def prefix = ''
++ if(args.contains("--title")) {
++ prefix = args.split("--title ")[-1].split(" ")[0] + "_"
++ }
+ def config = multiqc_config ? "--config $multiqc_config" : ''
+ def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : ''
+ def logo = multiqc_logo ? "--cl-config 'custom_logo: \"${multiqc_logo}\"'" : ''
+@@ -50,10 +53,15 @@
"""
stub:
@@ -33,7 +45,6 @@ Changes in 'multiqc/main.nf':
'modules/nf-core/multiqc/environment.yml' is unchanged
'modules/nf-core/multiqc/meta.yml' is unchanged
-'modules/nf-core/multiqc/tests/tags.yml' is unchanged
'modules/nf-core/multiqc/tests/main.nf.test' is unchanged
'modules/nf-core/multiqc/tests/main.nf.test.snap' is unchanged
'modules/nf-core/multiqc/tests/nextflow.config' is unchanged
diff --git a/modules/nf-core/multiqc/tests/main.nf.test.snap b/modules/nf-core/multiqc/tests/main.nf.test.snap
index ee01208..a88bafd 100644
--- a/modules/nf-core/multiqc/tests/main.nf.test.snap
+++ b/modules/nf-core/multiqc/tests/main.nf.test.snap
@@ -2,14 +2,14 @@
"multiqc_versions_single": {
"content": [
[
- "versions.yml:md5,4cab99fb04e679fd2d72e29eda1b9646"
+ "versions.yml:md5,737bb2c7cad54ffc2ec020791dc48b8f"
]
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.3",
+ "nextflow": "24.10.4"
},
- "timestamp": "2024-10-02T17:51:46.317523"
+ "timestamp": "2025-10-27T13:33:24.356715"
},
"multiqc_stub": {
"content": [
@@ -17,25 +17,25 @@
"multiqc_report.html",
"multiqc_data",
"multiqc_plots",
- "versions.yml:md5,4cab99fb04e679fd2d72e29eda1b9646"
+ "versions.yml:md5,737bb2c7cad54ffc2ec020791dc48b8f"
]
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.3",
+ "nextflow": "24.10.4"
},
- "timestamp": "2024-10-02T17:52:20.680978"
+ "timestamp": "2025-10-27T13:34:11.103619"
},
"multiqc_versions_config": {
"content": [
[
- "versions.yml:md5,4cab99fb04e679fd2d72e29eda1b9646"
+ "versions.yml:md5,737bb2c7cad54ffc2ec020791dc48b8f"
]
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.3",
+ "nextflow": "24.10.4"
},
- "timestamp": "2024-10-02T17:52:09.185842"
+ "timestamp": "2025-10-27T13:34:04.615233"
}
}
\ No newline at end of file
diff --git a/modules/nf-core/multiqc/tests/tags.yml b/modules/nf-core/multiqc/tests/tags.yml
deleted file mode 100644
index bea6c0d..0000000
--- a/modules/nf-core/multiqc/tests/tags.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-multiqc:
- - modules/nf-core/multiqc/**
diff --git a/modules/nf-core/ngsbits/samplegender/environment.yml b/modules/nf-core/ngsbits/samplegender/environment.yml
new file mode 100644
index 0000000..d46a0cd
--- /dev/null
+++ b/modules/nf-core/ngsbits/samplegender/environment.yml
@@ -0,0 +1,8 @@
+---
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json
+channels:
+ - conda-forge
+ - bioconda
+dependencies:
+ # renovate: datasource=conda depName=bioconda/ngs-bits
+ - bioconda::ngs-bits=2025_09
diff --git a/modules/local/ngsbits/samplegender/main.nf b/modules/nf-core/ngsbits/samplegender/main.nf
similarity index 88%
rename from modules/local/ngsbits/samplegender/main.nf
rename to modules/nf-core/ngsbits/samplegender/main.nf
index 2b1709c..e33c661 100644
--- a/modules/local/ngsbits/samplegender/main.nf
+++ b/modules/nf-core/ngsbits/samplegender/main.nf
@@ -4,8 +4,8 @@ process NGSBITS_SAMPLEGENDER {
conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/ngs-bits:2024_11--py312hd80e9a6_0':
- 'biocontainers/ngs-bits:2024_11--py312hd80e9a6_0' }"
+ 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/2b/2be56a07ac1d5a447a10fd061be4d6144620bec00bac834f58c2bdef0330147f/data':
+ 'community.wave.seqera.io/library/ngs-bits:2025_09--f6ea3a4494373ed6' }"
input:
tuple val(meta), path(bam), path(bai)
diff --git a/modules/nf-core/ngsbits/samplegender/meta.yml b/modules/nf-core/ngsbits/samplegender/meta.yml
new file mode 100644
index 0000000..4b79dfc
--- /dev/null
+++ b/modules/nf-core/ngsbits/samplegender/meta.yml
@@ -0,0 +1,80 @@
+name: "ngsbits_samplegender"
+description: Determines the gender of a sample from the BAM/CRAM file.
+keywords:
+ - gender
+ - cram
+ - bam
+ - short reads
+tools:
+ - "ngsbits":
+ description: "Short-read sequencing tools"
+ homepage: "https://github.com/imgag/ngs-bits"
+ documentation: "https://github.com/imgag/ngs-bits"
+ tool_dev_url: "https://github.com/imgag/ngs-bits"
+ licence: ["MIT"]
+ identifier: ""
+input:
+ - - meta:
+ type: map
+ description: |
+ Groovy Map containing sample information
+ e.g. [ id:'test', single_end:false ]
+ - bam:
+ type: file
+ description: One or more BAM/CRAM files to determine the gender of
+ pattern: "*.{bam,cram}"
+ ontologies: []
+ - bai:
+ type: file
+ description: The index file(s) from the input BAM/CRAM file(s)
+ pattern: "*.{bai,crai}"
+ ontologies: []
+ - - meta2:
+ type: map
+ description: |
+ Groovy Map containing reference fasta information
+ e.g. [ id:'test' ]
+ - fasta:
+ type: file
+ description: The reference FASTA to use (mandatory when CRAM files are used)
+ pattern: "*.{fasta,fa,fna}"
+ ontologies: []
+ - - meta3:
+ type: map
+ description: |
+ Groovy Map containing reference fasta information
+ e.g. [ id:'test' ]
+ - fai:
+ type: file
+ description: The index file from the reference FASTA
+ pattern: "*.fai"
+ ontologies: []
+ - method:
+ type: string
+ description: The method to use to define the gender (possibilities are 'xy', 'hetx'
+ and 'sry')
+ pattern: "(xy|hetx|sry)"
+output:
+ tsv:
+ - - meta:
+ type: map
+ description: |
+ Groovy Map containing sample information
+ e.g. [ id:'test', single_end:false ]
+ - "*.tsv":
+ type: file
+ description: An output TSV file containing the results of the gender prediction
+ pattern: "*.tsv"
+ ontologies:
+ - edam: http://edamontology.org/format_3475 # TSV
+ versions:
+ - versions.yml:
+ type: file
+ description: File containing software versions
+ pattern: "versions.yml"
+ ontologies:
+ - edam: http://edamontology.org/format_3750 # YAML
+authors:
+ - "@nvnieuwk"
+maintainers:
+ - "@nvnieuwk"
diff --git a/modules/nf-core/ngsbits/samplegender/ngsbits-samplegender.diff b/modules/nf-core/ngsbits/samplegender/ngsbits-samplegender.diff
new file mode 100644
index 0000000..f60ed79
--- /dev/null
+++ b/modules/nf-core/ngsbits/samplegender/ngsbits-samplegender.diff
@@ -0,0 +1,71 @@
+Changes in component 'nf-core/ngsbits/samplegender'
+'modules/nf-core/ngsbits/samplegender/environment.yml' is unchanged
+'modules/nf-core/ngsbits/samplegender/meta.yml' is unchanged
+Changes in 'ngsbits/samplegender/main.nf':
+--- modules/nf-core/ngsbits/samplegender/main.nf
++++ modules/nf-core/ngsbits/samplegender/main.nf
+@@ -11,26 +11,43 @@
+ tuple val(meta), path(bam), path(bai)
+ tuple val(meta2), path(fasta)
+ tuple val(meta3), path(fai)
+- val method
+
+ output:
+- tuple val(meta), path("*.tsv"), emit: tsv
+- path "versions.yml" , emit: versions
++ tuple val(meta), path("*_xy.tsv") , emit: xy_tsv
++ tuple val(meta), path("*_hetx.tsv") , emit: hetx_tsv
++ tuple val(meta), path("*_sry.tsv") , emit: sry_tsv
++ path "versions.yml" , emit: versions
+
+ when:
+ task.ext.when == null || task.ext.when
+
+ script:
+ def args = task.ext.args ?: ''
++ def args2 = task.ext.args2 ?: ''
++ def args3 = task.ext.args3 ?: ''
+ def prefix = task.ext.prefix ?: "${meta.id}"
+ def ref = fasta ? "-ref ${fasta}" : ""
+ """
+ SampleGender \\
+ -in ${bam} \\
+- -method ${method} \\
+- -out ${prefix}.tsv \\
++ -method xy \\
++ -out ${prefix}_xy.tsv \\
+ ${ref} \\
+- ${args}
++ ${args} \\
++ && \\
++ SampleGender \\
++ -in ${bam} \\
++ -method hetx \\
++ -out ${prefix}_hetx.tsv \\
++ ${ref} \\
++ ${args2} \\
++ && \\
++ SampleGender \\
++ -in ${bam} \\
++ -method sry \\
++ -out ${prefix}_sry.tsv \\
++ ${ref} \\
++ ${args3}
+
+ cat <<-END_VERSIONS > versions.yml
+ "${task.process}":
+@@ -41,7 +58,9 @@
+ stub:
+ def prefix = task.ext.prefix ?: "${meta.id}"
+ """
+- touch ${prefix}.tsv
++ touch ${prefix}_xy.tsv
++ touch ${prefix}_hetx.tsv
++ touch ${prefix}_sry.tsv
+
+ cat <<-END_VERSIONS > versions.yml
+ "${task.process}":
+
+'modules/nf-core/ngsbits/samplegender/tests/main.nf.test.snap' is unchanged
+'modules/nf-core/ngsbits/samplegender/tests/main.nf.test' is unchanged
+************************************************************
diff --git a/modules/nf-core/ngsbits/samplegender/tests/main.nf.test b/modules/nf-core/ngsbits/samplegender/tests/main.nf.test
new file mode 100644
index 0000000..0917b7d
--- /dev/null
+++ b/modules/nf-core/ngsbits/samplegender/tests/main.nf.test
@@ -0,0 +1,47 @@
+nextflow_process {
+
+ name "Test Process NGSBITS_SAMPLEGENDER"
+ script "../main.nf"
+ process "NGSBITS_SAMPLEGENDER"
+
+ tag "modules"
+ tag "modules_nfcore"
+ tag "ngsbits"
+ tag "ngsbits/samplegender"
+
+ // Only a stub test here because the module needs the full chrX and chrY
+ test("homo_sapiens - bam, bai, [], [], sry - stub") {
+
+ options "-stub"
+
+ when {
+ process {
+ """
+ input[0] = [
+ [ id:'test' ], // meta map
+ file(params.modules_testdata_base_path + 'genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam', checkIfExists: true),
+ file(params.modules_testdata_base_path + 'genomics/homo_sapiens/illumina/bam/test.paired_end.sorted.bam.bai', checkIfExists: true)
+ ]
+ input[1] = [
+ [ id:'reference'], // meta map
+ []
+ ]
+ input[2] = [
+ [ id:'reference'], // meta map
+ []
+ ]
+ input[3] = "sry"
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert process.success },
+ { assert snapshot(process.out).match() }
+ )
+ }
+
+ }
+
+}
diff --git a/modules/nf-core/ngsbits/samplegender/tests/main.nf.test.snap b/modules/nf-core/ngsbits/samplegender/tests/main.nf.test.snap
new file mode 100644
index 0000000..332dbc9
--- /dev/null
+++ b/modules/nf-core/ngsbits/samplegender/tests/main.nf.test.snap
@@ -0,0 +1,35 @@
+{
+ "homo_sapiens - bam, bai, [], [], sry - stub": {
+ "content": [
+ {
+ "0": [
+ [
+ {
+ "id": "test"
+ },
+ "test.tsv:md5,d41d8cd98f00b204e9800998ecf8427e"
+ ]
+ ],
+ "1": [
+ "versions.yml:md5,c04e34f7e4a6606e29846359c34440e9"
+ ],
+ "tsv": [
+ [
+ {
+ "id": "test"
+ },
+ "test.tsv:md5,d41d8cd98f00b204e9800998ecf8427e"
+ ]
+ ],
+ "versions": [
+ "versions.yml:md5,c04e34f7e4a6606e29846359c34440e9"
+ ]
+ }
+ ],
+ "meta": {
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.7"
+ },
+ "timestamp": "2025-09-26T13:18:40.119665"
+ }
+}
\ No newline at end of file
diff --git a/modules/nf-core/picard/crosscheckfingerprints/environment.yml b/modules/nf-core/picard/crosscheckfingerprints/environment.yml
index 1d715d5..b4ac4fe 100644
--- a/modules/nf-core/picard/crosscheckfingerprints/environment.yml
+++ b/modules/nf-core/picard/crosscheckfingerprints/environment.yml
@@ -1,5 +1,8 @@
+---
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json
channels:
- conda-forge
- bioconda
dependencies:
- - bioconda::picard=3.3.0
+ # renovate: datasource=conda depName=bioconda/picard
+ - bioconda::picard=3.4.0
diff --git a/modules/nf-core/picard/crosscheckfingerprints/main.nf b/modules/nf-core/picard/crosscheckfingerprints/main.nf
index da69ad9..b1239a7 100644
--- a/modules/nf-core/picard/crosscheckfingerprints/main.nf
+++ b/modules/nf-core/picard/crosscheckfingerprints/main.nf
@@ -4,8 +4,8 @@ process PICARD_CROSSCHECKFINGERPRINTS {
conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/picard:3.3.0--hdfd78af_0' :
- 'biocontainers/picard:3.3.0--hdfd78af_0' }"
+ 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/08/0861295baa7c01fc593a9da94e82b44a729dcaf8da92be8e565da109aa549b25/data' :
+ 'community.wave.seqera.io/library/picard:3.4.0--e9963040df0a9bf6' }"
input:
tuple val(meta), path(input1), path(input1_index), path(input2), path(input2_index), path(haplotype_map)
diff --git a/modules/nf-core/picard/crosscheckfingerprints/meta.yml b/modules/nf-core/picard/crosscheckfingerprints/meta.yml
index 898a11a..6271e9a 100644
--- a/modules/nf-core/picard/crosscheckfingerprints/meta.yml
+++ b/modules/nf-core/picard/crosscheckfingerprints/meta.yml
@@ -27,23 +27,28 @@ input:
type: file
description: List containing 1 or more bam/vcf files or a file containing filepaths
pattern: "*.{bam,vcf,vcf.gz,txt,fofn}"
+ ontologies: []
- input1_index:
type: file
description: List containing 1 or more bam/vcf files indexes
pattern: "*.{bai,csi,crai,tbi}"
+ ontologies: []
- input2:
type: file
description: Optional list containing 1 or more bam/vcf files or a file containing
filepaths
pattern: "*.{bam,vcf,vcf.gz,txt,fofn}"
+ ontologies: []
- input2_index:
type: file
description: List containing 1 or more bam/vcf files indexes
pattern: "*.{bai,csi,crai,tbi}"
+ ontologies: []
- haplotype_map:
type: file
description: Haplotype map file
pattern: "*.{txt,vcf,vcf.gz}"
+ ontologies: []
- - meta2:
type: map
description: |
@@ -53,9 +58,10 @@ input:
type: file
description: Reference genome file
pattern: "*.{fasta,fa,fasta.gz,fa.gz}"
+ ontologies: []
output:
- - crosscheck_metrics:
- - meta:
+ crosscheck_metrics:
+ - - meta:
type: map
description: |
Groovy Map containing sample information
@@ -64,11 +70,14 @@ output:
type: file
description: Metrics created by crosscheckfingerprints
pattern: "*.{crosscheck_metrics.txt}"
- - versions:
- - versions.yml:
- type: file
- description: File containing software versions
- pattern: "versions.yml"
+ ontologies: []
+ versions:
+ - versions.yml:
+ type: file
+ description: File containing software versions
+ pattern: "versions.yml"
+ ontologies:
+ - edam: http://edamontology.org/format_3750 # YAML
authors:
- "@matthdsm"
maintainers:
diff --git a/modules/nf-core/picard/crosscheckfingerprints/picard-crosscheckfingerprints.diff b/modules/nf-core/picard/crosscheckfingerprints/picard-crosscheckfingerprints.diff
index df8ace2..93e0f34 100644
--- a/modules/nf-core/picard/crosscheckfingerprints/picard-crosscheckfingerprints.diff
+++ b/modules/nf-core/picard/crosscheckfingerprints/picard-crosscheckfingerprints.diff
@@ -1,4 +1,6 @@
-Changes in module 'nf-core/picard/crosscheckfingerprints'
+Changes in component 'nf-core/picard/crosscheckfingerprints'
+'modules/nf-core/picard/crosscheckfingerprints/environment.yml' is unchanged
+'modules/nf-core/picard/crosscheckfingerprints/meta.yml' is unchanged
Changes in 'picard/crosscheckfingerprints/main.nf':
--- modules/nf-core/picard/crosscheckfingerprints/main.nf
+++ modules/nf-core/picard/crosscheckfingerprints/main.nf
@@ -12,10 +14,7 @@ Changes in 'picard/crosscheckfingerprints/main.nf':
output:
tuple val(meta), path("*.crosscheck_metrics.txt"), emit: crosscheck_metrics
-'modules/nf-core/picard/crosscheckfingerprints/environment.yml' is unchanged
-'modules/nf-core/picard/crosscheckfingerprints/meta.yml' is unchanged
-'modules/nf-core/picard/crosscheckfingerprints/tests/tags.yml' is unchanged
-'modules/nf-core/picard/crosscheckfingerprints/tests/main.nf.test' is unchanged
'modules/nf-core/picard/crosscheckfingerprints/tests/main.nf.test.snap' is unchanged
'modules/nf-core/picard/crosscheckfingerprints/tests/nextflow.config' is unchanged
+'modules/nf-core/picard/crosscheckfingerprints/tests/main.nf.test' is unchanged
************************************************************
diff --git a/modules/nf-core/picard/crosscheckfingerprints/tests/main.nf.test.snap b/modules/nf-core/picard/crosscheckfingerprints/tests/main.nf.test.snap
index f9e6963..e8a07aa 100644
--- a/modules/nf-core/picard/crosscheckfingerprints/tests/main.nf.test.snap
+++ b/modules/nf-core/picard/crosscheckfingerprints/tests/main.nf.test.snap
@@ -22,25 +22,25 @@
"Crosscheck versions stub": {
"content": [
[
- "versions.yml:md5,baa5c1b1dc10b95cb42ea1b3d66a294f"
+ "versions.yml:md5,dd796808c6bb2051ef0209a1e4b333df"
]
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.7"
},
- "timestamp": "2024-10-18T11:02:39.2311109"
+ "timestamp": "2025-09-15T10:48:14.486452441"
},
"Crosscheck versions - bam": {
"content": [
[
- "versions.yml:md5,baa5c1b1dc10b95cb42ea1b3d66a294f"
+ "versions.yml:md5,dd796808c6bb2051ef0209a1e4b333df"
]
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.7"
},
- "timestamp": "2024-10-18T11:02:13.953034883"
+ "timestamp": "2025-09-15T10:48:05.046808725"
}
}
\ No newline at end of file
diff --git a/modules/nf-core/picard/crosscheckfingerprints/tests/tags.yml b/modules/nf-core/picard/crosscheckfingerprints/tests/tags.yml
deleted file mode 100644
index 1a20220..0000000
--- a/modules/nf-core/picard/crosscheckfingerprints/tests/tags.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-picard/crosscheckfingerprints:
- - "modules/nf-core/picard/crosscheckfingerprints/**"
diff --git a/modules/nf-core/samtools/index/environment.yml b/modules/nf-core/samtools/index/environment.yml
new file mode 100644
index 0000000..89e12a6
--- /dev/null
+++ b/modules/nf-core/samtools/index/environment.yml
@@ -0,0 +1,10 @@
+---
+# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json
+channels:
+ - conda-forge
+ - bioconda
+dependencies:
+ # renovate: datasource=conda depName=bioconda/htslib
+ - bioconda::htslib=1.22.1
+ # renovate: datasource=conda depName=bioconda/samtools
+ - bioconda::samtools=1.22.1
diff --git a/modules/nf-core/samtools/index/main.nf b/modules/nf-core/samtools/index/main.nf
index 929f3a8..a77ad82 100644
--- a/modules/nf-core/samtools/index/main.nf
+++ b/modules/nf-core/samtools/index/main.nf
@@ -4,8 +4,8 @@ process SAMTOOLS_INDEX {
conda "${moduleDir}/environment.yml"
container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
- 'https://depot.galaxyproject.org/singularity/samtools:1.21--h50ea8bc_0' :
- 'community.wave.seqera.io/library/samtools:1.22.1--eccb42ff8fb55509' }"
+ 'https://depot.galaxyproject.org/singularity/samtools:1.22.1--h96c455f_0' :
+ 'biocontainers/samtools:1.22.1--h96c455f_0' }"
input:
tuple val(meta), path(input)
diff --git a/modules/nf-core/samtools/index/tests/main.nf.test.snap b/modules/nf-core/samtools/index/tests/main.nf.test.snap
index 72d65e8..3836c6b 100644
--- a/modules/nf-core/samtools/index/tests/main.nf.test.snap
+++ b/modules/nf-core/samtools/index/tests/main.nf.test.snap
@@ -18,7 +18,7 @@
],
"3": [
- "versions.yml:md5,5e09a6fdf76de396728f877193d72315"
+ "versions.yml:md5,b8717818c91b07de87c2a5590bad02e6"
],
"bai": [
@@ -36,15 +36,15 @@
]
],
"versions": [
- "versions.yml:md5,5e09a6fdf76de396728f877193d72315"
+ "versions.yml:md5,b8717818c91b07de87c2a5590bad02e6"
]
}
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.6"
},
- "timestamp": "2024-09-16T08:21:25.261127166"
+ "timestamp": "2025-09-10T14:13:38.25787"
},
"crai - stub": {
"content": [
@@ -65,7 +65,7 @@
]
],
"3": [
- "versions.yml:md5,5e09a6fdf76de396728f877193d72315"
+ "versions.yml:md5,b8717818c91b07de87c2a5590bad02e6"
],
"bai": [
@@ -83,15 +83,15 @@
],
"versions": [
- "versions.yml:md5,5e09a6fdf76de396728f877193d72315"
+ "versions.yml:md5,b8717818c91b07de87c2a5590bad02e6"
]
}
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.6"
},
- "timestamp": "2024-09-16T08:21:12.653194876"
+ "timestamp": "2025-09-10T14:13:34.496412"
},
"bai - stub": {
"content": [
@@ -112,7 +112,7 @@
],
"3": [
- "versions.yml:md5,5e09a6fdf76de396728f877193d72315"
+ "versions.yml:md5,b8717818c91b07de87c2a5590bad02e6"
],
"bai": [
[
@@ -130,28 +130,28 @@
],
"versions": [
- "versions.yml:md5,5e09a6fdf76de396728f877193d72315"
+ "versions.yml:md5,b8717818c91b07de87c2a5590bad02e6"
]
}
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.6"
},
- "timestamp": "2024-09-16T08:21:01.854932651"
+ "timestamp": "2025-09-10T14:13:25.934431"
},
"csi": {
"content": [
"test.paired_end.sorted.bam.csi",
[
- "versions.yml:md5,5e09a6fdf76de396728f877193d72315"
+ "versions.yml:md5,b8717818c91b07de87c2a5590bad02e6"
]
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.6"
},
- "timestamp": "2024-09-16T08:20:51.485364222"
+ "timestamp": "2025-09-10T14:13:22.262088"
},
"crai": {
"content": [
@@ -172,7 +172,7 @@
]
],
"3": [
- "versions.yml:md5,5e09a6fdf76de396728f877193d72315"
+ "versions.yml:md5,b8717818c91b07de87c2a5590bad02e6"
],
"bai": [
@@ -190,15 +190,15 @@
],
"versions": [
- "versions.yml:md5,5e09a6fdf76de396728f877193d72315"
+ "versions.yml:md5,b8717818c91b07de87c2a5590bad02e6"
]
}
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.6"
},
- "timestamp": "2024-09-16T08:20:40.518873972"
+ "timestamp": "2025-09-10T14:13:18.191664"
},
"bai": {
"content": [
@@ -219,7 +219,7 @@
],
"3": [
- "versions.yml:md5,5e09a6fdf76de396728f877193d72315"
+ "versions.yml:md5,b8717818c91b07de87c2a5590bad02e6"
],
"bai": [
[
@@ -237,14 +237,14 @@
],
"versions": [
- "versions.yml:md5,5e09a6fdf76de396728f877193d72315"
+ "versions.yml:md5,b8717818c91b07de87c2a5590bad02e6"
]
}
],
"meta": {
- "nf-test": "0.9.0",
- "nextflow": "24.04.4"
+ "nf-test": "0.9.2",
+ "nextflow": "25.04.6"
},
- "timestamp": "2024-09-16T08:20:21.184050361"
+ "timestamp": "2025-09-10T14:13:08.51539"
}
}
\ No newline at end of file
diff --git a/nextflow.config b/nextflow.config
index 478d263..84b3774 100644
--- a/nextflow.config
+++ b/nextflow.config
@@ -26,159 +26,181 @@ params {
max_hetx_male = 0.05
// MultiQC options
- multiqc_config = null
- multiqc_title = null
- multiqc_logo = null
- max_multiqc_email_size = '25.MB'
+ multiqc_config = null
+ multiqc_title = null
+ multiqc_logo = null
+ max_multiqc_email_size = '25.MB'
multiqc_methods_description = null
// Boilerplate options
- outdir = './results'
- publish_dir_mode = 'copy'
- email = null
- email_on_fail = null
- plaintext_email = false
- monochrome_logs = false
- hook_url = null
- help = false
- version = false
+ outdir = 'results'
+ publish_dir_mode = 'copy'
+ email = null
+ email_on_fail = null
+ plaintext_email = false
+ monochrome_logs = false
+ hook_url = System.getenv('HOOK_URL')
+ help = false
+ help_full = false
+ show_hidden = false
+ version = false
+ pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/'
+ trace_report_suffix = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
// Config options
config_profile_name = null
config_profile_description = null
+
custom_config_version = 'master'
custom_config_base = "https://raw.githubusercontent.com/nf-core/configs/${params.custom_config_version}"
config_profile_contact = null
config_profile_url = null
// Schema validation default options
- validate_params = true
-
+ validate_params = true
}
// Load base.config by default for all pipelines
includeConfig 'conf/base.config'
-// Load nf-core custom profiles from different Institutions",
-includeConfig !System.getenv('NXF_OFFLINE') && params.custom_config_base ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null"
+// Load modules.config for DSL2 module specific options
+includeConfig 'conf/modules.config'
-// Load nf-cmgg/sampletracking custom profiles from different institutions.
-// Warning: Uncomment only if a pipeline-specific institutional config already exists on nf-core/configs!
-// try {
-// includeConfig "${params.custom_config_base}/pipeline/sampletracking.config"
-// } catch (Exception e) {
-// System.err.println("WARNING: Could not load nf-core/config/sampletracking profiles: ${params.custom_config_base}/pipeline/sampletracking.config")
-// }
profiles {
debug {
- dumpHashes = true
- process.beforeScript = 'echo $HOSTNAME'
- cleanup = false
+ dumpHashes = true
+ process.beforeScript = 'echo $HOSTNAME'
+ cleanup = false
nextflow.enable.configProcessNamesValidation = true
}
conda {
- conda.enabled = true
- conda.channels = ['conda-forge', 'bioconda']
- docker.enabled = false
- singularity.enabled = false
- podman.enabled = false
- shifter.enabled = false
- charliecloud.enabled = false
- apptainer.enabled = false
+ conda.enabled = true
+ docker.enabled = false
+ singularity.enabled = false
+ podman.enabled = false
+ shifter.enabled = false
+ charliecloud.enabled = false
+ conda.channels = ['conda-forge', 'bioconda']
+ apptainer.enabled = false
}
mamba {
- conda.enabled = true
- conda.useMamba = true
- docker.enabled = false
- singularity.enabled = false
- podman.enabled = false
- shifter.enabled = false
- charliecloud.enabled = false
- apptainer.enabled = false
+ conda.enabled = true
+ conda.useMamba = true
+ docker.enabled = false
+ singularity.enabled = false
+ podman.enabled = false
+ shifter.enabled = false
+ charliecloud.enabled = false
+ apptainer.enabled = false
}
docker {
- docker.enabled = true
- conda.enabled = false
- singularity.enabled = false
- podman.enabled = false
- shifter.enabled = false
- charliecloud.enabled = false
- apptainer.enabled = false
- docker.runOptions = '-u $(id -u):$(id -g)'
+ docker.enabled = true
+ conda.enabled = false
+ singularity.enabled = false
+ podman.enabled = false
+ shifter.enabled = false
+ charliecloud.enabled = false
+ apptainer.enabled = false
+ docker.runOptions = '-u $(id -u):$(id -g)'
+ }
+ arm64 {
+ process.arch = 'arm64'
+ // TODO https://github.com/nf-core/modules/issues/6694
+ // For now if you're using arm64 you have to use wave for the sake of the maintainers
+ // wave profile
+ apptainer.ociAutoPull = true
+ singularity.ociAutoPull = true
+ wave.enabled = true
+ wave.freeze = true
+ wave.strategy = 'conda,container'
}
- arm {
- docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64'
+ emulate_amd64 {
+ docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64'
}
singularity {
- singularity.enabled = true
- singularity.autoMounts = true
- conda.enabled = false
- docker.enabled = false
- podman.enabled = false
- shifter.enabled = false
- charliecloud.enabled = false
- apptainer.enabled = false
+ singularity.enabled = true
+ singularity.autoMounts = true
+ conda.enabled = false
+ docker.enabled = false
+ podman.enabled = false
+ shifter.enabled = false
+ charliecloud.enabled = false
+ apptainer.enabled = false
}
podman {
- podman.enabled = true
- conda.enabled = false
- docker.enabled = false
- singularity.enabled = false
- shifter.enabled = false
- charliecloud.enabled = false
- apptainer.enabled = false
+ podman.enabled = true
+ conda.enabled = false
+ docker.enabled = false
+ singularity.enabled = false
+ shifter.enabled = false
+ charliecloud.enabled = false
+ apptainer.enabled = false
}
shifter {
- shifter.enabled = true
- conda.enabled = false
- docker.enabled = false
- singularity.enabled = false
- podman.enabled = false
- charliecloud.enabled = false
- apptainer.enabled = false
+ shifter.enabled = true
+ conda.enabled = false
+ docker.enabled = false
+ singularity.enabled = false
+ podman.enabled = false
+ charliecloud.enabled = false
+ apptainer.enabled = false
}
charliecloud {
- charliecloud.enabled = true
- conda.enabled = false
- docker.enabled = false
- singularity.enabled = false
- podman.enabled = false
- shifter.enabled = false
- apptainer.enabled = false
+ charliecloud.enabled = true
+ conda.enabled = false
+ docker.enabled = false
+ singularity.enabled = false
+ podman.enabled = false
+ shifter.enabled = false
+ apptainer.enabled = false
}
apptainer {
- apptainer.enabled = true
- apptainer.autoMounts = true
- conda.enabled = false
- docker.enabled = false
- singularity.enabled = false
- podman.enabled = false
- shifter.enabled = false
- charliecloud.enabled = false
+ apptainer.enabled = true
+ apptainer.autoMounts = true
+ conda.enabled = false
+ docker.enabled = false
+ singularity.enabled = false
+ podman.enabled = false
+ shifter.enabled = false
+ charliecloud.enabled = false
}
- gitpod {
- executor.name = 'local'
- executor.cpus = 4
- executor.memory = 8.GB
+ wave {
+ apptainer.ociAutoPull = true
+ singularity.ociAutoPull = true
+ wave.enabled = true
+ wave.freeze = true
+ wave.strategy = 'conda,container'
+ }
+ gpu {
+ docker.runOptions = '-u $(id -u):$(id -g) --gpus all'
+ apptainer.runOptions = '--nv'
+ singularity.runOptions = '--nv'
}
- slurm { includeConfig 'conf/slurm.config' }
test { includeConfig 'conf/test.config' }
test_full { includeConfig 'conf/test_full.config' }
-
+ slurm { includeConfig 'conf/slurm.config' }
}
-// Set default registry for Apptainer, Docker, Podman and Singularity independent of -profile
-// Will not be used unless Apptainer / Docker / Podman / Singularity are enabled
-// Set to your registry if you have a mirror of containers
-apptainer.registry = 'quay.io'
-docker.registry = 'quay.io'
-podman.registry = 'quay.io'
-singularity.registry = 'quay.io'
+// Load nf-core custom profiles from different institutions
+
+// If params.custom_config_base is set AND either the NXF_OFFLINE environment variable is not set or params.custom_config_base is a local path, the nfcore_custom.config file from the specified base path is included.
+
+includeConfig params.custom_config_base && (!System.getenv('NXF_OFFLINE') || !params.custom_config_base.startsWith('http')) ? "${params.custom_config_base}/nfcore_custom.config" : "/dev/null"
-// Nextflow plugins
-plugins {
- id 'nf-schema@2.2.0' // Validation of pipeline parameters and creation of an input channel from a sample sheet
-}
+
+// Load nf-cmgg/sampletracking custom profiles from different institutions.
+// TODO nf-core: Optionally, you can add a pipeline-specific nf-core config at https://github.com/nf-core/configs
+// includeConfig params.custom_config_base && (!System.getenv('NXF_OFFLINE') || !params.custom_config_base.startsWith('http')) ? "${params.custom_config_base}/pipeline/sampletracking.config" : "/dev/null"
+
+
+// Set default registry for Apptainer, Docker, Podman, Charliecloud and Singularity independent of -profile
+// Will not be used unless Apptainer / Docker / Podman / Charliecloud / Singularity are enabled
+// Set to your registry if you have a mirror of containers
+apptainer.registry = 'quay.io'
+docker.registry = 'quay.io'
+podman.registry = 'quay.io'
+singularity.registry = 'quay.io'
+charliecloud.registry = 'quay.io'
// Export these variables to prevent local Python/R libraries from conflicting with those in the container
// The JULIA depot path has been adjusted to a fixed path `/usr/local/share/julia` that needs to be used for packages in the container.
@@ -191,19 +213,28 @@ env {
JULIA_DEPOT_PATH = "/usr/local/share/julia"
}
-// Capture exit codes from upstream processes when piping
-process.shell = ['/bin/bash', '-euo', 'pipefail']
+// Set bash options
+process.shell = [
+ "bash",
+ "-C", // No clobber - prevent output redirection from overwriting files.
+ "-e", // Exit if a tool returns a non-zero status/exit code
+ "-u", // Treat unset variables and parameters as an error
+ "-o", // Returns the status of the last command to exit..
+ "pipefail" // ..with a non-zero status or zero if all successfully execute
+]
// Disable process selector warnings by default. Use debug profile to enable warnings.
nextflow.enable.configProcessNamesValidation = false
timeline {
- enabled = true
- file = "${params.outdir}/pipeline_info/execution_timeline_${new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')}.html"
+ enabled = true
+ overwrite = true
+ file = "${params.outdir}/pipeline_info/execution_timeline_${new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')}.html"
}
report {
- enabled = true
- file = "${params.outdir}/pipeline_info/execution_report_${new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')}.html"
+ enabled = true
+ overwrite = true
+ file = "${params.outdir}/pipeline_info/execution_report_${new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')}.html"
}
trace {
enabled = true
@@ -216,38 +247,39 @@ dag {
manifest {
name = 'nf-cmgg/sampletracking'
- author = """@matthdsm"""
+ contributors = [
+ [
+ name: 'Matthias De Smet',
+ affiliation: 'Ghent University Hospital',
+ email: 'matthias.desmet@ugent.be',
+ github: '@matthdsm',
+ contribution: ['author', 'maintainer'], // List of contribution types ('author', 'maintainer' or 'contributor')
+ orcid: 'https://orcid.org/0000-0003-2555-3114'
+ ],
+ [
+ name: 'Nicolas Vannieuwkerke',
+ affiliation: 'Ghent University Hospital',
+ email: 'nicolas.vannieuwkerke@ugent.be',
+ github: '@nvnieuwk',
+ contribution: ['maintainer'],
+ orcid: 'https://orcid.org/0009-0003-5619-1555'
+ ],
+ ]
homePage = 'https://github.com/nf-cmgg/sampletracking'
description = """CMGG Sampletracking workflow"""
mainScript = 'main.nf'
- nextflowVersion = '!>=24.04.1'
- version = '1.1.0dev'
+ defaultBranch = 'master'
+ nextflowVersion = '!>=25.10.0'
+ version = '1.0.1dev'
doi = ''
}
-validation {
- defaultIgnoreParams = ["genomes", "test_data", "igenomes_base"]
- help {
- enabled = true
- command = "nextflow run $manifest.name -profile --input samplesheet.csv --outdir "
- fullParameter = "help_full"
- showHiddenParameter = "show_hidden"
- beforeText = """
--\033[2m----------------------------------------------------\033[0m-
- \033[0;34m ///\033[0;32m/// \033[0m
-\033[0;34m ___ __ _ _ __ __ \033[0;34m ///\033[0;32m///// \033[0m
-\033[0;34m |\\ | |__ __ / ` | \\/ | / _` / _` \033[0;34m////\033[0;32m////// \033[0m
-\033[0;34m | \\| | \\__, | | \\__| \\__| \033[0;34m///\033[0;32m///// \033[0m
- \033[0;34m///\033[0;32m/// \033[0m
-\033[0;35m ${manifest.name} ${manifest.version}\033[0m
--\033[2m----------------------------------------------------\033[0m-
-"""
- }
- summary {
- beforeText = validation.help.beforeText
- hideParams = ["genomes"]
- }
+// Nextflow plugins
+plugins {
+ id 'nf-schema@2.6.1' // Validation of pipeline parameters and creation of an input channel from a sample sheet
}
-// Load modules.config for DSL2 module specific options
-includeConfig 'conf/modules.config'
+validation {
+ defaultIgnoreParams = []
+ monochromeLogs = params.monochrome_logs
+}
diff --git a/nextflow_schema.json b/nextflow_schema.json
index 4ec13d4..f2a0593 100644
--- a/nextflow_schema.json
+++ b/nextflow_schema.json
@@ -10,7 +10,7 @@
"type": "object",
"fa_icon": "fas fa-terminal",
"description": "Define where the pipeline should find input data and save output data.",
- "required": ["input", "outdir"],
+ "required": ["input"],
"properties": {
"input": {
"type": "string",
@@ -182,12 +182,6 @@
"description": "Less common options for the pipeline, typically set in a config file.",
"help_text": "These options are common to all nf-core pipelines and allow you to customise some of the core preferences for how the pipeline runs.\n\nTypically these options would be set in a Nextflow config file loaded for all pipeline runs, such as `~/.nextflow/config`.",
"properties": {
- "help": {
- "type": "boolean",
- "description": "Display help text.",
- "fa_icon": "fas fa-question-circle",
- "hidden": true
- },
"version": {
"type": "boolean",
"description": "Display version and exit.",
@@ -262,6 +256,31 @@
"default": true,
"fa_icon": "fas fa-check-square",
"hidden": true
+ },
+ "pipelines_testdata_base_path": {
+ "type": "string",
+ "fa_icon": "far fa-check-circle",
+ "description": "Base URL or local path to location of pipeline test dataset files",
+ "default": "https://raw.githubusercontent.com/nf-core/test-datasets/",
+ "hidden": true
+ },
+ "trace_report_suffix": {
+ "type": "string",
+ "fa_icon": "far fa-calendar",
+ "description": "Suffix to add to the trace report filename. Default is the date and time in the format yyyy-MM-dd_HH-mm-ss.",
+ "hidden": true
+ },
+ "help": {
+ "type": ["boolean", "string"],
+ "description": "Display the help message."
+ },
+ "help_full": {
+ "type": "boolean",
+ "description": "Display the full detailed help message."
+ },
+ "show_hidden": {
+ "type": "boolean",
+ "description": "Display hidden parameters in the help message (only works when --help or --help_full are provided)."
}
}
}
diff --git a/nf-test.config b/nf-test.config
index 32f55ba..3a1fff5 100644
--- a/nf-test.config
+++ b/nf-test.config
@@ -1,8 +1,24 @@
config {
+ // location for all nf-test tests
+ testsDir "."
- testsDir "tests"
- workDir ".nf-test"
- configFile "tests/config/nf-test.config"
- profile "docker"
+ // nf-test directory including temporary files for each test
+ workDir System.getenv("NFT_WORKDIR") ?: ".nf-test"
+ // location of an optional nextflow.config file specific for executing tests
+ configFile "tests/nextflow.config"
+
+ // ignore tests coming from the nf-core/modules repo
+ ignore 'modules/nf-core/**/tests/*', 'subworkflows/nf-core/**/tests/*'
+
+ // run all test with defined profile(s) from the main nextflow.config
+ profile "test"
+
+ // list of filenames or patterns that should trigger a full test run
+ triggers 'nextflow.config', 'nf-test.config', 'conf/test.config', 'tests/nextflow.config', 'tests/.nftignore'
+
+ // load the necessary plugins
+ plugins {
+ load "nft-utils@0.0.3"
+ }
}
diff --git a/pyproject.toml b/pyproject.toml
deleted file mode 100644
index 5611062..0000000
--- a/pyproject.toml
+++ /dev/null
@@ -1,15 +0,0 @@
-# Config file for Python. Mostly used to configure linting of bin/*.py with Ruff.
-# Should be kept the same as nf-core/tools to avoid fighting with template synchronisation.
-[tool.ruff]
-line-length = 120
-target-version = "py38"
-cache-dir = "~/.cache/ruff"
-
-[tool.ruff.lint]
-select = ["I", "E1", "E4", "E7", "E9", "F", "UP", "N"]
-
-[tool.ruff.lint.isort]
-known-first-party = ["nf_core"]
-
-[tool.ruff.lint.per-file-ignores]
-"__init__.py" = ["E402", "F401"]
diff --git a/ro-crate-metadata.json b/ro-crate-metadata.json
new file mode 100644
index 0000000..6b0d03a
--- /dev/null
+++ b/ro-crate-metadata.json
@@ -0,0 +1,350 @@
+{
+ "@context": [
+ "https://w3id.org/ro/crate/1.1/context",
+ {
+ "GithubService": "https://w3id.org/ro/terms/test#GithubService",
+ "JenkinsService": "https://w3id.org/ro/terms/test#JenkinsService",
+ "PlanemoEngine": "https://w3id.org/ro/terms/test#PlanemoEngine",
+ "TestDefinition": "https://w3id.org/ro/terms/test#TestDefinition",
+ "TestInstance": "https://w3id.org/ro/terms/test#TestInstance",
+ "TestService": "https://w3id.org/ro/terms/test#TestService",
+ "TestSuite": "https://w3id.org/ro/terms/test#TestSuite",
+ "TravisService": "https://w3id.org/ro/terms/test#TravisService",
+ "definition": "https://w3id.org/ro/terms/test#definition",
+ "engineVersion": "https://w3id.org/ro/terms/test#engineVersion",
+ "instance": "https://w3id.org/ro/terms/test#instance",
+ "resource": "https://w3id.org/ro/terms/test#resource",
+ "runsOn": "https://w3id.org/ro/terms/test#runsOn"
+ }
+ ],
+ "@graph": [
+ {
+ "@id": "./",
+ "@type": "Dataset",
+ "creativeWorkStatus": "InProgress",
+ "datePublished": "2025-10-30T12:51:04+00:00",
+ "description": "#  \n\n[](https://github.com/codespaces/new/nf-cmgg/sampletracking)\n[](https://github.com/nf-cmgg/sampletracking/actions/workflows/nf-test.yml)\n[](https://github.com/nf-cmgg/sampletracking/actions/workflows/linting.yml)[](https://doi.org/10.5281/zenodo.XXXXXXX)\n[](https://www.nf-test.com)\n\n[](https://www.nextflow.io/)\n[](https://github.com/nf-core/tools/releases/tag/3.4.1)\n[](https://docs.conda.io/en/latest/)\n[](https://www.docker.com/)\n[](https://sylabs.io/docs/)\n[](https://cloud.seqera.io/launch?pipeline=https://github.com/nf-cmgg/sampletracking)\n\n## Introduction\n\n**nf-cmgg/sampletracking** is a bioinformatics pipeline that performs sampletracking on sequencing samples. The pipeline does this by crosschecking SNP fingerprints and by checking if the expected sex matches the real sex of the sample.\n\n\n\n## Usage\n\n> [!NOTE]\n> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how to set-up Nextflow. Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline) with `-profile test` before running the workflow on actual data.\n\nFirst, prepare a samplesheet with your input data that looks as follows:\n\n`samplesheet.csv`:\n\n```csv\nsample,pool,sex,sample_bam,sample_bam_index,snp_bam,snp_bam_index\nSAMPLE1,POOL1,F,SAMPLE1.bam,SAMPLE1.bam.bai,SAMPLE1_snp.cram,SAMPLE2_snp.cram.crai\n```\n\nEach row represents a sample annotated with the pool it was sequenced in and the expected sex of this sample. It also needs a BAM/CRAM file with the sample data and (optionally) a FASTQ/BAM/CRAM file with SNP tracking data for the sample. 
Crosschecking fingerprints will be skipped when no SNP tracking data is provided.\n\nNow, you can run the pipeline using:\n\n```bash\nnextflow run nf-cmgg/sampletracking \\\n -profile \\\n --input samplesheet.csv \\\n --outdir \n```\n\n> [!WARNING]\n> Please provide pipeline parameters via the CLI or Nextflow `-params-file` option. Custom config files including those provided by the `-c` Nextflow option can be used to provide any configuration _**except for parameters**_;\n> see [docs](https://nf-co.re/usage/configuration#custom-configuration-files).\n\n## Credits\n\nnf-cmgg/sampletracking was originally written by [@matthdsm](https://github.com/matthdsm).\n\nWe thank the following people for their extensive assistance in the development of this pipeline:\n\n- [@nvnieuwk](https://github.com/nvnieuwk)\n\n## Contributions and Support\n\nIf you would like to contribute to this pipeline, please see the [contributing guidelines](.github/CONTRIBUTING.md).\n\n## Citations\n\nAn extensive list of references for the tools used by the pipeline can be found in the [`CITATIONS.md`](CITATIONS.md) file.\n\nThis pipeline uses code and infrastructure developed and maintained by the [nf-core](https://nf-co.re) community, reused here under the [MIT license](https://github.com/nf-core/tools/blob/master/LICENSE).\n\n> **The nf-core framework for community-curated bioinformatics pipelines.**\n>\n> Philip Ewels, Alexander Peltzer, Sven Fillinger, Harshil Patel, Johannes Alneberg, Andreas Wilm, Maxime Ulysse Garcia, Paolo Di Tommaso & Sven Nahnsen.\n>\n> _Nat Biotechnol._ 2020 Feb 13. doi: [10.1038/s41587-020-0439-x](https://dx.doi.org/10.1038/s41587-020-0439-x).\n",
+ "hasPart": [
+ {
+ "@id": "main.nf"
+ },
+ {
+ "@id": "docs/images/metro_map.png"
+ },
+ {
+ "@id": "assets/"
+ },
+ {
+ "@id": "conf/"
+ },
+ {
+ "@id": "docs/"
+ },
+ {
+ "@id": "docs/images/"
+ },
+ {
+ "@id": "modules/"
+ },
+ {
+ "@id": "modules/local/"
+ },
+ {
+ "@id": "modules/nf-core/"
+ },
+ {
+ "@id": "workflows/"
+ },
+ {
+ "@id": "subworkflows/"
+ },
+ {
+ "@id": "nextflow.config"
+ },
+ {
+ "@id": "README.md"
+ },
+ {
+ "@id": "nextflow_schema.json"
+ },
+ {
+ "@id": "CHANGELOG.md"
+ },
+ {
+ "@id": "LICENSE"
+ },
+ {
+ "@id": "CITATIONS.md"
+ },
+ {
+ "@id": "modules.json"
+ },
+ {
+ "@id": "docs/usage.md"
+ },
+ {
+ "@id": "docs/output.md"
+ },
+ {
+ "@id": ".nf-core.yml"
+ },
+ {
+ "@id": ".pre-commit-config.yaml"
+ },
+ {
+ "@id": ".prettierignore"
+ }
+ ],
+ "isBasedOn": "https://github.com/nf-cmgg/sampletracking",
+ "license": "MIT",
+ "mainEntity": {
+ "@id": "main.nf"
+ },
+ "mentions": [
+ {
+ "@id": "#be1fa202-2d41-46d9-9c66-d40def7fa808"
+ }
+ ],
+ "name": "nf-cmgg/sampletracking"
+ },
+ {
+ "@id": "ro-crate-metadata.json",
+ "@type": "CreativeWork",
+ "about": {
+ "@id": "./"
+ },
+ "conformsTo": [
+ {
+ "@id": "https://w3id.org/ro/crate/1.1"
+ },
+ {
+ "@id": "https://w3id.org/workflowhub/workflow-ro-crate/1.0"
+ }
+ ]
+ },
+ {
+ "@id": "main.nf",
+ "@type": [
+ "File",
+ "SoftwareSourceCode",
+ "ComputationalWorkflow"
+ ],
+ "creator": [
+ {
+ "@id": "https://orcid.org/0000-0003-2555-3114"
+ },
+ {
+ "@id": "https://orcid.org/0009-0003-5619-1555"
+ }
+ ],
+ "dateCreated": "",
+ "dateModified": "2025-10-30T13:51:04Z",
+ "dct:conformsTo": "https://bioschemas.org/profiles/ComputationalWorkflow/1.0-RELEASE/",
+ "image": {
+ "@id": "docs/images/metro_map.png"
+ },
+ "keywords": [
+ "nf-core",
+ "nextflow"
+ ],
+ "license": [
+ "MIT"
+ ],
+ "maintainer": [
+ {
+ "@id": "https://orcid.org/0000-0003-2555-3114"
+ }
+ ],
+ "name": [
+ "nf-cmgg/sampletracking"
+ ],
+ "programmingLanguage": {
+ "@id": "https://w3id.org/workflowhub/workflow-ro-crate#nextflow"
+ },
+ "sdPublisher": {
+ "@id": "https://nf-co.re/"
+ },
+ "url": [
+ "https://github.com/nf-cmgg/sampletracking",
+ "https://nf-co.re/nf-cmgg/sampletracking/dev/"
+ ],
+ "version": [
+ "1.0.1dev"
+ ]
+ },
+ {
+ "@id": "https://w3id.org/workflowhub/workflow-ro-crate#nextflow",
+ "@type": "ComputerLanguage",
+ "identifier": {
+ "@id": "https://www.nextflow.io/"
+ },
+ "name": "Nextflow",
+ "url": {
+ "@id": "https://www.nextflow.io/"
+ },
+ "version": "!>=25.04.0"
+ },
+ {
+ "@id": "docs/images/metro_map.png",
+ "@type": [
+ "File",
+ "ImageObject"
+ ],
+ "name": "Workflow diagram"
+ },
+ {
+ "@id": "#be1fa202-2d41-46d9-9c66-d40def7fa808",
+ "@type": "TestSuite",
+ "instance": [
+ {
+ "@id": "#3091679a-cb1d-4767-91c4-bb534b38b123"
+ }
+ ],
+ "mainEntity": {
+ "@id": "main.nf"
+ },
+ "name": "Test suite for nf-cmgg/sampletracking"
+ },
+ {
+ "@id": "#3091679a-cb1d-4767-91c4-bb534b38b123",
+ "@type": "TestInstance",
+ "name": "GitHub Actions workflow for testing nf-cmgg/sampletracking",
+ "resource": "repos/nf-cmgg/sampletracking/actions/workflows/nf-test.yml",
+ "runsOn": {
+ "@id": "https://w3id.org/ro/terms/test#GithubService"
+ },
+ "url": "https://api.github.com"
+ },
+ {
+ "@id": "https://w3id.org/ro/terms/test#GithubService",
+ "@type": "TestService",
+ "name": "Github Actions",
+ "url": {
+ "@id": "https://github.com"
+ }
+ },
+ {
+ "@id": "assets/",
+ "@type": "Dataset",
+ "description": "Additional files"
+ },
+ {
+ "@id": "conf/",
+ "@type": "Dataset",
+ "description": "Configuration files"
+ },
+ {
+ "@id": "docs/",
+ "@type": "Dataset",
+ "description": "Markdown files for documenting the pipeline"
+ },
+ {
+ "@id": "docs/images/",
+ "@type": "Dataset",
+ "description": "Images for the documentation files"
+ },
+ {
+ "@id": "modules/",
+ "@type": "Dataset",
+ "description": "Modules used by the pipeline"
+ },
+ {
+ "@id": "modules/local/",
+ "@type": "Dataset",
+ "description": "Pipeline-specific modules"
+ },
+ {
+ "@id": "modules/nf-core/",
+ "@type": "Dataset",
+ "description": "nf-core modules"
+ },
+ {
+ "@id": "workflows/",
+ "@type": "Dataset",
+ "description": "Main pipeline workflows to be executed in main.nf"
+ },
+ {
+ "@id": "subworkflows/",
+ "@type": "Dataset",
+ "description": "Smaller subworkflows"
+ },
+ {
+ "@id": "nextflow.config",
+ "@type": "File",
+ "description": "Main Nextflow configuration file"
+ },
+ {
+ "@id": "README.md",
+ "@type": "File",
+ "description": "Basic pipeline usage information"
+ },
+ {
+ "@id": "nextflow_schema.json",
+ "@type": "File",
+ "description": "JSON schema for pipeline parameter specification"
+ },
+ {
+ "@id": "CHANGELOG.md",
+ "@type": "File",
+ "description": "Information on changes made to the pipeline"
+ },
+ {
+ "@id": "LICENSE",
+ "@type": "File",
+ "description": "The license - should be MIT"
+ },
+ {
+ "@id": "CITATIONS.md",
+ "@type": "File",
+ "description": "Citations needed when using the pipeline"
+ },
+ {
+ "@id": "modules.json",
+ "@type": "File",
+ "description": "Version information for modules from nf-core/modules"
+ },
+ {
+ "@id": "docs/usage.md",
+ "@type": "File",
+ "description": "Usage documentation"
+ },
+ {
+ "@id": "docs/output.md",
+ "@type": "File",
+ "description": "Output documentation"
+ },
+ {
+ "@id": ".nf-core.yml",
+ "@type": "File",
+ "description": "nf-core configuration file, configuring template features and linting rules"
+ },
+ {
+ "@id": ".pre-commit-config.yaml",
+ "@type": "File",
+ "description": "Configuration file for pre-commit hooks"
+ },
+ {
+ "@id": ".prettierignore",
+ "@type": "File",
+ "description": "Ignore file for prettier"
+ },
+ {
+ "@id": "https://nf-co.re/",
+ "@type": "Organization",
+ "name": "nf-core",
+ "url": "https://nf-co.re/"
+ },
+ {
+ "@id": "https://orcid.org/0000-0003-2555-3114",
+ "@type": "Person",
+ "email": "11850640+matthdsm@users.noreply.github.com",
+ "name": "Matthias De Smet"
+ },
+ {
+ "@id": "https://orcid.org/0009-0003-5619-1555",
+ "@type": "Person",
+ "email": "101190534+nvnieuwk@users.noreply.github.com",
+ "name": "Nicolas Vannieuwkerke"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/subworkflows/local/utils_nfcore_sampletracking_pipeline/main.nf b/subworkflows/local/utils_nfcore_sampletracking_pipeline/main.nf
index 302b20e..960f3b5 100644
--- a/subworkflows/local/utils_nfcore_sampletracking_pipeline/main.nf
+++ b/subworkflows/local/utils_nfcore_sampletracking_pipeline/main.nf
@@ -11,11 +11,12 @@
include { UTILS_NFSCHEMA_PLUGIN } from '../../nf-core/utils_nfschema_plugin'
include { paramsSummaryMap } from 'plugin/nf-schema'
include { samplesheetToList } from 'plugin/nf-schema'
-include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline'
+include { paramsHelp } from 'plugin/nf-schema'
include { completionEmail } from '../../nf-core/utils_nfcore_pipeline'
include { completionSummary } from '../../nf-core/utils_nfcore_pipeline'
include { imNotification } from '../../nf-core/utils_nfcore_pipeline'
include { UTILS_NFCORE_PIPELINE } from '../../nf-core/utils_nfcore_pipeline'
+include { UTILS_NEXTFLOW_PIPELINE } from '../../nf-core/utils_nextflow_pipeline'
/*
========================================================================================
@@ -28,9 +29,13 @@ workflow PIPELINE_INITIALISATION {
take:
version // boolean: Display version and exit
validate_params // boolean: Boolean whether to validate parameters against the schema at runtime
+ monochrome_logs // boolean: Do not use coloured log outputs
nextflow_cli_args // array: List of positional nextflow CLI args
outdir // string: The output directory where the results will be saved
input // string: Path to input samplesheet
+ help // boolean: Display help message and exit
+ help_full // boolean: Show the full help message
+ show_hidden // boolean: Show hidden parameters in the help message
main:
@@ -49,10 +54,18 @@ workflow PIPELINE_INITIALISATION {
//
// Validate parameters and generate parameter summary to stdout
//
+ command = "nextflow run ${workflow.manifest.name} -profile --input samplesheet.csv --outdir "
+
UTILS_NFSCHEMA_PLUGIN (
workflow,
validate_params,
- []
+ null,
+ help,
+ help_full,
+ show_hidden,
+ "",
+ "",
+ command
)
//
@@ -64,7 +77,7 @@ workflow PIPELINE_INITIALISATION {
//
// Custom validation for pipeline parameters
//
- validateInputParameters()
+ //validateInputParameters()
//
// Create channel from input file provided through params.input
@@ -108,34 +121,46 @@ workflow PIPELINE_COMPLETION {
multiqc_report // string: Path to MultiQC report
main:
-
summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json")
+ def multiqc_reports = multiqc_report.toList()
//
// Completion email and summary
//
workflow.onComplete {
if (email || email_on_fail) {
- completionEmail(summary_params, email, email_on_fail, plaintext_email, outdir, monochrome_logs, multiqc_report.toList())
+ completionEmail(
+ summary_params,
+ email,
+ email_on_fail,
+ plaintext_email,
+ outdir,
+ monochrome_logs,
+ multiqc_reports.getVal(),
+ )
}
completionSummary(monochrome_logs)
-
if (hook_url) {
imNotification(summary_params, hook_url)
}
}
+
+ workflow.onError {
+ log.error "Pipeline failed. Please refer to troubleshooting docs: https://nf-co.re/docs/usage/troubleshooting"
+ }
}
/*
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
FUNCTIONS
-========================================================================================
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/
//
// Check and validate pipeline parameters
//
def validateInputParameters() {
+ genomeExistsError()
}
//
@@ -145,14 +170,38 @@ def validateInputSamplesheet(input) {
def (metas, fastqs) = input[1..2]
// Check that multiple runs of the same sample are of the same datatype i.e. single-end / paired-end
- def endedness_ok = metas.collect{ it -> it.single_end }.unique().size == 1
+ def endedness_ok = metas.collect{ meta -> meta.single_end }.unique().size == 1
if (!endedness_ok) {
error("Please check input samplesheet -> Multiple runs of a sample must be of the same datatype i.e. single-end or paired-end: ${metas[0].id}")
}
return [ metas[0], fastqs ]
}
+//
+// Get attribute from genome config file e.g. fasta
+//
+def getGenomeAttribute(attribute) {
+ if (params.genomes && params.genome && params.genomes.containsKey(params.genome)) {
+ if (params.genomes[ params.genome ].containsKey(attribute)) {
+ return params.genomes[ params.genome ][ attribute ]
+ }
+ }
+ return null
+}
+//
+// Exit pipeline if incorrect --genome key provided
+//
+def genomeExistsError() {
+ if (params.genomes && params.genome && !params.genomes.containsKey(params.genome)) {
+ def error_string = "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n" +
+ " Genome '${params.genome}' not found in any config files provided to the pipeline.\n" +
+ " Currently, the available genome keys are:\n" +
+ " ${params.genomes.keySet().join(", ")}\n" +
+ "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+ error(error_string)
+ }
+}
//
// Generate methods description for MultiQC
//
@@ -183,14 +232,24 @@ def toolBibliographyText() {
}
def methodsDescriptionText(mqc_methods_yaml) {
- // Convert to a named map so can be used as with familar NXF ${workflow} variable syntax in the MultiQC YML file
+ // Convert to a named map so can be used as with familiar NXF ${workflow} variable syntax in the MultiQC YML file
def meta = [:]
meta.workflow = workflow.toMap()
meta["manifest_map"] = workflow.manifest.toMap()
// Pipeline DOI
- meta["doi_text"] = meta.manifest_map.doi ? "(doi: ${meta.manifest_map.doi})" : ""
- meta["nodoi_text"] = meta.manifest_map.doi ? "": "If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used. "
+ if (meta.manifest_map.doi) {
+ // Using a loop to handle multiple DOIs
+ // Removing `https://doi.org/` to handle pipelines using DOIs vs DOI resolvers
+ // Removing ` ` since the manifest.doi is a string and not a proper list
+ def temp_doi_ref = ""
+ def manifest_doi = meta.manifest_map.doi.tokenize(",")
+ manifest_doi.each { doi_ref ->
+ temp_doi_ref += "(doi: ${doi_ref.replace("https://doi.org/", "").replace(" ", "")}), "
+ }
+ meta["doi_text"] = temp_doi_ref.substring(0, temp_doi_ref.length() - 2)
+ } else meta["doi_text"] = ""
+ meta["nodoi_text"] = meta.manifest_map.doi ? "" : "If available, make sure to update the text to include the Zenodo DOI of version of the pipeline used. "
// Tool references
meta["tool_citations"] = ""
diff --git a/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml b/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml
deleted file mode 100644
index f847611..0000000
--- a/subworkflows/nf-core/utils_nextflow_pipeline/tests/tags.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-subworkflows/utils_nextflow_pipeline:
- - subworkflows/nf-core/utils_nextflow_pipeline/**
diff --git a/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml b/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml
deleted file mode 100644
index ac8523c..0000000
--- a/subworkflows/nf-core/utils_nfcore_pipeline/tests/tags.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-subworkflows/utils_nfcore_pipeline:
- - subworkflows/nf-core/utils_nfcore_pipeline/**
diff --git a/subworkflows/nf-core/utils_nfschema_plugin/main.nf b/subworkflows/nf-core/utils_nfschema_plugin/main.nf
index 4994303..ee4738c 100644
--- a/subworkflows/nf-core/utils_nfschema_plugin/main.nf
+++ b/subworkflows/nf-core/utils_nfschema_plugin/main.nf
@@ -4,6 +4,7 @@
include { paramsSummaryLog } from 'plugin/nf-schema'
include { validateParameters } from 'plugin/nf-schema'
+include { paramsHelp } from 'plugin/nf-schema'
workflow UTILS_NFSCHEMA_PLUGIN {
@@ -15,29 +16,56 @@ workflow UTILS_NFSCHEMA_PLUGIN {
// when this input is empty it will automatically use the configured schema or
// "${projectDir}/nextflow_schema.json" as default. This input should not be empty
// for meta pipelines
+ help // boolean: show help message
+ help_full // boolean: show full help message
+ show_hidden // boolean: show hidden parameters in help message
+ before_text // string: text to show before the help message and parameters summary
+ after_text // string: text to show after the help message and parameters summary
+ command // string: an example command of the pipeline
main:
+ if(help || help_full) {
+ help_options = [
+ beforeText: before_text,
+ afterText: after_text,
+ command: command,
+ showHidden: show_hidden,
+ fullHelp: help_full,
+ ]
+ if(parameters_schema) {
+ help_options << [parametersSchema: parameters_schema]
+ }
+ log.info paramsHelp(
+ help_options,
+ params.help instanceof String ? params.help : "",
+ )
+ exit 0
+ }
+
//
// Print parameter summary to stdout. This will display the parameters
// that differ from the default given in the JSON schema
//
+
+ summary_options = [:]
if(parameters_schema) {
- log.info paramsSummaryLog(input_workflow, parameters_schema:parameters_schema)
- } else {
- log.info paramsSummaryLog(input_workflow)
+ summary_options << [parametersSchema: parameters_schema]
}
+ log.info before_text
+ log.info paramsSummaryLog(summary_options, input_workflow)
+ log.info after_text
//
// Validate the parameters using nextflow_schema.json or the schema
// given via the validation.parametersSchema configuration option
//
if(validate_params) {
+ validateOptions = [:]
if(parameters_schema) {
- validateParameters(parameters_schema:parameters_schema)
- } else {
- validateParameters()
+ validateOptions << [parametersSchema: parameters_schema]
}
+ validateParameters(validateOptions)
}
emit:
diff --git a/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test b/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test
index 8fb3016..c977917 100644
--- a/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test
+++ b/subworkflows/nf-core/utils_nfschema_plugin/tests/main.nf.test
@@ -25,6 +25,12 @@ nextflow_workflow {
input[0] = workflow
input[1] = validate_params
input[2] = ""
+ input[3] = false
+ input[4] = false
+ input[5] = false
+ input[6] = ""
+ input[7] = ""
+ input[8] = ""
"""
}
}
@@ -51,6 +57,12 @@ nextflow_workflow {
input[0] = workflow
input[1] = validate_params
input[2] = ""
+ input[3] = false
+ input[4] = false
+ input[5] = false
+ input[6] = ""
+ input[7] = ""
+ input[8] = ""
"""
}
}
@@ -77,6 +89,12 @@ nextflow_workflow {
input[0] = workflow
input[1] = validate_params
input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json"
+ input[3] = false
+ input[4] = false
+ input[5] = false
+ input[6] = ""
+ input[7] = ""
+ input[8] = ""
"""
}
}
@@ -103,6 +121,12 @@ nextflow_workflow {
input[0] = workflow
input[1] = validate_params
input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json"
+ input[3] = false
+ input[4] = false
+ input[5] = false
+ input[6] = ""
+ input[7] = ""
+ input[8] = ""
"""
}
}
@@ -114,4 +138,36 @@ nextflow_workflow {
)
}
}
+
+ test("Should create a help message") {
+
+ when {
+
+ params {
+ test_data = ''
+ outdir = null
+ }
+
+ workflow {
+ """
+ validate_params = true
+ input[0] = workflow
+ input[1] = validate_params
+ input[2] = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json"
+ input[3] = true
+ input[4] = false
+ input[5] = false
+ input[6] = "Before"
+ input[7] = "After"
+ input[8] = "nextflow run test/test"
+ """
+ }
+ }
+
+ then {
+ assertAll(
+ { assert workflow.success }
+ )
+ }
+ }
}
diff --git a/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config b/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config
index 0907ac5..f6537cc 100644
--- a/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config
+++ b/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow.config
@@ -1,8 +1,8 @@
plugins {
- id "nf-schema@2.1.0"
+ id "nf-schema@2.6.1"
}
validation {
parametersSchema = "${projectDir}/subworkflows/nf-core/utils_nfschema_plugin/tests/nextflow_schema.json"
monochromeLogs = true
-}
\ No newline at end of file
+}
diff --git a/tests/.nftignore b/tests/.nftignore
new file mode 100644
index 0000000..e128a12
--- /dev/null
+++ b/tests/.nftignore
@@ -0,0 +1,12 @@
+.DS_Store
+multiqc/multiqc_data/fastqc_top_overrepresented_sequences_table.txt
+multiqc/multiqc_data/multiqc.parquet
+multiqc/multiqc_data/multiqc.log
+multiqc/multiqc_data/multiqc_data.json
+multiqc/multiqc_data/multiqc_sources.txt
+multiqc/multiqc_data/multiqc_software_versions.txt
+multiqc/multiqc_data/llms-full.txt
+multiqc/multiqc_plots/{svg,pdf,png}/*.{svg,pdf,png}
+multiqc/multiqc_report.html
+fastqc/*_fastqc.{html,zip}
+pipeline_info/*.{html,json,txt,yml}
diff --git a/tests/config/nf-test.config b/tests/config/nf-test.config
deleted file mode 100644
index b454956..0000000
--- a/tests/config/nf-test.config
+++ /dev/null
@@ -1,17 +0,0 @@
-aws {
- client {
- endpoint = "https://s3.ugent.be"
- protocol = "https"
- s3PathStyleAccess = true
- connectionTimeout = 60000
- }
-}
-
-process {
- // Limit resources so that this can run on GitHub Actions
- resourceLimits = [
- cpus : 2,
- memory: 6.GB,
- time : 6.h
- ]
-}
diff --git a/tests/workflows/sampletracking.nf.test b/tests/default.nf.test
similarity index 99%
rename from tests/workflows/sampletracking.nf.test
rename to tests/default.nf.test
index 7f4d3f5..98d1bf5 100644
--- a/tests/workflows/sampletracking.nf.test
+++ b/tests/default.nf.test
@@ -12,7 +12,7 @@ nextflow_workflow {
options "-stub"
setup {
run("BWA_INDEX") {
- script "../../modules/nf-core/bwa/index/main.nf"
+ script "../modules/nf-core/bwa/index/main.nf"
process {
"""
input[0] = [
diff --git a/tests/workflows/sampletracking.nf.test.snap b/tests/default.nf.test.snap
similarity index 90%
rename from tests/workflows/sampletracking.nf.test.snap
rename to tests/default.nf.test.snap
index aa7020d..599939f 100644
--- a/tests/workflows/sampletracking.nf.test.snap
+++ b/tests/default.nf.test.snap
@@ -35,7 +35,7 @@
],
"4": [
"versions.yml:md5,63915ecd9cc27a8026cca35ada889a22",
- "versions.yml:md5,dac5a146f64564be62294d29fe21cb67"
+ "versions.yml:md5,7674f112f39abc693d4b32137dfe0d8f"
],
"crosscheck_metrics": [
@@ -70,15 +70,15 @@
],
"versions": [
"versions.yml:md5,63915ecd9cc27a8026cca35ada889a22",
- "versions.yml:md5,dac5a146f64564be62294d29fe21cb67"
+ "versions.yml:md5,7674f112f39abc693d4b32137dfe0d8f"
]
}
],
"meta": {
"nf-test": "0.9.2",
- "nextflow": "25.04.6"
+ "nextflow": "25.10.0"
},
- "timestamp": "2025-08-11T15:33:22.286114144"
+ "timestamp": "2025-11-13T13:50:43.009108348"
},
"Should run without failures": {
"content": [
@@ -124,9 +124,9 @@
],
"4": [
"versions.yml:md5,63915ecd9cc27a8026cca35ada889a22",
- "versions.yml:md5,7442362a1e457dad7ce796c703bd6380",
- "versions.yml:md5,ab16dda2f91f60705355cab06b0145bf",
- "versions.yml:md5,dac5a146f64564be62294d29fe21cb67"
+ "versions.yml:md5,7674f112f39abc693d4b32137dfe0d8f",
+ "versions.yml:md5,876348456fddc25acc7a59427a0bb5e5",
+ "versions.yml:md5,8b795c8654423b0a84fa18df4e35e688"
],
"crosscheck_metrics": [
[
@@ -169,16 +169,16 @@
],
"versions": [
"versions.yml:md5,63915ecd9cc27a8026cca35ada889a22",
- "versions.yml:md5,7442362a1e457dad7ce796c703bd6380",
- "versions.yml:md5,ab16dda2f91f60705355cab06b0145bf",
- "versions.yml:md5,dac5a146f64564be62294d29fe21cb67"
+ "versions.yml:md5,7674f112f39abc693d4b32137dfe0d8f",
+ "versions.yml:md5,876348456fddc25acc7a59427a0bb5e5",
+ "versions.yml:md5,8b795c8654423b0a84fa18df4e35e688"
]
}
],
"meta": {
"nf-test": "0.9.2",
- "nextflow": "25.04.6"
+ "nextflow": "25.10.0"
},
- "timestamp": "2025-08-11T15:32:16.596084438"
+ "timestamp": "2025-11-13T13:50:05.39665767"
}
}
\ No newline at end of file
diff --git a/tests/main.nf.test b/tests/main.nf.test
deleted file mode 100644
index 4d79f5e..0000000
--- a/tests/main.nf.test
+++ /dev/null
@@ -1,30 +0,0 @@
-nextflow_pipeline {
-
- name "Test Main Workflow"
- script "main.nf"
-
- tag "pipeline"
- tag "pipeline/main"
-
- test("Should run without failures") {
-
- options "-stub"
-
- when {
- params {
- input = "${projectDir}/assets/samplesheet.csv"
- bwa_index = "s3://test-data/genomics/homo_sapiens/genome/bwa/"
- fasta = "https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/genome/seq/GCA_000001405.15_GRCh38_full_plus_hs38d1_analysis_set_chr21.fna"
- fai = "https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/genome/seq/GCA_000001405.15_GRCh38_full_plus_hs38d1_analysis_set_chr21.fna.fai"
- haplotype_map = "https://github.com/nf-cmgg/test-datasets/raw/sampletracking/data/genomics/homo_sapiens/genome/picard/haplotype_map.txt"
- }
- }
-
- then {
- assert workflow.success
- }
-
- }
-
-}
-
diff --git a/tests/nextflow.config b/tests/nextflow.config
index c19b1ad..11330b8 100644
--- a/tests/nextflow.config
+++ b/tests/nextflow.config
@@ -1,5 +1,18 @@
/*
========================================================================================
- Nextflow config file for running tests
+ Nextflow config file for running nf-test tests
========================================================================================
*/
+params {
+ modules_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/'
+ pipelines_testdata_base_path = 'https://raw.githubusercontent.com/nf-core/test-datasets/refs/heads/sampletracking'
+}
+
+aws {
+ client {
+ endpoint = "https://s3.ugent.be"
+ protocol = "https"
+ s3PathStyleAccess = true
+ connectionTimeout = 60000
+ }
+}
diff --git a/utils/nf-cmgg-sampletracking/go.mod b/utils/nf-cmgg-sampletracking/go.mod
deleted file mode 100644
index 6459bdd..0000000
--- a/utils/nf-cmgg-sampletracking/go.mod
+++ /dev/null
@@ -1,27 +0,0 @@
-module nf-cmgg-sampletracking
-
-go 1.22.1
-
-require (
- github.com/antihax/optional v1.0.0
- github.com/google/uuid v1.6.0
- github.com/joho/godotenv v1.5.1
- github.com/sirupsen/logrus v1.9.3
- github.com/urfave/cli v1.22.14
-)
-
-require (
- github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect
- github.com/golang/protobuf v1.5.3 // indirect
- github.com/russross/blackfriday/v2 v2.1.0 // indirect
- github.com/urfave/cli/v2 v2.27.1 // indirect
- github.com/urfave/cli/v3 v3.0.0-alpha9 // indirect
- github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
- gitlab.cmgg.be/cmgg/smaple-go v0.0.0-20240311122240-70203adc8b5a // indirect
- gitlab.cmgg.be/cmgg/smapleclientgo v0.0.0-20240311124656-a61bafb07235 // indirect
- golang.org/x/net v0.22.0 // indirect
- golang.org/x/oauth2 v0.18.0 // indirect
- golang.org/x/sys v0.18.0 // indirect
- google.golang.org/appengine v1.6.7 // indirect
- google.golang.org/protobuf v1.31.0 // indirect
-)
diff --git a/utils/nf-cmgg-sampletracking/go.sum b/utils/nf-cmgg-sampletracking/go.sum
deleted file mode 100644
index 65e1b87..0000000
--- a/utils/nf-cmgg-sampletracking/go.sum
+++ /dev/null
@@ -1,71 +0,0 @@
-github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
-github.com/antihax/optional v1.0.0 h1:xK2lYat7ZLaVVcIuj82J8kIro4V6kDe0AUDFboUCwcg=
-github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
-github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
-github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
-github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM=
-github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
-github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
-github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
-github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
-github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
-github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
-github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
-github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
-github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
-github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
-github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
-github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
-github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
-github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
-github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
-github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
-github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
-github.com/urfave/cli v1.22.14 h1:ebbhrRiGK2i4naQJr+1Xj92HXZCrK7MsyTS/ob3HnAk=
-github.com/urfave/cli v1.22.14/go.mod h1:X0eDS6pD6Exaclxm99NJ3FiCDRED7vIHpx2mDOHLvkA=
-github.com/urfave/cli/v2 v2.27.1 h1:8xSQ6szndafKVRmfyeUMxkNUJQMjL1F2zmsZ+qHpfho=
-github.com/urfave/cli/v2 v2.27.1/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ=
-github.com/urfave/cli/v3 v3.0.0-alpha9 h1:P0RMy5fQm1AslQS+XCmy9UknDXctOmG/q/FZkUFnJSo=
-github.com/urfave/cli/v3 v3.0.0-alpha9/go.mod h1:0kK/RUFHyh+yIKSfWxwheGndfnrvYSmYFVeKCh03ZUc=
-github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU=
-github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8=
-gitlab.cmgg.be/cmgg/smaple-go v0.0.0-20240311122240-70203adc8b5a h1:EiUhlD4SsJj4O4I00fgTg+J47VVipWoNrH8i8442hcM=
-gitlab.cmgg.be/cmgg/smaple-go v0.0.0-20240311122240-70203adc8b5a/go.mod h1:sGLUTrCCox7NjGUP8cjXZvMK3jWIxItrK06/SqfdLP4=
-gitlab.cmgg.be/cmgg/smapleclientgo v0.0.0-20240311124656-a61bafb07235 h1:aUDTIb3+xb9rpXxnLoRnOXZEvetnXR4f4aOvV32bWII=
-gitlab.cmgg.be/cmgg/smapleclientgo v0.0.0-20240311124656-a61bafb07235/go.mod h1:TtiucYaoOZ08HosIpyq8R4sXyYyPqGZ0qk7Grutz08I=
-golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
-golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc=
-golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
-golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI=
-golang.org/x/oauth2 v0.18.0/go.mod h1:Wf7knwG0MPoWIMMBgFlEaSUDaKskp0dCfrlJRJXbBi8=
-golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8 h1:0A+M6Uqn+Eje4kHMK80dtF3JCXC4ykBgQG4Fe06QRhQ=
-golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
-golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
-golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
-golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
-google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
-google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
-google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
-google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
-google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
-gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
-gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/utils/nf-cmgg-sampletracking/nf-cmgg-sampletracking.go b/utils/nf-cmgg-sampletracking/nf-cmgg-sampletracking.go
deleted file mode 100644
index 18d3e34..0000000
--- a/utils/nf-cmgg-sampletracking/nf-cmgg-sampletracking.go
+++ /dev/null
@@ -1,297 +0,0 @@
-package main
-
-import (
- "context"
- "encoding/csv"
- "io"
- "io/fs"
- "os"
- "path/filepath"
- "strings"
-
- smaple_api_client "gitlab.cmgg.be/cmgg/smapleclientgo"
-
- "github.com/antihax/optional"
- "github.com/google/uuid"
- "github.com/joho/godotenv"
- log "github.com/sirupsen/logrus"
- cli "github.com/urfave/cli/v2"
-)
-
-// Structs
-type samplesheetSample struct {
- // Sample name
- sample string `json:"sample"`
- pool string `json:"pool"`
- sampleBam string `json:"sample_bam"`
- sampleBamIndex string `json:"sample_bam_index"`
- snpFastq1 string `json:"snp_fastq_1"`
- snpFastq2 string `json:"snp_fastq_2"`
- snpBam string `json:"snp_bam"`
- snpBamIndex string `json:"snp_bam_index"`
-}
-
-func (s samplesheetSample) header() []string {
- return []string{"sample", "pool", "sample_bam", "sample_bam_index", "snp_fastq_1", "snp_fastq_2", "snp_bam", "snp_bam_index"}
-}
-func (s samplesheetSample) csv() []string {
- return []string{s.sample, s.pool, s.sampleBam, s.sampleBamIndex, s.snpFastq1, s.snpFastq2, s.snpBam, s.snpBamIndex}
-}
-
-func main() {
- // Load environment variables from .env file
- err := godotenv.Load()
- if err != nil {
- log.Warn("Error loading .env file")
- }
-
- var runName string
- var smapleUsername string
- var smaplePassword string
- var smapleUrl string
-
- var samplesDir cli.StringSlice
- var snpDir cli.StringSlice
-
- app := &cli.App{
- Name: "nf-cmgg-sampletracking",
- Usage: "Generate a sample sheet for the nf-cmgg/sampletracking Nextflow pipeline",
- Flags: []cli.Flag{
- &cli.StringFlag{
- Name: "run",
- Usage: "Run Name",
- Aliases: []string{"r"},
- Required: true,
- Destination: &runName,
- },
- &cli.StringSliceFlag{
- Name: "samples_directory",
- Usage: "Directory containing the full size samples",
- Aliases: []string{"w"},
- Required: true,
- Destination: &samplesDir,
- },
- &cli.StringSliceFlag{
- Name: "snp_directory",
- Usage: "Directory containing the snp samples",
- Aliases: []string{"s"},
- Required: true,
- Destination: &snpDir,
- },
- &cli.StringFlag{
- Name: "smaple_username",
- Usage: "Smaple API username",
- EnvVars: []string{"SMAPLE_USERNAME"},
- Required: true,
- Destination: &smapleUsername,
- },
- &cli.StringFlag{
- Name: "smaple_password",
- Usage: "Smaple API password",
- EnvVars: []string{"SMAPLE_PASSWORD"},
- Required: true,
- Destination: &smaplePassword,
- },
- &cli.StringFlag{
- Name: "smaple_url",
- Usage: "Smaple Base URL",
- Value: "https://smaple.cmgg.be",
- EnvVars: []string{"SMAPLE_URL"},
- Destination: &smapleUrl,
- },
- },
- Action: func(c *cli.Context) error {
- // Authenticate to Smaple
- ctx, client, err := smapleAuthenticate(
- smapleUsername,
- smaplePassword,
- smapleUrl,
- )
- if err != nil {
- log.Fatal("Unable to authenticate to Smaple: ", err)
- }
- // Fetch the samplesheet for
- samplesheet, resp, err := client.RunApi.GetSampleSheet(ctx, runName)
- if err != nil {
- body, err := io.ReadAll(resp.Body)
- log.Debug("exit code: ", resp.StatusCode, string(body))
- log.Fatal("Failed to get samplesheet for run ", runName, ": ", err)
- }
-
- // Parse the samplesheet
- var samples = make(map[string]*samplesheetSample)
- for _, lane := range samplesheet.RunLaneObjects {
- for _, pool := range lane.PoolObjects {
- for _, sample := range pool.LibPrepSamplePoolObjects {
- if sample.Tag != "WES" {
- continue
- }
- // Sample_ID
- var sample_id string = ""
- if sample.SampleNumberReference != "" {
- sample_id = sample.SampleNumberReference
- } else {
- sample_id = sample.SampleNumber
- }
- samples[sample_id+"_"+pool.PoolName] = &samplesheetSample{
- sample: sample_id,
- pool: pool.PoolName,
- }
- }
- }
- }
-
- // Find all full size (aligned) samples in
- sampleFiles := []string{}
- for _, dir := range samplesDir.Value() {
- files, err := findFilesByExtension(dir, []string{".bam", ".cram"})
- if err != nil {
- log.Fatal("Error finding files in ", dir, ": ", err)
- }
- sampleFiles = append(sampleFiles, files...)
- }
-
- // Find all snp samples in
- snpFastq := []string{}
- snpBams := []string{}
- for _, dir := range snpDir.Value() {
- snpfiles, err := findFilesByExtension(dir, []string{".fastq.gz", ".fq.gz"})
- if err != nil {
- log.Fatal("Error finding files in ", dir, ": ", err)
- }
- snpFastq = append(snpFastq, snpfiles...)
- snpbamfiles, err := findFilesByExtension(dir, []string{".bam", ".cram"})
- if err != nil {
- log.Fatal("Error finding files in ", dir, ": ", err)
- }
- snpBams = append(snpBams, snpbamfiles...)
- }
-
- if err != nil {
- log.Fatal("Error finding files in ", snpDir, ": ", err)
- }
-
- // Associate each bam/cram with a sample
- for i, sample := range samples {
- // Find the sample bam
- for _, file := range sampleFiles {
- if strings.Contains(file, sample.sample) {
- samples[i].sampleBam = file
- if _, err := os.Stat(file + ".bai"); err == nil {
- samples[i].sampleBamIndex = file + ".bai"
- } else if _, err := os.Stat(file + ".csi"); err == nil {
- samples[i].sampleBamIndex = file + ".csi"
- } else if _, err := os.Stat(file + ".crai"); err == nil {
- samples[i].sampleBamIndex = file + ".crai"
- } else {
- log.Error("Index file not found for ", file)
- }
- }
- }
- // Find the snp fastq
- for _, file := range snpFastq {
- if strings.Contains(file, sample.sample) {
- if strings.Contains(file, "_R1") {
- samples[i].snpFastq1 = file
- } else if strings.Contains(file, "_R2") {
- samples[i].snpFastq2 = file
- }
- }
- }
- // Find the snp bam
- for _, file := range snpBams {
- if strings.Contains(file, sample.sample) {
- samples[i].snpBam = file
- if _, err := os.Stat(file + ".bai"); err == nil {
- samples[i].snpBamIndex = file + ".bai"
- } else if _, err := os.Stat(file + ".csi"); err == nil {
- samples[i].snpBamIndex = file + ".csi"
- } else if _, err := os.Stat(file + ".crai"); err == nil {
- samples[i].snpBamIndex = file + ".crai"
- } else {
- log.Error("Index file not found for ", file)
- }
- }
- }
- }
-
- // Open the output file
- samplesheet_file, err := os.Create("sampletracking_samplesheet.csv")
- if err != nil {
- log.Fatalf("failed creating file: %s", err)
- }
- samplesheet_writer := csv.NewWriter(samplesheet_file)
- defer samplesheet_writer.Flush()
-
- samplesheet_writer.Write(samplesheetSample{}.header())
- for _, sample := range samples {
- if sample.sampleBam != "" && ((sample.snpFastq1 != "" && sample.snpFastq2 != "") || sample.snpBam != "") {
- samplesheet_writer.Write(sample.csv())
- }
- }
-
- return nil
- },
- }
-
- if err := app.Run(os.Args); err != nil {
- log.Fatal(err)
- }
-}
-
-// Authenticate to SMAPLE and return an authenticated client
-func smapleAuthenticate(username string, password string, url string) (context.Context, smaple_api_client.APIClient, error) {
- log.Debug("Authenticating to SMAPLE")
- log.Debug("Username:", username)
- log.Debug("URL:", url)
-
- configuration := smaple_api_client.NewConfiguration()
- configuration.BasePath = url
- client := smaple_api_client.NewAPIClient(configuration)
-
- opts := &smaple_api_client.AuthApiApiV6AuthLoginPostOpts{
- Body: optional.NewInterface(map[string]string{
- "name": uuid.New().String(),
- "password": password,
- "user": username,
- }),
- }
-
- token, _, err := client.AuthApi.ApiV6AuthLoginPost(context.Background(), opts)
- if err != nil {
- log.Fatal("Unable to fetch Smaple authentication token: ", err)
- }
- log.Debug("Smaple token:", token.AccessToken)
-
- auth := context.WithValue(context.Background(), smaple_api_client.ContextAPIKey, smaple_api_client.APIKey{
- Key: token.AccessToken,
- Prefix: "Bearer", // Omit if not necessary.
- })
-
- return auth, *client, err
-}
-
-// Find all files with a given extension in a directory
-func findFilesByExtension(dir string, extension []string) ([]string, error) {
- var files []string
-
- err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
- if err != nil {
- return err
- }
- if !d.IsDir() {
- for _, ext := range extension {
- if strings.HasSuffix(d.Name(), ext) {
- files = append(files, path)
- }
- }
- }
- return nil
- })
-
- if err != nil {
- return nil, err
- }
-
- return files, nil
-}
diff --git a/utils/nf-cmgg-sampletracking/nf-cmgg-sampletracking_test.go b/utils/nf-cmgg-sampletracking/nf-cmgg-sampletracking_test.go
deleted file mode 100644
index 0374ba4..0000000
--- a/utils/nf-cmgg-sampletracking/nf-cmgg-sampletracking_test.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package main
-
-import (
- "testing"
-)
-
-func TestFindFilesByExtension(t *testing.T) {
- dir := "test-data"
- extension := []string{".txt", ".csv"}
-
- files, err := findFilesByExtension(dir, extension)
- if err != nil {
- t.Errorf("Error finding files: %v", err)
- }
-
- expectedFiles := []string{
- "test-data/test1.txt",
- "test-data/test2.csv",
- // Add more expected file paths here
- }
-
- if len(files) != len(expectedFiles) {
- t.Errorf("Expected %d files, but got %d", len(expectedFiles), len(files))
- }
-
- for i, file := range files {
- if file != expectedFiles[i] {
- t.Errorf("Expected file path %s, but got %s", expectedFiles[i], file)
- }
- }
-}
diff --git a/utils/nf-cmgg-sampletracking/sampletracking_samplesheet.csv b/utils/nf-cmgg-sampletracking/sampletracking_samplesheet.csv
deleted file mode 100644
index 773b236..0000000
--- a/utils/nf-cmgg-sampletracking/sampletracking_samplesheet.csv
+++ /dev/null
@@ -1,2 +0,0 @@
-sample,pool,sample_bam,sample_bam_index,snp_fastq_1,snp_fastq_2,snp_bam,snp_bam_index
-D2217639,WES_H112,test-data/sample_data/D2217639.cram,test-data/sample_data/D2217639.cram.crai,test-data/snp_data/snp_D2217639_S201_R1_001.fastq.gz,test-data/snp_data/snp_D2217639_S201_R2_001.fastq.gz,,
diff --git a/utils/nf-cmgg-sampletracking/test-data/sample_data/D2217639.cram b/utils/nf-cmgg-sampletracking/test-data/sample_data/D2217639.cram
deleted file mode 100644
index e69de29..0000000
diff --git a/utils/nf-cmgg-sampletracking/test-data/sample_data/D2217639.cram.crai b/utils/nf-cmgg-sampletracking/test-data/sample_data/D2217639.cram.crai
deleted file mode 100644
index e69de29..0000000
diff --git a/utils/nf-cmgg-sampletracking/test-data/snp_data/snp_D2217639_S201_R1_001.fastq.gz b/utils/nf-cmgg-sampletracking/test-data/snp_data/snp_D2217639_S201_R1_001.fastq.gz
deleted file mode 100644
index e69de29..0000000
diff --git a/utils/nf-cmgg-sampletracking/test-data/snp_data/snp_D2217639_S201_R2_001.fastq.gz b/utils/nf-cmgg-sampletracking/test-data/snp_data/snp_D2217639_S201_R2_001.fastq.gz
deleted file mode 100644
index e69de29..0000000
diff --git a/utils/nf-cmgg-sampletracking/test-data/test1.txt b/utils/nf-cmgg-sampletracking/test-data/test1.txt
deleted file mode 100644
index e69de29..0000000
diff --git a/utils/nf-cmgg-sampletracking/test-data/test2.csv b/utils/nf-cmgg-sampletracking/test-data/test2.csv
deleted file mode 100644
index e69de29..0000000
diff --git a/workflows/sampletracking.nf b/workflows/sampletracking.nf
index 803542f..11ef55f 100644
--- a/workflows/sampletracking.nf
+++ b/workflows/sampletracking.nf
@@ -6,7 +6,7 @@
include { BWA_MEM } from '../modules/nf-core/bwa/mem/main'
include { PICARD_CROSSCHECKFINGERPRINTS } from '../modules/nf-core/picard/crosscheckfingerprints/main'
-include { NGSBITS_SAMPLEGENDER } from '../modules/local/ngsbits/samplegender/main'
+include { NGSBITS_SAMPLEGENDER } from '../modules/nf-core/ngsbits/samplegender/main'
include { MULTIQC as MULTIQC_POOLS } from '../modules/nf-core/multiqc/main'
include { MULTIQC as MULTIQC_MAIN } from '../modules/nf-core/multiqc/main'
include { paramsSummaryMap } from 'plugin/nf-schema'
@@ -267,16 +267,33 @@ workflow SAMPLETRACKING {
//
// MODULE: MultiQC
//
- ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true)
- ch_multiqc_custom_config = multiqc_config ? Channel.fromPath(multiqc_config, checkIfExists: true) : Channel.empty()
- ch_multiqc_logo = multiqc_logo ? Channel.fromPath(multiqc_logo, checkIfExists: true) : Channel.empty()
- summary_params = paramsSummaryMap(workflow, parameters_schema: "nextflow_schema.json")
- ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params))
- ch_multiqc_custom_methods_description = multiqc_methods_description ? file(multiqc_methods_description, checkIfExists: true) : file("$projectDir/assets/methods_description_template.yml", checkIfExists: true)
- ch_methods_description = Channel.value(methodsDescriptionText(ch_multiqc_custom_methods_description))
- ch_multiqc_files = ch_multiqc_files.mix(ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml'))
- ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions)
- ch_multiqc_files = ch_multiqc_files.mix(ch_methods_description.collectFile(name: 'methods_description_mqc.yaml', sort: false))
+ ch_multiqc_config = Channel.fromPath(
+ "$projectDir/assets/multiqc_config.yml", checkIfExists: true)
+ ch_multiqc_custom_config = multiqc_config ?
+ Channel.fromPath(multiqc_config, checkIfExists: true) :
+ Channel.empty()
+ ch_multiqc_logo = multiqc_logo ?
+ Channel.fromPath(multiqc_logo, checkIfExists: true) :
+ Channel.empty()
+
+ summary_params = paramsSummaryMap(
+ workflow, parameters_schema: "nextflow_schema.json")
+ ch_workflow_summary = Channel.value(paramsSummaryMultiqc(summary_params))
+ ch_multiqc_files = ch_multiqc_files.mix(
+ ch_workflow_summary.collectFile(name: 'workflow_summary_mqc.yaml'))
+ ch_multiqc_custom_methods_description = multiqc_methods_description ?
+ file(multiqc_methods_description, checkIfExists: true) :
+ file("$projectDir/assets/methods_description_template.yml", checkIfExists: true)
+ ch_methods_description = Channel.value(
+ methodsDescriptionText(ch_multiqc_custom_methods_description))
+
+ ch_multiqc_files = ch_multiqc_files.mix(ch_collated_versions)
+ ch_multiqc_files = ch_multiqc_files.mix(
+ ch_methods_description.collectFile(
+ name: 'methods_description_mqc.yaml',
+ sort: true
+ )
+ )
MULTIQC_POOLS (
ch_pool_multiqc_files.groupTuple(),