diff --git a/.devcontainer/build-devcontainer/Dockerfile b/.devcontainer/build-devcontainer/Dockerfile index 430b55d262..75d83b432b 100644 --- a/.devcontainer/build-devcontainer/Dockerfile +++ b/.devcontainer/build-devcontainer/Dockerfile @@ -1,6 +1,6 @@ # devcontainers/miniconda image based on debian (bookworm) # see tags and images: https://mcr.microsoft.com/en-us/artifact/mar/devcontainers/miniconda/tags -FROM "mcr.microsoft.com/devcontainers/miniconda@sha256:8e262a2664fab1d53054738d3633338558a2078ce66d3abde55c130f0d5da94f" AS build +FROM mcr.microsoft.com/devcontainers/miniconda@sha256:19516babd35bccd1429591a255dd867b2c8c01606cc594a8a6fe5db03309ecf8 AS build # copy this repo at current revision COPY . /root/nfcore-tools/ @@ -27,7 +27,7 @@ RUN apt-get update --quiet && \ rm -rf /var/lib/apt/lists/* # Final stage to copy only the required files after installation -FROM "mcr.microsoft.com/devcontainers/base:debian@sha256:ce2e9e611939e611b737362c045bb6d3449bb3efb84898525d724aace1737b90" AS final +FROM mcr.microsoft.com/devcontainers/base:debian@sha256:2e826a6ae92e5744cc0a471a03b4411e64f6b7cc6af3adaecddad697f0018f10 AS final # Copy only the conda environment and site-packages from build stage COPY --from=build /opt/conda /opt/conda diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index add023cfa8..f6d47fc237 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,6 +1,6 @@ { "name": "nfcore", - "image": "nfcore/devcontainer:latest", + "image": "nfcore/devcontainer:dev", "remoteEnv": { // Workspace path on the host for mounting with docker-outside-of-docker diff --git a/.devcontainer/setup.sh b/.devcontainer/setup.sh index e87d26c4c8..f350e4204e 100755 --- a/.devcontainer/setup.sh +++ b/.devcontainer/setup.sh @@ -9,5 +9,8 @@ export PS1='\[\e[3;36m\]\w ->\[\e[0m\\] ' # Update Nextflow nextflow self-update +# Install specifically the version of tools from the workspace +pip install --upgrade -r 
requirements.txt -r requirements-dev.txt -e . + # Update welcome message echo "Welcome to the nf-core devcontainer!" > /usr/local/etc/vscode-dev-containers/first-run-notice.txt diff --git a/.github/RELEASE_CHECKLIST.md b/.github/RELEASE_CHECKLIST.md index b9f4f0de76..907a119035 100644 --- a/.github/RELEASE_CHECKLIST.md +++ b/.github/RELEASE_CHECKLIST.md @@ -4,7 +4,7 @@ 2. Most importantly, pick an undeniably outstanding [name](http://www.codenamegenerator.com/) for the release where _Prefix_ = _Metal_ and _Dictionary_ = _Animal_. 3. Check the [pipeline health page](https://nf-co.re/pipeline_health) to make sure that all repos look sane (missing `TEMPLATE` branches etc) 4. Check that modules/subworkflows in template are up to date with the latest releases -5. Create a PR to `dev` to bump the version in `CHANGELOG.md` and `setup.py` and change the `.devcontainer/devcontainer.json` container to `nfcore/gitpod:latest`. +5. Create a PR to `dev` to bump the version in `CHANGELOG.md` and `setup.py`. 6. Make sure all CI tests are passing! 7. Create a PR from `dev` to `main` 8. Run a manual sync on `nf-core/testpipeline` and check that CI is passing on the resulting PR: use the `Sync template` GitHub Action from the tools repository specifying the pipeline name and running from the `dev` branch. @@ -22,4 +22,4 @@ 3. Manually trigger the `Sync template` GitHub Action for all pipelines. 4. Check that the automatic `PyPi` deployment has worked: [pypi.org/project/nf-core](https://pypi.org/project/nf-core/) 5. Check `BioConda` has an automated PR to bump the version, and merge. eg. [bioconda/bioconda-recipes #20065](https://github.com/bioconda/bioconda-recipes/pull/20065) -6. Create a tools PR to `dev` to bump back to the next development version in `CHANGELOG.md` and `setup.py` and change the `.devcontainer/devcontainer.json` container to `nfcore/gitpod:dev`. +6. Create a tools PR to `dev` to bump back to the next development version in `CHANGELOG.md` and `setup.py`. 
diff --git a/.github/actions/create-lint-wf/action.yml b/.github/actions/create-lint-wf/action.yml index 29826c64fc..8340696110 100644 --- a/.github/actions/create-lint-wf/action.yml +++ b/.github/actions/create-lint-wf/action.yml @@ -82,7 +82,7 @@ runs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5 with: name: nf-core-log-file-${{ matrix.NXF_VER }} path: create-lint-wf/log.txt diff --git a/.github/snapshots/adaptivecard.nf.test.snap b/.github/snapshots/adaptivecard.nf.test.snap index 16bb91364f..c8ebafde24 100644 --- a/.github/snapshots/adaptivecard.nf.test.snap +++ b/.github/snapshots/adaptivecard.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", 
"multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:10.350817122" } -} \ No newline at end of file +} diff --git a/.github/snapshots/changelog.nf.test.snap b/.github/snapshots/changelog.nf.test.snap index 1e06207d6e..aa731135a1 100644 --- a/.github/snapshots/changelog.nf.test.snap +++ b/.github/snapshots/changelog.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:01.993722854" } -} \ No newline at end of file +} diff --git a/.github/snapshots/ci.nf.test.snap b/.github/snapshots/ci.nf.test.snap index 8332ad6339..79ff07945a 
100644 --- a/.github/snapshots/ci.nf.test.snap +++ b/.github/snapshots/ci.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:03.306256217" } -} \ No newline at end of file +} diff --git a/.github/snapshots/citations.nf.test.snap b/.github/snapshots/citations.nf.test.snap index 47353a3bb3..a135c5586e 100644 --- a/.github/snapshots/citations.nf.test.snap +++ b/.github/snapshots/citations.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", 
"multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:14.716393995" } -} \ No newline at end of file +} diff --git a/.github/snapshots/code_linters.nf.test.snap b/.github/snapshots/code_linters.nf.test.snap index 47353a3bb3..a135c5586e 100644 --- a/.github/snapshots/code_linters.nf.test.snap +++ b/.github/snapshots/code_linters.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - 
"multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:14.716393995" } -} \ No newline at end of file +} diff --git a/.github/snapshots/codespaces.nf.test.snap b/.github/snapshots/codespaces.nf.test.snap index 47353a3bb3..a135c5586e 100644 --- a/.github/snapshots/codespaces.nf.test.snap +++ b/.github/snapshots/codespaces.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ 
"multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:14.716393995" } -} \ No newline at end of file +} diff --git a/.github/snapshots/default.nf.test.snap b/.github/snapshots/default.nf.test.snap index fc87fe6901..0af1896079 100644 --- a/.github/snapshots/default.nf.test.snap +++ b/.github/snapshots/default.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", 
"multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:29:04.284923" } -} \ No newline at end of file +} diff --git a/.github/snapshots/documentation.nf.test.snap b/.github/snapshots/documentation.nf.test.snap index 16bb91364f..c8ebafde24 100644 --- a/.github/snapshots/documentation.nf.test.snap +++ b/.github/snapshots/documentation.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:10.350817122" } -} \ No newline at end of file +} diff --git a/.github/snapshots/email.nf.test.snap b/.github/snapshots/email.nf.test.snap index 
16bb91364f..c8ebafde24 100644 --- a/.github/snapshots/email.nf.test.snap +++ b/.github/snapshots/email.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:10.350817122" } -} \ No newline at end of file +} diff --git a/.github/snapshots/github_badges.nf.test.snap b/.github/snapshots/github_badges.nf.test.snap index 0b58a518f4..a0da86f7bf 100644 --- a/.github/snapshots/github_badges.nf.test.snap +++ b/.github/snapshots/github_badges.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", 
"multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:09.233336075" } -} \ No newline at end of file +} diff --git a/.github/snapshots/gitpod.nf.test.snap b/.github/snapshots/gitpod.nf.test.snap index 47353a3bb3..a135c5586e 100644 --- a/.github/snapshots/gitpod.nf.test.snap +++ b/.github/snapshots/gitpod.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", 
"multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:14.716393995" } -} \ No newline at end of file +} diff --git a/.github/snapshots/gpu.nf.test.snap b/.github/snapshots/gpu.nf.test.snap index 30424b1aaa..0d51c52abf 100644 --- a/.github/snapshots/gpu.nf.test.snap +++ b/.github/snapshots/gpu.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", 
@@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-06-16T14:29:10.076573" } -} \ No newline at end of file +} diff --git a/.github/snapshots/igenomes.nf.test.snap b/.github/snapshots/igenomes.nf.test.snap index 6e9b8922d7..8d57911ba9 100644 --- a/.github/snapshots/igenomes.nf.test.snap +++ b/.github/snapshots/igenomes.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", 
"multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:07.123394148" } -} \ No newline at end of file +} diff --git a/.github/snapshots/license.nf.test.snap b/.github/snapshots/license.nf.test.snap index 16bb91364f..c8ebafde24 100644 --- a/.github/snapshots/license.nf.test.snap +++ b/.github/snapshots/license.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:10.350817122" } -} \ No newline at end of file +} diff --git a/.github/snapshots/nf_core_configs.nf.test.snap b/.github/snapshots/nf_core_configs.nf.test.snap index 
47353a3bb3..a135c5586e 100644 --- a/.github/snapshots/nf_core_configs.nf.test.snap +++ b/.github/snapshots/nf_core_configs.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:14.716393995" } -} \ No newline at end of file +} diff --git a/.github/snapshots/nf_schema.nf.test.snap b/.github/snapshots/nf_schema.nf.test.snap index c867a67739..6ccaacba0b 100644 --- a/.github/snapshots/nf_schema.nf.test.snap +++ b/.github/snapshots/nf_schema.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", 
"multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:19.694086325" } -} \ No newline at end of file +} diff --git a/.github/snapshots/rocrate.nf.test.snap b/.github/snapshots/rocrate.nf.test.snap index e45193040a..c8ebafde24 100644 --- a/.github/snapshots/rocrate.nf.test.snap +++ b/.github/snapshots/rocrate.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", 
"multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", diff --git a/.github/snapshots/seqera_platform.nf.test.snap b/.github/snapshots/seqera_platform.nf.test.snap index 16bb91364f..c8ebafde24 100644 --- a/.github/snapshots/seqera_platform.nf.test.snap +++ b/.github/snapshots/seqera_platform.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ 
"multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:10.350817122" } -} \ No newline at end of file +} diff --git a/.github/snapshots/slackreport.nf.test.snap b/.github/snapshots/slackreport.nf.test.snap index 16bb91364f..c8ebafde24 100644 --- a/.github/snapshots/slackreport.nf.test.snap +++ b/.github/snapshots/slackreport.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", 
"multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:10.350817122" } -} \ No newline at end of file +} diff --git a/.github/snapshots/vscode.nf.test.snap b/.github/snapshots/vscode.nf.test.snap index 16bb91364f..c8ebafde24 100644 --- a/.github/snapshots/vscode.nf.test.snap +++ b/.github/snapshots/vscode.nf.test.snap @@ -47,7 +47,6 @@ "multiqc/multiqc_data/multiqc_sources.txt", "multiqc/multiqc_plots", "multiqc/multiqc_plots/pdf", - "multiqc/multiqc_plots/pdf/fastqc-status-check-heatmap.pdf", "multiqc/multiqc_plots/pdf/fastqc_overrepresented_sequences_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_n_content_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_per_base_sequence_quality_plot.pdf", @@ -60,7 +59,6 @@ "multiqc/multiqc_plots/pdf/fastqc_sequence_length_distribution_plot.pdf", "multiqc/multiqc_plots/pdf/fastqc_top_overrepresented_sequences_table.pdf", "multiqc/multiqc_plots/png", - "multiqc/multiqc_plots/png/fastqc-status-check-heatmap.png", "multiqc/multiqc_plots/png/fastqc_overrepresented_sequences_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_n_content_plot.png", "multiqc/multiqc_plots/png/fastqc_per_base_sequence_quality_plot.png", @@ -73,7 +71,6 @@ "multiqc/multiqc_plots/png/fastqc_sequence_length_distribution_plot.png", "multiqc/multiqc_plots/png/fastqc_top_overrepresented_sequences_table.png", "multiqc/multiqc_plots/svg", - "multiqc/multiqc_plots/svg/fastqc-status-check-heatmap.svg", "multiqc/multiqc_plots/svg/fastqc_overrepresented_sequences_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_n_content_plot.svg", "multiqc/multiqc_plots/svg/fastqc_per_base_sequence_quality_plot.svg", @@ -111,4 +108,4 @@ }, "timestamp": "2025-05-07T13:52:10.350817122" } -} \ No newline at end of file +} diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml index 
4f7345e6a7..cf2cbb54c6 100644 --- a/.github/workflows/create-test-lint-wf-template.yml +++ b/.github/workflows/create-test-lint-wf-template.yml @@ -132,6 +132,7 @@ jobs: if: ${{ matrix.TEMPLATE == 'all' || matrix.TEMPLATE == 'nf-test' }} run: | cd create-test-lint-wf + echo "aws.client.anonymous = true" >> nextflow.config nextflow run my-prefix-testpipeline -profile test,docker --outdir ./results # Remove results folder before linting @@ -196,7 +197,7 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5 with: name: nf-core-log-file-${{ matrix.TEMPLATE }} path: create-test-lint-wf/artifact_files.tar diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml index 7aea741b04..40b0f8e00e 100644 --- a/.github/workflows/create-test-wf.yml +++ b/.github/workflows/create-test-wf.yml @@ -90,7 +90,7 @@ jobs: - name: Upload log file artifact if: ${{ always() }} - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5 with: name: nf-core-log-file-${{ matrix.NXF_VER }} path: create-test-wf/log.txt diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml index 6f44f450bf..9bc80434ea 100644 --- a/.github/workflows/push_dockerhub_dev.yml +++ b/.github/workflows/push_dockerhub_dev.yml @@ -25,7 +25,7 @@ jobs: uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - name: Set up QEMU for multi-architecture build - uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3 + uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3 diff --git a/.github/workflows/push_dockerhub_release.yml 
b/.github/workflows/push_dockerhub_release.yml index a6251acd25..4a31a25bbd 100644 --- a/.github/workflows/push_dockerhub_release.yml +++ b/.github/workflows/push_dockerhub_release.yml @@ -25,7 +25,7 @@ jobs: uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - name: Set up QEMU for multi-architecture build - uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3 + uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3 diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 93d94f091e..bfc0d822ac 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -133,7 +133,7 @@ jobs: echo "test=${test}" >> $GITHUB_ENV - name: Store snapshot report - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5 if: always() && contains(matrix.test, 'test_create_app') && steps.pytest.outcome == 'failure' with: include-hidden-files: true @@ -141,7 +141,7 @@ jobs: path: ./snapshot_report.html - name: Upload coverage - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5 with: include-hidden-files: true name: coverage_${{ env.test }} @@ -169,7 +169,7 @@ jobs: mv .github/.coveragerc . 
- name: Download all artifacts - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 with: pattern: coverage_* diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml index f3d6b1187b..868489b0ab 100644 --- a/.github/workflows/sync.yml +++ b/.github/workflows/sync.yml @@ -20,6 +20,10 @@ on: description: "Enable debug/verbose mode (true or false)" type: boolean default: false + blog_post: + description: "link to release blogpost" + type: string + required: true # Cancel if a newer run is started concurrency: @@ -114,10 +118,12 @@ jobs: --pull-request \ --username nf-core-bot \ - --github-repository nf-core/${{ matrix.pipeline }} + --github-repository nf-core/${{ matrix.pipeline }} \ + --blogpost ${{ inputs.blog_post }} - name: Upload sync log file artifact if: ${{ always() }} - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5 with: name: sync_log_${{ matrix.pipeline }} path: sync_log_${{ matrix.pipeline }}.txt diff --git a/.github/workflows/update-template-snapshots.yml b/.github/workflows/update-template-snapshots.yml index c98cd310ed..8437bc6ecc 100644 --- a/.github/workflows/update-template-snapshots.yml +++ b/.github/workflows/update-template-snapshots.yml @@ -18,6 +18,7 @@ jobs: steps: - name: checkout uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 + - name: Create Matrix id: create_matrix run: | @@ -26,12 +27,6 @@ jobs: update-snapshots: needs: [prepare-matrix] - # Only run if comment is on a PR with the main repo, and if it contains the magic keywords - if: > - contains(github.event.comment.html_url, '/pull/') && - contains(github.event.comment.body, '@nf-core-bot') && - contains(github.event.comment.body, 'update template snapshots') && - github.repository == 'nf-core/tools' runs-on: ubuntu-latest strategy: matrix: @@ -71,6 +66,17 @@ jobs: python -m pip install --upgrade pip
-r requirements-dev.txt pip install -e . + - name: Install Nextflow + uses: nf-core/setup-nextflow@v2 + with: + version: latest-everything + + - name: Install nf-test + uses: nf-core/setup-nf-test@v1 + with: + version: "0.9.3" + install-pdiff: true + # Create template files - name: Create template skip ${{ matrix.TEMPLATE }} run: | diff --git a/.gitignore b/.gitignore index c605f43847..3e20748fc5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,8 @@ .DS_Store +# Artifacts potentially generated by tests/docs +.nextflow +null .coverage .pytest_cache docs/api/_build diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fe0129e270..7b2e79293b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.13.3 + rev: v0.14.4 hooks: - id: ruff-check # linter args: [--fix, --exit-non-zero-on-fix] # sort imports and fix diff --git a/CHANGELOG.md b/CHANGELOG.md index 82a883e251..1294079af7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,46 @@ # nf-core/tools: Changelog +## v3.5.0dev + +### General + +- Update GitHub Actions (major) ([#3849](https://github.com/nf-core/tools/pull/3849)) +- add optional link to blogpost to sync PR ([#3852](https://github.com/nf-core/tools/pull/3852)) +- update multiqc version to fix utils test ([#3853](https://github.com/nf-core/tools/pull/3853)) +- Update dependency textual to v6.5.0 ([#3859](https://github.com/nf-core/tools/pull/3859)) +- Update python:3.14-slim Docker digest to 4ed3310 ([#3862](https://github.com/nf-core/tools/pull/3862)) +- Update dependency textual-dev to v1.8.0 ([#3860](https://github.com/nf-core/tools/pull/3860)) +- Update pre-commit hook astral-sh/ruff-pre-commit to v0.14.3 ([#3861](https://github.com/nf-core/tools/pull/3861)) +- Avoid deleting files ignored by git during `pipelines sync` ([#3847](https://github.com/nf-core/tools/pull/3847)) +- remove trailing comas from nextflow_schema.json 
([#3874](https://github.com/nf-core/tools/pull/3874)) +- Make bump-version snapshot test more stable ([#3865](https://github.com/nf-core/tools/pull/3865)) +- Update docker/setup-qemu-action digest to c7c5346 ([#3875](https://github.com/nf-core/tools/pull/3875)) +- chore(deps): update python:3.14-slim docker digest to 9813eec ([#3880](https://github.com/nf-core/tools/pull/3880)) +- chore(deps): update pre-commit hook astral-sh/ruff-pre-commit to v0.14.4 ([#3882](https://github.com/nf-core/tools/pull/3882)) +- add missing setup steps to snapshot update action ([#3883](https://github.com/nf-core/tools/pull/3883)) +- fix sync test ([#3885](https://github.com/nf-core/tools/pull/3885)) +- fix syntax in dockerfile for devcontainer ([#3887](https://github.com/nf-core/tools/pull/3887)) +- Enable authenticated pipeline download from nf-core compatible repos with github api ([#3607](https://github.com/nf-core/tools/pull/3607)) +- chore(deps): update mcr.microsoft.com/devcontainers/miniconda docker digest to 19516ba ([#3890](https://github.com/nf-core/tools/pull/3890)) +- Update dependency textual to v6.6.0 ([#3892](https://github.com/nf-core/tools/pull/3892)) +- chore(deps): update mcr.microsoft.com/devcontainers/base:debian docker digest to 2e826a6 ([#3893](https://github.com/nf-core/tools/pull/3893)) + +### Template + +- Change GitHub Codespaces badge style ([#3869](https://github.com/nf-core/tools/pull/3869) and [#3873](https://github.com/nf-core/tools/pull/3873)) +- Update multiqc to 1.32 ([#3878](https://github.com/nf-core/tools/pull/3878)) + +### Linting + +- TEMPLATE: ignore nf-core components during prettier linting ([#3858](https://github.com/nf-core/tools/pull/3858)) +- update json schema store URL ([#3877](https://github.com/nf-core/tools/pull/3877)) + +### Modules + +- Add `topics` to the template + update linting ([#3779](https://github.com/nf-core/tools/pull/3779)) + +### Subworkflows + ## [v3.4.1 - Ducol Dingo Patch 
1](https://github.com/nf-core/tools/releases/tag/3.4.1) - [2025-10-16] ### Template diff --git a/Dockerfile b/Dockerfile index dc353175d6..ddb976804c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.14-slim@sha256:1e7c3510ceb3d6ebb499c86e1c418b95cb4e5e2f682f8e195069f470135f8d51 +FROM python:3.14-slim@sha256:9813eecff3a08a6ac88aea5b43663c82a931fd9557f6aceaa847f0d8ce738978 LABEL authors="phil.ewels@seqera.io,erik.danielsson@scilifelab.se" \ description="Docker image containing requirements for nf-core/tools" diff --git a/nf_core/__main__.py b/nf_core/__main__.py index f4bebb94de..4dac9b50d4 100644 --- a/nf_core/__main__.py +++ b/nf_core/__main__.py @@ -55,11 +55,7 @@ subworkflows_test, subworkflows_update, ) -from nf_core.commands_test_datasets import ( - test_datasets_list_branches, - test_datasets_list_remote, - test_datasets_search, -) +from nf_core.commands_test_datasets import test_datasets_list_branches, test_datasets_list_remote, test_datasets_search from nf_core.components.components_completion import autocomplete_modules, autocomplete_subworkflows from nf_core.components.constants import NF_CORE_MODULES_REMOTE from nf_core.pipelines.download.download import DownloadError diff --git a/nf_core/commands_pipelines.py b/nf_core/commands_pipelines.py index aa427d2212..0c258507e7 100644 --- a/nf_core/commands_pipelines.py +++ b/nf_core/commands_pipelines.py @@ -165,7 +165,7 @@ def pipelines_download( pipeline, revision, outdir, - compress, + compress_type, force, platform, download_configuration, @@ -188,17 +188,17 @@ def pipelines_download( pipeline, revision, outdir, - compress, - force, - platform, - download_configuration, - tag, - container_system, - container_library, - container_cache_utilisation, - container_cache_index, - parallel_downloads, - ctx.obj["hide_progress"], + compress_type=compress_type, + force=force, + platform=platform, + download_configuration=download_configuration, + additional_tags=tag, + 
container_system=container_system, + container_library=container_library, + container_cache_utilisation=container_cache_utilisation, + container_cache_index=container_cache_index, + parallel=parallel_downloads, + hide_progress=ctx.obj["hide_progress"], ) dl.download_workflow() diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 321127217a..79c4403105 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -528,21 +528,31 @@ def generate_meta_yml_file(self) -> None: with open(self.file_paths["meta.yml"]) as fh: meta_yml: ruamel.yaml.comments.CommentedMap = yaml.load(fh) - versions: dict[str, list[dict[str, dict]]] = { + versions: dict[str, list | dict] = { + f"versions_{self.component}": [ + [ + {"${task.process}": {"type": "string", "description": "The name of the process"}}, + {f"{self.component}": {"type": "string", "description": "The name of the tool"}}, + { + f"{self.component} --version": {"type": "string", "description": "The version of the tool"}, + }, + ] + ] + } + + versions_topic: dict[str, list | dict] = { "versions": [ - { - "versions.yml": { - "type": "file", - "description": "File containing software versions", - "pattern": "versions.yml", - "ontologies": [ - ruamel.yaml.comments.CommentedMap({"edam": "http://edamontology.org/format_3750"}) - ], - } - } + [ + {"process": {"type": "string", "description": "The process the versions were collected from"}}, + { + "tool": {"type": "string", "description": "The tool name the version was collected for"}, + }, + { + "version": {"type": "string", "description": "The version of the tool"}, + }, + ] ] } - versions["versions"][0]["versions.yml"]["ontologies"][0].yaml_add_eol_comment("YAML", "edam") if self.not_empty_template: meta_yml.yaml_set_comment_before_after_key( @@ -557,8 +567,11 @@ def generate_meta_yml_file(self) -> None: meta_yml["output"].yaml_set_start_comment( "### TODO nf-core: Add a description of all of the variables used as output", indent=2 ) + 
meta_yml["topics"].yaml_set_start_comment( + "### TODO nf-core: Add a description of all of the variables used as topics", indent=2 + ) - if hasattr(self, "inputs"): + if hasattr(self, "inputs") and len(self.inputs) > 0: inputs_array: list[dict | list[dict]] = [] for i, (input_name, ontologies) in enumerate(self.inputs.items()): channel_entry: dict[str, dict] = { @@ -607,7 +620,7 @@ def generate_meta_yml_file(self) -> None: meta_yml["input"][0]["bam"]["ontologies"][1].yaml_add_eol_comment("CRAM", "edam") meta_yml["input"][0]["bam"]["ontologies"][2].yaml_add_eol_comment("SAM", "edam") - if hasattr(self, "outputs"): + if hasattr(self, "outputs") and len(self.outputs) > 0: outputs_dict: dict[str, list | dict] = {} for i, (output_name, ontologies) in enumerate(self.outputs.items()): channel_contents: list[list[dict] | dict] = [] @@ -668,6 +681,8 @@ def generate_meta_yml_file(self) -> None: meta_yml["output"]["bam"][0]["*.bam"]["ontologies"][2].yaml_add_eol_comment("SAM", "edam") meta_yml["output"].update(versions) + meta_yml["topics"] = versions_topic + else: input_entry: list[dict] = [ {"input": {"type": "file", "description": "", "pattern": "", "ontologies": [{"edam": ""}]}} @@ -690,6 +705,7 @@ def generate_meta_yml_file(self) -> None: meta_yml["input"] = input_entry meta_yml["output"] = {"output": output_entry} meta_yml["output"].update(versions) + meta_yml["topics"] = versions_topic with open(self.file_paths["meta.yml"], "w") as fh: yaml.dump(meta_yml, fh) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 3720a283d9..32bcc16bf5 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -253,7 +253,7 @@ def get_outputs_from_main_nf(self): return outputs output_data = data.split("output:")[1].split("when:")[0] regex_emit = r"emit:\s*([^)\s,]+)" - regex_elements = r"\b(val|path|env|stdout)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + regex_elements = 
r"\b(val|path|env|stdout|eval)\s*(\(([^)]+)\)|\s*([^)\s,]+))" for line in output_data.split("\n"): match_emit = re.search(regex_emit, line) matches_elements = re.finditer(regex_elements, line) @@ -294,3 +294,41 @@ def get_outputs_from_main_nf(self): pass log.debug(f"Found {len(outputs)} outputs in {self.main_nf}") self.outputs = outputs + + def get_topics_from_main_nf(self) -> None: + with open(self.main_nf) as f: + data = f.read() + if self.component_type == "modules": + topics: dict[str, list[dict[str, dict] | list[dict[str, dict[str, str]]]]] = {} + # get topic name from main.nf after "output:". the names are always after "topic:" + if "output:" not in data: + log.debug(f"Could not find any outputs in {self.main_nf}") + self.topics = topics + return + output_data = data.split("output:")[1].split("when:")[0] + regex_topic = r"topic:\s*([^)\s,]+)" + regex_elements = r"\b(val|path|env|stdout|eval)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + for line in output_data.split("\n"): + match_topic = re.search(regex_topic, line) + matches_elements = re.finditer(regex_elements, line) + if not match_topic: + continue + channel_elements: list[dict[str, dict]] = [] + topic_name = match_topic.group(1) + if topic_name in topics: + continue + topics[match_topic.group(1)] = [] + for count, match_element in enumerate(matches_elements, start=1): + output_val = None + if match_element.group(3): + output_val = match_element.group(3) + elif match_element.group(4): + output_val = match_element.group(4) + if output_val: + channel_elements.append({f"value{count}": {}}) + if len(channel_elements) == 1: + topics[match_topic.group(1)].append(channel_elements[0]) + elif len(channel_elements) > 1: + topics[match_topic.group(1)].append(channel_elements) + log.debug(f"Found {len(list(topics.keys()))} topics in {self.main_nf}") + self.topics = topics diff --git a/nf_core/module-template/main.nf b/nf_core/module-template/main.nf index f99145b94d..49802b58c9 100644 --- a/nf_core/module-template/main.nf +++ 
b/nf_core/module-template/main.nf @@ -65,7 +65,12 @@ process {{ component_name_underscore|upper }} { {{ 'tuple val(meta), path("*")' if has_meta else 'path "*"' }}, emit: output {%- endif %} {%- endif %} - path "versions.yml" , emit: versions + {% if not_empty_template -%} + // TODO nf-core: Update the command here to obtain the version number of the software used in this module + // TODO nf-core: If multiple software packages are used in this module, all MUST be added here + // by copying the line below and replacing the current tool with the extra tool(s) + {%- endif %} + tuple val("${task.process}"), val('{{ component }}'), eval("{{ component }} --version"), topic: versions, emit: versions_{{ component }} when: task.ext.when == null || task.ext.when @@ -111,11 +116,6 @@ process {{ component_name_underscore|upper }} { $bam {%- endif %} {%- endif %} - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - {{ component }}: \$({{ component }} --version) - END_VERSIONS """ stub: @@ -146,10 +146,5 @@ process {{ component_name_underscore|upper }} { touch ${prefix}.bam {%- endif %} {%- endif %} - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - {{ component }}: \$({{ component }} --version) - END_VERSIONS """ } diff --git a/nf_core/module-template/meta.yml b/nf_core/module-template/meta.yml index 266b7e2882..8dc2af3c4b 100644 --- a/nf_core/module-template/meta.yml +++ b/nf_core/module-template/meta.yml @@ -46,13 +46,28 @@ output: - edam: "http://edamontology.org/format_2572" # BAM - edam: "http://edamontology.org/format_2573" # CRAM - edam: "http://edamontology.org/format_3462" # SAM + versions_{{ component }}: + - - "${task.process}": + type: string + description: The name of the process + - "{{ component }}": + type: string + description: The name of the tool + - "{{ component }} --version": + type: string + description: The version of the tool + +topics: versions: - - "versions.yml": - type: file - description: File containing software versions - 
pattern: "versions.yml" - ontologies: - - edam: "http://edamontology.org/format_3750" # YAML + - - process: + type: string + description: The process the versions were collected from + - tool: + type: string + description: The tool name the version was collected for + - version: + type: string + description: The version of the tool authors: - "{{ author }}" diff --git a/nf_core/modules/lint/__init__.py b/nf_core/modules/lint/__init__.py index f39356edde..059353b31b 100644 --- a/nf_core/modules/lint/__init__.py +++ b/nf_core/modules/lint/__init__.py @@ -234,6 +234,7 @@ def lint_module( if local: mod.get_inputs_from_main_nf() mod.get_outputs_from_main_nf() + mod.get_topics_from_main_nf() # Update meta.yml file if requested if self.fix and mod.meta_yml is not None: self.update_meta_yml_file(mod) @@ -260,6 +261,7 @@ def lint_module( else: mod.get_inputs_from_main_nf() mod.get_outputs_from_main_nf() + mod.get_topics_from_main_nf() # Update meta.yml file if requested if self.fix: self.update_meta_yml_file(mod) diff --git a/nf_core/modules/lint/main_nf.py b/nf_core/modules/lint/main_nf.py index 60bda6590a..0c2bdb13a1 100644 --- a/nf_core/modules/lint/main_nf.py +++ b/nf_core/modules/lint/main_nf.py @@ -36,14 +36,15 @@ def main_nf( * The module has a process label and it is among the standard ones. * If a ``meta`` map is defined as one of the modules - inputs it should be defined as one of the outputs, + inputs it should be defined as one of the emits, and be correctly configured in the ``saveAs`` function. 
* The module script section should contain definitions of ``software`` and ``prefix`` """ inputs: list[str] = [] - outputs: list[str] = [] + emits: list[str] = [] + topics: list[str] = [] # Check if we have a patch file affecting the 'main.nf' file # otherwise read the lines directly from the module @@ -132,8 +133,9 @@ def main_nf( line = joint_tuple inputs.extend(_parse_input(module, line)) if state == "output" and not _is_empty(line): - outputs += _parse_output(module, line) - outputs = list(set(outputs)) # remove duplicate 'meta's + emits += _parse_output_emits(module, line) + emits = list(set(emits)) # remove duplicate 'meta's + topics += _parse_output_topics(module, line) if state == "when" and not _is_empty(line): when_lines.append(line) if state == "script" and not _is_empty(line): @@ -144,7 +146,7 @@ def main_nf( exec_lines.append(line) # Check that we have required sections - if not len(outputs): + if not len(emits): module.failed.append(("main_nf", "main_nf_script_outputs", "No process 'output' block found", module.main_nf)) else: module.passed.append(("main_nf", "main_nf_script_outputs", "Process 'output' block found", module.main_nf)) @@ -192,8 +194,8 @@ def main_nf( if inputs: if "meta" in inputs: module.has_meta = True - if outputs: - if "meta" in outputs: + if emits: + if "meta" in emits: module.passed.append( ( "main_nf", @@ -213,22 +215,43 @@ def main_nf( ) # Check that a software version is emitted - if outputs: - if "versions" in outputs: + if topics: + if "versions" in topics: module.passed.append( - ("main_nf", "main_nf_version_emitted", "Module emits software version", module.main_nf) + ("main_nf", "main_nf_version_topic", "Module emits software versions as topic", module.main_nf) ) else: + module.warned.append( + ("main_nf", "main_nf_version_topic", "Module does not emit software versions as topic", module.main_nf) + ) + + if emits: + topic_versions_amount = sum(1 for t in topics if t == "versions") + emit_versions_amount = sum(1 for e in 
emits if e.startswith("versions")) + if topic_versions_amount == emit_versions_amount: + module.passed.append( + ("main_nf", "main_nf_version_emit", "Module emits each software version", module.main_nf) + ) + elif "versions" in emits: module.warned.append( ( "main_nf", - "main_nf_version_emitted", - "Module does not emit software version", + "main_nf_version_emit", + "Module emits software versions YAML, please update this to topics output", + module.main_nf, + ) + ) + else: + module.failed.append( + ( + "main_nf", + "main_nf_version_emit", + "Module does not have an `emit:` and `topic:` for each software version", module.main_nf, ) ) - return inputs, outputs + return inputs, emits def check_script_section(self, lines): @@ -238,14 +261,6 @@ """ script = "".join(lines) - # check that process name is used for `versions.yml` - if re.search(r"\$\{\s*task\.process\s*\}", script): - self.passed.append(("main_nf", "main_nf_version_script", "Process name used for versions.yml", self.main_nf)) - else: - self.warned.append( - ("main_nf", "main_nf_version_script", "Process name not used for versions.yml", self.main_nf) - ) - # check for prefix (only if module has a meta map as input) if self.has_meta: if re.search(r"\s*prefix\s*=\s*task.ext.prefix", script): @@ -705,16 +720,43 @@ return inputs -def _parse_output(self, line): +def _parse_output_emits(self, line: str) -> list[str]: output = [] if "meta" in line: output.append("meta") - if "emit:" not in line: - self.failed.append(("main_nf", "missing_emit", f"Missing emit statement: {line.strip()}", self.main_nf)) + emit_regex = re.search(r"^.*emit:\s*([^,\s]*)", line) + if not emit_regex: + self.failed.append(("main_nf", "missing_emit", f"Missing emit statement: {line.strip()}", self.main_nf)) else: - output.append(line.split("emit:")[1].strip()) - self.passed.append(("main_nf", "missing_emit", f"Emit statement found: {line.strip()}", self.main_nf)) +
output.append(emit_regex.group(1).strip()) + return output + +def _parse_output_topics(self, line: str) -> list[str]: + output = [] + if "meta" in line: + output.append("meta") + topic_regex = re.search(r"^.*topic:\s*([^,\s]*)", line) + if topic_regex: + topic_name = topic_regex.group(1).strip() + output.append(topic_name) + if topic_name == "versions": + if not re.search(r'tuple\s+val\("\${\s*task\.process\s*}"\),\s*val\(.*\),\s*eval\(.*\)', line): + self.failed.append( + ( + "main_nf", "wrong_version_output", + 'Versions topic output is not correctly formatted, expected `tuple val("${task.process}"), val(\'\'), eval("")`', + self.main_nf, + ) + ) + if not re.search(r"emit:\s*versions_[\d\w]+", line): + self.failed.append( + ( + "main_nf", "wrong_version_emit", + "Version emit should follow the format `versions_`, e.g.: `versions_samtools`, `versions_gatk4`", + self.main_nf, + ) + ) return output diff --git a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml index 83dc64ab06..7e2eb970ac 100644 --- a/nf_core/pipeline-template/.github/workflows/awsfulltest.yml +++ b/nf_core/pipeline-template/.github/workflows/awsfulltest.yml @@ -40,7 +40,7 @@ jobs: } profiles: test_full - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5 with: name: Seqera Platform debug log file path: | diff --git a/nf_core/pipeline-template/.github/workflows/awstest.yml b/nf_core/pipeline-template/.github/workflows/awstest.yml index 515b0700e8..7e6b7ffb4f 100644 --- a/nf_core/pipeline-template/.github/workflows/awstest.yml +++ b/nf_core/pipeline-template/.github/workflows/awstest.yml @@ -25,7 +25,7 @@ jobs: } profiles: test - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5 with: name: Seqera Platform debug log file path: | diff --git
a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml index 6faff91163..8146b46eca 100644 --- a/nf_core/pipeline-template/.github/workflows/download_pipeline.yml +++ b/nf_core/pipeline-template/.github/workflows/download_pipeline.yml @@ -127,7 +127,7 @@ jobs: fi{% endraw %} - name: Upload Nextflow logfile for debugging purposes - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5 with: name: nextflow_logfile.txt path: .nextflow.log* diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index 61db86249d..016961520f 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -71,7 +71,7 @@ jobs: - name: Upload linting log file artifact if: ${{ always() }} - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5 with: name: linting-logs path: | diff --git a/nf_core/pipeline-template/.github/workflows/release-announcements.yml b/nf_core/pipeline-template/.github/workflows/release-announcements.yml index cee591bdaa..9d23f62bcd 100644 --- a/nf_core/pipeline-template/.github/workflows/release-announcements.yml +++ b/nf_core/pipeline-template/.github/workflows/release-announcements.yml @@ -15,10 +15,9 @@ jobs: echo "topics=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .topics[]' | awk '{print "#"$0}' | tr '\n' ' ')" | sed 's/-//g' >> $GITHUB_OUTPUT - name: get description - id: get_topics + id: get_description run: | - echo "description=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .description' 
>> $GITHUB_OUTPUT - + echo "description=$(curl -s https://nf-co.re/pipelines.json | jq -r '.remote_workflows[] | select(.full_name == "${{ github.repository }}") | .description')" >> $GITHUB_OUTPUT - uses: rzr/fediverse-action@master with: access-token: ${{ secrets.MASTODON_ACCESS_TOKEN }} @@ -27,9 +26,7 @@ jobs: # https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#release message: | Pipeline release! ${{ github.repository }} v${{ github.event.release.tag_name }} - ${{ github.event.release.name }}! - - ${{ steps.get_topics.outputs.description }} - + ${{ steps.get_description.outputs.description }} Please see the changelog: ${{ github.event.release.html_url }} ${{ steps.get_topics.outputs.topics }} #nfcore #openscience #nextflow #bioinformatics diff --git a/nf_core/pipeline-template/.prettierignore b/nf_core/pipeline-template/.prettierignore index aa0b8c2529..3c6e89c11b 100644 --- a/nf_core/pipeline-template/.prettierignore +++ b/nf_core/pipeline-template/.prettierignore @@ -20,3 +20,7 @@ bin/ {%- if rocrate %} ro-crate-metadata.json {%- endif %} +{%- if modules %} +modules/nf-core/ +subworkflows/nf-core/ +{%- endif %} diff --git a/nf_core/pipeline-template/README.md b/nf_core/pipeline-template/README.md index 27df819d49..d7152294a4 100644 --- a/nf_core/pipeline-template/README.md +++ b/nf_core/pipeline-template/README.md @@ -13,7 +13,7 @@ {% endif -%} {% if github_badges -%} -{% if codespaces %}[![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://github.com/codespaces/new/{{ name }}){% endif %} +{% if codespaces %}[![Open in GitHub Codespaces](https://img.shields.io/badge/Open_In_GitHub_Codespaces-black?labelColor=grey&logo=github)](https://github.com/codespaces/new/{{ name }}){% endif %} [![GitHub Actions CI Status](https://github.com/{{ name }}/actions/workflows/nf-test.yml/badge.svg)](https://github.com/{{ name }}/actions/workflows/nf-test.yml) [![GitHub Actions Linting 
Status](https://github.com/{{ name }}/actions/workflows/linting.yml/badge.svg)](https://github.com/{{ name }}/actions/workflows/linting.yml){% endif -%} {% if is_nfcore -%}[![AWS CI](https://img.shields.io/badge/CI%20tests-full%20size-FF9900?labelColor=000000&logo=Amazon%20AWS)](https://nf-co.re/{{ short_name }}/results){% endif -%} diff --git a/nf_core/pipeline-template/modules.json b/nf_core/pipeline-template/modules.json index 8ae5c19c7e..78a4c9d770 100644 --- a/nf_core/pipeline-template/modules.json +++ b/nf_core/pipeline-template/modules.json @@ -13,7 +13,7 @@ }{% endif %}{%- if multiqc %}{% if fastqc %},{% endif %} "multiqc": { "branch": "master", - "git_sha": "e10b76ca0c66213581bec2833e30d31f239dec0b", + "git_sha": "af27af1be706e6a2bb8fe454175b0cdf77f47b49", "installed_by": ["modules"] } {%- endif %} diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml index dd513cbd17..d02016a009 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/environment.yml @@ -4,4 +4,4 @@ channels: - conda-forge - bioconda dependencies: - - bioconda::multiqc=1.31 + - bioconda::multiqc=1.32 diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf index 5288f5ccfe..c1158fb08c 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/main.nf @@ -3,8 +3,8 @@ process MULTIQC { conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/ef/eff0eafe78d5f3b65a6639265a16b89fdca88d06d18894f90fcdb50142004329/data' : - 'community.wave.seqera.io/library/multiqc:1.31--1efbafd542a23882' }" + 'https://community-cr-prod.seqera.io/docker/registry/v2/blobs/sha256/8c/8c6c120d559d7ee04c7442b61ad7cf5a9e8970be5feefb37d68eeaa60c1034eb/data' : + 'community.wave.seqera.io/library/multiqc:1.32--d58f60e4deb769bf' }" input: path multiqc_files, stageAs: "?/*" diff --git a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap index 17881d15cf..a88bafd679 100644 --- a/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap +++ b/nf_core/pipeline-template/modules/nf-core/multiqc/tests/main.nf.test.snap @@ -2,14 +2,14 @@ "multiqc_versions_single": { "content": [ [ - "versions.yml:md5,8968b114a3e20756d8af2b80713bcc4f" + "versions.yml:md5,737bb2c7cad54ffc2ec020791dc48b8f" ] ], "meta": { - "nf-test": "0.9.2", - "nextflow": "25.04.6" + "nf-test": "0.9.3", + "nextflow": "24.10.4" }, - "timestamp": "2025-09-08T20:57:36.139055243" + "timestamp": "2025-10-27T13:33:24.356715" }, "multiqc_stub": { "content": [ @@ -17,25 +17,25 @@ "multiqc_report.html", "multiqc_data", "multiqc_plots", - "versions.yml:md5,8968b114a3e20756d8af2b80713bcc4f" + "versions.yml:md5,737bb2c7cad54ffc2ec020791dc48b8f" ] ], "meta": { - "nf-test": "0.9.2", - "nextflow": "25.04.6" + "nf-test": "0.9.3", + "nextflow": "24.10.4" }, - "timestamp": "2025-09-08T20:59:15.142230631" + "timestamp": "2025-10-27T13:34:11.103619" }, "multiqc_versions_config": { "content": [ [ - "versions.yml:md5,8968b114a3e20756d8af2b80713bcc4f" + "versions.yml:md5,737bb2c7cad54ffc2ec020791dc48b8f" ] ], "meta": { - "nf-test": "0.9.2", - "nextflow": "25.04.6" + "nf-test": "0.9.3", + "nextflow": "24.10.4" }, - "timestamp": "2025-09-08T20:58:29.629087066" + "timestamp": "2025-10-27T13:34:04.615233" } } \ No newline 
at end of file diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index ccd9a797cb..c76add235a 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -220,12 +220,7 @@ profiles { test_full { includeConfig 'conf/test_full.config' } {%- endif %} } -{%- if igenomes %} -// Set AWS client to anonymous when using the default igenomes_base -aws.client.anonymous = !params.igenomes_ignore && params.igenomes_base?.startsWith('s3://ngi-igenomes/igenomes/') ?: false - -{%- endif %} {% if nf_core_configs -%} // Load nf-core custom profiles from different institutions diff --git a/nf_core/pipeline-template/nextflow_schema.json b/nf_core/pipeline-template/nextflow_schema.json index c410720a82..273a1b3b3d 100644 --- a/nf_core/pipeline-template/nextflow_schema.json +++ b/nf_core/pipeline-template/nextflow_schema.json @@ -238,15 +238,15 @@ }{% if nf_schema %}, "help": { "type": ["boolean", "string"], - "description": "Display the help message.", + "description": "Display the help message." }, "help_full": { "type": "boolean", - "description": "Display the full detailed help message.", + "description": "Display the full detailed help message." }, "show_hidden": { "type": "boolean", - "description": "Display hidden parameters in the help message (only works when --help or --help_full are provided).", + "description": "Display hidden parameters in the help message (only works when --help or --help_full are provided)." 
}{% endif %} } } diff --git a/nf_core/pipeline-template/workflows/pipeline.nf b/nf_core/pipeline-template/workflows/pipeline.nf index 6126f9ec69..94bdae9895 100644 --- a/nf_core/pipeline-template/workflows/pipeline.nf +++ b/nf_core/pipeline-template/workflows/pipeline.nf @@ -50,7 +50,25 @@ workflow {{ short_name|upper }} { // // Collate and save software versions // - softwareVersionsToYAML(ch_versions) + def topic_versions = Channel.topic("versions") + .distinct() + .branch { entry -> + versions_file: entry instanceof Path + versions_tuple: true + } + + def topic_versions_string = topic_versions.versions_tuple + .map { process, tool, version -> + [ process[process.lastIndexOf(':')+1..-1], " ${tool}: ${version}" ] + } + .groupTuple(by:0) + .map { process, tool_versions -> + tool_versions.unique().sort() + "${process}:\n${tool_versions.join('\n')}" + } + + softwareVersionsToYAML(ch_versions.mix(topic_versions.versions_file)) + .mix(topic_versions_string) .collectFile( storeDir: "${params.outdir}/pipeline_info", name: {% if is_nfcore %}'nf_core_' + {% endif %} '{{ short_name }}_software_' {% if multiqc %} + 'mqc_' {% endif %} + 'versions.yml', diff --git a/nf_core/pipelines/create/template_features.yml b/nf_core/pipelines/create/template_features.yml index 9d88275b8d..0801f1ff75 100644 --- a/nf_core/pipelines/create/template_features.yml +++ b/nf_core/pipelines/create/template_features.yml @@ -239,6 +239,8 @@ components_modules: - "conf/base.config" - "conf/modules.config" - "modules.json" + files_unchanged: + - ".prettierignore" nfcore_pipelines: False custom_pipelines: True default: true diff --git a/nf_core/pipelines/download/download.py b/nf_core/pipelines/download/download.py index f451e8112c..05c3ccea8d 100644 --- a/nf_core/pipelines/download/download.py +++ b/nf_core/pipelines/download/download.py @@ -28,6 +28,7 @@ NF_INSPECT_MIN_NF_VERSION, NFCORE_VER_LAST_WITHOUT_NF_INSPECT, check_nextflow_version, + gh_api, pretty_nf_version, run_cmd, ) @@ -47,7 +48,7 @@ 
class DownloadWorkflow: Can also download its Singularity container image if required. Args: - pipeline (str | None): A nf-core pipeline name. + pipeline (str | None): The name of an nf-core-compatible pipeline in the form org/repo revision (tuple[str] | str | None): The workflow revision(s) to download, like `1.0` or `dev` . Defaults to None. outdir (Path | None): Path to the local download directory. Defaults to None. compress_type (str | None): Type of compression for the downloaded files. Defaults to None. @@ -148,6 +149,11 @@ def __init__( self.container_cache_index = container_cache_index # allows to specify a container library / registry or a respective mirror to download images from self.parallel = parallel + self.hide_progress = hide_progress + + if not gh_api.has_init: + gh_api.lazy_init() + self.authenticated = gh_api.auth is not None self.wf_revisions: list[dict[str, Any]] = [] self.wf_branches: dict[str, Any] = {} @@ -461,11 +467,19 @@ def get_revision_hash(self) -> None: if not self.platform: for revision, wf_sha in self.wf_sha.items(): - # Set the download URL and return - only applicable for classic downloads - self.wf_download_url = { - **self.wf_download_url, - revision: f"https://github.com/{self.pipeline}/archive/{wf_sha}.zip", - } + # Set the download URL - only applicable for classic downloads + if self.authenticated: + # For authenticated downloads, use the GitHub API + self.wf_download_url = { + **self.wf_download_url, + revision: f"https://api.github.com/repos/{self.pipeline}/zipball/{wf_sha}", + } + else: + # For unauthenticated downloads, use the archive URL + self.wf_download_url = { + **self.wf_download_url, + revision: f"https://github.com/{self.pipeline}/archive/{wf_sha}.zip", + } def prompt_config_inclusion(self) -> None: """Prompt for inclusion of institutional configurations""" @@ -555,22 +569,23 @@ def prompt_compression_type(self) -> None: def download_wf_files(self, revision: str, wf_sha: str, download_url: str) -> str: 
"""Downloads workflow files from GitHub to the :attr:`self.outdir`.""" + log.debug(f"Downloading {download_url}") - # Download GitHub zip file into memory and extract - url = requests.get(download_url) - with ZipFile(io.BytesIO(url.content)) as zipfile: + # GitHub API download: fetch via API and get topdir from zip contents + content = gh_api.get(download_url).content + with ZipFile(io.BytesIO(content)) as zipfile: + topdir = zipfile.namelist()[0] # API zipballs have a generated directory name zipfile.extractall(self.outdir) - # create a filesystem-safe version of the revision name for the directory + # Create a filesystem-safe version of the revision name for the directory revision_dirname = re.sub("[^0-9a-zA-Z]+", "_", revision) - # account for name collisions, if there is a branch / release named "configs" or container output dir + # Account for name collisions, if there is a branch / release named "configs" or container output dir if revision_dirname in ["configs", self.get_container_output_dir()]: revision_dirname = re.sub("[^0-9a-zA-Z]+", "_", self.pipeline + revision_dirname) # Rename the internal directory name to be more friendly - gh_name = f"{self.pipeline}-{wf_sha if bool(wf_sha) else ''}".split("/")[-1] - ((self.outdir / gh_name).rename(self.outdir / revision_dirname),) + (self.outdir / topdir).rename(self.outdir / revision_dirname) # Make downloaded files executable for dirpath, _, filelist in os.walk(self.outdir / revision_dirname): diff --git a/nf_core/pipelines/lint/actions_schema_validation.py b/nf_core/pipelines/lint/actions_schema_validation.py index 272397ad79..cefbb009fa 100644 --- a/nf_core/pipelines/lint/actions_schema_validation.py +++ b/nf_core/pipelines/lint/actions_schema_validation.py @@ -12,7 +12,7 @@ def actions_schema_validation(self) -> dict[str, list[str]]: nf-core pipelines use GitHub actions workflows to run CI tests, check formatting and also linting, among others. 
These workflows are defined by ``yml`` scripts in ``.github/workflows/``. This lint test verifies that these scripts are valid - by comparing them against the `JSON schema for GitHub workflows <https://json.schemastore.org/github-workflow>`_. + by comparing them against the `JSON schema for GitHub workflows <https://www.schemastore.org/github-workflow>`_. To pass this test, make sure that all your workflows contain the required properties ``on`` and ``jobs`` and that all other properties are of the correct type, as specified in the schema (link above). @@ -28,11 +28,11 @@ def actions_schema_validation(self) -> dict[str, list[str]]: action_workflows = list(Path(self.wf_path).glob(".github/workflows/*.y*ml")) # Load the GitHub workflow schema - r = requests.get("https://json.schemastore.org/github-workflow", allow_redirects=True) + r = requests.get("https://www.schemastore.org/github-workflow", allow_redirects=True) # handle "Service Unavailable" error if r.status_code not in [200, 301]: warned.append( - f"Failed to fetch schema: Response code for `https://json.schemastore.org/github-workflow` was {r.status_code}" + f"Failed to fetch schema: Response code for `https://www.schemastore.org/github-workflow` was {r.status_code}" ) return {"passed": passed, "failed": failed, "warned": warned} schema: dict[str, Any] = r.json() diff --git a/nf_core/pipelines/lint/rocrate_readme_sync.py b/nf_core/pipelines/lint/rocrate_readme_sync.py index 7cd503382d..ffe9679b3f 100644 --- a/nf_core/pipelines/lint/rocrate_readme_sync.py +++ b/nf_core/pipelines/lint/rocrate_readme_sync.py @@ -8,14 +8,12 @@ def rocrate_readme_sync(self): """ Check if the RO-Crate description in ro-crate-metadata.json matches the README.md content. - If the --fix is set, the RO-Crate description will be updated to match the README.md content. + If not, the RO-Crate description will be automatically updated to match the README.md content during linting. 
""" passed = [] - failed = [] ignored = [] fixed = [] - could_fix: bool = False # Check if the file exists before trying to load it metadata_file = Path(self.wf_path, "ro-crate-metadata.json") @@ -27,7 +25,7 @@ def rocrate_readme_sync(self): ignored.append("`ro-crate-metadata.json` not found") if not readme_file.exists(): ignored.append("`README.md` not found") - return {"passed": passed, "failed": failed, "ignored": ignored} + return {"passed": passed, "fixed": fixed, "ignored": ignored} try: metadata_content = metadata_file.read_text(encoding="utf-8") @@ -35,38 +33,27 @@ def rocrate_readme_sync(self): except json.JSONDecodeError as e: log.error("Failed to decode JSON from `ro-crate-metadata.json`: %s", e) ignored.append("Invalid JSON in `ro-crate-metadata.json`") - return {"passed": passed, "failed": failed, "ignored": ignored} + return {"passed": passed, "fixed": fixed, "ignored": ignored} readme_content = readme_file.read_text(encoding="utf-8") graph = metadata_dict.get("@graph") + if not graph or not isinstance(graph, list) or not graph[0] or not isinstance(graph[0], dict): ignored.append("Invalid RO-Crate metadata structure.") else: # Check if the 'description' key is present if "description" not in graph[0]: - if "rocrate_readme_sync" in self.fix: - metadata_dict.get("@graph")[0]["description"] = readme_content - fixed.append("Fixed: add the same description from `README.md` to the RO-Crate metadata.") - else: - ignored.append("No description found in `ro-crate-metadata.json`.") - return {"passed": passed, "failed": failed, "ignored": ignored} + metadata_dict.get("@graph")[0]["description"] = readme_content + fixed.append("Fixed: add the same description from `README.md` to the RO-Crate metadata.") rc_description_graph = metadata_dict.get("@graph", [{}])[0].get("description") # Compare the two strings and add a linting error if they don't match if readme_content != rc_description_graph: - # If the --fix flag is set, you could overwrite the RO-Crate 
description with the README content: - if "rocrate_readme_sync" in self.fix: - metadata_dict.get("@graph")[0]["description"] = readme_content - fixed.append("Fixed: add the same description from `README.md` to the RO-Crate metadata.") - with metadata_file.open("w", encoding="utf-8") as f: - json.dump(metadata_dict, f, indent=4) - passed.append("RO-Crate description matches the `README.md`.") - fixed.append("Mismatch fixed: RO-Crate description updated from `README.md`.") - else: - failed.append( - "The RO-Crate descriptions do not match the README.md content. Use `nf-core pipelines lint --fix rocrate_readme_sync` to update." - ) - could_fix = True + metadata_dict.get("@graph")[0]["description"] = readme_content + with metadata_file.open("w", encoding="utf-8") as f: + json.dump(metadata_dict, f, indent=4) + passed.append("RO-Crate description matches the `README.md`.") + fixed.append("Mismatch fixed: RO-Crate description updated from `README.md`.") else: passed.append("RO-Crate descriptions are in sync with `README.md`.") - return {"passed": passed, "failed": failed, "ignored": ignored, "fixed": fixed, "could_fix": could_fix} + return {"passed": passed, "fixed": fixed, "ignored": ignored} diff --git a/nf_core/pipelines/sync.py b/nf_core/pipelines/sync.py index e977d51fc4..33fa4674ee 100644 --- a/nf_core/pipelines/sync.py +++ b/nf_core/pipelines/sync.py @@ -4,7 +4,6 @@ import logging import os import re -import shutil from pathlib import Path from typing import Any @@ -70,6 +69,7 @@ def __init__( gh_username: str | None = None, template_yaml_path: str | None = None, force_pr: bool = False, + blog_post: str = "", ): """Initialise syncing object""" @@ -92,6 +92,7 @@ def __init__( self.gh_username = gh_username self.gh_repo = gh_repo self.pr_url = "" + self.blog_post = blog_post self.config_yml_path, self.config_yml = nf_core.utils.load_tools_config(self.pipeline_dir) assert self.config_yml_path is not None and self.config_yml is not None # mypy @@ -140,7 +141,7 @@ def 
sync(self) -> None: self.inspect_sync_dir() self.get_wf_config() self.checkout_template_branch() - self.delete_template_branch_files() + self.delete_tracked_template_branch_files() self.make_template_pipeline() self.commit_template_changes() @@ -163,7 +164,6 @@ def sync(self) -> None: self.create_merge_base_branch() self.push_merge_branch() self.make_pull_request() - self.close_open_template_merge_prs() except PullRequestExceptionError as e: self.reset_target_dir() raise PullRequestExceptionError(e) @@ -194,9 +194,14 @@ def inspect_sync_dir(self): # Check to see if there are uncommitted changes on current branch if self.repo.is_dirty(untracked_files=True): raise SyncExceptionError( - "Uncommitted changes found in pipeline directory!\nPlease commit these before running nf-core pipelines sync" + "Uncommitted changes found in pipeline directory!\n" + "Please commit these before running nf-core pipelines sync.\n" + "(Hint: .gitignored files are ignored.)" ) + # Track ignored files to avoid processing them + self.ignored_files = self._get_ignored_files() + def get_wf_config(self): """Check out the target branch if requested and fetch the nextflow config. Check that we have the required config variables. 
@@ -240,25 +245,51 @@ def checkout_template_branch(self): except GitCommandError: raise SyncExceptionError("Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'") - def delete_template_branch_files(self): + def delete_tracked_template_branch_files(self): """ - Delete all files in the TEMPLATE branch + Delete all tracked files and subsequent empty directories in the TEMPLATE branch """ - # Delete everything - log.info("Deleting all files in 'TEMPLATE' branch") - for the_file in os.listdir(self.pipeline_dir): - if the_file == ".git": - continue - file_path = os.path.join(self.pipeline_dir, the_file) + # Delete tracked files + log.info("Deleting tracked files in 'TEMPLATE' branch") + self._delete_tracked_files() + self._clean_up_empty_dirs() + + def _delete_tracked_files(self): + """ + Delete all tracked files in the repository + """ + for the_file in self._get_tracked_files(): + file_path = Path(self.pipeline_dir) / the_file log.debug(f"Deleting {file_path}") try: - if os.path.isfile(file_path): - os.unlink(file_path) - elif os.path.isdir(file_path): - shutil.rmtree(file_path) + file_path.unlink() except Exception as e: raise SyncExceptionError(e) + def _clean_up_empty_dirs(self): + """ + Delete empty directories in the repository + + Walks the directory tree from the bottom up, deleting empty directories as it goes. 
+ """ + # Track deleted child directories so we know they've been deleted when evaluating if the parent is empty + deleted = set() + + for curr_dir, sub_dirs, files in os.walk(self.pipeline_dir, topdown=False): + # Don't delete the root directory (should never happen due to .git, but just in case) + if curr_dir == str(self.pipeline_dir): + continue + + subdir_set = set(Path(curr_dir) / d for d in sub_dirs) + currdir_is_empty = (len(subdir_set - deleted) == 0) and (len(files) == 0) + if currdir_is_empty: + log.debug(f"Deleting empty directory {curr_dir}") + try: + Path(curr_dir).rmdir() + except Exception as e: + raise SyncExceptionError(e) + deleted.add(Path(curr_dir)) + def make_template_pipeline(self): """ Delete all files and make a fresh template using the workflow variables @@ -311,7 +342,10 @@ def commit_template_changes(self): return False # Commit changes try: - self.repo.git.add(A=True) + newly_ignored_files = self._get_ignored_files() + # add and commit all files except self.ignored_files + # :! syntax to exclude files using git pathspec + self.repo.git.add([f":!{f}" for f in self.ignored_files if f not in newly_ignored_files], all=True) self.repo.index.commit(f"Template update for nf-core/tools version {nf_core.__version__}") self.made_changes = True log.info("Committed changes to 'TEMPLATE' branch") @@ -377,7 +411,9 @@ def make_pull_request(self): pr_title = f"Important! Template update for nf-core/tools v{nf_core.__version__}" pr_body_text = ( "Version `{tag}` of [nf-core/tools](https://github.com/nf-core/tools) has just been released with updates to the nf-core template. " - "This automated pull-request attempts to apply the relevant updates to this pipeline.\n\n" + f"For more details, check out the blog post: {self.blog_post}\n\n" + if self.blog_post != "" + else "" "Please make sure to merge this pull-request as soon as possible, " f"resolving any merge conflicts in the `{self.merge_branch}` branch (or your own fork, if you prefer). 
" "Once complete, make a new minor release of your pipeline.\n\n" @@ -385,6 +421,9 @@ def make_pull_request(self): "[https://nf-co.re/docs/contributing/sync/](https://nf-co.re/docs/contributing/sync/#merging-automated-prs).\n\n" "For more information about this release of [nf-core/tools](https://github.com/nf-core/tools), " "please see the `v{tag}` [release page](https://github.com/nf-core/tools/releases/tag/{tag})." + "> [!NOTE]\n" + "> Since nf-core/tools 3.5.0, older template update PRs will not be automatically closed, but will remain open in your pipeline repository." + "Older template PRs will be automatically closed once a newer template PR has been merged." ).format(tag=nf_core.__version__) # Make new pull-request @@ -410,74 +449,6 @@ def make_pull_request(self): log.debug(f"GitHub API PR worked, return code {r.status_code}") log.info(f"GitHub PR created: {self.gh_pr_returned_data['html_url']}") - def close_open_template_merge_prs(self): - """Get all template merging branches (starting with 'nf-core-template-merge-') - and check for any open PRs from these branches to the self.from_branch - If open PRs are found, add a comment and close them - """ - log.info("Checking for open PRs from template merge branches") - - # Look for existing pull-requests - list_prs_url = f"https://api.github.com/repos/{self.gh_repo}/pulls" - with self.gh_api.cache_disabled(): - list_prs_request = self.gh_api.get(list_prs_url) - - list_prs_json, list_prs_pp = self._parse_json_response(list_prs_request) - - log.debug(f"GitHub API listing existing PRs:\n{list_prs_url}\n{list_prs_pp}") - if list_prs_request.status_code != 200: - log.warning(f"Could not list open PRs ('{list_prs_request.status_code}')\n{list_prs_url}\n{list_prs_pp}") - return False - - for pr in list_prs_json: - if isinstance(pr, int): - log.debug(f"Incorrect PR format: {pr}") - else: - log.debug(f"Looking at PR from '{pr['head']['ref']}': {pr['html_url']}") - # Ignore closed PRs - if pr["state"] != "open": - 
log.debug(f"Ignoring PR as state not open ({pr['state']}): {pr['html_url']}") - continue - - # Don't close the new PR that we just opened - if pr["head"]["ref"] == self.merge_branch: - continue - - # PR is from an automated branch and goes to our target base - if pr["head"]["ref"].startswith("nf-core-template-merge-") and pr["base"]["ref"] == self.from_branch: - self.close_open_pr(pr) - - def close_open_pr(self, pr) -> bool: - """Given a PR API response, add a comment and close.""" - log.debug(f"Attempting to close PR: '{pr['html_url']}'") - - # Make a new comment explaining why the PR is being closed - comment_text = ( - f"Version `{nf_core.__version__}` of the [nf-core/tools](https://github.com/nf-core/tools) pipeline template has just been released. " - f"This pull-request is now outdated and has been closed in favour of {self.pr_url}\n\n" - f"Please use {self.pr_url} to merge in the new changes from the nf-core template as soon as possible." - ) - with self.gh_api.cache_disabled(): - self.gh_api.post(url=pr["comments_url"], data=json.dumps({"body": comment_text})) - - # Update the PR status to be closed - with self.gh_api.cache_disabled(): - pr_request = self.gh_api.patch(url=pr["url"], data=json.dumps({"state": "closed"})) - - pr_request_json, pr_request_pp = self._parse_json_response(pr_request) - - # PR update worked - if pr_request.status_code == 200: - log.debug(f"GitHub API PR-update worked:\n{pr_request_pp}") - log.info( - f"Closed GitHub PR from '{pr['head']['ref']}' to '{pr['base']['ref']}': {pr_request_json['html_url']}" - ) - return True - # Something went wrong - else: - log.warning(f"Could not close PR ('{pr_request.status_code}'):\n{pr['url']}\n{pr_request_pp}") - return False - @staticmethod def _parse_json_response(response) -> tuple[Any, str]: """Helper method to parse JSON response and create pretty-printed string. 
@@ -503,3 +474,19 @@ def reset_target_dir(self): self.repo.git.checkout(self.original_branch) except GitCommandError as e: raise SyncExceptionError(f"Could not reset to original branch `{self.original_branch}`:\n{e}") + + def _get_ignored_files(self) -> list[str]: + """ + Get a list of all files in the repo ignored by git. + """ + # -z separates with \0 and makes sure special characters are handled correctly + raw_ignored_files = self.repo.git.ls_files(z=True, ignored=True, others=True, exclude_standard=True) + return raw_ignored_files.split("\0")[:-1] if raw_ignored_files else [] + + def _get_tracked_files(self) -> list[str]: + """ + Get a list of all files in the repo tracked by git. + """ + # -z separates with \0 and makes sure special characters are handled correctly + raw_tracked_files = self.repo.git.ls_files(z=True) + return raw_tracked_files.split("\0")[:-1] if raw_tracked_files else [] diff --git a/nf_core/subworkflow-template/main.nf b/nf_core/subworkflow-template/main.nf index d9c3ed687c..17356a00af 100644 --- a/nf_core/subworkflow-template/main.nf +++ b/nf_core/subworkflow-template/main.nf @@ -14,9 +14,6 @@ workflow {{ component_name_underscore|upper }} { ch_bam // channel: [ val(meta), [ bam ] ] main: - - ch_versions = Channel.empty() - // TODO nf-core: substitute modules here for the modules of your subworkflow SAMTOOLS_SORT ( ch_bam ) @@ -30,6 +27,4 @@ workflow {{ component_name_underscore|upper }} { bam = SAMTOOLS_SORT.out.bam // channel: [ val(meta), [ bam ] ] bai = SAMTOOLS_INDEX.out.bai // channel: [ val(meta), [ bai ] ] csi = SAMTOOLS_INDEX.out.csi // channel: [ val(meta), [ csi ] ] - - versions = ch_versions // channel: [ versions.yml ] } diff --git a/requirements-dev.txt b/requirements-dev.txt index 840cb6d558..444f27a821 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,7 +6,7 @@ responses ruff Sphinx sphinx-rtd-theme -textual-dev==1.7.0 +textual-dev==1.8.0 types-PyYAML types-requests types-jsonschema diff --git 
a/requirements.txt b/requirements.txt index 6e1102d936..46da8dbde7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,7 +20,7 @@ rich>=13.3.1 rocrate repo2rocrate tabulate -textual==6.2.1 +textual==6.6.0 trogon pdiff ruamel.yaml diff --git a/setup.py b/setup.py index bb5f81090b..d677f5ddd7 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup -version = "3.4.1" +version = "3.5.0dev" with open("README.md") as f: readme = f.read() diff --git a/tests/modules/lint/test_main_nf.py b/tests/modules/lint/test_main_nf.py index 227a32511c..16fd08cd81 100644 --- a/tests/modules/lint/test_main_nf.py +++ b/tests/modules/lint/test_main_nf.py @@ -112,6 +112,8 @@ def setUp(self): # Install samtools/sort module for all tests in this class if not self.mods_install.install("samtools/sort"): self.skipTest("Could not install samtools/sort module") + if not self.mods_install.install("bamstats/generalstats"): + self.skipTest("Could not install bamstats/generalstats module") def test_main_nf_lint_with_alternative_registry(self): """Test main.nf linting with alternative container registry""" @@ -131,3 +133,24 @@ def test_main_nf_lint_with_alternative_registry(self): module_lint.lint(print_results=False, module="samtools/sort") assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" assert len(module_lint.passed) > 0 + + def test_topics_and_emits_version_check(self): + """Test that main_nf version emit and topics check works correctly""" + + # Lint a module known to have versions YAML in main.nf (for now) + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) + module_lint.lint(print_results=False, module="samtools/sort") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.warned) == 2, ( + f"Linting warned with {[x.__dict__ for x in module_lint.warned]}, expected 2 warnings" + ) + assert 
len(module_lint.passed) > 0 + + # Lint a module known to have topics as output in main.nf + module_lint = nf_core.modules.lint.ModuleLint(directory=self.pipeline_dir) + module_lint.lint(print_results=False, module="bamstats/generalstats") + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.warned) == 0, ( + f"Linting warned with {[x.__dict__ for x in module_lint.warned]}, expected 1 warning" + ) + assert len(module_lint.passed) > 0 diff --git a/tests/modules/test_create.py b/tests/modules/test_create.py index 6ef7c3db3a..9e246675c3 100644 --- a/tests/modules/test_create.py +++ b/tests/modules/test_create.py @@ -236,17 +236,23 @@ def test_modules_meta_yml_structure_biotools_meta(self): }, ] ], - "versions": [ - { - "versions.yml": { - "type": "file", - "description": "File containing software versions", - "pattern": "versions.yml", - "ontologies": [{"edam": "http://edamontology.org/format_3750"}], - } - } + "versions_bpipe": [ + [ + {"${task.process}": {"type": "string", "description": "The name of the process"}}, + {"bpipe": {"type": "string", "description": "The name of the tool"}}, + {"bpipe --version": {"type": "string", "description": "The version of the tool"}}, + ] ], }, + "topics": { + "versions": [ + [ + {"process": {"description": "The process the versions were collected from", "type": "string"}}, + {"tool": {"description": "The tool name the version was collected for", "type": "string"}}, + {"version": {"description": "The version of the tool", "type": "string"}}, + ] + ] + }, "authors": ["@author"], "maintainers": ["@author"], } @@ -312,17 +318,23 @@ def test_modules_meta_yml_structure_biotools_nometa(self): } } ], - "versions": [ - { - "versions.yml": { - "type": "file", - "description": "File containing software versions", - "pattern": "versions.yml", - "ontologies": [{"edam": "http://edamontology.org/format_3750"}], - } - } + "versions_bpipe": [ + [ + {"${task.process}": 
{"type": "string", "description": "The name of the process"}}, + {"bpipe": {"type": "string", "description": "The name of the tool"}}, + {"bpipe --version": {"type": "string", "description": "The version of the tool"}}, + ] ], }, + "topics": { + "versions": [ + [ + {"process": {"description": "The process the versions were collected from", "type": "string"}}, + {"tool": {"description": "The tool name the version was collected for", "type": "string"}}, + {"version": {"description": "The version of the tool", "type": "string"}}, + ] + ] + }, "authors": ["@author"], "maintainers": ["@author"], } @@ -411,17 +423,23 @@ def test_modules_meta_yml_structure_template_meta( }, ] ], - "versions": [ - { - "versions.yml": { - "type": "file", - "description": "File containing software versions", - "pattern": "versions.yml", - "ontologies": [{"edam": "http://edamontology.org/format_3750"}], - } - } + "versions_test": [ + [ + {"${task.process}": {"type": "string", "description": "The name of the process"}}, + {"test": {"type": "string", "description": "The name of the tool"}}, + {"test --version": {"type": "string", "description": "The version of the tool"}}, + ] ], }, + "topics": { + "versions": [ + [ + {"process": {"description": "The process the versions were collected from", "type": "string"}}, + {"tool": {"description": "The tool name the version was collected for", "type": "string"}}, + {"version": {"description": "The version of the tool", "type": "string"}}, + ] + ] + }, "authors": ["@author"], "maintainers": ["@author"], } @@ -494,17 +512,23 @@ def test_modules_meta_yml_structure_template_nometa( } } ], - "versions": [ - { - "versions.yml": { - "type": "file", - "description": "File containing software versions", - "pattern": "versions.yml", - "ontologies": [{"edam": "http://edamontology.org/format_3750"}], - } - } + "versions_test": [ + [ + {"${task.process}": {"type": "string", "description": "The name of the process"}}, + {"test": {"type": "string", "description": "The 
name of the tool"}}, + {"test --version": {"type": "string", "description": "The version of the tool"}}, + ] ], }, + "topics": { + "versions": [ + [ + {"process": {"description": "The process the versions were collected from", "type": "string"}}, + {"tool": {"description": "The tool name the version was collected for", "type": "string"}}, + {"version": {"description": "The version of the tool", "type": "string"}}, + ] + ] + }, "authors": ["@author"], "maintainers": ["@author"], } @@ -571,17 +595,23 @@ def test_modules_meta_yml_structure_empty_meta( {"*": {"type": "file", "description": "", "pattern": "", "ontologies": [{"edam": ""}]}}, ] ], - "versions": [ - { - "versions.yml": { - "type": "file", - "description": "File containing software versions", - "pattern": "versions.yml", - "ontologies": [{"edam": "http://edamontology.org/format_3750"}], - } - } + "versions_test": [ + [ + {"${task.process}": {"type": "string", "description": "The name of the process"}}, + {"test": {"type": "string", "description": "The name of the tool"}}, + {"test --version": {"type": "string", "description": "The version of the tool"}}, + ] ], }, + "topics": { + "versions": [ + [ + {"process": {"description": "The process the versions were collected from", "type": "string"}}, + {"tool": {"description": "The tool name the version was collected for", "type": "string"}}, + {"version": {"description": "The version of the tool", "type": "string"}}, + ] + ] + }, "authors": ["@author"], "maintainers": ["@author"], } @@ -628,17 +658,23 @@ def test_modules_meta_yml_structure_empty_nometa( "input": [{"input": {"type": "file", "description": "", "pattern": "", "ontologies": [{"edam": ""}]}}], "output": { "output": [{"*": {"type": "file", "description": "", "pattern": "", "ontologies": [{"edam": ""}]}}], - "versions": [ - { - "versions.yml": { - "type": "file", - "description": "File containing software versions", - "pattern": "versions.yml", - "ontologies": [{"edam": 
"http://edamontology.org/format_3750"}], - } - } + "versions_test": [ + [ + {"${task.process}": {"type": "string", "description": "The name of the process"}}, + {"test": {"type": "string", "description": "The name of the tool"}}, + {"test --version": {"type": "string", "description": "The version of the tool"}}, + ] ], }, + "topics": { + "versions": [ + [ + {"process": {"description": "The process the versions were collected from", "type": "string"}}, + {"tool": {"description": "The tool name the version was collected for", "type": "string"}}, + {"version": {"description": "The version of the tool", "type": "string"}}, + ] + ] + }, "authors": ["@author"], "maintainers": ["@author"], } diff --git a/tests/pipelines/download/test_download.py b/tests/pipelines/download/test_download.py index acf4c08734..5ecaa5e47a 100644 --- a/tests/pipelines/download/test_download.py +++ b/tests/pipelines/download/test_download.py @@ -49,6 +49,38 @@ def __contains__(self, item: str) -> bool: # # Tests for 'get_release_hash' # + def test_get_release_hash_release_noauth(self): + wfs = nf_core.pipelines.list.Workflows() + wfs.get_remote_workflows() + pipeline = "methylseq" + + try: + # explicitly overwrite the gh_api authentication state + # to force using the archive url + from nf_core.utils import gh_api + + gh_api.lazy_init() + gh_api.auth = None + + download_obj = DownloadWorkflow(pipeline=pipeline, revision="1.6") + ( + download_obj.pipeline, + download_obj.wf_revisions, + download_obj.wf_branches, + ) = nf_core.utils.get_repo_releases_branches(pipeline, wfs) + download_obj.get_revision_hash() + assert download_obj.wf_sha[download_obj.revision[0]] == "b3e5e3b95aaf01d98391a62a10a3990c0a4de395" + assert download_obj.outdir == Path("nf-core-methylseq_1.6") + + assert ( + download_obj.wf_download_url[download_obj.revision[0]] + == "https://github.com/nf-core/methylseq/archive/b3e5e3b95aaf01d98391a62a10a3990c0a4de395.zip" + ) + + finally: + gh_api.has_init = False + gh_api.auth = None + 
def test_get_release_hash_release(self): wfs = nf_core.pipelines.list.Workflows() wfs.get_remote_workflows() @@ -64,7 +96,7 @@ def test_get_release_hash_release(self): assert download_obj.outdir == Path("nf-core-methylseq_1.6") assert ( download_obj.wf_download_url[download_obj.revision[0]] - == "https://github.com/nf-core/methylseq/archive/b3e5e3b95aaf01d98391a62a10a3990c0a4de395.zip" + == "https://api.github.com/repos/nf-core/methylseq/zipball/b3e5e3b95aaf01d98391a62a10a3990c0a4de395" ) def test_get_release_hash_branch(self): @@ -83,7 +115,7 @@ def test_get_release_hash_branch(self): assert download_obj.outdir == Path("nf-core-exoseq_dev") assert ( download_obj.wf_download_url[download_obj.revision[0]] - == "https://github.com/nf-core/exoseq/archive/819cbac792b76cf66c840b567ed0ee9a2f620db7.zip" + == "https://api.github.com/repos/nf-core/exoseq/zipball/819cbac792b76cf66c840b567ed0ee9a2f620db7" ) def test_get_release_hash_long_commit(self): @@ -104,7 +136,7 @@ def test_get_release_hash_long_commit(self): assert download_obj.outdir == Path(f"nf-core-exoseq_{revision}") assert ( download_obj.wf_download_url[download_obj.revision[0]] - == f"https://github.com/nf-core/exoseq/archive/{revision}.zip" + == f"https://api.github.com/repos/nf-core/exoseq/zipball/{revision}" ) def test_get_release_hash_short_commit(self): @@ -127,7 +159,7 @@ def test_get_release_hash_short_commit(self): assert download_obj.outdir == Path(f"nf-core-exoseq_{short_rev}") assert ( download_obj.wf_download_url[download_obj.revision[0]] - == f"https://github.com/nf-core/exoseq/archive/{revision}.zip" + == f"https://api.github.com/repos/nf-core/exoseq/zipball/{revision}" ) def test_get_release_hash_non_existent_release(self): @@ -153,7 +185,7 @@ def test_download_wf_files(self, outdir): download_obj.outdir = outdir download_obj.wf_sha = {"1.6": "b3e5e3b95aaf01d98391a62a10a3990c0a4de395"} download_obj.wf_download_url = { - "1.6": 
"https://github.com/nf-core/methylseq/archive/b3e5e3b95aaf01d98391a62a10a3990c0a4de395.zip" + "1.6": "https://api.github.com/repos/nf-core/methylseq/zipball/b3e5e3b95aaf01d98391a62a10a3990c0a4de395" } rev = download_obj.download_wf_files( download_obj.revision[0], diff --git a/tests/pipelines/lint/test_rocrate_readme_sync.py b/tests/pipelines/lint/test_rocrate_readme_sync.py index 6b8e7f7d93..cd600481e2 100644 --- a/tests/pipelines/lint/test_rocrate_readme_sync.py +++ b/tests/pipelines/lint/test_rocrate_readme_sync.py @@ -12,25 +12,6 @@ def test_rocrate_readme_sync_pass(self): assert len(results.get("failed", [])) == 0 assert len(results.get("passed", [])) > 0 - def test_rocrate_readme_sync_fail(self): - self.lint_obj._load() - - json_path = Path(self.lint_obj.wf_path, "ro-crate-metadata.json") - with open(json_path) as f: - try: - rocrate = json.load(f) - except json.JSONDecodeError as e: - raise UserWarning(f"Unable to load JSON file '{json_path}' due to error {e}") - rocrate["@graph"][0]["description"] = "This is a test script" - with open(json_path, "w") as f: - json.dump(rocrate, f, indent=4) - results = self.lint_obj.rocrate_readme_sync() - assert len(results.get("failed", [])) == 1 - assert ( - "The RO-Crate descriptions do not match the README.md content. Use `nf-core pipelines lint --fix rocrate_readme_sync` to update." 
- in results.get("failed", []) - ) - def test_rocrate_readme_sync_fixed(self): self.lint_obj._load() json_path = Path(self.lint_obj.wf_path, "ro-crate-metadata.json") @@ -43,12 +24,6 @@ def test_rocrate_readme_sync_fixed(self): with open(json_path, "w") as f: json.dump(rocrate, f, indent=4) - results = self.lint_obj.rocrate_readme_sync() - assert len(results.get("failed", [])) == 1 - - # Fix the issue - assert "rocrate_readme_sync" in self.lint_obj.lint_tests - self.lint_obj.fix = ["rocrate_readme_sync"] - self.lint_obj._load() results = self.lint_obj.rocrate_readme_sync() assert len(results.get("failed", [])) == 0 + assert len(results.get("fixed", [])) == 1 diff --git a/tests/pipelines/test_bump_version.py b/tests/pipelines/test_bump_version.py index 8cee7dc095..ec5021f663 100644 --- a/tests/pipelines/test_bump_version.py +++ b/tests/pipelines/test_bump_version.py @@ -80,16 +80,18 @@ def test_bump_pipeline_version_in_snapshot(self): snapshot_dir.mkdir(parents=True, exist_ok=True) snapshot_fn = snapshot_dir / "main.nf.test.snap" snapshot_fn.touch() + + pipeline_slug = f"{self.pipeline_obj.pipeline_prefix}/{self.pipeline_obj.pipeline_name}" # write version number in snapshot with open(snapshot_fn, "w") as fh: - fh.write("nf-core/testpipeline=1.0.0dev") + fh.write(f"{pipeline_slug}=1.0.0dev") # Bump the version number nf_core.pipelines.bump_version.bump_pipeline_version(self.pipeline_obj, "1.1.0") # Check the snapshot with open(snapshot_fn) as fh: - assert fh.read().strip() == "nf-core/testpipeline=1.1.0" + assert fh.read().strip() == f"{pipeline_slug}=1.1.0" def test_bump_pipeline_version_in_snapshot_no_version(self): """Test that bump version does not update versions in the snapshot if no version is given.""" diff --git a/tests/pipelines/test_params_file.py b/tests/pipelines/test_params_file.py index 723a8a561e..a62b90f4ed 100644 --- a/tests/pipelines/test_params_file.py +++ b/tests/pipelines/test_params_file.py @@ -25,7 +25,7 @@ def test_build_template(self): with 
open(self.outfile) as fh: out = fh.read() - assert "nf-core/testpipeline" in out + assert f"{self.pipeline_obj.pipeline_prefix}/{self.pipeline_obj.pipeline_name}" in out def test_build_template_invalid_schema(self): """Build a schema from a template""" @@ -63,5 +63,5 @@ def test_build_template_content(self): with open(self.outfile) as fh: out = fh.read() - assert "nf-core/testpipeline" in out + assert f"{self.pipeline_obj.pipeline_prefix}/{self.pipeline_obj.pipeline_name}" in out assert "# input: null" in out diff --git a/tests/pipelines/test_rocrate.py b/tests/pipelines/test_rocrate.py index ac86e64bdf..a073beae31 100644 --- a/tests/pipelines/test_rocrate.py +++ b/tests/pipelines/test_rocrate.py @@ -55,7 +55,9 @@ def test_rocrate_creation(self): for entity in entities: entity_json = entity.as_jsonld() if entity_json["@id"] == "./": - self.assertEqual(entity_json.get("name"), "nf-core/testpipeline") + self.assertEqual( + entity_json.get("name"), f"{self.pipeline_obj.pipeline_prefix}/{self.pipeline_obj.pipeline_name}" + ) self.assertEqual(entity_json["mainEntity"], {"@id": "main.nf"}) elif entity_json["@id"] == "#main.nf": self.assertEqual(entity_json["programmingLanguage"], [{"@id": "#nextflow"}]) diff --git a/tests/pipelines/test_sync.py b/tests/pipelines/test_sync.py index 2c300f6a4f..746b2db76a 100644 --- a/tests/pipelines/test_sync.py +++ b/tests/pipelines/test_sync.py @@ -31,8 +31,8 @@ def json(self): return self.data -def mocked_requests_get(url) -> MockResponse: - """Helper function to emulate POST requests responses from the web""" +def mocked_requests_get(url, params=None, **kwargs) -> MockResponse: + """Helper function to emulate GET request responses from the web""" url_template = "https://api.github.com/repos/{}/response/" if url == Path(url_template.format("no_existing_pr"), "pulls?head=TEMPLATE&base=None"): @@ -120,6 +120,17 @@ def test_inspect_sync_dir_dirty(self): finally: os.remove(test_fn) + def test_inspect_sync_ignored_files(self): + """ + Try 
inspecting the repo for syncing with untracked changes that are ignored. + No assertions, we are checking that no exception is raised in the process. + """ + test_fn = Path(self.pipeline_dir) / "ignored.txt" + self._make_ignored_file(test_fn) + + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + def test_get_wf_config_no_branch(self): """Try getting a workflow config when the branch doesn't exist""" # Try to sync, check we halt with the right error @@ -161,24 +172,134 @@ def test_checkout_template_branch_no_template(self): psync.checkout_template_branch() assert exc_info.value.args[0] == "Could not check out branch 'origin/TEMPLATE' or 'TEMPLATE'" - def test_delete_template_branch_files(self): - """Confirm that we can delete all files in the TEMPLATE branch""" + def test_delete_tracked_template_branch_files(self): + """Confirm that we can delete all tracked files in the TEMPLATE branch""" + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + psync.checkout_template_branch() + psync.delete_tracked_template_branch_files() + top_level_ignored = self._get_top_level_ignored(psync) + assert set(os.listdir(self.pipeline_dir)) == set([".git"]).union(top_level_ignored) + + def test_delete_tracked_template_branch_files_unlink_throws_error(self): + """Test that SyncExceptionError is raised when Path.unlink throws an exception""" + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + psync.checkout_template_branch() + + # Create a test file that would normally be deleted + test_file = Path(self.pipeline_dir) / "test_file.txt" + test_file.touch() + + # Mock Path.unlink to raise an exception + with mock.patch("pathlib.Path.unlink", side_effect=OSError("Permission denied")) as mock_unlink: + with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: + psync.delete_tracked_template_branch_files() + 
+ # Verify the exception contains the original error + assert "Permission denied" in str(exc_info.value) + + # Verify Path.unlink was called + mock_unlink.assert_called() + + def test_delete_tracked_template_branch_rmdir_throws_error(self): + """Test that SyncExceptionError is raised when Path.rmdir throws an exception""" + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + psync.inspect_sync_dir() + psync.get_wf_config() + psync.checkout_template_branch() + + # Create an empty directory that would normally be deleted + empty_dir = Path(self.pipeline_dir) / "empty_test_dir" + empty_dir.mkdir() + + # Mock Path.rmdir to raise an exception + with mock.patch("pathlib.Path.rmdir", side_effect=OSError("Permission denied")) as mock_rmdir: + with pytest.raises(nf_core.pipelines.sync.SyncExceptionError) as exc_info: + psync.delete_tracked_template_branch_files() + + # Verify the exception contains the original error + assert "Permission denied" in str(exc_info.value) + + # Verify Path.rmdir was called + mock_rmdir.assert_called() + + def test_delete_staged_template_branch_files_ignored(self): + """Confirm that files in .gitignore are not deleted by delete_staged_template_branch_files""" psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + + ignored_file = Path(self.pipeline_dir) / "ignored.txt" + self._make_ignored_file(ignored_file) + psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() - psync.delete_template_branch_files() - assert os.listdir(self.pipeline_dir) == [".git"] + psync.delete_tracked_template_branch_files() + + # Ignored file should still exist + assert ignored_file.exists() + + # .git directory should still exist + assert (Path(self.pipeline_dir) / ".git").exists() + + def test_delete_staged_template_branch_files_ignored_nested_dir(self): + """Confirm that deletion of ignored files respects directory structure""" + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + repo = 
git.Repo(self.pipeline_dir) + + # Create this structure: + # dir + # ├── subdirA # (should be kept) + # │   └── subdirB # (should be kept) + # │   └── ignored.txt # add to .gitignore (should be kept) + # └── subdirC # (should be deleted) + # └── subdirD # (should be deleted) + # └── not_ignored.txt # commit this file (should be deleted) + parent_dir = Path(self.pipeline_dir) / "dir" + to_be_kept_dir = parent_dir / "subdirA" / "subdirB" + ignored_file = to_be_kept_dir / "ignored.txt" + to_be_deleted_dir = parent_dir / "subdirC" / "subdirD" + non_ignored_file = to_be_deleted_dir / "not_ignored.txt" + + to_be_kept_dir.mkdir(parents=True) + to_be_deleted_dir.mkdir(parents=True) + non_ignored_file.touch() + + repo.git.add(non_ignored_file) + repo.index.commit("Add non-ignored file") + + self._make_ignored_file(ignored_file) + + psync.inspect_sync_dir() + psync.get_wf_config() + psync.checkout_template_branch() + psync.delete_tracked_template_branch_files() + + # Ignored file and its parent directory should still exist + assert ignored_file.exists() + assert to_be_kept_dir.exists() # subdirB + assert to_be_kept_dir.parent.exists() # subdirA + + # Non-ignored file and its parent directory should be deleted + assert not non_ignored_file.exists() + assert not to_be_deleted_dir.exists() # subdirD + assert not to_be_deleted_dir.parent.exists() # subdirC + + # .git directory should still exist + assert (Path(self.pipeline_dir) / ".git").exists() def test_create_template_pipeline(self): - """Confirm that we can delete all files in the TEMPLATE branch""" + """Confirm that we can create a new template pipeline in an empty directory""" # First, delete all the files psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) psync.inspect_sync_dir() psync.get_wf_config() psync.checkout_template_branch() - psync.delete_template_branch_files() - assert os.listdir(self.pipeline_dir) == [".git"] + psync.delete_tracked_template_branch_files() + top_level_ignored = 
self._get_top_level_ignored(psync) + assert set(os.listdir(self.pipeline_dir)) == set([".git"]).union(top_level_ignored) # Now create the new template psync.make_template_pipeline() assert "main.nf" in os.listdir(self.pipeline_dir) @@ -211,6 +332,22 @@ def test_commit_template_changes_changes(self): # Check that we don't have any uncommitted changes assert psync.repo.is_dirty(untracked_files=True) is False + + def test_commit_template_preserves_ignored(self): + """Check that committing template changes preserves git-ignored files""" + # Check out the TEMPLATE branch but skip making the new template etc. + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + + ignored_file = Path(self.pipeline_dir) / "ignored.txt" + + self._make_ignored_file(ignored_file) + + psync.inspect_sync_dir() + psync.get_wf_config() + psync.checkout_template_branch() + psync.commit_template_changes() + + assert ignored_file.exists() + def test_push_template_branch_error(self): """Try pushing the changes, but without a remote (should fail)""" # Check out the TEMPLATE branch but skip making the new template etc. 
@@ -311,71 +448,6 @@ def test_make_pull_request_bad_response(self, mock_post, mock_get): "Something went badly wrong - GitHub API PR failed - got return code 404" ) - @mock.patch("nf_core.utils.gh_api.get", side_effect=mocked_requests_get) - def test_close_open_template_merge_prs(self, mock_get): - """Try closing all open prs""" - psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) - psync.inspect_sync_dir() - psync.get_wf_config() - psync.gh_api.get = mock_get - psync.gh_username = "list_prs" - psync.gh_repo = "list_prs/response" - os.environ["GITHUB_AUTH_TOKEN"] = "test" - - with mock.patch("nf_core.pipelines.sync.PipelineSync.close_open_pr") as mock_close_open_pr: - psync.close_open_template_merge_prs() - - prs = mock_get(f"https://api.github.com/repos/{psync.gh_repo}/pulls").data - for pr in prs: - if pr.get("state", None) == "open": - mock_close_open_pr.assert_any_call(pr) - - @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) - @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) - def test_close_open_pr(self, mock_patch, mock_post) -> None: - psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) - psync.inspect_sync_dir() - psync.get_wf_config() - psync.gh_api.post = mock_post - psync.gh_api.patch = mock_patch - psync.gh_username = "bad_url" - psync.gh_repo = "bad_url/response" - os.environ["GITHUB_AUTH_TOKEN"] = "test" - pr: dict[str, str | dict[str, str]] = { - "state": "open", - "head": {"ref": "nf-core-template-merge-3"}, - "base": {"ref": "main"}, - "html_url": "pr_html_url", - "url": "url_to_update_pr", - "comments_url": "pr_comments_url", - } - - assert psync.close_open_pr(pr) - mock_patch.assert_called_once_with(url="url_to_update_pr", data='{"state": "closed"}') - - @mock.patch("nf_core.utils.gh_api.post", side_effect=mocked_requests_post) - @mock.patch("nf_core.utils.gh_api.patch", side_effect=mocked_requests_patch) - def test_close_open_pr_fail(self, mock_patch, mock_post): - 
psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) - psync.inspect_sync_dir() - psync.get_wf_config() - psync.gh_api.post = mock_post - psync.gh_api.patch = mock_patch - psync.gh_username = "bad_url" - psync.gh_repo = "bad_url/response" - os.environ["GITHUB_AUTH_TOKEN"] = "test" - pr = { - "state": "open", - "head": {"ref": "nf-core-template-merge-3"}, - "base": {"ref": "main"}, - "html_url": "pr_html_url", - "url": "bad_url_to_update_pr", - "comments_url": "pr_comments_url", - } - - assert not psync.close_open_pr(pr) - mock_patch.assert_called_once_with(url="bad_url_to_update_pr", data='{"state": "closed"}') - def test_reset_target_dir(self): """Try resetting target pipeline directory""" psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) @@ -430,3 +502,39 @@ def test_sync_no_github_token(self): with self.assertRaises(nf_core.pipelines.sync.PullRequestExceptionError) as exc_info: psync.sync() self.assertIn("GITHUB_AUTH_TOKEN not set!", str(exc_info.exception)) + + def test_sync_preserves_ignored_files(self): + """Test that sync preserves files and directories specified in .gitignore""" + with ( + mock.patch("requests.get", side_effect=mocked_requests_get), + ): + psync = nf_core.pipelines.sync.PipelineSync(self.pipeline_dir) + + ignored_file = Path(self.pipeline_dir) / "ignored.txt" + self._make_ignored_file(ignored_file) + + psync.made_changes = True + + psync.sync() + + self.assertTrue(ignored_file.exists()) + + def _make_ignored_file(self, file_path: Path): + """Helper function to create an ignored file.""" + if not self.pipeline_dir: + raise ValueError("Instantiate a pipeline before adding ignored files.") + + file_path.touch() + + gitignore_path = Path(self.pipeline_dir) / ".gitignore" + with open(gitignore_path, "a") as f: + f.write(f"{file_path.name}\n") + + repo = git.Repo(self.pipeline_dir) + repo.git.add(".gitignore") + repo.index.commit("Add .gitignore") + + def _get_top_level_ignored(self, psync: 
nf_core.pipelines.sync.PipelineSync) -> set[str]: + """Helper function to get top-level part of relative directory of ignored files from psync.ignored_files.""" + top_level_ignored = {Path(f).parts[0] for f in psync.ignored_files} + return top_level_ignored diff --git a/tests/test_cli.py b/tests/test_cli.py index 43b45e0df7..f74b546cd2 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -191,17 +191,17 @@ def test_cli_download(self, mock_dl): cmd[-1], (params["revision"],), params["outdir"], - params["compress"], - "force" in params, - "platform" in params, - params["download-configuration"], - (params["tag"],), - params["container-system"], - (params["container-library"],), - params["container-cache-utilisation"], - params["container-cache-index"], - params["parallel-downloads"], - "hide-progress" in toplevel_params, + compress_type=params["compress"], + force="force" in params, + platform="platform" in params, + download_configuration=params["download-configuration"], + additional_tags=(params["tag"],), + container_system=params["container-system"], + container_library=(params["container-library"],), + container_cache_utilisation=params["container-cache-utilisation"], + container_cache_index=params["container-cache-index"], + parallel=params["parallel-downloads"], + hide_progress="hide-progress" in toplevel_params, ) mock_dl.return_value.download_workflow.assert_called_once() diff --git a/tests/test_utils.py b/tests/test_utils.py index 9ed28e0d19..904d447572 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -107,7 +107,7 @@ def test_request_cant_create_cache(self, mock_exists, mock_mkdir): nf_core.utils.setup_requests_cachedir() def test_pip_package_pass(self): - result = nf_core.utils.pip_package("multiqc=1.10") + result = nf_core.utils.pip_package("multiqc=1.32") assert isinstance(result, dict) @mock.patch("requests.get") @@ -118,7 +118,7 @@ def test_pip_package_timeout(self, mock_get): mock_get.side_effect = requests.exceptions.Timeout() # 
Now do the test with pytest.raises(LookupError): - nf_core.utils.pip_package("multiqc=1.10") + nf_core.utils.pip_package("multiqc=1.32") @mock.patch("requests.get") def test_pip_package_connection_error(self, mock_get): @@ -128,7 +128,7 @@ def test_pip_package_connection_error(self, mock_get): mock_get.side_effect = requests.exceptions.ConnectionError() # Now do the test with pytest.raises(LookupError): - nf_core.utils.pip_package("multiqc=1.10") + nf_core.utils.pip_package("multiqc=1.32") def test_pip_erroneous_package(self): """Tests the PyPi API package information query""" @@ -151,10 +151,10 @@ def test_get_repo_releases_branches_not_nf_core(self): wfs.get_remote_workflows() pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("MultiQC/MultiQC", wfs) for r in wf_releases: - if r.get("tag_name") == "v1.10": + if r.get("tag_name") == "v1.32": break else: - raise AssertionError("MultiQC release v1.10 not found") + raise AssertionError("MultiQC release v1.32 not found") assert "main" in wf_branches.keys() def test_get_repo_releases_branches_not_exists(self):