Add RSEM + refactor tests #51

Workflow file for this run
name: nf-core CI
# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
on:
  push:
    branches:
      - dev
  pull_request:
  release:
    types: [published]
  workflow_dispatch:
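# nf-test snapshot diffs are rendered with pdiff, configured through NFT_DIFF / NFT_DIFF_ARGS below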
env:
  NFT_DIFF: "pdiff"
  NFT_DIFF_ARGS: "--line-numbers --width 120 --expand-tabs=2"
  NFT_VER: "0.9.2"
  NFT_WORKDIR: "~"
  NXF_ANSI_LOG: false
  NXF_SINGULARITY_CACHEDIR: ${{ github.workspace }}/.singularity
  NXF_SINGULARITY_LIBRARYDIR: ${{ github.workspace }}/.singularity
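# Cancel any in-progress run of this workflow for the same PR or branch when a newer one starts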
concurrency:
  group: "${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}"
  cancel-in-progress: true
jobs:
  test:
    runs-on: ubuntu-latest
    name: "Test ${{ matrix.filter }} | ${{ matrix.profile }} | ${{ matrix.NXF_VER }} | ${{ matrix.shard }}/5"
    strategy:
      fail-fast: false
      matrix:
        NXF_VER:
          - "24.04.2"
          - "latest-everything"
        filter: ["pipeline"]
        # filter: ["process", "workflow", "function", "pipeline"]
        profile: ["conda", "docker", "singularity"]
        shard: [1, 2, 3, 4]
        # shard: [1, 2, 3, 4, 5]
        isMaster:
          - ${{ github.base_ref == 'master' }}
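        # Only run the conda and singularity profiles on PRs targeting master; all other runs use docker only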
        exclude:
          - isMaster: false
            profile: "conda"
          - isMaster: false
            profile: "singularity"
    steps:
      - name: Check out pipeline code
        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4
        with:
          fetch-depth: 0
      - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5
        with:
          python-version: "3.11"
          architecture: "x64"
      - name: Set up Nextflow
        uses: nf-core/setup-nextflow@v2
        with:
          version: "${{ matrix.NXF_VER }}"
      - name: Set up nf-test
        uses: nf-core/setup-nf-test@v1
        with:
          version: ${{ env.NFT_VER }}
      - name: Set up apptainer
        if: matrix.profile == 'singularity'
        uses: eWaterCycle/setup-apptainer@main
      - name: Set up Singularity
        if: matrix.profile == 'singularity'
        run: |
          mkdir -p $NXF_SINGULARITY_CACHEDIR
          mkdir -p $NXF_SINGULARITY_LIBRARYDIR
      - name: Cache pdiff
        uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4
        id: cache-pip-pdiff
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-pdiff
      - name: Install pdiff
        run: python -m pip install --upgrade pip pdiff cryptography
      - name: Set up miniconda
        if: matrix.profile == 'conda'
        uses: conda-incubator/setup-miniconda@a4260408e20b96e80095f42ff7f1a15b27dd94ca # v3
        with:
          miniconda-version: "latest"
          auto-update-conda: true
          conda-solver: libmamba
          channels: conda-forge,bioconda
      - name: Set up Conda
        if: matrix.profile == 'conda'
        run: |
          echo $(realpath $CONDA)/condabin >> $GITHUB_PATH
          echo $(realpath python) >> $GITHUB_PATH
      - name: Disk space cleanup
        uses: jlumbroso/free-disk-space@v1.3.1
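      # Dry-run nf-test to collect the names of the tests selected for this shard and list them in the job summary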
      - name: Start summary
        id: print-test
        run: |
          echo "## nf-test tests summary :rocket:" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "This \`${{ matrix.filter }}\` ${{ matrix.shard }}/5 shard was run on \`${{ matrix.profile }}\` | \`NXF_VER=${{ matrix.NXF_VER }}\`, and contains the following test(s):" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          nf-test test \
            --ci \
            --dryRun \
            --junitxml="TEST-${{ matrix.filter }}_${{ matrix.profile }}_${{ matrix.shard }}.xml" \
            --shard ${{ matrix.shard }}/5 \
            --changed-since HEAD^ \
            --follow-dependencies \
            --profile "+${{ matrix.profile }}" \
            --filter ${{ matrix.filter }} \
            | grep PASSED | cut -d "'" -f 2 | sed 's/^/- /' | sort -u >> $GITHUB_STEP_SUMMARY
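      # --shard splits the selected tests across five parallel jobs; --changed-since HEAD^ restricts the run to
      # tests touched by the latest commit, and --follow-dependencies also picks up tests that depend on those
      # changes; --profile "+<profile>" appends the matrix profile (conda/docker/singularity) to the test profile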
- name: "Run tests | ${{ matrix.filter }}_${{ matrix.profile }} | ${{ matrix.shard }}/5"
run: |
nf-test test \
--ci \
--debug \
--verbose \
--junitxml="TEST-${{ matrix.filter }}_${{ matrix.profile }}_${{ matrix.shard }}.xml" \
--shard ${{ matrix.shard }}/5 \
--changed-since HEAD^ \
--follow-dependencies \
--profile "+${{ matrix.profile }}" \
--filter ${{ matrix.filter }}
      - name: Print success in summary
        if: success()
        run: |
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "All test(s) successful :tada:" >> $GITHUB_STEP_SUMMARY
      - name: Print failure in summary
        if: failure()
        run: |
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "Some test(s) failed :cold_sweat:" >> $GITHUB_STEP_SUMMARY
      - name: Publish Test Report
        uses: mikepenz/action-junit-report@v4
        if: success() || failure() # always run even if the previous step fails
        with:
          report_paths: "TEST-*.xml"
      - name: Clean up
        if: success() || failure()
        run: |
          sudo rm -rf /home/ubuntu/tests/