Merge branch 'dev' into fix_ascat_manual_test
Showing 80 changed files with 524 additions and 214 deletions.
@@ -0,0 +1,170 @@
name: pytest-workflow-release
# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
on:
  push:
    branches: [master]
  release:
    types: [published]

# Cancel if a newer run is started
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  changes:
    name: Check for changes
    runs-on: ubuntu-latest
    outputs:
      # Expose matched filters as job 'tags' output variable
      tags: ${{ steps.filter.outputs.changes }}
    steps:
      - uses: actions/checkout@v3
      - uses: dorny/paths-filter@v2
        id: filter
        with:
          filters: "tests/config/tags.yml"

  test:
    name: ${{ matrix.tags }} ${{ matrix.profile }} NF ${{ matrix.NXF_VER }}
    runs-on: ubuntu-latest
    needs: changes
    if: needs.changes.outputs.tags != '[]'
    strategy:
      fail-fast: false
      matrix:
        tags: ["${{ fromJson(needs.changes.outputs.tags) }}"]
        profile: ["docker", "singularity", "conda"]
        TEST_DATA_BASE:
          - "test-datasets/data"
        NXF_VER:
          - "23.04.0"
          - "latest-everything"
        exclude:
          - profile: "conda"
            tags: concatenate_vcfs
          - profile: "conda"
            tags: deepvariant
          - profile: "conda"
            tags: haplotypecaller
          - profile: "conda"
            tags: merge
          - profile: "conda"
            tags: snpeff
          - profile: "conda"
            tags: umi
          - profile: "conda"
            tags: validation_checks
          - profile: "conda"
            tags: vep
          - profile: "singularity"
            tags: concatenate_vcfs
          - profile: "singularity"
            tags: merge
          - profile: "singularity"
            tags: validation_checks
    env:
      NXF_ANSI_LOG: false
      TEST_DATA_BASE: "${{ github.workspace }}/test-datasets"
    steps:
      - name: Check out pipeline code
        uses: actions/checkout@v3

      - name: Hash Github Workspace
        id: hash_workspace
        run: |
          echo "digest=$(echo sarek3_${{ github.workspace }} | md5sum | cut -c 1-25)" >> $GITHUB_OUTPUT
      - name: Cache test data
        id: cache-testdata
        uses: actions/cache@v3
        with:
          path: test-datasets/
          key: ${{ steps.hash_workspace.outputs.digest }}

      - name: Check out test data
        if: steps.cache-testdata.outputs.cache-hit != 'true'
        uses: actions/checkout@v3
        with:
          repository: nf-core/test-datasets
          ref: sarek3
          path: test-datasets/

      - name: Replace remote paths in samplesheets
        run: |
          for f in tests/csv/3.0/*csv; do
            sed -i "s=https://raw.githubusercontent.com/nf-core/test-datasets/modules/=${{ github.workspace }}/test-datasets/=g" $f
            echo "========== $f ============"
            cat $f
            echo "========================================"
          done;
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: "3.x"

      - uses: actions/cache@v3
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
      - name: Install Python dependencies
        run: python -m pip install --upgrade pip pytest-workflow

      - uses: actions/cache@v3
        with:
          path: /usr/local/bin/nextflow
          key: ${{ runner.os }}
          restore-keys: |
            ${{ runner.os }}-nextflow-
      - name: Install Nextflow ${{ matrix.NXF_VER }}
        uses: nf-core/setup-nextflow@v1.2.0
        with:
          version: "${{ matrix.NXF_VER }}"

      - name: Set up Singularity
        if: matrix.profile == 'singularity'
        uses: eWaterCycle/setup-singularity@v5
        with:
          singularity-version: 3.7.1

      - name: Set up miniconda
        if: matrix.profile == 'conda'
        uses: conda-incubator/setup-miniconda@v2
        with:
          auto-update-conda: true
          channels: conda-forge,bioconda,defaults
          python-version: ${{ matrix.python-version }}

      - name: Conda clean
        if: matrix.profile == 'conda'
        run: conda clean -a

      - name: Run pytest-workflow
        uses: Wandalen/wretry.action@v1.0.11
        with:
          command: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof --git-aware --color=yes
          attempt_limit: 3

      - name: Output log on failure
        if: failure()
        run: |
          sudo apt install bat > /dev/null
          batcat --decorations=always --color=always /home/runner/pytest_workflow_*/*/log.{out,err}
      - name: Upload logs on failure
        if: failure()
        uses: actions/upload-artifact@v2
        with:
          name: logs-${{ matrix.profile }}
          path: |
            /home/runner/pytest_workflow_*/*/.nextflow.log
            /home/runner/pytest_workflow_*/*/log.out
            /home/runner/pytest_workflow_*/*/log.err
            /home/runner/pytest_workflow_*/*/work
            !/home/runner/pytest_workflow_*/*/work/conda
            !/home/runner/pytest_workflow_*/*/work/singularity
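For a local dry run of the same test suite, the command in the 'Run pytest-workflow' step can be reproduced outside the retry action. This is a minimal sketch, assuming pytest-workflow and Nextflow are installed, the sarek3 branch of nf-core/test-datasets is checked out under test-datasets/, and a Docker daemon is available; the strelka tag is only an illustrative choice:

    # Point the tests at the locally cached test data (mirrors the TEST_DATA_BASE env above)
    export TEST_DATA_BASE="$PWD/test-datasets"
    # Same flags as the CI step, without the 3-attempt retry wrapper
    TMPDIR=~ PROFILE=docker pytest --tag strelka --symlink --kwdof --git-aware --color=yes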
@@ -0,0 +1,114 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Nextflow config file for running minimal tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Defines input files and everything required to run a fast and simple pipeline test.
    Use as follows:
        nextflow run nf-core/sarek -profile test,<extra_test_profile>,<docker/singularity> --outdir <OUTDIR>
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
*/

params {
    config_profile_name = 'Test profile'
    config_profile_description = 'Minimal test dataset to check pipeline function'

    // Limit resources so that this can run on GitHub Actions
    max_cpus = 2
    max_memory = '6.5GB'
    max_time = '8.h'

    // Input data
    input = "${projectDir}/tests/csv/3.0/fastq_single.csv"

    // Small reference genome
    genome = null
    igenomes_ignore = true
    dbsnp = "https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz"
    fasta = "https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/genome/genome.fasta"
    germline_resource = "https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1.vcf.gz"
    intervals = "https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/genome/genome.interval_list"
    known_indels = "https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz"
    snpeff_db = 105
    snpeff_genome = 'WBcel235'
    snpeff_version = '5.1'
    vep_cache_version = 106
    vep_genome = 'WBcel235'
    vep_species = 'caenorhabditis_elegans'
    vep_version = '106.1'

    // default params
    split_fastq = 0 // no FASTQ splitting
    tools = 'strelka' // Variant calling with Strelka

    // Ignore params that will throw warning through params validation
    schema_ignore_params = 'genomes,snpeff_version,vep_version'
}

process {
    withName:'.*:FREEC_SOMATIC'{
        ext.args = {
            [
                "sample":[
                    inputformat: "pileup",
                    mateorientation: "FR"
                ],
                "general":[
                    bedgraphoutput: "TRUE",
                    noisydata: "TRUE",
                    minexpectedgc: "0",
                    readcountthreshold: "1",
                    sex: meta.sex,
                    window: "10",
                ],
                "control":[
                    inputformat: "pileup",
                    mateorientation: "FR"
                ]
            ]
        }
    }

    if (params.tools && params.tools.split(',').contains('mutect2')) {
        withName: '.*MUTECT2_PAIRED'{
            // sample name from when the test data was generated
            ext.args = { "--f1r2-tar-gz ${task.ext.prefix}.f1r2.tar.gz --normal-sample normal " }
        }
    }

    withName: '.*:FILTERVARIANTTRANCHES'{
        ext.args = { "--info-key CNN_1D --indel-tranche 0" }
    }
}

// Enable container engines/virtualisation envs for CI testing
// only works when specified with the profile ENV
// otherwise tests can be done with the regular provided profiles
if (System.getenv('PROFILE')) {
    if ("$PROFILE" == "conda") {
        conda.createTimeout = "120 min"
        conda.enabled = true
        charliecloud.enabled = false
        docker.enabled = false
        podman.enabled = false
        shifter.enabled = false
        singularity.enabled = false
    } else if ("$PROFILE" == "docker") {
        conda.enabled = false
        docker.enabled = true
        docker.userEmulation = { params.use_gatk_spark ? false : true }.call()
        charliecloud.enabled = false
        podman.enabled = false
        shifter.enabled = false
        singularity.enabled = false
    } else if ("$PROFILE" == "singularity") {
        conda.enabled = false
        params.singularity_pull_docker_container = false
        singularity.autoMounts = true
        singularity.enabled = true
        charliecloud.enabled = false
        docker.enabled = false
        podman.enabled = false
        shifter.enabled = false
    }
}
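As the file's header comment states, this profile only defines test inputs and resource limits; a container or conda profile still has to be supplied on the command line. A typical invocation, assuming Docker is available and using results as a stand-in output directory, might be:

    nextflow run nf-core/sarek -profile test,docker --outdir results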
27 files renamed without changes.