update install script to use repo and sha
ktmeaton committed Aug 12, 2020
1 parent a006df2 commit fd3b6a3
Showing 4 changed files with 122 additions and 5 deletions.
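
The install script itself is not changed in this diff, only how the workflows call it. A minimal sketch of what scripts/install.sh presumably does with the two new arguments (hypothetical; the variable names and pull command are assumptions, only the repository/SHA calling convention comes from the workflows below):

#!/bin/bash
# Hypothetical sketch of scripts/install.sh (the script is not shown in this commit).
# Usage: scripts/install.sh <github-repository> <commit-sha>
REPO=${1:-"ktmeaton/plague-phylogeography"}   # e.g. ${{ github.repository }}
REV=${2:-"master"}                            # e.g. ${{ github.sha }}

# Pull the pipeline pinned to the requested revision, so CI runs the exact
# commit that triggered the workflow rather than the default branch HEAD.
nextflow pull -r "${REV}" "${REPO}"

Passing the repository and SHA from each workflow keeps the later nextflow run -r <sha> <repo> calls consistent with what was installed.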
6 changes: 4 additions & 2 deletions .github/workflows/pipeline_assembly.yaml
@@ -29,7 +29,7 @@ jobs:
  # Assembled Genome Test
  assembly:
    runs-on: ubuntu-latest
-   timeout-minutes: 30
+   timeout-minutes: 60
    steps:
      #------------------------------------------------------------------------#
      # Checkout Repository
@@ -43,6 +43,8 @@ jobs:
      # Setup conda
      - name: setup conda
        uses: goanpeca/setup-miniconda@v1
+       with:
+         auto-update-conda: true
      #------------------------------------------------------------------------#
      # Restore (cache) conda environments
      - name: cache eager env
@@ -70,7 +72,7 @@ jobs:
      # Download pipelines and install
      - name: install
        shell: bash -l {0}
-       run: scripts/install.sh
+       run: scripts/install.sh ${{github.repository}} ${{ github.sha }}
      #------------------------------------------------------------------------#
      # Test Pipeline - Small
      - name: pipeline small
111 changes: 111 additions & 0 deletions .github/workflows/pipeline_db.yaml
@@ -0,0 +1,111 @@
#------------------------------------------------------------------------------#
name: Pipeline Database
#------------------------------------------------------------------------------#
# Global workflow environment variables
env:
  EAGER_CONDA_ENV: "nf-core-eager-2.2.0dev"
  EAGER_NF_REV: "7b51863957"
  PHYLO_CONDA_ENV: "plague-phylogeography-0.1.4dev"
  CONDA_ENVS_PATH: "/home/runner/miniconda/envs:/usr/share/miniconda/envs"
  CONDA_PKGS_DIRS: "/home/runner/miniconda/pkgs"
  GH_RESOURCES: "--max_memory 6.GB --max_cpus 2"
  SQLITE_DB: "/home/runner/.nextflow/assets/ktmeaton/plague-phylogeography/results/ncbimeta_db/update/latest/output/database/yersinia_pestis_db.sqlite"
  SQLITE_CONFIG: "/home/runner/.nextflow/assets/ktmeaton/plague-phylogeography/config/ncbimeta.yaml"
  SQLITE_TABLE: "BioSample"
  SQLITE_BACKUP: "/home/runner/.nextflow/assets/ktmeaton/plague-phylogeography/results/ncbimeta_db/update/latest/yersinia_pestis_db_BioSample.tsv"
#------------------------------------------------------------------------------#
# Workflow conditions
on:
  push:
    branches:
      - '*'
    paths:
      - '.github/workflows/pipeline_db.yaml'
      - 'config/ncbimeta.yaml'
      - 'main.nf'
      - 'nextflow.config'
  pull_request:
    branches:
      - '*'
  release:
    types: [published]
#------------------------------------------------------------------------------#
jobs:
  #----------------------------------------------------------------------------#
  # Install dependencies
  local :
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
      #------------------------------------------------------------------------#
      # Checkout Repository
      - name: checkout repo
        uses: actions/checkout@v2
      # Install nextflow
      - name: install nextflow
        run: |
          wget -qO- get.nextflow.io | bash
          sudo mv nextflow /usr/local/bin/
      # Setup conda
      - name: setup conda
        uses: goanpeca/setup-miniconda@v1
        with:
          auto-update-conda: true
      #------------------------------------------------------------------------#
      # Restore (cache) conda environments
      - name: cache eager env
        uses: actions/cache@v2
        with:
          path: /home/runner/miniconda/envs/nf-core-eager-2.2.0dev
          key: eager-env-${{ runner.os }}-7b51863957

      - name: cache plague-phylogeography env
        uses: actions/cache@v2
        with:
          path: /home/runner/miniconda/envs/plague-phylogeography-0.1.4dev
          key: plague-phylogeography-env-${{ runner.os }}-${{ hashFiles('environment.yaml') }}

      - name: cache nextstrain env
        uses: actions/cache@v2
        with:
          path: /home/runner/miniconda/envs/nextstrain-8.0.0
          key: nextstrain-env-${{ runner.os }}-${{ hashFiles('config/nextstrain.yaml') }}
      #------------------------------------------------------------------------#
      # Check conda environments
      - name: check cache
        run: conda info --envs
      #------------------------------------------------------------------------#
      # Download pipelines and install
      - name: install
        shell: bash -l {0}
        run: scripts/install.sh ${{github.repository}} ${{ github.sha }}
      #------------------------------------------------------------------------#
      # Create the database
      - name: pipeline db create
        shell: bash -l {0}
        run: |
          conda activate ${PHYLO_CONDA_ENV}
          nextflow run -r ${GITHUB_SHA} ${GITHUB_REPOSITORY} \
            --ncbimeta_create ${SQLITE_CONFIG} \
            --sqlite ${SQLITE_DB} \
            --skip_sqlite_import \
            --skip_reference_download \
            --skip_outgroup_download \
            --outdir test
          conda deactivate
      #------------------------------------------------------------------------#
      # Update the database
      - name: pipeline db update
        shell: bash -l {0}
        run: |
          conda activate ${PHYLO_CONDA_ENV}
          nextflow run -r ${GITHUB_SHA} ${GITHUB_REPOSITORY} \
            --ncbimeta_update ${SQLITE_CONFIG} \
            --ncbimeta_annot ${SQLITE_BACKUP} \
            --ncbimeta_annot_table ${SQLITE_TABLE} \
            --sqlite ${SQLITE_DB} \
            --skip_sqlite_import \
            --skip_reference_download \
            --skip_outgroup_download \
            --outdir test
          conda deactivate
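
For orientation, the database-creation step above could in principle be reproduced outside the CI runner. A rough local equivalent, assuming the pipeline has already been installed and substituting placeholder paths for the runner-specific ones defined in env above:

# Hypothetical local equivalent of the "pipeline db create" step (paths are placeholders).
conda activate plague-phylogeography-0.1.4dev
nextflow run -r <commit-sha> ktmeaton/plague-phylogeography \
  --ncbimeta_create ~/.nextflow/assets/ktmeaton/plague-phylogeography/config/ncbimeta.yaml \
  --sqlite <local-path>/yersinia_pestis_db.sqlite \
  --skip_sqlite_import \
  --skip_reference_download \
  --skip_outgroup_download \
  --outdir test
conda deactivate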
4 changes: 3 additions & 1 deletion .github/workflows/pipeline_local.yaml
@@ -45,6 +45,8 @@ jobs:
      # Setup conda
      - name: setup conda
        uses: goanpeca/setup-miniconda@v1
+       with:
+         auto-update-conda: true
      #------------------------------------------------------------------------#
      # Restore (cache) conda environments
      - name: cache eager env
@@ -72,7 +74,7 @@ jobs:
      # Download pipelines and install
      - name: install
        shell: bash -l {0}
-       run: scripts/install.sh
+       run: scripts/install.sh ${{github.repository}} ${{ github.sha }}
      #------------------------------------------------------------------------#
      # Analyze Local Data
      - name: pipeline local
6 changes: 4 additions & 2 deletions .github/workflows/pipeline_sra.yaml
@@ -32,7 +32,7 @@ jobs:
  # Install dependencies
  sra :
    runs-on: ubuntu-latest
-   timeout-minutes: 30
+   timeout-minutes: 60
    steps:
      #------------------------------------------------------------------------#
      # Checkout Repository
@@ -46,6 +46,8 @@ jobs:
      # Setup conda
      - name: setup conda
        uses: goanpeca/setup-miniconda@v1
+       with:
+         auto-update-conda: true
      #------------------------------------------------------------------------#
      # Restore (cache) conda environments
      - name: cache eager env
@@ -73,7 +75,7 @@ jobs:
      # Download pipelines and install
      - name: install
        shell: bash -l {0}
-       run: scripts/install.sh
+       run: scripts/install.sh ${{github.repository}} ${{ github.sha }}
      #------------------------------------------------------------------------#
      # Download SRA Data
      - name: pipeline sra download
