some fixes for nf-core/eager to support offline execution
ktmeaton committed Oct 2, 2020
1 parent 7ad09ed commit d06ade5
Showing 2 changed files with 12 additions and 12 deletions.
config/eager.config: 22 changes (11 additions & 11 deletions)
@@ -231,21 +231,21 @@ params {
process.container = 'nfcore/eager:dev'

// Load base.config by default for all pipelines
-includeConfig 'conf/base.config'
+//includeConfig 'conf/base.config'

// Load nf-core custom profiles from different Institutions
-try {
-  includeConfig "${params.custom_config_base}/nfcore_custom.config"
-} catch (Exception e) {
-  System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")
-}
+//try {
+//  includeConfig "${params.custom_config_base}/nfcore_custom.config"
+//} catch (Exception e) {
+//  System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")
+//}

// Load nf-core/eager custom profiles from different institutions
-try {
-  includeConfig "${params.custom_config_base}/pipeline/eager.config"
-} catch (Exception e) {
-  System.err.println("WARNING: Could not load nf-core/config/eager profiles: ${params.custom_config_base}/pipeline/eager.config")
-}
+//try {
+//  includeConfig "${params.custom_config_base}/pipeline/eager.config"
+//} catch (Exception e) {
+//  System.err.println("WARNING: Could not load nf-core/config/eager profiles: ${params.custom_config_base}/pipeline/eager.config")
+//}

profiles {
conda { process.conda = "$baseDir/environment.yml" }
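The commented-out includes above stop Nextflow from fetching the shared nf-core institutional configs over the network, which is what fails on compute nodes without internet access. As a sketch only (not part of this commit), assuming a hypothetical local clone of nf-core/configs, the same try/catch could instead be kept and pointed at the local copy through params.custom_config_base:

// Sketch, not from this commit: params.custom_config_base normally points at the
// remote nf-core/configs repository; redirecting it to a hypothetical local clone
// lets the include resolve without any network access.
params.custom_config_base = "/home/keaton/nf-core-configs"  // hypothetical local path
try {
  includeConfig "${params.custom_config_base}/nfcore_custom.config"
} catch (Exception e) {
  System.err.println("WARNING: Could not load nf-core/config profiles: ${params.custom_config_base}/nfcore_custom.config")
}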
profiles/graham/config.yaml: 2 changes (1 addition & 1 deletion)
@@ -22,5 +22,5 @@ jobs : "10"
# Where to put the .snakemake directory
shadow-prefix: "/home/keaton/projects/def-briang/keaton/plague-phylogeography"
# slurm execution
cluster: "sbatch --parsable -t {resources.time_min} -A {resources.account} --cpus-per-task {resources.cpus} --mem-per-cpu={resources.mem_mb} -o workflow/logs_slurm/{rule}_%j.out -e workflow/logs_slurm/{rule}_%j.err --job-name {rule} --export=ALL,NXF_OPTS='-Xms50m -Xmx500m'"
cluster: "sbatch --parsable -t {resources.time_min} -A {resources.account} --cpus-per-task {resources.cpus} --mem-per-cpu={resources.mem_mb} -o workflow/logs_slurm/{rule}_%j.out -e workflow/logs_slurm/{rule}_%j.err --job-name {rule} --export=ALL,NXF_OPTS='-Xms50m -Xmx{resources.mem_mb}m',NXF_OFFLINE='TRUE'"
cluster-status: "workflow/scripts/slurm_status.py"
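For reference, a sketch of how Snakemake might expand the updated cluster template for a hypothetical rule named eager requesting 60 minutes, 1 CPU, 4000 MB per CPU, and the def-briang allocation (rule name, resource values, and account are illustrative, not taken from this commit):

sbatch --parsable -t 60 -A def-briang --cpus-per-task 1 --mem-per-cpu=4000 -o workflow/logs_slurm/eager_%j.out -e workflow/logs_slurm/eager_%j.err --job-name eager --export=ALL,NXF_OPTS='-Xms50m -Xmx4000m',NXF_OFFLINE='TRUE'

Scaling -Xmx with {resources.mem_mb} lets the Nextflow head job's Java heap track the SLURM memory request instead of the previous fixed 500 MB cap, and exporting NXF_OFFLINE='TRUE' tells Nextflow not to attempt remote downloads from the compute node.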
