ui/assets/config/executor/biowulf_cluster.config @ 0:d9c5c5b87fec (draft)

planemo upload for repository https://github.com/ncbi/egapx commit 8173d01b08d9a91c9ec5f6cb50af346edc8020c4
author: fubar
date: Sat, 03 Aug 2024 11:16:53 +0000
// Config for https://hpc.nih.gov/
// EGAPx cannot use Nextflow profiles at the moment, so this config does not define any.
// It is a copy of the 'biowulf' profile from the config at https://hpc.nih.gov/apps/nextflow.html

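// How this file is typically used (illustrative; the exact egapx.py command line
// and the 'biowulf_cluster' executor name are assumptions based on this file's
// location under ui/assets/config/executor/):
//   python3 ui/egapx.py input.yaml -e biowulf_cluster -o out_dir
// or the config can be passed directly to Nextflow with -c:
//   nextflow run <pipeline> -c ui/assets/config/executor/biowulf_cluster.config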
params {
    config_profile_description = 'Biowulf nf-core config'
    config_profile_contact = 'staff@hpc.nih.gov'
    config_profile_url = 'https://hpc.nih.gov/apps/nextflow.html'
    max_memory = '224 GB'
    max_cpus = 32
    max_time = '72 h'

    igenomes_base = '/fdb/igenomes_nf/'
}
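// The max_* values above are caps rather than requests: pipelines built on the
// nf-core template usually clamp each process's resources with a check_max()
// helper. A minimal sketch of that pattern (illustrative only; check_max() is
// not defined in this file):
//   memory = { check_max( 64.GB * task.attempt, 'memory' ) }
//   cpus   = { check_max( 16 * task.attempt, 'cpus' ) }
//   time   = { check_max( 24.h * task.attempt, 'time' ) }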

singularity {
    enabled = true
    autoMounts = true
    cacheDir = "/data/$USER/singularity"
    envWhitelist = 'https_proxy,http_proxy,ftp_proxy,DISPLAY,SLURM_JOB_ID,SINGULARITY_BINDPATH'
}
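// SINGULARITY_BINDPATH is whitelisted above so that bind paths exported in the
// submitting shell propagate into every task container. A sketch of setting it
// before launch (the paths are examples, not requirements):
//   export SINGULARITY_BINDPATH="/data/$USER,/fdb"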

env {
    SINGULARITY_CACHEDIR = "/data/$USER/singularity"
    // Keep user-installed Python packages (~/.local) out of the containers
    PYTHONNOUSERSITE = 1
}

process {
    executor = 'slurm'
    maxRetries = 1
    queue = 'norm'
    queueSize = 200
    pollInterval = '2 min'
    queueStatInterval = '5 min'
    submitRateLimit = '6/1min'
    retry.maxAttempts = 1

    clusterOptions = ' --gres=lscratch:200 '

    scratch = '/lscratch/$SLURM_JOB_ID'
    // With the default stageIn and stageOut settings, using scratch can
    // result in humongous work folders;
    // see https://github.com/nextflow-io/nextflow/issues/961 and
    // https://www.nextflow.io/docs/latest/process.html?highlight=stageinmode
    stageInMode = 'symlink'
    stageOutMode = 'rsync'

    // When the pipeline runs from a shared group data directory, 'lenient' caching
    // avoids cache invalidation caused by inconsistent file timestamps
    cache = 'lenient'
}
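
// Note on scoping: queueSize, pollInterval, queueStatInterval, submitRateLimit
// and retry.maxAttempts are documented as executor-scope settings in current
// Nextflow, not process directives. A sketch of the equivalent executor block,
// left commented out so it does not change this config's behaviour:
// executor {
//     queueSize = 200
//     pollInterval = '2 min'
//     queueStatInterval = '5 min'
//     submitRateLimit = '6/1min'
//     retry.maxAttempts = 1
// }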