From b099a5ee383583b7059f8968b9289c501e6bfab7 Mon Sep 17 00:00:00 2001 From: Olamide Isreal Date: Wed, 18 Mar 2026 11:56:19 +0100 Subject: [PATCH] Update nextflow config for WES K8s execution with proper profiles Add standard, k8s, and k8s_gpu profiles matching reference tool patterns. Configure PVC mounting for eureka storage and GPU node selection. Remove Docker-specific containerOptions from process definition. --- main.nf | 1 - nextflow.config | 64 ++++++++++++++++++++++++++++++------------------- 2 files changed, 39 insertions(+), 26 deletions(-) diff --git a/main.nf b/main.nf index 6d93628..05f6d89 100644 --- a/main.nf +++ b/main.nf @@ -25,7 +25,6 @@ params.weights = '/mnt/databases/RFAA_paper_weights.pt' process RFAA_PREDICT { container 'harbor.cluster.omic.ai/omic/rosettafold-all-atom:latest' - containerOptions '--rm --gpus all -v /mnt:/mnt' publishDir params.outdir, mode: 'copy' stageInMode 'copy' diff --git a/nextflow.config b/nextflow.config index 6e54507..7e81c60 100644 --- a/nextflow.config +++ b/nextflow.config @@ -32,10 +32,45 @@ params { weights = "/mnt/databases/RFAA_paper_weights.pt" } -// Container configurations -docker { - enabled = true - runOptions = '--gpus all' +// Profiles +profiles { + standard { + docker { + enabled = true + temp = 'auto' + runOptions = '--gpus all -v /mnt:/mnt' + } + } + + k8s { + process { + executor = 'k8s' + container = 'harbor.cluster.omic.ai/omic/rosettafold-all-atom:latest' + } + docker { + enabled = true + } + k8s { + storageClaimName = 'eureka-pvc' + storageMountPath = '/omic/eureka' + } + } + + k8s_gpu { + process { + executor = 'k8s' + container = 'harbor.cluster.omic.ai/omic/rosettafold-all-atom:latest' + pod = [[nodeSelector: 'nvidia.com/gpu.present=true']] + accelerator = [request: 1, type: 'nvidia.com/gpu'] + } + docker { + enabled = true + } + k8s { + storageClaimName = 'eureka-pvc' + storageMountPath = '/omic/eureka' + } + } } // Process configurations @@ -43,24 +78,3 @@ process { cpus = 8 memory = '64 GB' } - -// Execution configurations -executor { - $local { - cpus = 16 - memory = '64 GB' - } -} - -// Profiles -profiles { - k8s { - process { - container = 'harbor.cluster.omic.ai/omic/rosettafold-all-atom:latest' - } - docker { - enabled = true - runOptions = '--gpus all' - } - } -}