Add global docker/process config and fix k8s_gpu profile

- Enable docker globally (required by WES)
- Set default container, memory (32 GB), and CPUs (4) at the process level
- Add NVIDIA_VISIBLE_DEVICES env var for GPU visibility in the k8s_gpu profile

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-16 15:42:33 +01:00
parent 8bb6720fc1
commit 7c8629330b

View File

@@ -1,15 +1,18 @@
aws { docker {
client { enabled = true
endpoint = 'https://s3.cluster.omic.ai'
s3PathStyleAccess = true
} }
process {
container = 'harbor.cluster.omic.ai/omic/chai1:latest'
memory = '32 GB'
cpus = 4
} }
profiles { profiles {
standard { standard {
docker { docker {
enabled = true
temp = 'auto' temp = 'auto'
runOptions = '--gpus all'
} }
} }
@@ -17,9 +20,6 @@ profiles {
process { process {
executor = 'k8s' executor = 'k8s'
} }
docker {
enabled = true
}
k8s { k8s {
storageClaimName = 'eureka-pvc' storageClaimName = 'eureka-pvc'
storageMountPath = '/omic/eureka' storageMountPath = '/omic/eureka'
@@ -31,12 +31,9 @@ profiles {
k8s_gpu { k8s_gpu {
process { process {
executor = 'k8s' executor = 'k8s'
pod = [[nodeSelector: 'nvidia.com/gpu.present=true']] pod = [[nodeSelector: 'nvidia.com/gpu.present=true'], [env: 'NVIDIA_VISIBLE_DEVICES', value: 'all']]
accelerator = [request: 1, type: 'nvidia.com/gpu'] accelerator = [request: 1, type: 'nvidia.com/gpu']
} }
docker {
enabled = true
}
k8s { k8s {
storageClaimName = 'eureka-pvc' storageClaimName = 'eureka-pvc'
storageMountPath = '/omic/eureka' storageMountPath = '/omic/eureka'