15 files changed: +362 −3 lines

@@ -32,11 +32,11 @@ jobs:
           - image_name: openhpc-extra-RL8
             source_image_name_key: RL8 # key into environments/.stackhpc/tofu/cluster_image.auto.tfvars.json
             inventory_groups: doca,cuda,lustre
-            volume_size: 30 # needed for cuda
+            volume_size: 35 # needed for cuda
           - image_name: openhpc-extra-RL9
             source_image_name_key: RL9
             inventory_groups: doca,cuda,lustre
-            volume_size: 30 # needed for cuda
+            volume_size: 35 # needed for cuda
     env:
       ANSIBLE_FORCE_COLOR: True
       OS_CLOUD: openstack

@@ -90,5 +90,7 @@ roles/*
 !roles/gateway/**
 !roles/alertmanager/
 !roles/alertmanager/**
+!roles/slurm_recompile/
+!roles/slurm_recompile/**
 !roles/nhc/
 !roles/nhc/**

@@ -48,6 +48,20 @@
         name: cuda
         tasks_from: "{{ 'runtime.yml' if appliances_mode == 'configure' else 'install.yml' }}"

+- name: Setup vGPU
+  hosts: vgpu
+  become: yes
+  gather_facts: yes
+  tags: vgpu
+  tasks:
+    - include_role:
+        name: stackhpc.linux.vgpu
+        tasks_from: "{{ 'configure.yml' if appliances_mode == 'configure' else 'install.yml' }}"
+  handlers:
+    - name: reboot
+      fail:
+        msg: Reboot handler for stackhpc.linux.vgpu role fired unexpectedly. This was supposed to be unreachable.
+
 - name: Persist hostkeys across rebuilds
   # Must be after filesystems.yml (for storage)
   # and before portal.yml (where OOD login node hostkeys are scanned)
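
The new play is gated purely on inventory: it only touches hosts that are members of a `vgpu` group (see `hosts: vgpu` above). A minimal sketch of such a group in YAML inventory form; the hostnames, and listing them directly rather than through group children, are illustrative and not part of this change:

    # Hypothetical YAML inventory fragment: any host placed in the vgpu group
    # is picked up by the "Setup vGPU" play.
    vgpu:
      hosts:
        gpu-compute-0:   # illustrative hostname
        gpu-compute-1: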

@@ -250,6 +250,16 @@
         name: cloudalchemy.grafana
         tasks_from: install.yml

+- name: Add support for NVIDIA GPU auto detection to Slurm
+  hosts: cuda
+  become: yes
+  tasks:
+    - name: Recompile slurm
+      import_role:
+        name: slurm_recompile
+      vars:
+        slurm_recompile_with_nvml: "{{ groups.cuda | length > 0 }}"
+
 - name: Run post.yml hook
   vars:
     appliances_environment_root: "{{ lookup('env', 'APPLIANCES_ENVIRONMENT_ROOT') }}"
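
Because this play targets `hosts: cuda`, the `groups.cuda | length > 0` expression is true whenever the play runs at all, so the rebuild links against NVML on any cluster that defines a cuda group. A minimal sketch of forcing a plain rebuild instead by overriding the role's only variable; this playbook is illustrative and not part of the change:

    # Illustrative only: rebuild Slurm without linking against NVML.
    - hosts: cuda
      become: yes
      tasks:
        - import_role:
            name: slurm_recompile
          vars:
            slurm_recompile_with_nvml: false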

@@ -75,6 +75,7 @@ it also requires an image build with the role name added to the
 | extras.yml | basic_users | All functionality [6] | No |
 | extras.yml | eessi | All functionality [7] | No |
 | extras.yml | cuda | None required - use image build | Yes [8] |
+| extras.yml | vgpu | All functionality | Yes |
 | extras.yml | persist_hostkeys | Not relevant for compute nodes | n/a |
 | extras.yml | compute_init (export) | Not relevant for compute nodes | n/a |
 | extras.yml | k9s (install) | Not relevant during boot | n/a |

@@ -19,6 +19,7 @@
     enable_basic_users: "{{ os_metadata.meta.basic_users | default(false) | bool }}"
     enable_eessi: "{{ os_metadata.meta.eessi | default(false) | bool }}"
     enable_chrony: "{{ os_metadata.meta.chrony | default(false) | bool }}"
+    enable_vgpu: "{{ os_metadata.meta.vgpu | default(false) | bool }}"
     enable_nhc: "{{ os_metadata.meta.nhc | default(false) | bool }}"

     # TODO: "= role defaults" - could be moved to a vars_file: on play with similar precedence effects

@@ -296,6 +297,12 @@
           cmd: "cvmfs_config setup"
       when: enable_eessi

+    - name: Configure VGPUs
+      include_role:
+        name: stackhpc.linux.vgpu
+        tasks_from: 'configure.yml'
+      when: enable_vgpu
+
     # NB: don't need conditional block on enable_compute as have already exited
     # if not the case
     - name: Write Munge key
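
As with the other compute-init flags above, `enable_vgpu` is read from the instance's OpenStack metadata (`os_metadata.meta.vgpu`). A minimal sketch of setting that key by hand with the OpenStack CLI, wrapped in an Ansible task; the delegation and the use of `inventory_hostname` as the server name are assumptions, and in practice the metadata would normally be supplied by the cluster's OpenTofu configuration:

    # Illustrative only: flag an existing instance so compute-init runs the
    # vGPU configuration on its next boot.
    - name: Set vgpu metadata on a compute node
      delegate_to: localhost
      ansible.builtin.command: >
        openstack server set --property vgpu=true {{ inventory_hostname }}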

@@ -0,0 +1,4 @@
+---
+- name: Set cuda_facts_version_short
+  set_fact:
+    cuda_facts_version_short: "{{ cuda_version_short }}"

@@ -0,0 +1,28 @@
+slurm_recompile
+===============
+
+Recompiles Slurm from source RPMs and installs the resulting packages.
+
+Requirements
+------------
+
+Role Variables
+--------------
+
+See `defaults/main.yml`.
+
+Dependencies
+------------
+
+Example Playbook
+----------------
+
+    - hosts: compute
+      tasks:
+        - import_role:
+            name: slurm_recompile
+
+License
+-------
+
+Apache-2.0

@@ -0,0 +1,4 @@
+---
+# Whether to link slurm against the NVIDIA management library
+slurm_recompile_with_nvml: false
+

@@ -0,0 +1,41 @@
+---
+- name: Get facts about CUDA installation
+  import_role:
+    name: cuda
+    tasks_from: facts.yml
+
+- name: Gather the package facts
+  ansible.builtin.package_facts:
+    manager: auto
+
+- name: Set fact containing slurm package facts
+  set_fact:
+    slurm_package: "{{ ansible_facts.packages['slurm-slurmd-ohpc'].0 }}"
+
+- name: Recompile and install slurm packages
+  shell: |
+    #!/bin/bash
+    source /etc/profile
+    set -eux
+    dnf download -y --source slurm-slurmd-ohpc-{{ slurm_package.version }}-{{ slurm_package.release }}
+    rpm -i slurm-ohpc-*.src.rpm
+    cd /root/rpmbuild/SPECS
+    dnf builddep -y slurm.spec
+    rpmbuild -bb{% if slurm_recompile_with_nvml | bool %} -D "_with_nvml --with-nvml=/usr/local/cuda-{{ cuda_facts_version_short }}/targets/x86_64-linux/"{% endif %} slurm.spec
+    dnf reinstall -y /root/rpmbuild/RPMS/x86_64/*.rpm
+  become: true
+
+- name: Workaround missing symlink
+  # Workaround path issue: https://groups.google.com/g/slurm-users/c/cvGb4JnK8BY
+  command: ln -s /lib64/libnvidia-ml.so.1 /lib64/libnvidia-ml.so
+  args:
+    creates: /lib64/libnvidia-ml.so
+  when: slurm_recompile_with_nvml | bool
+
+- name: Cleanup Dependencies
+  shell: |
+    #!/bin/bash
+    set -eux
+    set -o pipefail
+    dnf history list | grep Install | grep 'builddep -y slurm.spec' | head -n 1 | awk '{print $1}' | xargs dnf history -y undo
+  become: true
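
Linking slurmd against NVML is what enables Slurm's built-in GPU autodetection. A minimal sketch of a `gres.conf` that takes advantage of the rebuilt packages, expressed as an Ansible task; the file path and managing it with a bare copy task are assumptions, not something this change does:

    # Illustrative only: with an NVML-linked slurmd, gres.conf can delegate GPU
    # enumeration to the driver instead of listing each device by hand.
    - name: Enable NVML-based GPU autodetection
      ansible.builtin.copy:
        dest: /etc/slurm/gres.conf
        content: |
          AutoDetect=nvml
      become: true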