├── .gitignore ├── .gitlab-ci.yml ├── Guidelines.rst ├── INSTALL.sh ├── LICENSE.txt ├── README.rst ├── TrinityX.pdf ├── img ├── triX_300.png ├── trinityX_logo.png ├── trinityxbanner.png └── trinityxbanner_scaled.png ├── packaging ├── iso │ ├── additional-files.lst │ ├── additional-packages.lst │ ├── isobuild.sh │ ├── parse-playbook.py │ ├── product │ │ ├── run │ │ │ └── install │ │ │ │ └── product │ │ │ │ └── pyanaconda │ │ │ │ └── installclasses │ │ │ │ └── trinityx.py │ │ └── usr │ │ │ └── share │ │ │ └── anaconda │ │ │ ├── anaconda-gtk.css │ │ │ ├── interactive-defaults.ks │ │ │ └── pixmaps │ │ │ └── trix-logo.png │ ├── trinity-comps.xml │ └── trinity-comps.xsl └── rpm │ ├── rpmbuild.sh │ └── trinityx.spec.in ├── prepare.sh ├── site ├── ansible.cfg ├── compute-alma.yml ├── compute-centos.yml ├── compute-default.yml ├── compute-opensuse.yml ├── compute-redhat.yml ├── compute-ubuntu.yml ├── controller.yml ├── dynamic_hosts ├── extras │ ├── compute-default-base.yml │ ├── compute-opensuse-base.yml │ └── compute-ubuntu-base.yml ├── group_vars │ ├── all.yml.example │ ├── aws.yml.example │ ├── azure.yml.example │ ├── gcp.yml.example │ └── menu.yml ├── hosts.example ├── imports │ ├── cloud-primary-controller.yml │ ├── cloud-shadow-controller.yml │ ├── cloud.yml │ ├── group_vars │ ├── imports │ ├── roles │ ├── trinity-opensuse-image-create.yml │ ├── trinity-opensuse-image-setup.yml │ ├── trinity-redhat-image-create.yml │ ├── trinity-redhat-image-setup.yml │ ├── trinity-redhat-login-setup.yml │ ├── trinity-ubuntu-image-create.yml │ └── trinity-ubuntu-image-setup.yml ├── k3s-alma.yml ├── k3s-centos.yml ├── k3s-default.yml ├── k3s-opensuse.yml ├── k3s-ubuntu.yml ├── library │ └── zabbix_conf.py ├── login-default.yml └── roles │ ├── ansible │ ├── read_facts │ │ └── tasks │ │ │ └── main.yml │ └── write_facts │ │ └── tasks │ │ └── main.yml │ ├── cloud │ ├── aws │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── aws.yml │ │ │ ├── debian-terraform.yml │ │ │ ├── main.yml │ │ │ └── redhat-vhd.yml │ │ └── templates │ │ │ ├── embed.ipxe.j2 │ │ │ ├── grub.cfg.j2 │ │ │ └── terraform.tfvars.j2 │ ├── azure │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── debian-azure-cli.yml │ │ │ ├── debian-vhd.yml │ │ │ ├── main.yml │ │ │ ├── redhat-azure-cli.yml │ │ │ └── redhat-vhd.yml │ │ └── templates │ │ │ ├── embed.ipxe.j2 │ │ │ ├── grub.cfg.j2 │ │ │ └── terraform.tfvars.j2 │ ├── controller │ │ ├── defaults │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── all.yml.j2 │ ├── gcp │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ └── tasks │ │ │ ├── debian-gcloud.yml │ │ │ ├── debian-terraform.yml │ │ │ ├── main.yml │ │ │ └── redhat-gcloud.yml │ ├── ipxe │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── embed.ipxe.j2 │ ├── luna │ │ ├── files │ │ │ ├── part.rd │ │ │ └── post.rd │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── rawhostlist.py.j2 │ └── terraform │ │ ├── README.md │ │ ├── defaults │ │ └── main.yml │ │ └── tasks │ │ ├── debian-terraform.yml │ │ ├── main.yml │ │ └── redhat-terraform.yml │ ├── trinity │ ├── alertx │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── cli.yml │ │ │ ├── drainer.yml │ │ │ └── main.yml │ │ ├── templates │ │ │ ├── alert_fetcher.py.j2 │ │ │ ├── alertx-cli-wrapper.j2 │ │ │ ├── alertx-drainer.service.j2 │ │ │ ├── alertx-hook.sh.j2 │ │ │ ├── 
alertx-hook.systemd.j2 │ │ │ ├── config.py.j2 │ │ │ ├── drainer.ini.j2 │ │ │ ├── drainer.py.j2 │ │ │ ├── logger.py.j2 │ │ │ ├── luna2.ini.j2 │ │ │ └── node_utils.py.j2 │ │ └── vars │ │ │ ├── CentOS8.yaml │ │ │ ├── CentOS9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ └── Rocky9.yaml │ ├── aria2c │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ ├── aria2c-daemon.sh.j2 │ │ │ └── aria2c.service.j2 │ ├── bind │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ └── dhclient-enter-hooks │ │ ├── handlers │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── resolv.conf.j2 │ ├── check-latest-kernel │ │ └── tasks │ │ │ └── main.yml │ ├── chrony │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── chrony.conf.j2 │ ├── cleanup-legacy │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ └── tasks │ │ │ ├── main.yml │ │ │ ├── monitoring.yml │ │ │ └── ood.yml │ ├── config-genders │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ └── tasks │ │ │ ├── main.yml │ │ │ └── managed_block.yml │ ├── config-manager │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ └── tasks │ │ │ ├── main.yml │ │ │ └── managed_block.yml │ ├── config-slurm │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ └── tasks │ │ │ ├── main.yml │ │ │ └── managed_block.yml │ ├── cv_support │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ ├── examples │ │ │ │ ├── slurm-test.job │ │ │ │ ├── test.c │ │ │ │ ├── test.f90 │ │ │ │ ├── test_mpi.c │ │ │ │ └── test_mpi.f90 │ │ │ ├── remote-assistance.service │ │ │ ├── request-remote-assistance │ │ │ ├── request-remote-assistance-automated │ │ │ └── slurmibtopology.sh │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ ├── bash_profile.j2 │ │ │ └── motd.j2 │ ├── environment-modules │ │ ├── defaults │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ └── tasks │ │ │ └── main.yml │ ├── fail2ban │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ └── trinityx.conf │ │ ├── handlers │ │ │ └── main.yml │ │ └── tasks │ │ │ └── main.yml │ ├── firewalld │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── interface.yml │ │ │ └── main.yml │ │ └── templates │ │ │ ├── direct-rules.xml.j2 │ │ │ └── fix-firewalld-zones.sh.j2 │ ├── grafana │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ ├── dashboards │ │ │ │ ├── .lint │ │ │ │ ├── trinityx-prometheus-alerts.json │ │ │ │ ├── trinityx-prometheus-ha.json │ │ │ │ ├── trinityx-prometheus-hardware.json │ │ │ │ ├── trinityx-prometheus-home.json │ │ │ │ ├── trinityx-prometheus-ipmi.json │ │ │ │ ├── trinityx-prometheus-nodes.json │ │ │ │ ├── trinityx-prometheus-nvidia.json │ │ │ │ ├── trinityx-prometheus-slurm-job.json │ │ │ │ ├── trinityx-prometheus-slurm.json │ │ │ │ └── trinityx-prometheus-zfs.json │ │ │ └── triX_7.png │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ ├── grafana.ini.j2 │ │ │ ├── ldap.toml.j2 │ │ │ ├── trinityx-dashboards.yaml.j2 │ │ │ └── trinityx-datasource.yaml.j2 │ ├── hostname │ │ └── tasks │ │ │ └── main.yml │ ├── image-create │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ ├── opensuse │ │ │ │ ├── part.rd │ │ │ │ └── post.rd │ │ │ └── ubuntu │ │ │ │ ├── focal │ │ │ │ ├── jammy │ │ │ 
│ ├── noble │ │ │ │ ├── part.rd │ │ │ │ └── post.rd │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── almalinux │ │ │ │ ├── base.yml │ │ │ │ ├── docker.yml │ │ │ │ └── extend.yml │ │ │ ├── centos │ │ │ ├── main.yml │ │ │ ├── opensuse │ │ │ │ ├── base.yml │ │ │ │ ├── docker.yml │ │ │ │ └── extend.yml │ │ │ ├── redhat │ │ │ │ ├── base.yml │ │ │ │ ├── docker.yml │ │ │ │ └── extend.yml │ │ │ ├── rocky │ │ │ └── ubuntu │ │ │ │ ├── base.yml │ │ │ │ ├── docker.yml │ │ │ │ └── extend.yml │ │ ├── templates │ │ │ ├── luna2.ini.j2 │ │ │ ├── opensuse-repo.j2 │ │ │ ├── root.txt │ │ │ └── trinity.conf.j2 │ │ └── vars │ │ │ ├── aa64 │ │ │ ├── controller-almalinux.yaml │ │ │ ├── controller-centos.yaml │ │ │ ├── controller-opensuse.yaml │ │ │ ├── controller-redhat.yaml │ │ │ ├── controller-rocky.yaml │ │ │ ├── controller-ubuntu.yaml │ │ │ ├── docker-almalinux.yaml │ │ │ ├── docker-centos.yaml │ │ │ ├── docker-opensuse.yaml │ │ │ ├── docker-redhat.yaml │ │ │ ├── docker-rocky.yaml │ │ │ ├── docker-ubuntu.yaml │ │ │ ├── image-almalinux.yaml │ │ │ ├── image-centos.yaml │ │ │ ├── image-opensuse.yaml │ │ │ ├── image-redhat.yaml │ │ │ ├── image-rocky.yaml │ │ │ └── image-ubuntu.yaml │ │ │ └── x64 │ │ │ ├── controller-almalinux.yaml │ │ │ ├── controller-centos.yaml │ │ │ ├── controller-opensuse.yaml │ │ │ ├── controller-redhat.yaml │ │ │ ├── controller-rocky.yaml │ │ │ ├── controller-ubuntu.yaml │ │ │ ├── docker-almalinux.yaml │ │ │ ├── docker-centos.yaml │ │ │ ├── docker-opensuse.yaml │ │ │ ├── docker-redhat.yaml │ │ │ ├── docker-rocky.yaml │ │ │ ├── docker-ubuntu.yaml │ │ │ ├── image-almalinux.yaml │ │ │ ├── image-centos.yaml │ │ │ ├── image-opensuse.yaml │ │ │ ├── image-redhat.yaml │ │ │ ├── image-rocky.yaml │ │ │ └── image-ubuntu.yaml │ ├── image-download │ │ ├── defaults │ │ │ └── main.yml │ │ └── tasks │ │ │ └── main.yml │ ├── init-nodes │ │ ├── defaults │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── resolv.conf │ ├── init │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ └── resolve.sh │ │ ├── meta │ │ │ └── main.yml │ │ └── tasks │ │ │ ├── conf-interface.yml │ │ │ ├── find-hostname.yml │ │ │ ├── find-interface.yml │ │ │ ├── keys.yml │ │ │ └── main.yml │ ├── kubernetes │ │ ├── defaults │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── certs.yml │ │ │ └── main.yml │ │ └── templates │ │ │ ├── k3s-agent.service.j2 │ │ │ ├── k3s-server-nodes-cleaner.service.j2 │ │ │ ├── k3s-server-nodes-cleaner.timer.j2 │ │ │ ├── k3s-server.service.env.j2 │ │ │ └── k3s-server.service.j2 │ ├── local_repo │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ ├── tmp_local.repo.j2 │ │ │ └── trinityx-repo.conf.j2 │ ├── logrotate │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ ├── alertx │ │ │ ├── aria2c │ │ │ ├── influxdb │ │ │ ├── luna │ │ │ ├── prometheus │ │ │ ├── sensu │ │ │ ├── slurm │ │ │ ├── trinityx │ │ │ └── uchiwa │ │ ├── handlers │ │ │ └── main.yml │ │ └── tasks │ │ │ └── main.yml │ ├── luna2 │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ ├── aarch64 │ │ │ │ └── libluna-fakeuname.so │ │ │ ├── ipxe.efi │ │ │ ├── luna2-master.service │ │ │ ├── tftp │ │ │ ├── undionly.kpxe │ │ │ └── x86_64 │ │ │ │ └── libluna-fakeuname.so │ │ ├── handlers │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── legacy_fixes.yml │ │ │ └── main.yml │ │ ├── templates │ │ │ ├── firewalld_direct.xml.j2 │ │ │ ├── gunicorn.py.j2 │ │ │ ├── luna2-cli-wrapper.j2 │ │ │ ├── 
luna2-daemon-service.j2 │ │ │ ├── luna2-daemon.ini.j2 │ │ │ ├── luna2.conf.j2 │ │ │ ├── luna2.ini.j2 │ │ │ └── nginx-luna2.conf.j2 │ │ └── vars │ │ │ ├── CentOS8.yaml │ │ │ ├── CentOS9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ └── Rocky9.yaml │ ├── mariadb │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ ├── etc_my.cnf.j2 │ │ │ └── my.cnf.j2 │ ├── nfs-mounts │ │ ├── defaults │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── vars │ │ │ ├── Debian.yaml │ │ │ └── RedHat.yaml │ ├── nfs │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ ├── exports.j2 │ │ │ ├── nfsmount.conf.j2 │ │ │ └── sysconfig_nfs.j2 │ ├── nginx │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ ├── nginx.conf │ │ │ └── ssl.conf │ │ ├── handlers │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ └── tasks │ │ │ └── main.yml │ ├── no_proxy │ │ ├── defaults │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── z_noproxy.sh.j2 │ ├── nscd │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── nscd.conf.j2 │ ├── obol │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ ├── obol │ │ │ └── obol-p2 │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── obol.conf.j2 │ ├── ood-apps │ │ ├── defaults │ │ │ ├── alertx.yml │ │ │ └── main.yml │ │ ├── files │ │ │ ├── codeserver │ │ │ │ └── app │ │ │ │ │ ├── LICENSE.txt │ │ │ │ │ ├── icon.png │ │ │ │ │ ├── submit.yml.erb │ │ │ │ │ ├── template │ │ │ │ │ ├── after.sh.erb │ │ │ │ │ ├── before.sh.erb │ │ │ │ │ └── script.sh.erb │ │ │ │ │ └── view.html.erb │ │ │ └── jupyter │ │ │ │ └── app │ │ │ │ ├── LICENSE.txt │ │ │ │ ├── icon.png │ │ │ │ ├── submit.yml.erb │ │ │ │ ├── template │ │ │ │ ├── after.sh │ │ │ │ ├── before.sh.erb │ │ │ │ └── script.sh.erb │ │ │ │ └── view.html.erb │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── alertx.yml │ │ │ ├── app.yml │ │ │ ├── desktop.yml │ │ │ ├── infiniband.yml │ │ │ ├── lchroot.yml │ │ │ ├── login-post.yml │ │ │ ├── login-pre.yml │ │ │ ├── main.yml │ │ │ ├── passwd.yml │ │ │ └── trinity.yml │ │ └── templates │ │ │ ├── alertmanager │ │ │ └── app │ │ │ │ └── manifest.yml.j2 │ │ │ ├── codeserver │ │ │ └── app │ │ │ │ ├── form.yml.erb.j2 │ │ │ │ └── manifest.yml.j2 │ │ │ ├── desktop │ │ │ ├── app │ │ │ │ └── manifest.yml.j2 │ │ │ ├── form.yml.j2 │ │ │ ├── module.lua.j2 │ │ │ └── submit.yml.erb.j2 │ │ │ ├── grafana │ │ │ └── app │ │ │ │ └── manifest.yml.j2 │ │ │ ├── jupyter │ │ │ ├── app │ │ │ │ ├── form.yml.j2 │ │ │ │ └── manifest.yml.j2 │ │ │ └── module.lua.j2 │ │ │ ├── login │ │ │ ├── login-node-ood-prepare-script.j2 │ │ │ └── login-node-ood-prepare-service.j2 │ │ │ ├── luna │ │ │ └── luna2.ini.j2 │ │ │ ├── prometheus │ │ │ └── app │ │ │ │ └── manifest.yml.j2 │ │ │ └── shell │ │ │ ├── app │ │ │ └── manifest.yml.j2 │ │ │ ├── env.j2 │ │ │ ├── lchroot_wrapper.sh.j2 │ │ │ └── sudoers.j2 │ ├── ood │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ └── branding │ │ │ │ └── TrinityX │ │ │ │ ├── custom.css │ │ │ │ ├── favicon.ico │ │ │ │ ├── logo.png │ │ │ │ ├── logo_ext.png │ │ │ │ └── overrides │ │ │ │ └── pun_config_view.rb │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── dex-auth.yml │ │ │ ├── login.yml │ │ │ ├── main.yml │ │ │ └── pam-auth.yml │ │ └── 
templates │ │ │ ├── branding │ │ │ └── TrinityX │ │ │ │ ├── en.yml.j2 │ │ │ │ ├── nginx_stage.yml.j2 │ │ │ │ └── ondemand.yml.j2 │ │ │ ├── cluster.yml.j2 │ │ │ ├── login │ │ │ ├── login-node-ood-reconfigure-script.j2 │ │ │ └── login-node-ood-reconfigure-service.j2 │ │ │ └── ood_portal.yml.j2 │ ├── openhpc │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ ├── OHPC_exports.j2 │ │ │ └── z_trinityx.sh.j2 │ ├── openldap │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ ├── autoinc.ldif │ │ │ ├── local_schema.ldif │ │ │ ├── memberof.ldif │ │ │ ├── memberof.ldif-25 │ │ │ ├── ppolicy.ldif │ │ │ ├── ppolicyload.ldif │ │ │ ├── ppolicyoverlay.ldif │ │ │ └── rfc2307bis.ldif │ │ ├── handlers │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ ├── config.ldif-25.j2 │ │ │ ├── config.ldif.j2 │ │ │ ├── local.ldif-25.j2 │ │ │ ├── local.ldif.j2 │ │ │ ├── proxy.ldif-25.j2 │ │ │ └── proxy.ldif.j2 │ ├── pacemaker │ │ ├── defaults │ │ │ └── main.yaml │ │ ├── files │ │ │ ├── ZFS │ │ │ ├── iscsi │ │ │ └── pcmk │ │ ├── handlers │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yaml │ │ ├── templates │ │ │ ├── corosync.conf.j2 │ │ │ └── pcs.j2 │ │ └── vars │ │ │ ├── CentOS8.yaml │ │ │ ├── CentOS9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ └── Rocky9.yaml │ ├── pack-images │ │ └── tasks │ │ │ └── main.yml │ ├── packages │ │ ├── defaults │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── vars │ │ │ ├── aa64 │ │ │ ├── Centos8.yaml │ │ │ ├── Centos9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ ├── Rocky9.yaml │ │ │ └── main.yaml │ │ │ └── x64 │ │ │ ├── Centos8.yaml │ │ │ ├── Centos9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ └── Rocky9.yaml │ ├── pbspro │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ ├── mom_priv_config.j2 │ │ │ └── pbs.conf.j2 │ ├── prepare │ │ ├── defaults │ │ │ └── main.yaml │ │ └── tasks │ │ │ └── main.yaml │ ├── prometheus-alertmanager │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── configure.yml │ │ │ ├── install.yml │ │ │ ├── main.yml │ │ │ ├── preflight.yml │ │ │ └── selinux.yml │ │ ├── templates │ │ │ ├── alertmanager.web.yml.j2 │ │ │ ├── alertmanager.yml.j2 │ │ │ └── prometheus-alertmanager.service.j2 │ │ └── vars │ │ │ └── main.yml │ ├── prometheus-ha-exporter │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── configure.yml │ │ │ ├── install.yml │ │ │ ├── main.yml │ │ │ └── preflight.yml │ │ └── templates │ │ │ └── prometheus-ha-exporter.service.j2 │ ├── prometheus-infiniband-exporter │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── configure.yml │ │ │ ├── install.yml │ │ │ ├── main.yml │ │ │ └── preflight.yml │ │ ├── templates │ │ │ └── prometheus-infiniband-exporter.service.j2 │ │ └── vars │ │ │ ├── Centos8.yaml │ │ │ ├── Centos9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ ├── Rocky9.yaml │ │ │ ├── Ubuntu20.yaml │ │ │ ├── Ubuntu22.yaml │ │ │ └── main.yml │ ├── prometheus-ipmi-exporter │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── configure.yml │ │ │ ├── install.yml │ │ │ ├── 
main.yml │ │ │ └── preflight.yml │ │ ├── templates │ │ │ └── prometheus-ipmi-exporter.service.j2 │ │ └── vars │ │ │ ├── Centos8.yaml │ │ │ ├── Centos9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ ├── Rocky9.yaml │ │ │ ├── Ubuntu20.yaml │ │ │ ├── Ubuntu22.yaml │ │ │ └── main.yml │ ├── prometheus-lshw-exporter │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── configure.yml │ │ │ ├── install.yml │ │ │ ├── main.yml │ │ │ └── preflight.yml │ │ ├── templates │ │ │ └── prometheus-lshw-exporter.service.j2 │ │ └── vars │ │ │ └── main.yml │ ├── prometheus-node-exporter │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── configure.yml │ │ │ ├── install.yml │ │ │ ├── main.yml │ │ │ ├── preflight.yml │ │ │ └── selinux.yml │ │ ├── templates │ │ │ ├── node_exporter.web.yml.j2 │ │ │ └── prometheus-node-exporter.service.j2 │ │ └── vars │ │ │ └── main.yml │ ├── prometheus-nvidia-exporter │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── configure.yml │ │ │ ├── install.yml │ │ │ ├── main.yml │ │ │ └── preflight.yml │ │ ├── templates │ │ │ └── prometheus-nvidia-exporter.service.j2 │ │ └── vars │ │ │ └── main.yml │ ├── prometheus-server │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── configure.yml │ │ │ ├── install.yml │ │ │ ├── legacy.yml │ │ │ ├── main.yml │ │ │ ├── preflight.yml │ │ │ └── selinux.yml │ │ ├── templates │ │ │ ├── generic.rules.j2 │ │ │ ├── prometheus-server.service.j2 │ │ │ ├── prometheus.web.yml.j2 │ │ │ ├── prometheus.yml.j2 │ │ │ ├── recording.rules.j2 │ │ │ └── service.rules.j2 │ │ └── vars │ │ │ └── main.yml │ ├── prometheus-slurm-exporter │ │ ├── LICENSE │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── configure.yml │ │ │ ├── install.yml │ │ │ ├── main.yml │ │ │ └── preflight.yml │ │ └── templates │ │ │ └── prometheus-slurm-exporter.service.j2 │ ├── prometheus-slurm-job-exporter │ │ ├── README.md │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── configure.yml │ │ │ ├── install.yml │ │ │ ├── main.yml │ │ │ └── preflight.yml │ │ ├── templates │ │ │ └── prometheus-slurm-job-exporter.service.j2 │ │ └── vars │ │ │ └── main.yml │ ├── pwquality │ │ └── tasks │ │ │ └── main.yml │ ├── rdma-centos │ │ ├── defaults │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ ├── templates │ │ │ └── i40.conf.j2 │ │ └── vars │ │ │ ├── Centos8.yaml │ │ │ ├── Centos9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ └── Rocky9.yaml │ ├── repos │ │ ├── defaults │ │ │ └── main.yml │ │ ├── tasks │ │ │ ├── baseurl.yml │ │ │ └── main.yml │ │ └── vars │ │ │ ├── CentOS8.yaml │ │ │ ├── CentOS9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ └── Rocky9.yaml │ ├── resolv │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── resolv.conf.j2 │ ├── rsyslog │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ ├── templates │ │ │ ├── forwarding.conf.j2 │ │ │ ├── rsyslog.conf.j2 │ │ │ └── template-rule.conf.j2 │ │ └── vars │ │ │ ├── Debian.yaml │ │ │ ├── RedHat.yaml │ │ │ ├── SLE.yaml │ │ │ └── Suse.yaml │ ├── shared-fs │ │ ├── defaults │ 
│ │ └── main.yaml │ │ ├── meta │ │ │ └── main.yaml │ │ ├── tasks │ │ │ ├── direct.yaml │ │ │ ├── drbd.yaml │ │ │ ├── iscsi.yaml │ │ │ ├── main.yaml │ │ │ ├── partitions_lvm.yaml │ │ │ └── partitions_zfs.yaml │ │ ├── templates │ │ │ ├── drbd_fs_disks.res.j2 │ │ │ ├── fix-lvm-filter.sh.j2 │ │ │ ├── global_common.conf.j2 │ │ │ ├── lvm_filter.dat.j2 │ │ │ └── lvm_volumes.dat.j2 │ │ └── vars │ │ │ ├── AlmaLinux8.yaml │ │ │ ├── AlmaLinux9.yaml │ │ │ ├── CentOS8.yaml │ │ │ ├── CentOS9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ └── Rocky9.yaml │ ├── slurm-sbank │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ └── slurm-bank-master │ │ │ │ ├── .gitignore │ │ │ │ ├── .kitchen.yml │ │ │ │ ├── AUTHORS │ │ │ │ ├── ChangeLog │ │ │ │ ├── DISCLAIMER │ │ │ │ ├── LICENSE │ │ │ │ ├── Makefile │ │ │ │ ├── README │ │ │ │ ├── README.markdown │ │ │ │ ├── VERSION │ │ │ │ ├── bump-version │ │ │ │ ├── chefignore │ │ │ │ ├── doc │ │ │ │ ├── HEAnet2012-abstract.mdwn │ │ │ │ ├── SUG2012-TODO │ │ │ │ ├── SUG2012-abstract.mdwn │ │ │ │ ├── SUG2012-paper.mdwn │ │ │ │ ├── bugs.mdwn │ │ │ │ ├── bugs │ │ │ │ │ ├── done.mdwn │ │ │ │ │ ├── move_all_commands_into_libexec_or_similar.mdwn │ │ │ │ │ └── sbank-deposit_needs_to_check_if_time_is_zero.mdwn │ │ │ │ ├── contact.mdwn │ │ │ │ ├── copyright.mdwn │ │ │ │ ├── design.mdwn │ │ │ │ ├── download.mdwn │ │ │ │ ├── gource.mdwn │ │ │ │ ├── index.mdwn │ │ │ │ ├── install.mdwn │ │ │ │ ├── sbank-balance.mdwn │ │ │ │ ├── sbank-cluster.mdwn │ │ │ │ ├── sbank-deposit.mdwn │ │ │ │ ├── sbank-project.mdwn │ │ │ │ ├── sbank-refund.mdwn │ │ │ │ ├── sbank-submit.mdwn │ │ │ │ ├── sbank-time.mdwn │ │ │ │ ├── sbank-user.mdwn │ │ │ │ ├── sbank-version.mdwn │ │ │ │ ├── sbank.mdwn │ │ │ │ ├── tchpc_icon128x128.png │ │ │ │ ├── templates │ │ │ │ │ ├── bare.tmpl │ │ │ │ │ └── walkthrough.tmpl │ │ │ │ ├── tests.mdwn │ │ │ │ ├── use_case │ │ │ │ │ ├── admin.mdwn │ │ │ │ │ └── user.mdwn │ │ │ │ ├── walkthrough.mdwn │ │ │ │ └── walkthrough │ │ │ │ │ ├── checking_account_balances.mdwn │ │ │ │ │ ├── checking_if_enough_hours_are_available.mdwn │ │ │ │ │ ├── creating_projects.mdwn │ │ │ │ │ ├── deciding_on_a_policy.mdwn │ │ │ │ │ ├── deposit_hours_to_an_account.mdwn │ │ │ │ │ ├── estimating_time_for_a_job.mdwn │ │ │ │ │ ├── expiring_accounts.mdwn │ │ │ │ │ ├── refunding_hours.mdwn │ │ │ │ │ ├── setup.mdwn │ │ │ │ │ └── submitting_jobs_with_sbank-submit.mdwn │ │ │ │ ├── expiredb │ │ │ │ ├── expireprojects.sh │ │ │ │ └── projects.rec │ │ │ │ ├── mdwn2man │ │ │ │ ├── shFlags │ │ │ │ ├── README.html │ │ │ │ ├── README.txt │ │ │ │ ├── bin │ │ │ │ │ └── gen_test_results.sh │ │ │ │ ├── doc │ │ │ │ │ ├── CHANGES-1.0.txt │ │ │ │ │ ├── LICENSE.shunit2 │ │ │ │ │ ├── RELEASE_NOTES-1.0.0.txt │ │ │ │ │ ├── RELEASE_NOTES-1.0.1.txt │ │ │ │ │ ├── RELEASE_NOTES-1.0.2.txt │ │ │ │ │ ├── RELEASE_NOTES-1.0.3.txt │ │ │ │ │ ├── TODO.txt │ │ │ │ │ ├── coding_standards.txt │ │ │ │ │ ├── contributors.txt │ │ │ │ │ └── rst2html.css │ │ │ │ ├── examples │ │ │ │ │ ├── debug_output.sh │ │ │ │ │ ├── hello_world.sh │ │ │ │ │ └── write_date.sh │ │ │ │ ├── lib │ │ │ │ │ ├── shunit2 │ │ │ │ │ └── versions │ │ │ │ └── src │ │ │ │ │ ├── shflags │ │ │ │ │ ├── shflags_test.sh │ │ │ │ │ ├── shflags_test_defines.sh │ │ │ │ │ ├── shflags_test_helpers │ │ │ │ │ ├── shflags_test_parsing.sh │ │ │ │ │ ├── shflags_test_private.sh │ │ │ │ │ └── shflags_test_public.sh │ │ │ │ ├── slurm-bank.spec │ │ │ │ ├── src │ │ │ │ ├── _sbank-balance.pl │ │ │ │ ├── _sbank-common-cpu_hrs.pl │ │ │ │ ├── sbank │ │ │ │ ├── 
sbank-balance │ │ │ │ ├── sbank-cluster │ │ │ │ ├── sbank-common │ │ │ │ ├── sbank-deposit │ │ │ │ ├── sbank-project │ │ │ │ ├── sbank-refund │ │ │ │ ├── sbank-submit │ │ │ │ ├── sbank-time │ │ │ │ ├── sbank-user │ │ │ │ ├── sbank-version │ │ │ │ ├── sbank.bash_completion │ │ │ │ └── shflags │ │ │ │ ├── t │ │ │ │ ├── Makefile │ │ │ │ ├── sample-job1.sh │ │ │ │ ├── sample-job2.sh │ │ │ │ └── test.sh │ │ │ │ ├── wvtest.sh │ │ │ │ └── wvtestrun │ │ └── tasks │ │ │ └── main.yml │ ├── slurm │ │ ├── defaults │ │ │ └── main.yml │ │ ├── files │ │ │ ├── balance.sh │ │ │ ├── cgroup.conf │ │ │ ├── epilog.sh │ │ │ ├── pam-slurm │ │ │ ├── pam-sshd │ │ │ ├── prolog.sh │ │ │ ├── slurm-health.conf │ │ │ └── topology.conf │ │ ├── handlers │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ ├── slurm-nodes.conf.j2 │ │ │ ├── slurm-partitions.conf.j2 │ │ │ ├── slurm-user.conf.j2 │ │ │ ├── slurm.conf.j2 │ │ │ ├── slurmdbd.conf.j2 │ │ │ └── systemd │ │ │ ├── munge.service.d │ │ │ └── trinity.conf.j2 │ │ │ ├── slurmctld.service.d │ │ │ └── trinity.conf.j2 │ │ │ └── slurmdbd.service.d │ │ │ └── trinity.conf.j2 │ ├── ssh │ │ ├── files │ │ │ ├── ssh.sh │ │ │ ├── ssh_cluster_config │ │ │ ├── ssh_config │ │ │ └── sshd_config │ │ ├── handlers │ │ │ └── main.yml │ │ └── tasks │ │ │ └── main.yml │ ├── ssl-cert │ │ ├── defaults │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── san.conf.j2 │ ├── sssd │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── meta │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ ├── templates │ │ │ ├── nsswitch-manually.conf.j2 │ │ │ └── sssd.conf.j2 │ │ └── vars │ │ │ ├── Debian.yaml │ │ │ ├── RedHat.yaml │ │ │ ├── SLE.yaml │ │ │ └── Suse.yaml │ ├── sync-secrets │ │ ├── defaults │ │ │ └── main.yaml │ │ ├── tasks │ │ │ └── main.yaml │ │ └── vars │ │ │ ├── CentOS8.yaml │ │ │ ├── CentOS9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ └── Rocky9.yaml │ ├── syslog-ng │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ ├── templates │ │ │ ├── forwarding.conf.j2 │ │ │ └── syslog-ng.conf.j2 │ │ └── vars │ │ │ ├── Debian.yaml │ │ │ ├── RedHat.yaml │ │ │ ├── SLE.yaml │ │ │ └── Suse.yaml │ ├── target │ │ ├── defaults │ │ │ └── default.yml │ │ └── tasks │ │ │ └── main.yml │ ├── transmission │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ └── templates │ │ │ └── override.conf.j2 │ ├── trix-tree │ │ ├── defaults │ │ │ └── main.yml │ │ └── tasks │ │ │ └── main.yml │ ├── tunables │ │ ├── defaults │ │ │ └── main.yml │ │ ├── handlers │ │ │ └── main.yml │ │ ├── tasks │ │ │ └── main.yml │ │ ├── templates │ │ │ ├── 91-hpc-limits.conf.j2 │ │ │ └── 91-hpc-sysctl.conf.j2 │ │ └── vars │ │ │ ├── Centos8.yaml │ │ │ ├── Centos9.yaml │ │ │ ├── RedHat8.yaml │ │ │ ├── RedHat9.yaml │ │ │ ├── Rocky8.yaml │ │ │ └── Rocky9.yaml │ ├── wrapup-images │ │ └── tasks │ │ │ └── main.yml │ ├── wrapup │ │ └── tasks │ │ │ └── main.yml │ └── yml-check │ │ ├── defaults │ │ └── main.yml │ │ └── tasks │ │ └── main.yaml │ └── vpn │ └── strongswan │ ├── defaults │ └── main.yml │ ├── tasks │ ├── main.yml │ └── redhat.yml │ └── templates │ ├── redhat-aws.conf.j2 │ ├── redhat-azure.conf.j2 │ └── redhat-swanctl.conf.j2 └── yamllint.yml /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode/ 2 | site/hosts 3 | site/group_vars/*.yml 4 
| site/host_vars/*.yml 5 | site/mitogen 6 | site/tui_configurator 7 | -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | include: 2 | - project: 'ClusterVision/gitlab-cicd-pipelines' 3 | file: '/trinityx-combined-pipeline.yml' 4 | 5 | stages: 6 | - test 7 | -------------------------------------------------------------------------------- /TrinityX.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/TrinityX.pdf -------------------------------------------------------------------------------- /img/triX_300.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/img/triX_300.png -------------------------------------------------------------------------------- /img/trinityX_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/img/trinityX_logo.png -------------------------------------------------------------------------------- /img/trinityxbanner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/img/trinityxbanner.png -------------------------------------------------------------------------------- /img/trinityxbanner_scaled.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/img/trinityxbanner_scaled.png -------------------------------------------------------------------------------- /packaging/iso/additional-files.lst: -------------------------------------------------------------------------------- 1 | https://updates.clustervision.com/trix/trix.repo 2 | https://updates.clustervision.com/trinity/10.2/centos/trinity.repo 3 | -------------------------------------------------------------------------------- /packaging/iso/additional-packages.lst: -------------------------------------------------------------------------------- 1 | ansible 2 | luna-ansible 3 | -------------------------------------------------------------------------------- /packaging/iso/product/run/install/product/pyanaconda/installclasses/trinityx.py: -------------------------------------------------------------------------------- 1 | from pyanaconda.installclasses.centos import RHELBaseInstallClass 2 | from pyanaconda.product import productName 3 | 4 | 5 | class TrinityXBaseInstallClass(RHELBaseInstallClass): 6 | name = "TrinityX" 7 | sortPriority = 30000 8 | if not productName.startswith("CentOS"): 9 | hidden = True 10 | 11 | defaultFS = "ext4" 12 | -------------------------------------------------------------------------------- /packaging/iso/product/usr/share/anaconda/pixmaps/trix-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/packaging/iso/product/usr/share/anaconda/pixmaps/trix-logo.png -------------------------------------------------------------------------------- /site/ansible.cfg: 
-------------------------------------------------------------------------------- 1 | [defaults] 2 | host_key_checking = False 3 | inventory = hosts,/etc/ansible/hosts,dynamic_hosts 4 | retry_files_save_path = ~/.ansible/retry-files 5 | log_path = /var/log/trinity.log 6 | -------------------------------------------------------------------------------- /site/compute-redhat.yml: -------------------------------------------------------------------------------- 1 | compute-default.yml -------------------------------------------------------------------------------- /site/extras/compute-default-base.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # create an absolutely minimal base image for redistribution 4 | 5 | - import_playbook: imports/trinity-redhat-image-create.yml 6 | vars: 7 | image_name: compute 8 | # The password to set up for the root user in the image. 9 | # If empty, it will be generated at random. 10 | # 11 | image_password: 12 | # 13 | image_create_minimal: true 14 | image_create_full: false 15 | 16 | # pack afterwards with e.g. 17 | # /usr/bin/tar -C /trinity/images/compute --one-file-system --xattrs --selinux --acls --checkpoint=100000 --use-compress-program=/usr/bin/lbzip2 -c -f /trinity/images/base-image-Rocky-9.3.tgz . 18 | 19 | -------------------------------------------------------------------------------- /site/extras/compute-opensuse-base.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # create an absolutely minimal base image for redistribution 4 | 5 | - import_playbook: imports/trinity-opensuse-image-create.yml 6 | vars: 7 | image_name: "opensuse" 8 | # The password to set up for the root user in the image. 9 | # If empty, it will be generated at random. 10 | # 11 | image_password: 12 | # 13 | image_create_minimal: true 14 | image_create_full: false 15 | # 16 | # If supplied, the below release will be used. 17 | # 15.6 has a bug in libcurl 18 | # 16.0 is missing lbzip2 19 | opensuse_release: "leap/15.5" 20 | 21 | 22 | # pack afterwards with e.g. 23 | # /usr/bin/tar -C /trinity/images/opensuse --one-file-system --xattrs --selinux --acls --checkpoint=100000 --use-compress-program=/usr/bin/lbzip2 -c -f /trinity/images/base-image-OpenSUSE-15.5.tgz . 24 | 25 | -------------------------------------------------------------------------------- /site/extras/compute-ubuntu-base.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # create an absolutely minimal base image for redistribution 4 | 5 | - import_playbook: imports/trinity-ubuntu-image-create.yml 6 | vars: 7 | image_name: ubuntu 8 | # The password to set up for the root user in the image. 9 | # If empty, it will be generated at random. 10 | # 11 | image_password: 12 | # 13 | image_create_minimal: true 14 | image_create_full: false 15 | 16 | # pack afterwards with e.g. 17 | # /usr/bin/tar -C /trinity/images/ubuntu --one-file-system --xattrs --selinux --acls --checkpoint=100000 --use-compress-program=/usr/bin/lbzip2 -c -f /trinity/images/base-image-Ubuntu-20.tgz .
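18 | # to unpack such an archive again, the inverse works, e.g. (a sketch; adjust the image path and archive name to your setup):
19 | # /usr/bin/tar -C /trinity/images/ubuntu --xattrs --selinux --acls --use-compress-program=/usr/bin/lbzip2 -x -f /trinity/images/base-image-Ubuntu-20.tgz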
20 | -------------------------------------------------------------------------------- /site/hosts.example: -------------------------------------------------------------------------------- 1 | [controllers] 2 | controller1 ansible_host=127.0.0.1 ansible_connection=local 3 | -------------------------------------------------------------------------------- /site/imports/group_vars: -------------------------------------------------------------------------------- 1 | ../group_vars -------------------------------------------------------------------------------- /site/imports/imports: -------------------------------------------------------------------------------- 1 | . -------------------------------------------------------------------------------- /site/imports/roles: -------------------------------------------------------------------------------- 1 | ../roles -------------------------------------------------------------------------------- /site/roles/cloud/aws/README.md: -------------------------------------------------------------------------------- 1 | # TrinityX Cloud With Amazon Web Services 2 | 3 | Trinity Cloud Support with AWS. Initially it will support infrastructure creation and management with Terraform and the AWS CLI. 4 | -------------------------------------------------------------------------------- /site/roles/cloud/aws/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | vhd_name: 'aws' 4 | vhd_core_packages: 5 | - "@core" 6 | - "kernel" 7 | - "grub2" 8 | - "grub2-efi" 9 | - "grub2-efi-x64" 10 | - "grub2-efi-x64-modules" 11 | - "grub2-efi-x64-modules" 12 | - "efibootmgr" 13 | - "bind-utils" 14 | - "shim-x64" 15 | - "edk2-ovmf" 16 | - "lvm2.x86_64" 17 | - "WALinuxAgent" 18 | - "cloud-init" 19 | - "cloud-utils-growpart" 20 | - "gdisk" 21 | - "hyperv-daemons" 22 | 23 | on_premise_controller_ip: 10.141.255.254 24 | include_ipxe_efi_build: false 25 | 26 | -------------------------------------------------------------------------------- /site/roles/cloud/aws/tasks/aws.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Enable the AWS CLI Support for the TrinityX Installation.
3 | 4 | - name: Check if aws-cli is already Installed 5 | shell: "aws help | grep AWS" 6 | register: aws_check 7 | ignore_errors: true 8 | 9 | - name: Remove the previous Installation 10 | shell: "{{ pre_install }}" 11 | with_items: 12 | - "rm -rf /usr/local/bin/aws" 13 | - "rm -rf /usr/local/bin/aws_completer" 14 | - "rm -rf /usr/local/aws-cli" 15 | - "rm -rf ~/.aws/" 16 | loop_control: 17 | loop_var: pre_install 18 | when: aws_check.stdout 19 | 20 | - name: Download the AWS installation file 21 | shell: "curl 'https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip' -o '/tmp/awscliv2.zip'" 22 | 23 | - name: Unzip the installer 24 | shell: "unzip /tmp/awscliv2.zip -d /tmp/" 25 | 26 | - name: Installing AWS CLI 27 | shell: "sh /tmp/aws/install" 28 | 29 | - name: Clean up 30 | shell: "rm -rf /tmp/aws" 31 | -------------------------------------------------------------------------------- /site/roles/cloud/aws/templates/grub.cfg.j2: -------------------------------------------------------------------------------- 1 | 2 | # TrinityX GRUB Configuration 3 | set default=1 4 | set timeout=5 5 | set menu_color_normal=white/black 6 | set menu_color_highlight=black/light-gray 7 | 8 | menuentry 'Trinity iPXE' { 9 | insmod chain 10 | set root='hd0,gpt1' 11 | chainloader /efi/rocky/ipxe.efi 12 | } 13 | 14 | menuentry 'Rocky Linux (Default)' { 15 | insmod efi_gop 16 | insmod efi_uga 17 | insmod ext2 18 | set root='hd0,gpt4' 19 | linux /boot/{{ kernel_version }} root=UUID={{ root_uuid }} ro console=ttyS0 earlyprintk=ttyS0 20 | initrd /boot/{{ initrd_version }} 21 | } 22 | 23 | menuentry 'Rocky Linux (Rescue)' { 24 | insmod efi_gop 25 | insmod efi_uga 26 | insmod ext2 27 | set root='hd0,gpt4' 28 | linux /boot/{{ rescue_kernel_version }} root=UUID={{ root_uuid }} ro rescue console=ttyS0 earlyprintk=ttyS0 29 | initrd /boot/{{ rescue_initrd_version }} 30 | } 31 | -------------------------------------------------------------------------------- /site/roles/cloud/azure/README.md: -------------------------------------------------------------------------------- 1 | # TrinityX Cloud With Azure 2 | 3 | Trinity Cloud Support with Azure. Initially it will support infrastructure creation and management with Terraform and the Microsoft Azure CLI. 4 | -------------------------------------------------------------------------------- /site/roles/cloud/azure/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | vhd_name: 'azure' 4 | vhd_core_packages: 5 | - "@core" 6 | - "kernel" 7 | - "grub2" 8 | - "grub2-efi" 9 | - "grub2-efi-x64" 10 | - "grub2-efi-x64-modules" 11 | - "grub2-efi-x64-modules" 12 | - "efibootmgr" 13 | - "bind-utils" 14 | - "shim-x64" 15 | - "edk2-ovmf" 16 | - "lvm2.x86_64" 17 | - "WALinuxAgent" 18 | - "cloud-init" 19 | - "cloud-utils-growpart" 20 | - "gdisk" 21 | - "hyperv-daemons" 22 | 23 | on_premise_controller_ip: 10.141.255.254 24 | include_ipxe_efi_build: false 25 | 26 | -------------------------------------------------------------------------------- /site/roles/cloud/azure/tasks/debian-vhd.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Creating the Azure VHD for GRUB Booting. 3 | 4 | - name: Create Debian VHD 5 | debug: 6 | msg: "This Feature Will be Available In upcoming releases."
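7 | # note: the Debian VHD build is a placeholder for now; the task above only prints the notice.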
8 | -------------------------------------------------------------------------------- /site/roles/cloud/azure/templates/grub.cfg.j2: -------------------------------------------------------------------------------- 1 | 2 | # TrinityX GRUB Configuration 3 | set default=1 4 | set timeout=5 5 | set menu_color_normal=white/black 6 | set menu_color_highlight=black/light-gray 7 | 8 | menuentry 'Trinity iPXE' { 9 | insmod chain 10 | set root='hd0,gpt1' 11 | chainloader /efi/rocky/ipxe.efi 12 | } 13 | 14 | menuentry 'Rocky Linux (Default)' { 15 | insmod efi_gop 16 | insmod efi_uga 17 | insmod ext2 18 | set root='hd0,gpt2' 19 | linux /{{ kernel_version }} root=UUID={{ root_uuid }} ro console=ttyS0 earlyprintk=ttyS0 20 | initrd /{{ initrd_version }} 21 | } 22 | 23 | menuentry 'Rocky Linux (Rescue)' { 24 | insmod efi_gop 25 | insmod efi_uga 26 | insmod ext2 27 | set root='hd0,gpt2' 28 | linux /{{ rescue_kernel_version }} root=UUID={{ root_uuid }} ro rescue console=ttyS0 earlyprintk=ttyS0 29 | initrd /{{ rescue_initrd_version }} 30 | } 31 | -------------------------------------------------------------------------------- /site/roles/cloud/controller/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | cloudname: undefined 4 | 5 | -------------------------------------------------------------------------------- /site/roles/cloud/gcp/README.md: -------------------------------------------------------------------------------- 1 | # TrinityX Cloud With Google Cloud Platform 2 | 3 | Trinity Cloud Support with GCP. Initially it will support infrastructure creation and management with Terraform and the gcloud CLI. 4 | -------------------------------------------------------------------------------- /site/roles/cloud/gcp/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/cloud/gcp/tasks/redhat-gcloud.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Enable the GCP CLI Support for the TrinityX Installation. 3 | 4 | - name: Check if gcloud is already Installed 5 | shell: "gcloud version" 6 | register: gcloud_check 7 | ignore_errors: true 8 | 9 | - name: Update DNF with gcloud CLI repository information 10 | shell: | 11 | tee -a /etc/yum.repos.d/google-cloud-sdk.repo << EOM 12 | [google-cloud-cli] 13 | name=Google Cloud CLI 14 | baseurl=https://packages.cloud.google.com/yum/repos/cloud-sdk-el9-x86_64 15 | enabled=1 16 | gpgcheck=1 17 | repo_gpgcheck=0 18 | gpgkey=https://packages.cloud.google.com/yum/doc/rpm-package-key.gpg 19 | EOM 20 | when: gcloud_check.stderr 21 | 22 | - name: Install the gcloud CLI 23 | dnf: 24 | disable_gpg_check: true 25 | name: "google-cloud-cli" 26 | state: present 27 | when: gcloud_check.stderr 28 | 29 | 30 | -------------------------------------------------------------------------------- /site/roles/cloud/ipxe/README.md: -------------------------------------------------------------------------------- 1 | # iPXE uEFI 2 | 3 | Trinity Cloud Support. Builds the iPXE UEFI binary used for GRUB booting.
4 | -------------------------------------------------------------------------------- /site/roles/cloud/ipxe/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | controller_ip: 10.141.255.254 4 | skip_ipxe_efi_build: false 5 | 6 | -------------------------------------------------------------------------------- /site/roles/cloud/ipxe/templates/embed.ipxe.j2: -------------------------------------------------------------------------------- 1 | #!ipxe 2 | set skipifconf true 3 | 4 | dhcp && goto netboot || goto dhcperror 5 | 6 | :dhcperror 7 | prompt --key s --timeout 10000 DHCP failed, hit 's' for the iPXE shell; reboot in 10 seconds && shell || reboot 8 | 9 | :netboot 10 | chain http://{{ controller_ip }}:7051/boot || goto chainerror 11 | 12 | :chainerror 13 | prompt --key s --timeout 10000 Chainloading failed, hit 's' for the iPXE shell; reboot in 10 seconds && shell || reboot 14 | -------------------------------------------------------------------------------- /site/roles/cloud/luna/templates/rawhostlist.py.j2: -------------------------------------------------------------------------------- 1 | #!{{ trix_local }}/python/bin/python3 2 | 3 | import os 4 | import sys 5 | import hostlist 6 | 7 | def main(argv): 8 | rawhosts=None 9 | if (len(argv) == 0): 10 | sys.exit(1) 11 | while len(argv)>0: 12 | item = argv.pop(0) 13 | if not rawhosts: 14 | rawhosts=item 15 | else: 16 | rawhosts+=','+item 17 | hosts = hostlist.expand_hostlist(rawhosts) 18 | if not hosts: 19 | sys.exit(1) 20 | for host in hosts: 21 | print(host) 22 | 23 | main(sys.argv[1:]) 24 | -------------------------------------------------------------------------------- /site/roles/cloud/terraform/README.md: -------------------------------------------------------------------------------- 1 | # TrinityX Cloud With Terraform 2 | 3 | Trinity Cloud Support with Terraform. Initially it will support infrastructure creation and management for the supported cloud providers. 4 | -------------------------------------------------------------------------------- /site/roles/cloud/terraform/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | cloud_terraform_repository: https://github.com/clustervision/trinityx-terraform.git 4 | 5 | -------------------------------------------------------------------------------- /site/roles/cloud/terraform/tasks/redhat-terraform.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Enable the Terraform Support for the TrinityX Installation.
3 | 4 | - name: Check if Terraform is already Installed 5 | shell: "terraform -h" 6 | register: terraform_check 7 | ignore_errors: true 8 | 9 | - block: 10 | - name: Install Terraform Dependency 11 | yum: 12 | disable_gpg_check: true 13 | name: "yum-utils" 14 | state: present 15 | 16 | - name: Add Terraform Repository 17 | shell: "yum-config-manager --add-repo https://rpm.releases.hashicorp.com/RHEL/hashicorp.repo" 18 | 19 | - name: Install Terraform 20 | yum: 21 | disable_gpg_check: true 22 | name: "terraform" 23 | state: present 24 | when: terraform_check.rc|int != 0 25 | 26 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | alertx_packages: [] 4 | 5 | alertx_noarch_pip_repository: "https://updates.clustervision.com/trinityx/{{ trix_version }}/alertx/noarch/pip/{{ trix_stream }}" 6 | alertx_cli_pip: alertx-1.0-py3-none-any.whl 7 | 8 | alertx_drainer_base_dir: '{{ trix_local }}/alertx/drainer' 9 | alertx_drainer_daemon_dir: "{{ alertx_drainer_base_dir }}/daemon" 10 | alertx_drainer_ini_dir: "{{ alertx_drainer_base_dir }}/config" 11 | alertx_drainer_log_dir: "/var/log/alertx" 12 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handlers file for alertx 3 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: trix-tree 4 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # controller 4 | - block: 5 | - include_tasks: cli.yml 6 | - include_tasks: drainer.yml 7 | when: on_controller 8 | 9 | # below for images 10 | - block: 11 | - name: Render AlertX hook script 12 | template: 13 | src: 'alertx-hook.sh.j2' 14 | dest: '/usr/local/sbin/alertx-hook.sh' 15 | owner: root 16 | group: root 17 | mode: 0700 18 | 19 | - name: Render /etc/systemd/system/alertx-hook.service 20 | template: 21 | src: 'alertx-hook.systemd.j2' 22 | dest: '/etc/systemd/system/alertx-hook.service' 23 | owner: root 24 | group: root 25 | mode: 0644 26 | 27 | - name: Enable alertx-hook service 28 | service: 29 | name: 'alertx-hook.service' 30 | enabled: true 31 | when: in_image or on_node|default(False) 32 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/templates/alertx-cli-wrapper.j2: -------------------------------------------------------------------------------- 1 | #!{{ trix_local }}/python/bin/python3 2 | #/usr/bin/env python3 3 | # -*- coding: utf-8 -*- 4 | 5 | from alertx.main import AlertX 6 | 7 | ALERTX = AlertX().main() 8 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/templates/alertx-drainer.service.j2: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=AlertX drainer App Service 3 | After=network.target 4 | 5 | [Service] 6 | WorkingDirectory={{ alertx_drainer_base_dir }} 7 | ExecStart=/bin/bash -c "source {{ trix_local }}/python/bin/activate && cd {{
alertx_drainer_daemon_dir }} && exec {{ trix_local }}/python/bin/python3 drainer.py" 8 | 9 | [Install] 10 | WantedBy=multi-user.target 11 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/templates/alertx-hook.systemd.j2: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=AlertX startup hook plugin call 3 | After=multi-user.target 4 | 5 | [Service] 6 | Type=oneshot 7 | ExecStart=/usr/local/sbin/alertx-hook.sh 8 | 9 | [Install] 10 | WantedBy=multi-user.target 11 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/templates/drainer.ini.j2: -------------------------------------------------------------------------------- 1 | [LOGGER] 2 | DEBUG_MODE = false 3 | LOG_DIR = {{ alertx_drainer_log_dir }} 4 | 5 | [DRAINING] 6 | AUTO_UNDRAIN = true 7 | DRAIN_INTERVAL = 60 8 | 9 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/templates/luna2.ini.j2: -------------------------------------------------------------------------------- 1 | [API] 2 | USERNAME = {{ luna_username }} 3 | PASSWORD = {{ luna_password }} 4 | EXPIRY = 1h 5 | SECRET_KEY = {{ luna_secret_key }} 6 | ENDPOINT = {{ ansible_hostname }}:7050 7 | # setting something else is optional: 8 | #ENDPOINT = {{ trix_ctrl_hostname }}.{{ trix_domain }}:7050 9 | PROTOCOL = {{ luna_protocol }} 10 | VERIFY_CERTIFICATE = {{ luna_verify_certificate }} 11 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/vars/CentOS8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/alertx/vars/CentOS8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/alertx/vars/CentOS9.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/vars/RedHat8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/alertx/vars/RedHat8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/alertx/vars/RedHat9.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/alertx/vars/Rocky8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/alertx/vars/Rocky8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/alertx/vars/Rocky9.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/aria2c/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handlers file for aria2c 3 | - name: "reload aria2c" 4 | service: 5 | 
daemon_reload: yes 6 | name: aria2c.service 7 | state: reloaded 8 | -------------------------------------------------------------------------------- /site/roles/trinity/aria2c/templates/aria2c-daemon.sh.j2: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | /usr/bin/aria2c --summary-interval=600 --enable-dht=false --seed-ratio=100000.0 -V -d{{ trix_luna }}/files/ --check-certificate=false {{ trix_luna }}/files/*.torrent 4 | 5 | # other useful options are: 6 | # --no-conf 7 | # --console-log-level=error 8 | # --log-level=error 9 | # --download-result=hide 10 | # --summary-interval=0 11 | 12 | -------------------------------------------------------------------------------- /site/roles/trinity/aria2c/templates/aria2c.service.j2: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Aria2c systemd script 3 | 4 | [Service] 5 | Type=simple 6 | ExecStart={{ trix_sbin }}/aria2c-daemon.sh 7 | SendSIGHUP=yes 8 | StandardOutput=file:/var/log/aria2c.log 9 | StandardError=file:/var/log/aria2c.log 10 | 11 | [Install] 12 | WantedBy=multi-user.target 13 | -------------------------------------------------------------------------------- /site/roles/trinity/bind/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for bind 3 | 4 | bind_packages: 5 | - bind 6 | 7 | # luna handles the forwarders - Antoine 8 | # the below will only go into resolv.conf 9 | bind_dns_forwarders: 10 | - '8.8.8.8' 11 | - '8.8.4.4' 12 | 13 | bind_dnssec_enable: 'no' 14 | bind_db_path: '/var/named' 15 | 16 | resolv_server: '127.0.0.1' 17 | resolv_search_domains: 'cluster ipmi' 18 | 19 | trix_local: "" 20 | -------------------------------------------------------------------------------- /site/roles/trinity/bind/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for bind 3 | 4 | - name: restart named 5 | service: 6 | name: named 7 | state: restarted 8 | -------------------------------------------------------------------------------- /site/roles/trinity/bind/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/bind/templates/resolv.conf.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | {% if ansible_dns['nameservers'] is defined or bind_dns_forwarders %} 4 | search {{ resolv_search_domains }} 5 | nameserver {{ resolv_server }} 6 | {% if bind_dns_forwarders %} 7 | {% for f in bind_dns_forwarders %} 8 | nameserver {{ f }} 9 | {% endfor %} 10 | {% else %} 11 | {% for f in ansible_dns['nameservers'] %} 12 | {% if f not in ansible_all_ipv4_addresses%} 13 | nameserver {{ f }} 14 | {% endif %} 15 | {% endfor %} 16 | {% endif %} 17 | {% endif %} 18 | -------------------------------------------------------------------------------- /site/roles/trinity/check-latest-kernel/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Clear yum cache 4 | shell: yum clean all 5 | args: 6 | warn: false 7 | changed_when: false 8 | 9 | - name: Get latest available kernel 10 | shell: yum list kernel | awk '$1=="kernel.{{ system_arch }}"{print $2}' | sort -V | tail -1 11 | register: yum_kernel_ver 12 | args: 13 |
warn: false 14 | changed_when: false 15 | 16 | - name: Check if we are running the latest kernel 17 | fail: 18 | msg: "You are not running the latest kernel. Please update and reboot." 19 | when: yum_kernel_ver.stdout + "." + ansible_architecture != ansible_kernel 20 | -------------------------------------------------------------------------------- /site/roles/trinity/chrony/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for chrony 3 | 4 | chrony_packages: 5 | - chrony 6 | 7 | chrony_upstream_servers: 8 | - '0.centos.pool.ntp.org' 9 | - '1.centos.pool.ntp.org' 10 | - '2.centos.pool.ntp.org' 11 | - '3.centos.pool.ntp.org' 12 | 13 | # allow all if no networks are specified 14 | chrony_allow_networks: [] 15 | -------------------------------------------------------------------------------- /site/roles/trinity/chrony/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for chrony 3 | 4 | - name: restart chrony 5 | service: 6 | name: chronyd 7 | state: restarted 8 | when: not in_image 9 | -------------------------------------------------------------------------------- /site/roles/trinity/chrony/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks file for chrony 3 | 4 | - name: Install chrony packages 5 | yum: 6 | name: "{{ chrony_packages }}" 7 | state: present 8 | tags: install-only 9 | retries: "{{ rpm_retries | default(3) }}" 10 | delay: "{{ rpm_delay | default(15) }}" 11 | 12 | - name: Update chrony configuration file 13 | template: 14 | src: chrony.conf.j2 15 | dest: /etc/chrony.conf 16 | owner: root 17 | group: root 18 | mode: 0644 19 | notify: restart chrony 20 | 21 | - name: Enable chrony service 22 | systemd: 23 | name: chronyd 24 | enabled: 'yes' 25 | 26 | - name: Start chrony service 27 | service: 28 | name: chronyd 29 | state: started 30 | when: not in_image 31 | -------------------------------------------------------------------------------- /site/roles/trinity/cleanup-legacy/defaults/main.yml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/cleanup-legacy/defaults/main.yml -------------------------------------------------------------------------------- /site/roles/trinity/cleanup-legacy/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handlers file for scripts 3 | -------------------------------------------------------------------------------- /site/roles/trinity/cleanup-legacy/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # include tasks 3 | - name: Include tasks from other files 4 | include_tasks: "{{ task_file }}" 5 | with_items: 6 | - 'monitoring.yml' 7 | - 'ood.yml' 8 | loop_control: 9 | loop_var: task_file 10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /site/roles/trinity/cleanup-legacy/tasks/ood.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Collect stat info of ood apps folder 3 | stat: 4 | path: /var/www/ood/apps/sys 5 | register: ood_apps_folder 6 | 7 | - block: 8 | - name: List all ood apps 9 | command: "ls /var/www/ood/apps/sys" 10 | register: ood_legacy_apps 11 | when:
ood_apps_folder.stat.exists 12 | 13 | - name: Remove legacy ood app folders 14 | command: "rm -rf /var/www/ood/apps/sys/{{ item }}" 15 | with_items: 16 | - trinity_sensu 17 | - trinity_grafana 18 | - trinity_prometheus 19 | - trinity_prometheus_alertmanager 20 | when: item in ood_legacy_apps.stdout_lines | default([]) 21 | 22 | when: ood_apps_folder.stat.exists 23 | ignore_errors: yes 24 | 25 | 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /site/roles/trinity/config-genders/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handlers file for scripts 3 | -------------------------------------------------------------------------------- /site/roles/trinity/config-manager/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handlers file for scripts 3 | -------------------------------------------------------------------------------- /site/roles/trinity/config-slurm/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handlers file for scripts 3 | -------------------------------------------------------------------------------- /site/roles/trinity/cv_support/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for cv_support 3 | 4 | trix_tools: 5 | - 'slurmibtopology.sh' 6 | - 'request-remote-assistance' 7 | - 'request-remote-assistance-automated' 8 | 9 | cv_support_packages: 10 | - tigervnc-server 11 | - firefox 12 | - icedtea-web 13 | - icewm 14 | - python3-pip 15 | - python3-requests 16 | 17 | cv_support_pip_packages: 18 | - certifi 19 | 20 | project_id: '000000' 21 | trix_version: '0' 22 | ha: 'False' 23 | trix_ctrl_ip: '127.0.0.1' 24 | -------------------------------------------------------------------------------- /site/roles/trinity/cv_support/files/examples/slurm-test.job: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | #SBATCH -t 12:00:00 3 | #SBATCH -o test-%j.out 4 | #SBATCH -J CPU 5 | #SBATCH -n 16 6 | #SBATCH -N 1 7 | #SBATCH -p defq 8 | echo "Execution is `date`" 9 | echo "On `hostname`" 10 | 11 | echo "CPUs: `grep -c processor /proc/cpuinfo`" 12 | 13 | module load gnu8 openmpi3 openblas 14 | export OMP_NUM_THREADS=1 15 | 16 | mpirun ./hello_mpi 17 | -------------------------------------------------------------------------------- /site/roles/trinity/cv_support/files/examples/test.c: -------------------------------------------------------------------------------- 1 | #include <stdio.h> 2 | 3 | int main() 4 | { 5 | printf("Hello World!\n"); 6 | return 0; 7 | } 8 | -------------------------------------------------------------------------------- /site/roles/trinity/cv_support/files/examples/test.f90: -------------------------------------------------------------------------------- 1 | program main 2 | print *,"Hello World!"
3 | end program main 4 | -------------------------------------------------------------------------------- /site/roles/trinity/cv_support/files/examples/test_mpi.c: -------------------------------------------------------------------------------- 1 | #include <mpi.h> 2 | #include <stdio.h> 3 | 4 | int main(int argc, char** argv) { 5 | // Initialize the MPI environment 6 | MPI_Init(NULL, NULL); 7 | 8 | // Get the number of processes 9 | int world_size; 10 | MPI_Comm_size(MPI_COMM_WORLD, &world_size); 11 | 12 | // Get the rank of the process 13 | int world_rank; 14 | MPI_Comm_rank(MPI_COMM_WORLD, &world_rank); 15 | 16 | // Get the name of the processor 17 | char processor_name[MPI_MAX_PROCESSOR_NAME]; 18 | int name_len; 19 | MPI_Get_processor_name(processor_name, &name_len); 20 | 21 | // Print off a hello world message 22 | printf("Hello world from processor %s, rank %d out of %d processors\n", 23 | processor_name, world_rank, world_size); 24 | 25 | // Finalize the MPI environment. 26 | MPI_Finalize(); 27 | } 28 | -------------------------------------------------------------------------------- /site/roles/trinity/cv_support/files/examples/test_mpi.f90: -------------------------------------------------------------------------------- 1 | program hello 2 | include 'mpif.h' 3 | integer rank, size, ierror, tag, status(MPI_STATUS_SIZE) 4 | 5 | call MPI_INIT(ierror) 6 | call MPI_COMM_SIZE(MPI_COMM_WORLD, size, ierror) 7 | call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierror) 8 | print*, 'node', rank, ': Hello world' 9 | call MPI_FINALIZE(ierror) 10 | end 11 | -------------------------------------------------------------------------------- /site/roles/trinity/cv_support/files/remote-assistance.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Remote assistance 3 | After=network.target 4 | StartLimitIntervalSec=0 5 | 6 | [Service] 7 | ExecStart=/usr/local/bin/request-remote-assistance-automated -c -l 8 | Type=simple 9 | Restart=always 10 | RestartSec=3 11 | StandardOutput=null 12 | 13 | [Install] 14 | WantedBy=multi-user.target 15 | -------------------------------------------------------------------------------- /site/roles/trinity/cv_support/files/request-remote-assistance-automated: -------------------------------------------------------------------------------- 1 | request-remote-assistance -------------------------------------------------------------------------------- /site/roles/trinity/cv_support/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: reload systemd daemon 3 | systemd: 4 | daemon_reload: true 5 | -------------------------------------------------------------------------------- /site/roles/trinity/cv_support/templates/bash_profile.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | # .bash_profile 3 | 4 | # Get the aliases and functions 5 | if [ -f ~/.bashrc ]; then 6 | .
~/.bashrc 7 | fi 8 | 9 | # User specific environment and startup programs 10 | 11 | PATH=$PATH:$HOME/bin 12 | 13 | export PATH 14 | 15 | export HISTTIMEFORMAT="%d/%m/%y %T " 16 | {% if ha %} 17 | CLUSTERIP={{ trix_ctrl_ip }} 18 | export PS1="\$(hostname -I | tr ' ' '\n' | grep -q ${CLUSTERIP} && echo || echo '\[\e[1;31m\](passive) \[\e[0m\]'){{ project_id }} \D{{ '{%' }}T} $PS1" 19 | {% else %} 20 | export PS1="{{ project_id }} \D{{ '{%' }}T} $PS1" 21 | {% endif %} 22 | -------------------------------------------------------------------------------- /site/roles/trinity/cv_support/templates/motd.j2: -------------------------------------------------------------------------------- 1 | TrinityX {{ trix_version|default("14.1") }} 2 | -------------------------------------------------------------------------------- 3 | -------------------------------------------------------------------------------- /site/roles/trinity/environment-modules/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for environment-modules 3 | 4 | envmodules_version: tr17.10 5 | envmodules_packages: 6 | - environment-modules 7 | - 'userspace-modulegroups-{{ envmodules_version }}' 8 | 9 | envmodules_files_path: '{{ trix_shared }}/modules' 10 | envmodules_files_subdirs: 11 | - groups 12 | 13 | envmodules_default_list: 14 | - cmake-3.11.2-tr17.10 15 | - gcc-7.2.0-tr17.10 16 | - gdb-8.0.1-tr17.10 17 | - hwloc-1.11.8-tr17.10 18 | - intel-runtime-2018.0.128-tr17.10 19 | - iozone-4.7.1-tr17.10 20 | - likwid-4.3.0-tr17.10 21 | - osu-benchmarks-5.4.1-tr17.10 22 | - python2-2.7.14-tr17.10 23 | - python3-3.6.3-tr17.10 24 | 25 | envmodules_list: '{{ envmodules_default_list + additional_env_modules|default([]) }}' 26 | 27 | -------------------------------------------------------------------------------- /site/roles/trinity/environment-modules/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: repos 4 | repos: 5 | - repo: https://updates.clustervision.com/userspace/userspace-release.x86_64.rpm 6 | -------------------------------------------------------------------------------- /site/roles/trinity/fail2ban/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for fail2ban 3 | 4 | fail2ban_packages: 5 | - fail2ban-server 6 | - fail2ban-firewalld 7 | - fail2ban-mail 8 | - fail2ban-systemd 9 | -------------------------------------------------------------------------------- /site/roles/trinity/fail2ban/files/trinityx.conf: -------------------------------------------------------------------------------- 1 | # TrinityX configuration 2 | 3 | [DEFAULT] 4 | ignoreip = 127.0.0.1/8 5 | bantime = 300 6 | 7 | [sshd] 8 | enabled = true 9 | 10 | -------------------------------------------------------------------------------- /site/roles/trinity/fail2ban/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for fail2ban 3 | 4 | - name: restart fail2ban 5 | service: 6 | name: fail2ban 7 | state: restarted 8 | -------------------------------------------------------------------------------- /site/roles/trinity/fail2ban/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks file for fail2ban 3 | 4 | - name: Install fail2ban packages 5 | yum: 6 | name: '{{ fail2ban_packages }}' 7 | state: present 8 | tags: 
install-only 9 | retries: "{{ rpm_retries | default(3) }}" 10 | delay: "{{ rpm_delay | default(15) }}" 11 | 12 | - name: Update fail2ban configuration file 13 | copy: 14 | src: 'trinityx.conf' 15 | dest: '/etc/fail2ban/jail.d/trinityx.conf' 16 | notify: restart fail2ban 17 | 18 | - name: Start and enable fail2ban service 19 | service: 20 | name: fail2ban 21 | state: started 22 | enabled: 'yes' 23 | -------------------------------------------------------------------------------- /site/roles/trinity/firewalld/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for firewalld 3 | 4 | firewalld_packages: 5 | - firewalld 6 | 7 | firewalld_public_interfaces: [] 8 | firewalld_trusted_interfaces: [] 9 | firewalld_public_tcp_ports: [] 10 | firewalld_public_udp_ports: [] 11 | 12 | firewalld_direct_rules: [] 13 | 14 | firewalld_masquerade_zone: public 15 | firewalld_update_ifcfg_files: true 16 | -------------------------------------------------------------------------------- /site/roles/trinity/firewalld/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: reload firewalld 4 | service: 5 | name: firewalld 6 | state: reloaded 7 | 8 | - name: reload NetworkManager 9 | service: 10 | name: NetworkManager 11 | state: restarted 12 | listen: reload firewalld 13 | 14 | - name: fix firewall zones 15 | shell: /tmp/fix-firewalld-zones.sh 16 | ignore_errors: true 17 | listen: reload firewalld 18 | 19 | -------------------------------------------------------------------------------- /site/roles/trinity/firewalld/tasks/interface.yml: -------------------------------------------------------------------------------- 1 | - set_fact: 2 | firewall_interface_not_found: '{{ item }}' 3 | when: hostvars[inventory_hostname]['ansible_'+item]['ipv4']['address'] == inner_loop_var 4 | with_items: 5 | - "{{ ansible_interfaces }}" 6 | 7 | -------------------------------------------------------------------------------- /site/roles/trinity/firewalld/templates/direct-rules.xml.j2: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="utf-8"?> 2 | <direct> 3 | {% for rule in firewalld_direct_rules %} 4 | {{ rule.rule }} 5 | {% endfor %} 6 | </direct> 7 | -------------------------------------------------------------------------------- /site/roles/trinity/firewalld/templates/fix-firewalld-zones.sh.j2: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # restart done in firewalld handler 4 | #systemctl restart NetworkManager 5 | 6 | {% for interface in firewalld_trusted_interfaces|unique %} 7 | firewall-cmd --list-all --zone=trusted | grep interface | awk -F ': ' '{ print $2 }' | grep -w {{ interface }} || firewall-cmd --permanent --zone=trusted --change-interface={{ interface }} 8 | {% endfor %} 9 | 10 | {% for interface in firewalld_public_interfaces|unique %} 11 | firewall-cmd --list-all --zone=public | grep interface | awk -F ': ' '{ print $2 }' | grep -w {{ interface }} || firewall-cmd --permanent --zone=public --change-interface={{ interface }} 12 | {% endfor %} 13 | 14 | -------------------------------------------------------------------------------- /site/roles/trinity/grafana/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | ldap_auth: false 3 | 4 | enable_ssl: false 5 | grafana_port: 3000 6 | 7 | # Theme is light or dark 8 | grafana_theme: 'light' 9 | grafana_org_name:
TrinityX 10 | grafana_login_hint: 'TrinityX username' 11 | grafana_admin: 'admin' 12 | # Make all dashboards available (view only) 13 | anonymous_access: true 14 | grafana_rpm: 'https://dl.grafana.com/oss/release/grafana-10.1.2-1.{{system_arch}}.rpm' 15 | 16 | grafana_admin_group: admins 17 | 18 | # the certificate below is not valid; unclear why it was here in the first place - Antoine 19 | #ssl_certificate: /etc/pki/tls/certs/localhost.crt 20 | #ssl_certificate_key: /etc/pki/tls/private/localhost.key 21 | 22 | grafana_provisioning_dir: /etc/grafana/provisioning 23 | grafana_dashboard_dir: /var/lib/grafana/dashboards 24 | -------------------------------------------------------------------------------- /site/roles/trinity/grafana/files/triX_7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/grafana/files/triX_7.png -------------------------------------------------------------------------------- /site/roles/trinity/grafana/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart grafana-server 3 | systemd: 4 | name: grafana-server 5 | state: restarted 6 | -------------------------------------------------------------------------------- /site/roles/trinity/grafana/templates/trinityx-dashboards.yaml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: 1 3 | 4 | providers: 5 | - name: 'TrinityX' 6 | orgId: 1 7 | folder: 'TrinityX' 8 | folderUid: 'trix-dashboard-folders' 9 | type: file 10 | disableDeletion: false 11 | editable: false 12 | updateIntervalSeconds: 60 13 | options: 14 | path: "{{ grafana_dashboard_dir }}" 15 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/files/opensuse/part.rd: -------------------------------------------------------------------------------- 1 | mount -t tmpfs tmpfs /sysroot 2 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/files/opensuse/post.rd: -------------------------------------------------------------------------------- 1 | echo 'tmpfs / tmpfs defaults 0 0' >> /sysroot/etc/fstab 2 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/files/ubuntu/part.rd: -------------------------------------------------------------------------------- 1 | ls "$rootmnt" || mkdir "$rootmnt" 2 | mount -t tmpfs tmpfs "$rootmnt" 3 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/files/ubuntu/post.rd: -------------------------------------------------------------------------------- 1 | echo 'tmpfs / tmpfs defaults 0 0' >> /$rootmnt/etc/fstab 2 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: repos 4 | repos_reposdir: "{{ trix_images }}/{{ image_name }}/etc/yum.repos.d" 5 | repos: "{{ trinityx_repositories }}" 6 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/tasks/centos: -------------------------------------------------------------------------------- 1 | redhat
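
The opensuse and ubuntu part.rd/post.rd files above implement a RAM-backed root filesystem for diskless nodes, and the one-line tasks/centos file above simply aliases the shared redhat task set, as tasks/rocky does below. A minimal sketch of the boot-time effect, assuming dracut runs part.rd as a pre-mount hook and post.rd after the provisioning initrd has unpacked the image payload into /sysroot (the surrounding steps are handled by the initrd itself and are not shown here):

    mount -t tmpfs tmpfs /sysroot                              # the root filesystem lives in RAM
    # ... the provisioning initrd unpacks the node image into /sysroot ...
    echo 'tmpfs / tmpfs defaults 0 0' >> /sysroot/etc/fstab    # record the RAM root in fstab

Nothing on such a root survives a reboot; persistent state has to come from NFS mounts or local disks configured elsewhere.
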
-------------------------------------------------------------------------------- /site/roles/trinity/image-create/tasks/rocky: -------------------------------------------------------------------------------- 1 | redhat -------------------------------------------------------------------------------- /site/roles/trinity/image-create/templates/luna2.ini.j2: -------------------------------------------------------------------------------- 1 | 2 | [API] 3 | USERNAME = {{ luna_username }} 4 | PASSWORD = {{ luna_password }} 5 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/templates/opensuse-repo.j2: -------------------------------------------------------------------------------- 1 | [opensuse] 2 | name=OpenSUSE {{ opensuse_release|default('Leap 15.6') }} 3 | baseurl=https://download.opensuse.org/distribution/{{ opensuse_release|default('leap/15.6') }}/repo/oss/ 4 | {% if for_image|default(False) %} 5 | enabled=1 6 | {% else %} 7 | enabled=0 8 | {% endif %} 9 | gpgcheck=0 10 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/templates/root.txt: -------------------------------------------------------------------------------- 1 | {{ image_password }} 2 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/templates/trinity.conf.j2: -------------------------------------------------------------------------------- 1 | install_items+="{% for dr in dracut_extras %} {{ dr }} {% endfor %}" 2 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/aa64/controller-almalinux.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/image-create/vars/aa64/controller-almalinux.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/aa64/controller-centos.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/image-create/vars/aa64/controller-centos.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/aa64/controller-opensuse.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | required_packages: 4 | - "dnf-utils" 5 | - "distribution-gpg-keys" 6 | 7 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/aa64/controller-redhat.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/image-create/vars/aa64/controller-redhat.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/aa64/controller-rocky.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/image-create/vars/aa64/controller-rocky.yaml 
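
For reference, trinity.conf.j2 above renders to a dracut drop-in that uses shell assignment syntax. With a hypothetical dracut_extras list of ['grepcidr', 'lsblk'] (illustrative values, not defaults shipped by this repo), the rendered file would read:

    # dracut .conf drop-ins use shell syntax; the listed binaries get pulled into the initrd
    install_items+=" grepcidr  lsblk "
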
-------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/aa64/controller-ubuntu.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | required_packages: 4 | - "debootstrap" 5 | - "zstd" 6 | 7 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/aa64/docker-rocky.yaml: -------------------------------------------------------------------------------- 1 | docker-redhat.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/aa64/docker-ubuntu.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | required_packages: 4 | - "debootstrap" 5 | - "zstd" 6 | 7 | ubuntu_distribution_source: https://ftp.nluug.nl/pub/os/Linux/distr/ubuntu/ 8 | 9 | ubuntu_distribution_release: jammy 10 | 11 | image_core_packages: 12 | - python3 13 | - mc 14 | - e2fsprogs 15 | - xfsprogs 16 | - net-tools 17 | - tar 18 | - bzip2 19 | - lbzip2 20 | - parted 21 | - gpg 22 | - ipmitool 23 | - linux-image-generic 24 | - ssh 25 | - curl 26 | - less 27 | - aria2 28 | - vim 29 | - coreutils 30 | - tpm2-tools 31 | - grepcidr 32 | - iputils-ping 33 | - netplan.io 34 | - dosfstools 35 | - efibootmgr 36 | - rsync 37 | - lshw 38 | 39 | luna2_client_package: "https://updates.clustervision.com/trinityx/{{ trix_version }}/luna2/noarch/deb/luna2-client.deb" 40 | 41 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/aa64/image-almalinux.yaml: -------------------------------------------------------------------------------- 1 | image-redhat.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/aa64/image-rocky.yaml: -------------------------------------------------------------------------------- 1 | image-redhat.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/aa64/image-ubuntu.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | ubuntu_distribution_source: https://ftp.nluug.nl/pub/os/Linux/distr/ubuntu/ 4 | 5 | ubuntu_distribution_release: jammy 6 | 7 | image_core_packages: 8 | - python3 9 | - mc 10 | - e2fsprogs 11 | - xfsprogs 12 | - net-tools 13 | - tar 14 | - bzip2 15 | - lbzip2 16 | - parted 17 | - gpg 18 | - ipmitool 19 | - linux-image-generic 20 | - ssh 21 | - curl 22 | - less 23 | - aria2 24 | - vim 25 | - coreutils 26 | - tpm2-tools 27 | - grepcidr 28 | - iputils-ping 29 | - netplan.io 30 | - dosfstools 31 | - efibootmgr 32 | - rsync 33 | - lshw 34 | 35 | luna2_client_package: "https://updates.clustervision.com/trinityx/{{ trix_version }}/luna2/noarch/deb/luna2-client.deb" 36 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/x64/controller-almalinux.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/image-create/vars/x64/controller-almalinux.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/x64/controller-centos.yaml: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/image-create/vars/x64/controller-centos.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/x64/controller-opensuse.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | required_packages: 4 | - "dnf-utils" 5 | - "distribution-gpg-keys" 6 | 7 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/x64/controller-redhat.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/image-create/vars/x64/controller-redhat.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/x64/controller-rocky.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/image-create/vars/x64/controller-rocky.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/x64/controller-ubuntu.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | required_packages: 4 | - "debootstrap" 5 | - "zstd" 6 | 7 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/x64/docker-rocky.yaml: -------------------------------------------------------------------------------- 1 | docker-redhat.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/x64/docker-ubuntu.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | required_packages: 4 | - "debootstrap" 5 | - "zstd" 6 | 7 | ubuntu_distribution_source: https://ftp.nluug.nl/pub/os/Linux/distr/ubuntu/ 8 | 9 | ubuntu_distribution_release: jammy 10 | 11 | image_core_packages: 12 | - python3 13 | - mc 14 | - e2fsprogs 15 | - xfsprogs 16 | - net-tools 17 | - tar 18 | - bzip2 19 | - lbzip2 20 | - parted 21 | - gpg 22 | - ipmitool 23 | - linux-image-generic 24 | - ssh 25 | - curl 26 | - less 27 | - aria2 28 | - vim 29 | - coreutils 30 | - tpm2-tools 31 | - grepcidr 32 | - iputils-ping 33 | - netplan.io 34 | - dosfstools 35 | - efibootmgr 36 | - rsync 37 | - lshw 38 | 39 | luna2_client_package: "https://updates.clustervision.com/trinityx/{{ trix_version }}/luna2/noarch/deb/luna2-client.deb" 40 | 41 | -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/x64/image-almalinux.yaml: -------------------------------------------------------------------------------- 1 | image-redhat.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/x64/image-rocky.yaml: -------------------------------------------------------------------------------- 1 | image-redhat.yaml -------------------------------------------------------------------------------- /site/roles/trinity/image-create/vars/x64/image-ubuntu.yaml: 
-------------------------------------------------------------------------------- 1 | --- 2 | 3 | ubuntu_distribution_source: https://ftp.nluug.nl/pub/os/Linux/distr/ubuntu/ 4 | 5 | ubuntu_distribution_release: jammy 6 | 7 | image_core_packages: 8 | - python3 9 | - mc 10 | - e2fsprogs 11 | - xfsprogs 12 | - net-tools 13 | - tar 14 | - bzip2 15 | - lbzip2 16 | - parted 17 | - gpg 18 | - ipmitool 19 | - linux-image-generic 20 | - ssh 21 | - curl 22 | - less 23 | - aria2 24 | - vim 25 | - coreutils 26 | - tpm2-tools 27 | - grepcidr 28 | - iputils-ping 29 | - netplan.io 30 | - dosfstools 31 | - efibootmgr 32 | - rsync 33 | - lshw 34 | 35 | luna2_client_package: "https://updates.clustervision.com/trinityx/{{ trix_version }}/luna2/noarch/deb/luna2-client.deb" 36 | -------------------------------------------------------------------------------- /site/roles/trinity/image-download/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | image_download_repo: "https://updates.clustervision.com/trinityx/{{ trix_version }}/images" 4 | image_download_distribution: "Rocky-9" 5 | image_download_source: 'base' 6 | image_architecture: 'x86_64' 7 | -------------------------------------------------------------------------------- /site/roles/trinity/init-nodes/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # defaults for the init role 3 | -------------------------------------------------------------------------------- /site/roles/trinity/init-nodes/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - init 4 | -------------------------------------------------------------------------------- /site/roles/trinity/init-nodes/templates/resolv.conf: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | search {{ trix_domain }} ipmi 4 | nameserver {{ trix_ctrl_ip }} 5 | -------------------------------------------------------------------------------- /site/roles/trinity/init/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # defaults for the init role 3 | 4 | enable_selinux: false 5 | 6 | init_packages: 7 | - python3-libselinux 8 | 9 | cloud_support: ['azure', 'aws', 'gcp'] 10 | 11 | cloud_vpn_namekey: 12 | azure: vng_ip 13 | aws: vpn_tunnel1_ip 14 | gcp: vng_ip 15 | 16 | cloud_vpn_pskkey: 17 | aws: vpn_tunnel1_preshared_key 18 | 19 | -------------------------------------------------------------------------------- /site/roles/trinity/init/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - role: trix-tree 4 | -------------------------------------------------------------------------------- /site/roles/trinity/init/tasks/find-hostname.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - set_fact: 4 | trix_this_ctrl_hostname: '{{ inner_loop_var }}' 5 | when: 6 | - trix_this_ctrl_hostname is not defined 7 | - inner_loop_var != "" 8 | - ansible_hostname == inner_loop_var 9 | 10 | -------------------------------------------------------------------------------- /site/roles/trinity/init/tasks/find-interface.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - set_fact: 4 | trix_ctrl_interface: '{{ item }}' 5 | when: 6 | - 
trix_ctrl_interface is not defined 7 | - inner_loop_var != "" 8 | - hostvars[inventory_hostname]['ansible_'+item]['ipv4'] is defined 9 | - hostvars[inventory_hostname]['ansible_'+item]['ipv4']['address'] == inner_loop_var 10 | with_items: 11 | - "{{ ansible_interfaces }}" 12 | 13 | - set_fact: 14 | trix_ctrl_interface: '{{ item }}' 15 | when: 16 | - trix_ctrl_interface is not defined 17 | - inner_loop_var != "" 18 | - hostvars[inventory_hostname]['ansible_'+item]['ipv4_secondaries'] is defined 19 | - hostvars[inventory_hostname]['ansible_'+item]['ipv4_secondaries']|selectattr('address','equalto',inner_loop_var)|list|length > 0 20 | with_items: 21 | - "{{ ansible_interfaces }}" 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /site/roles/trinity/kubernetes/templates/k3s-server-nodes-cleaner.service.j2: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Housekeeping script to delete k3s nodes that have been replaced 3 | After=k3s-server.service 4 | Wants=k3s-server.service 5 | 6 | [Service] 7 | Type=oneshot 8 | ExecStart=/bin/bash -c 'k3s kubectl get nodes -o wide | awk '\''NR>1 {split($1, a, "-"); if ($2 == "Ready") ready[a[1]] = 1; else if ($2 == "NotReady" && ready[a[1]]) print $1}'\'' | xargs -r -n1 k3s kubectl delete node' -------------------------------------------------------------------------------- /site/roles/trinity/kubernetes/templates/k3s-server-nodes-cleaner.timer.j2: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Run housekeeping script to delete k3s nodes that have been replaced 3 | 4 | [Timer] 5 | OnUnitActiveSec=1h 6 | -------------------------------------------------------------------------------- /site/roles/trinity/kubernetes/templates/k3s-server.service.env.j2: -------------------------------------------------------------------------------- 1 | KUBERNETES_SQL_PWD={{kubernetes_sql_pwd}} -------------------------------------------------------------------------------- /site/roles/trinity/local_repo/defaults/main.yml: -------------------------------------------------------------------------------- 1 | local_install: false 2 | trinityx_iso_repo: "{{ trix_repos }}/trinityx" 3 | -------------------------------------------------------------------------------- /site/roles/trinity/local_repo/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: reload nginx in local_repo role 3 | service: 4 | name: nginx 5 | state: reloaded 6 | -------------------------------------------------------------------------------- /site/roles/trinity/local_repo/templates/tmp_local.repo.j2: -------------------------------------------------------------------------------- 1 | [tmp_local] 2 | name=tmp_local 3 | baseurl=file://{{ trinityx_iso_repo }} 4 | enabled=1 5 | gpgcheck=0 6 | -------------------------------------------------------------------------------- /site/roles/trinity/local_repo/templates/trinityx-repo.conf.j2: -------------------------------------------------------------------------------- 1 | # Serving TrinityX provided repositories 2 | 3 | server { 4 | listen {{ repos_port }} default_server; 5 | server_name _; 6 | 7 | location /{{ trix_repos|basename() }} { 8 | root {{ trix_repos|regex_replace('/[^/]*$','') }}; 9 | autoindex on; 10 | } 11 | 12 | } 13 | 14 | --------------------------------------------------------------------------------
/site/roles/trinity/logrotate/defaults/main.yml: -------------------------------------------------------------------------------- 1 | 2 | logrotate_files: 3 | - trinityx 4 | - slurm 5 | - luna 6 | - aria2c 7 | - prometheus 8 | - alertx 9 | 10 | -------------------------------------------------------------------------------- /site/roles/trinity/logrotate/files/alertx: -------------------------------------------------------------------------------- 1 | /var/log/alertx/*.log { 2 | compress 3 | copytruncate 4 | weekly 5 | rotate 4 6 | notifempty 7 | } 8 | -------------------------------------------------------------------------------- /site/roles/trinity/logrotate/files/aria2c: -------------------------------------------------------------------------------- 1 | /var/log/aria2c.log { 2 | size 200M 3 | missingok 4 | compress 5 | copytruncate 6 | rotate 8 7 | postrotate 8 | systemctl restart aria2c 2> /dev/null || true 9 | endscript 10 | } 11 | -------------------------------------------------------------------------------- /site/roles/trinity/logrotate/files/influxdb: -------------------------------------------------------------------------------- 1 | /var/log/influxdb/influxd.log { 2 | daily 3 | rotate 7 4 | missingok 5 | dateext 6 | copytruncate 7 | compress 8 | } 9 | -------------------------------------------------------------------------------- /site/roles/trinity/logrotate/files/luna: -------------------------------------------------------------------------------- 1 | /var/log/luna/*.log { 2 | compress 3 | copytruncate 4 | weekly 5 | rotate 4 6 | notifempty 7 | } 8 | -------------------------------------------------------------------------------- /site/roles/trinity/logrotate/files/prometheus: -------------------------------------------------------------------------------- 1 | /var/log/prometheus/*.log 2 | { 3 | size 200M 4 | rotate 6 5 | missingok 6 | dateext 7 | copytruncate 8 | notifempty 9 | compress 10 | } 11 | -------------------------------------------------------------------------------- /site/roles/trinity/logrotate/files/sensu: -------------------------------------------------------------------------------- 1 | /var/log/sensu/sensu-client.log { 2 | rotate 7 3 | daily 4 | missingok 5 | notifempty 6 | sharedscripts 7 | compress 8 | postrotate 9 | kill -USR2 `cat /var/run/sensu/sensu-client.pid 2> /dev/null` 2> /dev/null || true 10 | endscript 11 | } 12 | 13 | /var/log/sensu/sensu-server.log { 14 | rotate 7 15 | daily 16 | missingok 17 | notifempty 18 | sharedscripts 19 | compress 20 | postrotate 21 | kill -USR2 `cat /var/run/sensu/sensu-server.pid 2> /dev/null` 2> /dev/null || true 22 | endscript 23 | } 24 | 25 | /var/log/sensu/sensu-api.log { 26 | rotate 7 27 | daily 28 | missingok 29 | notifempty 30 | sharedscripts 31 | compress 32 | postrotate 33 | kill -USR2 `cat /var/run/sensu/sensu-api.pid 2> /dev/null` 2> /dev/null || true 34 | endscript 35 | } 36 | -------------------------------------------------------------------------------- /site/roles/trinity/logrotate/files/slurm: -------------------------------------------------------------------------------- 1 | /var/log/slurm/*.log { 2 | compress 3 | missingok 4 | nocopytruncate 5 | nodelaycompress 6 | nomail 7 | notifempty 8 | noolddir 9 | rotate 5 10 | sharedscripts 11 | size=5M 12 | create 640 slurm root 13 | postrotate 14 | pkill -x --signal SIGUSR2 slurmctld 15 | pkill -x --signal SIGUSR2 slurmd 16 | pkill -x --signal SIGUSR2 slurmdbd 17 | exit 0 18 | endscript 19 | } 20 | 
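
The logrotate stanzas above can be checked on a deployed node before the scheduled run picks them up. A minimal sketch, assuming the files have been installed under /etc/logrotate.d as this role does:

    # Debug mode parses the stanza and reports what would rotate, without touching any logs
    logrotate -d /etc/logrotate.d/slurm
    # Forcing one rotation confirms the postrotate SIGUSR2 makes the Slurm daemons reopen their logs
    logrotate -f /etc/logrotate.d/slurm
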
-------------------------------------------------------------------------------- /site/roles/trinity/logrotate/files/trinityx: -------------------------------------------------------------------------------- 1 | /var/log/cluster-messages/*.messages { 2 | size 200M 3 | missingok 4 | compress 5 | copytruncate 6 | rotate 8 7 | postrotate 8 | /bin/kill -HUP $(cat /var/run/syslogd.pid 2> /dev/null) 2> /dev/null || true 9 | endscript 10 | } 11 | -------------------------------------------------------------------------------- /site/roles/trinity/logrotate/files/uchiwa: -------------------------------------------------------------------------------- 1 | /var/log/uchiwa.log /var/log/uchiwa.err { 2 | rotate 7 3 | daily 4 | missingok 5 | notifempty 6 | sharedscripts 7 | copytruncate 8 | compress 9 | } 10 | -------------------------------------------------------------------------------- /site/roles/trinity/logrotate/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: restart logrotate 4 | service: 5 | name: logrotate 6 | state: restarted 7 | 8 | -------------------------------------------------------------------------------- /site/roles/trinity/logrotate/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Copy logrotate configuration files 3 | copy: 4 | src: '{{ item }}' 5 | dest: '/etc/logrotate.d/{{ item }}' 6 | force: "no" 7 | with_items: "{{ logrotate_files }}" 8 | 9 | - name: Changing owner and group for logrotate config 10 | file: 11 | path: '/etc/logrotate.d/{{ item }}' 12 | owner: root 13 | group: root 14 | with_items: "{{ logrotate_files }}" 15 | 16 | 17 | - name: Restart logrotate 18 | assert: {that: true, quiet: true} 19 | notify: restart logrotate 20 | changed_when: true 21 | when: ansible_facts['os_family'] == "RedHat" and ansible_facts['distribution_major_version']|int > 8 22 | -------------------------------------------------------------------------------- /site/roles/trinity/luna2/files/aarch64/libluna-fakeuname.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/luna2/files/aarch64/libluna-fakeuname.so -------------------------------------------------------------------------------- /site/roles/trinity/luna2/files/ipxe.efi: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/luna2/files/ipxe.efi -------------------------------------------------------------------------------- /site/roles/trinity/luna2/files/luna2-master.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=sets current host as Luna master 3 | After=luna2-daemon.service 4 | 5 | [Service] 6 | Type=simple 7 | ExecStart=/usr/sbin/lmaster -s 8 | TimeoutStartSec=0 9 | RemainAfterExit=yes 10 | 11 | [Install] 12 | WantedBy=default.target 13 | -------------------------------------------------------------------------------- /site/roles/trinity/luna2/files/tftp: -------------------------------------------------------------------------------- 1 | # default: off 2 | # description: The tftp server serves files using the trivial file transfer \ 3 | # protocol. 
The tftp protocol is often used to boot diskless \ 4 | # workstations, download configuration files to network-aware printers, \ 5 | # and to start the installation process for some operating systems. 6 | service tftp 7 | { 8 | socket_type = dgram 9 | protocol = udp 10 | wait = yes 11 | user = root 12 | server = /usr/sbin/in.tftpd 13 | server_args = -s /tftpboot 14 | disable = no 15 | per_source = 11 16 | cps = 100 2 17 | flags = IPv4 18 | } 19 | 20 | -------------------------------------------------------------------------------- /site/roles/trinity/luna2/files/undionly.kpxe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/luna2/files/undionly.kpxe -------------------------------------------------------------------------------- /site/roles/trinity/luna2/files/x86_64/libluna-fakeuname.so: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/luna2/files/x86_64/libluna-fakeuname.so -------------------------------------------------------------------------------- /site/roles/trinity/luna2/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for luna 3 | 4 | - name: restart xinetd 5 | service: 6 | name: xinetd 7 | state: restarted 8 | 9 | - name: restart dhcpd 10 | service: 11 | name: dhcpd 12 | state: restarted 13 | 14 | - name: restart nginx 15 | service: 16 | name: nginx 17 | state: restarted 18 | 19 | - name: restart named 20 | service: 21 | name: named 22 | state: restarted 23 | 24 | - name: restart luna2-daemon 25 | service: 26 | name: luna2-daemon 27 | state: restarted 28 | 29 | - name: reload firewalld 30 | service: 31 | name: firewalld 32 | state: reloaded 33 | 34 | - name: fix firewall zones 35 | shell: /tmp/fix-firewalld-zones.sh 36 | ignore_errors: true 37 | listen: reload firewalld 38 | -------------------------------------------------------------------------------- /site/roles/trinity/luna2/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - nginx 4 | - role: trix-tree 5 | # - role: repos 6 | -------------------------------------------------------------------------------- /site/roles/trinity/luna2/templates/firewalld_direct.xml.j2: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="utf-8"?> 2 | <direct> 3 | <rule priority="0" table="nat" ipv="ipv4" chain="POSTROUTING">-p udp -o {{ luna_prov_nic.stdout }} -m iprange --dst-range {{ luna.dhcp.start_ip }}-{{ luna.dhcp.end_ip }} -j SNAT --to {{ luna.cluster.frontend_address }}</rule> 4 | </direct> 5 | -------------------------------------------------------------------------------- /site/roles/trinity/luna2/templates/luna2-cli-wrapper.j2: -------------------------------------------------------------------------------- 1 | #!{{ trix_local }}/python/bin/python3 2 | #/usr/bin/env python3 3 | # -*- coding: utf-8 -*- 4 | 5 | from luna.cli import Cli 6 | 7 | CLI = Cli().main() 8 | -------------------------------------------------------------------------------- /site/roles/trinity/luna2/templates/luna2-daemon-service.j2: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description = Luna 2 Daemon 3 | After = network.target 4 | 5 | [Service] 6 | PIDFile = /run/trinity/luna.pid 7 | WorkingDirectory = {{ trix_luna }}/daemon 8
| ExecStartPre = /bin/mkdir /run/trinity 9 | ExecStart = /bin/bash -c "source {{ trix_local }}/python/bin/activate && {{ trix_local }}/python/bin/gunicorn --config {{ trix_luna }}/daemon/config/gunicorn.py 'luna:daemon' --pid /run/trinity/luna.pid" 10 | ExecReload = /bin/kill -s HUP $MAINPID 11 | ExecStop = /bin/kill -s TERM $MAINPID 12 | ExecStopPost = /bin/rm -rf /run/trinity 13 | StandardOutput=null 14 | 15 | [Install] 16 | 17 | WantedBy = multi-user.target 18 | -------------------------------------------------------------------------------- /site/roles/trinity/luna2/templates/luna2.ini.j2: -------------------------------------------------------------------------------- 1 | 2 | [LOGGER] 3 | LEVEL = info 4 | LOGFILE = /var/log/luna/luna2-cli.log 5 | ; Logging level can only be one of: debug, info, warning, error, critical 6 | 7 | [API] 8 | USERNAME = {{ luna_username }} 9 | PASSWORD = {{ luna_password }} 10 | EXPIRY = 1h 11 | SECRET_KEY = {{ luna_secret_key }} 12 | ENDPOINT = {{ ansible_hostname }}:7050 13 | # optionally, set a different endpoint: 14 | #ENDPOINT = {{ trix_ctrl_hostname }}.{{ trix_domain }}:7050 15 | PROTOCOL = {{ luna_protocol }} 16 | VERIFY_CERTIFICATE = {{ luna_verify_certificate }} 17 | -------------------------------------------------------------------------------- /site/roles/trinity/luna2/vars/CentOS8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/luna2/vars/CentOS8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/luna2/vars/CentOS9.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | required_packages: 4 | - dhcp-server 5 | - sqlite 6 | - dos2unix 7 | - tftp-server 8 | 9 | enable_services: 10 | - tftp 11 | - nginx 12 | - dhcpd 13 | 14 | start_services: 15 | - tftp 16 | - nginx 17 | - dhcpd 18 | 19 | 20 | -------------------------------------------------------------------------------- /site/roles/trinity/luna2/vars/RedHat8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/luna2/vars/RedHat8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/luna2/vars/RedHat9.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | required_packages: 4 | - dhcp-server 5 | - sqlite 6 | - dos2unix 7 | - tftp-server 8 | 9 | enable_services: 10 | - tftp 11 | - nginx 12 | - dhcpd 13 | 14 | start_services: 15 | - tftp 16 | - nginx 17 | - dhcpd 18 | 19 | -------------------------------------------------------------------------------- /site/roles/trinity/luna2/vars/Rocky8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/luna2/vars/Rocky8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/luna2/vars/Rocky9.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | required_packages: 4 | - dhcp-server 5 | - sqlite 6 | - dos2unix 7 | - tftp-server 8 | 9 | enable_services: 10 | - nginx 11 | - tftp 12 | - dhcpd 13 | 14 | start_services: 15 | - nginx 16
| - tftp 17 | - dhcpd 18 | 19 | -------------------------------------------------------------------------------- /site/roles/trinity/mariadb/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | mariadb_packages: 4 | - mariadb 5 | - mariadb-server 6 | # - python2-PyMySQL 7 | - python3-PyMySQL 8 | 9 | mariadb_db_path: '/var/lib/mysql' 10 | -------------------------------------------------------------------------------- /site/roles/trinity/mariadb/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for mariadb 3 | 4 | - name: restart mariadb 5 | service: 6 | name: mariadb 7 | state: restarted 8 | retries: 10 9 | delay: 15 10 | when: primary|default(True) 11 | -------------------------------------------------------------------------------- /site/roles/trinity/mariadb/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/mariadb/templates/etc_my.cnf.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | [mysqld] 4 | datadir={{ mariadb_db_path }} 5 | socket=/var/lib/mysql/mysql.sock 6 | symbolic-links=0 7 | 8 | innodb_file_per_table=1 9 | innodb_buffer_pool_size=1024M 10 | innodb_log_file_size=64M 11 | innodb_lock_wait_timeout=900 12 | 13 | [mysqld_safe] 14 | log-error=/var/log/mariadb/mariadb.log 15 | pid-file=/var/run/mariadb/mariadb.pid 16 | 17 | # 18 | # include all files from the config directory 19 | # 20 | !includedir /etc/my.cnf.d 21 | 22 | -------------------------------------------------------------------------------- /site/roles/trinity/mariadb/templates/my.cnf.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | [client] 4 | socket=/var/lib/mysql/mysql.sock 5 | user=root 6 | password={{ mysql_root_pwd }} 7 | 8 | [mysql] 9 | socket=/var/lib/mysql/mysql.sock 10 | user=root 11 | password={{ mysql_root_pwd }} 12 | 13 | [mysqldump] 14 | socket=/var/lib/mysql/mysql.sock 15 | user=root 16 | password={{ mysql_root_pwd }} 17 | -------------------------------------------------------------------------------- /site/roles/trinity/nfs-mounts/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for nfs 3 | 4 | nfs_packages: 5 | - nfs-utils 6 | 7 | nfs_enable_rdma: false 8 | nfs_mounts: 9 | - path: '/trinity/shared' 10 | remote: 'controller.cluster:/trinity/shared' 11 | options: 'defaults,nfsvers=4,ro,retrans=4,_netdev' 12 | 13 | nfs_remote_mask: [] 14 | nfs_prefix_path: '' 15 | nfs_use_mask: false 16 | nfs_only_xmount: false 17 | -------------------------------------------------------------------------------- /site/roles/trinity/nfs-mounts/vars/Debian.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | nfs_packages: 4 | - nfs4-acl-tools 5 | - nfs-common 6 | 7 | -------------------------------------------------------------------------------- /site/roles/trinity/nfs-mounts/vars/RedHat.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/nfs-mounts/vars/RedHat.yaml 
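
The default nfs_mounts entry in the nfs-mounts defaults above corresponds to the following one-off manual mount, shown only to make the option string concrete; in a real deployment the role manages the mounts itself:

    mkdir -p /trinity/shared
    # _netdev matters in fstab (wait for the network before mounting); mount(8) accepts and ignores it here
    mount -t nfs -o defaults,nfsvers=4,ro,retrans=4,_netdev \
        controller.cluster:/trinity/shared /trinity/shared
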
-------------------------------------------------------------------------------- /site/roles/trinity/nfs/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for nfs 3 | 4 | nfs_packages: 5 | - nfs-utils 6 | 7 | nfs_rpccount: 256 8 | nfs_enable_rdma: false 9 | 10 | nfs_exports: [] 11 | 12 | nfs_remote_mask: [] 13 | nfs_prefix_path: '' 14 | nfs_use_mask: false 15 | -------------------------------------------------------------------------------- /site/roles/trinity/nfs/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for nfs 3 | 4 | # RH7 uses a weird configuration system for NFS. At boot the 5 | # nfs-config.service unit file processes /etc/sysconfig/nfs and writes the 6 | # results in /run/sysconfig/nfs-utils, which is used by the NFS server. 7 | # I'm not really sure about the logic behind it. What it means for us though 8 | # is that we have to restart that service after updating the config file. 9 | - name: restart nfs-config 10 | service: 11 | name: nfs-config 12 | state: restarted 13 | when: primary|default(True) 14 | -------------------------------------------------------------------------------- /site/roles/trinity/nfs/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/nfs/templates/exports.j2: -------------------------------------------------------------------------------- 1 | # Trinity NFS exports 2 | 3 | {% for export in new_nfs_exports %} 4 | {% if 'path' in export %} 5 | {% if 'type' in export %} 6 | {% if export['type'] != 'manual' and export['type'] != 'nfs' %} 7 | {{ export['path'] }} {{ export['options']|default('') }} 8 | {% endif %} 9 | {% else %} 10 | {{ export['path'] }} {{ export['options']|default('') }} 11 | {% endif %} 12 | {% endif %} 13 | {% endfor %} 14 | 15 | -------------------------------------------------------------------------------- /site/roles/trinity/nginx/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for nginx 3 | 4 | nginx_packages: 5 | - nginx 6 | -------------------------------------------------------------------------------- /site/roles/trinity/nginx/files/ssl.conf: -------------------------------------------------------------------------------- 1 | # From https://mozilla.github.io/server-side-tls/ssl-config-generator/ 2 | 3 | ssl_session_timeout 1d; 4 | ssl_session_cache shared:SSL:50m; 5 | ssl_session_tickets off; 6 | 7 | # modern configuration. tweak to your needs. 
8 | ssl_protocols TLSv1.2; 9 | ssl_ciphers 'ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256'; 10 | ssl_prefer_server_ciphers on; 11 | 12 | # HSTS (ngx_http_headers_module is required) (15768000 seconds = 6 months) 13 | add_header Strict-Transport-Security max-age=15768000; 14 | 15 | # OCSP Stapling --- 16 | # fetch OCSP records from URL in ssl_certificate and cache them 17 | ssl_stapling on; 18 | ssl_stapling_verify on; 19 | -------------------------------------------------------------------------------- /site/roles/trinity/nginx/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for nginx 3 | 4 | - name: reload nginx in nginx role 5 | service: 6 | name: nginx 7 | state: reloaded 8 | when: primary|default(True) 9 | -------------------------------------------------------------------------------- /site/roles/trinity/nginx/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/nginx/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks file for nginx 3 | 4 | - name: Install nginx packages 5 | yum: 6 | name: '{{ nginx_packages }}' 7 | state: present 8 | tags: install-only 9 | retries: "{{ rpm_retries | default(3) }}" 10 | delay: "{{ rpm_delay | default(15) }}" 11 | 12 | - name: Ensure /etc/nginx/conf.d exists 13 | file: 14 | path: '/etc/nginx/conf.d' 15 | owner: root 16 | group: root 17 | state: directory 18 | 19 | - name: Copy configuration file 20 | copy: 21 | src: "nginx.conf" 22 | dest: "/etc/nginx/nginx.conf" 23 | backup: true 24 | notify: reload nginx in nginx role 25 | 26 | - name: Add default ssl configuration 27 | copy: 28 | src: "ssl.conf" 29 | dest: "/etc/nginx/conf.d/ssl.conf" 30 | notify: reload nginx in nginx role 31 | 32 | - name: Enable nginx service 33 | service: 34 | name: nginx 35 | enabled: true 36 | state: started 37 | 38 | -------------------------------------------------------------------------------- /site/roles/trinity/no_proxy/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | no_proxy: 4 | - '{{ ansible_fqdn }}' 5 | 6 | -------------------------------------------------------------------------------- /site/roles/trinity/no_proxy/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Install profile for no_proxy 4 | template: 5 | src: 'z_noproxy.sh.j2' 6 | dest: '/etc/profile.d/z_noproxy.sh' 7 | 8 | -------------------------------------------------------------------------------- /site/roles/trinity/no_proxy/templates/z_noproxy.sh.j2: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | export no_proxy="{{ no_proxy|select('!=', '')|join(',') }}" 4 | 5 | -------------------------------------------------------------------------------- /site/roles/trinity/nscd/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart nscd 3 | service: 4 | name: nscd.service 5 | state: restarted 6 | when: not in_image 7 | 
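To make the filter chain in z_noproxy.sh.j2 above concrete: `select('!=', '')` keeps only the non-empty entries and `join(',')` renders them as a single comma-separated value. With a hypothetical override of the `no_proxy` default, for example:

# Hypothetical group_vars override; the values are illustrative:
no_proxy:
  - '{{ ansible_fqdn }}'
  - ''
  - 'controller.cluster'
# select('!=', '') drops the empty entry, so on a host named
# ctrl.cluster the profile script renders as:
#   export no_proxy="ctrl.cluster,controller.cluster"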
-------------------------------------------------------------------------------- /site/roles/trinity/nscd/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Configure nscd daemon 3 | 4 | - name: Install nscd 5 | yum: 6 | name: nscd 7 | state: present 8 | tags: install-only 9 | retries: "{{ rpm_retries | default(3) }}" 10 | delay: "{{ rpm_delay | default(15) }}" 11 | 12 | - name: Put config file in place 13 | template: 14 | src: "nscd.conf.j2" 15 | dest: "/etc/nscd.conf" 16 | notify: 17 | - restart nscd 18 | 19 | - name: Enable nscd service 20 | service: 21 | name: nscd 22 | enabled: "yes" 23 | -------------------------------------------------------------------------------- /site/roles/trinity/obol/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for obol 3 | 4 | obol_packages: 5 | - python3-pip 6 | - python3-ldap 7 | - openldap-devel 8 | 9 | users_home_path: '/home' 10 | obol_conf_path: '/etc' 11 | ldap_host: localhost 12 | -------------------------------------------------------------------------------- /site/roles/trinity/obol/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/obol/templates/obol.conf.j2: -------------------------------------------------------------------------------- 1 | [users] 2 | 3 | home = {{ users_home_path }} 4 | shell = /bin/bash 5 | 6 | [ldap] 7 | 8 | host = ldaps://{{ ldap_host }} 9 | bind_dn = cn=manager,dc=local 10 | bind_pass = {{ openldap_root_pwd }} 11 | base_dn = dc=local 12 | 13 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/defaults/alertx.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | alertx_noarch_nodejs_repository: "https://updates.clustervision.com/trinityx/{{ trix_version }}/alertx/noarch/nodejs/{{ trix_stream }}" 4 | alertx_gui_nodejs: alertx-gui-1.0.latest.tar.gz 5 | 6 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/files/codeserver/app/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/ood-apps/files/codeserver/app/icon.png -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/files/codeserver/app/submit.yml.erb: -------------------------------------------------------------------------------- 1 | --- 2 | batch_connect: 3 | template: "basic" 4 | conn_params: 5 | - codeserver_version 6 | script_wrapper: | 7 | <%- if !modules.blank? -%> 8 | # Purge the module environment and run the required modules 9 | module purge && module load <%= modules %> 10 | <%- end -%> 11 | # Run the script 12 | %s 13 | 14 | 15 | 16 | script: 17 | native: 18 | - "-N" 19 | - "<%= bc_num_slots %>" 20 | <% if !cores.blank? %> 21 | - "--mincpus" 22 | - "<%= cores %>" 23 | <% end %> 24 | <% if !memory.blank? %> 25 | - "--mem" 26 | - "<%= memory %>" 27 | <% end %> 28 | <% if !gpus.blank? %> 29 | - "--gpus" 30 | - "<%= gpus %>" 31 | <% end %> 32 | <% if !slurm_extra_args.blank? 
%> 33 | <% for arg in slurm_extra_args.split() %> 34 | - "<%= arg %>" 35 | <% end %> 36 | <% end %> 37 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/files/codeserver/app/template/after.sh.erb: -------------------------------------------------------------------------------- 1 | # Wait for the Code Server to start 2 | echo "$(date): Waiting for Code Server to open port ${port}..." 3 | 4 | if wait_until_port_used "${host}:${port}" 600; then 5 | echo "$(date): Discovered code-server listening on port ${port}!" 6 | else 7 | echo "$(date): Timed out waiting for Code Server to open port ${port}!" 8 | clean_up 1 9 | fi 10 | 11 | sleep 2 12 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/files/codeserver/app/template/before.sh.erb: -------------------------------------------------------------------------------- 1 | # Export the module function if it exists 2 | [[ $(type -t module) == "function" ]] && export -f module 3 | 4 | # Find available port to run server on 5 | export port=$(find_port ${host}) 6 | 7 | # Export compute node the script is running on 8 | export host="${host}" 9 | 10 | # Generate SHA1 encrypted password (requires OpenSSL installed) 11 | export password="$(create_passwd 16)" 12 | 13 | export codeserver_version="<%= context.codeserver_version %>" -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/files/jupyter/app/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/ood-apps/files/jupyter/app/icon.png -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/files/jupyter/app/submit.yml.erb: -------------------------------------------------------------------------------- 1 | --- 2 | batch_connect: 3 | template: "basic" 4 | script_wrapper: | 5 | <%- if !modules.blank? -%> 6 | # Purge the module environment and run the required modules 7 | module purge && module load <%= modules %> 8 | <%- end -%> 9 | # Run the script 10 | %s 11 | 12 | 13 | 14 | script: 15 | native: 16 | - "-N" 17 | - "<%= bc_num_slots %>" 18 | <% if !cores.blank? %> 19 | - "--mincpus" 20 | - "<%= cores %>" 21 | <% end %> 22 | <% if !memory.blank? %> 23 | - "--mem" 24 | - "<%= memory %>" 25 | <% end %> 26 | <% if !gpus.blank? %> 27 | - "--gpus" 28 | - "<%= gpus %>" 29 | <% end %> 30 | <% if !slurm_extra_args.blank? %> 31 | <% for arg in slurm_extra_args.split() %> 32 | - "<%= arg %>" 33 | <% end %> 34 | <% end %> 35 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/files/jupyter/app/template/after.sh: -------------------------------------------------------------------------------- 1 | # Wait for the Jupyter Notebook server to start 2 | echo "Waiting for Jupyter Notebook server to open port ${port}..." 3 | if wait_until_port_used "${host}:${port}" 60; then 4 | echo "Discovered Jupyter Notebook server listening on port ${port}!" 5 | else 6 | echo "Timed out waiting for Jupyter Notebook server to open port ${port}!" 
; exit 1 7 | fi 8 | sleep 2 9 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/files/jupyter/app/template/script.sh.erb: -------------------------------------------------------------------------------- 1 | #!/bin/bash -l 2 | 3 | # Set working directory to home directory 4 | cd "${HOME}" 5 | 6 | # 7 | # Start Jupyter Notebook Server 8 | # 9 | 10 | <%- unless context.modules.blank? -%> 11 | # Purge the module environment to avoid conflicts 12 | module purge 13 | 14 | # Load the required modules 15 | module load <%= context.modules %> 16 | <%- end -%> 17 | 18 | # Launch the Jupyter Notebook Server 19 | jupyter notebook --config="${CONFIG_FILE}" <%= context.extra_jupyter_args %> 20 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/files/jupyter/app/view.html.erb: -------------------------------------------------------------------------------- 1 |
[lines 2-6: HTML markup lost in extraction; the view most likely rendered the form/button that connects to the running Jupyter Notebook server]
7 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/tasks/alertx.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - block: 4 | - name: Load alertx variables 5 | include_vars: "../defaults/alertx.yml" 6 | 7 | - name: Download nodejs package 8 | get_url: 9 | url: "{{ alertx_noarch_nodejs_repository }}/{{ alertx_gui_nodejs }}" 10 | dest: "{{ trix_ood }}/3.0/alertx/{{ alertx_gui_nodejs }}" 11 | 12 | - name: Unpack nodejs files 13 | unarchive: 14 | src: "{{ trix_ood }}/3.0/alertx/{{ alertx_gui_nodejs }}" 15 | dest: "{{ trix_ood }}/3.0/alertx/app/assets/" 16 | keep_newer: false # overwrite existing files, even if they are newer 17 | 18 | - name: Cleanup nodejs package 19 | file: 20 | path: "{{ trix_ood }}/3.0/alertx/{{ alertx_gui_nodejs }}" 21 | state: absent 22 | when: not in_image 23 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/tasks/infiniband.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Run task for OOD infiniband app 3 | when: 4 | - not in_image 5 | block: 6 | 7 | - name: Set permissions for infiniband-related apps 8 | ansible.builtin.file: 9 | path: /dev/infiniband/umad0 10 | mode: "0660" 11 | owner: root 12 | group: "{{ ood_apps_admin_group }}" 13 | when: "'/dev/infiniband/umad0' is exists" 14 | 15 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/tasks/login-post.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Post login image for OOD installation 3 | when: in_login_image|default(False) 4 | block: 5 | - name: Remove '{{ ood_apps_admin_group }}' group 6 | group: 7 | name: '{{ ood_apps_admin_group }}' 8 | gid: '150' 9 | system: 'yes' 10 | state: absent 11 | 12 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Include pre-setup tasks for login image 4 | ansible.builtin.include_tasks: 5 | file: login-pre.yml 6 | 7 | - name: Download and link trinity apps 8 | ansible.builtin.include_tasks: 9 | file: trinity.yml 10 | 11 | - name: Run apps tasks 12 | ansible.builtin.include_tasks: 13 | file: app.yml 14 | loop: "{{ ood_apps }}" 15 | loop_control: 16 | loop_var: app 17 | 18 | - name: Include post-setup tasks for login image 19 | ansible.builtin.include_tasks: 20 | file: login-post.yml 21 | 22 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/tasks/passwd.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Ensure Dashboard directory exists 4 | file: 5 | path: /etc/ood/config/apps/dashboard 6 | state: directory 7 | 8 | - name: Create env file for passwd 9 | copy: 10 | dest: /etc/ood/config/apps/dashboard/env 11 | content: "OOD_DASHBOARD_PASSWD_URL=/pun/sys/trinity_passwd" 12 | 13 | - name: Restart services 14 | service: 15 | name: "{{ passwd_item }}" 16 | state: restarted 17 | loop_control: 18 | loop_var: passwd_item 19 | with_items: 20 | - httpd 21 | - nginx 22 | when: not in_image
23 | 24 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/alertmanager/app/manifest.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | name: Alertmanager 3 | description: |- 4 | Alertmanager Dashboard for cluster error alerts 5 | category: Monitoring 6 | icon: fa://bell 7 | url: {{'https' if ood_enable_ssl else 'http'}}://{{trix_external_fqdn}}:9093 8 | external_link: true 9 | 10 | tile: 11 | title: "Alertmanager" 12 | sub_caption: | 13 | Alertmanager Dashboard for cluster error alerts -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/codeserver/app/manifest.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | name: Code Server 3 | category: Interactive Apps 4 | subcategory: Servers 5 | role: batch_connect 6 | description: | 7 | This app will launch a [Code Server] server using the available batch_connect backend. 8 | 9 | 10 | tile: 11 | title: "Code Server" 12 | sub_caption: | 13 | Code Server job 14 | 15 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/desktop/app/manifest.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | name: Desktop 3 | icon: fa://desktop 4 | category: Interactive Apps 5 | subcategory: Desktops 6 | role: batch_connect 7 | description: | 8 | This app will launch an interactive desktop on one or more compute nodes. You 9 | will have full access to the resources these nodes provide. This is analogous 10 | to an interactive batch job. 11 | 12 | tile: 13 | title: "Desktop" 14 | sub_caption: | 15 | VNC Desktop job -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/desktop/module.lua.j2: -------------------------------------------------------------------------------- 1 | help([[ 2 | This module activates an environment for TurboVNC 3 | ]]) 4 | 5 | prepend_path("PATH", "/opt/TurboVNC/bin") 6 | setenv("WEBSOCKIFY_CMD", "/usr/bin/websockify") 7 | setenv("XDG_SESSION_TYPE", "x11") 8 | setenv("GDK_BACKEND", "x11") 9 | 10 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/desktop/submit.yml.erb.j2: -------------------------------------------------------------------------------- 1 | --- 2 | batch_connect: 3 | template: "vnc" 4 | script_wrapper: | 5 | <%- if !modules.blank? -%> 6 | # Purge the module environment and run the required modules 7 | module purge && module load <%= modules %> 8 | <%- end -%> 9 | # Run the script 10 | %s 11 | 12 | 13 | 14 | script: 15 | native: 16 | - "-N" 17 | - "<%= bc_num_slots %>" 18 | <% if !cores.blank? %> 19 | - "--mincpus" 20 | - "<%= cores %>" 21 | <% end %> 22 | <% if !memory.blank? %> 23 | - "--mem" 24 | - "<%= memory %>" 25 | <% end %> 26 | <% if !gpus.blank? %> 27 | - "--gpus" 28 | - "<%= gpus %>" 29 | <% end %> 30 | <% if !slurm_extra_args.blank? 
%> 31 | <% for arg in slurm_extra_args.split() %> 32 | - "<%= arg %>" 33 | <% end %> 34 | <% end %> 35 | 36 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/grafana/app/manifest.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | name: Grafana 3 | description: |- 4 | Grafana Dashboard for cluster statistics 5 | category: Monitoring 6 | icon: fa://chart-line 7 | url: {{'https' if ood_enable_ssl else 'http'}}://{{trix_external_fqdn}}:3000 8 | external_link: true 9 | 10 | tile: 11 | title: "Grafana" 12 | sub_caption: | 13 | Grafana Dashboard for cluster statistics -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/jupyter/app/manifest.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | name: Jupyter Notebook 3 | category: Interactive Apps 4 | subcategory: Servers 5 | role: batch_connect 6 | description: | 7 | This app will launch a [Jupyter Notebook] server using the available batch_connect backend. 8 | 9 | tile: 10 | title: "Jupyter Notebook" 11 | sub_caption: | 12 | Jupyter Notebook job 13 | 14 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/login/login-node-ood-prepare-script.j2: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | {% for app in ood_apps %} 4 | echo 'Changing group ownership for {{ app.name }}' 5 | chgrp {{ app.group | default(ood_apps_admin_group) }} {{ trix_local }}/ondemand/3.0/{{ app.name }} 6 | {% endfor %} 7 | chgrp {{ ood_apps_admin_group }} {{ trix_local }}/ondemand/3.0/config/luna.ini 8 | 9 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/login/login-node-ood-prepare-service.j2: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description = Login node OOD preparation 3 | After = network.target sssd.service 4 | Before = ondemand-dex.service httpd.service 5 | 6 | [Service] 7 | Type = oneshot 8 | ExecStart = {{ trix_sbin }}/login-node-ood-prepare.sh 9 | 10 | [Install] 11 | WantedBy = multi-user.target 12 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/luna/luna2.ini.j2: -------------------------------------------------------------------------------- 1 | 2 | [API] 3 | USERNAME = {{ luna_username }} 4 | PASSWORD = {{ luna_password }} 5 | EXPIRY = 1h 6 | SECRET_KEY = {{ luna_secret_key }} 7 | ENDPOINT = {{ trix_ctrl_hostname }}.{{ trix_domain }}:7050 8 | PROTOCOL = {{ luna_protocol }} 9 | VERIFY_CERTIFICATE = {{ luna_verify_certificate }} 10 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/prometheus/app/manifest.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | name: Prometheus 3 | description: |- 4 | Prometheus Dashboard for cluster statistics collection 5 | category: Monitoring 6 | icon: fa://list 7 | url: {{'https' if ood_enable_ssl else 'http'}}://{{trix_external_fqdn}}:9090 8 | external_link: true 9 | 10 | tile: 11 | title: "Prometheus" 12 | sub_caption: | 13 | Prometheus Dashboard for cluster statistics collection
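The shell app templates below, like the manifests above, are filled in from entries of the `ood_apps` list (looped with `loop_var: app` in tasks/main.yml). The actual schema is defined in the group vars, not shown here; purely to illustrate the fields these templates reference (`name`, `group`, `props.lchroot_wrapper_path`), a hypothetical entry could look like:

# Hypothetical ood_apps entry; all values are illustrative:
ood_apps:
  - name: shell
    group: admins                # optional; templates fall back to ood_apps_admin_group
    props:
      lchroot_wrapper_path: /usr/local/sbin/lchroot-wrapper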
-------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/shell/app/manifest.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | name: Shell 3 | category: Utilities 4 | subcategory: Shells 5 | 6 | icon: fa://terminal 7 | description: |- 8 | Shell access to {{ ansible_fqdn }} 9 | role: shell 10 | 11 | tile: 12 | title: "Shell" 13 | sub_caption: | 14 | Shell access to {{ ansible_fqdn }} -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/shell/env.j2: -------------------------------------------------------------------------------- 1 | OOD_SSH_WRAPPER={{ app.props.lchroot_wrapper_path }} 2 | -------------------------------------------------------------------------------- /site/roles/trinity/ood-apps/templates/shell/sudoers.j2: -------------------------------------------------------------------------------- 1 | %{{ood_apps_admin_group}} ALL=(ALL) NOPASSWD: /usr/sbin/lchroot 2 | %{{ood_apps_admin_group}} ALL=(ALL) NOPASSWD: /bin/bash -c /usr/sbin/lchroot * 3 | %{{ood_apps_admin_group}} ALL=(ALL) NOPASSWD: /bin/rm {{ trix_images }}/*/tmp/lchroot.lock 4 | -------------------------------------------------------------------------------- /site/roles/trinity/ood/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for ood-portal 3 | 4 | ood_rpms: 5 | - name: epel-release 6 | - name: https://yum.osc.edu/ondemand/4.0/ondemand-release-web-4.0-1.el{{ ansible_distribution_major_version }}.noarch.rpm 7 | no_gpgcheck: true 8 | - name: ["@nodejs:20", "@ruby:3.3", "ondemand", "luna2-python"] 9 | 10 | ood_dex_rpms: 11 | - name: ondemand-dex 12 | 13 | ood_pam_rpms: 14 | - name: mod_authnz_pam 15 | 16 | ood_port: 8080 17 | ood_user: apache 18 | ood_cfg_path: /etc/ood/config/ 19 | ood_brand: TrinityX 20 | ood_auth_provider: dex # 'pam' or 'dex' ( only openldap is configured for dex ) 21 | ood_cluster_name: TrinityX 22 | ood_login_host: localhost 23 | ood_ldap_host: localhost 24 | ood_enable_ssl: true 25 | 26 | ood_admin_group: admins 27 | -------------------------------------------------------------------------------- /site/roles/trinity/ood/files/branding/TrinityX/custom.css: -------------------------------------------------------------------------------- 1 | .apps-section-header-blue { 2 | background-color: rgb(27, 69, 27) 3 | } 4 | .app-launcher .launcher-click { 5 | border: 4px solid #84a469; 6 | color: #404040; 7 | } 8 | -------------------------------------------------------------------------------- /site/roles/trinity/ood/files/branding/TrinityX/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/ood/files/branding/TrinityX/favicon.ico -------------------------------------------------------------------------------- /site/roles/trinity/ood/files/branding/TrinityX/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/ood/files/branding/TrinityX/logo.png -------------------------------------------------------------------------------- /site/roles/trinity/ood/files/branding/TrinityX/logo_ext.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/ood/files/branding/TrinityX/logo_ext.png -------------------------------------------------------------------------------- /site/roles/trinity/ood/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Run update_ood 3 | ansible.builtin.shell: /opt/ood/ood-portal-generator/sbin/update_ood_portal -f && /opt/ood/nginx_stage/sbin/nginx_stage nginx_clean -f 4 | register: command_result 5 | failed_when: "'Completed successfully!' not in command_result.stdout" 6 | changed_when: true 7 | notify: 8 | - Restart httpd 9 | - Restart htcacheclean 10 | - Restart ondemand-dex 11 | 12 | - name: Restart ondemand-dex 13 | ansible.builtin.systemd: 14 | name: ondemand-dex 15 | state: "{{ 'restarted' if ood_auth_provider == 'dex' else 'stopped' }}" 16 | 17 | - name: Restart httpd 18 | ansible.builtin.systemd: 19 | name: httpd 20 | state: restarted 21 | 22 | - name: Restart htcacheclean 23 | ansible.builtin.systemd: 24 | name: htcacheclean 25 | state: restarted 26 | -------------------------------------------------------------------------------- /site/roles/trinity/ood/tasks/dex-auth.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install required rpms 3 | ansible.builtin.dnf: 4 | name: "{{ item.name }}" 5 | state: present 6 | disable_gpg_check: "{{ item.no_gpgcheck | default(False) }}" 7 | with_items: "{{ ood_dex_rpms }}" 8 | retries: "{{ rpm_retries | default(3) }}" 9 | delay: "{{ rpm_delay | default(15) }}" 10 | notify: Run update_ood 11 | 12 | - name: Enable ondemand-dex service on startup 13 | ansible.builtin.service: 14 | name: ondemand-dex 15 | enabled: "{{ ood_auth_provider == 'dex' }}" 16 | state: "{{ 'started' if ood_auth_provider == 'dex' else 'stopped' }}" 17 | -------------------------------------------------------------------------------- /site/roles/trinity/ood/templates/branding/TrinityX/nginx_stage.yml.j2: -------------------------------------------------------------------------------- 1 | {% if ood_brand == 'TrinityX' %} 2 | pun_custom_env: 3 | OOD_DASHBOARD_TITLE: "TrinityX Cluster Portal" 4 | OOD_BRAND_BG_COLOR: "#e7e7e7" 5 | OOD_BRAND_LINK_ACTIVE_BG_COLOR: "#f2bb02" 6 | OOD_NAVBAR_TYPE: "default" 7 | OOD_DASHBOARD_LOGO: "/public/logo.png" 8 | OOD_DASHBOARD_HEADER_IMG_LOGO: "/public/logo.png" 9 | {% endif %} 10 | passenger_python: "{{ trix_local }}/python/bin/python3" 11 | proxy_user: "{{ ood_user }}" 12 | -------------------------------------------------------------------------------- /site/roles/trinity/ood/templates/branding/TrinityX/ondemand.yml.j2: -------------------------------------------------------------------------------- 1 | pinned_apps: 2 | - sys/shell 3 | - sys/alertmanager 4 | - sys/prometheus 5 | - sys/grafana 6 | - 'sys/bc_*' 7 | - 'sys/trinity_*' 8 | pinned_apps_group_by: category 9 | custom_css_files: ["/custom.css"] -------------------------------------------------------------------------------- /site/roles/trinity/ood/templates/login/login-node-ood-reconfigure-service.j2: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description = Login node OOD reconfiguration 3 | After = network.target sssd.service 4 | Before = ondemand-dex.service httpd.service 5 | 6 | [Service] 7 | Type = oneshot 8 | ExecStart = {{
trix_sbin }}/login-node-ood-reconfigure.sh 9 | 10 | [Install] 11 | WantedBy = multi-user.target 12 | -------------------------------------------------------------------------------- /site/roles/trinity/openhpc/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Export NFS 3 | shell: exportfs -ar 4 | changed_when: false 5 | -------------------------------------------------------------------------------- /site/roles/trinity/openhpc/templates/OHPC_exports.j2: -------------------------------------------------------------------------------- 1 | /opt/ohpc *(ro,no_root_squash) 2 | -------------------------------------------------------------------------------- /site/roles/trinity/openhpc/templates/z_trinityx.sh.j2: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | export MODULEPATH=$MODULEPATH:{{ trix_modulefiles }} 3 | -------------------------------------------------------------------------------- /site/roles/trinity/openldap/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for openldap 3 | 4 | openldap_packages: 5 | - openldap-servers 6 | - openldap-clients 7 | - rsync 8 | 9 | openldap_default_user: ldap 10 | openldap_default_group: ldap 11 | 12 | openldap_server_dir_path: /var/lib/ldap 13 | openldap_server_conf_path: /etc/openldap/slapd.d 14 | openldap_server_defaults_file: /etc/sysconfig/slapd 15 | 16 | openldap_endpoints: ldaps:/// ldapi:/// 17 | openldap_host: 'controller.cluster' 18 | 19 | ssl_cert_path: '/etc/ssl/certs' 20 | ssl_cert_group: '{{ openldap_default_user }}' 21 | 22 | openldap_tls_cacrt: '{{ ssl_ca_cert }}' 23 | openldap_tls_crt: '{{ ssl_cert_path }}/{{ trix_ctrl_hostname if ha else ansible_fqdn }}.crt' 24 | openldap_tls_key: '{{ ssl_cert_path }}/{{ trix_ctrl_hostname if ha else ansible_fqdn }}.key' 25 | 26 | -------------------------------------------------------------------------------- /site/roles/trinity/openldap/files/autoinc.ldif: -------------------------------------------------------------------------------- 1 | 2 | # import required schemas 3 | # uidNumber Attribute Auto-Incrementing 4 | # http://www.rexconsulting.net/ldap-protocol-uidNumber.html 5 | dn: cn=autoinc,cn=schema,cn=config 6 | changetype: add 7 | objectClass: olcSchemaConfig 8 | cn: autoinc 9 | olcObjectClasses: ( 1.3.6.1.4.1.23277.2.2.2.8 10 | NAME 'uidNext' 11 | DESC 'Where we get the next uidNumber from' 12 | STRUCTURAL MUST ( cn $ uidNumber ) ) 13 | 14 | -------------------------------------------------------------------------------- /site/roles/trinity/openldap/files/memberof.ldif: -------------------------------------------------------------------------------- 1 | dn: cn=module{0},cn=config 2 | changetype: modify 3 | add: olcModuleLoad 4 | olcModuleLoad: memberof 5 | 6 | dn: olcOverlay={0}memberof,olcDatabase={1}hdb,cn=config 7 | changetype: add 8 | objectClass: olcConfig 9 | objectClass: olcMemberOf 10 | objectClass: olcOverlayConfig 11 | objectClass: top 12 | olcOverlay: {0}memberof 13 | olcMemberOfDangling: ignore 14 | olcMemberOfRefInt: TRUE 15 | olcMemberOfGroupOC: groupOfMembers 16 | olcMemberOfMemberAD: member 17 | olcMemberOfMemberOfAD: memberOf 18 | 19 | dn: cn=module{0},cn=config 20 | changetype: modify 21 | add: olcModuleLoad 22 | olcModuleLoad: refint 23 | 24 | dn: olcOverlay={1}refint,olcDatabase={1}hdb,cn=config 25 | changetype: add 26 | objectClass: olcConfig 27 | objectClass: olcOverlayConfig 28 
| objectClass: olcRefintConfig 29 | objectClass: top 30 | olcOverlay: {1}refint 31 | olcRefintAttribute: memberof member 32 | 33 | -------------------------------------------------------------------------------- /site/roles/trinity/openldap/files/memberof.ldif-25: -------------------------------------------------------------------------------- 1 | dn: cn=module{0},cn=config 2 | changetype: modify 3 | add: olcModuleLoad 4 | olcModuleLoad: memberof 5 | 6 | dn: olcOverlay={0}memberof,olcDatabase={1}mdb,cn=config 7 | changetype: add 8 | objectClass: olcConfig 9 | objectClass: olcMemberOf 10 | objectClass: olcOverlayConfig 11 | objectClass: top 12 | olcOverlay: {0}memberof 13 | olcMemberOfDangling: ignore 14 | olcMemberOfRefInt: TRUE 15 | olcMemberOfGroupOC: groupOfMembers 16 | olcMemberOfMemberAD: member 17 | olcMemberOfMemberOfAD: memberOf 18 | 19 | dn: cn=module{0},cn=config 20 | changetype: modify 21 | add: olcModuleLoad 22 | olcModuleLoad: refint 23 | 24 | dn: olcOverlay={1}refint,olcDatabase={1}mdb,cn=config 25 | changetype: add 26 | objectClass: olcConfig 27 | objectClass: olcOverlayConfig 28 | objectClass: olcRefintConfig 29 | objectClass: top 30 | olcOverlay: {1}refint 31 | olcRefintAttribute: memberof member 32 | 33 | -------------------------------------------------------------------------------- /site/roles/trinity/openldap/files/ppolicy.ldif: -------------------------------------------------------------------------------- 1 | dn: cn=module{0},cn=config 2 | changetype: modify 3 | add: olcModuleLoad 4 | olcModuleLoad: ppolicy 5 | 6 | dn: olcOverlay=ppolicy,olcDatabase={1}hdb,cn=config 7 | changetype: add 8 | objectClass: olcConfig 9 | objectClass: olcPPolicyConfig 10 | objectClass: olcOverlayConfig 11 | objectClass: top 12 | olcOverlay: ppolicy 13 | olcPPolicyDefault: cn=pwdDefaultPolicy,ou=Policies,dc=local 14 | olcPPolicyHashCleartext: FALSE 15 | olcPPolicyUseLockout: FALSE 16 | olcPPolicyForwardUpdates: FALSE 17 | 18 | -------------------------------------------------------------------------------- /site/roles/trinity/openldap/files/ppolicyload.ldif: -------------------------------------------------------------------------------- 1 | dn: cn=module{0},cn=config 2 | changetype: modify 3 | add: olcModuleLoad 4 | olcModuleLoad: ppolicy.la 5 | -------------------------------------------------------------------------------- /site/roles/trinity/openldap/files/ppolicyoverlay.ldif: -------------------------------------------------------------------------------- 1 | dn: olcOverlay=ppolicy,olcDatabase={1}mdb,cn=config 2 | objectClass: olcOverlayConfig 3 | objectClass: olcPPolicyConfig 4 | olcOverlay: ppolicy 5 | olcPPolicyDefault: cn=pwdDefaultPolicy,ou=Policies,dc=local 6 | olcPPolicyHashCleartext: TRUE 7 | -------------------------------------------------------------------------------- /site/roles/trinity/openldap/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for openldap 3 | 4 | - name: restart openldap 5 | service: 6 | name: slapd 7 | state: restarted 8 | retries: 10 9 | delay: 15 10 | when: primary|default(True) 11 | -------------------------------------------------------------------------------- /site/roles/trinity/openldap/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/openldap/templates/config.ldif.j2: 
-------------------------------------------------------------------------------- 1 | # Global parameters 2 | dn: cn=config 3 | changetype: modify 4 | replace: olcTLSCACertificateFile 5 | olcTLSCACertificateFile: {{ openldap_tls_cacrt }} 6 | - 7 | replace: olcTLSCertificateFile 8 | olcTLSCertificateFile: {{ openldap_tls_crt }} 9 | - 10 | replace: olcTLSCertificateKeyFile 11 | olcTLSCertificateKeyFile: {{ openldap_tls_key }} 12 | - 13 | replace: olcConnMaxPendingAuth 14 | olcConnMaxPendingAuth: 20000 15 | - 16 | replace: olcConnMaxPending 17 | olcConnMaxPending: 5000 18 | - 19 | replace: olcIdleTimeout 20 | olcIdleTimeout: 120 21 | - 22 | replace: olcWriteTimeout 23 | olcWriteTimeout: 120 24 | - 25 | replace: olcTimeLimit 26 | olcTimeLimit: 60 27 | 28 | # config backend: access configuration 29 | dn: olcDatabase={0}config,cn=config 30 | changetype: modify 31 | replace: olcRootDN 32 | olcRootDN: cn=config 33 | - 34 | replace: olcRootPW 35 | olcRootPW: {{ openldap_root_pwd_hash.stdout }} 36 | -------------------------------------------------------------------------------- /site/roles/trinity/pacemaker/defaults/main.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | ha_packages: 4 | - luna2-python 5 | - pacemaker 6 | - corosync 7 | - libcurl-devel 8 | - fence-agents-ipmilan 9 | 10 | additional_packages: 11 | - openssl-devel 12 | 13 | pcs_noarch_pip_repository: "https://updates.clustervision.com/trinityx/{{ trix_version }}/additional/noarch/pip/{{ trix_stream }}/pcs" 14 | pcs_pip: pcs-2.0-py3-none-any.whl 15 | 16 | pacemaker_properties: 17 | no-quorum-policy: ignore 18 | pacemaker_resource_defaults: 19 | - 'migration-threshold=1' 20 | 21 | fence_ipmilan_enabled: true 22 | fence_ipmilan_host_check: 'static-list' 23 | fence_ipmilan_method: 'cycle' 24 | fence_ipmilan_lanplus: 'true' 25 | fence_ipmilan_login: 'user' 26 | fence_ipmilan_passwd: 'password' 27 | 28 | -------------------------------------------------------------------------------- /site/roles/trinity/pacemaker/files/pcmk: -------------------------------------------------------------------------------- 1 | service { 2 | name: pacemaker 3 | ver: 1 4 | } 5 | -------------------------------------------------------------------------------- /site/roles/trinity/pacemaker/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: restart corosync 4 | service: 5 | name: corosync 6 | state: restarted 7 | -------------------------------------------------------------------------------- /site/roles/trinity/pacemaker/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | dependencies: 4 | - OndrejHome.pcs-modules-2 5 | - hostname 6 | 7 | -------------------------------------------------------------------------------- /site/roles/trinity/pacemaker/templates/pcs.j2: -------------------------------------------------------------------------------- 1 | #!{{ trix_local }}/python/bin/python3.10 2 | # -*- coding: utf-8 -*- 3 | import re 4 | import sys 5 | from pcs.app import main 6 | if __name__ == '__main__': 7 | sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) 8 | sys.exit(main()) 9 | -------------------------------------------------------------------------------- /site/roles/trinity/pacemaker/vars/CentOS8.yaml: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/pacemaker/vars/CentOS8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/pacemaker/vars/CentOS9.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/pacemaker/vars/CentOS9.yaml -------------------------------------------------------------------------------- /site/roles/trinity/pacemaker/vars/RedHat8.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # requires RedHat H/A add-on repo: https://www.redhat.com/en/store/high-availability-add 4 | -------------------------------------------------------------------------------- /site/roles/trinity/pacemaker/vars/RedHat9.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # requires RedHat H/A add-on repo: https://www.redhat.com/en/store/high-availability-add 4 | -------------------------------------------------------------------------------- /site/roles/trinity/pacemaker/vars/Rocky8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/pacemaker/vars/Rocky8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/pacemaker/vars/Rocky9.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/pacemaker/vars/Rocky9.yaml -------------------------------------------------------------------------------- /site/roles/trinity/pack-images/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Pack the image 4 | ansible.builtin.shell: 5 | cmd: "luna osimage pack {{ params.image_name }}" 6 | when: not in_image 7 | tags: pack-image 8 | -------------------------------------------------------------------------------- /site/roles/trinity/packages/vars/aa64/Centos8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/packages/vars/aa64/Centos8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/packages/vars/aa64/Centos9.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/packages/vars/aa64/Centos9.yaml -------------------------------------------------------------------------------- /site/roles/trinity/packages/vars/aa64/RedHat8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/packages/vars/aa64/RedHat8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/packages/vars/aa64/Rocky8.yaml: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/packages/vars/aa64/Rocky8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/packages/vars/x64/Centos8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/packages/vars/x64/Centos8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/packages/vars/x64/Centos9.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/packages/vars/x64/Centos9.yaml -------------------------------------------------------------------------------- /site/roles/trinity/packages/vars/x64/RedHat8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/packages/vars/x64/RedHat8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/packages/vars/x64/Rocky8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/packages/vars/x64/Rocky8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/pbspro/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | pbspro_ctrl: '127.0.0.1' 4 | -------------------------------------------------------------------------------- /site/roles/trinity/pbspro/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart pbs 3 | systemd: 4 | name: pbs 5 | state: restarted 6 | -------------------------------------------------------------------------------- /site/roles/trinity/pbspro/templates/mom_priv_config.j2: -------------------------------------------------------------------------------- 1 | $clienthost {{ pbspro_ctrl }} 2 | $restrict_user_maxsysid 999 3 | -------------------------------------------------------------------------------- /site/roles/trinity/pbspro/templates/pbs.conf.j2: -------------------------------------------------------------------------------- 1 | PBS_EXEC=/opt/pbs 2 | PBS_SERVER={{ pbspro_ctrl }} 3 | PBS_START_SERVER=1 4 | PBS_START_SCHED=1 5 | PBS_START_COMM=1 6 | PBS_START_MOM=0 7 | PBS_HOME=/var/spool/pbs 8 | PBS_CORE_LIMIT=unlimited 9 | PBS_SCP=/bin/scp 10 | -------------------------------------------------------------------------------- /site/roles/trinity/prepare/defaults/main.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | stage: prepare 4 | 5 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-alertmanager/README.md: -------------------------------------------------------------------------------- 1 | # Ansible Role: prometheus-alertmanager 2 | ### Description 3 | Ansible role to install and configure prometheus alertmanager from precompiled binary 4 | ### Credits 5 | - Prometheus alertmanager from
[prometheus/alertmanager](https://github.com/prometheus/alertmanager) 6 | - Original Ansible role from [prometheus-community/ansible](https://github.com/prometheus-community/ansible) 7 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-alertmanager/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload and restart prometheus-alertmanager 3 | listen: "restart prometheus-alertmanager" 4 | become: true 5 | ansible.builtin.systemd: 6 | daemon_reload: true 7 | name: prometheus-alertmanager 8 | state: restarted 9 | 10 | - name: Reload prometheus-alertmanager 11 | listen: "reload prometheus-alertmanager" 12 | become: true 13 | ansible.builtin.systemd: 14 | name: prometheus-alertmanager 15 | state: reloaded 16 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-alertmanager/tasks/selinux.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install selinux python packages [RedHat] 3 | ansible.builtin.package: 4 | name: "{{ ['libselinux-python', 'policycoreutils-python'] 5 | if ansible_python_version is version('3', '<') else 6 | ['python3-libselinux', 'python3-policycoreutils'] }}" 7 | state: present 8 | register: _install_selinux_packages 9 | until: _install_selinux_packages is success 10 | retries: 10 11 | delay: 15 12 | when: ansible_os_family | lower == "redhat" 13 | 14 | - name: Install selinux python packages [clearlinux] 15 | ansible.builtin.package: 16 | name: sysadmin-basic 17 | state: present 18 | register: _install_selinux_packages 19 | until: _install_selinux_packages is success 20 | retries: 10 21 | delay: 15 22 | when: 23 | - ansible_distribution | lower == "clearlinux" 24 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-alertmanager/templates/alertmanager.web.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | {{ ansible_managed | comment }} 3 | {% if prometheus_alertmanager_auth.enabled %} 4 | basic_auth_users: 5 | {{ prometheus_alertmanager_auth_user }}: {{ prometheus_alertmanager_auth_password_hash }} 6 | {% endif %} 7 | {% if prometheus_alertmanager_tls.enabled %} 8 | tls_server_config: 9 | cert_file: {{ prometheus_alertmanager_tls.cert_file }} 10 | key_file: {{ prometheus_alertmanager_tls.key_file }} 11 | {% endif %} 12 | 13 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-alertmanager/vars/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | go_arch_map: 3 | i386: '386' 4 | x86_64: 'amd64' 5 | aarch64: 'arm64' 6 | armv7l: 'armv7' 7 | armv6l: 'armv6' 8 | 9 | go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}" 10 | _prometheus_alertmanager_binary_install_dir: '/usr/local/bin' 11 | 12 | # The expected location of the amtool configuration file 13 | _prometheus_alertmanager_amtool_config_dir: '/etc/amtool' 14 | _prometheus_alertmanager_repo: "prometheus/alertmanager" 15 | _github_api_headers: "{{ {'GITHUB_TOKEN': lookup('ansible.builtin.env', 'GITHUB_TOKEN')} if (lookup('ansible.builtin.env', 'GITHUB_TOKEN')) else {} }}" 16 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ha-exporter/README.md: 
-------------------------------------------------------------------------------- 1 | # Ansible Role: prometheus-ha-exporter 2 | ### Description 3 | Ansible role to install and configure prometheus ha exporter from precompiled binary 4 | ### Credits 5 | - Prometheus High Availability exporter from [ClusterLabs/ha_cluster_exporter](https://github.com/ClusterLabs/ha_cluster_exporter/releases/tag/1.3.3) 6 | - Original Ansible role from [stackhpc/ansible-ha-exporter](https://github.com/stackhpc/ansible-ha-exporter) 7 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ha-exporter/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | prometheus_ha_exporter_version: "1.3.3" 3 | prometheus_ha_exporter_binary_url: "https://updates.clustervision.com/trinityx/external/monitoring/prometheus-ha-exporter/{{system_arch}}/binaries/{{trix_stream}}/prometheus-ha-exporter.{{prometheus_ha_exporter_version}}" 4 | 5 | prometheus_ha_exporter_web_listen_port: "14205" 6 | prometheus_ha_exporter_web_listen_host: "0.0.0.0" 7 | 8 | prometheus_ha_exporter_log_dir: "/var/log/prometheus" 9 | 10 | prometheus_ha_exporter_binary_install_dir: "/usr/local/bin" 11 | 12 | prometheus_ha_exporter_system_user: "prometheus-ha-exporter" 13 | prometheus_ha_exporter_system_group: "prometheus" 14 | prometheus_ha_exporter_additional_system_groups: [] 15 | 16 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ha-exporter/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload and restart prometheus-ha-exporter 3 | listen: "restart prometheus-ha-exporter" 4 | systemd: 5 | name: prometheus-ha-exporter 6 | state: restarted 7 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ha-exporter/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: preflight.yml 3 | tags: 4 | - prometheus-ha-exporter-install 5 | - prometheus-ha-exporter-configure 6 | - prometheus-ha-exporter-run 7 | 8 | - import_tasks: install.yml 9 | become: true 10 | tags: 11 | - prometheus-ha-exporter-install 12 | 13 | - import_tasks: configure.yml 14 | become: true 15 | tags: 16 | - prometheus-ha-exporter-configure 17 | 18 | - name: Ensure High Availability Exporter is enabled on boot 19 | become: true 20 | systemd: 21 | daemon_reload: true 22 | name: prometheus-ha-exporter 23 | enabled: true 24 | state: started 25 | tags: 26 | - prometheus-ha-exporter-run 27 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/README.md: -------------------------------------------------------------------------------- 1 | # Ansible Role: prometheus-infiniband-exporter 2 | ### Description 3 | Ansible role to install and configure prometheus infiniband exporter from precompiled binary 4 | ### Credits 5 | - Prometheus infiniband exporter from [prometheus-community/infiniband_exporter](https://github.com/prometheus-community/infiniband_exporter) 6 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | prometheus_infiniband_exporter_version:
"0.10.0-rc.1" 3 | prometheus_infiniband_exporter_binary_url: "https://updates.clustervision.com/trinityx/external/monitoring/prometheus-infiniband-exporter/{{system_arch}}/binaries/{{trix_stream}}/prometheus-infiniband-exporter.{{prometheus_infiniband_exporter_version}}" 4 | 5 | prometheus_infiniband_exporter_web_listen_port: "14206" 6 | prometheus_infiniband_exporter_web_listen_host: "0.0.0.0" 7 | 8 | prometheus_infiniband_exporter_log_dir: "/var/log/prometheus" 9 | 10 | prometheus_infiniband_exporter_binary_install_dir: "/usr/local/bin" 11 | 12 | prometheus_infiniband_exporter_system_user: "prometheus-infiniband-exporter" 13 | prometheus_infiniband_exporter_system_group: "prometheus" 14 | prometheus_infiniband_exporter_additional_system_groups: [] 15 | 16 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload and restart prometheus-infiniband-exporter 3 | listen: "restart prometheus-infiniband-exporter" 4 | become: true 5 | systemd: 6 | daemon_reload: true 7 | name: prometheus-infiniband-exporter 8 | state: restarted 9 | when: not in_image 10 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: preflight.yml 3 | tags: 4 | - prometheus-infiniband-exporter-install 5 | - prometheus-infiniband-exporter-configure 6 | - prometheus-infiniband-exporter-run 7 | 8 | - import_tasks: install.yml 9 | become: true 10 | tags: 11 | - prometheus-infiniband-exporter-install 12 | 13 | - import_tasks: configure.yml 14 | become: true 15 | tags: 16 | - prometheus-infiniband-exporter-configure 17 | 18 | - name: Ensure infiniband Exporter is enabled on boot 19 | become: true 20 | systemd: 21 | # daemon_reload: true 22 | enabled: true 23 | name: prometheus-infiniband-exporter 24 | tags: 25 | - prometheus-infiniband-exporter-run 26 | # when: not in_image 27 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/vars/Centos8.yaml: -------------------------------------------------------------------------------- 1 | prometheus_infiniband_exporter_packages: [] 2 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/vars/Centos9.yaml: -------------------------------------------------------------------------------- 1 | prometheus_infiniband_exporter_packages: [] 2 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/vars/RedHat8.yaml: -------------------------------------------------------------------------------- 1 | prometheus_infiniband_exporter_packages: [] 2 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/vars/RedHat9.yaml: -------------------------------------------------------------------------------- 1 | prometheus_infiniband_exporter_packages: [] 2 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/vars/Rocky8.yaml: -------------------------------------------------------------------------------- 1 | 
prometheus_infiniband_exporter_packages: [] 2 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/vars/Rocky9.yaml: -------------------------------------------------------------------------------- 1 | prometheus_infiniband_exporter_packages: [] 2 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/vars/Ubuntu20.yaml: -------------------------------------------------------------------------------- 1 | prometheus_infiniband_exporter_packages: [] 2 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/vars/Ubuntu22.yaml: -------------------------------------------------------------------------------- 1 | prometheus_infiniband_exporter_packages: [] 2 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-infiniband-exporter/vars/main.yml: -------------------------------------------------------------------------------- 1 | go_arch_map: 2 | i386: '386' 3 | x86_64: 'amd64' 4 | aarch64: 'arm64' 5 | armv7l: 'armv7' 6 | armv6l: 'armv6' 7 | 8 | go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}" 9 | _prometheus_infiniband_exporter_repo: "prometheus-community/infiniband_exporter" 10 | # Generate a temporary directory for prometheus with the current epoch 11 | _prometheus_infiniband_exporter_tmp_dir: "/tmp/prometheus-infiniband-exporter" -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/README.md: -------------------------------------------------------------------------------- 1 | # Ansible Role: prometheus-ipmi-exporter 2 | ### Description 3 | Ansible role to install and configure prometheus ipmi exporter from precompiled binary 4 | ### Credits 5 | - Prometheus ipmi exporter from [prometheus-community/ipmi_exporter](https://github.com/prometheus-community/ipmi_exporter) 6 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | prometheus_ipmi_exporter_version: 1.8.0 3 | prometheus_ipmi_exporter_binary_url: "https://updates.clustervision.com/trinityx/external/monitoring/prometheus-ipmi-exporter/{{system_arch}}/binaries/{{trix_stream}}/prometheus-ipmi-exporter.{{prometheus_ipmi_exporter_version}}" 4 | 5 | prometheus_ipmi_exporter_web_listen_port: "14202" 6 | prometheus_ipmi_exporter_web_listen_host: "0.0.0.0" 7 | 8 | prometheus_ipmi_exporter_log_dir: "/var/log/prometheus" 9 | 10 | prometheus_ipmi_exporter_binary_install_dir: "/usr/local/bin" 11 | 12 | prometheus_ipmi_exporter_system_user: "prometheus-ipmi-exporter" 13 | prometheus_ipmi_exporter_system_group: "prometheus" 14 | prometheus_ipmi_exporter_additional_system_groups: [] 15 | 16 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload and restart prometheus-ipmi-exporter 3 | listen: "restart prometheus-ipmi-exporter" 4 | become: true 5 | systemd: 6 | daemon_reload: true 7 | name: prometheus-ipmi-exporter 8 | state: restarted 9 | when: not in_image 10 |
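The ipmi exporter defaults above expose metrics on 0.0.0.0:14202. The Prometheus server configuration that scrapes it lives elsewhere in TrinityX; purely as a sketch of how those defaults would be consumed, a scrape job could look like:

# Hypothetical prometheus.yml fragment; job name and target host are illustrative:
scrape_configs:
  - job_name: 'ipmi-exporter'
    static_configs:
      - targets: ['controller.cluster:14202']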
-------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: preflight.yml 3 | tags: 4 | - prometheus-ipmi-exporter-install 5 | - prometheus-ipmi-exporter-configure 6 | - prometheus-ipmi-exporter-run 7 | 8 | - import_tasks: install.yml 9 | become: true 10 | tags: 11 | - prometheus-ipmi-exporter-install 12 | 13 | - import_tasks: configure.yml 14 | become: true 15 | tags: 16 | - prometheus-ipmi-exporter-configure 17 | 18 | - name: Ensure ipmi Exporter is enabled on boot 19 | become: true 20 | systemd: 21 | # daemon_reload: true 22 | enabled: true 23 | name: prometheus-ipmi-exporter 24 | tags: 25 | - prometheus-ipmi-exporter-run 26 | # when: not in_image 27 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/vars/Centos8.yaml: -------------------------------------------------------------------------------- 1 | prometheus_ipmi_exporter_packages: 2 | - freeipmi -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/vars/Centos9.yaml: -------------------------------------------------------------------------------- 1 | prometheus_ipmi_exporter_packages: 2 | - freeipmi -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/vars/RedHat8.yaml: -------------------------------------------------------------------------------- 1 | prometheus_ipmi_exporter_packages: 2 | - freeipmi -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/vars/RedHat9.yaml: -------------------------------------------------------------------------------- 1 | prometheus_ipmi_exporter_packages: 2 | - freeipmi -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/vars/Rocky8.yaml: -------------------------------------------------------------------------------- 1 | prometheus_ipmi_exporter_packages: 2 | - freeipmi -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/vars/Rocky9.yaml: -------------------------------------------------------------------------------- 1 | prometheus_ipmi_exporter_packages: 2 | - freeipmi -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/vars/Ubuntu20.yaml: -------------------------------------------------------------------------------- 1 | prometheus_ipmi_exporter_packages: 2 | - freeipmi -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/vars/Ubuntu22.yaml: -------------------------------------------------------------------------------- 1 | prometheus_ipmi_exporter_packages: 2 | - freeipmi -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-ipmi-exporter/vars/main.yml: -------------------------------------------------------------------------------- 1 | go_arch_map: 2 | i386: '386' 3 | x86_64: 'amd64' 4 | aarch64: 'arm64' 5 | armv7l: 'armv7' 6 | armv6l: 'armv6' 7 | 8 | go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}" 9 | 
_prometheus_ipmi_exporter_repo: "prometheus-community/ipmi_exporter" 10 | # Temporary working directory used while fetching the exporter binary 11 | _prometheus_ipmi_exporter_tmp_dir: "/tmp/prometheus-ipmi-exporter" -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-lshw-exporter/README.md: -------------------------------------------------------------------------------- 1 | # Ansible Role: prometheus-lshw-exporter 2 | ### Description 3 | Ansible role to install and configure the Prometheus lshw exporter from a precompiled binary 4 | ### Credits 5 | - Prometheus lshw exporter from [prometheus-community/lshw_exporter](https://github.com/prometheus-community/lshw_exporter) 6 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-lshw-exporter/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | prometheus_lshw_exporter_version: 0.0.1 3 | prometheus_lshw_exporter_binary_url: "https://updates.clustervision.com/trinityx/external/monitoring/prometheus-lshw-exporter/{{system_arch}}/binaries/{{trix_stream}}/prometheus-lshw-exporter.{{prometheus_lshw_exporter_version}}" 4 | 5 | prometheus_lshw_exporter_web_listen_port: "14208" 6 | prometheus_lshw_exporter_web_listen_host: "0.0.0.0" 7 | 8 | prometheus_lshw_exporter_log_dir: "/var/log/prometheus" 9 | 10 | prometheus_lshw_exporter_binary_install_dir: "/usr/local/bin" 11 | 12 | prometheus_lshw_exporter_system_user: "root" 13 | prometheus_lshw_exporter_system_group: "root" 14 | prometheus_lshw_exporter_additional_system_groups: [] 15 | 16 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-lshw-exporter/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload and restart prometheus-lshw-exporter 3 | listen: "restart prometheus-lshw-exporter" 4 | become: true 5 | systemd: 6 | daemon_reload: true 7 | name: prometheus-lshw-exporter 8 | state: restarted 9 | when: not in_image 10 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-lshw-exporter/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: preflight.yml 3 | tags: 4 | - prometheus-lshw-exporter-install 5 | - prometheus-lshw-exporter-configure 6 | - prometheus-lshw-exporter-run 7 | 8 | - import_tasks: install.yml 9 | become: true 10 | tags: 11 | - prometheus-lshw-exporter-install 12 | 13 | - import_tasks: configure.yml 14 | become: true 15 | tags: 16 | - prometheus-lshw-exporter-configure 17 | 18 | - name: Ensure lshw Exporter is enabled on boot 19 | become: true 20 | systemd: 21 | # daemon_reload: true 22 | enabled: true 23 | name: prometheus-lshw-exporter 24 | tags: 25 | - prometheus-lshw-exporter-run 26 | # when: not in_image 27 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-lshw-exporter/vars/main.yml: -------------------------------------------------------------------------------- 1 | go_arch_map: 2 | i386: '386' 3 | x86_64: 'amd64' 4 | aarch64: 'arm64' 5 | armv7l: 'armv7' 6 | armv6l: 'armv6' 7 | 8 | go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}" 9 | _prometheus_lshw_exporter_repo: "prometheus-community/lshw_exporter" 10 | # Temporary working directory used while fetching the exporter binary
11 | _prometheus_lshw_exporter_tmp_dir: "/tmp/prometheus-lshw-exporter" -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-node-exporter/README.md: -------------------------------------------------------------------------------- 1 | # Ansible Role: prometheus-node-exporter 2 | ### Description 3 | Ansible role to install and configure the Prometheus node exporter from a precompiled binary 4 | ### Credits 5 | - Prometheus node exporter from [prometheus/node_exporter](https://github.com/prometheus/node_exporter) 6 | - Original Ansible role from [prometheus-community/ansible](https://github.com/prometheus-community/ansible) 7 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-node-exporter/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload and restart prometheus-node-exporter 3 | listen: "restart prometheus-node-exporter" 4 | become: true 5 | ansible.builtin.systemd: 6 | daemon_reload: true 7 | name: prometheus-node-exporter 8 | state: restarted 9 | when: 10 | - not in_image 11 | - not ansible_check_mode 12 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-node-exporter/templates/node_exporter.web.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | {{ ansible_managed | comment }} 3 | {% if prometheus_node_exporter_tls.enabled %} 4 | tls_server_config: 5 | cert_file: {{ prometheus_node_exporter_tls.cert_file }} 6 | key_file: {{ prometheus_node_exporter_tls.key_file }} 7 | {% endif %} 8 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-node-exporter/vars/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | go_arch_map: 3 | i386: '386' 4 | x86_64: 'amd64' 5 | aarch64: 'arm64' 6 | armv7l: 'armv7' 7 | armv6l: 'armv6' 8 | 9 | go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}" 10 | _prometheus_node_exporter_repo: "prometheus/node_exporter" 11 | _github_api_headers: "{{ {'GITHUB_TOKEN': lookup('ansible.builtin.env', 'GITHUB_TOKEN')} if (lookup('ansible.builtin.env', 'GITHUB_TOKEN')) else {} }}" 12 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-nvidia-exporter/README.md: -------------------------------------------------------------------------------- 1 | # Ansible Role: prometheus-nvidia-exporter 2 | ### Description 3 | Ansible role to install and configure the Prometheus NVIDIA exporter from a precompiled binary 4 | ### Credits 5 | - Prometheus nvidia exporter from [utkuozdemir/nvidia_gpu_exporter](https://github.com/utkuozdemir/nvidia_gpu_exporter) 6 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-nvidia-exporter/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | prometheus_nvidia_exporter_version: "1.2.0" 3 | prometheus_nvidia_exporter_binary_url: "https://updates.clustervision.com/trinityx/external/monitoring/prometheus-nvidia-exporter/{{system_arch}}/binaries/{{trix_stream}}/prometheus-nvidia-exporter.{{prometheus_nvidia_exporter_version}}" 4 | 5 | prometheus_nvidia_exporter_web_listen_port: "14203" 6 |
prometheus_nvidia_exporter_web_listen_host: "0.0.0.0" 7 | 8 | prometheus_nvidia_exporter_log_dir: "/var/log/prometheus" 9 | 10 | prometheus_nvidia_exporter_binary_install_dir: "/usr/local/bin" 11 | 12 | prometheus_nvidia_exporter_system_user: "prometheus-nvidia-exporter" 13 | prometheus_nvidia_exporter_system_group: "prometheus" 14 | prometheus_nvidia_exporter_additional_system_groups: [] 15 | 16 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-nvidia-exporter/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload and restart prometheus-nvidia-exporter 3 | listen: "restart prometheus-nvidia-exporter" 4 | become: true 5 | systemd: 6 | daemon_reload: true 7 | name: prometheus-nvidia-exporter 8 | state: restarted 9 | when: not in_image 10 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-nvidia-exporter/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: preflight.yml 3 | tags: 4 | - prometheus-nvidia-exporter-install 5 | - prometheus-nvidia-exporter-configure 6 | - prometheus-nvidia-exporter-run 7 | 8 | - import_tasks: install.yml 9 | become: true 10 | tags: 11 | - prometheus-nvidia-exporter-install 12 | 13 | - import_tasks: configure.yml 14 | become: true 15 | tags: 16 | - prometheus-nvidia-exporter-configure 17 | 18 | - name: Ensure nvidia Exporter is enabled on boot 19 | become: true 20 | systemd: 21 | name: prometheus-nvidia-exporter 22 | enabled: true 23 | state: started 24 | tags: 25 | - prometheus-nvidia-exporter-run 26 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-nvidia-exporter/vars/main.yml: -------------------------------------------------------------------------------- 1 | go_arch_map: 2 | i386: '386' 3 | x86_64: 'amd64' 4 | aarch64: 'arm64' 5 | armv7l: 'armv7' 6 | armv6l: 'armv6' 7 | 8 | go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}" 9 | 10 | # Temporary working directory used while fetching the exporter binary 11 | _prometheus_nvidia_exporter_tmp_dir: "/tmp/prometheus-nvidia-exporter" -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-server/README.md: -------------------------------------------------------------------------------- 1 | # Ansible Role: prometheus-server 2 | ### Description 3 | Ansible role to install and configure the Prometheus server from a precompiled binary 4 | ### Credits 5 | - Prometheus server from [prometheus/prometheus](https://github.com/prometheus/prometheus) 6 | - Original Ansible role from [prometheus-community/ansible](https://github.com/prometheus-community/ansible) 7 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-server/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload and restart prometheus 3 | listen: "restart prometheus-server" 4 | become: true 5 | ansible.builtin.systemd: 6 | daemon_reload: true 7 | name: prometheus-server 8 | state: restarted 9 | 10 | - name: Reload prometheus 11 | listen: "reload prometheus-server" 12 | become: true 13 | ansible.builtin.systemd: 14 | name: prometheus-server 15 | state: reloaded 16 |
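The prometheus-server handlers above expose two `listen` topics: configuration-only changes can notify "reload prometheus-server", while changes to the binary or unit file notify "restart prometheus-server". A hedged manual equivalent of the reload path (the config path below is a conventional guess; the role actually renders into `{{ prometheus_server_config_dir }}`):

    # Validate the rendered config first, then reload without restarting the daemon
    promtool check config /etc/prometheus/prometheus.yml && systemctl reload prometheus-server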
-------------------------------------------------------------------------------- /site/roles/trinity/prometheus-server/tasks/legacy.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Verify if legacy rules exist 4 | stat: 5 | path: "{{ prometheus_server_config_dir }}/rules/trix.rules" 6 | register: prometheus_legacy_rules 7 | 8 | - name: Disable legacy rules 9 | command: "mv {{ prometheus_server_config_dir }}/rules/trix.rules {{ prometheus_server_config_dir }}/rules/trix.rules.disabled" 10 | when: prometheus_legacy_rules.stat.exists 11 | 12 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-server/tasks/selinux.yml: -------------------------------------------------------------------------------- 1 | - name: Install SELinux dependencies 2 | ansible.builtin.package: 3 | name: "{{ _prometheus_server_selinux_packages }}" 4 | state: present 5 | register: _install_packages 6 | until: _install_packages is succeeded 7 | retries: 10 8 | delay: 15 9 | when: 10 | - ansible_version.full is version('2.4', '>=') 11 | - ansible_selinux.status == "enabled" 12 | 13 | - name: Allow prometheus to bind to port in SELinux 14 | community.general.seport: 15 | ports: "{{ prometheus_server_web_listen_port }}" 16 | proto: tcp 17 | setype: http_port_t 18 | state: present 19 | when: 20 | - ansible_version.full is version('2.4', '>=') 21 | - ansible_selinux.status == "enabled" 22 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-server/templates/prometheus.web.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | {{ ansible_managed | comment }} 3 | {% if prometheus_server_tls.enabled %} 4 | tls_server_config: 5 | cert_file: {{ prometheus_server_tls.cert_file }} 6 | key_file: {{ prometheus_server_tls.key_file }} 7 | {% endif %} 8 | 9 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-server/vars/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | go_arch_map: 3 | i386: '386' 4 | x86_64: 'amd64' 5 | aarch64: 'arm64' 6 | armv7l: 'armv7' 7 | armv6l: 'armv6' 8 | 9 | go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}" 10 | _prometheus_server_binary_install_dir: '/usr/local/bin' 11 | 12 | _prometheus_server_selinux_packages: "{{ ['libselinux-python', 'policycoreutils-python'] 13 | if ansible_python_version is version('3', '<') else 14 | ['python3-libselinux', 'python3-policycoreutils'] }}" 15 | _prometheus_server_repo: "prometheus/prometheus" 16 | _github_api_headers: "{{ {'GITHUB_TOKEN': lookup('ansible.builtin.env', 'GITHUB_TOKEN')} if (lookup('ansible.builtin.env', 'GITHUB_TOKEN')) else {} }}" 17 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-slurm-exporter/README.md: -------------------------------------------------------------------------------- 1 | # Ansible Role: prometheus-slurm-exporter 2 | ### Description 3 | Ansible role to install and configure the Prometheus Slurm exporter from a precompiled binary 4 | ### Credits 5 | - Prometheus Slurm exporter from [vpenso/prometheus-slurm-exporter](https://github.com/vpenso/prometheus-slurm-exporter) 6 | - Original Ansible role from [stackhpc/ansible-slurm-exporter](https://github.com/stackhpc/ansible-slurm-exporter) 7 |
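Once the `seport` task in selinux.yml has run on an SELinux-enabled controller, the server's listen port should be registered under the `http_port_t` type; a quick check (the port listed will be whatever `prometheus_server_web_listen_port` resolves to):

    # The web listen port should appear alongside 80/443 in this list
    semanage port -l | grep -w http_port_t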
-------------------------------------------------------------------------------- /site/roles/trinity/prometheus-slurm-exporter/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | prometheus_slurm_exporter_version: "0.20" 3 | prometheus_slurm_exporter_binary_url: "https://updates.clustervision.com/trinityx/external/monitoring/prometheus-slurm-exporter/{{system_arch}}/binaries/{{trix_stream}}/prometheus-slurm-exporter.{{prometheus_slurm_exporter_version}}" 4 | 5 | prometheus_slurm_exporter_web_listen_port: "14204" 6 | prometheus_slurm_exporter_web_listen_host: "0.0.0.0" 7 | 8 | prometheus_slurm_exporter_log_dir: "/var/log/prometheus" 9 | 10 | prometheus_slurm_exporter_binary_install_dir: "/usr/local/bin" 11 | 12 | prometheus_slurm_exporter_system_user: "prometheus-slurm-exporter" 13 | prometheus_slurm_exporter_system_group: "prometheus" 14 | prometheus_slurm_exporter_additional_system_groups: [] 15 | 16 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-slurm-exporter/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload and restart prometheus-slurm-exporter 3 | listen: "restart prometheus-slurm-exporter" 4 | become: true 5 | systemd: 6 | daemon_reload: true 7 | name: prometheus-slurm-exporter 8 | state: restarted 9 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-slurm-exporter/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: preflight.yml 3 | tags: 4 | - prometheus-slurm-exporter-install 5 | - prometheus-slurm-exporter-configure 6 | - prometheus-slurm-exporter-run 7 | 8 | - import_tasks: install.yml 9 | become: true 10 | tags: 11 | - prometheus-slurm-exporter-install 12 | 13 | - import_tasks: configure.yml 14 | become: true 15 | tags: 16 | - prometheus-slurm-exporter-configure 17 | 18 | - name: Ensure Slurm Exporter is enabled on boot 19 | become: true 20 | systemd: 21 | daemon_reload: true 22 | name: prometheus-slurm-exporter 23 | enabled: true 24 | state: started 25 | tags: 26 | - prometheus-slurm-exporter-run 27 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-slurm-job-exporter/README.md: -------------------------------------------------------------------------------- 1 | # Ansible Role: prometheus-slurm-job-exporter 2 | ### Description 3 | Ansible role to install and configure the Prometheus slurm-job exporter from a precompiled binary 4 | ### Credits 5 | - Prometheus slurm-job exporter from [prometheus-community/slurm_job_exporter](https://github.com/prometheus-community/slurm_job_exporter) 6 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-slurm-job-exporter/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | prometheus_slurm_job_exporter_version: 0.0.1 3 | prometheus_slurm_job_exporter_binary_url: "https://updates.clustervision.com/trinityx/external/monitoring/prometheus-slurm-job-exporter/{{system_arch}}/binaries/{{trix_stream}}/prometheus-slurm-job-exporter.{{prometheus_slurm_job_exporter_version}}" 4 | 5 | prometheus_slurm_job_exporter_web_listen_port: "14207" 6 | prometheus_slurm_job_exporter_web_listen_host: "0.0.0.0" 7 | 8 | prometheus_slurm_job_exporter_log_dir:
"/var/log/prometheus" 9 | 10 | prometheus_slurm_job_exporter_binary_install_dir: "/usr/local/bin" 11 | 12 | prometheus_slurm_job_exporter_system_user: "root" 13 | prometheus_slurm_job_exporter_system_group: "root" 14 | prometheus_slurm_job_exporter_additional_system_groups: [] 15 | 16 | prometheus_slurm_job_exporter_extra_flags: "" 17 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-slurm-job-exporter/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload and restart prometheus-slurm-job-exporter 3 | listen: "restart prometheus-slurm-job-exporter" 4 | become: true 5 | systemd: 6 | daemon_reload: true 7 | name: prometheus-slurm-job-exporter 8 | state: restarted 9 | when: not in_image 10 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-slurm-job-exporter/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: preflight.yml 3 | tags: 4 | - prometheus-slurm-job-exporter-install 5 | - prometheus-slurm-job-exporter-configure 6 | - prometheus-slurm-job-exporter-run 7 | 8 | - import_tasks: install.yml 9 | become: true 10 | tags: 11 | - prometheus-slurm-job-exporter-install 12 | 13 | - import_tasks: configure.yml 14 | become: true 15 | tags: 16 | - prometheus-slurm-job-exporter-configure 17 | 18 | - name: Ensure slurm-job Exporter is enabled on boot 19 | become: true 20 | systemd: 21 | # daemon_reload: true 22 | enabled: true 23 | name: prometheus-slurm-job-exporter 24 | tags: 25 | - prometheus-slurm-job-exporter-run 26 | # when: not in_image 27 | -------------------------------------------------------------------------------- /site/roles/trinity/prometheus-slurm-job-exporter/vars/main.yml: -------------------------------------------------------------------------------- 1 | go_arch_map: 2 | i386: '386' 3 | x86_64: 'amd64' 4 | aarch64: 'arm64' 5 | armv7l: 'armv7' 6 | armv6l: 'armv6' 7 | 8 | go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}" 9 | _prometheus_slurm_job_exporter_repo: "prometheus-community/slurm_job_exporter" 10 | # Generate a temporary directory for prometheus with the current epoch 11 | _prometheus_slurm_job_exporter_tmp_dir: "/tmp/prometheus-slurm-job-exporter" -------------------------------------------------------------------------------- /site/roles/trinity/pwquality/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - name: Configure password quality settings 2 | ansible.builtin.copy: 3 | dest: /etc/security/pwquality.conf 4 | content: | 5 | # /etc/security/pwquality.conf 6 | 7 | # Minimum acceptable size for the new password (8 characters) 8 | minlen = 8 9 | 10 | # Require at least 1 uppercase letter 11 | ucredit = -1 12 | 13 | # Require at least 1 lowercase letter 14 | lcredit = -1 15 | 16 | # Require at least 1 digit 17 | dcredit = -1 18 | 19 | # Require at least 1 special character 20 | ocredit = -1 21 | 22 | # Number of characters in the new password that must be different from the old password 23 | difok = 8 24 | owner: root 25 | group: root 26 | mode: '0644' 27 | backup: yes -------------------------------------------------------------------------------- /site/roles/trinity/rdma-centos/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # defaults for the rdma-centos 
role 3 | 4 | rdma_packages: 5 | - rdma 6 | - rdma-ndd 7 | -------------------------------------------------------------------------------- /site/roles/trinity/rdma-centos/templates/i40.conf.j2: -------------------------------------------------------------------------------- 1 | blacklist i40iw 2 | install i40iw /bin/true 3 | -------------------------------------------------------------------------------- /site/roles/trinity/rdma-centos/vars/Centos8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/rdma-centos/vars/Centos8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/rdma-centos/vars/Centos9.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/rdma-centos/vars/Centos9.yaml -------------------------------------------------------------------------------- /site/roles/trinity/rdma-centos/vars/RedHat8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/rdma-centos/vars/RedHat8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/rdma-centos/vars/RedHat9.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | # defaults for the rdma-centos role 3 | 4 | rdma_packages: 5 | - rdma 6 | -------------------------------------------------------------------------------- /site/roles/trinity/rdma-centos/vars/Rocky8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/rdma-centos/vars/Rocky8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/rdma-centos/vars/Rocky9.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | rdma_packages: 4 | - rdma-core 5 | - librdmacm 6 | - ucx-rdmacm 7 | 8 | -------------------------------------------------------------------------------- /site/roles/trinity/repos/tasks/baseurl.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | #- debug: 4 | # msg: "repo file: {{ item }}" 5 | # with_items: "{{ inner_loop_var.stdout_lines }}" 6 | 7 | - name: Enabling baseurl 8 | replace: 9 | path: "{{ item }}" 10 | regexp: "^#baseurl" 11 | replace: "baseurl" 12 | with_items: "{{ inner_loop_var.stdout_lines }}" 13 | 14 | - name: Disabling mirrorlist 15 | replace: 16 | path: "{{ item }}" 17 | regexp: "^mirrorlist" 18 | replace: "#mirrorlist" 19 | with_items: "{{ inner_loop_var.stdout_lines }}" 20 | 21 | -------------------------------------------------------------------------------- /site/roles/trinity/repos/vars/CentOS8.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | package_based_repositories: 4 | - name: centos-stream-release 5 | - name: centos-stream-repos 6 | - name: epel-release 7 | 8 | -------------------------------------------------------------------------------- /site/roles/trinity/repos/vars/CentOS9.yaml: 
-------------------------------------------------------------------------------- 1 | --- 2 | 3 | package_based_repositories: 4 | - name: centos-stream-release 5 | - name: centos-stream-repos 6 | - name: epel-release 7 | 8 | -------------------------------------------------------------------------------- /site/roles/trinity/repos/vars/RedHat9.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | external_repository_rpms: 4 | - name: https://www.elrepo.org/elrepo-release-{{ hostvars[inventory_hostname].ansible_distribution_major_version }}.el{{ hostvars[inventory_hostname].ansible_distribution_major_version }}.elrepo.noarch.rpm 5 | no_gpgcheck: True 6 | - name: https://dl.fedoraproject.org/pub/epel/epel-release-latest-{{ hostvars[inventory_hostname].ansible_distribution_major_version }}.noarch.rpm 7 | no_gpgcheck: True 8 | 9 | 10 | package_based_repositories: 11 | -------------------------------------------------------------------------------- /site/roles/trinity/repos/vars/Rocky8.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | package_based_repositories: 4 | - name: epel-release 5 | - name: rocky-repos 6 | 7 | use_baseurl_repositories: 8 | - name: rocky-repos 9 | 10 | -------------------------------------------------------------------------------- /site/roles/trinity/repos/vars/Rocky9.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | package_based_repositories: 4 | - name: epel-release 5 | - name: rocky-repos 6 | 7 | use_baseurl_repositories: 8 | - name: rocky-repos 9 | 10 | -------------------------------------------------------------------------------- /site/roles/trinity/resolv/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for bind 3 | 4 | # luna handles the forwarders - Antoine 5 | # the below will only go into resolv.conf 6 | bind_dns_forwarders: 7 | - '8.8.8.8' 8 | - '8.8.4.4' 9 | 10 | resolv_server: '127.0.0.1' 11 | resolv_search_domains: '{{ trix_domain }} ipmi' 12 | 13 | -------------------------------------------------------------------------------- /site/roles/trinity/resolv/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: reload NetworkManager 4 | service: 5 | name: NetworkManager 6 | state: reloaded 7 | -------------------------------------------------------------------------------- /site/roles/trinity/resolv/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks file for resolv 3 | 4 | - name: Disable NetworkManager managing /etc/resolv.conf 5 | ini_file: 6 | path: /etc/NetworkManager/NetworkManager.conf 7 | state: present 8 | no_extra_spaces: yes 9 | section: main 10 | option: dns 11 | value: none 12 | owner: root 13 | group: root 14 | mode: 0644 15 | backup: yes 16 | notify: reload NetworkManager 17 | 18 | - name: Render /etc/resolv.conf 19 | template: 20 | src: "resolv.conf.j2" 21 | dest: "/etc/resolv.conf" 22 | backup: "yes" 23 | notify: reload NetworkManager 24 | -------------------------------------------------------------------------------- /site/roles/trinity/resolv/templates/resolv.conf.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | {% if ansible_dns['nameservers'] is defined or bind_dns_forwarders %} 4 | search {{ 
resolv_search_domains }} 5 | nameserver {{ resolv_server }} 6 | {% if bind_dns_forwarders %} 7 | {% for f in bind_dns_forwarders %} 8 | nameserver {{ f }} 9 | {% endfor %} 10 | {% else %} 11 | {% for f in ansible_dns['nameservers'] %} 12 | {% if f not in ansible_all_ipv4_addresses%} 13 | nameserver {{ f }} 14 | {% endif %} 15 | {% endfor %} 16 | {% endif %} 17 | {% endif %} 18 | -------------------------------------------------------------------------------- /site/roles/trinity/rsyslog/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for rsyslog 3 | 4 | syslog_forwarding_rules: [] 5 | #syslog_forwarding_rules: 6 | # - name: default 7 | # proto: 'tcp' 8 | # port: 514 9 | # host: 'remote-host' 10 | # facility: '#*' 11 | # level: '*' 12 | 13 | syslog_listeners: [] 14 | #syslog_listeners: 15 | # - name: default 16 | # proto: tcp 17 | # port: 514 18 | 19 | syslog_file_template_rules: [] 20 | #syslog_file_template_rules: 21 | # - name: default 22 | # type: string 23 | # content: '/var/log/cluster-messages/%HOSTNAME%.messages' 24 | # field: '$fromhost-ip' 25 | # criteria: startswith 26 | # rule: '{{ "10.141.0.0".split(".")[:16//8]|join(".") }}' 27 | 28 | rsyslog_config_location: /etc/rsyslog.d/ 29 | rsyslog_service: rsyslog 30 | rsyslog_package: rsyslog 31 | -------------------------------------------------------------------------------- /site/roles/trinity/rsyslog/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for rsyslog 3 | 4 | - name: restart rsyslog 5 | service: 6 | name: "{{ rsyslog_service }}" 7 | state: restarted 8 | when: not in_image 9 | -------------------------------------------------------------------------------- /site/roles/trinity/rsyslog/templates/template-rule.conf.j2: -------------------------------------------------------------------------------- 1 | # ### Begin file template rules ### 2 | 3 | template(name="{{ item.name }}" type="{{ item.type }}" {{ item.type }}="{{ item.content }}") 4 | 5 | if {{ item.field }} {{ item.criteria }} '{{ item.rule }}' then { action(type="omfile" DynaFile="{{ item.name }}") stop } 6 | 7 | $CreateDirs on 8 | 9 | # ### End file template rules ### 10 | -------------------------------------------------------------------------------- /site/roles/trinity/rsyslog/vars/Debian.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/rsyslog/vars/Debian.yaml -------------------------------------------------------------------------------- /site/roles/trinity/rsyslog/vars/RedHat.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/rsyslog/vars/RedHat.yaml -------------------------------------------------------------------------------- /site/roles/trinity/rsyslog/vars/SLE.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/rsyslog/vars/SLE.yaml -------------------------------------------------------------------------------- /site/roles/trinity/rsyslog/vars/Suse.yaml: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/rsyslog/vars/Suse.yaml -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/defaults/main.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | drbd_ctrl1_ip: '{{ trix_ctrl1_ip }}' 4 | drbd_ctrl1_device: /dev/drbd1 5 | drbd_ctrl1_disk: '{{ shared_fs_device }}' 6 | 7 | drbd_ctrl2_ip: '{{ trix_ctrl2_ip }}' 8 | drbd_ctrl2_device: '{{ drbd_ctrl1_device }}' 9 | drbd_ctrl2_disk: '{{ drbd_ctrl1_disk }}' 10 | 11 | drbd_ctrl3_ip: '{{ trix_ctrl3_ip|default("") }}' 12 | drbd_ctrl3_device: '{{ drbd_ctrl1_device }}' 13 | drbd_ctrl3_disk: '{{ drbd_ctrl1_disk }}' 14 | 15 | drbd_ctrl4_ip: '{{ trix_ctrl4_ip|default("") }}' 16 | drbd_ctrl4_device: '{{ drbd_ctrl1_device }}' 17 | drbd_ctrl4_disk: '{{ drbd_ctrl1_disk }}' 18 | 19 | drbd_shared_resource_name: trinity_disk 20 | drbd_shared_secret: f9f3792dec4afa413d35b9761658e56c 21 | drbd_shared_resource_stonith_enabled: true 22 | 23 | drbd_packages: 24 | - drbd90-utils 25 | - kmod-drbd90 26 | 27 | iscsi_packages: 28 | - libiscsi 29 | - libiscsi-utils 30 | - iscsi-initiator-utils 31 | 32 | -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/meta/main.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | dependencies: 3 | - OndrejHome.pcs-modules-2 4 | 5 | -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/templates/lvm_filter.dat.j2: -------------------------------------------------------------------------------- 1 | {% for disk in shared_lvm_disks %} 2 | {% if disk['fstype'] == 'lvm' %} 3 | {% if 'disk' in disk %} 4 | {{ disk['disk'] }} 5 | {% else %} 6 | {% if 'device' in disk %} 7 | !{{ disk['device'] }} 8 | {% endif %} 9 | {% endif %} 10 | {% endif %} 11 | {% endfor -%} 12 | -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/templates/lvm_volumes.dat.j2: -------------------------------------------------------------------------------- 1 | {% for disk in shared_fs_disks %} 2 | {% if disk['fstype'] == 'lvm' %} 3 | {{ disk['name'] }} 4 | {% endif %} 5 | {% endfor -%} 6 | -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/vars/AlmaLinux8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/shared-fs/vars/AlmaLinux8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/vars/AlmaLinux9.yaml: -------------------------------------------------------------------------------- 1 | drbd_packages: 2 | - drbd9x-utils 3 | - kmod-drbd9x 4 | -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/vars/CentOS8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/shared-fs/vars/CentOS8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/vars/CentOS9.yaml: 
-------------------------------------------------------------------------------- 1 | drbd_packages: 2 | - drbd9x-utils 3 | - kmod-drbd9x 4 | -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/vars/RedHat8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/shared-fs/vars/RedHat8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/vars/RedHat9.yaml: -------------------------------------------------------------------------------- 1 | drbd_packages: 2 | - drbd9x-utils 3 | - kmod-drbd9x 4 | -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/vars/Rocky8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/shared-fs/vars/Rocky8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/shared-fs/vars/Rocky9.yaml: -------------------------------------------------------------------------------- 1 | drbd_packages: 2 | - drbd9x-utils 3 | - kmod-drbd9x 4 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | sbank_path: '/usr' 3 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/.gitignore: -------------------------------------------------------------------------------- 1 | doc/.ikiwiki 2 | html 3 | *.1 4 | slurm-bank-*.tar.gz 5 | gitindex.tmp 6 | .kitchen/ 7 | .kitchen.local.yml 8 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/.kitchen.yml: -------------------------------------------------------------------------------- 1 | --- 2 | driver: 3 | name: vagrant 4 | 5 | provisioner: 6 | name: chef_solo 7 | 8 | platforms: 9 | - name: centos-6.8 10 | 11 | suites: 12 | - name: default 13 | run_list: 14 | attributes: 15 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/AUTHORS: -------------------------------------------------------------------------------- 1 | Paddy Doyle, Trinity Centre for High Performance Computing, Trinity College Dublin 2 | Jimmy Tang, Trinity Centre for High Performance Computing, Trinity College Dublin 3 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/README.markdown: -------------------------------------------------------------------------------- 1 | README -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/VERSION: -------------------------------------------------------------------------------- 1 | slurm-bank-1.4.2 2 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/chefignore: -------------------------------------------------------------------------------- 1 | 
.kitchen 2 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/bugs.mdwn: -------------------------------------------------------------------------------- 1 | This is slurm bank's bug list. Link bugs to [[bugs/done]] when done. 2 | 3 | [[!inline pages="./bugs/* and !./bugs/done and !link(done) 4 | and !*/Discussion" actions=yes postform=yes show=0 archive=yes]] 5 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/bugs/done.mdwn: -------------------------------------------------------------------------------- 1 | recently fixed [[bugs]] 2 | 3 | [[!inline pages="./* and link(./done) and !*/Discussion" sort=mtime show=10 4 | archive=yes]] 5 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/bugs/move_all_commands_into_libexec_or_similar.mdwn: -------------------------------------------------------------------------------- 1 | This is not really a bug, but a todo item 2 | 3 | We should probably move all the sbank-* commands into 4 | DESTDIR/PREFIX/libexec/sbank-* so we don't pollute the path too 5 | much. This will make some of the bash completion scripts nicer to 6 | use for the end user. 7 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/bugs/sbank-deposit_needs_to_check_if_time_is_zero.mdwn: -------------------------------------------------------------------------------- 1 | [[sbank-deposit]] currently does not check to see if the time that 2 | is deposited is zero or not. It probably should check the time and 3 | if it is zero it should do nothing. 4 | 5 | [[done]] 6 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/contact.mdwn: -------------------------------------------------------------------------------- 1 | Primary contacts for slurm-bank 2 | 3 | * Paddy Doyle 4 | * Jimmy Tang 5 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/copyright.mdwn: -------------------------------------------------------------------------------- 1 | 2 | * [Trinity Centre for High Performance Computing][] 3 | * [Trinity College Dublin][] 4 | 5 | Copyright (C) 2011 Trinity Centre for High Performance Computing, Trinity College Dublin. 6 | There is NO WARRANTY, to the extent permitted by law. 7 | 8 | [Trinity Centre for High Performance Computing]: http://www.tchpc.tcd.ie/ 9 | [Trinity College Dublin]: http://www.tcd.ie/ 10 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/download.mdwn: -------------------------------------------------------------------------------- 1 | To download the development source code please go to 2 | 3 | * -- this seems to be where all 4 | the changes end up getting pushed to first. 
5 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/gource.mdwn: -------------------------------------------------------------------------------- 1 | If we ever want to visualise the repo 2 | 3 | gource --hide progress,filenames,dirnames,bloom -720x480 -a 0.5 -s 0.15 -i 0 \ 4 | --title slurm-bank -o - | ffmpeg -y -b 3000K -r 60 -f image2pipe -vcodec ppm \ 5 | -i - -vpre libx264-default -vcodec libx264 slurm-bank.x264.avi 6 | 7 | shflags is actually grafted onto the repo from some other project. 8 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/sbank-submit.mdwn: -------------------------------------------------------------------------------- 1 | # NAME 2 | 3 | sbank-submit - sbank wrapper submit command 4 | 5 | # SYNOPSIS 6 | 7 | sbank submit [OPTION]... 8 | 9 | # DESCRIPTION 10 | 11 | Simple wrapper for submitting jobs; it takes a standard slurm batch 12 | script. The script gets checked to see if enough hours are available 13 | prior to job submission. 14 | 15 | * -s, --scriptname 16 | 17 | SBATCH script that you wish to submit. 18 | 19 | # SEE ALSO 20 | 21 | * [[sbank-balance]](1) 22 | * [[sbank-cluster]](1) 23 | * [[sbank-deposit]](1) 24 | * [[sbank-project]](1) 25 | * [[sbank-time]](1) 26 | * [[sbank-user]](1) 27 | * sbatch(1) 28 | 29 | # AUTHOR 30 | 31 | Jimmy Tang 32 | 33 | # COPYRIGHT 34 | 35 | Copyright (C) 2011 Trinity Centre for High Performance Computing, Trinity College Dublin. 36 | There is NO WARRANTY, to the extent permitted by law. 37 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/sbank-version.mdwn: -------------------------------------------------------------------------------- 1 | # NAME 2 | 3 | sbank-version - sbank version 4 | 5 | # SYNOPSIS 6 | 7 | sbank version 8 | 9 | # DESCRIPTION 10 | 11 | Returns the version of slurm-bank 12 | 13 | # SEE ALSO 14 | 15 | * [[sbank]](1) 16 | 17 | # AUTHOR 18 | 19 | Jimmy Tang 20 | 21 | # COPYRIGHT 22 | 23 | Copyright (C) 2011 Trinity Centre for High Performance Computing, Trinity College Dublin. 24 | There is NO WARRANTY, to the extent permitted by law. 25 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/tchpc_icon128x128.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/tchpc_icon128x128.png -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/templates/bare.tmpl: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/templates/walkthrough.tmpl: -------------------------------------------------------------------------------- 1 |

2 | 3 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/tests.mdwn: -------------------------------------------------------------------------------- 1 | There is a simple test suite to make sure things are working as we 2 | expect. To execute them, 3 | 4 | make test 5 | 6 | The tests behave differently if you run them as a normal user or as root. 7 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/use_case/admin.mdwn: -------------------------------------------------------------------------------- 1 | ### What the admin *David* needs to know 2 | 3 | The admin David will need to know far more than the user. David will 4 | need to decide on some policies and allocations. Create projects 5 | and add people to them with [[sbank-project]]. 6 | 7 | Once the projects have been created, David will need to deposit hours 8 | to projects using [[sbank-deposit]]. 9 | 10 | David could also check the balances of all accounts and users with 11 | `sbank balance statement -A` 12 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/use_case/user.mdwn: -------------------------------------------------------------------------------- 1 | ### What the user *Charles* will want to know 2 | 3 | `sbank balance statement -u` to see Charles' own balance. Apart from 4 | this one command Charles does not need to know much more about the 5 | slurm bank commands. 6 | 7 | If Charles were a PI or team leader in a project, he may want to see 8 | a more detailed balance sheet. He can do this with `sbank balance 9 | statement`. This will show the usage for all members of all of his 10 | projects. 11 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/walkthrough.mdwn: -------------------------------------------------------------------------------- 1 | A walkthrough of the basic features of sbank. 2 | 3 | [[!toc]] 4 | 5 | [[!inline feeds=no show=0 template=walkthrough pagenames=""" 6 | install 7 | setup 8 | creating_projects 9 | deciding_on_a_policy 10 | deposit_hours_to_an_account 11 | checking_account_balances 12 | estimating_time_for_a_job 13 | checking_if_enough_hours_are_available 14 | submitting_jobs_with_sbank-submit 15 | expiring_accounts 16 | refunding_hours 17 | """]] 18 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/walkthrough/deciding_on_a_policy.mdwn: -------------------------------------------------------------------------------- 1 | * Figure out how many CPU hours are available on the cluster. 2 | * Decide on how many projects to support and how many hours to allocate 3 | to each project. 4 | * Decide on how much to over-subscribe. 5 | * Create accounts for each project or group, perhaps set up a hierarchy 6 | of projects. 7 | * Allocate hours to the projects/groups. 8 | * Review projects and usage. Decide if QOS or fairshare is needed or not. 9 | * Go to start. 10 |
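Putting the admin use case and the policy checklist above together, the allocation cycle can be sketched using only commands documented elsewhere in this tree (cluster and account names are illustrative):

    sbank deposit -c mycluster -a physics -t 10000   # allocate hours to a project
    sbank balance statement -A                       # review usage across all accounts
    sbank project expire -c mycluster -a physics     # zero the limit when the project ends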
-------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/walkthrough/deposit_hours_to_an_account.mdwn: -------------------------------------------------------------------------------- 1 | To add hours to or remove hours from an account we can use the 2 | [[sbank-deposit]] command. 3 | 4 | For example, to add 1000 hours to 'myaccount' we can do this 5 | 6 | $ sbank deposit -c mycluster -a myaccount -t 1000 7 | 8 | To remove 500 hours from 'myaccount' 9 | 10 | $ sbank deposit -c mycluster -a myaccount -t -500 11 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/walkthrough/expiring_accounts.mdwn: -------------------------------------------------------------------------------- 1 | To expire projects the admin can either use [[sbank-deposit]] to remove 2 | all the remaining hours from an account, or use [[sbank-project]] to expire 3 | an account. 4 | 5 | $ sbank project expire -c mycluster -a myaccount 6 | 7 | The above command simply zeroes the account limit. 8 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/doc/walkthrough/refunding_hours.mdwn: -------------------------------------------------------------------------------- 1 | If a job has failed you may want to refund the hours that the job 2 | has used. To do this you need to know the job ID. 3 | 4 | $ sbank refund job -j 5345 5 | 6 | The refund command will query slurmdbd to look up the account and 7 | the elapsed time. The elapsed time will be deposited back into the 8 | account the job originally ran from. 9 | 10 | In general this should be left as a people issue. 11 |
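Because the refund relies on what slurmdbd recorded for the job, it can help to inspect the job's accounted usage before refunding; a sketch using the job ID from the example above:

    sacct -j 5345 --format=Account,Elapsed,AllocCPUS -n   # what slurmdbd recorded for the job
    sbank refund job -j 5345                              # redeposit the elapsed hours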
-------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/expiredb/expireprojects.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | projects=$(recsel -C -R project -t project -e "enddate << '`date -I`'" projects.rec) 4 | 5 | for i in $projects 6 | do 7 | sbank project expire -a $i -c $(sbank cluster list) 8 | done 9 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/expiredb/projects.rec: -------------------------------------------------------------------------------- 1 | %rec: project 2 | %mandatory: project createdate 3 | %type: project line 4 | %type: startdate date 5 | %type: enddate date 6 | %type: createdate date 7 | %type: status enum active inactive 8 | %type: id int 9 | %key: id 10 | %auto: id createdate 11 | 12 | id: 0 13 | modifydate: Thu, 26 May 2011 14:46:14 +0100 14 | project: physics 15 | enddate: 2009-06-06 16 | status: active 17 | 18 | id: 1 19 | modifydate: Thu, 26 May 2011 14:49:01 +0100 20 | project: chemistry 21 | enddate: 2011-06-06 22 | status: inactive 23 | 24 | id: 2 25 | modifydate: Thu, 26 May 2011 14:49:42 +0100 26 | project: maths 27 | enddate: 2012-06-06 28 | status: active 29 | 30 | id: 3 31 | modifydate: Thu, 26 May 2011 14:49:42 +0100 32 | project: maths001 33 | enddate: 2011-05-06 34 | status: active 35 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/mdwn2man: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | # Warning: hack 3 | 4 | my $prog=shift; 5 | my $section=shift; 6 | 7 | print ".TH $prog $section\n"; 8 | 9 | while (<>) { 10 | s{(\\?)\[\[([^\s\|\]]+)(\|[^\s\]]+)?\]\]}{$1 ? "[[$2]]" : $2}eg; 11 | s/\`//g; 12 | s/^\s*\./\\&./g; 13 | if (/^#\s/) { 14 | s/^#\s/.SH /; 15 | <>; # blank; 16 | } 17 | s/^[ \n]+//; 18 | s/^\t/ /; 19 | s/-/\\-/g; 20 | s/^Warning:.*//g; 21 | s/^$/.PP\n/; 22 | s/^\*\s+(.*)/.IP "$1"/; 23 | next if $_ eq ".PP\n" && $skippara; 24 | if (/^.IP /) { 25 | $inlist=1; 26 | $skippara=0; 27 | } 28 | elsif (/.SH/) { 29 | $skippara=0; 30 | $inlist=0; 31 | } 32 | elsif (/^\./) { 33 | $skippara=1; 34 | } 35 | else { 36 | $skippara=0; 37 | } 38 | if ($inlist && $_ eq ".PP\n") { 39 | $_=".IP\n"; 40 | } 41 | 42 | print $_; 43 | } 44 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/shFlags/doc/TODO.txt: -------------------------------------------------------------------------------- 1 | improve zsh, automatically ... (pulled from configure) 2 | 3 | if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then 4 | emulate sh 5 | NULLCMD=: 6 | # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which 7 | # is contrary to our usage. Disable this feature. 8 | alias -g '${1+"$@"}'='"$@"' 9 | setopt NO_GLOB_SUBST 10 | else 11 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/shFlags/doc/contributors.txt: -------------------------------------------------------------------------------- 1 | I'd like to thank these people for their contributions to shFlags.
2 | 3 | Maciej Bliziński -- _many_ code reviews 4 | Bjarni Einarsson -- bug reports 5 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/shFlags/examples/hello_world.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Copyright 2008 Kate Ward. All Rights Reserved. 4 | # Released under the LGPL (GNU Lesser General Public License) 5 | # 6 | # Author: kate.ward@forestent.com (Kate Ward) 7 | # 8 | # This is the proverbial 'Hello, world!' script to demonstrate the most basic 9 | # functionality of shFlags. 10 | # 11 | # This script accepts a single command-line flag of '-n' (or 12 | # '--name'). If a name is given, it is output, otherwise the default of 'world' 13 | # is output. 14 | 15 | # source shflags 16 | . ../src/shflags 17 | 18 | # define a 'name' command-line string flag 19 | DEFINE_string 'name' 'world' 'name to say hello to' 'n' 20 | 21 | # parse the command-line 22 | FLAGS "$@" || exit 1 23 | eval set -- "${FLAGS_ARGV}" 24 | 25 | echo "Hello, ${FLAGS_name}!" 26 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/src/sbank-common: -------------------------------------------------------------------------------- 1 | ## generic functions 2 | die() 3 | { 4 | [ $# -gt 0 ] && echo "error: $@" >&2 5 | flags_help 6 | exit 1 7 | } 8 | 9 | 10 | log() 11 | { 12 | echo "log: $@" >&2 13 | } 14 | 15 | warn() 16 | { 17 | echo "warn: $@" >&2 18 | } 19 | 20 | debug() 21 | { 22 | [ "${FLAGS_debug}" = "${FLAGS_TRUE}" ] || return 23 | echo "DEBUG: $@" >&2 24 | } 25 | 26 | sanity_check() 27 | { 28 | local ret 29 | # check for sacctmgr 30 | # note - when this is packaged e.g. as an rpm, the installation package 31 | # should take care of this as a Requirement 32 | debug "SACCTMGR_BIN = $SACCTMGR_BIN" 33 | which ${SACCTMGR_BIN} > /dev/null 2>&1 34 | ret=$? 35 | if [ ${ret} != "0" ] 36 | then 37 | die "unable to find sacctmgr. Is slurm installed? Exiting.."
38 | fi 39 | 40 | } 41 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/src/sbank-version: -------------------------------------------------------------------------------- 1 | SBANK_VERSION=1.4.2 2 | 3 | usage() 4 | { 5 | echo "usage: sbank version" 6 | } 7 | 8 | cmd_default() 9 | { 10 | echo "$SBANK_VERSION" 11 | } 12 | 13 | cmd_help() 14 | { 15 | usage 16 | exit 0 17 | } 18 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/src/shflags: -------------------------------------------------------------------------------- 1 | ../shFlags/src/shflags -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/t/Makefile: -------------------------------------------------------------------------------- 1 | all: 2 | @echo "Try: make test" 3 | @false 4 | 5 | runtests: 6 | ./test.sh 7 | 8 | test: 9 | ../wvtestrun $(MAKE) runtests 10 | 11 | clean:: 12 | rm -f *~ 13 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/t/sample-job1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #SBATCH -n 32 4 | #SBATCH -t 4-00:00:00 5 | 6 | echo "HELLO WORLD" 7 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/files/slurm-bank-master/t/sample-job2.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #SBATCH -N 32 4 | #SBATCH -c 8 5 | #SBATCH -t 4-00:00:00 6 | 7 | echo "HELLO WORLD" 8 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm-sbank/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Create directory structure 4 | file: 5 | name: "{{ sbank_path }}/bin" 6 | state: directory 7 | 8 | - name: Copy files 9 | copy: 10 | src: 'slurm-bank-master/src/' 11 | dest: "{{ sbank_path }}/bin" 12 | 13 | - name: Make executable 14 | file: 15 | name: "{{ item }}" 16 | mode: 0755 17 | with_items: 18 | - "{{ sbank_path }}/bin/sbank" 19 | - "{{ sbank_path }}/bin/_sbank-balance.pl" 20 | - "{{ sbank_path }}/bin/_sbank-common-cpu_hrs.pl" 21 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/files/balance.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin 3 | export PATH=${PATH} 4 | if [ ${SLURM_JOB_ID} ]; then 5 | stdout=`/usr/bin/scontrol show job ${SLURM_JOB_ID} | grep -i stdout | cut -f2 -d '='` 6 | else 7 | echo "No SLURM JOB ID detected" 8 | fi 9 | if [[ "x${stdout}" == "x" ]]; then 10 | stdout="/tmp/output" 11 | fi 12 | if [ ${SLURM_JOB_USER} ]; then 13 | echo "Your remaining balance (at the time of job end)" | tee -a ${stdout} 14 | /bin/sbank balance statement -u ${SLURM_JOB_USER} | tee -a ${stdout} 15 | else 16 | echo "No SLURM user detected" | tee -a ${stdout} 17 | fi 18 | exit 0 19 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/files/cgroup.conf: -------------------------------------------------------------------------------- 1 | 
#CgroupAutomount=yes 2 | #CgroupMountpoint=/sys/fs/cgroup 3 | #CgroupReleaseAgentDir="/etc/slurm/cgroup" 4 | #ConstrainCores=yes 5 | #ConstrainDevices=yes 6 | #TaskAffinity=no 7 | #ConstrainRAMSpace=yes 8 | #ConstrainSwapSpace=yes 9 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/files/epilog.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | rm -rf /tmp/${SLURM_JOB_USER}.${SLURM_JOB_ID} 3 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/files/pam-slurm: -------------------------------------------------------------------------------- 1 | auth required pam_localuser.so 2 | account required pam_unix.so 3 | session required pam_limits.so 4 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/files/prolog.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | SLURM_TMPDIR=/tmp/${SLURM_JOB_USER}.${SLURM_JOB_ID} 3 | mkdir -p "${SLURM_TMPDIR}" 4 | chmod -R 777 ${SLURM_TMPDIR} 5 | 6 | echo "export SLURM_TMPDIR=${SLURM_TMPDIR}" 7 | echo "export TMPDIR=${SLURM_TMPDIR}" 8 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/files/slurm-health.conf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/slurm/files/slurm-health.conf -------------------------------------------------------------------------------- /site/roles/trinity/slurm/files/topology.conf: -------------------------------------------------------------------------------- 1 | # Topology file to enable optimal job placement 2 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for slurm 3 | 4 | - name: restart slurm 5 | service: 6 | name: '{{ item }}' 7 | state: restarted 8 | with_items: 9 | - slurmdbd 10 | - slurmctld 11 | retries: 10 12 | delay: 15 13 | when: primary|default(True) 14 | 15 | - name: restart munge 16 | service: 17 | name: munge 18 | state: restarted 19 | retries: 10 20 | delay: 15 21 | when: primary|default(True) 22 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/templates/slurm-nodes.conf.j2: -------------------------------------------------------------------------------- 1 | NodeName=DEFAULT Boards=1 SocketsPerBoard=1 CoresPerSocket=1 ThreadsPerCore=1 RealMemory=100 State=UNKNOWN 2 | 3 | # The line below is an example/placeholder and can be altered 4 | # NodeName=dummynode 5 | 6 | 7 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/templates/slurm-partitions.conf.j2: -------------------------------------------------------------------------------- 1 | PartitionName=defq Nodes=ALL Shared=YES MaxTime=INFINITE State=UP Default=YES AllowAccounts=ALL 2 | 3 | 4 | --------------------------------------------------------------------------------
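Once these templates have been rendered into the Slurm configuration and slurmctld restarted, the node and partition layout and the prolog/epilog wiring can be sanity-checked from the controller. A minimal sketch, assuming a running cluster with the default defq partition:

    $ sinfo                                               # defq should appear as the default partition
    $ scontrol show partition defq                        # confirm Shared, MaxTime and AllowAccounts
    $ scontrol show config | grep -i -e prolog -e epilog  # confirm the scripts above are active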
/site/roles/trinity/slurm/templates/slurm-user.conf.j2: -------------------------------------------------------------------------------- 1 | PriorityType=priority/multifactor 2 | PriorityWeightQOS=10000 # All weights are multiplied by a value between 0 and 1 and then converted back to a 32 bit unsigned int so make them all big enough not to lose precision 3 | JobAcctGatherFrequency=15 4 | JobAcctGatherType=jobacct_gather/cgroup 5 | PreemptType=preempt/none 6 | 7 | # Timers 8 | SlurmctldTimeout=10 9 | SlurmdTimeout=10 10 | 11 | # Consumable Resource 12 | ProctrackType=proctrack/cgroup 13 | #FastSchedule=0 14 | SelectType=select/cons_tres 15 | SelectTypeParameters=CR_Core 16 | DefMemPerCPU=1024 17 | 18 | 19 | PropagateResourceLimitsExcept=MEMLOCK 20 | 21 | TaskProlog={{ trix_shared }}/etc/slurm/prolog.sh 22 | Prolog={{ trix_shared }}/etc/slurm/prolog.sh 23 | Epilog={{ trix_shared }}/etc/slurm/epilog.sh 24 | EpilogSlurmctld={{ trix_shared }}/etc/slurm/balance.sh 25 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/templates/slurmdbd.conf.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | # See the slurmdbd.conf man page for more information. 4 | # 5 | 6 | # Authentication info 7 | AuthType=auth/munge 8 | #AuthInfo=/var/run/munge/munge.socket.2 9 | 10 | # slurmDBD info 11 | DbdHost=localhost 12 | SlurmUser=slurm 13 | #MessageTimeout=300 14 | DebugLevel=4 15 | LogFile=/var/log/slurm/slurmdbd.log 16 | PidFile=/var/run/slurmdbd/slurmdbd.pid 17 | #PrivateData=accounts,users,usage,jobs 18 | 19 | # Database info 20 | StorageType=accounting_storage/mysql 21 | StorageHost=localhost 22 | StoragePort=3306 23 | StoragePass={{ slurmdbd_sql_pwd }} 24 | StorageUser={{ slurmdbd_sql_user }} 25 | StorageLoc={{ slurmdbd_sql_db }} 26 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/templates/systemd/munge.service.d/trinity.conf.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | [Unit] 4 | After=remote-fs.target 5 | Requires=remote-fs.target 6 | 7 | [Service] 8 | ExecStart= 9 | ExecStart=/usr/sbin/munged --key-file {{ munge_conf_path }}/munge.key 10 | 11 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/templates/systemd/slurmctld.service.d/trinity.conf.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | [Unit] 4 | Requires=munge.service slurmdbd.service 5 | 6 | [Service] 7 | Restart=always 8 | -------------------------------------------------------------------------------- /site/roles/trinity/slurm/templates/systemd/slurmdbd.service.d/trinity.conf.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | [Unit] 4 | Requires=munge.service 5 | 6 | [Service] 7 | Restart=always 8 | -------------------------------------------------------------------------------- /site/roles/trinity/ssh/files/ssh.sh: -------------------------------------------------------------------------------- 1 | if [ "$(id -u 2>/dev/null)" != "0" ]; then 2 | if [ !
-f ~/.ssh/id_rsa ]; then 3 | if [ -w ~ ]; then 4 | echo Creating RSA key for ssh 5 | ssh-keygen -t rsa -b 4096 -f ~/.ssh/id_rsa -q -N "" 6 | cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys 7 | chmod 600 ~/.ssh/authorized_keys ~/.ssh/id_rsa ~/.ssh/id_rsa.pub 8 | fi 9 | fi 10 | fi 11 | -------------------------------------------------------------------------------- /site/roles/trinity/ssh/files/ssh_cluster_config: -------------------------------------------------------------------------------- 1 | Host * 2 | IdentityFile ~/.ssh/id_cluster_rsa 3 | -------------------------------------------------------------------------------- /site/roles/trinity/ssh/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for sshd 3 | 4 | - name: restart sshd 5 | service: 6 | name: sshd 7 | state: restarted 8 | -------------------------------------------------------------------------------- /site/roles/trinity/ssl-cert/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | ssl_cert_path: '/etc/ssl/certs' 4 | ssl_cert_country: 'NL' 5 | ssl_cert_locality: 'Amsterdam' 6 | ssl_cert_organization: 'ClusterVision Solutions B.V.' 7 | ssl_cert_state: 'Noord Holland' 8 | ssl_cert_altname: 'controller.cluster' 9 | 10 | ssl_cert_days: '3650' 11 | 12 | ssl_cert_owner: 'root' 13 | ssl_cert_owner_id: 0 14 | 15 | ssl_cert_group: 'ssl' 16 | ssl_cert_group_id: 881 17 | -------------------------------------------------------------------------------- /site/roles/trinity/sssd/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for sssd 3 | 4 | sss_packages: 5 | - authconfig 6 | - sssd 7 | 8 | sss_allowed_groups: 9 | - admins 10 | 11 | sss_ldap_hosts: 12 | - 'controller.cluster' 13 | 14 | sss_filter_enabled: true 15 | 16 | ca_certificate_path: /etc/openldap/certs/ 17 | ldap_conf: /etc/openldap/ldap.conf 18 | 19 | sssd_config_location: /etc/sssd 20 | -------------------------------------------------------------------------------- /site/roles/trinity/sssd/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for sssd 3 | 4 | - name: restart sssd 5 | service: 6 | name: sssd 7 | state: restarted 8 | when: not in_image 9 | -------------------------------------------------------------------------------- /site/roles/trinity/sssd/meta/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | -------------------------------------------------------------------------------- /site/roles/trinity/sssd/templates/nsswitch-manually.conf.j2: -------------------------------------------------------------------------------- 1 | # /etc/nsswitch.conf 2 | # 3 | # For Debian/Ubuntu users: 4 | # Example configuration of GNU Name Service Switch functionality. 5 | # If you have the `glibc-doc-reference' and `info' packages installed, try: 6 | # `info libc "Name Service Switch"' for information about this file. 
7 | # 8 | # taken from a Rocky installation 9 | 10 | passwd: files sss systemd 11 | group: files sss systemd 12 | shadow: files sss 13 | gshadow: files 14 | 15 | netgroup: sss files 16 | automount: sss files 17 | services: sss files 18 | sudoers: files 19 | 20 | hosts: files dns myhostname 21 | aliases: files 22 | ethers: files 23 | networks: files dns 24 | protocols: files 25 | publickey: files 26 | rpc: files 27 | -------------------------------------------------------------------------------- /site/roles/trinity/sssd/vars/Debian.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | sss_packages: 4 | - sssd 5 | - libnss-sss 6 | - libpam-sss 7 | - libnss-ldap 8 | 9 | 10 | ca_certificate_path: /etc/ldap/certs/ 11 | ldap_conf: /etc/ldap/ldap.conf 12 | 13 | -------------------------------------------------------------------------------- /site/roles/trinity/sssd/vars/RedHat.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/sssd/vars/RedHat.yaml -------------------------------------------------------------------------------- /site/roles/trinity/sssd/vars/SLE.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | sss_packages: 4 | - sssd 5 | - sssd-kcm 6 | - sssd-ldap 7 | - sssd-tools 8 | - libnfsidmap-sss 9 | - libsss_certmap0 10 | - libsss_idmap0 11 | - libsss_nss_idmap0 12 | - libnss_usrfiles2 13 | - pam_ssh 14 | 15 | sssd_config_location: /etc/sssd 16 | -------------------------------------------------------------------------------- /site/roles/trinity/sssd/vars/Suse.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | sss_packages: 4 | - sssd 5 | - sssd-kcm 6 | - sssd-ldap 7 | - sssd-tools 8 | - libnfsidmap-sss 9 | - libsss_certmap0 10 | - libsss_idmap0 11 | - libsss_nss_idmap0 12 | - libnss_usrfiles2 13 | - pam_ssh 14 | 15 | sssd_config_location: /usr/etc/sssd 16 | -------------------------------------------------------------------------------- /site/roles/trinity/sync-secrets/defaults/main.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | sync_secrets_packages: 4 | - nfs-utils 5 | - libnfs 6 | 7 | sync_remote: '{{ trix_ctrl1_ip }}' 8 | -------------------------------------------------------------------------------- /site/roles/trinity/sync-secrets/vars/CentOS8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/sync-secrets/vars/CentOS8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/sync-secrets/vars/CentOS9.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/sync-secrets/vars/CentOS9.yaml -------------------------------------------------------------------------------- /site/roles/trinity/sync-secrets/vars/RedHat8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/sync-secrets/vars/RedHat8.yaml
-------------------------------------------------------------------------------- /site/roles/trinity/sync-secrets/vars/RedHat9.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/sync-secrets/vars/RedHat9.yaml -------------------------------------------------------------------------------- /site/roles/trinity/sync-secrets/vars/Rocky8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/sync-secrets/vars/Rocky8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/sync-secrets/vars/Rocky9.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/sync-secrets/vars/Rocky9.yaml -------------------------------------------------------------------------------- /site/roles/trinity/syslog-ng/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # vars file for syslog-ng 3 | 4 | syslog_forwarding_rules: [] 5 | #syslog_forwarding_rules: 6 | # - name: default 7 | # proto: 'tcp' 8 | # port: 514 9 | # host: 'remote-host' 10 | # facility: '#*' 11 | # level: '*' 12 | 13 | syslog_listeners: [] 14 | #syslog_listeners: 15 | # - name: default 16 | # proto: tcp 17 | # port: 514 18 | 19 | syslog_file_template_rules: [] 20 | #syslog_file_template_rules: 21 | # - name: default 22 | # type: string 23 | # content: '/var/log/cluster-messages/%HOSTNAME%.messages' 24 | # field: '$fromhost-ip' 25 | # criteria: startswith 26 | # rule: '{{ "10.141.0.0".split(".")[:16//8]|join(".") }}' 27 | 28 | syslogng_config_location: /etc/syslog-ng/conf.d 29 | syslogng_service: syslog-ng 30 | syslogng_package: syslog-ng 31 | 32 | -------------------------------------------------------------------------------- /site/roles/trinity/syslog-ng/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for syslog-ng 3 | 4 | - name: restart syslogng 5 | service: 6 | name: "{{ syslogng_service }}" 7 | state: restarted 8 | when: not in_image 9 | -------------------------------------------------------------------------------- /site/roles/trinity/syslog-ng/templates/forwarding.conf.j2: -------------------------------------------------------------------------------- 1 | 2 | {% if item.host is string %} 3 | 4 | destination remote { network("{{ item.host }}" transport("{{ 'tcp' if item.proto|lower == 'tcp' else 'udp' }}") port({{ item.port }})); }; 5 | 6 | {% else %} 7 | {% for host in item.host %} 8 | {# syslog-ng requires unique destination names, so number them when forwarding to multiple hosts #} 9 | destination remote_{{ loop.index }} { network("{{ host }}" transport("{{ 'tcp' if item.proto|lower == 'tcp' else 'udp' }}") port({{ item.port }})); }; 10 | 11 | {% endfor %} 12 | {% endif %} 13 | 14 | 15 | -------------------------------------------------------------------------------- /site/roles/trinity/syslog-ng/vars/Debian.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/syslog-ng/vars/Debian.yaml --------------------------------------------------------------------------------
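With one of the commented forwarding rules above enabled (for example the 'default' entry pointing at 'remote-host' over TCP port 514), delivery can be smoke-tested from a node running syslog-ng; 'remote-host' stands in for the real destination, as in the defaults file:

    $ logger -p user.info "trinityx forwarding test"    # routed through the local syslog-ng rules
    $ logger -T -n remote-host -P 514 "raw tcp test"    # sent straight to the remote listener, bypassing syslog-ng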
/site/roles/trinity/syslog-ng/vars/RedHat.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/syslog-ng/vars/RedHat.yaml -------------------------------------------------------------------------------- /site/roles/trinity/syslog-ng/vars/SLE.yaml: -------------------------------------------------------------------------------- 1 | Suse.yaml -------------------------------------------------------------------------------- /site/roles/trinity/syslog-ng/vars/Suse.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/syslog-ng/vars/Suse.yaml -------------------------------------------------------------------------------- /site/roles/trinity/target/defaults/default.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | default_target: 'multi-user' 4 | 5 | -------------------------------------------------------------------------------- /site/roles/trinity/target/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Setting preferred default systemd target 4 | file: 5 | src: "/usr/lib/systemd/system/{{ default_target }}.target" 6 | dest: "/usr/lib/systemd/system/default.target" 7 | state: link 8 | 9 | -------------------------------------------------------------------------------- /site/roles/trinity/transmission/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # defaults file for transmission 3 | transmission_start_cmd: "/usr/bin/transmission-daemon -f --log-error --no-dht --download-dir {{ trix_luna }}/daemon/files/ --no-blocklist --no-global-seedratio" 4 | -------------------------------------------------------------------------------- /site/roles/trinity/transmission/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handlers file for transmission 3 | - name: "reload transmission" 4 | service: 5 | daemon_reload: yes 6 | name: transmission-daemon.service 7 | state: reloaded 8 | -------------------------------------------------------------------------------- /site/roles/trinity/transmission/templates/override.conf.j2: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Transmission BitTorrent Daemon 3 | After=network.target 4 | 5 | [Service] 6 | User=transmission 7 | Type=notify 8 | ExecStart= 9 | ExecStart={{transmission_start_cmd}} 10 | ExecReload=/bin/kill -s HUP $MAINPID 11 | NoNewPrivileges=true 12 | 13 | [Install] 14 | WantedBy=multi-user.target 15 | 16 | -------------------------------------------------------------------------------- /site/roles/trinity/trix-tree/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # never change trix_tree_local 4 | trix_tree_local: '/trinity/local' 5 | 6 | trix_tree_dirs: 7 | - name: trix_local 8 | path: '{{ trix_tree_local }}' 9 | - name: trix_sync 10 | path: '{{ trix_tree_local }}/sync' 11 | - name: trix_luna 12 | path: '{{ trix_tree_local }}/luna' 13 | - name: trix_etc 14 | path: '{{ trix_tree_local }}/etc' 15 | - name: trix_sbin 16 | path: '{{ trix_tree_local }}/sbin' 17 | - name: trix_ood 18 | path: 
'{{ trix_tree_local }}/ondemand' 19 | - name: trix_ssl 20 | path: '{{ trix_tree_local }}/etc/ssl' 21 | 22 | -------------------------------------------------------------------------------- /site/roles/trinity/tunables/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # Recommended tuned profiles are hpc-compute, network-latency, throughput-performance 4 | # To view the available profiles for your system, use 'tuned-adm list' 5 | 6 | compute_tuned_profile: 'hpc-compute' 7 | controller_tuned_profile: 'throughput-performance' 8 | 9 | tunables_packages: 10 | - tuned 11 | 12 | -------------------------------------------------------------------------------- /site/roles/trinity/tunables/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handler file for tunables 3 | 4 | - name: activate sysctls 5 | command: sysctl --system 6 | -------------------------------------------------------------------------------- /site/roles/trinity/tunables/templates/91-hpc-limits.conf.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | # ClusterVision: added to allow memlock permissions for User space Infiniband verbs 4 | * hard memlock unlimited 5 | * soft memlock unlimited 6 | # ClusterVision: added for OpenIB gen2 stack. 7 | * - memlock unlimited 8 | # ClusterVision: added for making sure stack size is not limited. 9 | * - stack unlimited 10 | # ClusterVision: increase max open files limit 11 | * - nofile 65536 12 | 13 | -------------------------------------------------------------------------------- /site/roles/trinity/tunables/vars/Centos8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/tunables/vars/Centos8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/tunables/vars/Centos9.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/tunables/vars/Centos9.yaml -------------------------------------------------------------------------------- /site/roles/trinity/tunables/vars/RedHat8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/tunables/vars/RedHat8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/tunables/vars/RedHat9.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/tunables/vars/RedHat9.yaml -------------------------------------------------------------------------------- /site/roles/trinity/tunables/vars/Rocky8.yaml: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/tunables/vars/Rocky8.yaml -------------------------------------------------------------------------------- /site/roles/trinity/tunables/vars/Rocky9.yaml: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/clustervision/trinityX/4ca89f421a02b995ed9a19a3ffea58a65fddac4e/site/roles/trinity/tunables/vars/Rocky9.yaml -------------------------------------------------------------------------------- /site/roles/trinity/wrapup-images/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - block: 3 | - name: Cleanup the image 4 | command: yum clean all 5 | when: ansible_facts['os_family'] == "RedHat" 6 | 7 | - name: Cleanup the image 8 | command: apt-get clean 9 | when: ansible_facts['os_family'] == "Debian" 10 | 11 | - name: Cleanup /tmp 12 | shell: rm -f /tmp/* 13 | 14 | # cannot be done from within image - Antoine 15 | # - block: 16 | # - name: Pack the image 17 | # run_once: true 18 | # ansible.builtin.shell: 19 | # cmd: "luna osimage pack {{ image_name }}" 20 | 21 | when: in_image 22 | tags: always 23 | -------------------------------------------------------------------------------- /site/roles/trinity/yml-check/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | yml_minimum_version: 150100 4 | 5 | -------------------------------------------------------------------------------- /site/roles/vpn/strongswan/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | vpn_remote_public_ip: 4 | vpn_local_external_ip: 5 | 6 | vpn_local_private_subnet: 7 | vpn_remote_private_subnet: 8 | 9 | vpn_psk_secret: D8KgB2SQHZ+zsE/fxlmX+I9m2P4zkCRCib9Jw9vEO14= 10 | vpn_esp_proposal: aes256-sha1-modp1024 -------------------------------------------------------------------------------- /site/roles/vpn/strongswan/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Create VPN Connection on RedHat 4 | include_tasks: "redhat.yml" 5 | when: ansible_facts['os_family'] == "RedHat" 6 | 7 | - name: Create VPN Connection on Debian/Ubuntu 8 | include_tasks: "ubuntu.yml" 9 | when: ansible_distribution == 'Debian' or ansible_distribution == 'Ubuntu' 10 | 11 | -------------------------------------------------------------------------------- /yamllint.yml: -------------------------------------------------------------------------------- 1 | --- 2 | extends: default 3 | 4 | rules: 5 | braces: 6 | max-spaces-inside: 1 7 | level: error 8 | brackets: 9 | max-spaces-inside: 1 10 | level: error 11 | line-length: disable 12 | --------------------------------------------------------------------------------
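The yamllint configuration above disables line-length checks and tightens brace and bracket spacing; as a quick sketch, run from the repository root it can be applied to the playbook tree:

    $ yamllint -c yamllint.yml site/                    # lint every playbook and role
    $ yamllint -c yamllint.yml site/controller.yml      # or a single playbook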