├── .ansible-lint ├── .circleci └── config.yml ├── .gitignore ├── .gitmodules ├── .yamllint ├── LICENSE ├── README.md ├── ansible ├── .gitignore ├── README.md ├── ansible.cfg ├── docs │ └── video.md ├── files │ └── bind │ │ ├── db.conference.fosdem.net │ │ ├── db.n.fosdem.net │ │ └── db.v.conference.fosdem.net ├── group_vars │ ├── all │ │ ├── default.yml │ │ ├── fosdem_subnets.yml │ │ ├── grafana.yml │ │ └── secrets.yml │ ├── encoder-backend │ │ ├── main.yml │ │ └── secrets.yml │ ├── encoder-master │ │ ├── main.yml │ │ └── secrets.yml │ ├── event-primary │ │ ├── alertmanager_irc_relay.yml │ │ ├── backup.yml │ │ ├── caddy.yml │ │ ├── chrony.yml │ │ ├── coredns.yml │ │ ├── defaults.yml │ │ ├── grafana.yml │ │ └── secrets.yml │ ├── event-prometheus-servers │ │ ├── alertmanager.yml │ │ ├── dns.yml │ │ ├── prometheus.yml │ │ ├── restic.yml │ │ └── secrets.yml │ ├── event-secondary │ │ ├── backup.yml │ │ ├── caddy.yml │ │ ├── chrony.yml │ │ ├── coredns.yml │ │ ├── docker.yml │ │ └── secrets.yml │ ├── infodesk-laptop.yml │ ├── public-dashboard │ │ ├── caddy.yml │ │ ├── grafana.yml │ │ └── secrets.yml │ ├── video-box-mixer.yml │ ├── video-box.yml │ ├── video-control-server │ │ ├── default.yml │ │ ├── grafana.yml │ │ └── secrets.yml │ ├── video-stream-dump-external │ │ └── default.yml │ ├── video-stream-dump │ │ └── default.yml │ ├── video-streamer-backend.yml │ ├── video-streamer-frontend.yml │ ├── video-voctop.yml │ └── video.yml ├── hosts ├── playbooks │ ├── files │ │ ├── coredns │ │ │ └── zones │ │ └── prometheus │ │ │ └── rules │ │ │ ├── bind.rules │ │ │ ├── cisco_wlc.rules │ │ │ ├── node.rules │ │ │ ├── snmp.rules │ │ │ ├── video.rules │ │ │ └── voctop.rules │ ├── roles │ │ ├── common │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── files │ │ │ │ └── screenrc │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── chrony.yml │ │ │ │ ├── configure_apt.yml │ │ │ │ ├── configure_fosdem_revision.yml │ │ │ │ ├── configure_sshd.yml │ │ │ │ ├── 
configure_sysctl.yml │ │ │ │ ├── configure_timezone.yml │ │ │ │ ├── install_packages.yml │ │ │ │ ├── main.yml │ │ │ │ ├── set_hostname.yml │ │ │ │ ├── user_fosdem.yml │ │ │ │ └── user_root.yml │ │ │ ├── templates │ │ │ │ ├── apt │ │ │ │ │ └── sources.list.j2 │ │ │ │ ├── chrony.conf.j2 │ │ │ │ ├── fosdem_revision │ │ │ │ └── ssh │ │ │ │ │ └── sshd_config.j2 │ │ │ └── vars │ │ │ │ └── main.yml │ │ ├── dashboard_sync │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── files │ │ │ │ └── sync_grafana_dashboards.rb │ │ │ ├── tasks │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ └── sync_grafana_dashboards.sh │ │ ├── desk-laptop │ │ │ ├── files │ │ │ │ ├── gdm3 │ │ │ │ │ └── daemon.conf │ │ │ │ ├── network │ │ │ │ │ ├── FOSDEM │ │ │ │ │ ├── FOSDEM-dualstack │ │ │ │ │ └── interfaces │ │ │ │ ├── printer │ │ │ │ │ ├── Xerox-Phaser-6510.ppd │ │ │ │ │ ├── Xerox-WorkCentre-6515.ppd │ │ │ │ │ └── printers.conf │ │ │ │ └── wallpaper.png │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ └── tasks │ │ │ │ ├── configure_cups.yml │ │ │ │ ├── configure_desktop.yml │ │ │ │ ├── configure_hardware.yml │ │ │ │ ├── configure_network.yml │ │ │ │ ├── configure_user_fosdem.yml │ │ │ │ ├── install_packages.yml │ │ │ │ └── main.yml │ │ ├── dhclient │ │ │ ├── tasks │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ └── dhclient.conf.j2 │ │ ├── encoder-backend │ │ │ ├── files │ │ │ │ ├── deb-multimedia.pref │ │ │ │ └── gridengine.defaults │ │ │ ├── tasks │ │ │ │ ├── fix_hosts.yml │ │ │ │ ├── homedir.yml │ │ │ │ ├── install_packages.yml │ │ │ │ ├── main.yml │ │ │ │ ├── users.yml │ │ │ │ └── whisper.yml │ │ │ └── templates │ │ │ │ └── hosts.j2 │ │ ├── encoder-common │ │ │ ├── files │ │ │ │ ├── announce.ep │ │ │ │ ├── close.png │ │ │ │ ├── fonts │ │ │ │ │ ├── Frutiger Black Italic.ttf │ │ │ │ │ ├── Frutiger Black.ttf │ │ │ │ │ ├── Frutiger Bold Italic.ttf │ │ │ │ │ ├── Frutiger Bold.ttf │ │ │ │ │ ├── Frutiger Italic.ttf │ │ │ │ │ ├── Frutiger Light Italic.ttf │ │ │ │ │ ├── Frutiger Light.ttf │ │ │ │ │ ├── 
Frutiger Regular.ttf │ │ │ │ │ ├── Frutiger Roman Italic.ttf │ │ │ │ │ ├── Frutiger Roman.ttf │ │ │ │ │ ├── Signika-Bold.ttf │ │ │ │ │ ├── Signika-Light.ttf │ │ │ │ │ ├── Signika-Medium.ttf │ │ │ │ │ ├── Signika-Regular.ttf │ │ │ │ │ ├── Signika-SemiBold.ttf │ │ │ │ │ └── Signika-VariableFont_GRAD,wght.ttf │ │ │ │ ├── load.sh │ │ │ │ ├── notify-email.ep │ │ │ │ └── open.svg │ │ │ ├── tasks │ │ │ │ ├── gridengine.yml │ │ │ │ ├── main.yml │ │ │ │ ├── mounts.yml │ │ │ │ └── sreview.yml │ │ │ ├── templates │ │ │ │ └── config.j2 │ │ │ └── vars │ │ │ │ └── main.yml │ │ ├── encoder-master │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── files │ │ │ │ ├── Schedule │ │ │ │ │ └── Fosdem.pm │ │ │ │ ├── sql_exporter │ │ │ │ │ └── sreview.collector.yml │ │ │ │ └── syncvideos-pretalx │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── config_schedule_import.yml │ │ │ │ ├── install_packages.yml │ │ │ │ ├── main.yml │ │ │ │ ├── nfs.yml │ │ │ │ ├── postgresql.yml │ │ │ │ └── sql_exporter.yml │ │ │ └── templates │ │ │ │ ├── sql_exporter.service.j2 │ │ │ │ ├── sql_exporter.yml.j2 │ │ │ │ └── to-pretalx.pl │ │ ├── encoder-network │ │ │ ├── handlers │ │ │ │ └── main.yaml │ │ │ ├── tasks │ │ │ │ └── main.yaml │ │ │ └── templates │ │ │ │ └── nftables.conf.j2 │ │ ├── encoder-storage │ │ │ └── tasks │ │ │ │ └── main.yml │ │ ├── encoder-webinterface │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── files │ │ │ │ └── maint.html │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── config_nginx_sreview.yml │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ └── nginx │ │ │ │ └── sites-enabled │ │ │ │ ├── sreview-web-http.j2 │ │ │ │ └── sreview-web-https.j2 │ │ ├── helm-kubequeue │ │ │ └── tasks │ │ │ │ └── main.yml │ │ ├── helm-sreview │ │ │ ├── files │ │ │ │ ├── announce.ep │ │ │ │ ├── apology.svg │ │ │ │ ├── fosdem_video_post.png │ │ │ │ ├── notify.ep │ │ │ │ ├── pre.svg │ │ │ │ └── secret.yaml │ │ │ ├── tasks │ │ │ │ └── main.yml │ │ │ ├── templates │ │ │ │ ├── 
fosdem.yaml.j2 │ │ │ │ ├── kubequeue-defs.j2 │ │ │ │ ├── synclinkjob.j2 │ │ │ │ └── syncmailjob.j2 │ │ │ └── vars │ │ │ │ └── main.yml │ │ ├── helm-upload │ │ │ ├── files │ │ │ │ └── secret.yaml │ │ │ ├── tasks │ │ │ │ └── main.yml │ │ │ ├── templates │ │ │ │ ├── fosdem.yaml.j2 │ │ │ │ ├── kubequeue-defs.j2 │ │ │ │ └── syncjob.yaml.j2 │ │ │ └── vars │ │ │ │ └── main.yml │ │ ├── helm │ │ │ └── tasks │ │ │ │ └── main.yml │ │ ├── hwraid │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ └── tasks │ │ │ │ └── main.yml │ │ ├── ipmitool │ │ │ ├── files │ │ │ │ └── ipmi_sensor_prometheus.py │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ ├── ipmi_metrics.service │ │ │ │ └── ipmi_metrics.timer │ │ ├── json_exporter │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── files │ │ │ │ └── config.yml │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── install.yml │ │ │ │ ├── main.yml │ │ │ │ └── preflight.yml │ │ │ └── templates │ │ │ │ └── json_exporter.service.j2 │ │ ├── laptop │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── cleanup_apt_proxy.yml │ │ │ │ ├── disable_suspend.yml │ │ │ │ ├── disable_usbcore_autosuspend.yml │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ └── grub │ │ │ │ └── disable-usbcore-autosuspend.cfg │ │ ├── maps-tileserver │ │ │ ├── files │ │ │ │ ├── c3nav-tiles.ini │ │ │ │ └── tmpfiles.d │ │ │ │ │ └── c3nav.conf │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── install_packages.yml │ │ │ │ ├── main.yml │ │ │ │ └── virtualenv.yml │ │ │ └── templates │ │ │ │ ├── c3nav-tiles.service │ │ │ │ └── nginx │ │ │ │ ├── c3nav-tiles │ │ │ │ └── c3nav-tiles.service │ │ ├── maps │ │ │ ├── files │ │ │ │ ├── c3nav-celery.service │ │ │ │ ├── c3nav-gunicorn.service │ │ │ │ ├── celery.conf │ │ │ │ ├── media │ │ │ │ │ └── logo.png │ │ │ │ └── tmpfiles.d │ │ │ │ │ └── c3nav.conf │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── configure_celery.yml │ │ │ 
│ ├── configure_gunicorn.yml │ │ │ │ ├── configure_nginx.yml │ │ │ │ ├── install_packages.yml │ │ │ │ ├── main.yml │ │ │ │ └── virtualenv.yml │ │ │ └── templates │ │ │ │ ├── c3nav │ │ │ │ └── c3nav.cfg │ │ │ │ └── nginx │ │ │ │ └── c3nav │ │ ├── microk8s │ │ │ └── tasks │ │ │ │ └── main.yml │ │ ├── mtail │ │ │ ├── files │ │ │ │ └── mtail │ │ │ │ │ └── nginx.mtail │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ └── defaults │ │ ├── oxidized │ │ │ └── tasks │ │ │ │ └── main.yml │ │ ├── power_supply │ │ │ ├── files │ │ │ │ └── power_supply_metrics.sh │ │ │ ├── tasks │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ ├── power_supply_metrics.service │ │ │ │ └── power_supply_metrics.timer │ │ ├── sshfs │ │ │ └── tasks │ │ │ │ └── main.yml │ │ ├── tacplus │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ └── tac_plus.conf.j2 │ │ ├── tls-certificates │ │ │ ├── README.md │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── files │ │ │ │ └── letsencrypt.chain │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── create_directories.yml │ │ │ │ ├── create_keypair.yml │ │ │ │ ├── letsencrypt.yml │ │ │ │ ├── letsencrypt_challenge.yml │ │ │ │ ├── main.yml │ │ │ │ ├── nginx_common.yml │ │ │ │ └── self_sign.yml │ │ │ └── templates │ │ │ │ └── ansible-common-ssl.conf.j2 │ │ ├── video-audio-parser │ │ │ ├── files │ │ │ │ └── audio-fetcher │ │ │ │ │ ├── audio-fetcher │ │ │ │ │ └── audio-fetcher.service │ │ │ ├── tasks │ │ │ │ ├── configure_audiofetcher.yml │ │ │ │ ├── install_packages.yml │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ ├── audio-fetcher-fl.service │ │ │ │ └── audio-fetcher-fr.service │ │ ├── video-box-mixer │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── files │ │ │ │ ├── audio-fetcher │ │ │ │ │ ├── audio-fetcher │ │ │ │ │ ├── audio-fetcher-0.service │ │ │ │ │ └── audio-fetcher-1.service │ │ │ │ ├── 
audiotest │ │ │ │ │ └── audiotest.sh │ │ │ │ ├── cgroups │ │ │ │ │ ├── cgconfig.service │ │ │ │ │ └── fosdem.conf │ │ │ │ ├── config │ │ │ │ │ ├── background.png │ │ │ │ │ ├── background.raw │ │ │ │ │ ├── config.sh │ │ │ │ │ ├── defaults.sh │ │ │ │ │ ├── preroll.png │ │ │ │ │ ├── preroll.raw │ │ │ │ │ └── voctocore.ini │ │ │ │ ├── control │ │ │ │ │ ├── chart.js │ │ │ │ │ ├── chartjs-adapter-moment.js │ │ │ │ │ ├── chartjs-plugin-annotation.js │ │ │ │ │ ├── default │ │ │ │ │ ├── graph.js │ │ │ │ │ ├── inc.php │ │ │ │ │ ├── influxdb.conf │ │ │ │ │ ├── moment.js │ │ │ │ │ ├── query-ebur.php │ │ │ │ │ ├── vocto.css │ │ │ │ │ └── vocto.php │ │ │ │ ├── network │ │ │ │ │ └── video0 │ │ │ │ ├── patches │ │ │ │ │ └── videomix.py.diff │ │ │ │ ├── scripts │ │ │ │ │ ├── restart-voctocore.sh │ │ │ │ │ ├── sink-output.sh │ │ │ │ │ ├── source-cam.sh │ │ │ │ │ └── source-slides.sh │ │ │ │ ├── signal-status │ │ │ │ │ ├── capture-rescan.sh │ │ │ │ │ ├── display-rescan.sh │ │ │ │ │ ├── statuskeeper.sh │ │ │ │ │ ├── video-capture-rescan.service │ │ │ │ │ ├── video-display-rescan.service │ │ │ │ │ └── video-statuskeeper.service │ │ │ │ ├── udev │ │ │ │ │ ├── 20-fosdem-serial.rules │ │ │ │ │ ├── 99-picotool.rules │ │ │ │ │ └── 99-video-box.rules │ │ │ │ ├── units │ │ │ │ │ ├── vocto-sink-output.service │ │ │ │ │ ├── vocto-source-cam.service │ │ │ │ │ ├── vocto-source-slides.service │ │ │ │ │ └── voctocore.service │ │ │ │ ├── video-status │ │ │ │ │ ├── logo.png │ │ │ │ │ ├── preview.sh │ │ │ │ │ ├── video-preview.service │ │ │ │ │ ├── video-status.py │ │ │ │ │ └── video-status.service │ │ │ │ └── video-streamer │ │ │ │ │ ├── rc.local │ │ │ │ │ ├── video-receiver.service │ │ │ │ │ ├── video-receiver.sh │ │ │ │ │ ├── video-recorder.service │ │ │ │ │ ├── video-recorder.sh │ │ │ │ │ └── video-usbreset.py │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── cgroup_setup.yml │ │ │ │ ├── configure_network.yml │ │ │ │ ├── configure_nvme.yml │ │ │ │ ├── install_control.yml │ │ │ │ 
├── install_extra_hw.yml │ │ │ │ ├── install_mixer_ctl.yml │ │ │ │ ├── install_packages.yml │ │ │ │ ├── install_signal-status.yml │ │ │ │ ├── install_video-status.yml │ │ │ │ ├── install_video-streamer.yml │ │ │ │ ├── install_voctocore.yml │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ └── grub │ │ │ │ └── cgroup.cfg │ │ ├── video-box │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── files │ │ │ │ ├── audiotest │ │ │ │ │ └── audiotest.sh │ │ │ │ ├── cgroups │ │ │ │ │ ├── cgconfig.service │ │ │ │ │ └── fosdem.conf │ │ │ │ ├── network │ │ │ │ │ └── video0 │ │ │ │ ├── signal-status │ │ │ │ │ ├── capture-rescan.sh │ │ │ │ │ ├── display-rescan.sh │ │ │ │ │ ├── statuskeeper.sh │ │ │ │ │ ├── video-capture-rescan.service │ │ │ │ │ ├── video-display-rescan.service │ │ │ │ │ └── video-statuskeeper.service │ │ │ │ ├── udev │ │ │ │ │ ├── 20-fosdem-serial.rules │ │ │ │ │ ├── 99-picotool.rules │ │ │ │ │ └── 99-video-box.rules │ │ │ │ ├── video-status │ │ │ │ │ ├── logo.png │ │ │ │ │ ├── preview.sh │ │ │ │ │ ├── video-preview.service │ │ │ │ │ ├── video-status.py │ │ │ │ │ └── video-status.service │ │ │ │ └── video-streamer │ │ │ │ │ ├── rc.local │ │ │ │ │ ├── video-receiver.service │ │ │ │ │ ├── video-receiver.sh │ │ │ │ │ ├── video-recorder.service │ │ │ │ │ ├── video-recorder.sh │ │ │ │ │ └── video-usbreset.py │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── cgroup_setup.yml │ │ │ │ ├── configure_network.yml │ │ │ │ ├── install_extra_hw.yml │ │ │ │ ├── install_mixer_ctl.yml │ │ │ │ ├── install_packages.yml │ │ │ │ ├── install_signal-status.yml │ │ │ │ ├── install_video-status.yml │ │ │ │ ├── install_video-streamer.yml │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ └── grub │ │ │ │ └── cgroup.cfg │ │ ├── video-control-server │ │ │ ├── README.md │ │ │ ├── files │ │ │ │ ├── dhcpd.conf │ │ │ │ ├── fosdem.sql │ │ │ │ ├── grafana │ │ │ │ │ ├── ebur.json │ │ │ │ │ ├── grafana.ini │ │ │ │ │ └── mixer-levels.json │ │ │ │ ├── icinga │ │ │ │ │ ├── 
fosdem-services.conf │ │ │ │ │ ├── matrix-host-notification.sh │ │ │ │ │ ├── matrix-service-notification.sh │ │ │ │ │ ├── matrix.conf │ │ │ │ │ └── passive.conf │ │ │ │ ├── imgmaker.sh │ │ │ │ ├── isc-dhcp-server.default │ │ │ │ ├── nginx │ │ │ │ │ ├── certificate │ │ │ │ │ │ └── dhparam.pem │ │ │ │ │ ├── global.conf │ │ │ │ │ └── mtail.conf │ │ │ │ ├── unbound-access.conf │ │ │ │ └── web │ │ │ │ │ ├── chart.js │ │ │ │ │ ├── chartjs-adapter-moment.js │ │ │ │ │ ├── chartjs-plugin-annotation.js │ │ │ │ │ ├── config.php │ │ │ │ │ ├── graph.js │ │ │ │ │ ├── inc.php │ │ │ │ │ ├── mixer.css │ │ │ │ │ ├── mixer.js │ │ │ │ │ ├── moment.js │ │ │ │ │ ├── query-ebur.php │ │ │ │ │ ├── query-room.php │ │ │ │ │ ├── query-vocto.php │ │ │ │ │ ├── reconnecting-websocket.js │ │ │ │ │ ├── room_status.php │ │ │ │ │ ├── showbuilding.php │ │ │ │ │ ├── vocto.css │ │ │ │ │ └── vocto.php │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── configure_db.yml │ │ │ │ ├── configure_dns_dhcp.yml │ │ │ │ ├── configure_grafana.yml │ │ │ │ ├── configure_icinga.yml │ │ │ │ ├── configure_imgmaker.yml │ │ │ │ ├── configure_nginx.yml │ │ │ │ ├── install_packages.yml │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ ├── imgmaker.service │ │ │ │ └── nginx │ │ │ │ └── sites-enabled │ │ │ │ └── control.video.fosdem.org.conf │ │ ├── video-monitoring │ │ │ ├── files │ │ │ │ ├── do_checks.sh │ │ │ │ ├── monitoring.cron │ │ │ │ └── munin-node.conf │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ └── tasks │ │ │ │ └── main.yml │ │ ├── video-repo │ │ │ ├── files │ │ │ │ └── video-team.repo.list │ │ │ ├── tasks │ │ │ │ └── main.yml │ │ │ └── vars │ │ │ │ └── main.yml │ │ ├── video-stream-dump-external │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── files │ │ │ │ └── streamdump.sh │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── configure_hdd.yml │ │ │ │ ├── configure_streamdump.yml │ │ │ │ ├── install_packages.yml │ │ │ │ ├── main.yml │ │ │ │ └── streamdump_target.yml │ │ │ └── 
templates │ │ │ │ ├── streamdump.service │ │ │ │ └── streamdump.target │ │ ├── video-stream-dump │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── files │ │ │ │ └── streamdump.sh │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── configure_hdd.yml │ │ │ │ ├── configure_streamdump.yml │ │ │ │ ├── configure_streamdump_simple.yml │ │ │ │ ├── install_packages.yml │ │ │ │ ├── main.yml │ │ │ │ └── streamdump_target.yml │ │ │ └── templates │ │ │ │ ├── streamdump.service │ │ │ │ └── streamdump.target │ │ ├── video-streamer-backend │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ ├── files │ │ │ │ ├── bic │ │ │ │ │ └── modules │ │ │ │ ├── fetcher │ │ │ │ │ ├── fetcher.sh │ │ │ │ │ └── thumb.sh │ │ │ │ ├── iptables │ │ │ │ │ ├── rules.v4 │ │ │ │ │ └── rules.v6 │ │ │ │ ├── nginx │ │ │ │ │ └── mtail.conf │ │ │ │ └── sreview │ │ │ │ │ └── detect_files │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── configure_bic.yml │ │ │ │ ├── configure_fetcher.yml │ │ │ │ ├── configure_nginx.yml │ │ │ │ ├── install_packages.yml │ │ │ │ ├── main.yml │ │ │ │ └── mount_ssd.yml │ │ │ └── templates │ │ │ │ ├── fetcher │ │ │ │ ├── fetcher.service │ │ │ │ ├── fetcher.target │ │ │ │ ├── room.m3u8 │ │ │ │ └── thumb.service │ │ │ │ ├── nfs │ │ │ │ └── exports.j2 │ │ │ │ ├── nginx │ │ │ │ ├── nginx-rtmp.conf │ │ │ │ └── sites-enabled │ │ │ │ │ └── streamer-backend.conf │ │ │ │ └── sreview │ │ │ │ └── config.pm.j2 │ │ ├── video-streamer-frontend │ │ │ ├── files │ │ │ │ ├── bic │ │ │ │ │ └── modules │ │ │ │ └── nginx │ │ │ │ │ ├── certificate │ │ │ │ │ └── dhparam.pem │ │ │ │ │ ├── global.conf │ │ │ │ │ └── mtail.conf │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ ├── tasks │ │ │ │ ├── configure_bic.yml │ │ │ │ ├── configure_nginx.yml │ │ │ │ ├── install_packages.yml │ │ │ │ └── main.yml │ │ │ └── templates │ │ │ │ └── nginx │ │ │ │ └── sites-enabled │ │ │ │ └── stream.fosdem.org.conf │ │ ├── video-voctop │ │ │ ├── files │ │ │ │ ├── audio-fetcher │ │ │ │ │ ├── 
audio-fetcher │ │ │ │ │ └── audio-fetcher.service │ │ │ │ ├── config │ │ │ │ │ ├── background.png │ │ │ │ │ ├── background.raw │ │ │ │ │ ├── config.sh │ │ │ │ │ ├── defaults.sh │ │ │ │ │ └── voctocore.ini │ │ │ │ ├── grub │ │ │ │ │ └── mitigations.cfg │ │ │ │ ├── patches │ │ │ │ │ └── videomix.py.diff │ │ │ │ ├── scripts │ │ │ │ │ ├── restart-voctocore.sh │ │ │ │ │ ├── sink-output.sh │ │ │ │ │ ├── source-cam.sh │ │ │ │ │ └── source-slides.sh │ │ │ │ └── units │ │ │ │ │ ├── vocto-sink-output.service │ │ │ │ │ ├── vocto-source-cam.service │ │ │ │ │ ├── vocto-source-slides.service │ │ │ │ │ └── voctocore.service │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ └── tasks │ │ │ │ ├── cputune.yml │ │ │ │ ├── install_audio_fetcher.yml │ │ │ │ ├── install_voctocore.yml │ │ │ │ └── main.yml │ │ └── video-web-frontend │ │ │ ├── files │ │ │ └── nginx │ │ │ │ ├── certificate │ │ │ │ └── dhparam.pem │ │ │ │ ├── global.conf │ │ │ │ └── sites-enabled │ │ │ │ └── live.fosdem.org.conf │ │ │ ├── handlers │ │ │ └── main.yml │ │ │ └── tasks │ │ │ ├── configure_nginx.yml │ │ │ ├── install_packages.yml │ │ │ └── main.yml │ ├── site.yml │ └── templates │ │ ├── Corefile.j2 │ │ └── snmp_exporter │ │ ├── generator.yml │ │ └── snmp.yml ├── public_keys │ ├── alec.pub │ ├── bastischubert.pub │ ├── bert.pub │ ├── brian.pub │ ├── dan.pub │ ├── gerry.pub │ ├── gouthamve.pub │ ├── jarek.pub │ ├── johanvdw.pub │ ├── klaus.pub │ ├── louis.pub │ ├── luc.pub │ ├── marian.pub │ ├── mark.pub │ ├── meka.pub │ ├── msuriar.pub │ ├── nicolai.pub │ ├── peter.pub │ ├── quint.pub │ ├── richih.pub │ ├── shin.pub │ ├── vasil.pub │ ├── wouter_s.pub │ └── wouter_v.pub ├── requirements.yml └── update-private-files.sh ├── inventory └── IP-addressing.md ├── resources ├── snmp │ ├── wlc_graphite_output │ └── wlc_snmpwalk └── video │ ├── generate-cisco-config.sh │ ├── generate-video-dns.sh │ ├── inventory.csv │ └── printers.csv └── syslog.md /.ansible-lint: 
-------------------------------------------------------------------------------- 1 | --- 2 | exclude_paths: 3 | - ansible/playbooks/roles/caddy-ansible.caddy 4 | - ansible/playbooks/roles/cloudalchemy.alertmanager 5 | - ansible/playbooks/roles/cloudalchemy.alertmanager-irc-relay 6 | - ansible/playbooks/roles/cloudalchemy.bind_exporter 7 | - ansible/playbooks/roles/cloudalchemy.blackbox-exporter 8 | - ansible/playbooks/roles/cloudalchemy.coredns 9 | - ansible/playbooks/roles/cloudalchemy.grafana 10 | - ansible/playbooks/roles/cloudalchemy.node-exporter 11 | - ansible/playbooks/roles/cloudalchemy.prometheus 12 | - ansible/playbooks/roles/cloudalchemy.snmp-exporter 13 | - ansible/playbooks/roles/netzwirt.bind 14 | - ansible/playbooks/roles/nickjj.docker 15 | - ansible/playbooks/roles/paulfantom.restic 16 | - ansible/playbooks/roles/rsyslog 17 | 18 | skip_list: 19 | - '306' # Shells that use pipes should set the pipefail option 20 | - '403' # Package installs should not use latest 21 | - '503' # TODO(Issue 219) Tasks that run when changed should likely be handlers 22 | -------------------------------------------------------------------------------- /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: 2.1 3 | 4 | executors: 5 | golang: 6 | docker: 7 | - image: circleci/golang:1.13 8 | python: 9 | docker: 10 | - image: circleci/python:buster 11 | 12 | jobs: 13 | build: 14 | executor: golang 15 | 16 | steps: 17 | - checkout 18 | - run: echo "nothing to do" 19 | test: 20 | executor: python 21 | 22 | steps: 23 | - run: pip install --user yamllint 24 | - run: pip install --user ansible-lint 25 | - checkout 26 | - run: yamllint ansible 27 | - run: ansible-lint ansible/playbooks/site.yml 28 | 29 | workflows: 30 | version: 2 31 | infrastructure: 32 | jobs: 33 | - build: 34 | filters: 35 | tags: 36 | only: /.*/ 37 | - test: 38 | filters: 39 | tags: 40 | only: /.*/ 41 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore debops.secret garbage. 2 | secret 3 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/.gitmodules -------------------------------------------------------------------------------- /.yamllint: -------------------------------------------------------------------------------- 1 | --- 2 | extends: default 3 | ignore: | 4 | secrets.yml 5 | *.sh 6 | *.md 7 | ansible/playbooks/roles/caddy-ansible.caddy 8 | ansible/playbooks/roles/cloudalchemy.* 9 | ansible/playbooks/roles/netzwirt.bind 10 | ansible/playbooks/roles/nickjj.docker 11 | ansible/playbooks/roles/paulfantom.restic 12 | ansible/playbooks/roles/rsyslog 13 | ansible/playbooks/templates/snmp_exporter/snmp.yml 14 | 15 | rules: 16 | braces: 17 | max-spaces-inside: 1 18 | level: error 19 | brackets: 20 | max-spaces-inside: 1 21 | comments: 22 | level: error 23 | document-start: 24 | level: error 25 | indentation: 26 | spaces: 2 27 | indent-sequences: false 28 | line-length: 29 | max: 160 30 | level: error 31 | truthy: 32 | level: error 33 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FOSDEM Infrastructure 2 | 3 | * [Ansible documentation](ansible/README.md) 4 | * [Video repository](https://github.com/FOSDEM/video) 5 | -------------------------------------------------------------------------------- /ansible/.gitignore: -------------------------------------------------------------------------------- 1 | playbooks/site.retry 2 | playbooks/roles/video-box/files/bmd/bmd-atemtvstudio.bin 
3 | playbooks/roles/video-box/files/bmd/bmd-h264prorecorder.bin 4 | playbooks/roles/video-streamer-frontend/files/nginx/certificate/stream.fosdem.org.crt 5 | playbooks/roles/video-streamer-frontend/files/nginx/certificate/stream.fosdem.org.key 6 | playbooks/roles/video-web-frontend/files/nginx/certificate/live.fosdem.org.crt 7 | playbooks/roles/video-web-frontend/files/nginx/certificate/live.fosdem.org.key 8 | playbooks/roles/video-streamer-backend/files/sreview/config.pl 9 | playbooks/roles/video-control-server/files/nginx/certificate/control.video.fosdem.org.crt 10 | playbooks/roles/video-control-server/files/nginx/certificate/control.video.fosdem.org.key 11 | playbooks/roles/video-control-server/files/nginx/htpasswd 12 | playbooks/roles/video-bbb/files/config/streamkeysalt.sh 13 | playbooks/roles/video-monitoring/files/send_nsca.cfg 14 | .vault-password 15 | -------------------------------------------------------------------------------- /ansible/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | inventory = hosts 3 | host_key_checking = False 4 | ansible_user = root 5 | roles_path = playbooks/roles 6 | forks = 200 7 | nocows = 1 8 | vault_password_file = ~/.fosdem_vault_pass.txt 9 | ansible_python_interpreter=/usr/bin/python3 10 | 11 | [ssh_connection] 12 | pipelining=True 13 | -------------------------------------------------------------------------------- /ansible/files/bind/db.conference.fosdem.net: -------------------------------------------------------------------------------- 1 | $TTL 3600 2 | @ IN SOA ns0.conference.fosdem.net. hostmaster.conference.fosdem.net. ( 3 | 2016013002 ; serial 4 | 600 ; refresh 5 | 300 ; retry 6 | 604800 ; expire 7 | 3600 ; default_ttl 8 | ) 9 | @ IN NS ns0.conference.fosdem.net. 10 | ; @ IN NS ns1.conference.fosdem.net. 
11 | @ IN A 151.216.191.253 12 | @ IN AAAA 2001:67c:1810:f052::53 13 | ns0 IN A 151.216.191.253 14 | ns0 IN AAAA 2001:67c:1810:f052::53 15 | 16 | ; Printers; static leases defined on ASR, talk to msuriar@ 17 | printer1 IN A 151.216.190.1 18 | printer2 IN A 151.216.190.2 19 | printer3 IN A 151.216.190.3 20 | printer4 IN A 151.216.190.4 21 | 22 | -------------------------------------------------------------------------------- /ansible/group_vars/all/default.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # SSH keys to deploy (override them in host_vars if ever necessary) 3 | # The files refer to files in the common role 4 | ssh_users: 5 | - gerry 6 | - johanvdw 7 | - mark 8 | - peter 9 | - richih 10 | - vasil 11 | - wouter_v 12 | - wouter_s 13 | - bastischubert 14 | - shin 15 | 16 | timezone: "Europe/Brussels" 17 | 18 | node_exporter_textfile_dir: /var/lib/node_exporter 19 | node_exporter_enabled_collectors: 20 | - ntp 21 | - systemd: 22 | unit-whitelist: "'.+\\.service'" 23 | - textfile: 24 | directory: "{{ node_exporter_textfile_dir }}" 25 | -------------------------------------------------------------------------------- /ansible/group_vars/all/fosdem_subnets.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Subnets useful for filtering. 
3 | fosdem_subnets: 4 | all: 5 | - 185.175.216.0/22 6 | - 2001:67C:1810::/48 7 | v6only: 8 | - 2001:67C:1810:F051::/64 9 | server: 10 | - 185.175.216.240/28 11 | - 2001:67c:1810:f054::/64 12 | dualstack: 13 | - 151.216.128.0/19 14 | - 185.175.216.0/22 15 | - 2001:67C:1810:F055::/64 16 | video: 17 | - 185.175.218.0/24 18 | temporary: 19 | - 151.216.128.0/17 20 | -------------------------------------------------------------------------------- /ansible/group_vars/all/grafana.yml: -------------------------------------------------------------------------------- 1 | --- 2 | grafana_plugins: 3 | - jdbranham-diagram-panel 4 | - vonage-status-panel 5 | -------------------------------------------------------------------------------- /ansible/group_vars/encoder-backend/main.yml: -------------------------------------------------------------------------------- 1 | sreview: 2 | nfs_server: reviewstorage.video.fosdem.org 3 | fonts: 4 | - 'Frutiger Black.ttf' 5 | - 'Frutiger Bold.ttf' 6 | - 'Frutiger Light.ttf' 7 | - 'Frutiger Regular.ttf' 8 | - 'Frutiger Roman.ttf' 9 | - 'Signika-Bold.ttf' 10 | - 'Signika-Light.ttf' 11 | - 'Signika-Medium.ttf' 12 | - 'Signika-Regular.ttf' 13 | - 'Signika-SemiBold.ttf' 14 | - 'Signika-VariableFont_GRAD,wght.ttf' 15 | -------------------------------------------------------------------------------- /ansible/group_vars/encoder-master/main.yml: -------------------------------------------------------------------------------- 1 | sreview: 2 | nfs_server: reviewstorage.video.fosdem.org 3 | fonts: 4 | - 'Frutiger Black.ttf' 5 | - 'Frutiger Bold.ttf' 6 | - 'Frutiger Light.ttf' 7 | - 'Frutiger Regular.ttf' 8 | - 'Frutiger Roman.ttf' 9 | - 'Signika-Bold.ttf' 10 | - 'Signika-Light.ttf' 11 | - 'Signika-Medium.ttf' 12 | - 'Signika-Regular.ttf' 13 | - 'Signika-SemiBold.ttf' 14 | - 'Signika-VariableFont_GRAD,wght.ttf' 15 | -------------------------------------------------------------------------------- /ansible/group_vars/event-primary/alertmanager_irc_relay.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | alertmanager_irc_relay_http_port: 8667 3 | alertmanager_irc_relay_irc_nickname: fosdem-bot 4 | alertmanager_irc_relay_irc_realname: fosdem-bot 5 | alertmanager_irc_relay_channels: 6 | - name: "#fosdem-alerts" 7 | 8 | alertmanager_irc_relay_notice_template: > 9 | {% raw %}{{ range .Alerts }}Alert {{ .Labels.alertname }} on {{ .Labels.instance }} is {{ .Status }} {{ end }}{% endraw %} 10 | -------------------------------------------------------------------------------- /ansible/group_vars/event-primary/backup.yml: -------------------------------------------------------------------------------- 1 | --- 2 | fosdem_backup_hour: 23 3 | fosdem_backup_dirs: 4 | - /etc 5 | - /root 6 | - /home 7 | - /var/lib/prometheus 8 | - /var/log 9 | -------------------------------------------------------------------------------- /ansible/group_vars/event-primary/chrony.yml: -------------------------------------------------------------------------------- 1 | --- 2 | chrony_allow: 3 | - 127.0.0.1 4 | - 185.175.218.0/24 5 | - 2001:67c:1810:f053::/64 6 | -------------------------------------------------------------------------------- /ansible/group_vars/event-primary/coredns.yml: -------------------------------------------------------------------------------- 1 | --- 2 | coredns_dns_port: 5300 3 | coredns_config_file: Corefile.j2 4 | # Custom binary built with dns64 plugin: https://github.com/coredns/coredns/pull/3534 5 | coredns_binary_local_dir: ~/src/superq/coredns 6 | 7 | fosdem_dns_dns64_addrs: 8 | - '2001:67c:1810:f054::ffff:64' 9 | 10 | fosdem_dns_dualstack_addrs: 11 | - '127.0.0.1' 12 | - '::1' 13 | - '185.175.216.251' 14 | - '2001:67c:1810:f054::ffff:53' 15 | -------------------------------------------------------------------------------- /ansible/group_vars/event-primary/defaults.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 
rsyslog_repeated_msg_reduction: true 3 | rsyslog_rsyslog_d_files: 4 | 20-log-routers: 5 | settings: 6 | - 'module(load="imudp")' 7 | - 'input(type="imudp" port="514")' 8 | - "if $syslogfacility-text == 'local7' then /var/log/rsyslog/network-combined" 9 | - "if $syslogfacility-text == 'local6' then /var/log/rsyslog/tacacs-combined" 10 | - "if $syslogfacility-text == 'local5' then /var/log/rsyslog/video-combined" 11 | - "if $syslogfacility-text == 'local4' then /var/log/rsyslog/applications-combined" 12 | - '$template DynFile,"/var/log/router-log/%FROMHOST%"' 13 | - ':source , !isequal , "server-hostname" ?DynFile' 14 | - ':source , !isequal , "server-hostname" stop' 15 | -------------------------------------------------------------------------------- /ansible/group_vars/event-primary/grafana.yml: -------------------------------------------------------------------------------- 1 | --- 2 | grafana_security: 3 | admin_user: admin 4 | admin_password: "{{ grafana_admin_password }}" 5 | -------------------------------------------------------------------------------- /ansible/group_vars/event-prometheus-servers/dns.yml: -------------------------------------------------------------------------------- 1 | --- 2 | bind_zones: 3 | conference.fosdem.net: 4 | type: master 5 | n.fosdem.net: 6 | type: master 7 | v.conference.fosdem.net: 8 | type: master 9 | 10 | bind_acls: 11 | fosdem: "{{ fosdem_subnets.all }}" 12 | fosdem-dualstack: "{{ fosdem_subnets.dualstack }}" 13 | fosdem-v6only: "{{ fosdem_subnets.v6only }}" 14 | fosdem-temporary: "{{ fosdem_subnets.temporary }}" 15 | v6localhost: 16 | - ::1/128 17 | 18 | bind_statistics_channels: 19 | - inet: 127.0.0.1 20 | port: 8053 21 | allow: 22 | - 127.0.0.1 23 | 24 | bind_recursion_allowed_clients: 25 | - fosdem 26 | - fosdem-temporary 27 | 28 | bind_dns64_clients: 29 | - fosdem-v6only 30 | - v6localhost 31 | 32 | bind_authoritative_only: false 33 | -------------------------------------------------------------------------------- 
/ansible/group_vars/event-prometheus-servers/restic.yml: -------------------------------------------------------------------------------- 1 | --- 2 | restic_repos: 3 | - name: "{{ fosdem_backup_username }}" 4 | url: "rest:https://{{ fosdem_backup_username }}:{{ fosdem_backup_password }}@{{ fosdem_backup_server }}/{{ fosdem_backup_username }}/" 5 | password: "{{ fosdem_backup_key }}" 6 | jobs: 7 | - command: "restic backup --exclude-caches {{ fosdem_backup_dirs | join(' ') }}" 8 | at: '10 {{ fosdem_backup_hour }} * * *' 9 | retention_time: '40 {{ (fosdem_backup_hour + 12) % 24 }} * * *' 10 | retention: 11 | last: 2 12 | hourly: 4 13 | daily: 10 14 | weekly: 9 15 | monthly: 3 16 | yearly: 10 17 | -------------------------------------------------------------------------------- /ansible/group_vars/event-secondary/backup.yml: -------------------------------------------------------------------------------- 1 | --- 2 | fosdem_backup_hour: 04 3 | fosdem_backup_dirs: 4 | - /etc 5 | - /root 6 | - /home 7 | - /var/lib/prometheus 8 | - /var/log 9 | -------------------------------------------------------------------------------- /ansible/group_vars/event-secondary/caddy.yml: -------------------------------------------------------------------------------- 1 | --- 2 | caddy_features: http.prometheus,http.ipfilter 3 | caddy_systemd_capabilities_enabled: true 4 | caddy_additional_args: '-email network@lists.fosdem.org' 5 | caddy_config: | 6 | {{ ansible_nodename }}, {{ ansible_hostname }}.fosdem.net { 7 | prometheus 8 | gzip 9 | root /var/www 10 | log /var/log/caddy/access.log 11 | proxy / localhost:3000 { 12 | transparent 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /ansible/group_vars/event-secondary/chrony.yml: -------------------------------------------------------------------------------- 1 | --- 2 | chrony_allow: 3 | - 127.0.0.1 4 | - 185.175.218.0/24 5 | - 2001:67c:1810:f053::/64 6 | 
-------------------------------------------------------------------------------- /ansible/group_vars/event-secondary/coredns.yml: -------------------------------------------------------------------------------- 1 | --- 2 | coredns_dns_port: 5300 3 | coredns_config_file: Corefile.j2 4 | # Custom binary built with dns64 plugin: https://github.com/coredns/coredns/pull/3534 5 | coredns_binary_local_dir: ~/src/superq/coredns 6 | 7 | fosdem_dns_dns64_addrs: 8 | - '2001:67c:1810:f054::ffff:6400' 9 | 10 | fosdem_dns_dualstack_addrs: 11 | - '127.0.0.1' 12 | - '::1' 13 | - '185.175.216.252' 14 | - '2001:67c:1810:f054::ffff:5300' 15 | -------------------------------------------------------------------------------- /ansible/group_vars/event-secondary/docker.yml: -------------------------------------------------------------------------------- 1 | --- 2 | docker__default_daemon_json: | 3 | "experimental" : true, 4 | "log-driver": "syslog", 5 | "log-opts": { 6 | "syslog-address": "udp://server001.sk1-510.k.ulb.bru.fosdem.net:514" 7 | }, 8 | "metrics-addr" : "0.0.0.0:9323" 9 | -------------------------------------------------------------------------------- /ansible/group_vars/infodesk-laptop.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | user_fosdem_password: "{{ infodesk_user_fosdem_password | password_hash('sha512', infodesk_user_fosdem_salt) }}" 4 | -------------------------------------------------------------------------------- /ansible/group_vars/public-dashboard/caddy.yml: -------------------------------------------------------------------------------- 1 | --- 2 | caddy_features: http.prometheus 3 | caddy_systemd_capabilities_enabled: true 4 | caddy_additional_args: '-email network@lists.fosdem.org' 5 | caddy_config: | 6 | dashboard.fosdem.org { 7 | prometheus 8 | gzip 9 | root /var/www 10 | log /var/log/caddy/access.log 11 | proxy / localhost:3000 { 12 | transparent 13 | } 14 | } 15 | 
-------------------------------------------------------------------------------- /ansible/group_vars/public-dashboard/grafana.yml: -------------------------------------------------------------------------------- 1 | --- 2 | grafana_security: 3 | admin_user: admin 4 | admin_password: "{{ grafana_admin_password }}" 5 | grafana_auth: 6 | anonymous: 7 | org_role: Viewer 8 | -------------------------------------------------------------------------------- /ansible/group_vars/video-box-mixer.yml: -------------------------------------------------------------------------------- 1 | --- 2 | sysctl_parameters: 3 | net.core.wmem_max: 8388608 4 | net.core.wmem_default: 8388608 5 | net.core.rmem_max: 8388608 6 | net.core.rmem_default: 8388608 7 | net.ipv4.tcp_keepalive_time: 10 8 | net.ipv4.tcp_keepalive_intvl: 5 9 | net.ipv4.tcp_keepalive_probes: 5 10 | 11 | # Where is our SSD drive supposed to be? 12 | ssd_mount: /home/video-recording 13 | ssd_drive: /dev/nvme0n1 14 | 15 | # Prometheus node exporter should write to the SSD instead of the SD 16 | node_exporter_textfile_dir: /home/node-exporter/textfiles/ 17 | node_exporter_install_path: /home/node-exporter/ 18 | 19 | -------------------------------------------------------------------------------- /ansible/group_vars/video-box.yml: -------------------------------------------------------------------------------- 1 | --- 2 | sysctl_parameters: 3 | net.core.wmem_max: 8388608 4 | net.core.wmem_default: 8388608 5 | net.core.rmem_max: 8388608 6 | net.core.rmem_default: 8388608 7 | net.ipv4.tcp_keepalive_time: 10 8 | net.ipv4.tcp_keepalive_intvl: 5 9 | net.ipv4.tcp_keepalive_probes: 5 10 | 11 | # Where is our SSD drive supposed to be? 
12 | ssd_mount: /mnt/ssd 13 | 14 | # Prometheus node exporter should write to the SSD instead of the SD 15 | node_exporter_textfile_dir: /home/node-exporter/textfiles/ 16 | node_exporter_install_path: /home/node-exporter/ 17 | 18 | # Location for the screenshot dumps 19 | video_screenshot_directory: /home/video-screenshot/ 20 | video_screenshot_filename: screenshot.jpg 21 | 22 | # Video streamer configuration 23 | video_streamer_ffmpeg_options: "overrun_nonfatal=1&buffer_size=81921024&fifo_size=178481" 24 | -------------------------------------------------------------------------------- /ansible/group_vars/video-control-server/default.yml: -------------------------------------------------------------------------------- 1 | --- 2 | sysctl_parameters: 3 | net.ipv4.tcp_keepalive_time: 10 4 | net.ipv4.tcp_keepalive_intvl: 5 5 | net.ipv4.tcp_keepalive_probes: 5 6 | -------------------------------------------------------------------------------- /ansible/group_vars/video-control-server/grafana.yml: -------------------------------------------------------------------------------- 1 | --- 2 | grafana_security: 3 | admin_user: admin 4 | admin_password: "{{ grafana_admin_password }}" 5 | anonymous: 6 | org_role: Viewer 7 | -------------------------------------------------------------------------------- /ansible/group_vars/video-control-server/secrets.yml: -------------------------------------------------------------------------------- 1 | $ANSIBLE_VAULT;1.1;AES256 2 | 35643538643131616166323238316432666337653266616630333965653237666462643836386332 3 | 3134316332643136643539316231613831346134383236630a643739653462613232653532303138 4 | 32323939666533306632616332366135316230353134343962316365633563346539316339313330 5 | 6163356664633062360a336262373832643661343061383234306135663736303633336539613837 6 | 36333735376635336562356335323966333666363836366635343639623234656137623037343962 7 | 3131656665303264333531376636363462346230623762643532 8 | 
-------------------------------------------------------------------------------- /ansible/group_vars/video-stream-dump-external/default.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | sysctl_parameters: 4 | net.ipv4.tcp_keepalive_time: 10 5 | net.ipv4.tcp_keepalive_intvl: 5 6 | net.ipv4.tcp_keepalive_probes: 5 7 | 8 | # Disable hdd config on external. 9 | video_stream_dump_configure_hdd: false 10 | -------------------------------------------------------------------------------- /ansible/group_vars/video-stream-dump/default.yml: -------------------------------------------------------------------------------- 1 | --- 2 | sysctl_parameters: 3 | net.core.wmem_max: 8388608 4 | net.core.wmem_default: 8388608 5 | net.core.rmem_max: 8388608 6 | net.core.rmem_default: 8388608 7 | net.ipv4.tcp_keepalive_time: 10 8 | net.ipv4.tcp_keepalive_intvl: 5 9 | net.ipv4.tcp_keepalive_probes: 5 10 | 11 | video_stream_dump_hdd: /dev/sdb 12 | -------------------------------------------------------------------------------- /ansible/group_vars/video-streamer-backend.yml: -------------------------------------------------------------------------------- 1 | --- 2 | sysctl_parameters: 3 | net.core.wmem_max: 8388608 4 | net.core.wmem_default: 8388608 5 | net.core.rmem_max: 8388608 6 | net.core.rmem_default: 8388608 7 | net.ipv4.tcp_keepalive_time: 10 8 | net.ipv4.tcp_keepalive_intvl: 5 9 | net.ipv4.tcp_keepalive_probes: 5 10 | 11 | nfs_exports: 12 | - /var/www/dump 51.15.0.0/16(ro) 2001:bc8::/32(ro) 10.0.0.0/8(ro) 13 | 14 | nginx_rtmp_publishers: 15 | - 31.22.21.130/32 16 | - 81.82.239.20/32 17 | - 185.175.218.0/24 18 | 19 | ssd_drive: /dev/sdb 20 | -------------------------------------------------------------------------------- /ansible/group_vars/video-streamer-frontend.yml: -------------------------------------------------------------------------------- 1 | --- 2 | sysctl_parameters: 3 | net.core.wmem_max: 8388608 4 | 
net.core.wmem_default: 8388608 5 | net.core.rmem_max: 8388608 6 | net.core.rmem_default: 8388608 7 | -------------------------------------------------------------------------------- /ansible/group_vars/video-voctop.yml: -------------------------------------------------------------------------------- 1 | --- 2 | sysctl_parameters: 3 | net.core.wmem_max: 8388608 4 | net.core.wmem_default: 8388608 5 | net.core.rmem_max: 8388608 6 | net.core.rmem_default: 8388608 7 | net.ipv4.tcp_keepalive_time: 10 8 | net.ipv4.tcp_keepalive_intvl: 5 9 | net.ipv4.tcp_keepalive_probes: 5 10 | 11 | vocto_source_url_parameters: ?timeout=10000000&buffer_size=81921024&fifo_size=178481 12 | vocto_recording_directory: /mnt/video 13 | -------------------------------------------------------------------------------- /ansible/playbooks/files/coredns/zones: -------------------------------------------------------------------------------- 1 | ../../../files/bind/ -------------------------------------------------------------------------------- /ansible/playbooks/files/prometheus/rules/bind.rules: -------------------------------------------------------------------------------- 1 | --- 2 | groups: 3 | - name: Bind DNS 4 | interval: 30s 5 | rules: 6 | - alert: BindDown 7 | expr: bind_up != 1 8 | for: 2m 9 | labels: 10 | team: network 11 | severity: warning 12 | annotations: 13 | summary: "Bind is down" 14 | description: "Bind process {{ $labels.instance }} is down." 
15 | -------------------------------------------------------------------------------- /ansible/playbooks/files/prometheus/rules/cisco_wlc.rules: -------------------------------------------------------------------------------- 1 | --- 2 | groups: 3 | - name: Cisco WLC 4 | interval: 30s 5 | rules: 6 | - record: bsnAPIfSlotId:bsnApIfNoOfUsers:sum 7 | expr: > 8 | sum without (bsnAPName,bsnAPDot3MacAddress) ( 9 | bsnApIfNoOfUsers 10 | ) 11 | - record: bsnAPIfSlotId:bsnAPIfLoadChannelUtilization:q95 12 | expr: > 13 | quantile without (bsnAPDot3MacAddress, bsnAPName) ( 14 | 0.95, 15 | bsnAPIfLoadChannelUtilization 16 | ) 17 | - record: bsnAPIfSlotId:bsnAPIfLoadChannelUtilization:avg 18 | expr: > 19 | avg without (bsnAPDot3MacAddress, bsnAPName) ( 20 | bsnAPIfLoadChannelUtilization 21 | ) 22 | - record: bsnAPIfSlotId:bsnAPIfLoadChannelUtilization:max 23 | expr: > 24 | max without (bsnAPDot3MacAddress, bsnAPName) ( 25 | bsnAPIfLoadChannelUtilization 26 | ) 27 | -------------------------------------------------------------------------------- /ansible/playbooks/files/prometheus/rules/snmp.rules: -------------------------------------------------------------------------------- 1 | groups: 2 | - name: SNMP 3 | interval: 1m 4 | rules: 5 | - record: instance:ifHCInOctets:rate1m 6 | expr: > 7 | sum without (ifDescr, ifIndex, ifName, ifAlias) ( 8 | rate(ifHCInOctets[1m]) 9 | ) 10 | - record: instance:ifHCOutOctets:rate5m 11 | expr: > 12 | sum without (ifDescr, ifIndex, ifName, ifAlias) ( 13 | rate(ifHCOutOctets[1m]) 14 | ) 15 | - alert: NetworkTrunkSaturation 16 | expr: > 17 | (rate(ifHCInOctets{ifAlias=~"--- Trunk .* ---"}[5m]) * 8) / 18 | (ifHighSpeed * 1000 * 1000) * 100 > 50 19 | for: 5m 20 | labels: 21 | team: network 22 | severity: warning 23 | annotations: 24 | summary: Switch Trunk {{ $labels.instance }} is saturated 25 | description: '{{ $labels.instance }} Trunk port {{ $labels.ifAlias }} is {{ $value | printf "%.2f" }}% saturated' 26 | 
-------------------------------------------------------------------------------- /ansible/playbooks/files/prometheus/rules/voctop.rules: -------------------------------------------------------------------------------- 1 | --- 2 | groups: 3 | - name: Voctop 4 | interval: 15s 5 | rules: 6 | - alert: VoctopCPUOverheating 7 | expr: > 8 | avg without (sensor) ( 9 | node_hwmon_temp_celsius{chip="platform_coretemp_0",instance=~".*voctop.video.fosdem.org:9100"} 10 | ) > 75 11 | for: 5m 12 | labels: 13 | team: video 14 | severity: warning 15 | annotations: 16 | summary: Voctop CPU overheating 17 | description: 'Voctop {{ $labels.instance }} CPU is overheating {{ $value | printf "%.2f" }}C' 18 | - alert: ACOffline 19 | expr: > 20 | node_power_supply_ac_online != 1 21 | for: 1m 22 | labels: 23 | team: video 24 | severity: warning 25 | annotations: 26 | summary: AC power offline 27 | description: 'AC power on {{ $labels.instance }} is offline' 28 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | chrony_allow: 3 | - 127.0.0.1 4 | chrony_upstream: 5 | pool: 6 | - 0.europe.pool.ntp.org 7 | 8 | sysctl_parameters: {} 9 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/files/screenrc: -------------------------------------------------------------------------------- 1 | # Don't need the startup message 2 | startup_message off 3 | 4 | # A nice pretty bottom bar 5 | hardstatus alwayslastline 6 | hardstatus string "%{.bW}%-w%{.rW}%n %t%{-}%+w %=%{..G} %H %{..Y} %Y-%m-%d %c " 7 | 8 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart chrony 3 | systemd: 4 | 
daemon_reload: true 5 | name: chrony.service 6 | state: restarted 7 | 8 | - name: reload ssh 9 | systemd: 10 | daemon_reload: true 11 | name: sshd 12 | state: reloaded 13 | 14 | - name: update apt cache 15 | apt: 16 | update_cache: true 17 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/tasks/chrony.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: uninstall ntp daemons we will not be using 4 | apt: 5 | state: absent 6 | purge: true 7 | package: 8 | - ntp 9 | - openntpd 10 | - systemd-timesyncd 11 | 12 | - name: install chrony 13 | apt: 14 | state: latest 15 | install_recommends: false 16 | package: 17 | - chrony 18 | 19 | - name: configure chrony 20 | template: 21 | src: chrony.conf.j2 22 | dest: /etc/chrony/conf.d/chrony.conf 23 | owner: root 24 | mode: 0644 25 | notify: restart chrony 26 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/tasks/configure_apt.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "configure sources.list" 3 | template: 4 | src: apt/sources.list.j2 5 | dest: /etc/apt/sources.list 6 | owner: root 7 | group: root 8 | mode: 0644 9 | notify: 10 | - update apt cache 11 | 12 | - name: "run apt update if the cache is stale" 13 | apt: 14 | update_cache: true 15 | cache_valid_time: 3600 16 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/tasks/configure_fosdem_revision.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: configure /etc/fosdem_revision 3 | template: 4 | src: fosdem_revision 5 | dest: /etc/fosdem_revision 6 | owner: root 7 | group: root 8 | mode: 0644 9 | when: not ansible_check_mode 10 | -------------------------------------------------------------------------------- 
/ansible/playbooks/roles/common/tasks/configure_sshd.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: configure sshd 4 | template: 5 | src: ssh/sshd_config.j2 6 | dest: /etc/ssh/sshd_config.d/fosdem.conf 7 | notify: reload ssh 8 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/tasks/configure_sysctl.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: configure sysctl 3 | sysctl: 4 | sysctl_file: /etc/sysctl.d/10-fosdem.conf 5 | name: "{{ item.key }}" 6 | value: "{{ item.value }}" 7 | with_dict: "{{ sysctl_parameters }}" 8 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/tasks/configure_timezone.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: set timezone 3 | timezone: 4 | name: "{{ timezone }}" 5 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "Install packages" 3 | apt: 4 | state: latest 5 | install_recommends: false 6 | package: 7 | - ack 8 | - apt-transport-https 9 | - bash-completion 10 | - binutils 11 | - ca-certificates 12 | - curl 13 | - ethtool 14 | - file 15 | - git 16 | - gpg 17 | - gpg-agent 18 | - htop 19 | - ifmetric 20 | - iftop 21 | - inotify-tools 22 | - iotop 23 | - less 24 | - libcap2-bin 25 | - lsof 26 | - mediainfo 27 | - mosh 28 | - mtr-tiny 29 | - needrestart 30 | - patch 31 | - prometheus-node-exporter 32 | - psmisc 33 | - screen 34 | - sipcalc 35 | - strace 36 | - sudo 37 | - tcpdump 38 | - tmux 39 | - vcsh 40 | - vim 41 | - vnstat 42 | - zsh 43 | 44 | - name: "Install architecture-dependent packages" 45 | apt: 46 | state: latest 47 | 
install_recommends: false 48 | package: 49 | - ltrace 50 | when: (ansible_architecture == 'i386' or ansible_architecture == 'x86_64') 51 | 52 | - name: "Add root screenrc" 53 | copy: 54 | src: screenrc 55 | dest: /root/.screenrc 56 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: get git version 4 | shell: git log -1 '--date=format:%Y-%m-%d %H:%M' '--pretty=format:%cd %h' # noqa 303 305 5 | register: git_version 6 | delegate_to: localhost 7 | changed_when: false 8 | 9 | - import_tasks: configure_timezone.yml 10 | - import_tasks: configure_sysctl.yml 11 | - import_tasks: configure_apt.yml 12 | - import_tasks: user_root.yml 13 | - import_tasks: user_fosdem.yml 14 | - import_tasks: set_hostname.yml 15 | - import_tasks: configure_fosdem_revision.yml 16 | - import_tasks: install_packages.yml 17 | - import_tasks: configure_sshd.yml 18 | - import_tasks: chrony.yml 19 | tags: 20 | - chrony 21 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/tasks/set_hostname.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: set custom hostname 3 | hostname: 4 | name: "{{ system_hostname }}" 5 | when: system_hostname is defined 6 | 7 | - name: set hostname to inventory hostname 8 | hostname: 9 | name: "{{ inventory_hostname }}" 10 | when: system_hostname is not defined 11 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/tasks/user_fosdem.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: add fosdem group 3 | group: 4 | name: fosdem 5 | gid: 1000 6 | 7 | - name: add fosdem user 8 | user: 9 | name: fosdem 10 | comment: "FOSDEM user" 11 | uid: 1000 12 | group: fosdem 13 
| shell: /bin/bash 14 | password: "{{ user_fosdem_password|default('') }}" 15 | 16 | - name: add ssh keys to fosdem user 17 | authorized_key: 18 | user: fosdem 19 | key: "{{ lookup('file', '../public_keys/'+item+'.pub') }}" 20 | with_items: "{{ ssh_users }}" 21 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/tasks/user_root.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: add ssh keys to root 3 | authorized_key: 4 | user: root 5 | key: "{{ lookup('file', '../public_keys/'+item+'.pub') }}" 6 | with_items: "{{ ssh_users }}" 7 | 8 | - name: configure root user 9 | user: 10 | name: root 11 | shell: /bin/bash 12 | password: "" 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/templates/apt/sources.list.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | deb http://deb.debian.org/debian {{ansible_distribution_release}} main contrib non-free-firmware non-free 3 | deb http://deb.debian.org/debian {{ansible_distribution_release}}-updates main contrib non-free-firmware non-free 4 | deb http://security.debian.org/debian-security {{ansible_distribution_release}}-security main contrib non-free-firmware non-free 5 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/templates/chrony.conf.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | # Servers and pools 4 | {% for type, list in chrony_upstream.items() %} 5 | {% for upstream in list %} 6 | {{ type }} {{ upstream }} iburst 7 | {% endfor %} 8 | {% endfor %} 9 | 10 | # Allow NTP client access 11 | {% for allow in chrony_allow %} 12 | allow {{ allow }} 13 | {% endfor %} 14 | 
-------------------------------------------------------------------------------- /ansible/playbooks/roles/common/templates/fosdem_revision: -------------------------------------------------------------------------------- 1 | {{ git_version.stdout }} 2 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/templates/ssh/sshd_config.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | PermitRootLogin without-password 3 | PasswordAuthentication no 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/common/vars/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | go_arch_map: 3 | i386: '386' 4 | x86_64: 'amd64' 5 | aarch64: 'arm64' 6 | armv7l: 'armv7' 7 | armv6l: 'armv6' 8 | 9 | go_arch: "{{ go_arch_map[ansible_architecture] | default(ansible_architecture) }}" 10 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/dashboard_sync/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | dashboard_sync: 3 | private_api_key: FOO 4 | private_url: http://example.com 5 | public_api_key: BAR 6 | public_url: http://example.com 7 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/dashboard_sync/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Deploy sync ruby script 3 | copy: 4 | src: sync_grafana_dashboards.rb 5 | dest: /usr/local/bin/sync_grafana_dashboards.rb 6 | owner: root 7 | mode: 0755 8 | 9 | - name: Deploy sync shell wrapper 10 | template: 11 | src: sync_grafana_dashboards.sh 12 | dest: /usr/local/bin/sync_grafana_dashboards.sh 13 | owner: root 14 | mode: 0755 15 | 16 | - name: Setup cron sync 17 | 
cron: 18 | name: sync dashboards 19 | user: fosdem 20 | minute: '*/1' 21 | job: /usr/local/bin/sync_grafana_dashboards.sh 22 | cron_file: sync_grafana_dashboards 23 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/dashboard_sync/templates/sync_grafana_dashboards.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | 4 | export PRIVATE_API_KEY='{{ dashboard_sync.private_api_key }}' 5 | export PRIVATE_URL='{{ dashboard_sync.private_url }}' 6 | export PUBLIC_API_KEY='{{ dashboard_sync.public_api_key }}' 7 | export PUBLIC_URL='{{ dashboard_sync.public_url }}' 8 | 9 | sync_grafana_dashboards.rb 10 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/files/gdm3/daemon.conf: -------------------------------------------------------------------------------- 1 | [daemon] 2 | # Uncomment the line below to force the login screen to use Xorg 3 | #WaylandEnable=false 4 | 5 | # Enabling automatic login 6 | AutomaticLoginEnable = true 7 | AutomaticLogin = fosdem 8 | 9 | # Enabling timed login 10 | # TimedLoginEnable = true 11 | # TimedLogin = user1 12 | # TimedLoginDelay = 10 13 | 14 | [security] 15 | 16 | [xdmcp] 17 | 18 | [chooser] 19 | 20 | [debug] 21 | # Uncomment the line below to turn on debugging 22 | # More verbose logs 23 | # Additionally lets the X server dump core if it crashes 24 | #Enable=true 25 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/files/network/FOSDEM: -------------------------------------------------------------------------------- 1 | [connection] 2 | id=FOSDEM 3 | type=wifi 4 | permissions= 5 | 6 | [wifi] 7 | mac-address-blacklist= 8 | mode=infrastructure 9 | ssid=FOSDEM 10 | 11 | [ipv4] 12 | dns-search= 13 | method=auto 14 | 15 | [ipv6] 16 | addr-gen-mode=stable-privacy 17 | dns-search= 18 | method=auto 
19 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/files/network/FOSDEM-dualstack: -------------------------------------------------------------------------------- 1 | [connection] 2 | id=FOSDEM-dualstack 3 | type=wifi 4 | permissions= 5 | 6 | [wifi] 7 | mac-address-blacklist= 8 | mode=infrastructure 9 | ssid=FOSDEM-dualstack 10 | 11 | [ipv4] 12 | dns-search= 13 | method=auto 14 | 15 | [ipv6] 16 | addr-gen-mode=stable-privacy 17 | dns-search= 18 | method=auto 19 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/files/network/interfaces: -------------------------------------------------------------------------------- 1 | # This file describes the network interfaces available on your system 2 | # and how to activate them. For more information, see interfaces(5). 3 | 4 | source /etc/network/interfaces.d/* 5 | 6 | # The loopback network interface 7 | auto lo 8 | iface lo inet loopback 9 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/files/wallpaper.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/desk-laptop/files/wallpaper.png -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart gdm 3 | systemd: 4 | daemon_reload: true 5 | name: gdm 6 | state: restarted 7 | 8 | - name: restart networkmanager 9 | systemd: 10 | name: NetworkManager 11 | state: restarted 12 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/tasks/configure_cups.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - name: install printer packages 3 | apt: 4 | state: latest 5 | package: 6 | - cups 7 | - cups-bsd 8 | - cups-pdf 9 | - printer-driver-all 10 | 11 | - name: add printer drivers 12 | copy: 13 | src: "printer/{{ item }}" 14 | dest: "/etc/cups/ppd/{{ item }}" 15 | owner: root 16 | group: lp 17 | mode: 0644 18 | with_items: 19 | - Xerox-Phaser-6510.ppd 20 | - Xerox-WorkCentre-6515.ppd 21 | 22 | - name: stop cupsd before writing the printer config 23 | systemd: 24 | state: stopped 25 | name: cups 26 | changed_when: false 27 | 28 | - name: configure printers 29 | copy: 30 | src: "printer/printers.conf" 31 | dest: "/etc/cups/printers.conf" 32 | owner: root 33 | group: lp 34 | mode: 0600 35 | 36 | - name: start cupsd again 37 | systemd: 38 | state: started 39 | name: cups 40 | changed_when: false 41 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/tasks/configure_hardware.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: install firmware and hardware management utilities 3 | apt: 4 | state: latest 5 | package: 6 | - bluez-firmware 7 | - cpufreqd 8 | - cpufrequtils 9 | - firmware-misc-nonfree 10 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/tasks/configure_network.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: unconfigure the wired network 4 | copy: 5 | src: network/interfaces 6 | dest: /etc/network/interfaces 7 | owner: root 8 | group: root 9 | mode: 0644 10 | 11 | - name: create networkmanager config directory 12 | file: 13 | path: "{{ item.path }}" 14 | mode: "{{ item.mode }}" 15 | state: directory 16 | with_items: 17 | - path: /etc/NetworkManager/ 18 | mode: "0644" 19 | - path: /etc/NetworkManager/system-connections/ 20 
| mode: "0600" 21 | 22 | - name: configure wireless networks 23 | copy: 24 | src: "network/{{ item }}" 25 | dest: "/etc/NetworkManager/system-connections/{{ item }}" 26 | owner: root 27 | group: root 28 | mode: 0600 29 | with_items: 30 | - FOSDEM 31 | - FOSDEM-dualstack 32 | notify: restart networkmanager 33 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/tasks/configure_user_fosdem.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: add fosdem user to secondary groups 4 | user: 5 | name: fosdem 6 | groups: cdrom,plugdev,audio,video,lpadmin,lp,sudo,adm 7 | append: true 8 | 9 | - name: disable gnome initial setup 10 | copy: 11 | dest: /home/fosdem/.config/gnome-initial-setup-done 12 | content: "yes" 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: install system packages 3 | apt: 4 | state: latest 5 | package: 6 | - dmidecode 7 | - dosfstools 8 | - exfat-fuse 9 | - exfatprogs 10 | - firmware-iwlwifi 11 | - hwinfo 12 | - ntfs-3g 13 | - rsync 14 | - screen 15 | - sysstat 16 | - unrar-free 17 | - vim-nox 18 | - wget 19 | - python3-psutil 20 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/desk-laptop/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: install_packages.yml 3 | - import_tasks: configure_network.yml 4 | - import_tasks: configure_hardware.yml 5 | - import_tasks: configure_cups.yml 6 | - import_tasks: configure_desktop.yml 7 | - import_tasks: configure_user_fosdem.yml 8 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/dhclient/tasks/main.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - name: Fix DHCP client config 3 | template: 4 | src: dhclient.conf.j2 5 | dest: /etc/dhcp/dhclient.conf 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-backend/files/deb-multimedia.pref: -------------------------------------------------------------------------------- 1 | Package: * 2 | Pin: release o=Unofficial Multimedia Packages 3 | Pin-Priority: 50 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-backend/files/gridengine.defaults: -------------------------------------------------------------------------------- 1 | # Sun Grid Engine configuration 2 | 3 | # Boolean options in this file must be set to yes or no 4 | 5 | # Start the queue master daemon? (if installed) 6 | SGE_START_MASTERD=yes 7 | 8 | # Start the execution daemon? (if installed) 9 | SGE_START_EXECD=yes 10 | 11 | # SGE_ROOT will default to /var/lib/gridengine 12 | SGE_ROOT=/var/lib/gridengine 13 | 14 | # SGE_CELL will default to default 15 | SGE_CELL=sreview 16 | 17 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-backend/tasks/fix_hosts.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: fix the hosts file so gridengine (and other things) will work 3 | template: 4 | dest: /etc/hosts 5 | src: hosts.j2 6 | owner: root 7 | group: root 8 | mode: 0664 9 | register: hosts_fix 10 | 11 | - name: restart gridengine if needed 12 | service: 13 | name: gridengine-exec 14 | state: restarted 15 | when: hosts_fix.changed 16 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-backend/tasks/homedir.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: 
ensure sreview homedir is mounted 3 | mount: 4 | name: /var/lib/sreview 5 | src: reviewstorage-int.video.fosdem.org:/srv/sreview/sreview-home 6 | fstype: nfs 7 | state: mounted 8 | when: "'cloud-encoders' not in group_names" 9 | 10 | - name: ensure sreview tmp directory exists 11 | file: 12 | path: /var/lib/sreview/tmp 13 | state: directory 14 | owner: sreview 15 | group: sreview 16 | mode: 0755 17 | 18 | - name: ensure script-output directory exists 19 | file: 20 | path: /var/lib/sreview/script-output 21 | state: directory 22 | owner: sreview 23 | group: sreview 24 | mode: 0755 25 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-backend/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: install_packages.yml 3 | when: "'encoder-storage' not in group_names" 4 | - import_tasks: fix_hosts.yml 5 | when: "'encoder-storage' not in group_names" 6 | - import_tasks: users.yml 7 | - import_tasks: homedir.yml 8 | when: "'encoder-storage' not in group_names" 9 | - import_tasks: whisper.yml 10 | when: "'encoder-storage' not in group_names" 11 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-backend/tasks/users.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: add sreview group 3 | group: 4 | name: sreview 5 | gid: 2000 6 | 7 | - name: add sreview user 8 | user: 9 | name: sreview 10 | comment: "Sreview user" 11 | uid: 2000 12 | group: sreview 13 | shell: /bin/bash 14 | password: "" 15 | 16 | - name: add sgeadmin group 17 | group: 18 | name: sgeadmin 19 | gid: 2001 20 | 21 | - name: add sgeadmin user 22 | user: 23 | name: sgeadmin 24 | uid: 2001 25 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-backend/tasks/whisper.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - name: Install python3-venv 3 | apt: 4 | name: python3-venv 5 | state: present 6 | 7 | - name: Install OpenAI whisper 8 | pip: 9 | name: openai-whisper 10 | state: present 11 | virtualenv: /var/lib/sreview/whisper 12 | virtualenv_command: python3 -m venv 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/announce.ep: -------------------------------------------------------------------------------- 1 | Hi! 2 | 3 | This is to inform you that the video of your talk, titled 4 | 5 | <%= $title %> 6 | 7 | has now been released and will soon be available at 8 | 9 | <%= $talk->eventurl %> 10 | 11 | It may be a few hours before our mirrors get the files and the links work. 12 | 13 | Feel free to announce it to whoever you think may be interested! 14 | 15 | Thanks for speaking at FOSDEM, 16 | 17 | The FOSDEM team. 18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/close.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/close.png -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Black Italic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Black Italic.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Black.ttf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Black.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Bold Italic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Bold Italic.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Bold.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Italic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Italic.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Light Italic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Light Italic.ttf -------------------------------------------------------------------------------- 
/ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Light.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Light.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Regular.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Roman Italic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Roman Italic.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Roman.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Frutiger Roman.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Signika-Bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Signika-Bold.ttf 
-------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Signika-Light.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Signika-Light.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Signika-Medium.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Signika-Medium.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Signika-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Signika-Regular.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Signika-SemiBold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Signika-SemiBold.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/fonts/Signika-VariableFont_GRAD,wght.ttf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/encoder-common/files/fonts/Signika-VariableFont_GRAD,wght.ttf -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/files/load.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | while read a 4 | do 5 | if [ "$a" == "quit" ] 6 | then 7 | exit 8 | fi 9 | tmpfree=$(df --output=avail /tmp | tail -n1) 10 | storagefree=$(df --output=avail /srv/sreview/storage | tail -n1) 11 | me=$(hostname -f) 12 | 13 | echo begin 14 | echo ${me}:tmpfree:${tmpfree}k 15 | echo global:storagefree:${storagefree}k 16 | echo end 17 | done 18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/tasks/gridengine.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: run setup 3 | setup: 4 | 5 | - name: install load script 6 | copy: 7 | src: load.sh 8 | dest: /usr/local/bin/load.sh 9 | mode: 0755 10 | register: load_sh 11 | 12 | - name: restart gridengine service when needed 13 | service: 14 | name: gridengine-exec 15 | enabled: true 16 | state: started 17 | when: load_sh.changed 18 | ignore_errors: true 19 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: gridengine.yml 3 | - import_tasks: mounts.yml 4 | - import_tasks: sreview.yml 5 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/tasks/mounts.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: ensure /srv/sreview exists 3 | file: 4 | path: /srv/sreview 5 | state: 
directory 6 | 7 | - name: ensure /srv/sreview/ paths exist 8 | file: 9 | path: /srv/sreview/{{ item }} 10 | state: directory 11 | with_items: 12 | - incoming 13 | - assets 14 | - storage 15 | - output 16 | 17 | - name: ensure NFS mount software is installed 18 | apt: 19 | state: latest 20 | package: 21 | - nfs-common 22 | when: "'cloud-encoders' not in group_names" 23 | 24 | - name: mount working storage 25 | mount: 26 | name: /srv/sreview/{{ item }} 27 | src: reviewstorage-int.video.fosdem.org:/srv/sreview/{{ item }} 28 | fstype: nfs 29 | state: mounted 30 | with_items: 31 | - assets 32 | - storage 33 | - output 34 | when: "'cloud-encoders' not in group_names" 35 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-common/vars/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | sreview_db_pw: !vault | 3 | $ANSIBLE_VAULT;1.1;AES256 4 | 38343137336461316561373066643462383739393533643933393930373437346532373032336566 5 | 6537393765396661616636626634623735646435626233300a643431356530303936373433353638 6 | 35663035616538626436663639383033386431326163636361653537393732386533343133663864 7 | 6464656638643335310a316233336537373766623236313861346262363464373861616561356132 8 | 6462 9 | sreview_db_host: encode-master.video.fosdem.org 10 | mojosecret: !vault | 11 | $ANSIBLE_VAULT;1.1;AES256 12 | 39613230656430393932643563316231396338663263306531363165343033313034616333613765 13 | 3530666366333633313330363663393365303664643762380a646439373635646139313935343138 14 | 31663465653932343461636664346531303039306334323532636535656632633861363466343364 15 | 6466393836396232330a643263306431323362636463626530313732363166653938356263636131 16 | 30336435626265386233353765343432643232623937666665333136303538353463663039353462 17 | 6462366132376462343137336166313665643633353462333135 18 | -------------------------------------------------------------------------------- 
/ansible/playbooks/roles/encoder-master/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | sql_exporter_version: 0.17.0 3 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-master/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart sql_exporter 3 | systemd: name=sql_exporter.service state=restarted 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-master/tasks/config_schedule_import.yml: -------------------------------------------------------------------------------- 1 | - name: install Fosdem schedule parser 2 | copy: 3 | src: Schedule/Fosdem.pm 4 | dest: /usr/share/perl5/SReview/Schedule/Fosdem.pm 5 | mode: 0644 6 | 7 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-master/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: install_packages.yml 3 | - import_tasks: nfs.yml 4 | - import_tasks: postgresql.yml 5 | - import_tasks: sql_exporter.yml 6 | - import_tasks: config_schedule_import.yml 7 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-master/tasks/nfs.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "add exports" 3 | blockinfile: 4 | create: true 5 | dest: /etc/exports 6 | block: | 7 | /var/lib/sreview 10.0.0.0/8(rw,sync) # noqa 203 8 | register: nfs_exported 9 | 10 | - name: "restart NFS server when needed" 11 | service: 12 | name: nfs-server 13 | state: restarted 14 | when: nfs_exported.changed 15 | -------------------------------------------------------------------------------- 
/ansible/playbooks/roles/encoder-master/tasks/postgresql.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "ensure postgresql is there" 3 | apt: 4 | pkg: postgresql 5 | state: latest 6 | install_recommends: false 7 | 8 | - name: "ensure listen is on" 9 | blockinfile: 10 | create: true 11 | dest: /etc/postgresql/13/main/postgresql.conf 12 | block: | 13 | listen_addresses = '*' 14 | register: postgresql_conf 15 | 16 | - name: "allow remote access to postgres db" 17 | blockinfile: 18 | create: true 19 | dest: /etc/postgresql/13/main/pg_hba.conf 20 | block: | 21 | host all all 10.0.0.0/8 md5 22 | register: pghba_conf 23 | 24 | - name: "ensure restart when needed" 25 | service: 26 | name: postgresql 27 | state: restarted 28 | when: postgresql_conf.changed or pghba_conf.changed 29 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-master/templates/sql_exporter.service.j2: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=SQL Exporter 3 | 4 | [Service] 5 | User=sql-exporter 6 | Group=sql-exporter 7 | ExecStart=/opt/sql_exporter/sql_exporter-{{ sql_exporter_version }}.linux-amd64/sql_exporter -config.file=/opt/sql_exporter/sql_exporter.yml 8 | ExecReload=/bin/kill -HUP $MAINPID 9 | KillMode=process 10 | Restart=always 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-network/handlers/main.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart nftables 3 | systemd: 4 | daemon_reload: true 5 | name: nftables.service 6 | state: restarted 7 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-network/tasks/main.yaml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - name: Ensure nftables is installed 3 | apt: 4 | package: nftables 5 | state: latest 6 | 7 | - name: Create nftables config file 8 | template: 9 | src: nftables.conf.j2 10 | dest: /etc/nftables.conf 11 | notify: restart nftables 12 | 13 | - name: Ensure nftables service is enabled and started 14 | systemd: 15 | daemon_reload: true 16 | name: nftables.service 17 | state: started 18 | enabled: true 19 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-network/templates/nftables.conf.j2: -------------------------------------------------------------------------------- 1 | #!/usr/sbin/nft -f 2 | 3 | {{ ansible_managed | comment }} 4 | 5 | flush ruleset 6 | 7 | table inet filter { 8 | chain input { 9 | type filter hook input priority 0; 10 | policy drop; 11 | iif lo accept 12 | ct state established,related accept 13 | ip6 nexthdr icmpv6 accept 14 | ip protocol icmp accept 15 | ip saddr 172.20.22.210/24 accept 16 | tcp dport {ssh,http,https,5432,6444,6445,9100,9399,9081} accept 17 | } 18 | 19 | chain forward { 20 | type filter hook forward priority 0; policy accept; 21 | } 22 | 23 | chain output { 24 | type filter hook output priority 0; policy accept; 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-storage/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: install the NFS server 3 | apt: 4 | state: latest 5 | install_recommends: false 6 | package: 7 | - nfs-kernel-server 8 | 9 | - name: "add exports" 10 | blockinfile: 11 | create: true 12 | dest: /etc/exports 13 | block: | 14 | /srv/sreview 172.20.22.0/24(rw,sync,no_subtree_check,no_root_squash) 15 | register: reviewstorage_nfs_exported 16 | 17 | - name: "restart the NFS server when needed" 18 | service: 19 | name: 
nfs-server 20 | state: restarted 21 | when: reviewstorage_nfs_exported.changed 22 | 23 | - name: ensure the required directories exist 24 | file: 25 | name: "/srv/sreview/{{ item }}" 26 | state: directory 27 | with_items: 28 | - assets 29 | - storage 30 | - output 31 | - sreview-home 32 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-webinterface/defaults/main.yml: -------------------------------------------------------------------------------- 1 | letsencrypt_well_known_dir: /var/www/well-known 2 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-webinterface/files/maint.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Under maintenance 5 | 6 | 7 |

Under maintenance

8 |

This site is currently under maintenance. Please check back later!

9 | 10 | 11 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-webinterface/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload nginx 3 | systemd: 4 | name: nginx 5 | state: reloaded 6 | 7 | - name: Restart nginx 8 | systemd: 9 | name: nginx 10 | state: restarted 11 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-webinterface/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: config_nginx_sreview.yml 3 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-webinterface/templates/nginx/sites-enabled/sreview-web-http.j2: -------------------------------------------------------------------------------- 1 | # {{ ansible_managed }} 2 | 3 | server { 4 | listen 80; 5 | listen [::]:80; 6 | server_name review.video.fosdem.org; 7 | 8 | root /var/www/html; 9 | 10 | index index.html; 11 | 12 | location /.well-known/ { 13 | alias {{ letsencrypt_well_known_dir }}/; 14 | } 15 | 16 | location / { 17 | return 301 https://$host$request_uri; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/encoder-webinterface/templates/nginx/sites-enabled/sreview-web-https.j2: -------------------------------------------------------------------------------- 1 | # {{ ansible_managed }} 2 | 3 | server { 4 | listen 443 ssl http2; 5 | listen [::]:443 ssl http2; 6 | server_name review.video.fosdem.org; 7 | 8 | ssl_certificate /etc/ssl/ansible/certs/sreview-web.fullchain.pem; 9 | ssl_certificate_key /etc/ssl/ansible/private/sreview-web.key; 10 | client_max_body_size 2G; 11 | 12 | add_header Strict-Transport-Security max-age=15768000; 13 | 14 | root /var/www/html; 15 | 16 | index index.html; 17 | 
18 | location / { 19 | proxy_set_header Host $http_host; 20 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 21 | proxy_set_header X-Forwarded-Proto $scheme; 22 | proxy_set_header Upgrade $http_upgrade; 23 | 24 | proxy_pass http://localhost:8080/; 25 | } 26 | 27 | location /video { 28 | root /srv/sreview/storage; 29 | } 30 | location /pages { 31 | root /var/www/html/; 32 | } 33 | 34 | access_log /var/log/nginx/sreview-access.log; 35 | error_log /var/log/nginx/sreview-error.log; 36 | error_page 502 /pages/maint.html; 37 | } 38 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/helm-kubequeue/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: enable kubequeue helm chart repository 3 | kubernetes.core.helm_repository: 4 | name: kubequeue 5 | repo_url: https://gitlab.grep.be/api/v4/projects/14/packages/helm/dev 6 | 7 | - name: install kubequeue helm chart 8 | kubernetes.core.helm: 9 | name: kubequeue 10 | namespace: kubequeue 11 | create_namespace: true 12 | chart_ref: kubequeue/kubequeue 13 | update_repo_cache: true 14 | values: 15 | containerLabel: git-a03a0403 16 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/helm-sreview/files/announce.ep: -------------------------------------------------------------------------------- 1 | Hi! 2 | 3 | This is to inform you that the video of your talk, titled 4 | 5 | <%= $title %> 6 | 7 | has now been released and will soon be available at 8 | 9 | <%= $talk->eventurl %> 10 | 11 | It may be a few hours before our mirrors get the files and the links work. 12 | 13 | Feel free to announce it to whoever you think may be interested! 14 | 15 | Thanks for speaking at FOSDEM, 16 | 17 | The FOSDEM team. 
18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/helm-sreview/files/fosdem_video_post.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/helm-sreview/files/fosdem_video_post.png -------------------------------------------------------------------------------- /ansible/playbooks/roles/helm/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: create directory 3 | file: 4 | path: /opt/helm 5 | state: directory 6 | 7 | - name: install helm 8 | unarchive: 9 | src: "https://get.helm.sh/helm-v3.10.3-linux-amd64.tar.gz" 10 | dest: "/opt/helm/" 11 | copy: false 12 | 13 | - name: create symlink to helm binary 14 | file: 15 | path: /usr/local/bin/helm 16 | state: link 17 | src: /opt/helm/linux-amd64/helm 18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/hwraid/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | hwraid_packages: 3 | - megacli 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/hwraid/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Install hwraid packages from https://hwraid.le-vert.net/wiki/DebianPackages 3 | 4 | - name: Install gpg key 5 | apt_key: 6 | url: https://hwraid.le-vert.net/debian/hwraid.le-vert.net.gpg.key 7 | 8 | - name: Install apt repo 9 | apt_repository: 10 | filename: hwraid 11 | repo: "deb http://hwraid.le-vert.net/{{ ansible_distribution | lower }} {{ ansible_distribution_release }} main" 12 | 13 | - name: Install hwraid packages 14 | apt: 15 | package: "{{ hwraid_packages }}" 16 | 
-------------------------------------------------------------------------------- /ansible/playbooks/roles/ipmitool/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart ipmi_metrics.timer 3 | become: true 4 | systemd: 5 | daemon_reload: true 6 | name: ipmi_metrics.timer 7 | state: restarted 8 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/ipmitool/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Copy prometheus tool 3 | copy: 4 | src: ipmi_sensor_prometheus.py 5 | dest: /usr/local/sbin/ipmi_sensor_prometheus.py 6 | owner: root 7 | mode: 0755 8 | 9 | - name: install kernel modules 10 | modprobe: 11 | name: "{{ item }}" 12 | state: present 13 | with_items: 14 | - acpi_ipmi 15 | - ipmi_devintf 16 | - ipmi_si 17 | 18 | - name: install packages 19 | apt: 20 | state: latest 21 | install_recommends: false 22 | package: 23 | - ipmitool 24 | 25 | - name: Install ipmi_metrics systemd service file 26 | notify: 27 | - restart ipmi_metrics.timer 28 | template: 29 | src: "{{ item }}" 30 | dest: "/etc/systemd/system/{{ item }}" 31 | with_items: 32 | - ipmi_metrics.service 33 | - ipmi_metrics.timer 34 | 35 | - name: Enable ipmi_metrics service/timer 36 | systemd: 37 | name: "{{ item }}" 38 | enabled: true 39 | daemon_reload: true 40 | with_items: 41 | - ipmi_metrics.service 42 | - ipmi_metrics.timer 43 | 44 | - name: Start ipmi_metrics timer 45 | systemd: 46 | name: ipmi_metrics.timer 47 | start: true 48 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/ipmitool/templates/ipmi_metrics.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=ipmi metrics 3 | 4 | [Service] 5 | Type=oneshot 6 | ExecStart=/bin/sh -c '/usr/local/sbin/ipmi_sensor_prometheus.py > {{ 
node_exporter_textfile_dir }}/ipmi.prom' 7 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/ipmitool/templates/ipmi_metrics.timer: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=ipmi metrics 3 | 4 | [Timer] 5 | OnUnitActiveSec=10s 6 | OnBootSec=10s 7 | 8 | [Install] 9 | WantedBy=timers.target 10 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/json_exporter/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | json_exporter_version: 0.1.0 3 | json_exporter_dl: https://github.com/superq/prometheus-json-exporter/releases/download 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/json_exporter/files/config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Room state has 2 meanings, fullness and alert, when 2 alert is on. 
3 | # 0 room is empty and alert is 0 4 | # 1 room is full and alert is 0 5 | # 2 room is full and alert alert alert 6 | - name: fosdem_room 7 | type: object 8 | path: $[*]?(@.state == "2") 9 | labels: 10 | room: $.roomname 11 | value: 12 | emergency: 1 13 | full: 1 14 | 15 | - name: fosdem_room 16 | type: object 17 | path: $[*]?(@.state != "2") 18 | labels: 19 | room: $.roomname 20 | values: 21 | full: $.state 22 | emergency: 0 23 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/json_exporter/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart json_exporter 3 | systemd: name=json_exporter.service state=restarted 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/json_exporter/tasks/preflight.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - block: 3 | - name: "Get checksum list" 4 | set_fact: 5 | __json_exporter_checksums: "{{ lookup('url', json_exporter_dl + '/v' + json_exporter_version + '/sha256sums.txt', wantlist=True) | list }}" 6 | run_once: true 7 | 8 | - name: "Get checksum for {{ go_arch }} architecture" 9 | set_fact: 10 | __json_exporter_checksum: "{{ item.split(' ')[0] }}" 11 | with_items: "{{ __json_exporter_checksums }}" 12 | when: 13 | - "('linux-' + go_arch + '.tar.gz') in item" 14 | delegate_to: localhost 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/json_exporter/templates/json_exporter.service.j2: -------------------------------------------------------------------------------- 1 | {{ ansible_managed | comment }} 2 | 3 | [Unit] 4 | Description=JSON Exporter 5 | 6 | [Service] 7 | User=json-exporter 8 | Group=json-exporter 9 | ExecStart=/usr/local/bin/json_exporter https://api.fosdem.org/roomstatus/v1/listrooms /etc/json_exporter/config.yml 10 | 
ExecReload=/bin/kill -HUP $MAINPID 11 | KillMode=process 12 | Restart=always 13 | 14 | [Install] 15 | WantedBy=multi-user.target 16 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/laptop/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: restart systemd-logind 4 | service: 5 | name: systemd-logind 6 | state: restarted 7 | 8 | - name: update grub 9 | shell: update-grub 10 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/laptop/tasks/cleanup_apt_proxy.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | # We use apt proxy during PXE installs, make sure it's cleaned up 4 | # Matching the exact IP address used at the office, just in case we'd actually 5 | # want to use apt proxy somewhere somehow 6 | - name: clean up apt proxy settings 7 | ansible.builtin.lineinfile: 8 | path: /etc/apt/apt.conf 9 | state: absent 10 | regexp: '^Acquire::http::Proxy "http://172.22.10.30:3142";' 11 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/laptop/tasks/disable_suspend.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: disable suspend and hibernate 3 | systemd: 4 | name: "{{ item }}" 5 | enabled: false 6 | masked: true 7 | with_items: 8 | - sleep.target 9 | - suspend.target 10 | - hibernate.target 11 | - hybrid-sleep.target 12 | 13 | 14 | - name: disable systemd lid switch handling 15 | lineinfile: 16 | dest: /etc/systemd/logind.conf 17 | regexp: "^HandleLidSwitch" 18 | line: "HandleLidSwitch=ignore" 19 | state: present 20 | notify: restart systemd-logind 21 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/laptop/tasks/disable_usbcore_autosuspend.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | 3 | # this causes issues on some machines, taking up one core 4 | - name: disable usb autosuspend 5 | template: 6 | src: grub/disable-usbcore-autosuspend.cfg 7 | dest: /etc/default/grub.d/disable-usbcore-autosuspend.cfg 8 | owner: root 9 | group: root 10 | mode: 0644 11 | notify: update grub 12 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/laptop/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # Various tasks to be performed on hosts running on laptops 3 | 4 | - import_tasks: disable_suspend.yml 5 | - import_tasks: disable_usbcore_autosuspend.yml 6 | - import_tasks: cleanup_apt_proxy.yml 7 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/laptop/templates/grub/disable-usbcore-autosuspend.cfg: -------------------------------------------------------------------------------- 1 | # {{ ansible_managed }} 2 | GRUB_CMDLINE_LINUX_DEFAULT="$GRUB_CMDLINE_LINUX_DEFAULT usbcore.autosuspend=-1" 3 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps-tileserver/files/c3nav-tiles.ini: -------------------------------------------------------------------------------- 1 | [uwsgi] 2 | base = /opt/c3nav/src 3 | module = c3nav.tileserver.wsgi 4 | 5 | master = true 6 | processes = 5 7 | 8 | socket = /var/run/c3nav/tiles.sock 9 | vacuum = true 10 | chmod-socket = 660 11 | chown-socket = c3nav:www-data 12 | 13 | # note : this only disables access logs 14 | disable-logging = true 15 | logto=/var/log/c3nav/tiles.log 16 | 17 | die-on-term = true 18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps-tileserver/files/tmpfiles.d/c3nav.conf: 
-------------------------------------------------------------------------------- 1 | d /var/run/c3nav 0755 c3nav c3nav - - 2 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps-tileserver/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: reload nginx 3 | service: name=nginx state=reloaded 4 | 5 | - name: reload redis 6 | service: name=redis-server state=reloaded 7 | 8 | - name: reload postgres 9 | service: name=postgresql state=reloaded 10 | 11 | - name: clearmapcache 12 | shell: . /opt/c3nav/env/bin/activate && \ 13 | /opt/c3nav/src/manage.py clearmapcache --include-geometries \ 14 | --include-history 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps-tileserver/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "install packages" 3 | apt: 4 | state: latest 5 | install_recommends: false 6 | package: 7 | - git 8 | - nginx 9 | - uwsgi-plugin-python3 10 | - python3-requests 11 | - python3-numpy 12 | - python3-pylibmc 13 | - memcached 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps-tileserver/tasks/virtualenv.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: virtualenv - tileserver 3 | pip: 4 | requirements: /opt/c3nav/src/requirements-tileserver.txt 5 | virtualenv_python: python3.6 6 | virtualenv: /opt/c3nav/env 7 | 8 | - name: install uwsgi with pip 9 | pip: 10 | name: uwsgi 11 | virtualenv_python: python3.6 12 | virtualenv: /opt/c3nav/env 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps-tileserver/templates/c3nav-tiles.service:
-------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=uWSGI c3nav tileserver 3 | 4 | [Service] 5 | ExecStart=/opt/c3nav/env/bin/uwsgi --ini /etc/c3nav/c3nav-tiles.ini 6 | WorkingDirectory=/opt/c3nav/src 7 | User=c3nav 8 | Group=www-data 9 | Environment=C3NAV_UPSTREAM_BASE=https://{{domain}}/ 10 | Environment=C3NAV_TILE_SECRET_FILE=/opt/c3nav/data/.tile_secret 11 | Environment=C3NAV_DATA_DIR=/opt/c3nav/tiledata 12 | 13 | [Install] 14 | WantedBy=multi-user.target 15 | 16 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps-tileserver/templates/nginx/c3nav-tiles: -------------------------------------------------------------------------------- 1 | server { 2 | listen [::]:80; 3 | listen 80; 4 | server_name {{domain}}; 5 | location / { 6 | uwsgi_pass unix:///var/run/c3nav/tiles.sock; 7 | include uwsgi_params; 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps-tileserver/templates/nginx/c3nav-tiles.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=uWSGI c3nav tileserver 3 | 4 | [Service] 5 | ExecStart=/opt/c3nav/env/bin/uwsgi --ini /etc/c3nav/c3nav-tiles.ini 6 | WorkingDirectory=/opt/c3nav/src 7 | User=c3nav 8 | Group=www-data 9 | Environment=C3NAV_UPSTREAM_BASE=https://{{upstream}}/ 10 | Environment=C3NAV_TILE_SECRET_FILE=/opt/c3nav/data/.tile_secret 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/files/c3nav-celery.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Celery Service 3 | After=network.target 4 | 5 | [Service] 6 | Type=forking 7 | User=c3nav 8 | Group=c3nav 9 | EnvironmentFile=/etc/c3nav/celery.conf 10 | 
WorkingDirectory=/opt/c3nav/src 11 | ExecStart=/bin/sh -c '${CELERY_BIN} multi start ${CELERYD_NODES} \ 12 | -A ${CELERY_APP} --pidfile=${CELERYD_PID_FILE} \ 13 | --logfile=${CELERYD_LOG_FILE} --loglevel=${CELERYD_LOG_LEVEL} ${CELERYD_OPTS}' 14 | ExecStop=/bin/sh -c '${CELERY_BIN} multi stopwait ${CELERYD_NODES} \ 15 | --pidfile=${CELERYD_PID_FILE}' 16 | ExecReload=/bin/sh -c '${CELERY_BIN} multi restart ${CELERYD_NODES} \ 17 | -A ${CELERY_APP} --pidfile=${CELERYD_PID_FILE} \ 18 | --logfile=${CELERYD_LOG_FILE} --loglevel=${CELERYD_LOG_LEVEL} ${CELERYD_OPTS}' 19 | 20 | [Install] 21 | WantedBy=multi-user.target 22 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/files/c3nav-gunicorn.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=gunicorn c3nav daemon 3 | After=network.target 4 | 5 | [Service] 6 | User=c3nav 7 | Group=c3nav 8 | WorkingDirectory=/opt/c3nav/src 9 | ExecStart=/opt/c3nav/env/bin/gunicorn --workers 8 --bind unix:/var/run/c3nav/gunicorn-c3nav.sock c3nav.wsgi:application --log-level warning --error-logfile /var/log/c3nav/gunicorn.log --access-logfile /dev/null 10 | 11 | [Install] 12 | WantedBy=multi-user.target 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/files/celery.conf: -------------------------------------------------------------------------------- 1 | CELERY_BIN="/opt/c3nav/env/bin/celery" 2 | CELERY_APP=c3nav 3 | CELERYD_PID_FILE=/var/run/c3nav/celery.pid 4 | CELERYD_NODES="w1 w2" 5 | CELERYD_LOG_FILE=/var/log/c3nav/celery.log 6 | CELERYD_OPTS="--concurrency=2" 7 | CELERYD_LOG_LEVEL="WARNING" 8 | PYTHONPATH=${PYTHONPATH}:/opt/c3nav/src 9 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/files/media/logo.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/maps/files/media/logo.png -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/files/tmpfiles.d/c3nav.conf: -------------------------------------------------------------------------------- 1 | d /var/run/c3nav 0755 c3nav c3nav - - 2 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: reload nginx 3 | service: name=nginx state=reloaded 4 | 5 | - name: reload redis 6 | service: name=redis-server state=reloaded 7 | 8 | - name: reload postgres 9 | service: name=postgresql state=reloaded 10 | 11 | - name: clearmapcache 12 | shell: . /opt/c3nav/env/bin/activate && \ 13 | /opt/c3nav/src/manage.py clearmapcache --include-geometries \ 14 | --include-history 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/tasks/configure_celery.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: install the c3nav celery service configuration 3 | copy: 4 | src: celery.conf 5 | dest: /etc/c3nav/celery.conf 6 | owner: root 7 | group: root 8 | mode: 0644 9 | 10 | - name: install the c3nav celery service 11 | copy: 12 | src: c3nav-celery.service 13 | dest: /etc/systemd/system 14 | owner: root 15 | group: root 16 | mode: 0644 17 | 18 | - name: enable c3nav celery service 19 | systemd: 20 | name: c3nav-celery 21 | state: started 22 | enabled: true 23 | daemon_reload: true 24 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/tasks/configure_gunicorn.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name:
install the tmpfiles.d conf 3 | become: true 4 | copy: 5 | src: tmpfiles.d/c3nav.conf 6 | dest: /usr/lib/tmpfiles.d 7 | 8 | - name: configure tmpfiles.d 9 | become: true 10 | command: systemd-tmpfiles --create 11 | 12 | - name: install the c3nav gunicorn service 13 | copy: 14 | src: c3nav-gunicorn.service 15 | dest: /etc/systemd/system 16 | owner: root 17 | group: root 18 | mode: 0644 19 | 20 | - name: enable c3nav gunicorn service 21 | systemd: 22 | name: c3nav-gunicorn 23 | state: started 24 | enabled: true 25 | daemon_reload: true 26 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/tasks/configure_nginx.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: configure app 4 | template: 5 | src: nginx/c3nav 6 | dest: /etc/nginx/sites-enabled/c3nav 7 | owner: root 8 | group: root 9 | mode: 0644 10 | notify: reload nginx 11 | 12 | - name: remove default config 13 | file: 14 | dest: /etc/nginx/sites-enabled/default 15 | state: absent 16 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "install packages" 3 | apt: 4 | state: latest 5 | install_recommends: false 6 | package: 7 | - git 8 | - libgeos-dev 9 | - librsvg2-bin 10 | - libagg-dev 11 | - libpng-dev 12 | - build-essential 13 | - libfreetype6-dev 14 | - libqhull-dev 15 | - pkg-config 16 | - libjpeg-dev 17 | - libatlas-base-dev 18 | - gettext 19 | - redis-server 20 | - libpq-dev 21 | - python3.8 22 | - python3.8-venv 23 | - python3-pip 24 | - virtualenv 25 | - python3.8-dev 26 | - libmemcached-dev 27 | - python3-psycopg2 28 | - python-setuptools 29 | - python3-setuptools 30 | - python3-tk 31 | - nginx 32 | -------------------------------------------------------------------------------- 
/ansible/playbooks/roles/maps/tasks/virtualenv.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Create virtualenv 3 | pip: 4 | requirements: /opt/c3nav/src/requirements/production.txt 5 | virtualenv_python: python3.8 6 | virtualenv: /opt/c3nav/env 7 | 8 | - name: virtualenv - tileserver 9 | pip: 10 | requirements: /opt/c3nav/src/requirements-tileserver.txt 11 | virtualenv_python: python3.8 12 | virtualenv: /opt/c3nav/env 13 | 14 | - name: virtualenv - redis 15 | pip: 16 | requirements: /opt/c3nav/src/requirements/redis.txt 17 | virtualenv_python: python3.8 18 | virtualenv: /opt/c3nav/env 19 | 20 | - name: virtualenv - postgresql 21 | pip: 22 | requirements: /opt/c3nav/src/requirements/postgres.txt 23 | virtualenv_python: python3.8 24 | virtualenv: /opt/c3nav/env 25 | 26 | - name: install gunicorn with pip 27 | pip: 28 | name: gunicorn 29 | virtualenv_python: python3.8 30 | virtualenv: /opt/c3nav/env 31 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/templates/c3nav/c3nav.cfg: -------------------------------------------------------------------------------- 1 | [django] 2 | hosts=* 3 | 4 | [c3nav] 5 | datadir=/opt/c3nav/data 6 | logdir=/var/log/c3nav/ 7 | header_logo=/opt/c3nav/data/media/logo.png 8 | # primary_color=#a41c31 9 | # header_background_color=#000000 10 | 11 | # should be enabled if tilecaching is used 12 | # tile_access_cookie_domain=.nav-test.fosdem.org 13 | # tile_cache_server=https://tiles.nav-test.fosdem.org/ 14 | 15 | 16 | [database] 17 | backend=postgresql 18 | host={{host}} 19 | user={{user}} 20 | name={{name}} 21 | 22 | [redis] 23 | location=redis://localhost/0 24 | 25 | [celery] 26 | broker=redis://localhost/1 27 | backend=redis://localhost/2 28 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/maps/templates/nginx/c3nav: 
-------------------------------------------------------------------------------- 1 | server { 2 | listen [::]:80; 3 | listen 80; 4 | server_name {{domain}}; 5 | location = /favicon.ico { access_log off; log_not_found off; } 6 | location /static/ { 7 | alias /opt/c3nav/src/c3nav/static.dist/; 8 | } 9 | location /media/ { 10 | alias /opt/c3nav/src/c3nav/media/; 11 | } 12 | location / { 13 | include proxy_params; 14 | proxy_pass http://unix:/var/run/c3nav/gunicorn-c3nav.sock; 15 | } 16 | } 17 | 18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/microk8s/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: install microk8s 3 | community.general.snap: 4 | name: microk8s 5 | channel: v1.25/stable 6 | classic: yes 7 | 8 | - name: kubectl config 9 | shell: 10 | creates: /root/.kube/config 11 | cmd: mkdir -p /root/.kube && microk8s config > /root/.kube/config 12 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/mtail/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart mtail 3 | service: 4 | name: mtail 5 | state: restarted 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/mtail/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: install mtail from stretch-backports 4 | apt: 5 | state: latest 6 | default_release: stretch-backports 7 | package: 8 | - mtail 9 | when: ansible_distribution_release == "stretch" 10 | 11 | - name: install mtail from main 12 | apt: 13 | state: latest 14 | package: 15 | - mtail 16 | when: ansible_distribution_release != "stretch" 17 | 18 | 19 | - name: enable and start mtail service 20 | service: 21 | name: mtail 22 | state: started 23 | enabled: true 24 | 25 | - name:
install mtail defaults file 26 | template: 27 | src: defaults 28 | dest: /etc/default/mtail 29 | notify: 30 | - restart mtail 31 | 32 | - name: install mtail configs 33 | copy: 34 | src: "{{ item }}" 35 | dest: /etc/mtail/ 36 | owner: root 37 | mode: 0644 38 | with_fileglob: 39 | - mtail/*.mtail 40 | notify: 41 | - restart mtail 42 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/mtail/templates/defaults: -------------------------------------------------------------------------------- 1 | # Set to 1 to start Mtail at boot. 2 | ENABLED=1 3 | 4 | ## Log processing. 5 | 6 | # List of files to monitor (mandatory). 7 | LOGS=/var/log/nginx/metrics.log 8 | 9 | ## Metrics exporting. 10 | 11 | # HTTP port to listen on. (default "3903") 12 | #PORT=3903 13 | 14 | # Path to collectd unixsock to write metrics to. 15 | #COLLECTD_SOCKETPATH= 16 | 17 | # Host:port to graphite carbon server to write metrics to. 18 | #GRAPHITE_HOSTPORT= 19 | 20 | # Host:port to statsd server to write metrics to. 21 | #STATSD_HOSTPORT= 22 | 23 | # Interval between metric pushes, in seconds (default 60) 24 | #METRIC_PUSH_INTERVAL= 25 | 26 | # Extra command-line arguments to pass to the server. 
27 | #EXTRA_ARGS="" 28 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/oxidized/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install build packages 3 | apt: 4 | state: latest 5 | update_cache: true 6 | cache_valid_time: 3600 7 | package: 8 | - build-essential 9 | - ruby 10 | - ruby-dev 11 | - cmake 12 | - libsqlite3-dev 13 | - libssh2-1-dev 14 | - libssl-dev 15 | - pkg-config 16 | - libicu-dev 17 | - libz-dev 18 | 19 | - name: Add ozidized group 20 | group: 21 | name: oxidized 22 | system: true 23 | 24 | - name: Add oxidized user 25 | user: 26 | name: oxidized 27 | group: oxidized 28 | home: /opt/oxidized 29 | createhome: true 30 | shell: /bin/bash 31 | comment: "oxidized user" 32 | system: true 33 | 34 | - name: Install oxidized 35 | gem: 36 | state: latest 37 | name: "{{ item }}" 38 | with_items: 39 | - oxidized 40 | - oxidized-script 41 | - oxidized-web 42 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/power_supply/files/power_supply_metrics.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Description: Generate /sys/class/power_supply metrics 4 | 5 | if [[ -f /sys/class/power_supply/AC/online ]] ; then 6 | echo "# HELP node_power_supply_ac_online Boolean if the AC is plugged in" 7 | echo "# TYPE node_power_supply_ac_online gauge" 8 | echo "node_power_supply_ac_online $(< /sys/class/power_supply/AC/online)" 9 | fi 10 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/power_supply/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install helper script 3 | copy: 4 | src: power_supply_metrics.sh 5 | dest: /usr/local/bin/power_supply_metrics.sh 6 | owner: root 7 | mode: 0755 8 | 9 | - name: 
Add moreutils 10 | apt: 11 | package: moreutils 12 | 13 | - name: Install timer/service unit 14 | template: 15 | src: "{{ item }}" 16 | dest: /etc/systemd/system/ 17 | owner: root 18 | mode: 0644 19 | with_items: 20 | - power_supply_metrics.timer 21 | - power_supply_metrics.service 22 | 23 | - name: Setup service/timer 24 | service: 25 | daemon_reload: true 26 | name: "{{ item }}" 27 | state: started 28 | enabled: true 29 | with_items: 30 | - power_supply_metrics.service 31 | - power_supply_metrics.timer 32 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/power_supply/templates/power_supply_metrics.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=power_supply metrics 3 | 4 | [Service] 5 | Type=oneshot 6 | ExecStart=/bin/sh -c '/usr/local/bin/power_supply_metrics.sh | sponge {{ node_exporter_textfile_dir }}/power_supply.prom' 7 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/power_supply/templates/power_supply_metrics.timer: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=power_supply metrics 3 | 4 | [Timer] 5 | OnUnitActiveSec=10s 6 | OnBootSec=10s 7 | 8 | [Install] 9 | WantedBy=timers.target 10 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/sshfs/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: install sshfs 3 | apt: 4 | package: sshfs 5 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tacplus/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | tacplus_key: FAKE_KEY 3 | -------------------------------------------------------------------------------- 
/ansible/playbooks/roles/tacplus/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart tacplus 3 | docker_container: 4 | name: tacplus 5 | restart: true 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tacplus/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Create TACPLUS config directory 3 | file: 4 | path: /var/lib/tacplus 5 | state: directory 6 | mode: 0755 7 | 8 | - name: Deploy TACPLUS config 9 | template: 10 | src: tac_plus.conf.j2 11 | dest: /var/lib/tacplus/tac_plus.conf 12 | mode: 0644 13 | notify: restart tacplus 14 | 15 | - name: install python-docker 16 | apt: 17 | package: 18 | - python-docker 19 | 20 | - name: Create TACPLUS container 21 | docker_container: 22 | name: tacplus 23 | image: ajoldham/tacplus 24 | ports: 25 | - "49:49" 26 | volumes: 27 | - "/var/lib/tacplus:/etc/tacacs+" 28 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tacplus/templates/tac_plus.conf.j2: -------------------------------------------------------------------------------- 1 | # TACACS key 2 | key = "{{ tacplus_key }}" 3 | 4 | # Users 5 | user = msuriar { 6 | member = netadmin 7 | } 8 | 9 | user = pevaneyn { 10 | member = netadmin 11 | } 12 | 13 | user = richi { 14 | member = netadmin 15 | } 16 | 17 | user = zzdravko { 18 | member = netadmin 19 | } 20 | 21 | group = netadmin { 22 | default service = permit 23 | service = exec { 24 | priv-lvl = 15 25 | } 26 | } 27 | 28 | 29 | accounting file = /var/log/tacacs/tac_plus.log 30 | accounting syslog 31 | logging = local6 32 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tls-certificates/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | basename: bogus 3 
| common_name: bogus.example 4 | subject_alt_names: [] 5 | base_directory: /etc/ssl/ansible 6 | 7 | self_sign: false 8 | 9 | csr_path: "{{ base_directory }}/certs/{{ basename }}.csr" 10 | certificate_path: "{{ base_directory }}/certs/{{ basename }}.pem" 11 | fullchain_path: "{{ base_directory }}/certs/{{ basename }}.fullchain.pem" 12 | privatekey_path: "{{ base_directory }}/private/{{ basename }}.key" 13 | 14 | letsencrypt_account_key: "{{ base_directory }}/private/letsencrypt_account.key" 15 | letsencrypt_account_email: foo@dbar.org 16 | 17 | letsencrypt_acme_directory: https://acme-v02.api.letsencrypt.org/directory 18 | 19 | letsencrypt_chain_path: "{{ base_directory }}/certs/letsencrypt.chain.pem" 20 | 21 | letsencrypt_challenge_mode: http-01 22 | letsencrypt_remaining_days: 10 23 | letsencrypt_well_known_dir: /var/www/well-known 24 | 25 | skip_unit_test: false 26 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tls-certificates/files/letsencrypt.chain: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/tls-certificates/files/letsencrypt.chain -------------------------------------------------------------------------------- /ansible/playbooks/roles/tls-certificates/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Reload nginx 3 | ansible.builtin.systemd: 4 | name: nginx 5 | state: reloaded 6 | when: not skip_unit_test 7 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tls-certificates/tasks/create_directories.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Create base directory 3 | ansible.builtin.file: 4 | path: "{{ base_directory }}" 5 | state: directory 6 | owner: root 
7 | group: root 8 | mode: "0755" 9 | tags: 10 | - tls-certificates 11 | 12 | - name: Create certs directory 13 | ansible.builtin.file: 14 | path: "{{ base_directory }}/certs" 15 | state: directory 16 | owner: root 17 | group: root 18 | mode: "0755" 19 | tags: 20 | - tls-certificates 21 | 22 | - name: Create private directory 23 | ansible.builtin.file: 24 | path: "{{ base_directory }}/private" 25 | state: directory 26 | owner: root 27 | group: root 28 | mode: "0700" 29 | tags: 30 | - tls-certificates 31 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tls-certificates/tasks/create_keypair.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Generate private key 3 | community.crypto.openssl_privatekey: 4 | state: present 5 | path: "{{ privatekey_path }}" 6 | tags: 7 | - tls-certificates 8 | 9 | - name: Generate subjectaltname list 10 | ansible.builtin.set_fact: 11 | all_subject_alt_names: "{{ [common_name] + subject_alt_names }}" 12 | tags: 13 | - tls-certificates 14 | 15 | - name: Generate certificate signing request 16 | community.crypto.openssl_csr: 17 | state: present 18 | common_name: "{{ common_name }}" 19 | subject_alt_name: "{{ all_subject_alt_names \ 20 | | map('regex_replace', '^(.*)$', 'DNS:\\1') \ 21 | | join(',') }}" 22 | path: "{{ csr_path }}" 23 | privatekey_path: "{{ privatekey_path }}" 24 | register: csr_result 25 | tags: 26 | - tls-certificates 27 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tls-certificates/tasks/letsencrypt.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Create letsencrypt account key 3 | community.crypto.openssl_privatekey: 4 | state: present 5 | path: "{{ letsencrypt_account_key }}" 6 | tags: 7 | - tls-certificates 8 | 9 | - name: Request letsencrypt signing 10 | community.crypto.acme_certificate: 11 
| acme_version: 2 12 | csr: "{{ csr_path }}" 13 | dest: "{{ certificate_path }}" 14 | challenge: http-01 15 | account_key: "{{ letsencrypt_account_key }}" 16 | account_email: "{{ letsencrypt_account_email }}" 17 | acme_directory: "{{ letsencrypt_acme_directory }}" 18 | remaining_days: "{{ letsencrypt_remaining_days }}" 19 | terms_agreed: true 20 | register: letsencrypt_challenge 21 | tags: 22 | - tls-certificates 23 | 24 | - name: Answer letsencrypt challenge 25 | ansible.builtin.include_tasks: letsencrypt_challenge.yml 26 | when: "letsencrypt_challenge is changed or \ 27 | letsencrypt_challenge.cert_days < letsencrypt_remaining_days" 28 | tags: 29 | - tls-certificates 30 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tls-certificates/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install required dependencies 3 | ansible.builtin.apt: 4 | name: 5 | - openssl 6 | - python3-openssl 7 | tags: 8 | - tls-certificates 9 | 10 | - name: Create directories for tls certificates 11 | ansible.builtin.include_tasks: create_directories.yml 12 | tags: 13 | - tls-certificates 14 | 15 | - name: Create private key and csr 16 | ansible.builtin.include_tasks: create_keypair.yml 17 | tags: 18 | - tls-certificates 19 | 20 | - name: Sign certificate with letsencrypt 21 | ansible.builtin.include_tasks: letsencrypt.yml 22 | when: not self_sign and not skip_unit_test 23 | tags: 24 | - tls-certificates 25 | 26 | - name: Self-sign certificate 27 | ansible.builtin.include_tasks: self_sign.yml 28 | when: self_sign 29 | tags: 30 | - tls-certificates 31 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tls-certificates/tasks/nginx_common.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Remove SSL settings from nginx.conf 3 | ansible.builtin.lineinfile: 4 
| state: absent 5 | regexp: ^\s*(ssl_protocols|ssl_prefer_server_ciphers)\s 6 | path: /etc/nginx/nginx.conf 7 | tags: 8 | - tls-certificates 9 | 10 | - name: Write nginx common SSL configuration 11 | ansible.builtin.template: 12 | src: ansible-common-ssl.conf.j2 13 | dest: /etc/nginx/conf.d/ansible-common-ssl.conf 14 | notify: Reload nginx 15 | tags: 16 | - tls-certificates 17 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tls-certificates/tasks/self_sign.yml: -------------------------------------------------------------------------------- 1 | --- 2 | #FIXME: ansible-lint says this should probably be a handler? 3 | - name: Self-sign certificate # noqa: no-handler 4 | ansible.builtin.command: openssl x509 -req -sha256 -days 365 -in {{ csr_path }} 5 | -signkey {{ privatekey_path }} -out {{ certificate_path }} 6 | when: 7 | - csr_result is changed 8 | tags: 9 | - tls-certificates 10 | register: self_sign_result 11 | 12 | - name: Generate full chain # noqa: no-handler 13 | ansible.builtin.shell: 14 | cmd: /bin/cat {{ certificate_path | quote }} > {{ fullchain_path | quote }} 15 | when: 16 | - self_sign_result is changed 17 | tags: 18 | - tls-certificates 19 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/tls-certificates/templates/ansible-common-ssl.conf.j2: -------------------------------------------------------------------------------- 1 | # {{ ansible_managed }} 2 | 3 | ssl_protocols TLSv1.2; 4 | ssl_ciphers 'ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384'; 5 | ssl_prefer_server_ciphers off; 6 | 7 | ssl_session_cache shared:SSL:10m; 8 | ssl_session_timeout 1440m; 9 | ssl_session_tickets off; 10 | 11 | ssl_stapling on; 12 | ssl_stapling_verify on; 13 | 
ssl_trusted_certificate /etc/ssl/certs/ca-certificates.crt; 14 | resolver {% for server in ansible_dns.nameservers %}{% if "%" in server %}{% elif ":" in server %}[{{ server }}]{% else %}{{ server }}{% endif %} {% endfor %}; 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-audio-parser/files/audio-fetcher/audio-fetcher.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | 3 | Description=audio-fetcher 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/audio-fetcher 7 | ExecReload=/bin/kill -HUP $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=on-failure 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-audio-parser/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "install packages" 3 | apt: 4 | state: latest 5 | install_recommends: false 6 | package: 7 | - ffmpeg 8 | 9 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-audio-parser/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: install_packages.yml 3 | - import_tasks: configure_audiofetcher.yml 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-audio-parser/templates/audio-fetcher-fl.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description={{ item }} FL audio parser service 3 | 4 | [Service] 5 | ExecStart=/usr/local/bin/audio-fetcher {{ item }} 0 6 | ExecReload=/bin/kill -HUP $MAINPID 7 | KillMode=control-group 8 | KillSignal=SIGKILL 9 | Restart=always 10 | RestartSec=1 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 
| -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-audio-parser/templates/audio-fetcher-fr.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description={{ item }} FR audio parser service 3 | 4 | [Service] 5 | ExecStart=/usr/local/bin/audio-fetcher {{ item }} 2 6 | ExecReload=/bin/kill -HUP $MAINPID 7 | KillMode=control-group 8 | KillSignal=SIGKILL 9 | Restart=always 10 | RestartSec=1 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | destroy_all_videobox_data: false 3 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/audio-fetcher/audio-fetcher-0.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | 3 | Description=audio-fetcher chan 0 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/audio-fetcher 0 7 | ExecReload=/bin/kill -HUP $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=on-failure 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/audio-fetcher/audio-fetcher-1.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | 3 | Description=audio-fetcher chan 1 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/audio-fetcher 1 7 | ExecReload=/bin/kill -HUP $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=on-failure 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | -------------------------------------------------------------------------------- 
#!/bin/bash
# Loopback self-test for the FOSDEM audio box mixer.
#
# Routing set up before the test:
#   - no direct USB -> USB passthrough
#   - no XLR -> XLR passthrough
#   - no XLR -> headphones
#   - every XLR input  -> both USB channels   (gain $level)
#   - every USB input  -> both XLR outputs    (gain $level)
#   - every USB input  -> both headphone outs (gain $level)
# then alsabat plays/captures a test tone over hw:0 to verify the loop.

# no usb to usb (same loop idiom as the other routing blocks below)
for inp in L R; do
  for out in L R; do
    mixercli set-gain "USB $inp" "USB $out" 0
  done
done

# no xlr to xlr
for inp in 1 2 3; do
  for out in 1 2; do
    mixercli set-gain "XLR $inp" "XLR $out" 0
  done
done

# no xlr to headphones
for inp in 1 2 3; do
  for out in R L; do
    mixercli set-gain "XLR $inp" "Headphone $out" 0
  done
done

# Test gain; quoted at every use to avoid word-splitting (SC2086).
level=0.1

# all xlr to usb
for inp in 1 2 3; do
  for out in R L; do
    mixercli set-gain "XLR $inp" "USB $out" "$level"
  done
done

# all usb to xlr and headphones
for inp in R L; do
  for out in 1 2; do
    mixercli set-gain "USB $inp" "XLR $out" "$level"
  done
done

for inp in R L; do
  for out in R L; do
    mixercli set-gain "USB $inp" "Headphone $out" "$level"
  done
done

# Run the test: 18 kHz tone, 2 channels, 48 kHz sample rate, 5 seconds.
alsabat -P hw:0 -C hw:0 --saveplay=tmpf -F 18000 -c 2 -r 48000 -n 5s
-------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/cgroups/fosdem.conf: -------------------------------------------------------------------------------- 1 | group system.slice { 2 | cpuset { 3 | cpuset.cpus="0-1"; 4 | cpuset.mems="0"; 5 | } 6 | } 7 | 8 | group user.slice { 9 | cpuset { 10 | cpuset.cpus="0-1"; 11 | cpuset.mems="0"; 12 | } 13 | } 14 | 15 | group fosdem.slice { 16 | cpuset { 17 | cpuset.cpus="2-3"; 18 | cpuset.mems="0"; 19 | cpuset.cpu_exclusive="1"; 20 | } 21 | } 22 | 23 | group fosdem.slice/receiver { 24 | cpuset { 25 | cpuset.cpus="2"; 26 | cpuset.mems="0"; 27 | } 28 | } 29 | 30 | group fosdem.slice/preview { 31 | cpuset { 32 | cpuset.cpus="2"; 33 | cpuset.mems="0"; 34 | } 35 | } 36 | 37 | 38 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/config/background.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/video-box-mixer/files/config/background.png -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/config/background.raw: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/video-box-mixer/files/config/background.raw -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/config/config.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | 4 | #data=$(curl -s http://control.video.fosdem.org/query-vocto.php?voctop=$(hostname)) 5 | # 6 | #if [ -z "$data" ] || [ "$data" == "notfound" ]; then 7 | # 
# we don't exist. wait. 8 | # sleep 60 9 | # exit 10 | #fi 11 | # 12 | #room=$(echo $data | cut -d ' ' -f 1) 13 | #cam=$(echo $data | cut -d ' ' -f 2) 14 | #slides=$(echo $data | cut -d ' ' -f 3) 15 | # 16 | #SOURCE_CAM=tcp://${cam}:8899/ 17 | #SOURCE_SLIDES=tcp://${slides}:8899/ 18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/config/defaults.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | WIDTH=1280 3 | HEIGHT=720 4 | FRAMERATE=30 5 | AUDIORATE=48000 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/config/preroll.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/video-box-mixer/files/config/preroll.png -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/config/preroll.raw: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/video-box-mixer/files/config/preroll.raw -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/control/inc.php: -------------------------------------------------------------------------------- 1 | quote($str); 8 | } 9 | 10 | $db = new PDO("pgsql:dbname=fosdem user=www-data"); 11 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/network/video0: -------------------------------------------------------------------------------- 1 | allow-hotplug video0 2 | iface video0 inet dhcp 3 | pre-up ethtool -K 
video0 tx-checksum-ipv6 off 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/patches/videomix.py.diff: -------------------------------------------------------------------------------- 1 | diff --git a/voctocore/lib/videomix.py b/voctocore/lib/videomix.py 2 | index 38432e8..c7d29c0 100644 3 | --- a/voctocore/lib/videomix.py 4 | +++ b/voctocore/lib/videomix.py 5 | @@ -95,9 +95,9 @@ class VideoMix(object): 6 | self.padState.append(PadState()) 7 | 8 | self.log.debug('Initializing Mixer-State') 9 | - self.compositeMode = CompositeModes.fullscreen 10 | - self.sourceA = 0 11 | - self.sourceB = 1 12 | + self.compositeMode = CompositeModes.side_by_side_preview 13 | + self.sourceA = 1 14 | + self.sourceB = 0 15 | self.recalculateMixerState() 16 | self.applyMixerState() 17 | 18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/scripts/restart-voctocore.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | systemctl stop vocto-source-cam.service vocto-source-slides.service vocto-sink-output.service 4 | systemctl restart voctocore 5 | systemctl start vocto-source-cam.service vocto-source-slides.service vocto-sink-output.service 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/signal-status/display-rescan.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function getstatus { 4 | cat /sys/class/drm/*/edid | md5sum |cut -d ' ' -f 1 5 | } 6 | 7 | res=$(getstatus) 8 | state=nochange #or switching or nosignal 9 | while :; do 10 | sleep 0.5 11 | oldres="$res" 12 | res=$(getstatus) 13 | if [[ $state == nochange ]] ; then 14 | if [[ $res == $oldres ]]; then 15 | continue 16 | else 17 | state=changing 18 | waited=0 19 | continue 20 | 
fi 21 | elif [[ $state == changing ]]; then 22 | waited=$(($waited+1)) 23 | if [[ $res != $oldres ]]; then 24 | waited=0 25 | continue 26 | fi 27 | if [[ $waited -eq 3 ]]; then 28 | logger Display signal stable, restarting receiver 29 | systemctl restart vocto-source-slides 30 | sleep 10 31 | state=nochange 32 | waited=0 33 | fi 34 | elif [[ $state == nosignal ]]; then 35 | state=changing 36 | waited=0 37 | continue 38 | fi 39 | 40 | done 41 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/signal-status/statuskeeper.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ms213x-status status --json --loop 1000 --filename /tmp/ms213x-status --region=flaky 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/signal-status/video-capture-rescan.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=video capture rescan service 3 | StartLimitIntervalSec=0 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/capture-rescan.sh 7 | ExecReload=/bin/kill -9 $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=always 11 | RestartSec=1 12 | 13 | [Install] 14 | WantedBy=multi-user.target 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/signal-status/video-display-rescan.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=video display rescan service 3 | StartLimitIntervalSec=0 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/display-rescan.sh 7 | ExecReload=/bin/kill -9 $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=always 11 | RestartSec=1 12 | 13 | [Install] 14 | WantedBy=multi-user.target 15 | 
-------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/signal-status/video-statuskeeper.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=video statuskeeper service 3 | StartLimitIntervalSec=0 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/statuskeeper.sh 7 | ExecReload=/bin/kill -9 $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=always 11 | RestartSec=1 12 | 13 | [Install] 14 | WantedBy=multi-user.target 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/udev/20-fosdem-serial.rules: -------------------------------------------------------------------------------- 1 | SUBSYSTEM=="tty", ATTRS{idVendor}=="f05d", ATTRS{idProduct}=="4001", SYMLINK+="tty_fosdem_box_ctl" 2 | # SUBSYSTEM=="tty", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="048a", SYMLINK+="tty_fosdem_audio_ctl" 3 | 4 | ACTION=="remove", GOTO="fosdem_serial_end" 5 | SUBSYSTEM!="tty", GOTO="fosdem_serial_end" 6 | SUBSYSTEMS=="usb", IMPORT{builtin}="usb_id" 7 | ENV{ID_SERIAL}!="FOSDEM_Audio*", GOTO="fosdem_serial_end" 8 | KERNEL!="ttyACM[0-9]*", GOTO="fosdem_serial_end" 9 | 10 | ENV{ID_USB_INTERFACE_NUM}=="00", SYMLINK+="tty_fosdem_audio_ctl" 11 | ENV{ID_USB_INTERFACE_NUM}=="02", SYMLINK+="tty_fosdem_audio_debug" 12 | 13 | LABEL="fosdem_serial_end" 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/udev/99-picotool.rules: -------------------------------------------------------------------------------- 1 | SUBSYSTEM=="usb", \ 2 | ATTRS{idVendor}=="2e8a", \ 3 | ATTRS{idProduct}=="0003", \ 4 | TAG+="uaccess" \ 5 | MODE="660", \ 6 | GROUP="plugdev" 7 | SUBSYSTEM=="usb", \ 8 | ATTRS{idVendor}=="2e8a", \ 9 | ATTRS{idProduct}=="0009", \ 10 | TAG+="uaccess" \ 11 | MODE="660", \ 12 | 
GROUP="plugdev" 13 | SUBSYSTEM=="usb", \ 14 | ATTRS{idVendor}=="2e8a", \ 15 | ATTRS{idProduct}=="000a", \ 16 | TAG+="uaccess" \ 17 | MODE="660", \ 18 | GROUP="plugdev" 19 | SUBSYSTEM=="usb", \ 20 | ATTRS{idVendor}=="2e8a", \ 21 | ATTRS{idProduct}=="000f", \ 22 | TAG+="uaccess" \ 23 | MODE="660", \ 24 | GROUP="plugdev" 25 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/udev/99-video-box.rules: -------------------------------------------------------------------------------- 1 | # Our USB ethernet thing should always be renamed to video0 2 | 3 | SUBSYSTEMS=="usb", DRIVERS=="ax88179_178a", ENV{USB_NETWORK_RENAME}="video0" 4 | ENV{USB_NETWORK_RENAME}=="video0", SUBSYSTEM=="net", ATTR{type}=="1", NAME="video0" 5 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/units/vocto-sink-output.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description = voctomix output sink 3 | After = voctocore.service 4 | Requires = voctocore.service 5 | 6 | [Service] 7 | Type = simple 8 | ExecStart = /opt/scripts/sink-output.sh 9 | Restart = always 10 | RestartSec = 1s 11 | StartLimitInterval = 0 12 | 13 | [Install] 14 | WantedBy = voctocore.service 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/units/vocto-source-cam.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description = voctomix source cam 3 | After = voctocore.service 4 | Requires = voctocore.service 5 | 6 | [Service] 7 | Type = simple 8 | ExecStart = /opt/scripts/source-cam.sh 9 | Restart = always 10 | RestartSec = 1s 11 | StartLimitInterval = 0 12 | 13 | [Install] 14 | WantedBy = voctocore.service 15 | 
-------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/units/vocto-source-slides.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description = voctomix source slides 3 | After = voctocore.service 4 | Requires = voctocore.service 5 | 6 | [Service] 7 | Type = simple 8 | ExecStart = /opt/scripts/source-slides.sh 9 | Restart = always 10 | RestartSec = 1s 11 | StartLimitInterval = 0 12 | 13 | [Install] 14 | WantedBy = voctocore.service 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/units/voctocore.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=voctocore 3 | After=network.target 4 | 5 | [Service] 6 | Type=simple 7 | ExecStart=/usr/share/voctomix/voctocore/voctocore.py -vv --ini-file /opt/config/voctocore.ini 8 | Restart=always 9 | RestartSec=1s 10 | StartLimitInterval=0 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/video-status/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/video-box-mixer/files/video-status/logo.png -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/video-status/preview.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cgexec -g cpuset:fosdem.slice/preview ffmpeg -y -v error -i tcp://localhost:8899 -s 240x134 -vf fps=fps=1,format=rgb565be -c:v rawvideo -f image2 -s 240x134 -update 1 -atomic_writing 1 /tmp/picture.raw -s 1280x720 -update 1 
-atomic_writing 1 /var/www/html/preview.jpg 3 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/video-status/video-preview.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=video preview service 3 | StartLimitIntervalSec=0 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/preview.sh 7 | ExecReload=/bin/kill -9 $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=always 11 | RestartSec=1 12 | 13 | [Install] 14 | WantedBy=multi-user.target 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/video-status/video-status.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=video-status service 3 | After=video-streamer.service 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/video-status.py 7 | ExecReload=/bin/kill -HUP $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGTERM 10 | Restart=always 11 | RestartSec=1s 12 | StartLimitInterval=0 13 | StandardInput=tty 14 | StandardOutput=tty 15 | TTYPath=/dev/tty1 16 | 17 | 18 | [Install] 19 | WantedBy=multi-user.target 20 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/video-streamer/rc.local: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cpupower frequency-set -g performance -u 2900M -d 1900M 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/files/video-streamer/video-receiver.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=video-receiver service 3 | After=network-online.target 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/video-receiver.sh 7 | 
#!/bin/sh
# Record the local video feed (TCP, port 8899) to a timestamped MPEG-TS
# segment. Invoked by video-recorder.service (Restart=always), so every
# restart starts a fresh log.<epoch>.ts file.
#
# - $(...) replaces the legacy backtick substitution
# - the output path is quoted
# - exec makes ffmpeg replace the shell, so systemd signals ffmpeg directly
exec ffmpeg -v error \
  -i "tcp://127.0.0.1:8899?timeout=10000000" \
  -c copy -f mpegts "/home/video-recording/log.$(date +%s).ts"
| srcfd = open(f'/dev/bus/usb/{bus}/{device}','wb') 18 | 19 | USBDEVFS_RESET = 0x5514 20 | fcntl.ioctl(srcfd.fileno(), USBDEVFS_RESET, 0) 21 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: update initramfs 4 | command: update-initramfs -u 5 | 6 | - name: update grub 7 | shell: update-grub 8 | 9 | - name: restart vocto 10 | service: 11 | name: voctocore 12 | state: restarted 13 | notify: restart voctoscripts 14 | 15 | - name: restart voctoscripts 16 | service: 17 | name: "{{ item }}" 18 | state: restarted 19 | with_items: 20 | - vocto-sink-output 21 | - vocto-source-cam 22 | - vocto-source-slides 23 | 24 | - name: restart voctocore 25 | service: 26 | name: voctocore 27 | state: restarted 28 | 29 | - name: restart systemd-logind 30 | service: 31 | name: systemd-logind 32 | state: restarted 33 | 34 | - name: restart nginx 35 | service: 36 | name: nginx 37 | state: restarted 38 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/tasks/configure_network.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: set up udev rules 4 | ansible.builtin.copy: 5 | src: udev/99-video-box.rules 6 | dest: /etc/udev/rules.d/99-fosdem.rules 7 | owner: root 8 | group: root 9 | mode: 0644 10 | 11 | - name: configure interfaces 12 | ansible.builtin.copy: 13 | src: network/video0 14 | dest: /etc/network/interfaces.d/video0 15 | owner: root 16 | group: root 17 | mode: 0644 18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/tasks/install_mixer_ctl.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: disable OSC proxy 3 | service: 4 | name: 
oscproxy 5 | enabled: false 6 | state: stopped 7 | 8 | - name: disable mixer API 9 | service: 10 | name: fosdem-mixer-api 11 | enabled: false 12 | state: stopped 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "common: install packages" 3 | apt: 4 | state: latest 5 | install_recommends: false 6 | update_cache: yes 7 | package: 8 | - fbi 9 | - fontconfig 10 | - python3-pygame 11 | - avahi-daemon 12 | - bash-completion 13 | - curl 14 | - ffmpeg 15 | - sproxy 16 | - git 17 | - python3-gi-cairo 18 | - rsync 19 | - avahi-daemon 20 | - i965-va-driver-shaders 21 | - v4l-utils 22 | - mpv 23 | - jq 24 | - alsa-utils 25 | - lm-sensors 26 | - ms213x-status 27 | - ms213x-cli 28 | - python3-serial 29 | - cgroup-tools 30 | - intel-media-va-driver-non-free 31 | - firmware-sof-signed 32 | - firmware-misc-nonfree 33 | - fosdem-firmware 34 | - teensy-loader-cli 35 | - linux-cpupower 36 | - picotool 37 | - gpiod 38 | - fosdem-mixer-cli 39 | - python3-fosdemosc 40 | - fosdem-mixer-api 41 | - picocom 42 | tags: 43 | - firmware 44 | - firmware_ms2131 45 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box-mixer/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - import_tasks: configure_network.yml 2 | - import_tasks: install_packages.yml 3 | - import_tasks: install_extra_hw.yml 4 | - import_tasks: cgroup_setup.yml 5 | - import_tasks: configure_nvme.yml 6 | - import_tasks: install_signal-status.yml 7 | - import_tasks: install_video-status.yml 8 | - import_tasks: install_video-streamer.yml 9 | - import_tasks: install_voctocore.yml 10 | - import_tasks: install_control.yml 11 | - import_tasks: install_mixer_ctl.yml 12 | 
#!/bin/bash
# Loopback self-test for the FOSDEM audio box mixer (video-box copy,
# kept in sync with the video-box-mixer role's audiotest.sh).
#
# Routing set up before the test:
#   - no direct USB -> USB passthrough
#   - no XLR -> XLR passthrough
#   - no XLR -> headphones
#   - every XLR input  -> both USB channels   (gain $level)
#   - every USB input  -> both XLR outputs    (gain $level)
#   - every USB input  -> both headphone outs (gain $level)
# then alsabat plays/captures a test tone over hw:0 to verify the loop.

# no usb to usb (same loop idiom as the other routing blocks below)
for inp in L R; do
  for out in L R; do
    mixercli set-gain "USB $inp" "USB $out" 0
  done
done

# no xlr to xlr
for inp in 1 2 3; do
  for out in 1 2; do
    mixercli set-gain "XLR $inp" "XLR $out" 0
  done
done

# no xlr to headphones
for inp in 1 2 3; do
  for out in R L; do
    mixercli set-gain "XLR $inp" "Headphone $out" 0
  done
done

# Test gain; quoted at every use to avoid word-splitting (SC2086).
level=0.1

# all xlr to usb
for inp in 1 2 3; do
  for out in R L; do
    mixercli set-gain "XLR $inp" "USB $out" "$level"
  done
done

# all usb to xlr and headphones
for inp in R L; do
  for out in 1 2; do
    mixercli set-gain "USB $inp" "XLR $out" "$level"
  done
done

for inp in R L; do
  for out in R L; do
    mixercli set-gain "USB $inp" "Headphone $out" "$level"
  done
done

# Run the test: 18 kHz tone, 2 channels, 48 kHz sample rate, 5 seconds.
alsabat -P hw:0 -C hw:0 --saveplay=tmpf -F 18000 -c 2 -r 48000 -n 5s
-------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/cgroups/cgconfig.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Control Group configuration service 3 | Documentation=man:cgconfigparser(8) 4 | 5 | # The service should be able to start as soon as possible, 6 | # before any 'normal' services: 7 | DefaultDependencies=no 8 | Conflicts=shutdown.target 9 | Before=basic.target shutdown.target 10 | 11 | [Service] 12 | Type=oneshot 13 | RemainAfterExit=yes 14 | Delegate=yes 15 | ExecStart=/usr/sbin/cgconfigparser -L /etc/cgconfig.d 16 | 17 | [Install] 18 | WantedBy=sysinit.target 19 | 20 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/cgroups/fosdem.conf: -------------------------------------------------------------------------------- 1 | group system.slice { 2 | cpuset { 3 | cpuset.cpus="0-1"; 4 | cpuset.mems="0"; 5 | } 6 | } 7 | 8 | group user.slice { 9 | cpuset { 10 | cpuset.cpus="0-1"; 11 | cpuset.mems="0"; 12 | } 13 | } 14 | 15 | group fosdem.slice { 16 | cpuset { 17 | cpuset.cpus="2-3"; 18 | cpuset.mems="0"; 19 | cpuset.cpu_exclusive="1"; 20 | } 21 | } 22 | 23 | group fosdem.slice/receiver { 24 | cpuset { 25 | cpuset.cpus="2"; 26 | cpuset.mems="0"; 27 | } 28 | } 29 | 30 | group fosdem.slice/preview { 31 | cpuset { 32 | cpuset.cpus="2"; 33 | cpuset.mems="0"; 34 | } 35 | } 36 | 37 | 38 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/network/video0: -------------------------------------------------------------------------------- 1 | allow-hotplug video0 2 | iface video0 inet dhcp 3 | pre-up ethtool -K video0 tx-checksum-ipv6 off 4 | -------------------------------------------------------------------------------- 
#!/bin/bash
# Poll the combined EDID checksum of all DRM connectors and, once a display
# change has settled, restart the video receiver.
#
# State machine (polled every 0.5 s):
#   nochange -> changing   when the EDID hash changes
#   changing -> nochange   after the hash is stable for 3 consecutive polls
#                          (~1.5 s), at which point video-receiver is
#                          restarted and we sleep 10 s to let it come up
#   nosignal -> changing   reserved state; currently never entered
#
# Fixes vs. previous version: the RHS of [[ == / != ]] is quoted so the hash
# is compared literally instead of as a glob pattern; "waited" is initialized
# before first use; cat's per-connector errors (unreadable/absent edid nodes)
# are silenced so the log isn't spammed twice a second.

# One md5 over the concatenation of every connector's EDID blob.
getstatus() {
  cat /sys/class/drm/*/edid 2>/dev/null | md5sum | cut -d ' ' -f 1
}

res=$(getstatus)
state=nochange   # or: changing / nosignal
waited=0
while :; do
  sleep 0.5
  oldres=$res
  res=$(getstatus)
  case $state in
    nochange)
      if [[ $res != "$oldres" ]]; then
        state=changing
        waited=0
      fi
      ;;
    changing)
      if [[ $res != "$oldres" ]]; then
        # Hash still moving: restart the stability countdown.
        waited=0
        continue
      fi
      waited=$((waited + 1))
      if [[ $waited -eq 3 ]]; then
        logger "Display signal stable, restarting receiver"
        systemctl restart video-receiver
        sleep 10
        state=nochange
        waited=0
      fi
      ;;
    nosignal)
      state=changing
      waited=0
      ;;
  esac
done
-------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/signal-status/video-display-rescan.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=video display rescan service 3 | StartLimitIntervalSec=0 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/display-rescan.sh 7 | ExecReload=/bin/kill -9 $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=always 11 | RestartSec=1 12 | 13 | [Install] 14 | WantedBy=multi-user.target 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/signal-status/video-statuskeeper.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=video statuskeeper service 3 | StartLimitIntervalSec=0 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/statuskeeper.sh 7 | ExecReload=/bin/kill -9 $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=always 11 | RestartSec=1 12 | 13 | [Install] 14 | WantedBy=multi-user.target 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/udev/20-fosdem-serial.rules: -------------------------------------------------------------------------------- 1 | SUBSYSTEM=="tty", ATTRS{idVendor}=="f05d", ATTRS{idProduct}=="4001", SYMLINK+="tty_fosdem_box_ctl" 2 | # SUBSYSTEM=="tty", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="048a", SYMLINK+="tty_fosdem_audio_ctl" 3 | 4 | ACTION=="remove", GOTO="fosdem_serial_end" 5 | SUBSYSTEM!="tty", GOTO="fosdem_serial_end" 6 | SUBSYSTEMS=="usb", IMPORT{builtin}="usb_id" 7 | ENV{ID_SERIAL}!="FOSDEM_Audio*", GOTO="fosdem_serial_end" 8 | KERNEL!="ttyACM[0-9]*", GOTO="fosdem_serial_end" 9 | 10 | ENV{ID_USB_INTERFACE_NUM}=="00", SYMLINK+="tty_fosdem_audio_ctl" 11 | ENV{ID_USB_INTERFACE_NUM}=="02", SYMLINK+="tty_fosdem_audio_debug" 

LABEL="fosdem_serial_end"
--------------------------------------------------------------------------------
/ansible/playbooks/roles/video-box/files/udev/99-picotool.rules:
--------------------------------------------------------------------------------
# Raspberry Pi Pico USB IDs: grant console users access via uaccess and
# members of group plugdev access with mode 660.
# udev requires a comma after EVERY key assignment, including TAG+="uaccess";
# the missing comma previously made udev reject these rules.
SUBSYSTEM=="usb", \
    ATTRS{idVendor}=="2e8a", \
    ATTRS{idProduct}=="0003", \
    TAG+="uaccess", \
    MODE="660", \
    GROUP="plugdev"
SUBSYSTEM=="usb", \
    ATTRS{idVendor}=="2e8a", \
    ATTRS{idProduct}=="0009", \
    TAG+="uaccess", \
    MODE="660", \
    GROUP="plugdev"
SUBSYSTEM=="usb", \
    ATTRS{idVendor}=="2e8a", \
    ATTRS{idProduct}=="000a", \
    TAG+="uaccess", \
    MODE="660", \
    GROUP="plugdev"
SUBSYSTEM=="usb", \
    ATTRS{idVendor}=="2e8a", \
    ATTRS{idProduct}=="000f", \
    TAG+="uaccess", \
    MODE="660", \
    GROUP="plugdev"
--------------------------------------------------------------------------------
/ansible/playbooks/roles/video-box/files/udev/99-video-box.rules:
--------------------------------------------------------------------------------
# Our USB ethernet thing should always be renamed to video0

SUBSYSTEMS=="usb", DRIVERS=="ax88179_178a", ENV{USB_NETWORK_RENAME}="video0"
ENV{USB_NETWORK_RENAME}=="video0", SUBSYSTEM=="net", ATTR{type}=="1", NAME="video0"
--------------------------------------------------------------------------------
/ansible/playbooks/roles/video-box/files/video-status/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/video-box/files/video-status/logo.png
--------------------------------------------------------------------------------
/ansible/playbooks/roles/video-box/files/video-status/preview.sh:
--------------------------------------------------------------------------------
#!/bin/bash
cgexec -g
cpuset:fosdem.slice/preview ffmpeg -y -v error -i tcp://localhost:8899 -s 240x134 -vf fps=fps=1,format=rgb565be -c:v rawvideo -f image2 -s 240x134 -update 1 -atomic_writing 1 /tmp/picture.raw 3 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/video-status/video-preview.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=video preview service 3 | StartLimitIntervalSec=0 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/preview.sh 7 | ExecReload=/bin/kill -9 $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=always 11 | RestartSec=1 12 | 13 | [Install] 14 | WantedBy=multi-user.target 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/video-status/video-status.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=video-status service 3 | After=video-streamer.service 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/video-status.py 7 | ExecReload=/bin/kill -HUP $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGTERM 10 | Restart=always 11 | RestartSec=1s 12 | StartLimitInterval=0 13 | StandardInput=tty 14 | StandardOutput=tty 15 | TTYPath=/dev/tty1 16 | 17 | 18 | [Install] 19 | WantedBy=multi-user.target 20 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/video-streamer/rc.local: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cpupower frequency-set -g performance -u 2000M -d 1900M 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/video-streamer/video-receiver.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | 
Description=video-receiver service 3 | After=network-online.target 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/video-receiver.sh 7 | ExecReload=/bin/kill -HUP $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=always 11 | RestartSec=1s 12 | StartLimitInterval=0 13 | 14 | 15 | [Install] 16 | WantedBy=multi-user.target 17 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/video-streamer/video-recorder.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=video-recorder service 3 | After=network-online.target 4 | StartLimitIntervalSec=0 5 | 6 | [Service] 7 | ExecStart=/usr/local/bin/video-recorder.sh 8 | ExecReload=/bin/kill -HUP $MAINPID 9 | KillMode=control-group 10 | KillSignal=SIGKILL 11 | Restart=always 12 | RestartSec=1 13 | 14 | [Install] 15 | WantedBy=multi-user.target 16 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/video-streamer/video-recorder.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | ffmpeg -v error -i "tcp://127.0.0.1:8899?timeout=10000000" -c copy -f mpegts /home/video-recording/log.`date +%s`.ts 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/files/video-streamer/video-usbreset.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | # reset the macrosilicon USB 3 device before use 4 | # after close it doesn't work without reset and having the driver patches is not going to be fast 5 | 6 | import fcntl 7 | import re 8 | import subprocess 9 | import sys 10 | 11 | usbInfo = subprocess.check_output('lsusb -d 345f:2131', shell=True).decode("utf-8") 12 | # Bus 002 Device 005: ID 345f:2131 MACROSILICON USB3 Video 13 | 14 | matches = 
re.search("Bus (\d+) Device (\d+): ID 345f:2131", usbInfo).groups() 15 | bus = matches[0] 16 | device = matches[1] 17 | srcfd = open(f'/dev/bus/usb/{bus}/{device}','wb') 18 | 19 | USBDEVFS_RESET = 0x5514 20 | fcntl.ioctl(srcfd.fileno(), USBDEVFS_RESET, 0) 21 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: update initramfs 4 | command: update-initramfs -u 5 | 6 | - name: update grub 7 | shell: update-grub 8 | 9 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/tasks/configure_network.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: set up udev rules 4 | ansible.builtin.copy: 5 | src: udev/99-video-box.rules 6 | dest: /etc/udev/rules.d/99-fosdem.rules 7 | owner: root 8 | group: root 9 | mode: 0644 10 | 11 | - name: configure interfaces 12 | ansible.builtin.copy: 13 | src: network/video0 14 | dest: /etc/network/interfaces.d/video0 15 | owner: root 16 | group: root 17 | mode: 0644 18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/tasks/install_mixer_ctl.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: enable OSC proxy 3 | service: 4 | name: oscproxy 5 | enabled: true 6 | state: restarted 7 | 8 | - name: enable mixer API 9 | service: 10 | name: fosdem-mixer-api 11 | enabled: true 12 | state: restarted 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "common: install packages" 3 | apt: 4 | state: latest 5 | install_recommends: 
false 6 | update_cache: yes 7 | package: 8 | - fbi 9 | - fontconfig 10 | - python3-pygame 11 | - avahi-daemon 12 | - bash-completion 13 | - curl 14 | - ffmpeg 15 | - sproxy 16 | - git 17 | - python3-gi-cairo 18 | - rsync 19 | - avahi-daemon 20 | - i965-va-driver-shaders 21 | - v4l-utils 22 | - mpv 23 | - jq 24 | - alsa-utils 25 | - lm-sensors 26 | - ms213x-status 27 | - ms213x-cli 28 | - python3-serial 29 | - cgroup-tools 30 | - intel-media-va-driver-non-free 31 | - firmware-sof-signed 32 | - firmware-misc-nonfree 33 | - fosdem-firmware 34 | - teensy-loader-cli 35 | - linux-cpupower 36 | - picotool 37 | - gpiod 38 | - fosdem-mixer-cli 39 | - python3-fosdemosc 40 | - fosdem-mixer-api 41 | - picocom 42 | tags: 43 | - firmware 44 | - firmware_ms2131 45 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - import_tasks: configure_network.yml 2 | - import_tasks: install_packages.yml 3 | - import_tasks: install_extra_hw.yml 4 | - import_tasks: cgroup_setup.yml 5 | - import_tasks: install_signal-status.yml 6 | - import_tasks: install_video-status.yml 7 | - import_tasks: install_video-streamer.yml 8 | - import_tasks: install_mixer_ctl.yml 9 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-box/templates/grub/cgroup.cfg: -------------------------------------------------------------------------------- 1 | # {{ ansible_managed }} 2 | GRUB_CMDLINE_LINUX_DEFAULT="$GRUB_CMDLINE_LINUX_DEFAULT systemd.unified_cgroup_hierarchy=0 systemd.legacy_systemd_cgroup_controller=1" 3 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/README.md: -------------------------------------------------------------------------------- 1 | Control server for vocto 2 | 3 | This lets devroom managers 
live mix 4 predefined scenes. 4 | 5 | Also, it has accumulated other crap: 6 | 7 | * influxdb instance for audio monitoring 8 | * previews of all buildings' boxes and outputs 9 | * dhcp server for the video VLAN 10 | 11 | The htpasswd file comes from the video-private repo. 12 | 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/files/fosdem.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS fosdem ( 2 | roomname varchar(16) not null primary key, 3 | building varchar(16) not null, 4 | voctop varchar(255), 5 | slides varchar(255), 6 | cam varchar(255), 7 | audio varchar(255) 8 | ); 9 | create unique index fosdem_voctop on fosdem(voctop); 10 | create unique index fosdem_cam on fosdem(cam); 11 | create unique index fosdem_slides on fosdem(slides); 12 | create unique index fosdem_audio on fosdem(audio); 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/files/icinga/fosdem-services.conf: -------------------------------------------------------------------------------- 1 | apply Service "http" { 2 | import "generic-service" 3 | 4 | check_command = "http" 5 | 6 | assign where (host.address || host.address6) && (host.vars.group == "frontend") 7 | } 8 | 9 | apply Service "daemons" { 10 | import "generic-pasv-service" 11 | assign where (host.address || host.address6) && (host.vars.group == "vocto" || host.vars.group == "frontend" || host.vars.group == "backend") 12 | } 13 | 14 | apply Service "disk" { 15 | import "generic-pasv-service" 16 | assign where (host.address || host.address6) && (host.vars.group == "vocto" || host.vars.group == "frontend" || host.vars.group == "backend") 17 | } 18 | 19 | apply Service "load" { 20 | import "generic-pasv-service" 21 | assign where (host.address || host.address6) && (host.vars.group == "vocto" || host.vars.group 
== "frontend" || host.vars.group == "backend") 22 | } 23 | 24 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/files/icinga/passive.conf: -------------------------------------------------------------------------------- 1 | template Service "generic-pasv-service" { 2 | max_check_attempts = 1 3 | check_interval = 1460m 4 | retry_interval = 1m 5 | enable_active_checks = true 6 | enable_passive_checks = true 7 | check_command = "passive" 8 | 9 | vars.dummy_state = 3 10 | vars.dummy_text = "No data received" 11 | } 12 | 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/files/imgmaker.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -z "$1" ]; then 4 | echo "Usage: $0 room" 5 | exit 2 6 | fi 7 | cd /var/www/html 8 | 9 | ROOM=$1 10 | HOST=$(psql -A -t -q fosdem -c "select voctop from fosdem where roomname='$ROOM'") 11 | 12 | if [ -z "$HOST" ]; then 13 | # we have no voctop for this room, do not spin 14 | sleep 60 15 | exit 0 16 | fi 17 | 18 | t=`mktemp` 19 | rm -f ${t} ${t}.jpg 20 | 21 | ffmpeg=`which ffmpeg` 22 | if ! [ $? -eq 0 ]; then 23 | echo No ffmpeg/avconv found. 
24 | exit 3 25 | fi 26 | 27 | 28 | function ff_fetch { 29 | # $0 host port file 30 | $ffmpeg -v quiet -y -i tcp://"$1":"$2"'?timeout=1000000' -map 0:v -s 320x180 -q 5 -an -vframes 1 "${t}".jpg && mv ${t}.jpg $3 31 | } 32 | 33 | mkdir -p ${ROOM} 34 | 35 | # exit after this many times to reload voctop 36 | times=30 37 | i=0 38 | 39 | while /bin/true; do 40 | let i=$i+1 41 | if [ $i -gt $times ]; then 42 | exit 0 43 | fi 44 | ff_fetch $HOST 11000 ${ROOM}/room.jpg 45 | ff_fetch $HOST 13000 ${ROOM}/cam.jpg 46 | ff_fetch $HOST 13001 ${ROOM}/grab.jpg 47 | sleep 1 48 | done 49 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/files/isc-dhcp-server.default: -------------------------------------------------------------------------------- 1 | INTERFACESv4="bond0" 2 | INTERFACESv6="" 3 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/files/nginx/certificate/dhparam.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN DH PARAMETERS----- 2 | MIICCAKCAgEAt6reliZJhEolguC3iQeTaFwLvW7GMJkH1ilrQKIRtjfPVEdErd74 3 | HT37cs0NzQcN+AAYmMndUgvMV5HtjH2Yn2SRLxKM0OZ5EpkMBnBvtkBZbGkF4dI+ 4 | LKvTgQWVwWzL/UCCWthcUYYSRsNGEjdMSumlYn5VC+l/lwtemxBUBfaJVE5rhPxK 5 | d0bAAmp6UUtSaEfKnSbnaL8r2udWAk0rt15/4hXaJRxrRfg3faDiL80DQkHy1BCp 6 | WawIWL5hnNbhr+fB3DV1W5zi8Lf7qi5nUgebmyfYi2WJtTw+gRYwg3tm47PAHvfI 7 | NazYiCzohED9TRlzO7Iaet0/6wKCFkDS4ilhtVplYmINlhe+4VehvtVuOuqX21pm 8 | yyrWID7csAIE3ALmg++bxZCyZnPsUp7btMtlDyYcYNeRLBr9dMMJ1B4LyAiiad+j 9 | V6gD+iivT/keyy2YgktdbZrNCcFaGFgDaVRZFYV9mwt9Ix7esugj4elGG8pObxFQ 10 | jtlfF5bYQwNFqNKbAkR3AbUsf0NEPhaEvYPfDgPoPD2byPtUUJg3Q482JkSKvsmh 11 | vB0Wn60wDVi+G+jUUFhRH1iY6AnN3m7XNuKu4MlrTwUm2Dc+niRbsNUZ6/Ro6RhC 12 | 8aBDVdKI42s007t0CRCZ7q0bR93ouBRjeUXzk3VC1F32qPreZkgO8iMCAQI= 13 | -----END DH PARAMETERS----- 14 | 
-------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/files/nginx/global.conf: -------------------------------------------------------------------------------- 1 | ssl_ciphers "EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH"; 2 | ssl_ecdh_curve secp384r1; 3 | ssl_session_cache shared:SSL:10m; 4 | ssl_session_tickets off; 5 | ssl_dhparam /etc/nginx/dhparam.pem; 6 | 7 | resolver 127.0.0.1; 8 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/files/nginx/mtail.conf: -------------------------------------------------------------------------------- 1 | log_format mtail '$server_name $remote_addr - $remote_user [$time_local] ' 2 | '"$request" $status $bytes_sent $request_time ' 3 | '"$http_referer" "$http_user_agent" "$sent_http_content_type"'; 4 | 5 | access_log /var/log/nginx/metrics.log mtail; 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/files/unbound-access.conf: -------------------------------------------------------------------------------- 1 | server: 2 | interface: 127.0.0.1 3 | interface: 185.175.218.11 4 | access-control: 185.175.218.0/24 allow 5 | access-control: 127.0.0.0/8 allow 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/files/web/inc.php: -------------------------------------------------------------------------------- 1 | quote($str); 8 | } 9 | 10 | $db = new PDO("pgsql:dbname=fosdem user=www-data"); 11 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/files/web/query-room.php: -------------------------------------------------------------------------------- 1 | prepare("SELECT voctop FROM fosdem WHERE roomname = :room"); 11 | $r->execute(['room' => $room]); 12 
if (!$r) {
	die("notfound");
}

$row = $r->fetch();
echo 'tcp://'.$row[0].':8899/?timeout=3000000';
--------------------------------------------------------------------------------
/ansible/playbooks/roles/video-control-server/files/web/query-vocto.php:
--------------------------------------------------------------------------------
prepare("SELECT roomname, cam, slides FROM fosdem WHERE voctop = :voctop");
$r->execute(['voctop' => $voct]);

if (!$r) {
	die("notfound");
}

$row = $r->fetch();
echo $row[0]." ".$row[1]." ".$row[2];
--------------------------------------------------------------------------------
/ansible/playbooks/roles/video-control-server/files/web/room_status.php:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/ansible/playbooks/roles/video-control-server/handlers/main.yml:
--------------------------------------------------------------------------------
---
- name: reload nginx
  service: name=nginx state=reloaded

- name: restart systemd-logind
  service:
    name: systemd-logind
    state: restarted

- name: reload unbound
  service: name=unbound state=reloaded

- name: reload icinga
  service: name=icinga2 state=reloaded

# BUGFIX: this handler previously restarted "unbound" (copy-paste error),
# so DHCP config changes never actually restarted the DHCP server.
- name: reload dhcpd
  service: name=isc-dhcp-server state=restarted


--------------------------------------------------------------------------------
/ansible/playbooks/roles/video-control-server/tasks/configure_db.yml:
--------------------------------------------------------------------------------
- name: Create DB user - www-data
  command: createuser -d www-data
  ignore_errors: true
  become: true
  become_user: postgres

- name: Create DB user - root
  command: createuser -s root
  ignore_errors: true
  become: true
  become_user:
postgres 12 | 13 | - name: Create DB 14 | command: createdb fosdem 15 | become: true 16 | become_user: www-data 17 | ignore_errors: true 18 | 19 | - name: Copy DB schema 20 | copy: 21 | src: "fosdem.sql" 22 | dest: "/tmp/fosdem.sql" 23 | 24 | - name: create DB table 25 | command: "psql fosdem -f /tmp/fosdem.sql" 26 | become: true 27 | become_user: www-data 28 | 29 | - name: make sure InfluxDB is running 30 | systemd: 31 | name: influxdb 32 | state: started 33 | enabled: true 34 | 35 | - name: create InfluxDB database 36 | command: influx -execute 'create database ebur' 37 | ignore_errors: true 38 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/tasks/configure_dns_dhcp.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: install unbound config 3 | copy: 4 | src: unbound-access.conf 5 | dest: /etc/unbound/unbound.conf.d/access.conf 6 | owner: root 7 | group: root 8 | mode: 0644 9 | notify: reload unbound 10 | 11 | - name: install dhcpd config 12 | copy: 13 | src: dhcpd.conf 14 | dest: /etc/dhcp/dhcpd.conf 15 | owner: root 16 | group: root 17 | mode: 0644 18 | notify: reload dhcpd 19 | 20 | - name: install dhcpd config defaults 21 | copy: 22 | src: isc-dhcp-server.default 23 | dest: /etc/default/isc-dhcp-server 24 | owner: root 25 | group: root 26 | mode: 0644 27 | notify: reload dhcpd 28 | 29 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/tasks/configure_grafana.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: install grafana config 3 | copy: 4 | src: grafana/grafana.ini 5 | dest: /etc/grafana/grafana.ini 6 | owner: root 7 | group: root 8 | mode: 0644 9 | 10 | - name: enable grafana-server services 11 | service: 12 | name: "grafana-server" 13 | enabled: true 14 | state: started 15 | 16 | #- name: 
"Create datasource'" 17 | # community.grafana.grafana_datasource: 18 | # grafana_url: "https://control.video.fosdem.org/grafana" 19 | # grafana_user: "admin" 20 | # grafana_password: "{{ grafana_password }}" 21 | # org_id: "1" 22 | # name: "infl" 23 | # state: present 24 | # is_default: true 25 | # database: "ebur" 26 | # ds_type: influxdb 27 | # ds_url: http://localhost:8086 28 | # access: proxy 29 | # with_credentials: false 30 | 31 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Add influx key 3 | apt_key: 4 | url: https://repos.influxdata.com/influxdata-archive_compat.key 5 | 6 | - name: Add influx repo 7 | apt_repository: 8 | filename: influxdb 9 | repo: "deb https://repos.influxdata.com/debian stable main" 10 | 11 | - name: Add grafana key 12 | apt_key: 13 | url: https://packages.grafana.com/gpg.key 14 | 15 | - name: Add influx repo 16 | apt_repository: 17 | filename: influxdb 18 | repo: "deb https://packages.grafana.com/oss/deb stable main" 19 | 20 | 21 | - name: "install packages" 22 | apt: 23 | state: latest 24 | install_recommends: false 25 | package: 26 | - apache2-utils 27 | - avahi-daemon 28 | - ffmpeg 29 | - nginx 30 | - php8.2 31 | - php8.2-fpm 32 | - php8.2-pgsql 33 | - php8.2-curl 34 | - isc-dhcp-server 35 | - unbound 36 | - influxdb 37 | - grafana 38 | - postgresql 39 | 40 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: install_packages.yml 3 | - import_tasks: configure_dns_dhcp.yml 4 | - import_tasks: configure_db.yml 5 | - import_tasks: configure_icinga.yml 6 | - import_tasks: configure_grafana.yml 7 | - import_tasks: 
configure_imgmaker.yml 8 | - import_tasks: configure_nginx.yml 9 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-control-server/templates/imgmaker.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description={{ item }} image maker service 3 | 4 | [Service] 5 | ExecStart=/usr/local/bin/imgmaker.sh {{ item }} 6 | ExecReload=/bin/kill -HUP $MAINPID 7 | KillMode=control-group 8 | KillSignal=SIGKILL 9 | Restart=always 10 | RestartSec=1 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-monitoring/files/monitoring.cron: -------------------------------------------------------------------------------- 1 | * * * * * root /usr/local/bin/do_checks.sh 2 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-monitoring/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart munin-node 3 | service: 4 | name: munin-node 5 | state: restarted 6 | 7 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-monitoring/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "install monitoring packages" 3 | apt: 4 | state: latest 5 | package: 6 | - nsca-client 7 | - monitoring-plugins 8 | - munin-node 9 | - bc 10 | 11 | - name: monitoring script 12 | copy: 13 | src: "{{ item }}" 14 | dest: /usr/local/bin/ 15 | owner: root 16 | group: root 17 | mode: 0755 18 | with_items: 19 | - do_checks.sh 20 | 21 | - name: install nsca configuration 22 | copy: 23 | src: "{{ item }}" 24 | dest: "/etc/{{ item }}" 25 | owner: root 26 | group: root 27 | mode: 0644 28 | with_items: 29 | - send_nsca.cfg 30 | 31 | - name: 
install munin-node configuration 32 | copy: 33 | src: "{{ item }}" 34 | dest: "/etc/munin/{{ item }}" 35 | owner: root 36 | group: root 37 | mode: 0644 38 | with_items: 39 | - munin-node.conf 40 | notify: restart munin-node 41 | 42 | - name: install cron file 43 | copy: 44 | src: "{{ item }}" 45 | dest: "/etc/cron.d/monitoring" 46 | owner: root 47 | group: root 48 | mode: 0644 49 | with_items: 50 | - monitoring.cron 51 | 52 | 53 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-repo/files/video-team.repo.list: -------------------------------------------------------------------------------- 1 | deb https://packagecloud.io/fosdem/video-team/debian bookworm main 2 | deb-src https://packagecloud.io/fosdem/video-team/debian bookworm main 3 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-repo/tasks/main.yml: -------------------------------------------------------------------------------- 1 | - name: Setting facts 2 | set_fact: 3 | url_tmp_file_path: /tmp/packagecloud_{{repository |replace("/", "_")}}_url 4 | tmp_file_path: /tmp/packagecloud_{{repository |replace("/", "_")}}_key 5 | when: ansible_os_family == "Debian" 6 | 7 | - name: Install debian-archive-keyring and apt-transport-https 8 | apt: pkg={{ packages }} state=present 9 | vars: 10 | packages: 11 | - debian-archive-keyring 12 | - apt-transport-https 13 | when: ansible_os_family == "Debian" 14 | 15 | # {{ repository }}/gpgkey URL works for both legacy and modern public repositories. 
16 | - name: Add {{repository}} GPG key to apt-key 17 | apt_key: url=https://packagecloud.io/{{ repository }}/gpgkey state=present 18 | when: ansible_os_family == "Debian" 19 | 20 | - name: "Adding packagecloud.io repository: {{ repository }}" 21 | copy: 22 | src: "video-team.repo.list" 23 | dest: "{{ debian_config_file_location }}" 24 | register: added_deb_repository 25 | when: ansible_os_family == "Debian" 26 | 27 | - name: Update APT package cache 28 | apt: update_cache=true 29 | when: ansible_os_family == "Debian" and added_deb_repository.changed 30 | 31 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-repo/vars/main.yml: -------------------------------------------------------------------------------- 1 | repository: fosdem/video-team 2 | os: debian 3 | version: bullseye 4 | debian_gpg_key_url: https://packagecloud.io/install/repositories/{{ repository }}/gpg_key_url.list?os={{ os }}&dist={{ version }}&name={{ ansible_nodename }} 5 | debian_config_file_url: https://packagecloud.io/install/repositories/{{ repository }}/config_file.list?os={{ os }}&dist={{ version }}&name={{ ansible_nodename }} 6 | debian_config_file_location: /etc/apt/sources.list.d/{{ repository|replace("/", "_")}}.list 7 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump-external/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | video_stream_dump_hdd: /dev/sdb 3 | video_stream_dump_configure_hdd: true 4 | video_stream_dump_destroy_all_streamdump_data: false 5 | video_dump_source: [] 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump-external/files/streamdump.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | while true; do 4 | ffmpeg -v error -y -i 
"$1"'?timeout=1000000' -map 0 -c copy -f mpegts $2`date +%s`.ts 5 | done 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump-external/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart streamdump target 3 | become: true 4 | systemd: 5 | daemon_reload: true 6 | name: streamdump.target 7 | state: restarted 8 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump-external/tasks/configure_streamdump.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: create stream dump directories 4 | file: 5 | path: "/mnt/storage/{{ item }}" 6 | state: directory 7 | owner: root 8 | group: root 9 | mode: 0755 10 | with_items: "{{ video_sproxy }}" 11 | 12 | - name: install stream dump script 13 | copy: 14 | src: streamdump.sh 15 | dest: /usr/local/bin/streamdump.sh 16 | owner: root 17 | group: root 18 | mode: 0755 19 | notify: 20 | - restart streamdump target 21 | 22 | - name: install stream dump services 23 | template: 24 | src: streamdump.service 25 | dest: "/etc/systemd/system/streamdump-{{ item }}.service" 26 | owner: root 27 | group: root 28 | mode: 0644 29 | with_items: "{{ video_sproxy }}" 30 | register: video_stream_dump_services 31 | notify: 32 | - restart streamdump target 33 | 34 | - name: enable streamdump service 35 | systemd: 36 | name: "streamdump-{{ item }}" 37 | enabled: true 38 | state: started 39 | daemon_reload: true 40 | with_items: "{{ video_sproxy }}" 41 | 42 | - import_tasks: streamdump_target.yml 43 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump-external/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "install packages" 3 | apt: 
4 | state: latest 5 | install_recommends: false 6 | package: 7 | - ffmpeg 8 | - sysstat 9 | - mpv 10 | - avahi-daemon 11 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump-external/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: install_packages.yml 3 | - import_tasks: configure_hdd.yml 4 | when: video_stream_dump_configure_hdd 5 | - import_tasks: configure_streamdump.yml 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump-external/tasks/streamdump_target.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: install streamdump target 3 | template: 4 | src: streamdump.target 5 | dest: /etc/systemd/system/streamdump.target 6 | owner: root 7 | group: root 8 | mode: 0644 9 | notify: 10 | - restart streamdump target 11 | 12 | - name: start and enable streamdump target 13 | become: true 14 | systemd: 15 | daemon_reload: true 16 | name: streamdump.target 17 | state: started 18 | enabled: true 19 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump-external/templates/streamdump.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description={{ item }} stream dump service 3 | PartOf=streamdump.target 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/streamdump.sh 'tcp://{{ item }}.video.fosdem.org:8899/' /mnt/storage/{{ item }}/ 7 | ExecReload=/bin/kill -HUP $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=on-failure 11 | 12 | [Install] 13 | WantedBy=streamdump.target 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump-external/templates/streamdump.target: 
-------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Streamdump target allowing to start/stop all streamdump-*.service instances at once 3 | [Install] 4 | WantedBy=multi-user.target 5 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | video_stream_dump_hdd: /dev/sdb 3 | video_stream_dump_configure_hdd: true 4 | video_stream_dump_destroy_all_streamdump_data: false 5 | video_dump_source: [] 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump/files/streamdump.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # Endlessly dump stream $1 into timestamped .ts files under directory prefix $2; 3 | # quote the output path so a prefix with spaces/globs survives, and use $() over backticks. 4 | while true; do 5 | ffmpeg -v error -y -i "$1"'?timeout=1000000' -map 0 -c copy -f mpegts "$2$(date +%s).ts" 6 | done 7 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: restart streamdump target 3 | become: true 4 | systemd: 5 | daemon_reload: true 6 | name: streamdump.target 7 | state: restarted 8 | 9 | - name: restart streamdump service 10 | become: true 11 | systemd: 12 | daemon_reload: true 13 | name: "streamdump-{{ item }}.service" 14 | state: restarted 15 | with_items: "{{ video_stream_dump_services.results }}" 16 | when: item is changed 17 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump/tasks/configure_streamdump.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Loop for streamdump type 3 | include_tasks: configure_streamdump_simple.yml 4 | with_items: 5 | - cam 6 | - 
slides 7 | - vocto 8 | tags: video-stream-dump-internal 9 | 10 | - import_tasks: streamdump_target.yml 11 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump/tasks/configure_streamdump_simple.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: set type of box 4 | set_fact: 5 | box_type: "{{ item }}" 6 | 7 | - name: create stream dump directories 8 | file: 9 | path: "/mnt/storage/{{ item }}-{{ box_type }}" 10 | state: directory 11 | owner: root 12 | group: root 13 | mode: 0755 14 | with_items: "{{ video_rooms }}" 15 | 16 | - name: install stream dump script 17 | copy: 18 | src: streamdump.sh 19 | dest: /usr/local/bin/streamdump.sh 20 | owner: root 21 | group: root 22 | mode: 0755 23 | notify: 24 | - restart streamdump target 25 | 26 | - name: install stream dump services 27 | template: 28 | src: streamdump.service 29 | dest: "/etc/systemd/system/streamdump-{{ item }}-{{ box_type }}.service" 30 | owner: root 31 | group: root 32 | mode: 0644 33 | with_items: "{{ video_rooms }}" 34 | register: video_stream_dump_services 35 | notify: 36 | - restart streamdump service 37 | 38 | - name: enable streamdump service 39 | systemd: 40 | name: "streamdump-{{ item }}-{{ box_type }}" 41 | enabled: true 42 | state: started 43 | daemon_reload: true 44 | with_items: "{{ video_rooms }}" 45 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "install packages" 3 | apt: 4 | state: latest 5 | install_recommends: false 6 | package: 7 | - ffmpeg 8 | - sysstat 9 | - mpv 10 | - avahi-daemon 11 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump/tasks/main.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: install_packages.yml 3 | - import_tasks: configure_hdd.yml 4 | when: video_stream_dump_configure_hdd 5 | - import_tasks: configure_streamdump.yml 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump/tasks/streamdump_target.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: install streamdump target 3 | template: 4 | src: streamdump.target 5 | dest: /etc/systemd/system/streamdump.target 6 | owner: root 7 | group: root 8 | mode: 0644 9 | notify: 10 | - restart streamdump target 11 | 12 | - name: start and enable streamdump target 13 | become: true 14 | systemd: 15 | daemon_reload: true 16 | name: streamdump.target 17 | state: started 18 | enabled: true 19 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump/templates/streamdump.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description={{ item }}-{{ box_type }} stream dump service 3 | PartOf=streamdump.target 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/streamdump.sh 'http://{{ item }}-{{ box_type }}.video.fosdem.org/0.ts' /mnt/storage/{{ item }}-{{ box_type }}/ 7 | ExecReload=/bin/kill -HUP $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=on-failure 11 | 12 | [Install] 13 | WantedBy=streamdump.target 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-stream-dump/templates/streamdump.target: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Streamdump target allowing to start/stop all streamdump-*.service instances at once 3 | [Install] 4 | WantedBy=multi-user.target 5 | 
-------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/defaults/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | destroy_all_streambackend_data: false 3 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/files/bic/modules: -------------------------------------------------------------------------------- 1 | tcp_bic 2 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/files/fetcher/thumb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -z "$1" ]; then 4 | echo Usage: $0 room 5 | exit 2 6 | fi 7 | 8 | cd /var/www/hls || exit 3 9 | 10 | room="$1" 11 | 12 | while :; do 13 | ffmpeg -v error -y -i `ls -t ${room}-1080p-*ts|head -n1` -update 1 -frames:v 1 ${room}-preview.jpg 14 | sleep 10 15 | done 16 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/files/iptables/rules.v4: -------------------------------------------------------------------------------- 1 | # Generated by xtables-save v1.8.2 on Thu Feb 4 10:17:08 2021 2 | *filter 3 | :INPUT ACCEPT [0:0] 4 | :FORWARD ACCEPT [0:0] 5 | :OUTPUT ACCEPT [0:0] 6 | -A INPUT -i enp193s0 -p udp -m udp --dport 111 -j DROP 7 | -A INPUT -i enp193s0 -p tcp -m tcp --dport 111 -j DROP 8 | -A INPUT -i enp193s0 -p udp -m udp --dport 2049 -j DROP 9 | -A INPUT -i enp193s0 -p tcp -m tcp --dport 2049 -j DROP 10 | COMMIT 11 | # Completed on Thu Feb 4 10:17:08 2021 12 | 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/files/iptables/rules.v6: -------------------------------------------------------------------------------- 1 | # Generated 
by xtables-save v1.8.2 on Thu Feb 4 10:17:08 2021 2 | *filter 3 | :INPUT ACCEPT [0:0] 4 | :FORWARD ACCEPT [0:0] 5 | :OUTPUT ACCEPT [0:0] 6 | -A INPUT -i enp193s0 -p udp -m udp --dport 111 -j DROP 7 | -A INPUT -i enp193s0 -p tcp -m tcp --dport 111 -j DROP 8 | -A INPUT -i enp193s0 -p udp -m udp --dport 2049 -j DROP 9 | -A INPUT -i enp193s0 -p tcp -m tcp --dport 2049 -j DROP 10 | COMMIT 11 | # Completed on Thu Feb 4 10:17:08 2021 12 | 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/files/nginx/mtail.conf: -------------------------------------------------------------------------------- 1 | log_format mtail '$server_name $remote_addr - $remote_user [$time_local] ' 2 | '"$request" $status $bytes_sent $request_time ' 3 | '"$http_referer" "$http_user_agent" "$sent_http_content_type"'; 4 | 5 | access_log /var/log/nginx/metrics.log mtail; 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: reload nginx 3 | service: 4 | name: nginx 5 | state: reloaded 6 | 7 | - name: restart nfs 8 | service: 9 | name: nfs-server 10 | state: restarted 11 | 12 | - name: restart iptables-persistent 13 | service: 14 | name: netfilter-persistent 15 | state: restarted 16 | 17 | - name: restart fetcher target 18 | become: true 19 | systemd: 20 | daemon_reload: true 21 | name: fetcher.target 22 | state: restarted 23 | 24 | - name: restart fetcher service 25 | become: true 26 | systemd: 27 | daemon_reload: true 28 | name: "fetcher-{{ item }}" 29 | state: restarted 30 | with_items: "{{ video_rooms + virtual_video_rooms }}" 31 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/tasks/configure_bic.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Add the tcp_bic module 4 | modprobe: 5 | name: tcp_bic 6 | state: present 7 | 8 | - ansible.posix.sysctl: 9 | name: net.ipv4.tcp_congestion_control 10 | value: 'bic' 11 | sysctl_set: true 12 | state: present 13 | reload: true 14 | 15 | - name: install module load on boot 16 | copy: 17 | src: bic/modules 18 | dest: /etc/modules 19 | owner: root 20 | group: root 21 | mode: 0644 22 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Add testing repo 4 | apt_repository: 5 | filename: testing 6 | repo: "deb http://deb.debian.org/debian/ testing main contrib non-free" 7 | 8 | 9 | - name: "install packages" 10 | apt: 11 | state: latest 12 | install_recommends: false 13 | package: 14 | - ffmpeg 15 | - libglib-object-introspection-perl 16 | - gir1.2-gstreamer-1.0 17 | - gir1.2-gst-plugins-base-1.0 18 | - gstreamer1.0-plugins-base 19 | - gstreamer1.0-plugins-good 20 | - gstreamer1.0-plugins-bad 21 | - gstreamer1.0-plugins-ugly 22 | - gstreamer1.0-libav 23 | - libdbd-pg-perl 24 | - nginx-common 25 | - nginx-doc 26 | - nginx-extras 27 | 28 | 29 | - name: "install ffmpeg from testing" 30 | apt: 31 | state: latest 32 | install_recommends: false 33 | default_release: testing 34 | package: 35 | - ffmpeg 36 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: install_packages.yml 3 | # - import_tasks: mount_ssd.yml 4 | - import_tasks: configure_bic.yml 5 | - import_tasks: configure_nginx.yml 6 | - import_tasks: configure_fetcher.yml 7 | 
-------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/templates/fetcher/fetcher.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description={{ item }} vlc fetcher service 3 | PartOf=fetcher.target 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/fetcher.sh {{ item }} 7 | KillMode=control-group 8 | KillSignal=SIGKILL 9 | Restart=always 10 | RestartSec=3 11 | User=www-data 12 | Group=www-data 13 | 14 | [Install] 15 | WantedBy=fetcher.target 16 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/templates/fetcher/fetcher.target: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=Fetcher target allowing to start/stop all fetcher-*.service instances at once 3 | [Install] 4 | WantedBy=multi-user.target 5 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/templates/fetcher/room.m3u8: -------------------------------------------------------------------------------- 1 | #EXTM3U 2 | #EXT-X-VERSION:3 3 | #EXT-X-STREAM-INF:BANDWIDTH=3500,RESOLUTION=1920x1080,CODECS="avc1.640828,mp4a.40.2" 4 | {{ item }}-1080p.m3u8 5 | 6 | #EXT-X-STREAM-INF:BANDWIDTH=1000,RESOLUTION=854x480,CODECS="avc1.64081f,mp4a.40.2" 7 | {{ item }}-480p.m3u8 8 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/templates/fetcher/thumb.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description={{ item }} thumbnail service 3 | PartOf=fetcher.target 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/thumb.sh {{ item }} 7 | KillMode=control-group 8 | KillSignal=SIGKILL 9 | Restart=always 10 | RestartSec=3 11 | User=www-data 12 | 
Group=www-data 13 | 14 | [Install] 15 | WantedBy=fetcher.target 16 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/templates/nfs/exports.j2: -------------------------------------------------------------------------------- 1 | {% for export in nfs_exports %} 2 | {{ export }} 3 | {% endfor %} 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/templates/nginx/nginx-rtmp.conf: -------------------------------------------------------------------------------- 1 | # {{ansible_hostname}} 2 | # 3 | 4 | rtmp_auto_push on; 5 | 6 | rtmp { 7 | # Only allow publishing from trusted sources 8 | {% for address in nginx_rtmp_publishers %} 9 | allow publish {{ address }}; 10 | {% endfor %} 11 | deny publish all; 12 | 13 | server { 14 | listen 1935; 15 | 16 | application stream { 17 | record all; 18 | 19 | record_path /var/www/dump; 20 | record_suffix /%Y%m%d%H%M%S.flv; 21 | 22 | record_interval 30m; 23 | 24 | # Needed in case something goes wrong during the conversion. As long as this 25 | # isn't wrapped in a script, ffmpeg will terminate properly without the 26 | # default SIGKILL. Otherwise see nginx-rtmp wiki for a clean example. 
27 | exec_kill_signal term; 28 | exec_record_done ffmpeg -v error -y -i $path -codec copy -movflags +faststart $path.mp4; 29 | 30 | live on; 31 | 32 | hls on; 33 | hls_path /var/www/hls; 34 | hls_fragment_naming system; 35 | 36 | drop_idle_publisher 20s; 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-backend/templates/sreview/config.pm.j2: -------------------------------------------------------------------------------- 1 | our $config; 2 | 3 | $dbistring = "dbi:Pg:dbname=sreview;host={{ sreview_db_host }};sslmode=require;user=sreview;password={{ sreview_db_pw }}"; 4 | $inputglob = "/var/www/dump/*/*.flv.mp4"; 5 | $parse_re = '.*\/(?[^\/]+)(?(-[^\/-]+)?)\/(?\d{4})-(?\d{2})-(?\d{2})\/(?\d{2}):(?\d{2}):(?\d{2})'; 6 | 7 | 1; 8 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-frontend/files/bic/modules: -------------------------------------------------------------------------------- 1 | tcp_bic 2 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-frontend/files/nginx/certificate/dhparam.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN DH PARAMETERS----- 2 | MIICCAKCAgEAt6reliZJhEolguC3iQeTaFwLvW7GMJkH1ilrQKIRtjfPVEdErd74 3 | HT37cs0NzQcN+AAYmMndUgvMV5HtjH2Yn2SRLxKM0OZ5EpkMBnBvtkBZbGkF4dI+ 4 | LKvTgQWVwWzL/UCCWthcUYYSRsNGEjdMSumlYn5VC+l/lwtemxBUBfaJVE5rhPxK 5 | d0bAAmp6UUtSaEfKnSbnaL8r2udWAk0rt15/4hXaJRxrRfg3faDiL80DQkHy1BCp 6 | WawIWL5hnNbhr+fB3DV1W5zi8Lf7qi5nUgebmyfYi2WJtTw+gRYwg3tm47PAHvfI 7 | NazYiCzohED9TRlzO7Iaet0/6wKCFkDS4ilhtVplYmINlhe+4VehvtVuOuqX21pm 8 | yyrWID7csAIE3ALmg++bxZCyZnPsUp7btMtlDyYcYNeRLBr9dMMJ1B4LyAiiad+j 9 | V6gD+iivT/keyy2YgktdbZrNCcFaGFgDaVRZFYV9mwt9Ix7esugj4elGG8pObxFQ 10 | jtlfF5bYQwNFqNKbAkR3AbUsf0NEPhaEvYPfDgPoPD2byPtUUJg3Q482JkSKvsmh 11 | 
vB0Wn60wDVi+G+jUUFhRH1iY6AnN3m7XNuKu4MlrTwUm2Dc+niRbsNUZ6/Ro6RhC 12 | 8aBDVdKI42s007t0CRCZ7q0bR93ouBRjeUXzk3VC1F32qPreZkgO8iMCAQI= 13 | -----END DH PARAMETERS----- 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-frontend/files/nginx/global.conf: -------------------------------------------------------------------------------- 1 | ssl_ciphers "EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH"; 2 | ssl_ecdh_curve secp384r1; 3 | ssl_session_cache shared:SSL:10m; 4 | ssl_session_tickets off; 5 | ssl_dhparam /etc/nginx/dhparam.pem; 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-frontend/files/nginx/mtail.conf: -------------------------------------------------------------------------------- 1 | log_format mtail '$server_name $remote_addr - $remote_user [$time_local] ' 2 | '"$request" $status $bytes_sent $request_time ' 3 | '"$http_referer" "$http_user_agent" "$sent_http_content_type"'; 4 | 5 | access_log /var/log/nginx/metrics.log mtail; 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-frontend/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: reload nginx 3 | service: name=nginx state=reloaded 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-frontend/tasks/configure_bic.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: Add the tcp_bic module 4 | modprobe: 5 | name: tcp_bic 6 | state: present 7 | 8 | - ansible.posix.sysctl: 9 | name: net.ipv4.tcp_congestion_control 10 | value: 'bic' 11 | sysctl_set: true 12 | state: present 13 | reload: true 14 | 15 | - name: install module load on boot 16 | copy: 17 | src: bic/modules 18 | dest: /etc/modules 
19 | owner: root 20 | group: root 21 | mode: 0644 22 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-frontend/tasks/install_packages.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: "install packages" 3 | apt: 4 | state: latest 5 | install_recommends: false 6 | package: 7 | - nginx-common 8 | - nginx-doc 9 | - nginx-extras 10 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-streamer-frontend/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: install_packages.yml 3 | - import_tasks: configure_nginx.yml 4 | - import_tasks: configure_bic.yml 5 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/audio-fetcher/audio-fetcher.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | 3 | Description=audio-fetcher 4 | 5 | [Service] 6 | ExecStart=/usr/local/bin/audio-fetcher 7 | ExecReload=/bin/kill -HUP $MAINPID 8 | KillMode=control-group 9 | KillSignal=SIGKILL 10 | Restart=on-failure 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/config/background.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/video-voctop/files/config/background.png -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/config/background.raw: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/FOSDEM/infrastructure/1ac1eeaed13db8b891796016d7a95357553238f2/ansible/playbooks/roles/video-voctop/files/config/background.raw -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/config/config.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | 4 | data=$(curl -s http://control.video.fosdem.org/query-vocto.php?voctop=$(hostname)) 5 | 6 | if [ -z "$data" ] || [ "$data" == "notfound" ]; then 7 | # we don't exist. wait. 8 | sleep 60 9 | exit 10 | fi 11 | 12 | room=$(echo $data | cut -d ' ' -f 1) 13 | cam=$(echo $data | cut -d ' ' -f 2) 14 | slides=$(echo $data | cut -d ' ' -f 3) 15 | 16 | SOURCE_CAM=tcp://${cam}:8899/ 17 | SOURCE_SLIDES=tcp://${slides}:8899/ 18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/config/defaults.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | WIDTH=1920 3 | HEIGHT=1080 4 | FRAMERATE=30 5 | AUDIORATE=48000 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/grub/mitigations.cfg: -------------------------------------------------------------------------------- 1 | GRUB_CMDLINE_LINUX_DEFAULT="$GRUB_CMDLINE_LINUX_DEFAULT noibrs noibpb nopti nospectre_v2 nospectre_v1 l1tf=off nospec_store_bypass_disable no_stf_barrier mds=off tsx=on tsx_async_abort=off mitigations=off" 2 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/patches/videomix.py.diff: -------------------------------------------------------------------------------- 1 | diff --git a/voctocore/lib/videomix.py b/voctocore/lib/videomix.py 2 | index 38432e8..c7d29c0 100644 3 | --- a/voctocore/lib/videomix.py 4 | +++ b/voctocore/lib/videomix.py 5 | 
@@ -95,9 +95,9 @@ class VideoMix(object): 6 | self.padState.append(PadState()) 7 | 8 | self.log.debug('Initializing Mixer-State') 9 | - self.compositeMode = CompositeModes.fullscreen 10 | - self.sourceA = 0 11 | - self.sourceB = 1 12 | + self.compositeMode = CompositeModes.side_by_side_preview 13 | + self.sourceA = 1 14 | + self.sourceB = 0 15 | self.recalculateMixerState() 16 | self.applyMixerState() 17 | 18 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/scripts/restart-voctocore.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | systemctl stop vocto-source-cam.service vocto-source-slides.service vocto-sink-output.service 4 | systemctl restart voctocore 5 | systemctl start vocto-source-cam.service vocto-source-slides.service vocto-sink-output.service 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/scripts/sink-output.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | confdir="`dirname "$0"`/../config/" 4 | . ${confdir}/defaults.sh 5 | . 
${confdir}/config.sh 6 | 7 | ffmpeg -y -nostdin -init_hw_device vaapi=intel:/dev/dri/renderD128 -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device intel -filter_hw_device intel \ 8 | -probesize 2M \ 9 | -analyzeduration 2M \ 10 | -i tcp://localhost:11000 \ 11 | -threads:0 0 \ 12 | -aspect 16:9 \ 13 | -filter_complex "[0:a]channelsplit=channel_layout=stereo[left][right];[0:v] format=nv12,hwupload [vout] " \ 14 | -map '[vout]:0' \ 15 | -c:v:0 h264_vaapi -rc_mode CBR\ 16 | -g 30 \ 17 | -maxrate:v:0 5000k -bufsize:v:0 8192k \ 18 | -b:v:0 3000k \ 19 | -qmin:v:0 1 \ 20 | -map '[left]:1' \ 21 | -ac:1 1 -strict -2 -c:a:0 aac -b:a:0 128k -ar:0 48000 \ 22 | -map '[right]:2' \ 23 | -ac 1 -strict -2 -c:a:1 aac -b:a:1 128k -ar:1 48000 \ 24 | -f mpegts - | sproxy 25 | 26 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/scripts/source-cam.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | confdir="`dirname "$0"`/../config/" 4 | . ${confdir}/defaults.sh 5 | . 
${confdir}/config.sh 6 | 7 | /usr/bin/wait_next_second 8 | 9 | ffmpeg -y -nostdin -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device /dev/dri/renderD128 \ 10 | -timeout 3000000 \ 11 | -analyzeduration 3M \ 12 | -i "${SOURCE_CAM}" \ 13 | -ac 2 \ 14 | -filter_complex " 15 | [0:v] scale_vaapi=w=$WIDTH:h=$HEIGHT [v2]; [v2] hwdownload,format=nv12,format=yuv420p [v1]; [v1] scale=$WIDTH:$HEIGHT,fps=$FRAMERATE,setdar=16/9,setsar=1 [v] ; 16 | [0:a] aresample=$AUDIORATE [a] 17 | " \ 18 | -map "[v]" -map "[a]" \ 19 | -pix_fmt yuv420p \ 20 | -c:v rawvideo \ 21 | -c:a pcm_s16le \ 22 | -f matroska \ 23 | tcp://localhost:10000 24 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/scripts/source-slides.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | confdir="`dirname "$0"`/../config/" 4 | . ${confdir}/defaults.sh 5 | . ${confdir}/config.sh 6 | 7 | /usr/bin/wait_next_second 8 | 9 | ffmpeg -y -nostdin -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device /dev/dri/renderD128 \ 10 | -timeout 3000000 \ 11 | -analyzeduration 3M \ 12 | -i "${SOURCE_SLIDES}" \ 13 | -ac 2 \ 14 | -filter_complex " 15 | [0:v] scale_vaapi=w=$WIDTH:h=$HEIGHT [v2]; [v2] hwdownload,format=nv12,format=yuv420p [v1]; [v1] scale=$WIDTH:$HEIGHT,fps=$FRAMERATE,setdar=16/9,setsar=1 [v] ; 16 | [0:a] aresample=$AUDIORATE [a] 17 | " \ 18 | -map "[v]" -map "[a]" \ 19 | -pix_fmt yuv420p \ 20 | -c:v rawvideo \ 21 | -c:a pcm_s16le \ 22 | -f matroska \ 23 | tcp://localhost:10001 24 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/units/vocto-sink-output.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description = voctomix output sink 3 | After = voctocore.service 4 | Requires = voctocore.service 5 | 6 | [Service] 7 | Type = simple 8 | ExecStart = 
/opt/scripts/sink-output.sh 9 | Restart = always 10 | RestartSec = 1s 11 | StartLimitInterval = 0 12 | 13 | [Install] 14 | WantedBy = voctocore.service 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/units/vocto-source-cam.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description = voctomix source cam 3 | After = voctocore.service 4 | Requires = voctocore.service 5 | 6 | [Service] 7 | Type = simple 8 | ExecStart = /opt/scripts/source-cam.sh 9 | Restart = always 10 | RestartSec = 1s 11 | StartLimitInterval = 0 12 | 13 | [Install] 14 | WantedBy = voctocore.service 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/units/vocto-source-slides.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description = voctomix source slides 3 | After = voctocore.service 4 | Requires = voctocore.service 5 | 6 | [Service] 7 | Type = simple 8 | ExecStart = /opt/scripts/source-slides.sh 9 | Restart = always 10 | RestartSec = 1s 11 | StartLimitInterval = 0 12 | 13 | [Install] 14 | WantedBy = voctocore.service 15 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/files/units/voctocore.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=voctocore 3 | After=network.target 4 | 5 | [Service] 6 | Type=simple 7 | ExecStart=/usr/share/voctomix/voctocore/voctocore.py -vv --ini-file /opt/config/voctocore.ini 8 | Restart=always 9 | RestartSec=1s 10 | StartLimitInterval=0 11 | 12 | [Install] 13 | WantedBy=multi-user.target 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/handlers/main.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - name: restart vocto 3 | service: 4 | name: voctocore 5 | state: restarted 6 | notify: restart voctoscripts 7 | 8 | - name: restart voctoscripts 9 | service: 10 | name: "{{ item }}" 11 | state: restarted 12 | with_items: 13 | - vocto-sink-output 14 | - vocto-source-cam 15 | - vocto-source-slides 16 | 17 | - name: restart voctocore 18 | service: 19 | name: voctocore 20 | state: restarted 21 | 22 | - name: restart systemd-logind 23 | service: 24 | name: systemd-logind 25 | state: restarted 26 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/tasks/cputune.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "install packages" 4 | apt: 5 | state: latest 6 | package: 7 | - tuned 8 | 9 | - name: "set tuned profile" 10 | shell: 11 | cmd: tuned-adm profile throughput-performance 12 | 13 | - name: "copy grub cfg for disabling mitigations" 14 | copy: 15 | src: grub/mitigations.cfg 16 | dest: /etc/default/grub.d/mitigations.cfg 17 | owner: root 18 | group: root 19 | mode: 0644 20 | register: grubopt 21 | 22 | - name: update GRUB config 23 | shell: update-grub 24 | 25 | - name: reboot if GRUB options have changed 26 | reboot: 27 | reboot_timeout: 120 28 | pre_reboot_delay: 35 29 | when: grubopt.changed 30 | 31 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/tasks/install_audio_fetcher.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - name: "install packages" 4 | apt: 5 | state: latest 6 | package: 7 | - python3-requests 8 | 9 | - name: install audiofetcher service 10 | copy: 11 | src: files/audio-fetcher/audio-fetcher.service 12 | dest: /etc/systemd/system 13 | owner: root 14 | group: root 15 | mode: 0644 16 | notify: restart vocto 
17 | 18 | - name: install audio-fetcher binary 19 | copy: 20 | src: files/audio-fetcher/audio-fetcher 21 | dest: /usr/local/bin 22 | owner: root 23 | group: root 24 | mode: 0755 25 | 26 | - name: enable and start all audiofetcher 27 | service: 28 | name: audio-fetcher 29 | state: stopped 30 | enabled: false 31 | daemon_reload: true 32 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-voctop/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: cputune.yml 3 | - import_tasks: install_voctocore.yml 4 | - import_tasks: install_audio_fetcher.yml 5 | tags: voctocore 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-web-frontend/files/nginx/certificate/dhparam.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN DH PARAMETERS----- 2 | MIICCAKCAgEAt6reliZJhEolguC3iQeTaFwLvW7GMJkH1ilrQKIRtjfPVEdErd74 3 | HT37cs0NzQcN+AAYmMndUgvMV5HtjH2Yn2SRLxKM0OZ5EpkMBnBvtkBZbGkF4dI+ 4 | LKvTgQWVwWzL/UCCWthcUYYSRsNGEjdMSumlYn5VC+l/lwtemxBUBfaJVE5rhPxK 5 | d0bAAmp6UUtSaEfKnSbnaL8r2udWAk0rt15/4hXaJRxrRfg3faDiL80DQkHy1BCp 6 | WawIWL5hnNbhr+fB3DV1W5zi8Lf7qi5nUgebmyfYi2WJtTw+gRYwg3tm47PAHvfI 7 | NazYiCzohED9TRlzO7Iaet0/6wKCFkDS4ilhtVplYmINlhe+4VehvtVuOuqX21pm 8 | yyrWID7csAIE3ALmg++bxZCyZnPsUp7btMtlDyYcYNeRLBr9dMMJ1B4LyAiiad+j 9 | V6gD+iivT/keyy2YgktdbZrNCcFaGFgDaVRZFYV9mwt9Ix7esugj4elGG8pObxFQ 10 | jtlfF5bYQwNFqNKbAkR3AbUsf0NEPhaEvYPfDgPoPD2byPtUUJg3Q482JkSKvsmh 11 | vB0Wn60wDVi+G+jUUFhRH1iY6AnN3m7XNuKu4MlrTwUm2Dc+niRbsNUZ6/Ro6RhC 12 | 8aBDVdKI42s007t0CRCZ7q0bR93ouBRjeUXzk3VC1F32qPreZkgO8iMCAQI= 13 | -----END DH PARAMETERS----- 14 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-web-frontend/files/nginx/global.conf: 
-------------------------------------------------------------------------------- 1 | ssl_ciphers "EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH"; 2 | ssl_ecdh_curve secp384r1; 3 | ssl_session_cache shared:SSL:10m; 4 | ssl_session_tickets off; 5 | ssl_dhparam /etc/nginx/dhparam.pem; 6 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-web-frontend/files/nginx/sites-enabled/live.fosdem.org.conf: -------------------------------------------------------------------------------- 1 | server { 2 | listen 80 default_server; 3 | listen [::]:80 default_server; 4 | 5 | listen 443 ssl default_server; 6 | listen [::]:443 ssl default_server; 7 | 8 | ssl_certificate /etc/nginx/live.fosdem.org.crt; 9 | ssl_certificate_key /etc/nginx/live.fosdem.org.key; 10 | 11 | add_header Strict-Transport-Security "max-age=15552000"; 12 | 13 | root /var/www/livestream/public/; 14 | disable_symlinks off; 15 | 16 | index index.php; 17 | 18 | server_name live.fosdem.org; 19 | 20 | location / { 21 | try_files $uri $uri/ /index.php$is_args$args; 22 | } 23 | 24 | location ~ [^/]\.php(/|$) { 25 | fastcgi_split_path_info ^(.+?\.php)(/.*)$; 26 | if (!-f $document_root$fastcgi_script_name) { 27 | return 404; 28 | } 29 | 30 | fastcgi_param HTTP_PROXY ""; 31 | fastcgi_pass unix:/var/run/php/php7.4-fpm.sock; 32 | fastcgi_index index.php; 33 | include fastcgi.conf; 34 | } 35 | 36 | if ($scheme = http) { 37 | return 301 https://$server_name$request_uri; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-web-frontend/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: reload nginx 3 | service: name=nginx state=reloaded 4 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-web-frontend/tasks/install_packages.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | - name: "install packages" 3 | apt: 4 | state: latest 5 | install_recommends: false 6 | package: 7 | - nginx-common 8 | - nginx-doc 9 | - nginx-extras 10 | - php-fpm 11 | - php-zip 12 | - composer 13 | -------------------------------------------------------------------------------- /ansible/playbooks/roles/video-web-frontend/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - import_tasks: install_packages.yml 3 | - import_tasks: configure_nginx.yml 4 | -------------------------------------------------------------------------------- /ansible/public_keys/alec.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQDoo/zh+KV+YPLbrEJ+QjRbd5Mi8o9HrjgJZUUZ0UavXo+z8ufeR5QKFZh+ActUMnp3habxUVHv8/zRz0eLrF4wab/MaRHIAYX2j4xV8bLi136mfwiB4MGhrTrNEgLJ4XBnsdvRNJc99V/RXVmwNbueNzK7a8+/HqDrzM/OKE7wgxysNK1km8eSVyPUVKKcNwRCH7AFSw6QMSoNnkMkR2PGzl4Pe105PAphGe/mETyBY5N/ekkrhr/4fiB8aycJxluUvwfc1/DOMGxw5rMTl2t4JAVnhn/SnQcL0RRjSmxz24+pjfNwwZ+Dr9f4Sj83CT4uaCSUQ3/OgFvfZCN0NdJoasKiKtz5RPs7RWO7jTvai0C96JelgEMbr0XyDZjGZ6AfCBtSgM5TVU3xsdcpOGcjnUCLRQ46qQKbpSpz2CTi2BsZmoWkQTsl2jDFa9TX/NYf4xeij+uFW5OWGLtXPS4Ilitu4QQSbdIAQqbD7oABE30dcta5yUUZwDz3JF9hL0J3gVYP8+WwRmHkdJ+S8em45Uy6XWSU40DIYrgVZPjvOkGIGEDXG2TnMdSEAlbFHBmf+YzjeTcpnWHjjYMyPODQ738NMrvxAaTrzIc9a3mN4IDMUic+9fhFugiQ1YrvAt4CU/Fdu7t/YEjwRohxuw8SzBMUhH4a5teIARUgEWH0Sw== alex@Skynet 2 | -------------------------------------------------------------------------------- /ansible/public_keys/bastischubert.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAACAQCrFhX8yG0n0hm2lK92ePv0BAr/aNpTewX+wyCpZYywX30m0wvrXSLqtk43oP01NWOcsONXR57PJJGtrH+YyDTto2x0eIAP5OqkfhJemvu5zwSQMk4H2cJlxrOHmdPxQOtjaFOtQ5N1tICDXVYLPcwD7OL+7KOUTn2WNXmYuoBVylHYO94RnbwykHDf7pD4e2nn8WmCWRHwbeXQShRxpcVrFFrjheN811fE+G/HNZfXtb8Amm+8RPCBHdY4JtW6u1AwZfBEWn4AqG1Gj2oJXLUjn27htSVLiZwqiPrgOaY189XfyrBmO8boGgVmCE8UmMuj++U7KcNhGUB0zssCQbXRS/fJ/aLzdqiupeAhD8hleVZsp0xClANz21uXLMfDTeunh6vPvLeOh/k6cuX30uv2YbzkZDrFThLZYEWqBQBNJGT9xDffLeHZQrTyWvJYYxvZLkvyu6BvVtKX+xPULHwdNgjj2QkCCoWiLnccCZpxxFePugg/9BVq1uh98mBXnj01I5EUsx1g9nvHqGBRdBTELwB3kN9XulkioDKdEVl72u7pCRkFEaQP8JcVilvBvBddsACxZLIAvmWvxZ6zg0qWAeU1LzNVlPoQv3puOA5zsNESPUtUjbPQdPiBq3vIAufH01So5KrpKmWfCKVwVsEWs6Gft0MFAFaT/htXUJ5Btw== bastischubert 2 | ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGQJuqpx40+dIIKb71kgSt7BYek/QVsG+eMBByiwKuss bastischubert 3 | -------------------------------------------------------------------------------- /ansible/public_keys/bert.pub: -------------------------------------------------------------------------------- 1 | ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIG3AhamvaAZRBfk05iLB74XdKJwEE9CIxCPI0ql32YLa 2 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCupI2/pcouVQ0DExCk67nu8wxEMLwuSuda0+6qwMDeeuc2SbC9x65jVVLGYD+FxL6HgEtmZxIfQab3SRRNAR6ZbLxz8jieb4GCaAhJvMtQ9iiyM8p2JVO/RUl1fxXzzkbcTpp9SnwicJe0suNY42Lb2QUIklq4TNIhxQ4CD7D57ZlXhsA3S61JfJ6YHpXLsrb6i9MnuT78B/Ow+6YIoJRc0W9N3ZmNI4c9EYu6lOyJRDCqaL84AsUkvxtPCFewhqY5X9JuTllatcf9yseqk3qwa1M0y5eGTJUFVxpsZLEoj9bKW2x2fW1X3qKhxfgwvf4ssb2qFTtwTCF57TIlSvFR 3 | -------------------------------------------------------------------------------- /ansible/public_keys/brian.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAACAQDLOpq+GrXC+88EMVs2yLW6wWQqJVpTkPFxeltJPcUt9FKYSsc14b1vdQuvFok1CaiNJNSTdqP5yqzsb9vjAXon3aelr4jUiWQIc0AE/O9zkBEBdbNfS/EKtUmAsEK6ddYivWJI0Z5tqYVfDePjy5u5Hbvcgqkt4VbBZunosyRQJY3fYnw0GMERQUtAaSBbCKLYQI3/25hvgwvqvNlXSVbKSShKR9AY85WWNPoHWr5wf6Yi3n11VAQFSkJJKv3qIeC1Klx1n2Tb/ZSKh9Wqv++2DeBfK2QoGB5wuNRGOu+amkfQggnYjicwMU4yiUgWN32JqKF3dTx3fK+zZrtetjJVmf3e8MFDeLNKrN1FbPrQ4wWMwUZSTB0A2IR3nMUGlsCP8jpKqqHvl7sqLOcr0C9FXloXhSkqNR2AkfBUDV9rr4dqTbGuDuQ9NVvPu1bT1zOncQcHrL+vSLRczlocdadXo1qb13dZPXoDSNfj5oBZsJtYr95vXErtRG4+uqq9W2xEnBRoBnLAd9HSprlzwV4ccHaLQywZhG7xLoA3a2HdTNaEIWjW48oo0mYraP6m0fuqIca+MM74CEutat6ALnNNctghuiu5d+nrguvraddI0rYYVtuMCRYs/6jFoiBaBs1LGm+njdt/sfvhuGhW0hqjvPOh7701dNR1hasxAHazqQ== bbrazil@kozo 2 | -------------------------------------------------------------------------------- /ansible/public_keys/dan.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAACAQCmpEpD/ACCSzSVLjoFvH4RBqtpzDUMe0PJxRSeM+7agLJwxXOeGskSeU67dpPl7cmbV4UmHVYlIDF1J+ctOQytnLpC2drtpvk5X/VfdqqVGLIOHu5hD+cRNkaRG6Bmw33gvUo2AV+sTAeKoil2H9KSR1daRUSgaDZhQWy3GsPoIGKktuwoEko7D1asUSbgND2d9bKguWfiTiECRuO0DxVPXWORJkezkv3PFvCUmmSSKZZFdUeTXJ/1+S4y499OmC/DZui7PmWrQZfLq7Ru8qeADYAFLSjglzs0pbs/M6IGBgq604zRzm9x8ZBVHiDTdrbpItBrLRF5axyQJRI/CumtLY4qHEuT8gqgNeYGq0lw3YgtP3mXFH8RLWxGz2Irz7xLhRYbURRNdlkJeWt/sT9norwFHDkFIyP3o0n6qy2CPRCLo9LK9O5zIYxp+tOhjiUgOZizp2z9O0GGESM9K/eTM8IwVROb4eOOo6cbTe0EVjiOVD7twdsjmBkcnc6QDEpJUmgRWP5ietuuj4B5WGs6+EftrLCa+EMLEMeomnJmkRqQC9zACKvOog7jyfqdBm59fjXPrTrPkqTv7ZMq78B+w7RamgYNbYbBaZNgZ3y7MZdjTigol2G8z3x3fe+RHeMqmyS0Dw6wt8AApzGn5S6d6P6ZgC1QgwSlOf29Uu7O5Q== danielclark.home@gmail.com 2 | -------------------------------------------------------------------------------- /ansible/public_keys/gerry.pub: -------------------------------------------------------------------------------- 1 | ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIO/herg6OxWitV0gEdWHQh+FrMw+BraR0qbzjJskghCN gerry@fosdem.org 2 | 
-------------------------------------------------------------------------------- /ansible/public_keys/gouthamve.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDGMGbd3H+F7bPSXcLj8AJZ0iujd8fT3o4ki94SkKDJrFZ8cuA078aE5lRk6nbDi40THsOUw1KgDcItQJWu4c5DcoyvgRtTH30Rh5Tz3yWHqR/U1pnZqJbOc+SWIh0ieyWAq9kQOvDnGzxIcaPApKYHJiK6dPI3rdlJeW2KWggGsIajX4sE5NPb87gQDhDZebp7f6XnVQe6zGLev2qsoI92EetayaSdhQWZfoP0DWFqdROqwDFKag+qEC3pDovSd1V2l8IlnFp4HKnsFWlmMwnMelgcDK8GctKneVjA/F02s9FU/PtP5fyCHt+8ckc2RkoHZ8o40J+ENBuhOcblkn9R gouthamve@github 2 | -------------------------------------------------------------------------------- /ansible/public_keys/jarek.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDJV66zeRNMLZ30IJVbT6a2CrayH4BJSSam4iRuXTYxujmSJrDeN1HrsBxnZsShumeXEUdVnhQsxyL9hUr/ftJ3Ad24RhbiWhbCorCaX0mjH4fQBY/9vrm3ls4Wqli0pty6nmPbnDXWT0BusKc2//z+v5TX4O1CDz3XAQ6bh/j9XDweiD4d+RepRzG7bQeZ0wk6XxXXrwYg4ZOj4Z1gX2If+g9uH2EEjAi8F/nlPch/I/RU3v1kIkYH+wOCyGR6/yOOu4rmNbM4ck48FyBExOPnNERk+gcXBXBALc2H5ldQuXsyrlSXlzS2gfNJ1UQZAM6BVmUzDrRGpiqUfI574bhp pub@loomchild.net 2 | -------------------------------------------------------------------------------- /ansible/public_keys/johanvdw.pub: -------------------------------------------------------------------------------- 1 | ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEUfnnLhzgrEg1zVhP94ZepMUUnSPXEl4eYmeOooXU+h johanvdw@x1 2 | -------------------------------------------------------------------------------- /ansible/public_keys/klaus.pub: -------------------------------------------------------------------------------- 1 | ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOgtcdUUFNqZT4v46RpglmcgWonoLpo0djOzODsBdTcq klaus@shodan-2.local 2 | -------------------------------------------------------------------------------- /ansible/public_keys/louis.pub: 
-------------------------------------------------------------------------------- 1 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCutzaadqzHnf5EnV1p8eoLc2+nz860i08QKrApnS7dEE3+YkT7L8ix7Cj4CcFbz9bJ+8t8RCPlCEQxzAZWilVnZVu3D8bibmyFwusJ5fXlvfPe45HEO4V+u+sNw5v7gePelnPUw9GmG1rD90tk7zNXAeLesUAk4XB+j2CAzusJNmafgYOWn+rRyZD/3IYiLjdtRkPaH44BXnvlDg0Y7E+BEllusZcUX2sQ3YHFgyfSnyrbmkQe586hEp5Vv/v4ACZo7loWkR69PNTv6EE/mD8rabTiQ9H9lOqi8mvL4XCe6kr5Je33fzMEUf7GkRAVOeAhyY28mIl6ueIVLH243hIF kgz@ayano 2 | -------------------------------------------------------------------------------- /ansible/public_keys/luc.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAqxyJAaMuKVHg019EDyd2J2dz5dKHBw023XPoDduqmaLhF66/ckECSKiIE195RBZgzeTxeZ0SWNenZaw4kmSxe6HQ+/ypbTyVAdz+4I+L2JqWbQcymlqbu7q8OUGRZvzj5Q0GHdxVVAeTeo9HEtR0O5Gj9nbjZnzeFR7Z6GuSapM= libv@albert 2 | -------------------------------------------------------------------------------- /ansible/public_keys/mark.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDtkaNOhyFNR0J54JoOinRBJjYC/I1wM4zhkT53jtsvBigi7I0vGpLq9BX2ZdmgLSy6yGkJBtaEIt4Uw9JVuMfKgL9RcGaq578UnKSCArhblmiPjEQGm2joA8ImZvaz8HM7kaCPCrhyd0lG3/a2kLziI2SII4Lz/2/RgGQtVyvcQvTpL89JAq1j4pw91xcsT9/cr+dnUqDhE3i1QusKAfPPo0/T3Fo7AysWMcUqhgIlBdoYBuE4Yeip3/hfxjSWZLNWSO6cZymhdiEQM/3nqK9eRLunQ4CmF+hDXaxpX6Un03ZNzAjvkQEBVw/3vbtg/URUscQ5f7k0bZdWX7MUi1oV 2 | -------------------------------------------------------------------------------- /ansible/public_keys/meka.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAACAQDBociQsqzBrIxGskZYlSOPMoyd9tK1hw3Wo3166nbJbs3pQv2UmLW1oa2vPgjr78pycjKNHfZiU7A6kRD2O9DZmnRiMydL2NTbti1qC9WUct1Nbs6Ybg7H3sP/4eSnR0FaG+wvllc76PhhTCYDvyoSAxY6X8vih45QPdA60Fy0afC3yUfexLQxF4rY04ybq7eGfcTB/gEVcoRj90zlBLEy5W8fQ5aAT3evHyIZ9xiWVlXyylvPgHx9Jh5gw1X4LheK77FnpACZHzkCzHWKfaxqPJ8KeqA43oe81CSvrMwht4yRt7e+TEGD8FjPlu4XKDi7N2tOSZXGC0FzJpQT+eYWdw+ouboyHav+b05BUMBhJ2t4R4IjpMj2bMAdC1fk4IYZsdHqN6PLsyDRbbBfu3Kg0V1EOjx0meWVAf1eQW5vLwpcuuLIcZF5nBiGjP8pQNuA6NJ6I+SRXl1wKtX/6javra18bq36ZjWNGIX7+tpyhEU8dbal+GegvtV07M38GmjcgfnsfwOkvwG3iwKh9STZOq9NfLXHezlOJ6u2s5hhaAVfZSNGLkurGSzNmJKL8VTQx/Xwzbqv6okwFx4CppzQw9R9yTbuf2r1SVI/jbbGY/hyGw9clPFEbaNZRC/P7eboKlllsrUVVT9omKsEfVpEZEj2QbeWHHHUoYRV6q1sZw== meka@tilda.center 2 | -------------------------------------------------------------------------------- /ansible/public_keys/msuriar.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAm/YZKXsLM9bxbG5ZusHrn/cnC3qG9YoYlEbWqrdK04UHaVBmTSWWcLp08D2BreMR5d7kLI9A6zQZnl20+DPF0lhdZOIu0QOCCZ1rizRYpl6LdNGe1fRFKpotzu6Ok1BopoHpEQ9aG5fzIk8alR7UXWw1TCQ7kOqSXo2BITR5hxh2Jau/OoEosoXA2D5x0sKl+A8SUDUiOBB/cEBW1kXVJATwik+78652eniyzRRUR+l9QR58R6s9FWPBrmHwaid32ymu5h+TZgExwgci521pVaBiftjjX08WZWR7FI4YZbp9ajdJHASayqxEvBjB9KnGtGaWKM4hJyAZOVAZOOWyKw== 2 | ecdsa-sha2-nistp521 AAAAE2VjZHNhLXNoYTItbmlzdHA1MjEAAAAIbmlzdHA1MjEAAACFBADzefJh7TYfTkGaSa2kXfeeA8wz8cz1dWIaE2zyPM1KJTKR8OCB1kb+XRgogcZ0aNmknCxhuX2C/+MMF10l/IGr7ABOuK0Xlg2FS4PopiqvZYxU7ahnVrwWIDbk5HtCoAAKf8GMvKR+/O2tCcaLDBJOmwJ651MTw4mXp+1PpqMlAMcWbw== 3 | -------------------------------------------------------------------------------- /ansible/public_keys/nicolai.pub: -------------------------------------------------------------------------------- 1 | ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJLpvShO0x+YDUU1lbNYbXF1J2Qf/3zPwpIJCX690lN0 mon@flunder 2 | -------------------------------------------------------------------------------- /ansible/public_keys/peter.pub: 
-------------------------------------------------------------------------------- 1 | ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHjErAEeV7XmajochqXCGqu3YaLKsfNnCksX/WcySRZ6 pevaneyn@PEVANEYN-M-50LF 2 | -------------------------------------------------------------------------------- /ansible/public_keys/quint.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDMwYQFdAvJ0Y/xHBoBw75110osWB8UMYNJEVUgMtI5nCFs5VZkTo1CMG8hygXJxwogsQaZJz9TY/nBY7TMH5kBO4pT7Ur9LZxZIN/slgY0CxiPZAvuxYeU+BGf3Saem2Kbas+yr458RkCPgLcneDFDdPqCbwToYAHc5DbXPIMMj2zHMTU9UK0lc3HDVRxMa6Sr4t1x8QD6yktPG7B+sWe9M0CuI9r5Eg+1e7m4l8iZ3sBvjvRD7bHwUw0xhcHdHCZRVn3SApfZSerD7Rl8A9j9aHLFZCHaT+zdT2xqUzg50RLryVYK54ul8/zPOOHMxYDbI597y4uwu4hdEZPGuoGWvmYYHv1R99Vug9YvIlJ3GjE7YVUpfMsaK7gDD07x34kpJPKAs3wcCZWo/wE5jmr+jDqS5vgwX0XFl3A6gQQcMwp8UcN0+kFJgSXKeCkDTaZoX21VajJD1tDvHLh3pw+SaoHpX4WDY2GrIKMSB0arg2qOj8Mhjv0VYOWLeMx172M= quint@quartz 2 | -------------------------------------------------------------------------------- /ansible/public_keys/richih.pub: -------------------------------------------------------------------------------- 1 | ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPZdDvXNQdzJjj5pzi5OeEaAiExaMiD04rhJA8rH1IMu richih@chinstrap 2 | ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIK1c/zVOLUKs91wy333oSfGhRs+pTYzt89wKJ7Z+Wdmn richih@roadwarrior3 3 | -------------------------------------------------------------------------------- /ansible/public_keys/shin.pub: -------------------------------------------------------------------------------- 1 | ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPARBJfQQK1cbrkYXWd7YdO2T7SkCrxN7WGfL3TcU9iv shin@fosdem 2 | -------------------------------------------------------------------------------- /ansible/public_keys/vasil.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAACAQC//hCcxBoM3l06lagxvdJ/3bnckP2xtX93B5Pv1D2BGcGvjOgtiqJMY8HfrlbHj2KuG2SiMR6HBbm/utZF4mMBnvGscNjr8lb8tfdbj6ZRroig1ngTdRyZxXVoE0UXH/1Xz15ezuf+mTSaUV/GXPte1a2Xo1izp0bbdg6WYImD3aGf+XawZaS09Vsh4xqoXRd4cPXMiCFOz8iwq3L0ycep6MasNAYT6BOZ1qWECUn0IgrNJtM9Iaxk0nEarHAi3qEi/XTBIOtejLmz3vk1C5dPXHZk/C2qDMNawPoVQAJ9MhjH+HWSx9L/MMzjpknApovMiJ5pppGp80e5L5oHJjwqla+dunedO9GSkg2fW2Vlabk1DMRmUGEyYhLCTD4vop9xxLLq4e3jazgRI/l26yhxYJhrrQVCNJcCMitSoPeJJGhFUjNKgftGWyTHk2ICMlcpwtMoolecChstiHXnZsrrs1pREzt6dWZeOILHVUQ64eBbPhUrNjtuZCTkB/FPHaJVNJP/uiqC9NTpk3zVxycK7cqYeNsWAXlXcLjFAwXj2VOdOQz8XrW6zpeDPak/+v4I5q1dIh/1VeN7ESO/6h7wEzkNJd5YhZrVThjgxIbGy+OZfi2IslQX7GAf9rBbA/5jK0WCaJYvIt+zRXPxMfeAJcMn3sWYo7/jR1viB7YApQ== vasil@nymphadora 2 | -------------------------------------------------------------------------------- /ansible/public_keys/wouter_s.pub: -------------------------------------------------------------------------------- 1 | ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAsDbKjf+Q4deyBHNXG5BJ8zyIbqFbPqEvWtDglklAZvDeuNB/p4PRpa5PWu60PS/TE0ywNDfJUaB4kPGdo28wzMSSwrL8YKFq0suZT5cyow1F/t7yVQs45IkYhvLCtuJoqu4bAxAaGNkNKgDIPXRATMQW9dk7OXdw/71uMLUjkzKq5i+DdvFQG8Api5GoqbE0pWz56PT1hfnkBzznqfP4txKNE8Ieoq9mTvg+Xcv59e7a7Uw/XKe9Z2AeDTN5ELBa52yp8L75QNGCfxbm6d+MoTHOSNSlf0sVcBnPPOQaAMqVuvZJEbX4jA415dUjVmf+47CRjcFyy+cilsqnkzU/+w== wouter@Cartier-Bresson 2 | ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDLzY2IkH5f7BaOL4+AEaMf7SdICubhFPUbL3HMKLfeQrnvsolpJlzBN3Mv8P3pBOgCM3QDNRPKt7UJcC41LRtf4CO2IltXSUS4GMDKpoyZ4YyI23HZGt1GGy89jeaojX3LxYgQOMu8Uc1BWwM6whdg6CQU3fHNIar+YnKiVEyB1gvR4KO5vRhQeoGp9vSlKow72lCmVCKC9NGoc+Gheh+s1ajjhyS7s6jVEMFzFuavMoQsjQWMZkY+s3rdBcXK20J6bRxT1qgTlYejRyRLP4a5n+Ccp8DKhNdavnNG/9O4e5vXNq9iUFt6zhQyG+WAKsro4hhoy5/8aGRSQCd6nHR7 wouter@ponder 3 | -------------------------------------------------------------------------------- /ansible/public_keys/wouter_v.pub: -------------------------------------------------------------------------------- 1 | ecdsa-sha2-nistp384 
AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBDISYhTSAUDvgld3t4OFIWgG2vSWVAME5T8mpIw1n69RLaDG9DIAanxop+qZ4PbcLxvLPphrzvRHxqiUB65BoKoGVubo/VJOOWzJYKRkc8fHf1PBHKw1bK0JXx81SvL6Nw== 2 | ecdsa-sha2-nistp384 AAAAE2VjZHNhLXNoYTItbmlzdHAzODQAAAAIbmlzdHAzODQAAABhBLARML68fNLpaQUT/6EXfVOyAqgMY0grtCzIBlM41jC23pz8WIGx0jpXiygkJU/ZtYX2ayhYY54+a8Nvx98pfq3n1T/IvWzh2bM3BeSGve36TZwzg76tQL3GGLyHudeZqg== cardno:0005 0000B92A 3 | -------------------------------------------------------------------------------- /ansible/requirements.yml: -------------------------------------------------------------------------------- 1 | --- 2 | collections: 3 | - name: prometheus.prometheus 4 | source: https://github.com/prometheus-community/ansible 5 | type: git 6 | roles: 7 | - name: caddy_ansible.caddy_ansible 8 | - name: fosdem.bind 9 | src: https://github.com/FOSDEM/ansible-bind.git 10 | type: git 11 | - name: paulfantom.restic 12 | src: https://github.com/SuperQ/ansible-restic.git 13 | type: git 14 | - name: rsyslog 15 | src: https://github.com/Oefenweb/ansible-rsyslog.git 16 | type: git 17 | -------------------------------------------------------------------------------- /resources/video/generate-video-dns.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | INV='inventory.csv' 6 | PREFIX_LEN='19' 7 | 8 | main() { 9 | generate_zonefile_header 10 | 11 | echo ";;" 12 | echo 13 | 14 | fgrep ':' $INV | while read line; do 15 | host=$(echo $line | cut -d, -f2) 16 | ipv4=$(echo $line | cut -d, -f4) 17 | echo "$host IN A $ipv4" 18 | done 19 | } 20 | 21 | 22 | generate_zonefile_header() { 23 | echo '$TTL 3600' 24 | echo "@ IN SOA ns0.conference.fosdem.net. hostmaster.conference.fosdem.net. (" 25 | echo " $(generate_serial) ; serial (seconds since epoch)" 26 | echo " 600 ; refresh" 27 | echo " 300 ; retry" 28 | echo " 604800 ; expire" 29 | echo " 3600 ; default_ttl" 30 | echo " )" 31 | echo "@ IN NS ns0.conference.fosdem.net." 
32 | echo "; @ IN NS ns0.conference.fosdem.net." 33 | } 34 | 35 | generate_serial() { 36 | date +%s 37 | } 38 | 39 | 40 | main 41 | -------------------------------------------------------------------------------- /resources/video/printers.csv: -------------------------------------------------------------------------------- 1 | mac,hostname,IP 2 | 00:1b:a9:3b:b3:90,printer1,151.216.190.1 3 | 00:80:77:ec:2e:7c,printer2,151.216.190.2 4 | 00:80:77:ec:2e:61,printer3,151.216.190.3 5 | 00:80:77:ec:2e:75,printer4,151.216.190.4 6 | -------------------------------------------------------------------------------- /syslog.md: -------------------------------------------------------------------------------- 1 | # Syslog 2 | 3 | 4 | ## Syslog setup 5 | 6 | We have an `rsyslog` instance running on `server001`. All network-team managed 7 | network devices are configured to log to it. 8 | 9 | Docker containers running on `server002` are configured using the Docker syslog 10 | driver to push to the `rsyslog` instance on `server001`. 11 | 12 | ## Facilities 13 | 14 | - `local7`: `/var/log/rsyslog/network-combined` 15 | - Syslog from all network-team managed network devices. 16 | - `local6`: `/var/log/rsyslog/tacacs-combined` 17 | - TACACS accounting from all network-team managed devices (what commands have been run) 18 | - `local5`: `/var/log/rsyslog/video-combined` 19 | - Reserved for video device syslogs 20 | - `local4`: `/var/log/rsyslog/applications-combined` 21 | - Reserved for application containers (prometheus, grafana, etc etc) 22 | --------------------------------------------------------------------------------